v0.14.0
elastic.cpp
1 /**
2  * @file elastic.cpp
3  * @brief elastic example
4  * @version 0.13.2
5  * @date 2022-09-19
6  *
7  * @copyright Copyright (c) 2022
8  *
9  */
10 
11 #include <MoFEM.hpp>
12 
13 using namespace MoFEM;
14 
15 constexpr int BASE_DIM = 1; //< Dimension of the base functions
16 
17 //! [Define dimension]
18 constexpr int SPACE_DIM =
19  EXECUTABLE_DIMENSION; //< Space dimension of problem, mesh
20 //! [Define dimension]
21 constexpr AssemblyType A = (SCHUR_ASSEMBLE)
22  ? AssemblyType::BLOCK_SCHUR
23  : AssemblyType::PETSC; //< selected assembly type
24 
25 constexpr IntegrationType I =
26  IntegrationType::GAUSS; //< selected integration type
27 
28 //! [Define entities]
29 using EntData = EntitiesFieldData::EntData;
30 using DomainEle = PipelineManager::ElementsAndOpsByDim<SPACE_DIM>::DomainEle;
31 using BoundaryEle =
32  PipelineManager::ElementsAndOpsByDim<SPACE_DIM>::BoundaryEle;
33 using DomainEleOp = DomainEle::UserDataOperator;
34 using BoundaryEleOp = BoundaryEle::UserDataOperator;
35 //! [Define entities]
36 
37 //! [OpK]
38 using OpK = FormsIntegrators<DomainEleOp>::Assembly<A>::BiLinearForm<
39  I>::OpGradSymTensorGrad<BASE_DIM, SPACE_DIM, SPACE_DIM, 0>;
40 //! [OpK]
41 //! [OpInternalForce]
42 using OpInternalForce = FormsIntegrators<DomainEleOp>::Assembly<A>::LinearForm<
43  I>::OpGradTimesSymTensor<BASE_DIM, SPACE_DIM, SPACE_DIM>;
44 //! [OpInternalForce]
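// OpK assembles the bilinear form (sym grad v) : D : (sym grad u), i.e. the linear
// elastic stiffness matrix, while OpInternalForce assembles the linear form
// (sym grad v) : sigma used for the internal-force (residual) vector; both are
// generic MoFEM form integrators specialised by assembly type A and integration type I.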
45 struct DomainBCs {};
46 struct BoundaryBCs {};
47 
48 using DomainRhsBCs = NaturalBC<DomainEleOp>::Assembly<A>::LinearForm<I>;
49 using OpDomainRhsBCs = DomainRhsBCs::OpFlux<DomainBCs, 1, SPACE_DIM>;
50 using BoundaryRhsBCs = NaturalBC<BoundaryEleOp>::Assembly<A>::LinearForm<I>;
51 using OpBoundaryRhsBCs = BoundaryRhsBCs::OpFlux<BoundaryBCs, 1, SPACE_DIM>;
52 using BoundaryLhsBCs = NaturalBC<BoundaryEleOp>::Assembly<A>::BiLinearForm<I>;
53 using OpBoundaryLhsBCs = BoundaryLhsBCs::OpFlux<BoundaryBCs, 1, SPACE_DIM>;
54 
55 template <int DIM> struct PostProcEleByDim;
56 
57 template <> struct PostProcEleByDim<2> {
58  using PostProcEleDomain = PostProcBrokenMeshInMoabBaseCont<DomainEle>;
59  using PostProcEleBdy = PostProcBrokenMeshInMoabBaseCont<BoundaryEle>;
60  using SideEle = PipelineManager::ElementsAndOpsByDim<2>::FaceSideEle;
61 };
62 
63 template <> struct PostProcEleByDim<3> {
64  using PostProcEleDomain = PostProcBrokenMeshInMoabBaseCont<DomainEle>;
65  using PostProcEleBdy = PostProcBrokenMeshInMoabBaseCont<BoundaryEle>;
66  using SideEle = PipelineManager::ElementsAndOpsByDim<3>::FaceSideEle;
67 };
68 
69 using PostProcEleDomain = PostProcEleByDim<SPACE_DIM>::PostProcEleDomain;
70 using SideEle = PostProcEleByDim<SPACE_DIM>::SideEle;
71 using PostProcEleBdy = PostProcEleByDim<SPACE_DIM>::PostProcEleBdy;
72 
73 #include <ElasticSpring.hpp>
74 #include <FluidLevel.hpp>
75 #include <CalculateTraction.hpp>
76 #include <NaturalDomainBC.hpp>
77 #include <NaturalBoundaryBC.hpp>
78 
79 constexpr double young_modulus = 1;
80 constexpr double poisson_ratio = 0.3;
81 constexpr double bulk_modulus_K = young_modulus / (3 * (1 - 2 * poisson_ratio));
82 constexpr double shear_modulus_G = young_modulus / (2 * (1 + poisson_ratio));
83 
84 PetscBool is_plane_strain = PETSC_FALSE;
85 
86 struct Example {
87 
88  Example(MoFEM::Interface &m_field) : mField(m_field) {}
89 
90  MoFEMErrorCode runProblem();
91 
92 private:
93  MoFEM::Interface &mField;
94 
95  PetscBool doEvalField;
96  std::array<double, SPACE_DIM> fieldEvalCoords;
97  boost::shared_ptr<FieldEvaluatorInterface::SetPtsData> fieldEvalData;
98  boost::shared_ptr<MatrixDouble> vectorFieldPtr;
99 
100  MoFEMErrorCode readMesh();
101  MoFEMErrorCode setupProblem();
102  MoFEMErrorCode boundaryCondition();
103  MoFEMErrorCode assembleSystem();
104  MoFEMErrorCode solveSystem();
105  MoFEMErrorCode outputResults();
106  MoFEMErrorCode checkResults();
107 
108  MoFEMErrorCode addMatBlockOps(
109  boost::ptr_deque<ForcesAndSourcesCore::UserDataOperator> &pipeline,
110  std::string field_name, std::string block_name,
111  boost::shared_ptr<MatrixDouble> mat_D_Ptr, Sev sev);
112 };
113 
114 MoFEMErrorCode Example::addMatBlockOps(
115  boost::ptr_deque<ForcesAndSourcesCore::UserDataOperator> &pipeline,
116  std::string field_name, std::string block_name,
117  boost::shared_ptr<MatrixDouble> mat_D_Ptr, Sev sev) {
118  MoFEMFunctionBegin;
119 
120  struct OpMatBlocks : public DomainEleOp {
121  OpMatBlocks(std::string field_name, boost::shared_ptr<MatrixDouble> m,
122  double bulk_modulus_K, double shear_modulus_G,
123  MoFEM::Interface &m_field, Sev sev,
124  std::vector<const CubitMeshSets *> meshset_vec_ptr)
125  : DomainEleOp(field_name, DomainEleOp::OPROW), matDPtr(m),
126  bulkModulusKDefault(bulk_modulus_K),
127  shearModulusGDefault(shear_modulus_G) {
128  std::fill(&(doEntities[MBEDGE]), &(doEntities[MBMAXTYPE]), false);
129  CHK_THROW_MESSAGE(extractBlockData(m_field, meshset_vec_ptr, sev),
130  "Can not get data from block");
131  }
132 
133  MoFEMErrorCode doWork(int side, EntityType type,
134  EntitiesFieldData::EntData &data) {
135  MoFEMFunctionBegin;
136 
137  for (auto &b : blockData) {
138 
139  if (b.blockEnts.find(getFEEntityHandle()) != b.blockEnts.end()) {
140  CHKERR getMatDPtr(matDPtr, b.bulkModulusK, b.shearModulusG);
141  MoFEMFunctionReturnHot(0);
142  }
143  }
144 
145  CHKERR getMatDPtr(matDPtr, bulkModulusKDefault, shearModulusGDefault);
146  MoFEMFunctionReturn(0);
147  }
148 
149  private:
150  boost::shared_ptr<MatrixDouble> matDPtr;
151 
152  struct BlockData {
153  double bulkModulusK;
154  double shearModulusG;
155  Range blockEnts;
156  };
157 
158  double bulkModulusKDefault;
159  double shearModulusGDefault;
160  std::vector<BlockData> blockData;
161 
162  MoFEMErrorCode
163  extractBlockData(MoFEM::Interface &m_field,
164  std::vector<const CubitMeshSets *> meshset_vec_ptr,
165  Sev sev) {
166  MoFEMFunctionBegin;
167 
168  for (auto m : meshset_vec_ptr) {
169  MOFEM_TAG_AND_LOG("WORLD", sev, "MatBlock") << *m;
170  std::vector<double> block_data;
171  CHKERR m->getAttributes(block_data);
172  if (block_data.size() < 2) {
173  SETERRQ(PETSC_COMM_SELF, MOFEM_DATA_INCONSISTENCY,
174  "Expected that block has two attributes");
175  }
176  auto get_block_ents = [&]() {
177  Range ents;
178  CHKERR
179  m_field.get_moab().get_entities_by_handle(m->meshset, ents, true);
180  return ents;
181  };
182 
183  double young_modulus = block_data[0];
184  double poisson_ratio = block_data[1];
185  double bulk_modulus_K = young_modulus / (3 * (1 - 2 * poisson_ratio));
186  double shear_modulus_G = young_modulus / (2 * (1 + poisson_ratio));
187 
188  MOFEM_TAG_AND_LOG("WORLD", sev, "MatBlock")
189  << "E = " << young_modulus << " nu = " << poisson_ratio;
190 
191  blockData.push_back(
192  {bulk_modulus_K, shear_modulus_G, get_block_ents()});
193  }
194  MOFEM_LOG_CHANNEL("WORLD");
195  MoFEMFunctionReturn(0);
196  }
197 
198  MoFEMErrorCode getMatDPtr(boost::shared_ptr<MatrixDouble> mat_D_ptr,
199  double bulk_modulus_K, double shear_modulus_G) {
200  MoFEMFunctionBegin;
201  //! [Calculate elasticity tensor]
202  auto set_material_stiffness = [&]() {
203  FTensor::Index<'i', SPACE_DIM> i;
204  FTensor::Index<'j', SPACE_DIM> j;
205  FTensor::Index<'k', SPACE_DIM> k;
206  FTensor::Index<'l', SPACE_DIM> l;
207  constexpr auto t_kd = FTensor::Kronecker_Delta_symmetric<int>();
208  double A = 1.;
209  if (SPACE_DIM == 2 && !is_plane_strain) {
210  A = 2 * shear_modulus_G /
211  (bulk_modulus_K + (4. / 3.) * shear_modulus_G);
212  }
213  auto t_D = getFTensor4DdgFromMat<SPACE_DIM, SPACE_DIM, 0>(*mat_D_ptr);
214  t_D(i, j, k, l) =
215  2 * shear_modulus_G * ((t_kd(i, k) ^ t_kd(j, l)) / 4.) +
216  A * (bulk_modulus_K - (2. / 3.) * shear_modulus_G) * t_kd(i, j) *
217  t_kd(k, l);
218  };
219  //! [Calculate elasticity tensor]
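// set_material_stiffness fills D_ijkl = 2 G sym(d_ik d_jl) + A (K - 2/3 G) d_ij d_kl,
// with K the bulk and G the shear modulus. For SPACE_DIM == 2 without -plane_strain
// (i.e. plane stress) the volumetric term is scaled by A = 2 G / (K + 4/3 G);
// otherwise A = 1 (3D or plane strain).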
220  constexpr auto size_symm = (SPACE_DIM * (SPACE_DIM + 1)) / 2;
221  mat_D_ptr->resize(size_symm * size_symm, 1);
222  set_material_stiffness();
223  MoFEMFunctionReturn(0);
224  }
225  };
226 
227  pipeline.push_back(new OpMatBlocks(
228  field_name, mat_D_Ptr, bulk_modulus_K, shear_modulus_G, mField, sev,
229 
230  // Get blockset using regular expression
231  mField.getInterface<MeshsetsManager>()->getCubitMeshsetPtr(std::regex(
232 
233  (boost::format("%s(.*)") % block_name).str()
234 
235  ))
236 
237  ));
238 
239  MoFEMFunctionReturn(0);
240 }
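// addMatBlockOps scans blocksets whose name matches "<block_name>(.*)" (here
// MAT_ELASTIC), reads their (Young's modulus, Poisson ratio) attributes, converts
// them to bulk/shear moduli, and pushes OpMatBlocks so that entities belonging to a
// block get that block's elasticity tensor, while all other entities fall back to
// the default material defined above.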
241 
242 //! [Run problem]
243 MoFEMErrorCode Example::runProblem() {
244  MoFEMFunctionBegin;
245  CHKERR readMesh();
246  CHKERR setupProblem();
247  CHKERR boundaryCondition();
248  CHKERR assembleSystem();
249  CHKERR solveSystem();
250  CHKERR outputResults();
251  CHKERR checkResults();
252  MoFEMFunctionReturn(0);
253 }
254 //! [Run problem]
255 
256 //! [Read mesh]
257 MoFEMErrorCode Example::readMesh() {
258  MoFEMFunctionBegin;
259  auto simple = mField.getInterface<Simple>();
260  CHKERR simple->getOptions();
261  CHKERR simple->loadFile();
262  MoFEMFunctionReturn(0);
263 }
264 //! [Read mesh]
265 
266 //! [Set up problem]
267 MoFEMErrorCode Example::setupProblem() {
268  MoFEMFunctionBegin;
269  Simple *simple = mField.getInterface<Simple>();
270 
271  enum bases { AINSWORTH, DEMKOWICZ, LASBASETOPT };
272  const char *list_bases[LASBASETOPT] = {"ainsworth", "demkowicz"};
273  PetscInt choice_base_value = AINSWORTH;
274  CHKERR PetscOptionsGetEList(PETSC_NULL, NULL, "-base", list_bases,
275  LASBASETOPT, &choice_base_value, PETSC_NULL);
276 
277  FieldApproximationBase base;
278  switch (choice_base_value) {
279  case AINSWORTH:
280  base = AINSWORTH_LEGENDRE_BASE;
281  MOFEM_LOG("WORLD", Sev::inform)
282  << "Set AINSWORTH_LEGENDRE_BASE for displacements";
283  break;
284  case DEMKOWICZ:
285  base = DEMKOWICZ_JACOBI_BASE;
286  MOFEM_LOG("WORLD", Sev::inform)
287  << "Set DEMKOWICZ_JACOBI_BASE for displacements";
288  break;
289  default:
290  base = LASTBASE;
291  break;
292  }
293 
294  // Add field
295  CHKERR simple->addDomainField("U", H1, base, SPACE_DIM);
296  CHKERR simple->addBoundaryField("U", H1, base, SPACE_DIM);
297  int order = 3;
298  CHKERR PetscOptionsGetInt(PETSC_NULL, "", "-order", &order, PETSC_NULL);
299 
300  CHKERR simple->addDataField("GEOMETRY", H1, base, SPACE_DIM);
301 
302  CHKERR simple->setFieldOrder("U", order);
303  CHKERR simple->setFieldOrder("GEOMETRY", 2);
304  CHKERR simple->setUp();
305 
306  auto project_ho_geometry = [&]() {
307  Projection10NodeCoordsOnField ent_method(mField, "GEOMETRY");
308  return mField.loop_dofs("GEOMETRY", ent_method);
309  };
310  CHKERR project_ho_geometry();
311 
312  CHKERR PetscOptionsGetBool(PETSC_NULL, "", "-plane_strain", &is_plane_strain,
313  PETSC_NULL);
314 
315  int coords_dim = SPACE_DIM;
316  CHKERR PetscOptionsGetRealArray(NULL, NULL, "-field_eval_coords",
317  fieldEvalCoords.data(), &coords_dim,
318  &doEvalField);
319 
320  if (doEvalField) {
321  vectorFieldPtr = boost::make_shared<MatrixDouble>();
322  fieldEvalData =
323  mField.getInterface<FieldEvaluatorInterface>()->getData<DomainEle>();
324 
325  if constexpr (SPACE_DIM == 3) {
326  CHKERR mField.getInterface<FieldEvaluatorInterface>()->buildTree3D(
327  fieldEvalData, simple->getDomainFEName());
328  } else {
329  CHKERR mField.getInterface<FieldEvaluatorInterface>()->buildTree2D(
330  fieldEvalData, simple->getDomainFEName());
331  }
332 
333  fieldEvalData->setEvalPoints(fieldEvalCoords.data(), 1);
334  auto no_rule = [](int, int, int) { return -1; };
335  auto field_eval_fe_ptr = fieldEvalData->feMethodPtr.lock();
336  field_eval_fe_ptr->getRuleHook = no_rule;
337 
338  field_eval_fe_ptr->getOpPtrVector().push_back(
339  new OpCalculateVectorFieldValues<SPACE_DIM>("U", vectorFieldPtr));
340  }
341 
342  MoFEMFunctionReturn(0);
343 }
344 //! [Set up problem]
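// The options parsed above are set on the command line; for example (the executable
// and mesh names below are only illustrative, not fixed by this file):
//   ./elastic_2d -file_name plate.h5m -order 2 -base demkowicz \
//                -plane_strain -field_eval_coords 0.5,0.5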
345 
346 //! [Boundary condition]
347 MoFEMErrorCode Example::boundaryCondition() {
348  MoFEMFunctionBegin;
349  auto pip = mField.getInterface<PipelineManager>();
350  auto simple = mField.getInterface<Simple>();
351  auto bc_mng = mField.getInterface<BcManager>();
352 
353  CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(), "REMOVE_X",
354  "U", 0, 0);
355  CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(), "REMOVE_Y",
356  "U", 1, 1);
357  CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(), "REMOVE_Z",
358  "U", 2, 2);
359  CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(),
360  "REMOVE_ALL", "U", 0, 3);
361  CHKERR bc_mng->pushMarkDOFsOnEntities<DisplacementCubitBcData>(
362  simple->getProblemName(), "U");
363 
364  // adding MPCs
365  CHKERR bc_mng->addBlockDOFsToMPCs(simple->getProblemName(), "U");
366 
367  MoFEMFunctionReturn(0);
368 }
369 //! [Boundary condition]
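// Essential conditions are driven by mesh blocksets: REMOVE_X/Y/Z and REMOVE_ALL
// remove the corresponding displacement DOFs from the problem, Cubit DISPLACEMENT
// sets are marked for the essential-BC pre/post-processing hooks used in
// solveSystem, and MPC blocks add multi-point constraints on "U".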
370 
371 //! [Push operators to pipeline]
372 MoFEMErrorCode Example::assembleSystem() {
373  MoFEMFunctionBegin;
374  auto pip = mField.getInterface<PipelineManager>();
375  auto simple = mField.getInterface<Simple>();
376  auto bc_mng = mField.getInterface<BcManager>();
377 
378  //! [Integration rule]
379  auto integration_rule = [](int, int, int approx_order) {
380  return 2 * approx_order + 1;
381  };
382 
383  auto integration_rule_bc = [](int, int, int approx_order) {
384  return 2 * approx_order + 1;
385  };
386 
387  CHKERR pip->setDomainRhsIntegrationRule(integration_rule);
388  CHKERR pip->setDomainLhsIntegrationRule(integration_rule);
389  CHKERR pip->setBoundaryRhsIntegrationRule(integration_rule_bc);
390  CHKERR pip->setBoundaryLhsIntegrationRule(integration_rule_bc);
391  //! [Integration rule]
392 
393  CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(
394  pip->getOpDomainLhsPipeline(), {H1}, "GEOMETRY");
395  CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(
396  pip->getOpDomainRhsPipeline(), {H1}, "GEOMETRY");
397  CHKERR AddHOOps<SPACE_DIM - 1, SPACE_DIM, SPACE_DIM>::add(
398  pip->getOpBoundaryRhsPipeline(), {NOSPACE}, "GEOMETRY");
399  CHKERR AddHOOps<SPACE_DIM - 1, SPACE_DIM, SPACE_DIM>::add(
400  pip->getOpBoundaryLhsPipeline(), {NOSPACE}, "GEOMETRY");
401 
402  //! [Push domain stiffness matrix to pipeline]
403  auto mat_D_ptr = boost::make_shared<MatrixDouble>();
404 
405  // Assemble domain stiffness matrix
406  CHKERR addMatBlockOps(pip->getOpDomainLhsPipeline(), "U", "MAT_ELASTIC",
407  mat_D_ptr, Sev::verbose);
408  pip->getOpDomainLhsPipeline().push_back(new OpK("U", "U", mat_D_ptr));
409  //! [Push domain stiffness matrix to pipeline]
410 
411  //! [Push Internal forces]
412  auto mat_grad_ptr = boost::make_shared<MatrixDouble>();
413  auto mat_strain_ptr = boost::make_shared<MatrixDouble>();
414  auto mat_stress_ptr = boost::make_shared<MatrixDouble>();
415  pip->getOpDomainRhsPipeline().push_back(
416  new OpCalculateVectorFieldGradient<SPACE_DIM, SPACE_DIM>("U",
417  mat_grad_ptr));
418  CHKERR addMatBlockOps(pip->getOpDomainRhsPipeline(), "U", "MAT_ELASTIC",
419  mat_D_ptr, Sev::inform);
420  pip->getOpDomainRhsPipeline().push_back(
421  new OpSymmetrizeTensor<SPACE_DIM>(mat_grad_ptr, mat_strain_ptr));
422  pip->getOpDomainRhsPipeline().push_back(
423  new OpTensorTimesSymmetricTensor<SPACE_DIM, SPACE_DIM>(
424  mat_strain_ptr, mat_stress_ptr, mat_D_ptr));
425 
426  pip->getOpDomainRhsPipeline().push_back(
427  new OpInternalForce("U", mat_stress_ptr,
428  [](double, double, double) constexpr { return -1; }));
429  //! [Push Internal forces]
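// The constant -1 scale passed to OpInternalForce assembles -f_int into the
// right-hand side, so the solve below effectively computes K u = f_ext - f_int(u0),
// i.e. a single Newton step, which is exact for this linear problem.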
430 
431  //! [Push Body forces]
432  CHKERR DomainRhsBCs::AddFluxToPipeline<OpDomainRhsBCs>::add(
433  pip->getOpDomainRhsPipeline(), mField, "U", Sev::inform);
434  //! [Push Body forces]
435 
436  //! [Push natural boundary conditions]
437  // Add force boundary condition
438  CHKERR BoundaryRhsBCs::AddFluxToPipeline<OpBoundaryRhsBCs>::add(
439  pip->getOpBoundaryRhsPipeline(), mField, "U", -1, Sev::inform);
440  // Add case for mixed type of BCs
441  CHKERR BoundaryLhsBCs::AddFluxToPipeline<OpBoundaryLhsBCs>::add(
442  pip->getOpBoundaryLhsPipeline(), mField, "U", Sev::verbose);
443  //! [Push natural boundary conditions]
444  MoFEMFunctionReturn(0);
445 }
446 //! [Push operators to pipeline]
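// At this point the LHS pipeline carries the stiffness operator OpK and the RHS
// pipeline carries internal forces, body forces (DomainBCs) and natural boundary
// fluxes (BoundaryBCs), e.g. forces, pressures, elastic springs and fluid-level
// loads implemented in the included ElasticSpring.hpp and FluidLevel.hpp headers.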
447 
448 struct SetUpSchur {
449  static boost::shared_ptr<SetUpSchur>
450  createSetUpSchur(MoFEM::Interface &m_field);
451  virtual MoFEMErrorCode setUp(SmartPetscObj<KSP> solver) = 0;
452 
453 protected:
454  SetUpSchur() = default;
455 };
456 
457 //! [Solve]
458 MoFEMErrorCode Example::solveSystem() {
459  MoFEMFunctionBegin;
460  auto simple = mField.getInterface<Simple>();
461  auto pip = mField.getInterface<PipelineManager>();
462  auto solver = pip->createKSP();
463  CHKERR KSPSetFromOptions(solver);
464 
465  auto dm = simple->getDM();
466  auto D = createDMVector(dm);
467  auto F = vectorDuplicate(D);
468 
469  auto set_essential_bc = [&]() {
470  MoFEMFunctionBegin;
471  // This is low-level pushing of finite elements (pipelines) to the solver
472  auto ksp_ctx_ptr = getDMKspCtx(dm);
473 
474  auto pre_proc_rhs = boost::make_shared<FEMethod>();
475  auto post_proc_rhs = boost::make_shared<FEMethod>();
476  auto post_proc_lhs = boost::make_shared<FEMethod>();
477 
478  auto get_pre_proc_hook = [&]() {
479  return EssentialPreProc<DisplacementCubitBcData>(mField, pre_proc_rhs,
480  {});
481  };
482  pre_proc_rhs->preProcessHook = get_pre_proc_hook();
483 
484  auto get_post_proc_hook_rhs = [this, post_proc_rhs]() {
485  MoFEMFunctionBegin;
486 
487  CHKERR EssentialPostProcRhs<DisplacementCubitBcData>(mField,
488  post_proc_rhs, 1.)();
489  CHKERR EssentialPostProcRhs<MPCsType>(mField, post_proc_rhs, 1.)();
490  MoFEMFunctionReturn(0);
491  };
492 
493  auto get_post_proc_hook_lhs = [this, post_proc_lhs]() {
494  MoFEMFunctionBegin;
495 
496  CHKERR EssentialPostProcLhs<DisplacementCubitBcData>(mField,
497  post_proc_lhs, 1.)();
498  CHKERR EssentialPostProcLhs<MPCsType>(mField, post_proc_lhs)();
499  MoFEMFunctionReturn(0);
500  };
501 
502  post_proc_rhs->postProcessHook = get_post_proc_hook_rhs;
503  post_proc_lhs->postProcessHook = get_post_proc_hook_lhs;
504 
505  ksp_ctx_ptr->getPreProcComputeRhs().push_front(pre_proc_rhs);
506  ksp_ctx_ptr->getPostProcComputeRhs().push_back(post_proc_rhs);
507  ksp_ctx_ptr->getPostProcSetOperators().push_back(post_proc_lhs);
508  MoFEMFunctionReturn(0);
509  };
510 
511  auto setup_and_solve = [&]() {
512  MoFEMFunctionBegin;
513  BOOST_LOG_SCOPED_THREAD_ATTR("Timeline", attrs::timer());
514  MOFEM_LOG("TIMER", Sev::inform) << "KSPSetUp";
515  CHKERR KSPSetUp(solver);
516  MOFEM_LOG("TIMER", Sev::inform) << "KSPSetUp <= Done";
517  MOFEM_LOG("TIMER", Sev::inform) << "KSPSolve";
518  CHKERR KSPSolve(solver, F, D);
519  MOFEM_LOG("TIMER", Sev::inform) << "KSPSolve <= Done";
520  MoFEMFunctionReturn(0);
521  };
522 
523  MOFEM_LOG_CHANNEL("TIMER");
524  MOFEM_LOG_TAG("TIMER", "timer");
525 
526  CHKERR set_essential_bc();
527 
528  if constexpr (A == AssemblyType::BLOCK_SCHUR) {
529  auto schur_ptr = SetUpSchur::createSetUpSchur(mField);
530  CHKERR schur_ptr->setUp(solver);
531  CHKERR setup_and_solve();
532  } else {
533  CHKERR setup_and_solve();
534  }
535 
536  CHKERR VecGhostUpdateBegin(D, INSERT_VALUES, SCATTER_FORWARD);
537  CHKERR VecGhostUpdateEnd(D, INSERT_VALUES, SCATTER_FORWARD);
538  CHKERR DMoFEMMeshToLocalVector(dm, D, INSERT_VALUES, SCATTER_REVERSE);
539 
540  if (doEvalField) {
541  if constexpr (SPACE_DIM == 3) {
542  CHKERR mField.getInterface<FieldEvaluatorInterface>()->evalFEAtThePoint3D(
543  fieldEvalCoords.data(), 1e-12, simple->getProblemName(),
544  simple->getDomainFEName(), fieldEvalData, mField.get_comm_rank(),
545  mField.get_comm_rank(), nullptr, MF_EXIST, QUIET);
546  } else {
547  CHKERR mField.getInterface<FieldEvaluatorInterface>()->evalFEAtThePoint2D(
548  fieldEvalCoords.data(), 1e-12, simple->getProblemName(),
549  simple->getDomainFEName(), fieldEvalData, mField.get_comm_rank(),
550  mField.get_comm_rank(), nullptr, MF_EXIST, QUIET);
551  }
552 
553  if (vectorFieldPtr->size1()) {
554  auto t_disp = getFTensor1FromMat<SPACE_DIM>(*vectorFieldPtr);
555  if constexpr (SPACE_DIM == 2)
556  MOFEM_LOG("FieldEvaluator", Sev::inform)
557  << "U_X: " << t_disp(0) << " U_Y: " << t_disp(1);
558  else
559  MOFEM_LOG("FieldEvaluator", Sev::inform)
560  << "U_X: " << t_disp(0) << " U_Y: " << t_disp(1)
561  << " U_Z: " << t_disp(2);
562  }
563 
564  MOFEM_LOG_SYNCHRONISE(mField.get_comm());
565  }
566 
567  MoFEMFunctionReturn(0);
568 }
569 //! [Solve]
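// When the solver PC is PCFIELDSPLIT, the SetUpSchur implementation below condenses
// the interior (volume) "U" DOFs into a Schur complement matrix S and can attach a
// multigrid-via-approximation-orders cycle to it; with any other PC the standard
// assembled operators are used.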
570 
571 //! [Postprocess results]
572 MoFEMErrorCode Example::outputResults() {
573  MoFEMFunctionBegin;
574  auto simple = mField.getInterface<Simple>();
575  auto pip = mField.getInterface<PipelineManager>();
576  auto det_ptr = boost::make_shared<VectorDouble>();
577  auto jac_ptr = boost::make_shared<MatrixDouble>();
578  auto inv_jac_ptr = boost::make_shared<MatrixDouble>();
579  //! [Postprocess clean]
580  pip->getDomainRhsFE().reset();
581  pip->getDomainLhsFE().reset();
582  pip->getBoundaryRhsFE().reset();
583  pip->getBoundaryLhsFE().reset();
584  //! [Postprocess clean]
585 
586  //! [Postprocess initialise]
587  auto post_proc_mesh = boost::make_shared<moab::Core>();
588  auto post_proc_begin = boost::make_shared<PostProcBrokenMeshInMoabBaseBegin>(
589  mField, post_proc_mesh);
590  auto post_proc_end = boost::make_shared<PostProcBrokenMeshInMoabBaseEnd>(
591  mField, post_proc_mesh);
592  //! [Postprocess initialise]
593 
594  auto calculate_stress_ops = [&](auto &pip) {
596  auto mat_grad_ptr = boost::make_shared<MatrixDouble>();
597  auto mat_strain_ptr = boost::make_shared<MatrixDouble>();
598  auto mat_stress_ptr = boost::make_shared<MatrixDouble>();
599  pip.push_back(new OpCalculateVectorFieldGradient<SPACE_DIM, SPACE_DIM>(
600  "U", mat_grad_ptr));
601  auto mat_D_ptr = boost::make_shared<MatrixDouble>();
602  CHKERR addMatBlockOps(pip, "U", "MAT_ELASTIC", mat_D_ptr, Sev::verbose);
603  pip.push_back(
604  new OpSymmetrizeTensor<SPACE_DIM>(mat_grad_ptr, mat_strain_ptr));
605  pip.push_back(new OpTensorTimesSymmetricTensor<SPACE_DIM, SPACE_DIM>(
606  mat_strain_ptr, mat_stress_ptr, mat_D_ptr));
607  auto u_ptr = boost::make_shared<MatrixDouble>();
608  pip.push_back(new OpCalculateVectorFieldValues<SPACE_DIM>("U", u_ptr));
609  auto x_ptr = boost::make_shared<MatrixDouble>();
610  pip.push_back(
611  new OpCalculateVectorFieldValues<SPACE_DIM>("GEOMETRY", x_ptr));
612  return boost::make_tuple(u_ptr, x_ptr, mat_strain_ptr, mat_stress_ptr);
613  };
614 
615  auto post_proc_domain = [&](auto post_proc_mesh) {
616  auto post_proc_fe =
617  boost::make_shared<PostProcEleDomain>(mField, post_proc_mesh);
619 
620  auto [u_ptr, x_ptr, mat_strain_ptr, mat_stress_ptr] =
621  calculate_stress_ops(post_proc_fe->getOpPtrVector());
622 
623  post_proc_fe->getOpPtrVector().push_back(
624 
625  new OpPPMap(
626 
627  post_proc_fe->getPostProcMesh(), post_proc_fe->getMapGaussPts(),
628 
629  {},
630 
631  {{"U", u_ptr}, {"GEOMETRY", x_ptr}},
632 
633  {},
634 
635  {{"STRAIN", mat_strain_ptr}, {"STRESS", mat_stress_ptr}}
636 
637  )
638 
639  );
640  return post_proc_fe;
641  };
642 
643  auto post_proc_boundary = [&](auto post_proc_mesh) {
644  auto post_proc_fe =
645  boost::make_shared<PostProcEleBdy>(mField, post_proc_mesh);
646  CHKERR AddHOOps<SPACE_DIM - 1, SPACE_DIM, SPACE_DIM>::add(
647  post_proc_fe->getOpPtrVector(), {}, "GEOMETRY");
648  auto op_loop_side =
649  new OpLoopSide<SideEle>(mField, simple->getDomainFEName(), SPACE_DIM);
650  // push ops to side element, through op_loop_side operator
651  auto [u_ptr, x_ptr, mat_strain_ptr, mat_stress_ptr] =
652  calculate_stress_ops(op_loop_side->getOpPtrVector());
653  post_proc_fe->getOpPtrVector().push_back(op_loop_side);
654  auto mat_traction_ptr = boost::make_shared<MatrixDouble>();
655  post_proc_fe->getOpPtrVector().push_back(
656  new ElasticExample::OpCalculateTraction(mat_stress_ptr,
657  mat_traction_ptr));
658 
660 
661  post_proc_fe->getOpPtrVector().push_back(
662 
663  new OpPPMap(
664 
665  post_proc_fe->getPostProcMesh(), post_proc_fe->getMapGaussPts(),
666 
667  {},
668 
669  {{"U", u_ptr}, {"GEOMETRY", x_ptr}, {"T", mat_traction_ptr}},
670 
671  {},
672 
673  {{"STRAIN", mat_strain_ptr}, {"STRESS", mat_stress_ptr}}
674 
675  )
676 
677  );
678  return post_proc_fe;
679  };
680 
681  PetscBool post_proc_skin_only = PETSC_FALSE;
682  if (SPACE_DIM == 3) {
683  post_proc_skin_only = PETSC_TRUE;
684  CHKERR PetscOptionsGetBool(PETSC_NULL, "", "-post_proc_skin_only",
685  &post_proc_skin_only, PETSC_NULL);
686  }
687  if (post_proc_skin_only == PETSC_FALSE) {
688  pip->getDomainRhsFE() = post_proc_domain(post_proc_mesh);
689  } else {
690  pip->getBoundaryRhsFE() = post_proc_boundary(post_proc_mesh);
691  }
692 
693  CHKERR DMoFEMPreProcessFiniteElements(simple->getDM(),
694  post_proc_begin->getFEMethod());
695  CHKERR pip->loopFiniteElements();
696  CHKERR DMoFEMPostProcessFiniteElements(simple->getDM(),
697  post_proc_end->getFEMethod());
698 
699  CHKERR post_proc_end->writeFile("out_elastic.h5m");
700  MoFEMFunctionReturn(0);
701 }
702 //! [Postprocess results]
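// Results (displacement, geometry, strain, stress and, on the skin, traction) are
// written to out_elastic.h5m. MOAB's mbconvert tool can typically convert the file
// for ParaView, e.g. "mbconvert out_elastic.h5m out_elastic.vtk" (assuming a
// standard MOAB installation).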
703 
704 //! [Check]
705 MoFEMErrorCode Example::checkResults() {
706  MOFEM_LOG_CHANNEL("WORLD");
707  auto simple = mField.getInterface<Simple>();
708  auto pip = mField.getInterface<PipelineManager>();
709  MoFEMFunctionBegin;
710  pip->getDomainRhsFE().reset();
711  pip->getDomainLhsFE().reset();
712  pip->getBoundaryRhsFE().reset();
713  pip->getBoundaryLhsFE().reset();
714 
715  auto integration_rule = [](int, int, int p_data) { return 2 * p_data + 1; };
716  CHKERR pip->setDomainRhsIntegrationRule(integration_rule);
717  CHKERR pip->setBoundaryRhsIntegrationRule(integration_rule);
718 
719  CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(
720  pip->getOpDomainRhsPipeline(), {H1}, "GEOMETRY");
721  CHKERR AddHOOps<SPACE_DIM - 1, SPACE_DIM, SPACE_DIM>::add(
722  pip->getOpBoundaryRhsPipeline(), {}, "GEOMETRY");
723 
724  auto mat_grad_ptr = boost::make_shared<MatrixDouble>();
725  auto mat_strain_ptr = boost::make_shared<MatrixDouble>();
726  auto mat_stress_ptr = boost::make_shared<MatrixDouble>();
727 
728  pip->getOpDomainRhsPipeline().push_back(
729  new OpCalculateVectorFieldGradient<SPACE_DIM, SPACE_DIM>("U",
730  mat_grad_ptr));
731  pip->getOpDomainRhsPipeline().push_back(
732  new OpSymmetrizeTensor<SPACE_DIM>(mat_grad_ptr, mat_strain_ptr));
733 
734  auto mat_D_ptr = boost::make_shared<MatrixDouble>();
735  CHKERR addMatBlockOps(pip->getOpDomainRhsPipeline(), "U", "MAT_ELASTIC",
736  mat_D_ptr, Sev::verbose);
737  pip->getOpDomainRhsPipeline().push_back(
738  new OpTensorTimesSymmetricTensor<SPACE_DIM, SPACE_DIM>(
739  mat_strain_ptr, mat_stress_ptr, mat_D_ptr));
740 
741  pip->getOpDomainRhsPipeline().push_back(
742  new OpInternalForce("U", mat_stress_ptr));
743 
744  pip->getOpBoundaryRhsPipeline().push_back(
745  new OpTensorTimesSymmetricTensor<SPACE_DIM, SPACE_DIM>(
746  mat_strain_ptr, mat_stress_ptr, mat_D_ptr));
747  CHKERR DomainRhsBCs::AddFluxToPipeline<OpDomainRhsBCs>::add(
748  pip->getOpDomainRhsPipeline(), mField, "U", Sev::verbose);
749  CHKERR BoundaryRhsBCs::AddFluxToPipeline<OpBoundaryRhsBCs>::add(
750  pip->getOpBoundaryRhsPipeline(), mField, "U", -1, Sev::verbose);
751 
752  auto dm = simple->getDM();
753  auto res = createDMVector(dm);
754  CHKERR VecSetDM(res, PETSC_NULL);
755 
756  pip->getDomainRhsFE()->f = res;
757  pip->getBoundaryRhsFE()->f = res;
758 
759  CHKERR VecZeroEntries(res);
760 
761  CHKERR mField.getInterface<FieldBlas>()->fieldScale(-1, "U");
762  CHKERR pip->loopFiniteElements();
763  CHKERR mField.getInterface<FieldBlas>()->fieldScale(-1, "U");
764 
765  CHKERR VecGhostUpdateBegin(res, ADD_VALUES, SCATTER_REVERSE);
766  CHKERR VecGhostUpdateEnd(res, ADD_VALUES, SCATTER_REVERSE);
767  CHKERR VecAssemblyBegin(res);
768  CHKERR VecAssemblyEnd(res);
769 
770  auto zero_residual_at_constrains = [&]() {
771  MoFEMFunctionBegin;
772  auto fe_post_proc_ptr = boost::make_shared<FEMethod>();
773  auto get_post_proc_hook_rhs = [&]() {
774  MoFEMFunctionBegin;
775  CHKERR EssentialPreProcReaction<DisplacementCubitBcData>(
776  mField, fe_post_proc_ptr, res)();
777  CHKERR EssentialPostProcRhs<DisplacementCubitBcData>(
778  mField, fe_post_proc_ptr, 0, res)();
779  CHKERR EssentialPostProcRhs<MPCsType>(mField, fe_post_proc_ptr, 0, res)();
780  MoFEMFunctionReturn(0);
781  };
782  fe_post_proc_ptr->postProcessHook = get_post_proc_hook_rhs;
783  CHKERR DMoFEMPostProcessFiniteElements(dm, fe_post_proc_ptr.get());
784  MoFEMFunctionReturn(0);
785  };
786 
787  CHKERR zero_residual_at_constrains();
788 
789  double nrm2;
790  CHKERR VecNorm(res, NORM_2, &nrm2);
791  MOFEM_LOG_CHANNEL("WORLD");
792  MOFEM_LOG_C("WORLD", Sev::inform, "residual = %3.4e\n", nrm2);
793 
794  PetscBool test = PETSC_FALSE;
795  CHKERR PetscOptionsGetBool(PETSC_NULL, "", "-test", &test, PETSC_NULL);
796  if (test == PETSC_TRUE) {
797 
798  auto post_proc_residual = [&](auto dm, auto f_res, auto out_name) {
799  MoFEMFunctionBegin;
800  auto post_proc_fe =
801  boost::make_shared<PostProcBrokenMeshInMoab<DomainEle>>(mField);
803  auto u_vec = boost::make_shared<MatrixDouble>();
804  post_proc_fe->getOpPtrVector().push_back(
805  new OpCalculateVectorFieldValues<SPACE_DIM>("U", u_vec, f_res));
806  post_proc_fe->getOpPtrVector().push_back(
807 
808  new OpPPMap(
809 
810  post_proc_fe->getPostProcMesh(), post_proc_fe->getMapGaussPts(),
811 
812  {},
813 
814  {{"RES", u_vec}},
815 
816  {}, {})
817 
818  );
819 
820  CHKERR DMoFEMLoopFiniteElements(dm, simple->getDomainFEName(),
821  post_proc_fe);
822  post_proc_fe->writeFile(out_name);
823  MoFEMFunctionReturn(0);
824  };
825 
826  CHKERR post_proc_residual(simple->getDM(), res, "res.h5m");
827 
828  constexpr double eps = 1e-8;
829  if (nrm2 > eps)
830  SETERRQ(PETSC_COMM_WORLD, MOFEM_DATA_INCONSISTENCY,
831  "Residual is not zero");
832  }
833 
834  MoFEMFunctionReturn(0);
835 }
836 //! [Check]
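// checkResults re-assembles the residual with the solved field, zeroes it at
// constrained DOFs and reports its 2-norm; with -test the run aborts if the norm
// exceeds 1e-8, and the residual field is also written to res.h5m.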
837 
838 static char help[] = "...\n\n";
839 
840 int main(int argc, char *argv[]) {
841 
842  // Initialisation of MoFEM/PETSc and MOAB data structures
843  const char param_file[] = "param_file.petsc";
844  MoFEM::Core::Initialize(&argc, &argv, param_file, help);
845 
846  auto core_log = logging::core::get();
847  core_log->add_sink(
848  LogManager::createSink(LogManager::getStrmWorld(), "TIMER"));
849 
850  core_log->add_sink(
851  LogManager::createSink(LogManager::getStrmSync(), "FieldEvaluator"));
852  LogManager::setLog("FieldEvaluator");
853  MOFEM_LOG_TAG("FieldEvaluator", "field_eval");
854 
855  try {
856 
857  //! [Register MoFEM discrete manager in PETSc]
858  DMType dm_name = "DMMOFEM";
859  CHKERR DMRegister_MoFEM(dm_name);
860  DMType dm_name_mg = "DMMOFEM_MG";
861  CHKERR DMRegister_MGViaApproxOrders(dm_name_mg);
862  //! [Register MoFEM discrete manager in PETSc]
863 
864  //! [Create MoAB]
865  moab::Core mb_instance; ///< mesh database
866  moab::Interface &moab = mb_instance; ///< mesh database interface
867  //! [Create MoAB]
868 
869  //! [Create MoFEM]
870  MoFEM::Core core(moab); ///< finite element database
871  MoFEM::Interface &m_field = core; ///< finite element database interface
872  //! [Create MoFEM]
873 
874  //! [Example]
875  Example ex(m_field);
876  CHKERR ex.runProblem();
877  //! [Example]
878  }
879  CATCH_ERRORS;
880 
881  CHKERR MoFEM::Core::Finalize();
882 }
883 
884 struct SetUpSchurImpl : public SetUpSchur {
885 
886  SetUpSchurImpl(MoFEM::Interface &m_field) : SetUpSchur(), mField(m_field) {
887  if (S) {
888  CHK_THROW_MESSAGE(
889  MOFEM_DATA_INCONSISTENCY,
890  "It is expected that the Schur matrix is not allocated. This is "
891  "possible only if the PC is set up twice");
892  }
893  }
894  virtual ~SetUpSchurImpl() = default;
895 
896  MoFEMErrorCode setUp(SmartPetscObj<KSP> solver);
897 
898 private:
899  MoFEMErrorCode setEntities();
900  MoFEMErrorCode createSubDM();
901  MoFEMErrorCode setOperator();
902  MoFEMErrorCode setPC(PC pc);
903  MoFEMErrorCode setDiagonalPC(PC pc);
904 
905  SmartPetscObj<Mat> S;
906 
907  MoFEM::Interface &mField;
908 
909  SmartPetscObj<DM> schurDM;
910  SmartPetscObj<DM> blockDM;
911  Range volEnts;
912  Range subEnts;
913 };
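// SetUpSchurImpl eliminates the volume (interior) "U" DOFs through a Schur
// complement: volEnts holds the condensed entities, subEnts the retained ones,
// schurDM/blockDM the corresponding sub-problems, and S the assembled Schur matrix
// used as the PCFIELDSPLIT preconditioner.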
914 
915 MoFEMErrorCode SetUpSchurImpl::setUp(SmartPetscObj<KSP> solver) {
916  MoFEMFunctionBegin;
917  auto simple = mField.getInterface<Simple>();
918  auto pip = mField.getInterface<PipelineManager>();
919  PC pc;
920  CHKERR KSPGetPC(solver, &pc);
921  PetscBool is_pcfs = PETSC_FALSE;
922  PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &is_pcfs);
923  if (is_pcfs) {
924  if (S) {
925  CHK_THROW_MESSAGE(
926  MOFEM_DATA_INCONSISTENCY,
927  "It is expected that the Schur matrix is not allocated. This is "
928  "possible only if the PC is set up twice");
929  }
930  CHKERR setEntities();
931  CHKERR createSubDM();
932 
933  // Add data to DM storage
934  S = createDMMatrix(schurDM);
935  CHKERR MatSetDM(S, PETSC_NULL);
936  CHKERR MatSetBlockSize(S, SPACE_DIM);
937  CHKERR MatSetOption(S, MAT_SYMMETRIC, PETSC_TRUE);
938 
939  CHKERR setOperator();
940  CHKERR setPC(pc);
941 
942  if constexpr (A == AssemblyType::BLOCK_SCHUR) {
943  // Set DM to use shell block matrix
944  DM solver_dm;
945  CHKERR KSPGetDM(solver, &solver_dm);
946  CHKERR DMSetMatType(solver_dm, MATSHELL);
947  }
948  CHKERR KSPSetUp(solver);
949  CHKERR setDiagonalPC(pc);
950 
951  } else {
952  pip->getOpBoundaryLhsPipeline().push_front(createOpSchurAssembleBegin());
953  pip->getOpBoundaryLhsPipeline().push_back(createOpSchurAssembleEnd({}, {}));
954  pip->getOpDomainLhsPipeline().push_front(createOpSchurAssembleBegin());
955  pip->getOpDomainLhsPipeline().push_back(createOpSchurAssembleEnd({}, {}));
956  }
957  MoFEMFunctionReturn(0);
958 }
959 
960 MoFEMErrorCode SetUpSchurImpl::setEntities() {
961  MoFEMFunctionBegin;
962  auto simple = mField.getInterface<Simple>();
963  CHKERR mField.get_moab().get_entities_by_dimension(simple->getMeshset(),
964  SPACE_DIM, volEnts);
965  CHKERR mField.get_moab().get_entities_by_handle(simple->getMeshset(),
966  subEnts);
967  subEnts = subtract(subEnts, volEnts);
968  MoFEMFunctionReturn(0);
969 };
970 
971 MoFEMErrorCode SetUpSchurImpl::createSubDM() {
972  MoFEMFunctionBegin;
973  auto simple = mField.getInterface<Simple>();
974 
975  auto create_dm = [&](const char *name, auto &ents, auto dm_type) {
976  auto dm = createDM(mField.get_comm(), dm_type);
977  auto create_dm_imp = [&]() {
978  MoFEMFunctionBegin;
979  CHKERR DMMoFEMCreateSubDM(dm, simple->getDM(), name);
980  CHKERR DMMoFEMSetSquareProblem(dm, PETSC_TRUE);
981  CHKERR DMMoFEMAddElement(dm, simple->getDomainFEName());
982  auto sub_ents_ptr = boost::make_shared<Range>(ents);
983  CHKERR DMMoFEMAddSubFieldRow(dm, "U", sub_ents_ptr);
984  CHKERR DMMoFEMAddSubFieldCol(dm, "U", sub_ents_ptr);
985  CHKERR DMSetUp(dm);
986  MoFEMFunctionReturn(0);
987  };
988  CHK_THROW_MESSAGE(create_dm_imp(),
989  "Error in creating schurDM. It is possible that schurDM is "
990  "already created");
991  return dm;
992  };
993 
994  schurDM = create_dm("SCHUR", subEnts, "DMMOFEM_MG");
995  blockDM = create_dm("BLOCK", volEnts, "DMMOFEM");
996 
997  if constexpr (A == AssemblyType::BLOCK_SCHUR) {
998 
999  auto get_nested_mat_data = [&]() {
1000  auto block_mat_data =
1001  createBlockMatStructure(simple->getDM(),
1002 
1003  {{
1004 
1005  simple->getDomainFEName(),
1006 
1007  {{"U", "U"}
1008 
1009  }}}
1010 
1011  );
1012 
1013  return createSchurNestedMatrixStruture(
1014 
1015  {schurDM, blockDM}, block_mat_data,
1016 
1017  {"U"}, {boost::make_shared<Range>(volEnts)}
1018 
1019  );
1020  };
1021 
1022  auto nested_mat_data = get_nested_mat_data();
1023 
1024  CHKERR DMMoFEMSetNestSchurData(simple->getDM(), nested_mat_data);
1025  }
1026 
1027  MoFEMFunctionReturn(0);
1028 }
1029 
1030 MoFEMErrorCode SetUpSchurImpl::setOperator() {
1031  MoFEMFunctionBegin;
1032  auto simple = mField.getInterface<Simple>();
1033  auto pip = mField.getInterface<PipelineManager>();
1034 
1035  // Boundary
1036  auto dm_is = getDMSubData(schurDM)->getSmartRowIs();
1037  auto ao_up = createAOMappingIS(dm_is, PETSC_NULL);
1038  pip->getOpBoundaryLhsPipeline().push_front(createOpSchurAssembleBegin());
1039  pip->getOpBoundaryLhsPipeline().push_back(createOpSchurAssembleEnd(
1040  {"U"}, {boost::make_shared<Range>(volEnts)}, ao_up, S, true, true));
1041  // Domain
1042  pip->getOpDomainLhsPipeline().push_front(createOpSchurAssembleBegin());
1043  pip->getOpDomainLhsPipeline().push_back(createOpSchurAssembleEnd(
1044  {"U"}, {boost::make_shared<Range>(volEnts)}, ao_up, S, true, true));
1045 
1046  auto pre_proc_schur_lhs_ptr = boost::make_shared<FEMethod>();
1047  auto post_proc_schur_lhs_ptr = boost::make_shared<FEMethod>();
1048 
1049  pre_proc_schur_lhs_ptr->preProcessHook = [this]() {
1050  MoFEMFunctionBegin;
1051  if (S) {
1052  CHKERR MatZeroEntries(S);
1053  }
1054  MOFEM_LOG("TIMER", Sev::inform) << "Lhs Assemble Begin";
1055  MoFEMFunctionReturn(0);
1056  };
1057 
1058  post_proc_schur_lhs_ptr->postProcessHook = [this, post_proc_schur_lhs_ptr,
1059  ao_up]() {
1060  MoFEMFunctionBegin;
1061  CHKERR MatAssemblyBegin(S, MAT_FINAL_ASSEMBLY);
1062  CHKERR MatAssemblyEnd(S, MAT_FINAL_ASSEMBLY);
1063  CHKERR EssentialPostProcLhs<DisplacementCubitBcData>(
1064  mField, post_proc_schur_lhs_ptr, 1, S, ao_up)();
1065  MOFEM_LOG("TIMER", Sev::inform) << "Lhs Assemble End";
1066  MoFEMFunctionReturn(0);
1067  };
1068 
1069  auto ksp_ctx_ptr = getDMKspCtx(simple->getDM());
1070  ksp_ctx_ptr->getPreProcSetOperators().push_front(pre_proc_schur_lhs_ptr);
1071  ksp_ctx_ptr->getPostProcSetOperators().push_back(post_proc_schur_lhs_ptr);
1072 
1073  MoFEMFunctionReturn(0);
1074 }
1075 
1076 MoFEMErrorCode SetUpSchurImpl::setPC(PC pc) {
1077  MoFEMFunctionBegin;
1078  auto simple = mField.getInterface<Simple>();
1079  SmartPetscObj<IS> vol_is;
1080  mField.getInterface<ISManager>()->isCreateProblemFieldAndRank(
1081  simple->getProblemName(), ROW, "U", 0, SPACE_DIM, vol_is, &volEnts);
1082  CHKERR PCFieldSplitSetIS(pc, NULL, vol_is);
1083  CHKERR PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_USER, S);
1084  MoFEMFunctionReturn(0);
1085 }
1086 
1087 MoFEMErrorCode SetUpSchurImpl::setDiagonalPC(PC pc) {
1088  MoFEMFunctionBegin;
1089 
1090  auto get_pc = [](auto ksp) {
1091  PC pc_raw;
1092  CHKERR KSPGetPC(ksp, &pc_raw);
1093  return SmartPetscObj<PC>(pc_raw, true); // bump reference
1094  };
1095 
1096  if constexpr (A == AssemblyType::BLOCK_SCHUR) {
1097  auto simple = mField.getInterface<Simple>();
1098  auto A = createDMBlockMat(simple->getDM());
1099  auto P = createDMNestSchurMat(simple->getDM());
1100  CHKERR PCSetOperators(pc, A, P);
1101 
1102  KSP *subksp;
1103  CHKERR PCFieldSplitSchurGetSubKSP(pc, PETSC_NULL, &subksp);
1104  CHKERR setSchurA00MatSolvePC(get_pc(subksp[0]));
1105 
1106  auto set_pc_p_mg = [](auto dm, auto pc, auto S) {
1107  MoFEMFunctionBegin;
1108  CHKERR PCSetDM(pc, dm);
1109  PetscBool same = PETSC_FALSE;
1110  PetscObjectTypeCompare((PetscObject)pc, PCMG, &same);
1111  if (same) {
1112  CHKERR PCMGSetUpViaApproxOrders(
1113  pc, createPCMGSetUpViaApproxOrdersCtx(dm, S, true));
1114  CHKERR PCSetFromOptions(pc);
1115  }
1116  MoFEMFunctionReturn(0);
1117  };
1118 
1119  CHKERR set_pc_p_mg(schurDM, get_pc(subksp[1]), S);
1120 
1121  CHKERR PetscFree(subksp);
1122  }
1123  MoFEMFunctionReturn(0);
1124 }
1125 
1126 boost::shared_ptr<SetUpSchur>
1127 SetUpSchur::createSetUpSchur(MoFEM::Interface &m_field) {
1128  return boost::shared_ptr<SetUpSchur>(new SetUpSchurImpl(m_field));
1129 }