SetUpSchurImpl.cpp
/** @file SetUpSchurImpl
 * @brief Set up the Schur complement preconditioner for the elastic problem
 * @date 2023-05-13
 *
 * @license{This project is released under the MIT License.}
 *
 */

struct SetUpSchurImpl : public EshelbianCore::SetUpSchur {

  SetUpSchurImpl(MoFEM::Interface &m_field, EshelbianCore *ep_core_ptr)
      : SetUpSchur(), mField(m_field), epCorePtr(ep_core_ptr) {}
  virtual ~SetUpSchurImpl() {}

  MoFEMErrorCode setUp(TS ts);
  MoFEMErrorCode preProc();
  MoFEMErrorCode postProc();

private:

  MoFEM::Interface &mField;
  EshelbianCore *epCorePtr;

  SmartPetscObj<Mat> S;
  SmartPetscObj<AO> aoS;

  boost::shared_ptr<std::vector<boost::weak_ptr<NumeredDofEntity>>>
      piolaZeroDofsVec; ///< DOFs on the crack surface
  boost::shared_ptr<std::vector<unsigned char>>
      piolaZeroDofsMarker; ///< Marker for crack DOFs on the surface

  // (definition of struct P_MultiGridData elided in this listing; its
  // constructor signature, P_MultiGridData(SmartPetscObj<DM> dm,
  // SmartPetscObj<PC> pc, SmartPetscObj<Mat> S), is given in the symbol
  // index below)

  boost::shared_ptr<P_MultiGridData> pMGPtr;

  auto getSchurFields() {
    std::vector<std::string> schur_field_list{epCorePtr->hybridSpatialDisp,
                                              /* second field elided in this
                                                 listing */};
    std::vector<boost::shared_ptr<Range>> dm_range_list{nullptr, nullptr};
    return std::make_pair(schur_field_list, dm_range_list);
  };

  auto getA00Fields() {
    std::vector<std::string> a00_field_list{
        // (field names elided in this listing)
    };
    std::vector<boost::shared_ptr<Range>> range_list_ptr(a00_field_list.size(),
                                                         nullptr);
    return std::make_pair(a00_field_list, range_list_ptr);
  }
};
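
// Note: getSchurFields() lists the fields kept in the Schur (interface)
// block, while getA00Fields() lists the fields condensed into the A00
// block. In the parallel Range lists a nullptr entry means the whole field
// is taken; a non-null Range restricts a field to a subset of entities.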

MoFEMErrorCode SetUpSchurImpl::setUp(TS ts) {
  MoFEMFunctionBegin;

  auto create_schur_dm = [&](SmartPetscObj<DM> &dm_sub) {
    MoFEMFunctionBegin;
    dm_sub = createDM(mField.get_comm(), "DMMOFEM_MG");
    CHKERR DMMoFEMCreateSubDM(dm_sub, epCorePtr->dmElastic, "SUB_SCHUR");
    CHKERR DMMoFEMSetSquareProblem(dm_sub, PETSC_TRUE);
    CHKERR DMMoFEMSetIsPartitioned(dm_sub, PETSC_TRUE);
    // (DMMoFEMAddElement calls elided in this listing)

    int r_idx = 0;
    auto [schur_field_list, schur_range_list] = getSchurFields();
    for (auto f : schur_field_list) {
      MOFEM_LOG("EP", Sev::inform) << "Add schur field: " << f;
      CHKERR DMMoFEMAddSubFieldRow(dm_sub, f, schur_range_list[r_idx]);
      CHKERR DMMoFEMAddSubFieldCol(dm_sub, f, schur_range_list[r_idx]);
      ++r_idx;
    }
    CHKERR DMSetUp(dm_sub);
    MoFEMFunctionReturn(0);
  };

  auto create_a00_dm = [&](SmartPetscObj<DM> &dm_sub) {
    MoFEMFunctionBegin;
    dm_sub = createDM(mField.get_comm(), "DMMOFEM");
    CHKERR DMMoFEMCreateSubDM(dm_sub, epCorePtr->dmElastic, "SUB_A00");
    CHKERR DMMoFEMSetSquareProblem(dm_sub, PETSC_TRUE);
    CHKERR DMMoFEMSetIsPartitioned(dm_sub, PETSC_TRUE);
    // (DMMoFEMAddElement calls elided in this listing)

    int r_idx = 0;
    auto [a00_field_list, a00_range_list] = getA00Fields();
    for (auto f : a00_field_list) {
      MOFEM_LOG("EP", Sev::inform) << "Add a00 field: " << f;
      CHKERR DMMoFEMAddSubFieldRow(dm_sub, f, a00_range_list[r_idx]);
      CHKERR DMMoFEMAddSubFieldCol(dm_sub, f, a00_range_list[r_idx]);
      ++r_idx;
    }
    CHKERR DMSetUp(dm_sub);
    MoFEMFunctionReturn(0);
  };
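
  // Both factories above carve square sub-problems out of dmElastic via
  // DMMoFEMCreateSubDM; the Schur sub-DM is created with the "DMMOFEM_MG"
  // type so it can carry the p-multigrid hierarchy set up later, while the
  // A00 sub-DM is a plain "DMMOFEM".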

  auto get_snes = [&](TS ts) {
    SNES snes;
    CHKERR TSGetSNES(ts, &snes);
    return snes;
  };

  auto get_ksp = [&](SNES snes) {
    KSP ksp;
    CHKERR SNESGetKSP(snes, &ksp);
    CHKERR KSPSetFromOptions(ksp);
    return ksp;
  };

  auto get_pc = [&](KSP ksp) {
    PC pc;
    CHKERR KSPGetPC(ksp, &pc);
    return pc;
  };

  auto ksp = get_ksp(get_snes(ts));
  auto pc = get_pc(ksp);

  PetscBool is_pcfs = PETSC_FALSE;
  PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &is_pcfs);
  if (is_pcfs) {

    MOFEM_LOG("EP", Sev::inform) << "SetUpSchurImpl::setUp: PCFIELDSPLIT";

    SmartPetscObj<DM> schur_dm, a00_dm;
    CHKERR create_schur_dm(schur_dm);
    CHKERR create_a00_dm(a00_dm);

    auto dm_elastic = epCorePtr->dmElastic;
    auto vol_elem_name = epCorePtr->elementVolumeName;
    auto skel_elem_name = epCorePtr->skeletonElement;
    auto contact_elem_name = epCorePtr->contactElement;
    auto natural_bc_element_name = epCorePtr->naturalBcElement;

    std::vector<std::pair<std::string, std::string>> mat_block_list = {
        // (block pairs elided in this listing)
    };

    if (epCorePtr->noStretch) {
      mat_block_list.push_back(/* pair elided in this listing */);
      mat_block_list.push_back(/* pair elided in this listing */);
      mat_block_list.push_back(/* pair elided in this listing */);
    }

    mat_block_list.push_back({epCorePtr->rotAxis, epCorePtr->rotAxis});
    mat_block_list.push_back({epCorePtr->stretchTensor, epCorePtr->rotAxis});
    mat_block_list.push_back({epCorePtr->rotAxis, epCorePtr->stretchTensor});

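    // Each mat_block_list entry is a (row-field, column-field) pair; it
    // declares which matrix blocks get allocated and assembled for the
    // given finite element when the block structure is created below.
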
    auto get_nested_mat_data = [&](auto schur_dm, auto block_dm) {
      auto block_mat_data = createBlockMatStructure(

          dm_elastic,

          {

              {vol_elem_name, mat_block_list},

              {skel_elem_name,
               {/* block pairs elided in this listing */}},

              {contact_elem_name,
               {/* block pairs elided in this listing */}},

              {natural_bc_element_name,
               {/* block pairs elided in this listing */}}

          }

      );

      auto [a00_field_list, a00_range_list] = getA00Fields();

      return createSchurNestedMatrixStruture(

          {schur_dm, a00_dm}, block_mat_data,

          a00_field_list,

          a00_range_list,

          false

      );
    };
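
    // createBlockMatStructure records the block sparsity for each element
    // and field pair; createSchurNestedMatrixStruture then splits it into a
    // 2x2 nested layout over (schur_dm, a00_dm). The trailing `false` is
    // the add_preconditioner_block flag (see its signature in the symbol
    // index below).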

    auto nested_mat_data = get_nested_mat_data(schur_dm, a00_dm);
    CHKERR DMMoFEMSetNestSchurData(epCorePtr->dmElastic, nested_mat_data);
    CHKERR DMSetMatType(epCorePtr->dmElastic, MATSHELL);

    // (creation of the operator matrix `m` and the preconditioner matrix
    // `p` elided in this listing; cf. createDMNestSchurMat and
    // createDMBlockMat in the symbol index below)

    if (std::abs(epCorePtr->alphaRho) >
        std::numeric_limits<double>::epsilon()) {
      auto swap_assemble = [](TS ts, PetscReal t, Vec u, Vec u_t, Vec utt,
                              PetscReal a, PetscReal aa, Mat A, Mat B,
                              void *ctx) {
        return TsSetI2Jacobian(ts, t, u, u_t, utt, a, aa, B, A, ctx);
      };
      auto ts_ctx_ptr = getDMTsCtx(epCorePtr->dmElastic);
      CHKERR TSSetI2Jacobian(ts, m, p, swap_assemble, ts_ctx_ptr.get());
    } else {
      auto swap_assemble = [](TS ts, PetscReal t, Vec u, Vec u_t, PetscReal a,
                              Mat A, Mat B, void *ctx) {
        return TsSetIJacobian(ts, t, u, u_t, a, B, A, ctx);
      };
      auto ts_ctx_ptr = getDMTsCtx(epCorePtr->dmElastic);
      CHKERR TSSetIJacobian(ts, m, p, swap_assemble, ts_ctx_ptr.get());
    }
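    // Both swap_assemble callbacks forward to TsSetIJacobian /
    // TsSetI2Jacobian with A and B exchanged: `m` stays a shell (nested)
    // operator while assembly is redirected into the assembled block
    // matrix `p`, which is also the preconditioning matrix below.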
    CHKERR KSPSetOperators(ksp, m, p);

    auto set_assembly = [&]() {
      MoFEMFunctionBegin;

      aoS = getDMSubData(schur_dm)->getSmartRowMap();
      S = createDMHybridisedL2Matrix(schur_dm);
      CHKERR MatSetBlockSize(S, SPACE_DIM *
                                    /* factor elided in this listing; cf.
                                       NBFACETRI_L2 in the symbol index */);
      epCorePtr->S = S;
      epCorePtr->aoS = aoS;

      auto set_assemble = [&]() {
        MoFEMFunctionBegin;
        auto schur_asmb_pre_proc_lhs = boost::make_shared<FEMethod>();
        auto schur_asmb_pre_proc_rhs = boost::make_shared<FEMethod>();

        schur_asmb_pre_proc_lhs->preProcessHook = [this]() {
          MoFEMFunctionBegin;
          CHKERR MatZeroEntries(S);
          MoFEMFunctionReturn(0);
        };

        schur_asmb_pre_proc_rhs->preProcessHook = [this,
                                                   schur_asmb_pre_proc_rhs]() {
          MoFEMFunctionBegin;
          auto prb_ptr = schur_asmb_pre_proc_rhs->problemPtr;
          auto dofs_prb = prb_ptr->getNumeredRowDofsPtr();

          auto crack_faces = epCorePtr->crackFaces;
          piolaZeroDofsVec->clear();
          CHKERR mField.getInterface<ProblemsManager>()
              ->getSideDofsOnBrokenSpaceEntities(
                  *piolaZeroDofsVec, prb_ptr->getName(), ROW,
                  epCorePtr->piolaStress, *crack_faces, SPACE_DIM, 0,
                  SPACE_DIM);

          piolaZeroDofsMarker->clear();
          piolaZeroDofsMarker->resize(dofs_prb->size(), 0);
          for (auto &dof : *piolaZeroDofsVec) {
            if (auto dof_ptr = dof.lock()) {
              auto idx = dof_ptr->getPetscLocalDofIdx();
              (*piolaZeroDofsMarker)[idx] = 1;
            }
          }
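
          // piolaZeroDofsMarker now flags, by local PETSc index, every
          // broken-space Piola stress DOF on the crack surface; the
          // weak_ptr lock skips DOFs no longer present in the problem.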
          // FIXME: This needs investigation; `true` is required for fracture
          constexpr bool hard_coded_set_bc_debug = true;
          if constexpr (hard_coded_set_bc_debug) {

            auto problem_name = schur_asmb_pre_proc_rhs->problemPtr->getName();
            auto crack_faces = epCorePtr->crackFaces;

            SmartPetscObj<IS> crack_hybrid_is;
            CHKERR epCorePtr->mField.getInterface<ISManager>()
                ->isCreateProblemFieldAndRankLocal(
                    problem_name, ROW, epCorePtr->hybridSpatialDisp, 0,
                    SPACE_DIM, crack_hybrid_is, &*crack_faces);

            SmartPetscObj<IS> crack_piola_is;
            CHKERR epCorePtr->mField.getInterface<ISManager>()
                ->isCreateProblemBrokenFieldAndRankLocal(*piolaZeroDofsVec,
                                                         crack_piola_is);

            const double *a_x;
            CHKERR VecGetArrayRead(schur_asmb_pre_proc_rhs->x, &a_x);
            auto zero_by_is = [&](auto is) {
              MoFEMFunctionBegin;
              const PetscInt *is_array;
              PetscInt is_size;
              CHKERR ISGetLocalSize(is, &is_size);
              CHKERR ISGetIndices(is, &is_array);
              for (int i = 0; i != is_size; ++i) {
                // FIXME: Casting away const on a read-only array is
                // irregular; investigate whether this is needed
                const_cast<double *>(a_x)[is_array[i]] = 0;
              }
              CHKERR ISRestoreIndices(is, &is_array);
              MoFEMFunctionReturn(0);
            };

            CHKERR zero_by_is(crack_hybrid_is);
            CHKERR zero_by_is(crack_piola_is);

            CHKERR VecRestoreArrayRead(schur_asmb_pre_proc_rhs->x, &a_x);

            CHKERR epCorePtr->mField.getInterface<VecManager>()
                ->setLocalGhostVector(problem_name, COL,
                                      schur_asmb_pre_proc_rhs->x, INSERT_VALUES,
                                      SCATTER_REVERSE);
          }
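
          // The block above zeroes the current solution on the crack hybrid
          // displacement and Piola stress DOFs and scatters the result back
          // to the field data, so the residual is evaluated with
          // homogeneous values on the crack surface.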

          MoFEMFunctionReturn(0);
        };

        auto schur_asmb_post_proc_lhs = boost::make_shared<FEMethod>();
        auto schur_asmb_post_proc_rhs = boost::make_shared<FEMethod>();

        schur_asmb_post_proc_rhs->postProcessHook =
            [this, schur_asmb_post_proc_rhs]() {
              MoFEMFunctionBegin;

              CHKERR VecGhostUpdateBegin(schur_asmb_post_proc_rhs->f,
                                         ADD_VALUES, SCATTER_REVERSE);
              CHKERR VecGhostUpdateEnd(schur_asmb_post_proc_rhs->f, ADD_VALUES,
                                       SCATTER_REVERSE);
              CHKERR VecAssemblyBegin(schur_asmb_post_proc_rhs->f);
              CHKERR VecAssemblyEnd(schur_asmb_post_proc_rhs->f);
              *(schur_asmb_post_proc_rhs->vecAssembleSwitch) = false;

              {

                auto problem_name =
                    schur_asmb_post_proc_rhs->problemPtr->getName();

                auto crack_faces = epCorePtr->crackFaces;

                SmartPetscObj<IS> crack_hybrid_is;
                CHKERR epCorePtr->mField.getInterface<ISManager>()
                    ->isCreateProblemFieldAndRankLocal(
                        problem_name, ROW, epCorePtr->hybridSpatialDisp, 0,
                        SPACE_DIM, crack_hybrid_is, &*crack_faces);

                SmartPetscObj<IS> crack_piola_is;
                CHKERR epCorePtr->mField.getInterface<ISManager>()
                    ->isCreateProblemBrokenFieldAndRankLocal(*piolaZeroDofsVec,
                                                             crack_piola_is);

                double *a_f;
                CHKERR VecGetArray(schur_asmb_post_proc_rhs->f, &a_f);
                const double *a_x;
                CHKERR VecGetArrayRead(schur_asmb_post_proc_rhs->x, &a_x);
                auto zero_by_is = [&](auto is) {
                  MoFEMFunctionBegin;
                  const PetscInt *is_array;
                  PetscInt is_size;
                  CHKERR ISGetLocalSize(is, &is_size);
                  CHKERR ISGetIndices(is, &is_array);
                  for (int i = 0; i != is_size; ++i) {
                    a_f[is_array[i]] = -a_x[is_array[i]];
                  }
                  CHKERR ISRestoreIndices(is, &is_array);
                  MoFEMFunctionReturn(0);
                };

                CHKERR zero_by_is(crack_hybrid_is);
                CHKERR zero_by_is(crack_piola_is);

                CHKERR VecRestoreArray(schur_asmb_post_proc_rhs->f, &a_f);
                CHKERR VecRestoreArrayRead(schur_asmb_post_proc_rhs->x, &a_x);
              }

              MoFEMFunctionReturn(0);
            };

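        // On the crack DOFs the assembled residual is overwritten with -x;
        // together with the unit diagonal placed by MatZeroRowsColumnsIS on
        // the LHS, this keeps those DOFs at the values imposed in the
        // pre-processing hook.
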
        schur_asmb_post_proc_lhs->postProcessHook =
            [this, schur_asmb_post_proc_lhs]() {
              MoFEMFunctionBegin;

              if (pMGPtr) {
                CHKERR pMGPtr->setUP();
                pMGPtr.reset();
              }

              auto crack_faces = epCorePtr->crackFaces;

              // Assemble matrix
              CHKERR MatAssemblyBegin(schur_asmb_post_proc_lhs->B,
                                      MAT_FINAL_ASSEMBLY);
              CHKERR MatAssemblyEnd(schur_asmb_post_proc_lhs->B,
                                    MAT_FINAL_ASSEMBLY);
              *(schur_asmb_post_proc_lhs->matAssembleSwitch) = false;
              {
                SmartPetscObj<IS> crack_hybrid_is;
                CHKERR epCorePtr->mField.getInterface<ISManager>()
                    ->isCreateProblemFieldAndRank(
                        "ELASTIC_PROBLEM", ROW, epCorePtr->hybridSpatialDisp, 0,
                        SPACE_DIM, crack_hybrid_is, &*crack_faces);
                CHKERR MatZeroRowsColumnsIS(schur_asmb_post_proc_lhs->B,
                                            crack_hybrid_is, 1, PETSC_NULLPTR,
                                            PETSC_NULLPTR);
              }
              {
                SmartPetscObj<IS> crack_piola_is;
                CHKERR epCorePtr->mField.getInterface<ISManager>()
                    ->isCreateProblemBrokenFieldAndRank(*piolaZeroDofsVec,
                                                        crack_piola_is);
                CHKERR MatZeroRowsColumnsIS(schur_asmb_post_proc_lhs->B,
                                            crack_piola_is, 1, PETSC_NULLPTR,
                                            PETSC_NULLPTR);
              }

              auto [a00_field_list, a00_range_list] = getA00Fields();
              CHKERR assembleBlockMatSchur(mField,
                                           schur_asmb_post_proc_lhs->B, S,
                                           a00_field_list, a00_range_list, aoS);
              epCorePtr->a00FieldList = a00_field_list;
              epCorePtr->a00RangeList = a00_range_list;

              // Apply essential constraints to the Schur complement
              CHKERR MatAssemblyBegin(S, MAT_FINAL_ASSEMBLY);
              CHKERR MatAssemblyEnd(S, MAT_FINAL_ASSEMBLY);

              SmartPetscObj<IS> crack_hybrid_is;
              CHKERR epCorePtr->mField.getInterface<ISManager>()
                  ->isCreateProblemFieldAndRank(
                      "SUB_SCHUR", ROW, epCorePtr->hybridSpatialDisp, 0,
                      SPACE_DIM, crack_hybrid_is, &*crack_faces);
              epCorePtr->crackHybridIs = crack_hybrid_is;
              CHKERR MatZeroRowsColumnsIS(S, crack_hybrid_is, 1, PETSC_NULLPTR,
                                          PETSC_NULLPTR);

              MoFEMFunctionReturn(0);
            };
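
        // assembleBlockMatSchur condenses the A00 fields out of the block
        // matrix B into the Schur matrix S (mapped through the AO aoS); the
        // crack constraints are then re-imposed on S itself so that the
        // field-split Schur solve sees them.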

        auto ts_ctx_ptr = getDMTsCtx(epCorePtr->dmElastic);
        ts_ctx_ptr->getPreProcessIFunction().push_front(
            schur_asmb_pre_proc_rhs);
        ts_ctx_ptr->getPostProcessIFunction().push_back(
            schur_asmb_post_proc_rhs);
        ts_ctx_ptr->getPreProcessIJacobian().push_front(
            schur_asmb_pre_proc_lhs);
        ts_ctx_ptr->getPostProcessIJacobian().push_back(
            schur_asmb_post_proc_lhs);
        MoFEMFunctionReturn(0);
      };

      piolaZeroDofsVec =
          boost::make_shared<std::vector<boost::weak_ptr<NumeredDofEntity>>>();
      piolaZeroDofsMarker = boost::make_shared<std::vector<unsigned char>>();
      CHKERR set_assemble();

      MoFEMFunctionReturn(0);
    };

    auto set_pc = [&]() {
      MoFEMFunctionBegin;
      auto a00_is = getDMSubData(a00_dm)->getSmartRowIs();
      auto schur_is = getDMSubData(schur_dm)->getSmartRowIs();
      CHKERR PCFieldSplitSetIS(pc, NULL, a00_is);
      CHKERR PCFieldSplitSetIS(pc, NULL, schur_is);
      CHKERR PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_USER, S);
      MoFEMFunctionReturn(0);
    };
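
    // The two PCFieldSplitSetIS calls define split 0 (the A00 block) and
    // split 1 (the Schur block); PC_FIELDSPLIT_SCHUR_PRE_USER tells
    // PCFIELDSPLIT to precondition the Schur solve with the user-provided
    // matrix S rather than forming its own approximation.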

    auto set_diagonal_pc = [&]() {
      MoFEMFunctionBegin;
      KSP *subksp;
      CHKERR PCFieldSplitSchurGetSubKSP(pc, PETSC_NULLPTR, &subksp);
      auto get_pc = [](auto ksp) {
        PC pc_raw;
        CHKERR KSPGetPC(ksp, &pc_raw);
        return SmartPetscObj<PC>(pc_raw, true); // bump reference
      };
      CHKERR setSchurA00MatSolvePC(get_pc(subksp[0]));

      auto set_pc_p_mg = [&](auto dm, auto pc, auto S) {
        MoFEMFunctionBegin;
        CHKERR PCSetDM(pc, dm);
        PetscBool same = PETSC_FALSE;
        PetscObjectTypeCompare((PetscObject)pc, PCMG, &same);
        if (same) {
          auto smart_pc = SmartPetscObj<PC>(pc, true);
          pMGPtr = boost::make_shared<P_MultiGridData>(dm, smart_pc, S);
        }
        PetscObjectTypeCompare((PetscObject)pc, PCKSP, &same);
        if (same) {
          MOFEM_LOG("EP", Sev::inform)
              << "SetUpSchurImpl::setUp: fieldsplit 1 PCKSP";
          CHKERR PCSetFromOptions(pc);
          KSP ksp;
          CHKERR PCKSPGetKSP(pc, &ksp);
          CHKERR KSPSetFromOptions(ksp);
          PC ksp_pc;
          CHKERR KSPGetPC(ksp, &ksp_pc);
          CHKERR PCSetFromOptions(ksp_pc);
          PetscObjectTypeCompare((PetscObject)ksp_pc, PCMG, &same);
          if (same) {
            auto smart_pc = SmartPetscObj<PC>(ksp_pc, true);
            pMGPtr = boost::make_shared<P_MultiGridData>(dm, smart_pc, S);
          }
        }
        MoFEMFunctionReturn(0);
      };

      CHKERR set_pc_p_mg(schur_dm, get_pc(subksp[1]), S);

      CHKERR PetscFree(subksp);
      MoFEMFunctionReturn(0);
    };
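
    // subksp[0] is the inner solver for the A00 block (handed to
    // setSchurA00MatSolvePC); subksp[1] solves the Schur block. If the
    // latter uses PCMG, directly or wrapped in PCKSP, a p-multigrid setup
    // object is attached and finalized after the first Jacobian assembly
    // (see the post-processing hook above).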

    CHKERR set_assembly();
    CHKERR set_pc();
    CHKERR TSSetUp(ts);
    CHKERR KSPSetUp(ksp);
    CHKERR set_diagonal_pc();

  } else {
    MOFEM_LOG("EP", Sev::inform) << "SetUpSchurImpl::setUp: PCLU or other";

    epCorePtr->elasticFeLhs->getOpPtrVector().push_front(
        createOpSchurAssembleBegin());
    epCorePtr->elasticFeLhs->getOpPtrVector().push_back(
        createOpSchurAssembleEnd(/* arguments elided in this listing */));
    epCorePtr->elasticBcLhs->getOpPtrVector().push_front(
        createOpSchurAssembleBegin());
    epCorePtr->elasticBcLhs->getOpPtrVector().push_back(
        createOpSchurAssembleEnd(/* arguments elided in this listing */));
  }

  MoFEMFunctionReturn(0);
}

boost::shared_ptr<EshelbianCore::SetUpSchur>
EshelbianCore::SetUpSchur::createSetUpSchur(MoFEM::Interface &m_field,
                                            EshelbianCore *ep_core_ptr) {
  return boost::shared_ptr<SetUpSchur>(
      new SetUpSchurImpl(m_field, ep_core_ptr));
}
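
A minimal usage sketch (hypothetical driver code; mField, epCorePtr, and a TS
created on epCorePtr->dmElastic are assumed to exist):

auto schur_setup =
    EshelbianCore::SetUpSchur::createSetUpSchur(mField, epCorePtr);
CHKERR schur_setup->setUp(ts); // installs the field-split PC, S, and hooks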
@ COL
@ ROW
#define MoFEMFunctionBegin
First executable line of each MoFEM function, used for error handling.
#define MoFEMFunctionReturn(a)
Last executable line of each PETSc function used for error handling. Replaces return()
#define CHKERR
Inline error check.
PetscErrorCode DMMoFEMSetIsPartitioned(DM dm, PetscBool is_partitioned)
Definition DMMoFEM.cpp:1113
PetscErrorCode DMMoFEMCreateSubDM(DM subdm, DM dm, const char problem_name[])
Must be called by user to set Sub DM MoFEM data structures.
Definition DMMoFEM.cpp:215
PetscErrorCode DMMoFEMAddElement(DM dm, std::string fe_name)
Add element to DM.
Definition DMMoFEM.cpp:488
PetscErrorCode DMMoFEMSetSquareProblem(DM dm, PetscBool square_problem)
Set square problem.
Definition DMMoFEM.cpp:450
PetscErrorCode DMMoFEMAddSubFieldRow(DM dm, const char field_name[])
Definition DMMoFEM.cpp:238
PetscErrorCode DMMoFEMAddSubFieldCol(DM dm, const char field_name[])
Definition DMMoFEM.cpp:280
#define MOFEM_LOG(channel, severity)
Log.
#define NBFACETRI_L2(P)
Number of base functions on triangle for L2 space.
PetscErrorCode MoFEMErrorCode
MoFEM/PETSc error code.
PetscErrorCode TsSetIJacobian(TS ts, PetscReal t, Vec u, Vec u_t, PetscReal a, Mat A, Mat B, void *ctx)
Set function evaluating Jacobian in TS solver.
Definition TsCtx.cpp:169
auto getDMTsCtx(DM dm)
Get TS context data structure used by DM.
Definition DMMoFEM.hpp:1276
OpSchurAssembleBase * createOpSchurAssembleEnd(std::vector< std::string > fields_name, std::vector< boost::shared_ptr< Range > > field_ents, SmartPetscObj< AO > ao, SmartPetscObj< Mat > schur, bool sym_schur, bool symm_op)
Construct a new Op Schur Assemble End object.
Definition Schur.cpp:2585
MoFEMErrorCode setSchurA00MatSolvePC(SmartPetscObj< PC > pc)
Set PC for A00 block.
Definition Schur.cpp:2627
boost::shared_ptr< PCMGSetUpViaApproxOrdersCtx > createPCMGSetUpViaApproxOrdersCtx(DM dm, Mat A, bool use_shell_mat)
createPCMGSetUpViaApproxOrdersCtx
auto createDMHybridisedL2Matrix(DM dm)
Get smart hybridised L2 matrix from DM.
Definition DMMoFEM.hpp:1204
auto getDMSubData(DM dm)
Get sub problem data structure.
Definition DMMoFEM.hpp:1292
PetscErrorCode TsSetI2Jacobian(TS ts, PetscReal t, Vec u, Vec u_t, Vec u_tt, PetscReal a, PetscReal aa, Mat A, Mat B, void *ctx)
Calculates the Jacobian for a second-order PDE in time.
Definition TsCtx.cpp:519
MoFEMErrorCode PCMGSetUpViaApproxOrders(PC pc, boost::shared_ptr< PCMGSetUpViaApproxOrdersCtx > ctx, int verb)
Function that builds the MG structure.
boost::shared_ptr< BlockStructure > createBlockMatStructure(DM dm, SchurFEOpsFEandFields schur_fe_op_vec)
Create a Mat Diag Blocks object.
Definition Schur.cpp:1082
boost::shared_ptr< NestSchurData > createSchurNestedMatrixStruture(std::pair< SmartPetscObj< DM >, SmartPetscObj< DM > > dms, boost::shared_ptr< BlockStructure > block_mat_data_ptr, std::vector< std::string > fields_names, std::vector< boost::shared_ptr< Range > > field_ents, bool add_preconditioner_block)
Get the Schur Nest Mat Array object.
Definition Schur.cpp:2343
MoFEMErrorCode DMMoFEMSetNestSchurData(DM dm, boost::shared_ptr< NestSchurData >)
Definition DMMoFEM.cpp:1554
auto createDMNestSchurMat(DM dm)
Definition DMMoFEM.hpp:1218
auto createDM(MPI_Comm comm, const std::string dm_type_name)
Creates smart DM object.
MoFEMErrorCode assembleBlockMatSchur(MoFEM::Interface &m_field, Mat B, Mat S, std::vector< std::string > fields_name, std::vector< boost::shared_ptr< Range > > field_ents, SmartPetscObj< AO > ao)
Assemble Schur matrix.
Definition Schur.cpp:1817
OpSchurAssembleBase * createOpSchurAssembleBegin()
Definition Schur.cpp:2580
auto createDMBlockMat(DM dm)
Definition DMMoFEM.hpp:1211
static boost::shared_ptr< SetUpSchur > createSetUpSchur(MoFEM::Interface &m_field, EshelbianCore *ep_core_ptr)
std::vector< boost::shared_ptr< Range > > a00RangeList
const std::string skeletonElement
MoFEM::Interface & mField
const std::string spatialL2Disp
SmartPetscObj< IS > crackHybridIs
boost::shared_ptr< FaceElementForcesAndSourcesCore > elasticBcLhs
const std::string elementVolumeName
const std::string piolaStress
std::vector< std::string > a00FieldList
const std::string bubbleField
boost::shared_ptr< VolumeElementForcesAndSourcesCore > elasticFeLhs
static PetscBool noStretch
const std::string rotAxis
const std::string contactDisp
const std::string naturalBcElement
boost::shared_ptr< Range > crackFaces
const std::string hybridSpatialDisp
SmartPetscObj< DM > dmElastic
Elastic problem.
const std::string stretchTensor
const std::string contactElement
virtual MPI_Comm & get_comm() const =0
Deprecated interface functions.
intrusive_ptr for managing PETSc objects.
MoFEMErrorCode getInterface(IFACE *&iface) const
Get interface reference to pointer of interface.
P_MultiGridData(SmartPetscObj< DM > dm, SmartPetscObj< PC > pc, SmartPetscObj< Mat > S)
EshelbianCore * epCorePtr
boost::shared_ptr< std::vector< unsigned char > > piolaZeroDofsMarker
SmartPetscObj< Mat > S
boost::shared_ptr< std::vector< boost::weak_ptr< NumeredDofEntity > > > piolaZeroDofsVec
boost::shared_ptr< P_MultiGridData > pMGPtr
MoFEMErrorCode setUp(TS ts)
MoFEMErrorCode setUp(SmartPetscObj< KSP >)
virtual ~SetUpSchurImpl()
MoFEMErrorCode postProc()
SmartPetscObj< AO > aoS
MoFEMErrorCode preProc()
MoFEM::Interface & mField
SetUpSchurImpl(MoFEM::Interface &m_field, EshelbianCore *ep_core_ptr)