constexpr AssemblyType A = (SCHUR_ASSEMBLE) ? AssemblyType::BLOCK_SCHUR
                                            : AssemblyType::PETSC;
constexpr IntegrationType I = IntegrationType::GAUSS;
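// Not part of the listing above: a hedged sketch of how the elastic constants
// used by HookeOps are typically defined. The numerical defaults are
// assumptions; the conversion from (E, nu) to bulk and shear moduli is the
// standard one.
constexpr double young_modulus = 1;    // assumed default
constexpr double poisson_ratio = 0.3;  // assumed default
constexpr double bulk_modulus_K = young_modulus / (3 * (1 - 2 * poisson_ratio));
constexpr double shear_modulus_G = young_modulus / (2 * (1 + poisson_ratio));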
#include <ElasticSpring.hpp>
#include <FluidLevel.hpp>
#include <CalculateTraction.hpp>
#include <NaturalDomainBC.hpp>
#include <NaturalBoundaryBC.hpp>
#include <HookeOps.hpp>
enum bases { AINSWORTH, DEMKOWICZ, LASBASETOPT };
const char *list_bases[LASBASETOPT] = {"ainsworth", "demkowicz"};
PetscInt choice_base_value = AINSWORTH;
// Select the approximation base from the command line (option name assumed)
CHKERR PetscOptionsGetEList(PETSC_NULL, NULL, "-base", list_bases,
                            LASBASETOPT, &choice_base_value, PETSC_NULL);

switch (choice_base_value) {
case AINSWORTH:
  base = AINSWORTH_LEGENDRE_BASE;
  MOFEM_LOG("WORLD", Sev::inform)
      << "Set AINSWORTH_LEGENDRE_BASE for displacements";
  break;
case DEMKOWICZ:
  base = DEMKOWICZ_JACOBI_BASE;
  MOFEM_LOG("WORLD", Sev::inform)
      << "Set DEMKOWICZ_JACOBI_BASE for displacements";
  break;
}
auto project_ho_geometry = [&]() {
  Projection10NodeCoordsOnField ent_method(mField, "GEOMETRY");
  return mField.loop_dofs("GEOMETRY", ent_method);
};
CHKERR project_ho_geometry();
CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(), "REMOVE_X",
                                         "U", 0, 0);
CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(), "REMOVE_Y",
                                         "U", 1, 1);
CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(), "REMOVE_Z",
                                         "U", 2, 2);
CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(),
                                         "REMOVE_ALL", "U", 0, 3);

CHKERR bc_mng->pushMarkDOFsOnEntities<DisplacementCubitBcData>(
    simple->getProblemName(), "U");

CHKERR bc_mng->addBlockDOFsToMPCs(simple->getProblemName(), "U");
CHKERR pip->setBoundaryRhsIntegrationRule(integration_rule_bc);
CHKERR pip->setBoundaryLhsIntegrationRule(integration_rule_bc);
CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(
    pip->getOpDomainLhsPipeline(), {H1}, "GEOMETRY");
CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(
    pip->getOpDomainRhsPipeline(), {H1}, "GEOMETRY");
CHKERR AddHOOps<SPACE_DIM - 1, SPACE_DIM, SPACE_DIM>::add(
    pip->getOpBoundaryRhsPipeline(), {NOSPACE}, "GEOMETRY");
CHKERR AddHOOps<SPACE_DIM - 1, SPACE_DIM, SPACE_DIM>::add(
    pip->getOpBoundaryLhsPipeline(), {NOSPACE}, "GEOMETRY");
CHKERR HookeOps::opFactoryDomainLhs<SPACE_DIM, A, I, DomainEleOp>(
    mField, pip->getOpDomainLhsPipeline(), "U", "MAT_ELASTIC", Sev::verbose);

CHKERR HookeOps::opFactoryDomainRhs<SPACE_DIM, A, I, DomainEleOp>(
    mField, pip->getOpDomainRhsPipeline(), "U", "MAT_ELASTIC", Sev::verbose);
// Body force contribution (natural domain BC) to the right-hand side
CHKERR DomainNaturalBC::AddFluxToPipeline<OpBodyForce>::add(
    pip->getOpDomainRhsPipeline(), mField, "U", Sev::inform);
// Surface force contribution (natural boundary BC) to the right-hand side
CHKERR BoundaryNaturalBC::AddFluxToPipeline<OpForce>::add(
    pip->getOpBoundaryRhsPipeline(), mField, "U", -1, Sev::inform);
// Spring stiffness contribution to the boundary left-hand side
CHKERR BoundaryLhsBCs::AddFluxToPipeline<OpSpringLhs>::add(
    pip->getOpBoundaryLhsPipeline(), mField, "U", Sev::verbose);
static boost::shared_ptr<SetUpSchur> createSetUpSchur(MoFEM::Interface &m_field);
auto solver = pip->createKSP();
CHKERR KSPSetFromOptions(solver);

auto dm = simple->getDM();
auto D = createDMVector(dm);
auto F = vectorDuplicate(D);
auto set_essential_bc = [&]() {
  MoFEMFunctionBegin;
  // Attach essential-BC hooks directly to the KSP context
  auto ksp_ctx_ptr = getDMKspCtx(dm);

  auto pre_proc_rhs = boost::make_shared<FEMethod>();
  auto post_proc_rhs = boost::make_shared<FEMethod>();
  auto post_proc_lhs = boost::make_shared<FEMethod>();

  auto get_pre_proc_hook = [&]() {
    return EssentialPreProc<DisplacementCubitBcData>(mField, pre_proc_rhs, {});
  };
  pre_proc_rhs->preProcessHook = get_pre_proc_hook();

  auto get_post_proc_hook_rhs = [this, post_proc_rhs]() {
    MoFEMFunctionBegin;
    CHKERR EssentialPostProcRhs<DisplacementCubitBcData>(mField, post_proc_rhs,
                                                         1.)();
    MoFEMFunctionReturn(0);
  };
  auto get_post_proc_hook_lhs = [this, post_proc_lhs]() {
    MoFEMFunctionBegin;
    CHKERR EssentialPostProcLhs<DisplacementCubitBcData>(mField, post_proc_lhs,
                                                         1.)();
    MoFEMFunctionReturn(0);
  };
  post_proc_rhs->postProcessHook = get_post_proc_hook_rhs;
  post_proc_lhs->postProcessHook = get_post_proc_hook_lhs;

  ksp_ctx_ptr->getPreProcComputeRhs().push_front(pre_proc_rhs);
  ksp_ctx_ptr->getPostProcComputeRhs().push_back(post_proc_rhs);
  ksp_ctx_ptr->getPostProcSetOperators().push_back(post_proc_lhs);
  MoFEMFunctionReturn(0);
};
auto setup_and_solve = [&]() {
  MoFEMFunctionBegin;
  BOOST_LOG_SCOPED_THREAD_ATTR("Timeline", attrs::timer());
  MOFEM_LOG("TIMER", Sev::inform) << "KSPSetUp";
  CHKERR KSPSetUp(solver);
  MOFEM_LOG("TIMER", Sev::inform) << "KSPSetUp <= Done";
  MOFEM_LOG("TIMER", Sev::inform) << "KSPSolve";
  CHKERR KSPSolve(solver, F, D);
  MOFEM_LOG("TIMER", Sev::inform) << "KSPSolve <= Done";
  MoFEMFunctionReturn(0);
};

CHKERR set_essential_bc();

if (A == AssemblyType::BLOCK_SCHUR || A == AssemblyType::SCHUR) {
  auto schur_ptr = SetUpSchur::createSetUpSchur(mField);
  CHKERR schur_ptr->setUp(solver);
  CHKERR setup_and_solve();
} else {
  CHKERR setup_and_solve();
}

CHKERR VecGhostUpdateBegin(D, INSERT_VALUES, SCATTER_FORWARD);
CHKERR VecGhostUpdateEnd(D, INSERT_VALUES, SCATTER_FORWARD);
CHKERR DMoFEMMeshToLocalVector(dm, D, INSERT_VALUES, SCATTER_REVERSE);
auto evaluate_field_at_the_point = [&]() {
  MoFEMFunctionBegin;

  std::array<double, SPACE_DIM> field_eval_coords;
  int coords_dim = SPACE_DIM;
  // Option name "-field_eval_coords" is assumed
  CHKERR PetscOptionsGetRealArray(NULL, NULL, "-field_eval_coords",
                                  field_eval_coords.data(), &coords_dim,
                                  &do_eval_field);

  if (do_eval_field == PETSC_TRUE) {
    vectorFieldPtr = boost::make_shared<MatrixDouble>();
    auto field_eval_data =
        mField.getInterface<FieldEvaluatorInterface>()->getData<DomainEle>();
    CHKERR mField.getInterface<FieldEvaluatorInterface>()
        ->buildTree<SPACE_DIM>(field_eval_data, simple->getDomainFEName());

    field_eval_data->setEvalPoints(field_eval_coords.data(), 1);
    auto no_rule = [](int, int, int) { return -1; };

    auto field_eval_fe_ptr = field_eval_data->feMethodPtr.lock();
    field_eval_fe_ptr->getRuleHook = no_rule;

    field_eval_fe_ptr->getOpPtrVector().push_back(
        new OpCalculateVectorFieldValues<SPACE_DIM>("U", vectorFieldPtr));

    CHKERR mField.getInterface<FieldEvaluatorInterface>()
        ->evalFEAtThePoint<SPACE_DIM>(
            field_eval_coords.data(), 1e-12, simple->getProblemName(),
            simple->getDomainFEName(), field_eval_data,
            mField.get_comm_rank(), mField.get_comm_rank(), nullptr, MF_EXIST,
            QUIET);

    if (vectorFieldPtr->size1()) {
      auto t_disp = getFTensor1FromMat<SPACE_DIM>(*vectorFieldPtr);
      if constexpr (SPACE_DIM == 2)
        MOFEM_LOG("WORLD", Sev::inform)
            << "U_X: " << t_disp(0) << " U_Y: " << t_disp(1);
      else
        MOFEM_LOG("WORLD", Sev::inform)
            << "U_X: " << t_disp(0) << " U_Y: " << t_disp(1)
            << " U_Z: " << t_disp(2);
    }
  }

  MoFEMFunctionReturn(0);
};

CHKERR evaluate_field_at_the_point();
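// Usage sketch (option name assumed, executable and mesh names are
// placeholders): displacements can be probed at a point given on the command
// line, e.g.
//   ./elastic_3d -file_name mesh.h5m -order 2 -field_eval_coords 0.5 0.5 0.5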
auto det_ptr = boost::make_shared<VectorDouble>();
auto jac_ptr = boost::make_shared<MatrixDouble>();
auto inv_jac_ptr = boost::make_shared<MatrixDouble>();

pip->getDomainRhsFE().reset();
pip->getDomainLhsFE().reset();
pip->getBoundaryRhsFE().reset();
pip->getBoundaryLhsFE().reset();
auto post_proc_mesh = boost::make_shared<moab::Core>();
auto post_proc_begin = boost::make_shared<PostProcBrokenMeshInMoabBaseBegin>(
    mField, post_proc_mesh);
auto post_proc_end = boost::make_shared<PostProcBrokenMeshInMoabBaseEnd>(
    mField, post_proc_mesh);
auto calculate_stress_ops = [&](auto &pip) {
  auto common_ptr = HookeOps::commonDataFactory<SPACE_DIM, GAUSS, DomainEle>(
      mField, pip, "U", "MAT_ELASTIC", Sev::verbose);
  auto u_ptr = boost::make_shared<MatrixDouble>();
  pip.push_back(new OpCalculateVectorFieldValues<SPACE_DIM>("U", u_ptr));
  auto x_ptr = boost::make_shared<MatrixDouble>();
  pip.push_back(
      new OpCalculateVectorFieldValues<SPACE_DIM>("GEOMETRY", x_ptr));
  return boost::make_tuple(u_ptr, x_ptr, common_ptr->getMatStrain(),
                           common_ptr->getMatCauchyStress());
};
auto get_tag_id_on_pmesh = [&](bool post_proc_skin) {
  int def_val_int = 0;
  Tag tag_mat;
  CHKERR mField.get_moab().tag_get_handle(
      "MAT_ELASTIC", 1, MB_TYPE_INTEGER, tag_mat,
      MB_TAG_CREAT | MB_TAG_SPARSE, &def_val_int);
  auto meshset_vec_ptr =
      mField.getInterface<MeshsetsManager>()->getCubitMeshsetPtr(
          std::regex((boost::format("%s(.*)") % "MAT_ELASTIC").str()));

  Range skin_ents;
  std::unique_ptr<Skinner> skin_ptr;
  if (post_proc_skin) {
    skin_ptr = std::make_unique<Skinner>(&mField.get_moab());
    auto boundary_meshset = simple->getBoundaryMeshSet();
    CHKERR mField.get_moab().get_entities_by_handle(boundary_meshset,
                                                    skin_ents, true);
  }

  for (auto m : meshset_vec_ptr) {
    Range ents_3d;
    CHKERR mField.get_moab().get_entities_by_handle(m->getMeshset(), ents_3d,
                                                    true);
    int const id = m->getMeshsetId();
    ents_3d = ents_3d.subset_by_dimension(SPACE_DIM);
    if (post_proc_skin) {
      Range skin_faces;
      CHKERR skin_ptr->find_skin(0, ents_3d, false, skin_faces);
      ents_3d = intersect(skin_ents, skin_faces);
    }
    CHKERR mField.get_moab().tag_clear_data(tag_mat, ents_3d, &id);
  }

  return tag_mat;
};
auto post_proc_domain = [&](auto post_proc_mesh) {
  auto post_proc_fe =
      boost::make_shared<PostProcEleDomain>(mField, post_proc_mesh);

  auto [u_ptr, x_ptr, mat_strain_ptr, mat_stress_ptr] =
      calculate_stress_ops(post_proc_fe->getOpPtrVector());

  post_proc_fe->getOpPtrVector().push_back(new OpPPMap(
      post_proc_fe->getPostProcMesh(), post_proc_fe->getMapGaussPts(),
      {},
      {{"U", u_ptr}, {"GEOMETRY", x_ptr}},
      {},
      {{"STRAIN", mat_strain_ptr}, {"STRESS", mat_stress_ptr}}));

  post_proc_fe->setTagsToTransfer({get_tag_id_on_pmesh(false)});
  return post_proc_fe;
};
auto post_proc_boundary = [&](auto post_proc_mesh) {
  auto post_proc_fe =
      boost::make_shared<PostProcEleBdy>(mField, post_proc_mesh);
  CHKERR AddHOOps<SPACE_DIM - 1, SPACE_DIM, SPACE_DIM>::add(
      post_proc_fe->getOpPtrVector(), {}, "GEOMETRY");
  auto op_loop_side =
      new OpLoopSide<SideEle>(mField, simple->getDomainFEName(), SPACE_DIM);
  // Evaluate strain and stress on the adjacent domain (side) element
  auto [u_ptr, x_ptr, mat_strain_ptr, mat_stress_ptr] =
      calculate_stress_ops(op_loop_side->getOpPtrVector());
  post_proc_fe->getOpPtrVector().push_back(op_loop_side);

  auto mat_traction_ptr = boost::make_shared<MatrixDouble>();
  post_proc_fe->getOpPtrVector().push_back(
      new ElasticExample::OpCalculateTraction(mat_stress_ptr,
                                              mat_traction_ptr));

  post_proc_fe->getOpPtrVector().push_back(new OpPPMap(
      post_proc_fe->getPostProcMesh(), post_proc_fe->getMapGaussPts(),
      {},
      {{"U", u_ptr}, {"GEOMETRY", x_ptr}, {"T", mat_traction_ptr}},
      {},
      {{"STRAIN", mat_strain_ptr}, {"STRESS", mat_stress_ptr}}));

  post_proc_fe->setTagsToTransfer({get_tag_id_on_pmesh(true)});
  return post_proc_fe;
};
PetscBool post_proc_skin_only = PETSC_FALSE;
if (SPACE_DIM == 3) {
  post_proc_skin_only = PETSC_TRUE;
  // Option name "-post_proc_skin_only" is assumed
  CHKERR PetscOptionsGetBool(PETSC_NULL, "", "-post_proc_skin_only",
                             &post_proc_skin_only, PETSC_NULL);
}
if (post_proc_skin_only == PETSC_FALSE) {
  pip->getDomainRhsFE() = post_proc_domain(post_proc_mesh);
} else {
  pip->getBoundaryRhsFE() = post_proc_boundary(post_proc_mesh);
}

CHKERR DMoFEMPreProcessFiniteElements(simple->getDM(),
                                      post_proc_begin->getFEMethod());
CHKERR pip->loopFiniteElements();
CHKERR DMoFEMPostProcessFiniteElements(simple->getDM(),
                                       post_proc_end->getFEMethod());

CHKERR post_proc_end->writeFile("out_elastic.h5m");
pip->getDomainRhsFE().reset();
pip->getDomainLhsFE().reset();
pip->getBoundaryRhsFE().reset();
pip->getBoundaryLhsFE().reset();
CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(
    pip->getOpDomainRhsPipeline(), {H1}, "GEOMETRY");
CHKERR AddHOOps<SPACE_DIM - 1, SPACE_DIM, SPACE_DIM>::add(
    pip->getOpBoundaryRhsPipeline(), {}, "GEOMETRY");
CHKERR HookeOps::opFactoryDomainRhs<SPACE_DIM, A, I, DomainEleOp>(
    mField, pip->getOpDomainRhsPipeline(), "U", "MAT_ELASTIC", Sev::verbose);
CHKERR DomainNaturalBC::AddFluxToPipeline<OpBodyForce>::add(
    pip->getOpDomainRhsPipeline(), mField, "U", Sev::verbose);
CHKERR BoundaryNaturalBC::AddFluxToPipeline<OpForce>::add(
    pip->getOpBoundaryRhsPipeline(), mField, "U", -1, Sev::verbose);
auto dm = simple->getDM();
auto res = createDMVector(dm);
CHKERR VecSetDM(res, PETSC_NULL);

pip->getDomainRhsFE()->f = res;
pip->getBoundaryRhsFE()->f = res;

CHKERR VecZeroEntries(res);
CHKERR pip->loopFiniteElements();
CHKERR VecGhostUpdateBegin(res, ADD_VALUES, SCATTER_REVERSE);
CHKERR VecGhostUpdateEnd(res, ADD_VALUES, SCATTER_REVERSE);
CHKERR VecAssemblyBegin(res);
CHKERR VecAssemblyEnd(res);
auto zero_residual_at_constrains = [&]() {
  MoFEMFunctionBegin;
  auto fe_post_proc_ptr = boost::make_shared<FEMethod>();
  auto get_post_proc_hook_rhs = [&]() {
    MoFEMFunctionBegin;
    CHKERR EssentialPreProcReaction<DisplacementCubitBcData>(
        mField, fe_post_proc_ptr, res)();
    CHKERR EssentialPostProcRhs<DisplacementCubitBcData>(
        mField, fe_post_proc_ptr, 0, res)();
    MoFEMFunctionReturn(0);
  };
  fe_post_proc_ptr->postProcessHook = get_post_proc_hook_rhs;
  CHKERR DMoFEMPostProcessFiniteElements(dm, fe_post_proc_ptr.get());
  MoFEMFunctionReturn(0);
};
CHKERR zero_residual_at_constrains();
double nrm2;
CHKERR VecNorm(res, NORM_2, &nrm2);
MOFEM_LOG_C("WORLD", Sev::inform, "residual = %3.4e\n", nrm2);
auto post_proc_residual = [&](auto dm, auto f_res, auto out_name) {
  MoFEMFunctionBegin;
  auto post_proc_fe =
      boost::make_shared<PostProcBrokenMeshInMoab<DomainEle>>(mField);
  auto u_vec = boost::make_shared<MatrixDouble>();
  post_proc_fe->getOpPtrVector().push_back(
      new OpCalculateVectorFieldValues<SPACE_DIM>("U", u_vec, f_res));
  post_proc_fe->getOpPtrVector().push_back(new OpPPMap(
      post_proc_fe->getPostProcMesh(), post_proc_fe->getMapGaussPts(),
      {},
      {{"RES", u_vec}},
      {}, {}));
  CHKERR DMoFEMLoopFiniteElements(dm, simple->getDomainFEName(), post_proc_fe);
  post_proc_fe->writeFile(out_name);
  MoFEMFunctionReturn(0);
};

CHKERR post_proc_residual(simple->getDM(), res, "res.h5m");
constexpr double eps = 1e-8;
if (nrm2 > eps)
  SETERRQ(PETSC_COMM_WORLD, MOFEM_ATOM_TEST_INVALID, "Residual is not zero");

if (do_eval_field == PETSC_TRUE) {
  if (!vectorFieldPtr || vectorFieldPtr->size1() == 0) {
    SETERRQ(PETSC_COMM_WORLD, MOFEM_ATOM_TEST_INVALID,
            "atom test %d failed: Field Evaluator did not provide result",
            test);
  }
  auto t_disp = getFTensor1FromMat<SPACE_DIM>(*vectorFieldPtr);
  double Ux_ref = 0.46;
  double Uy_ref = -0.015;
  constexpr double eps = 1e-8;
  if (fabs(t_disp(0) - Ux_ref) > eps || fabs(t_disp(1) - Uy_ref) > eps) {
    SETERRQ(PETSC_COMM_WORLD, MOFEM_ATOM_TEST_INVALID,
            "atom test %d failed: Ux_ref = %3.6e, computed = %3.6e, Uy_ref "
            "= %3.6e",
            test, Ux_ref, t_disp(0), Uy_ref);
  }
}
int main(int argc, char *argv[]) {

  // Initialise MoFEM/PETSc and MOAB data structures
  const char param_file[] = "param_file.petsc";
  MoFEM::Core::Initialize(&argc, &argv, param_file, help);

  // Add a logging channel used for timing output
  auto core_log = logging::core::get();
  core_log->add_sink(
      LogManager::createSink(LogManager::getStrmWorld(), "TIMER"));
  LogManager::setLog("TIMER");

  try {
    // Register MoFEM discrete managers in PETSc
    DMType dm_name = "DMMOFEM";
    CHKERR DMRegister_MoFEM(dm_name);
    DMType dm_name_mg = "DMMOFEM_MG";
    CHKERR DMRegister_MGViaApproxOrders(dm_name_mg);

    // Create MOAB and MoFEM databases and run the example
    moab::Core mb_instance;
    moab::Interface &moab = mb_instance;
    MoFEM::Core core(moab);
    MoFEM::Interface &m_field = core;
    Example ex(m_field);
    CHKERR ex.runProblem();
  }
  CATCH_ERRORS;

  CHKERR MoFEM::Core::Finalize();
}
760 "Is expected that schur matrix is not allocated. This is "
761 "possible only is PC is set up twice");
789 CHKERR KSPGetPC(solver, &pc);
790 PetscBool is_pcfs = PETSC_FALSE;
791 PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &is_pcfs);
796 "Is expected that schur matrix is not allocated. This is "
797 "possible only is PC is set up twice");
804 CHKERR MatSetDM(
S, PETSC_NULL);
806 CHKERR MatSetOption(
S, MAT_SYMMETRIC, PETSC_TRUE);
if constexpr (A == AssemblyType::BLOCK_SCHUR) {
  DM solver_dm;
  CHKERR KSPGetDM(solver, &solver_dm);
  CHKERR DMSetMatType(solver_dm, MATSHELL);
}
auto create_dm = [&](const char *name, auto &ents, auto dm_type) {
  auto dm = createDM(mField.get_comm(), dm_type);
  auto create_dm_imp = [&]() {
    MoFEMFunctionBegin;
    CHKERR DMMoFEMCreateSubDM(dm, simple->getDM(), name);
    auto sub_ents_ptr = boost::make_shared<Range>(ents);
    CHKERR DMMoFEMAddSubFieldRow(dm, "U", sub_ents_ptr);
    CHKERR DMMoFEMAddSubFieldCol(dm, "U", sub_ents_ptr);
    CHKERR DMSetUp(dm);
    MoFEMFunctionReturn(0);
  };
  CHK_THROW_MESSAGE(create_dm_imp(),
                    "Error in creating schurDM. It is possible that schurDM "
                    "is already created");
  return dm;
};
if constexpr (A == AssemblyType::BLOCK_SCHUR) {

  auto get_nested_mat_data = [&]() -> boost::shared_ptr<NestSchurData> {
    auto block_mat_data = createBlockMatStructure(
        simple->getDM(), {{simple->getDomainFEName(), {{"U", "U"}}}});
    return createSchurNestedMatrixStruture(
        {schurDM, blockDM}, block_mat_data,
        {"U"}, {boost::make_shared<Range>(volEnts)}, true);
  };

  auto nested_mat_data = get_nested_mat_data();
  CHKERR DMMoFEMSetNestSchurData(simple->getDM(), nested_mat_data);
}
auto dm_is = getDMSubData(schurDM)->getSmartRowIs();
auto ao_up = createAOMappingIS(dm_is, PETSC_NULL);

pip->getOpBoundaryLhsPipeline().push_front(createOpSchurAssembleBegin());
pip->getOpBoundaryLhsPipeline().push_back(createOpSchurAssembleEnd(
    {"U"}, {boost::make_shared<Range>(volEnts)}, ao_up, S, true, true));
pip->getOpDomainLhsPipeline().push_front(createOpSchurAssembleBegin());
pip->getOpDomainLhsPipeline().push_back(createOpSchurAssembleEnd(
    {"U"}, {boost::make_shared<Range>(volEnts)}, ao_up, S, true, true));
auto pre_proc_schur_lhs_ptr = boost::make_shared<FEMethod>();
auto post_proc_schur_lhs_ptr = boost::make_shared<FEMethod>();

pre_proc_schur_lhs_ptr->preProcessHook = [this]() {
  MoFEMFunctionBegin;
  CHKERR MatZeroEntries(S);
  MOFEM_LOG("TIMER", Sev::inform) << "Lhs Assemble Begin";
  MoFEMFunctionReturn(0);
};

post_proc_schur_lhs_ptr->postProcessHook = [this, post_proc_schur_lhs_ptr,
                                            ao_up]() {
  MoFEMFunctionBegin;
  CHKERR MatAssemblyBegin(S, MAT_FINAL_ASSEMBLY);
  CHKERR MatAssemblyEnd(S, MAT_FINAL_ASSEMBLY);
  CHKERR EssentialPostProcLhs<DisplacementCubitBcData>(
      mField, post_proc_schur_lhs_ptr, 1, S, ao_up)();
  MOFEM_LOG("TIMER", Sev::inform) << "Lhs Assemble End";
  MoFEMFunctionReturn(0);
};

auto ksp_ctx_ptr = getDMKspCtx(simple->getDM());
ksp_ctx_ptr->getPreProcSetOperators().push_front(pre_proc_schur_lhs_ptr);
ksp_ctx_ptr->getPostProcSetOperators().push_back(post_proc_schur_lhs_ptr);
CHKERR PCFieldSplitSetIS(pc, NULL, vol_is);
CHKERR PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_USER, S);
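// Typical PETSc options exercising this field-split / Schur path (an assumed
// usage example, not taken from the listing):
//   -pc_type fieldsplit -pc_fieldsplit_type schur
//   -fieldsplit_0_ksp_type preonly -fieldsplit_0_pc_type lu
//   -fieldsplit_1_ksp_type cg -fieldsplit_1_pc_type mg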
auto get_pc = [](auto ksp) {
  PC pc_raw;
  CHKERR KSPGetPC(ksp, &pc_raw);
  return SmartPetscObj<PC>(pc_raw, true); // bump reference count
};

if constexpr (A == AssemblyType::BLOCK_SCHUR) {
  auto A = createDMBlockMat(simple->getDM());
  auto P = createDMNestSchurMat(simple->getDM());
  CHKERR PCSetOperators(pc, A, P);
}

KSP *subksp;
CHKERR PCFieldSplitSchurGetSubKSP(pc, PETSC_NULL, &subksp);
auto set_pc_p_mg = [](auto dm, auto pc, auto S) {
  MoFEMFunctionBegin;
  PetscBool same = PETSC_FALSE;
  PetscObjectTypeCompare((PetscObject)pc, PCMG, &same);
  if (same) {
    MOFEM_LOG("TIMER", Sev::inform) << "Set up MG";
    CHKERR PCMGSetUpViaApproxOrders(
        pc, createPCMGSetUpViaApproxOrdersCtx(dm, S, true), QUIET);
    CHKERR PCSetFromOptions(pc);
  }
  MoFEMFunctionReturn(0);
};
auto set_pc_ksp = [&](auto dm, auto pc, auto S) {
  MoFEMFunctionBegin;
  PetscBool same = PETSC_FALSE;
  PetscObjectTypeCompare((PetscObject)pc, PCKSP, &same);
  if (same) {
    MOFEM_LOG("TIMER", Sev::inform) << "Set up inner KSP for PCKSP";
    CHKERR PCSetFromOptions(pc);
    KSP inner_ksp;
    CHKERR PCKSPGetKSP(pc, &inner_ksp);
    CHKERR KSPSetFromOptions(inner_ksp);
    PC inner_pc;
    CHKERR KSPGetPC(inner_ksp, &inner_pc);
    CHKERR PCSetFromOptions(inner_pc);
    CHKERR set_pc_p_mg(dm, inner_pc, S);
  }
  MoFEMFunctionReturn(0);
};
CHKERR PetscFree(subksp);
boost::shared_ptr<SetUpSchur>
SetUpSchur::createSetUpSchur(MoFEM::Interface &m_field) {
  return boost::shared_ptr<SetUpSchur>(new SetUpSchurImpl(m_field));
}