v0.14.0
test_broken_space.cpp

Testing broken spaces. The implementation works for 2D and 3D meshes and is aimed at testing H-div broken base functions and the L2 base on the skeleton.

It also tests the block matrix with the Schur complement.
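For orientation: read from the operators and error checks below, the test approximates a mixed Poisson-type problem with unit coefficients and a constant source (this is an interpretation of the code, not a formulation quoted from separate documentation),

\[
\boldsymbol{\sigma} = \nabla u, \qquad \nabla\cdot\boldsymbol{\sigma} = f, \qquad f = -1,
\]

where the flux \(\boldsymbol{\sigma}\) lives in the broken H-div space (field BROKEN), the scalar \(u\) in element-wise L2 (field U), and continuity of the normal trace \(\boldsymbol{\sigma}\cdot\mathbf{n}\) across element facets is enforced weakly by the Lagrange multiplier field HYBRID on the mesh skeleton; this last coupling is what the broken-space constraint operators assemble.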

/**
* \example test_broken_space.cpp
*
* Testing broken spaces. The implementation works for 2D and 3D meshes and is
* aimed at testing H-div broken base functions and the L2 base on the
* skeleton.
*
* It also tests the block matrix with the Schur complement.
*
*/
#include <MoFEM.hpp>
#include <FormsBrokenSpaceConstraintImpl.hpp>
using namespace MoFEM;
static char help[] = "...\n\n";
constexpr bool debug = false;
constexpr AssemblyType AT =
(SCHUR_ASSEMBLE) ? AssemblyType::BLOCK_SCHUR
: AssemblyType::PETSC; //< selected assembly type
constexpr IntegrationType IT =
IntegrationType::GAUSS; //< selected integration type
constexpr int SPACE_DIM =
EXECUTABLE_DIMENSION; //< Space dimension of problem, mesh
using EntData = EntitiesFieldData::EntData;
using DomainEle = PipelineManager::ElementsAndOpsByDim<SPACE_DIM>::DomainEle;
using DomainEleOp = DomainEle::UserDataOperator;
using BoundaryEle =
PipelineManager::ElementsAndOpsByDim<SPACE_DIM>::BoundaryEle;
using BdyEleOp = BoundaryEle::UserDataOperator;
using EleOnSide = PipelineManager::ElementsAndOpsByDim<SPACE_DIM>::FaceSideEle;
using SideEleOp = EleOnSide::UserDataOperator;
struct SetUpSchur {
static boost::shared_ptr<SetUpSchur>
createSetUpSchur(MoFEM::Interface &m_field);
virtual MoFEMErrorCode setUp(SmartPetscObj<KSP>) = 0;
protected:
SetUpSchur() = default;
virtual ~SetUpSchur() = default;
};
int approx_order = 1;
int main(int argc, char *argv[]) {
MoFEM::Core::Initialize(&argc, &argv, (char *)0, help);
try {
//! [Register MoFEM discrete manager in PETSc]
DMType dm_name = "DMMOFEM";
CHKERR DMRegister_MoFEM(dm_name);
DMType dm_name_mg = "DMMOFEM_MG";
CHKERR DMRegister_MGViaApproxOrders(dm_name_mg);
//! [Register MoFEM discrete manager in PETSc]
moab::Core mb_instance;
moab::Interface &moab = mb_instance;
// Add logging channel for example
auto core_log = logging::core::get();
core_log->add_sink(
LogManager::createSink(LogManager::getStrmWorld(), "AT"));
LogManager::setLog("AT");
core_log->add_sink(
LogManager::createSink(LogManager::getStrmWorld(), "TIMER"));
LogManager::setLog("TIMER");
MOFEM_LOG_TAG("AT", "atom_test");
MOFEM_LOG_TAG("TIMER", "timer");
// Create MoFEM instance
MoFEM::Core core(moab);
MoFEM::Interface &m_field = core;
auto *simple = m_field.getInterface<Simple>();
CHKERR simple->getOptions();
simple->getAddBoundaryFE() = true;
CHKERR simple->loadFile();
auto add_shared_entities_on_skeleton = [&]() {
MoFEMFunctionBegin;
auto boundary_meshset = simple->getBoundaryMeshSet();
auto skeleton_meshset = simple->getSkeletonMeshSet();
Range bdy_ents;
CHKERR m_field.get_moab().get_entities_by_handle(boundary_meshset,
bdy_ents, true);
Range skeleton_ents;
CHKERR m_field.get_moab().get_entities_by_dimension(
0, simple->getDim() - 1, skeleton_ents, true);
skeleton_ents = subtract(skeleton_ents, bdy_ents);
CHKERR m_field.get_moab().clear_meshset(&skeleton_meshset, 1);
CHKERR m_field.get_moab().add_entities(skeleton_meshset, skeleton_ents);
MoFEMFunctionReturn(0);
};
CHKERR add_shared_entities_on_skeleton();
// Declare elements
enum bases {
AINSWORTH,
AINSWORTH_LOBATTO,
DEMKOWICZ,
BERNSTEIN,
LASBASETOP
};
const char *list_bases[] = {"ainsworth", "ainsworth_lobatto", "demkowicz",
"bernstein"};
PetscBool flg;
PetscInt choice_base_value = AINSWORTH;
CHKERR PetscOptionsGetEList(PETSC_NULL, NULL, "-base", list_bases,
LASBASETOP, &choice_base_value, &flg);
if (flg != PETSC_TRUE)
SETERRQ(PETSC_COMM_SELF, MOFEM_IMPOSSIBLE_CASE, "base not set");
FieldApproximationBase base = AINSWORTH_LEGENDRE_BASE;
if (choice_base_value == AINSWORTH)
base = AINSWORTH_LEGENDRE_BASE;
if (choice_base_value == AINSWORTH_LOBATTO)
base = AINSWORTH_LOBATTO_BASE;
else if (choice_base_value == DEMKOWICZ)
base = DEMKOWICZ_JACOBI_BASE;
else if (choice_base_value == BERNSTEIN)
base = AINSWORTH_BERNSTEIN_BEZIER_BASE;
enum spaces { hdiv, hcurl, last_space };
const char *list_spaces[] = {"hdiv", "hcurl"};
PetscInt choice_space_value = hdiv;
CHKERR PetscOptionsGetEList(PETSC_NULL, NULL, "-space", list_spaces,
last_space, &choice_space_value, &flg);
if (flg != PETSC_TRUE)
SETERRQ(PETSC_COMM_SELF, MOFEM_IMPOSSIBLE_CASE, "space not set");
FieldSpace space = HDIV;
if (choice_space_value == hdiv)
space = HDIV;
else if (choice_space_value == hcurl)
space = HCURL;
CHKERR PetscOptionsGetInt(PETSC_NULL, "", "-order", &approx_order,
PETSC_NULL);
CHKERR simple->addDomainBrokenField("BROKEN", space, base, 1);
CHKERR simple->addDomainField("U", L2, base, 1);
CHKERR simple->addSkeletonField("HYBRID", L2, base, 1);
CHKERR simple->setFieldOrder("BROKEN", approx_order);
CHKERR simple->setFieldOrder("U", approx_order - 1);
CHKERR simple->setFieldOrder("HYBRID", approx_order - 1);
CHKERR simple->setUp();
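// Zero normal-flux (essential) boundary conditions are imposed below by
// removing the side DOFs of the BROKEN field on entities of the ZERO_FLUX
// block set.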
auto bc_mng = m_field.getInterface<BcManager>();
CHKERR bc_mng->removeSideDOFs(simple->getProblemName(), "ZERO_FLUX",
"BROKEN", SPACE_DIM, 0, 1, true);
auto integration_rule = [](int, int, int p) { return 2 * p; };
auto assemble_domain_lhs = [&](auto &pip) {
MoFEMFunctionBegin;
using OpHdivHdiv = FormsIntegrators<DomainEleOp>::Assembly<AT>::BiLinearForm<
IT>::OpMass<3, SPACE_DIM>;
using OpHdivU = FormsIntegrators<DomainEleOp>::Assembly<AT>::BiLinearForm<
IT>::OpMixDivTimesScalar<SPACE_DIM>;
auto beta = [](const double, const double, const double) constexpr {
return 1;
};
pip.push_back(new OpHdivHdiv("BROKEN", "BROKEN", beta));
auto unity = []() constexpr { return 1; };
pip.push_back(new OpHdivU("BROKEN", "U", unity, true));
// First: Iterate over skeleton FEs adjacent to Domain FEs
// Note: BoundaryEle, i.e. uses the skeleton integration rule
auto op_loop_skeleton_side = new OpLoopSide<BoundaryEle>(
m_field, simple->getSkeletonFEName(), SPACE_DIM - 1, Sev::noisy);
op_loop_skeleton_side->getSideFEPtr()->getRuleHook = integration_rule;
CHKERR AddHOOps<SPACE_DIM - 1, SPACE_DIM - 1, SPACE_DIM>::add(
op_loop_skeleton_side->getOpPtrVector(), {});
// Second: Iterate over domain FEs adjacent to the skeleton, in particular
// one domain element.
auto broken_data_ptr =
boost::make_shared<std::vector<BrokenBaseSideData>>();
// Note: EleOnSide, i.e. uses the skeleton rule projected onto the domain element
auto op_loop_domain_side = new OpBrokenLoopSide<EleOnSide>(
m_field, simple->getDomainFEName(), SPACE_DIM, Sev::noisy);
CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(
op_loop_domain_side->getOpPtrVector(), {HDIV});
op_loop_domain_side->getOpPtrVector().push_back(
new OpGetBrokenBaseSideData<SideEleOp>("BROKEN", broken_data_ptr));
op_loop_skeleton_side->getOpPtrVector().push_back(op_loop_domain_side);
using OpC = FormsIntegrators<BdyEleOp>::Assembly<AT>::BiLinearForm<
IT>::OpBrokenSpaceConstrain<1>;
op_loop_skeleton_side->getOpPtrVector().push_back(
new OpC("HYBRID", broken_data_ptr, 1., true, false));
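// The constraint operator above ties the HYBRID multiplier on the skeleton to
// the normal traces of the BROKEN base functions gathered from the adjacent
// domain element; this is what hybridises the broken H-div space.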
if (debug) {
// print skeleton elements on partition
constexpr int partition = 1;
auto op_print = new BdyEleOp(NOSPACE, BdyEleOp::OPSPACE);
op_print->doWorkRhsHook = [&](DataOperator *base_op_ptr, int side,
EntityType type, EntitiesFieldData::EntData &data) {
MoFEMFunctionBegin;
if (auto op_ptr = dynamic_cast<BdyEleOp *>(base_op_ptr)) {
auto fe_method = op_ptr->getFEMethod();
auto num_fe = fe_method->numeredEntFiniteElementPtr;
if (m_field.get_comm_rank() == partition) {
if (num_fe->getPStatus() & PSTATUS_SHARED)
MOFEM_LOG("SELF", Sev::inform) << "Num FE: " << *num_fe;
}
}
MoFEMFunctionReturn(0);
};
op_loop_skeleton_side->getOpPtrVector().push_back(op_print);
};
pip.push_back(op_loop_skeleton_side);
MoFEMFunctionReturn(0);
};
auto assemble_domain_rhs = [&](auto &pip) {
MoFEMFunctionBegin;
using OpDomainSource = FormsIntegrators<DomainEleOp>::Assembly<
AT>::LinearForm<IT>::OpSource<1, 1>;
auto source = [&](const double x, const double y,
const double z) constexpr {
return -1; // sin(100 * (x / 10.) * M_PI_2);
};
pip.push_back(new OpDomainSource("U", source));
MoFEMFunctionReturn(0);
};
auto *pip_mng = m_field.getInterface<PipelineManager>();
CHKERR assemble_domain_lhs(pip_mng->getOpDomainLhsPipeline());
CHKERR assemble_domain_rhs(pip_mng->getOpDomainRhsPipeline());
CHKERR pip_mng->setDomainLhsIntegrationRule(integration_rule);
CHKERR pip_mng->setDomainRhsIntegrationRule(integration_rule);
CHKERR pip_mng->setSkeletonLhsIntegrationRule(integration_rule);
CHKERR pip_mng->setSkeletonRhsIntegrationRule(integration_rule);
TetPolynomialBase::switchCacheBaseOn<HDIV>(
{pip_mng->getDomainLhsFE().get(), pip_mng->getDomainRhsFE().get()});
TetPolynomialBase::switchCacheBaseOn<L2>(
{pip_mng->getDomainLhsFE().get(), pip_mng->getDomainRhsFE().get()});
auto x = createDMVector(simple->getDM());
auto f = vectorDuplicate(x);
if (AT == PETSC) {
auto ksp = pip_mng->createKSP();
CHKERR KSPSetFromOptions(ksp);
BOOST_LOG_SCOPED_THREAD_ATTR("Timeline", attrs::timer());
MOFEM_LOG("TIMER", Sev::inform) << "KSPSetUp";
CHKERR KSPSetUp(ksp);
MOFEM_LOG("TIMER", Sev::inform) << "KSPSetUp <= Done";
MOFEM_LOG("TIMER", Sev::inform) << "KSPSolve";
CHKERR KSPSolve(ksp, f, x);
MOFEM_LOG("TIMER", Sev::inform) << "KSPSolve <= Done";
CHKERR VecGhostUpdateBegin(x, INSERT_VALUES, SCATTER_FORWARD);
CHKERR VecGhostUpdateEnd(x, INSERT_VALUES, SCATTER_FORWARD);
CHKERR DMoFEMMeshToLocalVector(simple->getDM(), x, INSERT_VALUES,
SCATTER_REVERSE);
} else {
auto ksp = pip_mng->createKSP();
auto schur_ptr = SetUpSchur::createSetUpSchur(m_field);
BOOST_LOG_SCOPED_THREAD_ATTR("Timeline", attrs::timer());
MOFEM_LOG("TIMER", Sev::inform) << "KSPSetUp";
CHKERR schur_ptr->setUp(ksp);
MOFEM_LOG("TIMER", Sev::inform) << "KSPSetUp <= Done";
MOFEM_LOG("TIMER", Sev::inform) << "KSPSolve";
CHKERR KSPSolve(ksp, f, x);
MOFEM_LOG("TIMER", Sev::inform) << "KSPSolve <= Done";
CHKERR VecGhostUpdateBegin(x, INSERT_VALUES, SCATTER_FORWARD);
CHKERR VecGhostUpdateEnd(x, INSERT_VALUES, SCATTER_FORWARD);
CHKERR DMoFEMMeshToLocalVector(simple->getDM(), x, INSERT_VALUES,
SCATTER_REVERSE);
}
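// Both branches solve the same system: the first with plain PETSc assembly,
// the second through SetUpSchur, which configures the block matrices and the
// Schur-complement preconditioner (see the implementation below main()).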
auto check_residual = [&](auto x, auto f) {
MoFEMFunctionBegin;
auto *simple = m_field.getInterface<Simple>();
auto *pip_mng = m_field.getInterface<PipelineManager>();
// auto &skeleton_rhs = pip_mng->getOpSkeletonRhsPipeline();
auto &domain_rhs = pip_mng->getOpDomainRhsPipeline();
// skeleton_rhs.clear();
domain_rhs.clear();
auto div_flux_ptr = boost::make_shared<VectorDouble>();
domain_rhs.push_back(new OpCalculateHdivVectorDivergence<3, SPACE_DIM>(
"BROKEN", div_flux_ptr));
using OpUDivFlux = FormsIntegrators<DomainEleOp>::Assembly<
AT>::LinearForm<IT>::OpBaseTimesScalarField<1>;
auto beta = [](double, double, double) constexpr { return 1; };
domain_rhs.push_back(new OpUDivFlux("U", div_flux_ptr, beta));
auto source = [&](const double x, const double y,
const double z) constexpr { return 1; };
using OpDomainSource = FormsIntegrators<DomainEleOp>::Assembly<
AT>::LinearForm<IT>::OpSource<1, 1>;
domain_rhs.push_back(new OpDomainSource("U", source));
using OpHDivH = FormsIntegrators<DomainEleOp>::Assembly<AT>::LinearForm<
IT>::OpMixDivTimesU<3, 1, SPACE_DIM>;
using OpHdivFlux = FormsIntegrators<DomainEleOp>::Assembly<
AT>::LinearForm<IT>::OpBaseTimesVector<3, 3, 1>;
auto flux_ptr = boost::make_shared<MatrixDouble>();
domain_rhs.push_back(
new OpCalculateHVecVectorField<3>("BROKEN", flux_ptr));
boost::shared_ptr<VectorDouble> u_ptr =
boost::make_shared<VectorDouble>();
domain_rhs.push_back(new OpCalculateScalarFieldValues("U", u_ptr));
domain_rhs.push_back(new OpHDivH("BROKEN", u_ptr, beta));
domain_rhs.push_back(new OpHdivFlux("BROKEN", flux_ptr, beta));
// First: Iterate over skeleton FEs adjacent to Domain FEs
// Note: BoundaryEle, i.e. uses the skeleton integration rule
auto op_loop_skeleton_side = new OpLoopSide<BoundaryEle>(
m_field, simple->getSkeletonFEName(), SPACE_DIM - 1, Sev::noisy);
op_loop_skeleton_side->getSideFEPtr()->getRuleHook = integration_rule;
CHKERR AddHOOps<SPACE_DIM - 1, SPACE_DIM - 1, SPACE_DIM>::add(
op_loop_skeleton_side->getOpPtrVector(), {});
// Second: Iterate over domain FEs adjacent to the skeleton, in particular
// one domain element.
auto broken_data_ptr =
boost::make_shared<std::vector<BrokenBaseSideData>>();
// Note: EleOnSide, i.e. uses the skeleton rule projected onto the domain element
auto op_loop_domain_side = new OpBrokenLoopSide<EleOnSide>(
m_field, simple->getDomainFEName(), SPACE_DIM, Sev::noisy);
CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(
op_loop_domain_side->getOpPtrVector(), {HDIV});
op_loop_domain_side->getOpPtrVector().push_back(
new OpGetBrokenBaseSideData<SideEleOp>("BROKEN", broken_data_ptr));
auto flux_mat_ptr = boost::make_shared<MatrixDouble>();
op_loop_domain_side->getOpPtrVector().push_back(
new OpCalculateHVecTensorField<1, 3>("BROKEN", flux_mat_ptr));
op_loop_domain_side->getOpPtrVector().push_back(
new OpSetFlux<SideEleOp>(broken_data_ptr, flux_mat_ptr));
// Assemble on skeleton
op_loop_skeleton_side->getOpPtrVector().push_back(op_loop_domain_side);
using OpC_dHybrid = FormsIntegrators<BdyEleOp>::Assembly<AT>::LinearForm<
IT>::OpBrokenSpaceConstrainDHybrid<1>;
using OpC_dBroken = FormsIntegrators<BdyEleOp>::Assembly<AT>::LinearForm<
IT>::OpBrokenSpaceConstrainDFlux<1>;
op_loop_skeleton_side->getOpPtrVector().push_back(
new OpC_dHybrid("HYBRID", broken_data_ptr, 1.));
auto hybrid_ptr = boost::make_shared<MatrixDouble>();
op_loop_skeleton_side->getOpPtrVector().push_back(
new OpCalculateVectorFieldValues<1>("HYBRID", hybrid_ptr));
op_loop_skeleton_side->getOpPtrVector().push_back(
new OpC_dBroken(broken_data_ptr, hybrid_ptr, 1.));
// Add skeleton to domain pipeline
domain_rhs.push_back(op_loop_skeleton_side);
CHKERR VecZeroEntries(f);
CHKERR VecGhostUpdateBegin(f, INSERT_VALUES, SCATTER_FORWARD);
CHKERR VecGhostUpdateEnd(f, INSERT_VALUES, SCATTER_FORWARD);
pip_mng->getDomainRhsFE()->f = f;
pip_mng->getSkeletonRhsFE()->f = f;
pip_mng->getDomainRhsFE()->x = x;
pip_mng->getSkeletonRhsFE()->x = x;
CHKERR DMoFEMLoopFiniteElements(simple->getDM(),
simple->getDomainFEName(),
pip_mng->getDomainRhsFE());
CHKERR VecGhostUpdateBegin(f, ADD_VALUES, SCATTER_REVERSE);
CHKERR VecGhostUpdateEnd(f, ADD_VALUES, SCATTER_REVERSE);
CHKERR VecAssemblyBegin(f);
CHKERR VecAssemblyEnd(f);
double fnrm;
CHKERR VecNorm(f, NORM_2, &fnrm);
MOFEM_LOG_C("AT", Sev::inform, "Residual %3.4e", fnrm);
constexpr double eps = 1e-8;
if (fnrm > eps)
SETERRQ(PETSC_COMM_WORLD, MOFEM_ATOM_TEST_INVALID,
"Residual norm larger than accepted");
MoFEMFunctionReturn(0);
};
auto calculate_error = [&]() {
MoFEMFunctionBegin;
// auto &skeleton_rhs = pip_mng->getOpSkeletonRhsPipeline();
auto &domain_rhs = pip_mng->getOpDomainRhsPipeline();
// skeleton_rhs.clear();
domain_rhs.clear();
auto u_grad_ptr = boost::make_shared<MatrixDouble>();
auto flux_val_ptr = boost::make_shared<MatrixDouble>();
auto div_val_ptr = boost::make_shared<VectorDouble>();
auto source_ptr = boost::make_shared<VectorDouble>();
domain_rhs.push_back(
new OpCalculateScalarFieldGradient<SPACE_DIM>("U", u_grad_ptr));
domain_rhs.push_back(
new OpCalculateHVecVectorField<3, SPACE_DIM>("BROKEN", flux_val_ptr));
domain_rhs.push_back(new OpCalculateHdivVectorDivergence<3, SPACE_DIM>(
"BROKEN", div_val_ptr));
auto source = [&](const double x, const double y,
const double z) constexpr { return -1; };
domain_rhs.push_back(new OpGetTensor0fromFunc(source_ptr, source));
enum { DIV, GRAD, LAST };
auto mpi_vec = createVectorMPI(
m_field.get_comm(), (!m_field.get_comm_rank()) ? LAST : 0, LAST);
domain_rhs.push_back(
new OpCalcNormL2Tensor0(div_val_ptr, mpi_vec, DIV, source_ptr));
domain_rhs.push_back(new OpCalcNormL2Tensor1<SPACE_DIM>(
u_grad_ptr, mpi_vec, GRAD, flux_val_ptr));
CHKERR DMoFEMLoopFiniteElements(simple->getDM(),
simple->getDomainFEName(),
pip_mng->getDomainRhsFE());
CHKERR VecAssemblyBegin(mpi_vec);
CHKERR VecAssemblyEnd(mpi_vec);
if (!m_field.get_comm_rank()) {
const double *error_ind;
CHKERR VecGetArrayRead(mpi_vec, &error_ind);
MOFEM_LOG("AT", Sev::inform)
<< "Approximation error ||div flux - source||: "
<< std::sqrt(error_ind[DIV]);
MOFEM_LOG("AT", Sev::inform) << "Approximation error ||grad-flux||: "
<< std::sqrt(error_ind[GRAD]);
CHKERR VecRestoreArrayRead(mpi_vec, &error_ind);
}
MoFEMFunctionReturn(0);
};
auto get_post_proc_fe = [&]() {
auto post_proc_fe = boost::make_shared<PostProcEle>(m_field);
auto op_loop_side = new OpLoopSide<EleOnSide>(
m_field, simple->getDomainFEName(), SPACE_DIM, Sev::noisy,
boost::make_shared<
ForcesAndSourcesCore::UserDataOperator::AdjCache>());
post_proc_fe->getOpPtrVector().push_back(op_loop_side);
CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(
op_loop_side->getOpPtrVector(), {HDIV});
using OpPPMap = OpPostProcMapInMoab<SPACE_DIM, SPACE_DIM>;
auto u_vec_ptr = boost::make_shared<VectorDouble>();
auto flux_mat_ptr = boost::make_shared<MatrixDouble>();
op_loop_side->getOpPtrVector().push_back(
new OpCalculateScalarFieldValues("U", u_vec_ptr));
op_loop_side->getOpPtrVector().push_back(
new OpCalculateHVecVectorField<3>("BROKEN", flux_mat_ptr));
op_loop_side->getOpPtrVector().push_back(
new OpPPMap(
post_proc_fe->getPostProcMesh(),
post_proc_fe->getMapGaussPts(),
{{"U", u_vec_ptr}},
{{"BROKEN", flux_mat_ptr}},
{}, {})
);
return post_proc_fe;
};
auto post_proc_fe = get_post_proc_fe();
CHKERR DMoFEMLoopFiniteElements(simple->getDM(),
simple->getBoundaryFEName(), post_proc_fe);
CHKERR post_proc_fe->writeFile("out_result.h5m");
CHKERR calculate_error();
CHKERR check_residual(x, f);
}
CATCH_ERRORS;

CHKERR MoFEM::Core::Finalize();
return 0;
}
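// SetUpSchurImpl configures the PCFIELDSPLIT solver: it creates two sub-DMs
// (SCHUR carrying the HYBRID field, BLOCK carrying BROKEN and U), inserts the
// OpSchurAssembleBegin/End pair that assembles the Schur complement S, and,
// for block assembly, attaches the nested block and Schur matrices to the KSP.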
struct SetUpSchurImpl : public SetUpSchur {
SetUpSchurImpl(MoFEM::Interface &m_field) : SetUpSchur(), mField(m_field) {}
virtual ~SetUpSchurImpl() = default;
MoFEMErrorCode setUp(SmartPetscObj<KSP> solver);

private:
MoFEM::Interface &mField;
SmartPetscObj<Mat> S;
};

MoFEMErrorCode SetUpSchurImpl::setUp(SmartPetscObj<KSP> ksp) {
MoFEMFunctionBegin;
auto simple = mField.getInterface<Simple>();
auto pip_mng = mField.getInterface<PipelineManager>();
CHKERR KSPSetFromOptions(ksp);
PC pc;
CHKERR KSPGetPC(ksp, &pc);
PetscBool is_pcfs = PETSC_FALSE;
PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &is_pcfs);
if (is_pcfs) {
MOFEM_LOG("AT", Sev::inform) << "Setup Schur pc";
auto create_sub_dm = [&]() {
auto create_dm = [&](
std::string problem_name,
std::vector<std::string> fe_names,
std::vector<std::string> fields,
auto dm_type
) {
auto dm = createDM(mField.get_comm(), dm_type);
auto create_dm_imp = [&]() {
MoFEMFunctionBegin;
CHKERR DMMoFEMCreateSubDM(dm, simple->getDM(), problem_name.c_str());
for (auto fe : fe_names) {
CHKERR DMMoFEMAddElement(dm, fe);
}
CHKERR DMMoFEMAddElement(dm, simple->getSkeletonFEName());
for (auto field : fields) {
CHKERR DMMoFEMAddSubFieldRow(dm, field.c_str());
CHKERR DMMoFEMAddSubFieldCol(dm, field.c_str());
}
CHKERR DMSetUp(dm);
MoFEMFunctionReturn(0);
};
CHK_THROW_MESSAGE(
create_dm_imp(),
"Error in creating schurDM. It is possible that schurDM is "
"already created");
return dm;
};
auto schur_dm = create_dm(
"SCHUR",
{simple->getDomainFEName(), simple->getSkeletonFEName()},
{"HYBRID"},
"DMMOFEM_MG");
auto block_dm = create_dm(
"BLOCK",
{simple->getDomainFEName(), simple->getSkeletonFEName()},
{"BROKEN", "U"},
"DMMOFEM");
return std::make_tuple(schur_dm, block_dm);
};
auto get_nested_mat_data = [&](auto schur_dm, auto block_dm) {
auto block_mat_data = createBlockMatStructure(
simple->getDM(),
{
{
simple->getDomainFEName(),
{
{"BROKEN", "BROKEN"},
{"U", "U"},
{"BROKEN", "U"},
{"U", "BROKEN"}
}
},
{
simple->getSkeletonFEName(),
{
{"BROKEN", "HYBRID"}, {"HYBRID", "BROKEN"}
}
}
}
);
return createSchurNestedMatrixStruture(
{schur_dm, block_dm}, block_mat_data,
{"BROKEN", "U"}, {nullptr, nullptr}, true
);
};
auto set_ops = [&](auto schur_dm) {
MoFEMFunctionBegin;
auto dm_is = getDMSubData(schur_dm)->getSmartRowIs();
auto ao_up = createAOMappingIS(dm_is, PETSC_NULL);
boost::shared_ptr<BlockStructure> block_data;
CHKERR DMMoFEMGetBlocMatData(simple->getDM(), block_data);
pip_mng->getOpDomainLhsPipeline().push_front(createOpSchurAssembleBegin());
pip_mng->getOpDomainLhsPipeline().push_back(
createOpSchurAssembleEnd({"BROKEN", "U"}, {nullptr, nullptr}, ao_up,
S, true, true)
);
auto pre_proc_schur_lhs_ptr = boost::make_shared<FEMethod>();
auto post_proc_schur_lhs_ptr = boost::make_shared<FEMethod>();
pre_proc_schur_lhs_ptr->preProcessHook = [this]() {
MoFEMFunctionBegin;
CHKERR MatZeroEntries(S);
MOFEM_LOG("AT", Sev::verbose) << "Lhs Assemble Begin";
MoFEMFunctionReturn(0);
};
post_proc_schur_lhs_ptr->postProcessHook = [this, ao_up,
post_proc_schur_lhs_ptr]() {
MoFEMFunctionBegin;
MOFEM_LOG("AT", Sev::verbose) << "Lhs Assemble End";
CHKERR MatAssemblyBegin(S, MAT_FINAL_ASSEMBLY);
CHKERR MatAssemblyEnd(S, MAT_FINAL_ASSEMBLY);
MOFEM_LOG("AT", Sev::verbose) << "Lhs Assemble Finish";
MoFEMFunctionReturn(0);
};
auto ksp_ctx_ptr = getDMKspCtx(simple->getDM());
ksp_ctx_ptr->getPreProcSetOperators().push_front(pre_proc_schur_lhs_ptr);
ksp_ctx_ptr->getPostProcSetOperators().push_back(post_proc_schur_lhs_ptr);
MoFEMFunctionReturn(0);
};
auto set_pc = [&](auto pc, auto block_dm) {
MoFEMFunctionBegin;
auto block_is = getDMSubData(block_dm)->getSmartRowIs();
CHKERR PCFieldSplitSetIS(pc, NULL, block_is);
CHKERR PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_USER, S);
MoFEMFunctionReturn(0);
};
auto set_diagonal_pc = [&](auto pc, auto schur_dm) {
MoFEMFunctionBegin;
if (AT == BLOCK_SCHUR) {
auto A = createDMBlockMat(simple->getDM());
auto P = createDMNestSchurMat(simple->getDM());
CHKERR PCSetOperators(pc, A, P);
}
KSP *subksp;
CHKERR PCFieldSplitSchurGetSubKSP(pc, PETSC_NULL, &subksp);
auto get_pc = [](auto ksp) {
PC pc_raw;
CHKERR KSPGetPC(ksp, &pc_raw);
return pc_raw;
};
CHKERR setSchurA00MatSolvePC(SmartPetscObj<PC>(get_pc(subksp[0]), true));
auto set_pc_p_mg = [&](auto dm, auto pc) {
MoFEMFunctionBegin;
CHKERR PCSetDM(pc, dm);
PetscBool same = PETSC_FALSE;
PetscObjectTypeCompare((PetscObject)pc, PCMG, &same);
if (same) {
// By default do not use shell mg mat. Implementation of SOR is slow.
CHKERR PCSetFromOptions(pc);
}
MoFEMFunctionReturn(0);
};
CHKERR set_pc_p_mg(schur_dm, get_pc(subksp[1]));
CHKERR PetscFree(subksp);
MoFEMFunctionReturn(0);
};
auto [schur_dm, block_dm] = create_sub_dm();
if (AT == BLOCK_SCHUR) {
auto nested_mat_data = get_nested_mat_data(schur_dm, block_dm);
CHKERR DMMoFEMSetNestSchurData(simple->getDM(), nested_mat_data);
}
S = createDMHybridisedL2Matrix(schur_dm);
CHKERR MatSetDM(S, PETSC_NULL);
int bs = (SPACE_DIM == 2) ? NBEDGE_L2(approx_order - 1)
: NBFACETRI_L2(approx_order - 1);
CHKERR MatSetBlockSize(S, bs);
CHKERR set_ops(schur_dm);
CHKERR set_pc(pc, block_dm);
DM solver_dm;
CHKERR KSPGetDM(ksp, &solver_dm);
if (AT == BLOCK_SCHUR)
CHKERR DMSetMatType(solver_dm, MATSHELL);
CHKERR KSPSetUp(ksp);
if (AT == BLOCK_SCHUR)
CHKERR set_diagonal_pc(pc, schur_dm);
} else {
SETERRQ(PETSC_COMM_SELF, MOFEM_ATOM_TEST_INVALID,
"PC is not set to PCFIELDSPLIT");
}
MoFEMFunctionReturn(0);
}
boost::shared_ptr<SetUpSchur>
SetUpSchur::createSetUpSchur(MoFEM::Interface &m_field) {
return boost::shared_ptr<SetUpSchur>(new SetUpSchurImpl(m_field));
}
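A note on running the test: -base and -space must be supplied on the command line (the test aborts with "base not set" or "space not set" otherwise), -order selects the approximation order (default 1), and the mesh and solver are configured through the usual Simple-interface and PETSc options. The Schur-complement branch additionally expects -pc_type fieldsplit, since any other preconditioner triggers the "PC is not set to PCFIELDSPLIT" error.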