#ifndef EXECUTABLE_DIMENSION
  #define EXECUTABLE_DIMENSION 3
#endif

constexpr IntegrationType IT =
    IntegrationType::GAUSS; // selected integration type
// Exponential term of the saturation-type isotropic hardening law. The
// exponent is clamped from below so that std::exp never underflows.
inline double iso_hardening_exp(double tau, double b_iso) {
  return std::exp(
      std::max(static_cast<double>(std::numeric_limits<float>::min_exponent10),
               -b_iso * tau));
}

// Derivative of the isotropic hardening law with respect to tau, floored at a
// small fraction of its initial value (the body of r is restored here).
inline double iso_hardening_dtau(double tau, double H, double Qinf,
                                 double b_iso) {
  auto r = [&](auto tau) {
    return H + Qinf * b_iso * iso_hardening_exp(tau, b_iso);
  };
  constexpr double eps = 1e-12;
  return std::max(r(tau), eps * r(0));
}
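// Illustrative sketch (assumption, not part of the tutorial sources): a
// standalone evaluation of the saturation-type hardening law suggested by the
// parameters above, sigma_y(tau) = sigmaY + H*tau + Qinf*(1 - exp(-b_iso*tau)),
// with the same underflow clamp and epsilon floor on the derivative. The
// material values below are arbitrary example inputs.
#include <algorithm>
#include <cmath>
#include <cstdio>
#include <limits>

static double hardening_exp(double tau, double b_iso) {
  // Clamp the exponent so exp() never underflows for large tau.
  return std::exp(
      std::max(static_cast<double>(std::numeric_limits<float>::min_exponent10),
               -b_iso * tau));
}

static double hardening(double tau, double H, double Qinf, double b_iso,
                        double sigmaY) {
  return H * tau + Qinf * (1. - hardening_exp(tau, b_iso)) + sigmaY;
}

static double hardening_dtau(double tau, double H, double Qinf, double b_iso) {
  auto r = [&](double t) { return H + Qinf * b_iso * hardening_exp(t, b_iso); };
  constexpr double eps = 1e-12;
  return std::max(r(tau), eps * r(0)); // keep the tangent strictly positive
}

int main() {
  const double H = 1e3, Qinf = 100., b_iso = 10., sigmaY = 200.;
  for (double tau : {0., 0.05, 0.1, 0.5})
    std::printf("tau=%.2f  sigma_y=%.3f  d sigma_y/d tau=%.3f\n", tau,
                hardening(tau, H, Qinf, b_iso, sigmaY),
                hardening_dtau(tau, H, Qinf, b_iso));
}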
// Back-stress for linear kinematic hardening, alpha = C1_k * eps_p. The
// declarations of the indices and of t_alpha are restored; only the guard and
// the assignment appear verbatim in the excerpt.
template <typename T, int DIM>
inline auto
kinematic_hardening(FTensor::Tensor2_symmetric<T, DIM> &t_plastic_strain,
                    double C1_k) {
  FTensor::Index<'i', DIM> i;
  FTensor::Index<'j', DIM> j;
  FTensor::Tensor2_symmetric<double, DIM> t_alpha;
  if (C1_k < std::numeric_limits<double>::epsilon()) {
    t_alpha(i, j) = 0;
    return t_alpha;
  }
  t_alpha(i, j) = C1_k * t_plastic_strain(i, j);
  return t_alpha;
}
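// Illustrative sketch (assumption, not MoFEM code): linear kinematic hardening
// for a plane problem, storing a symmetric 2x2 tensor as {xx, xy, yy}. The
// back-stress alpha = C1_k * eps_p vanishes when C1_k is numerically zero,
// mirroring the guard in the template above.
#include <array>
#include <cstdio>
#include <limits>

using SymTensor2D = std::array<double, 3>; // {xx, xy, yy}

static SymTensor2D back_stress(const SymTensor2D &plastic_strain, double C1_k) {
  SymTensor2D alpha{0., 0., 0.};
  if (C1_k < std::numeric_limits<double>::epsilon())
    return alpha; // no kinematic hardening
  for (std::size_t i = 0; i != alpha.size(); ++i)
    alpha[i] = C1_k * plastic_strain[i];
  return alpha;
}

int main() {
  const SymTensor2D ep{0.02, -0.005, -0.01};  // example plastic strain
  const auto alpha = back_stress(ep, 1000.);  // C1_k chosen arbitrarily
  std::printf("alpha = [%g, %g, %g]\n", alpha[0], alpha[1], alpha[2]);
}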
#include <HenckyOps.hpp>
#include <PlasticOps.hpp>
#include <PlasticNaturalBCs.hpp>

#ifdef ENABLE_PYTHON_BINDING
  #include <boost/python.hpp>
  #include <boost/python/def.hpp>
  #include <boost/python/numpy.hpp>
namespace bp = boost::python;
namespace np = boost::python::numpy;
#endif // ENABLE_PYTHON_BINDING
#include <ContactOps.hpp>

using OpDomainRhsBCs =
    DomainRhsBCs::OpFlux<PlasticOps::DomainBCs, 1, SPACE_DIM>;
using OpBoundaryRhsBCs =
    BoundaryRhsBCs::OpFlux<PlasticOps::BoundaryBCs, 1, SPACE_DIM>;
using OpBoundaryLhsBCs =
    BoundaryLhsBCs::OpFlux<PlasticOps::BoundaryBCs, 1, SPACE_DIM>;
// Add operators pushing bases from local to physical configuration; each
// specialisation handles one combination of element, problem and space
// dimension.
template <int FE_DIM, int PROBLEM_DIM, int SPACE_DIM> struct AddHOOps;

template <> struct AddHOOps<2, 3, 3> {
  static MoFEMErrorCode
  add(boost::ptr_deque<ForcesAndSourcesCore::UserDataOperator> &pipeline,
      std::vector<FieldSpace> space, std::string geom_field_name);
};
template <> struct AddHOOps<1, 2, 2> {
  static MoFEMErrorCode
  add(boost::ptr_deque<ForcesAndSourcesCore::UserDataOperator> &pipeline,
      std::vector<FieldSpace> space, std::string geom_field_name);
};
template <> struct AddHOOps<3, 3, 3> {
  static MoFEMErrorCode
  add(boost::ptr_deque<ForcesAndSourcesCore::UserDataOperator> &pipeline,
      std::vector<FieldSpace> space, std::string geom_field_name);
};
template <> struct AddHOOps<2, 2, 2> {
  static MoFEMErrorCode
  add(boost::ptr_deque<ForcesAndSourcesCore::UserDataOperator> &pipeline,
      std::vector<FieldSpace> space, std::string geom_field_name);
};
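// Illustrative sketch (assumption): the AddHOOps pattern above dispatches on
// element, problem and space dimension at compile time. The toy example below
// uses the same primary-template / specialisation layout, so the caller simply
// writes Mapping<FE_DIM, SPACE_DIM>::name() and the right variant is chosen.
#include <cstdio>

template <int FE_DIM, int SPACE_DIM> struct Mapping; // primary template

template <> struct Mapping<2, 3> { // faces embedded in 3D
  static const char *name() { return "surface element in 3D"; }
};
template <> struct Mapping<3, 3> { // volumes in 3D
  static const char *name() { return "volume element in 3D"; }
};
template <> struct Mapping<2, 2> { // faces in 2D
  static const char *name() { return "face element in 2D"; }
};

int main() {
  std::printf("%s\n", Mapping<2, 3>::name());
  std::printf("%s\n", Mapping<3, 3>::name());
}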
double getScale(const double time) {
  // Scale the inherited time history by the global load factor (body assumed).
  return scale * MoFEM::TimeScale::getScale(time);
}

#ifdef ENABLE_PYTHON_BINDING
boost::shared_ptr<ContactOps::SDFPython> sdfPythonPtr;
#endif

PetscBool test_ops = PETSC_FALSE;
if (test_ops == PETSC_FALSE) {
  // ... run the analysis; otherwise only the operators are tested ...
auto get_ents_by_dim = [&](const auto dim) {
  // ... collect mesh entities of the given dimension ...
};

// Pick the approximation base from the type of the domain elements.
auto get_base = [&]() {
  auto domain_ents = get_ents_by_dim(SPACE_DIM);
  if (domain_ents.empty())
    /* ... */;
};
const auto base = get_base();

// Entities on which the approximation order is set: vertices, edges, faces
// and volumes.
auto ents = get_ents_by_dim(0);
ents.merge(get_ents_by_dim(1));
ents.merge(get_ents_by_dim(2));
ents.merge(get_ents_by_dim(3));
// Boundary set-up: take the skin of the body, keep only faces that belong to
// "CONTACT" blocksets, and restrict the result to the part owned by this
// processor.
auto get_skin = [&]() {
  /* ... */
  CHKERR skin.find_skin(0, body_ents, false, skin_ents);
  /* ... */
};

auto filter_blocks = [&](auto skin) {
  bool is_contact_block = true;
  Range contact_range;
  // Loop over meshsets whose name matches the regular expression below.
  (boost::format("%s(.*)") % "CONTACT").str()
  /* ... */
      MOFEM_LOG("CONTACT", Sev::inform)
          << "Find contact block set: " << m->getName();
      auto meshset = m->getMeshset();
      Range contact_meshset_range;
      CHKERR mField.get_moab().get_entities_by_dimension(
          meshset, SPACE_DIM - 1, contact_meshset_range, true);
      // Synchronise the range across processors (call head restored).
      CHKERR mField.getInterface<CommInterface>()->synchroniseEntities(
          contact_meshset_range);
      contact_range.merge(contact_meshset_range);
  /* ... */
  if (is_contact_block) {
    MOFEM_LOG("SYNC", Sev::inform)
        << "Nb entities in contact surface: " << contact_range.size();
    /* ... */
    skin = intersect(skin, contact_range);
  }
  return skin;
};

auto filter_true_skin = [&](auto skin) {
  Range boundary_ents;
  ParallelComm *pcomm =
      ParallelComm::get_pcomm(&mField.get_moab(), MYPCOMM_INDEX);
  CHKERR pcomm->filter_pstatus(skin, PSTATUS_SHARED | PSTATUS_MULTISHARED,
                               PSTATUS_NOT, -1, &boundary_ents);
  return boundary_ents;
};
auto project_ho_geometry = [&]() {
  // ... project the mesh coordinates onto the "GEOMETRY" field ...
};
PetscBool project_geometry = PETSC_TRUE;
CHKERR PetscOptionsGetBool(PETSC_NULLPTR, "", "-project_geometry", // name assumed
                           &project_geometry, PETSC_NULLPTR);
if (project_geometry) {
  CHKERR project_ho_geometry();
}
// Compute total mesh volume and number of domain elements: accumulate both in
// a doWork hook, then sum over all processors. VOLUME and COUNT index the two
// slots (enum defined in the source); the hook tail is restored.
auto get_volume = [&]() {
  using VolOp = DomainEle::UserDataOperator;
  auto *op_ptr = new VolOp(NOSPACE, VolOp::OPSPACE);
  std::array<double, 2> volume_and_count;
  op_ptr->doWorkRhsHook = [&](DataOperator *base_op_ptr, int side,
                              EntityType type,
                              EntitiesFieldData::EntData &data) {
    MoFEMFunctionBeginHot;
    auto op_ptr = static_cast<VolOp *>(base_op_ptr);
    volume_and_count[VOLUME] += op_ptr->getMeasure();
    volume_and_count[COUNT] += 1;
    MoFEMFunctionReturnHot(0);
  };

  volume_and_count = {0, 0};
  auto fe = boost::make_shared<DomainEle>(mField);
  fe->getOpPtrVector().push_back(op_ptr);
  auto dm = simple->getDM();
  CHKERR DMoFEMLoopFiniteElements(dm, simple->getDomainFEName(), fe);

  std::array<double, 2> tot_volume_and_count;
  MPI_Allreduce(volume_and_count.data(), tot_volume_and_count.data(),
                volume_and_count.size(), MPI_DOUBLE, MPI_SUM,
                mField.get_comm());
  return tot_volume_and_count;
};
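// Illustrative sketch (assumption, plain MPI rather than the MoFEM loop): each
// rank accumulates a local {volume, element count} pair and MPI_Allreduce with
// MPI_SUM makes the global totals available on every rank, exactly as in
// get_volume above. Compile with mpicxx and run with mpirun.
#include <array>
#include <cstdio>
#include <mpi.h>

int main(int argc, char **argv) {
  MPI_Init(&argc, &argv);
  int rank;
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);

  // Pretend every rank owns (rank + 1) elements of volume 0.5 each.
  std::array<double, 2> local{0.5 * (rank + 1), double(rank + 1)};
  std::array<double, 2> total;
  MPI_Allreduce(local.data(), total.data(), static_cast<int>(local.size()),
                MPI_DOUBLE, MPI_SUM, MPI_COMM_WORLD);

  if (rank == 0)
    std::printf("total volume %g over %g elements\n", total[0], total[1]);
  MPI_Finalize();
}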
auto get_command_line_parameters = [&]() {
  // ... read material parameters and approximation orders from the options ...
  PetscBool tau_order_is_set; // true if tau_order was given on the command line
  PetscBool ep_order_is_set;  // true if ep_order was given on the command line
  /* ... */

  MOFEM_LOG("PLASTICITY", Sev::inform) << "Hardening " << H;
  MOFEM_LOG("PLASTICITY", Sev::inform) << "Viscous hardening " << visH;
  MOFEM_LOG("PLASTICITY", Sev::inform) << "Saturation yield stress " << Qinf;
  MOFEM_LOG("PLASTICITY", Sev::inform) << "Saturation exponent " << b_iso;
  MOFEM_LOG("PLASTICITY", Sev::inform) << "Kinematic hardening " << C1_k;

  // If not given explicitly, the orders of TAU and EP default relative to the
  // displacement order (offsets restored).
  if (tau_order_is_set == PETSC_FALSE)
    tau_order = order - 2;
  if (ep_order_is_set == PETSC_FALSE)
    ep_order = order - 1;

  MOFEM_LOG("PLASTICITY", Sev::inform) << "Approximation order " << order;
  MOFEM_LOG("PLASTICITY", Sev::inform)
      << "Ep approximation order " << ep_order;
  MOFEM_LOG("PLASTICITY", Sev::inform)
      << "Tau approximation order " << tau_order;
  MOFEM_LOG("PLASTICITY", Sev::inform)
      << "Geometry approximation order " << geom_order;
  MOFEM_LOG("PLASTICITY", Sev::inform) << "Density " << rho;

  PetscBool is_scale = PETSC_TRUE;
  /* ... */
};

CHKERR get_command_line_parameters();
#ifdef ENABLE_PYTHON_BINDING
  auto file_exists = [](std::string myfile) {
    std::ifstream file(myfile.c_str());
    return static_cast<bool>(file);
  };

  char sdf_file_name[255] = "sdf.py";
  CHKERR PetscOptionsGetString(PETSC_NULLPTR, "", "-sdf_file", // name assumed
                               sdf_file_name, 255, PETSC_NULLPTR);

  if (file_exists(sdf_file_name)) {
    MOFEM_LOG("CONTACT", Sev::inform) << sdf_file_name << " file found";
    sdfPythonPtr = boost::make_shared<ContactOps::SDFPython>();
    CHKERR sdfPythonPtr->sdfInit(sdf_file_name);
    ContactOps::sdfPythonWeakPtr = sdfPythonPtr;
  } else {
    MOFEM_LOG("CONTACT", Sev::warning) << sdf_file_name << " file NOT found";
  }
#endif
// Remove DOFs of the displacement field U and of the contact traction field
// SIGMA on blocksets that fix or remove the corresponding directions
// (component ranges on the U calls restored).
CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(), "REMOVE_X",
                                         "U", 0, 0);
CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(), "REMOVE_Y",
                                         "U", 1, 1);
CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(), "REMOVE_Z",
                                         "U", 2, 2);
CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(),
                                         "REMOVE_ALL", "U", 0, 3);

for (auto b : {"FIX_X", "REMOVE_X"})
  CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(), b,
                                           "SIGMA", 0, 0, false, true);
for (auto b : {"FIX_Y", "REMOVE_Y"})
  CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(), b,
                                           "SIGMA", 1, 1, false, true);
for (auto b : {"FIX_Z", "REMOVE_Z"})
  CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(), b,
                                           "SIGMA", 2, 2, false, true);
for (auto b : {"FIX_ALL", "REMOVE_ALL"})
  CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(), b,
                                           "SIGMA", 0, 3, false, true);
CHKERR bc_mng->removeBlockDOFsOnEntities(
    simple->getProblemName(), "NO_CONTACT", "SIGMA", 0, 3, false, true);

// Mark DOFs with essential displacement conditions (call head restored).
CHKERR bc_mng->pushMarkDOFsOnEntities<DisplacementCubitBcData>(
    simple->getProblemName(), "U");

auto &bc_map = bc_mng->getBcMapByBlockName();
for (auto bc : bc_map)
  MOFEM_LOG("PLASTICITY", Sev::verbose) << "Marker " << bc.first;
auto integration_rule_bc = [](int, int, int ao) { return 2 * ao; };
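// Why "2 * ao": the integration-rule hook returns the polynomial degree the
// quadrature should integrate exactly, and a boundary mass-type term is a
// product of two basis functions of approximation order ao, hence degree 2*ao.
// A minimal sketch (assumption) of the same rule for a bilinear form with
// different row and column orders:
#include <cstdio>

static int integration_rule_mass(int row_order, int col_order) {
  return row_order + col_order; // degree of the integrand shape * shape
}

int main() {
  std::printf("order-2 trial/test functions need a rule of degree %d\n",
              integration_rule_mass(2, 2));
}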
auto add_boundary_ops_lhs_mechanical = [&](auto &pip) {
  CHKERR AddHOOps<SPACE_DIM - 1, SPACE_DIM, SPACE_DIM>::add(
      pip, {HDIV}, "GEOMETRY");
  // Natural boundary conditions on the displacement field.
  CHKERR BoundaryLhsBCs::AddFluxToPipeline<OpBoundaryLhsBCs>::add(
      pip, mField, "U", Sev::inform);
  // Contact terms: boundary block and its coupling to the domain.
  CHKERR ContactOps::opFactoryBoundaryLhs<SPACE_DIM, AT, GAUSS, BoundaryEleOp>(
      /* ... */);
  CHKERR ContactOps::opFactoryBoundaryToDomainLhs<SPACE_DIM, AT, IT, DomainEle>(
      mField, pip, simple->getDomainFEName(), "SIGMA", "U", "GEOMETRY",
      /* ... */);
  /* ... */
};

auto add_boundary_ops_rhs_mechanical = [&](auto &pip) {
  CHKERR AddHOOps<SPACE_DIM - 1, SPACE_DIM, SPACE_DIM>::add(
      pip, {HDIV}, "GEOMETRY");
  CHKERR BoundaryRhsBCs::AddFluxToPipeline<OpBoundaryRhsBCs>::add(
      pip, mField, "U", {boost::make_shared<ScaledTimeScale>()}, Sev::inform);
  CHKERR ContactOps::opFactoryBoundaryRhs<SPACE_DIM, AT, IT, BoundaryEleOp>(
      /* ... */);
  /* ... */
};
auto add_domain_ops_lhs = [this](auto &pip) {
  CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(
      pip, {H1, HDIV}, "GEOMETRY");

  // For dynamics: inertia and mass-proportional damping, scaled by the time
  // integrator shifts ts_aa and ts_a respectively (damping term restored).
  auto get_inertia_and_mass_damping = [this](const double, const double,
                                             const double) {
    auto &fe_domain_lhs =
        mField.getInterface<PipelineManager>()->getDomainLhsFE();
    return (rho / scale) * fe_domain_lhs->ts_aa +
           (alpha_damping / scale) * fe_domain_lhs->ts_a;
  };
  pip.push_back(new OpMass("U", "U", get_inertia_and_mass_damping));

  CHKERR PlasticOps::opFactoryDomainLhs<SPACE_DIM, AT, IT, DomainEleOp>(
      mField, "MAT_PLASTIC", pip, "U", "EP", "TAU");
  /* ... */
};
auto add_domain_ops_rhs = [this](auto &pip) {
  CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(
      pip, {H1, HDIV}, "GEOMETRY");

  // Body forces scaled by the time history read from a file.
  CHKERR DomainRhsBCs::AddFluxToPipeline<OpDomainRhsBCs>::add(
      pip, mField, "U",
      {boost::make_shared<ScaledTimeScale>("body_force_hist.txt")},
      Sev::inform);

  // Inertia force from the second time derivative of U (op name restored).
  auto mat_acceleration = boost::make_shared<MatrixDouble>();
  pip.push_back(new OpCalculateVectorFieldValuesDotDot<SPACE_DIM>(
      "U", mat_acceleration));
  pip.push_back(
      new OpInertiaForce("U", mat_acceleration, [](double, double, double) {
        return rho / scale; // coefficient restored
      }));

  // Mass-proportional damping from the velocity (op name restored).
  auto mat_velocity = boost::make_shared<MatrixDouble>();
  pip.push_back(
      new OpCalculateVectorFieldValuesDot<SPACE_DIM>("U", mat_velocity));
  pip.push_back(
      new OpInertiaForce("U", mat_velocity, [](double, double, double) {
        return alpha_damping / scale; // coefficient restored
      }));

  CHKERR PlasticOps::opFactoryDomainRhs<SPACE_DIM, AT, IT, DomainEleOp>(
      mField, "MAT_PLASTIC", pip, "U", "EP", "TAU");
  /* ... */
};
CHKERR ContactOps::opFactoryDomainRhs<SPACE_DIM, AT, IT, DomainEleOp>(
    /* ... */);

CHKERR add_domain_ops_lhs(pip_mng->getOpDomainLhsPipeline());
CHKERR add_domain_ops_rhs(pip_mng->getOpDomainRhsPipeline());
CHKERR add_boundary_ops_lhs_mechanical(pip_mng->getOpBoundaryLhsPipeline());
CHKERR add_boundary_ops_rhs_mechanical(pip_mng->getOpBoundaryRhsPipeline());

CHKERR pip_mng->setDomainRhsIntegrationRule(vol_rule);
CHKERR pip_mng->setDomainLhsIntegrationRule(vol_rule);
CHKERR pip_mng->setBoundaryLhsIntegrationRule(integration_rule_bc);
CHKERR pip_mng->setBoundaryRhsIntegrationRule(integration_rule_bc);
auto create_reaction_pipeline = [&](auto &pip) {
  CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(pip, {H1}, "GEOMETRY");
  CHKERR PlasticOps::opFactoryDomainReactions<SPACE_DIM, AT, IT, DomainEleOp>(
      mField, "MAT_PLASTIC", pip, "U", "EP", "TAU");
  /* ... */
};
auto set_section_monitor = [&](auto solver) {
  SNES snes;
  CHKERR TSGetSNES(solver, &snes);
  CHKERR SNESMonitorSet(snes,
                        (MoFEMErrorCode(*)(SNES, PetscInt, PetscReal,
                                           void *))MoFEMSNESMonitorFields,
                        (void *)(snes_ctx_ptr.get()), nullptr);
  /* ... */
};
auto create_post_process_elements = [&]() {
  // Push the volume operators that evaluate plastic and Hencky-strain data at
  // post-processing points.
  auto push_vol_ops = [this](auto &pip) {
    CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(pip, {H1, HDIV},
                                                          "GEOMETRY");

    auto [common_plastic_ptr, common_hencky_ptr] =
        PlasticOps::createCommonPlasticOps<SPACE_DIM, IT, DomainEleOp>(
            mField, "MAT_PLASTIC", pip, "U", "EP", "TAU", 1., Sev::inform);

    if (common_hencky_ptr) {
      if (common_plastic_ptr->mGradPtr != common_hencky_ptr->matGradPtr)
        /* ... throw MOFEM_DATA_INCONSISTENCY ... */;
    }

    return std::make_pair(common_plastic_ptr, common_hencky_ptr);
  };
  auto push_vol_post_proc_ops = [this](auto &pp_fe, auto &&p) {
    auto &pip = pp_fe->getOpPtrVector();
    auto [common_plastic_ptr, common_hencky_ptr] = p;

    auto x_ptr = boost::make_shared<MatrixDouble>();
    auto u_ptr = boost::make_shared<MatrixDouble>();
    // ... evaluate GEOMETRY and U at post-processing points ...

    if (common_hencky_ptr) {
      // Large-strain output: gradient, first Piola stress, Hencky strain.
      pip.push_back(new OpPPMap(
          pp_fe->getPostProcMesh(), pp_fe->getMapGaussPts(),

          {{"PLASTIC_SURFACE", common_plastic_ptr->getPlasticSurfacePtr()},
           {"PLASTIC_MULTIPLIER", common_plastic_ptr->getPlasticTauPtr()}},

          {{"U", u_ptr}, {"GEOMETRY", x_ptr}},

          {{"GRAD", common_hencky_ptr->matGradPtr},
           {"FIRST_PIOLA", common_hencky_ptr->getMatFirstPiolaStress()}},

          {{"HENCKY_STRAIN", common_hencky_ptr->getMatLogC()},
           {"PLASTIC_STRAIN", common_plastic_ptr->getPlasticStrainPtr()},
           {"PLASTIC_FLOW", common_plastic_ptr->getPlasticFlowPtr()}}

          ));
    } else {
      // Small-strain output: total strain and stress instead.
      pip.push_back(new OpPPMap(
          pp_fe->getPostProcMesh(), pp_fe->getMapGaussPts(),

          {{"PLASTIC_SURFACE", common_plastic_ptr->getPlasticSurfacePtr()},
           {"PLASTIC_MULTIPLIER", common_plastic_ptr->getPlasticTauPtr()}},

          {{"U", u_ptr}, {"GEOMETRY", x_ptr}},

          {},

          {{"STRAIN", common_plastic_ptr->mStrainPtr},
           {"STRESS", common_plastic_ptr->mStressPtr},
           {"PLASTIC_STRAIN", common_plastic_ptr->getPlasticStrainPtr()},
           {"PLASTIC_FLOW", common_plastic_ptr->getPlasticFlowPtr()}}

          ));
    }
    /* ... */
  };
  // By default post-process the volume in 2D and the skin in 3D; both choices
  // can be overridden from the command line (branching and option restored).
  PetscBool post_proc_vol;
  PetscBool post_proc_skin;
  if constexpr (SPACE_DIM == 2) {
    post_proc_vol = PETSC_TRUE;
    post_proc_skin = PETSC_FALSE;
  } else {
    post_proc_vol = PETSC_FALSE;
    post_proc_skin = PETSC_TRUE;
  }
  CHKERR PetscOptionsGetBool(PETSC_NULLPTR, "", "-post_proc_skin",
                             &post_proc_skin, PETSC_NULLPTR);

  auto vol_post_proc = [this, push_vol_post_proc_ops, push_vol_ops,
                        post_proc_vol]() {
    if (post_proc_vol == PETSC_FALSE)
      return boost::shared_ptr<PostProcEle>();
    auto pp_fe = boost::make_shared<PostProcEle>(mField);
    CHK_THROW_MESSAGE(
        push_vol_post_proc_ops(pp_fe, push_vol_ops(pp_fe->getOpPtrVector())),
        "push_vol_post_proc_ops");
    return pp_fe;
  };

  auto skin_post_proc = [this, push_vol_post_proc_ops, push_vol_ops,
                         post_proc_skin]() {
    if (post_proc_skin == PETSC_FALSE)
      return boost::shared_ptr<SkinPostProcEle>();
    auto pp_fe = boost::make_shared<SkinPostProcEle>(mField);
    auto op_side = /* ... loop over the domain element adjacent to the face ... */;
    pp_fe->getOpPtrVector().push_back(op_side);
    CHK_THROW_MESSAGE(push_vol_post_proc_ops(
                          pp_fe, push_vol_ops(op_side->getOpPtrVector())),
                      "push_vol_post_proc_ops");
    return pp_fe;
  };

  return std::make_pair(vol_post_proc(), skin_post_proc());
};
// Create a vector holding one displacement component (coeff = 0, 1 or 2) and
// a scatter extracting it from the full solution vector D.
auto scatter_create = [&](auto D, auto coeff) {
  SmartPetscObj<IS> is;
  CHKERR is_manager->isCreateProblemFieldAndRank(simple->getProblemName(),
                                                 ROW, "U", coeff, coeff, is);
  int loc_size;
  CHKERR ISGetLocalSize(is, &loc_size);
  Vec v;
  CHKERR VecCreateMPI(mField.get_comm(), loc_size, PETSC_DETERMINE, &v);
  VecScatter scatter;
  CHKERR VecScatterCreate(D, is, v, PETSC_NULLPTR, &scatter);
  return std::make_tuple(SmartPetscObj<Vec>(v),
                         SmartPetscObj<VecScatter>(scatter));
};
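// Illustrative sketch (assumption, plain PETSc; PetscCall needs PETSc >= 3.18):
// build an index set selecting every third entry of a vector (one displacement
// component when DOFs are interleaved x,y,z) and scatter it into a smaller
// vector, which is what scatter_create does through the MoFEM index manager.
#include <petscvec.h>

static char help_is[] = "IS/VecScatter component extraction sketch\n";

int main(int argc, char **argv) {
  PetscCall(PetscInitialize(&argc, &argv, NULL, help_is));

  const PetscInt n_nodes = 4, dim = 3;
  Vec full, comp;
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, n_nodes * dim, &full));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, n_nodes, &comp));
  for (PetscInt i = 0; i < n_nodes * dim; ++i)
    PetscCall(VecSetValue(full, i, (PetscScalar)i, INSERT_VALUES));
  PetscCall(VecAssemblyBegin(full));
  PetscCall(VecAssemblyEnd(full));

  IS is; // indices 0, 3, 6, 9: the "x" component of each node
  PetscCall(ISCreateStride(PETSC_COMM_SELF, n_nodes, 0, dim, &is));

  VecScatter scatter;
  PetscCall(VecScatterCreate(full, is, comp, NULL, &scatter));
  PetscCall(VecScatterBegin(scatter, full, comp, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(scatter, full, comp, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecView(comp, PETSC_VIEWER_STDOUT_SELF));

  PetscCall(VecScatterDestroy(&scatter));
  PetscCall(ISDestroy(&is));
  PetscCall(VecDestroy(&full));
  PetscCall(VecDestroy(&comp));
  PetscCall(PetscFinalize());
  return 0;
}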
boost::shared_ptr<SetPtsData> field_eval_data;
boost::shared_ptr<MatrixDouble> u_field_ptr;

std::array<double, 3> field_eval_coords{0.0, 0.0, 0.0};
int coords_dim = 3;
CHKERR PetscOptionsGetRealArray(PETSC_NULLPTR, PETSC_NULLPTR,
                                "-field_eval_coords", // option name assumed
                                field_eval_coords.data(), &coords_dim,
                                &do_eval_field);

// Maps from output names to values at the evaluation point, grouped by
// tensorial rank.
boost::shared_ptr<std::map<std::string, boost::shared_ptr<VectorDouble>>>
    scalar_field_ptrs = boost::make_shared<
        std::map<std::string, boost::shared_ptr<VectorDouble>>>();
boost::shared_ptr<std::map<std::string, boost::shared_ptr<MatrixDouble>>>
    vector_field_ptrs = boost::make_shared<
        std::map<std::string, boost::shared_ptr<MatrixDouble>>>();
boost::shared_ptr<std::map<std::string, boost::shared_ptr<MatrixDouble>>>
    sym_tensor_field_ptrs = boost::make_shared<
        std::map<std::string, boost::shared_ptr<MatrixDouble>>>();
boost::shared_ptr<std::map<std::string, boost::shared_ptr<MatrixDouble>>>
    tensor_field_ptrs = boost::make_shared<
        std::map<std::string, boost::shared_ptr<MatrixDouble>>>();
if (do_eval_field) {
  auto u_field_ptr = boost::make_shared<MatrixDouble>();
  // Build the field-evaluator tree for the domain elements (call restored).
  CHKERR mField.getInterface<FieldEvaluatorInterface>()->buildTree<SPACE_DIM>(
      field_eval_data, simple->getDomainFEName());
  field_eval_data->setEvalPoints(field_eval_coords.data(), 1);
  auto no_rule = [](int, int, int) { return -1; };
  auto field_eval_fe_ptr = field_eval_data->feMethodPtr.lock();
  field_eval_fe_ptr->getRuleHook = no_rule;

  CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(
      field_eval_fe_ptr->getOpPtrVector(), {H1, HDIV}, "GEOMETRY");

  auto [common_plastic_ptr, common_hencky_ptr] =
      PlasticOps::createCommonPlasticOps<SPACE_DIM, IT, DomainEleOp>(
          mField, "MAT_PLASTIC", field_eval_fe_ptr->getOpPtrVector(), "U",
          "EP", "TAU", 1., Sev::inform);

  field_eval_fe_ptr->getOpPtrVector().push_back(
      new OpCalculateVectorFieldValues<SPACE_DIM>("U", u_field_ptr));
1044 field_eval_fe_ptr->getOpPtrVector().push_back(
1047 if ((common_plastic_ptr) && (common_hencky_ptr) && (scalar_field_ptrs)) {
1049 scalar_field_ptrs->insert(
1050 {
"PLASTIC_SURFACE", common_plastic_ptr->getPlasticSurfacePtr()});
1051 scalar_field_ptrs->insert(
1052 {
"PLASTIC_MULTIPLIER", common_plastic_ptr->getPlasticTauPtr()});
1053 vector_field_ptrs->insert({
"U", u_field_ptr});
1054 sym_tensor_field_ptrs->insert(
1055 {
"PLASTIC_STRAIN", common_plastic_ptr->getPlasticStrainPtr()});
1056 sym_tensor_field_ptrs->insert(
1057 {
"PLASTIC_FLOW", common_plastic_ptr->getPlasticFlowPtr()});
1058 sym_tensor_field_ptrs->insert(
1059 {
"HENCKY_STRAIN", common_hencky_ptr->getMatLogC()});
1060 tensor_field_ptrs->insert({
"GRAD", common_hencky_ptr->matGradPtr});
1061 tensor_field_ptrs->insert(
1062 {
"FIRST_PIOLA", common_hencky_ptr->getMatFirstPiolaStress()});
1064 scalar_field_ptrs->insert(
1065 {
"PLASTIC_SURFACE", common_plastic_ptr->getPlasticSurfacePtr()});
1066 scalar_field_ptrs->insert(
1067 {
"PLASTIC_MULTIPLIER", common_plastic_ptr->getPlasticTauPtr()});
1068 vector_field_ptrs->insert({
"U", u_field_ptr});
1069 sym_tensor_field_ptrs->insert(
1070 {
"STRAIN", common_plastic_ptr->mStrainPtr});
1071 sym_tensor_field_ptrs->insert(
1072 {
"STRESS", common_plastic_ptr->mStressPtr});
1073 sym_tensor_field_ptrs->insert(
1074 {
"PLASTIC_STRAIN", common_plastic_ptr->getPlasticStrainPtr()});
1075 sym_tensor_field_ptrs->insert(
1076 {
"PLASTIC_FLOW", common_plastic_ptr->getPlasticFlowPtr()});
auto test_monitor_ptr = boost::make_shared<FEMethod>();

auto set_time_monitor = [&](auto dm, auto solver) {
  // Monitor is the post-processing FEMethod defined elsewhere in the tutorial
  // (construction head restored).
  boost::shared_ptr<Monitor> monitor_ptr(new Monitor(
      dm, create_post_process_elements(), reactionFe, uXScatter, uYScatter,
      uZScatter, field_eval_coords, field_eval_data, scalar_field_ptrs,
      vector_field_ptrs, sym_tensor_field_ptrs, tensor_field_ptrs));
  boost::shared_ptr<ForcesAndSourcesCore> null;
  // Atom test: at t = 0.5 (step 25) compare the values evaluated at the probe
  // point against reference values.
  test_monitor_ptr->postProcessHook = [&]() {
    MoFEMFunctionBegin;
    if (atom_test && fabs(test_monitor_ptr->ts_t - 0.5) < 1e-12 &&
        test_monitor_ptr->ts_step == 25) {

      if (scalar_field_ptrs->at("PLASTIC_MULTIPLIER")->size()) {
        auto t_tau =
            getFTensor0FromVec(*scalar_field_ptrs->at("PLASTIC_MULTIPLIER"));
        MOFEM_LOG("PlasticSync", Sev::inform) << "Eval point tau: " << t_tau;
        if (atom_test == 1 && fabs(t_tau - 0.688861) > 1e-5) {
          SETERRQ(PETSC_COMM_SELF, MOFEM_ATOM_TEST_INVALID,
                  "atom test %d failed: wrong plastic multiplier value",
                  atom_test);
        }
      }

      if (vector_field_ptrs->at("U")->size1()) {
        auto t_disp =
            getFTensor1FromMat<SPACE_DIM>(*vector_field_ptrs->at("U"));
        MOFEM_LOG("PlasticSync", Sev::inform) << "Eval point U: " << t_disp;
        if ((atom_test == 1 && fabs(t_disp(0) - 0.25 / 2.) > 1e-5) ||
            fabs(t_disp(1) + 0.0526736) > 1e-5) {
          SETERRQ(PETSC_COMM_SELF, MOFEM_ATOM_TEST_INVALID,
                  "atom test %d failed: wrong displacement value", atom_test);
        }
      }

      if (sym_tensor_field_ptrs->at("PLASTIC_STRAIN")->size1()) {
        auto t_plastic_strain = getFTensor2SymmetricFromMat<SPACE_DIM>(
            *sym_tensor_field_ptrs->at("PLASTIC_STRAIN"));
        MOFEM_LOG("PlasticSync", Sev::inform)
            << "Eval point EP: " << t_plastic_strain;
        if ((atom_test == 1 &&
             fabs(t_plastic_strain(0, 0) - 0.221943) > 1e-5) ||
            fabs(t_plastic_strain(0, 1)) > 1e-5 ||
            fabs(t_plastic_strain(1, 1) + 0.110971) > 1e-5) {
          SETERRQ(PETSC_COMM_SELF, MOFEM_ATOM_TEST_INVALID,
                  "atom test %d failed: wrong plastic strain value",
                  atom_test);
        }
      }

      if (tensor_field_ptrs->at("FIRST_PIOLA")->size1()) {
        auto t_piola_stress = getFTensor2FromMat<SPACE_DIM, SPACE_DIM>(
            *tensor_field_ptrs->at("FIRST_PIOLA"));
        MOFEM_LOG("PlasticSync", Sev::inform)
            << "Eval point Piola stress: " << t_piola_stress;
        if ((atom_test == 1 && fabs((t_piola_stress(0, 0) - 198.775) /
                                    t_piola_stress(0, 0)) > 1e-5) ||
            fabs(t_piola_stress(0, 1)) + fabs(t_piola_stress(1, 0)) +
                    fabs(t_piola_stress(1, 1)) >
                /* tolerance elided */) {
          SETERRQ(PETSC_COMM_SELF, MOFEM_ATOM_TEST_INVALID,
                  "atom test %d failed: wrong Piola stress value", atom_test);
        }
      }
    }
    MoFEMFunctionReturn(0);
  };

  CHKERR DMMoFEMTSSetMonitor(dm, solver, simple->getDomainFEName(),
                             monitor_ptr, null, test_monitor_ptr);
auto set_schur_pc = [&](auto solver,
                        boost::shared_ptr<SetUpSchur> &schur_ptr) {
  auto name_prb = simple->getProblemName();

  // Sub-DM with the displacement field only: the block on which the Schur
  // complement is solved (loop bodies restored).
  dm_sub = createDM(mField.get_comm(), "DMMOFEM");
  /* ... */
  for (auto f : {"U"}) {
    CHKERR DMMoFEMAddSubFieldRow(dm_sub, f);
    CHKERR DMMoFEMAddSubFieldCol(dm_sub, f);
  }
  /* ... */

  // Sub-DM with the fields eliminated by static condensation.
  dm_sub = createDM(mField.get_comm(), "DMMOFEM");
  /* ... */
  for (auto f : {"SIGMA", "EP", "TAU"}) {
    CHKERR DMMoFEMAddSubFieldRow(dm_sub, f);
    CHKERR DMMoFEMAddSubFieldCol(dm_sub, f);
  }
  for (auto f : {"EP", "TAU"}) {
    /* ... */
  }
  if constexpr (AT == AssemblyType::BLOCK_SCHUR) {

    // Two variants of get_nested_mat_data appear in the source: with contact
    // coupling (SIGMA blocks on the boundary) and without.
    auto get_nested_mat_data = [&](auto schur_dm, auto block_dm) {
      auto block_mat_data = createBlockMatStructure(
          simple->getDM(),

          {{simple->getDomainFEName(),
            /* ... field pairs coupled on domain elements ... */},

           {simple->getBoundaryFEName(),

            {{"SIGMA", "SIGMA"}, {"U", "SIGMA"}, {"SIGMA", "U"}}}});

      return createSchurNestedMatrixStruture(

          {dm_schur, dm_block}, block_mat_data,

          {"SIGMA", "EP", "TAU"}, {nullptr, nullptr, nullptr}, true

      );
    };

    // Variant without contact: only the internal variables are condensed out.
    auto get_nested_mat_data = [&](auto schur_dm, auto block_dm) {
      auto block_mat_data =
          createBlockMatStructure(simple->getDM(),

                                  {{simple->getDomainFEName(),
                                    /* ... field pairs ... */}});

      return createSchurNestedMatrixStruture(

          {dm_schur, dm_block}, block_mat_data,

          {"EP", "TAU"}, {nullptr, nullptr}, false

      );
    };
    auto nested_mat_data = get_nested_mat_data(dm_schur, dm_block);
    /* ... */
    auto block_is = getDMSubData(dm_block)->getSmartRowIs();
    auto ao_schur = getDMSubData(dm_schur)->getSmartRowMap();
    /* ... */
    CHKERR schur_ptr->setUp(solver);
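// Illustrative sketch (assumption): what the SetUpSchur machinery does
// algebraically. For a 2x2 block system
//   [A00 A01] [x0]   [b0]
//   [A10 A11] [x1] = [b1],
// eliminating the x0 block gives the Schur complement S = A11 - A10*inv(A00)*A01
// and the reduced system S*x1 = b1 - A10*inv(A00)*b0; x0 is then recovered by
// back substitution. Below, scalars stand in for the blocks.
#include <cstdio>

int main() {
  // Scalar "blocks": A00 is the condensed (EP/TAU-like) block, A11 the U block.
  const double A00 = 4., A01 = 1., A10 = 2., A11 = 3.;
  const double b0 = 5., b1 = 6.;

  const double S = A11 - A10 * (1. / A00) * A01; // Schur complement
  const double g = b1 - A10 * (1. / A00) * b0;   // condensed right-hand side
  const double x1 = g / S;                       // solve on the U block
  const double x0 = (b0 - A01 * x1) / A00;       // back substitution

  std::printf("x0 = %g, x1 = %g (residuals %g, %g)\n", x0, x1,
              A00 * x0 + A01 * x1 - b0, A10 * x0 + A11 * x1 - b1);
}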
auto dm = simple->getDM();
auto D = createDMVector(dm); // solution vector (declaration restored)

uXScatter = scatter_create(D, 0);
uYScatter = scatter_create(D, 1);
if constexpr (SPACE_DIM == 3)
  uZScatter = scatter_create(D, 2);

// Quasi-static problems use a first-order (TSIM), dynamic problems a
// second-order (TSIM2) implicit time integrator (branching restored).
auto create_solver = [pip_mng]() {
  if (is_quasi_static == PETSC_TRUE)
    return pip_mng->createTSIM();
  else
    return pip_mng->createTSIM2();
};

auto solver = create_solver();
auto active_pre_lhs = []() {
  // ... zero the plastic-activity counters before assembly ...
};

auto active_post_lhs = [&]() {
  MoFEMFunctionBegin;
  auto get_iter = [&]() {
    int iter;
    CHK_THROW_MESSAGE(/* get the SNES iteration number */,
                      "Can not get iter");
    return iter;
  };

  auto iter = get_iter();
  std::array<int, 5> activity_data;
  std::fill(activity_data.begin(), activity_data.end(), 0);
  MPI_Allreduce(activityData.data(), // static counters filled during assembly
                activity_data.data(), activity_data.size(), MPI_INT,
                MPI_SUM, mField.get_comm());

  int &active_points = activity_data[0];
  int &active_full_elems = activity_data[1];
  int &active_elems = activity_data[2];
  int &nb_points = activity_data[3];
  int &nb_elements = activity_data[4];

  double proc_nb_points =
      100 * static_cast<double>(active_points) / nb_points;
  double proc_nb_active =
      100 * static_cast<double>(active_elems) / nb_elements;
  double proc_nb_full_active = 100;
  if (active_elems)
    proc_nb_full_active =
        100 * static_cast<double>(active_full_elems) / active_elems;

  MOFEM_LOG_C("PLASTICITY", Sev::inform,
              "Iter %d nb pts %d nb active pts %d (%3.3f%%) nb active "
              "elems %d (%3.3f%%) nb full active elems %d (%3.3f%%)",
              iter, nb_points, active_points, proc_nb_points,
              active_elems, proc_nb_active, active_full_elems,
              proc_nb_full_active, iter);
  MoFEMFunctionReturn(0);
};
auto add_active_dofs_elem = [&](auto dm) {
  auto fe_pre_proc = boost::make_shared<FEMethod>();
  fe_pre_proc->preProcessHook = active_pre_lhs;
  auto fe_post_proc = boost::make_shared<FEMethod>();
  fe_post_proc->postProcessHook = active_post_lhs;
  auto ts_ctx_ptr = getDMTsCtx(dm);
  ts_ctx_ptr->getPreProcessIJacobian().push_front(fe_pre_proc);
  ts_ctx_ptr->getPostProcessIJacobian().push_back(fe_post_proc);
  /* ... */
};
auto set_essential_bc = [&](auto dm, auto solver) {
  // Hooks enforcing essential (displacement) boundary conditions on the right-
  // and left-hand sides, scaled in time by disp_time_scale. The Essential*
  // call heads are restored from the MoFEM essential-BC utilities.
  auto pre_proc_ptr = boost::make_shared<FEMethod>();
  auto post_proc_rhs_ptr = boost::make_shared<FEMethod>();
  auto post_proc_lhs_ptr = boost::make_shared<FEMethod>();

  auto disp_time_scale = boost::make_shared<TimeScale>();

  auto get_bc_hook_rhs = [this, pre_proc_ptr, disp_time_scale]() {
    return EssentialPreProc<DisplacementCubitBcData>(
        mField, pre_proc_ptr, {disp_time_scale}, false);
  };
  pre_proc_ptr->preProcessHook = get_bc_hook_rhs();

  auto get_post_proc_hook_rhs = [this, post_proc_rhs_ptr]() {
    MoFEMFunctionBegin;
    CHKERR EssentialPreProcReaction<DisplacementCubitBcData>(
        mField, post_proc_rhs_ptr, nullptr, Sev::verbose)();
    CHKERR EssentialPostProcRhs<DisplacementCubitBcData>(
        mField, post_proc_rhs_ptr, 1.)();
    MoFEMFunctionReturn(0);
  };
  auto get_post_proc_hook_lhs = [this, post_proc_lhs_ptr]() {
    return EssentialPostProcLhs<DisplacementCubitBcData>(
        mField, post_proc_lhs_ptr, 1.);
  };
  post_proc_rhs_ptr->postProcessHook = get_post_proc_hook_rhs;

  auto ts_ctx_ptr = getDMTsCtx(dm);
  ts_ctx_ptr->getPreProcessIFunction().push_front(pre_proc_ptr);
  ts_ctx_ptr->getPreProcessIJacobian().push_front(pre_proc_ptr);
  ts_ctx_ptr->getPostProcessIFunction().push_back(post_proc_rhs_ptr);
  post_proc_lhs_ptr->postProcessHook = get_post_proc_hook_lhs();
  ts_ctx_ptr->getPostProcessIJacobian().push_back(post_proc_lhs_ptr);
  /* ... */
};
// Attach matrix, solution vector(s), monitors and boundary-condition hooks to
// the time solver (quasi-static/dynamic branching restored).
if (is_quasi_static == PETSC_TRUE)
  CHKERR TSSetIJacobian(solver, B, B, PETSC_NULLPTR, PETSC_NULLPTR);
else
  CHKERR TSSetI2Jacobian(solver, B, B, PETSC_NULLPTR, PETSC_NULLPTR);

if (is_quasi_static == PETSC_TRUE)
  CHKERR TSSetSolution(solver, D);
else
  CHKERR TS2SetSolution(solver, D, DD);

CHKERR set_section_monitor(solver);
CHKERR set_time_monitor(dm, solver);
CHKERR TSSetFromOptions(solver);

CHKERR add_active_dofs_elem(dm);
boost::shared_ptr<SetUpSchur> schur_ptr;
CHKERR set_schur_pc(solver, schur_ptr);
CHKERR set_essential_bc(dm, solver);
// Time the set-up and the solve (TSSetUp call restored).
BOOST_LOG_SCOPED_THREAD_ATTR("Timeline", attrs::timer());
MOFEM_LOG("TIMER", Sev::verbose) << "TSSetUp";
CHKERR TSSetUp(solver);
MOFEM_LOG("TIMER", Sev::verbose) << "TSSetUp <= done";
MOFEM_LOG("TIMER", Sev::verbose) << "TSSolve";
CHKERR TSSolve(solver, NULL);
MOFEM_LOG("TIMER", Sev::verbose) << "TSSolve <= done";
constexpr double eps = 1e-9;

// Random perturbations of the state (and of its time derivative) used to probe
// the tangent operators; the field ranges are kept small so the material stays
// close to the sampled state.
auto x = opt->setRandomFields(simple->getDM(), {

    {"U", {-1e-6, 1e-6}},

    {"EP", {-1e-6, 1e-6}},
    /* ... remaining fields ... */
});

auto dot_x_plastic_active =
    opt->setRandomFields(simple->getDM(), { /* ... */ });

auto diff_x_plastic_active =
    opt->setRandomFields(simple->getDM(), { /* ... */ });

auto dot_x_elastic =
    opt->setRandomFields(simple->getDM(), { /* ... */ });

auto diff_x_elastic =
    opt->setRandomFields(simple->getDM(), { /* ... */ });
auto test_domain_ops = [&](auto fe_name, auto lhs_pipeline, auto rhs_pipeline,
                           auto dot_x, auto diff_x) {
  MoFEMFunctionBegin;

  // Compare K * diff_x with a central finite difference of the residual.
  auto diff_res = opt->checkCentralFiniteDifference(
      simple->getDM(), fe_name, rhs_pipeline, lhs_pipeline, x, dot_x,
      /* remaining arguments elided */);

  double fnorm;
  CHKERR VecNorm(diff_res, NORM_2, &fnorm);
  MOFEM_LOG_C("PLASTICITY", Sev::inform,
              "Test consistency of tangent matrix %3.4e", fnorm);

  constexpr double err = 1e-5;
  if (fnorm > err)
    SETERRQ(PETSC_COMM_WORLD, MOFEM_ATOM_TEST_INVALID,
            "Norm of directional derivative too large err = %3.4e", fnorm);
  MoFEMFunctionReturn(0);
};

MOFEM_LOG("PLASTICITY", Sev::inform) << "Elastic active";
CHKERR test_domain_ops(simple->getDomainFEName(), pip->getDomainLhsFE(),
                       pip->getDomainRhsFE(), dot_x_elastic, diff_x_elastic);

MOFEM_LOG("PLASTICITY", Sev::inform) << "Plastic active";
CHKERR test_domain_ops(simple->getDomainFEName(), pip->getDomainLhsFE(),
                       pip->getDomainRhsFE(), dot_x_plastic_active,
                       diff_x_plastic_active);
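// Illustrative sketch (assumption): the idea behind checkCentralFiniteDifference.
// For a residual r(x) with analytic tangent K(x) = dr/dx, a central difference
// along a direction dx should reproduce K*dx up to O(eps^2); the norm of the
// mismatch is the quantity compared against `err` above. Here r is a small
// cubic/trigonometric test function in R^2.
#include <array>
#include <cmath>
#include <cstdio>

using Vec2 = std::array<double, 2>;

static Vec2 residual(const Vec2 &x) {
  return {x[0] * x[0] * x[0] + x[1], std::sin(x[1]) - x[0]};
}

static std::array<Vec2, 2> tangent(const Vec2 &x) { // rows of dr/dx
  return {Vec2{3. * x[0] * x[0], 1.}, Vec2{-1., std::cos(x[1])}};
}

int main() {
  const Vec2 x{0.3, -0.2}, dx{1e-2, 2e-2};
  const double eps = 1e-5;

  const Vec2 xp{x[0] + eps * dx[0], x[1] + eps * dx[1]};
  const Vec2 xm{x[0] - eps * dx[0], x[1] - eps * dx[1]};
  const Vec2 rp = residual(xp), rm = residual(xm);
  const auto K = tangent(x);

  double mismatch = 0.;
  for (int i = 0; i != 2; ++i) {
    const double fd = (rp[i] - rm[i]) / (2. * eps);      // directional FD
    const double an = K[i][0] * dx[0] + K[i][1] * dx[1]; // K * dx
    mismatch += (fd - an) * (fd - an);
  }
  std::printf("norm of (FD - K*dx) = %.3e\n", std::sqrt(mismatch));
}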
static char help[] = "...\n\n";

int main(int argc, char *argv[]) {

#ifdef ENABLE_PYTHON_BINDING
  Py_Initialize();   // restored: symmetric to Py_FinalizeEx at the end
  np::initialize();
#endif

  // Initialize MoFEM/PETSc and register the logging channels.
  const char param_file[] = "param_file.petsc";
  MoFEM::Core::Initialize(&argc, &argv, param_file, help);

  auto core_log = logging::core::get();
  core_log->add_sink(
      LogManager::createSink(LogManager::getStrmWorld(), "PLASTICITY"));
  core_log->add_sink(
      LogManager::createSink(LogManager::getStrmWorld(), "TIMER"));
  LogManager::setLog("PLASTICITY");
  core_log->add_sink(
      LogManager::createSink(LogManager::getStrmWorld(), "CONTACT"));
  LogManager::setLog("CONTACT");
  core_log->add_sink(
      LogManager::createSink(LogManager::getStrmSync(), "PlasticSync"));
  LogManager::setLog("PlasticSync");

  // Register the MoFEM discrete manager in PETSc.
  DMType dm_name = "DMMOFEM";
  CHKERR DMRegister_MoFEM(dm_name);

  // Create the MOAB database.
  moab::Core mb_instance;
  moab::Interface &moab = mb_instance;
#ifdef ENABLE_PYTHON_BINDING
  if (Py_FinalizeEx() < 0) {
    // ... report the Python finalisation error ...
  }
#endif

// SetUpSchurImpl::setUp guards against being called twice:
        "It is expected that the Schur matrix is not allocated; this is "
        "possible only if the PC is set up twice");
SNES snes;
CHKERR TSGetSNES(solver, &snes);
KSP ksp;
CHKERR SNESGetKSP(snes, &ksp);
CHKERR KSPSetFromOptions(ksp);

PC pc;
CHKERR KSPGetPC(ksp, &pc);
PetscBool is_pcfs = PETSC_FALSE;
PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &is_pcfs);

// With a field-split PC a separate Schur matrix S is assembled; it must not
// exist yet:
        "It is expected that the Schur matrix is not allocated; this is "
        "possible only if the PC is set up twice");

// The solver matrix becomes a shell (nested block) matrix.
DM solver_dm;
CHKERR TSGetDM(solver, &solver_dm);
CHKERR DMSetMatType(solver_dm, MATSHELL);
// Swap the assembled matrix: the Jacobian is assembled into the block matrix B
// while the shell matrix A is what the solver applies. One variant for
// first-order (TSIM) and one for second-order (TSIM2) time integration
// (branching and lambda bodies restored).
if (is_quasi_static == PETSC_TRUE) {
  auto swap_assemble = [](TS ts, PetscReal t, Vec u, Vec u_t, PetscReal a,
                          Mat A, Mat B, void *ctx) {
    return TsSetIJacobian(ts, t, u, u_t, a, B, B, ctx);
  };
  CHKERR TSSetIJacobian(solver, A, P, swap_assemble, ts_ctx_ptr.get());
} else {
  auto swap_assemble = [](TS ts, PetscReal t, Vec u, Vec u_t, Vec utt,
                          PetscReal a, PetscReal aa, Mat A, Mat B,
                          void *ctx) {
    return TsSetI2Jacobian(ts, t, u, u_t, utt, a, aa, B, B, ctx);
  };
  CHKERR TSSetI2Jacobian(solver, A, P, swap_assemble, ts_ctx_ptr.get());
}
CHKERR KSPSetOperators(ksp, A, P);
auto set_ops = [&]() {
  auto pip_mng = mField.getInterface<PipelineManager>();

  // Without contact: assemble the Schur complement eliminating EP and TAU
  // (assemble-begin/end call heads restored).
  pip_mng->getOpBoundaryLhsPipeline().push_front(createOpSchurAssembleBegin());
  pip_mng->getOpBoundaryLhsPipeline().push_back(createOpSchurAssembleEnd(

      {"EP", "TAU"}, {nullptr, nullptr}, aoSchur, S, false, false

      ));
  pip_mng->getOpDomainLhsPipeline().push_front(createOpSchurAssembleBegin());
  pip_mng->getOpDomainLhsPipeline().push_back(createOpSchurAssembleEnd(

      {"EP", "TAU"}, {nullptr, nullptr}, aoSchur, S, false, false

      ));

  // With contact: add a small stabilisation mass term on the SIGMA block so
  // that the eliminated block remains invertible.
  double eps_stab = 1e-4;
  // B is a forms-integrator alias defined just above (elided in the excerpt).
  using OpMassStab = B::OpMass<3, SPACE_DIM * SPACE_DIM>;

  pip_mng->getOpBoundaryLhsPipeline().push_front(createOpSchurAssembleBegin());
  pip_mng->getOpBoundaryLhsPipeline().push_back(
      new OpMassStab("SIGMA", "SIGMA", [eps_stab](double, double, double) {
        return eps_stab; // restored coefficient
      }));
  pip_mng->getOpBoundaryLhsPipeline().push_back(createOpSchurAssembleEnd(

      {"SIGMA", "EP", "TAU"}, {nullptr, nullptr, nullptr}, aoSchur, S,
      false, false

      ));
  pip_mng->getOpDomainLhsPipeline().push_front(createOpSchurAssembleBegin());
  pip_mng->getOpDomainLhsPipeline().push_back(createOpSchurAssembleEnd(

      {"SIGMA", "EP", "TAU"}, {nullptr, nullptr, nullptr}, aoSchur, S,
      false, false

      ));
auto set_assemble_elems = [&]() {
  // ts_ctx_ptr is obtained from the solver DM (declaration elided).
  auto schur_asmb_pre_proc = boost::make_shared<FEMethod>();
  schur_asmb_pre_proc->preProcessHook = [this]() {
    MoFEMFunctionBegin;
    CHKERR MatZeroEntries(S); // restored: reset S before each assembly
    MOFEM_LOG("TIMER", Sev::verbose) << "Lhs Assemble Begin";
    MoFEMFunctionReturn(0);
  };
  auto schur_asmb_post_proc = boost::make_shared<FEMethod>();
  schur_asmb_post_proc->postProcessHook = [this, schur_asmb_post_proc]() {
    MoFEMFunctionBegin;
    MOFEM_LOG("TIMER", Sev::verbose) << "Lhs Assemble End";
    /* ... apply essential boundary conditions to S ... */
    CHKERR MatAssemblyBegin(S, MAT_FINAL_ASSEMBLY);
    CHKERR MatAssemblyEnd(S, MAT_FINAL_ASSEMBLY);
    MoFEMFunctionReturn(0);
  };
  ts_ctx_ptr->getPreProcessIJacobian().push_front(schur_asmb_pre_proc);
  ts_ctx_ptr->getPostProcessIJacobian().push_front(schur_asmb_post_proc);
  /* ... */
};
auto set_pc = [&]() {
  /* ... */
  CHKERR PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_USER, S);
};

auto set_diagonal_pc = [&]() {
  KSP *subksp;
  CHKERR PCFieldSplitSchurGetSubKSP(pc, PETSC_NULLPTR, &subksp);
  auto get_pc = [](auto ksp) {
    PC pc_raw;
    CHKERR KSPGetPC(ksp, &pc_raw);
    return pc_raw;
  };
  // Use the assembled block data to solve the A00 block (call restored).
  CHKERR setSchurA00MatSolvePC(SmartPetscObj<PC>(get_pc(subksp[0]), true));
  CHKERR PetscFree(subksp);
};

CHKERR set_assemble_elems();
/* ... */
CHKERR set_diagonal_pc();
// Without a field-split PC, plain Schur assembly operators are still pushed so
// that the block matrix is assembled consistently.
pip_mng->getOpBoundaryLhsPipeline().push_front(/* ... */);
pip_mng->getOpBoundaryLhsPipeline().push_back(/* ... */);
pip_mng->getOpDomainLhsPipeline().push_back(/* ... */);

boost::shared_ptr<SetUpSchur>
SetUpSchur::createSetUpSchur(MoFEM::Interface &m_field) {
  return boost::shared_ptr<SetUpSchur>(new SetUpSchurImpl(m_field));
}
// Implementations (specialisation qualifiers inferred from the declarations).
MoFEMErrorCode AddHOOps<2, 3, 3>::add(
    boost::ptr_deque<ForcesAndSourcesCore::UserDataOperator> &pipeline,
    std::vector<FieldSpace> spaces, std::string geom_field_name) {
  // ... push operators for faces embedded in 3D ...
}

MoFEMErrorCode AddHOOps<1, 2, 2>::add(
    boost::ptr_deque<ForcesAndSourcesCore::UserDataOperator> &pipeline,
    std::vector<FieldSpace> spaces, std::string geom_field_name) {
  // ... push operators for edges embedded in 2D ...
}

// Scale L2 base functions by the inverse of the element measure; this keeps
// the magnitude of the condensed (EP, TAU) DOFs independent of element size.
template <int FE_DIM, int PROBLEM_DIM, int SPACE_DIM>
MoFEMErrorCode
scaleL2(boost::ptr_deque<ForcesAndSourcesCore::UserDataOperator> &pipeline,
        std::string geom_field_name) {
  MoFEMFunctionBegin;

  auto jac_ptr = boost::make_shared<MatrixDouble>();
  auto det_ptr = boost::make_shared<VectorDouble>();
  pipeline.push_back(new OpCalculateVectorFieldGradient<SPACE_DIM, SPACE_DIM>(
      geom_field_name, jac_ptr)); // op name restored
  pipeline.push_back(new OpInvertMatrix<SPACE_DIM>(jac_ptr, det_ptr, nullptr));

  auto scale_ptr = boost::make_shared<double>(1.);
  using OP = ForcesAndSourcesCore::UserDataOperator;
  auto op_scale = new OP(NOSPACE, OP::OPSPACE);
  op_scale->doWorkRhsHook = [scale_ptr, det_ptr /*, ... */](
                                DataOperator *op_ptr, int, EntityType,
                                EntitiesFieldData::EntData &) {
    /* ... accumulate "scale" from the determinants at integration points ... */
    *scale_ptr = scale / det_ptr->size();
    /* ... */
  };
  pipeline.push_back(op_scale);

  /* ... push the operator that scales the L2 base by *scale_ptr ... */
  MoFEMFunctionReturn(0);
}
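// Illustrative sketch (assumption): the effect of scaling L2 base functions by
// the inverse of the element measure. The measure is recovered from the
// Jacobian determinant and the quadrature weights, and base values are divided
// by it, so the magnitude of L2 DOFs does not grow with element size.
#include <array>
#include <cstdio>

int main() {
  // Reference-triangle quadrature (three points, weights sum to 1/2).
  const std::array<double, 3> weights{1. / 6., 1. / 6., 1. / 6.};
  const double detJ = 2.5 * 2.; // |J| = 2*area for an affine triangle of area 2.5

  double measure = 0.;
  for (auto w : weights)
    measure += w * detJ; // element area

  const double scale = 1. / measure;
  std::array<double, 3> base{1., 0.5, 0.25}; // example L2 base values at a point
  for (auto &b : base)
    b *= scale;

  std::printf("area = %g, scaled base = [%g, %g, %g]\n", measure, base[0],
              base[1], base[2]);
}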
MoFEMErrorCode AddHOOps<3, 3, 3>::add(
    boost::ptr_deque<ForcesAndSourcesCore::UserDataOperator> &pipeline,
    std::vector<FieldSpace> spaces, std::string geom_field_name) {
  MoFEMFunctionBegin;
  constexpr bool scale_l2 = false;
  if constexpr (scale_l2)
    CHKERR scaleL2<3, 3, 3>(pipeline, geom_field_name);
  // Delegate to the library implementation (call head restored).
  CHKERR MoFEM::AddHOOps<3, 3, 3>::add(pipeline, spaces, geom_field_name,
                                       nullptr, nullptr, nullptr);
  MoFEMFunctionReturn(0);
}

MoFEMErrorCode AddHOOps<2, 2, 2>::add(
    boost::ptr_deque<ForcesAndSourcesCore::UserDataOperator> &pipeline,
    std::vector<FieldSpace> spaces, std::string geom_field_name) {
  MoFEMFunctionBegin;
  constexpr bool scale_l2 = false;
  if constexpr (scale_l2)
    CHKERR scaleL2<2, 2, 2>(pipeline, geom_field_name);
  CHKERR MoFEM::AddHOOps<2, 2, 2>::add(pipeline, spaces, geom_field_name,
                                       nullptr, nullptr, nullptr);
  MoFEMFunctionReturn(0);
}