wave_equation.cpp
/**
 * \file wave_equation.cpp
 * \example mofem/tutorials/scl-7/wave_equation.cpp
 *
 * \brief Solve the time-dependent Wave Equation
 \f[
 \begin{aligned}
\frac{\partial^{2} u(\mathbf{x}, t)}{\partial t^{2}}-\Delta u(\mathbf{x}, t)
&=f(\mathbf{x}, t) & & \forall \mathbf{x} \in \Omega, t \in(0, T), \\
u(\mathbf{x}, 0) &=u_{0}(\mathbf{x}) & & \forall \mathbf{x} \in \Omega, \\
\frac{\partial u}{\partial t}(\mathbf{x}, 0) &=v_{0}(\mathbf{x}) & & \forall \mathbf{x} \in \Omega, \\
u(\mathbf{x}, t) &=g(\mathbf{x}, t) & & \forall \mathbf{x} \in \partial \Omega,
t \in(0, T). \end{aligned}
 \f]
 **/
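
// For reference, a sketch read off from the operators pushed to the pipelines
// in assembleSystem(): with test function v, the domain residual is
//
//   \int_Omega grad(v).grad(u) dOmega
//     + wave_speed2 * \int_Omega v * (d^2 u / dt^2) dOmega = 0,
//
// and the tangent is the stiffness matrix plus wave_speed2 * ts_aa times the
// mass matrix, where ts_aa is the second time-derivative shift supplied by
// the PETSc TS integrator. Boundary values are imposed weakly by the boundary
// pipelines.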

#include <stdlib.h>
#include <cmath>
#include <MoFEM.hpp>

using namespace MoFEM;

static char help[] = "...\n\n";

template <int DIM> struct ElementsAndOps {};

//! [Define dimension]
constexpr int SPACE_DIM = 2; //< Space dimension of problem, mesh
//! [Define dimension]

// Element types: the 2D specialisation below is assumed (domain elements are
// faces, boundary elements are edges, following the PipelineManager typedefs)
template <> struct ElementsAndOps<2> {
  using DomainEle = PipelineManager::FaceEle;
  using BoundaryEle = PipelineManager::EdgeEle;
};

using DomainEle = ElementsAndOps<SPACE_DIM>::DomainEle;
using DomainEleOp = DomainEle::UserDataOperator;
using BoundaryEle = ElementsAndOps<SPACE_DIM>::BoundaryEle;
using BoundaryEleOp = BoundaryEle::UserDataOperator;
// Assumed post-processing element type (broken-mesh post-processing on
// domain elements)
using PostProcEle = PostProcBrokenMeshInMoab<DomainEle>;

using OpDomainGradGrad = FormsIntegrators<DomainEleOp>::Assembly<
    PETSC>::BiLinearForm<GAUSS>::OpGradGrad<1, 1, SPACE_DIM>;
using OpDomainMass = FormsIntegrators<DomainEleOp>::Assembly<
    PETSC>::BiLinearForm<GAUSS>::OpMass<1, 1>;
using OpDomainGradTimesVec = FormsIntegrators<DomainEleOp>::Assembly<
    PETSC>::LinearForm<GAUSS>::OpGradTimesTensor<1, 1, SPACE_DIM>;
using OpDomainTimesScalarField = FormsIntegrators<DomainEleOp>::Assembly<
    PETSC>::LinearForm<GAUSS>::OpBaseTimesScalar<1>;

using OpBoundaryMass = FormsIntegrators<BoundaryEleOp>::Assembly<
    PETSC>::BiLinearForm<GAUSS>::OpMass<1, 1>;
using OpBoundaryTimeScalarField = FormsIntegrators<BoundaryEleOp>::Assembly<
    PETSC>::LinearForm<GAUSS>::OpBaseTimesScalar<1>;
using OpBoundarySource = FormsIntegrators<BoundaryEleOp>::Assembly<
    PETSC>::LinearForm<GAUSS>::OpSource<1, 1>;

using OpPPMap = OpPostProcMapInMoab<SPACE_DIM, SPACE_DIM>;

constexpr double wave_speed2 = 1;

/**
 * @brief Monitor solution
 *
 * This function is called by the TS solver at the end of each step. It is
 * used to output results to the hard drive.
 */
struct Monitor : public FEMethod {

  Monitor(SmartPetscObj<DM> dm, boost::shared_ptr<PostProcEle> post_proc)
      : dM(dm), postProc(post_proc){};

  MoFEMErrorCode preProcess() { return 0; }
  MoFEMErrorCode operator()() { return 0; }

  static constexpr int saveEveryNthStep = 1;

  MoFEMErrorCode postProcess() {
    MoFEMFunctionBegin;
    if (ts_step % saveEveryNthStep == 0) {
      // "dFE" is the domain finite element created by the Simple interface
      CHKERR DMoFEMLoopFiniteElements(dM, "dFE", postProc);
      CHKERR postProc->writeFile(
          "out_level_" + boost::lexical_cast<std::string>(ts_step) + ".h5m");
    }
    MoFEMFunctionReturn(0);
  }

private:
  SmartPetscObj<DM> dM;
  boost::shared_ptr<PostProcEle> postProc;
};

struct WaveEquation {
public:
  WaveEquation(MoFEM::Interface &m_field);

  // Declaration of the main function to run analysis
  MoFEMErrorCode runProgram();

private:
  // Declaration of other main functions called in runProgram()
  MoFEMErrorCode readMesh();
  MoFEMErrorCode setupProblem();
  MoFEMErrorCode setIntegrationRules();
  MoFEMErrorCode initialCondition();
  MoFEMErrorCode boundaryCondition();
  MoFEMErrorCode assembleSystem();
  MoFEMErrorCode solveSystem();
  MoFEMErrorCode outputResults();

  // Main interfaces
  MoFEM::Interface &mField;

  // Object to mark boundary entities for the assembling of domain elements
  boost::shared_ptr<std::vector<unsigned char>> boundaryMarker;
};

WaveEquation::WaveEquation(MoFEM::Interface &m_field) : mField(m_field) {}

MoFEMErrorCode WaveEquation::readMesh() {
  MoFEMFunctionBegin;

  // Standard Simple interface mesh loading (mesh file given by -file_name)
  auto *simple = mField.getInterface<Simple>();
  CHKERR simple->getOptions();
  CHKERR simple->loadFile();

  MoFEMFunctionReturn(0);
}

MoFEMErrorCode WaveEquation::setupProblem() {
  MoFEMFunctionBegin;

  auto *simple = mField.getInterface<Simple>();
  CHKERR simple->addDomainField("U", H1, AINSWORTH_LEGENDRE_BASE, 1);
  CHKERR simple->addBoundaryField("U", H1, AINSWORTH_LEGENDRE_BASE, 1);

  int order = 3;
  CHKERR PetscOptionsGetInt(PETSC_NULLPTR, "", "-order", &order, PETSC_NULLPTR);
  CHKERR simple->setFieldOrder("U", order);

  CHKERR simple->setUp();

  MoFEMFunctionReturn(0);
}

MoFEMErrorCode WaveEquation::setIntegrationRules() {
  MoFEMFunctionBegin;

  auto integration_rule = [](int o_row, int o_col, int approx_order) {
    return 2 * approx_order;
  };

  auto *pipeline_mng = mField.getInterface<PipelineManager>();
  CHKERR pipeline_mng->setDomainRhsIntegrationRule(integration_rule);
  CHKERR pipeline_mng->setDomainLhsIntegrationRule(integration_rule);
  CHKERR pipeline_mng->setBoundaryLhsIntegrationRule(integration_rule);
  CHKERR pipeline_mng->setBoundaryRhsIntegrationRule(integration_rule);

  MoFEMFunctionReturn(0);
}
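
// A rule of 2 * approx_order integrates the mass and stiffness integrands of
// an order approx_order basis exactly on affine elements; higher-order
// geometry is handled separately by OpSetHOWeightsOnFace in the domain
// pipelines.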

MoFEMErrorCode WaveEquation::initialCondition() {
  MoFEMFunctionBegin;

  // Zero initial conditions are used: the solution and its time derivative
  // vectors created in solveSystem() are zero by default

  MoFEMFunctionReturn(0);
}

MoFEMErrorCode WaveEquation::boundaryCondition() {
  MoFEMFunctionBegin;

  auto bc_mng = mField.getInterface<BcManager>();
  auto *simple = mField.getInterface<Simple>();
  CHKERR bc_mng->pushMarkDOFsOnEntities(simple->getProblemName(), "ESSENTIAL",
                                        "U", 0, 0);

  auto &bc_map = bc_mng->getBcMapByBlockName();
  boundaryMarker = boost::make_shared<std::vector<unsigned char>>();
  for (auto b : bc_map) {
    if (std::regex_match(b.first, std::regex("(.*)ESSENTIAL(.*)"))) {
      boundaryMarker->resize(b.second->bcMarkers.size(), 0);
      for (int i = 0; i != b.second->bcMarkers.size(); ++i) {
        (*boundaryMarker)[i] |= b.second->bcMarkers[i];
      }
    }
  }

  MoFEMFunctionReturn(0);
}
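
// The boundaryMarker vector merges, DOF by DOF, the markers of every blockset
// whose name contains "ESSENTIAL". OpSetBc below consumes it so that, in
// effect, domain operators skip the marked rows while boundary operators act
// only on them; the boundary value is then imposed weakly through the
// boundary mass and source terms rather than by eliminating the DOFs.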

MoFEMErrorCode WaveEquation::assembleSystem() {
  MoFEMFunctionBegin;

  auto add_domain_base_ops = [&](auto &pipeline) {
    auto det_ptr = boost::make_shared<VectorDouble>();
    auto jac_ptr = boost::make_shared<MatrixDouble>();
    auto inv_jac_ptr = boost::make_shared<MatrixDouble>();
    pipeline.push_back(new OpCalculateHOJac<2>(jac_ptr));
    pipeline.push_back(new OpInvertMatrix<2>(jac_ptr, det_ptr, inv_jac_ptr));
    pipeline.push_back(new OpSetHOInvJacToScalarBases<2>(H1, inv_jac_ptr));
    pipeline.push_back(new OpSetHOWeightsOnFace());
  };
  auto add_domain_lhs_ops = [&](auto &pipeline) {
    pipeline.push_back(new OpSetBc("U", true, boundaryMarker));
    pipeline.push_back(new OpDomainGradGrad(
        "U", "U", [](double, double, double) -> double { return 1; }));
    auto get_c = [this](const double, const double, const double) {
      auto pipeline_mng = mField.getInterface<PipelineManager>();
      auto &fe_domain_lhs = pipeline_mng->getDomainLhsFE();
      return wave_speed2 * fe_domain_lhs->ts_aa;
    };
    pipeline.push_back(new OpDomainMass("U", "U", get_c));
    pipeline.push_back(new OpUnSetBc("U"));
  };
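
  // get_c returns wave_speed2 * ts_aa, where ts_aa is the shift d(u_tt)/d(u)
  // supplied by the time integrator, so the tangent assembled here is the
  // stiffness matrix plus the correspondingly scaled mass matrix.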

  auto add_domain_rhs_ops = [&](auto &pipeline) {
    pipeline.push_back(new OpSetBc("U", true, boundaryMarker));
    auto grad_u_at_gauss_pts = boost::make_shared<MatrixDouble>();
    auto dot2_u_at_gauss_pts = boost::make_shared<VectorDouble>();
    pipeline.push_back(new OpCalculateScalarFieldGradient<SPACE_DIM>(
        "U", grad_u_at_gauss_pts));
    pipeline.push_back(
        new OpCalculateScalarFieldValuesDotDot("U", dot2_u_at_gauss_pts));
    pipeline.push_back(new OpDomainGradTimesVec(
        "U", grad_u_at_gauss_pts,
        [](double, double, double) -> double { return 1; }));
    pipeline.push_back(new OpDomainTimesScalarField(
        "U", dot2_u_at_gauss_pts,
        [](const double, const double, const double) { return wave_speed2; }));
    pipeline.push_back(new OpUnSetBc("U"));
  };
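
  // The domain right-hand side assembles the residual of the current state:
  // grad(v).grad(u) from the field gradient plus wave_speed2 * v * u_tt from
  // its second time derivative, mirroring the tangent terms above.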

  auto add_boundary_base_ops = [&](auto &pipeline) {};

  auto add_lhs_base_ops = [&](auto &pipeline) {
    pipeline.push_back(new OpSetBc("U", false, boundaryMarker));
    pipeline.push_back(new OpBoundaryMass(
        "U", "U", [](const double, const double, const double) { return 1; }));
    pipeline.push_back(new OpUnSetBc("U"));
  };

  auto add_rhs_base_ops = [&](auto &pipeline) {
    pipeline.push_back(new OpSetBc("U", false, boundaryMarker));
    auto u_at_gauss_pts = boost::make_shared<VectorDouble>();
    auto boundary_function = [&](const double x, const double y,
                                 const double z) {
      auto pipeline_mng = mField.getInterface<PipelineManager>();
      auto &fe_rhs = pipeline_mng->getBoundaryRhsFE();
      const double t = fe_rhs->ts_t;
      if ((t <= 0.5) && (x < 0.) && (y > -1. / 3) && (y < 1. / 3))
        return sin(4 * M_PI * t);
      else
        return 0.;
    };
    pipeline.push_back(new OpCalculateScalarFieldValues("U", u_at_gauss_pts));
    pipeline.push_back(new OpBoundaryTimeScalarField(
        "U", u_at_gauss_pts,
        [](const double, const double, const double) { return 1; }));
    pipeline.push_back(new OpBoundarySource("U", boundary_function));
    pipeline.push_back(new OpUnSetBc("U"));
  };
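
  // Boundary conditions are imposed weakly: OpBoundaryMass and
  // OpBoundaryTimeScalarField pair the trace of U with itself, while
  // boundary_function supplies the prescribed value, a sin(4*pi*t) pulse
  // active for t <= 0.5 on the part of the boundary with x < 0 and |y| < 1/3,
  // and zero elsewhere.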

  auto pipeline_mng = mField.getInterface<PipelineManager>();
  add_domain_base_ops(pipeline_mng->getOpDomainLhsPipeline());
  add_domain_base_ops(pipeline_mng->getOpDomainRhsPipeline());
  add_domain_lhs_ops(pipeline_mng->getOpDomainLhsPipeline());
  add_domain_rhs_ops(pipeline_mng->getOpDomainRhsPipeline());

  add_boundary_base_ops(pipeline_mng->getOpBoundaryLhsPipeline());
  add_boundary_base_ops(pipeline_mng->getOpBoundaryRhsPipeline());
  add_lhs_base_ops(pipeline_mng->getOpBoundaryLhsPipeline());
  add_rhs_base_ops(pipeline_mng->getOpBoundaryRhsPipeline());

  MoFEMFunctionReturn(0);
}

MoFEMErrorCode WaveEquation::solveSystem() {
  MoFEMFunctionBegin;

  auto *simple = mField.getInterface<Simple>();
  auto *pipeline_mng = mField.getInterface<PipelineManager>();

  auto create_post_process_element = [&]() {
    auto post_proc_fe = boost::make_shared<PostProcEle>(mField);

    auto det_ptr = boost::make_shared<VectorDouble>();
    auto jac_ptr = boost::make_shared<MatrixDouble>();
    auto inv_jac_ptr = boost::make_shared<MatrixDouble>();
    post_proc_fe->getOpPtrVector().push_back(new OpCalculateHOJac<2>(jac_ptr));
    post_proc_fe->getOpPtrVector().push_back(
        new OpInvertMatrix<2>(jac_ptr, det_ptr, inv_jac_ptr));
    post_proc_fe->getOpPtrVector().push_back(
        new OpSetHOInvJacToScalarBases<2>(H1, inv_jac_ptr));

    auto u_ptr = boost::make_shared<VectorDouble>();
    post_proc_fe->getOpPtrVector().push_back(
        new OpCalculateScalarFieldValues("U", u_ptr));

    post_proc_fe->getOpPtrVector().push_back(
        new OpPPMap(post_proc_fe->getPostProcMesh(),
                    post_proc_fe->getMapGaussPts(),
                    {{"U", u_ptr}},
                    {}, {}, {}));

    return post_proc_fe;
  };

  auto set_time_monitor = [&](auto dm, auto solver) {
    MoFEMFunctionBegin;
    boost::shared_ptr<Monitor> monitor_ptr(
        new Monitor(dm, create_post_process_element()));
    boost::shared_ptr<ForcesAndSourcesCore> null;
    CHKERR DMMoFEMTSSetMonitor(dm, solver, simple->getDomainFEName(),
                               monitor_ptr, null, null);
    MoFEMFunctionReturn(0);
  };

  auto set_fieldsplit_preconditioner = [&](auto solver) {
    MoFEMFunctionBeginHot;
    SNES snes;
    CHKERR TSGetSNES(solver, &snes);
    KSP ksp;
    CHKERR SNESGetKSP(snes, &ksp);
    PC pc;
    CHKERR KSPGetPC(ksp, &pc);
    PetscBool is_pcfs = PETSC_FALSE;
    PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &is_pcfs);

    if (is_pcfs == PETSC_TRUE) {
      auto bc_mng = mField.getInterface<BcManager>();
      auto name_prb = simple->getProblemName();
      auto is_all_bc = bc_mng->getBlockIS(name_prb, "ESSENTIAL", "U", 0, 0);
      int is_all_bc_size;
      CHKERR ISGetSize(is_all_bc, &is_all_bc_size);
      MOFEM_LOG("EXAMPLE", Sev::inform)
          << "Field split block size " << is_all_bc_size;
      CHKERR PCFieldSplitSetIS(pc, PETSC_NULLPTR,
                               is_all_bc); // boundary block
    }
    MoFEMFunctionReturnHot(0);
  };
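
  // The field-split block above is configured only when a PCFIELDSPLIT
  // preconditioner is requested at run time, e.g. (illustrative PETSc options)
  //   -pc_type fieldsplit -fieldsplit_0_pc_type lu
  // in which case the boundary DOFs form their own block.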

  auto dm = simple->getDM();
  auto D = createDMVector(dm);
  CHKERR DMoFEMMeshToLocalVector(dm, D, INSERT_VALUES, SCATTER_FORWARD);

  auto ts = pipeline_mng->createTSIM2();

  CHKERR TSSetSolution(ts, D);
  auto DD = vectorDuplicate(D);
  CHKERR TS2SetSolution(ts, D, DD);
  CHKERR set_time_monitor(dm, ts);
  CHKERR TSSetFromOptions(ts);
  CHKERR set_fieldsplit_preconditioner(ts);
  CHKERR TSSetUp(ts);
  CHKERR TSSolve(ts, NULL);

  CHKERR VecGhostUpdateBegin(D, INSERT_VALUES, SCATTER_FORWARD);
  CHKERR VecGhostUpdateEnd(D, INSERT_VALUES, SCATTER_FORWARD);
  CHKERR DMoFEMMeshToLocalVector(dm, D, INSERT_VALUES, SCATTER_REVERSE);

  MoFEMFunctionReturn(0);
}
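
// createTSIM2() returns a time stepper for a problem that is second order in
// time (the PETSc I2 function/Jacobian interface), which is why the solution D
// and its first time derivative DD are both registered through TS2SetSolution;
// the actual integrator, step size and end time are selected with the usual
// -ts_* run-time options.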

MoFEMErrorCode WaveEquation::outputResults() {
  MoFEMFunctionBegin;

  // Processes to set output results are integrated in solveSystem()

  MoFEMFunctionReturn(0);
}

MoFEMErrorCode WaveEquation::runProgram() {
  MoFEMFunctionBegin;

  CHKERR readMesh();
  CHKERR setupProblem();
  CHKERR setIntegrationRules();
  CHKERR initialCondition();
  CHKERR boundaryCondition();
  CHKERR assembleSystem();
  CHKERR solveSystem();
  CHKERR outputResults();

  MoFEMFunctionReturn(0);
}
int main(int argc, char *argv[]) {

  // Initialisation of MoFEM/PETSc and MOAB data structures
  const char param_file[] = "param_file.petsc";
  MoFEM::Core::Initialize(&argc, &argv, param_file, help);

  // Add logging channel for example
  auto core_log = logging::core::get();
  core_log->add_sink(
      LogManager::createSink(LogManager::getStrmWorld(), "EXAMPLE"));
  LogManager::setLog("EXAMPLE");
  MOFEM_LOG_TAG("EXAMPLE", "example");

  // Error handling
  try {
    // Register MoFEM discrete manager in PETSc
    DMType dm_name = "DMMOFEM";
    CHKERR DMRegister_MoFEM(dm_name);

    // Create MOAB instance
    moab::Core mb_instance;              // mesh database
    moab::Interface &moab = mb_instance; // mesh database interface

    // Create MoFEM instance
    MoFEM::Core core(moab);           // finite element database
    MoFEM::Interface &m_field = core; // finite element interface

    // Run the main analysis
    WaveEquation wave_problem(m_field);
    CHKERR wave_problem.runProgram();
  }
  CATCH_ERRORS;

  // Finish work: cleaning memory, getting statistics, etc.
  CHKERR MoFEM::Core::Finalize();

  return 0;
}
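
// Typical invocation (illustrative values; the executable and mesh file names
// depend on the local build, and any PETSc time-stepping options may be used):
//
//   ./wave_equation -file_name mesh.h5m -order 3 \
//       -ts_type alpha2 -ts_dt 0.01 -ts_max_time 1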