v0.14.0
MatrixManager.cpp
/**
 * \brief Create adjacency matrices using different indices
 *
 */

#define MatrixManagerFunctionBegin                                             \
  MoFEMFunctionBegin;                                                          \
  MOFEM_LOG_CHANNEL("WORLD");                                                  \
  MOFEM_LOG_CHANNEL("SYNC");                                                   \
  MOFEM_LOG_FUNCTION();                                                        \
  MOFEM_LOG_TAG("SYNC", "MatrixManager");                                      \
  MOFEM_LOG_TAG("WORLD", "MatrixManager")

namespace MoFEM {

/** \brief Create compressed matrix

  \note Only function class members are allowed in this class. NO VARIABLES.

  \todo This is an obsolete implementation; the code should be moved to the
  interface MatrixManager.

  \todo While the matrix is created it is assumed that all entities on an
  element are adjacent to each other; in some cases this creates denser
  matrices than necessary. Some kind of filtering could be added.

  \todo Creation of block matrices

  \todo Some efficiency improvements are possible

*/

struct CreateRowComressedADJMatrix : public Core {

  CreateRowComressedADJMatrix(moab::Interface &moab,
                              MPI_Comm comm = PETSC_COMM_WORLD,
                              int verbose = 1)
      : Core(moab, comm, verbose) {}

  typedef FieldEntityEntFiniteElementAdjacencyMap_multiIndex::index<
      Unique_mi_tag>::type AdjByEnt;
  typedef Problem_multiIndex::index<Problem_mi_tag>::type ProblemsByName;
  typedef NumeredDofEntity_multiIndex::index<PetscGlobalIdx_mi_tag>::type
      DofByGlobalPetscIndex;

  /** \brief Create matrix adjacencies

   Depending on the TAG type, which selects the structure used to number
   DOFs, the matrix is partitioned using the part number stored in the
   multi-index, or is partitioned based only on the index number.

   See: Idx_mi_tag PetscGlobalIdx_mi_tag and PetscLocalIdx_mi_tag
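
   A minimal usage sketch (the problem iterator and array names here are
   illustrative assumptions, not part of the interface):

   \code
   std::vector<PetscInt> i, j; // CSR row offsets and column indices
   CHKERR createMatArrays<PetscGlobalIdx_mi_tag>(p_miit, MATMPIAIJ, i, j,
                                                 false, QUIET);
   \endcode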

  */
  template <typename TAG>
  MoFEMErrorCode
  createMatArrays(ProblemsByName::iterator p_miit, const MatType type,
                  std::vector<PetscInt> &i, std::vector<PetscInt> &j,
                  const bool no_diagonals = true, int verb = QUIET) const;

  /** \brief Get element adjacencies
   */
  template <typename TAG>
  MoFEMErrorCode getEntityAdjacenies(
      ProblemsByName::iterator p_miit,
      typename boost::multi_index::index<NumeredDofEntity_multiIndex,
                                         TAG>::type::iterator mit_row,
      boost::shared_ptr<FieldEntity> mofem_ent_ptr,
      std::vector<int> &dofs_col_view, int verb) const;
};

template <typename TAG>
MoFEMErrorCode CreateRowComressedADJMatrix::getEntityAdjacenies(
    ProblemsByName::iterator p_miit,
    typename boost::multi_index::index<NumeredDofEntity_multiIndex,
                                       TAG>::type::iterator mit_row,
    boost::shared_ptr<FieldEntity> mofem_ent_ptr,
    std::vector<int> &dofs_col_view, int verb) const {
  MoFEMFunctionBegin;

  BitRefLevel prb_bit = p_miit->getBitRefLevel();
  BitRefLevel prb_mask = p_miit->getBitRefLevelMask();

  const EmptyFieldBlocks &empty_field_blocks = p_miit->getEmptyFieldBlocks();

  dofs_col_view.clear();
  for (auto r = entFEAdjacencies.get<Unique_mi_tag>().equal_range(
           mofem_ent_ptr->getLocalUniqueId());
       r.first != r.second; ++r.first) {

    if (r.first->byWhat & BYROW) {

      if ((r.first->entFePtr->getId() & p_miit->getBitFEId()).none()) {
        // if element is not part of the problem
        continue;
      }

      const BitRefLevel &fe_bit = r.first->entFePtr->getBitRefLevel();
      // proceed only if the entity is on the problem refinement level
      if ((fe_bit & prb_bit).any() && (fe_bit & prb_mask) == fe_bit) {

        auto check_block = [&empty_field_blocks](auto &r_if, auto &col_id) {
          for (auto &b : empty_field_blocks) {
            if ((b.first & r_if).any() && (b.second & col_id).any()) {
              return false;
            }
          }
          return true;
        };
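
        // check_block returns false when the (row field, column field) pair
        // falls into a block declared empty on the problem, so that coupling
        // is skipped when building the sparsity pattern.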

        for (auto &it : r.first->entFePtr->getColFieldEnts()) {
          if (auto e = it.lock()) {

            if (check_block((*mit_row)->getId(), e->getId())) {

              if (auto cache = e->entityCacheColDofs.lock()) {
                const auto lo = cache->loHi[0];
                const auto hi = cache->loHi[1];
                for (auto vit = lo; vit != hi; ++vit) {

                  const int idx = TAG::get_index(vit);
                  if (PetscLikely(idx >= 0))
                    dofs_col_view.push_back(idx);

#ifndef NDEBUG
                  const DofIdx nb_dofs_col = p_miit->getNbDofsCol();
                  if (PetscUnlikely(idx >= nb_dofs_col)) {
                    MOFEM_LOG("SELF", Sev::error)
                        << "Problem with dof: " << std::endl
                        << "Rank " << rAnk << " : " << *(*vit);
                    SETERRQ(
                        mofemComm, PETSC_ERR_ARG_SIZ,
                        "Index of dof larger than number of DOFs in column");
                  }
#endif
                }

              } else {
                SETERRQ(mofemComm, MOFEM_DATA_INCONSISTENCY, "Cache not set");
              }
            }
          }
        }
      }
    }
  }

  MoFEMFunctionReturn(0);
}

template <typename TAG>
MoFEMErrorCode CreateRowComressedADJMatrix::createMatArrays(
    ProblemsByName::iterator p_miit, const MatType type,
    std::vector<PetscInt> &i, std::vector<PetscInt> &j, const bool no_diagonals,
    int verb) const {
  MatrixManagerFunctionBegin;

  PetscLogEventBegin(MOFEM_EVENT_createMat, 0, 0, 0, 0);

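  // For each field entity adjacent to an element of this problem, cache the
  // range [lo, hi) of its column dofs so that repeated adjacency lookups do
  // not have to search the numbered-dofs multi-index again.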
  auto cache = boost::make_shared<std::vector<EntityCacheNumeredDofs>>(
      entsFields.size());

  size_t idx = 0;
  for (auto it = entsFields.begin(); it != entsFields.end(); ++it, ++idx) {

    const auto uid = (*it)->getLocalUniqueId();
    auto r = entFEAdjacencies.get<Unique_mi_tag>().equal_range(uid);
    for (auto lo = r.first; lo != r.second; ++lo) {

      if ((lo->getBitFEId() & p_miit->getBitFEId()).any()) {

        const BitRefLevel &prb_bit = p_miit->getBitRefLevel();
        const BitRefLevel &prb_mask = p_miit->getBitRefLevelMask();
        const BitRefLevel &fe_bit = lo->entFePtr->getBitRefLevel();

        // skip entities that are not on the problem refinement level
        if ((fe_bit & prb_mask) != fe_bit)
          continue;
        if ((fe_bit & prb_bit).none())
          continue;

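        // Dof UIDs on an entity share the entity UID in their high bits, so
        // OR-ing in the low MAX_DOFS_ON_ENTITY - 1 bits gives the largest
        // possible dof UID on this entity; [lower_bound, upper_bound) then
        // spans exactly the dofs of this entity.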
        auto dit = p_miit->numeredColDofsPtr->lower_bound(uid);

        decltype(dit) hi_dit;
        if (dit != p_miit->numeredColDofsPtr->end())
          hi_dit = p_miit->numeredColDofsPtr->upper_bound(
              uid | static_cast<UId>(MAX_DOFS_ON_ENTITY - 1));
        else
          hi_dit = dit;

        (*it)->entityCacheColDofs =
            boost::shared_ptr<EntityCacheNumeredDofs>(cache, &((*cache)[idx]));
        (*cache)[idx].loHi = {dit, hi_dit};

        break;
      }
    }
  }

  using NumeredDofEntitysByIdx =
      typename boost::multi_index::index<NumeredDofEntity_multiIndex,
                                         TAG>::type;

  // Get multi-indices for rows and columns
  const NumeredDofEntitysByIdx &dofs_row_by_idx =
      p_miit->numeredRowDofsPtr->get<TAG>();
  int nb_dofs_row = p_miit->getNbDofsRow();
  if (nb_dofs_row == 0) {
    SETERRQ1(mofemComm, MOFEM_DATA_INCONSISTENCY, "problem <%s> has zero rows",
             p_miit->getName().c_str());
  }

  // Get adjacencies from other processors
  std::map<int, std::vector<int>> adjacent_dofs_on_other_parts;

  // If not partitioned, set the PETSc layout for the matrix. If partitioned,
  // adjacencies have to be collected from other parts. Note that if only the
  // algebra is partitioned there is no need to collect adjacencies from other
  // entities; those are already on the mesh, which is assumed to be the same
  // on each processor.
  typename boost::multi_index::index<NumeredDofEntity_multiIndex,
                                     TAG>::type::iterator miit_row,
      hi_miit_row;

  if (TAG::IamNotPartitioned) {

    // Get range of local indices
    PetscLayout layout;
    CHKERR PetscLayoutCreate(mofemComm, &layout);
    CHKERR PetscLayoutSetBlockSize(layout, 1);
    CHKERR PetscLayoutSetSize(layout, nb_dofs_row);
    CHKERR PetscLayoutSetUp(layout);
    PetscInt rstart, rend;
    CHKERR PetscLayoutGetRange(layout, &rstart, &rend);
    CHKERR PetscLayoutDestroy(&layout);

    if (verb >= VERBOSE) {
      MOFEM_LOG("SYNC", Sev::noisy)
          << "row lower " << rstart << " row upper " << rend;
      MOFEM_LOG_SYNCHRONISE(mofemComm);
    }

    miit_row = dofs_row_by_idx.lower_bound(rstart);
    hi_miit_row = dofs_row_by_idx.lower_bound(rend);
    if (std::distance(miit_row, hi_miit_row) != rend - rstart) {
      SETERRQ4(
          mofemComm, PETSC_ERR_ARG_SIZ,
          "data inconsistency, std::distance(miit_row,hi_miit_row) != rend - "
          "rstart (%d != %d - %d = %d) ",
          std::distance(miit_row, hi_miit_row), rend, rstart, rend - rstart);
    }

  } else {

    MPI_Comm comm;
    // Make sure it is a PETSc communicator
    CHKERR PetscCommDuplicate(get_comm(), &comm, NULL);

    // get adjacent nodes on other partitions
    std::vector<std::vector<int>> dofs_vec(sIze);

    boost::shared_ptr<FieldEntity> mofem_ent_ptr;
    std::vector<int> dofs_col_view;

    typename boost::multi_index::index<NumeredDofEntity_multiIndex,
                                       TAG>::type::iterator mit_row,
        hi_mit_row;

    mit_row = dofs_row_by_idx.begin();
    hi_mit_row = dofs_row_by_idx.end();
    for (; mit_row != hi_mit_row; mit_row++) {

      // Shared or multishared rows that are not owned; these data have to be
      // sent to the other side.

      // Get entity adjacencies; there is no need to repeat that operation
      // for dofs on the same entity. For simplicity it is assumed that they
      // share the same adjacencies.
      unsigned char pstatus = (*mit_row)->getPStatus();
      if ((pstatus & PSTATUS_NOT_OWNED) &&
          (pstatus & (PSTATUS_SHARED | PSTATUS_MULTISHARED))) {

        bool get_adj_col = true;
        if (mofem_ent_ptr) {
          if (mofem_ent_ptr->getLocalUniqueId() ==
              (*mit_row)->getFieldEntityPtr()->getLocalUniqueId()) {
            get_adj_col = false;
          }
        }

        if (get_adj_col) {
          // Get entity adjacencies
          mofem_ent_ptr = (*mit_row)->getFieldEntityPtr();
          CHKERR getEntityAdjacenies<TAG>(p_miit, mit_row, mofem_ent_ptr,
                                          dofs_col_view, verb);
          // Sort, unique and resize dofs_col_view
          {
            std::sort(dofs_col_view.begin(), dofs_col_view.end());
            std::vector<int>::iterator new_end =
                std::unique(dofs_col_view.begin(), dofs_col_view.end());
            int new_size = std::distance(dofs_col_view.begin(), new_end);
            dofs_col_view.resize(new_size);
          }
          // Add that row. The pattern is: first the row index, then the
          // number of column adjacencies, followed by the column indices.
          int owner = (*mit_row)->getOwnerProc();
          dofs_vec[owner].emplace_back(TAG::get_index(mit_row)); // row index
          dofs_vec[owner].emplace_back(
              dofs_col_view.size()); // nb. of column adjacencies
          // add adjacent cols
          dofs_vec[owner].insert(dofs_vec[owner].end(), dofs_col_view.begin(),
                                 dofs_col_view.end());
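          // For example (illustrative values only), rows 12 and 30 with
          // adjacent columns {4, 7, 9} and {5} are packed as
          //   {12, 3, 4, 7, 9, 30, 1, 5}
          // and decoded on the owner by reading (row, count, cols...) groups.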
        }
      }
    }

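    // Exchange protocol: each rank first learns how many messages it will
    // receive and their lengths, posts non-blocking receives, then sends its
    // packed rows to the owning ranks and waits for completion.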
    int nsends = 0;                         // number of messages to send
    std::vector<int> dofs_vec_length(sIze); // length of the message to proc
    for (int proc = 0; proc < sIze; proc++) {

      if (!dofs_vec[proc].empty()) {

        dofs_vec_length[proc] = dofs_vec[proc].size();
        nsends++;

      } else {

        dofs_vec_length[proc] = 0;
      }
    }

    std::vector<MPI_Status> status(sIze);

    // Computes the number of messages a node expects to receive
    int nrecvs; // number of messages received
    CHKERR PetscGatherNumberOfMessages(comm, NULL, &dofs_vec_length[0],
                                       &nrecvs);

    // Computes info about messages that an MPI node will receive, including
    // (from-id, length) pairs for each message.
    int *onodes;   // list of node-ids from which messages are expected
    int *olengths; // corresponding message lengths
    CHKERR PetscGatherMessageLengths(comm, nsends, nrecvs, &dofs_vec_length[0],
                                     &onodes, &olengths);

    // Gets a unique new tag from a PETSc communicator.
    int tag;
    CHKERR PetscCommGetNewTag(comm, &tag);

    // Allocate a buffer sufficient to hold messages of size specified in
    // olengths, and post Irecvs on these buffers using node info from onodes
    int **rbuf;           // must be freed by user
    MPI_Request *r_waits; // must be freed by user

    // rbuf holds pointers to the messages; it has size nrecvs (number of
    // messages). A single block is allocated at the first index, such that
    // rbuf[i] = rbuf[i-1] + olengths[i-1].
    CHKERR PetscPostIrecvInt(comm, tag, nrecvs, onodes, olengths, &rbuf,
                             &r_waits);

    MPI_Request *s_waits; // requests of sent messages
    CHKERR PetscMalloc1(nsends, &s_waits);

    // Send messages
    for (int proc = 0, kk = 0; proc < sIze; proc++) {
      if (!dofs_vec_length[proc])
        continue; // no message to send to this proc
      CHKERR MPI_Isend(&(dofs_vec[proc])[0],  // buffer to send
                       dofs_vec_length[proc], // message length
                       MPIU_INT, proc,        // to proc
                       tag, comm, s_waits + kk);
      kk++;
    }

    // Wait for receives
    if (nrecvs) {
      CHKERR MPI_Waitall(nrecvs, r_waits, &status[0]);
    }
    // Wait for sent messages
    if (nsends) {
      CHKERR MPI_Waitall(nsends, s_waits, &status[0]);
    }

    for (int kk = 0; kk < nrecvs; kk++) {

      int len = olengths[kk];
      int *data_from_proc = rbuf[kk];

      for (int ii = 0; ii < len;) {

        int row_idx = data_from_proc[ii++];     // get row number
        int nb_adj_dofs = data_from_proc[ii++]; // get nb. of adjacent dofs

        if (debug) {

          DofByGlobalPetscIndex::iterator dit;
          dit = p_miit->numeredRowDofsPtr->get<PetscGlobalIdx_mi_tag>().find(
              row_idx);
          if (dit ==
              p_miit->numeredRowDofsPtr->get<PetscGlobalIdx_mi_tag>().end()) {
            SETERRQ1(mofemComm, MOFEM_DATA_INCONSISTENCY,
                     "dof %d can not be found in problem", row_idx);
          }
        }

        for (int jj = 0; jj < nb_adj_dofs; jj++) {
          adjacent_dofs_on_other_parts[row_idx].push_back(data_from_proc[ii++]);
        }
      }
    }
409 
410  // Cleaning
411  CHKERR PetscFree(s_waits);
412  CHKERR PetscFree(rbuf[0]);
413  CHKERR PetscFree(rbuf);
414  CHKERR PetscFree(r_waits);
415  CHKERR PetscFree(onodes);
416  CHKERR PetscFree(olengths);
417 
418  miit_row = dofs_row_by_idx.begin();
419  hi_miit_row = dofs_row_by_idx.end();
420 
421  CHKERR PetscCommDestroy(&comm);
422  }

  boost::shared_ptr<FieldEntity> mofem_ent_ptr;
  int row_last_evaluated_idx = -1;

  std::vector<int> dofs_vec;
  std::vector<int> dofs_col_view;

  // loop local rows
  int nb_loc_row_from_iterators = 0;
  unsigned int rows_to_fill = p_miit->getNbLocalDofsRow();
  i.reserve(rows_to_fill + 1);
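  // The pair (i, j) is a CSR pattern: i[r] is the offset in j at which row
  // r's column indices start, and the final i.back() == j.size(). For
  // example (illustration only), rows with columns {0, 2}, {1} and {0, 1, 2}
  // are stored as i = {0, 2, 3, 6}, j = {0, 2, 1, 0, 1, 2}.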
  for (; miit_row != hi_miit_row; miit_row++) {

    if (!TAG::IamNotPartitioned) {
      if (static_cast<int>((*miit_row)->getPart()) != rAnk)
        continue;
    }
    // This is only for cross-checking that everything is ok
    nb_loc_row_from_iterators++;

    // add next row to compressed matrix
    i.push_back(j.size());

    // Get entity adjacencies; there is no need to repeat that operation for
    // dofs on the same entity. For simplicity it is assumed that they share
    // the same adjacencies.
    if ((!mofem_ent_ptr)
            ? 1
            : (mofem_ent_ptr->getLocalUniqueId() !=
               (*miit_row)->getFieldEntityPtr()->getLocalUniqueId())) {

      // get entity adjacencies
      mofem_ent_ptr = (*miit_row)->getFieldEntityPtr();
      CHKERR getEntityAdjacenies<TAG>(p_miit, miit_row, mofem_ent_ptr,
                                      dofs_col_view, verb);
      row_last_evaluated_idx = TAG::get_index(miit_row);

      dofs_vec.resize(0);
      // insert dofs_col_view
      dofs_vec.insert(dofs_vec.end(), dofs_col_view.begin(),
                      dofs_col_view.end());

      unsigned char pstatus = (*miit_row)->getPStatus();
      if (pstatus > 0) {
        std::map<int, std::vector<int>>::iterator mit;
        mit = adjacent_dofs_on_other_parts.find(row_last_evaluated_idx);
        if (mit == adjacent_dofs_on_other_parts.end()) {
          // NOTE: A dof can be adjacent to another part, but no elements
          // there use that dof.
          // std::cerr << *miit_row << std::endl;
          // SETERRQ1(
          //     get_comm(), MOFEM_DATA_INCONSISTENCY,
          //     "data inconsistency row_last_evaluated_idx = %d",
          //     row_last_evaluated_idx);
        } else {
          dofs_vec.insert(dofs_vec.end(), mit->second.begin(),
                          mit->second.end());
        }
      }

      // sort and make unique
      sort(dofs_vec.begin(), dofs_vec.end());
      std::vector<int>::iterator new_end =
          unique(dofs_vec.begin(), dofs_vec.end());
      int new_size = std::distance(dofs_vec.begin(), new_end);
      dofs_vec.resize(new_size);
    }

    // Try to be smart reserving memory
    if (j.capacity() < j.size() + dofs_vec.size()) {

      // TODO: [CORE-55] Improve algorithm estimating size of compressed matrix
      unsigned int nb_nonzero = j.size() + dofs_vec.size();
      unsigned int average_row_fill = nb_nonzero / i.size() + 1;
      j.reserve(rows_to_fill * average_row_fill);
    }

    auto hi_diit = dofs_vec.end();
    for (auto diit = dofs_vec.begin(); diit != hi_diit; diit++) {

      if (no_diagonals) {
        if (*diit == TAG::get_index(miit_row)) {
          continue;
        }
      }
      j.push_back(*diit);
    }
  }

  // close the compressed row offsets
  i.push_back(j.size());

  if (strcmp(type, MATMPIADJ) == 0) {

    // Adjacency matrix used to partition problems, e.g. by METIS
    if (i.size() - 1 != (unsigned int)nb_loc_row_from_iterators) {
      SETERRQ2(
          get_comm(), PETSC_ERR_ARG_SIZ,
          "Number of rows from iterator is different than size of rows in "
          "compressed row "
          "matrix (unsigned int)nb_local_dofs_row != i.size() - 1, i.e. %d != "
          "%d",
          (unsigned int)nb_loc_row_from_iterators, i.size() - 1);
    }

  } else if (strcmp(type, MATMPIAIJ) == 0) {

    // Compressed MPIAIJ matrix
    if (i.size() - 1 != (unsigned int)nb_loc_row_from_iterators) {
      SETERRQ2(
          get_comm(), PETSC_ERR_ARG_SIZ,
          "Number of rows from iterator is different than size of rows in "
          "compressed row "
          "matrix (unsigned int)nb_local_dofs_row != i.size() - 1, i.e. %d != "
          "%d",
          (unsigned int)nb_loc_row_from_iterators, i.size() - 1);
    }
    PetscInt nb_local_dofs_row = p_miit->getNbLocalDofsRow();
    if ((unsigned int)nb_local_dofs_row != i.size() - 1) {
      SETERRQ2(
          get_comm(), PETSC_ERR_ARG_SIZ,
          "Number of rows is different than size of rows in compressed row "
          "matrix (unsigned int)nb_local_dofs_row != i.size() - 1, i.e. %d != "
          "%d",
          (unsigned int)nb_local_dofs_row, i.size() - 1);
    }

  } else if (strcmp(type, MATAIJ) == 0) {

    // Sequential compressed AIJ matrix
    if (i.size() - 1 != (unsigned int)nb_loc_row_from_iterators) {
      SETERRQ2(
          get_comm(), PETSC_ERR_ARG_SIZ,
          "Number of rows from iterator is different than size of rows in "
          "compressed row "
          "matrix (unsigned int)nb_local_dofs_row != i.size() - 1, i.e. %d != "
          "%d",
          (unsigned int)nb_loc_row_from_iterators, i.size() - 1);
    }
    PetscInt nb_local_dofs_row = p_miit->getNbLocalDofsRow();
    if ((unsigned int)nb_local_dofs_row != i.size() - 1) {
      SETERRQ2(
          get_comm(), PETSC_ERR_ARG_SIZ,
          "Number of rows is different than size of rows in compressed row "
          "matrix (unsigned int)nb_local_dofs_row != i.size() - 1, i.e. %d != "
          "%d",
          (unsigned int)nb_local_dofs_row, i.size() - 1);
    }

  } else if (strcmp(type, MATAIJCUSPARSE) == 0) {

    if (i.size() - 1 != (unsigned int)nb_loc_row_from_iterators) {
      SETERRQ(get_comm(), PETSC_ERR_ARG_SIZ, "data inconsistency");
    }
    PetscInt nb_local_dofs_row = p_miit->getNbLocalDofsRow();
    if ((unsigned int)nb_local_dofs_row != i.size() - 1) {
      SETERRQ(get_comm(), PETSC_ERR_ARG_SIZ, "data inconsistency");
    }

  } else {

    SETERRQ(get_comm(), PETSC_ERR_ARG_NULL, "not implemented");
  }

  PetscLogEventEnd(MOFEM_EVENT_createMat, 0, 0, 0, 0);
  MoFEMFunctionReturn(0);
}

MoFEMErrorCode
MatrixManager::query_interface(boost::typeindex::type_index type_index,
                               UnknownInterface **iface) const {
  *iface = const_cast<MatrixManager *>(this);
  return 0;
}

MatrixManager::MatrixManager(const MoFEM::Core &core)
    : cOre(const_cast<MoFEM::Core &>(core)) {
  PetscLogEventRegister("MatrixManagerCreateMPIAIJ", 0,
                        &MOFEM_EVENT_createMPIAIJ);
  PetscLogEventRegister("MatrixManagerCreateMPIAIJWithArrays", 0,
                        &MOFEM_EVENT_createMPIAIJWithArrays);
  PetscLogEventRegister("MatrixManagerCreateMPIAdjWithArrays", 0,
                        &MOFEM_EVENT_createMPIAdjWithArrays);
  PetscLogEventRegister("MatrixManagerCreateMPIAIJCUSPARSEWithArrays", 0,
                        &MOFEM_EVENT_createMPIAIJCUSPARSEWithArrays);
  PetscLogEventRegister("MatrixManagerCreateSeqAIJCUSPARSEWithArrays", 0,
                        &MOFEM_EVENT_createSeqAIJCUSPARSEWithArrays);
  PetscLogEventRegister("MatrixManagerCreateSeqAIJWithArrays", 0,
                        &MOFEM_EVENT_createSeqAIJWithArrays);
  PetscLogEventRegister("MatrixManagerCheckMPIAIJWithArraysMatrixFillIn", 0,
                        &MOFEM_EVENT_checkMatrixFillIn);
}

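// A minimal usage sketch (hypothetical problem name; the m_field set-up is
// assumed to exist elsewhere):
//
//   Mat Aij;
//   CHKERR m_field.getInterface<MatrixManager>()
//       ->createMPIAIJWithArrays<PetscGlobalIdx_mi_tag>("PROBLEM", &Aij);
//   // ... assemble and use Aij ...
//   CHKERR MatDestroy(&Aij);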
template <>
MoFEMErrorCode MatrixManager::createMPIAIJWithArrays<PetscGlobalIdx_mi_tag>(
    const std::string name, Mat *Aij, int verb) {
  MatrixManagerFunctionBegin;

  MoFEM::CoreInterface &m_field = cOre;
  CreateRowComressedADJMatrix *core_ptr =
      static_cast<CreateRowComressedADJMatrix *>(&cOre);
  PetscLogEventBegin(MOFEM_EVENT_createMPIAIJWithArrays, 0, 0, 0, 0);

  auto problems_ptr = m_field.get_problems();
  auto &prb = problems_ptr->get<Problem_mi_tag>();
  auto p_miit = prb.find(name);
  if (p_miit == prb.end()) {
    SETERRQ1(m_field.get_comm(), MOFEM_NOT_FOUND,
             "problem < %s > is not found (top tip: check spelling)",
             name.c_str());
  }

  std::vector<int> i_vec, j_vec;
  j_vec.reserve(10000);
  CHKERR core_ptr->createMatArrays<PetscGlobalIdx_mi_tag>(
      p_miit, MATMPIAIJ, i_vec, j_vec, false, verb);

  int nb_row_dofs = p_miit->getNbDofsRow();
  int nb_col_dofs = p_miit->getNbDofsCol();
  int nb_local_dofs_row = p_miit->getNbLocalDofsRow();
  int nb_local_dofs_col = p_miit->getNbLocalDofsCol();

  CHKERR ::MatCreateMPIAIJWithArrays(
      m_field.get_comm(), nb_local_dofs_row, nb_local_dofs_col, nb_row_dofs,
      nb_col_dofs, &*i_vec.begin(), &*j_vec.begin(), PETSC_NULL, Aij);

  PetscLogEventEnd(MOFEM_EVENT_createMPIAIJWithArrays, 0, 0, 0, 0);
  MoFEMFunctionReturn(0);
}

template <>
MoFEMErrorCode
MatrixManager::createMPIAIJCUSPARSEWithArrays<PetscGlobalIdx_mi_tag>(
    const std::string name, Mat *Aij, int verb) {
  MatrixManagerFunctionBegin;

  MoFEM::CoreInterface &m_field = cOre;
  CreateRowComressedADJMatrix *core_ptr =
      static_cast<CreateRowComressedADJMatrix *>(&cOre);
  PetscLogEventBegin(MOFEM_EVENT_createMPIAIJCUSPARSEWithArrays, 0, 0, 0, 0);

  auto problems_ptr = m_field.get_problems();
  auto &prb = problems_ptr->get<Problem_mi_tag>();
  auto p_miit = prb.find(name);
  if (p_miit == prb.end()) {
    SETERRQ1(m_field.get_comm(), MOFEM_NOT_FOUND,
             "problem < %s > is not found (top tip: check spelling)",
             name.c_str());
  }

  std::vector<int> i_vec, j_vec;
  j_vec.reserve(10000);
  CHKERR core_ptr->createMatArrays<PetscGlobalIdx_mi_tag>(
      p_miit, MATAIJCUSPARSE, i_vec, j_vec, false, verb);

  int nb_row_dofs = p_miit->getNbDofsRow();
  int nb_col_dofs = p_miit->getNbDofsCol();
  int nb_local_dofs_row = p_miit->getNbLocalDofsRow();
  int nb_local_dofs_col = p_miit->getNbLocalDofsCol();

  auto get_layout = [&]() {
    int start_ranges, end_ranges;
    PetscLayout layout;
    CHKERR PetscLayoutCreate(m_field.get_comm(), &layout);
    CHKERR PetscLayoutSetBlockSize(layout, 1);
    CHKERR PetscLayoutSetLocalSize(layout, nb_local_dofs_col);
    CHKERR PetscLayoutSetUp(layout);
    CHKERR PetscLayoutGetRange(layout, &start_ranges, &end_ranges);
    CHKERR PetscLayoutDestroy(&layout);
    return std::make_pair(start_ranges, end_ranges);
  };

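  // Count, per local row, the nonzeros in the "diagonal" block (columns
  // owned by this rank, i.e. inside [layout.first, layout.second)) and in
  // the "off-diagonal" block; PETSc preallocation takes these counts
  // separately as d_nnz and o_nnz.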
  auto get_nnz = [&](auto &d_nnz, auto &o_nnz) {
    MoFEMFunctionBeginHot;
    auto layout = get_layout();
    int j = 0;
    for (int i = 0; i != nb_local_dofs_row; ++i) {
      for (; j != i_vec[i + 1]; ++j) {
        if (j_vec[j] < layout.second && j_vec[j] >= layout.first)
          ++(d_nnz[i]);
        else
          ++(o_nnz[i]);
      }
    }
    MoFEMFunctionReturnHot(0);
  };

  std::vector<int> d_nnz(nb_local_dofs_row, 0), o_nnz(nb_local_dofs_row, 0);
  CHKERR get_nnz(d_nnz, o_nnz);

#ifdef PETSC_HAVE_CUDA
  CHKERR ::MatCreateAIJCUSPARSE(m_field.get_comm(), nb_local_dofs_row,
                                nb_local_dofs_col, nb_row_dofs, nb_col_dofs, 0,
                                &*d_nnz.begin(), 0, &*o_nnz.begin(), Aij);
#else
  SETERRQ(PETSC_COMM_SELF, MOFEM_NOT_IMPLEMENTED,
          "Error: To use this matrix type compile PETSc with CUDA.");
#endif

  PetscLogEventEnd(MOFEM_EVENT_createMPIAIJCUSPARSEWithArrays, 0, 0, 0, 0);

  MoFEMFunctionReturn(0);
}

template <>
MoFEMErrorCode
MatrixManager::createSeqAIJCUSPARSEWithArrays<PetscLocalIdx_mi_tag>(
    const std::string name, Mat *Aij, int verb) {
  MoFEMFunctionBeginHot;

#ifdef PETSC_HAVE_CUDA
  // CHKERR ::MatCreateSeqAIJCUSPARSE(MPI_Comm comm, PetscInt m, PetscInt n,
  //                                  PetscInt nz, const PetscInt nnz[],
  //                                  Mat *A);
  SETERRQ(PETSC_COMM_SELF, MOFEM_NOT_IMPLEMENTED,
          "Not implemented type of matrix yet, try MPI version (aijcusparse)");
#endif

  MoFEMFunctionReturnHot(0);
}

template <>
MoFEMErrorCode
MatrixManager::createMPIAIJ<PetscGlobalIdx_mi_tag>(const std::string name,
                                                   Mat *Aij, int verb) {
  MoFEM::CoreInterface &m_field = cOre;
  CreateRowComressedADJMatrix *core_ptr =
      static_cast<CreateRowComressedADJMatrix *>(&cOre);
  MatrixManagerFunctionBegin;
  PetscLogEventBegin(MOFEM_EVENT_createMPIAIJ, 0, 0, 0, 0);

  auto problems_ptr = m_field.get_problems();
  auto &prb = problems_ptr->get<Problem_mi_tag>();
  auto p_miit = prb.find(name);
  if (p_miit == prb.end()) {
    SETERRQ1(m_field.get_comm(), MOFEM_NOT_FOUND,
             "problem < %s > is not found (top tip: check spelling)",
             name.c_str());
  }

  std::vector<int> i_vec, j_vec;
  j_vec.reserve(10000);
  CHKERR core_ptr->createMatArrays<PetscGlobalIdx_mi_tag>(
      p_miit, MATMPIAIJ, i_vec, j_vec, false, verb);

  int nb_row_dofs = p_miit->getNbDofsRow();
  int nb_col_dofs = p_miit->getNbDofsCol();
  int nb_local_dofs_row = p_miit->getNbLocalDofsRow();
  int nb_local_dofs_col = p_miit->getNbLocalDofsCol();

  auto get_layout = [&]() {
    int start_ranges, end_ranges;
    PetscLayout layout;
    CHKERR PetscLayoutCreate(m_field.get_comm(), &layout);
    CHKERR PetscLayoutSetBlockSize(layout, 1);
    CHKERR PetscLayoutSetLocalSize(layout, nb_local_dofs_col);
    CHKERR PetscLayoutSetUp(layout);
    CHKERR PetscLayoutGetRange(layout, &start_ranges, &end_ranges);
    CHKERR PetscLayoutDestroy(&layout);
    return std::make_pair(start_ranges, end_ranges);
  };

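  // As in the CUSPARSE variant above: count per-row nonzeros in the diagonal
  // block (owned columns) and the off-diagonal block, feeding
  // MatMPIAIJSetPreallocation below.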
  auto get_nnz = [&](auto &d_nnz, auto &o_nnz) {
    MoFEMFunctionBeginHot;
    auto layout = get_layout();
    int j = 0;
    for (int i = 0; i != nb_local_dofs_row; ++i) {
      for (; j != i_vec[i + 1]; ++j) {
        if (j_vec[j] < layout.second && j_vec[j] >= layout.first)
          ++(d_nnz[i]);
        else
          ++(o_nnz[i]);
      }
    }
    MoFEMFunctionReturnHot(0);
  };

  std::vector<int> d_nnz(nb_local_dofs_row, 0), o_nnz(nb_local_dofs_row, 0);
  CHKERR get_nnz(d_nnz, o_nnz);

  CHKERR MatCreate(m_field.get_comm(), Aij);
  CHKERR MatSetSizes(*Aij, nb_local_dofs_row, nb_local_dofs_col, nb_row_dofs,
                     nb_col_dofs);
  CHKERR MatSetType(*Aij, MATMPIAIJ);
  CHKERR MatMPIAIJSetPreallocation(*Aij, 0, &*d_nnz.begin(), 0,
                                   &*o_nnz.begin());

  PetscLogEventEnd(MOFEM_EVENT_createMPIAIJ, 0, 0, 0, 0);
  MoFEMFunctionReturn(0);
}

template <>
MoFEMErrorCode
MatrixManager::createMPIAdjWithArrays<Idx_mi_tag>(const std::string name,
                                                  Mat *Adj, int verb) {
  MoFEM::CoreInterface &m_field = cOre;
  CreateRowComressedADJMatrix *core_ptr =
      static_cast<CreateRowComressedADJMatrix *>(&cOre);
  MatrixManagerFunctionBegin;
  PetscLogEventBegin(MOFEM_EVENT_createMPIAdjWithArrays, 0, 0, 0, 0);

  auto problems_ptr = m_field.get_problems();
  auto &prb = problems_ptr->get<Problem_mi_tag>();
  auto p_miit = prb.find(name);
  if (p_miit == prb.end()) {
    SETERRQ1(m_field.get_comm(), MOFEM_NOT_FOUND,
             "problem < %s > is not found (top tip: check spelling)",
             name.c_str());
  }

  std::vector<int> i_vec, j_vec;
  j_vec.reserve(10000);
  CHKERR core_ptr->createMatArrays<Idx_mi_tag>(p_miit, MATMPIADJ, i_vec, j_vec,
                                               true, verb);
  int *_i, *_j;
  CHKERR PetscMalloc(i_vec.size() * sizeof(int), &_i);
  CHKERR PetscMalloc(j_vec.size() * sizeof(int), &_j);
  copy(i_vec.begin(), i_vec.end(), _i);
  copy(j_vec.begin(), j_vec.end(), _j);

  int nb_col_dofs = p_miit->getNbDofsCol();
  CHKERR MatCreateMPIAdj(m_field.get_comm(), i_vec.size() - 1, nb_col_dofs, _i,
                         _j, PETSC_NULL, Adj);
  CHKERR MatSetOption(*Adj, MAT_STRUCTURALLY_SYMMETRIC, PETSC_TRUE);

  PetscLogEventEnd(MOFEM_EVENT_createMPIAdjWithArrays, 0, 0, 0, 0);
  MoFEMFunctionReturn(0);
}

template <>
MoFEMErrorCode MatrixManager::createSeqAIJWithArrays<PetscLocalIdx_mi_tag>(
    const std::string name, Mat *Aij, int verb) {
  MoFEM::CoreInterface &m_field = cOre;
  CreateRowComressedADJMatrix *core_ptr =
      static_cast<CreateRowComressedADJMatrix *>(&cOre);
  MatrixManagerFunctionBegin;
  PetscLogEventBegin(MOFEM_EVENT_createMPIAIJWithArrays, 0, 0, 0, 0);

  auto problems_ptr = m_field.get_problems();
  auto &prb = problems_ptr->get<Problem_mi_tag>();
  auto p_miit = prb.find(name);
  if (p_miit == prb.end()) {
    SETERRQ1(m_field.get_comm(), MOFEM_NOT_FOUND,
             "problem < %s > is not found (top tip: check spelling)",
             name.c_str());
  }

  std::vector<int> i_vec, j_vec;
  j_vec.reserve(10000);
  CHKERR core_ptr->createMatArrays<PetscGlobalIdx_mi_tag>(p_miit, MATAIJ, i_vec,
                                                          j_vec, false, verb);

  int nb_local_dofs_row = p_miit->getNbLocalDofsRow();
  int nb_local_dofs_col = p_miit->getNbLocalDofsCol();

  double *_a;
  CHKERR PetscMalloc(j_vec.size() * sizeof(double), &_a);

  Mat tmpMat;
  CHKERR ::MatCreateSeqAIJWithArrays(PETSC_COMM_SELF, nb_local_dofs_row,
                                     nb_local_dofs_col, &*i_vec.begin(),
                                     &*j_vec.begin(), _a, &tmpMat);
  CHKERR MatDuplicate(tmpMat, MAT_SHARE_NONZERO_PATTERN, Aij);
  CHKERR MatDestroy(&tmpMat);

  CHKERR PetscFree(_a);

  PetscLogEventEnd(MOFEM_EVENT_createMPIAIJWithArrays, 0, 0, 0, 0);
  MoFEMFunctionReturn(0);
}

MoFEMErrorCode MatrixManager::checkMatrixFillIn(const std::string problem_name,
                                                int row_print, int col_print,
                                                Mat A, int verb) {
  MoFEM::CoreInterface &m_field = cOre;
  MoFEMFunctionBegin;

  PetscLogEventBegin(MOFEM_EVENT_checkMatrixFillIn, 0, 0, 0, 0);

  struct TestMatrixFillIn : public FEMethod {
    CoreInterface *mFieldPtr;

    Mat A;

    int rowPrint, colPrint;

    TestMatrixFillIn(CoreInterface *m_field_ptr, Mat a, int row_print,
                     int col_print)
        : mFieldPtr(m_field_ptr), A(a), rowPrint(row_print),
          colPrint(col_print){};

    MoFEMErrorCode preProcess() {
      MoFEMFunctionBegin;
      MoFEMFunctionReturn(0);
    }

    MoFEMErrorCode operator()() {
      MoFEMFunctionBegin;

      if (refinedFiniteElementsPtr->find(
              numeredEntFiniteElementPtr->getEnt()) ==
          refinedFiniteElementsPtr->end()) {
        SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
                "data inconsistency");
      }

      auto row_dofs = getRowDofsPtr();
      auto col_dofs = getColDofsPtr();

      for (auto cit = col_dofs->begin(); cit != col_dofs->end(); cit++) {

        if (refinedEntitiesPtr->find((*cit)->getEnt()) ==
            refinedEntitiesPtr->end()) {
          SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
                  "data inconsistency");
        }
        if (!(*cit)->getActive()) {
          SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
                  "data inconsistency");
        }

        FieldEntityEntFiniteElementAdjacencyMap_multiIndex::index<
            Composite_Unique_mi_tag>::type::iterator ait;
        ait = adjacenciesPtr->get<Composite_Unique_mi_tag>().find(
            boost::make_tuple((*cit)->getFieldEntityPtr()->getLocalUniqueId(),
                              numeredEntFiniteElementPtr->getLocalUniqueId()));
        if (ait == adjacenciesPtr->end()) {
          SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
                  "adjacencies data inconsistency");
        } else {
          UId uid = ait->getEntUniqueId();
          if (entitiesPtr->find(uid) == entitiesPtr->end()) {
            SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
                    "data inconsistency");
          }
          if (dofsPtr->find((*cit)->getLocalUniqueId()) == dofsPtr->end()) {
            SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
                    "data inconsistency");
          }
        }

        if ((*cit)->getEntType() != MBVERTEX) {

          auto range =
              col_dofs->get<Ent_mi_tag>().equal_range((*cit)->getEnt());
          int nb_dofs_on_ent = std::distance(range.first, range.second);

          int max_order = (*cit)->getMaxOrder();
          if ((*cit)->getNbOfCoeffs() * (*cit)->getOrderNbDofs(max_order) !=
              nb_dofs_on_ent) {

            /* It could be that you have removed DOFs from the problem; for
               example, if this was a vector field with components
               {Ux,Uy,Uz}, you may have removed Uz on the element. */

            MOFEM_LOG("SELF", Sev::warning)
                << "Warning: Number of Dofs in Col different than number "
                   "of dofs for given entity order "
                << (*cit)->getNbOfCoeffs() * (*cit)->getOrderNbDofs(max_order)
                << " " << nb_dofs_on_ent;
          }
        }
      }

      for (auto rit = row_dofs->begin(); rit != row_dofs->end(); rit++) {

        if (refinedEntitiesPtr->find((*rit)->getEnt()) ==
            refinedEntitiesPtr->end()) {
          SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
                  "data inconsistency");
        }
        if (!(*rit)->getActive()) {
          SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
                  "data inconsistency");
        }

        FieldEntityEntFiniteElementAdjacencyMap_multiIndex::index<
            Composite_Unique_mi_tag>::type::iterator ait;
        ait = adjacenciesPtr->get<Composite_Unique_mi_tag>().find(
            boost::make_tuple((*rit)->getFieldEntityPtr()->getLocalUniqueId(),
                              numeredEntFiniteElementPtr->getLocalUniqueId()));
        if (ait == adjacenciesPtr->end()) {
          MOFEM_LOG_ATTRIBUTES("SELF", LogManager::BitScope);
          MOFEM_LOG("SELF", Sev::error) << *(*rit);
          MOFEM_LOG("SELF", Sev::error) << *numeredEntFiniteElementPtr;
          MOFEM_LOG("SELF", Sev::error) << "dof: " << (*rit)->getBitRefLevel();
          MOFEM_LOG("SELF", Sev::error)
              << "fe: " << numeredEntFiniteElementPtr->getBitRefLevel();
          MOFEM_LOG("SELF", Sev::error)
              << "problem: " << problemPtr->getBitRefLevel();
          MOFEM_LOG("SELF", Sev::error)
              << "problem mask: " << problemPtr->getBitRefLevelMask();
          SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
                  "adjacencies data inconsistency");
        } else {
          UId uid = ait->getEntUniqueId();
          if (entitiesPtr->find(uid) == entitiesPtr->end()) {
            SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
                    "data inconsistency");
          }
          if (dofsPtr->find((*rit)->getLocalUniqueId()) == dofsPtr->end()) {
            SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
                    "data inconsistency");
          }
        }
        int row = (*rit)->getPetscGlobalDofIdx();

        auto col_dofs = getColDofsPtr();
        for (auto cit = col_dofs->begin(); cit != col_dofs->end(); cit++) {

          int col = (*cit)->getPetscGlobalDofIdx();

          if (row == rowPrint && col == colPrint) {
            MOFEM_LOG("SELF", Sev::noisy) << "fe:\n"
                                          << *numeredEntFiniteElementPtr;
            MOFEM_LOG("SELF", Sev::noisy) << "row:\n" << *(*rit);
            MOFEM_LOG("SELF", Sev::noisy) << "col:\n" << *(*cit);
            MOFEM_LOG("SELF", Sev::noisy)
                << "fe:\n"
                << numeredEntFiniteElementPtr->getBitRefLevel();
            MOFEM_LOG("SELF", Sev::noisy) << "row:\n"
                                          << (*rit)->getBitRefLevel();
            MOFEM_LOG("SELF", Sev::noisy) << "col:\n"
                                          << (*cit)->getBitRefLevel();
          }

          CHKERR MatSetValue(A, row, col, 1, INSERT_VALUES);
        }

        if ((*rit)->getEntType() != MBVERTEX) {

          auto range =
              row_dofs->get<Ent_mi_tag>().equal_range((*rit)->getEnt());
          int nb_dofs_on_ent = std::distance(range.first, range.second);

          int max_order = (*rit)->getMaxOrder();
          if ((*rit)->getNbOfCoeffs() * (*rit)->getOrderNbDofs(max_order) !=
              nb_dofs_on_ent) {

            /* It could be that you have removed DOFs from the problem; for
             * example, if this was a vector field with components
             * {Ux,Uy,Uz}, you may have removed Uz on the element. */

            MOFEM_LOG("SELF", Sev::warning)
                << "Warning: Number of Dofs in Row different than number "
                   "of dofs for given entity order "
                << (*rit)->getNbOfCoeffs() * (*rit)->getOrderNbDofs(max_order)
                << " " << nb_dofs_on_ent;
          }
        }
      }

      MoFEMFunctionReturn(0);
    }

    MoFEMErrorCode postProcess() {
      MoFEMFunctionBegin;

      // cerr << mFieldPtr->get_comm_rank() << endl;
      CHKERR MatAssemblyBegin(A, MAT_FLUSH_ASSEMBLY);
      CHKERR MatAssemblyEnd(A, MAT_FLUSH_ASSEMBLY);

      MoFEMFunctionReturn(0);
    }
  };

  CHKERR MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);
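  // With MAT_NEW_NONZERO_ALLOCATION_ERR set, inserting a value at every
  // row/column pair generated by the elements below raises an error if the
  // preallocated fill-in misses any element connectivity.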

  if (verb >= VERY_VERBOSE) {
    MatView(A, PETSC_VIEWER_STDOUT_WORLD);
  }

  if (verb >= NOISY) {
    MatView(A, PETSC_VIEWER_DRAW_WORLD);
    std::string wait;
    std::cin >> wait;
  }

  TestMatrixFillIn method(&m_field, A, row_print, col_print);

  // get problem
  auto problems_ptr = m_field.get_problems();
  auto &prb_set = problems_ptr->get<Problem_mi_tag>();
  auto p_miit = prb_set.find(problem_name);
  if (p_miit == prb_set.end())
    SETERRQ1(m_field.get_comm(), MOFEM_DATA_INCONSISTENCY,
             "problem < %s > not found (top tip: check spelling)",
             problem_name.c_str());
  MOFEM_LOG_C("WORLD", Sev::inform, "check problem < %s >",
              problem_name.c_str());

  // loop over all elements in the problem and check that assembly proceeds
  // without error
  auto fe_ptr = m_field.get_finite_elements();
  for (auto &fe : *fe_ptr) {
    MOFEM_LOG_C("WORLD", Sev::verbose, "\tcheck element %s",
                fe->getName().c_str());
    CHKERR m_field.loop_finite_elements(problem_name, fe->getName(), method,
                                        nullptr, MF_EXIST,
                                        CacheTupleSharedPtr(), verb);
  }

  CHKERR MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
  CHKERR MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);

  PetscLogEventEnd(MOFEM_EVENT_checkMatrixFillIn, 0, 0, 0, 0);

  MoFEMFunctionReturn(0);
}

template <>
MoFEMErrorCode
MatrixManager::checkMPIAIJWithArraysMatrixFillIn<PetscGlobalIdx_mi_tag>(
    const std::string problem_name, int row_print, int col_print, int verb) {
  MoFEMFunctionBegin;
  // create matrix
  SmartPetscObj<Mat> A;
  CHKERR createMPIAIJWithArrays<PetscGlobalIdx_mi_tag>(problem_name, A, verb);
  CHKERR MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);
  CHKERR checkMatrixFillIn(problem_name, row_print, col_print, A, verb);
  MoFEMFunctionReturn(0);
}

template <>
MoFEMErrorCode MatrixManager::checkMPIAIJMatrixFillIn<PetscGlobalIdx_mi_tag>(
    const std::string problem_name, int row_print, int col_print, int verb) {
  MoFEMFunctionBegin;
  // create matrix
  SmartPetscObj<Mat> A;
  CHKERR createMPIAIJ<PetscGlobalIdx_mi_tag>(problem_name, A, verb);
  CHKERR MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);
  CHKERR checkMatrixFillIn(problem_name, row_print, col_print, A, verb);
  MoFEMFunctionReturn(0);
}

} // namespace MoFEM