v0.14.0
MatrixManager.cpp
Go to the documentation of this file.
1 /**
2  * \brief Create adjacent matrices using different indices
3  *
4  */
5 
// Common prologue for MatrixManager member functions: opens the MoFEM
// function scope (MoFEMFunctionBegin) and routes subsequent log output to the
// "WORLD" and "SYNC" channels, tagging both with "MatrixManager" so messages
// emitted by these functions are attributable to this interface.
6 #define MatrixManagerFunctionBegin \
7  MoFEMFunctionBegin; \
8  MOFEM_LOG_CHANNEL("WORLD"); \
9  MOFEM_LOG_CHANNEL("SYNC"); \
10  MOFEM_LOG_FUNCTION(); \
11  MOFEM_LOG_TAG("SYNC", "MatrixManager"); \
12  MOFEM_LOG_TAG("WORLD", "MatrixManager")
13 
14 namespace MoFEM {
15 
16 /** \brief Create compressed matrix
17 
18  \note Only function class members are allowed in this class. NO VARIABLES.
19 
20  \todo It is obsolete implementation, code should be moved to interface
21  MatrixManager.
22 
23  \todo While matrix is created is assumed that all entities on element are
24  adjacent to each other, in some cases, this is created denser matrices than it
25  should be. Some kind of filtering can be added.
26 
27  \todo Creation of the block matrices
28 
29  \todo Some efficiency improvement are possible
30 
31 
32  */
// NOTE(review): this listing is a doxygen scrape with gaps — the class head
// (presumably `struct CreateRowCompressedADJMatrix : public Core`, since the
// constructor below delegates to Core(moab, comm, verbose)) and the first
// line of the constructor signature are missing from this view; confirm
// against the real source.
34 
36  MPI_Comm comm = PETSC_COMM_WORLD,
37  int verbose = 1)
38  : Core(moab, comm, verbose) {}
39 
41 
42  /** \brief Create matrix adjacencies
43 
44  Depending on TAG type, which some structure used to number dofs, matrix is
45  partitioned using part number stored in multi-index, or is partitioned on
46  parts based only on index number.
47 
48  See: Idx_mi_tag PetscGlobalIdx_mi_tag and PetscLocalIdx_mi_tag
49 
50  */
// Fills compressed-row arrays i (row offsets) and j (column indices) for the
// given problem; return-type line (MoFEMErrorCode) is absent from this scrape.
51  template <typename TAG>
53  createMatArrays(ProblemsByName::iterator p_miit, const MatType type,
54  std::vector<PetscInt> &i, std::vector<PetscInt> &j,
55  const bool no_diagonals = true, int verb = QUIET) const;
56 
57 private:
// NOTE(review): the right-hand sides of both aliases below were dropped by
// the scrape (they were hyperlinks in the doc page) — only the alias names
// survive here.
58  using AdjByEnt = FieldEntityEntFiniteElementAdjacencyMap_multiIndex::index<
60 
61  using DofByGlobalPetscIndex =
63 
64  /** \brief Get element adjacencies
65  */
// Collects column DOF indices adjacent to a row entity; declaration's
// return-type line is likewise missing from this view.
66  template <typename TAG>
68  ProblemsByName::iterator p_miit,
69  typename boost::multi_index::index<NumeredDofEntity_multiIndex,
70  TAG>::type::iterator mit_row,
71  boost::shared_ptr<FieldEntity> mofem_ent_ptr,
72  std::vector<int> &dofs_col_view, int verb) const;
73 };
74 
75 /**
76  * @deprecated do not use, instead use CreateRowCompressedADJMatrix
77  */
79 
// Collects, for the field entity mofem_ent_ptr owning the row DOF mit_row,
// the column DOF indices (as numbered by TAG) of every finite element in the
// problem that is adjacent to it by-row; valid indices (>= 0) are appended to
// dofs_col_view. Elements are filtered by the problem's BitFEId and bit-ref
// level, and row/column field pairs listed in the problem's "empty field
// blocks" are skipped.
// NOTE(review): doxygen-scrape gaps — the opening signature line
// (CreateRowCompressedADJMatrix::getEntityAdjacencies) and the
// MoFEMFunctionBegin / MoFEMFunctionReturn lines are missing from this view.
80 template <typename TAG>
82  ProblemsByName::iterator p_miit,
83  typename boost::multi_index::index<NumeredDofEntity_multiIndex,
84  TAG>::type::iterator mit_row,
85  boost::shared_ptr<FieldEntity> mofem_ent_ptr,
86  std::vector<int> &dofs_col_view, int verb) const {
88 
89  BitRefLevel prb_bit = p_miit->getBitRefLevel();
90  BitRefLevel prb_mask = p_miit->getBitRefLevelMask();
91 
92  const EmptyFieldBlocks &empty_field_blocks = p_miit->getEmptyFieldBlocks();
93 
94  dofs_col_view.clear();
// Walk all element adjacencies of this entity (by its local unique id).
95  for (auto r = entFEAdjacencies.get<Unique_mi_tag>().equal_range(
96  mofem_ent_ptr->getLocalUniqueId());
97  r.first != r.second; ++r.first) {
98 
99  if (r.first->byWhat & BYROW) {
100 
101  if ((r.first->entFePtr->getId() & p_miit->getBitFEId()).none()) {
102  // if element is not part of problem
103  continue;
104  }
105 
106  const BitRefLevel &fe_bit = r.first->entFePtr->getBitRefLevel();
107  // if entity is not problem refinement level
108  if ((fe_bit & prb_bit).any() && (fe_bit & prb_mask) == fe_bit) {
109 
// Returns false when the (row-field, col-field) pair is declared an empty
// block for this problem, i.e. its coupling carries no entries.
110  auto check_block = [&empty_field_blocks](auto &r_if, auto &col_id) {
111  for (auto &b : empty_field_blocks) {
112  if ((b.first & r_if).any() && (b.second & col_id).any()) {
113  return false;
114  }
115  }
116  return true;
117  };
118 
119  for (auto &it : r.first->entFePtr->getColFieldEnts()) {
120  if (auto e = it.lock()) {
121 
122  if (check_block((*mit_row)->getId(), e->getId())) {
123 
// The column-DOF range for this entity was cached up front by
// createMatArrays (entityCacheColDofs); a missing cache is a hard error.
124  if (auto cache = e->entityCacheColDofs.lock()) {
125  const auto lo = cache->loHi[0];
126  const auto hi = cache->loHi[1];
127  for (auto vit = lo; vit != hi; ++vit) {
128 
129  const int idx = TAG::get_index(vit);
130  if (PetscLikely(idx >= 0))
131  dofs_col_view.push_back(idx);
132 
133 #ifndef NDEBUG
134  const DofIdx nb_dofs_col = p_miit->getNbDofsCol();
135  if (PetscUnlikely(idx >= nb_dofs_col)) {
136  MOFEM_LOG("SELF", Sev::error)
137  << "Problem with dof: " << std::endl
138  << "Rank " << rAnk << " : " << *(*vit);
139  SETERRQ(
140  mofemComm, PETSC_ERR_ARG_SIZ,
141  "Index of dof larger than number of DOFs in column");
142  }
143 #endif
144  }
145 
146  } else {
147  SETERRQ(mofemComm, MOFEM_DATA_INCONSISTENCY, "Cache not set");
148  }
149  }
150  }
151  }
152  }
153  }
154  }
155 
157 }
158 
// Builds the compressed-row (CSR-style) adjacency arrays `i` (row offsets)
// and `j` (column indices) for the problem pointed to by p_miit, for a PETSc
// matrix of the given type. When TAG::IamNotPartitioned, rows are split by a
// PETSc layout; otherwise rows not owned by this rank but shared are sent to
// their owners via MPI so each owner sees the full adjacency of its rows.
// NOTE(review): doxygen-scrape gaps — the opening signature line
// (MoFEMErrorCode CreateRowCompressedADJMatrix::createMatArrays), the
// MoFEMFunctionBegin / MoFEMFunctionReturn lines, and the first line of one
// SETERRQ1 call (near "dof %d can not be found") are missing from this view.
159 template <typename TAG>
161  ProblemsByName::iterator p_miit, const MatType type,
162  std::vector<PetscInt> &i, std::vector<PetscInt> &j, const bool no_diagonals,
163  int verb) const {
165 
166  PetscLogEventBegin(MOFEM_EVENT_createMat, 0, 0, 0, 0);
167 
// Pre-compute, per field entity, the [lo, hi) range of its column DOFs in
// this problem and cache it on the entity (entityCacheColDofs), so
// getEntityAdjacencies can read the range without repeated multi-index
// lookups. The cache vector is shared via the aliasing shared_ptr ctor.
168  auto cache = boost::make_shared<std::vector<EntityCacheNumeredDofs>>(
169  entsFields.size());
170 
171  size_t idx = 0;
172  for (auto it = entsFields.begin(); it != entsFields.end(); ++it, ++idx) {
173 
174  const auto uid = (*it)->getLocalUniqueId();
175  auto r = entFEAdjacencies.get<Unique_mi_tag>().equal_range(uid);
176  for (auto lo = r.first; lo != r.second; ++lo) {
177 
178  if ((lo->getBitFEId() & p_miit->getBitFEId()).any()) {
179 
180  const BitRefLevel &prb_bit = p_miit->getBitRefLevel();
181  const BitRefLevel &prb_mask = p_miit->getBitRefLevelMask();
182  const BitRefLevel &fe_bit = lo->entFePtr->getBitRefLevel();
183 
184  // if entity is not problem refinement level
185  if ((fe_bit & prb_mask) != fe_bit)
186  continue;
187  if ((fe_bit & prb_bit).none())
188  continue;
189 
// DOF UIDs of one entity occupy a contiguous UId range; the upper bound is
// the entity UID with the low MAX_DOFS_ON_ENTITY-1 bits set.
190  auto dit = p_miit->numeredColDofsPtr->lower_bound(uid);
191 
192  decltype(dit) hi_dit;
193  if (dit != p_miit->numeredColDofsPtr->end())
194  hi_dit = p_miit->numeredColDofsPtr->upper_bound(
195  uid | static_cast<UId>(MAX_DOFS_ON_ENTITY - 1));
196  else
197  hi_dit = dit;
198 
199  (*it)->entityCacheColDofs =
200  boost::shared_ptr<EntityCacheNumeredDofs>(cache, &((*cache)[idx]));
201  (*cache)[idx].loHi = {dit, hi_dit};
202 
203  break;
204  }
205  }
206  }
207 
208  using NumeredDofEntitysByIdx =
209  typename boost::multi_index::index<NumeredDofEntity_multiIndex,
210  TAG>::type;
211 
212  // Get multi-indices for rows and columns
213  const NumeredDofEntitysByIdx &dofs_row_by_idx =
214  p_miit->numeredRowDofsPtr->get<TAG>();
215  int nb_dofs_row = p_miit->getNbDofsRow();
216  if (nb_dofs_row == 0) {
217  SETERRQ1(mofemComm, MOFEM_DATA_INCONSISTENCY, "problem <%s> has zero rows",
218  p_miit->getName().c_str());
219  }
220 
221  // Get adjacencies form other processors
222  std::map<int, std::vector<int>> adjacent_dofs_on_other_parts;
223 
224  // If not partitioned set petsc layout for matrix. If partitioned need to get
225  // adjacencies form other parts. Note if algebra is only partitioned no need
226  // to collect adjacencies form other entities. Those are already on mesh
227  // which is assumed that is on each processor the same.
228  typename boost::multi_index::index<NumeredDofEntity_multiIndex,
229  TAG>::type::iterator miit_row,
230  hi_miit_row;
231 
232  if (TAG::IamNotPartitioned) {
233 
234  // Get range of local indices
235  PetscLayout layout;
236  CHKERR PetscLayoutCreate(mofemComm, &layout);
237  CHKERR PetscLayoutSetBlockSize(layout, 1);
238  CHKERR PetscLayoutSetSize(layout, nb_dofs_row);
239  CHKERR PetscLayoutSetUp(layout);
240  PetscInt rstart, rend;
241  CHKERR PetscLayoutGetRange(layout, &rstart, &rend);
242  CHKERR PetscLayoutDestroy(&layout);
243 
244  if (verb >= VERBOSE) {
245  MOFEM_LOG("SYNC", Sev::noisy)
246  << "row lower " << rstart << " row upper " << rend;
248  }
249 
250  miit_row = dofs_row_by_idx.lower_bound(rstart);
251  hi_miit_row = dofs_row_by_idx.lower_bound(rend);
252  if (std::distance(miit_row, hi_miit_row) != rend - rstart) {
253  SETERRQ4(
254  mofemComm, PETSC_ERR_ARG_SIZ,
255  "data inconsistency, std::distance(miit_row,hi_miit_row) != rend - "
256  "rstart (%d != %d - %d = %d) ",
257  std::distance(miit_row, hi_miit_row), rend, rstart, rend - rstart);
258  }
259 
260  } else {
261 
262  MPI_Comm comm;
263  // // Make sure it is a PETSc mofemComm
264  CHKERR PetscCommDuplicate(get_comm(), &comm, NULL);
265 
266  // get adjacent nodes on other partitions
// dofs_vec[p] accumulates, for each destination rank p, flattened records of
// the form: [row index, nb. of adjacent cols, col indices...].
267  std::vector<std::vector<int>> dofs_vec(sIze);
268 
269  boost::shared_ptr<FieldEntity> mofem_ent_ptr;
270  std::vector<int> dofs_col_view;
271 
272  typename boost::multi_index::index<NumeredDofEntity_multiIndex,
273  TAG>::type::iterator mit_row,
274  hi_mit_row;
275 
276  mit_row = dofs_row_by_idx.begin();
277  hi_mit_row = dofs_row_by_idx.end();
278  for (; mit_row != hi_mit_row; mit_row++) {
279 
280  // Shared or multi-shared row and not owned. Those data should be send to
281  // other side.
282 
283  // Get entity adjacencies, no need to repeat that operation for dofs when
284  // are on the same entity. For simplicity is assumed that those sheered
285  // the same adjacencies.
286  unsigned char pstatus = (*mit_row)->getPStatus();
287  if ((pstatus & PSTATUS_NOT_OWNED) &&
288  (pstatus & (PSTATUS_SHARED | PSTATUS_MULTISHARED))) {
289 
290  bool get_adj_col = true;
291  if (mofem_ent_ptr) {
292  if (mofem_ent_ptr->getLocalUniqueId() ==
293  (*mit_row)->getFieldEntityPtr()->getLocalUniqueId()) {
294  get_adj_col = false;
295  }
296  }
297 
298  if (get_adj_col) {
299  // Get entity adjacencies
300  mofem_ent_ptr = (*mit_row)->getFieldEntityPtr();
301  CHKERR getEntityAdjacencies<TAG>(p_miit, mit_row, mofem_ent_ptr,
302  dofs_col_view, verb);
303  // Sort, unique and resize dofs_col_view
304  {
305  std::sort(dofs_col_view.begin(), dofs_col_view.end());
306  std::vector<int>::iterator new_end =
307  std::unique(dofs_col_view.begin(), dofs_col_view.end());
308  int new_size = std::distance(dofs_col_view.begin(), new_end);
309  dofs_col_view.resize(new_size);
310  }
311  // Add that row. Patterns is that first index is row index, second is
312  // size of adjacencies after that follows column adjacencies.
313  int owner = (*mit_row)->getOwnerProc();
314  dofs_vec[owner].emplace_back(TAG::get_index(mit_row)); // row index
315  dofs_vec[owner].emplace_back(
316  dofs_col_view.size()); // nb. of column adjacencies
317  // add adjacent cools
318  dofs_vec[owner].insert(dofs_vec[owner].end(), dofs_col_view.begin(),
319  dofs_col_view.end());
320  }
321  }
322  }
323 
// Exchange the accumulated records: counts, then lengths, then the payloads
// via non-blocking sends matched with PetscPostIrecvInt-posted receives.
324  int nsends = 0; // number of messages to send
325  std::vector<int> dofs_vec_length(sIze); // length of the message to proc
326  for (int proc = 0; proc < sIze; proc++) {
327 
328  if (!dofs_vec[proc].empty()) {
329 
330  dofs_vec_length[proc] = dofs_vec[proc].size();
331  nsends++;
332 
333  } else {
334 
335  dofs_vec_length[proc] = 0;
336  }
337  }
338 
339  std::vector<MPI_Status> status(sIze);
340 
341  // Computes the number of messages a node expects to receive
342  int nrecvs; // number of messages received
343  CHKERR PetscGatherNumberOfMessages(comm, NULL, &dofs_vec_length[0],
344  &nrecvs);
345 
346  // Computes info about messages that a MPI-node will receive, including
347  // (from-id,length) pairs for each message.
348  int *onodes; // list of node-ids from which messages are expected
349  int *olengths; // corresponding message lengths
350  CHKERR PetscGatherMessageLengths(comm, nsends, nrecvs, &dofs_vec_length[0],
351  &onodes, &olengths);
352 
353  // Gets a unique new tag from a PETSc communicator.
354  int tag;
355  CHKERR PetscCommGetNewTag(comm, &tag);
356 
357  // Allocate a buffer sufficient to hold messages of size specified in
358  // olengths. And post Irecvs on these buffers using node info from onodes
359  int **rbuf; // must bee freed by user
360  MPI_Request *r_waits; // must bee freed by user
361 
362  // rbuf has a pointers to messages. It has size of of nrecvs (number of
363  // messages) +1. In the first index a block is allocated,
364  // such that rbuf[i] = rbuf[i-1]+olengths[i-1].
365  CHKERR PetscPostIrecvInt(comm, tag, nrecvs, onodes, olengths, &rbuf,
366  &r_waits);
367 
368  MPI_Request *s_waits; // status of sens messages
369  CHKERR PetscMalloc1(nsends, &s_waits);
370 
371  // Send messages
372  for (int proc = 0, kk = 0; proc < sIze; proc++) {
373  if (!dofs_vec_length[proc])
374  continue; // no message to send to this proc
375  CHKERR MPI_Isend(&(dofs_vec[proc])[0], // buffer to send
376  dofs_vec_length[proc], // message length
377  MPIU_INT, proc, // to proc
378  tag, comm, s_waits + kk);
379  kk++;
380  }
381 
382  // Wait for received
383  if (nrecvs) {
384  CHKERR MPI_Waitall(nrecvs, r_waits, &status[0]);
385  }
386  // Wait for send messages
387  if (nsends) {
388  CHKERR MPI_Waitall(nsends, s_waits, &status[0]);
389  }
390 
// Unpack received records into adjacent_dofs_on_other_parts, keyed by the
// (TAG) row index.
391  for (int kk = 0; kk < nrecvs; kk++) {
392 
393  int len = olengths[kk];
394  int *data_from_proc = rbuf[kk];
395 
396  for (int ii = 0; ii < len;) {
397 
398  int row_idx = data_from_proc[ii++]; // get row number
399  int nb_adj_dofs = data_from_proc[ii++]; // get nb. of adjacent dofs
400 
401  if (debug) {
402 
403  DofByGlobalPetscIndex::iterator dit;
404  dit = p_miit->numeredRowDofsPtr->get<PetscGlobalIdx_mi_tag>().find(
405  row_idx);
406  if (dit ==
407  p_miit->numeredRowDofsPtr->get<PetscGlobalIdx_mi_tag>().end()) {
// NOTE(review): the SETERRQ1 opening line for this error was dropped by the
// scrape; only its message arguments survive below.
409  "dof %d can not be found in problem", row_idx);
410  }
411  }
412 
413  for (int jj = 0; jj < nb_adj_dofs; jj++) {
414  adjacent_dofs_on_other_parts[row_idx].push_back(data_from_proc[ii++]);
415  }
416  }
417  }
418 
419  // Cleaning
420  CHKERR PetscFree(s_waits);
421  CHKERR PetscFree(rbuf[0]);
422  CHKERR PetscFree(rbuf);
423  CHKERR PetscFree(r_waits);
424  CHKERR PetscFree(onodes);
425  CHKERR PetscFree(olengths);
426 
// In the partitioned case iterate all rows; non-local ones are skipped in
// the assembly loop below via getPart() != rAnk.
427  miit_row = dofs_row_by_idx.begin();
428  hi_miit_row = dofs_row_by_idx.end();
429 
430  CHKERR PetscCommDestroy(&comm);
431  }
432 
433  boost::shared_ptr<FieldEntity> mofem_ent_ptr;
434  int row_last_evaluated_idx = -1;
435 
436  std::vector<int> dofs_vec;
437  std::vector<int> dofs_col_view;
438 
439  // loop local rows
440  int nb_loc_row_from_iterators = 0;
441  unsigned int rows_to_fill = p_miit->getNbLocalDofsRow();
442  i.reserve(rows_to_fill + 1);
443  for (; miit_row != hi_miit_row; miit_row++) {
444 
445  if (!TAG::IamNotPartitioned) {
446  if (static_cast<int>((*miit_row)->getPart()) != rAnk)
447  continue;
448  }
449  // This is only for cross-check if everything is ok
450  nb_loc_row_from_iterators++;
451 
452  // add next row to compressed matrix
453  i.push_back(j.size());
454 
455  // Get entity adjacencies, no need to repeat that operation for dofs when
456  // are on the same entity. For simplicity is assumed that those share the
457  // same adjacencies.
458  if ((!mofem_ent_ptr)
459  ? 1
460  : (mofem_ent_ptr->getLocalUniqueId() !=
461  (*miit_row)->getFieldEntityPtr()->getLocalUniqueId())) {
462 
463  // get entity adjacencies
464  mofem_ent_ptr = (*miit_row)->getFieldEntityPtr();
465  CHKERR getEntityAdjacencies<TAG>(p_miit, miit_row, mofem_ent_ptr,
466  dofs_col_view, verb);
467  row_last_evaluated_idx = TAG::get_index(miit_row);
468 
469  dofs_vec.resize(0);
470  // insert dofs_col_view
471  dofs_vec.insert(dofs_vec.end(), dofs_col_view.begin(),
472  dofs_col_view.end());
473 
// Merge in adjacencies received from other parts for this row (if any).
474  unsigned char pstatus = (*miit_row)->getPStatus();
475  if (pstatus > 0) {
476  std::map<int, std::vector<int>>::iterator mit;
477  mit = adjacent_dofs_on_other_parts.find(row_last_evaluated_idx);
478  if (mit == adjacent_dofs_on_other_parts.end()) {
479  // NOTE: Dof can adjacent to other part but no elements are there
480  // which use that dof std::cerr << *miit_row << std::endl; SETERRQ1(
481  // get_comm(),MOFEM_DATA_INCONSISTENCY,
482  // "data inconsistency row_last_evaluated_idx = %d",
483  // row_last_evaluated_idx
484  // );
485  } else {
486  dofs_vec.insert(dofs_vec.end(), mit->second.begin(),
487  mit->second.end());
488  }
489  }
490 
491  // sort and make unique
492  sort(dofs_vec.begin(), dofs_vec.end());
493  std::vector<int>::iterator new_end =
494  unique(dofs_vec.begin(), dofs_vec.end());
495  int new_size = std::distance(dofs_vec.begin(), new_end);
496  dofs_vec.resize(new_size);
497  }
498 
499  // Try to be smart reserving memory
500  if (j.capacity() < j.size() + dofs_vec.size()) {
501 
502  // TODO: [CORE-55] Improve algorithm estimating size of compressed matrix
503  unsigned int nb_nonzero = j.size() + dofs_vec.size();
504  unsigned int average_row_fill = nb_nonzero / i.size() + 1;
505  j.reserve(rows_to_fill * average_row_fill);
506  }
507 
// Append this row's column indices; diagonal entries are dropped when
// no_diagonals is set (required e.g. for MATMPIADJ partitioning graphs).
508  auto hi_diit = dofs_vec.end();
509  for (auto diit = dofs_vec.begin(); diit != hi_diit; diit++) {
510 
511  if (no_diagonals) {
512  if (*diit == TAG::get_index(miit_row)) {
513  continue;
514  }
515  }
516  j.push_back(*diit);
517  }
518  }
519 
520  // build adj matrix
521  i.push_back(j.size());
522 
// Consistency checks: row count in `i` must match the number of local rows,
// for every supported matrix type.
523  if (strcmp(type, MATMPIADJ) == 0) {
524 
525  // Adjacency matrix used to partition problems, f.e. METIS
526  if (i.size() - 1 != (unsigned int)nb_loc_row_from_iterators) {
527  SETERRQ2(
528  get_comm(), PETSC_ERR_ARG_SIZ,
529  "Number of rows from iterator is different than size of rows in "
530  "compressed row "
531  "matrix (unsigned int)nb_local_dofs_row != i.size() - 1, i.e. %d != "
532  "%d",
533  (unsigned int)nb_loc_row_from_iterators, i.size() - 1);
534  }
535 
536  } else if (strcmp(type, MATMPIAIJ) == 0) {
537 
538  // Compressed MPIADJ matrix
539  if (i.size() - 1 != (unsigned int)nb_loc_row_from_iterators) {
540  SETERRQ2(
541  get_comm(), PETSC_ERR_ARG_SIZ,
542  "Number of rows from iterator is different than size of rows in "
543  "compressed row "
544  "matrix (unsigned int)nb_local_dofs_row != i.size() - 1, i.e. %d != "
545  "%d",
546  (unsigned int)nb_loc_row_from_iterators, i.size() - 1);
547  }
548  PetscInt nb_local_dofs_row = p_miit->getNbLocalDofsRow();
549  if ((unsigned int)nb_local_dofs_row != i.size() - 1) {
550  SETERRQ2(
551  get_comm(), PETSC_ERR_ARG_SIZ,
552  "Number of rows is different than size of rows in compressed row "
553  "matrix (unsigned int)nb_local_dofs_row != i.size() - 1, i.e. %d != "
554  "%d",
555  (unsigned int)nb_local_dofs_row, i.size() - 1);
556  }
557 
558  } else if (strcmp(type, MATAIJ) == 0) {
559 
560  // Sequential compressed ADJ matrix
561  if (i.size() - 1 != (unsigned int)nb_loc_row_from_iterators) {
562  SETERRQ2(
563  get_comm(), PETSC_ERR_ARG_SIZ,
564  "Number of rows form iterator is different than size of rows in "
565  "compressed row "
566  "matrix (unsigned int)nb_local_dofs_row != i.size() - 1, i.e. %d != "
567  "%d",
568  (unsigned int)nb_loc_row_from_iterators, i.size() - 1);
569  }
570  PetscInt nb_local_dofs_row = p_miit->getNbLocalDofsRow();
571  if ((unsigned int)nb_local_dofs_row != i.size() - 1) {
572  SETERRQ2(
573  get_comm(), PETSC_ERR_ARG_SIZ,
574  "Number of rows is different than size of rows in compressed row "
575  "matrix (unsigned int)nb_local_dofs_row != i.size() - 1, i.e. %d != "
576  "%d",
577  (unsigned int)nb_local_dofs_row, i.size() - 1);
578  }
579 
580  } else if (strcmp(type, MATAIJCUSPARSE) == 0) {
581 
582  if (i.size() - 1 != (unsigned int)nb_loc_row_from_iterators) {
583  SETERRQ(get_comm(), PETSC_ERR_ARG_SIZ, "data inconsistency");
584  }
585  PetscInt nb_local_dofs_row = p_miit->getNbLocalDofsRow();
586  if ((unsigned int)nb_local_dofs_row != i.size() - 1) {
587  SETERRQ(get_comm(), PETSC_ERR_ARG_SIZ, "data inconsistency");
588  }
589 
590  } else {
591 
592  SETERRQ(get_comm(), PETSC_ERR_ARG_NULL, "not implemented");
593  }
594 
595  PetscLogEventEnd(MOFEM_EVENT_createMat, 0, 0, 0, 0);
597 }
598 
// UnknownInterface hook: hands out this MatrixManager instance as the
// requested interface and reports success (0). The type_index argument is
// unused in the visible body. NOTE(review): the return-type line
// (presumably MoFEMErrorCode) was dropped by the doc scrape.
600 MatrixManager::query_interface(boost::typeindex::type_index type_index,
601  UnknownInterface **iface) const {
602  *iface = const_cast<MatrixManager *>(this);
603  return 0;
604 }
605 
// MatrixManager constructor: binds the core reference and registers PETSc
// log events for each matrix-creation routine, so their cost shows up in
// -log_view profiles. NOTE(review): the constructor's signature line and the
// third argument (the MOFEM_EVENT_* variable) of all but the first
// PetscLogEventRegister calls are missing from this doc scrape.
607  : cOre(const_cast<MoFEM::Core &>(core)) {
608  PetscLogEventRegister("MatMngCrtMPIAIJ", 0, &MOFEM_EVENT_createMPIAIJ);
609  PetscLogEventRegister("MatMngCrtMPIAIJWthArr", 0,
611  PetscLogEventRegister("MatMngCrtMPIAdjWithArr", 0,
613  PetscLogEventRegister("MatMngCrtMPIAIJCUSPARSEWthArr", 0,
615  PetscLogEventRegister("MatMngCrtSeqAIJCUSPARSEWthArrs", 0,
617  PetscLogEventRegister("MatMngCrtSeqAIJWthArrs", 0,
619  PetscLogEventRegister("MatMngCrtCheckMPIAIJWthArrFillIn", 0,
621 }
622 
// Creates a parallel MPIAIJ matrix for the named problem by building CSR
// arrays via createMatArrays<PetscGlobalIdx_mi_tag> and handing them to
// PETSc's MatCreateMPIAIJWithArrays (values left NULL — structure only).
// NOTE(review): the MoFEMFunctionBegin/Return lines and the opening of the
// CHKERR core_ptr->createMatArrays<...> call are missing from this scrape.
623 template <>
624 MoFEMErrorCode MatrixManager::createMPIAIJWithArrays<PetscGlobalIdx_mi_tag>(
625  const std::string name, Mat *Aij, int verb) {
627 
628  MoFEM::CoreInterface &m_field = cOre;
// Downcast to the Core-derived helper that implements createMatArrays.
629  CreateRowCompressedADJMatrix *core_ptr =
630  static_cast<CreateRowCompressedADJMatrix *>(&cOre);
631  PetscLogEventBegin(MOFEM_EVENT_createMPIAIJWithArrays, 0, 0, 0, 0);
632 
633  auto problems_ptr = m_field.get_problems();
634  auto &prb = problems_ptr->get<Problem_mi_tag>();
635  auto p_miit = prb.find(name);
636  if (p_miit == prb.end()) {
637  SETERRQ1(m_field.get_comm(), MOFEM_NOT_FOUND,
638  "problem < %s > is not found (top tip: check spelling)",
639  name.c_str());
640  }
641 
642  std::vector<int> i_vec, j_vec;
643  j_vec.reserve(10000);
645  p_miit, MATMPIAIJ, i_vec, j_vec, false, verb);
646 
647  int nb_row_dofs = p_miit->getNbDofsRow();
648  int nb_col_dofs = p_miit->getNbDofsCol();
649  int nb_local_dofs_row = p_miit->getNbLocalDofsRow();
650  int nb_local_dofs_col = p_miit->getNbLocalDofsCol();
651 
652  CHKERR ::MatCreateMPIAIJWithArrays(
653  m_field.get_comm(), nb_local_dofs_row, nb_local_dofs_col, nb_row_dofs,
654  nb_col_dofs, &*i_vec.begin(), &*j_vec.begin(), PETSC_NULL, Aij);
655 
656  PetscLogEventEnd(MOFEM_EVENT_createMPIAIJWithArrays, 0, 0, 0, 0);
658 }
659 
// Creates a CUDA-backed MPI AIJCUSPARSE matrix for the named problem:
// builds CSR arrays, splits each row's nonzeros into diagonal-block (d_nnz)
// and off-diagonal-block (o_nnz) counts using the column PetscLayout, then
// preallocates via MatCreateAIJCUSPARSE. Errors out if PETSc was built
// without CUDA. NOTE(review): the return-type line, MoFEMFunctionBegin/
// Return lines, the opening of the createMatArrays call, and the lines
// defining nb_row_dofs/nb_col_dofs (used in the CUDA branch) are missing
// from this doc scrape.
660 template <>
662 MatrixManager::createMPIAIJCUSPARSEWithArrays<PetscGlobalIdx_mi_tag>(
663  const std::string name, Mat *Aij, int verb) {
665 
666  MoFEM::CoreInterface &m_field = cOre;
667  CreateRowCompressedADJMatrix *core_ptr =
668  static_cast<CreateRowCompressedADJMatrix *>(&cOre);
669  PetscLogEventBegin(MOFEM_EVENT_createMPIAIJCUSPARSEWithArrays, 0, 0, 0, 0);
670 
671  auto problems_ptr = m_field.get_problems();
672  auto &prb = problems_ptr->get<Problem_mi_tag>();
673  auto p_miit = prb.find(name);
674  if (p_miit == prb.end()) {
675  SETERRQ1(m_field.get_comm(), MOFEM_NOT_FOUND,
676  "problem < %s > is not found (top tip: check spelling)",
677  name.c_str());
678  }
679 
680  std::vector<int> i_vec, j_vec;
681  j_vec.reserve(10000);
683  p_miit, MATAIJCUSPARSE, i_vec, j_vec, false, verb);
684 
685  int nb_local_dofs_row = p_miit->getNbLocalDofsRow();
686  int nb_local_dofs_col = p_miit->getNbLocalDofsCol();
687 
// Local column ownership range [first, second) of this rank, from a
// temporary PetscLayout built over the local column counts.
688  auto get_layout = [&]() {
689  int start_ranges, end_ranges;
690  PetscLayout layout;
691  CHKERR PetscLayoutCreate(m_field.get_comm(), &layout);
692  CHKERR PetscLayoutSetBlockSize(layout, 1);
693  CHKERR PetscLayoutSetLocalSize(layout, nb_local_dofs_col);
694  CHKERR PetscLayoutSetUp(layout);
695  CHKERR PetscLayoutGetRange(layout, &start_ranges, &end_ranges);
696  CHKERR PetscLayoutDestroy(&layout);
697  return std::make_pair(start_ranges, end_ranges);
698  };
699 
// Count per-row nonzeros inside (d_nnz) and outside (o_nnz) the local
// column range, walking the CSR arrays once.
700  auto get_nnz = [&](auto &d_nnz, auto &o_nnz) {
702  auto layout = get_layout();
703  int j = 0;
704  for (int i = 0; i != nb_local_dofs_row; ++i) {
705  for (; j != i_vec[i + 1]; ++j) {
706  if (j_vec[j] < layout.second && j_vec[j] >= layout.first)
707  ++(d_nnz[i]);
708  else
709  ++(o_nnz[i]);
710  }
711  }
713  };
714 
715  std::vector<int> d_nnz(nb_local_dofs_row, 0), o_nnz(nb_local_dofs_row, 0);
716  CHKERR get_nnz(d_nnz, o_nnz);
717 
718 #ifdef PETSC_HAVE_CUDA
719  CHKERR ::MatCreateAIJCUSPARSE(m_field.get_comm(), nb_local_dofs_row,
720  nb_local_dofs_col, nb_row_dofs, nb_col_dofs, 0,
721  &*d_nnz.begin(), 0, &*o_nnz.begin(), Aij);
722 #else
723  SETERRQ(PETSC_COMM_SELF, MOFEM_NOT_IMPLEMENTED,
724  "Error: To use this matrix type compile PETSc with CUDA.");
725 #endif
726 
727  PetscLogEventEnd(MOFEM_EVENT_createMPIAIJCUSPARSEWithArrays, 0, 0, 0, 0);
728 
730 }
731 
// Sequential AIJCUSPARSE creation is not implemented: even with CUDA-enabled
// PETSc this deliberately raises MOFEM_NOT_IMPLEMENTED and points users at
// the MPI variant. The commented-out prototype shows the intended PETSc call.
// NOTE(review): the return-type and MoFEMFunctionBegin/Return lines are
// missing from this doc scrape; name/Aij/verb are unused in the visible body.
732 template <>
734 MatrixManager::createSeqAIJCUSPARSEWithArrays<PetscLocalIdx_mi_tag>(
735  const std::string name, Mat *Aij, int verb) {
737 
738 #ifdef PETSC_HAVE_CUDA
739  // CHKERR ::MatCreateSeqAIJCUSPARSE(MPI_Comm comm, PetscInt m, PetscInt n,
740  // PetscInt nz, const PetscInt nnz[], Mat
741  // *A);
742  SETERRQ(PETSC_COMM_SELF, MOFEM_NOT_IMPLEMENTED,
743  "Not implemented type of matrix yet, try MPI version (aijcusparse)");
744 #endif
745 
747 }
748 
// Creates a parallel MPIAIJ matrix for the named problem by computing exact
// per-row diagonal/off-diagonal nonzero counts (d_nnz/o_nnz) from the CSR
// arrays and preallocating with MatMPIAIJSetPreallocation — unlike
// createMPIAIJWithArrays, PETSc owns the structure from the start.
// NOTE(review): the return-type/MoFEMFunctionBegin/Return lines and the
// opening of the createMatArrays call are missing from this doc scrape.
749 template <>
751 MatrixManager::createMPIAIJ<PetscGlobalIdx_mi_tag>(const std::string name,
752  Mat *Aij, int verb) {
753  MoFEM::CoreInterface &m_field = cOre;
754  CreateRowCompressedADJMatrix *core_ptr =
755  static_cast<CreateRowCompressedADJMatrix *>(&cOre);
757  PetscLogEventBegin(MOFEM_EVENT_createMPIAIJ, 0, 0, 0, 0);
758 
759  auto problems_ptr = m_field.get_problems();
760  auto &prb = problems_ptr->get<Problem_mi_tag>();
761  auto p_miit = prb.find(name);
762  if (p_miit == prb.end()) {
763  SETERRQ1(m_field.get_comm(), MOFEM_NOT_FOUND,
764  "problem < %s > is not found (top tip: check spelling)",
765  name.c_str());
766  }
767 
768  std::vector<int> i_vec, j_vec;
769  j_vec.reserve(10000);
771  p_miit, MATMPIAIJ, i_vec, j_vec, false, verb);
772 
773  int nb_row_dofs = p_miit->getNbDofsRow();
774  int nb_col_dofs = p_miit->getNbDofsCol();
775  int nb_local_dofs_row = p_miit->getNbLocalDofsRow();
776  int nb_local_dofs_col = p_miit->getNbLocalDofsCol();
777 
// Local column ownership range [first, second) of this rank.
778  auto get_layout = [&]() {
779  int start_ranges, end_ranges;
780  PetscLayout layout;
781  CHKERR PetscLayoutCreate(m_field.get_comm(), &layout);
782  CHKERR PetscLayoutSetBlockSize(layout, 1);
783  CHKERR PetscLayoutSetLocalSize(layout, nb_local_dofs_col);
784  CHKERR PetscLayoutSetUp(layout);
785  CHKERR PetscLayoutGetRange(layout, &start_ranges, &end_ranges);
786  CHKERR PetscLayoutDestroy(&layout);
787  return std::make_pair(start_ranges, end_ranges);
788  };
789 
// Split each row's nonzeros into diagonal-block vs off-diagonal-block
// counts, as required by MatMPIAIJSetPreallocation.
790  auto get_nnz = [&](auto &d_nnz, auto &o_nnz) {
792  auto layout = get_layout();
793  int j = 0;
794  for (int i = 0; i != nb_local_dofs_row; ++i) {
795  for (; j != i_vec[i + 1]; ++j) {
796  if (j_vec[j] < layout.second && j_vec[j] >= layout.first)
797  ++(d_nnz[i]);
798  else
799  ++(o_nnz[i]);
800  }
801  }
803  };
804 
805  std::vector<int> d_nnz(nb_local_dofs_row, 0), o_nnz(nb_local_dofs_row, 0);
806  CHKERR get_nnz(d_nnz, o_nnz);
807 
808  CHKERR MatCreate(m_field.get_comm(), Aij);
809  CHKERR MatSetSizes(*Aij, nb_local_dofs_row, nb_local_dofs_col, nb_row_dofs,
810  nb_col_dofs);
811  CHKERR MatSetType(*Aij, MATMPIAIJ);
812  CHKERR MatMPIAIJSetPreallocation(*Aij, 0, &*d_nnz.begin(), 0,
813  &*o_nnz.begin());
814 
815  PetscLogEventEnd(MOFEM_EVENT_createMPIAIJ, 0, 0, 0, 0);
817 }
818 
// Creates a parallel adjacency (MATMPIADJ) matrix for the named problem —
// the graph form consumed by partitioners such as METIS/ParMETIS. Diagonal
// entries are excluded (createMatArrays called with no_diagonals = true) and
// the CSR arrays are copied into PetscMalloc'd buffers because MatCreateMPIAdj
// takes ownership of them. NOTE(review): the return-type and
// MoFEMFunctionBegin/Return lines are missing from this doc scrape.
819 template <>
821 MatrixManager::createMPIAdjWithArrays<Idx_mi_tag>(const std::string name,
822  Mat *Adj, int verb) {
823  MoFEM::CoreInterface &m_field = cOre;
824  CreateRowCompressedADJMatrix *core_ptr =
825  static_cast<CreateRowCompressedADJMatrix *>(&cOre);
827  PetscLogEventBegin(MOFEM_EVENT_createMPIAdjWithArrays, 0, 0, 0, 0);
828 
829  auto problems_ptr = m_field.get_problems();
830  auto &prb = problems_ptr->get<Problem_mi_tag>();
831  auto p_miit = prb.find(name);
832  if (p_miit == prb.end()) {
833  SETERRQ1(m_field.get_comm(), MOFEM_NOT_FOUND,
834  "problem < %s > is not found (top tip: check spelling)",
835  name.c_str());
836  }
837 
838  std::vector<int> i_vec, j_vec;
839  j_vec.reserve(10000);
840  CHKERR core_ptr->createMatArrays<Idx_mi_tag>(p_miit, MATMPIADJ, i_vec, j_vec,
841  true, verb);
// MatCreateMPIAdj takes ownership of _i/_j, so they must be PetscMalloc'd
// copies rather than pointers into the local vectors.
842  int *_i, *_j;
843  CHKERR PetscMalloc(i_vec.size() * sizeof(int), &_i);
844  CHKERR PetscMalloc(j_vec.size() * sizeof(int), &_j);
845  copy(i_vec.begin(), i_vec.end(), _i);
846  copy(j_vec.begin(), j_vec.end(), _j);
847 
848  int nb_col_dofs = p_miit->getNbDofsCol();
849  CHKERR MatCreateMPIAdj(m_field.get_comm(), i_vec.size() - 1, nb_col_dofs, _i,
850  _j, PETSC_NULL, Adj);
851  CHKERR MatSetOption(*Adj, MAT_STRUCTURALLY_SYMMETRIC, PETSC_TRUE);
852 
853  PetscLogEventEnd(MOFEM_EVENT_createMPIAdjWithArrays, 0, 0, 0, 0);
855 }
856 
// Creates a sequential AIJ matrix for the named problem on PETSC_COMM_SELF.
// A temporary matrix wraps the CSR arrays plus a PetscMalloc'd zero-values
// buffer, then MatDuplicate(MAT_SHARE_NONZERO_PATTERN) produces the returned
// matrix so the local buffers (freed on return) are not retained by *Aij.
// NOTE(review): MoFEMFunctionBegin/Return lines are missing from this scrape.
// NOTE(review): this function brackets itself with
// MOFEM_EVENT_createMPIAIJWithArrays although the constructor registers a
// separate "MatMngCrtSeqAIJWthArrs" event — likely a copy-paste slip; confirm
// against the real source before relying on -log_view timings.
857 template <>
858 MoFEMErrorCode MatrixManager::createSeqAIJWithArrays<PetscLocalIdx_mi_tag>(
859  const std::string name, Mat *Aij, int verb) {
860  MoFEM::CoreInterface &m_field = cOre;
861  CreateRowCompressedADJMatrix *core_ptr =
862  static_cast<CreateRowCompressedADJMatrix *>(&cOre);
864  PetscLogEventBegin(MOFEM_EVENT_createMPIAIJWithArrays, 0, 0, 0, 0);
865 
866  auto problems_ptr = m_field.get_problems();
867  auto &prb = problems_ptr->get<Problem_mi_tag>();
868  auto p_miit = prb.find(name);
869  if (p_miit == prb.end()) {
870  SETERRQ1(m_field.get_comm(), MOFEM_NOT_FOUND,
871  "problem < %s > is not found (top tip: check spelling)",
872  name.c_str());
873  }
874 
875  std::vector<int> i_vec, j_vec;
876  j_vec.reserve(10000);
877  CHKERR core_ptr->createMatArrays<PetscGlobalIdx_mi_tag>(p_miit, MATAIJ, i_vec,
878  j_vec, false, verb);
879 
880  int nb_local_dofs_row = p_miit->getNbLocalDofsRow();
881  int nb_local_dofs_col = p_miit->getNbLocalDofsCol();
882 
883  double *_a;
884  CHKERR PetscMalloc(j_vec.size() * sizeof(double), &_a);
885 
886  Mat tmpMat;
887  CHKERR ::MatCreateSeqAIJWithArrays(PETSC_COMM_SELF, nb_local_dofs_row,
888  nb_local_dofs_col, &*i_vec.begin(),
889  &*j_vec.begin(), _a, &tmpMat);
890  CHKERR MatDuplicate(tmpMat, MAT_SHARE_NONZERO_PATTERN, Aij);
891  CHKERR MatDestroy(&tmpMat);
892 
893  CHKERR PetscFree(_a);
894 
895  PetscLogEventEnd(MOFEM_EVENT_createMPIAIJWithArrays, 0, 0, 0, 0);
897 }
898 
899 MoFEMErrorCode MatrixManager::checkMatrixFillIn(const std::string problem_name,
900  int row_print, int col_print,
901  Mat A, int verb) {
902  MoFEM::CoreInterface &m_field = cOre;
904 
905  PetscLogEventBegin(MOFEM_EVENT_checkMatrixFillIn, 0, 0, 0, 0);
906 
907  struct TestMatrixFillIn : public FEMethod {
908  CoreInterface *mFieldPtr;
909 
910  Mat A;
911 
912  int rowPrint, colPrint;
913 
914  TestMatrixFillIn(CoreInterface *m_field_ptr, Mat a, int row_print,
915  int col_print)
916  : mFieldPtr(m_field_ptr), A(a), rowPrint(row_print),
917  colPrint(col_print){};
918 
919  MoFEMErrorCode preProcess() {
922  }
923 
924  MoFEMErrorCode operator()() {
926 
927  if (refinedFiniteElementsPtr->find(
928  numeredEntFiniteElementPtr->getEnt()) ==
929  refinedFiniteElementsPtr->end()) {
930  SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
931  "data inconsistency");
932  }
933 
934  auto row_dofs = getRowDofsPtr();
935  auto col_dofs = getColDofsPtr();
936 
937  for (auto cit = col_dofs->begin(); cit != col_dofs->end(); cit++) {
938 
939  if (refinedEntitiesPtr->find((*cit)->getEnt()) ==
940  refinedEntitiesPtr->end()) {
941  SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
942  "data inconsistency");
943  }
944  if (!(*cit)->getActive()) {
945  SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
946  "data inconsistency");
947  }
948 
949  FieldEntityEntFiniteElementAdjacencyMap_multiIndex::index<
950  Composite_Unique_mi_tag>::type::iterator ait;
951  ait = adjacenciesPtr->get<Composite_Unique_mi_tag>().find(
952  boost::make_tuple((*cit)->getFieldEntityPtr()->getLocalUniqueId(),
953  numeredEntFiniteElementPtr->getLocalUniqueId()));
954  if (ait == adjacenciesPtr->end()) {
955  SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
956  "adjacencies data inconsistency");
957  } else {
958  UId uid = ait->getEntUniqueId();
959  if (entitiesPtr->find(uid) == entitiesPtr->end()) {
960  SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
961  "data inconsistency");
962  }
963  if (dofsPtr->find((*cit)->getLocalUniqueId()) == dofsPtr->end()) {
964  SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
965  "data inconsistency");
966  }
967  }
968 
969  if ((*cit)->getEntType() != MBVERTEX) {
970 
971  auto range =
972  col_dofs->get<Ent_mi_tag>().equal_range((*cit)->getEnt());
973  int nb_dofs_on_ent = std::distance(range.first, range.second);
974 
975  int max_order = (*cit)->getMaxOrder();
976  if ((*cit)->getNbOfCoeffs() * (*cit)->getOrderNbDofs(max_order) !=
977  nb_dofs_on_ent) {
978 
979  /* It could be that you have
980  removed DOFs from problem, and for example if this was vector filed
981  with components {Ux,Uy,Uz}, you removed on Uz element.*/
982 
983  MOFEM_LOG("SELF", Sev::warning)
984  << "Warning: Number of Dofs in Col different than number "
985  "of dofs for given entity order "
986  << (*cit)->getNbOfCoeffs() * (*cit)->getOrderNbDofs(max_order)
987  << " " << nb_dofs_on_ent;
988  }
989  }
990  }
991 
992  for (auto rit = row_dofs->begin(); rit != row_dofs->end(); rit++) {
993 
994  if (refinedEntitiesPtr->find((*rit)->getEnt()) ==
995  refinedEntitiesPtr->end()) {
996  SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
997  "data inconsistency");
998  }
999  if (!(*rit)->getActive()) {
1000  SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
1001  "data inconsistency");
1002  }
1003 
1004  FieldEntityEntFiniteElementAdjacencyMap_multiIndex::index<
1005  Composite_Unique_mi_tag>::type::iterator ait;
1006  ait = adjacenciesPtr->get<Composite_Unique_mi_tag>().find(
1007  boost::make_tuple((*rit)->getFieldEntityPtr()->getLocalUniqueId(),
1008  numeredEntFiniteElementPtr->getLocalUniqueId()));
1009  if (ait == adjacenciesPtr->end()) {
1011  MOFEM_LOG("SELF", Sev::error) << *(*rit);
1012  MOFEM_LOG("SELF", Sev::error) << *(*rit);
1013  MOFEM_LOG("SELF", Sev::error) << *numeredEntFiniteElementPtr;
1014  MOFEM_LOG("SELF", Sev::error) << "dof: " << (*rit)->getBitRefLevel();
1015  MOFEM_LOG("SELF", Sev::error)
1016  << "fe: " << numeredEntFiniteElementPtr->getBitRefLevel();
1017  MOFEM_LOG("SELF", Sev::error)
1018  << "problem: " << problemPtr->getBitRefLevel();
1019  MOFEM_LOG("SELF", Sev::error)
1020  << "problem mask: " << problemPtr->getBitRefLevelMask();
1021  SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
1022  "adjacencies data inconsistency");
1023  } else {
1024  UId uid = ait->getEntUniqueId();
1025  if (entitiesPtr->find(uid) == entitiesPtr->end()) {
1026  SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
1027  "data inconsistency");
1028  }
1029  if (dofsPtr->find((*rit)->getLocalUniqueId()) == dofsPtr->end()) {
1030  SETERRQ(mFieldPtr->get_comm(), MOFEM_DATA_INCONSISTENCY,
1031  "data inconsistency");
1032  }
1033  }
1034  int row = (*rit)->getPetscGlobalDofIdx();
1035 
1036  auto col_dofs = getColDofsPtr();
1037  for (auto cit = col_dofs->begin(); cit != col_dofs->end(); cit++) {
1038 
1039  int col = (*cit)->getPetscGlobalDofIdx();
1040 
1041  if (row == rowPrint && col == colPrint) {
1042  MOFEM_LOG("SELF", Sev::noisy) << "fe:\n"
1043  << *numeredEntFiniteElementPtr;
1044  MOFEM_LOG("SELF", Sev::noisy) << "row:\n" << *(*rit);
1045  MOFEM_LOG("SELF", Sev::noisy) << "col:\n" << *(*cit);
1046  MOFEM_LOG("SELF", Sev::noisy)
1047  << "fe:\n"
1048  << numeredEntFiniteElementPtr->getBitRefLevel();
1049  MOFEM_LOG("SELF", Sev::noisy) << "row:\n"
1050  << (*rit)->getBitRefLevel();
1051  MOFEM_LOG("SELF", Sev::noisy) << "col:\n"
1052  << (*cit)->getBitRefLevel();
1053  }
1054 
1055  CHKERR MatSetValue(A, row, col, 1, INSERT_VALUES);
1056  }
1057 
1058  if ((*rit)->getEntType() != MBVERTEX) {
1059 
1060  auto range =
1061  row_dofs->get<Ent_mi_tag>().equal_range((*rit)->getEnt());
1062  int nb_dofs_on_ent = std::distance(range.first, range.second);
1063 
1064  int max_order = (*rit)->getMaxOrder();
1065  if ((*rit)->getNbOfCoeffs() * (*rit)->getOrderNbDofs(max_order) !=
1066  nb_dofs_on_ent) {
1067 
 1068  /* It could be that you have removed DOFs from the problem; for
 1069  * example, if this was a vector field with components {Ux,Uy,Uz}, you
 1070  * removed the Uz DOFs on this element. */
1071 
1072  MOFEM_LOG("SELF", Sev::warning)
1073  << "Warning: Number of Dofs in Row different than number "
1074  "of dofs for given entity order "
1075  << (*rit)->getNbOfCoeffs() * (*rit)->getOrderNbDofs(max_order)
1076  << " " << nb_dofs_on_ent;
1077  }
1078  }
1079  }
1080 
1082  }
1083 
1084  MoFEMErrorCode postProcess() {
1086 
1087  // cerr << mFieldPtr->get_comm_rank() << endl;
1088  CHKERR MatAssemblyBegin(A, MAT_FLUSH_ASSEMBLY);
1089  CHKERR MatAssemblyEnd(A, MAT_FLUSH_ASSEMBLY);
1090 
1092  }
1093  };
1094 
1095  // create matrix
1096  CHKERR MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);
1097 
1098  if (verb >= VERY_VERBOSE) {
1099  MatView(A, PETSC_VIEWER_STDOUT_WORLD);
1100  }
1101 
1102  if (verb >= NOISY) {
1103  MatView(A, PETSC_VIEWER_DRAW_WORLD);
1104  std::string wait;
1105  std::cin >> wait;
1106  }
1107 
1108  TestMatrixFillIn method(&m_field, A, row_print, col_print);
1109 
1110  // get problem
1111  auto problems_ptr = m_field.get_problems();
1112  auto &prb_set = problems_ptr->get<Problem_mi_tag>();
1113  auto p_miit = prb_set.find(problem_name);
1114  if (p_miit == prb_set.end())
1115  SETERRQ1(m_field.get_comm(), MOFEM_DATA_INCONSISTENCY,
1116  "problem < %s > not found (top tip: check spelling)",
1117  problem_name.c_str());
1118  MOFEM_LOG_C("WORLD", Sev::inform, "check problem < %s >",
1119  problem_name.c_str());
1120 
1121  // loop all elements in problem and check if assemble is without error
1122  auto fe_ptr = m_field.get_finite_elements();
1123  for (auto &fe : *fe_ptr) {
1124  MOFEM_LOG_C("WORLD", Sev::verbose, "\tcheck element %s",
1125  fe->getName().c_str());
1126  CHKERR m_field.loop_finite_elements(problem_name, fe->getName(), method,
1127  nullptr, MF_EXIST,
1128  CacheTupleSharedPtr(), verb);
1129  }
1130 
1131  CHKERR MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
1132  CHKERR MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);
1133 
1134  PetscLogEventEnd(MOFEM_EVENT_checkMatrixFillIn, 0, 0, 0, 0);
1135 
1137 }
1138 
1139 template <>
1141 MatrixManager::checkMPIAIJWithArraysMatrixFillIn<PetscGlobalIdx_mi_tag>(
1142  const std::string problem_name, int row_print, int col_print, int verb) {
1144  // create matrix
1146  CHKERR createMPIAIJWithArrays<PetscGlobalIdx_mi_tag>(problem_name, A, verb);
1147  CHKERR MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);
1148  CHKERR checkMatrixFillIn(problem_name, row_print, col_print, A, verb);
1150 }
1151 
1152 template <>
1153 MoFEMErrorCode MatrixManager::checkMPIAIJMatrixFillIn<PetscGlobalIdx_mi_tag>(
1154  const std::string problem_name, int row_print, int col_print, int verb) {
1156  // create matrix
1158  CHKERR createMPIAIJ<PetscGlobalIdx_mi_tag>(problem_name, A, verb);
1159  CHKERR MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);
1160  CHKERR checkMatrixFillIn(problem_name, row_print, col_print, A, verb);
1162 }
1163 
1164 template <>
1165 MoFEMErrorCode MatrixManager::createHybridL2MPIAIJ<PetscGlobalIdx_mi_tag>(
1166  const std::string problem_name, SmartPetscObj<Mat> &aij_ptr, int verb) {
1167  MoFEM::CoreInterface &m_field = cOre;
1169 
1170  auto prb_ptr = m_field.get_problems();
1171  auto p_miit = prb_ptr->get<Problem_mi_tag>().find(problem_name);
1172  if (p_miit == prb_ptr->get<Problem_mi_tag>().end()) {
1173  SETERRQ1(m_field.get_comm(), MOFEM_NOT_FOUND,
1174  "problem < %s > is not found (top tip: check spelling)",
1175  problem_name.c_str());
1176  }
1177 
1178  auto nb_rows = p_miit->getNbDofsRow();
1179  auto nb_cols = p_miit->getNbDofsCol();
1180  auto nb_loc_rows = p_miit->getNbLocalDofsRow();
1181  auto nb_loc_cols = p_miit->getNbLocalDofsCol();
1182 
1183  auto row_ptr = p_miit->getNumeredRowDofsPtr();
1184  auto col_ptr = p_miit->getNumeredColDofsPtr();
1185 
1186  BitFieldId fields_ids;
1187  for (auto &c : *col_ptr) {
1188  fields_ids |= c->getId();
1189  }
1190  auto fields = m_field.get_fields();
1191  std::vector<int> fields_bit_numbers;
1192  for (auto &f : *fields) {
1193  if ((fields_ids & f->getId()).any()) {
1194  fields_bit_numbers.push_back(f->getBitNumber());
1195  }
1196  }
1197 
1198  Range adj;
1199  EntityHandle prev_ent = 0;
1200  int prev_dim = -1;
1201 
1202  auto get_adj = [&](auto ent, auto dim) {
1203  if (prev_ent == ent && prev_dim == dim) {
1204  return adj;
1205  } else {
1206  adj.clear();
1207  Range bridge;
1208  CHKERR m_field.get_moab().get_adjacencies(&ent, 1, dim + 1, false,
1209  bridge);
1210  CHKERR m_field.get_moab().get_adjacencies(bridge, dim, false, adj,
1211  moab::Interface::UNION);
1212  prev_ent = ent;
1213  prev_dim = dim;
1214  return adj;
1215  }
1216  };
1217 
1218  int prev_bit_number = -1;
1219  EntityHandle prev_first_ent = 0;
1220  EntityHandle prev_second_ent = 0;
1221  using IT = boost::multi_index::index<NumeredDofEntity_multiIndex,
1222  Unique_mi_tag>::type::iterator;
1223  std::pair<IT, IT> pair_lo_hi;
1224  auto get_col_it = [&](auto bit_number, auto first_ent, auto second_ent) {
1225  if (bit_number == prev_bit_number && first_ent == prev_first_ent &&
1226  second_ent == prev_second_ent) {
1227  return pair_lo_hi;
1228  } else {
1229  auto lo_it = col_ptr->get<Unique_mi_tag>().lower_bound(
1230  DofEntity::getLoFieldEntityUId(bit_number, first_ent));
1231  auto hi_it = col_ptr->get<Unique_mi_tag>().upper_bound(
1232  DofEntity::getHiFieldEntityUId(bit_number, second_ent));
1233  pair_lo_hi = std::make_pair(lo_it, hi_it);
1234  prev_bit_number = bit_number;
1235  prev_first_ent = first_ent;
1236  prev_second_ent = second_ent;
1237  return pair_lo_hi;
1238  }
1239  };
1240 
1241  auto create_ghost_vec = [&]() {
1243  CHKERR m_field.getInterface<VecManager>()->vecCreateGhost(problem_name, ROW,
1244  v);
1245  CHKERR VecZeroEntries(v);
1246  CHKERR VecGhostUpdateBegin(v, INSERT_VALUES, SCATTER_FORWARD);
1247  CHKERR VecGhostUpdateEnd(v, INSERT_VALUES, SCATTER_FORWARD);
1248  return v;
1249  };
1250 
1251  auto v_o_nnz = create_ghost_vec();
1252  auto v_d_nnz = create_ghost_vec();
1253 
1254 
1255  double *o_nnz_real;
1256  CHKERR VecGetArray(v_o_nnz, &o_nnz_real);
1257  double *d_nnz_real;
1258  CHKERR VecGetArray(v_d_nnz, &d_nnz_real);
1259 
1260  for (auto r = row_ptr->begin(); r != row_ptr->end(); ++r) {
1261 
1262  auto row_loc_idx = (*r)->getPetscLocalDofIdx();
1263  if (row_loc_idx < 0)
1264  continue;
1265 
1266  auto ent = (*r)->getEnt();
1267  auto dim = dimension_from_handle(ent);
1268  auto adj = get_adj(ent, dim);
1269 
1270  for (auto p = adj.pair_begin(); p != adj.pair_end(); ++p) {
1271  auto first_ent = p->first;
1272  auto second_ent = p->second;
1273 
1274  for (auto bit_number : fields_bit_numbers) {
1275 
1276  auto [lo_it, hi_it] = get_col_it(bit_number, first_ent, second_ent);
1277 
1278  for (; lo_it != hi_it; ++lo_it) {
1279  auto col_loc_idx = (*lo_it)->getPetscLocalDofIdx();
1280  if (col_loc_idx < 0)
1281  continue;
1282 
1283  if (
1284 
1285  (*lo_it)->getOwnerProc() == (*r)->getOwnerProc()
1286 
1287  ) {
1288  d_nnz_real[row_loc_idx] += 1;
1289  } else {
1290  o_nnz_real[row_loc_idx] += 1;
1291  }
1292  }
1293  }
1294  }
1295  }
1296 
1297  CHKERR VecRestoreArray(v_o_nnz, &o_nnz_real);
1298  CHKERR VecRestoreArray(v_d_nnz, &d_nnz_real);
1299 
1300  auto update_vec = [&](auto v) {
1302  CHKERR VecGhostUpdateBegin(v, ADD_VALUES, SCATTER_REVERSE);
1303  CHKERR VecGhostUpdateEnd(v, ADD_VALUES, SCATTER_REVERSE);
1304  CHKERR VecAssemblyBegin(v);
1305  CHKERR VecAssemblyEnd(v);
1307  };
1308 
1309  CHKERR update_vec(v_o_nnz);
1310  CHKERR update_vec(v_d_nnz);
1311 
1312  int o_nz = 0;
1313  int d_nz = 0;
1314 
1315  CHKERR VecGetArray(v_d_nnz, &d_nnz_real);
1316  CHKERR VecGetArray(v_o_nnz, &o_nnz_real);
1317 
1318  std::vector<int> d_nnz(nb_loc_rows, 0);
1319  std::vector<int> o_nnz(nb_loc_rows, 0);
1320  for (auto r = row_ptr->begin(); r != row_ptr->end(); ++r) {
1321 
1322  auto row_loc_idx = (*r)->getPetscLocalDofIdx();
1323 
1324  if (
1325 
1326  row_loc_idx >= 0 && row_loc_idx < nb_loc_rows
1327 
1328  ) {
1329  auto row_loc_idx = (*r)->getPetscLocalDofIdx();
1330  d_nz += o_nnz_real[row_loc_idx];
1331  d_nnz[row_loc_idx] = d_nnz_real[row_loc_idx];
1332  o_nz += o_nnz_real[row_loc_idx];
1333  o_nnz[row_loc_idx] = o_nnz_real[row_loc_idx];
1334  }
1335 
1336  }
1337 
1338  CHKERR VecRestoreArray(v_o_nnz, &o_nnz_real);
1339  CHKERR VecRestoreArray(v_d_nnz, &d_nnz_real);
1340 
1341  if (verb >= QUIET) {
1342  MOFEM_LOG("SYNC", Sev::verbose)
1343  << "Hybrid L2 matrix d_nz: " << d_nz << " o_nz: " << o_nz;
1344  MOFEM_LOG_SEVERITY_SYNC(m_field.get_comm(), Sev::verbose);
1345  }
1346 
1347  if (verb >= VERBOSE) {
1348  MOFEM_LOG("SYNC", Sev::noisy) << "Hybrid L2 matrix";
1349  int idx = 0;
1350  for (auto &d : d_nnz) {
1351  MOFEM_LOG("SYNC", Sev::noisy) << idx << ": " << d;
1352  ++idx;
1353  }
1354  MOFEM_LOG_SEVERITY_SYNC(m_field.get_comm(), Sev::noisy);
1355  }
1356 
1357  Mat a_raw;
1358  CHKERR MatCreateAIJ(m_field.get_comm(), nb_loc_rows, nb_loc_cols, nb_rows,
1359  nb_cols, 0, &*d_nnz.begin(), 0, &*o_nnz.begin(), &a_raw);
1360  CHKERR MatSetOption(a_raw, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);
1361  aij_ptr = SmartPetscObj<Mat>(a_raw);
1362 
1363  MOFEM_LOG_CHANNEL("WORLD");
1364  MOFEM_LOG_CHANNEL("SYNC");
1365 
1367 }
1368 
1369 } // namespace MoFEM
MoFEMFunctionReturnHot
#define MoFEMFunctionReturnHot(a)
Last executable line of each PETSc function used for error handling. Replaces return()
Definition: definitions.h:460
MoFEM::UnknownInterface::getInterface
MoFEMErrorCode getInterface(IFACE *&iface) const
Get interface reference to pointer of interface.
Definition: UnknownInterface.hpp:93
MoFEM::CreateRowCompressedADJMatrix::createMatArrays
MoFEMErrorCode createMatArrays(ProblemsByName::iterator p_miit, const MatType type, std::vector< PetscInt > &i, std::vector< PetscInt > &j, const bool no_diagonals=true, int verb=QUIET) const
Create matrix adjacencies.
Definition: MatrixManager.cpp:160
MoFEM::dimension_from_handle
auto dimension_from_handle(const EntityHandle h)
get entity dimension form handle
Definition: Templates.hpp:1914
MoFEM::Ent_mi_tag
Definition: TagMultiIndices.hpp:21
MoFEM::CreateRowCompressedADJMatrix
Create compressed matrix.
Definition: MatrixManager.cpp:33
MoFEM::CoreInterface::loop_finite_elements
virtual MoFEMErrorCode loop_finite_elements(const std::string problem_name, const std::string &fe_name, FEMethod &method, boost::shared_ptr< NumeredEntFiniteElement_multiIndex > fe_ptr=nullptr, MoFEMTypes bh=MF_EXIST, CacheTupleWeakPtr cache_ptr=CacheTupleSharedPtr(), int verb=DEFAULT_VERBOSITY)=0
Make a loop over finite elements.
MoFEM::MatrixManager::MOFEM_EVENT_createMPIAIJCUSPARSEWithArrays
PetscLogEvent MOFEM_EVENT_createMPIAIJCUSPARSEWithArrays
Definition: MatrixManager.hpp:317
MoFEM::CoreTmp< 0 >
Core (interface) class.
Definition: Core.hpp:82
MOFEM_LOG_SEVERITY_SYNC
#define MOFEM_LOG_SEVERITY_SYNC(comm, severity)
Synchronise "SYNC" on curtain severity level.
Definition: LogManager.hpp:352
MOFEM_LOG_CHANNEL
#define MOFEM_LOG_CHANNEL(channel)
Set and reset channel.
Definition: LogManager.hpp:284
EntityHandle
NOISY
@ NOISY
Definition: definitions.h:224
MoFEM::FEMethod
structure for User Loop Methods on finite elements
Definition: LoopMethods.hpp:369
MoFEM::CoreTmp< 0 >::rAnk
int rAnk
MOFEM communicator rank.
Definition: Core.hpp:1016
MatrixManagerFunctionBegin
#define MatrixManagerFunctionBegin
Create adjacent matrices using different indices.
Definition: MatrixManager.cpp:6
MoFEM::Problem_mi_tag
Definition: TagMultiIndices.hpp:70
MoFEM::CoreInterface::get_comm
virtual MPI_Comm & get_comm() const =0
MoFEM::Exceptions::MoFEMErrorCode
PetscErrorCode MoFEMErrorCode
MoFEM/PETSc error code.
Definition: Exceptions.hpp:56
MoFEM::Types::BitFieldId
std::bitset< BITFIELDID_SIZE > BitFieldId
Field Id.
Definition: Types.hpp:42
MOFEM_LOG_ATTRIBUTES
#define MOFEM_LOG_ATTRIBUTES(channel, bit)
Add attributes to channel.
Definition: LogManager.hpp:296
A
constexpr AssemblyType A
Definition: operators_tests.cpp:30
MoFEM::MatrixManager::MOFEM_EVENT_createSeqAIJCUSPARSEWithArrays
PetscLogEvent MOFEM_EVENT_createSeqAIJCUSPARSEWithArrays
Definition: MatrixManager.hpp:318
MoFEM::CoreInterface::get_finite_elements
virtual const FiniteElement_multiIndex * get_finite_elements() const =0
Get the finite elements object.
MoFEM::CoreTmp< 0 >::entFEAdjacencies
FieldEntityEntFiniteElementAdjacencyMap_multiIndex entFEAdjacencies
adjacencies of elements to dofs
Definition: Core.hpp:311
MoFEM::MatrixManager::MOFEM_EVENT_checkMatrixFillIn
PetscLogEvent MOFEM_EVENT_checkMatrixFillIn
Definition: MatrixManager.hpp:319
sdf.r
int r
Definition: sdf.py:8
MoFEM::MatrixManager::query_interface
MoFEMErrorCode query_interface(boost::typeindex::type_index type_index, UnknownInterface **iface) const
Definition: MatrixManager.cpp:600
ROW
@ ROW
Definition: definitions.h:136
MoFEM::Interface
DeprecatedCoreInterface Interface
Definition: Interface.hpp:2010
NumeredDofEntity_multiIndex
multi_index_container< boost::shared_ptr< NumeredDofEntity >, indexed_by< ordered_unique< tag< Unique_mi_tag >, const_mem_fun< NumeredDofEntity::interface_type_DofEntity, UId, &NumeredDofEntity::getLocalUniqueId > >, ordered_non_unique< tag< Part_mi_tag >, member< NumeredDofEntity, unsigned int, &NumeredDofEntity::pArt > >, ordered_non_unique< tag< Idx_mi_tag >, member< NumeredDofEntity, DofIdx, &NumeredDofEntity::dofIdx > >, ordered_non_unique< tag< PetscGlobalIdx_mi_tag >, member< NumeredDofEntity, DofIdx, &NumeredDofEntity::petscGloablDofIdx > >, ordered_non_unique< tag< PetscLocalIdx_mi_tag >, member< NumeredDofEntity, DofIdx, &NumeredDofEntity::petscLocalDofIdx > >, ordered_non_unique< tag< Ent_mi_tag >, const_mem_fun< NumeredDofEntity::interface_type_DofEntity, EntityHandle, &NumeredDofEntity::getEnt > > > > NumeredDofEntity_multiIndex
MultiIndex container keeps NumeredDofEntity.
Definition: DofsMultiIndices.hpp:469
c
const double c
speed of light (cm/ns)
Definition: initial_diffusion.cpp:39
MoFEM::LogManager::BitScope
@ BitScope
Definition: LogManager.hpp:49
MoFEM::MatrixManager::MOFEM_EVENT_createMPIAIJWithArrays
PetscLogEvent MOFEM_EVENT_createMPIAIJWithArrays
Definition: MatrixManager.hpp:314
VERBOSE
@ VERBOSE
Definition: definitions.h:222
CHKERR
#define CHKERR
Inline error check.
Definition: definitions.h:548
MoFEM::CoreTmp< 0 >::MOFEM_EVENT_createMat
PetscLogEvent MOFEM_EVENT_createMat
Definition: Core.hpp:1004
MoFEM::CoreInterface::get_moab
virtual moab::Interface & get_moab()=0
MoFEM::Composite_Unique_mi_tag
Definition: TagMultiIndices.hpp:76
MoFEM
implementation of Data Operators for Forces and Sources
Definition: Common.hpp:10
MoFEM::CreateRowCompressedADJMatrix::DofByGlobalPetscIndex
NumeredDofEntity_multiIndex::index< PetscGlobalIdx_mi_tag >::type DofByGlobalPetscIndex
Definition: MatrixManager.cpp:62
a
constexpr double a
Definition: approx_sphere.cpp:30
MOFEM_LOG_C
#define MOFEM_LOG_C(channel, severity, format,...)
Definition: LogManager.hpp:311
MoFEM::Types::UId
uint128_t UId
Unique Id.
Definition: Types.hpp:31
MoFEM::CreateRowCompressedADJMatrix::ProblemsByName
Problem_multiIndex::index< Problem_mi_tag >::type ProblemsByName
Definition: MatrixManager.cpp:40
convert.type
type
Definition: convert.py:64
MOFEM_LOG_SYNCHRONISE
#define MOFEM_LOG_SYNCHRONISE(comm)
Synchronise "SYNC" channel.
Definition: LogManager.hpp:345
MoFEM::MatrixManager::MOFEM_EVENT_createMPIAdjWithArrays
PetscLogEvent MOFEM_EVENT_createMPIAdjWithArrays
Definition: MatrixManager.hpp:315
MoFEM::CoreTmp< 0 >::sIze
int sIze
MoFEM communicator size.
Definition: Core.hpp:1015
MoFEM::MatrixManager::MatrixManager
MatrixManager(const MoFEM::Core &core)
Definition: MatrixManager.cpp:606
MoFEM::PetscGlobalIdx_mi_tag
Definition: TagMultiIndices.hpp:38
MoFEM::CoreInterface::get_problems
virtual const Problem_multiIndex * get_problems() const =0
Get the problems object.
debug
static const bool debug
Definition: dm_create_subdm.cpp:12
MoFEM::CacheTupleSharedPtr
boost::shared_ptr< CacheTuple > CacheTupleSharedPtr
Definition: FEMultiIndices.hpp:495
MoFEM::MatrixManager::MOFEM_EVENT_createMPIAIJ
PetscLogEvent MOFEM_EVENT_createMPIAIJ
Definition: MatrixManager.hpp:313
i
FTensor::Index< 'i', SPACE_DIM > i
Definition: hcurl_divergence_operator_2d.cpp:27
MoFEM::DofEntity::getLoFieldEntityUId
static UId getLoFieldEntityUId(const FieldBitNumber bit, const EntityHandle ent)
Definition: DofsMultiIndices.hpp:69
MoFEM::CoreTmp< 0 >::moab
std::reference_wrapper< moab::Interface > moab
moab database
Definition: Core.hpp:321
MoFEM::CoreTmp< 0 >::get_comm
MPI_Comm & get_comm() const
Definition: Core.hpp:1021
MOFEM_NOT_FOUND
@ MOFEM_NOT_FOUND
Definition: definitions.h:33
MoFEM::VecManager
Vector manager is used to create vectors \mofem_vectors.
Definition: VecManager.hpp:23
MoFEM::CoreTmp< 0 >::mofemComm
MPI_Comm mofemComm
MoFEM communicator.
Definition: Core.hpp:1012
MoFEM::MatrixManager
Matrix manager is used to build and partition problems.
Definition: MatrixManager.hpp:21
MoFEM::CoreInterface::get_fields
virtual const Field_multiIndex * get_fields() const =0
Get the fields object.
MoFEM::UnknownInterface
base class for all interface classes
Definition: UnknownInterface.hpp:34
MAX_DOFS_ON_ENTITY
#define MAX_DOFS_ON_ENTITY
Maximal number of DOFs on entity.
Definition: definitions.h:249
MoFEM::CoreTmp< 0 >::verbose
int verbose
Verbosity level.
Definition: Core.hpp:1039
v
const double v
phase velocity of light in medium (cm/ns)
Definition: initial_diffusion.cpp:40
Range
MoFEM::CreateRowCompressedADJMatrix::AdjByEnt
FieldEntityEntFiniteElementAdjacencyMap_multiIndex::index< Unique_mi_tag >::type AdjByEnt
Definition: MatrixManager.cpp:59
MOFEM_LOG
#define MOFEM_LOG(channel, severity)
Log.
Definition: LogManager.hpp:308
MoFEM::DofEntity::getHiFieldEntityUId
static UId getHiFieldEntityUId(const FieldBitNumber bit, const EntityHandle ent)
Definition: DofsMultiIndices.hpp:75
HenckyOps::f
auto f
Definition: HenckyOps.hpp:15
j
FTensor::Index< 'j', 3 > j
Definition: matrix_function.cpp:19
IT
constexpr IntegrationType IT
Definition: test_broken_space.cpp:24
MOFEM_DATA_INCONSISTENCY
@ MOFEM_DATA_INCONSISTENCY
Definition: definitions.h:31
MoFEM::Idx_mi_tag
Definition: TagMultiIndices.hpp:31
sdf_hertz_2d_axisymm_plane.d
float d
Definition: sdf_hertz_2d_axisymm_plane.py:4
MoFEM::CreateRowCompressedADJMatrix::getEntityAdjacencies
MoFEMErrorCode getEntityAdjacencies(ProblemsByName::iterator p_miit, typename boost::multi_index::index< NumeredDofEntity_multiIndex, TAG >::type::iterator mit_row, boost::shared_ptr< FieldEntity > mofem_ent_ptr, std::vector< int > &dofs_col_view, int verb) const
Get element adjacencies.
Definition: MatrixManager.cpp:81
BYROW
@ BYROW
Definition: definitions.h:144
MoFEM::Types::BitRefLevel
std::bitset< BITREFLEVEL_SIZE > BitRefLevel
Bit structure attached to each entity identifying to what mesh entity is attached.
Definition: Types.hpp:40
MoFEMFunctionBeginHot
#define MoFEMFunctionBeginHot
First executable line of each MoFEM function, used for error handling. Final line of MoFEM functions ...
Definition: definitions.h:453
QUIET
@ QUIET
Definition: definitions.h:221
MoFEM::Unique_mi_tag
Definition: TagMultiIndices.hpp:18
MoFEM::CoreInterface
Interface.
Definition: Interface.hpp:27
MoFEM::CoreTmp< 0 >::entsFields
FieldEntity_multiIndex entsFields
entities on fields
Definition: Core.hpp:304
MoFEM::SmartPetscObj< Mat >
MoFEM::EmptyFieldBlocks
Problem::EmptyFieldBlocks EmptyFieldBlocks
Definition: ProblemsMultiIndices.hpp:567
MF_EXIST
@ MF_EXIST
Definition: definitions.h:113
MoFEMFunctionReturn
#define MoFEMFunctionReturn(a)
Last executable line of each PETSc function used for error handling. Replaces return()
Definition: definitions.h:429
MoFEM::MatrixManager::cOre
MoFEM::Core & cOre
Definition: MatrixManager.hpp:26
MoFEM::CreateRowCompressedADJMatrix::CreateRowCompressedADJMatrix
CreateRowCompressedADJMatrix(moab::Interface &moab, MPI_Comm comm=PETSC_COMM_WORLD, int verbose=1)
Definition: MatrixManager.cpp:35
MOFEM_NOT_IMPLEMENTED
@ MOFEM_NOT_IMPLEMENTED
Definition: definitions.h:32
VERY_VERBOSE
@ VERY_VERBOSE
Definition: definitions.h:223
MoFEMFunctionBegin
#define MoFEMFunctionBegin
First executable line of each MoFEM function, used for error handling. Final line of MoFEM functions ...
Definition: definitions.h:359
MoFEM::MatrixManager::checkMatrixFillIn
MoFEMErrorCode checkMatrixFillIn(const std::string problem_name, int row_print, int col_print, Mat A, int verb=QUIET)
check if matrix fill in correspond to finite element indices
Definition: MatrixManager.cpp:899
MoFEM::MatrixManager::MOFEM_EVENT_createSeqAIJWithArrays
PetscLogEvent MOFEM_EVENT_createSeqAIJWithArrays
Definition: MatrixManager.hpp:316
MoFEM::Types::DofIdx
int DofIdx
Index of DOF.
Definition: Types.hpp:18