v0.14.0
MatPartitioningApply_Parmetis_MoFEM.cpp
#ifdef PARMETIS

#include <definitions.h>
#include <Includes.hpp>

#if PETSC_VERSION_GE(3, 6, 0)
#include <petsc/private/matimpl.h>
#else
#include <petsc-private/matimpl.h>
#endif

#include <parmetis.h>

/*
  The first 5 elements of this structure are the input control array to Metis
*/
typedef struct {
  PetscInt cuts;       /* number of cuts made (output) */
  PetscInt foldfactor;
  PetscInt parallel;   /* use parallel partitioner for coarse problem */
  PetscInt indexing;   /* 0 indicates C indexing, 1 Fortran */
  PetscInt printout;   /* indicates if one wishes Metis to print info */
  PetscBool repartition;
} MatPartitioning_Parmetis;

/*
  MATMPIAdj format - Compressed row storage for storing adjacency lists, and
  possibly weights. This is for grid reorderings (to reduce bandwidth), grid
  partitionings, etc. This is NOT currently a dynamic data-structure.
*/
typedef struct {
  PetscInt nz;
  PetscInt *diag;            /* pointers to diagonal elements, if they exist */
  PetscInt *i;               /* pointer to beginning of each row */
  PetscInt *j;               /* column values: j + i[k] is start of row k */
  PetscInt *values;          /* numerical values */
  PetscBool symmetric;       /* user indicates the nonzero structure is symmetric */
  PetscBool freeaij;         /* free a, i, j at destroy */
  PetscBool freeaijwithfree; /* use free() to free i, j instead of PetscFree() */
  PetscScalar *rowvalues;    /* scalar work space for MatGetRow() */
  PetscInt rowvalues_alloc;
} Mat_MPIAdj;

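/*
  For example, a process whose three local vertices have adjacency lists
  {1,2}, {0,2} and {0,1} stores i = {0, 2, 4, 6} and j = {1, 2, 0, 2, 0, 1};
  the neighbours of local row k occupy j[i[k]] .. j[i[k+1]-1].
*/
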
#define CHKERRQPARMETIS(n, func)                                               \
  if (n == METIS_ERROR_INPUT)                                                  \
    SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB,                                   \
             "ParMETIS error due to wrong inputs and/or options for %s",       \
             func);                                                            \
  else if (n == METIS_ERROR_MEMORY)                                            \
    SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB,                                   \
             "ParMETIS error due to insufficient memory in %s", func);         \
  else if (n == METIS_ERROR)                                                   \
    SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "ParMETIS general error in %s",   \
             func);

#define PetscStackCallParmetis(func, args)                                     \
  do {                                                                         \
    PetscStackPush(#func);                                                     \
    status = func args;                                                        \
    PetscStackPop;                                                             \
    CHKERRQPARMETIS(status, #func);                                            \
  } while (0)
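
/* Note: PetscStackCallParmetis expects an integer variable named `status` to
   exist in the calling scope; the ParMETIS return code is stored there and
   then translated into a PETSc error by CHKERRQPARMETIS. */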

namespace MoFEM {

#undef __FUNCT__
#define __FUNCT__ "MatPartitioningApply_Parmetis_MoFEM"
PetscErrorCode MatPartitioningApply_Parmetis_MoFEM(MatPartitioning part,
                                                   IS *partitioning) {
  MatPartitioning_Parmetis *pmetis = (MatPartitioning_Parmetis *)part->data;
  PetscErrorCode ierr;
  PetscInt *locals = NULL;
  Mat mat = part->adj, amat, pmat;
  PetscBool flg;
  PetscInt bs = 1;

  MoFEMFunctionBeginHot;
  ierr = PetscObjectTypeCompare((PetscObject)mat, MATMPIADJ, &flg);
  CHKERRQ(ierr);
  if (flg) {
    amat = mat;
    ierr = PetscObjectReference((PetscObject)amat);
    CHKERRQ(ierr);
  } else {
    /* bs indicates if the converted matrix is "reduced" from the original and
       hence the resulting partition results need to be stretched to match the
       original matrix */
    ierr = MatConvert(mat, MATMPIADJ, MAT_INITIAL_MATRIX, &amat);
    CHKERRQ(ierr);
    if (amat->rmap->n > 0)
      bs = mat->rmap->n / amat->rmap->n;
  }
  ierr = MatMPIAdjCreateNonemptySubcommMat(amat, &pmat);
  CHKERRQ(ierr);
  ierr = MPI_Barrier(PetscObjectComm((PetscObject)part));
  CHKERRQ(ierr);

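  /* pmat lives on a sub-communicator restricted to ranks that own at least
     one row, so it is NULL on processes with nothing to partition */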
  if (pmat) {
    MPI_Comm pcomm, comm;
    Mat_MPIAdj *adj = (Mat_MPIAdj *)pmat->data;
    PetscInt *vtxdist = pmat->rmap->range;
    PetscInt *xadj = adj->i;
    PetscInt *adjncy = adj->j;
    PetscInt itmp = 0, wgtflag = 0, numflag = 0, ncon = 1, nparts = part->n,
             options[24], i, j;
    real_t *tpwgts, *ubvec, itr = 0.1;
    int status;

    ierr = PetscObjectGetComm((PetscObject)pmat, &pcomm);
    CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
    /* check that matrix has no diagonal entries */
    {
      PetscInt rstart;
      ierr = MatGetOwnershipRange(pmat, &rstart, NULL);
      CHKERRQ(ierr);
      for (i = 0; i < pmat->rmap->n; i++) {
        for (j = xadj[i]; j < xadj[i + 1]; j++) {
          if (adjncy[j] == i + rstart)
            SETERRQ1(
                PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG,
                "Row %d has diagonal entry; Parmetis forbids diagonal entry",
                i + rstart);
        }
      }
    }
#endif

    // use vertex_weights
    if (part->vertex_weights) {
      wgtflag = 2;
    }
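    /* ParMETIS wgtflag convention: 0 = no weights, 1 = edge weights only,
       2 = vertex weights only, 3 = both vertex and edge weights */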

    ierr = PetscMalloc1(amat->rmap->n, &locals);
    CHKERRQ(ierr);

    if (PetscLogPrintInfo) {
      itmp = pmetis->printout;
      pmetis->printout = 127;
    }
    ierr = PetscMalloc1(ncon * nparts, &tpwgts);
    CHKERRQ(ierr);
    for (i = 0; i < ncon; i++) {
      for (j = 0; j < nparts; j++) {
        if (part->part_weights) {
          tpwgts[i * nparts + j] = part->part_weights[i * nparts + j];
        } else {
          tpwgts[i * nparts + j] = 1. / nparts;
        }
      }
    }
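    /* tpwgts[i*nparts+j] is the target fraction of constraint i's total vertex
       weight assigned to part j; without user part weights each part gets an
       equal share of 1/nparts */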
    ierr = PetscMalloc1(ncon, &ubvec);
    CHKERRQ(ierr);
    for (i = 0; i < ncon; i++) {
      ubvec[i] = 1.05;
    }
    /* This sets the defaults */
    options[0] = 0;
    for (i = 1; i < 24; i++) {
      options[i] = -1;
    }
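    /* ubvec is the per-constraint load-imbalance tolerance (1.05 allows 5%
       imbalance); options[0] == 0 makes ParMETIS fall back to its defaults */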
    /* Duplicate the communicator to be sure that ParMETIS attribute caching
     * does not interfere with PETSc. */
    ierr = MPI_Comm_dup(pcomm, &comm);
    CHKERRQ(ierr);
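    /* Either repartition an existing decomposition (AdaptiveRepart, with itr
       as the redistribution-cost ratio) or compute a fresh k-way partition */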
    if (pmetis->repartition) {
      PetscStackCallParmetis(
          ParMETIS_V3_AdaptiveRepart,
          ((idx_t *)vtxdist, (idx_t *)xadj, (idx_t *)adjncy,
           (idx_t *)part->vertex_weights, (idx_t *)part->vertex_weights,
           (idx_t *)adj->values, (idx_t *)&wgtflag, (idx_t *)&numflag,
           (idx_t *)&ncon, (idx_t *)&nparts, tpwgts, ubvec, &itr,
           (idx_t *)options, (idx_t *)&pmetis->cuts, (idx_t *)locals, &comm));
    } else {
      PetscStackCallParmetis(ParMETIS_V3_PartKway,
                             ((idx_t *)vtxdist, (idx_t *)xadj, (idx_t *)adjncy,
                              (idx_t *)part->vertex_weights,
                              (idx_t *)adj->values, (idx_t *)&wgtflag,
                              (idx_t *)&numflag, (idx_t *)&ncon,
                              (idx_t *)&nparts, tpwgts, ubvec, (idx_t *)options,
                              (idx_t *)&pmetis->cuts, (idx_t *)locals, &comm));
    }
    ierr = MPI_Comm_free(&comm);
    CHKERRQ(ierr);

    ierr = PetscFree(tpwgts);
    CHKERRQ(ierr);
    ierr = PetscFree(ubvec);
    CHKERRQ(ierr);
    if (PetscLogPrintInfo)
      pmetis->printout = itmp;
  }

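  /* If MatConvert produced a "reduced" adjacency matrix (bs > 1), replicate
     each row's partition id bs times so the resulting IS matches the layout
     of the original matrix */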
  if (bs > 1) {
    PetscInt i, j, *newlocals;
    ierr = PetscMalloc1(bs * amat->rmap->n, &newlocals);
    CHKERRQ(ierr);
    for (i = 0; i < amat->rmap->n; i++) {
      for (j = 0; j < bs; j++) {
        newlocals[bs * i + j] = locals[i];
      }
    }
    ierr = PetscFree(locals);
    CHKERRQ(ierr);
    ierr =
        ISCreateGeneral(PetscObjectComm((PetscObject)part), bs * amat->rmap->n,
                        newlocals, PETSC_OWN_POINTER, partitioning);
    CHKERRQ(ierr);
  } else {
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject)part), amat->rmap->n,
                           locals, PETSC_OWN_POINTER, partitioning);
    CHKERRQ(ierr);
  }

  ierr = MatDestroy(&pmat);
  CHKERRQ(ierr);
  ierr = MatDestroy(&amat);
  CHKERRQ(ierr);
  MoFEMFunctionReturnHot(0);
}

} // namespace MoFEM

#endif // PARMETIS