Actual source code: partptscotch.c
#include <petsc/private/partitionerimpl.h>

#if defined(PETSC_HAVE_PTSCOTCH)
EXTERN_C_BEGIN
#include <ptscotch.h>
EXTERN_C_END
#endif

PetscBool PTScotchPartitionerCite = PETSC_FALSE;
const char PTScotchPartitionerCitation[] = "@article{PTSCOTCH,\n"
                                           "  author  = {C. Chevalier and F. Pellegrini},\n"
                                           "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
                                           "  journal = {Parallel Computing},\n"
                                           "  volume  = {34},\n"
                                           "  number  = {6},\n"
                                           "  pages   = {318--331},\n"
                                           "  year    = {2008},\n"
                                           "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
                                           "}\n";

typedef struct {
  MPI_Comm  pcomm;     /* communicator dup'ed from the partitioner */
  PetscInt  strategy;  /* index into PTScotchStrategyList */
  PetscReal imbalance; /* load imbalance ratio */
} PetscPartitioner_PTScotch;

#if defined(PETSC_HAVE_PTSCOTCH)

/* Wrap a PT-Scotch call and raise a PETSc library error if it reports failure */
#define PetscCallPTSCOTCH(...) \
  do { \
    PetscCheck(!(__VA_ARGS__), PETSC_COMM_SELF, PETSC_ERR_LIB, "Error calling PT-Scotch library"); \
  } while (0)

/* Map the PETSc strategy index (set via -petscpartitioner_ptscotch_strategy) onto the corresponding SCOTCH_STRAT* flag */
static int PTScotch_Strategy(PetscInt strategy)
{
  switch (strategy) {
  case 0:
    return SCOTCH_STRATDEFAULT;
  case 1:
    return SCOTCH_STRATQUALITY;
  case 2:
    return SCOTCH_STRATSPEED;
  case 3:
    return SCOTCH_STRATBALANCE;
  case 4:
    return SCOTCH_STRATSAFETY;
  case 5:
    return SCOTCH_STRATSCALABILITY;
  case 6:
    return SCOTCH_STRATRECURSIVE;
  case 7:
    return SCOTCH_STRATREMAP;
  default:
    return SCOTCH_STRATDEFAULT;
  }
}
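
/*
  Partition a graph owned by a single process with the serial Scotch interface:
  build a SCOTCH_Graph from the CSR arrays, a mapping strategy from the flag and
  imbalance ratio, and a complete target architecture (weighted if tpart is given),
  then write the vertex-to-part assignment into part[].
*/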
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[], SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[])
{
  SCOTCH_Arch  archdat;
  SCOTCH_Graph grafdat;
  SCOTCH_Strat stradat;
  SCOTCH_Num   vertnbr = n;
  SCOTCH_Num   edgenbr = xadj[n];
  SCOTCH_Num  *velotab = vtxwgt;
  SCOTCH_Num  *edlotab = adjwgt;
  SCOTCH_Num   flagval = strategy;
  double       kbalval = imbalance;

  PetscFunctionBegin;
  {
    PetscBool flg = PETSC_TRUE;

    /* The deprecated option name is checked as spelled (including the historical "partititoner" misspelling) so that existing command lines still match */
    PetscCall(PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight", NULL, "3.13", "Use -petscpartitioner_use_vertex_weights"));
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL));
    if (!flg) velotab = NULL;
  }
  PetscCallPTSCOTCH(SCOTCH_graphInit(&grafdat));
  PetscCallPTSCOTCH(SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab));
  PetscCallPTSCOTCH(SCOTCH_stratInit(&stradat));
  PetscCallPTSCOTCH(SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval));
  PetscCallPTSCOTCH(SCOTCH_archInit(&archdat));
  if (tpart) { /* target partition weights */
    PetscCallPTSCOTCH(SCOTCH_archCmpltw(&archdat, nparts, tpart));
  } else {
    PetscCallPTSCOTCH(SCOTCH_archCmplt(&archdat, nparts));
  }
  PetscCallPTSCOTCH(SCOTCH_graphMap(&grafdat, &archdat, &stradat, part));
  SCOTCH_archExit(&archdat);
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  PetscFunctionReturn(PETSC_SUCCESS);
}
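
/*
  Distributed analogue of PTScotch_PartGraph_Seq(): build a SCOTCH_Dgraph over comm
  from the local CSR arrays, then compute a distributed mapping onto the (weighted)
  complete target architecture, writing the local assignment into part[].
*/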
static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[], SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[], MPI_Comm comm)
{
  PetscMPIInt     procglbnbr;
  PetscMPIInt     proclocnum;
  SCOTCH_Arch     archdat;
  SCOTCH_Dgraph   grafdat;
  SCOTCH_Dmapping mappdat;
  SCOTCH_Strat    stradat;
  SCOTCH_Num      vertlocnbr;
  SCOTCH_Num      edgelocnbr;
  SCOTCH_Num     *veloloctab = vtxwgt;
  SCOTCH_Num     *edloloctab = adjwgt;
  SCOTCH_Num      flagval    = strategy;
  double          kbalval    = imbalance;

  PetscFunctionBegin;
  {
    PetscBool flg = PETSC_TRUE;

    /* Same deprecated (misspelled) option as in PTScotch_PartGraph_Seq() */
    PetscCall(PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight", NULL, "3.13", "Use -petscpartitioner_use_vertex_weights"));
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL));
    if (!flg) veloloctab = NULL;
  }
  PetscCallMPI(MPI_Comm_size(comm, &procglbnbr));
  PetscCallMPI(MPI_Comm_rank(comm, &proclocnum));
  vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
  edgelocnbr = xadj[vertlocnbr];

  PetscCallPTSCOTCH(SCOTCH_dgraphInit(&grafdat, comm));
  PetscCallPTSCOTCH(SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab));
  PetscCallPTSCOTCH(SCOTCH_stratInit(&stradat));
  PetscCallPTSCOTCH(SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval));
  PetscCallPTSCOTCH(SCOTCH_archInit(&archdat));
  if (tpart) { /* target partition weights */
    PetscCallPTSCOTCH(SCOTCH_archCmpltw(&archdat, nparts, tpart));
  } else {
    PetscCallPTSCOTCH(SCOTCH_archCmplt(&archdat, nparts));
  }
  PetscCallPTSCOTCH(SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part));
  PetscCallPTSCOTCH(SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat));
  SCOTCH_dgraphMapExit(&grafdat, &mappdat);
  SCOTCH_archExit(&archdat);
  SCOTCH_stratExit(&stradat);
  SCOTCH_dgraphExit(&grafdat);
  PetscFunctionReturn(PETSC_SUCCESS);
}

#endif /* PETSC_HAVE_PTSCOTCH */

static const char *const PTScotchStrategyList[] = {"DEFAULT", "QUALITY", "SPEED", "BALANCE", "SAFETY", "SCALABILITY", "RECURSIVE", "REMAP"};

static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
{
  PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *)part->data;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_free(&p->pcomm));
  PetscCall(PetscFree(part->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerView_PTScotch_ASCII(PetscPartitioner part, PetscViewer viewer)
{
  PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *)part->data;

  PetscFunctionBegin;
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscCall(PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n", PTScotchStrategyList[p->strategy]));
  PetscCall(PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n", (double)p->imbalance));
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool iascii;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) PetscCall(PetscPartitionerView_PTScotch_ASCII(part, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscPartitioner part, PetscOptionItems *PetscOptionsObject)
{
  PetscPartitioner_PTScotch *p      = (PetscPartitioner_PTScotch *)part->data;
  const char *const         *slist  = PTScotchStrategyList;
  PetscInt                   nlist  = PETSC_STATIC_ARRAY_LENGTH(PTScotchStrategyList);
  PetscBool                  flag;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "PetscPartitioner PTScotch Options");
  PetscCall(PetscOptionsEList("-petscpartitioner_ptscotch_strategy", "Partitioning strategy", "", slist, nlist, slist[p->strategy], &p->strategy, &flag));
  PetscCall(PetscOptionsReal("-petscpartitioner_ptscotch_imbalance", "Load imbalance ratio", "", p->imbalance, &p->imbalance, &flag));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
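
/*
  Entry point invoked through part->ops->partition: gather the global vertex
  distribution, read optional vertex and target-partition weights from the
  PetscSections, dispatch to the serial or distributed Scotch driver above, and
  convert the resulting assignment into partSection plus an IS of reordered points.
*/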
static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_PTSCOTCH)
  MPI_Comm    comm;
  PetscInt    nvtxs  = numVertices; /* The number of vertices in full graph */
  PetscInt   *vtxdist;              /* Distribution of vertices across processes */
  PetscInt   *xadj   = start;       /* Start of edge list for each vertex */
  PetscInt   *adjncy = adjacency;   /* Edge lists for all vertices */
  PetscInt   *vwgt   = NULL;        /* Vertex weights */
  PetscInt   *adjwgt = NULL;        /* Edge weights */
  PetscInt    v, i, *assignment, *points;
  PetscMPIInt size, rank, p;
  PetscBool   hasempty = PETSC_FALSE;
  PetscInt   *tpwgts   = NULL;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)part, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(PetscMalloc2(size + 1, &vtxdist, PetscMax(nvtxs, 1), &assignment));
  /* Calculate vertex distribution */
  vtxdist[0] = 0;
  PetscCallMPI(MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm));
  for (p = 2; p <= size; ++p) {
    hasempty = (PetscBool)(hasempty || !vtxdist[p - 1] || !vtxdist[p]);
    vtxdist[p] += vtxdist[p - 1];
  }
  /* null graph */
  if (vtxdist[size] == 0) {
    PetscCall(PetscFree2(vtxdist, assignment));
    PetscCall(ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* Calculate vertex weights */
  if (vertSection) {
    PetscCall(PetscMalloc1(nvtxs, &vwgt));
    for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionGetDof(vertSection, v, &vwgt[v]));
  }

  /* Calculate partition weights */
  if (targetSection) {
    PetscInt sumw;

    PetscCall(PetscCalloc1(nparts, &tpwgts));
    for (p = 0, sumw = 0; p < nparts; ++p) {
      PetscCall(PetscSectionGetDof(targetSection, p, &tpwgts[p]));
      sumw += tpwgts[p];
    }
    /* a target section whose weights sum to zero is treated as if no weights were given */
    if (!sumw) PetscCall(PetscFree(tpwgts));
  }

  {
    PetscPartitioner_PTScotch *pts   = (PetscPartitioner_PTScotch *)part->data;
    int                        strat = PTScotch_Strategy(pts->strategy);
    double                     imbal = (double)pts->imbalance;

    /* find the first process owning vertices */
    for (p = 0; p < size && !vtxdist[p + 1]; ++p)
      ;
    if (vtxdist[p + 1] == vtxdist[size]) { /* the whole graph is on a single process: use serial Scotch */
      if (rank == p) PetscCall(PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment));
    } else {
      MPI_Comm pcomm = pts->pcomm;

      if (hasempty) { /* split off the processes owning vertices and compact vtxdist to match */
        PetscInt cnt;

        PetscCallMPI(MPI_Comm_split(pts->pcomm, !!nvtxs, rank, &pcomm));
        for (p = 0, cnt = 0; p < size; p++) {
          if (vtxdist[p + 1] != vtxdist[p]) {
            vtxdist[cnt + 1] = vtxdist[p + 1];
            cnt++;
          }
        }
      }
      if (nvtxs) PetscCall(PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment, pcomm));
      if (hasempty) PetscCallMPI(MPI_Comm_free(&pcomm));
    }
  }
  PetscCall(PetscFree(vwgt));
  PetscCall(PetscFree(tpwgts));

  /* Convert to PetscSection+IS */
  for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionAddDof(partSection, assignment[v], 1));
  PetscCall(PetscMalloc1(nvtxs, &points));
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  PetscCheck(i == nvtxs, comm, PETSC_ERR_PLIB, "Number of points %" PetscInt_FMT " should be %" PetscInt_FMT, i, nvtxs);
  PetscCall(ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition));

  PetscCall(PetscFree2(vtxdist, assignment));
  PetscFunctionReturn(PETSC_SUCCESS);
#else
  SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
#endif
}

static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph             = PETSC_FALSE;
  part->ops->view           = PetscPartitionerView_PTScotch;
  part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
  part->ops->partition      = PetscPartitionerPartition_PTScotch;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library

  Level: intermediate

  Options Database Keys:
+ -petscpartitioner_ptscotch_strategy <string> - PT-Scotch strategy. Choose one of default quality speed balance safety scalability recursive remap
- -petscpartitioner_ptscotch_imbalance <val> - Load imbalance ratio
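
  Example Usage:
  A minimal command-line sketch (the strategy and imbalance values are illustrative; any name from the strategy list above works the same way):
.vb
  -petscpartitioner_type ptscotch -petscpartitioner_ptscotch_strategy quality -petscpartitioner_ptscotch_imbalance 0.05
.ve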

  Note:
  When the graph is on a single process, this partitioner uses the serial Scotch library rather than PT-Scotch.

.seealso: `PetscPartitionerType`, `PetscPartitionerCreate()`, `PetscPartitionerSetType()`
M*/

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
{
  PetscPartitioner_PTScotch *p;

  PetscFunctionBegin;
  PetscCall(PetscNew(&p));
  part->data = p;

  PetscCallMPI(MPI_Comm_dup(PetscObjectComm((PetscObject)part), &p->pcomm));
  p->strategy  = 0;    /* default strategy: DEFAULT */
  p->imbalance = 0.01; /* default load imbalance ratio */

  PetscCall(PetscPartitionerInitialize_PTScotch(part));
  PetscCall(PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionerCite));
  PetscFunctionReturn(PETSC_SUCCESS);
}