Actual source code: partition.c

#include <petsc/private/matimpl.h>

/* Logging support */
PetscClassId MAT_PARTITIONING_CLASSID;

/*
   Simplest partitioning, keeps the current partitioning.
*/
static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part, IS *partitioning)
{
  PetscInt    m;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)part), &size));
  if (part->n != size) {
    const char *prefix;
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)part, &prefix));
    SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor", prefix ? prefix : "");
  }
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)part), &rank));

  PetscCall(MatGetLocalSize(part->adj, &m, NULL));
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)part), m, rank, 0, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}
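
/*
   Illustration (not part of the original source): ISCreateStride() with first entry `rank` and
   stride 0 yields m copies of the owning rank, so with two ranks owning three rows each the
   resulting IS is {0,0,0} on rank 0 and {1,1,1} on rank 1, i.e. every row keeps its current owner.
*/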

/*
   Partition an index set so as to rebalance the computation: rows are assigned to the
   parts in contiguous blocks of (nearly) equal size.
*/
static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part, IS *partitioning)
{
  PetscInt m, M, nparts, *indices, r, d, *parts, i, start, end, loc;

  PetscFunctionBegin;
  PetscCall(MatGetSize(part->adj, &M, NULL));
  PetscCall(MatGetLocalSize(part->adj, &m, NULL));
  nparts = part->n;
  PetscCall(PetscMalloc1(nparts, &parts));
  d = M / nparts; /* base number of rows per part */
  for (i = 0; i < nparts; i++) parts[i] = d;
  r = M % nparts;
  for (i = 0; i < r; i++) parts[i] += 1;                 /* the first r parts receive one extra row */
  for (i = 1; i < nparts; i++) parts[i] += parts[i - 1]; /* parts[] now holds the cumulative (exclusive) upper bound of each part */
  PetscCall(PetscMalloc1(m, &indices));
  PetscCall(MatGetOwnershipRange(part->adj, &start, &end));
  for (i = start; i < end; i++) {
    PetscCall(PetscFindInt(i, nparts, parts, &loc)); /* locate row i among the cumulative bounds */
    if (loc < 0) loc = -(loc + 1);
    else loc = loc + 1;
    indices[i - start] = loc;
  }
  PetscCall(PetscFree(parts));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), m, indices, PETSC_OWN_POINTER, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}
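
/*
   Worked example (illustrative, not in the original source): with M = 10 and nparts = 3 we get
   d = 3 and r = 1, so the part sizes are {4,3,3} and the cumulative bounds are parts[] = {4,7,10};
   PetscFindInt() then maps rows 0-3 to part 0, rows 4-6 to part 1, and rows 7-9 to part 2.
*/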

static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part, IS *partitioning)
{
  PetscInt    cell, n, N, p, rstart, rend, *color;
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)part), &size));
  PetscCheck(part->n == size, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Currently only supports one domain per processor");
  p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
  PetscCheck(p * p == part->n, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Square partitioning requires \"perfect square\" number of domains");

  PetscCall(MatGetSize(part->adj, &N, NULL));
  n = (PetscInt)PetscSqrtReal((PetscReal)N);
  PetscCheck(n * n == N, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Square partitioning requires square domain");
  PetscCheck(n % p == 0, PETSC_COMM_SELF, PETSC_ERR_SUP, "Square partitioning requires p to divide n");
  PetscCall(MatGetOwnershipRange(part->adj, &rstart, &rend));
  PetscCall(PetscMalloc1(rend - rstart, &color));
  /* for (int cell=rstart; cell<rend; cell++) color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); */
  for (cell = rstart; cell < rend; cell++) color[cell - rstart] = ((cell % n) / (n / p)) + p * ((cell / n) / (n / p)); /* p x p grid of (n/p) x (n/p) blocks */
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), rend - rstart, color, PETSC_OWN_POINTER, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}
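
/*
   Worked example (illustrative, not in the original source): for a 4 x 4 grid (N = 16, n = 4)
   split among 4 processes (p = 2, n/p = 2), cell 0 maps to color 0, cell 3 to color 1 and
   cell 10 to color 3; each process therefore owns one 2 x 2 quadrant of the grid.
*/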

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Current;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Average;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Square;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* gets as input the "sizes" array computed by ParMetis_*_NodeND and returns
     seps[   0 :         2*p) : the start and end node of each subdomain
     seps[ 2*p : 2*p+2*(p-1)) : the start and end node of each separator
   levels[   0 :         p-1) : level in the tree for each separator (-1 root, -2 and -3 first level and so on)
   The arrays must be allocated large enough to hold these values.
*/
PETSC_INTERN PetscErrorCode MatPartitioningSizesToSep_Private(PetscInt p, PetscInt sizes[], PetscInt seps[], PetscInt level[])
{
  PetscInt l2p, i, pTree, pStartTree;

  PetscFunctionBegin;
  l2p = PetscLog2Real(p);
  PetscCheck(!(l2p - (PetscInt)PetscLog2Real(p)), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "%" PetscInt_FMT " is not a power of 2", p);
  if (!p) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscArrayzero(seps, 2 * p - 2));
  PetscCall(PetscArrayzero(level, p - 1));
  seps[2 * p - 2] = sizes[2 * p - 2];
  pTree           = p;
  pStartTree      = 0;
  while (pTree != 1) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      seps[i] += sizes[i];
      seps[pStartTree + pTree + (i - pStartTree) / 2] += seps[i];
    }
    pStartTree += pTree;
    pTree = pTree / 2;
  }
  seps[2 * p - 2] -= sizes[2 * p - 2];

  pStartTree = 2 * p - 2;
  pTree      = 1;
  while (pStartTree > 0) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      PetscInt k = 2 * i - (pStartTree + 2 * pTree);
      PetscInt n = seps[k + 1];

      seps[k + 1]  = seps[i] - sizes[k + 1];
      seps[k]      = seps[k + 1] + sizes[k + 1] - n - sizes[k];
      level[i - p] = -pTree - i + pStartTree;
    }
    pTree *= 2;
    pStartTree -= pTree;
  }
  /* I know there should be a formula */
  PetscCall(PetscSortIntWithArrayPair(p - 1, seps + p, sizes + p, level));
  for (i = 2 * p - 2; i >= 0; i--) {
    seps[2 * i]     = seps[i];
    seps[2 * i + 1] = seps[i] + PetscMax(sizes[i] - 1, 0);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
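
/*
   Worked example (illustrative, not in the original source): for p = 2 and sizes = {3, 3, 1}
   (two 3-node subdomains separated by a single node, 7 nodes in total) the routine returns
   seps = {0,2, 3,5, 6,6}, that is, subdomain 0 spans nodes [0,2], subdomain 1 spans [3,5],
   and the separator is node 6; level = {-1} marks the root separator.
*/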

/* ===========================================================================================*/

PetscFunctionList MatPartitioningList              = NULL;
PetscBool         MatPartitioningRegisterAllCalled = PETSC_FALSE;

/*@C
  MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.

  Not Collective

  Input Parameters:
+ sname    - name of partitioning (for example `MATPARTITIONINGCURRENT` or `MATPARTITIONINGPARMETIS`)
- function - function pointer that creates the partitioning type

  Level: developer

  Sample usage:
.vb
  MatPartitioningRegister("my_part", MyPartCreate);
.ve

  Then, your partitioner can be chosen with the procedural interface via
$    MatPartitioningSetType(part, "my_part")
  or at runtime via the option
$    -mat_partitioning_type my_part

.seealso: [](chapter_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
@*/
PetscErrorCode MatPartitioningRegister(const char sname[], PetscErrorCode (*function)(MatPartitioning))
{
  PetscFunctionBegin;
  PetscCall(MatInitializePackage());
  PetscCall(PetscFunctionListAdd(&MatPartitioningList, sname, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningGetType - Gets the Partitioning method type and name (as a string)
  from the partitioning context.

  Not Collective

  Input Parameter:
. partitioning - the partitioning context

  Output Parameter:
. type - partitioner type

  Level: intermediate

.seealso: [](chapter_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
@*/
PetscErrorCode MatPartitioningGetType(MatPartitioning partitioning, MatPartitioningType *type)
{
  PetscFunctionBegin;
  *type = ((PetscObject)partitioning)->type_name;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningSetNParts - Set how many partitions need to be created;
  by default this is one per processor. Certain partitioning schemes may
  in fact only support that option.

  Collective

  Input Parameters:
+ part - the partitioning context
- n    - the number of partitions

  Level: intermediate

.seealso: [](chapter_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`
@*/
PetscErrorCode MatPartitioningSetNParts(MatPartitioning part, PetscInt n)
{
  PetscFunctionBegin;
  part->n = n;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningApplyND - Gets a nested dissection partitioning for a matrix.

  Collective

  Input Parameter:
. matp - the matrix partitioning object

  Output Parameter:
. partitioning - the partitioning. For each local node, a non-negative value indicates the processor
                 number the node has been assigned to; a negative value x indicates that the node lies
                 on a separator, at level -(x+1).

  Level: intermediate

  Note:
  The user can define additional partitionings; see `MatPartitioningRegister()`.
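
  Example usage (a minimal sketch, assuming an assembled adjacency matrix `adj`; error checking abbreviated):
.vb
  MatPartitioning part;
  IS              ndmap;
  MatPartitioningCreate(PETSC_COMM_WORLD, &part);
  MatPartitioningSetAdjacency(part, adj);
  MatPartitioningSetFromOptions(part);
  MatPartitioningApplyND(part, &ndmap);
  /* ... use ndmap ... */
  ISDestroy(&ndmap);
  MatPartitioningDestroy(&part);
.ve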

.seealso: [](chapter_matrices), `Mat`, `MatPartitioning`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
          `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
          `ISPartitioningCount()`
@*/
PetscErrorCode MatPartitioningApplyND(MatPartitioning matp, IS *partitioning)
{
  PetscFunctionBegin;
  PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(PetscLogEventBegin(MAT_PartitioningND, matp, 0, 0, 0));
  PetscUseTypeMethod(matp, applynd, partitioning);
  PetscCall(PetscLogEventEnd(MAT_PartitioningND, matp, 0, 0, 0));

  PetscCall(MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view"));
  PetscCall(ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view"));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningApply - Gets a partitioning for the graph represented by a sparse matrix.

  Collective

  Input Parameter:
. matp - the matrix partitioning object

  Output Parameter:
. partitioning - the partitioning. For each local node this tells the processor
                 number that the node is assigned to.

  Options Database Keys:
+ -mat_partitioning_type <type> - set the partitioning package or algorithm to use
- -mat_partitioning_view        - display information about the partitioning object

  Level: beginner

  Note:
  The user can define additional partitionings; see `MatPartitioningRegister()`.
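
  Example usage (a minimal sketch, assuming an assembled adjacency matrix `adj`; error checking abbreviated):
.vb
  MatPartitioning part;
  IS              is;
  MatPartitioningCreate(PETSC_COMM_WORLD, &part);
  MatPartitioningSetAdjacency(part, adj);
  MatPartitioningSetFromOptions(part);
  MatPartitioningApply(part, &is);
  /* ... use the result, e.g. with ISPartitioningToNumbering() ... */
  ISDestroy(&is);
  MatPartitioningDestroy(&part);
.ve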

.seealso: [](chapter_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
          `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
          `ISPartitioningCount()`
@*/
PetscErrorCode MatPartitioningApply(MatPartitioning matp, IS *partitioning)
{
  PetscBool viewbalance, improve;

  PetscFunctionBegin;
  PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0));
  PetscUseTypeMethod(matp, apply, partitioning);
  PetscCall(PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0));

  PetscCall(MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view"));
  PetscCall(ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view"));

  PetscObjectOptionsBegin((PetscObject)matp);
  viewbalance = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-mat_partitioning_view_imbalance", "Display imbalance information of a partition", NULL, PETSC_FALSE, &viewbalance, NULL));
  improve = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-mat_partitioning_improve", "Improve the quality of a partition", NULL, PETSC_FALSE, &improve, NULL));
  PetscOptionsEnd();

  if (improve) PetscCall(MatPartitioningImprove(matp, partitioning));

  if (viewbalance) PetscCall(MatPartitioningViewImbalance(matp, *partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningImprove - Improves the quality of a given partition.

  Collective

  Input Parameters:
+ matp         - the matrix partitioning object
- partitioning - the original partitioning. For each local node this tells the processor
                 number that the node is assigned to.

  Output Parameter:
. partitioning - the improved partitioning. For each local node this tells the processor
                 number that the node is assigned to.

  Options Database Key:
. -mat_partitioning_improve - improve the quality of the given partition

  Level: beginner

.seealso: [](chapter_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningCreate()`,
          `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
          `ISPartitioningCount()`
@*/
PetscErrorCode MatPartitioningImprove(MatPartitioning matp, IS *partitioning)
{
  PetscFunctionBegin;
  PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0));
  PetscTryTypeMethod(matp, improve, partitioning);
  PetscCall(PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningViewImbalance - Display partitioning imbalance information.

  Collective

  Input Parameters:
+ matp         - the matrix partitioning object
- partitioning - the partitioning. For each local node this tells the processor
                 number that the node is assigned to.

  Options Database Key:
. -mat_partitioning_view_imbalance - view the balance information from the last partitioning

  Level: beginner

.seealso: [](chapter_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningView()`
@*/
PetscErrorCode MatPartitioningViewImbalance(MatPartitioning matp, IS partitioning)
{
  PetscInt        nparts, *subdomainsizes, *subdomainsizes_tmp, nlocal, i, maxsub, minsub, avgsub;
  const PetscInt *indices;
  PetscViewer     viewer;

  PetscFunctionBegin;
  nparts = matp->n;
  PetscCall(PetscCalloc2(nparts, &subdomainsizes, nparts, &subdomainsizes_tmp));
  PetscCall(ISGetLocalSize(partitioning, &nlocal));
  PetscCall(ISGetIndices(partitioning, &indices));
  for (i = 0; i < nlocal; i++) subdomainsizes_tmp[indices[i]] += matp->vertex_weights ? matp->vertex_weights[i] : 1;
  PetscCallMPI(MPI_Allreduce(subdomainsizes_tmp, subdomainsizes, nparts, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)matp)));
  PetscCall(ISRestoreIndices(partitioning, &indices));
  minsub = PETSC_MAX_INT, maxsub = PETSC_MIN_INT, avgsub = 0;
  for (i = 0; i < nparts; i++) {
    minsub = PetscMin(minsub, subdomainsizes[i]);
    maxsub = PetscMax(maxsub, subdomainsizes[i]);
    avgsub += subdomainsizes[i];
  }
  avgsub /= nparts;
  PetscCall(PetscFree2(subdomainsizes, subdomainsizes_tmp));
  PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)matp), &viewer));
  PetscCall(MatPartitioningView(matp, viewer));
  PetscCall(PetscViewerASCIIPrintf(viewer, "Partitioning Imbalance Info: Max %" PetscInt_FMT ", Min %" PetscInt_FMT ", Avg %" PetscInt_FMT ", R %g\n", maxsub, minsub, avgsub, (double)(maxsub / (PetscReal)minsub)));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the object to be partitioned.

  Collective

  Input Parameters:
+ part - the partitioning context
- adj  - the adjacency matrix; this can be any `MatType` but the natural representation is `MATMPIADJ`

  Level: beginner

.seealso: [](chapter_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
@*/
PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning part, Mat adj)
{
  PetscFunctionBegin;
  part->adj = adj;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningDestroy - Destroys the partitioning context.

  Collective

  Input Parameter:
. part - the partitioning context

  Level: beginner

.seealso: [](chapter_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
@*/
PetscErrorCode MatPartitioningDestroy(MatPartitioning *part)
{
  PetscFunctionBegin;
  if (!*part) PetscFunctionReturn(PETSC_SUCCESS);

  if (--((PetscObject)(*part))->refct > 0) {
    *part = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  if ((*part)->ops->destroy) PetscCall((*(*part)->ops->destroy)((*part)));
  PetscCall(PetscFree((*part)->vertex_weights));
  PetscCall(PetscFree((*part)->part_weights));
  PetscCall(PetscHeaderDestroy(part));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

  Logically Collective

  Input Parameters:
+ part    - the partitioning context
- weights - the weights; on each process this array must have the same size as the number of local rows
            times the value passed with `MatPartitioningSetNumberVertexWeights()`, or 1 if that is not provided

  Level: beginner

  Notes:
  The array `weights` is freed by PETSc, so the user should not free it. In C/C++
  the array must be obtained with a call to `PetscMalloc()`, not malloc().

  Some partitioners may ignore the vertex weights.
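
  Example usage (a minimal sketch with uniform unit weights; `adj` is the adjacency matrix, error checking abbreviated):
.vb
  PetscInt *weights, i, m;
  MatGetLocalSize(adj, &m, NULL);
  PetscMalloc1(m, &weights); /* ownership of the array passes to PETSc */
  for (i = 0; i < m; i++) weights[i] = 1;
  MatPartitioningSetVertexWeights(part, weights);
.ve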

.seealso: [](chapter_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetPartitionWeights()`, `MatPartitioningSetNumberVertexWeights()`
@*/
PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part, const PetscInt weights[])
{
  PetscFunctionBegin;
  PetscCall(PetscFree(part->vertex_weights));
  part->vertex_weights = (PetscInt *)weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningSetPartitionWeights - Sets the weights for each partition.

  Logically Collective

  Input Parameters:
+ part    - the partitioning context
- weights - an array of size nparts that is used to specify the fraction of
            vertex weight that should be distributed to each sub-domain for
            the balance constraint. If all of the sub-domains are to be of
            the same size, then each of the nparts elements should be set
            to a value of 1/nparts. Note that the sum of all of the weights
            should be one.

  Level: beginner

  Note:
  The array `weights` is freed by PETSc, so the user should not free it. In C/C++
  the array must be obtained with a call to `PetscMalloc()`, not malloc().
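
  Example usage (a minimal sketch requesting four equal-sized parts; error checking abbreviated):
.vb
  PetscReal *pweights;
  PetscInt   i, nparts = 4;
  MatPartitioningSetNParts(part, nparts);
  PetscMalloc1(nparts, &pweights); /* ownership of the array passes to PETSc */
  for (i = 0; i < nparts; i++) pweights[i] = 1.0 / nparts;
  MatPartitioningSetPartitionWeights(part, pweights);
.ve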

.seealso: [](chapter_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetVertexWeights()`, `MatPartitioningCreate()`, `MatPartitioningSetType()`
@*/
PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning part, const PetscReal weights[])
{
  PetscFunctionBegin;
  PetscCall(PetscFree(part->part_weights));
  part->part_weights = (PetscReal *)weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningSetUseEdgeWeights - Set a flag to indicate whether or not to use edge weights.

  Logically Collective

  Input Parameters:
+ part             - the partitioning context
- use_edge_weights - the flag indicating whether or not to use edge weights. By default no edge weights are used,
                     that is, use_edge_weights is `PETSC_FALSE`. If use_edge_weights is set to `PETSC_TRUE`, the user
                     must ensure that valid edge weights are stored in the adjacency matrix.

  Options Database Key:
. -mat_partitioning_use_edge_weights - (true or false)

  Level: beginner

.seealso: [](chapter_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`
@*/
PetscErrorCode MatPartitioningSetUseEdgeWeights(MatPartitioning part, PetscBool use_edge_weights)
{
  PetscFunctionBegin;
  part->use_edge_weights = use_edge_weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningGetUseEdgeWeights - Get the flag that indicates whether or not edge weights are used.

  Logically Collective

  Input Parameter:
. part - the partitioning context

  Output Parameter:
. use_edge_weights - the flag indicating whether or not edge weights are used

  Level: beginner

.seealso: [](chapter_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`,
          `MatPartitioningSetUseEdgeWeights()`
@*/
PetscErrorCode MatPartitioningGetUseEdgeWeights(MatPartitioning part, PetscBool *use_edge_weights)
{
  PetscFunctionBegin;
  *use_edge_weights = part->use_edge_weights;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningCreate - Creates a partitioning context.

  Collective

  Input Parameter:
. comm - MPI communicator

  Output Parameter:
. newp - location to put the context

  Level: beginner

.seealso: [](chapter_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetType()`, `MatPartitioningApply()`, `MatPartitioningDestroy()`,
          `MatPartitioningSetAdjacency()`
@*/
PetscErrorCode MatPartitioningCreate(MPI_Comm comm, MatPartitioning *newp)
{
  MatPartitioning part;
  PetscMPIInt     size;

  PetscFunctionBegin;
  *newp = NULL;

  PetscCall(MatInitializePackage());
  PetscCall(PetscHeaderCreate(part, MAT_PARTITIONING_CLASSID, "MatPartitioning", "Matrix/graph partitioning", "MatOrderings", comm, MatPartitioningDestroy, MatPartitioningView));
  part->vertex_weights   = NULL;
  part->part_weights     = NULL;
  part->use_edge_weights = PETSC_FALSE; /* By default we don't use edge weights */

  PetscCallMPI(MPI_Comm_size(comm, &size));
  part->n    = (PetscInt)size;
  part->ncon = 1;

  *newp = part;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningViewFromOptions - View a partitioning context from the options database

  Collective

  Input Parameters:
+ A    - the partitioning context
. obj  - optional object that provides the prefix used in the options database check
- name - command line option

  Options Database Key:
. -mat_partitioning_view [viewertype]:... - the viewer and its options

  Level: intermediate

  Note:
.vb
    If no value is provided ascii:stdout is used
       ascii[:[filename][:[format][:append]]]  defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
                                               for example ascii::ascii_info prints just the information about the object not all details
                                               unless :append is given filename opens in write mode, overwriting what was already there
       binary[:[filename][:[format][:append]]] defaults to the file binaryoutput
       draw[:drawtype[:filename]]              for example, draw:tikz, draw:tikz:figure.tex or draw:x
       socket[:port]                           defaults to the standard output port
       saws[:communicatorname]                 publishes object to the Scientific Application Webserver (SAWs)
.ve

.seealso: [](chapter_matrices), `Mat`, `MatPartitioning`, `MatPartitioningView()`, `PetscObjectViewFromOptions()`, `MatPartitioningCreate()`
@*/
PetscErrorCode MatPartitioningViewFromOptions(MatPartitioning A, PetscObject obj, const char name[])
{
  PetscFunctionBegin;
  PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningView - Prints the partitioning data structure.

  Collective

  Input Parameters:
+ part   - the partitioning context
- viewer - optional visualization context

  Level: intermediate

  Note:
  The available visualization contexts include
+ `PETSC_VIEWER_STDOUT_SELF`  - standard output (default)
- `PETSC_VIEWER_STDOUT_WORLD` - synchronized standard output where only the first processor opens
                                the file. All other processors send their data to the first processor to print.

  The user can open alternative visualization contexts with
. `PetscViewerASCIIOpen()` - output to a specified file

.seealso: [](chapter_matrices), `Mat`, `MatPartitioning`, `PetscViewer`, `PetscViewerASCIIOpen()`
@*/
PetscErrorCode MatPartitioningView(MatPartitioning part, PetscViewer viewer)
{
  PetscBool iascii;

  PetscFunctionBegin;
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part), &viewer));
  PetscCheckSameComm(part, 1, viewer, 2);

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)part, viewer));
    if (part->vertex_weights) PetscCall(PetscViewerASCIIPrintf(viewer, "  Using vertex weights\n"));
  }
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscTryTypeMethod(part, view, viewer);
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningSetType - Sets the type of partitioner to use

  Collective

  Input Parameters:
+ part - the partitioning context
- type - a known method

  Options Database Key:
. -mat_partitioning_type <type> - (for instance, parmetis), use -help for a list of available methods or see `MatPartitioningType`

  Level: intermediate

.seealso: [](chapter_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`, `MatPartitioningType`
@*/
PetscErrorCode MatPartitioningSetType(MatPartitioning part, MatPartitioningType type)
{
  PetscBool match;
  PetscErrorCode (*r)(MatPartitioning);

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)part, type, &match));
  if (match) PetscFunctionReturn(PETSC_SUCCESS);

  PetscTryTypeMethod(part, destroy);
  part->ops->destroy = NULL;

  part->setupcalled = 0;
  part->data        = NULL;
  PetscCall(PetscMemzero(part->ops, sizeof(struct _MatPartitioningOps)));

  PetscCall(PetscFunctionListFind(MatPartitioningList, type, &r));
  PetscCheck(r, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown partitioning type %s", type);

  PetscCall((*r)(part));

  PetscCall(PetscFree(((PetscObject)part)->type_name));
  PetscCall(PetscStrallocpy(type, &((PetscObject)part)->type_name));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningSetFromOptions - Sets various partitioning options from the
  options database for the partitioning object

  Collective

  Input Parameter:
. part - the partitioning context

  Options Database Keys:
+ -mat_partitioning_type <type> - (for instance, parmetis), use -help for a list of available methods
- -mat_partitioning_nparts      - number of subgraphs

  Level: beginner

  Note:
  If the partitioner has not been set by the user it uses one of the installed partitioners, such as
  ParMetis. If no partitioners are installed, it performs no repartitioning.
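
  Example usage (a sketch; `myapp` stands for any PETSc-based executable, and all three options are handled by this routine):
.vb
  ./myapp -mat_partitioning_type parmetis -mat_partitioning_nparts 8 -mat_partitioning_use_edge_weights true
.ve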

.seealso: [](chapter_matrices), `Mat`, `MatPartitioning`
@*/
PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning part)
{
  PetscBool   flag;
  char        type[256];
  const char *def;

  PetscFunctionBegin;
  PetscObjectOptionsBegin((PetscObject)part);
  if (!((PetscObject)part)->type_name) {
#if defined(PETSC_HAVE_PARMETIS)
    def = MATPARTITIONINGPARMETIS;
#elif defined(PETSC_HAVE_CHACO)
    def = MATPARTITIONINGCHACO;
#elif defined(PETSC_HAVE_PARTY)
    def = MATPARTITIONINGPARTY;
#elif defined(PETSC_HAVE_PTSCOTCH)
    def = MATPARTITIONINGPTSCOTCH;
#else
    def = MATPARTITIONINGCURRENT;
#endif
  } else {
    def = ((PetscObject)part)->type_name;
  }
  PetscCall(PetscOptionsFList("-mat_partitioning_type", "Type of partitioner", "MatPartitioningSetType", MatPartitioningList, def, type, 256, &flag));
  if (flag) PetscCall(MatPartitioningSetType(part, type));

  PetscCall(PetscOptionsInt("-mat_partitioning_nparts", "number of fine parts", NULL, part->n, &part->n, &flag));

  PetscCall(PetscOptionsBool("-mat_partitioning_use_edge_weights", "whether or not to use edge weights", NULL, part->use_edge_weights, &part->use_edge_weights, &flag));

  /*
     Set the type if it was never set.
  */
  if (!((PetscObject)part)->type_name) PetscCall(MatPartitioningSetType(part, def));

  PetscTryTypeMethod(part, setfromoptions, PetscOptionsObject);
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatPartitioningSetNumberVertexWeights - Sets the number of weights per vertex

  Not Collective

  Input Parameters:
+ partitioning - the partitioning context
- ncon         - the number of weights

  Level: intermediate
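
  Example usage (a sketch; with two weights per vertex, the array later passed to
  `MatPartitioningSetVertexWeights()` must hold 2*m entries for m local rows):
.vb
  MatPartitioningSetNumberVertexWeights(partitioning, 2);
.ve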

.seealso: [](chapter_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetVertexWeights()`
@*/
PetscErrorCode MatPartitioningSetNumberVertexWeights(MatPartitioning partitioning, PetscInt ncon)
{
  PetscFunctionBegin;
  partitioning->ncon = ncon;
  PetscFunctionReturn(PETSC_SUCCESS);
}