Actual source code: partition.c
#define PETSCMAT_DLL

#include "include/private/matimpl.h"

/* Logging support */
PetscCookie MAT_PARTITIONING_COOKIE = 0;

/*
   Simplest partitioning, keeps the current partitioning.
*/
static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part,IS *partitioning)
{
  PetscErrorCode ierr;
  PetscInt       m;
  PetscMPIInt    rank,size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)part)->comm,&size);CHKERRQ(ierr);
  if (part->n != size) {
    SETERRQ(PETSC_ERR_SUP,"This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -mat_partitioning_type parmetis or chaco or scotch for more than one subdomain per processor");
  }
  ierr = MPI_Comm_rank(((PetscObject)part)->comm,&rank);CHKERRQ(ierr);
  ierr = MatGetLocalSize(part->adj,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = ISCreateStride(((PetscObject)part)->comm,m,rank,0,partitioning);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
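/*
   Illustrative note (not part of the original source): because the stride IS
   above starts at the calling rank and has stride 0, every locally owned row
   is simply assigned back to its own process.  For example, with 3 processes
   owning 4, 5 and 3 rows respectively, the resulting partitioning entries are

     rank 0:  0 0 0 0
     rank 1:  1 1 1 1 1
     rank 2:  2 2 2

   so the current distribution is kept unchanged.
*/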
static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part,IS *partitioning)
{
  PetscErrorCode ierr;
  PetscInt       cell,n,N,p,rstart,rend,*color;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)part)->comm,&size);CHKERRQ(ierr);
  if (part->n != size) {
    SETERRQ(PETSC_ERR_SUP,"Currently only supports one domain per processor");
  }
  p = (PetscInt)sqrt((double)part->n);
  if (p*p != part->n) {
    SETERRQ(PETSC_ERR_SUP,"Square partitioning requires \"perfect square\" number of domains");
  }
  ierr = MatGetSize(part->adj,&N,PETSC_NULL);CHKERRQ(ierr);
  n = (PetscInt)sqrt((double)N);
  if (n*n != N) { /* This condition is NECESSARY, but NOT SUFFICIENT, for the domain to be square */
    SETERRQ(PETSC_ERR_SUP,"Square partitioning requires square domain");
  }
  if (n%p != 0) {
    SETERRQ(PETSC_ERR_SUP,"Square partitioning requires p to divide n");
  }
  ierr = MatGetOwnershipRange(part->adj,&rstart,&rend);CHKERRQ(ierr);
  ierr = PetscMalloc((rend-rstart)*sizeof(PetscInt),&color);CHKERRQ(ierr);
  /* hardwired 2 x 2 variant: for (int cell=rstart; cell<rend; cell++) { color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); } */
  /* assign each cell the index of its (n/p) x (n/p) block: column block plus p times row block */
  for (cell=rstart; cell<rend; cell++) {
    color[cell-rstart] = ((cell%n) / (n/p)) + p * ((cell/n) / (n/p));
  }
  ierr = ISCreateGeneral(((PetscObject)part)->comm,rend-rstart,color,partitioning);CHKERRQ(ierr);
  ierr = PetscFree(color);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
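/*
   Worked example (added for illustration, not in the original source): take a
   6 x 6 structured grid, so N = 36 and n = 6, partitioned over 9 processes,
   so p = 3 and each subdomain is an (n/p) x (n/p) = 2 x 2 block of cells.
   Cell 14 sits in grid row 14/6 = 2 and grid column 14%6 = 2, hence

     color[14] = (2 / 2) + 3 * (2 / 2) = 4,

   i.e. the middle subdomain of the 3 x 3 block layout.
*/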
PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Current;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  PetscFunctionReturn(0);
}

PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
{
  PetscFunctionBegin;
  part->ops->apply   = MatPartitioningApply_Square;
  part->ops->view    = 0;
  part->ops->destroy = 0;
  PetscFunctionReturn(0);
}
/* ===========================================================================================*/

#include "petscsys.h"

PetscFList MatPartitioningList              = 0;
PetscTruth MatPartitioningRegisterAllCalled = PETSC_FALSE;

PetscErrorCode MatPartitioningRegister(const char sname[],const char path[],const char name[],PetscErrorCode (*function)(MatPartitioning))
{
  PetscErrorCode ierr;
  char           fullname[PETSC_MAX_PATH_LEN];

  PetscFunctionBegin;
  ierr = PetscFListConcat(path,name,fullname);CHKERRQ(ierr);
  ierr = PetscFListAdd(&MatPartitioningList,sname,fullname,(void (*)(void))function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@C
   MatPartitioningRegisterDestroy - Frees the list of partitioning routines.

   Not Collective

   Level: developer

.keywords: matrix, register, destroy

.seealso: MatPartitioningRegisterDynamic(), MatPartitioningRegisterAll()
@*/
PetscErrorCode MatPartitioningRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFListDestroy(&MatPartitioningList);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@C
   MatPartitioningGetType - Gets the Partitioning method type and name (as a string)
   from the partitioning context.

   Not Collective

   Input Parameter:
.  partitioning - the partitioning context

   Output Parameter:
.  type - partitioner type

   Level: intermediate

.keywords: Partitioning, get, method, name, type
@*/
PetscErrorCode MatPartitioningGetType(MatPartitioning partitioning,MatPartitioningType *type)
{
  PetscFunctionBegin;
  *type = ((PetscObject)partitioning)->type_name;
  PetscFunctionReturn(0);
}
/*@C
   MatPartitioningSetNParts - Sets how many partitions are to be created;
   by default this is one per processor. Certain partitioning schemes may
   in fact only support that option.

   Not Collective

   Input Parameters:
+  partitioning - the partitioning context
-  n - the number of partitions

   Level: intermediate

.keywords: Partitioning, set

.seealso: MatPartitioningCreate(), MatPartitioningApply()
@*/
PetscErrorCode MatPartitioningSetNParts(MatPartitioning part,PetscInt n)
{
  PetscFunctionBegin;
  part->n = n;
  PetscFunctionReturn(0);
}
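/*
   Usage sketch (illustrative only): requesting a number of partitions that
   differs from the communicator size, e.g. 4 subdomains regardless of how
   many processes are running; note that the Current and Square partitioners
   defined above refuse this and require one domain per process.

     MatPartitioningSetNParts(part,4);
*/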
/*@
   MatPartitioningApply - Gets a partitioning for a matrix.

   Collective on Mat

   Input Parameter:
.  matp - the matrix partitioning object

   Output Parameter:
.  partitioning - the partitioning. For each local node this tells the processor
                  number that that node is assigned to.

   Options Database Keys:
   To specify the partitioning through the options database, use one of
   the following
$     -mat_partitioning_type parmetis, -mat_partitioning_type current
   To see the partitioning result
$     -mat_partitioning_view

   Level: beginner

   The user can define additional partitionings; see MatPartitioningRegisterDynamic().

.keywords: matrix, get, partitioning

.seealso: MatPartitioningRegisterDynamic(), MatPartitioningCreate(),
          MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
          ISPartitioningCount()
@*/
PetscErrorCode MatPartitioningApply(MatPartitioning matp,IS *partitioning)
{
  PetscErrorCode ierr;
  PetscTruth     flag;

  PetscFunctionBegin;
  if (!matp->adj->assembled) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (matp->adj->factor) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!matp->ops->apply) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must set type with MatPartitioningSetFromOptions() or MatPartitioningSetType()");
  ierr = (*matp->ops->apply)(matp,partitioning);CHKERRQ(ierr);

  ierr = PetscOptionsHasName(PETSC_NULL,"-mat_partitioning_view",&flag);CHKERRQ(ierr);
  if (flag) {
    PetscViewer viewer;
    ierr = PetscViewerASCIIGetStdout(((PetscObject)matp)->comm,&viewer);CHKERRQ(ierr);
    ierr = MatPartitioningView(matp,viewer);CHKERRQ(ierr);
    ierr = ISView(*partitioning,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

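/*
   Typical calling sequence (illustrative sketch only; A is assumed to be an
   assembled adjacency matrix, e.g. a MATMPIADJ, created elsewhere, and error
   checking is omitted for brevity):

     MatPartitioning part;
     IS              is;

     MatPartitioningCreate(PETSC_COMM_WORLD,&part);
     MatPartitioningSetAdjacency(part,A);
     MatPartitioningSetFromOptions(part);
     MatPartitioningApply(part,&is);
     ISView(is,PETSC_VIEWER_STDOUT_WORLD);
     ISDestroy(is);
     MatPartitioningDestroy(part);
*/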
/*@
   MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the thing to be
   partitioned.

   Collective on MatPartitioning and Mat

   Input Parameters:
+  part - the partitioning context
-  adj - the adjacency matrix

   Level: beginner

.keywords: Partitioning, adjacency

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning part,Mat adj)
{
  PetscFunctionBegin;
  part->adj = adj;
  PetscFunctionReturn(0);
}
/*@
   MatPartitioningDestroy - Destroys the partitioning context.

   Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context

   Level: beginner

.keywords: Partitioning, destroy, context

.seealso: MatPartitioningCreate()
@*/
PetscErrorCode MatPartitioningDestroy(MatPartitioning part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (--((PetscObject)part)->refct > 0) PetscFunctionReturn(0);

  if (part->ops->destroy) {
    ierr = (*part->ops->destroy)(part);CHKERRQ(ierr);
  }
  ierr = PetscFree(part->vertex_weights);CHKERRQ(ierr);
  ierr = PetscFree(part->part_weights);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@C
   MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  weights - the weights

   Level: beginner

   Notes:
   The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().

.keywords: Partitioning, vertex, weights

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetPartitionWeights()
@*/
PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part,const PetscInt weights[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(part->vertex_weights);CHKERRQ(ierr);
  part->vertex_weights = (PetscInt*)weights;
  PetscFunctionReturn(0);
}
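/*
   Usage sketch (illustrative, not in the original source): the weights array
   must be obtained with PetscMalloc() because this routine takes ownership
   and frees it later; nlocal stands for the number of locally owned vertices.

     PetscInt i,*wgt;

     PetscMalloc(nlocal*sizeof(PetscInt),&wgt);
     for (i=0; i<nlocal; i++) wgt[i] = 1;          give every vertex the same weight
     MatPartitioningSetVertexWeights(part,wgt);    wgt must NOT be freed by the caller
*/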
/*@C
   MatPartitioningSetPartitionWeights - Sets the weights for each partition.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  weights - the weights

   Level: beginner

   Notes:
   The array weights is freed by PETSc so the user should not free the array. In C/C++
   the array must be obtained with a call to PetscMalloc(), not malloc().

.keywords: Partitioning, partition, weights

.seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights()
@*/
PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning part,const PetscReal weights[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(part->part_weights);CHKERRQ(ierr);
  part->part_weights = (PetscReal*)weights;
  PetscFunctionReturn(0);
}
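/*
   Usage sketch (illustrative): one PetscReal weight per target partition,
   allocated with PetscMalloc() since ownership passes to the object; here
   part->n is assumed to be 4 and the first partition is asked to take half
   the work (how the weights are interpreted is up to the underlying
   partitioner package).

     PetscReal *pw;

     PetscMalloc(4*sizeof(PetscReal),&pw);
     pw[0] = 0.5; pw[1] = pw[2] = pw[3] = 1.0/6.0;
     MatPartitioningSetPartitionWeights(part,pw);
*/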
/*@
   MatPartitioningCreate - Creates a partitioning context.

   Collective on MPI_Comm

   Input Parameter:
.  comm - MPI communicator

   Output Parameter:
.  newp - location to put the context

   Level: beginner

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningApply(), MatPartitioningDestroy(),
          MatPartitioningSetAdjacency()

@*/
PetscErrorCode MatPartitioningCreate(MPI_Comm comm,MatPartitioning *newp)
{
  MatPartitioning part;
  PetscErrorCode  ierr;
  PetscMPIInt     size;

  PetscFunctionBegin;
  *newp = 0;

  ierr = PetscHeaderCreate(part,_p_MatPartitioning,struct _MatPartitioningOps,MAT_PARTITIONING_COOKIE,-1,"MatPartitioning",comm,MatPartitioningDestroy,
                           MatPartitioningView);CHKERRQ(ierr);
  part->vertex_weights = PETSC_NULL;
  part->part_weights   = PETSC_NULL;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  part->n = (PetscInt)size;

  *newp = part;
  PetscFunctionReturn(0);
}
/*@C
   MatPartitioningView - Prints the partitioning data structure.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  viewer - optional visualization context

   Level: intermediate

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
        output where only the first processor opens
        the file.  All other processors send their
        data to the first processor to print.

   The user can open alternative visualization contexts with
.     PetscViewerASCIIOpen() - output to a specified file

.keywords: Partitioning, view

.seealso: PetscViewerASCIIOpen()
@*/
PetscErrorCode MatPartitioningView(MatPartitioning part,PetscViewer viewer)
{
  PetscErrorCode      ierr;
  PetscTruth          iascii;
  MatPartitioningType name;

  PetscFunctionBegin;
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)part)->comm,&viewer);CHKERRQ(ierr);
  }

  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatPartitioningGetType(part,&name);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"MatPartitioning Object: %s\n",name);CHKERRQ(ierr);
    if (part->vertex_weights) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Using vertex weights\n");CHKERRQ(ierr);
    }
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for this MatPartitioning",((PetscObject)viewer)->type_name);
  }

  if (part->ops->view) {
    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
    ierr = (*part->ops->view)(part,viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
/*@C
   MatPartitioningSetType - Sets the type of partitioner to use

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  type - a known method

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)

   Level: intermediate

.keywords: partitioning, set, method, type

.seealso: MatPartitioningCreate(), MatPartitioningApply(), MatPartitioningType

@*/
PetscErrorCode MatPartitioningSetType(MatPartitioning part,MatPartitioningType type)
{
  PetscErrorCode ierr,(*r)(MatPartitioning);
  PetscTruth     match;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)part,type,&match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  if (part->setupcalled) {
    ierr = (*part->ops->destroy)(part);CHKERRQ(ierr);
    part->data        = 0;
    part->setupcalled = 0;
  }

  ierr = PetscFListFind(MatPartitioningList,((PetscObject)part)->comm,type,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) {SETERRQ1(PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown partitioning type %s",type);}

  part->ops->destroy = (PetscErrorCode (*)(MatPartitioning)) 0;
  part->ops->view    = (PetscErrorCode (*)(MatPartitioning,PetscViewer)) 0;
  ierr = (*r)(part);CHKERRQ(ierr);

  ierr = PetscStrfree(((PetscObject)part)->type_name);CHKERRQ(ierr);
  ierr = PetscStrallocpy(type,&((PetscObject)part)->type_name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
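/*
   Usage sketch (illustrative): choosing the partitioner programmatically
   rather than through the options database; MAT_PARTITIONING_PARMETIS is
   only meaningful when PETSc was configured with ParMETIS support.

     MatPartitioningSetType(part,MAT_PARTITIONING_PARMETIS);

   or, to keep the existing distribution,

     MatPartitioningSetType(part,MAT_PARTITIONING_CURRENT);
*/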
/*@
   MatPartitioningSetFromOptions - Sets various partitioning options from the
   options database.

   Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context

   Options Database Command:
$  -mat_partitioning_type  <type>
$      Use -help for a list of available methods
$      (for instance, parmetis)

   Level: beginner

.keywords: partitioning, set, method, type
@*/
PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning part)
{
  PetscErrorCode ierr;
  PetscTruth     flag;
  char           type[256];
  const char     *def;

  PetscFunctionBegin;
  if (!MatPartitioningRegisterAllCalled) {ierr = MatPartitioningRegisterAll(0);CHKERRQ(ierr);}
  ierr = PetscOptionsBegin(((PetscObject)part)->comm,((PetscObject)part)->prefix,"Partitioning options","MatOrderings");CHKERRQ(ierr);
  if (!((PetscObject)part)->type_name) {
#if defined(PETSC_HAVE_PARMETIS)
    def = MAT_PARTITIONING_PARMETIS;
#else
    def = MAT_PARTITIONING_CURRENT;
#endif
  } else {
    def = ((PetscObject)part)->type_name;
  }
  ierr = PetscOptionsList("-mat_partitioning_type","Type of partitioner","MatPartitioningSetType",MatPartitioningList,def,type,256,&flag);CHKERRQ(ierr);
  if (flag) {
    ierr = MatPartitioningSetType(part,type);CHKERRQ(ierr);
  }
  /*
     Set the type if it was never set.
  */
  if (!((PetscObject)part)->type_name) {
    ierr = MatPartitioningSetType(part,def);CHKERRQ(ierr);
  }

  if (part->ops->setfromoptions) {
    ierr = (*part->ops->setfromoptions)(part);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
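/*
   Run-time sketch (illustrative): with MatPartitioningSetFromOptions() in the
   calling code, the partitioner can be selected and inspected entirely from
   the command line, e.g. (the executable name "app" is hypothetical)

     ./app -mat_partitioning_type parmetis -mat_partitioning_view
*/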