/* partition.c */
1: /*$Id: partition.c,v 1.60 2001/06/21 21:17:23 bsmith Exp $*/
2:
#include "src/mat/matimpl.h"

/* Logging support */
int MAT_PARTITIONING_COOKIE;
8: /*
9: Simplest partitioning, keeps the current partitioning.
10: */
13: static int MatPartitioningApply_Current(MatPartitioning part,IS *partitioning)
14: {
15: int ierr,m,rank,size;
18: MPI_Comm_size(part->comm,&size);
19: if (part->n != size) {
20: SETERRQ(PETSC_ERR_SUP,"Currently only supports one domain per processor");
21: }
22: MPI_Comm_rank(part->comm,&rank);
24: MatGetLocalSize(part->adj,&m,PETSC_NULL);
25: ISCreateStride(part->comm,m,rank,0,partitioning);
26: return(0);
27: }
31: static int MatPartitioningApply_Square(MatPartitioning part,IS *partitioning)
32: {
33: int cell,ierr,n,N,p,rstart,rend,*color,size;
36: MPI_Comm_size(part->comm,&size);
37: if (part->n != size) {
38: SETERRQ(PETSC_ERR_SUP,"Currently only supports one domain per processor");
39: }
40: p = (int)sqrt((double)part->n);
41: if (p*p != part->n) {
42: SETERRQ(PETSC_ERR_SUP,"Square partitioning requires \"perfect square\" number of domains");
43: }
44: MatGetSize(part->adj,&N,PETSC_NULL);
45: n = (int)sqrt((double)N);
46: if (n*n != N) { /* This condition is NECESSARY, but NOT SUFFICIENT in order to the domain be square */
47: SETERRQ(PETSC_ERR_SUP,"Square partitioning requires square domain");
48: }
49: if (n%p != 0) {
50: SETERRQ(PETSC_ERR_SUP,"Square partitioning requires p to divide n");
51: }
52: MatGetOwnershipRange(part->adj,&rstart,&rend);
53: PetscMalloc((rend-rstart)*sizeof(int),&color);
54: /* for (int cell=rstart; cell<rend; cell++) { color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); } */
55: for (cell=rstart; cell<rend; cell++) {
56: color[cell-rstart] = ((cell%n) / (n/p)) + p * ((cell/n) / (n/p));
57: }
58: ISCreateGeneral(part->comm,rend-rstart,color,partitioning);
59: PetscFree(color);
61: return(0);
62: }
64: EXTERN_C_BEGIN
67: int MatPartitioningCreate_Current(MatPartitioning part)
68: {
70: part->ops->apply = MatPartitioningApply_Current;
71: part->ops->view = 0;
72: part->ops->destroy = 0;
73: return(0);
74: }
75: EXTERN_C_END
77: EXTERN_C_BEGIN
80: int MatPartitioningCreate_Square(MatPartitioning part)
81: {
83: part->ops->apply = MatPartitioningApply_Square;
84: part->ops->view = 0;
85: part->ops->destroy = 0;
86: return(0);
87: }
88: EXTERN_C_END
90: /* ===========================================================================================*/
#include "petscsys.h"

PetscFList MatPartitioningList              = 0;
PetscTruth MatPartitioningRegisterAllCalled = PETSC_FALSE;
100: int MatPartitioningRegister(char *sname,char *path,char *name,int (*function)(MatPartitioning))
101: {
102: int ierr;
103: char fullname[256];
106: PetscFListConcat(path,name,fullname);
107: PetscFListAdd(&MatPartitioningList,sname,fullname,(void (*)(void))function);
108: return(0);
109: }
113: /*@C
114: MatPartitioningRegisterDestroy - Frees the list of partitioning routines.
116: Not Collective
118: Level: developer
120: .keywords: matrix, register, destroy
122: .seealso: MatPartitioningRegisterDynamic(), MatPartitioningRegisterAll()
123: @*/
124: int MatPartitioningRegisterDestroy(void)
125: {
129: if (MatPartitioningList) {
130: PetscFListDestroy(&MatPartitioningList);
131: MatPartitioningList = 0;
132: }
133: return(0);
134: }
138: /*@C
139: MatPartitioningGetType - Gets the Partitioning method type and name (as a string)
140: from the partitioning context.
142: Not collective
144: Input Parameter:
145: . partitioning - the partitioning context
147: Output Parameter:
148: . type - partitioner type
150: Level: intermediate
152: Not Collective
154: .keywords: Partitioning, get, method, name, type
155: @*/
156: int MatPartitioningGetType(MatPartitioning partitioning,MatPartitioningType *type)
157: {
159: *type = partitioning->type_name;
160: return(0);
161: }
165: /*@C
166: MatPartitioningSetNParts - Set how many partitions need to be created;
167: by default this is one per processor. Certain partitioning schemes may
168: in fact only support that option.
170: Not collective
172: Input Parameter:
173: . partitioning - the partitioning context
174: . n - the number of partitions
176: Level: intermediate
178: Not Collective
180: .keywords: Partitioning, set
182: .seealso: MatPartitioningCreate(), MatPartitioningApply()
183: @*/
184: int MatPartitioningSetNParts(MatPartitioning part,int n)
185: {
187: part->n = n;
188: return(0);
189: }
193: /*@C
194: MatPartitioningApply - Gets a partitioning for a matrix.
196: Collective on Mat
198: Input Parameters:
199: . matp - the matrix partitioning object
201: Output Parameters:
202: . partitioning - the partitioning. For each local node this tells the processor
203: number that that node is assigned to.
205: Options Database Keys:
206: To specify the partitioning through the options database, use one of
207: the following
208: $ -mat_partitioning_type parmetis, -mat_partitioning current
209: To see the partitioning result
210: $ -mat_partitioning_view
212: Level: beginner
214: The user can define additional partitionings; see MatPartitioningRegisterDynamic().
216: .keywords: matrix, get, partitioning
218: .seealso: MatPartitioningRegisterDynamic(), MatPartitioningCreate(),
219: MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
220: ISPartitioningCount()
221: @*/
222: int MatPartitioningApply(MatPartitioning matp,IS *partitioning)
223: {
224: int ierr;
225: PetscTruth flag;
229: if (!matp->adj->assembled) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
230: if (matp->adj->factor) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
231: if (!matp->ops->apply) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must set type with MatPartitioningSetFromOptions() or MatPartitioningSetType()");
232: PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);
233: (*matp->ops->apply)(matp,partitioning);
234: PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);
236: PetscOptionsHasName(PETSC_NULL,"-mat_partitioning_view",&flag);
237: if (flag) {
238: MatPartitioningView(matp,PETSC_VIEWER_STDOUT_(matp->comm));
239: ISView(*partitioning,PETSC_VIEWER_STDOUT_(matp->comm));
240: }
241: return(0);
242: }
243:
246: /*@C
247: MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the thing to be
248: partitioned.
250: Collective on MatPartitioning and Mat
252: Input Parameters:
253: + part - the partitioning context
254: - adj - the adjacency matrix
256: Level: beginner
258: .keywords: Partitioning, adjacency
260: .seealso: MatPartitioningCreate()
261: @*/
262: int MatPartitioningSetAdjacency(MatPartitioning part,Mat adj)
263: {
267: part->adj = adj;
268: return(0);
269: }
/*@C
   MatPartitioningDestroy - Destroys the partitioning context.

   Collective on Partitioning

   Input Parameters:
.  part - the partitioning context

   Level: beginner

.keywords: Partitioning, destroy, context

.seealso: MatPartitioningCreate()
@*/
int MatPartitioningDestroy(MatPartitioning part)
{
  /* reference counted: only tear down when the last reference is released */
  if (--part->refct > 0) return(0);

  /* give the implementation a chance to free its private data first */
  if (part->ops->destroy) {
    (*part->ops->destroy)(part);
  }
  /* the weights arrays are owned by this object once passed to
     MatPartitioningSetVertexWeights()/SetPartitionWeights() */
  if (part->vertex_weights){
    PetscFree(part->vertex_weights);
  }
  if (part->part_weights){
    PetscFree(part->part_weights);
  }
  PetscLogObjectDestroy(part);
  PetscHeaderDestroy(part);
  return(0);
}
311: /*@C
312: MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.
314: Collective on Partitioning
316: Input Parameters:
317: + part - the partitioning context
318: - weights - the weights
320: Level: beginner
322: Notes:
323: The array weights is freed by PETSc so the user should not free the array. In C/C++
324: the array must be obtained with a call to PetscMalloc(), not malloc().
326: .keywords: Partitioning, destroy, context
328: .seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetPartitionWeights()
329: @*/
330: int MatPartitioningSetVertexWeights(MatPartitioning part,const int weights[])
331: {
337: if (part->vertex_weights){
338: PetscFree(part->vertex_weights);
339: }
340: part->vertex_weights = (int *)weights;
341: return(0);
342: }
346: /*@C
347: MatPartitioningSetPartitionWeights - Sets the weights for each partition.
349: Collective on Partitioning
351: Input Parameters:
352: + part - the partitioning context
353: - weights - the weights
355: Level: beginner
357: Notes:
358: The array weights is freed by PETSc so the user should not free the array. In C/C++
359: the array must be obtained with a call to PetscMalloc(), not malloc().
361: .keywords: Partitioning, destroy, context
363: .seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights()
364: @*/
365: int MatPartitioningSetPartitionWeights(MatPartitioning part,const PetscReal weights[])
366: {
372: if (part->part_weights){
373: PetscFree(part->part_weights);
374: }
375: part->part_weights = (PetscReal*)weights;
376: return(0);
377: }
381: /*@C
382: MatPartitioningCreate - Creates a partitioning context.
384: Collective on MPI_Comm
386: Input Parameter:
387: . comm - MPI communicator
389: Output Parameter:
390: . newp - location to put the context
392: Level: beginner
394: .keywords: Partitioning, create, context
396: .seealso: MatPartitioningSetType(), MatPartitioningApply(), MatPartitioningDestroy(),
397: MatPartitioningSetAdjacency()
399: @*/
400: int MatPartitioningCreate(MPI_Comm comm,MatPartitioning *newp)
401: {
402: MatPartitioning part;
403: int ierr;
406: *newp = 0;
408: PetscHeaderCreate(part,_p_MatPartitioning,struct _MatPartitioningOps,MAT_PARTITIONING_COOKIE,-1,"MatPartitioning",comm,MatPartitioningDestroy,
409: MatPartitioningView);
410: PetscLogObjectCreate(part);
411: part->type = -1;
412: part->vertex_weights = PETSC_NULL;
413: part->part_weights = PETSC_NULL;
414: MPI_Comm_size(comm,&part->n);
416: *newp = part;
417: return(0);
418: }
422: /*@C
423: MatPartitioningView - Prints the partitioning data structure.
425: Collective on MatPartitioning
427: Input Parameters:
428: . part - the partitioning context
429: . viewer - optional visualization context
431: Level: intermediate
433: Note:
434: The available visualization contexts include
435: + PETSC_VIEWER_STDOUT_SELF - standard output (default)
436: - PETSC_VIEWER_STDOUT_WORLD - synchronized standard
437: output where only the first processor opens
438: the file. All other processors send their
439: data to the first processor to print.
441: The user can open alternative visualization contexts with
442: . PetscViewerASCIIOpen() - output to a specified file
444: .keywords: Partitioning, view
446: .seealso: PetscViewerASCIIOpen()
447: @*/
448: int MatPartitioningView(MatPartitioning part,PetscViewer viewer)
449: {
450: int ierr;
451: PetscTruth isascii;
452: MatPartitioningType name;
456: if (!viewer) viewer = PETSC_VIEWER_STDOUT_(part->comm);
460: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
461: if (isascii) {
462: MatPartitioningGetType(part,&name);
463: PetscViewerASCIIPrintf(viewer,"MatPartitioning Object: %s\n",name);
464: if (part->vertex_weights) {
465: PetscViewerASCIIPrintf(viewer," Using vertex weights\n");
466: }
467: } else {
468: SETERRQ1(1,"Viewer type %s not supported for this MatParitioning",((PetscObject)viewer)->type_name);
469: }
471: if (part->ops->view) {
472: PetscViewerASCIIPushTab(viewer);
473: (*part->ops->view)(part,viewer);
474: PetscViewerASCIIPopTab(viewer);
475: }
477: return(0);
478: }
482: /*@C
483: MatPartitioningSetType - Sets the type of partitioner to use
485: Collective on MatPartitioning
487: Input Parameter:
488: . part - the partitioning context.
489: . type - a known method
491: Options Database Command:
492: $ -mat_partitioning_type <type>
493: $ Use -help for a list of available methods
494: $ (for instance, parmetis)
496: Level: intermediate
498: .keywords: partitioning, set, method, type
500: .seealso: MatPartitioningCreate(), MatPartitioningApply()
502: @*/
503: int MatPartitioningSetType(MatPartitioning part,MatPartitioningType type)
504: {
505: int ierr,(*r)(MatPartitioning);
506: PetscTruth match;
512: PetscTypeCompare((PetscObject)part,type,&match);
513: if (match) return(0);
515: if (part->setupcalled) {
516: (*part->ops->destroy)(part);
517: part->data = 0;
518: part->setupcalled = 0;
519: }
521: /* Get the function pointers for the method requested */
522: if (!MatPartitioningRegisterAllCalled){ MatPartitioningRegisterAll(0);}
523: PetscFListFind(part->comm,MatPartitioningList,type,(void (**)(void)) &r);
525: if (!r) {SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Unknown partitioning type %s",type);}
527: part->ops->destroy = (int (*)(MatPartitioning)) 0;
528: part->ops->view = (int (*)(MatPartitioning,PetscViewer)) 0;
529: (*r)(part);
531: PetscStrfree(part->type_name);
532: PetscStrallocpy(type,&part->type_name);
533: return(0);
534: }
538: /*@
539: MatPartitioningSetFromOptions - Sets various partitioning options from the
540: options database.
542: Collective on MatPartitioning
544: Input Parameter:
545: . part - the partitioning context.
547: Options Database Command:
548: $ -mat_partitioning_type <type>
549: $ Use -help for a list of available methods
550: $ (for instance, parmetis)
552: Level: beginner
554: .keywords: partitioning, set, method, type
555: @*/
556: int MatPartitioningSetFromOptions(MatPartitioning part)
557: {
558: int ierr;
559: PetscTruth flag;
560: char type[256],*def;
563: if (!MatPartitioningRegisterAllCalled){ MatPartitioningRegisterAll(0);}
564: PetscOptionsBegin(part->comm,part->prefix,"Partitioning options","MatOrderings");
565: if (!part->type_name) {
566: #if defined(PETSC_HAVE_PARMETIS)
567: def = MAT_PARTITIONING_PARMETIS;
568: #else
569: def = MAT_PARTITIONING_CURRENT;
570: #endif
571: } else {
572: def = part->type_name;
573: }
574: PetscOptionsList("-mat_partitioning_type","Type of partitioner","MatPartitioningSetType",MatPartitioningList,def,type,256,&flag);
575: if (flag) {
576: MatPartitioningSetType(part,type);
577: }
578: /*
579: Set the type if it was never set.
580: */
581: if (!part->type_name) {
582: MatPartitioningSetType(part,def);
583: }
585: if (part->ops->setfromoptions) {
586: (*part->ops->setfromoptions)(part);
587: }
588: PetscOptionsEnd();
589: return(0);
590: }