/* partition.c */
1: /*$Id: partition.c,v 1.59 2001/04/10 19:36:00 bsmith Exp $*/
2:
3: #include "src/mat/matimpl.h" /*I "petscmat.h" I*/
5: /*
6: Simplest partitioning, keeps the current partitioning.
7: */
8: static int MatPartitioningApply_Current(MatPartitioning part,IS *partitioning)
9: {
10: int ierr,m,rank,size;
13: MPI_Comm_size(part->comm,&size);
14: if (part->n != size) {
15: SETERRQ(PETSC_ERR_SUP,"Currently only supports one domain per processor");
16: }
17: MPI_Comm_rank(part->comm,&rank);
19: MatGetLocalSize(part->adj,&m,PETSC_NULL);
20: ISCreateStride(part->comm,m,rank,0,partitioning);
21: return(0);
22: }
24: static int MatPartitioningApply_Square(MatPartitioning part,IS *partitioning)
25: {
26: int cell,ierr,n,N,p,rstart,rend,*color,size;
29: MPI_Comm_size(part->comm,&size);
30: if (part->n != size) {
31: SETERRQ(PETSC_ERR_SUP,"Currently only supports one domain per processor");
32: }
33: p = (int)sqrt((double)part->n);
34: if (p*p != part->n) {
35: SETERRQ(PETSC_ERR_SUP,"Square partitioning requires "perfect square" number of domains");
36: }
37: MatGetSize(part->adj,&N,PETSC_NULL);
38: n = (int)sqrt((double)N);
39: if (n*n != N) { /* This condition is NECESSARY, but NOT SUFFICIENT in order to the domain be square */
40: SETERRQ(PETSC_ERR_SUP,"Square partitioning requires square domain");
41: }
42: if (n%p != 0) {
43: SETERRQ(PETSC_ERR_SUP,"Square partitioning requires p to divide n");
44: }
45: MatGetOwnershipRange(part->adj,&rstart,&rend);
46: PetscMalloc((rend-rstart)*sizeof(int),&color);
47: /* for (int cell=rstart; cell<rend; cell++) { color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); } */
48: for (cell=rstart; cell<rend; cell++) {
49: color[cell-rstart] = ((cell%n) / (n/p)) + p * ((cell/n) / (n/p));
50: }
51: ISCreateGeneral(part->comm,rend-rstart,color,partitioning);
52: PetscFree(color);
54: return(0);
55: }
57: EXTERN_C_BEGIN
58: int MatPartitioningCreate_Current(MatPartitioning part)
59: {
61: part->ops->apply = MatPartitioningApply_Current;
62: part->ops->view = 0;
63: part->ops->destroy = 0;
64: return(0);
65: }
66: EXTERN_C_END
68: EXTERN_C_BEGIN
69: int MatPartitioningCreate_Square(MatPartitioning part)
70: {
72: part->ops->apply = MatPartitioningApply_Square;
73: part->ops->view = 0;
74: part->ops->destroy = 0;
75: return(0);
76: }
77: EXTERN_C_END
79: /* ===========================================================================================*/
81: #include "petscsys.h"
/* Registry of available partitioner implementations (name -> create routine) */
PetscFList MatPartitioningList = 0;
/* Set once MatPartitioningRegisterAll() has populated the registry */
PetscTruth MatPartitioningRegisterAllCalled = PETSC_FALSE;
86: /*MC
87: MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
88: matrix package.
90: Synopsis:
91: MatPartitioningRegisterDynamic(char *name_partitioning,char *path,char *name_create,int (*routine_create)(MatPartitioning))
93: Not Collective
95: Input Parameters:
96: + sname - name of partitioning (for example MATPARTITIONING_CURRENT) or parmetis
97: . path - location of library where creation routine is
98: . name - name of function that creates the partitioning type, a string
99: - function - function pointer that creates the partitioning type
101: Level: developer
103: If dynamic libraries are used, then the fourth input argument (function)
104: is ignored.
106: Sample usage:
107: .vb
108: MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
109: "MyPartCreate",MyPartCreate);
110: .ve
112: Then, your partitioner can be chosen with the procedural interface via
113: $ MatPartitioningSetType(part,"my_part")
114: or at runtime via the option
115: $ -mat_partitioning_type my_part
117: $PETSC_ARCH and $BOPT occurring in pathname will be replaced with appropriate values.
119: .keywords: matrix, partitioning, register
121: .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
122: M*/
124: int MatPartitioningRegister(char *sname,char *path,char *name,int (*function)(MatPartitioning))
125: {
126: int ierr;
127: char fullname[256];
130: PetscFListConcat(path,name,fullname);
131: PetscFListAdd(&MatPartitioningList,sname,fullname,(void (*)())function);
132: return(0);
133: }
135: /*@C
136: MatPartitioningRegisterDestroy - Frees the list of partitioning routines.
138: Not Collective
140: Level: developer
142: .keywords: matrix, register, destroy
144: .seealso: MatPartitioningRegisterDynamic(), MatPartitioningRegisterAll()
145: @*/
146: int MatPartitioningRegisterDestroy(void)
147: {
151: if (MatPartitioningList) {
152: PetscFListDestroy(&MatPartitioningList);
153: MatPartitioningList = 0;
154: }
155: return(0);
156: }
158: /*@C
159: MatPartitioningGetType - Gets the Partitioning method type and name (as a string)
160: from the partitioning context.
162: Not collective
164: Input Parameter:
165: . partitioning - the partitioning context
167: Output Parameter:
168: . type - partitioner type
170: Level: intermediate
172: Not Collective
174: .keywords: Partitioning, get, method, name, type
175: @*/
176: int MatPartitioningGetType(MatPartitioning partitioning,MatPartitioningType *type)
177: {
179: *type = partitioning->type_name;
180: return(0);
181: }
183: /*@C
184: MatPartitioningApply - Gets a partitioning for a matrix.
186: Collective on Mat
188: Input Parameters:
189: . matp - the matrix partitioning object
191: Output Parameters:
192: . partitioning - the partitioning. For each local node this tells the processor
193: number that that node is assigned to.
195: Options Database Keys:
196: To specify the partitioning through the options database, use one of
197: the following
198: $    -mat_partitioning_type parmetis, -mat_partitioning_type current
199: To see the partitioning result
200: $ -mat_partitioning_view
202: Level: beginner
204: The user can define additional partitionings; see MatPartitioningRegisterDynamic().
206: .keywords: matrix, get, partitioning
208: .seealso: MatPartitioningGetTypeFromOptions(), MatPartitioningRegisterDynamic(), MatPartitioningCreate(),
209:            MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
210: ISPartitioningCount()
211: @*/
212: int MatPartitioningApply(MatPartitioning matp,IS *partitioning)
213: {
214: int ierr;
215: PetscTruth flag;
219: if (!matp->adj->assembled) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
220: if (matp->adj->factor) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
221: if (!matp->ops->apply) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must set type with MatPartitioningSetFromOptions() or MatPartitioningSetType()");
222: PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);
223: (*matp->ops->apply)(matp,partitioning);
224: PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);
226: PetscOptionsHasName(PETSC_NULL,"-mat_partitioning_view",&flag);
227: if (flag) {
228: MatPartitioningView(matp,PETSC_VIEWER_STDOUT_(matp->comm));
229: ISView(*partitioning,PETSC_VIEWER_STDOUT_(matp->comm));
230: }
231: return(0);
232: }
233:
234: /*@C
235: MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the thing to be
236: partitioned.
238: Collective on MatPartitioning and Mat
240: Input Parameters:
241: + part - the partitioning context
242: - adj - the adjacency matrix
244: Level: beginner
246: .keywords: Partitioning, adjacency
248: .seealso: MatPartitioningCreate()
249: @*/
250: int MatPartitioningSetAdjacency(MatPartitioning part,Mat adj)
251: {
255: part->adj = adj;
256: return(0);
257: }
259: /*@C
260: MatPartitioningDestroy - Destroys the partitioning context.
262: Collective on Partitioning
264: Input Parameters:
265: . part - the partitioning context
267: Level: beginner
269: .keywords: Partitioning, destroy, context
271: .seealso: MatPartitioningCreate()
272: @*/
273: int MatPartitioningDestroy(MatPartitioning part)
274: {
279: if (--part->refct > 0) return(0);
281: if (part->ops->destroy) {
282: (*part->ops->destroy)(part);
283: }
284: if (part->vertex_weights){
285: PetscFree(part->vertex_weights);
286: }
287: PetscLogObjectDestroy(part);
288: PetscHeaderDestroy(part);
289: return(0);
290: }
292: /*@C
293: MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.
295: Collective on Partitioning
297: Input Parameters:
298: + part - the partitioning context
299: - weights - the weights
301: Level: beginner
303: Notes:
304: The array weights is freed by PETSc so the user should not free the array. In C/C++
305: the array must be obtained with a call to PetscMalloc(), not malloc().
307: .keywords: Partitioning, destroy, context
309: .seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetAdjacency()
310: @*/
311: int MatPartitioningSetVertexWeights(MatPartitioning part,int *weights)
312: {
318: if (part->vertex_weights){
319: PetscFree(part->vertex_weights);
320: }
321: part->vertex_weights = weights;
322: return(0);
323: }
325: /*@C
326: MatPartitioningCreate - Creates a partitioning context.
328: Collective on MPI_Comm
330: Input Parameter:
331: . comm - MPI communicator
333: Output Parameter:
334: . newp - location to put the context
336: Level: beginner
338: .keywords: Partitioning, create, context
340: .seealso: MatPartitioningSetUp(), MatPartitioningApply(), MatPartitioningDestroy(),
341: MatPartitioningSetAdjacency()
343: @*/
int MatPartitioningCreate(MPI_Comm comm,MatPartitioning *newp)
{
  MatPartitioning part;
  int             ierr;

  *newp = 0;

  /* Allocate the object header and register MatPartitioningDestroy/View as
     the generic destroy/view entry points for this class. */
  PetscHeaderCreate(part,_p_MatPartitioning,struct _MatPartitioningOps,MATPARTITIONING_COOKIE,-1,"MatPartitioning",comm,MatPartitioningDestroy,
                    MatPartitioningView);
  PetscLogObjectCreate(part);
  part->type           = -1;  /* no concrete partitioner selected yet */
  part->vertex_weights = 0;   /* default: unweighted vertices */
  /* Default number of subdomains: one per process in the communicator */
  MPI_Comm_size(comm,&part->n);

  *newp = part;
  return(0);
}
363: /*@C
364: MatPartitioningView - Prints the partitioning data structure.
366: Collective on MatPartitioning
368: Input Parameters:
369: . part - the partitioning context
370: . viewer - optional visualization context
372: Level: intermediate
374: Note:
375: The available visualization contexts include
376: + PETSC_VIEWER_STDOUT_SELF - standard output (default)
377: - PETSC_VIEWER_STDOUT_WORLD - synchronized standard
378: output where only the first processor opens
379: the file. All other processors send their
380: data to the first processor to print.
382: The user can open alternative visualization contexts with
383: . PetscViewerASCIIOpen() - output to a specified file
385: .keywords: Partitioning, view
387: .seealso: PetscViewerASCIIOpen()
388: @*/
389: int MatPartitioningView(MatPartitioning part,PetscViewer viewer)
390: {
391: int ierr;
392: PetscTruth isascii;
393: MatPartitioningType name;
397: if (!viewer) viewer = PETSC_VIEWER_STDOUT_(part->comm);
401: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
402: if (isascii) {
403: MatPartitioningGetType(part,&name);
404: PetscViewerASCIIPrintf(viewer,"MatPartitioning Object: %sn",name);
405: if (part->vertex_weights) {
406: PetscViewerASCIIPrintf(viewer," Using vertex weightsn");
407: }
408: } else {
409: SETERRQ1(1,"Viewer type %s not supported for this MatParitioning",((PetscObject)viewer)->type_name);
410: }
412: if (part->ops->view) {
413: PetscViewerASCIIPushTab(viewer);
414: (*part->ops->view)(part,viewer);
415: PetscViewerASCIIPopTab(viewer);
416: }
418: return(0);
419: }
421: /*@C
422: MatPartitioningSetType - Sets the type of partitioner to use
424: Collective on MatPartitioning
426: Input Parameter:
427: . part - the partitioning context.
428: . type - a known method
430: Options Database Command:
431: $ -mat_partitioning_type <type>
432: $ Use -help for a list of available methods
433: $ (for instance, parmetis)
435: Level: intermediate
437: .keywords: partitioning, set, method, type
439: .seealso: MatPartitioningCreate(), MatPartitioningApply()
441: @*/
442: int MatPartitioningSetType(MatPartitioning part,MatPartitioningType type)
443: {
444: int ierr,(*r)(MatPartitioning);
445: PetscTruth match;
451: PetscTypeCompare((PetscObject)part,type,&match);
452: if (match) return(0);
454: if (part->setupcalled) {
455: (*part->ops->destroy)(part);
456: part->data = 0;
457: part->setupcalled = 0;
458: }
460: /* Get the function pointers for the method requested */
461: if (!MatPartitioningRegisterAllCalled){ MatPartitioningRegisterAll(0);}
462: PetscFListFind(part->comm,MatPartitioningList,type,(void (**)(void)) &r);
464: if (!r) {SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Unknown partitioning type %s",type);}
466: part->ops->destroy = (int (*)(MatPartitioning)) 0;
467: part->ops->view = (int (*)(MatPartitioning,PetscViewer)) 0;
468: (*r)(part);
470: PetscStrfree(part->type_name);
471: PetscStrallocpy(type,&part->type_name);
472: return(0);
473: }
475: /*@
476: MatPartitioningSetFromOptions - Sets various partitioning options from the
477: options database.
479: Collective on MatPartitioning
481: Input Parameter:
482: . part - the partitioning context.
484: Options Database Command:
485: $ -mat_partitioning_type <type>
486: $ Use -help for a list of available methods
487: $ (for instance, parmetis)
489: Level: beginner
491: .keywords: partitioning, set, method, type
492: @*/
int MatPartitioningSetFromOptions(MatPartitioning part)
{
  int        ierr;
  PetscTruth flag;
  char       type[256],*def;

  /* Ensure the built-in partitioners are registered before listing them */
  if (!MatPartitioningRegisterAllCalled){ MatPartitioningRegisterAll(0);}
  PetscOptionsBegin(part->comm,part->prefix,"Partitioning options","MatOrderings");
  /* Pick the default shown in -help: the current type if one is set,
     otherwise parmetis when available, else the trivial partitioner */
  if (!part->type_name) {
#if defined(PETSC_HAVE_PARMETIS)
    def = MATPARTITIONING_PARMETIS;
#else
    def = MATPARTITIONING_CURRENT;
#endif
  } else {
    def = part->type_name;
  }
  PetscOptionsList("-mat_partitioning_type","Type of partitioner","MatPartitioningSetType",MatPartitioningList,def,type,256,&flag);
  if (flag) {
    MatPartitioningSetType(part,type);
  }
  /*
     Set the type if it was never set.
  */
  if (!part->type_name) {
    MatPartitioningSetType(part,def);
  }
  /* Let the concrete partitioner process its own options */
  if (part->ops->setfromoptions) {
    (*part->ops->setfromoptions)(part);
  }
  PetscOptionsEnd();
  return(0);
}