Actual source code: partition.c

  1: 
 2:  #include "src/mat/matimpl.h"

  4: /* Logging support */
  5: PetscCookie MAT_PARTITIONING_COOKIE = 0;

  7: /*
  8:    Simplest partitioning, keeps the current partitioning.
  9: */
 12: static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part,IS *partitioning)
 13: {
 15:   PetscInt       m;
 16:   PetscMPIInt    rank,size;

 19:   MPI_Comm_size(part->comm,&size);
 20:   if (part->n != size) {
 21:     SETERRQ(PETSC_ERR_SUP,"This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -matpartitioning_type parmetis or chaco or scotch for more than one subdomain per processor");
 22:   }
 23:   MPI_Comm_rank(part->comm,&rank);

 25:   MatGetLocalSize(part->adj,&m,PETSC_NULL);
 26:   ISCreateStride(part->comm,m,rank,0,partitioning);
 27:   return(0);
 28: }
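/*
   Illustration (not part of the source): with ISCreateStride(...,m,rank,0,...)
   every one of the m locally owned rows is assigned to the calling process.
   For example, on 2 processes each owning 3 rows, process 0 returns the index
   set {0,0,0} and process 1 returns {1,1,1}, i.e. the existing distribution
   is kept unchanged.
*/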

 32: static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part,IS *partitioning)
 33: {
 35:   PetscInt       cell,n,N,p,rstart,rend,*color;
 36:   PetscMPIInt    size;

 39:   MPI_Comm_size(part->comm,&size);
 40:   if (part->n != size) {
 41:     SETERRQ(PETSC_ERR_SUP,"Currently only supports one domain per processor");
 42:   }
 43:   p = (PetscInt)sqrt((double)part->n);
 44:   if (p*p != part->n) {
 45:     SETERRQ(PETSC_ERR_SUP,"Square partitioning requires \"perfect square\" number of domains");
 46:   }
 47:   MatGetSize(part->adj,&N,PETSC_NULL);
 48:   n = (PetscInt)sqrt((double)N);
 49:   if (n*n != N) {  /* This condition is NECESSARY, but NOT SUFFICIENT, for the domain to be square */
 50:     SETERRQ(PETSC_ERR_SUP,"Square partitioning requires square domain");
 51:   }
 52:   if (n%p != 0) {
 53:     SETERRQ(PETSC_ERR_SUP,"Square partitioning requires p to divide n");
 54:   }
 55:   MatGetOwnershipRange(part->adj,&rstart,&rend);
 56:   PetscMalloc((rend-rstart)*sizeof(PetscInt),&color);
 57:   /* for (int cell=rstart; cell<rend; cell++) { color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); } */
 58:   for (cell=rstart; cell<rend; cell++) {
 59:     color[cell-rstart] = ((cell%n) / (n/p)) + p * ((cell/n) / (n/p));
 60:   }
 61:   ISCreateGeneral(part->comm,rend-rstart,color,partitioning);
 62:   PetscFree(color);

 64:   return(0);
 65: }
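/*
   Worked example (illustrative, not part of the source): for a 4x4 grid
   (N = 16, so n = 4) split into p*p = 4 subdomains (p = 2), a cell with
   row = cell/n and column = cell%n gets color (col/(n/p)) + p*(row/(n/p))
   = col/2 + 2*(row/2), which lays the colors out as

        0 0 1 1
        0 0 1 1
        2 2 3 3
        2 2 3 3

   so each subdomain is one contiguous (n/p) x (n/p) block of the square.
*/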

 70: PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
 71: {
 73:   part->ops->apply   = MatPartitioningApply_Current;
 74:   part->ops->view    = 0;
 75:   part->ops->destroy = 0;
 76:   return(0);
 77: }

 83: PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
 84: {
 86:   part->ops->apply   = MatPartitioningApply_Square;
 87:   part->ops->view    = 0;
 88:   part->ops->destroy = 0;
 89:   return(0);
 90: }

 93: /* ===========================================================================================*/

 95:  #include "petscsys.h"

 97: PetscFList      MatPartitioningList = 0;
 98: PetscTruth MatPartitioningRegisterAllCalled = PETSC_FALSE;


103: PetscErrorCode MatPartitioningRegister(const char sname[],const char path[],const char name[],PetscErrorCode (*function)(MatPartitioning))
104: {
106:   char fullname[PETSC_MAX_PATH_LEN];

109:   PetscFListConcat(path,name,fullname);
110:   PetscFListAdd(&MatPartitioningList,sname,fullname,(void (*)(void))function);
111:   return(0);
112: }
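/*
   Usage sketch (illustrative; MyPartitioningCreate is a hypothetical user
   routine with signature PetscErrorCode MyPartitioningCreate(MatPartitioning),
   and the path and names are placeholders):

     MatPartitioningRegister("my_part","/home/me/libmy","MyPartitioningCreate",MyPartitioningCreate);

   after which

     MatPartitioningSetType(part,"my_part");

   or the run-time option -mat_partitioning_type my_part will invoke
   MyPartitioningCreate() to fill in the function table of the context.
*/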

116: /*@C
117:    MatPartitioningRegisterDestroy - Frees the list of partitioning routines.

119:   Not Collective

121:   Level: developer

123: .keywords: matrix, register, destroy

125: .seealso: MatPartitioningRegisterDynamic(), MatPartitioningRegisterAll()
126: @*/
127: PetscErrorCode MatPartitioningRegisterDestroy(void)
128: {

132:   if (MatPartitioningList) {
133:     PetscFListDestroy(&MatPartitioningList);
134:     MatPartitioningList = 0;
135:   }
136:   return(0);
137: }

141: /*@C
142:    MatPartitioningGetType - Gets the Partitioning method type and name (as a string) 
143:         from the partitioning context.

145:    Not collective

147:    Input Parameter:
148: .  partitioning - the partitioning context

150:    Output Parameter:
151: .  type - partitioner type

153:    Level: intermediate

157: .keywords: Partitioning, get, method, name, type
158: @*/
159: PetscErrorCode MatPartitioningGetType(MatPartitioning partitioning,MatPartitioningType *type)
160: {
162:   *type = partitioning->type_name;
163:   return(0);
164: }

168: /*@C
169:    MatPartitioningSetNParts - Sets how many partitions need to be created;
170:         by default this is one per processor. Certain partitioning schemes may
171:         in fact only support that option.

173:    Not collective

175:    Input Parameters:
176: +  partitioning - the partitioning context
177: -  n - the number of partitions

179:    Level: intermediate

183: .keywords: Partitioning, set

185: .seealso: MatPartitioningCreate(), MatPartitioningApply()
186: @*/
187: PetscErrorCode MatPartitioningSetNParts(MatPartitioning part,PetscInt n)
188: {
190:   part->n = n;
191:   return(0);
192: }

196: /*@C
197:    MatPartitioningApply - Gets a partitioning for a matrix.

199:    Collective on Mat

201:    Input Parameter:
202: .  matp - the matrix partitioning object

204:    Output Parameter:
205: .   partitioning - the partitioning. For each local node this gives the processor
206:                    number to which that node is assigned.

208:    Options Database Keys:
209:    To specify the partitioning through the options database, use one of
210:    the following 
211: $    -mat_partitioning_type parmetis, -mat_partitioning_type current
212:    To see the partitioning result
213: $    -mat_partitioning_view

215:    Level: beginner

217:    The user can define additional partitionings; see MatPartitioningRegisterDynamic().

219: .keywords: matrix, get, partitioning

221: .seealso:  MatPartitioningRegisterDynamic(), MatPartitioningCreate(),
222:            MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
223:            ISPartitioningCount()
224: @*/
225: PetscErrorCode MatPartitioningApply(MatPartitioning matp,IS *partitioning)
226: {
228:   PetscTruth flag;

233:   if (!matp->adj->assembled) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
234:   if (matp->adj->factor) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
235:   if (!matp->ops->apply) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must set type with MatPartitioningSetFromOptions() or MatPartitioningSetType()");
236:   PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);
237:   (*matp->ops->apply)(matp,partitioning);
238:   PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);

240:   PetscOptionsHasName(PETSC_NULL,"-mat_partitioning_view",&flag);
241:   if (flag) {
242:     MatPartitioningView(matp,PETSC_VIEWER_STDOUT_(matp->comm));
243:     ISView(*partitioning,PETSC_VIEWER_STDOUT_(matp->comm));
244:   }
245:   return(0);
246: }
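/*
   Typical calling sequence (illustrative sketch; "adj" is assumed to be an
   assembled parallel adjacency matrix created elsewhere, for instance with
   MatCreateMPIAdj()):

     MatPartitioning part;
     IS              is;

     MatPartitioningCreate(PETSC_COMM_WORLD,&part);
     MatPartitioningSetAdjacency(part,adj);
     MatPartitioningSetFromOptions(part);
     MatPartitioningApply(part,&is);
     ...  use the index set, e.g. with ISPartitioningToNumbering()  ...
     ISDestroy(is);
     MatPartitioningDestroy(part);
*/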
247: 
250: /*@C
251:    MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the object to be
252:       partitioned.

254:    Collective on MatPartitioning and Mat

256:    Input Parameters:
257: +  part - the partitioning context
258: -  adj - the adjacency matrix

260:    Level: beginner

262: .keywords: Partitioning, adjacency

264: .seealso: MatPartitioningCreate()
265: @*/
266: PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning part,Mat adj)
267: {
271:   part->adj = adj;
272:   return(0);
273: }

277: /*@C
278:    MatPartitioningDestroy - Destroys the partitioning context.

280:    Collective on Partitioning

282:    Input Parameters:
283: .  part - the partitioning context

285:    Level: beginner

287: .keywords: Partitioning, destroy, context

289: .seealso: MatPartitioningCreate()
290: @*/
291: PetscErrorCode MatPartitioningDestroy(MatPartitioning part)
292: {

297:   if (--part->refct > 0) return(0);

299:   if (part->ops->destroy) {
300:     (*part->ops->destroy)(part);
301:   }
302:   if (part->vertex_weights){
303:     PetscFree(part->vertex_weights);
304:   }
305:   if (part->part_weights){
306:     PetscFree(part->part_weights);
307:   }
308:   PetscLogObjectDestroy(part);
309:   PetscHeaderDestroy(part);
310:   return(0);
311: }

315: /*@C
316:    MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

318:    Collective on Partitioning

320:    Input Parameters:
321: +  part - the partitioning context
322: -  weights - the weights

324:    Level: beginner

326:    Notes:
327:       The array weights is freed by PETSc so the user should not free the array. In C/C++
328:    the array must be obtained with a call to PetscMalloc(), not malloc().

330: .keywords: Partitioning, destroy, context

332: .seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetPartitionWeights()
333: @*/
334: PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part,const PetscInt weights[])
335: {


341:   if (part->vertex_weights){
342:     PetscFree(part->vertex_weights);
343:   }
344:   part->vertex_weights = (PetscInt*)weights;
345:   return(0);
346: }
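/*
   Usage sketch (illustrative; "nlocal" stands for the number of locally owned
   rows of the adjacency matrix).  Because the partitioner takes ownership of
   the array and frees it, the weights must come from PetscMalloc():

     PetscInt i,*weights;
     PetscMalloc(nlocal*sizeof(PetscInt),&weights);
     for (i=0; i<nlocal; i++) weights[i] = 1;        (uniform weights)
     MatPartitioningSetVertexWeights(part,weights);  (do not free weights afterwards)
*/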

350: /*@C
351:    MatPartitioningSetPartitionWeights - Sets the weights for each partition.

353:    Collective on Partitioning

355:    Input Parameters:
356: +  part - the partitioning context
357: -  weights - the weights

359:    Level: beginner

361:    Notes:
362:       The array weights is freed by PETSc so the user should not free the array. In C/C++
363:    the array must be obtained with a call to PetscMalloc(), not malloc().

365: .keywords: Partitioning, destroy, context

367: .seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights()
368: @*/
369: PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning part,const PetscReal weights[])
370: {


376:   if (part->part_weights){
377:     PetscFree(part->part_weights);
378:   }
379:   part->part_weights = (PetscReal*)weights;
380:   return(0);
381: }

385: /*@C
386:    MatPartitioningCreate - Creates a partitioning context.

388:    Collective on MPI_Comm

390:    Input Parameter:
391: .   comm - MPI communicator 

393:    Output Parameter:
394: .  newp - location to put the context

396:    Level: beginner

398: .keywords: Partitioning, create, context

400: .seealso: MatPartitioningSetType(), MatPartitioningApply(), MatPartitioningDestroy(),
401:           MatPartitioningSetAdjacency()

403: @*/
404: PetscErrorCode MatPartitioningCreate(MPI_Comm comm,MatPartitioning *newp)
405: {
406:   MatPartitioning part;
407:   PetscErrorCode  ierr;
408:   PetscMPIInt     size;

411:   *newp          = 0;

413:   PetscHeaderCreate(part,_p_MatPartitioning,struct _MatPartitioningOps,MAT_PARTITIONING_COOKIE,-1,"MatPartitioning",comm,MatPartitioningDestroy,
414:                     MatPartitioningView);
415:   PetscLogObjectCreate(part);
416:   part->type           = -1;
417:   part->vertex_weights = PETSC_NULL;
418:   part->part_weights   = PETSC_NULL;
419:   MPI_Comm_size(comm,&size);
420:   part->n = (PetscInt)size;

422:   *newp = part;
423:   return(0);
424: }

428: /*@C 
429:    MatPartitioningView - Prints the partitioning data structure.

431:    Collective on MatPartitioning

433:    Input Parameters:
434: +  part - the partitioning context
435: -  viewer - optional visualization context

437:    Level: intermediate

439:    Note:
440:    The available visualization contexts include
441: +     PETSC_VIEWER_STDOUT_SELF - standard output (default)
442: -     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
443:          output where only the first processor opens
444:          the file.  All other processors send their 
445:          data to the first processor to print. 

447:    The user can open alternative visualization contexts with
448: .     PetscViewerASCIIOpen() - output to a specified file

450: .keywords: Partitioning, view

452: .seealso: PetscViewerASCIIOpen()
453: @*/
454: PetscErrorCode MatPartitioningView(MatPartitioning part,PetscViewer viewer)
455: {
457:   PetscTruth          iascii;
458:   MatPartitioningType name;

462:   if (!viewer) viewer = PETSC_VIEWER_STDOUT_(part->comm);

466:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
467:   if (iascii) {
468:     MatPartitioningGetType(part,&name);
469:     PetscViewerASCIIPrintf(viewer,"MatPartitioning Object: %s\n",name);
470:     if (part->vertex_weights) {
471:       PetscViewerASCIIPrintf(viewer,"  Using vertex weights\n");
472:     }
473:   } else {
474:     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for this MatPartitioning",((PetscObject)viewer)->type_name);
475:   }

477:   if (part->ops->view) {
478:     PetscViewerASCIIPushTab(viewer);
479:     (*part->ops->view)(part,viewer);
480:     PetscViewerASCIIPopTab(viewer);
481:   }

483:   return(0);
484: }

488: /*@C
489:    MatPartitioningSetType - Sets the type of partitioner to use

491:    Collective on MatPartitioning

493:    Input Parameters:
494: +  part - the partitioning context
495: -  type - a known method

497:    Options Database Command:
498: $  -mat_partitioning_type  <type>
499: $      Use -help for a list of available methods
500: $      (for instance, parmetis)

502:    Level: intermediate

504: .keywords: partitioning, set, method, type

506: .seealso: MatPartitioningCreate(), MatPartitioningApply()

508: @*/
509: PetscErrorCode MatPartitioningSetType(MatPartitioning part,const MatPartitioningType type)
510: {
511:   PetscErrorCode ierr,(*r)(MatPartitioning);
512:   PetscTruth match;


518:   PetscTypeCompare((PetscObject)part,type,&match);
519:   if (match) return(0);

521:   if (part->setupcalled) {
522:     (*part->ops->destroy)(part);
523:     part->data        = 0;
524:     part->setupcalled = 0;
525:   }

527:   /* Get the function pointers for the method requested */
528:   if (!MatPartitioningRegisterAllCalled){ MatPartitioningRegisterAll(0);}
529:   PetscFListFind(part->comm,MatPartitioningList,type,(void (**)(void)) &r);

531:   if (!r) {SETERRQ1(PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown partitioning type %s",type);}

533:   part->ops->destroy      = (PetscErrorCode (*)(MatPartitioning)) 0;
534:   part->ops->view         = (PetscErrorCode (*)(MatPartitioning,PetscViewer)) 0;
535:   (*r)(part);

537:   PetscStrfree(part->type_name);
538:   PetscStrallocpy(type,&part->type_name);
539:   return(0);
540: }
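/*
   Usage sketch (illustrative): selecting the ParMETIS partitioner directly
   in the code,

     MatPartitioningSetType(part,MAT_PARTITIONING_PARMETIS);

   which has the same effect as the run-time option -mat_partitioning_type parmetis.
*/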

544: /*@
545:    MatPartitioningSetFromOptions - Sets various partitioning options from the 
546:         options database.

548:    Collective on MatPartitioning

550:    Input Parameter:
551: .  part - the partitioning context.

553:    Options Database Command:
554: $  -mat_partitioning_type  <type>
555: $      Use -help for a list of available methods
556: $      (for instance, parmetis)

558:    Level: beginner

560: .keywords: partitioning, set, method, type
561: @*/
562: PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning part)
563: {
565:   PetscTruth flag;
566:   char       type[256];
567:   const char *def;

570:   if (!MatPartitioningRegisterAllCalled){ MatPartitioningRegisterAll(0);}
571:   PetscOptionsBegin(part->comm,part->prefix,"Partitioning options","MatOrderings");
572:     if (!part->type_name) {
573: #if defined(PETSC_HAVE_PARMETIS)
574:       def = MAT_PARTITIONING_PARMETIS;
575: #else
576:       def = MAT_PARTITIONING_CURRENT;
577: #endif
578:     } else {
579:       def = part->type_name;
580:     }
581:     PetscOptionsList("-mat_partitioning_type","Type of partitioner","MatPartitioningSetType",MatPartitioningList,def,type,256,&flag);
582:     if (flag) {
583:       MatPartitioningSetType(part,type);
584:     }
585:     /*
586:       Set the type if it was never set.
587:     */
588:     if (!part->type_name) {
589:       MatPartitioningSetType(part,def);
590:     }

592:     if (part->ops->setfromoptions) {
593:       (*part->ops->setfromoptions)(part);
594:     }
595:   PetscOptionsEnd();
596:   return(0);
597: }
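/*
   Usage sketch (illustrative; "ex_part" is a hypothetical executable name):
   calling

     MatPartitioningSetFromOptions(part);

   before MatPartitioningApply() lets the partitioner be chosen and inspected
   at run time, for instance

     ./ex_part -mat_partitioning_type parmetis -mat_partitioning_view
*/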