Actual source code: pmetis.c

/*$Id: pmetis.c,v 1.46 2001/09/07 20:10:38 bsmith Exp $*/

#include "src/mat/impls/adj/mpi/mpiadj.h"    /*I "petscmat.h" I*/

/*
   Currently using ParMetis-2.0. The following include file has
   to be changed to par_kmetis.h for ParMetis-1.0
*/
EXTERN_C_BEGIN
#include "parmetis.h"
EXTERN_C_END

/*
      The first 5 elements of this structure are the input control array to Metis
*/
typedef struct {
  int cuts;         /* number of cuts made (output) */
  int foldfactor;
  int parallel;     /* use parallel partitioner for coarse problem */
  int indexing;     /* 0 indicates C indexing, 1 Fortran */
  int printout;     /* indicates if one wishes Metis to print info */
} MatPartitioning_Parmetis;
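/*
   Note (a sketch of the layout, not asserted by the ParMETIS documentation):
   the structure above is handed to PARKMETIS cast to (int*), so ParMETIS
   sees the five fields as a plain option array in declaration order:

      int *options = (int*)parmetis;
      options[0]   cuts        (number of edge cuts, written back as output)
      options[1]   foldfactor
      options[2]   parallel
      options[3]   indexing
      options[4]   printout
*/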

/*
   Uses the ParMETIS parallel matrix partitioner to partition the matrix in parallel
*/
#undef __FUNCT__
#define __FUNCT__ "MatPartitioningApply_Parmetis"
static int MatPartitioningApply_Parmetis(MatPartitioning part,IS *partitioning)
{
  int                      ierr,*locals,size,rank;
  int                      *vtxdist,*xadj,*adjncy,itmp = 0;
  Mat                      mat = part->adj,newmat;
  Mat_MPIAdj               *adj = (Mat_MPIAdj *)mat->data;
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis*)part->data;
  PetscTruth               flg;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(mat->comm,&size);CHKERRQ(ierr);
  if (part->n != size) {
    SETERRQ(PETSC_ERR_SUP,"Supports exactly one domain per processor");
  }

  ierr = PetscTypeCompare((PetscObject)mat,MATMPIADJ,&flg);CHKERRQ(ierr);
  if (!flg) {
    ierr = MatConvert(mat,MATMPIADJ,&newmat);CHKERRQ(ierr);
    adj  = (Mat_MPIAdj *)newmat->data;
  }

  vtxdist = adj->rowners;
  xadj    = adj->i;
  adjncy  = adj->j;
  ierr    = MPI_Comm_rank(part->comm,&rank);CHKERRQ(ierr);
  if (!(vtxdist[rank+1] - vtxdist[rank])) {
    SETERRQ(1,"Does not support any processor with no entries");
  }
#if defined(PETSC_USE_BOPT_g)
  /* check that the matrix has no diagonal entries */
  {
    int i,j,rstart;
    ierr = MatGetOwnershipRange(mat,&rstart,PETSC_NULL);CHKERRQ(ierr);
    for (i=0; i<mat->m; i++) {
      for (j=xadj[i]; j<xadj[i+1]; j++) {
        if (adjncy[j] == i+rstart) SETERRQ1(1,"Row %d has illegal diagonal entry",i+rstart);
      }
    }
  }
#endif

  ierr = PetscMalloc((mat->m+1)*sizeof(int),&locals);CHKERRQ(ierr);

  if (PetscLogPrintInfo) {itmp = parmetis->printout; parmetis->printout = 127;}
  PARKMETIS(vtxdist,xadj,part->vertex_weights,adjncy,adj->values,locals,(int*)parmetis,part->comm);
  if (PetscLogPrintInfo) {parmetis->printout = itmp;}

  ierr = ISCreateGeneral(part->comm,mat->m,locals,partitioning);CHKERRQ(ierr);
  ierr = PetscFree(locals);CHKERRQ(ierr);

  if (!flg) {
    ierr = MatDestroy(newmat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
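/*
   Example (a sketch, not part of this file): the routine above is not called
   directly; it runs when MatPartitioningApply() is invoked on a context
   whose type has been set to this ParMETIS implementation:

      Mat             adj;    (an adjacency matrix, ideally of type MATMPIADJ)
      MatPartitioning part;
      IS              is;
      int             ierr;

      ierr = MatPartitioningCreate(PETSC_COMM_WORLD,&part);CHKERRQ(ierr);
      ierr = MatPartitioningSetAdjacency(part,adj);CHKERRQ(ierr);
      ierr = MatPartitioningSetType(part,MATPARTITIONING_PARMETIS);CHKERRQ(ierr);
      ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
      ierr = ISDestroy(is);CHKERRQ(ierr);
      ierr = MatPartitioningDestroy(part);CHKERRQ(ierr);
*/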


#undef __FUNCT__
#define __FUNCT__ "MatPartitioningView_Parmetis"
int MatPartitioningView_Parmetis(MatPartitioning part,PetscViewer viewer)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis *)part->data;
  int                      ierr,rank;
  PetscTruth               isascii;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(part->comm,&rank);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
  if (isascii) {
    if (parmetis->parallel == 2) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Using parallel coarse grid partitioner\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Using sequential coarse grid partitioner\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  Using %d fold factor\n",parmetis->foldfactor);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d]Number of cuts found %d\n",rank,parmetis->cuts);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,"Viewer type %s not supported for this Parmetis partitioner",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}
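/*
   Example (a sketch): the viewer above is normally reached through the
   generic entry point, e.g.

      ierr = MatPartitioningView(part,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
*/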

#undef __FUNCT__
#define __FUNCT__ "MatPartitioningParmetisSetCoarseSequential"
/*@
     MatPartitioningParmetisSetCoarseSequential - Use the sequential code to
         do the partitioning of the coarse grid.

  Collective on MatPartitioning

  Input Parameter:
.  part - the partitioning context

   Level: advanced

@*/
int MatPartitioningParmetisSetCoarseSequential(MatPartitioning part)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis *)part->data;

  PetscFunctionBegin;
  parmetis->parallel = 1;
  PetscFunctionReturn(0);
}
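/*
   Example (a sketch): from application code, call this after the type has
   been set, e.g.

      ierr = MatPartitioningSetType(part,MATPARTITIONING_PARMETIS);CHKERRQ(ierr);
      ierr = MatPartitioningParmetisSetCoarseSequential(part);CHKERRQ(ierr);

   or use the runtime option registered below,
   -mat_partitioning_parmetis_coarse_sequential
*/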
#undef __FUNCT__
#define __FUNCT__ "MatPartitioningSetFromOptions_Parmetis"
int MatPartitioningSetFromOptions_Parmetis(MatPartitioning part)
{
  int        ierr;
  PetscTruth flag;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Set ParMeTiS partitioning options");CHKERRQ(ierr);
    ierr = PetscOptionsName("-mat_partitioning_parmetis_coarse_sequential","Use sequential coarse partitioner","MatPartitioningParmetisSetCoarseSequential",&flag);CHKERRQ(ierr);
    if (flag) {
      ierr = MatPartitioningParmetisSetCoarseSequential(part);CHKERRQ(ierr);
    }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
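/*
   Example (a sketch): provided the application calls
   MatPartitioningSetFromOptions(part), the choice can be made on the
   command line, e.g.

      mpirun -np 4 ./app -mat_partitioning_type parmetis
                         -mat_partitioning_parmetis_coarse_sequential
*/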


#undef __FUNCT__
#define __FUNCT__ "MatPartitioningDestroy_Parmetis"
int MatPartitioningDestroy_Parmetis(MatPartitioning part)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis *)part->data;
  int                      ierr;

  PetscFunctionBegin;
  ierr = PetscFree(parmetis);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatPartitioningCreate_Parmetis"
int MatPartitioningCreate_Parmetis(MatPartitioning part)
{
  int                      ierr;
  MatPartitioning_Parmetis *parmetis;

  PetscFunctionBegin;
  ierr = PetscNew(MatPartitioning_Parmetis,&parmetis);CHKERRQ(ierr);

  parmetis->cuts       = 0;   /* output variable */
  parmetis->foldfactor = 150; /* folding factor */
  parmetis->parallel   = 2;   /* use parallel partitioner for coarse grid */
  parmetis->indexing   = 0;   /* index numbering starts from 0 */
  parmetis->printout   = 0;   /* print no output while running */

  part->ops->apply          = MatPartitioningApply_Parmetis;
  part->ops->view           = MatPartitioningView_Parmetis;
  part->ops->destroy        = MatPartitioningDestroy_Parmetis;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Parmetis;
  part->data                = (void*)parmetis;
  PetscFunctionReturn(0);
}
EXTERN_C_END
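/*
   Example (a sketch): constructing a suitable input matrix. The apply
   routine above expects a MATMPIADJ matrix (anything else is converted),
   and in debugging mode it rejects diagonal entries. MatCreateMPIAdj()
   keeps the ia/ja arrays rather than copying them (check its man page),
   so they are allocated with PetscMalloc() here; N stands for the global
   number of vertices:

      int *ia,*ja,ierr;
      Mat adj;
      ierr = PetscMalloc(3*sizeof(int),&ia);CHKERRQ(ierr);
      ierr = PetscMalloc(4*sizeof(int),&ja);CHKERRQ(ierr);
      ia[0] = 0; ia[1] = 2; ia[2] = 4;              (two local rows)
      ja[0] = 1; ja[1] = 3; ja[2] = 0; ja[3] = 2;   (no diagonal entries)
      ierr = MatCreateMPIAdj(PETSC_COMM_WORLD,2,N,ia,ja,PETSC_NULL,&adj);CHKERRQ(ierr);
*/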