Actual source code: pmetis.c

/*$Id: pmetis.c,v 1.46 2001/09/07 20:10:38 bsmith Exp $*/

#include "src/mat/impls/adj/mpi/mpiadj.h"    /*I "petscmat.h" I*/

/*
   Currently using ParMetis-2.0. The following include file has
   to be changed to par_kmetis.h for ParMetis-1.0
*/
EXTERN_C_BEGIN
#include "parmetis.h"
EXTERN_C_END

/*
      The first 5 elements of this structure are the input control array to Metis
*/
typedef struct {
  int cuts;         /* number of cuts made (output) */
  int foldfactor;   /* folding factor */
  int parallel;     /* use parallel partitioner for coarse problem */
  int indexing;     /* 0 indicates C indexing, 1 Fortran */
  int printout;     /* indicates if one wishes Metis to print info */
  MPI_Comm comm_pmetis;
} MatPartitioning_Parmetis;
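
/*
   Note: MatPartitioningApply_Parmetis() below passes this structure to
   PARKMETIS() through an (int*) cast, so cuts, foldfactor, parallel,
   indexing, and printout must remain the first five int fields, in this
   order.
*/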

/*
   Uses the ParMETIS parallel matrix partitioner to compute a partitioning of the matrix
*/
static int MatPartitioningApply_Parmetis(MatPartitioning part,IS *partitioning)
{
  int                      ierr,*locals,size,rank;
  int                      *vtxdist,*xadj,*adjncy,itmp = 0;
  Mat                      mat = part->adj,newmat;
  Mat_MPIAdj               *adj = (Mat_MPIAdj *)mat->data;
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis*)part->data;
  PetscTruth               flg;

  ierr = MPI_Comm_size(mat->comm,&size);CHKERRQ(ierr);
  /*
  if (part->n != size) {
    SETERRQ(PETSC_ERR_SUP,"Supports exactly one domain per processor");
  }
  */

  ierr = PetscTypeCompare((PetscObject)mat,MATMPIADJ,&flg);CHKERRQ(ierr);
  if (!flg) {
    ierr = MatConvert(mat,MATMPIADJ,&newmat);CHKERRQ(ierr);
    adj  = (Mat_MPIAdj *)newmat->data;
  }

  vtxdist = adj->rowners;
  xadj    = adj->i;
  adjncy  = adj->j;
  ierr = MPI_Comm_rank(part->comm,&rank);CHKERRQ(ierr);
  if (!(vtxdist[rank+1] - vtxdist[rank])) {
    SETERRQ(1,"Does not support a processor that owns no rows of the matrix");
  }
#if defined(PETSC_USE_BOPT_g)
  /* check that matrix has no diagonal entries */
  {
    int i,j,rstart;
    ierr = MatGetOwnershipRange(mat,&rstart,PETSC_NULL);CHKERRQ(ierr);
    for (i=0; i<mat->m; i++) {
      for (j=xadj[i]; j<xadj[i+1]; j++) {
        if (adjncy[j] == i+rstart) SETERRQ1(1,"Row %d has an illegal diagonal entry",i+rstart);
      }
    }
  }
#endif

  ierr = PetscMalloc((mat->m+1)*sizeof(int),&locals);CHKERRQ(ierr);

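  /* while PETSc info logging is enabled, temporarily make ParMetis print verbose output */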
  if (PetscLogPrintInfo) {itmp = parmetis->printout; parmetis->printout = 127;}
  if (part->n == size) {
    /* just for now, keep the old behaviour around as an explicit call */
    PARKMETIS(vtxdist,xadj,part->vertex_weights,adjncy,adj->values,locals,(int*)parmetis,parmetis->comm_pmetis);
  } else {
    int wgtflag=0, numflag=0, ncon=1, nparts=part->n, options[3], edgecut, i,j;
    float *tpwgts,*ubvec;
    ierr = PetscMalloc(ncon*nparts*sizeof(float),&tpwgts);CHKERRQ(ierr);
    for (i=0; i<ncon; i++) {
      for (j=0; j<nparts; j++) {
        if (part->part_weights) {
          tpwgts[i*nparts+j] = part->part_weights[i*nparts+j];
        } else {
          tpwgts[i*nparts+j] = 1./nparts;
        }
      }
    }
    ierr = PetscMalloc(ncon*sizeof(float),&ubvec);CHKERRQ(ierr);
    for (i=0; i<ncon; i++) {
      ubvec[i] = 1.05;
    }
    options[0] = 0;
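    /*
       ParMETIS_V3_PartKway() arguments (ParMetis 3.x interface): wgtflag=0
       indicates that the vertex and edge weight arrays are not used, numflag=0
       selects C-style (0-based) numbering, ncon is the number of weights per
       vertex, nparts the number of partitions requested, tpwgts gives the
       target fraction of total weight for each partition, ubvec the allowed
       load imbalance for each constraint, and options[0]=0 selects the default
       options; the number of cut edges is returned in edgecut and the
       partition assigned to each local vertex in locals.
    */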
    ParMETIS_V3_PartKway
      (vtxdist,xadj,adjncy,part->vertex_weights,adj->values,
       &wgtflag,&numflag,&ncon,&nparts,tpwgts,ubvec,
       options,&edgecut,locals,&parmetis->comm_pmetis);
    ierr = PetscFree(tpwgts);CHKERRQ(ierr);
    ierr = PetscFree(ubvec);CHKERRQ(ierr);
  }
  if (PetscLogPrintInfo) {parmetis->printout = itmp;}

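  /* wrap the computed partition number of each local row in an index set */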
  ierr = ISCreateGeneral(part->comm,mat->m,locals,partitioning);CHKERRQ(ierr);
  ierr = PetscFree(locals);CHKERRQ(ierr);

  if (!flg) {
    ierr = MatDestroy(newmat);CHKERRQ(ierr);
  }
  return(0);
}


int MatPartitioningView_Parmetis(MatPartitioning part,PetscViewer viewer)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis *)part->data;
  int                      ierr,rank;
  PetscTruth               isascii;

  ierr = MPI_Comm_rank(part->comm,&rank);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
  if (isascii) {
    if (parmetis->parallel == 2) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Using parallel coarse grid partitioner\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Using sequential coarse grid partitioner\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  Using fold factor %d\n",parmetis->foldfactor);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] Number of cuts found %d\n",rank,parmetis->cuts);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,"Viewer type %s not supported for this ParMETIS partitioner",((PetscObject)viewer)->type_name);
  }

  return(0);
}

/*@
  MatPartitioningParmetisSetCoarseSequential - Use the sequential code to
  do the partitioning of the coarse grid.

  Collective on MatPartitioning

  Input Parameter:
.  part - the partitioning context

  Level: advanced

@*/
int MatPartitioningParmetisSetCoarseSequential(MatPartitioning part)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis *)part->data;

  parmetis->parallel = 1;
  return(0);
}
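
/*
   Example usage (a sketch, not part of this file): assuming "part" is a
   MatPartitioning that has already been created, given its adjacency matrix
   with MatPartitioningSetAdjacency(), and set to the ParMETIS type, the
   sequential coarse partitioner is selected and the partitioning computed by

     ierr = MatPartitioningParmetisSetCoarseSequential(part);CHKERRQ(ierr);
     ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);

   where "is" is an IS that receives the computed partitioning.
*/
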
int MatPartitioningSetFromOptions_Parmetis(MatPartitioning part)
{
  int        ierr;
  PetscTruth flag;

  ierr = PetscOptionsHead("Set ParMETIS partitioning options");CHKERRQ(ierr);
    ierr = PetscOptionsName("-mat_partitioning_parmetis_coarse_sequential","Use sequential coarse partitioner","MatPartitioningParmetisSetCoarseSequential",&flag);CHKERRQ(ierr);
    if (flag) {
      ierr = MatPartitioningParmetisSetCoarseSequential(part);CHKERRQ(ierr);
    }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  return(0);
}
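
/*
   From the command line the same choice can be made (assuming the ParMETIS
   type has been selected, e.g. with -mat_partitioning_type parmetis) via

     -mat_partitioning_parmetis_coarse_sequential
*/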


int MatPartitioningDestroy_Parmetis(MatPartitioning part)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis *)part->data;
  int                      ierr;

  ierr = MPI_Comm_free(&(parmetis->comm_pmetis));CHKERRQ(ierr);
  ierr = PetscFree(parmetis);CHKERRQ(ierr);
  return(0);
}

EXTERN_C_BEGIN
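/*
   Constructor for the ParMETIS partitioning type; the surrounding C linkage
   wrappers let the symbol be found when the partitioner is registered and
   loaded dynamically.
*/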
int MatPartitioningCreate_Parmetis(MatPartitioning part)
{
  int                      ierr;
  MatPartitioning_Parmetis *parmetis;

  ierr = PetscNew(MatPartitioning_Parmetis,&parmetis);CHKERRQ(ierr);

  parmetis->cuts       = 0;   /* output variable */
  parmetis->foldfactor = 150; /* folding factor */
  parmetis->parallel   = 2;   /* use parallel partitioner for coarse grid */
  parmetis->indexing   = 0;   /* index numbering starts from 0 */
  parmetis->printout   = 0;   /* print no output while running */

  ierr = MPI_Comm_dup(part->comm,&(parmetis->comm_pmetis));CHKERRQ(ierr);

  part->ops->apply          = MatPartitioningApply_Parmetis;
  part->ops->view           = MatPartitioningView_Parmetis;
  part->ops->destroy        = MatPartitioningDestroy_Parmetis;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Parmetis;
  part->data                = (void*)parmetis;
  return(0);
}
EXTERN_C_END