Actual source code: scotch.c

#define PETSCMAT_DLL

#include "src/mat/impls/adj/mpi/mpiadj.h"

#ifdef PETSC_HAVE_UNISTD_H
#include <unistd.h>
#endif

#ifdef PETSC_HAVE_STDLIB_H
#include <stdlib.h>
#endif

#include "petscfix.h"

/*
   Currently using Scotch-3.4
*/

#include "scotch.h"

typedef struct {
    char arch[PETSC_MAX_PATH_LEN];
    int multilevel;
    char strategy[30];
    int global_method;          /* global method */
    int local_method;           /* local method */
    int nbvtxcoarsed;           /* number of vertices for the coarse graph */
    int map;                    /* whether to map onto archptr or just partition the graph */
    char *mesg_log;
    char host_list[PETSC_MAX_PATH_LEN];
} MatPartitioning_Scotch;

#define SIZE_LOG 10000          /* size of buffer for mesg_log */

static PetscErrorCode MatPartitioningApply_Scotch(MatPartitioning part, IS * partitioning)
{
    PetscErrorCode         ierr = 0; /* collects Scotch return codes, checked after the library calls */
    int                    *parttab, *locals = PETSC_NULL, rank, i, size;
    size_t                 j;
    Mat                    mat = part->adj, matMPI, matSeq;
    int                    nb_locals = mat->m;
    Mat_MPIAdj             *adj = (Mat_MPIAdj *) mat->data;
    MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;
    PetscTruth             flg;
#ifdef PETSC_HAVE_UNISTD_H
    int                    fd_stdout, fd_pipe[2], count, err;
#endif


    /* check if the matrix is sequential, use MatGetSubMatrices if necessary */
    MPI_Comm_size(((PetscObject)mat)->comm, &size);
    PetscTypeCompare((PetscObject) mat, MATMPIADJ, &flg);
    if (size > 1) {
        int M, N;
        IS isrow, iscol;
        Mat *A;

        if (flg) {
            SETERRQ(0, "Distributed matrix format MPIAdj is not supported for sequential partitioners");
        }
        PetscPrintf(((PetscObject)part)->comm, "Converting distributed matrix to sequential: this could be a performance loss\n");

        MatGetSize(mat, &M, &N);
        ISCreateStride(PETSC_COMM_SELF, M, 0, 1, &isrow);
        ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol);
        MatGetSubMatrices(mat, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &A);
        matSeq = *A;
        PetscFree(A);
        ISDestroy(isrow);
        ISDestroy(iscol);
    } else
        matSeq = mat;

    /* convert the matrix to MPIADJ type if necessary */
    if (!flg) {
        MatConvert(matSeq, MATMPIADJ, MAT_INITIAL_MATRIX, &matMPI);
    } else {
        matMPI = matSeq;
    }

    adj = (Mat_MPIAdj *) matMPI->data;  /* finally adj contains the adjacency graph */

    MPI_Comm_rank(((PetscObject)part)->comm, &rank);

    {
        /* definition of Scotch library arguments */
        SCOTCH_Strat stratptr;  /* scotch strategy */
        SCOTCH_Graph grafptr;   /* scotch graph */
        SCOTCH_Mapping mappptr; /* scotch mapping format */
        int vertnbr = mat->M;   /* number of vertices in full graph */
        int *verttab = adj->i;  /* start of edge list for each vertex */
        int *edgetab = adj->j;  /* edge list data */
        int edgenbr = adj->nz;  /* number of edges */
        int *velotab = NULL;    /* not used by petsc interface */
        int *vlbltab = NULL;
        int *edlotab = NULL;
        int baseval = 0;        /* 0 for C array indexing */
        int flagval = 3;        /* no edge or vertex weights (see the Scotch documentation) */
        char strategy[256];

        PetscMalloc((mat->M) * sizeof(int), &parttab);

        /* redirect output to buffer scotch -> mesg_log */
#ifdef PETSC_HAVE_UNISTD_H
        fd_stdout = dup(1);
        pipe(fd_pipe);
        close(1);
        dup2(fd_pipe[1], 1);
        PetscMalloc(SIZE_LOG * sizeof(char), &(scotch->mesg_log));
#endif

        /* library call */

        /* Construction of the scotch graph object */
        SCOTCH_graphInit(&grafptr);
        ierr = SCOTCH_graphBuild(&grafptr, vertnbr, verttab, velotab,
            vlbltab, edgenbr, edgetab, edlotab, baseval, flagval);
        if (!ierr) ierr = SCOTCH_graphCheck(&grafptr);

        /* Construction of the strategy */
        if (scotch->strategy[0] != 0) {
            PetscStrcpy(strategy, scotch->strategy);
        } else {
            PetscStrcpy(strategy, "b{strat=");

            if (scotch->multilevel) {
                /* PetscStrcat(strategy,"m{vert=");
                   sprintf(strategy+strlen(strategy),"%d",scotch->nbvtxcoarsed);
                   PetscStrcat(strategy,",asc="); */
                sprintf(strategy, "b{strat=m{vert=%d,asc=",
                    scotch->nbvtxcoarsed);
            } else
                PetscStrcpy(strategy, "b{strat=");

            switch (scotch->global_method) {
            case MP_SCOTCH_GREEDY:
                PetscStrcat(strategy, "h");
                break;
            case MP_SCOTCH_GPS:
                PetscStrcat(strategy, "g");
                break;
            case MP_SCOTCH_GR_GPS:
                PetscStrcat(strategy, "g|h");
                break;
            }

            switch (scotch->local_method) {
            case MP_SCOTCH_KERNIGHAN_LIN:
                if (scotch->multilevel)
                    PetscStrcat(strategy, ",low=f}");
                else
                    PetscStrcat(strategy, " f");
                break;
            case MP_SCOTCH_NONE:
                if (scotch->multilevel)
                    PetscStrcat(strategy, ",asc=x}");
                break;
            default:
                break;
            }

            PetscStrcat(strategy, " x}");
        }

        PetscPrintf(((PetscObject)part)->comm, "strategy=[%s]\n", strategy);

        SCOTCH_stratInit(&stratptr);
        if (!ierr) ierr = SCOTCH_stratMap(&stratptr, strategy);

        /* check for option mapping */
        if (!scotch->map) {
            if (!ierr) ierr = SCOTCH_graphPart(&grafptr, &stratptr, part->n, parttab);
            PetscPrintf(PETSC_COMM_SELF, "Simple partitioning without mapping\n");
        } else {
            SCOTCH_Graph grafarch;
            SCOTCH_Num *listtab;
            SCOTCH_Num listnbr = 0;
            SCOTCH_Arch archptr;        /* file in scotch architecture format */
            SCOTCH_Strat archstrat;
            int arch_total_size, *parttab_tmp, err;
            int cpt;
            char buf[256];
            FILE *file1, *file2;
            char host_buf[256];

            /* generate the graph that represents the arch */
            file1 = fopen(scotch->arch, "r");
            if (!file1) SETERRQ1(PETSC_ERR_FILE_OPEN, "Scotch: unable to open architecture file %s", scotch->arch);

            SCOTCH_graphInit(&grafarch);
            SCOTCH_graphLoad(&grafarch, file1, baseval, 3);

            SCOTCH_graphCheck(&grafarch);
            SCOTCH_graphSize(&grafarch, &arch_total_size, &cpt);

            err = fclose(file1);
            if (err) SETERRQ(PETSC_ERR_SYS,"fclose() failed on file");

            printf("total size = %d\n", arch_total_size);

            /* generate the list of nodes currently working */
            PetscGetHostName(host_buf, 256);
            PetscStrlen(host_buf, &j);

            file2 = fopen(scotch->host_list, "r");
            if (!file2) SETERRQ1(PETSC_ERR_FILE_OPEN, "Scotch: unable to open host list file %s", scotch->host_list);

            i = -1;
            flg = PETSC_FALSE;
            while (!feof(file2) && !flg) {
                i++;
                fgets(buf, 256, file2);
                PetscStrncmp(buf, host_buf, j, &flg);
            }
            err = fclose(file2);
            if (err) SETERRQ(PETSC_ERR_SYS,"fclose() failed on file");
            if (!flg) SETERRQ1(PETSC_ERR_LIB, "Scotch: unable to find '%s' in host list file", host_buf);

            listnbr = size;
            PetscMalloc(sizeof(SCOTCH_Num) * listnbr, &listtab);

            MPI_Allgather(&i, 1, MPI_INT, listtab, 1, MPI_INT, ((PetscObject)part)->comm);

            printf("listnbr = %d, listtab = ", listnbr);
            for (i = 0; i < listnbr; i++)
                printf("%d ", listtab[i]);

            printf("\n");
            err = fflush(stdout);
            if (err) SETERRQ(PETSC_ERR_SYS,"fflush() failed on stdout");

            SCOTCH_stratInit(&archstrat);
            SCOTCH_stratBipart(&archstrat, "fx");

            SCOTCH_archInit(&archptr);
            SCOTCH_archBuild(&archptr, &grafarch, listnbr, listtab,
                &archstrat);

            PetscMalloc((mat->M) * sizeof(int), &parttab_tmp);
            SCOTCH_mapInit(&mappptr, &grafptr, &archptr, parttab_tmp);

            if (!ierr) ierr = SCOTCH_mapCompute(&mappptr, &stratptr);

            SCOTCH_mapView(&mappptr, stdout);

            /* copy into the real parttab at the right place, because the
               rank order differs from the position in the arch graph */
            for (i = 0; i < mat->M; i++) {
                parttab[i] = parttab_tmp[i];
            }
            PetscFree(parttab_tmp); /* free the temporary table (previously leaked) */

            PetscFree(listtab);
            SCOTCH_archExit(&archptr);
            SCOTCH_mapExit(&mappptr);
            SCOTCH_stratExit(&archstrat);
        }

        /* dump to mesg_log... */
#ifdef PETSC_HAVE_UNISTD_H
        err = fflush(stdout);
        if (err) SETERRQ(PETSC_ERR_SYS,"fflush() failed on stdout");

        count = read(fd_pipe[0], scotch->mesg_log, (SIZE_LOG - 1) * sizeof(char));
        if (count < 0)
            count = 0;
        scotch->mesg_log[count] = 0;
        close(1);
        dup2(fd_stdout, 1);
        close(fd_stdout);
        close(fd_pipe[0]);
        close(fd_pipe[1]);
#endif

        SCOTCH_graphExit(&grafptr);
        SCOTCH_stratExit(&stratptr);
    }

    if (ierr)
        SETERRQ(PETSC_ERR_LIB, scotch->mesg_log);

    /* Creation of the index set */

    MPI_Comm_rank(((PetscObject)part)->comm, &rank);
    MPI_Comm_size(((PetscObject)part)->comm, &size);
    nb_locals = mat->M / size;
    locals = parttab + rank * nb_locals;
    if (rank < mat->M % size) {
        nb_locals++;
        locals += rank;
    } else
        locals += mat->M % size;
    ISCreateGeneral(((PetscObject)part)->comm, nb_locals, locals, partitioning);

    /* destroying old objects */
    PetscFree(parttab);
    if (matSeq != mat) {
        MatDestroy(matSeq);
    }
    if (matMPI != mat) {
        MatDestroy(matMPI);
    }

    return(0);
}
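
/*
   Illustrative sketch (not part of the original file) of the stdout-capture
   idiom used in MatPartitioningApply_Scotch() above: stdout is swapped for a
   pipe around a chatty library call, then the pipe is drained into a buffer.
   The helper name capture_stdout is hypothetical; POSIX is assumed, and the
   output must be nonempty and smaller than the pipe buffer, as is the case
   in the code above.

       #include <stdio.h>
       #include <unistd.h>

       static void capture_stdout(void (*chatty)(void), char *buf, size_t len)
       {
           int     fd_stdout = dup(1), fd_pipe[2];
           ssize_t count;

           pipe(fd_pipe);
           dup2(fd_pipe[1], 1);                // stdout now feeds the pipe
           chatty();                           // everything printed goes to the pipe
           fflush(stdout);
           count = read(fd_pipe[0], buf, len - 1);
           buf[count < 0 ? 0 : count] = 0;     // NUL-terminate the captured text
           dup2(fd_stdout, 1);                 // restore the original stdout
           close(fd_stdout);
           close(fd_pipe[0]);
           close(fd_pipe[1]);
       }
*/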


PetscErrorCode MatPartitioningView_Scotch(MatPartitioning part, PetscViewer viewer)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;
  PetscErrorCode         ierr;
  PetscMPIInt            rank;
  PetscTruth             iascii;

  MPI_Comm_rank(((PetscObject)part)->comm, &rank);
  PetscTypeCompare((PetscObject) viewer, PETSC_VIEWER_ASCII, &iascii);
  if (iascii) {
    if (!rank && scotch->mesg_log) {
      ierr = PetscViewerASCIIPrintf(viewer, "%s\n", scotch->mesg_log);CHKERRQ(ierr);
    }
  } else {
    SETERRQ1(PETSC_ERR_SUP, "Viewer type %s not supported for this Scotch partitioner",((PetscObject)viewer)->type_name);
  }
  return(0);
}

/*@
     MatPartitioningScotchSetGlobal - Set the method for global partitioning.

  Input Parameters:
+  part - the partitioning context
-  global - MP_SCOTCH_GREEDY, MP_SCOTCH_GPS or MP_SCOTCH_GR_GPS (the combination of the two)

   Level: advanced

@*/
PetscErrorCode  MatPartitioningScotchSetGlobal(MatPartitioning part,
    MPScotchGlobalType global)
{
    MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

    switch (global) {
    case MP_SCOTCH_GREEDY:
    case MP_SCOTCH_GPS:
    case MP_SCOTCH_GR_GPS:
        scotch->global_method = global;
        break;
    default:
        SETERRQ(PETSC_ERR_SUP, "Scotch: Unknown or unsupported option");
    }

    return(0);
}
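
/*
   Usage sketch (illustrative, assuming part is a partitioning context of the
   Scotch type): select the GPS global method,

       MatPartitioningScotchSetGlobal(part, MP_SCOTCH_GPS);
*/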

/*@
    MatPartitioningScotchSetCoarseLevel - Set the coarse level.

  Input Parameters:
+  part - the partitioning context
-  level - the coarse level in range [0.0,1.0]

   Level: advanced

@*/
PetscErrorCode  MatPartitioningScotchSetCoarseLevel(MatPartitioning part, PetscReal level)
{
    MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

    if (level < 0 || level > 1.0) {
        SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,
            "Scotch: level of coarsening out of range [0.0-1.0]");
    } else
        scotch->nbvtxcoarsed = (int)(part->adj->N * level);

    if (scotch->nbvtxcoarsed < 20)
        scotch->nbvtxcoarsed = 20;

    return(0);
}
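
/*
   Worked example (illustrative): with a 10000-vertex adjacency graph,

       MatPartitioningScotchSetCoarseLevel(part, 0.05);

   gives nbvtxcoarsed = (int)(10000 * 0.05) = 500 coarse vertices; any result
   below 20 is clamped to 20.
*/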

/*@C
    MatPartitioningScotchSetStrategy - Set the strategy to be used by Scotch.
    This is an alternative way of specifying the global method, the local
    method, the coarse level and the multilevel option.

  Input Parameters:
+  part - the partitioning context
-  strat - the strategy in Scotch format; see the Scotch documentation

   Level: advanced

.seealso: MatPartitioningScotchSetGlobal(), MatPartitioningScotchSetLocal(), MatPartitioningScotchSetCoarseLevel(), MatPartitioningScotchSetMultilevel()
@*/
PetscErrorCode  MatPartitioningScotchSetStrategy(MatPartitioning part, char *strat)
{
    MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

    /* strategy[] is only 30 bytes long, so guard against overflow */
    PetscStrncpy(scotch->strategy, strat, sizeof(scotch->strategy));
    return(0);
}
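
/*
   Usage sketch (illustrative): this strategy string reproduces what
   MatPartitioningApply_Scotch() builds for the defaults (greedy|GPS global
   method, Kernighan-Lin local method, no multilevel):

       MatPartitioningScotchSetStrategy(part, "b{strat=g|h f x}");
*/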


/*@
     MatPartitioningScotchSetLocal - Set the method for local partitioning.

  Input Parameters:
+  part - the partitioning context
-  local - MP_SCOTCH_KERNIGHAN_LIN or MP_SCOTCH_NONE

   Level: advanced

@*/
PetscErrorCode  MatPartitioningScotchSetLocal(MatPartitioning part, MPScotchLocalType local)
{
    MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

    switch (local) {
    case MP_SCOTCH_KERNIGHAN_LIN:
    case MP_SCOTCH_NONE:
        scotch->local_method = local;
        break;
    default:
        SETERRQ(PETSC_ERR_ARG_CORRUPT, "Scotch: Unknown or unsupported option");
    }

    return(0);
}
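
/*
   Usage sketch (illustrative, assuming part is a Scotch partitioning
   context): turn local refinement off,

       MatPartitioningScotchSetLocal(part, MP_SCOTCH_NONE);
*/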

/*@C
     MatPartitioningScotchSetArch - Specify the file that describes the
     architecture used for mapping. The format of this file is documented in
     the Scotch manual.

  Input Parameters:
+  part - the partitioning context
-  filename - the name of the file

   Level: advanced

  Note:
  If the name is not set, then the default "archgraph.src" is used.

.seealso: MatPartitioningScotchSetHostList(), MatPartitioningScotchSetMapping()
@*/
PetscErrorCode  MatPartitioningScotchSetArch(MatPartitioning part, const char *filename)
{
    MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

    PetscStrcpy(scotch->arch, filename);

    return(0);
}
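
/*
   Usage sketch (illustrative; the file name is hypothetical):

       MatPartitioningScotchSetArch(part, "cluster.arch");
*/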

/*@C
     MatPartitioningScotchSetHostList - Specify the host list file for mapping.

  Input Parameters:
+  part - the partitioning context
-  filename - the name of the file

   Level: advanced

  Notes:
  The file must consist of a list of hostnames (one per line). These hosts
  are the ones referred to in the architecture file (see
  MatPartitioningScotchSetArch()): the first host corresponds to index 0,
  the second one to index 1, and so on.

  If the name is not set, then the default "host_list" is used.

.seealso: MatPartitioningScotchSetArch(), MatPartitioningScotchSetMapping()
@*/
PetscErrorCode  MatPartitioningScotchSetHostList(MatPartitioning part, const char *filename)
{
    MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

    PetscStrcpy(scotch->host_list, filename);

    return(0);
}
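
/*
   Illustrative host list file in the format described above (hypothetical
   host names; the host on line i has index i in the architecture file):

       node0
       node1
       node2
       node3
*/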

/*@
     MatPartitioningScotchSetMultilevel - Activates multilevel partitioning.

  Input Parameter:
.  part - the partitioning context

   Level: advanced

@*/
PetscErrorCode  MatPartitioningScotchSetMultilevel(MatPartitioning part)
{
    MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

    scotch->multilevel = 1;

    return(0);
}


/*@
     MatPartitioningScotchSetMapping - Activates architecture mapping for the
     partitioning algorithm. Architecture mapping tries to enhance the quality
     of partitioning by using network topology information.

  Input Parameter:
.  part - the partitioning context

   Level: advanced

.seealso: MatPartitioningScotchSetArch(), MatPartitioningScotchSetHostList()
@*/
PetscErrorCode  MatPartitioningScotchSetMapping(MatPartitioning part)
{
    MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

    scotch->map = 1;

    return(0);
}
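
/*
   Usage sketch (illustrative; file names are hypothetical): mapping only has
   an effect together with an architecture file and a host list,

       MatPartitioningScotchSetArch(part, "cluster.arch");
       MatPartitioningScotchSetHostList(part, "cluster.hosts");
       MatPartitioningScotchSetMapping(part);
*/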

PetscErrorCode MatPartitioningSetFromOptions_Scotch(MatPartitioning part)
{
    PetscTruth flag;
    char name[PETSC_MAX_PATH_LEN];
    int i;
    PetscReal r;

    const char *global[] = { "greedy", "gps", "gr_gps" };
    const char *local[] = { "kernighan-lin", "none" };

    PetscOptionsHead("Set Scotch partitioning options");

    PetscOptionsEList("-mat_partitioning_scotch_global",
        "Global method to use", "MatPartitioningScotchSetGlobal", global, 3,
        global[0], &i, &flag);
    if (flag)
        MatPartitioningScotchSetGlobal(part, (MPScotchGlobalType)i);

    PetscOptionsEList("-mat_partitioning_scotch_local",
        "Local method to use", "MatPartitioningScotchSetLocal", local, 2,
        local[0], &i, &flag);
    if (flag)
        MatPartitioningScotchSetLocal(part, (MPScotchLocalType)i);

    PetscOptionsName("-mat_partitioning_scotch_mapping", "Use mapping",
        "MatPartitioningScotchSetMapping", &flag);
    if (flag)
        MatPartitioningScotchSetMapping(part);

    PetscOptionsString("-mat_partitioning_scotch_arch",
        "architecture file in scotch format", "MatPartitioningScotchSetArch",
        "archgraph.src", name, PETSC_MAX_PATH_LEN, &flag);
    if (flag)
        MatPartitioningScotchSetArch(part, name);

    PetscOptionsString("-mat_partitioning_scotch_hosts",
        "host list filename", "MatPartitioningScotchSetHostList",
        "host_list", name, PETSC_MAX_PATH_LEN, &flag);
    if (flag)
        MatPartitioningScotchSetHostList(part, name);

    PetscOptionsReal("-mat_partitioning_scotch_coarse_level",
        "coarse level", "MatPartitioningScotchSetCoarseLevel", 0, &r,
        &flag);
    if (flag)
        MatPartitioningScotchSetCoarseLevel(part, r);

    PetscOptionsName("-mat_partitioning_scotch_mul", "Use coarse level",
        "MatPartitioningScotchSetMultilevel", &flag);
    if (flag)
        MatPartitioningScotchSetMultilevel(part);

    PetscOptionsString("-mat_partitioning_scotch_strategy",
        "Scotch strategy string",
        "MatPartitioningScotchSetStrategy", "", name, PETSC_MAX_PATH_LEN,
        &flag);
    if (flag)
        MatPartitioningScotchSetStrategy(part, name);

    PetscOptionsTail();
    return(0);
}
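
/*
   Illustrative command line exercising the options above (the executable
   name is hypothetical, and -mat_partitioning_type is assumed to select
   this partitioner):

       mpirun -np 4 ./ex -mat_partitioning_type scotch \
           -mat_partitioning_scotch_global gps \
           -mat_partitioning_scotch_local none \
           -mat_partitioning_scotch_coarse_level 0.1 \
           -mat_partitioning_scotch_mul
*/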

PetscErrorCode MatPartitioningDestroy_Scotch(MatPartitioning part)
{
    MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;
    PetscErrorCode         ierr;

    ierr = PetscFree(scotch->mesg_log);CHKERRQ(ierr);
    ierr = PetscFree(scotch);CHKERRQ(ierr);
    return(0);
}


/*MC
   MAT_PARTITIONING_SCOTCH - Creates a partitioning context via the external package SCOTCH.

   Collective on MPI_Comm

   Input Parameter:
.  part - the partitioning context

   Options Database Keys:
+  -mat_partitioning_scotch_global <greedy> (one of) greedy gps gr_gps
.  -mat_partitioning_scotch_local <kernighan-lin> (one of) kernighan-lin none
.  -mat_partitioning_scotch_mapping: Use mapping (MatPartitioningScotchSetMapping)
.  -mat_partitioning_scotch_arch <archgraph.src>: architecture file in scotch format (MatPartitioningScotchSetArch)
.  -mat_partitioning_scotch_hosts <host_list>: host list filename (MatPartitioningScotchSetHostList)
.  -mat_partitioning_scotch_coarse_level <0>: coarse level (MatPartitioningScotchSetCoarseLevel)
.  -mat_partitioning_scotch_mul: Use coarse level (MatPartitioningScotchSetMultilevel)
-  -mat_partitioning_scotch_strategy <>: Scotch strategy string (MatPartitioningScotchSetStrategy)

   Level: beginner

   Notes: See http://www.labri.fr/Perso/~pelegrin/scotch/

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningType

M*/
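
/*
   Minimal usage sketch (illustrative, assuming an existing adjacency matrix
   A on communicator comm; error checking omitted):

       MatPartitioning part;
       IS              is;

       MatPartitioningCreate(comm, &part);
       MatPartitioningSetAdjacency(part, A);
       MatPartitioningSetType(part, MAT_PARTITIONING_SCOTCH);
       MatPartitioningSetFromOptions(part);
       MatPartitioningApply(part, &is);
       ISView(is, PETSC_VIEWER_STDOUT_WORLD);
       ISDestroy(is);
       MatPartitioningDestroy(part);
*/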

PetscErrorCode  MatPartitioningCreate_Scotch(MatPartitioning part)
{
    MatPartitioning_Scotch *scotch;

    PetscNewLog(part, MatPartitioning_Scotch, &scotch);
    part->data = (void*) scotch;

    scotch->map = 0;
    scotch->global_method = MP_SCOTCH_GR_GPS;
    scotch->local_method = MP_SCOTCH_KERNIGHAN_LIN;
    PetscStrcpy(scotch->arch, "archgraph.src");
    scotch->nbvtxcoarsed = 200;
    PetscStrcpy(scotch->strategy, "");
    scotch->multilevel = 0;
    scotch->mesg_log = NULL;

    PetscStrcpy(scotch->host_list, "host_list");

    part->ops->apply = MatPartitioningApply_Scotch;
    part->ops->view = MatPartitioningView_Scotch;
    part->ops->destroy = MatPartitioningDestroy_Scotch;
    part->ops->setfromoptions = MatPartitioningSetFromOptions_Scotch;

    return(0);
}