Actual source code: scotch.c
#include "src/mat/impls/adj/mpi/mpiadj.h" /*I "petscmat.h" I*/

#ifdef PETSC_HAVE_UNISTD_H
#include <unistd.h>
#endif

#ifdef PETSC_HAVE_STDLIB_H
#include <stdlib.h>
#endif

#include "petscfix.h"

/*
   Currently using Scotch-3.4
*/

#include "scotch.h"

typedef struct {
  char arch[PETSC_MAX_PATH_LEN];
  int multilevel;
  char strategy[30];
  int global_method;            /* global method */
  int local_method;             /* local method */
  int nbvtxcoarsed;             /* number of vertices for the coarse graph */
  int map;                      /* whether to map onto archptr or just partition the graph */
  char *mesg_log;
  char host_list[PETSC_MAX_PATH_LEN];
} MatPartitioning_Scotch;

#define SIZE_LOG 10000          /* size of buffer for mesg_log */
static PetscErrorCode MatPartitioningApply_Scotch(MatPartitioning part, IS *partitioning)
{
  int ierr = 0;                 /* collects the return status of the SCOTCH calls below */
  int *parttab, *locals = PETSC_NULL, rank, i, size;
  size_t j;
  Mat mat = part->adj, matMPI, matSeq;
  int nb_locals = mat->m;
  Mat_MPIAdj *adj = (Mat_MPIAdj *) mat->data;
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;
  PetscTruth flg;
#ifdef PETSC_HAVE_UNISTD_H
  int fd_stdout, fd_pipe[2], count;
#endif
  /* check if the matrix is sequential; use MatGetSubMatrices if necessary */
  MPI_Comm_size(mat->comm, &size);
  PetscTypeCompare((PetscObject) mat, MATMPIADJ, &flg);
  if (size > 1) {
    int M, N;
    IS isrow, iscol;
    Mat *A;

    if (flg) {
      SETERRQ(PETSC_ERR_SUP, "Distributed matrix format MPIAdj is not supported for sequential partitioners");
    }
    PetscPrintf(part->comm, "Converting distributed matrix to sequential: this could be a performance loss\n");
    MatGetSize(mat, &M, &N);
    ISCreateStride(PETSC_COMM_SELF, M, 0, 1, &isrow);
    ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol);
    MatGetSubMatrices(mat, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &A);
    ISDestroy(isrow);
    ISDestroy(iscol);
    matSeq = *A;
  } else
    matSeq = mat;

  /* convert the matrix to MPIADJ type if necessary */
  if (!flg) {
    MatConvert(matSeq, MATMPIADJ, &matMPI);
  } else
    matMPI = matSeq;

  adj = (Mat_MPIAdj *) matMPI->data;    /* finally adj contains the adjacency graph */

  MPI_Comm_rank(part->comm, &rank);
  {
    /* definition of Scotch library arguments */
    SCOTCH_Strat stratptr;      /* scotch strategy */
    SCOTCH_Graph grafptr;       /* scotch graph */
    SCOTCH_Mapping mappptr;     /* scotch mapping format */
    int vertnbr = mat->M;       /* number of vertices in full graph */
    int *verttab = adj->i;      /* start of edge list for each vertex */
    int *edgetab = adj->j;      /* edge list data */
    int edgenbr = adj->nz;      /* number of edges */
    int *velotab = NULL;        /* not used by the petsc interface */
    int *vlbltab = NULL;
    int *edlotab = NULL;
    int baseval = 0;            /* 0 for C array indexing */
    int flagval = 3;            /* (cf. scotch doc: no edge or vertex weights) */
    char strategy[256];

    PetscMalloc((mat->M) * sizeof(int), &parttab);

    /* redirect scotch output into the buffer mesg_log */
#ifdef PETSC_HAVE_UNISTD_H
    fd_stdout = dup(1);
    pipe(fd_pipe);
    close(1);
    dup2(fd_pipe[1], 1);
    PetscMalloc(SIZE_LOG * sizeof(char), &(scotch->mesg_log));
#endif
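    /* Note on the redirection above: dup(1) saves the original stdout, then
       the pipe's write end is installed as file descriptor 1, so everything
       the Scotch library prints is collected in the pipe. The matching block
       near the end of this function reads it back into scotch->mesg_log and
       restores the original stdout. */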
    /* library call */

    /* construction of the scotch graph object; the return status of the
       SCOTCH calls is accumulated in ierr and checked after this block */
    SCOTCH_graphInit(&grafptr);
    ierr |= SCOTCH_graphBuild(&grafptr, vertnbr, verttab, velotab,
                              vlbltab, edgenbr, edgetab, edlotab, baseval, flagval);
    ierr |= SCOTCH_graphCheck(&grafptr);
    /* construction of the strategy */
    if (scotch->strategy[0] != 0)       /* strcmp(scotch->strategy,"") */
      PetscStrcpy(strategy, scotch->strategy);
    else {
      PetscStrcpy(strategy, "b{strat=");

      if (scotch->multilevel) {
        /* PetscStrcat(strategy,"m{vert=");
           sprintf(strategy+strlen(strategy),"%d",scotch->nbvtxcoarsed);
           PetscStrcat(strategy,",asc="); */
        sprintf(strategy, "b{strat=m{vert=%d,asc=", scotch->nbvtxcoarsed);
      } else
        PetscStrcpy(strategy, "b{strat=");

      switch (scotch->global_method) {
      case MP_SCOTCH_GREEDY:
        PetscStrcat(strategy, "h");
        break;
      case MP_SCOTCH_GPS:
        PetscStrcat(strategy, "g");
        break;
      case MP_SCOTCH_GR_GPS:
        PetscStrcat(strategy, "g|h");
      }

      switch (scotch->local_method) {
      case MP_SCOTCH_KERNIGHAN_LIN:
        if (scotch->multilevel)
          PetscStrcat(strategy, ",low=f}");
        else
          PetscStrcat(strategy, " f");
        break;
      case MP_SCOTCH_NONE:
        if (scotch->multilevel)
          PetscStrcat(strategy, ",asc=x}");
        /* fall through */
      default:
        break;
      }

      PetscStrcat(strategy, " x}");
    }

    PetscPrintf(part->comm, "strategy=[%s]\n", strategy);
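    /* For illustration: with the defaults (global method MP_SCOTCH_GR_GPS,
       local method MP_SCOTCH_KERNIGHAN_LIN, no multilevel) the code above
       builds "b{strat=g|h f x}"; with multilevel enabled and
       nbvtxcoarsed=200 it builds "b{strat=m{vert=200,asc=g|h,low=f} x}". */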
    SCOTCH_stratInit(&stratptr);
    ierr |= SCOTCH_stratMap(&stratptr, strategy);

    /* check for the mapping option */
    if (!scotch->map) {
      ierr |= SCOTCH_graphPart(&grafptr, &stratptr, part->n, parttab);
      PetscPrintf(PETSC_COMM_SELF, "Simple partitioning without mapping\n");
    } else {
      SCOTCH_Graph grafarch;
      SCOTCH_Num *listtab;
      SCOTCH_Num listnbr = 0;
      SCOTCH_Arch archptr;      /* file in scotch architecture format */
      SCOTCH_Strat archstrat;
      int arch_total_size, *parttab_tmp;
      int cpt;
      char buf[256];
      FILE *file1, *file2;
      char host_buf[256];

      /* generate the graph that represents the arch */
      file1 = fopen(scotch->arch, "r");
      if (!file1)
        SETERRQ1(PETSC_ERR_FILE_OPEN, "Scotch: unable to open architecture file %s", scotch->arch);

      SCOTCH_graphInit(&grafarch);
      SCOTCH_graphLoad(&grafarch, file1, baseval, 3);

      SCOTCH_graphCheck(&grafarch);
      SCOTCH_graphSize(&grafarch, &arch_total_size, &cpt);

      fclose(file1);
      printf("total size = %d\n", arch_total_size);

      /* generate the list of nodes currently working */
      PetscGetHostName(host_buf, 256);
      PetscStrlen(host_buf, &j);

      file2 = fopen(scotch->host_list, "r");
      if (!file2)
        SETERRQ1(PETSC_ERR_FILE_OPEN, "Scotch: unable to open host list file %s", scotch->host_list);

      i = -1;
      flg = PETSC_FALSE;
      while (!feof(file2) && !flg) {
        i++;
        fgets(buf, 256, file2);
        PetscStrncmp(buf, host_buf, j, &flg);
      }
      fclose(file2);
      if (!flg) {
        SETERRQ1(PETSC_ERR_LIB, "Scotch: unable to find '%s' in host list file", host_buf);
      }

      listnbr = size;
      PetscMalloc(sizeof(SCOTCH_Num) * listnbr, &listtab);

      MPI_Allgather(&i, 1, MPI_INT, listtab, 1, MPI_INT, part->comm);

      printf("listnbr = %d, listtab = ", listnbr);
      for (i = 0; i < listnbr; i++)
        printf("%d ", listtab[i]);
      printf("\n");
      fflush(stdout);

      SCOTCH_stratInit(&archstrat);
      SCOTCH_stratBipart(&archstrat, "fx");

      SCOTCH_archInit(&archptr);
      SCOTCH_archBuild(&archptr, &grafarch, listnbr, listtab, &archstrat);

      PetscMalloc((mat->M) * sizeof(int), &parttab_tmp);
      SCOTCH_mapInit(&mappptr, &grafptr, &archptr, parttab_tmp);

      ierr |= SCOTCH_mapCompute(&mappptr, &stratptr);

      SCOTCH_mapView(&mappptr, stdout);

      /* copy the result into the real parttab: the rank order may differ
         from the position in the arch graph */
      for (i = 0; i < mat->M; i++) {
        parttab[i] = parttab_tmp[i];
      }

      PetscFree(parttab_tmp);   /* release the temporary partition table */
      PetscFree(listtab);
      SCOTCH_archExit(&archptr);
      SCOTCH_mapExit(&mappptr);
      SCOTCH_stratExit(&archstrat);
    }
    /* dump the captured output to mesg_log... */
#ifdef PETSC_HAVE_UNISTD_H
    fflush(stdout);
    count = read(fd_pipe[0], scotch->mesg_log, (SIZE_LOG - 1) * sizeof(char));
    if (count < 0)
      count = 0;
    scotch->mesg_log[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
#endif

    SCOTCH_graphExit(&grafptr);
    SCOTCH_stratExit(&stratptr);
  }

  if (ierr)
    SETERRQ(PETSC_ERR_LIB, scotch->mesg_log ? scotch->mesg_log : "Error in the Scotch library");
  /* creation of the index set */
  MPI_Comm_rank(part->comm, &rank);
  MPI_Comm_size(part->comm, &size);
  nb_locals = mat->M / size;
  locals = parttab + rank * nb_locals;
  if (rank < mat->M % size) {
    nb_locals++;
    locals += rank;
  } else
    locals += mat->M % size;
  ISCreateGeneral(part->comm, nb_locals, locals, partitioning);
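  /* Worked example of the distribution above: with mat->M = 10 and size = 4,
     nb_locals starts at 10/4 = 2 and 10%4 = 2, so ranks 0 and 1 each own 3
     entries (offsets 0 and 3) while ranks 2 and 3 own 2 entries (offsets 6
     and 8); every entry of parttab is handed out exactly once. */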
  /* destroying old objects */
  PetscFree(parttab);
  if (matSeq != mat) {
    MatDestroy(matSeq);
  }
  if (matMPI != mat) {
    MatDestroy(matMPI);
  }

  return(0);
}
PetscErrorCode MatPartitioningView_Scotch(MatPartitioning part, PetscViewer viewer)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;
  PetscMPIInt rank;
  PetscTruth iascii;

  MPI_Comm_rank(part->comm, &rank);
  PetscTypeCompare((PetscObject) viewer, PETSC_VIEWER_ASCII, &iascii);
  if (iascii) {
    if (!rank && scotch->mesg_log) {
      PetscViewerASCIIPrintf(viewer, "%s\n", scotch->mesg_log);
    }
  } else {
    SETERRQ1(PETSC_ERR_SUP, "Viewer type %s not supported for this Scotch partitioner", ((PetscObject) viewer)->type_name);
  }
  return(0);
}
/*@
    MatPartitioningScotchSetGlobal - Sets the method used for the global
    partitioning phase.

    Input Parameters:
.   part - the partitioning context
.   global - MP_SCOTCH_GREEDY, MP_SCOTCH_GPS or MP_SCOTCH_GR_GPS (the combination of the two)

    Level: advanced

@*/
PetscErrorCode MatPartitioningScotchSetGlobal(MatPartitioning part, MPScotchGlobalType global)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

  switch (global) {
  case MP_SCOTCH_GREEDY:
  case MP_SCOTCH_GPS:
  case MP_SCOTCH_GR_GPS:
    scotch->global_method = global;
    break;
  default:
    SETERRQ(PETSC_ERR_SUP, "Scotch: unknown or unsupported option");
  }

  return(0);
}
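/* Usage sketch (hypothetical caller code; assumes "part" is a MatPartitioning
   already created with this Scotch type):

     MatPartitioningScotchSetGlobal(part, MP_SCOTCH_GPS);

   or, equivalently, the option -mat_partitioning_scotch_global gps handled in
   MatPartitioningSetFromOptions_Scotch() below. */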
/*@
    MatPartitioningScotchSetCoarseLevel - Sets the coarse level.

    Input Parameters:
.   part - the partitioning context
.   level - the coarse level in range [0.0,1.0]

    Level: advanced

@*/
PetscErrorCode MatPartitioningScotchSetCoarseLevel(MatPartitioning part, PetscReal level)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

  if (level < 0 || level > 1.0) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,
            "Scotch: level of coarsening out of range [0.0-1.0]");
  } else
    scotch->nbvtxcoarsed = (int)(part->adj->N * level);

  if (scotch->nbvtxcoarsed < 20)
    scotch->nbvtxcoarsed = 20;

  return(0);
}
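/* Worked example: for an adjacency matrix with N = 1000 columns,
   MatPartitioningScotchSetCoarseLevel(part, 0.1) sets nbvtxcoarsed to 100,
   while a level of 0.01 would give 10 and then be clamped to the minimum of
   20 vertices for the coarse graph. */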
/*@C
    MatPartitioningScotchSetStrategy - Sets the strategy to be used by Scotch.
    This is an alternative way of specifying the global method, the local
    method, the coarse level and the multilevel option.

    Input Parameters:
.   part - the partitioning context
.   strat - the strategy in Scotch format; see the Scotch documentation

    Level: advanced

.seealso: MatPartitioningScotchSetGlobal(), MatPartitioningScotchSetLocal(), MatPartitioningScotchSetCoarseLevel(), MatPartitioningScotchSetMultilevel()
@*/
PetscErrorCode MatPartitioningScotchSetStrategy(MatPartitioning part, char *strat)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

  PetscStrncpy(scotch->strategy, strat, sizeof(scotch->strategy)); /* avoid overflowing the fixed-size buffer */
  return(0);
}
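/* Usage sketch (hypothetical caller code): a raw Scotch strategy string
   bypasses the automatic construction done in MatPartitioningApply_Scotch():

     MatPartitioningScotchSetStrategy(part, "b{strat=g|h f x}");

   Note that the stored buffer is only 30 characters, so longer strategy
   strings are truncated. */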
/*@
    MatPartitioningScotchSetLocal - Sets the method used for the local
    partitioning phase.

    Input Parameters:
.   part - the partitioning context
.   local - MP_SCOTCH_KERNIGHAN_LIN or MP_SCOTCH_NONE

    Level: advanced

@*/
PetscErrorCode MatPartitioningScotchSetLocal(MatPartitioning part, MPScotchLocalType local)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

  switch (local) {
  case MP_SCOTCH_KERNIGHAN_LIN:
  case MP_SCOTCH_NONE:
    scotch->local_method = local;
    break;
  default:
    SETERRQ(PETSC_ERR_SUP, "Scotch: unknown or unsupported option");
  }

  return(0);
}
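/* Usage sketch (hypothetical caller code):

     MatPartitioningScotchSetLocal(part, MP_SCOTCH_NONE);

   or, equivalently, the option -mat_partitioning_scotch_local none. */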
/*@C
    MatPartitioningScotchSetArch - Specifies the file that describes the
    architecture used for mapping. The format of this file is documented in
    the Scotch manual.

    Input Parameters:
.   part - the partitioning context
.   filename - the name of the file

    Level: advanced

    Note:
    If the name is not set, then the default "archgraph.src" is used.

.seealso: MatPartitioningScotchSetHostList(), MatPartitioningScotchSetMapping()
@*/
PetscErrorCode MatPartitioningScotchSetArch(MatPartitioning part, const char *filename)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

  PetscStrcpy(scotch->arch, filename);

  return(0);
}
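/* Usage sketch (hypothetical caller code; "machine.grf" is a made-up file
   name). The file is read with SCOTCH_graphLoad() in
   MatPartitioningApply_Scotch(), so it is expected to contain a graph of the
   target machine in Scotch source graph format:

     MatPartitioningScotchSetArch(part, "machine.grf");
*/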
/*@C
    MatPartitioningScotchSetHostList - Specifies the host list file for mapping.

    Input Parameters:
.   part - the partitioning context
.   filename - the name of the file

    Level: advanced

    Notes:
    The file must consist of a list of hostnames (one per line). These hosts
    are the ones referred to in the architecture file (see
    MatPartitioningScotchSetArch()): the first host corresponds to index 0,
    the second one to index 1, and so on.

    If the name is not set, then the default "host_list" is used.

.seealso: MatPartitioningScotchSetArch(), MatPartitioningScotchSetMapping()
@*/
PetscErrorCode MatPartitioningScotchSetHostList(MatPartitioning part, const char *filename)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

  PetscStrcpy(scotch->host_list, filename);

  return(0);
}
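/* Example host list file (hypothetical hostnames; one per line, in the order
   of the vertex indices 0, 1, 2, ... of the architecture graph):

     node0
     node1
     node2
     node3
*/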
/*@
    MatPartitioningScotchSetMultilevel - Activates multilevel partitioning.

    Input Parameter:
.   part - the partitioning context

    Level: advanced

@*/
PetscErrorCode MatPartitioningScotchSetMultilevel(MatPartitioning part)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

  scotch->multilevel = 1;

  return(0);
}
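/* Usage sketch (hypothetical caller code): multilevel partitioning is
   typically combined with a coarse level, which sizes the coarse graph:

     MatPartitioningScotchSetMultilevel(part);
     MatPartitioningScotchSetCoarseLevel(part, 0.5);

   This makes the generated strategy use the "m{vert=...,asc=...,low=...}"
   form shown in MatPartitioningApply_Scotch(). */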
/*@
    MatPartitioningScotchSetMapping - Activates architecture mapping for the
    partitioning algorithm. Architecture mapping tries to enhance the quality
    of the partitioning by using network topology information.

    Input Parameter:
.   part - the partitioning context

    Level: advanced

.seealso: MatPartitioningScotchSetArch(), MatPartitioningScotchSetHostList()
@*/
PetscErrorCode MatPartitioningScotchSetMapping(MatPartitioning part)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

  scotch->map = 1;

  return(0);
}
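/* Usage sketch combining the mapping-related calls (hypothetical caller code
   and file names):

     MatPartitioningScotchSetArch(part, "archgraph.src");
     MatPartitioningScotchSetHostList(part, "host_list");
     MatPartitioningScotchSetMapping(part);

   With mapping enabled, MatPartitioningApply_Scotch() calls
   SCOTCH_mapCompute() over the architecture instead of the plain
   SCOTCH_graphPart(). */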
PetscErrorCode MatPartitioningSetFromOptions_Scotch(MatPartitioning part)
{
  PetscTruth flag;
  char name[PETSC_MAX_PATH_LEN];
  int i;
  PetscReal r;

  const char *global[] = { "greedy", "gps", "gr_gps" };
  const char *local[] = { "kernighan-lin", "none" };

  PetscOptionsHead("Set Scotch partitioning options");

  PetscOptionsEList("-mat_partitioning_scotch_global",
      "Global method to use", "MatPartitioningScotchSetGlobal", global, 3,
      global[0], &i, &flag);
  if (flag)
    MatPartitioningScotchSetGlobal(part, (MPScotchGlobalType)i);

  PetscOptionsEList("-mat_partitioning_scotch_local",
      "Local method to use", "MatPartitioningScotchSetLocal", local, 2,
      local[0], &i, &flag);
  if (flag)
    MatPartitioningScotchSetLocal(part, (MPScotchLocalType)i);

  PetscOptionsName("-mat_partitioning_scotch_mapping", "Use mapping",
      "MatPartitioningScotchSetMapping", &flag);
  if (flag)
    MatPartitioningScotchSetMapping(part);

  PetscOptionsString("-mat_partitioning_scotch_arch",
      "Architecture file in scotch format", "MatPartitioningScotchSetArch",
      "archgraph.src", name, PETSC_MAX_PATH_LEN, &flag);
  if (flag)
    MatPartitioningScotchSetArch(part, name);

  PetscOptionsString("-mat_partitioning_scotch_hosts",
      "Host list filename", "MatPartitioningScotchSetHostList",
      "host_list", name, PETSC_MAX_PATH_LEN, &flag);
  if (flag)
    MatPartitioningScotchSetHostList(part, name);

  PetscOptionsReal("-mat_partitioning_scotch_coarse_level",
      "Coarse level", "MatPartitioningScotchSetCoarseLevel", 0, &r, &flag);
  if (flag)
    MatPartitioningScotchSetCoarseLevel(part, r);

  PetscOptionsName("-mat_partitioning_scotch_mul", "Use multilevel partitioning",
      "MatPartitioningScotchSetMultilevel", &flag);
  if (flag)
    MatPartitioningScotchSetMultilevel(part);

  PetscOptionsString("-mat_partitioning_scotch_strategy",
      "Scotch strategy string", "MatPartitioningScotchSetStrategy",
      "", name, PETSC_MAX_PATH_LEN, &flag);
  if (flag)
    MatPartitioningScotchSetStrategy(part, name);

  PetscOptionsTail();
  return(0);
}
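/* Example command line exercising the options registered above (hypothetical
   executable name; assumes this partitioner is registered under the type
   name "scotch"):

     ./ex_partition -mat_partitioning_type scotch
                    -mat_partitioning_scotch_global gps
                    -mat_partitioning_scotch_local kernighan-lin
                    -mat_partitioning_scotch_mul
                    -mat_partitioning_scotch_coarse_level 0.5
*/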
PetscErrorCode MatPartitioningDestroy_Scotch(MatPartitioning part)
{
  MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data;

  if (scotch->mesg_log) {
    PetscFree(scotch->mesg_log);
  }
  PetscFree(scotch);

  return(0);
}
PetscErrorCode MatPartitioningCreate_Scotch(MatPartitioning part)
{
  MatPartitioning_Scotch *scotch;

  PetscNew(MatPartitioning_Scotch, &scotch);

  scotch->map = 0;
  scotch->global_method = MP_SCOTCH_GR_GPS;
  scotch->local_method = MP_SCOTCH_KERNIGHAN_LIN;
  PetscStrcpy(scotch->arch, "archgraph.src");
  scotch->nbvtxcoarsed = 200;
  PetscStrcpy(scotch->strategy, "");
  scotch->multilevel = 0;
  scotch->mesg_log = NULL;

  PetscStrcpy(scotch->host_list, "host_list");

  part->ops->apply = MatPartitioningApply_Scotch;
  part->ops->view = MatPartitioningView_Scotch;
  part->ops->destroy = MatPartitioningDestroy_Scotch;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Scotch;
  part->data = (void *) scotch;

  return(0);
}
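/* A minimal end-to-end usage sketch (illustrative driver code, not part of
   this file; guarded out of compilation). It assumes this partitioner is
   registered under the type name "scotch" and that "adj" is a square
   adjacency matrix. It follows the calling conventions of this PETSc
   generation, in which destroy routines take the object directly. */
#if 0
static PetscErrorCode PartitionWithScotch(Mat adj, int nparts, IS *is)
{
  MatPartitioning part;

  MatPartitioningCreate(adj->comm, &part);
  MatPartitioningSetAdjacency(part, adj);   /* graph to partition */
  MatPartitioningSetType(part, "scotch");   /* select this backend */
  MatPartitioningSetNParts(part, nparts);   /* number of parts (part->n above) */
  MatPartitioningSetFromOptions(part);      /* pick up -mat_partitioning_scotch_* options */
  MatPartitioningApply(part, is);           /* runs MatPartitioningApply_Scotch() */
  MatPartitioningDestroy(part);
  return 0;
}
#endif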