Actual source code: fdmpiaij.c
#include "src/mat/impls/aij/mpi/mpiaij.h"

EXTERN PetscErrorCode CreateColmap_MPIAIJ_Private(Mat);
EXTERN PetscErrorCode MatGetColumnIJ_SeqAIJ(Mat,PetscInt,PetscTruth,PetscInt*,PetscInt*[],PetscInt*[],PetscTruth*);
EXTERN PetscErrorCode MatRestoreColumnIJ_SeqAIJ(Mat,PetscInt,PetscTruth,PetscInt*,PetscInt*[],PetscInt*[],PetscTruth*);
PetscErrorCode MatFDColoringCreate_MPIAIJ(Mat mat,ISColoring iscoloring,MatFDColoring c)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscMPIInt    size,*ncolsonproc,*disp,nn;
  PetscInt       i,*is,n,nrows,j,k,m,*rows = 0,*A_ci,*A_cj,ncols,col;
  PetscInt       nis = iscoloring->n,nctot,*cols,*B_ci,*B_cj;
  PetscInt       *rowhit,M = mat->m,cstart = aij->cstart,cend = aij->cend,colb;
  PetscInt       *columnsforrow,l;
  IS             *isa;
  PetscTruth     done,flg;

  PetscFunctionBegin;
  if (!mat->assembled) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Matrix must be assembled first; MatAssemblyBegin/End();");
  }
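
  /* Get one index set per color; isa[i] lists the local columns that have color i */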
  ISColoringGetIS(iscoloring,PETSC_IGNORE,&isa);
  c->M      = mat->M;   /* set the global rows and columns and local rows */
  c->N      = mat->N;
  c->m      = mat->m;
  c->rstart = aij->rstart;

  c->ncolors = nis;
  PetscMalloc(nis*sizeof(PetscInt),&c->ncolumns);
  PetscMalloc(nis*sizeof(PetscInt*),&c->columns);
  PetscMalloc(nis*sizeof(PetscInt),&c->nrows);
  PetscMalloc(nis*sizeof(PetscInt*),&c->rows);
  PetscMalloc(nis*sizeof(PetscInt*),&c->columnsforrow);
  PetscLogObjectMemory(c,5*nis*sizeof(PetscInt));
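  /* For each color i: c->columns[i] will hold the local columns of that color,
     c->rows[i] the local rows touched by that color, and c->columnsforrow[i]
     the column responsible for each of those rows */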

  /* Allow access to data structures of local part of matrix */
  if (!aij->colmap) {
    CreateColmap_MPIAIJ_Private(mat);
  }
  /*
     Call the _SeqAIJ() versions of these routines directly so that we do not
     get the reduced (by inodes) version of I and J
  */
  MatGetColumnIJ_SeqAIJ(aij->A,0,PETSC_FALSE,&ncols,&A_ci,&A_cj,&done);
  MatGetColumnIJ_SeqAIJ(aij->B,0,PETSC_FALSE,&ncols,&B_ci,&B_cj,&done);

  MPI_Comm_size(mat->comm,&size);
  PetscMalloc(2*size*sizeof(PetscMPIInt),&ncolsonproc);
  disp = ncolsonproc + size;

  PetscMalloc((M+1)*sizeof(PetscInt),&rowhit);
  PetscMalloc((M+1)*sizeof(PetscInt),&columnsforrow);
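  /* Work arrays over the M local rows, reused for each color: rowhit marks the rows
     touched by the current color; columnsforrow is scratch used by the slow path below */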

  /*
     Temporary option to allow for debugging/testing
  */
  PetscOptionsHasName(PETSC_NULL,"-matfdcoloring_slow",&flg);
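
  /* Loop over the colors: gather (across all processes) the columns of each color,
     then determine which local rows those columns affect */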
  for (i=0; i<nis; i++) {
    ISGetLocalSize(isa[i],&n);
    ISGetIndices(isa[i],&is);
    c->ncolumns[i] = n;
    if (n) {
      PetscMalloc(n*sizeof(PetscInt),&c->columns[i]);
      PetscLogObjectMemory(c,n*sizeof(PetscInt));
      PetscMemcpy(c->columns[i],is,n*sizeof(PetscInt));
    } else {
      c->columns[i] = 0;
    }

    /* Determine the total (parallel) number of columns of this color */
    nn    = (PetscMPIInt)n;
    MPI_Allgather(&nn,1,MPI_INT,ncolsonproc,1,MPI_INT,mat->comm);
    nctot = 0; for (j=0; j<size; j++) {nctot += ncolsonproc[j];}
    if (!nctot) {
      PetscLogInfo((PetscObject)mat,"MatFDColoringCreate_MPIAIJ: Coloring of matrix has some unneeded colors with no corresponding rows\n");
    }

    disp[0] = 0;
    for (j=1; j<size; j++) {
      disp[j] = disp[j-1] + ncolsonproc[j-1];
    }

    /* Get complete list of columns for color on each processor */
    PetscMalloc((nctot+1)*sizeof(PetscInt),&cols);
    MPI_Allgatherv(is,n,MPIU_INT,cols,ncolsonproc,disp,MPIU_INT,mat->comm);
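    /* cols[0..nctot-1] now holds every global column of this color, from all processes */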

    /*
       Mark all rows affected by these columns
    */
    if (!flg) { /*-----------------------------------------------------------------------------*/
      /* crude, fast version */
      PetscMemzero(rowhit,M*sizeof(PetscInt));
      /* loop over columns */
      for (j=0; j<nctot; j++) {
        col = cols[j];
        if (col >= cstart && col < cend) {
          /* column is in diagonal block of matrix */
          rows = A_cj + A_ci[col-cstart];
          m    = A_ci[col-cstart+1] - A_ci[col-cstart];
        } else {
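          /* column is in the off-diagonal block B; colmap maps the global column to a
             local column of B (stored shifted by one, so -1 after the decrement means
             this process has no nonzeros in that column) */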
#if defined (PETSC_USE_CTABLE)
          ierr = PetscTableFind(aij->colmap,col+1,&colb);CHKERRQ(ierr);
          colb--;
#else
          colb = aij->colmap[col] - 1;
#endif
          if (colb == -1) {
            m = 0;
          } else {
            rows = B_cj + B_ci[colb];
            m    = B_ci[colb+1] - B_ci[colb];
          }
        }
        /* mark each row of this column in rowhit; store col+1 so that zero means "not hit" */
        for (k=0; k<m; k++) {
          rowhit[*rows++] = col + 1;
        }
      }

      /* count the number of hits */
      nrows = 0;
      for (j=0; j<M; j++) {
        if (rowhit[j]) nrows++;
      }
      c->nrows[i] = nrows;
      PetscMalloc((nrows+1)*sizeof(PetscInt),&c->rows[i]);
      PetscMalloc((nrows+1)*sizeof(PetscInt),&c->columnsforrow[i]);
      PetscLogObjectMemory(c,2*(nrows+1)*sizeof(PetscInt));
      nrows = 0;
      for (j=0; j<M; j++) {
        if (rowhit[j]) {
          c->rows[i][nrows]          = j;
          c->columnsforrow[i][nrows] = rowhit[j] - 1;
          nrows++;
        }
      }
    } else { /*-------------------------------------------------------------------------------*/
      /* slow version, using rowhit as a linked list */
      PetscInt currentcol,fm,mfm;
      rowhit[M] = M;
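      /* rowhit now doubles as a sorted singly linked list of the rows hit so far:
         entry M is the sentinel head/tail and rowhit[r] is the next hit row after r */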
      nrows     = 0;
      /* loop over columns */
      for (j=0; j<nctot; j++) {
        col = cols[j];
        if (col >= cstart && col < cend) {
          /* column is in diagonal block of matrix */
          rows = A_cj + A_ci[col-cstart];
          m    = A_ci[col-cstart+1] - A_ci[col-cstart];
        } else {
#if defined (PETSC_USE_CTABLE)
          ierr = PetscTableFind(aij->colmap,col+1,&colb);CHKERRQ(ierr);
          colb--;
#else
          colb = aij->colmap[col] - 1;
#endif
          if (colb == -1) {
            m = 0;
          } else {
            rows = B_cj + B_ci[colb];
            m    = B_ci[colb+1] - B_ci[colb];
          }
        }
        /* insert each row of this column into the rowhit linked list */
        fm = M;   /* fm points to first entry in linked list */
        for (k=0; k<m; k++) {
          currentcol = *rows++;
          /* is it already in the list? */
          do {
            mfm = fm;
            fm  = rowhit[fm];
          } while (fm < currentcol);
          /* not in list so add it */
          if (fm != currentcol) {
            nrows++;
            columnsforrow[currentcol] = col;
            /* next three lines insert new entry into linked list */
            rowhit[mfm]        = currentcol;
            rowhit[currentcol] = fm;
            fm                 = currentcol;
            /* fm points to present position in list since we know the columns are sorted */
          } else {
            SETERRQ(PETSC_ERR_PLIB,"Invalid coloring of matrix detected");
          }
        }
      }
      c->nrows[i] = nrows;
      PetscMalloc((nrows+1)*sizeof(PetscInt),&c->rows[i]);
      PetscMalloc((nrows+1)*sizeof(PetscInt),&c->columnsforrow[i]);
      PetscLogObjectMemory(c,2*(nrows+1)*sizeof(PetscInt));
      /* now store the linked list of rows into c->rows[i] */
      nrows = 0;
      fm    = rowhit[M];
      do {
        c->rows[i][nrows]            = fm;
        c->columnsforrow[i][nrows++] = columnsforrow[fm];
        fm = rowhit[fm];
      } while (fm < M);
    } /* ---------------------------------------------------------------------------------------*/
    PetscFree(cols);
  }

  /* Optimize by adding the vscale, and scaleforrow[][] fields */
  /*
     vscale will contain the "diagonal" on-processor scalings followed by the
     off-processor scalings
  */
  ierr = VecCreateGhost(mat->comm,aij->A->m,PETSC_DETERMINE,aij->B->n,aij->garray,&c->vscale);CHKERRQ(ierr);
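  /* The local part of vscale covers the cstart..cend-1 columns owned by this process;
     the ghost entries correspond to the off-process columns listed in aij->garray */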
  PetscMalloc(c->ncolors*sizeof(PetscInt*),&c->vscaleforrow);
  for (k=0; k<c->ncolors; k++) {
    PetscMalloc((c->nrows[k]+1)*sizeof(PetscInt),&c->vscaleforrow[k]);
    for (l=0; l<c->nrows[k]; l++) {
      col = c->columnsforrow[k][l];
      if (col >= cstart && col < cend) {
        /* column is in diagonal block of matrix */
        colb = col - cstart;
      } else {
        /* column is in "off-processor" part */
#if defined (PETSC_USE_CTABLE)
        ierr = PetscTableFind(aij->colmap,col+1,&colb);CHKERRQ(ierr);
        colb--;
#else
        colb = aij->colmap[col] - 1;
#endif
        colb += cend - cstart;
      }
      c->vscaleforrow[k][l] = colb;
    }
  }
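  /* vscaleforrow[k][l] is the local index into vscale of the column used to
     perturb row c->rows[k][l] for color k */
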
  ISColoringRestoreIS(iscoloring,&isa);

  PetscFree(rowhit);
  PetscFree(columnsforrow);
  PetscFree(ncolsonproc);
  MatRestoreColumnIJ_SeqAIJ(aij->A,0,PETSC_FALSE,&ncols,&A_ci,&A_cj,&done);
  MatRestoreColumnIJ_SeqAIJ(aij->B,0,PETSC_FALSE,&ncols,&B_ci,&B_cj,&done);
  PetscFunctionReturn(0);
}
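
For context, the sketch below shows roughly how this internal routine is reached through the
public PETSc interface of this era; it is an illustrative assumption and not part of fdmpiaij.c.
SetupFDColoring, FormFunction, AppCtx and user are placeholder names supplied by the application,
and the coloring type is chosen only for illustration.

#include "petscsnes.h"

typedef struct { PetscReal dummy; } AppCtx;               /* placeholder application context */
extern PetscErrorCode FormFunction(SNES,Vec,Vec,void*);   /* placeholder residual routine */

PetscErrorCode SetupFDColoring(SNES snes,Mat J,AppCtx *user,MatFDColoring *fdcoloring)
{
  PetscErrorCode ierr;
  ISColoring     iscoloring;

  PetscFunctionBegin;
  /* color the (preallocated and assembled) Jacobian; for an MPIAIJ matrix,
     MatFDColoringCreate() dispatches to MatFDColoringCreate_MPIAIJ() in this file */
  ierr = MatGetColoring(J,MATCOLORING_NATURAL,&iscoloring);CHKERRQ(ierr);
  ierr = MatFDColoringCreate(J,iscoloring,fdcoloring);CHKERRQ(ierr);
  ierr = MatFDColoringSetFunction(*fdcoloring,(PetscErrorCode (*)(void))FormFunction,user);CHKERRQ(ierr);
  ierr = MatFDColoringSetFromOptions(*fdcoloring);CHKERRQ(ierr);
  ierr = ISColoringDestroy(iscoloring);CHKERRQ(ierr);

  /* compute the Jacobian by colored finite differences of FormFunction */
  ierr = SNESSetJacobian(snes,J,J,SNESDefaultComputeJacobianColor,*fdcoloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}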