Actual source code: fdmpiaij.c

/*$Id: fdmpiaij.c,v 1.41 2001/06/21 21:16:31 bsmith Exp $*/

#include "src/mat/impls/aij/mpi/mpiaij.h"
#include "src/vec/vecimpl.h"

EXTERN int CreateColmap_MPIAIJ_Private(Mat);
EXTERN int MatGetColumnIJ_SeqAIJ(Mat,int,PetscTruth,int*,int*[],int*[],PetscTruth*);
EXTERN int MatRestoreColumnIJ_SeqAIJ(Mat,int,PetscTruth,int*,int*[],int*[],PetscTruth*);
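/*
   MatFDColoringCreate_MPIAIJ - Given a coloring of the columns of a parallel AIJ matrix,
   builds the MatFDColoring data structure used to compute the Jacobian efficiently by
   finite differences, one function evaluation per color.
*/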

int MatFDColoringCreate_MPIAIJ(Mat mat,ISColoring iscoloring,MatFDColoring c)
{
  Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data;
  int        i,*is,n,nrows,j,k,m,*rows = 0,ierr,*A_ci,*A_cj,ncols,col;
  int        nis = iscoloring->n,*ncolsonproc,size,nctot,*cols,*disp,*B_ci,*B_cj;
  int        *rowhit,M = mat->m,cstart = aij->cstart,cend = aij->cend,colb;
  int        *columnsforrow,l;
  IS         *isa;
  PetscTruth done,flg;

  if (!mat->assembled) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Matrix must be assembled first; MatAssemblyBegin/End();");
  }

  ISColoringGetIS(iscoloring,PETSC_IGNORE,&isa);
  c->M             = mat->M;  /* set the global rows and columns and local rows */
  c->N             = mat->N;
  c->m             = mat->m;
  c->rstart        = aij->rstart;

  c->ncolors       = nis;
  PetscMalloc(nis*sizeof(int),&c->ncolumns);
  PetscMalloc(nis*sizeof(int*),&c->columns);
  PetscMalloc(nis*sizeof(int),&c->nrows);
  PetscMalloc(nis*sizeof(int*),&c->rows);
  PetscMalloc(nis*sizeof(int*),&c->columnsforrow);
  PetscLogObjectMemory(c,5*nis*sizeof(int));
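  /*
     For each color i: ncolumns[i] and columns[i] record the local columns of that color,
     while nrows[i], rows[i], and columnsforrow[i] will record the local rows perturbed by
     that color and the (global) column responsible for each such row
  */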

  /* Allow access to data structures of local part of matrix */
  if (!aij->colmap) {
    CreateColmap_MPIAIJ_Private(mat);
  }
  /*
     Call the _SeqAIJ() version of these routines so that we do not
     get the reduced (by inodes) version of I and J
  */
  MatGetColumnIJ_SeqAIJ(aij->A,0,PETSC_FALSE,&ncols,&A_ci,&A_cj,&done);
  MatGetColumnIJ_SeqAIJ(aij->B,0,PETSC_FALSE,&ncols,&B_ci,&B_cj,&done);

  MPI_Comm_size(mat->comm,&size);
  PetscMalloc(2*size*sizeof(int),&ncolsonproc);
  disp = ncolsonproc + size;
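  /* ncolsonproc[j] will hold the number of columns of the current color on process j;
     disp[] holds the corresponding displacements for the MPI_Allgatherv() below */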

  PetscMalloc((M+1)*sizeof(int),&rowhit);
  PetscMalloc((M+1)*sizeof(int),&columnsforrow);

  /*
     Temporary option to allow for debugging/testing
  */
  PetscOptionsHasName(PETSC_NULL,"-matfdcoloring_slow",&flg);

  for (i=0; i<nis; i++) {
    ISGetLocalSize(isa[i],&n);
    ISGetIndices(isa[i],&is);
    c->ncolumns[i] = n;
    if (n) {
      PetscMalloc(n*sizeof(int),&c->columns[i]);
      PetscLogObjectMemory(c,n*sizeof(int));
      PetscMemcpy(c->columns[i],is,n*sizeof(int));
    } else {
      c->columns[i]  = 0;
    }

    /* Determine the total (parallel) number of columns of this color */
    MPI_Allgather(&n,1,MPI_INT,ncolsonproc,1,MPI_INT,mat->comm);
    nctot = 0; for (j=0; j<size; j++) {nctot += ncolsonproc[j];}
    if (!nctot) {
      PetscLogInfo((PetscObject)mat,"MatFDColoringCreate_MPIAIJ: Coloring of matrix has some unneeded colors with no corresponding rows\n");
    }

    disp[0] = 0;
    for (j=1; j<size; j++) {
      disp[j] = disp[j-1] + ncolsonproc[j-1];
    }

    /* Get complete list of columns for color on each processor */
    PetscMalloc((nctot+1)*sizeof(int),&cols);
    MPI_Allgatherv(is,n,MPI_INT,cols,ncolsonproc,disp,MPI_INT,mat->comm);

    /*
       Mark all rows affected by these columns
    */
    if (!flg) {/*-----------------------------------------------------------------------------*/
      /* crude, fast version */
      PetscMemzero(rowhit,M*sizeof(int));
      /* loop over the columns of this color */
      for (j=0; j<nctot; j++) {
        col  = cols[j];
        if (col >= cstart && col < cend) {
          /* column is in diagonal block of matrix */
          rows = A_cj + A_ci[col-cstart];
          m    = A_ci[col-cstart+1] - A_ci[col-cstart];
        } else {
#if defined (PETSC_USE_CTABLE)
          ierr = PetscTableFind(aij->colmap,col+1,&colb);CHKERRQ(ierr);
          colb--;
#else
          colb = aij->colmap[col] - 1;
#endif
          if (colb == -1) {
            m = 0;
          } else {
            rows = B_cj + B_ci[colb];
            m    = B_ci[colb+1] - B_ci[colb];
          }
        }
        /* loop over the rows of this column, marking them in rowhit */
        for (k=0; k<m; k++) {
          rowhit[*rows++] = col + 1;
        }
      }

      /* count the number of hits */
      nrows = 0;
      for (j=0; j<M; j++) {
        if (rowhit[j]) nrows++;
      }
      c->nrows[i] = nrows;
      PetscMalloc((nrows+1)*sizeof(int),&c->rows[i]);
      PetscMalloc((nrows+1)*sizeof(int),&c->columnsforrow[i]);
      PetscLogObjectMemory(c,2*(nrows+1)*sizeof(int));
      nrows = 0;
      for (j=0; j<M; j++) {
        if (rowhit[j]) {
          c->rows[i][nrows]          = j;
          c->columnsforrow[i][nrows] = rowhit[j] - 1;
          nrows++;
        }
      }
    } else {/*-------------------------------------------------------------------------------*/
      /* slow version, using rowhit as a linked list */
      int currentcol,fm,mfm;
      rowhit[M] = M;
      nrows     = 0;
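      /*
         rowhit[] is used here as a singly linked list of the rows hit by this color,
         kept sorted by row index: rowhit[M] holds the first row in the list, each
         entry holds the next row, and a link equal to M marks the end of the list
      */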
      /* loop over the columns of this color */
      for (j=0; j<nctot; j++) {
        col  = cols[j];
        if (col >= cstart && col < cend) {
          /* column is in diagonal block of matrix */
          rows = A_cj + A_ci[col-cstart];
          m    = A_ci[col-cstart+1] - A_ci[col-cstart];
        } else {
#if defined (PETSC_USE_CTABLE)
          ierr = PetscTableFind(aij->colmap,col+1,&colb);CHKERRQ(ierr);
          colb--;
#else
          colb = aij->colmap[col] - 1;
#endif
          if (colb == -1) {
            m = 0;
          } else {
            rows = B_cj + B_ci[colb];
            m    = B_ci[colb+1] - B_ci[colb];
          }
        }
        /* loop over the rows of this column, inserting each into the linked list */
        fm    = M; /* fm points to first entry in linked list */
        for (k=0; k<m; k++) {
          currentcol = *rows++;
          /* is it already in the list? */
          do {
            mfm  = fm;
            fm   = rowhit[fm];
          } while (fm < currentcol);
          /* not in list so add it */
          if (fm != currentcol) {
            nrows++;
            columnsforrow[currentcol] = col;
            /* next three lines insert new entry into linked list */
            rowhit[mfm]               = currentcol;
            rowhit[currentcol]        = fm;
            fm                        = currentcol;
            /* fm points to present position in list since we know the rows are sorted */
          } else {
            SETERRQ(PETSC_ERR_PLIB,"Invalid coloring of matrix detected");
          }
        }
      }
      c->nrows[i] = nrows;
      PetscMalloc((nrows+1)*sizeof(int),&c->rows[i]);
      PetscMalloc((nrows+1)*sizeof(int),&c->columnsforrow[i]);
      PetscLogObjectMemory(c,2*(nrows+1)*sizeof(int));
      /* now store the linked list of rows into c->rows[i] */
      nrows = 0;
      fm    = rowhit[M];
      do {
        c->rows[i][nrows]            = fm;
        c->columnsforrow[i][nrows++] = columnsforrow[fm];
        fm                           = rowhit[fm];
      } while (fm < M);
    } /* ---------------------------------------------------------------------------------------*/
    PetscFree(cols);
  }

  /* Optimize by adding the vscale vector and the vscaleforrow[][] fields */
  /*
     vscale will contain the "diagonal" (on-processor) scalings followed by the off-processor ones
  */
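  /*
     vscaleforrow[k][l] will hold, for the l-th row of color k, the local index into the
     ghosted vscale vector (diagonal-block columns first, then off-processor columns) of
     the column used to perturb that row
  */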
  ierr = VecCreateGhost(mat->comm,aij->A->m,PETSC_DETERMINE,aij->B->n,aij->garray,&c->vscale);CHKERRQ(ierr);
  PetscMalloc(c->ncolors*sizeof(int*),&c->vscaleforrow);
  for (k=0; k<c->ncolors; k++) {
    PetscMalloc((c->nrows[k]+1)*sizeof(int),&c->vscaleforrow[k]);
    for (l=0; l<c->nrows[k]; l++) {
      col = c->columnsforrow[k][l];
      if (col >= cstart && col < cend) {
        /* column is in diagonal block of matrix */
        colb = col - cstart;
      } else {
        /* column is in "off-processor" part */
#if defined (PETSC_USE_CTABLE)
        ierr = PetscTableFind(aij->colmap,col+1,&colb);CHKERRQ(ierr);
        colb--;
#else
        colb = aij->colmap[col] - 1;
#endif
        colb += cend - cstart;
      }
      c->vscaleforrow[k][l] = colb;
    }
  }
  ISColoringRestoreIS(iscoloring,&isa);

  PetscFree(rowhit);
  PetscFree(columnsforrow);
  PetscFree(ncolsonproc);
  MatRestoreColumnIJ_SeqAIJ(aij->A,0,PETSC_FALSE,&ncols,&A_ci,&A_cj,&done);
  MatRestoreColumnIJ_SeqAIJ(aij->B,0,PETSC_FALSE,&ncols,&B_ci,&B_cj,&done);
  return(0);
}
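
The routine above is not called directly; it is reached through the generic MatFDColoringCreate() interface once a column coloring of the matrix has been computed. A minimal usage sketch with a SNES (assuming the PETSc 2.x coloring interface; J, snes, FormFunction, and user are placeholders for the application's Jacobian matrix, solver, residual routine, and context):

  ISColoring    iscoloring;
  MatFDColoring fdcoloring;
  int           ierr;

  /* color the columns of J, then build the finite-difference coloring object */
  ierr = MatGetColoring(J,MATCOLORING_SL,&iscoloring);CHKERRQ(ierr);
  ierr = MatFDColoringCreate(J,iscoloring,&fdcoloring);CHKERRQ(ierr);
  ierr = MatFDColoringSetFunction(fdcoloring,(int (*)(void))FormFunction,&user);CHKERRQ(ierr);
  ierr = MatFDColoringSetFromOptions(fdcoloring);CHKERRQ(ierr);
  /* have SNES compute the Jacobian by colored finite differences */
  ierr = SNESSetJacobian(snes,J,J,SNESDefaultComputeJacobianColor,fdcoloring);CHKERRQ(ierr);
  ierr = ISColoringDestroy(iscoloring);CHKERRQ(ierr);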