Actual source code: mpiadj.c

/*$Id: mpiadj.c,v 1.60 2001/03/23 23:22:17 balay Exp $*/

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data structure.
*/
#include "petscsys.h"
#include "src/mat/impls/adj/mpi/mpiadj.h"

int MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  int               ierr,i,j,m = A->m;
  char              *name;
  PetscViewerFormat format;

  ierr = PetscObjectGetName((PetscObject)viewer,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    return(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  return(0);
}

int MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  int        ierr;
  PetscTruth isascii;

  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  return(0);
}

int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
  int        ierr;

#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
  }
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);
  return(0);
}

int MatSetOption_MPIAdj(Mat A,MatOption op)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  if (op == MAT_STRUCTURALLY_SYMMETRIC) {
    a->symmetric = PETSC_TRUE;
  } else {
    PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
  }
  return(0);
}


/*
     Adds diagonal pointers to sparse matrix structure.
*/
int MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        i,j,*diag,m = A->m,ierr;

  ierr = PetscMalloc((m+1)*sizeof(int),&diag);CHKERRQ(ierr);
  PetscLogObjectMemory(A,(m+1)*sizeof(int));
  for (i=0; i<m; i++) {
    diag[i] = -1;   /* flag rows that contain no diagonal entry */
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        diag[i] = j;
        break;
      }
    }
  }
  a->diag = diag;
  return(0);
}

int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  if (m) *m = a->rstart;
  if (n) *n = a->rend;
  return(0);
}

int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        *itmp;

  row -= a->rstart;

  if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    } else {
      *idx = 0;
    }
  }
  return(0);
}
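
/*
   Editor's note: a minimal sketch (not part of the original file) of how a
   caller would typically walk the locally owned rows through the public
   MatGetRow()/MatRestoreRow() interface implemented above.  The helper name
   and the assumption that "adj" came from MatCreateMPIAdj() are illustrative.
*/
static int SketchTraverseRows(Mat adj)
{
  int ierr,row,rstart,rend,nz,*cols;

  ierr = MatGetOwnershipRange(adj,&rstart,&rend);CHKERRQ(ierr);
  for (row=rstart; row<rend; row++) {
    ierr = MatGetRow(adj,row,&nz,&cols,PETSC_NULL);CHKERRQ(ierr);
    /* cols[0..nz-1] are the neighbours of vertex "row"; there are no values */
    ierr = MatRestoreRow(adj,row,&nz,&cols,PETSC_NULL);CHKERRQ(ierr);
  }
  return(0);
}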

int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  return(0);
}

int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  *bs = 1;
  return(0);
}


int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  int        ierr;
  PetscTruth flag;

  ierr = PetscTypeCompare((PetscObject)B,MATMPIADJ,&flag);CHKERRQ(ierr);
  if (!flag) SETERRQ(PETSC_ERR_ARG_INCOMP,"Matrices must be same type");

  /* unequal if the local dimensions or the number of nonzeros differ */
  if ((A->m != B->m) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* compare the row pointers a->i, and only if those match the column
       indices a->j, so an earlier mismatch is not overwritten */
    ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
    if (flag) {
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
    }
  }

  /* the matrices are equal only if they are equal on every process */
  ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
  return(0);
}

int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
{
  int        ierr,size,i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; return(0);}
  *m    = A->m;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  return(0);
}

int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
{
  int        i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    /* undo the shift: restore the row pointers first so that (*ia)[*m] is
       again the number of nonzeros, then restore the column indices */
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  return(0);
}
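
/*
   Editor's note: a minimal sketch (not part of the original file) of how the
   two handlers above are reached through the public MatGetRowIJ()/
   MatRestoreRowIJ() interface, here requesting 1-based (Fortran-style)
   indices with oshift = 1.  As the code above documents, this only succeeds
   on a single process.
*/
static int SketchGetCSR(Mat adj)
{
  int        ierr,n,*ia,*ja;
  PetscTruth done;

  ierr = MatGetRowIJ(adj,1,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);
  if (done) {
    /* ia[0..n] and ja[] now use 1-based indexing, e.g. for an external package */
    ierr = MatRestoreRowIJ(adj,1,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);
  }
  return(0);
}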

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetMaps_Petsc};


EXTERN_C_BEGIN
int MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  int        ii,ierr,size,rank;

  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;

  /* the information in the maps duplicates the information computed below; eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = MapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-( */
  ierr = MapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  ierr = PetscMalloc((size+1)*sizeof(int),&b->rowners);CHKERRQ(ierr);
  PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  /* turn the gathered local row counts into a prefix sum of row ownership */
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];

  return(0);
}
EXTERN_C_END

int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
  int        ierr;
#if defined(PETSC_USE_BOPT_g)
  int        ii;
#endif

  B->preallocated = PETSC_TRUE;
#if defined(PETSC_USE_BOPT_g)
  if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
  for (ii=1; ii<=B->m; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(1,"i[%d]=%d cannot be negative or less than i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->m]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->N) {
      SETERRQ2(1,"Column index j[%d]=%d out of range\n",ii,j[ii]);
    }
  }
#endif

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz               = i[B->m];
  b->diag             = 0;
  b->symmetric        = PETSC_FALSE;
  b->freeaij          = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  return(0);
}
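
/*
   Editor's note: a small illustration (not part of the original file) of the
   i/j layout MatMPIAdjSetPreallocation() expects.  For the triangle graph
   0-1, 1-2, 0-2 stored on one process, each row lists its neighbours and the
   diagonal is omitted:

       int i[] = {0,2,4,6};         row k's neighbours are j[i[k]]..j[i[k+1]-1]
       int j[] = {1,2, 0,2, 0,1};   sorted within each row, 0-based
*/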

/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself; PETSc will free them
   when the matrix is destroyed. You must allocate them with PetscMalloc(). (If you
   call from Fortran, you need not create the arrays with PetscMalloc().)
   The adjacency list should not include the matrix diagonal entries.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int ierr;

  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  return(0);
}
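
/*
   Editor's note: a minimal usage sketch (not part of the original file).  On
   each process it builds the adjacency of "nlocal" vertices of a global chain
   graph (0-based indices, no diagonal entries) and hands the PetscMalloc()ed
   arrays over to the matrix, which frees them on MatDestroy().  The helper
   name, the chain graph, and the parameter "first" (global index of the first
   local vertex) are illustrative assumptions.
*/
static int SketchCreateChainAdj(MPI_Comm comm,int nlocal,int nglobal,int first,Mat *adj)
{
  int ierr,k,cnt = 0,*ia,*ja;

  ierr = PetscMalloc((nlocal+1)*sizeof(int),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc((2*nlocal+1)*sizeof(int),&ja);CHKERRQ(ierr);
  ia[0] = 0;
  for (k=0; k<nlocal; k++) {
    if (first+k > 0)         ja[cnt++] = first+k-1;   /* left neighbour  */
    if (first+k < nglobal-1) ja[cnt++] = first+k+1;   /* right neighbour */
    ia[k+1] = cnt;
  }
  ierr = MatCreateMPIAdj(comm,nlocal,nglobal,ia,ja,PETSC_NULL,adj);CHKERRQ(ierr);
  return(0);
}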

EXTERN_C_BEGIN
int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *B)
{
  int      i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
  Scalar   *ra;
  MPI_Comm comm;

  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of off-diagonal nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count the diagonal */
    }
    ierr = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for the nonzeros; ia needs one entry per local row plus one */
  ierr = PetscMalloc((nzeros+1)*sizeof(int),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((m+1)*sizeof(int),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc((nzeros+1)*sizeof(int),&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);

  return(0);
}
EXTERN_C_END
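
/*
   Editor's note: a one-line usage sketch (not part of the original file).
   The converter above is reached through the generic MatConvert() interface:

       ierr = MatConvert(A,MATMPIADJ,&B);CHKERRQ(ierr);

   which builds an MPIAdj matrix whose edge weights are the integer-truncated
   absolute values of A's off-diagonal entries.
*/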