Actual source code: mpiadj.c

  1: /*$Id: mpiadj.c,v 1.66 2001/08/07 03:02:59 balay Exp $*/

  3: /*
  4:     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure. 
  5: */
  6: #include "src/mat/impls/adj/mpi/mpiadj.h"
  7: #include "petscsys.h"

 11: int MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
 12: {
 13:   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
 14:   int               ierr,i,j,m = A->m;
 15:   char              *name;
 16:   PetscViewerFormat format;

 19:   PetscObjectGetName((PetscObject)A,&name);
 20:   PetscViewerGetFormat(viewer,&format);
 21:   if (format == PETSC_VIEWER_ASCII_INFO) {
 22:     return(0);
 23:   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
 24:     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
 25:   } else {
 26:     PetscViewerASCIIUseTabs(viewer,PETSC_NO);
 27:     for (i=0; i<m; i++) {
 28:       PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);
 29:       for (j=a->i[i]; j<a->i[i+1]; j++) {
 30:         PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);
 31:       }
 32:       PetscViewerASCIISynchronizedPrintf(viewer,"\n");
 33:     }
 34:     PetscViewerASCIIUseTabs(viewer,PETSC_YES);
 35:   }
 36:   PetscViewerFlush(viewer);
 37:   return(0);
 38: }

 42: int MatView_MPIAdj(Mat A,PetscViewer viewer)
 43: {
 44:   int        ierr;
 45:   PetscTruth isascii;

 48:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
 49:   if (isascii) {
 50:     MatView_MPIAdj_ASCII(A,viewer);
 51:   } else {
 52:     SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
 53:   }
 54:   return(0);
 55: }

 59: int MatDestroy_MPIAdj(Mat mat)
 60: {
 61:   Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
 62:   int        ierr;

 65: #if defined(PETSC_USE_LOG)
 66:   PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
 67: #endif
 68:   if (a->diag) {PetscFree(a->diag);}
 69:   if (a->freeaij) {
 70:     PetscFree(a->i);
 71:     PetscFree(a->j);
 72:     if (a->values) {PetscFree(a->values);}
 73:   }
 74:   PetscFree(a->rowners);
 75:   PetscFree(a);
 76:   return(0);
 77: }

 81: int MatSetOption_MPIAdj(Mat A,MatOption op)
 82: {
 83:   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

 86:   switch (op) {
 87:   case MAT_STRUCTURALLY_SYMMETRIC:
 88:     a->symmetric = PETSC_TRUE;
 89:     break;
 90:   default:
 91:     PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
 92:     break;
 93:   }
 94:   return(0);
 95: }


 98: /*
 99:      Adds diagonal pointers to sparse matrix structure.
100: */

104: int MatMarkDiagonal_MPIAdj(Mat A)
105: {
106:   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
107:   int        i,j,*diag,m = A->m,ierr;

110:   PetscMalloc((m+1)*sizeof(int),&diag);
111:   PetscLogObjectMemory(A,(m+1)*sizeof(int));
112:   for (i=0; i<A->m; i++) {
113:     for (j=a->i[i]; j<a->i[i+1]; j++) {
114:       if (a->j[j] == i) {
115:         diag[i] = j;
116:         break;
117:       }
118:     }
119:   }
120:   a->diag = diag;
121:   return(0);
122: }

126: int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
127: {
128:   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
129:   int        *itmp;

132:   row -= a->rstart;

134:   if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

136:   *nz = a->i[row+1] - a->i[row];
137:   if (v) *v = PETSC_NULL;
138:   if (idx) {
139:     itmp = a->j + a->i[row];
140:     if (*nz) {
141:       *idx = itmp;
142:     }
143:     else *idx = 0;
144:   }
145:   return(0);
146: }

150: int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
151: {
153:   return(0);
154: }

158: int MatGetBlockSize_MPIAdj(Mat A,int *bs)
159: {
161:   *bs = 1;
162:   return(0);
163: }


168: int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
169: {
170:   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
171:   int         ierr;
172:   PetscTruth  flag;

 175:   /* if the matrix dimensions or the number of nonzeros differ, the matrices cannot be equal */
 176:   if ((A->m != B->m) || (a->nz != b->nz)) {
 177:     flag = PETSC_FALSE;
 178:   } else {
 179:     /* compare the row offset arrays a->i ... */
 180:     PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);
 181:     if (flag) {
 182:       /* ... and, only if those match, the column index arrays a->j */
 183:       PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);
 184:     }
 185:   }

186:   MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);
187:   return(0);
188: }

192: int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
193: {
194:   int        ierr,size,i;
195:   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

198:   MPI_Comm_size(A->comm,&size);
199:   if (size > 1) {*done = PETSC_FALSE; return(0);}
200:   *m    = A->m;
201:   *ia   = a->i;
202:   *ja   = a->j;
203:   *done = PETSC_TRUE;
204:   if (oshift) {
205:     for (i=0; i<(*ia)[*m]; i++) {
206:       (*ja)[i]++;
207:     }
208:     for (i=0; i<=(*m); i++) (*ia)[i]++;
209:   }
210:   return(0);
211: }

215: int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
216: {
217:   int        i;
218:   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

221:   if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
222:   if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
223:   if (oshift) {
224:     for (i=0; i<=(*m); i++) (*ia)[i]--;
225:     for (i=0; i<(*ia)[*m]; i++) {
226:       (*ja)[i]--;
227:     }
228:   }
229:   return(0);
230: }

232: /* -------------------------------------------------------------------*/
233: static struct _MatOps MatOps_Values = {0,
234:        MatGetRow_MPIAdj,
235:        MatRestoreRow_MPIAdj,
236:        0,
237: /* 4*/ 0,
238:        0,
239:        0,
240:        0,
241:        0,
242:        0,
243: /*10*/ 0,
244:        0,
245:        0,
246:        0,
247:        0,
248: /*15*/ 0,
249:        MatEqual_MPIAdj,
250:        0,
251:        0,
252:        0,
253: /*20*/ 0,
254:        0,
255:        0,
256:        MatSetOption_MPIAdj,
257:        0,
258: /*25*/ 0,
259:        0,
260:        0,
261:        0,
262:        0,
263: /*30*/ 0,
264:        0,
265:        0,
266:        0,
267:        0,
268: /*35*/ 0,
269:        0,
270:        0,
271:        0,
272:        0,
273: /*40*/ 0,
274:        0,
275:        0,
276:        0,
277:        0,
278: /*45*/ 0,
279:        0,
280:        0,
281:        0,
282:        0,
283: /*50*/ MatGetBlockSize_MPIAdj,
284:        MatGetRowIJ_MPIAdj,
285:        MatRestoreRowIJ_MPIAdj,
286:        0,
287:        0,
288: /*55*/ 0,
289:        0,
290:        0,
291:        0,
292:        0,
293: /*60*/ 0,
294:        MatDestroy_MPIAdj,
295:        MatView_MPIAdj,
296:        MatGetPetscMaps_Petsc,
297:        0,
298: /*65*/ 0,
299:        0,
300:        0,
301:        0,
302:        0,
303: /*70*/ 0,
304:        0,
305:        0,
306:        0,
307:        0,
308: /*75*/ 0,
309:        0,
310:        0,
311:        0,
312:        0,
313: /*80*/ 0,
314:        0,
315:        0,
316:        0,
317:        0,
318: /*85*/ 0
319: };

321: EXTERN_C_BEGIN
324: int MatMPIAdjSetPreallocation_MPIAdj(Mat B,int *i,int *j,int *values)
325: {
326:   Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
327:   int        ierr;
328: #if defined(PETSC_USE_BOPT_g)
329:   int        ii;
330: #endif

333:   B->preallocated = PETSC_TRUE;
334: #if defined(PETSC_USE_BOPT_g)
335:   if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
336:   for (ii=1; ii<B->m; ii++) {
337:     if (i[ii] < 0 || i[ii] < i[ii-1]) {
338:       SETERRQ4(1,"i[%d]=%d index is out of range: i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
339:     }
340:   }
341:   for (ii=0; ii<i[B->m]; ii++) {
342:     if (j[ii] < 0 || j[ii] >= B->N) {
 343:       SETERRQ2(1,"Column index %d at position %d is out of range\n",j[ii],ii);
344:     }
345:   }
346: #endif

348:   b->j      = j;
349:   b->i      = i;
350:   b->values = values;

352:   b->nz               = i[B->m];
353:   b->diag             = 0;
354:   b->symmetric        = PETSC_FALSE;
355:   b->freeaij          = PETSC_TRUE;

357:   MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
358:   MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
359:   return(0);
360: }
361: EXTERN_C_END

363: /*MC
364:    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
 365:    intended for use in constructing orderings and partitionings.

367:   Level: beginner

 369: .seealso: MatCreateMPIAdj()
370: M*/

372: EXTERN_C_BEGIN
375: int MatCreate_MPIAdj(Mat B)
376: {
377:   Mat_MPIAdj *b;
378:   int        ii,ierr,size,rank;

381:   MPI_Comm_size(B->comm,&size);
382:   MPI_Comm_rank(B->comm,&rank);

384:   PetscNew(Mat_MPIAdj,&b);
385:   B->data             = (void*)b;
386:   PetscMemzero(b,sizeof(Mat_MPIAdj));
387:   PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
388:   B->factor           = 0;
389:   B->lupivotthreshold = 1.0;
390:   B->mapping          = 0;
391:   B->assembled        = PETSC_FALSE;
392: 
393:   MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);
394:   B->n = B->N;

396:   /* the information in the maps duplicates the information computed below, eventually 
397:      we should remove the duplicate information that is not contained in the maps */
398:   PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);
399:   /* we don't know the "local columns" so just use the row information :-(*/
400:   PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);

402:   PetscMalloc((size+1)*sizeof(int),&b->rowners);
403:   PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
404:   MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);
405:   b->rowners[0] = 0;
406:   for (ii=2; ii<=size; ii++) {
407:     b->rowners[ii] += b->rowners[ii-1];
408:   }
409:   b->rstart = b->rowners[rank];
410:   b->rend   = b->rowners[rank+1];
411:   PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
412:                                     "MatMPIAdjSetPreallocation_MPIAdj",
413:                                      MatMPIAdjSetPreallocation_MPIAdj);
414:   return(0);
415: }
416: EXTERN_C_END

420: /*@C
 421:    MatMPIAdjSetPreallocation - Sets the arrays used to store the adjacency structure and optional edge weights of the matrix

423:    Collective on MPI_Comm

425:    Input Parameters:
 426: +  B - the matrix
427: .  i - the indices into j for the start of each row
428: .  j - the column indices for each row (sorted for each row).
429:        The indices in i and j start with zero (NOT with one).
430: -  values - [optional] edge weights

432:    Level: intermediate

434: .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
435: @*/
436: int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
437: {
438:   int ierr,(*f)(Mat,int*,int*,int*);

441:   PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);
442:   if (f) {
443:     (*f)(B,i,j,values);
444:   }
445:   return(0);
446: }
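
/*
   Illustrative usage sketch: one way to build a MATMPIADJ by creating the
   matrix with MatCreate()/MatSetType() and then handing it the adjacency
   arrays with MatMPIAdjSetPreallocation().  The 4-vertex ring graph and the
   helper name BuildRingAdj_Example are made up for illustration, and for
   brevity the whole graph is assumed to live on a single process.  The i and
   j arrays must be PetscMalloc()'d because the matrix takes ownership of them
   and frees them in MatDestroy_MPIAdj().
*/
int BuildRingAdj_Example(MPI_Comm comm,Mat *A)
{
  int ierr,k,*i,*j;
  int m = 4,nz = 8;                    /* 4 vertices, 2 neighbours each       */
  int itmp[5] = {0,2,4,6,8};           /* row offsets into j                  */
  int jtmp[8] = {1,3,0,2,1,3,0,2};     /* ring neighbours, sorted in each row */

  /* copy the stack arrays into PetscMalloc()'d storage owned by the matrix */
  ierr = PetscMalloc((m+1)*sizeof(int),&i);CHKERRQ(ierr);
  ierr = PetscMalloc(nz*sizeof(int),&j);CHKERRQ(ierr);
  for (k=0; k<=m; k++) i[k] = itmp[k];
  for (k=0; k<nz; k++) j[k] = jtmp[k];

  ierr = MatCreate(comm,m,m,PETSC_DETERMINE,m,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,PETSC_NULL);CHKERRQ(ierr);
  return(0);
}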

450: /*@C
451:    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
452:    The matrix does not have numerical values associated with it, but is
453:    intended for ordering (to reduce bandwidth etc) and partitioning.

455:    Collective on MPI_Comm

457:    Input Parameters:
458: +  comm - MPI communicator
459: .  m - number of local rows
460: .  n - number of columns
461: .  i - the indices into j for the start of each row
462: .  j - the column indices for each row (sorted for each row).
463:        The indices in i and j start with zero (NOT with one).
464: -  values -[optional] edge weights

466:    Output Parameter:
467: .  A - the matrix 

469:    Level: intermediate

 471:    Notes: This matrix object does not support most matrix operations, including
 472:    MatSetValues().
 473:    You must NOT free the i, j, and values arrays yourself. PETSc will free them
 474:    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
 475:    call from Fortran you need not create the arrays with PetscMalloc().
 476:    The i and j arrays should not include the matrix diagonal entries.

478:    If you already have a matrix, you can create its adjacency matrix by a call
479:    to MatConvert, specifying a type of MATMPIADJ.

481:    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

483: .seealso: MatCreate(), MatConvert(), MatGetOrdering()
484: @*/
485: int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
486: {
487:   int        ierr;

490:   MatCreate(comm,m,n,PETSC_DETERMINE,n,A);
491:   MatSetType(*A,MATMPIADJ);
492:   MatMPIAdjSetPreallocation(*A,i,j,values);
493:   return(0);
494: }
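
/*
   Illustrative usage sketch: the typical consumer of a matrix created with
   MatCreateMPIAdj() is a graph partitioner.  The MatPartitioning calls below
   follow the usual PETSc create/set/apply/destroy pattern; the helper name
   PartitionAdj_Example is made up for illustration.
*/
int PartitionAdj_Example(Mat adj,IS *partitioning)
{
  int             ierr;
  MatPartitioning part;

  ierr = MatPartitioningCreate(adj->comm,&part);CHKERRQ(ierr);
  ierr = MatPartitioningSetAdjacency(part,adj);CHKERRQ(ierr);
  ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
  ierr = MatPartitioningApply(part,partitioning);CHKERRQ(ierr);
  ierr = MatPartitioningDestroy(part);CHKERRQ(ierr);
  return(0);
}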

496: EXTERN_C_BEGIN
499: int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *newmat) {
500:   Mat          B;
501:   int          i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
502:   PetscScalar  *ra;
503:   MPI_Comm     comm;

506:   MatGetSize(A,PETSC_NULL,&N);
507:   MatGetLocalSize(A,&m,PETSC_NULL);
508:   MatGetOwnershipRange(A,&rstart,PETSC_NULL);
509: 
510:   /* count the number of nonzeros per row */
511:   for (i=0; i<m; i++) {
512:     MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);
513:     for (j=0; j<len; j++) {
514:       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
515:     }
516:     MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);
517:     nzeros += len;
518:   }

520:   /* malloc space for nonzeros */
521:   PetscMalloc((nzeros+1)*sizeof(int),&a);
522:   PetscMalloc((N+1)*sizeof(int),&ia);
523:   PetscMalloc((nzeros+1)*sizeof(int),&ja);

525:   nzeros = 0;
526:   ia[0]  = 0;
527:   for (i=0; i<m; i++) {
528:     MatGetRow(A,i+rstart,&len,&rj,&ra);
529:     cnt     = 0;
530:     for (j=0; j<len; j++) {
531:       if (rj[j] != i+rstart) { /* if not diagonal */
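        /* use |value|, truncated to an int, as the edge weight of this off-diagonal entry */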
532:         a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
533:         ja[nzeros+cnt++] = rj[j];
534:       }
535:     }
536:     MatRestoreRow(A,i+rstart,&len,&rj,&ra);
537:     nzeros += cnt;
538:     ia[i+1] = nzeros;
539:   }

541:   PetscObjectGetComm((PetscObject)A,&comm);
542:   MatCreateMPIAdj(comm,m,N,ia,ja,a,&B);

544:   /* Fake support for "inplace" convert. */
545:   if (*newmat == A) {
546:     MatDestroy(A);
547:   }
548:   *newmat = B;
549:   return(0);
550: }
551: EXTERN_C_END
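
/*
   Illustrative usage sketch: obtaining the adjacency structure of an existing
   matrix by conversion, as mentioned in the MatCreateMPIAdj() notes above.
   MatConvert() is assumed here to take the (Mat,MatType,Mat*) calling
   sequence of this PETSc generation; the MPIAdj-specific work (dropping
   diagonal entries and turning |value| into an integer edge weight) is done
   by MatConvertTo_MPIAdj() above.  The helper name AdjacencyOf_Example is
   made up for illustration.
*/
int AdjacencyOf_Example(Mat A,Mat *adj)
{
  int ierr;

  ierr = MatConvert(A,MATMPIADJ,adj);CHKERRQ(ierr);
  return(0);
}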