Actual source code: mpiadj.c

  1: /*
  2:     Defines the basic matrix operations for the ADJ adjacency-list matrix data structure.
  3: */
  4: #include "src/mat/impls/adj/mpi/mpiadj.h"
  5: #include "petscsys.h"
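
/*
    A sketch of the storage convention used throughout this file: each
    process holds its rows in compressed-row form, with a->i[k] giving
    the offset into a->j of row k's (0-based, sorted) column indices,
    a->values optionally giving integer edge weights, and a->rstart the
    global index of the first local row.  A small, hypothetical 4 x 4
    structure split over 2 processes:

       rank 0 (rows 0-1):  row 0: 1 3     row 1: 0 2
       rank 1 (rows 2-3):  row 2: 1 3     row 3: 0 2

       rank 0:  a->i = {0,2,4}   a->j = {1,3, 0,2}   a->rstart = 0
       rank 1:  a->i = {0,2,4}   a->j = {1,3, 0,2}   a->rstart = 2
*/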

  9: PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
 10: {
 11:   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
 12:   PetscErrorCode    ierr;
 13:   PetscInt          i,j,m = A->m;
 14:   char              *name;
 15:   PetscViewerFormat format;

 18:   PetscObjectGetName((PetscObject)A,&name);
 19:   PetscViewerGetFormat(viewer,&format);
 20:   if (format == PETSC_VIEWER_ASCII_INFO) {
 21:     return(0);
 22:   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
 23:     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
 24:   } else {
 25:     PetscViewerASCIIUseTabs(viewer,PETSC_NO);
 26:     for (i=0; i<m; i++) {
 27:       PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+a->rstart);
 28:       for (j=a->i[i]; j<a->i[i+1]; j++) {
 29:         PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);
 30:       }
 31:       PetscViewerASCIISynchronizedPrintf(viewer,"\n");
 32:     }
 33:     PetscViewerASCIIUseTabs(viewer,PETSC_YES);
 34:   }
 35:   PetscViewerFlush(viewer);
 36:   return(0);
 37: }

 41: PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
 42: {
 44:   PetscTruth     iascii;

 47:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
 48:   if (iascii) {
 49:     MatView_MPIAdj_ASCII(A,viewer);
 50:   } else {
 51:     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
 52:   }
 53:   return(0);
 54: }

 58: PetscErrorCode MatDestroy_MPIAdj(Mat mat)
 59: {
 60:   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;

 64: #if defined(PETSC_USE_LOG)
 65:   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->m,mat->n,a->nz);
 66: #endif
 67:   if (a->diag) {PetscFree(a->diag);}
 68:   if (a->freeaij) {
 69:     PetscFree(a->i);
 70:     PetscFree(a->j);
 71:     if (a->values) {PetscFree(a->values);}
 72:   }
 73:   PetscFree(a->rowners);
 74:   PetscFree(a);

 76:   PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);
 77:   return(0);
 78: }

 82: PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op)
 83: {
 84:   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

 87:   switch (op) {
 88:   case MAT_SYMMETRIC:
 89:   case MAT_STRUCTURALLY_SYMMETRIC:
 90:   case MAT_HERMITIAN:
 91:     a->symmetric = PETSC_TRUE;
 92:     break;
 93:   case MAT_NOT_SYMMETRIC:
 94:   case MAT_NOT_STRUCTURALLY_SYMMETRIC:
 95:   case MAT_NOT_HERMITIAN:
 96:     a->symmetric = PETSC_FALSE;
 97:     break;
 98:   case MAT_SYMMETRY_ETERNAL:
 99:   case MAT_NOT_SYMMETRY_ETERNAL:
100:     break;
101:   default:
102:     PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
103:     break;
104:   }
105:   return(0);
106: }
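
/*
    As the switch above shows, the only options acted on for this type
    are those recording (non)symmetry of the structure; the rest are
    accepted but ignored (most with a log message).  For example, with A
    an assembled MPIAdj matrix:

       MatSetOption(A,MAT_STRUCTURALLY_SYMMETRIC);
*/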


109: /*
110:      Adds diagonal pointers to sparse matrix structure.
111: */

115: PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
116: {
117:   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
119:   PetscInt       i,j,*diag,m = A->m;

122:   PetscMalloc((m+1)*sizeof(PetscInt),&diag);
123:   PetscLogObjectMemory(A,(m+1)*sizeof(PetscInt));
124:   for (i=0; i<A->m; i++) {
125:     for (j=a->i[i]; j<a->i[i+1]; j++) {
126:       if (a->j[j] == i) {
127:         diag[i] = j;
128:         break;
129:       }
130:     }
131:   }
132:   a->diag = diag;
133:   return(0);
134: }
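
/*
    A worked example (hypothetical data, one process) of what the loop
    above records: for the 3 x 3 structure stored as i = {0,2,3,5},
    j = {0,2, 1, 0,2}, the diagonal entry of row 0 sits at j[0], of
    row 1 at j[2], and of row 2 at j[4], so diag = {0,2,4}.  Rows that
    store no diagonal entry (the usual case for an adjacency matrix
    built with MatCreateMPIAdj(), whose input excludes the diagonal)
    leave diag[i] unset.
*/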

138: PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
139: {
140:   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
141:   PetscInt   *itmp;

144:   row -= a->rstart;

146:   if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

148:   *nz = a->i[row+1] - a->i[row];
149:   if (v) *v = PETSC_NULL;
150:   if (idx) {
151:     itmp = a->j + a->i[row];
152:     if (*nz) {
153:       *idx = itmp;
154:     }
155:     else *idx = 0;
156:   }
157:   return(0);
158: }
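
/*
    A usage sketch (illustrative names) of walking the local rows of an
    MPIAdj matrix A through the generic interface; no scalar values are
    stored, so PETSC_NULL is passed for them.  Within the loop,
    cols[0..nz-1] are the neighbours of vertex row:

       PetscInt       row,rstart,rend,nz;
       const PetscInt *cols;

       MatGetOwnershipRange(A,&rstart,&rend);
       for (row=rstart; row<rend; row++) {
         MatGetRow(A,row,&nz,&cols,PETSC_NULL);
         MatRestoreRow(A,row,&nz,&cols,PETSC_NULL);
       }
*/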

162: PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
163: {
165:   return(0);
166: }

170: PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
171: {
172:   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
174:   PetscTruth     flag;

177:   /* If the matrix dimensions or numbers of nonzeros differ, the matrices cannot be equal */
178:   if ((A->m != B->m) || (a->nz != b->nz)) {
179:     flag = PETSC_FALSE;
180:   } else {
181:     /* check that the row offset arrays a->i match */
182:     PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(PetscInt),&flag);
183:     if (flag) {
184:       /* check that the column index arrays a->j match */
185:       PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);
186:     }
187:   }

188:   MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);
189:   return(0);
190: }

194: PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
195: {
197:   PetscMPIInt    size;
198:   PetscInt       i;
199:   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;

202:   MPI_Comm_size(A->comm,&size);
203:   if (size > 1) {*done = PETSC_FALSE; return(0);}
204:   *m    = A->m;
205:   *ia   = a->i;
206:   *ja   = a->j;
207:   *done = PETSC_TRUE;
208:   if (oshift) {
209:     for (i=0; i<(*ia)[*m]; i++) {
210:       (*ja)[i]++;
211:     }
212:     for (i=0; i<=(*m); i++) (*ia)[i]++;
213:   }
214:   return(0);
215: }

219: PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
220: {
221:   PetscInt   i;
222:   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

225:   if (ia && a->i != *ia) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
226:   if (ja && a->j != *ja) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
227:   if (oshift) {
228:     for (i=0; i<=(*m); i++) (*ia)[i]--;
229:     for (i=0; i<(*ia)[*m]; i++) {
230:       (*ja)[i]--;
231:     }
232:   }
233:   return(0);
234: }
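
/*
    A sketch (uniprocessor only, as the size check above enforces) of
    borrowing the compressed row structure with a shift of 1, e.g. for a
    Fortran-indexed ordering package; variable names are illustrative:

       PetscInt   n,*ia,*ja;
       PetscTruth done;

       MatGetRowIJ(A,1,PETSC_FALSE,&n,&ia,&ja,&done);
       if (done) {
         ... ia[0..n] and ja[] now use 1-based (Fortran-style) indexing
       }
       MatRestoreRowIJ(A,1,PETSC_FALSE,&n,&ia,&ja,&done);
*/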

236: /* -------------------------------------------------------------------*/
237: static struct _MatOps MatOps_Values = {0,
238:        MatGetRow_MPIAdj,
239:        MatRestoreRow_MPIAdj,
240:        0,
241: /* 4*/ 0,
242:        0,
243:        0,
244:        0,
245:        0,
246:        0,
247: /*10*/ 0,
248:        0,
249:        0,
250:        0,
251:        0,
252: /*15*/ 0,
253:        MatEqual_MPIAdj,
254:        0,
255:        0,
256:        0,
257: /*20*/ 0,
258:        0,
259:        0,
260:        MatSetOption_MPIAdj,
261:        0,
262: /*25*/ 0,
263:        0,
264:        0,
265:        0,
266:        0,
267: /*30*/ 0,
268:        0,
269:        0,
270:        0,
271:        0,
272: /*35*/ 0,
273:        0,
274:        0,
275:        0,
276:        0,
277: /*40*/ 0,
278:        0,
279:        0,
280:        0,
281:        0,
282: /*45*/ 0,
283:        0,
284:        0,
285:        0,
286:        0,
287: /*50*/ 0,
288:        MatGetRowIJ_MPIAdj,
289:        MatRestoreRowIJ_MPIAdj,
290:        0,
291:        0,
292: /*55*/ 0,
293:        0,
294:        0,
295:        0,
296:        0,
297: /*60*/ 0,
298:        MatDestroy_MPIAdj,
299:        MatView_MPIAdj,
300:        MatGetPetscMaps_Petsc,
301:        0,
302: /*65*/ 0,
303:        0,
304:        0,
305:        0,
306:        0,
307: /*70*/ 0,
308:        0,
309:        0,
310:        0,
311:        0,
312: /*75*/ 0,
313:        0,
314:        0,
315:        0,
316:        0,
317: /*80*/ 0,
318:        0,
319:        0,
320:        0,
321:        0,
322: /*85*/ 0,
323:        0,
324:        0,
325:        0,
326:        0,
327: /*90*/ 0,
328:        0,
329:        0,
330:        0,
331:        0,
332: /*95*/ 0,
333:        0,
334:        0,
335:        0};

340: PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
341: {
342:   Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
344: #if defined(PETSC_USE_BOPT_g)
345:   PetscInt       ii;
346: #endif

349:   B->preallocated = PETSC_TRUE;
350: #if defined(PETSC_USE_BOPT_g)
351:   if (i[0] != 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
352:   for (ii=1; ii<B->m; ii++) {
353:     if (i[ii] < 0 || i[ii] < i[ii-1]) {
354:       SETERRQ4(PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
355:     }
356:   }
357:   for (ii=0; ii<i[B->m]; ii++) {
358:     if (j[ii] < 0 || j[ii] >= B->N) {
359:       SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column index %D out of range %D\n",ii,j[ii]);
360:     }
361:   }
362: #endif

364:   b->j      = j;
365:   b->i      = i;
366:   b->values = values;

368:   b->nz               = i[B->m];
369:   b->diag             = 0;
370:   b->symmetric        = PETSC_FALSE;
371:   b->freeaij          = PETSC_TRUE;

373:   MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
374:   MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
375:   return(0);
376: }

379: /*MC
380:    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
381:    intended for use in constructing matrix orderings and partitionings.

383:   Level: beginner

385: .seealso: MatCreateMPIAdj
386: M*/
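
/*
   A sketch of the intended use with the partitioning interface
   (hypothetical sizes mlocal,N and hypothetical PetscMalloc()ed arrays
   ii,jj; error checking omitted):

      Mat             adj;
      MatPartitioning part;
      IS              is;

      MatCreateMPIAdj(PETSC_COMM_WORLD,mlocal,N,ii,jj,PETSC_NULL,&adj);
      MatPartitioningCreate(PETSC_COMM_WORLD,&part);
      MatPartitioningSetAdjacency(part,adj);
      MatPartitioningSetFromOptions(part);
      MatPartitioningApply(part,&is);
*/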

391: PetscErrorCode MatCreate_MPIAdj(Mat B)
392: {
393:   Mat_MPIAdj     *b;
395:   PetscInt       ii;
396:   PetscMPIInt    size,rank;

399:   MPI_Comm_size(B->comm,&size);
400:   MPI_Comm_rank(B->comm,&rank);

402:   PetscNew(Mat_MPIAdj,&b);
403:   B->data             = (void*)b;
404:   PetscMemzero(b,sizeof(Mat_MPIAdj));
405:   PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
406:   B->factor           = 0;
407:   B->lupivotthreshold = 1.0;
408:   B->mapping          = 0;
409:   B->assembled        = PETSC_FALSE;
410: 
411:   PetscSplitOwnership(B->comm,&B->m,&B->M);
412:   B->n = B->N = PetscMax(B->N,B->n);

414:   /* the information in the maps duplicates the information computed below, eventually 
415:      we should remove the duplicate information that is not contained in the maps */
416:   PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);
417:   /* we don't know the "local columns" so just use the row information :-(*/
418:   PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);

420:   PetscMalloc((size+1)*sizeof(PetscInt),&b->rowners);
421:   PetscLogObjectMemory(B,(size+2)*sizeof(PetscInt)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
422:   MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);
423:   b->rowners[0] = 0;
424:   for (ii=2; ii<=size; ii++) {
425:     b->rowners[ii] += b->rowners[ii-1];
426:   }
427:   b->rstart = b->rowners[rank];
428:   b->rend   = b->rowners[rank+1];
429:   PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
430:                                     "MatMPIAdjSetPreallocation_MPIAdj",
431:                                      MatMPIAdjSetPreallocation_MPIAdj);
432:   return(0);
433: }

438: /*@C
439:    MatMPIAdjSetPreallocation - Sets the arrays that define the nonzero structure (and optional edge weights) of an MPIAdj matrix

441:    Collective on MPI_Comm

443:    Input Parameters:
444: +  B - the matrix
445: .  i - the indices into j for the start of each row
446: .  j - the column indices for each row (sorted for each row).
447:        The indices in i and j start with zero (NOT with one).
448: -  values - [optional] edge weights

450:    Level: intermediate

452: .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
453: @*/
454: PetscErrorCode MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
455: {
456:   PetscErrorCode ierr,(*f)(Mat,PetscInt*,PetscInt*,PetscInt*);

459:   PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);
460:   if (f) {
461:     (*f)(B,i,j,values);
462:   }
463:   return(0);
464: }
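
/*
    A sketch of the two-step creation path served by the routine above,
    equivalent to MatCreateMPIAdj() below (array names illustrative; the
    arrays must be obtained with PetscMalloc() since the matrix takes
    ownership of them):

       Mat B;

       MatCreate(comm,mlocal,N,PETSC_DETERMINE,N,&B);
       MatSetType(B,MATMPIADJ);
       MatMPIAdjSetPreallocation(B,ii,jj,PETSC_NULL);
*/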

468: /*@C
469:    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
470:    The matrix does not have numerical values associated with it, but is
471:    intended for ordering (to reduce bandwidth etc) and partitioning.

473:    Collective on MPI_Comm

475:    Input Parameters:
476: +  comm - MPI communicator
477: .  m - number of local rows
478: .  n - number of columns
479: .  i - the indices into j for the start of each row
480: .  j - the column indices for each row (sorted for each row).
481:        The indices in i and j start with zero (NOT with one).
482: -  values -[optional] edge weights

484:    Output Parameter:
485: .  A - the matrix 

487:    Level: intermediate

489:    Notes: This matrix object does not support most matrix operations, including
490:    MatSetValues().
491:    You must NOT free the i, j, and values arrays yourself; PETSc will free them
492:    when the matrix is destroyed. You must allocate them with PetscMalloc(). If you
493:    call from Fortran you need not create the arrays with PetscMalloc().
494:    The i and j arrays should not include the matrix diagonal entries.

496:    If you already have a matrix, you can create its adjacency matrix by a call
497:    to MatConvert, specifying a type of MATMPIADJ.

499:    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

501: .seealso: MatCreate(), MatConvert(), MatGetOrdering()
502: @*/
503: PetscErrorCode MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
504: {

508:   MatCreate(comm,m,n,PETSC_DETERMINE,n,A);
509:   MatSetType(*A,MATMPIADJ);
510:   MatMPIAdjSetPreallocation(*A,i,j,values);
511:   return(0);
512: }
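
/*
    A worked sketch (one process, hypothetical 4-vertex cycle 0-1-3-2-0)
    of building the input arrays as the notes require: PetscMalloc()ed,
    0-based, sorted within each row, and without the diagonal.
    Destroying the matrix also frees ii and jj.

       Mat      A;
       PetscInt *ii,*jj;
       PetscInt itmp[5] = {0,2,4,6,8};
       PetscInt jtmp[8] = {1,2, 0,3, 0,3, 1,2};

       PetscMalloc(5*sizeof(PetscInt),&ii);
       PetscMalloc(8*sizeof(PetscInt),&jj);
       PetscMemcpy(ii,itmp,5*sizeof(PetscInt));
       PetscMemcpy(jj,jtmp,8*sizeof(PetscInt));
       MatCreateMPIAdj(PETSC_COMM_SELF,4,4,ii,jj,PETSC_NULL,&A);
       MatDestroy(A);
*/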

517: PetscErrorCode MatConvertTo_MPIAdj(Mat A,MatType type,Mat *newmat)
518: {
519:   Mat               B;
520:   PetscErrorCode    ierr;
521:   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
522:   const PetscInt    *rj;
523:   const PetscScalar *ra;
524:   MPI_Comm          comm;

527:   MatGetSize(A,PETSC_NULL,&N);
528:   MatGetLocalSize(A,&m,PETSC_NULL);
529:   MatGetOwnershipRange(A,&rstart,PETSC_NULL);
530: 
531:   /* count the number of nonzeros per row */
532:   for (i=0; i<m; i++) {
533:     MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);
534:     for (j=0; j<len; j++) {
535:       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
536:     }
537:     MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);
538:     nzeros += len;
539:   }

541:   /* malloc space for nonzeros */
542:   PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);
543:   PetscMalloc((N+1)*sizeof(PetscInt),&ia);
544:   PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);

546:   nzeros = 0;
547:   ia[0]  = 0;
548:   for (i=0; i<m; i++) {
549:     MatGetRow(A,i+rstart,&len,&rj,&ra);
550:     cnt     = 0;
551:     for (j=0; j<len; j++) {
552:       if (rj[j] != i+rstart) { /* if not diagonal */
553:         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
554:         ja[nzeros+cnt++] = rj[j];
555:       }
556:     }
557:     MatRestoreRow(A,i+rstart,&len,&rj,&ra);
558:     nzeros += cnt;
559:     ia[i+1] = nzeros;
560:   }

562:   PetscObjectGetComm((PetscObject)A,&comm);
563:   MatCreate(comm,m,N,PETSC_DETERMINE,N,&B);
564:   MatSetType(B,type);
565:   MatMPIAdjSetPreallocation(B,ia,ja,a);

567:   /* Fake support for "inplace" convert. */
568:   if (*newmat == A) {
569:     MatDestroy(A);
570:   }
571:   *newmat = B;
572:   return(0);
573: }
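
/*
    A sketch of obtaining the adjacency structure of an existing matrix,
    as mentioned in the MatCreateMPIAdj() notes.  The MatConvert()
    calling sequence differs between PETSc releases; the three-argument
    form assumed here matches MatConvertTo_MPIAdj() above:

       Mat adj;

       MatConvert(A,MATMPIADJ,&adj);
       ... use adj for ordering or partitioning, then destroy it
*/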