Actual source code: mesi.c


  3: /*$Id: mesi.c,v 1.1 2001/09/12 03:30:08 bsmith Exp bsmith $*/
  4: /*
  5:     Defines the basic matrix operations for the ESI matrix type, which wraps
  6:   matrices exposed through the Equation Solver Interface (ESI).
  7: */

  9:  #include "src/mat/matimpl.h"
 10:  #include "petscsys.h"
 11:  #include "esi/petsc/vector.h"
 12:  #include "esi/petsc/matrix.h"

 14: typedef struct {
 15:   int                                   rstart,rend; /* range of local rows */
 16:   esi::Operator<double,int>             *eop;
 17:   esi::MatrixData<int>                  *emat;
 18:   esi::MatrixRowReadAccess<double,int>  *rmat;
 19:   esi::MatrixRowWriteAccess<double,int> *wmat;
 20: } Mat_ESI;

 22: EXTERN int MatLoad_ESI(PetscViewer,MatType,Mat*);

 24: /*
 25:     Wraps a PETSc matrix to look like an ESI matrix and stashes the wrapper inside the
 26:   PETSc matrix. If the PETSc matrix already has a wrapper, that wrapper is reused.
 27: */
 30: int MatESIWrap(Mat xin,::esi::Operator<double,int> **v)
 31: {
 32:   esi::petsc::Matrix<double,int> *t;
 33:   int                            ierr;

 36:   if (!xin->esimat) {
 37:     t = new esi::petsc::Matrix<double,int>(xin);
 38:     t->getInterface("esi::Operator",xin->esimat);
 39:   }
 40:   *v = reinterpret_cast<esi::Operator<double,int>* >(xin->esimat);
 41:   return(0);
 42: }
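
/*
   Example (editor's sketch, not part of the original source): obtaining the ESI
   view of an existing PETSc matrix A. MatESIWrap() reuses the wrapper cached in
   A->esimat if the matrix has already been wrapped.

     ::esi::Operator<double,int> *op;
     ierr = MatESIWrap(A,&op);
     op->apply(*x,*y);   // x and y are esi::Vector<double,int> views obtained with VecESIWrap()
*/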

 46: /*@C
 47:      MatESISetOperator - Takes a PETSc matrix, sets its type to ESI, and attaches
 48:        the given ESI operator so that it can be used like a PETSc matrix.

 50: @*/
 51:  int MatESISetOperator(Mat xin,esi::Operator<double,int> *v)
 52: {
 53:   Mat_ESI    *x = (Mat_ESI*)xin->data;
 54:   PetscTruth tesi;
 55:   int        ierr;


 59:   v->getInterface("esi::MatrixData",reinterpret_cast<void*&>(x->emat));
 60:   v->getInterface("esi::MatrixRowReadAccess",reinterpret_cast<void*&>(x->rmat));
 61:   v->getInterface("esi::MatrixRowWriteAccess",reinterpret_cast<void*&>(x->wmat));
 62:   if (!x->emat) SETERRQ(1,"PETSc currently requires esi::Operator to support esi::MatrixData interface");

 64:   PetscTypeCompare((PetscObject)xin,0,&tesi);
 65:   if (tesi) {
 66:     MatSetType(xin,MATESI);
 67:   }
 68:   PetscTypeCompare((PetscObject)xin,MATESI,&tesi);
 69:   if (tesi) {
 70:     int                    m,n,M,N;
 71:     esi::IndexSpace<int>   *rmap,*cmap;

 73:     x->emat->getIndexSpaces(rmap,cmap);

 75:     rmap->getGlobalSize(M);
 76:     if (xin->M == -1) xin->M = M;
 77:     else if (xin->M != M) SETERRQ2(1,"Global rows of Mat %d not equal size of esi::MatrixData %d",xin->M,M);

 79:     cmap->getGlobalSize(N);
 80:     if (xin->N == -1) xin->N = N;
 81:     else if (xin->N != N) SETERRQ2(1,"Global columns of Mat %d not equal size of esi::MatrixData %d",xin->N,N);

 83:     rmap->getLocalSize(m);
 84:     if (xin->m == -1) xin->m = m;
 85:     else if (xin->m != m) SETERRQ2(1,"Local rows of Mat %d not equal size of esi::MatrixData %d",xin->m,m);

 87:     cmap->getLocalSize(n);
 88:     if (xin->n == -1) xin->n = n;
 89:     else if (xin->n != n) SETERRQ2(1,"Local columns of Mat %d not equal size of esi::MatrixData %d",xin->n,n);

 91:     x->eop  = v;
 92:     v->addReference();
 93:     if (!xin->rmap){
 94:       PetscMapCreateMPI(xin->comm,m,M,&xin->rmap);
 95:     }
 96:     if (!xin->cmap){
 97:       PetscMapCreateMPI(xin->comm,n,N,&xin->cmap);
 98:     }
 99:     PetscMapGetLocalRange(xin->rmap,&x->rstart,&x->rend);
100:     MatStashCreate_Private(xin->comm,1,&xin->stash);
101:   }
102:   return(0);
103: }
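
/*
   Example (editor's sketch, not part of the original source): attaching an
   externally created ESI operator to a PETSc matrix. Here `eop' stands for any
   esi::Operator that also supports the esi::MatrixData interface.

     Mat A;
     ierr = MatCreate(comm,m,n,M,N,&A);
     ierr = MatSetType(A,MATESI);
     ierr = MatESISetOperator(A,eop);
     ierr = MatMult(A,x,y);
*/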

105: extern PetscFList CCAList;

109: /*@
110:    MatESISetType - Given a PETSc matrix of type ESI, loads the named ESI operator
111:          constructor and wraps the resulting ESI operator so it can be used like a PETSc matrix.
112: @*/
113: int MatESISetType(Mat V,char *name)
114: {
115:   int                                  ierr;
116:   ::esi::Operator<double,int>          *ve;
117:   ::esi::Operator<double,int>::Factory *f;
118:   ::esi::Operator<double,int>::Factory *(*r)(void);
119:   ::esi::IndexSpace<int>               *rmap,*cmap;

122:   PetscFListFind(V->comm,CCAList,name,(void(**)(void))&r);
123:   if (!r) SETERRQ1(1,"Unable to load esi::OperatorFactory constructor %s",name);
124:   f    = (*r)();
125:   if (V->m == PETSC_DECIDE) {
126:     PetscSplitOwnership(V->comm,&V->m,&V->M);
127:   }
128:   ESICreateIndexSpace("MPI",&V->comm,V->m,rmap);
129:   if (V->n == PETSC_DECIDE) {
130:     PetscSplitOwnership(V->comm,&V->n,&V->N);
131:   }
132:   ESICreateIndexSpace("MPI",&V->comm,V->n,cmap);
133:   f->create(*rmap,*cmap,ve);
134:   rmap->deleteReference();
135:   cmap->deleteReference();
136:   delete f;
137:   MatESISetOperator(V,ve);
138:   ve->deleteReference();
139:   return(0);
140: }

144: int MatESISetFromOptions(Mat V)
145: {
146:   char       string[PETSC_MAX_PATH_LEN];
147:   PetscTruth flg;
148:   int        ierr;
149: 
151:   PetscTypeCompare((PetscObject)V,MATESI,&flg);
152:   if (flg) {
153:     PetscOptionsGetString(V->prefix,"-mat_esi_type",string,PETSC_MAX_PATH_LEN,&flg);
154:     if (flg) {
155:       MatESISetType(V,string);
156:     }
157:   }
158:   return(0);
159: }
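
/*
   Example (editor's sketch): choosing the concrete ESI implementation, either
   explicitly by constructor name or from the options database. The constructor
   name below is hypothetical; any name registered in CCAList may be given.

     ierr = MatSetType(A,MATESI);
     ierr = MatESISetType(A,"esi::petsc::Matrix");   // explicit choice
     // or, equivalently, run with:  -mat_esi_type <constructor>
     ierr = MatESISetFromOptions(A);
*/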

161: /* ------------------------------------------------------------------------------------*/

165: int MatSetValues_ESI(Mat mat,int m,const int im[],int n,const int in[],const PetscScalar v[],InsertMode addv)
166: {
167:   Mat_ESI *iesi = (Mat_ESI*)mat->data;
168:   int      ierr,i,j,rstart = iesi->rstart,rend = iesi->rend;
169: 
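  /* Locally owned rows are copied directly into the ESI matrix below; entries for
     rows owned by other processes are stashed and communicated during assembly.
     (Editor's descriptive comment.) */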
171:   for (i=0; i<m; i++) {
172:     if (im[i] < 0) continue;
173: #if defined(PETSC_USE_BOPT_g)
174:     if (im[i] >= mat->M) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row too large");
175: #endif
176:     if (im[i] >= rstart && im[i] < rend) {
177:       for (j=0; j<n; j++) {
178:           iesi->wmat->copyIntoRow(im[i],(double *)&v[i+j*m],(int *)&in[j],1);
179:        }
180:     } else {
181:       MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m);
182:     }
183:   }
184:   return(0);
185: }

189: int MatAssemblyBegin_ESI(Mat mat,MatAssemblyType mode)
190: {
191:   int         ierr,nstash,reallocs,*rowners;
192:   InsertMode  addv;


 196:   /* make sure all processors are in either INSERT_VALUES or ADD_VALUES mode */
197:   MPI_Allreduce(&mat->insertmode,&addv,1,MPI_INT,MPI_BOR,mat->comm);
198:   if (addv == (ADD_VALUES|INSERT_VALUES)) {
199:     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added");
200:   }
201:   mat->insertmode = addv; /* in case this processor had no cache */

203:   PetscMapGetGlobalRange(mat->rmap,&rowners);
204:   MatStashScatterBegin_Private(&mat->stash,rowners);
205:   MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);
206:   PetscLogInfo(0,"MatAssemblyBegin_ESI:Stash has %d entries, uses %d mallocs.\n",nstash,reallocs);
207:   return(0);
208: }
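
/*
   MatAssemblyEnd_ESI drains the stash: each received message is scanned for runs of
   entries sharing the same row so they can be inserted with one MatSetValues_ESI()
   call, after which loadComplete() finalizes the ESI matrix.
   (Editor's descriptive comment, not part of the original source.)
*/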


213: int MatAssemblyEnd_ESI(Mat mat,MatAssemblyType mode)
214: {
215:   Mat_ESI     *a = (Mat_ESI*)mat->data;
216:   int         i,j,rstart,ncols,n,ierr,flg;
217:   int         *row,*col;
218:   PetscScalar *val;
219:   InsertMode  addv = mat->insertmode;

222:   while (1) {
223:     MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);
224:     if (!flg) break;
225:      for (i=0; i<n;) {
226:       /* Now identify the consecutive vals belonging to the same row */
227:       for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; }
228:       if (j < n) ncols = j-i;
229:       else       ncols = n-i;
230:       /* Now assemble all these values with a single function call */
231:       MatSetValues_ESI(mat,1,row+i,ncols,col+i,val+i,addv);
232:       i = j;
233:     }
234:   }
235:   MatStashScatterEnd_Private(&mat->stash);
236:   a->wmat->loadComplete();
237:   return(0);
238: }

242: int MatMult_ESI(Mat A,Vec xx,Vec yy)
243: {
244:   Mat_ESI                 *a = (Mat_ESI*)A->data;
245:   int                     ierr;
246:   esi::Vector<double,int> *x,*y;

249:   VecESIWrap(xx,&x);
250:   VecESIWrap(yy,&y);
251:   a->eop->apply(*x,*y);
252:   return(0);
253: }

257: int MatDestroy_ESI(Mat v)
258: {
259:   Mat_ESI *vs = (Mat_ESI*)v->data;
260:   int     ierr;

263:   if (vs->eop) {
264:     vs->eop->deleteReference();
265:   }
266:   MatStashDestroy_Private(&v->bstash);
267:   MatStashDestroy_Private(&v->stash);
268:   PetscFree(vs);
269:   return(0);
270: }

274: int MatView_ESI(Mat A,PetscViewer viewer)
275: {
276:   Mat_ESI              *a = (Mat_ESI*)A->data;
277:   int                  ierr,i,rstart,m,*cols,nz,j;
278:   PetscTruth           issocket,isascii,isbinary,isdraw;
279:   esi::IndexSpace<int> *rmap,*cmap;
280:   PetscScalar          *values;

283:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_SOCKET,&issocket);
284:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
285:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
286:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
287:   if (isascii) {
288:     PetscViewerASCIIUseTabs(viewer,PETSC_NO);
289:     cols   = new int[100];
290:     values = new PetscScalar[100];
291:     a->emat->getIndexSpaces(rmap,cmap);
292:     rmap->getLocalPartitionOffset(rstart);
293:     rmap->getLocalSize(m);
294:     for (i=rstart; i<rstart+m; i++) {
295:       PetscViewerASCIIPrintf(viewer,"row %d:",i);
296:       a->rmat->copyOutRow(i,values,cols,100,nz);
297:       for (j=0; j<nz; j++) {
298:         PetscViewerASCIIPrintf(viewer," %d %g ",cols[j],values[j]);
299:       }
300:       PetscViewerASCIIPrintf(viewer,"\n");
301:     }
          delete [] cols;
          delete [] values;
 302:     PetscViewerASCIIUseTabs(viewer,PETSC_YES);
303:     PetscViewerFlush(viewer);
304:   } else {
 305:     SETERRQ1(1,"Viewer type %s not supported by ESI matrices",((PetscObject)viewer)->type_name);
306:   }
307:   return(0);
308: }


311: /* -------------------------------------------------------------------*/
312: static struct _MatOps MatOps_Values = {
313:        MatSetValues_ESI,
314:        0,
315:        0,
316:        MatMult_ESI,
317: /* 4*/ 0,
318:        0,
319:        0,
320:        0,
321:        0,
322:        0,
323: /*10*/ 0,
324:        0,
325:        0,
326:        0,
327:        0,
328: /*15*/ 0,
329:        0,
330:        0,
331:        0,
332:        0,
333: /*20*/ MatAssemblyBegin_ESI,
334:        MatAssemblyEnd_ESI,
335:        0,
336:        0,
337:        0,
338: /*25*/ 0,
339:        0,
340:        0,
341:        0,
342:        0,
343: /*30*/ 0,
344:        0,
345:        0,
346:        0,
347:        0,
348: /*35*/ 0,
349:        0,
350:        0,
351:        0,
352:        0,
353: /*40*/ 0,
354:        0,
355:        0,
356:        0,
357:        0,
358: /*45*/ 0,
359:        0,
360:        0,
361:        0,
362:        0,
363: /*50*/ 0,
364:        0,
365:        0,
366:        0,
367:        0,
368: /*55*/ 0,
369:        0,
370:        0,
371:        0,
372:        0,
373: /*60*/ 0,
374:        MatDestroy_ESI,
375:        MatView_ESI,
376:        0,
377:        0,
378: /*65*/ 0,
379:        0,
380:        0,
381:        0,
382:        0,
383: /*70*/ 0,
384:        0,
385:        0,
386:        0,
387:        0,
388: /*75*/ 0,
389:        0,
390:        0,
391:        0,
392:        0,
393: /*80*/ 0,
394:        0,
395:        0,
396:        0,
397:        0,
398: /*85*/ MatLoad_ESI
399: };

401: /*MC
402:   MATESI - MATESI = "esi" - A matrix type for use with the Equation Solver Interface (ESI).

404:   More information about the Equation Solver Interface (ESI) can be found at:
405:   http://z.ca.sandia.gov/esi/

407:   Level: advanced

409: .seealso: MATPETSCESI
410: M*/
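
/*
   Example (editor's sketch, not part of the original source): creating a MATESI
   matrix and selecting the concrete ESI implementation at run time.

     Mat A;
     ierr = MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,M,N,&A);
     ierr = MatSetType(A,MATESI);
     ierr = MatESISetFromOptions(A);   // honors -mat_esi_type <constructor>
*/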

412: EXTERN_C_BEGIN
415: int MatCreate_ESI(Mat B)
416: {
417:   int        ierr;
418:   Mat_ESI    *b;


422:   PetscNew(Mat_ESI,&b);
423:   B->data             = (void*)b;
424:   PetscMemzero(b,sizeof(Mat_ESI));
425:   PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
426:   B->factor           = 0;
427:   B->lupivotthreshold = 1.0;
428:   B->mapping          = 0;
429:   PetscOptionsGetReal(PETSC_NULL,"-mat_lu_pivotthreshold",&B->lupivotthreshold,PETSC_NULL);

431:   b->emat = 0;
432:   return(0);
433: }
434: EXTERN_C_END

438: int MatLoad_ESI(PetscViewer viewer,MatType type,Mat *newmat)
439: {
440:   Mat          A;
441:   PetscScalar  *vals,*svals;
442:   MPI_Comm     comm = ((PetscObject)viewer)->comm;
443:   MPI_Status   status;
444:   int          i,nz,ierr,j,rstart,rend,fd;
445:   int          header[4],rank,size,*rowlengths = 0,M,N,m,*rowners,maxnz,*cols;
446:   int          *ourlens,*sndcounts = 0,*procsnz = 0,*offlens,jj,*mycols,*smycols;
447:   int          tag = ((PetscObject)viewer)->tag,cend,cstart,n;

450:   MPI_Comm_size(comm,&size);
451:   MPI_Comm_rank(comm,&rank);
452:   if (!rank) {
453:     PetscViewerBinaryGetDescriptor(viewer,&fd);
454:     PetscBinaryRead(fd,(char *)header,4,PETSC_INT);
455:     if (header[0] != MAT_FILE_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
456:     if (header[3] < 0) {
 457:       SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Matrix in special format on disk, cannot load as ESI");
458:     }
459:   }

461:   MPI_Bcast(header+1,3,MPI_INT,0,comm);
462:   M = header[1]; N = header[2];
463:   /* determine ownership of all rows */
464:   m = M/size + ((M % size) > rank);
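        /* each process gets M/size rows; the first M%size processes take one
           extra row (editor's descriptive comment) */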
465:   PetscMalloc((size+2)*sizeof(int),&rowners);
466:   MPI_Allgather(&m,1,MPI_INT,rowners+1,1,MPI_INT,comm);
467:   rowners[0] = 0;
468:   for (i=2; i<=size; i++) {
469:     rowners[i] += rowners[i-1];
470:   }
471:   rstart = rowners[rank];
472:   rend   = rowners[rank+1];

474:   /* distribute row lengths to all processors */
475:   PetscMalloc(2*(rend-rstart+1)*sizeof(int),&ourlens);
476:   offlens = ourlens + (rend-rstart);
477:   if (!rank) {
478:     PetscMalloc(M*sizeof(int),&rowlengths);
479:     PetscBinaryRead(fd,rowlengths,M,PETSC_INT);
480:     PetscMalloc(size*sizeof(int),&sndcounts);
481:     for (i=0; i<size; i++) sndcounts[i] = rowners[i+1] - rowners[i];
482:     MPI_Scatterv(rowlengths,sndcounts,rowners,MPI_INT,ourlens,rend-rstart,MPI_INT,0,comm);
483:     PetscFree(sndcounts);
484:   } else {
485:     MPI_Scatterv(0,0,0,MPI_INT,ourlens,rend-rstart,MPI_INT,0,comm);
486:   }

488:   if (!rank) {
489:     /* calculate the number of nonzeros on each processor */
490:     PetscMalloc(size*sizeof(int),&procsnz);
491:     PetscMemzero(procsnz,size*sizeof(int));
492:     for (i=0; i<size; i++) {
493:       for (j=rowners[i]; j< rowners[i+1]; j++) {
494:         procsnz[i] += rowlengths[j];
495:       }
496:     }
497:     PetscFree(rowlengths);

499:     /* determine max buffer needed and allocate it */
500:     maxnz = 0;
501:     for (i=0; i<size; i++) {
502:       maxnz = PetscMax(maxnz,procsnz[i]);
503:     }
504:     PetscMalloc(maxnz*sizeof(int),&cols);

506:     /* read in my part of the matrix column indices  */
507:     nz   = procsnz[0];
508:     PetscMalloc(nz*sizeof(int),&mycols);
509:     PetscBinaryRead(fd,mycols,nz,PETSC_INT);

 511:     /* read in everyone else's part and ship it off */
512:     for (i=1; i<size; i++) {
513:       nz   = procsnz[i];
514:       PetscBinaryRead(fd,cols,nz,PETSC_INT);
515:       MPI_Send(cols,nz,MPI_INT,i,tag,comm);
516:     }
517:     PetscFree(cols);
518:   } else {
519:     /* determine buffer space needed for message */
520:     nz = 0;
521:     for (i=0; i<m; i++) {
522:       nz += ourlens[i];
523:     }
524:     PetscMalloc((nz+1)*sizeof(int),&mycols);

 526:     /* receive message of column indices */
527:     MPI_Recv(mycols,nz,MPI_INT,0,tag,comm,&status);
528:     MPI_Get_count(&status,MPI_INT,&maxnz);
529:     if (maxnz != nz) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
530:   }

532:   /* determine column ownership if matrix is not square */
533:   if (N != M) {
534:     n      = N/size + ((N % size) > rank);
535:     MPI_Scan(&n,&cend,1,MPI_INT,MPI_SUM,comm);
536:     cstart = cend - n;
537:   } else {
538:     cstart = rstart;
539:     cend   = rend;
540:     n      = cend - cstart;
541:   }

543:   /* loop over local rows, determining number of off diagonal entries */
544:   PetscMemzero(offlens,m*sizeof(int));
545:   jj = 0;
546:   for (i=0; i<m; i++) {
547:     for (j=0; j<ourlens[i]; j++) {
548:       if (mycols[jj] < cstart || mycols[jj] >= cend) offlens[i]++;
549:       jj++;
550:     }
551:   }

553:   /* create our matrix */
554:   for (i=0; i<m; i++) {
555:     ourlens[i] -= offlens[i];
556:   }
557:   MatCreate(comm,m,n,M,N,newmat);
558:   MatSetType(*newmat,type);
559:   MatSetFromOptions(*newmat);
560:   A = *newmat;
561:   MatSetOption(A,MAT_COLUMNS_SORTED);
562:   for (i=0; i<m; i++) {
563:     ourlens[i] += offlens[i];
564:   }

566:   if (!rank) {
567:     PetscMalloc(maxnz*sizeof(PetscScalar),&vals);

569:     /* read in my part of the matrix numerical values  */
570:     nz   = procsnz[0];
571:     PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);
572: 
573:     /* insert into matrix */
574:     jj      = rstart;
575:     smycols = mycols;
576:     svals   = vals;
577:     for (i=0; i<m; i++) {
578:       MatSetValues(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);
579:       smycols += ourlens[i];
580:       svals   += ourlens[i];
581:       jj++;
582:     }

584:     /* read in other processors and ship out */
585:     for (i=1; i<size; i++) {
586:       nz   = procsnz[i];
587:       PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);
588:       MPI_Send(vals,nz,MPIU_SCALAR,i,A->tag,comm);
589:     }
590:     PetscFree(procsnz);
591:   } else {
592:     /* receive numeric values */
593:     PetscMalloc((nz+1)*sizeof(PetscScalar),&vals);

 595:     /* receive message of values */
596:     MPI_Recv(vals,nz,MPIU_SCALAR,0,A->tag,comm,&status);
597:     MPI_Get_count(&status,MPIU_SCALAR,&maxnz);
598:     if (maxnz != nz) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");

600:     /* insert into matrix */
601:     jj      = rstart;
602:     smycols = mycols;
603:     svals   = vals;
604:     for (i=0; i<m; i++) {
605:       MatSetValues(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);
606:       smycols += ourlens[i];
607:       svals   += ourlens[i];
608:       jj++;
609:     }
610:   }
611:   PetscFree(ourlens);
612:   PetscFree(vals);
613:   PetscFree(mycols);
614:   PetscFree(rowners);

616:   MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
617:   MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
618:   return(0);
619: }

621: /*MC
622:   MATPETSCESI - MATPETSCESI = "petscesi" - A matrix type which wraps a PETSc matrix as an ESI matrix.

624:   More information about the Equation Solver Interface (ESI) can be found at:
625:   http://z.ca.sandia.gov/esi/

627:   Level: advanced

629: .seealso: MATESI
630: M*/
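
/*
   Example (editor's sketch, not part of the original source): MATPETSCESI creates
   an ordinary PETSc matrix internally and exposes it through the ESI interfaces.

     Mat A;
     ierr = MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,M,N,&A);
     ierr = MatSetType(A,MATPETSCESI);
     ierr = MatSetFromOptions(A);
*/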

632: EXTERN_C_BEGIN
635: int MatCreate_PetscESI(Mat V)
636: {
637:   int                            ierr;
638:   Mat                            v;
639:   esi::petsc::Matrix<double,int> *ve;

642:   V->ops->destroy = 0;  /* since this is called from MatSetType() we have to make sure it doesn't get destroyed twice */
643:   MatSetType(V,MATESI);
644:   MatCreate(V->comm,V->m,V->n,V->M,V->N,&v);
645:   PetscObjectSetOptionsPrefix((PetscObject)v,"esi_");
646:   MatSetFromOptions(v);
647:   ve   = new esi::petsc::Matrix<double,int>(v);
648:   MatESISetOperator(V,ve);
649:   ve->deleteReference();
650:   PetscObjectDereference((PetscObject)v);
651:   return(0);
652: }
653: EXTERN_C_END