Actual source code: mesi.c
/*$Id: mesi.c,v 1.1 2001/09/12 03:30:08 bsmith Exp bsmith $*/
/*
    Defines the basic matrix operations for the ESI matrix type, which
  wraps an esi::Operator so that it may be used as a PETSc matrix.
*/

#include "src/mat/matimpl.h"
#include "petscsys.h"
#include "esi/petsc/vector.h"
#include "esi/petsc/matrix.h"
typedef struct {
  int                                   rstart,rend; /* range of local rows */
  esi::Operator<double,int>             *eop;
  esi::MatrixData<int>                  *emat;
  esi::MatrixRowReadAccess<double,int>  *rmat;
  esi::MatrixRowWriteAccess<double,int> *wmat;
} Mat_ESI;
/*
    Wraps a PETSc matrix so that it looks like an ESI matrix and stashes the wrapper
  inside the PETSc matrix. If the PETSc matrix already has a wrapper, that wrapper
  is reused instead.
*/
#undef __FUNCT__
#define __FUNCT__ "MatESIWrap"
int MatESIWrap(Mat xin,::esi::Operator<double,int> **v)
{
  esi::petsc::Matrix<double,int> *t;
  int                            ierr;

  if (!xin->esimat) {
    t = new esi::petsc::Matrix<double,int>(xin);
    t->getInterface("esi::Operator",xin->esimat);
  }
  *v = reinterpret_cast<esi::Operator<double,int>* >(xin->esimat);
  return(0);
}
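
/*
    Usage sketch (illustrative, not part of the original source): obtaining the
  ESI view of an assembled PETSc matrix and applying it to vectors that were
  wrapped with VecESIWrap(). The names A, ex, and ey are assumed to exist in
  the caller.

      esi::Operator<double,int> *eop;
      ierr = MatESIWrap(A,&eop);CHKERRQ(ierr);
      eop->apply(*ex,*ey);                      y = A x via the ESI interface
*/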
#undef __FUNCT__
#define __FUNCT__ "MatESISetOperator"
/*@C
     MatESISetOperator - Takes a PETSc matrix, sets its type to ESI, and
   wraps the given ESI operator so that it behaves as a PETSc matrix.

@*/
int MatESISetOperator(Mat xin,esi::Operator<double,int> *v)
{
  Mat_ESI    *x = (Mat_ESI*)xin->data;
  PetscTruth tesi;
  int        ierr;

  v->getInterface("esi::MatrixData",reinterpret_cast<void*&>(x->emat));
  v->getInterface("esi::MatrixRowReadAccess",reinterpret_cast<void*&>(x->rmat));
  v->getInterface("esi::MatrixRowWriteAccess",reinterpret_cast<void*&>(x->wmat));
  if (!x->emat) SETERRQ(1,"PETSc currently requires esi::Operator to support esi::MatrixData interface");

  PetscTypeCompare((PetscObject)xin,0,&tesi);
  if (tesi) {
    MatSetType(xin,MATESI);
  }
  PetscTypeCompare((PetscObject)xin,MATESI,&tesi);
  if (tesi) {
    int                  m,n,M,N;
    esi::IndexSpace<int> *rmap,*cmap;

    x->emat->getIndexSpaces(rmap,cmap);

    rmap->getGlobalSize(M);
    if (xin->M == -1) xin->M = M;
    else if (xin->M != M) SETERRQ2(1,"Global rows of Mat %d not equal size of esi::MatrixData %d",xin->M,M);

    cmap->getGlobalSize(N);
    if (xin->N == -1) xin->N = N;
    else if (xin->N != N) SETERRQ2(1,"Global columns of Mat %d not equal size of esi::MatrixData %d",xin->N,N);

    rmap->getLocalSize(m);
    if (xin->m == -1) xin->m = m;
    else if (xin->m != m) SETERRQ2(1,"Local rows of Mat %d not equal size of esi::MatrixData %d",xin->m,m);

    cmap->getLocalSize(n);
    if (xin->n == -1) xin->n = n;
    else if (xin->n != n) SETERRQ2(1,"Local columns of Mat %d not equal size of esi::MatrixData %d",xin->n,n);

    x->eop = v;
    v->addReference();
    if (!xin->rmap) {
      PetscMapCreateMPI(xin->comm,m,M,&xin->rmap);
    }
    if (!xin->cmap) {
      PetscMapCreateMPI(xin->comm,n,N,&xin->cmap);
    }
    PetscMapGetLocalRange(xin->rmap,&x->rstart,&x->rend);
    MatStashCreate_Private(xin->comm,1,&xin->stash);
  }
  return(0);
}
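
/*
    Usage sketch (hypothetical): handing an externally created ESI operator to
  a PETSc matrix. Here op may be any esi::Operator<double,int> that also
  supports the esi::MatrixData interface, as required above.

      Mat A;
      ierr = MatCreate(comm,PETSC_DECIDE,PETSC_DECIDE,M,N,&A);CHKERRQ(ierr);
      ierr = MatESISetOperator(A,op);CHKERRQ(ierr);
      ...A now behaves as any other PETSc matrix (MatMult(), MatView(), ...)
*/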
extern PetscFList CCAList;
#undef __FUNCT__
#define __FUNCT__ "MatESISetType"
/*@
     MatESISetType - Given a PETSc matrix of type ESI, loads the ESI constructor
   with the given name and wraps the resulting ESI operator so that it behaves
   as a PETSc matrix.
@*/
int MatESISetType(Mat V,char *name)
{
  int                                  ierr;
  ::esi::Operator<double,int>          *ve;
  ::esi::Operator<double,int>::Factory *f;
  ::esi::Operator<double,int>::Factory *(*r)(void);
  ::esi::IndexSpace<int>               *rmap,*cmap;

  PetscFListFind(V->comm,CCAList,name,(void(**)(void))&r);
  if (!r) SETERRQ1(1,"Unable to load esi::OperatorFactory constructor %s",name);
  f = (*r)();
  if (V->m == PETSC_DECIDE) {
    PetscSplitOwnership(V->comm,&V->m,&V->M);
  }
  ESICreateIndexSpace("MPI",&V->comm,V->m,rmap);
  if (V->n == PETSC_DECIDE) {
    PetscSplitOwnership(V->comm,&V->n,&V->N);
  }
  ESICreateIndexSpace("MPI",&V->comm,V->n,cmap);
  f->create(*rmap,*cmap,ve);
  rmap->deleteReference();
  cmap->deleteReference();
  delete f;
  MatESISetOperator(V,ve);
  ve->deleteReference();
  return(0);
}
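
/*
    Usage sketch (hypothetical; the constructor name must match one registered
  in CCAList, and the name shown here is only illustrative):

      ierr = MatESISetType(A,"esi::petsc::Matrix");CHKERRQ(ierr);
*/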
#undef __FUNCT__
#define __FUNCT__ "MatESISetFromOptions"
int MatESISetFromOptions(Mat V)
{
  char       string[PETSC_MAX_PATH_LEN];
  PetscTruth flg;
  int        ierr;

  PetscTypeCompare((PetscObject)V,MATESI,&flg);
  if (flg) {
    PetscOptionsGetString(V->prefix,"-mat_esi_type",string,PETSC_MAX_PATH_LEN,&flg);
    if (flg) {
      MatESISetType(V,string);
    }
  }
  return(0);
}
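
/*
    Command-line usage sketch: once a matrix has type MATESI, the wrapped
  implementation can be chosen at run time, e.g.

      ./myapp -mat_esi_type <constructor-name>

  where <constructor-name> is any name registered in CCAList.
*/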
/* ------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_ESI"
int MatSetValues_ESI(Mat mat,int m,int *im,int n,int *in,PetscScalar *v,InsertMode addv)
{
  Mat_ESI *iesi = (Mat_ESI*)mat->data;
  int     ierr,i,j,rstart = iesi->rstart,rend = iesi->rend;

  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_BOPT_g)
    if (im[i] >= mat->M) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row too large");
#endif
    if (im[i] >= rstart && im[i] < rend) {
      for (j=0; j<n; j++) {
        iesi->wmat->copyIntoRow(im[i],&v[i+j*m],&in[j],1);
      }
    } else {
      /* off-process entry: stash it for delivery during assembly */
      MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m);
    }
  }
  return(0);
}
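
/*
    Note on the value layout above: the logically dense block v is traversed as
  v[i+j*m], i.e. column oriented in this implementation. A hypothetical 2x2
  insertion therefore passes the values in the order a00,a10,a01,a11:

      int         rows[2] = {0,1},cols[2] = {0,1};
      PetscScalar v[4]    = {a00,a10,a01,a11};
      ierr = MatSetValues(A,2,rows,2,cols,v,INSERT_VALUES);CHKERRQ(ierr);
*/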
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_ESI"
int MatAssemblyBegin_ESI(Mat mat,MatAssemblyType mode)
{
  int        ierr,nstash,reallocs,*rowners;
  InsertMode addv;

  /* make sure all processors are either in INSERT_VALUES or ADD_VALUES mode;
     the bitwise OR of mixed modes yields (ADD_VALUES|INSERT_VALUES) */
  MPI_Allreduce(&mat->insertmode,&addv,1,MPI_INT,MPI_BOR,mat->comm);
  if (addv == (ADD_VALUES|INSERT_VALUES)) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added");
  }
  mat->insertmode = addv; /* in case this processor had no cache */

  PetscMapGetGlobalRange(mat->rmap,&rowners);
  MatStashScatterBegin_Private(&mat->stash,rowners);
  MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);
  PetscLogInfo(0,"MatAssemblyBegin_ESI:Stash has %d entries, uses %d mallocs.\n",nstash,reallocs);
  return(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_ESI"
int MatAssemblyEnd_ESI(Mat mat,MatAssemblyType mode)
{
  Mat_ESI     *a = (Mat_ESI*)mat->data;
  int         i,j,rstart,ncols,n,ierr,flg;
  int         *row,*col;
  PetscScalar *val;
  InsertMode  addv = mat->insertmode;

  while (1) {
    MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);
    if (!flg) break;
    for (i=0; i<n;) {
      /* now identify the consecutive vals belonging to the same row */
      for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; }
      if (j < n) ncols = j-i;
      else       ncols = n-i;
      /* now assemble all these values with a single function call */
      MatSetValues_ESI(mat,1,row+i,ncols,col+i,val+i,addv);
      i = j;
    }
  }
  MatStashScatterEnd_Private(&mat->stash);
  a->wmat->loadComplete();
  return(0);
}
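
/*
    Assembly usage sketch (standard PETSc idiom, shown for context): values set
  with MatSetValues() may target rows owned by other processes; the stash
  traffic above delivers them during assembly.

      for (i=rstart; i<rend; i++) {
        ierr = MatSetValues(A,1,&i,nc,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
      ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
*/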
#undef __FUNCT__
#define __FUNCT__ "MatMult_ESI"
int MatMult_ESI(Mat A,Vec xx,Vec yy)
{
  Mat_ESI                 *a = (Mat_ESI*)A->data;
  int                     ierr;
  esi::Vector<double,int> *x,*y;

  VecESIWrap(xx,&x);
  VecESIWrap(yy,&y);
  a->eop->apply(*x,*y);
  return(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_ESI"
int MatDestroy_ESI(Mat v)
{
  Mat_ESI *vs = (Mat_ESI*)v->data;
  int     ierr;

  if (vs->eop) {
    vs->eop->deleteReference();
  }
  MatStashDestroy_Private(&v->bstash);
  MatStashDestroy_Private(&v->stash);
  PetscFree(vs);
  return(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatView_ESI"
int MatView_ESI(Mat A,PetscViewer viewer)
{
  Mat_ESI              *a = (Mat_ESI*)A->data;
  int                  ierr,i,rstart,m,*cols,nz,j;
  PetscTruth           issocket,isascii,isbinary,isdraw;
  esi::IndexSpace<int> *rmap,*cmap;
  PetscScalar          *values;

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_SOCKET,&issocket);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
  if (isascii) {
    ierr   = PetscViewerASCIIUseTabs(viewer,PETSC_NO);
    cols   = new int[100];          /* fixed buffers: rows with more than 100 nonzeros are truncated */
    values = new PetscScalar[100];
    ierr   = a->emat->getIndexSpaces(rmap,cmap);
    ierr   = rmap->getLocalPartitionOffset(rstart);
    ierr   = rmap->getLocalSize(m);
    for (i=rstart; i<rstart+m; i++) {
      PetscViewerASCIIPrintf(viewer,"row %d:",i);
      a->rmat->copyOutRow(i,values,cols,100,nz);
      for (j=0; j<nz; j++) {
        PetscViewerASCIIPrintf(viewer," %d %g ",cols[j],values[j]);
      }
      PetscViewerASCIIPrintf(viewer,"\n");
    }
    PetscViewerASCIIUseTabs(viewer,PETSC_YES);
    PetscViewerFlush(viewer);
  } else {
    SETERRQ1(1,"Viewer type %s not supported by ESI matrices",((PetscObject)viewer)->type_name);
  }
  return(0);
}
/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {
       MatSetValues_ESI,
       0,
       0,
       MatMult_ESI,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatAssemblyBegin_ESI,
       MatAssemblyEnd_ESI,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_ESI,
       MatView_ESI,
       0};
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_ESI"
int MatCreate_ESI(Mat B)
{
  int     ierr;
  Mat_ESI *b;

  ierr    = PetscNew(Mat_ESI,&b);
  B->data = (void*)b;
  ierr    = PetscMemzero(b,sizeof(Mat_ESI));
  ierr    = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  PetscOptionsGetReal(PETSC_NULL,"-mat_lu_pivotthreshold",&B->lupivotthreshold,PETSC_NULL);

  b->emat = 0;
  return(0);
}
EXTERN_C_END
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatLoad_ESI"
int MatLoad_ESI(PetscViewer viewer,MatType type,Mat *newmat)
{
  Mat         A;
  PetscScalar *vals,*svals;
  MPI_Comm    comm = ((PetscObject)viewer)->comm;
  MPI_Status  status;
  int         i,nz,ierr,j,rstart,rend,fd;
  int         header[4],rank,size,*rowlengths = 0,M,N,m,*rowners,maxnz,*cols;
  int         *ourlens,*sndcounts = 0,*procsnz = 0,*offlens,jj,*mycols,*smycols;
  int         tag = ((PetscObject)viewer)->tag,cend,cstart,n;

  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);
  if (!rank) {
    PetscViewerBinaryGetDescriptor(viewer,&fd);
    PetscBinaryRead(fd,(char *)header,4,PETSC_INT);
    if (header[0] != MAT_FILE_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
    if (header[3] < 0) {
      SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Matrix in special format on disk, cannot load as MPIAIJ");
    }
  }
  MPI_Bcast(header+1,3,MPI_INT,0,comm);
  M = header[1]; N = header[2];
  /* determine ownership of all rows */
  m = M/size + ((M % size) > rank);
  PetscMalloc((size+2)*sizeof(int),&rowners);
  MPI_Allgather(&m,1,MPI_INT,rowners+1,1,MPI_INT,comm);
  rowners[0] = 0;
  for (i=2; i<=size; i++) {
    rowners[i] += rowners[i-1];
  }
  rstart = rowners[rank];
  rend   = rowners[rank+1];

  /* distribute row lengths to all processors */
  ierr    = PetscMalloc(2*(rend-rstart+1)*sizeof(int),&ourlens);
  offlens = ourlens + (rend-rstart);
  if (!rank) {
    PetscMalloc(M*sizeof(int),&rowlengths);
    PetscBinaryRead(fd,rowlengths,M,PETSC_INT);
    PetscMalloc(size*sizeof(int),&sndcounts);
    for (i=0; i<size; i++) sndcounts[i] = rowners[i+1] - rowners[i];
    MPI_Scatterv(rowlengths,sndcounts,rowners,MPI_INT,ourlens,rend-rstart,MPI_INT,0,comm);
    PetscFree(sndcounts);
  } else {
    MPI_Scatterv(0,0,0,MPI_INT,ourlens,rend-rstart,MPI_INT,0,comm);
  }
  if (!rank) {
    /* calculate the number of nonzeros on each processor */
    PetscMalloc(size*sizeof(int),&procsnz);
    PetscMemzero(procsnz,size*sizeof(int));
    for (i=0; i<size; i++) {
      for (j=rowners[i]; j<rowners[i+1]; j++) {
        procsnz[i] += rowlengths[j];
      }
    }
    PetscFree(rowlengths);

    /* determine max buffer needed and allocate it */
    maxnz = 0;
    for (i=0; i<size; i++) {
      maxnz = PetscMax(maxnz,procsnz[i]);
    }
    PetscMalloc(maxnz*sizeof(int),&cols);

    /* read in my part of the matrix column indices */
    nz = procsnz[0];
    PetscMalloc(nz*sizeof(int),&mycols);
    PetscBinaryRead(fd,mycols,nz,PETSC_INT);

    /* read in everyone else's and ship off */
    for (i=1; i<size; i++) {
      nz = procsnz[i];
      PetscBinaryRead(fd,cols,nz,PETSC_INT);
      MPI_Send(cols,nz,MPI_INT,i,tag,comm);
    }
    PetscFree(cols);
  } else {
    /* determine buffer space needed for message */
    nz = 0;
    for (i=0; i<m; i++) {
      nz += ourlens[i];
    }
    PetscMalloc((nz+1)*sizeof(int),&mycols);

    /* receive message of column indices */
    MPI_Recv(mycols,nz,MPI_INT,0,tag,comm,&status);
    MPI_Get_count(&status,MPI_INT,&maxnz);
    if (maxnz != nz) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
  }
  /* determine column ownership if matrix is not square */
  if (N != M) {
    n      = N/size + ((N % size) > rank);
    ierr   = MPI_Scan(&n,&cend,1,MPI_INT,MPI_SUM,comm);
    cstart = cend - n;
  } else {
    cstart = rstart;
    cend   = rend;
    n      = cend - cstart;
  }

  /* loop over local rows, determining number of off-diagonal entries */
  PetscMemzero(offlens,m*sizeof(int));
  jj = 0;
  for (i=0; i<m; i++) {
    for (j=0; j<ourlens[i]; j++) {
      if (mycols[jj] < cstart || mycols[jj] >= cend) offlens[i]++;
      jj++;
    }
  }

  /* create our matrix */
  for (i=0; i<m; i++) {
    ourlens[i] -= offlens[i];
  }
  MatCreate(comm,m,n,M,N,newmat);
  MatSetType(*newmat,type);
  MatSetFromOptions(*newmat);
  A = *newmat;
  MatSetOption(A,MAT_COLUMNS_SORTED);
  for (i=0; i<m; i++) {
    ourlens[i] += offlens[i];
  }
  if (!rank) {
    PetscMalloc(maxnz*sizeof(PetscScalar),&vals);

    /* read in my part of the matrix numerical values */
    nz = procsnz[0];
    PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);

    /* insert into matrix */
    jj      = rstart;
    smycols = mycols;
    svals   = vals;
    for (i=0; i<m; i++) {
      MatSetValues(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);
      smycols += ourlens[i];
      svals   += ourlens[i];
      jj++;
    }

    /* read in other processors' values and ship out */
    for (i=1; i<size; i++) {
      nz = procsnz[i];
      PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);
      MPI_Send(vals,nz,MPIU_SCALAR,i,A->tag,comm);
    }
    PetscFree(procsnz);
  } else {
    /* receive numeric values */
    PetscMalloc((nz+1)*sizeof(PetscScalar),&vals);

    /* receive message of values */
    MPI_Recv(vals,nz,MPIU_SCALAR,0,A->tag,comm,&status);
    MPI_Get_count(&status,MPIU_SCALAR,&maxnz);
    if (maxnz != nz) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");

    /* insert into matrix */
    jj      = rstart;
    smycols = mycols;
    svals   = vals;
    for (i=0; i<m; i++) {
      ierr = MatSetValues(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);
      smycols += ourlens[i];
      svals   += ourlens[i];
      jj++;
    }
  }
  PetscFree(ourlens);
  PetscFree(vals);
  PetscFree(mycols);
  PetscFree(rowners);

  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  return(0);
}
EXTERN_C_END
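
/*
    Load usage sketch (standard PETSc idiom; the file name is illustrative):
  reading a binary matrix file directly into an ESI matrix.

      PetscViewer viewer;
      Mat         A;
      ierr = PetscViewerBinaryOpen(comm,"matrix.dat",PETSC_BINARY_RDONLY,&viewer);CHKERRQ(ierr);
      ierr = MatLoad(viewer,MATESI,&A);CHKERRQ(ierr);
      ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
*/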
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_PetscESI"
int MatCreate_PetscESI(Mat V)
{
  int                            ierr;
  Mat                            v;
  esi::petsc::Matrix<double,int> *ve;

  V->ops->destroy = 0; /* since this is called from MatSetType() we have to make sure it doesn't get destroyed twice */
  MatSetType(V,MATESI);
  MatCreate(V->comm,V->m,V->n,V->M,V->N,&v);
  PetscObjectSetOptionsPrefix((PetscObject)v,"esi_");
  MatSetFromOptions(v);
  ve = new esi::petsc::Matrix<double,int>(v);
  MatESISetOperator(V,ve);
  ve->deleteReference();
  PetscObjectDereference((PetscObject)v);
  return(0);
}
EXTERN_C_END
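
/*
    Usage sketch (option names illustrative): the constructor above builds a
  native PETSc matrix internally and presents it through the ESI interfaces,
  so something like

      ./myapp -mat_type petscesi -esi_mat_type aij

  exercises the ESI code path while the actual storage remains a standard
  PETSc format (the inner matrix reads its options with the "esi_" prefix).
*/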