Actual source code: mesi.c
/*$Id: mesi.c,v 1.1 2001/09/12 03:30:08 bsmith Exp bsmith $*/
/*
   Defines the basic matrix operations for a PETSc matrix that wraps an
   ESI operator, using row-oriented (compressed row style) access.
*/
#include "src/mat/matimpl.h"
#include "petscsys.h"
#include "esi/petsc/vector.h"
#include "esi/petsc/matrix.h"
typedef struct {
  int                                   rstart,rend; /* range of local rows */
  esi::Operator<double,int>             *eop;
  esi::MatrixData<int>                  *emat;
  esi::MatrixRowReadAccess<double,int>  *rmat;
  esi::MatrixRowWriteAccess<double,int> *wmat;
} Mat_ESI;
/*@C
   MatESISetOperator - Takes a PETSc matrix, sets its type to ESI, and
   wraps the given ESI operator so that it behaves as a PETSc matrix.
@*/
int MatESISetOperator(Mat xin,esi::Operator<double,int> *v)
{
  Mat_ESI    *x = (Mat_ESI*)xin->data;
  PetscTruth tesi;
  int        ierr;

  v->getInterface("esi::MatrixData",reinterpret_cast<void*&>(x->emat));
  v->getInterface("esi::MatrixRowReadAccess",reinterpret_cast<void*&>(x->rmat));
  v->getInterface("esi::MatrixRowWriteAccess",reinterpret_cast<void*&>(x->wmat));
  if (!x->emat) SETERRQ(1,"PETSc currently requires esi::Operator to support esi::MatrixData interface");

  PetscTypeCompare((PetscObject)xin,0,&tesi);
  if (tesi) {
    MatSetType(xin,MATESI);
  }
  PetscTypeCompare((PetscObject)xin,MATESI,&tesi);
  if (tesi) {
    int                  m,n,M,N;
    esi::IndexSpace<int> *rmap,*cmap;

    x->emat->getIndexSpaces(rmap,cmap);

    rmap->getGlobalSize(M);
    if (xin->M == -1) xin->M = M;
    else if (xin->M != M) SETERRQ2(1,"Global rows of Mat %d not equal size of esi::MatrixData %d",xin->M,M);

    cmap->getGlobalSize(N);
    if (xin->N == -1) xin->N = N;
    else if (xin->N != N) SETERRQ2(1,"Global columns of Mat %d not equal size of esi::MatrixData %d",xin->N,N);

    rmap->getLocalSize(m);
    if (xin->m == -1) xin->m = m;
    else if (xin->m != m) SETERRQ2(1,"Local rows of Mat %d not equal size of esi::MatrixData %d",xin->m,m);

    cmap->getLocalSize(n);
    if (xin->n == -1) xin->n = n;
    else if (xin->n != n) SETERRQ2(1,"Local columns of Mat %d not equal size of esi::MatrixData %d",xin->n,n);

    x->eop = v;
    v->addReference();
    if (!xin->rmap) {
      PetscMapCreateMPI(xin->comm,m,M,&xin->rmap);
    }
    if (!xin->cmap) {
      PetscMapCreateMPI(xin->comm,n,N,&xin->cmap);
    }
    PetscMapGetLocalRange(xin->rmap,&x->rstart,&x->rend);
    MatStashCreate_Private(xin->comm,1,&xin->stash);
  }
  return(0);
}
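/*
   Example usage, a minimal sketch: "op" stands for an already constructed
   esi::Operator supplied by the caller (hypothetical here); comm, M and N
   are assumed to be defined.

     Mat                       A;
     esi::Operator<double,int> *op = ...;   // obtained from some ESI library
     MatCreate(comm,PETSC_DECIDE,PETSC_DECIDE,M,N,&A);
     MatESISetOperator(A,op);               // A now forwards to op
*/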
extern PetscFList CCAList;
/*@
   MatESISetType - Given a PETSc matrix of type ESI, loads the ESI constructor
   with the given name and wraps the resulting ESI operator so that it behaves
   as a PETSc matrix.
@*/
int MatESISetType(Mat V,char *name)
{
  int                                ierr;
  ::esi::Operator<double,int>        *ve;
  ::esi::OperatorFactory<double,int> *f;
#if defined(PETSC_HAVE_CCA)
  ::gov::cca::Component              *(*r)(void);
#else
  ::esi::OperatorFactory<double,int> *(*r)(void);
#endif
  ::esi::IndexSpace<int>             *rmap,*cmap;

  PetscFListFind(V->comm,CCAList,name,(void(**)(void))&r);
  if (!r) SETERRQ1(1,"Unable to load esi::OperatorFactory constructor %s",name);
#if defined(PETSC_HAVE_CCA)
  gov::cca::Component *component = (*r)();
  gov::cca::Port      *port      = dynamic_cast<gov::cca::Port*>(component);
  f = dynamic_cast<esi::OperatorFactory<double,int>*>(port);
#else
  f = (*r)();
#endif
  if (V->m == PETSC_DECIDE) {
    PetscSplitOwnership(V->comm,&V->m,&V->M);
  }
  ESICreateIndexSpace("MPI",&V->comm,V->m,rmap);
  if (V->n == PETSC_DECIDE) {
    PetscSplitOwnership(V->comm,&V->n,&V->N);
  }
  ESICreateIndexSpace("MPI",&V->comm,V->n,cmap);
  f->getOperator(*rmap,*cmap,ve);
  rmap->deleteReference();
  cmap->deleteReference();
  delete f;
  MatESISetOperator(V,ve);
  ve->deleteReference();
  return(0);
}
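/*
   Example usage, a sketch: the factory name below is a placeholder; valid
   names are whatever constructors have been registered in CCAList.

     Mat A;
     MatCreate(comm,PETSC_DECIDE,PETSC_DECIDE,M,N,&A);
     MatSetType(A,MATESI);
     MatESISetType(A,"some::registered::OperatorFactory");
*/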
int MatESISetFromOptions(Mat V)
{
  char       string[1024];
  PetscTruth flg;
  int        ierr;

  PetscTypeCompare((PetscObject)V,MATESI,&flg);
  if (flg) {
    PetscOptionsGetString(V->prefix,"-mat_esi_type",string,1024,&flg);
    if (flg) {
      MatESISetType(V,string);
    }
  }
  return(0);
}
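/*
   Options Database Key (sketch; the factory name is a placeholder):

     -mat_esi_type <factory>   selects the ESI operator factory to load, e.g.
                               ./myapp -mat_esi_type some::registered::OperatorFactory
*/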
/* ------------------------------------------------------------------------------------*/
int MatSetValues_ESI(Mat mat,int m,int *im,int n,int *in,PetscScalar *v,InsertMode addv)
{
  Mat_ESI *iesi = (Mat_ESI*)mat->data;
  int     ierr,i,j,rstart = iesi->rstart,rend = iesi->rend;

  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_BOPT_g)
    if (im[i] >= mat->M) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row too large");
#endif
    if (im[i] >= rstart && im[i] < rend) {
      /* locally owned row: write directly into the ESI matrix */
      for (j=0; j<n; j++) {
        iesi->wmat->copyIntoRow(im[i],&v[i+j*m],&in[j],1);
      }
    } else {
      /* off-process row: stash the values until assembly */
      MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m);
    }
  }
  return(0);
}
int MatAssemblyBegin_ESI(Mat mat,MatAssemblyType mode)
{
  int        ierr,nstash,reallocs,*rowners;
  InsertMode addv;

  /* make sure all processors are either in INSERT_VALUES or ADD_VALUES mode */
  MPI_Allreduce(&mat->insertmode,&addv,1,MPI_INT,MPI_BOR,mat->comm);
  if (addv == (ADD_VALUES|INSERT_VALUES)) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added");
  }
  mat->insertmode = addv; /* in case this processor had no cache */

  PetscMapGetGlobalRange(mat->rmap,&rowners);
  MatStashScatterBegin_Private(&mat->stash,rowners);
  MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);
  PetscLogInfo(0,"MatAssemblyBegin_ESI:Stash has %d entries, uses %d mallocs.\n",nstash,reallocs);
  return(0);
}
int MatAssemblyEnd_ESI(Mat mat,MatAssemblyType mode)
{
  Mat_ESI     *a = (Mat_ESI*)mat->data;
  int         i,j,rstart,ncols,n,ierr,flg;
  int         *row,*col;
  PetscScalar *val;
  InsertMode  addv = mat->insertmode;

  while (1) {
    MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);
    if (!flg) break;
    for (i=0; i<n;) {
      /* Now identify the consecutive vals belonging to the same row */
      for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; }
      if (j < n) ncols = j-i;
      else       ncols = n-i;
      /* Now assemble all these values with a single function call */
      MatSetValues_ESI(mat,1,row+i,ncols,col+i,val+i,addv);
      i = j;
    }
  }
  MatStashScatterEnd_Private(&mat->stash);
  a->wmat->loadComplete();
  return(0);
}
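/*
   Example assembly sequence (sketch): any process may set any global row;
   values for rows owned elsewhere travel through the stash and are applied
   here before loadComplete(). A is an assumed, already created ESI matrix.

     int         row     = 0,cols[2] = {0,1};
     PetscScalar vals[2] = {2.0,-1.0};
     MatSetValues(A,1,&row,2,cols,vals,INSERT_VALUES);
     MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);   // drains the stash
*/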
int MatMult_ESI(Mat A,Vec xx,Vec yy)
{
  Mat_ESI                 *a = (Mat_ESI*)A->data;
  int                     ierr;
  esi::Vector<double,int> *x,*y;

  /* wrap the PETSc vectors as ESI vectors, then apply the wrapped operator */
  VecESIWrap(xx,&x);
  VecESIWrap(yy,&y);
  a->eop->apply(*x,*y);
  return(0);
}
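/*
   Example (sketch): once assembled, the wrapped operator is driven through
   the usual PETSc interface; comm and N are assumed to be defined.

     Vec x,y;
     VecCreateMPI(comm,PETSC_DECIDE,N,&x);
     VecDuplicate(x,&y);
     MatMult(A,x,y);   // dispatches to MatMult_ESI -> eop->apply()
*/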
int MatDestroy_ESI(Mat v)
{
  Mat_ESI *vs = (Mat_ESI*)v->data;
  int     ierr;

  if (vs->eop) {
    vs->eop->deleteReference();
  }
  MatStashDestroy_Private(&v->bstash);
  MatStashDestroy_Private(&v->stash);
  PetscFree(vs);
  return(0);
}
int MatView_ESI(Mat A,PetscViewer viewer)
{
  Mat_ESI              *a = (Mat_ESI*)A->data;
  int                  ierr,i,rstart,m,*cols,nz,j;
  PetscTruth           issocket,isascii,isbinary,isdraw;
  esi::IndexSpace<int> *rmap,*cmap;
  PetscScalar          *values;

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_SOCKET,&issocket);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
  if (isascii) {
    ierr   = PetscViewerASCIIUseTabs(viewer,PETSC_NO);
    cols   = new int[100];
    values = new PetscScalar[100];
    ierr   = a->emat->getIndexSpaces(rmap,cmap);
    ierr   = rmap->getLocalPartitionOffset(rstart);
    ierr   = rmap->getLocalSize(m);
    for (i=rstart; i<rstart+m; i++) {
      PetscViewerASCIIPrintf(viewer,"row %d:",i);
      a->rmat->copyOutRow(i,values,cols,100,nz);
      for (j=0; j<nz; j++) {
        PetscViewerASCIIPrintf(viewer," %d %g ",cols[j],values[j]);
      }
      PetscViewerASCIIPrintf(viewer,"\n");
    }
    PetscViewerASCIIUseTabs(viewer,PETSC_YES);
    PetscViewerFlush(viewer);
  } else {
    SETERRQ1(1,"Viewer type %s not supported by ESI matrices",((PetscObject)viewer)->type_name);
  }
  return(0);
}
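/*
   Example (sketch): ASCII viewing is the only viewer supported here.

     MatView(A,PETSC_VIEWER_STDOUT_WORLD);   // prints "row i: col val ..." per local row
*/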
/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {
       MatSetValues_ESI,
       0,
       0,
       MatMult_ESI,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatAssemblyBegin_ESI,
       MatAssemblyEnd_ESI,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_ESI,
       MatView_ESI,
       0};
EXTERN_C_BEGIN
int MatCreate_ESI(Mat B)
{
  int     ierr;
  Mat_ESI *b;

  ierr    = PetscNew(Mat_ESI,&b);
  B->data = (void*)b;
  ierr    = PetscMemzero(b,sizeof(Mat_ESI));
  ierr    = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  PetscOptionsGetReal(PETSC_NULL,"-mat_lu_pivotthreshold",&B->lupivotthreshold,PETSC_NULL);
  b->emat = 0;
  return(0);
}
EXTERN_C_END
EXTERN_C_BEGIN
int MatLoad_ESI(PetscViewer viewer,MatType type,Mat *newmat)
{
  Mat         A;
  PetscScalar *vals,*svals;
  MPI_Comm    comm = ((PetscObject)viewer)->comm;
  MPI_Status  status;
  int         i,nz,ierr,j,rstart,rend,fd;
  int         header[4],rank,size,*rowlengths = 0,M,N,m,*rowners,maxnz,*cols;
  int         *ourlens,*sndcounts = 0,*procsnz = 0,*offlens,jj,*mycols,*smycols;
  int         tag = ((PetscObject)viewer)->tag,cend,cstart,n;

  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);
  if (!rank) {
    PetscViewerBinaryGetDescriptor(viewer,&fd);
    PetscBinaryRead(fd,(char *)header,4,PETSC_INT);
    if (header[0] != MAT_FILE_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
    if (header[3] < 0) {
      SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Matrix in special format on disk, cannot load as MPIAIJ");
    }
  }

  MPI_Bcast(header+1,3,MPI_INT,0,comm);
  M = header[1]; N = header[2];
  /* determine ownership of all rows */
  m = M/size + ((M % size) > rank);
  PetscMalloc((size+2)*sizeof(int),&rowners);
  MPI_Allgather(&m,1,MPI_INT,rowners+1,1,MPI_INT,comm);
  rowners[0] = 0;
  for (i=2; i<=size; i++) {
    rowners[i] += rowners[i-1];
  }
  rstart = rowners[rank];
  rend   = rowners[rank+1];

  /* distribute row lengths to all processors */
  ierr    = PetscMalloc(2*(rend-rstart+1)*sizeof(int),&ourlens);
  offlens = ourlens + (rend-rstart);
  if (!rank) {
    PetscMalloc(M*sizeof(int),&rowlengths);
    PetscBinaryRead(fd,rowlengths,M,PETSC_INT);
    PetscMalloc(size*sizeof(int),&sndcounts);
    for (i=0; i<size; i++) sndcounts[i] = rowners[i+1] - rowners[i];
    MPI_Scatterv(rowlengths,sndcounts,rowners,MPI_INT,ourlens,rend-rstart,MPI_INT,0,comm);
    PetscFree(sndcounts);
  } else {
    MPI_Scatterv(0,0,0,MPI_INT,ourlens,rend-rstart,MPI_INT,0,comm);
  }

  if (!rank) {
    /* calculate the number of nonzeros on each processor */
    PetscMalloc(size*sizeof(int),&procsnz);
    PetscMemzero(procsnz,size*sizeof(int));
    for (i=0; i<size; i++) {
      for (j=rowners[i]; j<rowners[i+1]; j++) {
        procsnz[i] += rowlengths[j];
      }
    }
    PetscFree(rowlengths);

    /* determine max buffer needed and allocate it */
    maxnz = 0;
    for (i=0; i<size; i++) {
      maxnz = PetscMax(maxnz,procsnz[i]);
    }
    PetscMalloc(maxnz*sizeof(int),&cols);

    /* read in my part of the matrix column indices */
    nz = procsnz[0];
    PetscMalloc(nz*sizeof(int),&mycols);
    PetscBinaryRead(fd,mycols,nz,PETSC_INT);

    /* read in everyone else's and ship off */
    for (i=1; i<size; i++) {
      nz = procsnz[i];
      PetscBinaryRead(fd,cols,nz,PETSC_INT);
      MPI_Send(cols,nz,MPI_INT,i,tag,comm);
    }
    PetscFree(cols);
  } else {
    /* determine buffer space needed for message */
    nz = 0;
    for (i=0; i<m; i++) {
      nz += ourlens[i];
    }
    PetscMalloc((nz+1)*sizeof(int),&mycols);

    /* receive message of column indices */
    MPI_Recv(mycols,nz,MPI_INT,0,tag,comm,&status);
    MPI_Get_count(&status,MPI_INT,&maxnz);
    if (maxnz != nz) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
  }

  /* determine column ownership if matrix is not square */
  if (N != M) {
    n      = N/size + ((N % size) > rank);
    ierr   = MPI_Scan(&n,&cend,1,MPI_INT,MPI_SUM,comm);
    cstart = cend - n;
  } else {
    cstart = rstart;
    cend   = rend;
    n      = cend - cstart;
  }

  /* loop over local rows, determining number of off diagonal entries */
  PetscMemzero(offlens,m*sizeof(int));
  jj = 0;
  for (i=0; i<m; i++) {
    for (j=0; j<ourlens[i]; j++) {
      if (mycols[jj] < cstart || mycols[jj] >= cend) offlens[i]++;
      jj++;
    }
  }

  /* create our matrix */
  for (i=0; i<m; i++) {
    ourlens[i] -= offlens[i];
  }
  MatCreate(comm,m,n,M,N,newmat);
  MatSetType(*newmat,MATESI);
  MatSetFromOptions(*newmat);
  A = *newmat;
  MatSetOption(A,MAT_COLUMNS_SORTED);
  for (i=0; i<m; i++) {
    ourlens[i] += offlens[i];
  }

  if (!rank) {
    PetscMalloc(maxnz*sizeof(PetscScalar),&vals);

    /* read in my part of the matrix numerical values */
    nz = procsnz[0];
    PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);

    /* insert into matrix */
    jj      = rstart;
    smycols = mycols;
    svals   = vals;
    for (i=0; i<m; i++) {
      MatSetValues(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);
      smycols += ourlens[i];
      svals   += ourlens[i];
      jj++;
    }

    /* read in other processors' values and ship out */
    for (i=1; i<size; i++) {
      nz = procsnz[i];
      PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);
      MPI_Send(vals,nz,MPIU_SCALAR,i,A->tag,comm);
    }
    PetscFree(procsnz);
  } else {
    /* receive numeric values */
    PetscMalloc((nz+1)*sizeof(PetscScalar),&vals);

    /* receive message of values */
    MPI_Recv(vals,nz,MPIU_SCALAR,0,A->tag,comm,&status);
    MPI_Get_count(&status,MPIU_SCALAR,&maxnz);
    if (maxnz != nz) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");

    /* insert into matrix */
    jj      = rstart;
    smycols = mycols;
    svals   = vals;
    for (i=0; i<m; i++) {
      ierr     = MatSetValues(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);
      smycols += ourlens[i];
      svals   += ourlens[i];
      jj++;
    }
  }
  PetscFree(ourlens);
  PetscFree(vals);
  PetscFree(mycols);
  PetscFree(rowners);

  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  return(0);
}
EXTERN_C_END
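/*
   Example (sketch): loading a matrix stored in PETSc binary format as an
   ESI matrix; "matrix.dat" is a placeholder file name.

     PetscViewer viewer;
     Mat         A;
     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"matrix.dat",PETSC_BINARY_RDONLY,&viewer);
     MatLoad(viewer,MATESI,&A);
     PetscViewerDestroy(viewer);
*/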
EXTERN_C_BEGIN
int MatCreate_PetscESI(Mat V)
{
  int                            ierr;
  Mat                            v;
  esi::petsc::Matrix<double,int> *ve;

  V->ops->destroy = 0; /* since this is called from MatSetType() we have to make sure it doesn't get destroyed twice */
  MatSetType(V,MATESI);
  MatCreate(V->comm,V->m,V->n,V->M,V->N,&v);
  MatSetType(v,MATMPIAIJ);
  ve = new esi::petsc::Matrix<double,int>(v);
  MatESISetOperator(V,ve);
  ve->deleteReference();
  PetscObjectDereference((PetscObject)v);
  return(0);
}
EXTERN_C_END
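/*
   Example (sketch): MatCreate_PetscESI builds an ordinary MPIAIJ matrix,
   wraps it as an ESI operator, and hands it back through the ESI interface.
   The registered type name used below is an assumption.

     Mat A;
     MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,M,N,&A);
     MatSetType(A,"petscesi");   // assumed registered name for MatCreate_PetscESI
*/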