Actual source code: bdiag.c
#define PETSCMAT_DLL

/* Block diagonal matrix format */

#include "src/mat/impls/bdiag/seq/bdiag.h"
#include "src/inline/ilu.h"

PetscErrorCode MatDestroy_SeqBDiag(Mat A)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscInt     i,bs = A->rmap.bs;

#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)A,"Rows=%D, Cols=%D, NZ=%D, BSize=%D, NDiag=%D",A->rmap.N,A->cmap.n,a->nz,A->rmap.bs,a->nd);
#endif
  if (!a->user_alloc) { /* Free the actual diagonals */
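    /* For diag[i] > 0 the stored pointer was shifted by -bs*bs*diag[i] at
       preallocation (so it could be indexed by global row); shift it back to
       the address that was actually allocated before freeing */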
    for (i=0; i<a->nd; i++) {
      if (a->diag[i] > 0) {
        PetscScalar *dummy = a->diagv[i] + bs*bs*a->diag[i];
        PetscFree(dummy);
      } else {
        PetscFree(a->diagv[i]);
      }
    }
  }
  PetscFree(a->pivot);
  PetscFree(a->diagv);
  PetscFree(a->diag);
  PetscFree(a->colloc);
  PetscFree(a->dvalue);
  PetscFree(a->solvework);
  PetscFree(a);

  PetscObjectChangeTypeName((PetscObject)A,0);
  PetscObjectComposeFunction((PetscObject)A,"MatSeqBDiagSetPreallocation_C","",PETSC_NULL);
  return(0);
}

PetscErrorCode MatAssemblyEnd_SeqBDiag(Mat A,MatAssemblyType mode)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscInt     i,k,temp,*diag = a->diag,*bdlen = a->bdlen;
  PetscScalar  *dtemp,**dv = a->diagv;

  if (mode == MAT_FLUSH_ASSEMBLY) return(0);

  /* Sort diagonals */
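  /* (simple exchange sort, in decreasing diagonal number, so the lower
     diagonals come first) */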
  for (i=0; i<a->nd; i++) {
    for (k=i+1; k<a->nd; k++) {
      if (diag[i] < diag[k]) {
        temp     = diag[i];
        diag[i]  = diag[k];
        diag[k]  = temp;
        temp     = bdlen[i];
        bdlen[i] = bdlen[k];
        bdlen[k] = temp;
        dtemp    = dv[i];
        dv[i]    = dv[k];
        dv[k]    = dtemp;
      }
    }
  }

  /* Set location of main diagonal */
  for (i=0; i<a->nd; i++) {
    if (!a->diag[i]) {a->mainbd = i; break;}
  }
  PetscInfo3(A,"Number of diagonals %D, memory used %D, block size %D\n",a->nd,a->maxnz,A->rmap.bs);
  return(0);
}

PetscErrorCode MatSetOption_SeqBDiag(Mat A,MatOption op,PetscTruth flg)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;

  switch (op) {
  case MAT_NEW_NONZERO_LOCATIONS:
    a->nonew = (flg ? 0 : 1);
    break;
  case MAT_NEW_DIAGONALS:
    a->nonew_diag = (flg ? 1 : 0);
    break;
  case MAT_ROW_ORIENTED:
    a->roworiented = flg;
    break;
  case MAT_IGNORE_OFF_PROC_ENTRIES:
  case MAT_NEW_NONZERO_LOCATION_ERR:
  case MAT_NEW_NONZERO_ALLOCATION_ERR:
  case MAT_USE_HASH_TABLE:
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
  case MAT_SYMMETRY_ETERNAL:
    PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);
    break;
  default:
    SETERRQ1(PETSC_ERR_SUP,"unknown option %d",op);
  }
  return(0);
}

static PetscErrorCode MatGetDiagonal_SeqBDiag_N(Mat A,Vec v)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscInt     i,j,n,len,ibase,bs = A->rmap.bs,iloc;
  PetscScalar  *x,*dd,zero = 0.0;

  if (A->factor) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  VecSet(v,zero);
  VecGetLocalSize(v,&n);
  if (n != A->rmap.N) SETERRQ(PETSC_ERR_ARG_SIZ,"Nonconforming mat and vec");
  if (a->mainbd == -1) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Main diagonal not set");
  len = PetscMin(a->mblock,a->nblock);
  dd  = a->diagv[a->mainbd];
  VecGetArray(v,&x);
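  /* Each block along the main diagonal is a dense bs*bs array; within block i
     (offset i*bs*bs) the entries at stride bs+1 are that block's diagonal */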
  for (i=0; i<len; i++) {
    ibase = i*bs*bs; iloc = i*bs;
    for (j=0; j<bs; j++) x[j + iloc] = dd[ibase + j*(bs+1)];
  }
  VecRestoreArray(v,&x);
  return(0);
}

static PetscErrorCode MatGetDiagonal_SeqBDiag_1(Mat A,Vec v)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscInt     i,n,len;
  PetscScalar  *x,*dd,zero = 0.0;

  VecSet(v,zero);
  VecGetLocalSize(v,&n);
  if (n != A->rmap.N) SETERRQ(PETSC_ERR_ARG_SIZ,"Nonconforming mat and vec");
  if (a->mainbd == -1) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Main diagonal not set");
  dd  = a->diagv[a->mainbd];
  len = PetscMin(A->rmap.n,A->cmap.n);
  VecGetArray(v,&x);
  for (i=0; i<len; i++) x[i] = dd[i];
  VecRestoreArray(v,&x);
  return(0);
}

PetscErrorCode MatZeroEntries_SeqBDiag(Mat A)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscInt     d,i,len,bs = A->rmap.bs;
  PetscScalar  *dv;

  for (d=0; d<a->nd; d++) {
    dv = a->diagv[d];
    if (a->diag[d] > 0) {
      dv += bs*bs*a->diag[d];
    }
    len = a->bdlen[d]*bs*bs;
    for (i=0; i<len; i++) dv[i] = 0.0;
  }
  return(0);
}

PetscErrorCode MatZeroRows_SeqBDiag(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscInt     i,m = A->rmap.N - 1,nz;
  PetscScalar  *dd;
  PetscScalar  *val;
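
  /* Zero each listed row in place by retrieving its nonzeros and overwriting
     them, then (optionally) put diag back on the main diagonal */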
  for (i=0; i<N; i++) {
    if (rows[i]<0 || rows[i]>m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"row out of range");
    MatGetRow_SeqBDiag(A,rows[i],&nz,PETSC_NULL,&val);
    PetscMemzero((void*)val,nz*sizeof(PetscScalar));
    MatRestoreRow_SeqBDiag(A,rows[i],&nz,PETSC_NULL,&val);
  }
  if (diag != 0.0) {
    if (a->mainbd == -1) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Main diagonal does not exist");
    dd = a->diagv[a->mainbd];
    for (i=0; i<N; i++) dd[rows[i]] = diag;
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  return(0);
}

PetscErrorCode MatGetSubMatrix_SeqBDiag(Mat A,IS isrow,IS iscol,MatReuse scall,Mat *submat)
{
  PetscInt    nznew,*smap,i,j,oldcols = A->cmap.n;
  PetscInt    *irow,*icol,newr,newc,*cwork,nz,bs;
  PetscInt    *col;
  PetscScalar *vwork;
  PetscScalar *val;
  Mat         newmat;

  if (scall == MAT_REUSE_MATRIX) { /* no support for reuse, so simply destroy all */
    MatDestroy(*submat);
  }

  ISGetIndices(isrow,&irow);
  ISGetIndices(iscol,&icol);
  ISGetLocalSize(isrow,&newr);
  ISGetLocalSize(iscol,&newc);

  PetscMalloc((oldcols+1)*sizeof(PetscInt),&smap);
  PetscMalloc((newc+1)*sizeof(PetscInt),&cwork);
  PetscMalloc((newc+1)*sizeof(PetscScalar),&vwork);
  PetscMemzero((char*)smap,oldcols*sizeof(PetscInt));
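  /* smap[j] = (new column index)+1 if old column j appears in iscol,
     and 0 if that column is dropped from the submatrix */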
  for (i=0; i<newc; i++) smap[icol[i]] = i+1;

  /* Determine diagonals; then create submatrix */
  bs = A->rmap.bs; /* Default block size remains the same */
  MatCreate(((PetscObject)A)->comm,&newmat);
  MatSetSizes(newmat,newr,newc,newr,newc);
  MatSetType(newmat,((PetscObject)A)->type_name);
  MatSeqBDiagSetPreallocation(newmat,0,bs,PETSC_NULL,PETSC_NULL);

  /* Fill new matrix */
  for (i=0; i<newr; i++) {
    MatGetRow_SeqBDiag(A,irow[i],&nz,&col,&val);
    nznew = 0;
    for (j=0; j<nz; j++) {
      if (smap[col[j]]) {
        cwork[nznew]   = smap[col[j]] - 1;
        vwork[nznew++] = val[j];
      }
    }
    MatSetValues(newmat,1,&i,nznew,cwork,vwork,INSERT_VALUES);
    MatRestoreRow_SeqBDiag(A,irow[i],&nz,&col,&val);
  }
  MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);

  /* Free work space */
  PetscFree(smap);
  PetscFree(cwork);
  PetscFree(vwork);
  ISRestoreIndices(isrow,&irow);
  ISRestoreIndices(iscol,&icol);
  *submat = newmat;
  return(0);
}

PetscErrorCode MatGetSubMatrices_SeqBDiag(Mat A,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *B[])
{
  PetscInt i;

  if (scall == MAT_INITIAL_MATRIX) {
    PetscMalloc((n+1)*sizeof(Mat),B);
  }

  for (i=0; i<n; i++) {
    MatGetSubMatrix_SeqBDiag(A,irow[i],icol[i],scall,&(*B)[i]);
  }
  return(0);
}

PetscErrorCode MatScale_SeqBDiag(Mat inA,PetscScalar alpha)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)inA->data;
  PetscInt     i,bs = inA->rmap.bs;
  PetscScalar  oalpha = alpha;
  PetscBLASInt one = 1,len;

  for (i=0; i<a->nd; i++) {
    len = (PetscBLASInt)bs*bs*a->bdlen[i];
    if (a->diag[i] > 0) {
      BLASscal_(&len,&oalpha,a->diagv[i] + bs*bs*a->diag[i],&one);
    } else {
      BLASscal_(&len,&oalpha,a->diagv[i],&one);
    }
  }
  PetscLogFlops(a->nz);
  return(0);
}

PetscErrorCode MatDiagonalScale_SeqBDiag(Mat A,Vec ll,Vec rr)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscScalar  *l,*r,*dv;
  PetscInt     d,j,len;
  PetscInt     nd = a->nd,bs = A->rmap.bs,diag,m,n;
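
  /* For bs == 1, diagv[d] is indexed by global row after the preallocation
     shift: dv[i] holds the entry at (row i, column i-diag) */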
  if (ll) {
    VecGetSize(ll,&m);
    if (m != A->rmap.N) SETERRQ(PETSC_ERR_ARG_SIZ,"Left scaling vector wrong length");
    if (bs == 1) {
      VecGetArray(ll,&l);
      for (d=0; d<nd; d++) {
        dv   = a->diagv[d];
        diag = a->diag[d];
        len  = a->bdlen[d];
        if (diag > 0) for (j=0; j<len; j++) dv[j+diag] *= l[j+diag];
        else          for (j=0; j<len; j++) dv[j]      *= l[j];
      }
      VecRestoreArray(ll,&l);
      PetscLogFlops(a->nz);
    } else SETERRQ(PETSC_ERR_SUP,"Not yet done for bs>1");
  }
  if (rr) {
    VecGetSize(rr,&n);
    if (n != A->cmap.n) SETERRQ(PETSC_ERR_ARG_SIZ,"Right scaling vector wrong length");
    if (bs == 1) {
      VecGetArray(rr,&r);
      for (d=0; d<nd; d++) {
        dv   = a->diagv[d];
        diag = a->diag[d];
        len  = a->bdlen[d];
        if (diag > 0) for (j=0; j<len; j++) dv[j+diag] *= r[j];
        else          for (j=0; j<len; j++) dv[j]      *= r[j-diag];
      }
      VecRestoreArray(rr,&r);
      PetscLogFlops(a->nz);
    } else SETERRQ(PETSC_ERR_SUP,"Not yet done for bs>1");
  }
  return(0);
}

static PetscErrorCode MatDuplicate_SeqBDiag(Mat,MatDuplicateOption,Mat *);

PetscErrorCode MatSetUpPreallocation_SeqBDiag(Mat A)
{
  MatSeqBDiagSetPreallocation(A,PETSC_DEFAULT,PETSC_DEFAULT,0,0);
  return(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {MatSetValues_SeqBDiag_N,
       MatGetRow_SeqBDiag,
       MatRestoreRow_SeqBDiag,
       MatMult_SeqBDiag_N,
/* 4*/ MatMultAdd_SeqBDiag_N,
       MatMultTranspose_SeqBDiag_N,
       MatMultTransposeAdd_SeqBDiag_N,
       MatSolve_SeqBDiag_N,
       0,
       0,
/*10*/ 0,
       0,
       0,
       MatRelax_SeqBDiag_N,
       MatTranspose_SeqBDiag,
/*15*/ MatGetInfo_SeqBDiag,
       0,
       MatGetDiagonal_SeqBDiag_N,
       MatDiagonalScale_SeqBDiag,
       MatNorm_SeqBDiag,
/*20*/ 0,
       MatAssemblyEnd_SeqBDiag,
       0,
       MatSetOption_SeqBDiag,
       MatZeroEntries_SeqBDiag,
/*25*/ MatZeroRows_SeqBDiag,
       0,
       MatLUFactorNumeric_SeqBDiag_N,
       0,
       0,
/*30*/ MatSetUpPreallocation_SeqBDiag,
       MatILUFactorSymbolic_SeqBDiag,
       0,
       0,
       0,
/*35*/ MatDuplicate_SeqBDiag,
       0,
       0,
       MatILUFactor_SeqBDiag,
       0,
/*40*/ 0,
       MatGetSubMatrices_SeqBDiag,
       0,
       MatGetValues_SeqBDiag_N,
       0,
/*45*/ 0,
       MatScale_SeqBDiag,
       0,
       0,
       0,
/*50*/ 0,
       0,
       0,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_SeqBDiag,
       MatView_SeqBDiag,
       0,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       MatLoad_SeqBDiag,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};

/*@C
   MatSeqBDiagSetPreallocation - Sets the nonzero structure and (optionally) arrays.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  nd - number of block diagonals (optional)
.  bs - each element of a diagonal is a bs x bs dense matrix
.  diag - optional array of block diagonal numbers (length nd).
   For a matrix element A[i,j], where i=row and j=column, the
   diagonal number is
$     diag = i/bs - j/bs  (integer division)
   Set diag=PETSC_NULL on input for PETSc to dynamically allocate memory as
   needed (expensive).
-  diagv - pointer to actual diagonals (in same order as diag array),
   if allocated by user. Otherwise, set diagv=PETSC_NULL on input for PETSc
   to control memory allocation.

   Options Database Keys:
.  -mat_block_size <bs> - Sets blocksize
.  -mat_bdiag_diags <s1,s2,s3,...> - Sets diagonal numbers

   Notes:
   See the users manual for further details regarding this storage format.
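
   For example, a minimal sketch of preallocating the three diagonals of a
   tridiagonal matrix (with bs=1 the sub-, main, and super-diagonal are
   numbered 1, 0, and -1 by the formula above):
$     PetscInt diag[3] = {1,0,-1};
$     MatSeqBDiagSetPreallocation(B,3,1,diag,PETSC_NULL);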

   Fortran Note:
   Fortran programmers cannot set diagv; this value is ignored.

   Level: intermediate

.keywords: matrix, block, diagonal, sparse

.seealso: MatCreate(), MatCreateMPIBDiag(), MatSetValues()
@*/
PetscErrorCode MatSeqBDiagSetPreallocation(Mat B,PetscInt nd,PetscInt bs,const PetscInt diag[],PetscScalar *diagv[])
{
  PetscErrorCode ierr,(*f)(Mat,PetscInt,PetscInt,const PetscInt[],PetscScalar*[]);

  PetscObjectQueryFunction((PetscObject)B,"MatSeqBDiagSetPreallocation_C",(void (**)(void))&f);
  if (f) {
    (*f)(B,nd,bs,diag,diagv);
  }
  return(0);
}

PetscErrorCode MatSeqBDiagSetPreallocation_SeqBDiag(Mat B,PetscInt nd,PetscInt bs,PetscInt *diag,PetscScalar **diagv)
{
  Mat_SeqBDiag *b;
  PetscInt     i,nda,sizetot,nd2 = 128,idiag[128];
  PetscTruth   flg1;

  B->preallocated = PETSC_TRUE;
  if (bs == PETSC_DEFAULT) bs = 1;
  if (!bs) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Blocksize cannot be zero");
  if (nd == PETSC_DEFAULT) nd = 0;
  PetscOptionsGetInt(PETSC_NULL,"-mat_block_size",&bs,PETSC_NULL);
  PetscOptionsGetIntArray(PETSC_NULL,"-mat_bdiag_diags",idiag,&nd2,&flg1);
  if (flg1) {
    diag = idiag;
    nd   = nd2;
  }

  B->rmap.bs = B->cmap.bs = bs;
  PetscMapSetUp(&B->rmap);
  PetscMapSetUp(&B->cmap);

  if ((B->cmap.n%bs) || (B->rmap.N%bs)) SETERRQ(PETSC_ERR_ARG_SIZ,"Invalid block size");
  if (!nd) nda = nd + 1;
  else     nda = nd;
  b = (Mat_SeqBDiag*)B->data;

  PetscOptionsHasName(PETSC_NULL,"-mat_no_unroll",&flg1);
  if (!flg1) {
    switch (bs) {
    case 1:
      B->ops->setvalues        = MatSetValues_SeqBDiag_1;
      B->ops->getvalues        = MatGetValues_SeqBDiag_1;
      B->ops->getdiagonal      = MatGetDiagonal_SeqBDiag_1;
      B->ops->mult             = MatMult_SeqBDiag_1;
      B->ops->multadd          = MatMultAdd_SeqBDiag_1;
      B->ops->multtranspose    = MatMultTranspose_SeqBDiag_1;
      B->ops->multtransposeadd = MatMultTransposeAdd_SeqBDiag_1;
      B->ops->relax            = MatRelax_SeqBDiag_1;
      B->ops->solve            = MatSolve_SeqBDiag_1;
      B->ops->lufactornumeric  = MatLUFactorNumeric_SeqBDiag_1;
      break;
    case 2:
      B->ops->mult    = MatMult_SeqBDiag_2;
      B->ops->multadd = MatMultAdd_SeqBDiag_2;
      B->ops->solve   = MatSolve_SeqBDiag_2;
      break;
    case 3:
      B->ops->mult    = MatMult_SeqBDiag_3;
      B->ops->multadd = MatMultAdd_SeqBDiag_3;
      B->ops->solve   = MatSolve_SeqBDiag_3;
      break;
    case 4:
      B->ops->mult    = MatMult_SeqBDiag_4;
      B->ops->multadd = MatMultAdd_SeqBDiag_4;
      B->ops->solve   = MatSolve_SeqBDiag_4;
      break;
    case 5:
      B->ops->mult    = MatMult_SeqBDiag_5;
      B->ops->multadd = MatMultAdd_SeqBDiag_5;
      B->ops->solve   = MatSolve_SeqBDiag_5;
      break;
    }
  }

  b->mblock  = B->rmap.N/bs;
  b->nblock  = B->cmap.n/bs;
  b->nd      = nd;
  B->rmap.bs = bs;
  b->ndim    = 0;
  b->mainbd  = -1;
  b->pivot   = 0;

  PetscMalloc(2*nda*sizeof(PetscInt),&b->diag);
  b->bdlen = b->diag + nda;
  PetscMalloc((B->cmap.n+1)*sizeof(PetscInt),&b->colloc);
  PetscMalloc(nda*sizeof(PetscScalar*),&b->diagv);
  sizetot = 0;

  if (diagv) { /* user allocated space */
    b->user_alloc = PETSC_TRUE;
    for (i=0; i<nd; i++) b->diagv[i] = diagv[i];
  } else b->user_alloc = PETSC_FALSE;

  for (i=0; i<nd; i++) {
    b->diag[i] = diag[i];
    if (diag[i] > 0) { /* lower triangular */
      b->bdlen[i] = PetscMin(b->nblock,b->mblock - diag[i]);
    } else {           /* upper triangular */
      b->bdlen[i] = PetscMin(b->mblock,b->nblock + diag[i]);
    }
    sizetot += b->bdlen[i];
  }
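  /* sizetot so far counts blocks; convert it to scalar entries */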
  sizetot *= bs*bs;
  b->maxnz = sizetot;
  PetscMalloc((B->cmap.n+1)*sizeof(PetscScalar),&b->dvalue);
  PetscLogObjectMemory(B,(nda*(bs+2))*sizeof(PetscInt) + bs*nda*sizeof(PetscScalar)
                       + nda*sizeof(PetscScalar*) + sizeof(Mat_SeqBDiag)
                       + sizeof(struct _p_Mat) + sizetot*sizeof(PetscScalar));

  if (!b->user_alloc) {
    for (i=0; i<nd; i++) {
      PetscMalloc(bs*bs*b->bdlen[i]*sizeof(PetscScalar),&b->diagv[i]);
      PetscMemzero(b->diagv[i],bs*bs*b->bdlen[i]*sizeof(PetscScalar));
    }
    b->nonew = 0; b->nonew_diag = 0;
  } else { /* diagonals are set on input; don't allow dynamic allocation */
    b->nonew = 1; b->nonew_diag = 1;
  }

  /* adjust diagv so one may access rows with diagv[diag][row] for all rows */
  for (i=0; i<nd; i++) {
    if (diag[i] > 0) {
      b->diagv[i] -= bs*bs*diag[i];
    }
  }

  b->nz               = b->maxnz; /* Currently not keeping track of exact count */
  b->roworiented      = PETSC_TRUE;
  B->info.nz_unneeded = (double)b->maxnz;
  return(0);
}

static PetscErrorCode MatDuplicate_SeqBDiag(Mat A,MatDuplicateOption cpvalues,Mat *matout)
{
  Mat_SeqBDiag *newmat,*a = (Mat_SeqBDiag*)A->data;
  PetscInt     i,len,diag,bs = A->rmap.bs;
  Mat          mat;

  MatCreate(((PetscObject)A)->comm,matout);
  MatSetSizes(*matout,A->rmap.N,A->cmap.n,A->rmap.N,A->cmap.n);
  MatSetType(*matout,((PetscObject)A)->type_name);
  MatSeqBDiagSetPreallocation(*matout,a->nd,bs,a->diag,PETSC_NULL);

  /* Copy contents of diagonals */
  mat    = *matout;
  newmat = (Mat_SeqBDiag*)mat->data;
  if (cpvalues == MAT_COPY_VALUES) {
    for (i=0; i<a->nd; i++) {
      len  = a->bdlen[i] * bs * bs * sizeof(PetscScalar);
      diag = a->diag[i];
      if (diag > 0) {
        PetscMemcpy(newmat->diagv[i]+bs*bs*diag,a->diagv[i]+bs*bs*diag,len);
      } else {
        PetscMemcpy(newmat->diagv[i],a->diagv[i],len);
      }
    }
  }
  MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
  return(0);
}

PetscErrorCode MatLoad_SeqBDiag(PetscViewer viewer,MatType type,Mat *A)
{
  Mat         B;
  PetscMPIInt size;
  int         fd;
  PetscInt    *scols,i,nz,header[4],nd = 128;
  PetscInt    bs,*rowlengths = 0,M,N,*cols,extra_rows,*diag = 0;
  PetscInt    idiag[128];
  PetscScalar *vals,*svals;
  MPI_Comm    comm;
  PetscTruth  flg;

  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_size(comm,&size);
  if (size > 1) SETERRQ(PETSC_ERR_ARG_SIZ,"viewer must have one processor");
  PetscViewerBinaryGetDescriptor(viewer,&fd);
  PetscBinaryRead(fd,header,4,PETSC_INT);
  if (header[0] != MAT_FILE_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Not matrix object");
  M = header[1]; N = header[2]; nz = header[3];
  if (M != N) SETERRQ(PETSC_ERR_SUP,"Can only load square matrices");
  if (header[3] < 0) {
    SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Matrix stored in special format, cannot load as SeqBDiag");
  }

  /*
     This code adds extra rows to make sure the number of rows is
     divisible by the blocksize
  */
  bs = 1;
  PetscOptionsGetInt(PETSC_NULL,"-matload_block_size",&bs,PETSC_NULL);
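  /* extra_rows = bs - (M % bs), i.e. the padding needed to reach the next
     multiple of bs (reset to 0 below when M is already divisible by bs) */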
  extra_rows = bs - M + bs*(M/bs);
  if (extra_rows == bs) extra_rows = 0;
  if (extra_rows) {
    PetscInfo(viewer,"Padding loaded matrix to match blocksize\n");
  }

  /* read row lengths */
  PetscMalloc((M+extra_rows)*sizeof(PetscInt),&rowlengths);
  PetscBinaryRead(fd,rowlengths,M,PETSC_INT);
  for (i=0; i<extra_rows; i++) rowlengths[M+i] = 1;

  /* load information about diagonals */
  PetscOptionsGetIntArray(PETSC_NULL,"-matload_bdiag_diags",idiag,&nd,&flg);
  if (flg) {
    diag = idiag;
  }

  /* create our matrix */
  MatCreate(comm,A);
  MatSetSizes(*A,M+extra_rows,M+extra_rows,M+extra_rows,M+extra_rows);
  MatSetType(*A,type);
  MatSeqBDiagSetPreallocation(*A,nd,bs,diag,PETSC_NULL);
  B = *A;

  /* read column indices and nonzeros */
  PetscMalloc(nz*sizeof(PetscInt),&scols);
  cols = scols;
  PetscBinaryRead(fd,cols,nz,PETSC_INT);
  PetscMalloc(nz*sizeof(PetscScalar),&svals);
  vals = svals;
  PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);

  /* insert into matrix */
  for (i=0; i<M; i++) {
    MatSetValues(B,1,&i,rowlengths[i],scols,svals,INSERT_VALUES);
    scols += rowlengths[i]; svals += rowlengths[i];
  }
  vals[0] = 1.0;
  for (i=M; i<M+extra_rows; i++) {
    MatSetValues(B,1,&i,1,&i,vals,INSERT_VALUES);
  }

  PetscFree(cols);
  PetscFree(vals);
  PetscFree(rowlengths);

  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
  return(0);
}

/*MC
   MATSEQBDIAG - MATSEQBDIAG = "seqbdiag" - A matrix type to be used for sequential block diagonal matrices.

   Options Database Keys:
.  -mat_type seqbdiag - sets the matrix type to "seqbdiag" during a call to MatSetFromOptions()

   Level: beginner

.seealso: MatCreateSeqBDiag()
M*/

PetscErrorCode MatCreate_SeqBDiag(Mat B)
{
  Mat_SeqBDiag *b;
  PetscMPIInt  size;

  MPI_Comm_size(((PetscObject)B)->comm,&size);
  if (size > 1) SETERRQ(PETSC_ERR_ARG_WRONG,"Comm must be of size 1");

  PetscNewLog(B,Mat_SeqBDiag,&b);
  B->data    = (void*)b;
  PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
  B->factor  = 0;
  B->mapping = 0;

  b->ndim        = 0;
  b->mainbd      = -1;
  b->pivot       = 0;
  b->roworiented = PETSC_TRUE;

  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatSeqBDiagSetPreallocation_C",
                                    "MatSeqBDiagSetPreallocation_SeqBDiag",
                                    MatSeqBDiagSetPreallocation_SeqBDiag);
  PetscObjectChangeTypeName((PetscObject)B,MATSEQBDIAG);
  return(0);
}

/*@C
   MatCreateSeqBDiag - Creates a sequential block diagonal matrix.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator, set to PETSC_COMM_SELF
.  m - number of rows
.  n - number of columns
.  nd - number of block diagonals (optional)
.  bs - each element of a diagonal is a bs x bs dense matrix
.  diag - optional array of block diagonal numbers (length nd).
   For a matrix element A[i,j], where i=row and j=column, the
   diagonal number is
$     diag = i/bs - j/bs  (integer division)
   Set diag=PETSC_NULL on input for PETSc to dynamically allocate memory as
   needed (expensive).
-  diagv - pointer to actual diagonals (in same order as diag array),
   if allocated by user. Otherwise, set diagv=PETSC_NULL on input for PETSc
   to control memory allocation.

   Output Parameter:
.  A - the matrix

   Options Database Keys:
.  -mat_block_size <bs> - Sets blocksize
.  -mat_bdiag_diags <s1,s2,s3,...> - Sets diagonal numbers

   Notes:
   See the users manual for further details regarding this storage format.
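
   For example, a minimal sketch creating an n x n tridiagonal matrix with
   bs=1 (diagonal numbers 1, 0, and -1), letting PETSc allocate the storage:
$     PetscInt diag[3] = {1,0,-1};
$     Mat      A;
$     MatCreateSeqBDiag(PETSC_COMM_SELF,n,n,3,1,diag,PETSC_NULL,&A);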

   Fortran Note:
   Fortran programmers cannot set diagv; this value is ignored.

   Level: intermediate

.keywords: matrix, block, diagonal, sparse

.seealso: MatCreate(), MatCreateMPIBDiag(), MatSetValues()
@*/
PetscErrorCode MatCreateSeqBDiag(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt nd,PetscInt bs,const PetscInt diag[],PetscScalar *diagv[],Mat *A)
{
  MatCreate(comm,A);
  MatSetSizes(*A,m,n,m,n);
  MatSetType(*A,MATSEQBDIAG);
  MatSeqBDiagSetPreallocation(*A,nd,bs,diag,diagv);
  return(0);
}