Actual source code: bdiag.c

  1: /*$Id: bdiag.c,v 1.198 2001/08/07 03:02:53 balay Exp $*/

  3: /* Block diagonal matrix format */

 5:  #include "src/mat/impls/bdiag/seq/bdiag.h"
 6:  #include "src/vec/vecimpl.h"
 7:  #include "src/inline/ilu.h"

 11: int MatDestroy_SeqBDiag(Mat A)
 12: {
 13:   Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
 14:   int          i,bs = a->bs,ierr;

 17: #if defined(PETSC_USE_LOG)
 18:   PetscLogObjectState((PetscObject)A,"Rows=%d, Cols=%d, NZ=%d, BSize=%d, NDiag=%d",A->m,A->n,a->nz,a->bs,a->nd);
 19: #endif
 20:   if (!a->user_alloc) { /* Free the actual diagonals */
 21:     for (i=0; i<a->nd; i++) {
 22:       if (a->diag[i] > 0) {
 23:         PetscFree(a->diagv[i] + bs*bs*a->diag[i]);
 24:       } else {
 25:         PetscFree(a->diagv[i]);
 26:       }
 27:     }
 28:   }
 29:   if (a->pivot) {PetscFree(a->pivot);}
 30:   PetscFree(a->diagv);
 31:   PetscFree(a->diag);
 32:   PetscFree(a->colloc);
 33:   PetscFree(a->dvalue);
 34:   PetscFree(a);
 35:   return(0);
 36: }

 40: int MatAssemblyEnd_SeqBDiag(Mat A,MatAssemblyType mode)
 41: {
 42:   Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
 43:   int          i,k,temp,*diag = a->diag,*bdlen = a->bdlen;
 44:   PetscScalar  *dtemp,**dv = a->diagv;

 47:   if (mode == MAT_FLUSH_ASSEMBLY) return(0);

 49:   /* Sort diagonals into descending order of diagonal number (simple exchange sort) */
 50:   for (i=0; i<a->nd; i++) {
 51:     for (k=i+1; k<a->nd; k++) {
 52:       if (diag[i] < diag[k]) {
 53:         temp     = diag[i];
 54:         diag[i]  = diag[k];
 55:         diag[k]  = temp;
 56:         temp     = bdlen[i];
 57:         bdlen[i] = bdlen[k];
 58:         bdlen[k] = temp;
 59:         dtemp    = dv[i];
 60:         dv[i]    = dv[k];
 61:         dv[k]    = dtemp;
 62:       }
 63:     }
 64:   }

 66:   /* Set location of main diagonal */
 67:   for (i=0; i<a->nd; i++) {
 68:     if (!a->diag[i]) {a->mainbd = i; break;}
 69:   }
 70:   PetscLogInfo(A,"MatAssemblyEnd_SeqBDiag:Number of diagonals %d, memory used %d, block size %d\n",a->nd,a->maxnz,a->bs);
 71:   return(0);
 72: }

 76: int MatSetOption_SeqBDiag(Mat A,MatOption op)
 77: {
 78:   Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;

 81:   switch (op) {
 82:   case MAT_NO_NEW_NONZERO_LOCATIONS:
 83:     a->nonew       = 1;
 84:     break;
 85:   case MAT_YES_NEW_NONZERO_LOCATIONS:
 86:     a->nonew       = 0;
 87:     break;
 88:   case MAT_NO_NEW_DIAGONALS:
 89:     a->nonew_diag  = 1;
 90:     break;
 91:   case MAT_YES_NEW_DIAGONALS:
 92:     a->nonew_diag  = 0;
 93:     break;
 94:   case MAT_COLUMN_ORIENTED:
 95:     a->roworiented = PETSC_FALSE;
 96:     break;
 97:   case MAT_ROW_ORIENTED:
 98:     a->roworiented = PETSC_TRUE;
 99:     break;
100:   case MAT_ROWS_SORTED:
101:   case MAT_ROWS_UNSORTED:
102:   case MAT_COLUMNS_SORTED:
103:   case MAT_COLUMNS_UNSORTED:
104:   case MAT_IGNORE_OFF_PROC_ENTRIES:
105:   case MAT_NEW_NONZERO_LOCATION_ERR:
106:   case MAT_NEW_NONZERO_ALLOCATION_ERR:
107:   case MAT_USE_HASH_TABLE:
108:     PetscLogInfo(A,"MatSetOption_SeqBDiag:Option ignored\n");
109:     break;
110:   default:
111:     SETERRQ(PETSC_ERR_SUP,"unknown option");
112:   }
113:   return(0);
114: }

118: int MatPrintHelp_SeqBDiag(Mat A)
119: {
120:   static PetscTruth called = PETSC_FALSE;
121:   MPI_Comm          comm = A->comm;
122:   int               ierr;

125:   if (called) {return(0);} else called = PETSC_TRUE;
126:   (*PetscHelpPrintf)(comm," Options for MATSEQBDIAG and MATMPIBDIAG matrix formats:\n");
127:   (*PetscHelpPrintf)(comm,"  -mat_block_size <block_size>\n");
128:   (*PetscHelpPrintf)(comm,"  -mat_bdiag_diags <d1,d2,d3,...> (diagonal numbers)\n");
129:   (*PetscHelpPrintf)(comm,"   (for example) -mat_bdiag_diags -5,-1,0,1,5\n");
130:   return(0);
131: }

135: static int MatGetDiagonal_SeqBDiag_N(Mat A,Vec v)
136: {
137:   Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
138:   int          ierr,i,j,n,len,ibase,bs = a->bs,iloc;
139:   PetscScalar  *x,*dd,zero = 0.0;

142:   if (A->factor) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
143:   VecSet(&zero,v);
144:   VecGetLocalSize(v,&n);
145:   if (n != A->m) SETERRQ(PETSC_ERR_ARG_SIZ,"Nonconforming mat and vec");
146:   if (a->mainbd == -1) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Main diagonal not set");
147:   len = PetscMin(a->mblock,a->nblock);
148:   dd = a->diagv[a->mainbd];
149:   VecGetArray(v,&x);
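       /* each bs x bs dense block of the main block diagonal starts at i*bs*bs; its j-th diagonal entry sits at offset j*(bs+1) */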
150:   for (i=0; i<len; i++) {
151:     ibase = i*bs*bs;  iloc = i*bs;
152:     for (j=0; j<bs; j++) x[j + iloc] = dd[ibase + j*(bs+1)];
153:   }
154:   VecRestoreArray(v,&x);
155:   return(0);
156: }

160: static int MatGetDiagonal_SeqBDiag_1(Mat A,Vec v)
161: {
162:   Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
163:   int          ierr,i,n,len;
164:   PetscScalar  *x,*dd,zero = 0.0;

167:   VecSet(&zero,v);
168:   VecGetLocalSize(v,&n);
169:   if (n != A->m) SETERRQ(PETSC_ERR_ARG_SIZ,"Nonconforming mat and vec");
170:   if (a->mainbd == -1) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Main diagonal not set");
171:   dd = a->diagv[a->mainbd];
172:   len = PetscMin(A->m,A->n);
173:   VecGetArray(v,&x);
174:   for (i=0; i<len; i++) x[i] = dd[i];
175:   VecRestoreArray(v,&x);
176:   return(0);
177: }

181: int MatZeroEntries_SeqBDiag(Mat A)
182: {
183:   Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
184:   int          d,i,len,bs = a->bs;
185:   PetscScalar  *dv;

188:   for (d=0; d<a->nd; d++) {
189:     dv  = a->diagv[d];
190:     if (a->diag[d] > 0) {
191:       dv += bs*bs*a->diag[d];
192:     }
193:     len = a->bdlen[d]*bs*bs;
194:     for (i=0; i<len; i++) dv[i] = 0.0;
195:   }
196:   return(0);
197: }

201: int MatGetBlockSize_SeqBDiag(Mat A,int *bs)
202: {
203:   Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;

206:   *bs = a->bs;
207:   return(0);
208: }

212: int MatZeroRows_SeqBDiag(Mat A,IS is,const PetscScalar *diag)
213: {
214:   Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
215:   int          i,ierr,N,*rows,m = A->m - 1,nz,*col;
216:   PetscScalar  *dd,*val;

219:   ISGetLocalSize(is,&N);
220:   ISGetIndices(is,&rows);
221:   for (i=0; i<N; i++) {
222:     if (rows[i]<0 || rows[i]>m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"row out of range");
223:     MatGetRow(A,rows[i],&nz,&col,&val);
224:     PetscMemzero(val,nz*sizeof(PetscScalar));
225:     MatSetValues(A,1,&rows[i],nz,col,val,INSERT_VALUES);
226:     MatRestoreRow(A,rows[i],&nz,&col,&val);
227:   }
228:   if (diag) {
229:     if (a->mainbd == -1) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Main diagonal does not exist");
230:     dd = a->diagv[a->mainbd];
 231:     for (i=0; i<N; i++) dd[(rows[i]/a->bs)*a->bs*a->bs + (rows[i]%a->bs)*(a->bs+1)] = *diag;
232:   }
233:   ISRestoreIndices(is,&rows);
234:   MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
235:   MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
236:   return(0);
237: }

241: int MatGetSubMatrix_SeqBDiag(Mat A,IS isrow,IS iscol,MatReuse scall,Mat *submat)
242: {
243:   Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
244:   int          nznew,*smap,i,j,ierr,oldcols = A->n;
245:   int          *irow,*icol,newr,newc,*cwork,*col,nz,bs;
246:   PetscScalar  *vwork,*val;
247:   Mat          newmat;

250:   if (scall == MAT_REUSE_MATRIX) { /* no support for reuse so simply destroy all */
251:     MatDestroy(*submat);
252:   }

254:   ISGetIndices(isrow,&irow);
255:   ISGetIndices(iscol,&icol);
256:   ISGetLocalSize(isrow,&newr);
257:   ISGetLocalSize(iscol,&newc);

259:   PetscMalloc((oldcols+1)*sizeof(int),&smap);
260:   PetscMalloc((newc+1)*sizeof(int),&cwork);
261:   PetscMalloc((newc+1)*sizeof(PetscScalar),&vwork);
262:   PetscMemzero((char*)smap,oldcols*sizeof(int));
263:   for (i=0; i<newc; i++) smap[icol[i]] = i+1;

265:   /* Determine diagonals; then create submatrix */
266:   bs = a->bs; /* Default block size remains the same */
267:   MatCreateSeqBDiag(A->comm,newr,newc,0,bs,0,0,&newmat);

269:   /* Fill new matrix */
270:   for (i=0; i<newr; i++) {
271:     MatGetRow(A,irow[i],&nz,&col,&val);
272:     nznew = 0;
273:     for (j=0; j<nz; j++) {
274:       if (smap[col[j]]) {
275:         cwork[nznew]   = smap[col[j]] - 1;
276:         vwork[nznew++] = val[j];
277:       }
278:     }
279:     MatSetValues(newmat,1,&i,nznew,cwork,vwork,INSERT_VALUES);
 280:     MatRestoreRow(A,irow[i],&nz,&col,&val);
281:   }
282:   MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);
283:   MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);

285:   /* Free work space */
286:   PetscFree(smap);
287:   PetscFree(cwork);
288:   PetscFree(vwork);
289:   ISRestoreIndices(isrow,&irow);
290:   ISRestoreIndices(iscol,&icol);
291:   *submat = newmat;
292:   return(0);
293: }

297: int MatGetSubMatrices_SeqBDiag(Mat A,int n,const IS irow[],const IS icol[],MatReuse scall,Mat *B[])
298: {
299:   int ierr,i;

302:   if (scall == MAT_INITIAL_MATRIX) {
303:     PetscMalloc((n+1)*sizeof(Mat),B);
304:   }

306:   for (i=0; i<n; i++) {
307:     MatGetSubMatrix_SeqBDiag(A,irow[i],icol[i],scall,&(*B)[i]);
308:   }
309:   return(0);
310: }

314: int MatScale_SeqBDiag(const PetscScalar *alpha,Mat inA)
315: {
316:   Mat_SeqBDiag *a = (Mat_SeqBDiag*)inA->data;
317:   int          one = 1,i,len,bs = a->bs;

320:   for (i=0; i<a->nd; i++) {
321:     len = bs*bs*a->bdlen[i];
322:     if (a->diag[i] > 0) {
323:       BLscal_(&len,(PetscScalar*)alpha,a->diagv[i] + bs*bs*a->diag[i],&one);
324:     } else {
325:       BLscal_(&len,(PetscScalar*)alpha,a->diagv[i],&one);
326:     }
327:   }
328:   PetscLogFlops(a->nz);
329:   return(0);
330: }

334: int MatDiagonalScale_SeqBDiag(Mat A,Vec ll,Vec rr)
335: {
336:   Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
337:   PetscScalar  *l,*r,*dv;
338:   int          d,j,len,ierr;
339:   int          nd = a->nd,bs = a->bs,diag,m,n;

342:   if (ll) {
343:     VecGetSize(ll,&m);
344:     if (m != A->m) SETERRQ(PETSC_ERR_ARG_SIZ,"Left scaling vector wrong length");
345:     if (bs == 1) {
346:       VecGetArray(ll,&l);
347:       for (d=0; d<nd; d++) {
348:         dv   = a->diagv[d];
349:         diag = a->diag[d];
350:         len  = a->bdlen[d];
351:         if (diag > 0) for (j=0; j<len; j++) dv[j+diag] *= l[j+diag];
352:         else          for (j=0; j<len; j++) dv[j]      *= l[j];
353:       }
354:       VecRestoreArray(ll,&l);
355:       PetscLogFlops(a->nz);
356:     } else SETERRQ(PETSC_ERR_SUP,"Not yet done for bs>1");
357:   }
358:   if (rr) {
359:     VecGetSize(rr,&n);
360:     if (n != A->n) SETERRQ(PETSC_ERR_ARG_SIZ,"Right scaling vector wrong length");
361:     if (bs == 1) {
362:       VecGetArray(rr,&r);
363:       for (d=0; d<nd; d++) {
364:         dv   = a->diagv[d];
365:         diag = a->diag[d];
366:         len  = a->bdlen[d];
367:         if (diag > 0) for (j=0; j<len; j++) dv[j+diag] *= r[j];
368:         else          for (j=0; j<len; j++) dv[j]      *= r[j-diag];
369:       }
370:       VecRestoreArray(rr,&r);
371:       PetscLogFlops(a->nz);
372:     } else SETERRQ(PETSC_ERR_SUP,"Not yet done for bs>1");
373:   }
374:   return(0);
375: }

377: static int MatDuplicate_SeqBDiag(Mat,MatDuplicateOption,Mat *);

381: int MatSetUpPreallocation_SeqBDiag(Mat A)
382: {
383:   int        ierr;

 386:   MatSeqBDiagSetPreallocation(A,PETSC_DEFAULT,PETSC_DEFAULT,0,0);
387:   return(0);
388: }

390: /* -------------------------------------------------------------------*/
391: static struct _MatOps MatOps_Values = {MatSetValues_SeqBDiag_N,
392:        MatGetRow_SeqBDiag,
393:        MatRestoreRow_SeqBDiag,
394:        MatMult_SeqBDiag_N,
395: /* 4*/ MatMultAdd_SeqBDiag_N,
396:        MatMultTranspose_SeqBDiag_N,
397:        MatMultTransposeAdd_SeqBDiag_N,
398:        MatSolve_SeqBDiag_N,
399:        0,
400:        0,
401: /*10*/ 0,
402:        0,
403:        0,
404:        MatRelax_SeqBDiag_N,
405:        MatTranspose_SeqBDiag,
406: /*15*/ MatGetInfo_SeqBDiag,
407:        0,
408:        MatGetDiagonal_SeqBDiag_N,
409:        MatDiagonalScale_SeqBDiag,
410:        MatNorm_SeqBDiag,
411: /*20*/ 0,
412:        MatAssemblyEnd_SeqBDiag,
413:        0,
414:        MatSetOption_SeqBDiag,
415:        MatZeroEntries_SeqBDiag,
416: /*25*/ MatZeroRows_SeqBDiag,
417:        0,
418:        MatLUFactorNumeric_SeqBDiag_N,
419:        0,
420:        0,
421: /*30*/ MatSetUpPreallocation_SeqBDiag,
422:        MatILUFactorSymbolic_SeqBDiag,
423:        0,
424:        0,
425:        0,
426: /*35*/ MatDuplicate_SeqBDiag,
427:        0,
428:        0,
429:        MatILUFactor_SeqBDiag,
430:        0,
431: /*40*/ 0,
432:        MatGetSubMatrices_SeqBDiag,
433:        0,
434:        MatGetValues_SeqBDiag_N,
435:        0,
436: /*45*/ MatPrintHelp_SeqBDiag,
437:        MatScale_SeqBDiag,
438:        0,
439:        0,
440:        0,
441: /*50*/ MatGetBlockSize_SeqBDiag,
442:        0,
443:        0,
444:        0,
445:        0,
446: /*55*/ 0,
447:        0,
448:        0,
449:        0,
450:        0,
451: /*60*/ 0,
452:        MatDestroy_SeqBDiag,
453:        MatView_SeqBDiag,
454:        MatGetPetscMaps_Petsc,
455:        0,
456: /*65*/ 0,
457:        0,
458:        0,
459:        0,
460:        0,
461: /*70*/ 0,
462:        0,
463:        0,
464:        0,
465:        0,
466: /*75*/ 0,
467:        0,
468:        0,
469:        0,
470:        0,
471: /*80*/ 0,
472:        0,
473:        0,
474:        0,
475:        0,
476: /*85*/ MatLoad_SeqBDiag
477: };

481: /*@C
482:    MatSeqBDiagSetPreallocation - Sets the nonzero structure and (optionally) arrays.

484:    Collective on MPI_Comm

486:    Input Parameters:
487: +  B - the matrix
488: .  nd - number of block diagonals (optional)
489: .  bs - each element of a diagonal is a bs x bs dense matrix
490: .  diag - optional array of block diagonal numbers (length nd).
491:    For a matrix element A[i,j], where i=row and j=column, the
492:    diagonal number is
493: $     diag = i/bs - j/bs  (integer division)
494:    Set diag=PETSC_NULL on input for PETSc to dynamically allocate memory as 
495:    needed (expensive).
496: -  diagv - pointer to actual diagonals (in same order as diag array), 
497:    if allocated by user.  Otherwise, set diagv=PETSC_NULL on input for PETSc
498:    to control memory allocation.

500:    Options Database Keys:
501: .  -mat_block_size <bs> - Sets blocksize
502: .  -mat_bdiag_diags <s1,s2,s3,...> - Sets diagonal numbers

504:    Notes:
505:    See the users manual for further details regarding this storage format.
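
   For example (an illustrative sketch, not taken from the users manual), a
   tridiagonal matrix with bs=1 uses the diagonal numbers 1, 0, and -1 and,
   assuming B has already been created with type MATSEQBDIAG, could be
   preallocated with
$      int diag[3] = {1,0,-1};
$      MatSeqBDiagSetPreallocation(B,3,1,diag,PETSC_NULL);
   Passing diagv=PETSC_NULL lets PETSc allocate the diagonals itself.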

507:    Fortran Note:
508:    Fortran programmers cannot set diagv; this value is ignored.

510:    Level: intermediate

512: .keywords: matrix, block, diagonal, sparse

514: .seealso: MatCreate(), MatCreateMPIBDiag(), MatSetValues()
515: @*/
516: int MatSeqBDiagSetPreallocation(Mat B,int nd,int bs,const int diag[],PetscScalar *diagv[])
517: {
518:   int ierr,(*f)(Mat,int,int,const int[],PetscScalar*[]);

521:   PetscObjectQueryFunction((PetscObject)B,"MatSeqBDiagSetPreallocation_C",(void (**)(void))&f);
522:   if (f) {
523:     (*f)(B,nd,bs,diag,diagv);
524:   }
525:   return(0);
526: }

528: EXTERN_C_BEGIN
531: int MatSeqBDiagSetPreallocation_SeqBDiag(Mat B,int nd,int bs,int *diag,PetscScalar **diagv)
532: {
533:   Mat_SeqBDiag *b;
534:   int          i,nda,sizetot,ierr, nd2 = 128,idiag[128];
535:   PetscTruth   flg1;


539:   B->preallocated = PETSC_TRUE;
540:   if (bs == PETSC_DEFAULT) bs = 1;
541:   if (bs == 0) SETERRQ(1,"Blocksize cannot be zero");
542:   if (nd == PETSC_DEFAULT) nd = 0;
543:   PetscOptionsGetInt(PETSC_NULL,"-mat_block_size",&bs,PETSC_NULL);
544:   PetscOptionsGetIntArray(PETSC_NULL,"-mat_bdiag_diags",idiag,&nd2,&flg1);
545:   if (flg1) {
546:     diag = idiag;
547:     nd   = nd2;
548:   }

 550:   if ((B->n%bs) || (B->m%bs)) SETERRQ(PETSC_ERR_ARG_SIZ,"Matrix dimensions must be divisible by block size");
551:   if (!nd) nda = nd + 1;
552:   else     nda = nd;
553:   b            = (Mat_SeqBDiag*)B->data;

555:   PetscOptionsHasName(PETSC_NULL,"-mat_no_unroll",&flg1);
556:   if (!flg1) {
557:     switch (bs) {
558:       case 1:
559:         B->ops->setvalues       = MatSetValues_SeqBDiag_1;
560:         B->ops->getvalues       = MatGetValues_SeqBDiag_1;
561:         B->ops->getdiagonal     = MatGetDiagonal_SeqBDiag_1;
562:         B->ops->mult            = MatMult_SeqBDiag_1;
563:         B->ops->multadd         = MatMultAdd_SeqBDiag_1;
564:         B->ops->multtranspose   = MatMultTranspose_SeqBDiag_1;
565:         B->ops->multtransposeadd= MatMultTransposeAdd_SeqBDiag_1;
566:         B->ops->relax           = MatRelax_SeqBDiag_1;
567:         B->ops->solve           = MatSolve_SeqBDiag_1;
568:         B->ops->lufactornumeric = MatLUFactorNumeric_SeqBDiag_1;
569:         break;
570:       case 2:
571:         B->ops->mult            = MatMult_SeqBDiag_2;
572:         B->ops->multadd         = MatMultAdd_SeqBDiag_2;
573:         B->ops->solve           = MatSolve_SeqBDiag_2;
574:         break;
575:       case 3:
576:         B->ops->mult            = MatMult_SeqBDiag_3;
577:         B->ops->multadd         = MatMultAdd_SeqBDiag_3;
578:         B->ops->solve           = MatSolve_SeqBDiag_3;
579:         break;
580:       case 4:
581:         B->ops->mult            = MatMult_SeqBDiag_4;
582:         B->ops->multadd         = MatMultAdd_SeqBDiag_4;
583:         B->ops->solve           = MatSolve_SeqBDiag_4;
584:         break;
585:       case 5:
586:         B->ops->mult            = MatMult_SeqBDiag_5;
587:         B->ops->multadd         = MatMultAdd_SeqBDiag_5;
588:         B->ops->solve           = MatSolve_SeqBDiag_5;
589:         break;
590:     }
591:   }

593:   b->mblock = B->m/bs;
594:   b->nblock = B->n/bs;
595:   b->nd     = nd;
596:   b->bs     = bs;
597:   b->ndim   = 0;
598:   b->mainbd = -1;
599:   b->pivot  = 0;

601:   PetscMalloc(2*nda*sizeof(int),&b->diag);
602:   b->bdlen  = b->diag + nda;
603:   PetscMalloc((B->n+1)*sizeof(int),&b->colloc);
604:   PetscMalloc(nda*sizeof(PetscScalar*),&b->diagv);
605:   sizetot   = 0;

607:   if (diagv) { /* user allocated space */
608:     b->user_alloc = PETSC_TRUE;
609:     for (i=0; i<nd; i++) b->diagv[i] = diagv[i];
610:   } else b->user_alloc = PETSC_FALSE;

612:   for (i=0; i<nd; i++) {
613:     b->diag[i] = diag[i];
614:     if (diag[i] > 0) { /* lower triangular */
615:       b->bdlen[i] = PetscMin(b->nblock,b->mblock - diag[i]);
616:     } else {           /* upper triangular */
617:       b->bdlen[i] = PetscMin(b->mblock,b->nblock + diag[i]);
618:     }
619:     sizetot += b->bdlen[i];
620:   }
621:   sizetot   *= bs*bs;
622:   b->maxnz  =  sizetot;
623:   PetscMalloc((B->n+1)*sizeof(PetscScalar),&b->dvalue);
624:   PetscLogObjectMemory(B,(nda*(bs+2))*sizeof(int) + bs*nda*sizeof(PetscScalar)
625:                     + nda*sizeof(PetscScalar*) + sizeof(Mat_SeqBDiag)
626:                     + sizeof(struct _p_Mat) + sizetot*sizeof(PetscScalar));

628:   if (!b->user_alloc) {
629:     for (i=0; i<nd; i++) {
630:       PetscMalloc(bs*bs*b->bdlen[i]*sizeof(PetscScalar),&b->diagv[i]);
631:       PetscMemzero(b->diagv[i],bs*bs*b->bdlen[i]*sizeof(PetscScalar));
632:     }
633:     b->nonew = 0; b->nonew_diag = 0;
634:   } else { /* diagonals are set on input; don't allow dynamic allocation */
635:     b->nonew = 1; b->nonew_diag = 1;
636:   }

638:   /* adjust diagv so one may access rows with diagv[diag][row] for all rows */
639:   for (i=0; i<nd; i++) {
640:     if (diag[i] > 0) {
641:       b->diagv[i] -= bs*bs*diag[i];
642:     }
643:   }

645:   b->nz          = b->maxnz; /* Currently not keeping track of exact count */
646:   b->roworiented = PETSC_TRUE;
647:   B->info.nz_unneeded = (double)b->maxnz;
648:   return(0);
649: }
650: EXTERN_C_END

654: static int MatDuplicate_SeqBDiag(Mat A,MatDuplicateOption cpvalues,Mat *matout)
655: {
656:   Mat_SeqBDiag *newmat,*a = (Mat_SeqBDiag*)A->data;
657:   int          i,ierr,len,diag,bs = a->bs;
658:   Mat          mat;

661:   MatCreateSeqBDiag(A->comm,A->m,A->n,a->nd,bs,a->diag,PETSC_NULL,matout);

663:   /* Copy contents of diagonals */
664:   mat = *matout;
665:   newmat = (Mat_SeqBDiag*)mat->data;
666:   if (cpvalues == MAT_COPY_VALUES) {
667:     for (i=0; i<a->nd; i++) {
668:       len = a->bdlen[i] * bs * bs * sizeof(PetscScalar);
669:       diag = a->diag[i];
670:       if (diag > 0) {
671:         PetscMemcpy(newmat->diagv[i]+bs*bs*diag,a->diagv[i]+bs*bs*diag,len);
672:       } else {
673:         PetscMemcpy(newmat->diagv[i],a->diagv[i],len);
674:       }
675:     }
676:   }
677:   MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
678:   MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
679:   return(0);
680: }

684: int MatLoad_SeqBDiag(PetscViewer viewer,MatType type,Mat *A)
685: {
686:   Mat          B;
687:   int          *scols,i,nz,ierr,fd,header[4],size,nd = 128;
688:   int          bs,*rowlengths = 0,M,N,*cols,extra_rows,*diag = 0;
689:   int          idiag[128];
690:   PetscScalar  *vals,*svals;
691:   MPI_Comm     comm;
692:   PetscTruth   flg;
693: 
695:   PetscObjectGetComm((PetscObject)viewer,&comm);
696:   MPI_Comm_size(comm,&size);
 697:   if (size > 1) SETERRQ(PETSC_ERR_ARG_SIZ,"viewer must have one processor");
698:   PetscViewerBinaryGetDescriptor(viewer,&fd);
699:   PetscBinaryRead(fd,header,4,PETSC_INT);
 700:   if (header[0] != MAT_FILE_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Not a matrix object");
701:   M = header[1]; N = header[2]; nz = header[3];
702:   if (M != N) SETERRQ(PETSC_ERR_SUP,"Can only load square matrices");
703:   if (header[3] < 0) {
704:     SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Matrix stored in special format, cannot load as SeqBDiag");
705:   }

707:   /* 
708:      This code adds extra rows to make sure the number of rows is 
709:     divisible by the blocksize
710:   */
711:   bs = 1;
712:   PetscOptionsGetInt(PETSC_NULL,"-matload_block_size",&bs,PETSC_NULL);
713:   extra_rows = bs - M + bs*(M/bs);
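       /* i.e. extra_rows = bs - M%bs; the next line resets it to 0 when M is already a multiple of bs */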
714:   if (extra_rows == bs) extra_rows = 0;
715:   if (extra_rows) {
716:     PetscLogInfo(0,"MatLoad_SeqBDiag:Padding loaded matrix to match blocksize\n");
717:   }

719:   /* read row lengths */
720:   PetscMalloc((M+extra_rows)*sizeof(int),&rowlengths);
721:   PetscBinaryRead(fd,rowlengths,M,PETSC_INT);
722:   for (i=0; i<extra_rows; i++) rowlengths[M+i] = 1;

724:   /* load information about diagonals */
725:   PetscOptionsGetIntArray(PETSC_NULL,"-matload_bdiag_diags",idiag,&nd,&flg);
726:   if (flg) {
727:     diag = idiag;
728:   }

730:   /* create our matrix */
731:   MatCreateSeqBDiag(comm,M+extra_rows,M+extra_rows,nd,bs,diag,
732:                            PETSC_NULL,A);
733:   B = *A;

735:   /* read column indices and nonzeros */
736:   PetscMalloc(nz*sizeof(int),&scols);
737:   cols = scols;
738:   PetscBinaryRead(fd,cols,nz,PETSC_INT);
739:   PetscMalloc(nz*sizeof(PetscScalar),&svals);
740:   vals = svals;
741:   PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);
742:   /* insert into matrix */

744:   for (i=0; i<M; i++) {
745:     MatSetValues(B,1,&i,rowlengths[i],scols,svals,INSERT_VALUES);
746:     scols += rowlengths[i]; svals += rowlengths[i];
747:   }
748:   vals[0] = 1.0;
749:   for (i=M; i<M+extra_rows; i++) {
750:     MatSetValues(B,1,&i,1,&i,vals,INSERT_VALUES);
751:   }

753:   PetscFree(cols);
754:   PetscFree(vals);
755:   PetscFree(rowlengths);

757:   MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
758:   MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
759:   return(0);
760: }

762: /*MC
763:    MATSEQBDIAG - MATSEQBDIAG = "seqbdiag" - A matrix type to be used for sequential block diagonal matrices.

765:    Options Database Keys:
766: . -mat_type seqbdiag - sets the matrix type to "seqbdiag" during a call to MatSetFromOptions()

768:   Level: beginner

770: .seealso: MatCreateSeqBDiag
771: M*/
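
/*
   A minimal calling-sequence sketch (editorial illustration, not part of the
   original source; m, n, nd, bs, and diag are placeholders): the format is
   selected at runtime when MatSetFromOptions() encounters -mat_type seqbdiag,
   after which MatSeqBDiagSetPreallocation() (defined above) supplies the
   diagonal structure.

      Mat A;
      MatCreate(PETSC_COMM_SELF,m,n,m,n,&A);
      MatSetFromOptions(A);
      MatSeqBDiagSetPreallocation(A,nd,bs,diag,PETSC_NULL);
*/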

773: EXTERN_C_BEGIN
776: int MatCreate_SeqBDiag(Mat B)
777: {
778:   Mat_SeqBDiag *b;
779:   int          ierr,size;

782:   MPI_Comm_size(B->comm,&size);
783:   if (size > 1) SETERRQ(PETSC_ERR_ARG_WRONG,"Comm must be of size 1");

785:   B->m = B->M = PetscMax(B->m,B->M);
786:   B->n = B->N = PetscMax(B->n,B->N);

788:   PetscNew(Mat_SeqBDiag,&b);
789:   B->data         = (void*)b;
790:   PetscMemzero(b,sizeof(Mat_SeqBDiag));
791:   PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
792:   B->factor       = 0;
793:   B->mapping      = 0;

795:   PetscMapCreateMPI(B->comm,B->m,B->m,&B->rmap);
796:   PetscMapCreateMPI(B->comm,B->n,B->n,&B->cmap);

798:   b->ndim   = 0;
799:   b->mainbd = -1;
800:   b->pivot  = 0;

802:   b->roworiented = PETSC_TRUE;
803:   PetscObjectComposeFunctionDynamic((PetscObject)B,"MatSeqBDiagSetPreallocation_C",
804:                                     "MatSeqBDiagSetPreallocation_SeqBDiag",
805:                                      MatSeqBDiagSetPreallocation_SeqBDiag);

807:   return(0);
808: }
809: EXTERN_C_END

813: /*@C
814:    MatCreateSeqBDiag - Creates a sequential block diagonal matrix.

816:    Collective on MPI_Comm

818:    Input Parameters:
819: +  comm - MPI communicator, set to PETSC_COMM_SELF
820: .  m - number of rows
821: .  n - number of columns
822: .  nd - number of block diagonals (optional)
823: .  bs - each element of a diagonal is a bs x bs dense matrix
824: .  diag - optional array of block diagonal numbers (length nd).
825:    For a matrix element A[i,j], where i=row and j=column, the
826:    diagonal number is
827: $     diag = i/bs - j/bs  (integer division)
828:    Set diag=PETSC_NULL on input for PETSc to dynamically allocate memory as 
829:    needed (expensive).
830: -  diagv - pointer to actual diagonals (in same order as diag array), 
831:    if allocated by user.  Otherwise, set diagv=PETSC_NULL on input for PETSc
832:    to control memory allocation.

834:    Output Parameters:
835: .  A - the matrix

837:    Options Database Keys:
838: .  -mat_block_size <bs> - Sets blocksize
839: .  -mat_bdiag_diags <s1,s2,s3,...> - Sets diagonal numbers

841:    Notes:
842:    See the users manual for further details regarding this storage format.
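
   For example (an illustrative sketch only), a block tridiagonal matrix with
   10 x 10 blocks of size bs=2 (so m=n=20) could be created with
$      int diag[3] = {-1,0,1};
$      MatCreateSeqBDiag(PETSC_COMM_SELF,20,20,3,2,diag,PETSC_NULL,&A);
   after which values are inserted with MatSetValues() and the matrix is
   assembled with MatAssemblyBegin()/MatAssemblyEnd().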

844:    Fortran Note:
845:    Fortran programmers cannot set diagv; this value is ignored.

847:    Level: intermediate

849: .keywords: matrix, block, diagonal, sparse

851: .seealso: MatCreate(), MatCreateMPIBDiag(), MatSetValues()
852: @*/
853: int MatCreateSeqBDiag(MPI_Comm comm,int m,int n,int nd,int bs,const int diag[],PetscScalar *diagv[],Mat *A)
854: {

858:   MatCreate(comm,m,n,m,n,A);
859:   MatSetType(*A,MATSEQBDIAG);
860:   MatSeqBDiagSetPreallocation(*A,nd,bs,diag,diagv);
861:   return(0);
862: }