/* dscpack.c */
1: #define PETSCMAT_DLL
3: /*
4: Provides an interface to the DSCPACK (Domain-Separator Codes) sparse direct solver
5: */
#include "src/mat/impls/baij/seq/baij.h"
#include "src/mat/impls/baij/mpi/mpibaij.h"

#include "dscmain.h"
typedef struct {
  DSC_Solver My_DSC_Solver;   /* opaque handle to the DSCPACK solver object */
  /* ordering/partitioning data filled in by DSC_Order(); dsc_id is this
     process's id in the DSC solve, -1 when it does not participate */
  PetscInt num_local_strucs, *local_struc_old_num,
           num_local_cols, num_local_nonz,
           *global_struc_new_col_num,
           *global_struc_new_num, *global_struc_owner,
           dsc_id,bs,*local_cols_old_num,*replication;
  /* run-time options; defaults set in MatCholeskyFactorSymbolic_DSCPACK */
  PetscInt order_code,scheme_code,factor_type, stat,
           LBLASLevel,DBLASLevel,max_mem_allowed;
  MatStructure flg;           /* DIFFERENT_NONZERO_PATTERN on first numeric factorization, SAME_ afterwards */
  IS my_cols,iden,iden_dsc;   /* index sets used to gather the matrix and scatter vectors */
  Vec vec_dsc;                /* sequential work vector in DSCPACK's permuted ordering */
  VecScatter scat;            /* scatter between global vectors and vec_dsc; created lazily in MatSolve */
  MPI_Comm comm_dsc;          /* duplicated communicator handed to DSCPACK */

  /* A few inheritance details: function pointers of the base (Seq/MPI BAIJ) type,
     restored by MatConvert_DSCPACK_Base() */
  PetscMPIInt size;           /* size of the matrix communicator */
  PetscErrorCode (*MatDuplicate)(Mat,MatDuplicateOption,Mat*);
  PetscErrorCode (*MatView)(Mat,PetscViewer);
  PetscErrorCode (*MatAssemblyEnd)(Mat,MatAssemblyType);
  PetscErrorCode (*MatCholeskyFactorSymbolic)(Mat,IS,MatFactorInfo*,Mat*);
  PetscErrorCode (*MatDestroy)(Mat);
  PetscErrorCode (*MatPreallocate)(Mat,PetscInt,PetscInt,PetscInt*,PetscInt,PetscInt*);

  /* Clean up flag for destructor: PETSC_TRUE once DSCPACK objects exist */
  PetscTruth CleanUpDSCPACK;
} Mat_DSC;
42: EXTERN PetscErrorCode MatDuplicate_DSCPACK(Mat,MatDuplicateOption,Mat*);
44: EXTERN PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_Base_DSCPACK(Mat,const MatType,MatReuse,Mat*);
47: /* DSC function */
50: void isort2(PetscInt size, PetscInt *list, PetscInt *idx_dsc) {
51: /* in increasing order */
52: /* idx_dsc will contain indices such that */
53: /* list can be accessed in sorted order */
54: PetscInt i, j, x, y;
55:
56: for (i=0; i<size; i++) idx_dsc[i] =i;
58: for (i=1; i<size; i++){
59: y= idx_dsc[i];
60: x=list[idx_dsc[i]];
61: for (j=i-1; ((j>=0) && (x<list[idx_dsc[j]])); j--)
62: idx_dsc[j+1]=idx_dsc[j];
63: idx_dsc[j+1]=y;
64: }
65: }/*end isort2*/
PetscErrorCode BAIJtoMyANonz( PetscInt *AIndex, PetscInt *AStruct, PetscInt bs,
                    RealNumberType *ANonz, PetscInt NumLocalStructs,
                    PetscInt NumLocalNonz, PetscInt *GlobalStructNewColNum,
                    PetscInt *LocalStructOldNum,
                    PetscInt *LocalStructLocalNum,
                    RealNumberType **adr_MyANonz)
/*
   Extract the nonzero values of the lower triangular part
   of the permuted matrix that belong to this processor.

   The only output parameter is adr_MyANonz -- it is malloced here
   (caller frees).  The rest are input parameters left unchanged.

   When LocalStructLocalNum == PETSC_NULL,
     AIndex, AStruct, and ANonz contain the entire original matrix A
     in PETSc SeqBAIJ format,
   otherwise,
     AIndex, AStruct, and ANonz are indices for the submatrix
     of A whose columns (in increasing order) belong to this processor.

   The other variables supply information on ownership of columns
   and the new numbering in a fill-reducing permutation.

   This information is used to set up the lower half of the A nonzeroes
   for the columns owned by this processor.
*/
{
  PetscInt       i, j, k, iold,inew, jj, kk, bs2=bs*bs,
                 *idx, *NewColNum,
                 MyANonz_last, max_struct=0, struct_size;
  RealNumberType *MyANonz;

  /* Pass 1: find the maximum number of subscripts over the columns
     assigned to this processor, so the scratch arrays can be sized once */
  for (i=0; i <NumLocalStructs; i++) {
    /* for each struct i (local) assigned to this processor */
    if (LocalStructLocalNum){
      iold = LocalStructLocalNum[i];
    } else {
      iold = LocalStructOldNum[i];
    }

    struct_size = AIndex[iold+1] - AIndex[iold];
    if ( max_struct <= struct_size) max_struct = struct_size;
  }

  /* allocate tmp arrays large enough to hold densest struct;
     idx shares the single allocation with NewColNum */
  PetscMalloc((2*max_struct+1)*sizeof(PetscInt),&NewColNum);
  idx = NewColNum + max_struct;

  PetscMalloc(NumLocalNonz*sizeof(RealNumberType),&MyANonz);
  *adr_MyANonz = MyANonz;

  /* Pass 2: loop to set up nonzeroes in MyANonz, emitting the entries of
     each block column in increasing new-column order */
  MyANonz_last = 0 ; /* points to first empty space in MyANonz */
  for (i=0; i <NumLocalStructs; i++) {

    /* for each struct i (local) assigned to this processor */
    if (LocalStructLocalNum){
      iold = LocalStructLocalNum[i];
    } else {
      iold = LocalStructOldNum[i];
    }

    struct_size = AIndex[iold+1] - AIndex[iold];
    /* translate the block row indices of this struct to the new numbering */
    for (k=0, j=AIndex[iold]; j<AIndex[iold+1]; j++){
      NewColNum[k] = GlobalStructNewColNum[AStruct[j]];
      k++;
    }
    isort2(struct_size, NewColNum, idx);

    kk = AIndex[iold]*bs2; /* points to 1st element of iold block col in ANonz */
    inew = GlobalStructNewColNum[LocalStructOldNum[i]];

    for (jj = 0; jj < bs; jj++) {
      for (j=0; j<struct_size; j++){
        for ( k = 0; k<bs; k++){
          /* keep only entries on or below the diagonal in the new ordering */
          if (NewColNum[idx[j]] + k >= inew)
            MyANonz[MyANonz_last++] = ANonz[kk + idx[j]*bs2 + k*bs + jj];
        }
      }
      inew++;
    }
  } /* end outer loop for i */

  PetscFree(NewColNum);
  if (MyANonz_last != NumLocalNonz) SETERRQ2(PETSC_ERR_PLIB,"MyANonz_last %d != NumLocalNonz %d\n",MyANonz_last, NumLocalNonz);
  return(0);
}
PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_DSCPACK_Base(Mat A,const MatType type,MatReuse reuse,Mat *newmat)
{
  /* Convert a DSCPACK matrix back to its base (SeqBAIJ or MPIBAIJ) type:
     restore the function pointers saved at conversion time, free the
     DSCPACK context, and unregister the conversion/preallocation hooks. */
  Mat     B=*newmat;
  Mat_DSC *lu=(Mat_DSC*)A->spptr;
  void    (*f)(void);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatDuplicate(A,MAT_COPY_VALUES,&B);
  }
  /* Reset the original function pointers */
  B->ops->duplicate              = lu->MatDuplicate;
  B->ops->view                   = lu->MatView;
  B->ops->assemblyend            = lu->MatAssemblyEnd;
  B->ops->choleskyfactorsymbolic = lu->MatCholeskyFactorSymbolic;
  B->ops->destroy                = lu->MatDestroy;
  /* restore the inherited MPIBAIJ preallocation routine, if one was hooked */
  PetscObjectQueryFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C",&f);
  if (f) {
    PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C","",(PetscVoidFunction)lu->MatPreallocate);
  }
  PetscFree(lu);

  /* drop the registered conversion hooks for the DSCPACK type */
  PetscObjectComposeFunction((PetscObject)B,"MatConvert_seqbaij_dscpack_C","",PETSC_NULL);
  PetscObjectComposeFunction((PetscObject)B,"MatConvert_dscpack_seqbaij_C","",PETSC_NULL);
  PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_dscpack_C","",PETSC_NULL);
  PetscObjectComposeFunction((PetscObject)B,"MatConvert_dscpack_mpibaij_C","",PETSC_NULL);

  PetscObjectChangeTypeName((PetscObject)B,type);
  *newmat = B;

  return(0);
}
PetscErrorCode MatDestroy_DSCPACK(Mat A)
{
  /* Destroy a DSCPACK matrix: release the DSCPACK solver and helper objects
     (if this object owns them), convert the matrix back to its base BAIJ
     type, and invoke the base destructor. */
  Mat_DSC *lu=(Mat_DSC*)A->spptr;

  if (lu->CleanUpDSCPACK) {
    /* only processes that took part in the DSC solve (dsc_id != -1)
       hold solver-side storage */
    if (lu->dsc_id != -1) {
      if(lu->stat) DSC_DoStats(lu->My_DSC_Solver);
      DSC_FreeAll(lu->My_DSC_Solver);
      DSC_Close0(lu->My_DSC_Solver);

      PetscFree(lu->local_cols_old_num);
    }
    DSC_End(lu->My_DSC_Solver);

    MPI_Comm_free(&(lu->comm_dsc));
    ISDestroy(lu->my_cols);
    PetscFree(lu->replication);
    VecDestroy(lu->vec_dsc);
    ISDestroy(lu->iden_dsc);
    VecScatterDestroy(lu->scat);
    if (lu->size >1 && lu->iden) {ISDestroy(lu->iden);}
  }
  /* restore the base type (this also frees lu) so its destructor can run */
  if (lu->size == 1) {
    MatConvert_DSCPACK_Base(A,MATSEQBAIJ,MAT_REUSE_MATRIX,&A);
  } else {
    MatConvert_DSCPACK_Base(A,MATMPIBAIJ,MAT_REUSE_MATRIX,&A);
  }
  (*A->ops->destroy)(A);
  return(0);
}
237: PetscErrorCode MatSolve_DSCPACK(Mat A,Vec b,Vec x) {
238: Mat_DSC *lu= (Mat_DSC*)A->spptr;
240: RealNumberType *solution_vec,*rhs_vec;
243: /* scatter b into seq vec_dsc */
244: if ( !lu->scat ) {
245: VecScatterCreate(b,lu->my_cols,lu->vec_dsc,lu->iden_dsc,&lu->scat);
246: }
247: VecScatterBegin(b,lu->vec_dsc,INSERT_VALUES,SCATTER_FORWARD,lu->scat);
248: VecScatterEnd(b,lu->vec_dsc,INSERT_VALUES,SCATTER_FORWARD,lu->scat);
250: if (lu->dsc_id != -1){
251: VecGetArray(lu->vec_dsc,&rhs_vec);
252: DSC_InputRhsLocalVec(lu->My_DSC_Solver, rhs_vec, lu->num_local_cols);
253: VecRestoreArray(lu->vec_dsc,&rhs_vec);
254:
255: DSC_Solve(lu->My_DSC_Solver);
256: if (ierr != DSC_NO_ERROR) {
257: DSC_ErrorDisplay(lu->My_DSC_Solver);
258: SETERRQ(PETSC_ERR_LIB,"Error in calling DSC_Solve");
259: }
261: /* get the permuted local solution */
262: VecGetArray(lu->vec_dsc,&solution_vec);
263: DSC_GetLocalSolution(lu->My_DSC_Solver,solution_vec, lu->num_local_cols);
264: VecRestoreArray(lu->vec_dsc,&solution_vec);
266: } /* end of if (lu->dsc_id != -1) */
268: /* put permuted local solution solution_vec into x in the original order */
269: VecScatterBegin(lu->vec_dsc,x,INSERT_VALUES,SCATTER_REVERSE,lu->scat);
270: VecScatterEnd(lu->vec_dsc,x,INSERT_VALUES,SCATTER_REVERSE,lu->scat);
272: return(0);
273: }
277: PetscErrorCode MatCholeskyFactorNumeric_DSCPACK(Mat A,MatFactorInfo *info,Mat *F) {
278: Mat_SeqBAIJ *a_seq;
279: Mat_DSC *lu=(Mat_DSC*)(*F)->spptr;
280: Mat *tseq,A_seq=PETSC_NULL;
281: RealNumberType *my_a_nonz;
283: PetscMPIInt size;
284: PetscInt M=A->rmap.N,Mbs=M/lu->bs,max_mem_estimate,max_single_malloc_blk,
285: number_of_procs,i,j,next,iold,*idx,*iidx=0,*itmp;
286: IS my_cols_sorted;
287: Mat F_diag;
288:
290: MPI_Comm_size(A->comm,&size);
291: if ( lu->flg == DIFFERENT_NONZERO_PATTERN){ /* first numeric factorization */
292: /* convert A to A_seq */
293: if (size > 1) {
294: if (!lu->iden){
295: ISCreateStride(PETSC_COMM_SELF,M,0,1,&lu->iden);
296: }
297: MatGetSubMatrices(A,1,&lu->iden,&lu->iden,MAT_INITIAL_MATRIX,&tseq);
298: A_seq = tseq[0];
299: a_seq = (Mat_SeqBAIJ*)A_seq->data;
300: } else {
301: a_seq = (Mat_SeqBAIJ*)A->data;
302: }
303:
304: PetscMalloc(Mbs*sizeof(PetscInt),&lu->replication);
305: for (i=0; i<Mbs; i++) lu->replication[i] = lu->bs;
307: number_of_procs = DSC_Analyze(Mbs, a_seq->i, a_seq->j, lu->replication);
308:
309: i = size;
310: if ( number_of_procs < i ) i = number_of_procs;
311: number_of_procs = 1;
312: while ( i > 1 ){
313: number_of_procs *= 2; i /= 2;
314: }
316: /* DSC_Solver starts */
317: DSC_Open0( lu->My_DSC_Solver, number_of_procs, &lu->dsc_id, lu->comm_dsc );
319: if (lu->dsc_id != -1) {
320: DSC_Order(lu->My_DSC_Solver,lu->order_code,Mbs,a_seq->i,a_seq->j,lu->replication,
321: &M,&lu->num_local_strucs,
322: &lu->num_local_cols, &lu->num_local_nonz, &lu->global_struc_new_col_num,
323: &lu->global_struc_new_num, &lu->global_struc_owner,
324: &lu->local_struc_old_num);
325: if (ierr != DSC_NO_ERROR) {
326: DSC_ErrorDisplay(lu->My_DSC_Solver);
327: SETERRQ(PETSC_ERR_LIB,"Error when use DSC_Order()");
328: }
330: DSC_SFactor(lu->My_DSC_Solver,&max_mem_estimate,&max_single_malloc_blk,
331: lu->max_mem_allowed, lu->LBLASLevel, lu->DBLASLevel);
332: if (ierr != DSC_NO_ERROR) {
333: DSC_ErrorDisplay(lu->My_DSC_Solver);
334: SETERRQ(PETSC_ERR_LIB,"Error when use DSC_Order");
335: }
337: BAIJtoMyANonz(a_seq->i, a_seq->j, lu->bs, a_seq->a,
338: lu->num_local_strucs, lu->num_local_nonz,
339: lu->global_struc_new_col_num,
340: lu->local_struc_old_num,
341: PETSC_NULL,
342: &my_a_nonz);
343: if (ierr <0) {
344: DSC_ErrorDisplay(lu->My_DSC_Solver);
345: SETERRQ1(PETSC_ERR_LIB,"Error setting local nonzeroes at processor %d \n", lu->dsc_id);
346: }
348: /* get local_cols_old_num and IS my_cols to be used later */
349: PetscMalloc(lu->num_local_cols*sizeof(PetscInt),&lu->local_cols_old_num);
350: for (next = 0, i=0; i<lu->num_local_strucs; i++){
351: iold = lu->bs*lu->local_struc_old_num[i];
352: for (j=0; j<lu->bs; j++)
353: lu->local_cols_old_num[next++] = iold++;
354: }
355: ISCreateGeneral(PETSC_COMM_SELF,lu->num_local_cols,lu->local_cols_old_num,&lu->my_cols);
356:
357: } else { /* lu->dsc_id == -1 */
358: lu->num_local_cols = 0;
359: lu->local_cols_old_num = 0;
360: ISCreateGeneral(PETSC_COMM_SELF,lu->num_local_cols,lu->local_cols_old_num,&lu->my_cols);
361: }
362: /* generate vec_dsc and iden_dsc to be used later */
363: VecCreateSeq(PETSC_COMM_SELF,lu->num_local_cols,&lu->vec_dsc);
364: ISCreateStride(PETSC_COMM_SELF,lu->num_local_cols,0,1,&lu->iden_dsc);
365: lu->scat = PETSC_NULL;
367: if ( size>1 ) {
368: MatDestroyMatrices(1,&tseq);
369: }
370: } else { /* use previously computed symbolic factor */
371: /* convert A to my A_seq */
372: if (size > 1) {
373: if (lu->dsc_id == -1) {
374: itmp = 0;
375: } else {
376: PetscMalloc(2*lu->num_local_strucs*sizeof(PetscInt),&idx);
377: iidx = idx + lu->num_local_strucs;
378: PetscMalloc(lu->num_local_cols*sizeof(PetscInt),&itmp);
379:
380: isort2(lu->num_local_strucs, lu->local_struc_old_num, idx);
381: for (next=0, i=0; i< lu->num_local_strucs; i++) {
382: iold = lu->bs*lu->local_struc_old_num[idx[i]];
383: for (j=0; j<lu->bs; j++){
384: itmp[next++] = iold++; /* sorted local_cols_old_num */
385: }
386: }
387: for (i=0; i< lu->num_local_strucs; i++) {
388: iidx[idx[i]] = i; /* inverse of idx */
389: }
390: } /* end of (lu->dsc_id == -1) */
391: ISCreateGeneral(PETSC_COMM_SELF,lu->num_local_cols,itmp,&my_cols_sorted);
392: MatGetSubMatrices(A,1,&my_cols_sorted,&lu->iden,MAT_INITIAL_MATRIX,&tseq);
393: ISDestroy(my_cols_sorted);
394: A_seq = tseq[0];
395:
396: if (lu->dsc_id != -1) {
397: DSC_ReFactorInitialize(lu->My_DSC_Solver);
399: a_seq = (Mat_SeqBAIJ*)A_seq->data;
400: BAIJtoMyANonz(a_seq->i, a_seq->j, lu->bs, a_seq->a,
401: lu->num_local_strucs, lu->num_local_nonz,
402: lu->global_struc_new_col_num,
403: lu->local_struc_old_num,
404: iidx,
405: &my_a_nonz);
406: if (ierr <0) {
407: DSC_ErrorDisplay(lu->My_DSC_Solver);
408: SETERRQ1(PETSC_ERR_LIB,"Error setting local nonzeroes at processor %d \n", lu->dsc_id);
409: }
410: PetscFree(idx);
411: PetscFree(itmp);
412: } /* end of if(lu->dsc_id != -1) */
413: } else { /* size == 1 */
414: a_seq = (Mat_SeqBAIJ*)A->data;
415:
416: BAIJtoMyANonz(a_seq->i, a_seq->j, lu->bs, a_seq->a,
417: lu->num_local_strucs, lu->num_local_nonz,
418: lu->global_struc_new_col_num,
419: lu->local_struc_old_num,
420: PETSC_NULL,
421: &my_a_nonz);
422: if (ierr <0) {
423: DSC_ErrorDisplay(lu->My_DSC_Solver);
424: SETERRQ1(PETSC_ERR_LIB,"Error setting local nonzeroes at processor %d \n", lu->dsc_id);
425: }
426: }
427: if ( size>1 ) {MatDestroyMatrices(1,&tseq); }
428: }
429:
430: if (lu->dsc_id != -1) {
431: DSC_NFactor(lu->My_DSC_Solver, lu->scheme_code, my_a_nonz, lu->factor_type, lu->LBLASLevel, lu->DBLASLevel);
432: PetscFree(my_a_nonz);
433: }
434:
435: F_diag = ((Mat_MPIBAIJ *)(*F)->data)->A;
436: F_diag->assembled = PETSC_TRUE;
437: (*F)->assembled = PETSC_TRUE;
438: lu->flg = SAME_NONZERO_PATTERN;
440: return(0);
441: }
443: /* Note the Petsc permutation r is ignored */
/* Note the Petsc permutation r is ignored */
PetscErrorCode MatCholeskyFactorSymbolic_DSCPACK(Mat A,IS r,MatFactorInfo *info,Mat *F) {
  /* "Symbolic" Cholesky factorization: creates the factor matrix F, sets the
     DSCPACK defaults, reads the run-time options, and opens the DSCPACK
     solver.  The actual ordering/symbolic factorization is deferred to the
     first call of MatCholeskyFactorNumeric_DSCPACK. */
  Mat        B;
  Mat_DSC    *lu;
  PetscInt   bs,indx;
  PetscTruth flg;
  const char *ftype[]={"LDLT","LLT"},*ltype[]={"LBLAS1","LBLAS2","LBLAS3"},*dtype[]={"DBLAS1","DBLAS2"};

  /* Create the factorization matrix F */
  MatGetBlockSize(A,&bs);
  MatCreate(A->comm,&B);
  MatSetSizes(B,A->rmap.n,A->cmap.n,A->rmap.N,A->cmap.N);
  MatSetType(B,A->type_name);
  MatSeqBAIJSetPreallocation(B,bs,0,PETSC_NULL);
  MatMPIBAIJSetPreallocation(B,bs,0,PETSC_NULL,0,PETSC_NULL);

  lu = (Mat_DSC*)B->spptr;
  B->bs = bs;

  /* hook the DSCPACK numeric factorization and triangular solve into F */
  B->ops->choleskyfactornumeric = MatCholeskyFactorNumeric_DSCPACK;
  B->ops->solve                 = MatSolve_DSCPACK;
  B->factor                     = FACTOR_CHOLESKY;

  /* Set the default input options */
  lu->order_code  = 2;
  lu->scheme_code = 1;
  lu->factor_type = 2;
  lu->stat        = 0; /* do not display stats */
  lu->LBLASLevel  = DSC_LBLAS3;
  lu->DBLASLevel  = DSC_DBLAS2;
  lu->max_mem_allowed = 256;
  MPI_Comm_dup(A->comm,&(lu->comm_dsc));
  /* Get the runtime input options */
  PetscOptionsBegin(A->comm,A->prefix,"DSCPACK Options","Mat");

  PetscOptionsInt("-mat_dscpack_order","order_code: \n\
         1 = ND, 2 = Hybrid with Minimum Degree, 3 = Hybrid with Minimum Deficiency", \
                 "None",
                 lu->order_code,&lu->order_code,PETSC_NULL);

  PetscOptionsInt("-mat_dscpack_scheme","scheme_code: \n\
         1 = standard factorization, 2 = factorization + selective inversion", \
                 "None",
                 lu->scheme_code,&lu->scheme_code,PETSC_NULL);

  PetscOptionsEList("-mat_dscpack_factor","factor_type","None",ftype,2,ftype[0],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      lu->factor_type = DSC_LDLT;
      break;
    case 1:
      lu->factor_type = DSC_LLT;
      break;
    }
  }
  PetscOptionsInt("-mat_dscpack_MaxMemAllowed","in Mbytes","None",
                  lu->max_mem_allowed,&lu->max_mem_allowed,PETSC_NULL);

  PetscOptionsInt("-mat_dscpack_stats","display stats: 0 = no display, 1 = display",
                  "None", lu->stat,&lu->stat,PETSC_NULL);

  PetscOptionsEList("-mat_dscpack_LBLAS","BLAS level used in the local phase","None",ltype,3,ltype[2],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      lu->LBLASLevel = DSC_LBLAS1;
      break;
    case 1:
      lu->LBLASLevel = DSC_LBLAS2;
      break;
    case 2:
      lu->LBLASLevel = DSC_LBLAS3;
      break;
    }
  }

  PetscOptionsEList("-mat_dscpack_DBLAS","BLAS level used in the distributed phase","None",dtype,2,dtype[1],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      lu->DBLASLevel = DSC_DBLAS1;
      break;
    case 1:
      lu->DBLASLevel = DSC_DBLAS2;
      break;
    }
  }
  PetscOptionsEnd();

  /* force the full ordering + symbolic factorization on the first numeric pass */
  lu->flg = DIFFERENT_NONZERO_PATTERN;

  lu->My_DSC_Solver = DSC_Begin();
  lu->CleanUpDSCPACK = PETSC_TRUE;
  *F = B;
  return(0);
}
549: PetscErrorCode MatAssemblyEnd_DSCPACK(Mat A,MatAssemblyType mode) {
551: Mat_DSC *lu=(Mat_DSC*)A->spptr;
554: (*lu->MatAssemblyEnd)(A,mode);
555: lu->MatCholeskyFactorSymbolic = A->ops->choleskyfactorsymbolic;
556: A->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_DSCPACK;
557: return(0);
558: }
562: PetscErrorCode MatFactorInfo_DSCPACK(Mat A,PetscViewer viewer)
563: {
564: Mat_DSC *lu=(Mat_DSC*)A->spptr;
566: char *s=0;
567:
569: PetscViewerASCIIPrintf(viewer,"DSCPACK run parameters:\n");
571: switch (lu->order_code) {
572: case 1: s = "ND"; break;
573: case 2: s = "Hybrid with Minimum Degree"; break;
574: case 3: s = "Hybrid with Minimum Deficiency"; break;
575: }
576: PetscViewerASCIIPrintf(viewer," order_code: %s \n",s);
578: switch (lu->scheme_code) {
579: case 1: s = "standard factorization"; break;
580: case 2: s = "factorization + selective inversion"; break;
581: }
582: PetscViewerASCIIPrintf(viewer," scheme_code: %s \n",s);
584: switch (lu->stat) {
585: case 0: s = "NO"; break;
586: case 1: s = "YES"; break;
587: }
588: PetscViewerASCIIPrintf(viewer," display stats: %s \n",s);
589:
590: if ( lu->factor_type == DSC_LLT) {
591: s = "LLT";
592: } else if ( lu->factor_type == DSC_LDLT){
593: s = "LDLT";
594: } else {
595: SETERRQ(PETSC_ERR_PLIB,"Unknown factor type");
596: }
597: PetscViewerASCIIPrintf(viewer," factor type: %s \n",s);
599: if ( lu->LBLASLevel == DSC_LBLAS1) {
600: s = "BLAS1";
601: } else if ( lu->LBLASLevel == DSC_LBLAS2){
602: s = "BLAS2";
603: } else if ( lu->LBLASLevel == DSC_LBLAS3){
604: s = "BLAS3";
605: } else {
606: SETERRQ(PETSC_ERR_PLIB,"Unknown local phase BLAS level");
607: }
608: PetscViewerASCIIPrintf(viewer," local phase BLAS level: %s \n",s);
609:
610: if ( lu->DBLASLevel == DSC_DBLAS1) {
611: s = "BLAS1";
612: } else if ( lu->DBLASLevel == DSC_DBLAS2){
613: s = "BLAS2";
614: } else {
615: SETERRQ(PETSC_ERR_PLIB,"Unknown distributed phase BLAS level");
616: }
617: PetscViewerASCIIPrintf(viewer," distributed phase BLAS level: %s \n",s);
618: return(0);
619: }
623: PetscErrorCode MatView_DSCPACK(Mat A,PetscViewer viewer) {
624: PetscErrorCode ierr;
625: PetscMPIInt size;
626: PetscTruth iascii;
627: PetscViewerFormat format;
628: Mat_DSC *lu=(Mat_DSC*)A->spptr;
631: /* This convertion ugliness is because MatView for BAIJ types calls MatConvert to AIJ */
632: size = lu->size;
633: if (size==1) {
634: MatConvert(A,MATSEQBAIJ,MAT_REUSE_MATRIX,&A);
635: } else {
636: MatConvert(A,MATMPIBAIJ,MAT_REUSE_MATRIX,&A);
637: }
639: MatView(A,viewer);
641: MatConvert(A,MATDSCPACK,MAT_REUSE_MATRIX,&A);
643: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
644: if (iascii) {
645: PetscViewerGetFormat(viewer,&format);
646: if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
647: MatFactorInfo_DSCPACK(A,viewer);
648: }
649: }
650: return(0);
651: }
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIBAIJSetPreallocation_MPIDSCPACK(Mat B,PetscInt bs,PetscInt d_nz,PetscInt *d_nnz,PetscInt o_nz,PetscInt *o_nnz)
{
  /* Preallocation hook installed on MPIBAIJ-based DSCPACK matrices: run the
     inherited MPIBAIJ preallocation, then convert the local diagonal block
     to the DSCPACK type. */
  Mat     A;
  Mat_DSC *lu = (Mat_DSC*)B->spptr;

  /*
    After performing the MPIBAIJ Preallocation, we need to convert the local diagonal block matrix
    into DSCPACK type so that the block jacobi preconditioner (for example) can use DSCPACK.  I would
    like this to be done in the MatCreate routine, but the creation of this inner matrix requires
    block size info so that PETSc can determine the local size properly.  The block size info is set
    in the preallocation routine.
  */
  (*lu->MatPreallocate)(B,bs,d_nz,d_nnz,o_nz,o_nnz);
  A = ((Mat_MPIBAIJ *)B->data)->A;
  MatConvert_Base_DSCPACK(A,MATDSCPACK,MAT_REUSE_MATRIX,&A);
  return(0);
}
PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_Base_DSCPACK(Mat A,const MatType type,MatReuse reuse,Mat *newmat)
{
  /* This routine is only called to convert to MATDSCPACK            */
  /* from MATSEQBAIJ if A has a single process communicator          */
  /* or MATMPIBAIJ otherwise, so we will ignore 'MatType type'.      */
  /* It saves the base type's function pointers in a new Mat_DSC     */
  /* context, installs the DSCPACK overrides, and registers the      */
  /* conversion (and, in parallel, preallocation) hooks.             */
  MPI_Comm comm;
  Mat      B=*newmat;
  Mat_DSC  *lu;
  void     (*f)(void);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatDuplicate(A,MAT_COPY_VALUES,&B);
  }

  PetscObjectGetComm((PetscObject)A,&comm);
  PetscNew(Mat_DSC,&lu);

  /* save the base type's function pointers so they can be restored later */
  lu->MatDuplicate               = A->ops->duplicate;
  lu->MatView                    = A->ops->view;
  lu->MatAssemblyEnd             = A->ops->assemblyend;
  lu->MatCholeskyFactorSymbolic  = A->ops->choleskyfactorsymbolic;
  lu->MatDestroy                 = A->ops->destroy;
  lu->CleanUpDSCPACK             = PETSC_FALSE;
  lu->bs                         = A->bs;

  /* install the DSCPACK overrides */
  B->spptr                       = (void*)lu;
  B->ops->duplicate              = MatDuplicate_DSCPACK;
  B->ops->view                   = MatView_DSCPACK;
  B->ops->assemblyend            = MatAssemblyEnd_DSCPACK;
  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_DSCPACK;
  B->ops->destroy                = MatDestroy_DSCPACK;

  MPI_Comm_size(comm,&(lu->size));
  if (lu->size == 1) {
    PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_seqbaij_dscpack_C",
                                      "MatConvert_Base_DSCPACK",MatConvert_Base_DSCPACK);
    PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_dscpack_seqbaij_C",
                                      "MatConvert_DSCPACK_Base",MatConvert_DSCPACK_Base);
  } else {
    /* I really don't like needing to know the tag: MatMPIBAIJSetPreallocation_C */
    PetscObjectQueryFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C",&f);
    if (f) {
      lu->MatPreallocate = (PetscErrorCode (*)(Mat,PetscInt,PetscInt,PetscInt*,PetscInt,PetscInt*))f;
      PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIBAIJSetPreallocation_C",
                                        "MatMPIBAIJSetPreallocation_MPIDSCPACK",
                                        MatMPIBAIJSetPreallocation_MPIDSCPACK);
    }
    PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpibaij_dscpack_C",
                                      "MatConvert_Base_DSCPACK",MatConvert_Base_DSCPACK);
    PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_dscpack_mpibaij_C",
                                      "MatConvert_DSCPACK_Base",MatConvert_DSCPACK_Base);
  }
  PetscObjectChangeTypeName((PetscObject)B,MATDSCPACK);
  *newmat = B;
  return(0);
}
PetscErrorCode MatDuplicate_DSCPACK(Mat A, MatDuplicateOption op, Mat *M) {
  /* Duplicate via the inherited base-type routine, then copy the DSCPACK
     context verbatim into the duplicate's spptr. */
  Mat_DSC *lu=(Mat_DSC *)A->spptr;

  (*lu->MatDuplicate)(A,op,M);
  /* NOTE(review): this is a shallow copy -- the duplicate shares the IS/Vec/
     solver handles held in lu; looks like a double-free hazard if both
     matrices are destroyed with CleanUpDSCPACK set -- confirm intent */
  PetscMemcpy((*M)->spptr,lu,sizeof(Mat_DSC));
  return(0);
}
752: /*MC
753: MATDSCPACK - MATDSCPACK = "dscpack" - A matrix type providing direct solvers (Cholesky) for sequential
754: or distributed matrices via the external package DSCPACK.
756: If DSCPACK is installed (see the manual for
757: instructions on how to declare the existence of external packages),
758: a matrix type can be constructed which invokes DSCPACK solvers.
759: After calling MatCreate(...,A), simply call MatSetType(A,MATDSCPACK).
760: This matrix type is only supported for double precision real.
762: This matrix inherits from MATSEQBAIJ if constructed with a single process communicator,
763: and from MATMPIBAIJ otherwise. As a result, for sequential matrices, MatSeqBAIJSetPreallocation is
764: supported, and similarly MatMPIBAIJSetPreallocation is supported for distributed matrices. It is
765: recommended that you call both of the above preallocation routines for simplicity. Also,
  MatConvert can be called to perform in-place conversion to and from MATSEQBAIJ or MATMPIBAIJ
767: for sequential or distributed matrices respectively.
769: Options Database Keys:
770: + -mat_type dscpack - sets the matrix type to dscpack during a call to MatSetFromOptions()
771: . -mat_dscpack_order <1,2,3> - DSCPACK ordering, 1:ND, 2:Hybrid with Minimum Degree, 3:Hybrid with Minimum Deficiency
772: . -mat_dscpack_scheme <1,2> - factorization scheme, 1:standard factorization, 2: factorization with selective inversion
773: . -mat_dscpack_factor <LLT,LDLT> - the type of factorization to be performed.
774: . -mat_dscpack_MaxMemAllowed <n> - the maximum memory to be used during factorization
775: . -mat_dscpack_stats <0,1> - display stats of the factorization and solves during MatDestroy(), 0: no display, 1: display
776: . -mat_dscpack_LBLAS <LBLAS1,LBLAS2,LBLAS3> - BLAS level used in the local phase
777: - -mat_dscpack_DBLAS <DBLAS1,DBLAS2> - BLAS level used in the distributed phase
779: Level: beginner
781: .seealso: PCCHOLESKY
782: M*/
787: PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_DSCPACK(Mat A)
788: {
790: PetscMPIInt size;
793: /* Change type name before calling MatSetType to force proper construction of SeqBAIJ or MPIBAIJ */
794: /* and DSCPACK types */
795: PetscObjectChangeTypeName((PetscObject)A,MATDSCPACK);
796: MPI_Comm_size(A->comm,&size);
797: if (size == 1) {
798: MatSetType(A,MATSEQBAIJ);
799: } else {
800: MatSetType(A,MATMPIBAIJ);
801: }
802: MatConvert_Base_DSCPACK(A,MATDSCPACK,MAT_REUSE_MATRIX,&A);
803: return(0);
804: }