Actual source code: mumps.c
1: #define PETSCMAT_DLL
3: /*
4: Provides an interface to the MUMPS sparse solver
5: */
6: #include "../src/mat/impls/aij/seq/aij.h"
7: #include "../src/mat/impls/aij/mpi/mpiaij.h"
8: #include "../src/mat/impls/sbaij/seq/sbaij.h"
9: #include "../src/mat/impls/sbaij/mpi/mpisbaij.h"
12: #if defined(PETSC_USE_COMPLEX)
13: #include "zmumps_c.h"
14: #else
15: #include "dmumps_c.h"
16: #endif
18: #define JOB_INIT -1
19: #define JOB_END -2
20: /* macros so that indices match the MUMPS documentation */
21: #define ICNTL(I) icntl[(I)-1]
22: #define CNTL(I) cntl[(I)-1]
23: #define INFOG(I) infog[(I)-1]
24: #define INFO(I) info[(I)-1]
25: #define RINFOG(I) rinfog[(I)-1]
26: #define RINFO(I) rinfo[(I)-1]
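/* Illustrative note (not in the original source): these macros let the 1-based
   notation of the MUMPS Users' Guide be used directly on the C structs, e.g.
       id.ICNTL(4) = 0;   expands to   id.icntl[4-1] = 0;
   so settings copied from the manual need no manual index shift. */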
28: typedef struct {
29: #if defined(PETSC_USE_COMPLEX)
30: ZMUMPS_STRUC_C id;
31: #else
32: DMUMPS_STRUC_C id;
33: #endif
34: MatStructure matstruc;
35: PetscMPIInt myid,size;
36: PetscInt *irn,*jcn,sym,nSolve;
37: PetscScalar *val;
38: MPI_Comm comm_mumps;
39: VecScatter scat_rhs, scat_sol;
40: PetscTruth isAIJ,CleanUpMUMPS;
41: Vec b_seq,x_seq;
42: PetscErrorCode (*MatDestroy)(Mat);
43: } Mat_MUMPS;
45: EXTERN PetscErrorCode MatDuplicate_MUMPS(Mat,MatDuplicateOption,Mat*);
47: /* Convert a PETSc MPIAIJ or MPISBAIJ matrix to triples: row[nz], col[nz], val[nz] */
48: /*
49: input:
50: A - matrix in mpiaij or mpisbaij (bs=1) format
51: shift - 0: C style (0-based) indices; 1: Fortran style (1-based) indices
52: valOnly - FALSE: arrays are allocated and the whole triple is filled
53: TRUE: only the values in the v array are updated
54: output:
55: nnz - length of r, c, and v (number of local nonzero entries of A)
56: r, c, v - row indices, col indices, matrix values (matrix triples)
57: */
58: PetscErrorCode MatConvertToTriples(Mat A,int shift,PetscTruth valOnly,int *nnz,int **r, int **c, PetscScalar **v)
59: {
60: PetscInt *ai, *aj, *bi, *bj, rstart,nz, *garray;
62: PetscInt i,j,jj,jB,irow,m=A->rmap->n,*ajj,*bjj,countA,countB,colA_start,jcol;
63: PetscInt *row,*col;
64: PetscScalar *av, *bv,*val;
65: PetscTruth isAIJ;
68: PetscTypeCompare((PetscObject)A,MATMPIAIJ,&isAIJ);
69: if (isAIJ){
70: Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
71: Mat_SeqAIJ *aa=(Mat_SeqAIJ*)(mat->A)->data;
72: Mat_SeqAIJ *bb=(Mat_SeqAIJ*)(mat->B)->data;
73: nz = aa->nz + bb->nz;
74: ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
75: garray = mat->garray;
76: av=aa->a; bv=bb->a;
77:
78: } else {
79: Mat_MPISBAIJ *mat = (Mat_MPISBAIJ*)A->data;
80: Mat_SeqSBAIJ *aa=(Mat_SeqSBAIJ*)(mat->A)->data;
81: Mat_SeqBAIJ *bb=(Mat_SeqBAIJ*)(mat->B)->data;
82: if (A->rmap->bs > 1) SETERRQ1(PETSC_ERR_SUP," bs=%d is not supported yet\n", A->rmap->bs);
83: nz = aa->nz + bb->nz;
84: ai=aa->i; aj=aa->j; bi=bb->i; bj=bb->j; rstart= A->rmap->rstart;
85: garray = mat->garray;
86: av=aa->a; bv=bb->a;
87: }
89: if (!valOnly){
90: PetscMalloc(nz*sizeof(PetscInt) ,&row);
91: PetscMalloc(nz*sizeof(PetscInt),&col);
92: PetscMalloc(nz*sizeof(PetscScalar),&val);
93: *r = row; *c = col; *v = val;
94: } else {
95: row = *r; col = *c; val = *v;
96: }
97: *nnz = nz;
99: jj = 0; irow = rstart;
100: for ( i=0; i<m; i++ ) {
101: ajj = aj + ai[i]; /* ptr to the beginning of this row */
102: countA = ai[i+1] - ai[i];
103: countB = bi[i+1] - bi[i];
104: bjj = bj + bi[i];
106: /* get jB, the number of leading B-part entries whose global col index precedes the smallest A-part col index */
107: colA_start = rstart + ajj[0]; /* the smallest col index for A */
108: j=-1;
109: do {
110: j++;
111: if (j == countB) break;
112: jcol = garray[bjj[j]];
113: } while (jcol < colA_start);
114: jB = j;
115:
116: /* B-part, smaller col index (colA_start already computed above) */
118: for (j=0; j<jB; j++){
119: jcol = garray[bjj[j]];
120: if (!valOnly){
121: row[jj] = irow + shift; col[jj] = jcol + shift;
123: }
124: val[jj++] = *bv++;
125: }
126: /* A-part */
127: for (j=0; j<countA; j++){
128: if (!valOnly){
129: row[jj] = irow + shift; col[jj] = rstart + ajj[j] + shift;
130: }
131: val[jj++] = *av++;
132: }
133: /* B-part, larger col index */
134: for (j=jB; j<countB; j++){
135: if (!valOnly){
136: row[jj] = irow + shift; col[jj] = garray[bjj[j]] + shift;
137: }
138: val[jj++] = *bv++;
139: }
140: irow++;
141: }
142: return(0);
143: }
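/* Usage sketch (illustrative, mirroring how MatFactorNumeric_MUMPS below calls
   this routine): the first call allocates the triples with Fortran (1-based)
   indices; later calls with valOnly=PETSC_TRUE refresh only the values.

       PetscInt    nnz,*row,*col;
       PetscScalar *val;
       MatConvertToTriples(A,1,PETSC_FALSE,&nnz,&row,&col,&val);   first call: allocate and fill
       ... values of A change, same nonzero pattern ...
       MatConvertToTriples(A,1,PETSC_TRUE,&nnz,&row,&col,&val);    update val only

   The row/col/val arrays are eventually freed by MatDestroy_MUMPS(). */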
147: PetscErrorCode MatDestroy_MUMPS(Mat A)
148: {
149: Mat_MUMPS *lu=(Mat_MUMPS*)A->spptr;
151: PetscTruth isSeqAIJ,isSeqSBAIJ;
154: PetscTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);
155: PetscTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
157: if (lu->CleanUpMUMPS) {
158: /* Terminate the MUMPS instance and deallocate memory */
159: if (lu->id.sol_loc){PetscFree2(lu->id.sol_loc,lu->id.isol_loc);}
160: if (lu->scat_rhs){VecScatterDestroy(lu->scat_rhs);}
161: if (lu->b_seq) {VecDestroy(lu->b_seq);}
162: if (lu->nSolve && lu->scat_sol){VecScatterDestroy(lu->scat_sol);}
163: if (lu->nSolve && lu->x_seq){VecDestroy(lu->x_seq);}
164: /* val is reused for SeqAIJ/SBAIJ - but malloced for MPIAIJ/SBAIJ */
165: if (!(isSeqAIJ || isSeqSBAIJ) && lu->val){PetscFree(lu->val);}
166: lu->id.job=JOB_END;
167: #if defined(PETSC_USE_COMPLEX)
168: zmumps_c(&lu->id);
169: #else
170: dmumps_c(&lu->id);
171: #endif
172: PetscFree(lu->irn);
173: PetscFree(lu->jcn);
174: MPI_Comm_free(&(lu->comm_mumps));
175: }
176: /* clear composed functions */
177: PetscObjectComposeFunctionDynamic((PetscObject)A,"MatFactorGetSolverPackage_C","",PETSC_NULL);
178: PetscObjectComposeFunctionDynamic((PetscObject)A,"MatMumpsSetIcntl_C","",PETSC_NULL);
179: (lu->MatDestroy)(A);
180: return(0);
181: }
185: PetscErrorCode MatSolve_MUMPS(Mat A,Vec b,Vec x)
186: {
187: Mat_MUMPS *lu=(Mat_MUMPS*)A->spptr;
188: PetscScalar *array;
189: Vec x_seq;
190: IS is_iden,is_petsc;
192: PetscInt i;
195: lu->id.nrhs = 1;
196: x_seq = lu->b_seq;
197: if (lu->size > 1){
198: /* MUMPS only supports a centralized rhs. Scatter b into a sequential rhs vector */
199: VecScatterBegin(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
200: VecScatterEnd(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
201: if (!lu->myid) {VecGetArray(x_seq,&array);}
202: } else { /* size == 1 */
203: VecCopy(b,x);
204: VecGetArray(x,&array);
205: }
206: if (!lu->myid) { /* define rhs on the host */
207: lu->id.nrhs = 1;
208: #if defined(PETSC_USE_COMPLEX)
209: lu->id.rhs = (mumps_double_complex*)array;
210: #else
211: lu->id.rhs = array;
212: #endif
213: }
214: if (lu->size == 1){
215: VecRestoreArray(x,&array);
216: } else if (!lu->myid){
217: VecRestoreArray(x_seq,&array);
218: }
220: if (lu->size > 1){
221: /* distributed solution */
222: lu->id.ICNTL(21) = 1;
223: if (!lu->nSolve){
224: /* Create x_seq=sol_loc for repeated use */
225: PetscInt lsol_loc;
226: PetscScalar *sol_loc;
227: lsol_loc = lu->id.INFO(23); /* length of sol_loc */
228: PetscMalloc2(lsol_loc,PetscScalar,&sol_loc,lsol_loc,PetscInt,&lu->id.isol_loc);
229: lu->id.lsol_loc = lsol_loc;
230: #if defined(PETSC_USE_COMPLEX)
231: lu->id.sol_loc = (mumps_double_complex*)sol_loc;
232: #else
233: lu->id.sol_loc = sol_loc;
234: #endif
235: VecCreateSeqWithArray(PETSC_COMM_SELF,lsol_loc,sol_loc,&lu->x_seq);
236: }
237: }
239: /* solve phase */
240: /*-------------*/
241: lu->id.job = 3;
242: #if defined(PETSC_USE_COMPLEX)
243: zmumps_c(&lu->id);
244: #else
245: dmumps_c(&lu->id);
246: #endif
247: if (lu->id.INFOG(1) < 0) {
248: SETERRQ1(PETSC_ERR_LIB,"Error reported by MUMPS in solve phase: INFOG(1)=%d\n",lu->id.INFOG(1));
249: }
251: if (lu->size > 1) { /* convert the MUMPS distributed solution to a PETSc MPI vector x */
252: if (!lu->nSolve){ /* create scatter scat_sol */
253: ISCreateStride(PETSC_COMM_SELF,lu->id.lsol_loc,0,1,&is_iden); /* from */
254: for (i=0; i<lu->id.lsol_loc; i++){
255: lu->id.isol_loc[i] -= 1; /* change Fortran style to C style */
256: }
257: ISCreateGeneral(PETSC_COMM_SELF,lu->id.lsol_loc,lu->id.isol_loc,&is_petsc); /* to */
258: VecScatterCreate(lu->x_seq,is_iden,x,is_petsc,&lu->scat_sol);
259: ISDestroy(is_iden);
260: ISDestroy(is_petsc);
261: }
262: VecScatterBegin(lu->scat_sol,lu->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
263: VecScatterEnd(lu->scat_sol,lu->x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
264: }
265: lu->nSolve++;
266: return(0);
267: }
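/* Note on the distributed-solution path above (explanatory sketch): with
   ICNTL(21)=1 MUMPS returns each process' piece of the solution in sol_loc,
   with 1-based global indices in isol_loc. The scatter built on the first
   solve realizes, in effect,
       x[isol_loc[i]-1] = sol_loc[i],  i = 0,...,lsol_loc-1
   and is then reused for all subsequent solves. */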
269: #if !defined(PETSC_USE_COMPLEX)
270: /*
271: input:
272: F: numeric factor
273: output:
274: nneg: total number of negative pivots
275: nzero: always 0 (zero pivots are not reported by this routine)
276: npos: (global dimension of F) - nneg
277: */
281: PetscErrorCode MatGetInertia_SBAIJMUMPS(Mat F,int *nneg,int *nzero,int *npos)
282: {
283: Mat_MUMPS *lu =(Mat_MUMPS*)F->spptr;
285: PetscMPIInt size;
288: MPI_Comm_size(((PetscObject)F)->comm,&size);
289: /* MUMPS 4.3.1 calls ScaLAPACK when ICNTL(13)=0 (default), which does not offer the possibility to compute the inertia of a dense matrix. Set ICNTL(13)=1 to skip ScaLAPACK */
290: if (size > 1 && lu->id.ICNTL(13) != 1){
291: SETERRQ1(PETSC_ERR_ARG_WRONG,"ICNTL(13)=%d. -mat_mumps_icntl_13 must be set to 1 for a correct global matrix inertia\n",lu->id.ICNTL(13));
292: }
293: if (nneg){
294: if (!lu->myid){
295: *nneg = lu->id.INFOG(12);
296: }
297: MPI_Bcast(nneg,1,MPI_INT,0,lu->comm_mumps);
298: }
299: if (nzero) *nzero = 0;
300: if (npos) *npos = F->rmap->N - (*nneg); /* note: requesting npos requires requesting nneg as well */
301: return(0);
302: }
303: #endif /* !defined(PETSC_USE_COMPLEX) */
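/* Usage sketch (illustrative): the inertia of a factored shifted matrix
   A - sigma*I gives the number of eigenvalues below sigma (Sturm sequence
   counting). Assuming F came from MatGetFactor(...,MAT_FACTOR_CHOLESKY,...)
   and, in parallel, -mat_mumps_icntl_13 1 was set as required above:

       PetscInt nneg,nzero,npos;
       MatGetInertia(F,&nneg,&nzero,&npos);    dispatches to the routine above
*/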
307: PetscErrorCode MatFactorNumeric_MUMPS(Mat F,Mat A,const MatFactorInfo *info)
308: {
309: Mat_MUMPS *lu =(Mat_MUMPS*)(F)->spptr;
311: PetscInt rnz,nnz,nz=0,i,M=A->rmap->N,*ai,*aj,icntl;
312: PetscTruth valOnly,flg;
313: Mat F_diag;
314: IS is_iden;
315: Vec b;
316: PetscTruth isSeqAIJ,isSeqSBAIJ;
319: PetscTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);
320: PetscTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
321: if (lu->matstruc == DIFFERENT_NONZERO_PATTERN){
322: (F)->ops->solve = MatSolve_MUMPS;
324: /* Initialize a MUMPS instance */
325: MPI_Comm_rank(((PetscObject)A)->comm, &lu->myid);
326: MPI_Comm_size(((PetscObject)A)->comm,&lu->size);
327: lu->id.job = JOB_INIT;
328: MPI_Comm_dup(((PetscObject)A)->comm,&(lu->comm_mumps));
329: lu->id.comm_fortran = MPI_Comm_c2f(lu->comm_mumps);
331: /* Set mumps options */
332: PetscOptionsBegin(((PetscObject)A)->comm,((PetscObject)A)->prefix,"MUMPS Options","Mat");
333: lu->id.par=1; /* host participates in factorization and solve */
334: lu->id.sym=lu->sym;
335: if (lu->sym == 2){
336: PetscOptionsInt("-mat_mumps_sym","SYM: 1 if the matrix is SPD, 2 if general symmetric","None",lu->id.sym,&icntl,&flg);
337: if (flg && icntl == 1) lu->id.sym=icntl; /* matrix is SPD */
338: }
339: #if defined(PETSC_USE_COMPLEX)
340: zmumps_c(&lu->id);
341: #else
342: dmumps_c(&lu->id);
343: #endif
344:
345: if (isSeqAIJ || isSeqSBAIJ){
346: lu->id.ICNTL(18) = 0; /* centralized assembled matrix input */
347: } else {
348: lu->id.ICNTL(18) = 3; /* distributed assembled matrix input */
349: }
351: icntl=-1;
352: lu->id.ICNTL(4) = 0; /* level of printing; override the MUMPS default ICNTL(4)=2 */
353: PetscOptionsInt("-mat_mumps_icntl_4","ICNTL(4): level of printing (0 to 4)","None",lu->id.ICNTL(4),&icntl,&flg);
354: if ((flg && icntl > 0) || PetscLogPrintInfo) {
355: lu->id.ICNTL(4)=icntl; /* and use mumps default icntl(i), i=1,2,3 */
356: } else { /* no output */
357: lu->id.ICNTL(1) = 0; /* error message, default= 6 */
358: lu->id.ICNTL(2) = 0; /* output stream for diagnostic printing, statistics, and warning. default=0 */
359: lu->id.ICNTL(3) = 0; /* output stream for global information, default=6 */
360: }
361: PetscOptionsInt("-mat_mumps_icntl_6","ICNTL(6): column permutation and/or scaling to get a zero-free diagonal (0 to 7)","None",lu->id.ICNTL(6),&lu->id.ICNTL(6),PETSC_NULL);
362: icntl=-1;
363: PetscOptionsInt("-mat_mumps_icntl_7","ICNTL(7): matrix ordering (0 to 7)","None",lu->id.ICNTL(7),&icntl,&flg);
364: if (flg) {
365: if (icntl == 1){
366: SETERRQ(PETSC_ERR_SUP,"ICNTL(7)=1 (ordering given by the user in PERM_IN) is not supported by the PETSc/MUMPS interface\n");
367: } else {
368: lu->id.ICNTL(7) = icntl;
369: }
370: }
371: PetscOptionsInt("-mat_mumps_icntl_8","ICNTL(8): scaling strategy (-2 to 7 or 77)","None",lu->id.ICNTL(8),&lu->id.ICNTL(8),PETSC_NULL);
372: PetscOptionsInt("-mat_mumps_icntl_9","ICNTL(9): A or A^T x=b to be solved. 1: A; otherwise: A^T","None",lu->id.ICNTL(9),&lu->id.ICNTL(9),PETSC_NULL);
373: PetscOptionsInt("-mat_mumps_icntl_10","ICNTL(10): max num of refinements","None",lu->id.ICNTL(10),&lu->id.ICNTL(10),PETSC_NULL);
374: PetscOptionsInt("-mat_mumps_icntl_11","ICNTL(11): statistics related to the linear system solved (via -ksp_view)","None",lu->id.ICNTL(11),&lu->id.ICNTL(11),PETSC_NULL);
375: PetscOptionsInt("-mat_mumps_icntl_12","ICNTL(12): efficiency control: defines the ordering strategy with scaling constraints (0 to 3)","None",lu->id.ICNTL(12),&lu->id.ICNTL(12),PETSC_NULL);
376: PetscOptionsInt("-mat_mumps_icntl_13","ICNTL(13): efficiency control: with or without ScaLAPACK","None",lu->id.ICNTL(13),&lu->id.ICNTL(13),PETSC_NULL);
377: PetscOptionsInt("-mat_mumps_icntl_14","ICNTL(14): percentage of estimated workspace increase","None",lu->id.ICNTL(14),&lu->id.ICNTL(14),PETSC_NULL);
378: PetscOptionsInt("-mat_mumps_icntl_19","ICNTL(19): Schur complement","None",lu->id.ICNTL(19),&lu->id.ICNTL(19),PETSC_NULL);
380: PetscOptionsInt("-mat_mumps_icntl_22","ICNTL(22): in-core/out-of-core facility (0 or 1)","None",lu->id.ICNTL(22),&lu->id.ICNTL(22),PETSC_NULL);
381: PetscOptionsInt("-mat_mumps_icntl_23","ICNTL(23): maximum size of the working memory (MB) that MUMPS can allocate per processor","None",lu->id.ICNTL(23),&lu->id.ICNTL(23),PETSC_NULL);
382: PetscOptionsInt("-mat_mumps_icntl_24","ICNTL(24): detection of null pivot rows (0 or 1)","None",lu->id.ICNTL(24),&lu->id.ICNTL(24),PETSC_NULL);
383: PetscOptionsInt("-mat_mumps_icntl_25","ICNTL(25): computation of a null space basis","None",lu->id.ICNTL(25),&lu->id.ICNTL(25),PETSC_NULL);
384: PetscOptionsInt("-mat_mumps_icntl_26","ICNTL(26): Schur options for right-hand side or solution vector","None",lu->id.ICNTL(26),&lu->id.ICNTL(26),PETSC_NULL);
385: PetscOptionsInt("-mat_mumps_icntl_27","ICNTL(27): experimental parameter","None",lu->id.ICNTL(27),&lu->id.ICNTL(27),PETSC_NULL);
387: PetscOptionsReal("-mat_mumps_cntl_1","CNTL(1): relative pivoting threshold","None",lu->id.CNTL(1),&lu->id.CNTL(1),PETSC_NULL);
388: PetscOptionsReal("-mat_mumps_cntl_2","CNTL(2): stopping criterion of refinement","None",lu->id.CNTL(2),&lu->id.CNTL(2),PETSC_NULL);
389: PetscOptionsReal("-mat_mumps_cntl_3","CNTL(3): absolute pivoting threshold","None",lu->id.CNTL(3),&lu->id.CNTL(3),PETSC_NULL);
390: PetscOptionsReal("-mat_mumps_cntl_4","CNTL(4): value for static pivoting","None",lu->id.CNTL(4),&lu->id.CNTL(4),PETSC_NULL);
391: PetscOptionsReal("-mat_mumps_cntl_5","CNTL(5): fixation for null pivots","None",lu->id.CNTL(5),&lu->id.CNTL(5),PETSC_NULL);
392: PetscOptionsEnd();
393: }
395: /* define matrix A */
396: switch (lu->id.ICNTL(18)){
397: case 0: /* centralized assembled matrix input (size=1) */
398: if (!lu->myid) {
399: if (isSeqAIJ){
400: Mat_SeqAIJ *aa = (Mat_SeqAIJ*)A->data;
401: nz = aa->nz;
402: ai = aa->i; aj = aa->j; lu->val = aa->a;
403: } else if (isSeqSBAIJ) {
404: Mat_SeqSBAIJ *aa = (Mat_SeqSBAIJ*)A->data;
405: nz = aa->nz;
406: ai = aa->i; aj = aa->j; lu->val = aa->a;
407: } else {
408: SETERRQ(PETSC_ERR_SUP,"No MUMPS factorization for this matrix type");
409: }
410: if (lu->matstruc == DIFFERENT_NONZERO_PATTERN){ /* first numeric factorization, get irn and jcn */
411: PetscMalloc(nz*sizeof(PetscInt),&lu->irn);
412: PetscMalloc(nz*sizeof(PetscInt),&lu->jcn);
413: nz = 0;
414: for (i=0; i<M; i++){
415: rnz = ai[i+1] - ai[i];
416: while (rnz--) { /* Fortran row/col index! */
417: lu->irn[nz] = i+1; lu->jcn[nz] = (*aj)+1; aj++; nz++;
418: }
419: }
420: }
421: }
422: break;
423: case 3: /* distributed assembled matrix input (size>1) */
424: if (lu->matstruc == DIFFERENT_NONZERO_PATTERN){
425: valOnly = PETSC_FALSE;
426: } else {
427: valOnly = PETSC_TRUE; /* only update mat values, not row and col index */
428: }
429: MatConvertToTriples(A,1,valOnly, &nnz, &lu->irn, &lu->jcn, &lu->val);
430: break;
431: default: SETERRQ(PETSC_ERR_SUP,"Matrix input format is not supported by MUMPS.");
432: }
434: /* analysis phase */
435: /*----------------*/
436: if (lu->matstruc == DIFFERENT_NONZERO_PATTERN){
437: lu->id.job = 1;
439: lu->id.n = M;
440: switch (lu->id.ICNTL(18)){
441: case 0: /* centralized assembled matrix input */
442: if (!lu->myid) {
443: lu->id.nz =nz; lu->id.irn=lu->irn; lu->id.jcn=lu->jcn;
444: if (lu->id.ICNTL(6)>1){
445: #if defined(PETSC_USE_COMPLEX)
446: lu->id.a = (mumps_double_complex*)lu->val;
447: #else
448: lu->id.a = lu->val;
449: #endif
450: }
451: }
452: break;
453: case 3: /* distributed assembled matrix input (size>1) */
454: lu->id.nz_loc = nnz;
455: lu->id.irn_loc=lu->irn; lu->id.jcn_loc=lu->jcn;
456: if (lu->id.ICNTL(6)>1) {
457: #if defined(PETSC_USE_COMPLEX)
458: lu->id.a_loc = (mumps_double_complex*)lu->val;
459: #else
460: lu->id.a_loc = lu->val;
461: #endif
462: }
463: /* MUMPS only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
464: if (!lu->myid){
465: VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&lu->b_seq);
466: ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);
467: } else {
468: VecCreateSeq(PETSC_COMM_SELF,0,&lu->b_seq);
469: ISCreateStride(PETSC_COMM_SELF,0,0,1,&is_iden);
470: }
471: VecCreate(((PetscObject)A)->comm,&b);
472: VecSetSizes(b,A->rmap->n,PETSC_DECIDE);
473: VecSetFromOptions(b);
475: VecScatterCreate(b,is_iden,lu->b_seq,is_iden,&lu->scat_rhs);
476: ISDestroy(is_iden);
477: VecDestroy(b);
478: break;
479: }
480: #if defined(PETSC_USE_COMPLEX)
481: zmumps_c(&lu->id);
482: #else
483: dmumps_c(&lu->id);
484: #endif
485: if (lu->id.INFOG(1) < 0) {
486: SETERRQ1(PETSC_ERR_LIB,"Error reported by MUMPS in analysis phase: INFOG(1)=%d\n",lu->id.INFOG(1));
487: }
488: }
490: /* numerical factorization phase */
491: /*-------------------------------*/
492: lu->id.job = 2;
493: if (!lu->id.ICNTL(18)) { /* centralized assembled matrix input */
494: if (!lu->myid) {
495: #if defined(PETSC_USE_COMPLEX)
496: lu->id.a = (mumps_double_complex*)lu->val;
497: #else
498: lu->id.a = lu->val;
499: #endif
500: }
501: } else {
502: #if defined(PETSC_USE_COMPLEX)
503: lu->id.a_loc = (mumps_double_complex*)lu->val;
504: #else
505: lu->id.a_loc = lu->val;
506: #endif
507: }
508: #if defined(PETSC_USE_COMPLEX)
509: zmumps_c(&lu->id);
510: #else
511: dmumps_c(&lu->id);
512: #endif
513: if (lu->id.INFOG(1) < 0) {
514: if (lu->id.INFO(1) == -13) {
515: SETERRQ1(PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: Cannot allocate required memory %d megabytes\n",lu->id.INFO(2));
516: } else {
517: SETERRQ2(PETSC_ERR_LIB,"Error reported by MUMPS in numerical factorization phase: INFO(1)=%d, INFO(2)=%d\n",lu->id.INFO(1),lu->id.INFO(2));
518: }
519: }
521: if (!lu->myid && lu->id.ICNTL(16) > 0){
522: SETERRQ1(PETSC_ERR_LIB,"MUMPS error: ICNTL(16)=%d\n",lu->id.ICNTL(16));
523: }
525: if (lu->size > 1){
526: if ((F)->factor == MAT_FACTOR_LU){
527: F_diag = ((Mat_MPIAIJ *)(F)->data)->A;
528: } else {
529: F_diag = ((Mat_MPISBAIJ *)(F)->data)->A;
530: }
531: F_diag->assembled = PETSC_TRUE;
532: if (lu->nSolve){
533: VecScatterDestroy(lu->scat_sol);
534: PetscFree2(lu->id.sol_loc,lu->id.isol_loc);
535: VecDestroy(lu->x_seq);
536: }
537: }
538: (F)->assembled = PETSC_TRUE;
539: lu->matstruc = SAME_NONZERO_PATTERN;
540: lu->CleanUpMUMPS = PETSC_TRUE;
541: lu->nSolve = 0;
542: return(0);
543: }
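/* For reference, a minimal sketch of the raw MUMPS calling sequence that this
   routine and MatSolve_MUMPS()/MatDestroy_MUMPS() drive (real, sequential
   case; irn/jcn/a are user-supplied 1-based triples of length nz, b the rhs;
   error checks omitted):

       DMUMPS_STRUC_C id;
       id.job = JOB_INIT; id.par = 1; id.sym = 0;
       id.comm_fortran = MPI_Comm_c2f(PETSC_COMM_SELF);
       dmumps_c(&id);
       id.n = n; id.nz = nz; id.irn = irn; id.jcn = jcn; id.a = a;
       id.job = 1; dmumps_c(&id);        analysis
       id.job = 2; dmumps_c(&id);        numerical factorization
       id.rhs = b; id.nrhs = 1;
       id.job = 3; dmumps_c(&id);        solve; b is overwritten with the solution
       id.job = JOB_END; dmumps_c(&id);
*/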
545: /* Note: the PETSc r and c permutations are ignored */
548: PetscErrorCode MatLUFactorSymbolic_AIJMUMPS(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
549: {
550: Mat_MUMPS *lu = (Mat_MUMPS*)F->spptr;
553: lu->sym = 0;
554: lu->matstruc = DIFFERENT_NONZERO_PATTERN;
555: F->ops->lufactornumeric = MatFactorNumeric_MUMPS;
556: return(0);
557: }
560: /* Note: the PETSc r permutation is ignored */
563: PetscErrorCode MatCholeskyFactorSymbolic_SBAIJMUMPS(Mat F,Mat A,IS r,const MatFactorInfo *info)
564: {
565: Mat_MUMPS *lu = (Mat_MUMPS*)(F)->spptr;
568: lu->sym = 2;
569: lu->matstruc = DIFFERENT_NONZERO_PATTERN;
570: (F)->ops->choleskyfactornumeric = MatFactorNumeric_MUMPS;
571: #if !defined(PETSC_USE_COMPLEX)
572: (F)->ops->getinertia = MatGetInertia_SBAIJMUMPS;
573: #endif
574: return(0);
575: }
579: PetscErrorCode MatFactorInfo_MUMPS(Mat A,PetscViewer viewer)
580: {
581: Mat_MUMPS *lu=(Mat_MUMPS*)A->spptr;
585: /* check if matrix is mumps type */
586: if (A->ops->solve != MatSolve_MUMPS) return(0);
588: PetscViewerASCIIPrintf(viewer,"MUMPS run parameters:\n");
589: PetscViewerASCIIPrintf(viewer," SYM (matrix type): %d \n",lu->id.sym);
590: PetscViewerASCIIPrintf(viewer," PAR (host participation): %d \n",lu->id.par);
591: PetscViewerASCIIPrintf(viewer," ICNTL(1) (output for error): %d \n",lu->id.ICNTL(1));
592: PetscViewerASCIIPrintf(viewer," ICNTL(2) (output of diagnostic msg):%d \n",lu->id.ICNTL(2));
593: PetscViewerASCIIPrintf(viewer," ICNTL(3) (output for global info): %d \n",lu->id.ICNTL(3));
594: PetscViewerASCIIPrintf(viewer," ICNTL(4) (level of printing): %d \n",lu->id.ICNTL(4));
595: PetscViewerASCIIPrintf(viewer," ICNTL(5) (input mat struct): %d \n",lu->id.ICNTL(5));
596: PetscViewerASCIIPrintf(viewer," ICNTL(6) (matrix prescaling): %d \n",lu->id.ICNTL(6));
597: PetscViewerASCIIPrintf(viewer," ICNTL(7) (matrix ordering): %d \n",lu->id.ICNTL(7));
598: PetscViewerASCIIPrintf(viewer," ICNTL(8) (scaling strategy): %d \n",lu->id.ICNTL(8));
599: PetscViewerASCIIPrintf(viewer," ICNTL(9) (A/A^T x=b is solved): %d \n",lu->id.ICNTL(9));
600: PetscViewerASCIIPrintf(viewer," ICNTL(10) (max num of refinements): %d \n",lu->id.ICNTL(10));
601: PetscViewerASCIIPrintf(viewer," ICNTL(11) (error analysis): %d \n",lu->id.ICNTL(11));
602: if (!lu->myid && lu->id.ICNTL(11)>0) {
603: PetscPrintf(PETSC_COMM_SELF," RINFOG(4) (inf norm of input mat): %g\n",lu->id.RINFOG(4));
604: PetscPrintf(PETSC_COMM_SELF," RINFOG(5) (inf norm of solution): %g\n",lu->id.RINFOG(5));
605: PetscPrintf(PETSC_COMM_SELF," RINFOG(6) (inf norm of residual): %g\n",lu->id.RINFOG(6));
606: PetscPrintf(PETSC_COMM_SELF," RINFOG(7),RINFOG(8) (backward error est): %g, %g\n",lu->id.RINFOG(7),lu->id.RINFOG(8));
607: PetscPrintf(PETSC_COMM_SELF," RINFOG(9) (error estimate): %g \n",lu->id.RINFOG(9));
608: PetscPrintf(PETSC_COMM_SELF," RINFOG(10),RINFOG(11)(condition numbers): %g, %g\n",lu->id.RINFOG(10),lu->id.RINFOG(11));
609:
610: }
611: PetscViewerASCIIPrintf(viewer," ICNTL(12) (efficiency control): %d \n",lu->id.ICNTL(12));
612: PetscViewerASCIIPrintf(viewer," ICNTL(13) (efficiency control): %d \n",lu->id.ICNTL(13));
613: PetscViewerASCIIPrintf(viewer," ICNTL(14) (percentage of estimated workspace increase): %d \n",lu->id.ICNTL(14));
614: /* ICNTL(15-17) not used */
615: PetscViewerASCIIPrintf(viewer," ICNTL(18) (input mat struct): %d \n",lu->id.ICNTL(18));
616: PetscViewerASCIIPrintf(viewer," ICNTL(19) (Schur complement info): %d \n",lu->id.ICNTL(19));
617: PetscViewerASCIIPrintf(viewer," ICNTL(20) (rhs sparse pattern): %d \n",lu->id.ICNTL(20));
618: PetscViewerASCIIPrintf(viewer," ICNTL(21) (solution struct): %d \n",lu->id.ICNTL(21));
619: PetscViewerASCIIPrintf(viewer," ICNTL(22) (in-core/out-of-core facility): %d \n",lu->id.ICNTL(22));
620: PetscViewerASCIIPrintf(viewer," ICNTL(23) (max size of memory that can be allocated locally): %d \n",lu->id.ICNTL(23));
622: PetscViewerASCIIPrintf(viewer," ICNTL(24) (detection of null pivot rows): %d \n",lu->id.ICNTL(24));
623: PetscViewerASCIIPrintf(viewer," ICNTL(25) (computation of a null space basis): %d \n",lu->id.ICNTL(25));
624: PetscViewerASCIIPrintf(viewer," ICNTL(26) (Schur options for rhs or solution): %d \n",lu->id.ICNTL(26));
625: PetscViewerASCIIPrintf(viewer," ICNTL(27) (experimental parameter): %d \n",lu->id.ICNTL(27));
627: PetscViewerASCIIPrintf(viewer," CNTL(1) (relative pivoting threshold): %g \n",lu->id.CNTL(1));
628: PetscViewerASCIIPrintf(viewer," CNTL(2) (stopping criterion of refinement): %g \n",lu->id.CNTL(2));
629: PetscViewerASCIIPrintf(viewer," CNTL(3) (absolute pivoting threshold): %g \n",lu->id.CNTL(3));
630: PetscViewerASCIIPrintf(viewer," CNTL(4) (value of static pivoting): %g \n",lu->id.CNTL(4));
631: PetscViewerASCIIPrintf(viewer," CNTL(5) (fixation for null pivots): %g \n",lu->id.CNTL(5));
633: /* information local to each processor */
634: if (!lu->myid) {PetscPrintf(PETSC_COMM_SELF, " RINFO(1) (local estimated flops for the elimination after analysis): \n");}
635: PetscSynchronizedPrintf(((PetscObject)A)->comm," [%d] %g \n",lu->myid,lu->id.RINFO(1));
636: PetscSynchronizedFlush(((PetscObject)A)->comm);
637: if (!lu->myid) {PetscPrintf(PETSC_COMM_SELF, " RINFO(2) (local estimated flops for the assembly after factorization): \n");}
638: PetscSynchronizedPrintf(((PetscObject)A)->comm," [%d] %g \n",lu->myid,lu->id.RINFO(2));
639: PetscSynchronizedFlush(((PetscObject)A)->comm);
640: if (!lu->myid) {PetscPrintf(PETSC_COMM_SELF, " RINFO(3) (local estimated flops for the elimination after factorization): \n");}
641: PetscSynchronizedPrintf(((PetscObject)A)->comm," [%d] %g \n",lu->myid,lu->id.RINFO(3));
642: PetscSynchronizedFlush(((PetscObject)A)->comm);
644: if (!lu->myid) {PetscPrintf(PETSC_COMM_SELF, " INFO(15) (estimated size (in MB) of MUMPS internal data for running numerical factorization): \n");}
645: PetscSynchronizedPrintf(((PetscObject)A)->comm," [%d] %d \n",lu->myid,lu->id.INFO(15));
646: PetscSynchronizedFlush(((PetscObject)A)->comm);
648: if (!lu->myid) {PetscPrintf(PETSC_COMM_SELF, " INFO(16) (size (in MB) of MUMPS internal data used during numerical factorization): \n");}
649: PetscSynchronizedPrintf(((PetscObject)A)->comm," [%d] %d \n",lu->myid,lu->id.INFO(16));
650: PetscSynchronizedFlush(((PetscObject)A)->comm);
652: if (!lu->myid) {PetscPrintf(PETSC_COMM_SELF, " INFO(23) (num of pivots eliminated on this processor after factorization): \n");}
653: PetscSynchronizedPrintf(((PetscObject)A)->comm," [%d] %d \n",lu->myid,lu->id.INFO(23));
654: PetscSynchronizedFlush(((PetscObject)A)->comm);
656: if (!lu->myid){ /* information from the host */
657: PetscViewerASCIIPrintf(viewer," RINFOG(1) (global estimated flops for the elimination after analysis): %g \n",lu->id.RINFOG(1));
658: PetscViewerASCIIPrintf(viewer," RINFOG(2) (global estimated flops for the assembly after factorization): %g \n",lu->id.RINFOG(2));
659: PetscViewerASCIIPrintf(viewer," RINFOG(3) (global estimated flops for the elimination after factorization): %g \n",lu->id.RINFOG(3));
661: PetscViewerASCIIPrintf(viewer," INFOG(3) (estimated real workspace for factors on all processors after analysis): %d \n",lu->id.INFOG(3));
662: PetscViewerASCIIPrintf(viewer," INFOG(4) (estimated integer workspace for factors on all processors after analysis): %d \n",lu->id.INFOG(4));
663: PetscViewerASCIIPrintf(viewer," INFOG(5) (estimated maximum front size in the complete tree): %d \n",lu->id.INFOG(5));
664: PetscViewerASCIIPrintf(viewer," INFOG(6) (number of nodes in the complete tree): %d \n",lu->id.INFOG(6));
665: PetscViewerASCIIPrintf(viewer," INFOG(7) (ordering option effectively used after analysis): %d \n",lu->id.INFOG(7));
666: PetscViewerASCIIPrintf(viewer," INFOG(8) (structural symmetry in percent of the permuted matrix after analysis): %d \n",lu->id.INFOG(8));
667: PetscViewerASCIIPrintf(viewer," INFOG(9) (total real/complex workspace to store the matrix factors after factorization): %d \n",lu->id.INFOG(9));
668: PetscViewerASCIIPrintf(viewer," INFOG(10) (total integer space to store the matrix factors after factorization): %d \n",lu->id.INFOG(10));
669: PetscViewerASCIIPrintf(viewer," INFOG(11) (order of largest frontal matrix after factorization): %d \n",lu->id.INFOG(11));
670: PetscViewerASCIIPrintf(viewer," INFOG(12) (number of off-diagonal pivots): %d \n",lu->id.INFOG(12));
671: PetscViewerASCIIPrintf(viewer," INFOG(13) (number of delayed pivots after factorization): %d \n",lu->id.INFOG(13));
672: PetscViewerASCIIPrintf(viewer," INFOG(14) (number of memory compresses after factorization): %d \n",lu->id.INFOG(14));
673: PetscViewerASCIIPrintf(viewer," INFOG(15) (number of steps of iterative refinement after solution): %d \n",lu->id.INFOG(15));
674: PetscViewerASCIIPrintf(viewer," INFOG(16) (estimated size (in MB) of all MUMPS internal data for factorization after analysis: value on the most memory consuming processor): %d \n",lu->id.INFOG(16));
675: PetscViewerASCIIPrintf(viewer," INFOG(17) (estimated size of all MUMPS internal data for factorization after analysis: sum over all processors): %d \n",lu->id.INFOG(17));
676: PetscViewerASCIIPrintf(viewer," INFOG(18) (size of all MUMPS internal data allocated during factorization: value on the most memory consuming processor): %d \n",lu->id.INFOG(18));
677: PetscViewerASCIIPrintf(viewer," INFOG(19) (size of all MUMPS internal data allocated during factorization: sum over all processors): %d \n",lu->id.INFOG(19));
678: PetscViewerASCIIPrintf(viewer," INFOG(20) (estimated number of entries in the factors): %d \n",lu->id.INFOG(20));
679: PetscViewerASCIIPrintf(viewer," INFOG(21) (size in MB of memory effectively used during factorization - value on the most memory consuming processor): %d \n",lu->id.INFOG(21));
680: PetscViewerASCIIPrintf(viewer," INFOG(22) (size in MB of memory effectively used during factorization - sum over all processors): %d \n",lu->id.INFOG(22));
681: PetscViewerASCIIPrintf(viewer," INFOG(23) (after analysis: value of ICNTL(6) effectively used): %d \n",lu->id.INFOG(23));
682: PetscViewerASCIIPrintf(viewer," INFOG(24) (after analysis: value of ICNTL(12) effectively used): %d \n",lu->id.INFOG(24));
683: PetscViewerASCIIPrintf(viewer," INFOG(25) (after factorization: number of pivots modified by static pivoting): %d \n",lu->id.INFOG(25));
684: }
685: return(0);
686: }
690: PetscErrorCode MatView_MUMPS(Mat A,PetscViewer viewer)
691: {
692: PetscErrorCode ierr;
693: PetscTruth iascii;
694: PetscViewerFormat format;
697: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
698: if (iascii) {
699: PetscViewerGetFormat(viewer,&format);
700: if (format == PETSC_VIEWER_ASCII_INFO){
701: MatFactorInfo_MUMPS(A,viewer);
702: }
703: }
704: return(0);
705: }
709: PetscErrorCode MatGetInfo_MUMPS(Mat A,MatInfoType flag,MatInfo *info)
710: {
711: Mat_MUMPS *lu =(Mat_MUMPS*)A->spptr;
714: info->block_size = 1.0;
715: info->nz_allocated = lu->id.INFOG(20);
716: info->nz_used = lu->id.INFOG(20);
717: info->nz_unneeded = 0.0;
718: info->assemblies = 0.0;
719: info->mallocs = 0.0;
720: info->memory = 0.0;
721: info->fill_ratio_given = 0;
722: info->fill_ratio_needed = 0;
723: info->factor_mallocs = 0;
724: return(0);
725: }
727: /*MC
728: MAT_SOLVER_MUMPS - A matrix type providing direct solvers (LU and Cholesky) for
729: distributed and sequential matrices via the external package MUMPS.
731: Works with MATAIJ and MATSBAIJ matrices
733: Options Database Keys:
734: + -mat_mumps_sym <0,1,2> - 0: the matrix is unsymmetric, 1: symmetric positive definite, 2: general symmetric
735: . -mat_mumps_icntl_4 <0,...,4> - print level
736: . -mat_mumps_icntl_6 <0,...,7> - matrix prescaling options (see MUMPS User's Guide)
737: . -mat_mumps_icntl_7 <0,...,7> - matrix orderings (see MUMPS User's Guide)
738: . -mat_mumps_icntl_9 <1,2> - A or A^T x=b to be solved: 1 denotes A, 2 denotes A^T
739: . -mat_mumps_icntl_10 <n> - maximum number of iterative refinements
740: . -mat_mumps_icntl_11 <n> - error analysis, a positive value returns statistics during -ksp_view
741: . -mat_mumps_icntl_12 <n> - efficiency control (see MUMPS User's Guide)
742: . -mat_mumps_icntl_13 <n> - efficiency control (see MUMPS User's Guide)
743: . -mat_mumps_icntl_14 <n> - efficiency control (see MUMPS User's Guide)
745: . -mat_mumps_cntl_1 <delta> - relative pivoting threshold
746: . -mat_mumps_cntl_2 <tol> - stopping criterion for refinement
747: - -mat_mumps_cntl_3 <adelta> - absolute pivoting threshold
749: Level: beginner
751: .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage
753: M*/
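/* Usage sketch (assumes an assembled AIJ matrix A and vectors b,x):

       KSP ksp;  PC pc;
       KSPCreate(PETSC_COMM_WORLD,&ksp);
       KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);
       KSPGetPC(ksp,&pc);
       PCSetType(pc,PCLU);                                use PCCHOLESKY with SBAIJ
       PCFactorSetMatSolverPackage(pc,MAT_SOLVER_MUMPS);
       KSPSetFromOptions(ksp);                            picks up the -mat_mumps_* keys above
       KSPSolve(ksp,b,x);
*/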
758: PetscErrorCode MatFactorGetSolverPackage_mumps(Mat A,const MatSolverPackage *type)
759: {
761: *type = MAT_SOLVER_MUMPS;
762: return(0);
763: }
767: /*
768: The seq and mpi versions of this function are the same
769: */
772: PetscErrorCode MatGetFactor_seqaij_mumps(Mat A,MatFactorType ftype,Mat *F)
773: {
774: Mat B;
776: Mat_MUMPS *mumps;
779: if (ftype != MAT_FACTOR_LU) {
780: SETERRQ(PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with MUMPS Cholesky, use SBAIJ matrix");
781: }
782: /* Create the factorization matrix */
783: MatCreate(((PetscObject)A)->comm,&B);
784: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
785: MatSetType(B,((PetscObject)A)->type_name);
786: MatSeqAIJSetPreallocation(B,0,PETSC_NULL);
788: B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS;
789: B->ops->view = MatView_MUMPS;
790: B->ops->getinfo = MatGetInfo_MUMPS;
791: PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_mumps",MatFactorGetSolverPackage_mumps);
792: PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMumpsSetIcntl_C","MatMumpsSetIcntl",MatMumpsSetIcntl);
793: B->factor = MAT_FACTOR_LU;
795: PetscNewLog(B,Mat_MUMPS,&mumps);
796: mumps->CleanUpMUMPS = PETSC_FALSE;
797: mumps->isAIJ = PETSC_TRUE;
798: mumps->scat_rhs = PETSC_NULL;
799: mumps->scat_sol = PETSC_NULL;
800: mumps->nSolve = 0;
801: mumps->MatDestroy = B->ops->destroy;
802: B->ops->destroy = MatDestroy_MUMPS;
803: B->spptr = (void*)mumps;
805: *F = B;
806: return(0);
807: }
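/* Usage sketch: this factory is normally reached through MatGetFactor(); the
   same pattern applies to the mpiaij/seqsbaij/mpisbaij variants below.

       Mat           F;
       MatFactorInfo info;
       IS            rperm,cperm;
       MatFactorInfoInitialize(&info);
       MatGetOrdering(A,MATORDERING_NATURAL,&rperm,&cperm);   ignored by MUMPS, see the note above
       MatGetFactor(A,MAT_SOLVER_MUMPS,MAT_FACTOR_LU,&F);
       MatLUFactorSymbolic(F,A,rperm,cperm,&info);
       MatLUFactorNumeric(F,A,&info);
       MatSolve(F,b,x);
*/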
813: PetscErrorCode MatGetFactor_mpiaij_mumps(Mat A,MatFactorType ftype,Mat *F)
814: {
815: Mat B;
817: Mat_MUMPS *mumps;
820: if (ftype != MAT_FACTOR_LU) {
821: SETERRQ(PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with MUMPS Cholesky, use SBAIJ matrix");
822: }
823: /* Create the factorization matrix */
824: MatCreate(((PetscObject)A)->comm,&B);
825: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
826: MatSetType(B,((PetscObject)A)->type_name);
827: MatSeqAIJSetPreallocation(B,0,PETSC_NULL);
828: MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);
830: B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMUMPS;
831: B->ops->view = MatView_MUMPS;
832: PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_mumps",MatFactorGetSolverPackage_mumps);
833: PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMumpsSetIcntl_C","MatMumpsSetIcntl",MatMumpsSetIcntl);
834: B->factor = MAT_FACTOR_LU;
836: PetscNewLog(B,Mat_MUMPS,&mumps);
837: mumps->CleanUpMUMPS = PETSC_FALSE;
838: mumps->isAIJ = PETSC_TRUE;
839: mumps->scat_rhs = PETSC_NULL;
840: mumps->scat_sol = PETSC_NULL;
841: mumps->nSolve = 0;
842: mumps->MatDestroy = B->ops->destroy;
843: B->ops->destroy = MatDestroy_MUMPS;
844: B->spptr = (void*)mumps;
846: *F = B;
847: return(0);
848: }
854: PetscErrorCode MatGetFactor_seqsbaij_mumps(Mat A,MatFactorType ftype,Mat *F)
855: {
856: Mat B;
858: Mat_MUMPS *mumps;
861: if (ftype != MAT_FACTOR_CHOLESKY) {
862: SETERRQ(PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with MUMPS LU, use AIJ matrix");
863: }
864: /* Create the factorization matrix */
865: MatCreate(((PetscObject)A)->comm,&B);
866: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
867: MatSetType(B,((PetscObject)A)->type_name);
868: MatSeqSBAIJSetPreallocation(B,1,0,PETSC_NULL);
869: MatMPISBAIJSetPreallocation(B,1,0,PETSC_NULL,0,PETSC_NULL);
871: B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJMUMPS;
872: B->ops->view = MatView_MUMPS;
873: PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_mumps",MatFactorGetSolverPackage_mumps);
874: PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMumpsSetIcntl_C","MatMumpsSetIcntl",MatMumpsSetIcntl);
875: B->factor = MAT_FACTOR_CHOLESKY;
877: PetscNewLog(B,Mat_MUMPS,&mumps);
878: mumps->CleanUpMUMPS = PETSC_FALSE;
879: mumps->isAIJ = PETSC_FALSE; /* SBAIJ, not AIJ */
880: mumps->scat_rhs = PETSC_NULL;
881: mumps->scat_sol = PETSC_NULL;
882: mumps->nSolve = 0;
883: mumps->MatDestroy = B->ops->destroy;
884: B->ops->destroy = MatDestroy_MUMPS;
885: B->spptr = (void*)mumps;
887: *F = B;
888: return(0);
889: }
895: PetscErrorCode MatGetFactor_mpisbaij_mumps(Mat A,MatFactorType ftype,Mat *F)
896: {
897: Mat B;
899: Mat_MUMPS *mumps;
902: if (ftype != MAT_FACTOR_CHOLESKY) {
903: SETERRQ(PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with MUMPS LU, use AIJ matrix");
904: }
905: /* Create the factorization matrix */
906: MatCreate(((PetscObject)A)->comm,&B);
907: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
908: MatSetType(B,((PetscObject)A)->type_name);
909: MatSeqSBAIJSetPreallocation(B,1,0,PETSC_NULL);
910: MatMPISBAIJSetPreallocation(B,1,0,PETSC_NULL,0,PETSC_NULL);
912: B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJMUMPS;
913: B->ops->view = MatView_MUMPS;
914: PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_mumps",MatFactorGetSolverPackage_mumps);
915: PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMumpsSetIcntl_C","MatMumpsSetIcntl",MatMumpsSetIcntl);
916: B->factor = MAT_FACTOR_CHOLESKY;
918: PetscNewLog(B,Mat_MUMPS,&mumps);
919: mumps->CleanUpMUMPS = PETSC_FALSE;
920: mumps->isAIJ = PETSC_FALSE; /* SBAIJ, not AIJ */
921: mumps->scat_rhs = PETSC_NULL;
922: mumps->scat_sol = PETSC_NULL;
923: mumps->nSolve = 0;
924: mumps->MatDestroy = B->ops->destroy;
925: B->ops->destroy = MatDestroy_MUMPS;
926: B->spptr = (void*)mumps;
928: *F = B;
929: return(0);
930: }
933: /* -------------------------------------------------------------------------------------------*/
934: /*@
935: MatMumpsSetIcntl - Set MUMPS parameter ICNTL()
937: Collective on Mat
939: Input Parameters:
940: + F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
941: . idx - index of MUMPS parameter array ICNTL()
942: - icntl - value of MUMPS ICNTL(idx)
944: Options Database:
945: . -mat_mumps_icntl_<idx> <icntl>
947: Level: beginner
949: References: MUMPS Users' Guide
951: .seealso: MatGetFactor()
952: @*/
955: PetscErrorCode MatMumpsSetIcntl(Mat F,PetscInt idx,PetscInt icntl)
956: {
957: Mat_MUMPS *lu =(Mat_MUMPS*)(F)->spptr;
960: lu->id.ICNTL(idx) = icntl;
961: return(0);
962: }
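/* Usage sketch: programmatic equivalent of the -mat_mumps_icntl_<idx> <icntl>
   options database keys, e.g. requesting MUMPS ordering option ICNTL(7)=2 on
   a factor F obtained with MatGetFactor():

       MatMumpsSetIcntl(F,7,2);
*/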