Actual source code: pastix.c
petsc-dev 2014-02-02
1: /*
2: Provides an interface to the PaStiX sparse solver
3: */
4: #include <../src/mat/impls/aij/seq/aij.h>
5: #include <../src/mat/impls/aij/mpi/mpiaij.h>
6: #include <../src/mat/impls/sbaij/seq/sbaij.h>
7: #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
9: #if defined(PETSC_USE_COMPLEX) && defined(__cplusplus)
10: #define _H_COMPLEX
11: #endif
13: EXTERN_C_BEGIN
14: #include <pastix.h>
15: EXTERN_C_END
17: #if defined(PETSC_USE_COMPLEX)
18: #if defined(PETSC_USE_REAL_SINGLE)
19: #define PASTIX_CALL c_pastix
20: #define PASTIX_CHECKMATRIX c_pastix_checkMatrix
21: #define PastixScalar COMPLEX
22: #else
23: #define PASTIX_CALL z_pastix
24: #define PASTIX_CHECKMATRIX z_pastix_checkMatrix
25: #define PastixScalar DCOMPLEX
26: #endif
28: #else /* PETSC_USE_COMPLEX */
30: #if defined(PETSC_USE_REAL_SINGLE)
31: #define PASTIX_CALL s_pastix
32: #define PASTIX_CHECKMATRIX s_pastix_checkMatrix
33: #define PastixScalar float
34: #else
35: #define PASTIX_CALL d_pastix
36: #define PASTIX_CHECKMATRIX d_pastix_checkMatrix
37: #define PastixScalar double
38: #endif
40: #endif /* PETSC_USE_COMPLEX */
42: typedef struct Mat_Pastix_ {
43:   pastix_data_t *pastix_data; /* PaStiX internal data storage structure */
44:   MatStructure  matstruc;     /* DIFFERENT_/SAME_NONZERO_PATTERN: selects full CSC rebuild vs. value-only refill */
45:   PetscInt      n;            /* Number of columns in the matrix */
46:   PetscInt      *colptr;      /* Index of first element of each column in row and val (1-based) */
47:   PetscInt      *row;         /* Row of each element of the matrix (1-based) */
48:   PetscScalar   *val;         /* Value of each element of the matrix */
49:   PetscInt      *perm;        /* Permutation table */
50:   PetscInt      *invp;        /* Inverse permutation table */
51:   PetscScalar   *rhs;         /* Right-hand-side member */
52:   PetscInt      rhsnbr;       /* Right-hand-side number (must be 1) */
53:   PetscInt      iparm[64];    /* Integer parameters */
54:   double        dparm[64];    /* Floating point parameters */
55:   MPI_Comm      pastix_comm;  /* PaStiX MPI communicator (duplicated from A's communicator) */
56:   PetscMPIInt   commRank;     /* MPI rank */
57:   PetscMPIInt   commSize;     /* MPI communicator size */
58:   PetscBool     CleanUpPastix; /* Boolean indicating if we call PaStiX clean step */
59:   VecScatter    scat_rhs;     /* Scatter of the distributed rhs into the centralized copy */
60:   VecScatter    scat_sol;     /* Scatter of the centralized solution back to the distributed x */
61:   Vec           b_seq;        /* Centralized (sequential) right-hand-side work vector */
62:   PetscBool     isAIJ;        /* Set by the MatGetFactor_* factories; not consulted in this file */
63:   PetscErrorCode (*Destroy)(Mat); /* Original destroy routine of the factored matrix type, chained by MatDestroy_Pastix */
64: } Mat_Pastix;
66: extern PetscErrorCode MatDuplicate_Pastix(Mat,MatDuplicateOption,Mat*);
70: /*
71: convert Petsc seqaij matrix to CSC: colptr[n], row[nz], val[nz]
73: input:
74: A - matrix in seqaij or mpisbaij (bs=1) format
75: valOnly - FALSE: spaces are allocated and values are set for the CSC
76: TRUE: Only fill values
77: output:
78: n - Size of the matrix
79: colptr - Index of first element of each column in row and val
80: row - Row of each element of the matrix
81: values - Value of each element of the matrix
82: */
83: PetscErrorCode MatConvertToCSC(Mat A,PetscBool valOnly,PetscInt *n,PetscInt **colptr,PetscInt **row,PetscScalar **values)
84: {
85:   Mat_SeqAIJ  *aa      = (Mat_SeqAIJ*)A->data; /* CSR arrays of the input; assumes an AIJ-compatible layout -- also applied to SBAIJ, TODO confirm */
86:   PetscInt    *rowptr  = aa->i;  /* CSR row pointers (0-based) */
87:   PetscInt    *col     = aa->j;  /* CSR column indices (0-based) */
88:   PetscScalar *rvalues = aa->a;  /* CSR values */
89:   PetscInt     m       = A->rmap->N; /* number of rows */
90:   PetscInt     nnz;
91:   PetscInt     i,j, k;
92:   PetscInt     base    = 1;      /* PaStiX expects 1-based (Fortran) indexing */
93:   PetscInt     idx;
95:   PetscInt     colidx;
96:   PetscInt    *colcount;         /* per-column counters used to build colptr and to fill row/val */
97:   PetscBool    isSBAIJ;
98:   PetscBool    isSeqSBAIJ;
99:   PetscBool    isMpiSBAIJ;
100:   PetscBool    isSym;
101:   PetscBool    flg;
102:   PetscInt     icntl;
103:   PetscInt     verb;
104:   PetscInt     check;
107:   MatIsSymmetric(A,0.0,&isSym);
108:   PetscObjectTypeCompare((PetscObject)A,MATSBAIJ,&isSBAIJ);
109:   PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
110:   PetscObjectTypeCompare((PetscObject)A,MATMPISBAIJ,&isMpiSBAIJ);
112:   *n = A->cmap->N;
114:   /* PaStiX only needs one triangle when the matrix is symmetric; SBAIJ
115:      already stores a single triangle, so its nnz is used unchanged */
116:   if (isSym && !(isSBAIJ || isSeqSBAIJ || isMpiSBAIJ)) nnz = (aa->nz - *n)/2 + *n;
117:   else nnz = aa->nz;
119:   if (!valOnly) {
120:     PetscMalloc(((*n)+1) *sizeof(PetscInt)   ,colptr);
121:     PetscMalloc(nnz     *sizeof(PetscInt)   ,row);
122:     PetscMalloc1(nnz ,values);
124:     if (isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) {
125:       PetscMemcpy (*colptr, rowptr, ((*n)+1)*sizeof(PetscInt));
126:       for (i = 0; i < *n+1; i++) (*colptr)[i] += base; /* shift to 1-based indexing */
127:       PetscMemcpy (*row, col, (nnz)*sizeof(PetscInt));
128:       for (i = 0; i < nnz; i++) (*row)[i] += base;
129:       PetscMemcpy (*values, rvalues, (nnz)*sizeof(PetscScalar));
130:     } else {
131:       PetscMalloc1((*n),&colcount); /* NOTE(review): sized *n but zeroed/looped over m rows below -- safe only for square matrices, which PaStiX requires; confirm */
133:       for (i = 0; i < m; i++) colcount[i] = 0;
134:       /* Fill-in colptr */
135:       for (i = 0; i < m; i++) {
136:         for (j = rowptr[i]; j < rowptr[i+1]; j++) {
137:           if (!isSym || col[j] <= i) colcount[col[j]]++; /* keep only the lower triangle when symmetric */
138:         }
139:       }
141:       (*colptr)[0] = base;
142:       for (j = 0; j < *n; j++) {
143:         (*colptr)[j+1] = (*colptr)[j] + colcount[j];
144:         /* in next loop we fill starting from (*colptr)[colidx] - base */
145:         colcount[j] = -base;
146:       }
148:       /* Fill-in rows and values */
149:       for (i = 0; i < m; i++) {
150:         for (j = rowptr[i]; j < rowptr[i+1]; j++) {
151:           if (!isSym || col[j] <= i) {
152:             colidx         = col[j];
153:             idx            = (*colptr)[colidx] + colcount[colidx]; /* next free slot in this column */
154:             (*row)[idx]    = i + base;
155:             (*values)[idx] = rvalues[j];
156:             colcount[colidx]++;
157:           }
158:         }
159:       }
160:       PetscFree(colcount);
161:     }
162:   } else {
163:     /* Fill-in only values: the pattern is unchanged, locate each entry in the existing CSC */
164:     for (i = 0; i < m; i++) {
165:       for (j = rowptr[i]; j < rowptr[i+1]; j++) {
166:         colidx = col[j];
167:         if ((isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) ||!isSym || col[j] <= i) {
168:           /* look for the value to fill */
169:           for (k = (*colptr)[colidx] - base; k < (*colptr)[colidx + 1] - base; k++) {
170:             if (((*row)[k]-base) == i) {
171:               (*values)[k] = rvalues[j];
172:               break;
173:             }
174:           }
175:           /* data structure of sparse matrix has changed */
176:           if (k == (*colptr)[colidx + 1] - base) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"overflow on k %D",k);
177:         }
178:       }
179:     }
180:   }
182:   icntl =-1;
183:   check = 0;
184:   PetscOptionsInt("-mat_pastix_check","Check the matrix 0 : no, 1 : yes)","None",check,&icntl,&flg);
185:   if ((flg && icntl >= 0) || PetscLogPrintInfo) check = icntl;
187:   if (check == 1) {
188:     PetscScalar *tmpvalues;
189:     PetscInt    *tmprows,*tmpcolptr;
190:     tmpvalues = (PetscScalar*)malloc(nnz*sizeof(PetscScalar));    if (!tmpvalues) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");
191:     tmprows   = (PetscInt*)   malloc(nnz*sizeof(PetscInt));       if (!tmprows)   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");
192:     tmpcolptr = (PetscInt*)   malloc((*n+1)*sizeof(PetscInt));    if (!tmpcolptr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");
194:     PetscMemcpy(tmpcolptr,*colptr,(*n+1)*sizeof(PetscInt));
195:     PetscMemcpy(tmprows,*row,nnz*sizeof(PetscInt));
196:     PetscMemcpy(tmpvalues,*values,nnz*sizeof(PetscScalar));
197:     PetscFree(*row);
198:     PetscFree(*values);
200:     icntl=-1;
201:     verb = API_VERBOSE_NOT;
202:     PetscOptionsInt("-mat_pastix_verbose","iparm[IPARM_VERBOSE] : level of printing (0 to 2)","None",verb,&icntl,&flg);
203:     if ((flg && icntl >= 0) || PetscLogPrintInfo) {
204:       verb = icntl;
205:     }
206:     PASTIX_CHECKMATRIX(MPI_COMM_WORLD,verb,((isSym != 0) ? API_SYM_YES : API_SYM_NO),API_YES,*n,&tmpcolptr,&tmprows,(PastixScalar**)&tmpvalues,NULL,1); /* may compact the structure: (*colptr)[*n]-1 is the new nnz, hence the re-allocation below */
208:     PetscMemcpy(*colptr,tmpcolptr,(*n+1)*sizeof(PetscInt));
209:     PetscMalloc1(((*colptr)[*n]-1),row);
210:     PetscMemcpy(*row,tmprows,((*colptr)[*n]-1)*sizeof(PetscInt));
211:     PetscMalloc1(((*colptr)[*n]-1),values);
212:     PetscMemcpy(*values,tmpvalues,((*colptr)[*n]-1)*sizeof(PetscScalar));
213:     free(tmpvalues);
214:     free(tmprows);
215:     free(tmpcolptr);
217:   }
218:   return(0);
219: }
225: /*
226: Call clean step of PaStiX if lu->CleanUpPastix == true.
227: Free the CSC matrix.
228: */
229: PetscErrorCode MatDestroy_Pastix(Mat A)
230: {
231: Mat_Pastix *lu=(Mat_Pastix*)A->spptr;
233: PetscMPIInt size=lu->commSize;
236: if (lu && lu->CleanUpPastix) {
237: /* Terminate instance, deallocate memories */
238: if (size > 1) {
239: VecScatterDestroy(&lu->scat_rhs);
240: VecDestroy(&lu->b_seq);
241: VecScatterDestroy(&lu->scat_sol);
242: }
244: lu->iparm[IPARM_START_TASK]=API_TASK_CLEAN;
245: lu->iparm[IPARM_END_TASK] =API_TASK_CLEAN;
247: PASTIX_CALL(&(lu->pastix_data),
248: lu->pastix_comm,
249: lu->n,
250: lu->colptr,
251: lu->row,
252: (PastixScalar*)lu->val,
253: lu->perm,
254: lu->invp,
255: (PastixScalar*)lu->rhs,
256: lu->rhsnbr,
257: lu->iparm,
258: lu->dparm);
260: PetscFree(lu->colptr);
261: PetscFree(lu->row);
262: PetscFree(lu->val);
263: PetscFree(lu->perm);
264: PetscFree(lu->invp);
265: MPI_Comm_free(&(lu->pastix_comm));
266: }
267: if (lu && lu->Destroy) {
268: (lu->Destroy)(A);
269: }
270: PetscFree(A->spptr);
271: return(0);
272: }
276: /*
277: Gather right-hand-side.
278: Call for Solve step.
279: Scatter solution.
280: */
281: PetscErrorCode MatSolve_PaStiX(Mat A,Vec b,Vec x)
282: {
283:   Mat_Pastix  *lu=(Mat_Pastix*)A->spptr;
284:   PetscScalar *array;
285:   Vec          x_seq;
289:   lu->rhsnbr = 1; /* the interface supports a single right-hand side */
290:   x_seq      = lu->b_seq;
291:   if (lu->commSize > 1) {
292:     /* PaStiX only supports centralized rhs. Scatter b into a sequential rhs vector */
293:     VecScatterBegin(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
294:     VecScatterEnd(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
295:     VecGetArray(x_seq,&array);
296:   } else {  /* size == 1 : solve in place in x */
297:     VecCopy(b,x);
298:     VecGetArray(x,&array);
299:   }
300:   lu->rhs = array; /* raw pointer kept for the PASTIX_CALL below */
301:   if (lu->commSize == 1) {
302:     VecRestoreArray(x,&array); /* NOTE(review): array restored here yet lu->rhs is still used by PASTIX_CALL below, and restored again afterwards with &(lu->rhs) -- verify against VecGetArray/VecRestoreArray semantics */
303:   } else {
304:     VecRestoreArray(x_seq,&array);
305:   }
307:   /* solve phase */
308:   /*-------------*/
309:   lu->iparm[IPARM_START_TASK] = API_TASK_SOLVE;
310:   lu->iparm[IPARM_END_TASK]   = API_TASK_REFINE; /* solve followed by iterative refinement */
311:   lu->iparm[IPARM_RHS_MAKING] = API_RHS_B;
313:   PASTIX_CALL(&(lu->pastix_data),
314:               lu->pastix_comm,
315:               lu->n,
316:               lu->colptr,
317:               lu->row,
318:               (PastixScalar*)lu->val,
319:               lu->perm,
320:               lu->invp,
321:               (PastixScalar*)lu->rhs,
322:               lu->rhsnbr,
323:               lu->iparm,
324:               lu->dparm);
326:   if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in solve phase: lu->iparm[IPARM_ERROR_NUMBER] = %d\n",lu->iparm[IPARM_ERROR_NUMBER]);
328:   if (lu->commSize == 1) {
329:     VecRestoreArray(x,&(lu->rhs)); /* NOTE(review): second restore of the same array (see line 302) */
330:   } else {
331:     VecRestoreArray(x_seq,&(lu->rhs));
332:   }
334:   if (lu->commSize > 1) { /* convert PaStiX centralized solution to petsc mpi x */
335:     VecScatterBegin(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
336:     VecScatterEnd(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
337:   }
338:   return(0);
339: }
341: /*
342: Numeric factorisation using PaStiX solver.
344: */
347: PetscErrorCode MatFactorNumeric_PaStiX(Mat F,Mat A,const MatFactorInfo *info)
348: {
349: Mat_Pastix *lu =(Mat_Pastix*)(F)->spptr;
350: Mat *tseq;
351: PetscErrorCode 0;
352: PetscInt icntl;
353: PetscInt M=A->rmap->N;
354: PetscBool valOnly,flg, isSym;
355: Mat F_diag;
356: IS is_iden;
357: Vec b;
358: IS isrow;
359: PetscBool isSeqAIJ,isSeqSBAIJ,isMPIAIJ;
362: PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);
363: PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);
364: PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
365: if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
366: (F)->ops->solve = MatSolve_PaStiX;
368: /* Initialize a PASTIX instance */
369: MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->pastix_comm));
370: MPI_Comm_rank(lu->pastix_comm, &lu->commRank);
371: MPI_Comm_size(lu->pastix_comm, &lu->commSize);
373: /* Set pastix options */
374: lu->iparm[IPARM_MODIFY_PARAMETER] = API_NO;
375: lu->iparm[IPARM_START_TASK] = API_TASK_INIT;
376: lu->iparm[IPARM_END_TASK] = API_TASK_INIT;
378: lu->rhsnbr = 1;
380: /* Call to set default pastix options */
381: PASTIX_CALL(&(lu->pastix_data),
382: lu->pastix_comm,
383: lu->n,
384: lu->colptr,
385: lu->row,
386: (PastixScalar*)lu->val,
387: lu->perm,
388: lu->invp,
389: (PastixScalar*)lu->rhs,
390: lu->rhsnbr,
391: lu->iparm,
392: lu->dparm);
394: PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"PaStiX Options","Mat");
396: icntl = -1;
398: lu->iparm[IPARM_VERBOSE] = API_VERBOSE_NOT;
400: PetscOptionsInt("-mat_pastix_verbose","iparm[IPARM_VERBOSE] : level of printing (0 to 2)","None",lu->iparm[IPARM_VERBOSE],&icntl,&flg);
401: if ((flg && icntl >= 0) || PetscLogPrintInfo) {
402: lu->iparm[IPARM_VERBOSE] = icntl;
403: }
404: icntl=-1;
405: PetscOptionsInt("-mat_pastix_threadnbr","iparm[IPARM_THREAD_NBR] : Number of thread by MPI node","None",lu->iparm[IPARM_THREAD_NBR],&icntl,&flg);
406: if ((flg && icntl > 0)) {
407: lu->iparm[IPARM_THREAD_NBR] = icntl;
408: }
409: PetscOptionsEnd();
410: valOnly = PETSC_FALSE;
411: } else {
412: if (isSeqAIJ || isMPIAIJ) {
413: PetscFree(lu->colptr);
414: PetscFree(lu->row);
415: PetscFree(lu->val);
416: valOnly = PETSC_FALSE;
417: } else valOnly = PETSC_TRUE;
418: }
420: lu->iparm[IPARM_MATRIX_VERIFICATION] = API_YES;
422: /* convert mpi A to seq mat A */
423: ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);
424: MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);
425: ISDestroy(&isrow);
427: MatConvertToCSC(*tseq,valOnly, &lu->n, &lu->colptr, &lu->row, &lu->val);
428: MatIsSymmetric(*tseq,0.0,&isSym);
429: MatDestroyMatrices(1,&tseq);
431: if (!lu->perm) {
432: PetscMalloc1((lu->n),&(lu->perm));
433: PetscMalloc1((lu->n),&(lu->invp));
434: }
436: if (isSym) {
437: /* On symmetric matrix, LLT */
438: lu->iparm[IPARM_SYM] = API_SYM_YES;
439: lu->iparm[IPARM_FACTORIZATION] = API_FACT_LDLT;
440: } else {
441: /* On unsymmetric matrix, LU */
442: lu->iparm[IPARM_SYM] = API_SYM_NO;
443: lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
444: }
446: /*----------------*/
447: if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
448: if (!(isSeqAIJ || isSeqSBAIJ)) {
449: /* PaStiX only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
450: VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&lu->b_seq);
451: ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);
452: MatGetVecs(A,NULL,&b);
453: VecScatterCreate(b,is_iden,lu->b_seq,is_iden,&lu->scat_rhs);
454: VecScatterCreate(lu->b_seq,is_iden,b,is_iden,&lu->scat_sol);
455: ISDestroy(&is_iden);
456: VecDestroy(&b);
457: }
458: lu->iparm[IPARM_START_TASK] = API_TASK_ORDERING;
459: lu->iparm[IPARM_END_TASK] = API_TASK_NUMFACT;
461: PASTIX_CALL(&(lu->pastix_data),
462: lu->pastix_comm,
463: lu->n,
464: lu->colptr,
465: lu->row,
466: (PastixScalar*)lu->val,
467: lu->perm,
468: lu->invp,
469: (PastixScalar*)lu->rhs,
470: lu->rhsnbr,
471: lu->iparm,
472: lu->dparm);
473: if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in analysis phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
474: } else {
475: lu->iparm[IPARM_START_TASK] = API_TASK_NUMFACT;
476: lu->iparm[IPARM_END_TASK] = API_TASK_NUMFACT;
477: PASTIX_CALL(&(lu->pastix_data),
478: lu->pastix_comm,
479: lu->n,
480: lu->colptr,
481: lu->row,
482: (PastixScalar*)lu->val,
483: lu->perm,
484: lu->invp,
485: (PastixScalar*)lu->rhs,
486: lu->rhsnbr,
487: lu->iparm,
488: lu->dparm);
490: if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in analysis phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
491: }
493: if (lu->commSize > 1) {
494: if ((F)->factortype == MAT_FACTOR_LU) {
495: F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
496: } else {
497: F_diag = ((Mat_MPISBAIJ*)(F)->data)->A;
498: }
499: F_diag->assembled = PETSC_TRUE;
500: }
501: (F)->assembled = PETSC_TRUE;
502: lu->matstruc = SAME_NONZERO_PATTERN;
503: lu->CleanUpPastix = PETSC_TRUE;
504: return(0);
505: }
507: /* Note the Petsc r and c permutations are ignored */
510: PetscErrorCode MatLUFactorSymbolic_AIJPASTIX(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
511: {
512: Mat_Pastix *lu = (Mat_Pastix*)F->spptr;
515: lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
516: lu->iparm[IPARM_SYM] = API_SYM_YES;
517: lu->matstruc = DIFFERENT_NONZERO_PATTERN;
518: F->ops->lufactornumeric = MatFactorNumeric_PaStiX;
519: return(0);
520: }
523: /* Note the Petsc r permutation is ignored */
526: PetscErrorCode MatCholeskyFactorSymbolic_SBAIJPASTIX(Mat F,Mat A,IS r,const MatFactorInfo *info)
527: {
528: Mat_Pastix *lu = (Mat_Pastix*)(F)->spptr;
531: lu->iparm[IPARM_FACTORIZATION] = API_FACT_LLT;
532: lu->iparm[IPARM_SYM] = API_SYM_NO;
533: lu->matstruc = DIFFERENT_NONZERO_PATTERN;
534: (F)->ops->choleskyfactornumeric = MatFactorNumeric_PaStiX;
535: return(0);
536: }
540: PetscErrorCode MatView_PaStiX(Mat A,PetscViewer viewer)
541: {
542: PetscErrorCode ierr;
543: PetscBool iascii;
544: PetscViewerFormat format;
547: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
548: if (iascii) {
549: PetscViewerGetFormat(viewer,&format);
550: if (format == PETSC_VIEWER_ASCII_INFO) {
551: Mat_Pastix *lu=(Mat_Pastix*)A->spptr;
553: PetscViewerASCIIPrintf(viewer,"PaStiX run parameters:\n");
554: PetscViewerASCIIPrintf(viewer," Matrix type : %s \n",((lu->iparm[IPARM_SYM] == API_SYM_YES) ? "Symmetric" : "Unsymmetric"));
555: PetscViewerASCIIPrintf(viewer," Level of printing (0,1,2): %d \n",lu->iparm[IPARM_VERBOSE]);
556: PetscViewerASCIIPrintf(viewer," Number of refinements iterations : %d \n",lu->iparm[IPARM_NBITER]);
557: PetscPrintf(PETSC_COMM_SELF," Error : %g \n",lu->dparm[DPARM_RELATIVE_ERROR]);
558: }
559: }
560: return(0);
561: }
564: /*MC
565:   MATSOLVERPASTIX - A solver package providing direct solvers (LU and Cholesky) for distributed
566:   and sequential matrices via the external package PaStiX.
568: Use ./configure --download-pastix to have PETSc installed with PaStiX
570: Options Database Keys:
571: + -mat_pastix_verbose <0,1,2> - print level
572: - -mat_pastix_threadnbr <integer> - Set the thread number by MPI task.
574: Level: beginner
576: .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage
578: M*/
583: PetscErrorCode MatGetInfo_PaStiX(Mat A,MatInfoType flag,MatInfo *info)
584: {
585: Mat_Pastix *lu =(Mat_Pastix*)A->spptr;
588: info->block_size = 1.0;
589: info->nz_allocated = lu->iparm[IPARM_NNZEROS];
590: info->nz_used = lu->iparm[IPARM_NNZEROS];
591: info->nz_unneeded = 0.0;
592: info->assemblies = 0.0;
593: info->mallocs = 0.0;
594: info->memory = 0.0;
595: info->fill_ratio_given = 0;
596: info->fill_ratio_needed = 0;
597: info->factor_mallocs = 0;
598: return(0);
599: }
/* Query callback composed on the factored matrix ("MatFactorGetSolverPackage_C"):
   reports that this factorization is provided by the PaStiX package. */
603: PetscErrorCode MatFactorGetSolverPackage_pastix(Mat A,const MatSolverPackage *type)
604: {
606:   *type = MATSOLVERPASTIX;
607:   return(0);
608: }
610: /*
611: The seq and mpi versions of this function are the same
612: */
615: PETSC_EXTERN PetscErrorCode MatGetFactor_seqaij_pastix(Mat A,MatFactorType ftype,Mat *F)
616: {
617: Mat B;
619: Mat_Pastix *pastix;
622: if (ftype != MAT_FACTOR_LU) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
623: /* Create the factorization matrix */
624: MatCreate(PetscObjectComm((PetscObject)A),&B);
625: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
626: MatSetType(B,((PetscObject)A)->type_name);
627: MatSeqAIJSetPreallocation(B,0,NULL);
629: B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
630: B->ops->view = MatView_PaStiX;
631: B->ops->getinfo = MatGetInfo_PaStiX;
633: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);
635: B->factortype = MAT_FACTOR_LU;
637: PetscNewLog(B,&pastix);
639: pastix->CleanUpPastix = PETSC_FALSE;
640: pastix->isAIJ = PETSC_TRUE;
641: pastix->scat_rhs = NULL;
642: pastix->scat_sol = NULL;
643: pastix->Destroy = B->ops->destroy;
644: B->ops->destroy = MatDestroy_Pastix;
645: B->spptr = (void*)pastix;
647: *F = B;
648: return(0);
649: }
653: PETSC_EXTERN PetscErrorCode MatGetFactor_mpiaij_pastix(Mat A,MatFactorType ftype,Mat *F)
654: {
655: Mat B;
657: Mat_Pastix *pastix;
660: if (ftype != MAT_FACTOR_LU) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
661: /* Create the factorization matrix */
662: MatCreate(PetscObjectComm((PetscObject)A),&B);
663: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
664: MatSetType(B,((PetscObject)A)->type_name);
665: MatSeqAIJSetPreallocation(B,0,NULL);
666: MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);
668: B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
669: B->ops->view = MatView_PaStiX;
671: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);
673: B->factortype = MAT_FACTOR_LU;
675: PetscNewLog(B,&pastix);
677: pastix->CleanUpPastix = PETSC_FALSE;
678: pastix->isAIJ = PETSC_TRUE;
679: pastix->scat_rhs = NULL;
680: pastix->scat_sol = NULL;
681: pastix->Destroy = B->ops->destroy;
682: B->ops->destroy = MatDestroy_Pastix;
683: B->spptr = (void*)pastix;
685: *F = B;
686: return(0);
687: }
691: PETSC_EXTERN PetscErrorCode MatGetFactor_seqsbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
692: {
693: Mat B;
695: Mat_Pastix *pastix;
698: if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
699: /* Create the factorization matrix */
700: MatCreate(PetscObjectComm((PetscObject)A),&B);
701: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
702: MatSetType(B,((PetscObject)A)->type_name);
703: MatSeqSBAIJSetPreallocation(B,1,0,NULL);
704: MatMPISBAIJSetPreallocation(B,1,0,NULL,0,NULL);
706: B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
707: B->ops->view = MatView_PaStiX;
709: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);
711: B->factortype = MAT_FACTOR_CHOLESKY;
713: PetscNewLog(B,&pastix);
715: pastix->CleanUpPastix = PETSC_FALSE;
716: pastix->isAIJ = PETSC_TRUE;
717: pastix->scat_rhs = NULL;
718: pastix->scat_sol = NULL;
719: pastix->Destroy = B->ops->destroy;
720: B->ops->destroy = MatDestroy_Pastix;
721: B->spptr = (void*)pastix;
723: *F = B;
724: return(0);
725: }
729: PETSC_EXTERN PetscErrorCode MatGetFactor_mpisbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
730: {
731: Mat B;
733: Mat_Pastix *pastix;
736: if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
738: /* Create the factorization matrix */
739: MatCreate(PetscObjectComm((PetscObject)A),&B);
740: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
741: MatSetType(B,((PetscObject)A)->type_name);
742: MatSeqSBAIJSetPreallocation(B,1,0,NULL);
743: MatMPISBAIJSetPreallocation(B,1,0,NULL,0,NULL);
745: B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
746: B->ops->view = MatView_PaStiX;
748: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);
750: B->factortype = MAT_FACTOR_CHOLESKY;
752: PetscNewLog(B,&pastix);
754: pastix->CleanUpPastix = PETSC_FALSE;
755: pastix->isAIJ = PETSC_TRUE;
756: pastix->scat_rhs = NULL;
757: pastix->scat_sol = NULL;
758: pastix->Destroy = B->ops->destroy;
759: B->ops->destroy = MatDestroy_Pastix;
760: B->spptr = (void*)pastix;
762: *F = B;
763: return(0);
764: }