Actual source code: mpisbaijspooles.c

/*
   Provides an interface to the Spooles parallel sparse solver (MPI SPOOLES)
*/

#include "src/mat/impls/aij/seq/spooles/spooles.h"
#include "src/mat/impls/sbaij/mpi/mpisbaij.h"

PetscErrorCode MatDestroy_MPISBAIJSpooles(Mat A)
{
  /* MPISBAIJSpooles isn't really the matrix that USES Spooles; rather, it is a
     factory class for creating a symmetric matrix that can invoke Spooles'
     parallel Cholesky solver.  As a result, we don't have to clean up the data
     set for use in Spooles, as is done in MatDestroy_MPIAIJSpooles. */
  MatConvert_Spooles_Base(A,MATMPISBAIJ,&A);
  (*A->ops->destroy)(A);
  return(0);
}

PetscErrorCode MatAssemblyEnd_MPISBAIJSpooles(Mat A,MatAssemblyType mode)
{
  int         bs;
  Mat_Spooles *lu = (Mat_Spooles*)(A->spptr);

  (*lu->MatAssemblyEnd)(A,mode);
  MatGetBlockSize(A,&bs);
  if (bs > 1) SETERRQ1(PETSC_ERR_SUP,"Block size %D not supported by Spooles",bs);
  lu->MatCholeskyFactorSymbolic  = A->ops->choleskyfactorsymbolic;
  A->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MPISBAIJSpooles;
  return(0);
}

/*
  input:
   F:                 numeric factor
  output:
   nneg, nzero, npos: global matrix inertia, returned on all processes
*/
PetscErrorCode MatGetInertia_MPISBAIJSpooles(Mat F,int *nneg,int *nzero,int *npos)
{
  Mat_Spooles *lu = (Mat_Spooles*)F->spptr;
  int         neg,zero,pos,sbuf[3],rbuf[3];

  /* compute the inertia of the local front matrix, then sum over all processes */
  FrontMtx_inertia(lu->frontmtx,&neg,&zero,&pos);
  sbuf[0] = neg; sbuf[1] = zero; sbuf[2] = pos;
  MPI_Allreduce(sbuf,rbuf,3,MPI_INT,MPI_SUM,F->comm);
  *nneg = rbuf[0]; *nzero = rbuf[1]; *npos = rbuf[2];
  return(0);
}
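
/*
  Illustrative sketch (not part of the original source): how the inertia computed above is
  typically reached from user code.  MatGetInertia() is assumed to be the public entry point
  in this PETSc generation; it dispatches through F->ops->getinertia, which the symbolic
  factorization below sets to MatGetInertia_MPISBAIJSpooles().  The helper name
  MatPrintInertiaExample is hypothetical.
*/
static PetscErrorCode MatPrintInertiaExample(Mat F)
{
  int nneg,nzero,npos;

  MatGetInertia(F,&nneg,&nzero,&npos);   /* collective; every process receives the global inertia */
  PetscPrintf(F->comm,"inertia: %d negative, %d zero, %d positive eigenvalues\n",nneg,nzero,npos);
  return(0);
}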

/* Note: the PETSc ordering r is ignored */
PetscErrorCode MatCholeskyFactorSymbolic_MPISBAIJSpooles(Mat A,IS r,MatFactorInfo *info,Mat *F)
{
  Mat         B;
  Mat_Spooles *lu;

  /* Create the factorization matrix */
  MatCreate(A->comm,A->m,A->n,A->M,A->N,&B);
  MatSetType(B,A->type_name);
  MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);

  B->ops->choleskyfactornumeric = MatFactorNumeric_MPIAIJSpooles;
  B->ops->getinertia            = MatGetInertia_MPISBAIJSpooles;
  B->factor                     = FACTOR_CHOLESKY;

  lu                       = (Mat_Spooles*)(B->spptr);
  lu->options.pivotingflag = SPOOLES_NO_PIVOTING;
  lu->flg                  = DIFFERENT_NONZERO_PATTERN;
  lu->options.useQR        = PETSC_FALSE;
  lu->options.symflag      = SPOOLES_SYMMETRIC;  /* default */

  MPI_Comm_dup(A->comm,&(lu->comm_spooles));
  *F = B;
  return(0);
}

PetscErrorCode MatMPISBAIJSetPreallocation_MPISBAIJSpooles(Mat B,int bs,int d_nz,int *d_nnz,int o_nz,int *o_nnz)
{
  Mat         A;
  Mat_Spooles *lu = (Mat_Spooles*)B->spptr;

  /*
    After performing the MPISBAIJ preallocation, we need to convert the local diagonal block matrix
    into Spooles type so that the block Jacobi preconditioner (for example) can use Spooles.  I would
    like this to be done in the MatCreate routine, but the creation of this inner matrix requires
    block size info so that PETSc can determine the local size properly.  The block size info is set
    in the preallocation routine.  (An illustrative check of the resulting inner type is sketched
    after this routine.)
  */
  (*lu->MatPreallocate)(B,bs,d_nz,d_nnz,o_nz,o_nnz);
  A = ((Mat_MPISBAIJ*)B->data)->A;
  MatConvert_SeqSBAIJ_SeqSBAIJSpooles(A,MATSEQSBAIJSPOOLES,&A);
  return(0);
}
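
/*
  Illustrative check (not part of the original source): after the preallocation above, the
  local diagonal block of a MATMPISBAIJSPOOLES matrix carries the sequential Spooles type,
  which is what allows, for example, the sub-solvers of a block Jacobi preconditioner to use
  Spooles.  The helper name CheckLocalBlockTypeExample is hypothetical; PetscTypeCompare()
  is the type-comparison routine assumed for this PETSc generation.
*/
static PetscErrorCode CheckLocalBlockTypeExample(Mat B)
{
  Mat        Aloc = ((Mat_MPISBAIJ*)B->data)->A;  /* local diagonal block */
  PetscTruth match;

  PetscTypeCompare((PetscObject)Aloc,MATSEQSBAIJSPOOLES,&match);
  if (!match) SETERRQ(PETSC_ERR_PLIB,"Local diagonal block was not converted to MATSEQSBAIJSPOOLES");
  return(0);
}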

PetscErrorCode MatConvert_MPISBAIJ_MPISBAIJSpooles(Mat A,const MatType type,Mat *newmat)
{
  /* This routine is only called to convert a MATMPISBAIJ matrix to a
     MATMPISBAIJSPOOLES matrix, so we will ignore 'MatType type'. */
  Mat         B = *newmat;
  Mat_Spooles *lu;
  void        (*f)(void);

  if (B != A) {
    /* This routine is inherited, so we know the type is correct. */
    MatDuplicate(A,MAT_COPY_VALUES,&B);
  }

  PetscNew(Mat_Spooles,&lu);
  B->spptr                       = (void*)lu;

  lu->basetype                   = MATMPISBAIJ;
  lu->MatDuplicate               = A->ops->duplicate;
  lu->MatCholeskyFactorSymbolic  = A->ops->choleskyfactorsymbolic;
  lu->MatLUFactorSymbolic        = A->ops->lufactorsymbolic;
  lu->MatView                    = A->ops->view;
  lu->MatAssemblyEnd             = A->ops->assemblyend;
  lu->MatDestroy                 = A->ops->destroy;

  B->ops->duplicate              = MatDuplicate_Spooles;
  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MPISBAIJSpooles;
  B->ops->assemblyend            = MatAssemblyEnd_MPISBAIJSpooles;
  B->ops->destroy                = MatDestroy_MPISBAIJSpooles;

  /* I really don't like needing to know the tag: MatMPISBAIJSetPreallocation_C */
  PetscObjectQueryFunction((PetscObject)B,"MatMPISBAIJSetPreallocation_C",&f);
  if (f) {
    lu->MatPreallocate = (PetscErrorCode (*)(Mat,int,int,int*,int,int*))f;
    PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPISBAIJSetPreallocation_C",
                                      "MatMPISBAIJSetPreallocation_MPISBAIJSpooles",
                                      MatMPISBAIJSetPreallocation_MPISBAIJSpooles);
  }

  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpisbaijspooles_mpisbaij_C",
                                    "MatConvert_Spooles_Base",MatConvert_Spooles_Base);
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpisbaij_mpisbaijspooles_C",
                                    "MatConvert_MPISBAIJ_MPISBAIJSpooles",
                                    MatConvert_MPISBAIJ_MPISBAIJSpooles);

  PetscObjectChangeTypeName((PetscObject)B,MATMPISBAIJSPOOLES);
  *newmat = B;
  return(0);
}

/*MC
  MATMPISBAIJSPOOLES - MATMPISBAIJSPOOLES = "mpisbaijspooles" - a matrix type providing direct solvers (Cholesky) for distributed symmetric
  matrices via the external package Spooles.

  If Spooles is installed (see the manual for instructions on how to declare the
  existence of external packages), a matrix type can be constructed that invokes
  Spooles solvers.  After calling MatCreate(...,A), simply call MatSetType(A,MATMPISBAIJSPOOLES).
  This matrix type is supported only for double precision real numbers.

  This matrix inherits from MATMPISBAIJ.  As a result, MatMPISBAIJSetPreallocation() is
  supported for this matrix type.  One can also call MatConvert() for an in-place conversion
  to or from the MATMPISBAIJ type without copying data.

  Options Database Keys:
+ -mat_type mpisbaijspooles - sets the matrix type to mpisbaijspooles during a call to MatSetFromOptions()
. -mat_spooles_tau <tau> - upper bound on the magnitude of the largest element in L or U
. -mat_spooles_seed <seed> - random number seed used for ordering
. -mat_spooles_msglvl <msglvl> - message output level
. -mat_spooles_ordering <BestOfNDandMS,MMD,MS,ND> - ordering used
. -mat_spooles_maxdomainsize <n> - maximum subgraph size used by Spooles orderings
. -mat_spooles_maxzeros <n> - maximum number of zeros inside a supernode
. -mat_spooles_maxsize <n> - maximum size of a supernode
. -mat_spooles_FrontMtxInfo <true,false> - print Spooles information about the computed factorization
. -mat_spooles_symmetryflag <0,1,2> - 0: SPOOLES_SYMMETRIC, 1: SPOOLES_HERMITIAN, 2: SPOOLES_NONSYMMETRIC
. -mat_spooles_patchAndGoFlag <0,1,2> - 0: no patch, 1: use PatchAndGo strategy 1, 2: use PatchAndGo strategy 2
. -mat_spooles_toosmall <dt> - drop tolerance for PatchAndGo strategy 1
. -mat_spooles_storeids <bool integer> - if nonzero, store the row and column numbers where patches were applied in an IV object
. -mat_spooles_fudge <delta> - fudge factor for rescaling diagonals with PatchAndGo strategy 2
- -mat_spooles_storevalues <bool integer> - if nonzero and PatchAndGo strategy 2 is used, store the change in diagonal values in a DV object

   Level: beginner

.seealso: MATSEQSBAIJSPOOLES, MATSEQAIJSPOOLES, MATMPIAIJSPOOLES, PCCHOLESKY
M*/
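
/*
  Illustrative usage sketch (not part of the original source): the man page above says to call
  MatCreate() followed by MatSetType(A,MATMPISBAIJSPOOLES).  The helper below, with the
  hypothetical name BuildSpoolesExampleMatrix, assembles a small symmetric tridiagonal matrix
  of that type using the same PETSc 2.x-era MatCreate() calling sequence that appears elsewhere
  in this file.  Solving with the assembled matrix through a KSP whose preconditioner is
  PCCHOLESKY then invokes the Spooles Cholesky factorization implemented above.
*/
static PetscErrorCode BuildSpoolesExampleMatrix(MPI_Comm comm,int n,Mat *outA)
{
  Mat         A;
  int         i,rstart,rend;
  PetscScalar diag = 2.0,off = -1.0;

  MatCreate(comm,PETSC_DECIDE,PETSC_DECIDE,n,n,&A);   /* old 6-argument MatCreate() */
  MatSetType(A,MATMPISBAIJSPOOLES);                   /* inherits from MATMPISBAIJ */
  MatMPISBAIJSetPreallocation(A,1,2,PETSC_NULL,1,PETSC_NULL);

  /* only the upper triangle is stored for an SBAIJ matrix */
  MatGetOwnershipRange(A,&rstart,&rend);
  for (i=rstart; i<rend; i++) {
    int j = i + 1;
    MatSetValues(A,1,&i,1,&i,&diag,INSERT_VALUES);
    if (j < n) MatSetValues(A,1,&i,1,&j,&off,INSERT_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  *outA = A;
  return(0);
}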

PetscErrorCode MatCreate_MPISBAIJSpooles(Mat A)
{
  /* Change the type name before calling MatSetType() to force proper construction
     of the MPISBAIJ and MPISBAIJSpooles types */
  PetscObjectChangeTypeName((PetscObject)A,MATMPISBAIJSPOOLES);
  MatSetType(A,MATMPISBAIJ);
  MatConvert_MPISBAIJ_MPISBAIJSpooles(A,MATMPISBAIJSPOOLES,&A);
  return(0);
}