Actual source code: ex8.c

static char help[] = "Illustrates use of the preconditioner ASM.\n\
The Additive Schwarz Method for solving a linear system in parallel with KSP.  The\n\
code indicates the procedure for setting user-defined subdomains.  Input\n\
parameters include:\n\
  -user_set_subdomain_solvers:  User explicitly sets subdomain solvers\n\
  -user_set_subdomains:  Activate user-defined subdomains\n\n";

/*
   Note:  This example focuses on setting the subdomains for the ASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of KSP
   (including working with matrices and vectors).

   The ASM preconditioner is fully parallel, but currently the routine
   PCASMCreateSubdomains2D(), which is used in this example to demonstrate
   user-defined subdomains (activated via -user_set_subdomains), is
   uniprocessor only.

   The matrix in this linear system arises from the discretized Laplacian,
   and thus is not very interesting in terms of experimenting with variants
   of the ASM preconditioner.
*/

/*T
   Concepts: KSP^Additive Schwarz Method (ASM) with user-defined subdomains
   Processors: n
T*/

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include "petscksp.h"

int main(int argc,char **args)
{
  Vec            x,b,u;                  /* approx solution, RHS, exact solution */
  Mat            A;                      /* linear system matrix */
  KSP            ksp;                    /* linear solver context */
  PC             pc;                     /* PC context */
  IS             *is;                    /* array of index sets that define the subdomains */
  PetscInt       overlap = 1;            /* width of subdomain overlap */
  PetscInt       Nsub;                   /* number of subdomains */
  PetscInt       m = 15,n = 17;          /* mesh dimensions in x- and y- directions */
  PetscInt       M = 2,N = 1;            /* number of subdomains in x- and y- directions */
  PetscInt       i,j,I,J,Istart,Iend;
  PetscMPIInt    size;
  PetscTruth     flg;
  PetscTruth     user_subdomains;        /* flag - 1 indicates user-defined subdomains */
  PetscScalar    v, one = 1.0;

  PetscInitialize(&argc,&args,(char *)0,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-M",&M,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-N",&N,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-overlap",&overlap,PETSC_NULL);
  PetscOptionsHasName(PETSC_NULL,"-user_set_subdomains",&user_subdomains);

  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Assemble the matrix for the five-point stencil of the Laplacian
  */
  MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,&A);
  MatSetFromOptions(A);
  MatGetOwnershipRange(A,&Istart,&Iend);
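  /*
     Each global row I corresponds to the grid point (i,j) = (I/n, I - (I/n)*n)
     on the m x n mesh; the loop below inserts the standard five-point stencil:
     -1 for each of the (up to) four neighbors and 4 on the diagonal.
  */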
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0)   {J = I - n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = I + n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = I - 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = I + 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&I,1,&I,&v,INSERT_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Create and set vectors
  */
  VecCreate(PETSC_COMM_WORLD,&b);
  VecSetSizes(b,PETSC_DECIDE,m*n);
  VecSetFromOptions(b);
  VecDuplicate(b,&u);
  VecDuplicate(b,&x);
  VecSet(&one,u);
  MatMult(A,u,b);
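  /*
     u is the exact solution (all ones); b = A*u is the corresponding
     right-hand side, so the computed x can later be compared against u.
  */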

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);

  /*
     Set the default preconditioner for this program to be ASM
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCASM);
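  /*
     Note: a -pc_type <type> runtime option (applied later by KSPSetFromOptions())
     can still override this choice; the -user_set_subdomain_solvers branch below
     checks for that and requires the PC type to remain ASM.
  */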

  /* -------------------------------------------------------------------
                  Define the problem decomposition
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of many users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Set the overlap, using the default PETSc decomposition via
         PCASMSetOverlap(pc,overlap);
     Could instead use the option -pc_asm_overlap <ovl>

     Set the total number of blocks via -pc_asm_blocks <blks>
     Note:  The ASM default is to use 1 block per processor.  To
     experiment on a single processor with various overlaps, you
     must specify use of multiple blocks!
  */
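  /*
     For example (illustrative command line, not part of the original source),
     a single-process run with several blocks and a wider overlap might be
         ./ex8 -pc_asm_blocks 4 -overlap 2
  */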

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       More advanced method, setting user-defined subdomains
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     First, create index sets that define the subdomains.  The utility
     routine PCASMCreateSubdomains2D() is a simple example (that currently
     supports 1 processor only!).  More generally, the user should write
     a custom routine for a particular problem geometry.

     Then call either PCASMSetLocalSubdomains() or PCASMSetTotalSubdomains()
     to set the subdomains for the ASM preconditioner.
  */

  if (!user_subdomains) { /* basic version */
    PCASMSetOverlap(pc,overlap);
  } else { /* advanced version */
    if (size != 1) SETERRQ(1,"PCASMCreateSubdomains2D() is currently a uniprocessor routine only!");
    PCASMCreateSubdomains2D(m,n,M,N,1,overlap,&Nsub,&is);
    PCASMSetLocalSubdomains(pc,Nsub,is);
  }
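  /*
     For example (illustrative command line, not part of the original source),
     a 3 x 2 array of user-defined subdomains on one process could be tried with
         ./ex8 -user_set_subdomains -M 3 -N 2 -overlap 1
  */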

  /* -------------------------------------------------------------------
                Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the ASM preconditioner uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub before the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.

     - Use PCASMGetSubKSP() to extract the array of KSP contexts for
       the local blocks.
     - See ex7.c for a simple example of setting different linear solvers
       for the individual blocks for the block Jacobi method (which is
       equivalent to the ASM method with zero overlap).
  */

  PetscOptionsHasName(PETSC_NULL,"-user_set_subdomain_solvers",&flg);
  if (flg) {
    KSP        *subksp;        /* array of KSP contexts for local subblocks */
    PetscInt   nlocal,first;   /* number of local subblocks, first local subblock */
    PC         subpc;          /* PC context for subblock */
    PetscTruth isasm;

    PetscPrintf(PETSC_COMM_WORLD,"User explicitly sets subdomain solvers.\n");

    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);

    /*
       Flag an error if the PC type was changed from ASM by the runtime options,
       since the code below assumes an ASM preconditioner.
    */
    PetscTypeCompare((PetscObject)pc,PCASM,&isasm);
    if (!isasm) {
      SETERRQ(1,"Cannot change the PCTYPE when manually changing the subdomain solver settings");
    }
    /*
       Call KSPSetUp() to set up the ASM data structures (including
       creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCASMGetSubKSP().
    */
    KSPSetUp(ksp);

    /*
       Extract the array of KSP contexts for the local blocks
    */
    PCASMGetSubKSP(pc,&nlocal,&first,&subksp);

    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      KSPGetPC(subksp[i],&subpc);
      PCSetType(subpc,PCILU);
      KSPSetType(subksp[i],KSPGMRES);
      KSPSetTolerances(subksp[i],1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
    }
  } else {
    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);
  }

  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  KSPSolve(ksp,b,x);
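  /*
     The error in x relative to the exact solution u could be examined here,
     e.g. with VecAXPY() and VecNorm(); this is omitted to keep the example
     focused on the ASM setup.
  */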

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */

  if (user_subdomains) {
    for (i=0; i<Nsub; i++) {
      ISDestroy(is[i]);
    }
    PetscFree(is);
  }
  KSPDestroy(ksp);
  VecDestroy(u);
  VecDestroy(x);
  VecDestroy(b);
  MatDestroy(A);
  PetscFinalize();
  return 0;
}