Actual source code: ex8.c
1: /*$Id: ex8.c,v 1.49 2001/04/10 19:36:40 bsmith Exp $*/
3: static char help[] = "Illustrates use of the preconditioner ASM.n
4: The Additive Schwarz Method for solving a linear system in parallel with SLES. Then
5: code indicates the procedure for setting user-defined subdomains. Inputn
6: parameters include:n
7: -user_set_subdomain_solvers: User explicitly sets subdomain solversn
8: -user_set_subdomains: Activate user-defined subdomainsnn";
10: /*
11: Note: This example focuses on setting the subdomains for the ASM
12: preconditioner for a problem on a 2D rectangular grid. See ex1.c
13: and ex2.c for more detailed comments on the basic usage of SLES
14: (including working with matrices and vectors).
16: The ASM preconditioner is fully parallel, but currently the routine
17: PCASMCreateSubDomains2D(), which is used in this example to demonstrate
18: user-defined subdomains (activated via -user_set_subdomains), is
19: uniprocessor only.
21: This matrix in this linear system arises from the discretized Laplacian,
22: and thus is not very interesting in terms of experimenting with variants
23: of the ASM preconditioner.
24: */
26: /*T
27: Concepts: SLES^Additive Schwarz Method (ASM) with user-defined subdomains
28: Processors: n
29: T*/
31: /*
32: Include "petscsles.h" so that we can use SLES solvers. Note that this file
33: automatically includes:
34: petsc.h - base PETSc routines petscvec.h - vectors
35: petscsys.h - system routines petscmat.h - matrices
36: petscis.h - index sets petscksp.h - Krylov subspace methods
37: petscviewer.h - viewers petscpc.h - preconditioners
38: */
39: #include "petscsles.h"
41: int main(int argc,char **args)
42: {
43: Vec x,b,u; /* approx solution, RHS, exact solution */
44: Mat A; /* linear system matrix */
45: SLES sles; /* linear solver context */
46: PC pc; /* PC context */
47: IS *is; /* array of index sets that define the subdomains */
48: int overlap = 1; /* width of subdomain overlap */
49: int Nsub; /* number of subdomains */
50: int m = 15,n = 17; /* mesh dimensions in x- and y- directions */
51: int M = 2,N = 1; /* number of subdomains in x- and y- directions */
52: int i,j,its,I,J,ierr,Istart,Iend,size;
53: PetscTruth flg;
54: PetscTruth user_subdomains; /* flag - 1 indicates user-defined subdomains */
55: Scalar v, one = 1.0;
57: PetscInitialize(&argc,&args,(char *)0,help);
58: MPI_Comm_size(PETSC_COMM_WORLD,&size);
59: PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
60: PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
61: PetscOptionsGetInt(PETSC_NULL,"-M",&M,PETSC_NULL);
62: PetscOptionsGetInt(PETSC_NULL,"-N",&N,PETSC_NULL);
63: PetscOptionsGetInt(PETSC_NULL,"-overlap",&overlap,PETSC_NULL);
64: PetscOptionsHasName(PETSC_NULL,"-user_set_subdomains",&user_subdomains);
66: /* -------------------------------------------------------------------
67: Compute the matrix and right-hand-side vector that define
68: the linear system, Ax = b.
69: ------------------------------------------------------------------- */
71: /*
72: Assemble the matrix for the five point stencil, YET AGAIN
73: */
74: MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,&A);
75: MatSetFromOptions(A);
76: MatGetOwnershipRange(A,&Istart,&Iend);
77: for (I=Istart; I<Iend; I++) {
78: v = -1.0; i = I/n; j = I - i*n;
79: if (i>0) {J = I - n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
80: if (i<m-1) {J = I + n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
81: if (j>0) {J = I - 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
82: if (j<n-1) {J = I + 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
83: v = 4.0; MatSetValues(A,1,&I,1,&I,&v,INSERT_VALUES);
84: }
85: MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
86: MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
88: /*
89: Create and set vectors
90: */
91: VecCreate(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,&b);
92: VecSetFromOptions(b);
93: VecDuplicate(b,&u);
94: VecDuplicate(b,&x);
95: VecSet(&one,u);
96: MatMult(A,u,b);
98: /*
99: Create linear solver context
100: */
101: SLESCreate(PETSC_COMM_WORLD,&sles);
103: /*
104: Set operators. Here the matrix that defines the linear system
105: also serves as the preconditioning matrix.
106: */
107: SLESSetOperators(sles,A,A,DIFFERENT_NONZERO_PATTERN);
109: /*
110: Set the default preconditioner for this program to be ASM
111: */
112: SLESGetPC(sles,&pc);
113: PCSetType(pc,PCASM);
115: /* -------------------------------------------------------------------
116: Define the problem decomposition
117: ------------------------------------------------------------------- */
119: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
120: Basic method, should be sufficient for the needs of many users.
121: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
123: Set the overlap, using the default PETSc decomposition via
124: PCASMSetOverlap(pc,overlap);
125: Could instead use the option -pc_asm_overlap <ovl>
127: Set the total number of blocks via -pc_asm_blocks <blks>
128: Note: The ASM default is to use 1 block per processor. To
129: experiment on a single processor with various overlaps, you
130: must specify use of multiple blocks!
131: */
133: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
134: More advanced method, setting user-defined subdomains
135: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
137: Firstly, create index sets that define the subdomains. The utility
138: routine PCASMCreateSubdomains2D() is a simple example (that currently
139: supports 1 processor only!). More generally, the user should write
140: a custom routine for a particular problem geometry.
142: Then call either PCASMSetLocalSubdomains() or PCASMSetTotalSubdomains()
143: to set the subdomains for the ASM preconditioner.
144: */
146: if (!user_subdomains) { /* basic version */
147: PCASMSetOverlap(pc,overlap);
148: } else { /* advanced version */
149: if (size != 1) SETERRQ(1,"PCASMCreateSubdomains() is currently a uniprocessor routine only!");
150: PCASMCreateSubdomains2D(m,n,M,N,1,overlap,&Nsub,&is);
151: PCASMSetLocalSubdomains(pc,Nsub,is);
152: }
154: /* -------------------------------------------------------------------
155: Set the linear solvers for the subblocks
156: ------------------------------------------------------------------- */
158: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
159: Basic method, should be sufficient for the needs of most users.
160: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
162: By default, the ASM preconditioner uses the same solver on each
163: block of the problem. To set the same solver options on all blocks,
164: use the prefix -sub before the usual PC and KSP options, e.g.,
165: -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4
167: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
168: Advanced method, setting different solvers for various blocks.
169: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
171: Note that each block's SLES context is completely independent of
172: the others, and the full range of uniprocessor SLES options is
173: available for each block.
175: - Use PCASMGetSubSLES() to extract the array of SLES contexts for
176: the local blocks.
177: - See ex7.c for a simple example of setting different linear solvers
178: for the individual blocks for the block Jacobi method (which is
179: equivalent to the ASM method with zero overlap).
180: */
182: PetscOptionsHasName(PETSC_NULL,"-user_set_subdomain_solvers",&flg);
183: if (flg) {
184: SLES *subsles; /* array of SLES contexts for local subblocks */
185: int nlocal,first; /* number of local subblocks, first local subblock */
186: KSP subksp; /* KSP context for subblock */
187: PC subpc; /* PC context for subblock */
188: PetscTruth isasm;
190: PetscPrintf(PETSC_COMM_WORLD,"User explicitly sets subdomain solvers.n");
192: /*
193: Set runtime options
194: */
195: SLESSetFromOptions(sles);
197: /*
198: Flag an error if PCTYPE is changed from the runtime options
199: */
200: PetscTypeCompare((PetscObject)pc,PCASM,&isasm);
201: if (isasm) {
202: SETERRQ(1,"Cannot Change the PCTYPE when manually changing the subdomain solver settings");
203: }
204: /*
205: Call SLESSetUp() to set the block Jacobi data structures (including
206: creation of an internal SLES context for each block).
208: Note: SLESSetUp() MUST be called before PCASMGetSubSLES().
209: */
210: SLESSetUp(sles,x,b);
212: /*
213: Extract the array of SLES contexts for the local blocks
214: */
215: PCASMGetSubSLES(pc,&nlocal,&first,&subsles);
217: /*
218: Loop over the local blocks, setting various SLES options
219: for each block.
220: */
221: for (i=0; i<nlocal; i++) {
222: SLESGetPC(subsles[i],&subpc);
223: SLESGetKSP(subsles[i],&subksp);
224: PCSetType(subpc,PCILU);
225: KSPSetType(subksp,KSPGMRES);
226: KSPSetTolerances(subksp,1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
227: }
228: } else {
229: /*
230: Set runtime options
231: */
232: SLESSetFromOptions(sles);
233: }
235: /* -------------------------------------------------------------------
236: Solve the linear system
237: ------------------------------------------------------------------- */
239: SLESSolve(sles,b,x,&its);
241: /*
242: Free work space. All PETSc objects should be destroyed when they
243: are no longer needed.
244: */
246: if (user_subdomains) {
247: for (i=0; i<Nsub; i++) {
248: ISDestroy(is[i]);
249: }
250: PetscFree(is);
251: }
252: SLESDestroy(sles);
253: VecDestroy(u);
254: VecDestroy(x);
255: VecDestroy(b);
256: MatDestroy(A);
257: PetscFinalize();
258: return 0;
259: }