Actual source code: ex7.c
static char help[] = "Block Jacobi preconditioner for solving a linear system in parallel with KSP.\n\
The code indicates the\n\
procedures for setting the particular block sizes and for using different\n\
linear solvers on the individual blocks.\n\n";

/*
   Note: This example focuses on ways to customize the block Jacobi
   preconditioner. See ex1.c and ex2.c for more detailed comments on
   the basic usage of KSP (including working with matrices and vectors).

   Recall: The block Jacobi method is equivalent to the ASM preconditioner
   with zero overlap.
*/
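
/*
   A minimal runtime illustration of that equivalence (the -pc_asm_overlap
   option name comes from the PCASM interface and is an assumption here,
   not something this example sets itself): running with
        -pc_type asm -pc_asm_overlap 0
   should behave essentially like the block Jacobi default selected below.
*/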

/*T
   Concepts: KSP^customizing the block Jacobi preconditioner
   Processors: n
T*/

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines      petscvec.h - vectors
     petscsys.h    - system routines          petscmat.h - matrices
     petscis.h     - index sets               petscksp.h - Krylov subspace methods
     petscviewer.h - viewers                  petscpc.h  - preconditioners
*/
#include "petscksp.h"

int main(int argc,char **args)
{
  Vec            x,b,u;      /* approx solution, RHS, exact solution */
  Mat            A;          /* linear system matrix */
  KSP            ksp;        /* KSP context */
  KSP            *subksp;    /* array of local KSP contexts on this processor */
  PC             pc;         /* PC context */
  PC             subpc;      /* PC context for subdomain */
  PetscReal      norm;       /* norm of solution error */
  PetscInt       i,j,I,J,*blks,m = 8,n;
  PetscMPIInt    rank,size;
  PetscInt       its,nlocal,first,Istart,Iend;
  PetscScalar    v,one = 1.0,none = -1.0;
  PetscTruth     isbjacobi,flg;

  PetscInitialize(&argc,&args,(char *)0,help);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  n = m+2;
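
  /*
     Note: the global grid is m x n (with n = m+2), so the matrix has m*n rows;
     below, each of the m grid rows becomes one Jacobi block of size n.
  */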

  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Create and assemble parallel matrix
  */
  MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,&A);
  MatSetFromOptions(A);
  MatGetOwnershipRange(A,&Istart,&Iend);
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0)   {J = I - n; MatSetValues(A,1,&I,1,&J,&v,ADD_VALUES);}
    if (i<m-1) {J = I + n; MatSetValues(A,1,&I,1,&J,&v,ADD_VALUES);}
    if (j>0)   {J = I - 1; MatSetValues(A,1,&I,1,&J,&v,ADD_VALUES);}
    if (j<n-1) {J = I + 1; MatSetValues(A,1,&I,1,&J,&v,ADD_VALUES);}
    v = 4.0; MatSetValues(A,1,&I,1,&I,&v,ADD_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Create parallel vectors
  */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,m*n);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);

  /*
     Set exact solution; then compute right-hand-side vector.
  */
  VecSet(&one,u);
  MatMult(A,u,b);

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators.  Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);

  /*
     Set default preconditioner for this program to be block Jacobi.
     This choice can be overridden at runtime with the option
        -pc_type <type>
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCBJACOBI);

  /* -------------------------------------------------------------------
                   Define the problem decomposition
     ------------------------------------------------------------------- */

  /*
     Call PCBJacobiSetTotalBlocks() to set individually the size of
     each block in the preconditioner.  This could also be done with
     the runtime option
        -pc_bjacobi_blocks <blocks>
     Also, see the command PCBJacobiSetLocalBlocks() to set the
     local blocks (a sketch of its use follows the code below).

     Note: The default decomposition is 1 block per processor.
  */
  PetscMalloc(m*sizeof(PetscInt),&blks);
  for (i=0; i<m; i++) blks[i] = n;
  PCBJacobiSetTotalBlocks(pc,m,blks);
  PetscFree(blks);
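
  /*
     A minimal sketch of the per-process alternative mentioned above, using
     PCBJacobiSetLocalBlocks().  This is not executed in this example; the
     block count and sizes are hypothetical, and the sizes must account for
     all rows owned by the calling process:

        PetscInt lens[2] = {n, n};
        PCBJacobiSetLocalBlocks(pc,2,lens);
  */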

  /* -------------------------------------------------------------------
               Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the block Jacobi method uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub before the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4
  */
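
  /*
     For example (a hypothetical command line; the executable name and the
     process count are illustrative only), all blocks could be switched to
     ILU-preconditioned GMRES with a relative tolerance of 1.e-4 by running

        mpiexec -n 2 ./ex7 -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-4
  */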

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.  The following section of code is intended
     to be a simple illustration of setting different linear solvers for
     the individual blocks.  These choices are obviously not recommended
     for solving this particular problem.
  */
  PetscTypeCompare((PetscObject)pc,PCBJACOBI,&isbjacobi);
  if (isbjacobi) {
    /*
       Call KSPSetUp() to set the block Jacobi data structures (including
       creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCBJacobiGetSubKSP().
    */
    KSPSetUp(ksp);

    /*
       Extract the array of KSP contexts for the local blocks
    */
    PCBJacobiGetSubKSP(pc,&nlocal,&first,&subksp);

    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      KSPGetPC(subksp[i],&subpc);
      if (!rank) {
        if (i%2) {
          PCSetType(subpc,PCILU);
        } else {
          PCSetType(subpc,PCNONE);
          KSPSetType(subksp[i],KSPBCGS);
          KSPSetTolerances(subksp[i],1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
        }
      } else {
        PCSetType(subpc,PCJACOBI);
        KSPSetType(subksp[i],KSPGMRES);
        KSPSetTolerances(subksp[i],1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
      }
    }
  }

  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  /*
     Set runtime options
  */
  KSPSetFromOptions(ksp);

  /*
     Solve the linear system
  */
  KSPSolve(ksp,b,x);

  /*
     View info about the solver
  */
  PetscOptionsHasName(PETSC_NULL,"-nokspview",&flg);
  if (!flg) {
    KSPView(ksp,PETSC_VIEWER_STDOUT_WORLD);
  }

  /* -------------------------------------------------------------------
                      Check solution and clean up
     ------------------------------------------------------------------- */

  /*
     Check the error
  */
  VecAXPY(&none,u,x);
  VecNorm(x,NORM_2,&norm);
  KSPGetIterationNumber(ksp,&its);
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %A iterations %D\n",norm,its);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(ksp);
  VecDestroy(u);  VecDestroy(x);
  VecDestroy(b);  MatDestroy(A);
  PetscFinalize();
  return 0;
}