Actual source code: ex4.c
/*$Id: ex4.c,v 1.52 2001/04/10 19:36:40 bsmith Exp $*/
static char help[] = "Uses a different preconditioner matrix and linear system matrix in the SLES solvers.\n\
Note that different storage formats\n\
can be used for the different matrices.\n\n";
/*T
   Concepts: SLES^different matrices for linear system and preconditioner;
   Processors: n
T*/
/*
  Include "petscsles.h" so that we can use SLES solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include "petscsles.h"
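
/*
   Typical build and run (illustrative; the exact makefile target and MPI
   launcher depend on the local PETSc installation):
       make ex4
       mpirun -np 2 ex4 -m 20 -n 20 -scale 0.1
*/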
int main(int argc,char **args)
{
  SLES        sles;         /* linear solver context */
  Mat         A,B;          /* linear system matrix, preconditioning matrix */
  PetscRandom rctx;         /* random number generator context */
  Vec         x,b,u;        /* approx solution, RHS, exact solution */
  Vec         tmp;          /* work vector */
  Scalar      v,one = 1.0,scale = 0.0;
  int         i,j,m = 15,n = 17,its,I,J,ierr,Istart,Iend;
  PetscInitialize(&argc,&args,(char *)0,help);
  ierr = PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetScalar(PETSC_NULL,"-scale",&scale,PETSC_NULL);CHKERRQ(ierr);
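
  /*
     The three calls above read optional command-line values; when an
     option is not given, the default set at the declaration is kept
     (e.g., -m 30 overrides m = 15, otherwise m stays 15).
  */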
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.  Also, create a different
         preconditioner matrix.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create the linear system matrix (A).
      - Here we use a block diagonal matrix format (MATMPIBDIAG) and
        specify only the global size.  The parallel partitioning of
        the matrix will be determined at runtime by PETSc.
  */
  ierr = MatCreateMPIBDiag(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,m*n,
                           0,1,PETSC_NULL,PETSC_NULL,&A);CHKERRQ(ierr);
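
  /*
     Reading of the call above (paraphrasing the MatCreateMPIBDiag() manual
     page for this PETSc generation; not stated in the example itself):
     the arguments give the communicator, the local row count (PETSC_DECIDE),
     the global dimensions (m*n by m*n), the number of diagonals (0), the
     block size (1), and PETSC_NULL diagonal-number/value arrays, so PETSc
     allocates diagonals as entries are inserted below.
  */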
  /*
     Create a different preconditioner matrix (B).  This is usually
     done to form a cheaper (or sparser) preconditioner matrix
     compared to the linear system matrix.
      - Here we use MatCreate(), so that the matrix format and
        parallel partitioning will be determined at runtime.
  */
  ierr = MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,&B);CHKERRQ(ierr);
  ierr = MatSetFromOptions(B);CHKERRQ(ierr);
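
  /*
     For example, one might select a standard AIJ format for B at runtime
     with something like
         ex4 -mat_type mpiaij
     (the -mat_type option name is assumed from common PETSc usage and may
     differ in this release).
  */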
  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  ierr = MatGetOwnershipRange(A,&Istart,&Iend);CHKERRQ(ierr);
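
  /*
     Illustrative arithmetic: with the defaults m = 15, n = 17 the global
     size is m*n = 255 rows, so a run on two processes would typically give
     rank 0 the range [0,128) and rank 1 the range [128,255); the actual
     split is whatever PETSc decides at runtime.
  */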
  /*
     Set entries within the two matrices
  */
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0) {
      J = I-n;
      ierr = MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);
      ierr = MatSetValues(B,1,&I,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);
    }
    if (i<m-1) {
      J = I+n;
      ierr = MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);
      ierr = MatSetValues(B,1,&I,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);
    }
    if (j>0) {
      J = I-1;
      ierr = MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);
    }
    if (j<n-1) {
      J = I+1;
      ierr = MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);
    }
    v = 5.0; ierr = MatSetValues(A,1,&I,1,&I,&v,INSERT_VALUES);CHKERRQ(ierr);
    v = 3.0; ierr = MatSetValues(B,1,&I,1,&I,&v,INSERT_VALUES);CHKERRQ(ierr);
  }
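
  /*
     What the loop above builds: row I corresponds to grid point (i,j) of
     an m x n grid via I = i*n + j.  A receives the full 5-point stencil
     (four neighbors at -1.0 plus a diagonal of 5.0), while B receives only
     the north/south neighbors plus a diagonal of 3.0, so B is a sparser
     (cheaper) approximation of A.
  */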
  /*
     Assemble the preconditioner matrix (B), using the two-step process
       MatAssemblyBegin(), MatAssemblyEnd()
     Note that computations can be done while messages are in transit
     by placing code between these two statements; here we insert a few
     extra off-diagonal entries into A, which is not assembled until later.
  */
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  for (I=Istart; I<Iend; I++) {
    v = -0.5; i = I/n;
    if (i>1) {
      J = I-(n+1); ierr = MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);
    }
    if (i<m-2) {
      J = I+n+1; ierr = MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  /*
     Assemble the linear system matrix (A)
  */
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  /*
     Create parallel vectors.
      - When using VecCreate(), we specify only the vector's global
        dimension; the parallel partitioning is determined at runtime.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  ierr = VecCreate(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,&b);CHKERRQ(ierr);
  ierr = VecSetFromOptions(b);CHKERRQ(ierr);
  ierr = VecDuplicate(b,&u);CHKERRQ(ierr);
  ierr = VecDuplicate(b,&x);CHKERRQ(ierr);
  /*
     Set the exact solution vector to 1, plus random noise scaled by
     the -scale option (zero by default)
  */
  ierr = VecSet(&one,u);CHKERRQ(ierr);
  ierr = VecDuplicate(u,&tmp);CHKERRQ(ierr);
  ierr = PetscRandomCreate(PETSC_COMM_WORLD,RANDOM_DEFAULT,&rctx);CHKERRQ(ierr);
  ierr = VecSetRandom(rctx,tmp);CHKERRQ(ierr);
  ierr = PetscRandomDestroy(rctx);CHKERRQ(ierr);
  ierr = VecAXPY(&scale,tmp,u);CHKERRQ(ierr);
  ierr = VecDestroy(tmp);CHKERRQ(ierr);
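
  /*
     In formula form: VecSet() makes u = 1, VecSetRandom() fills tmp with
     random entries r, and VecAXPY() computes u = u + scale*r, so the exact
     solution is u_i = 1 + scale*r_i.  With the default scale = 0.0 the
     exact solution is identically 1.
  */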
  /*
     Compute right-hand-side vector
  */
  ierr = MatMult(A,u,b);CHKERRQ(ierr);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  ierr = SLESCreate(PETSC_COMM_WORLD,&sles);CHKERRQ(ierr);
  /*
     Set operators.  Note that we use different matrices to define the
     linear system and to precondition it.
  */
  ierr = SLESSetOperators(sles,A,B,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
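
  /*
     The final flag tells PETSc how the preconditioner matrix's nonzero
     structure relates across successive solves; DIFFERENT_NONZERO_PATTERN
     is the conservative choice here (an interpretation based on general
     PETSc usage, not spelled out in this example).
  */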
  /*
     Set runtime options (e.g., -ksp_type <type> -pc_type <type>)
  */
  ierr = SLESSetFromOptions(sles);CHKERRQ(ierr);
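
  /*
     For instance (standard PETSc runtime options, shown for illustration):
         ex4 -ksp_type gmres -pc_type jacobi -ksp_monitor
     selects GMRES with Jacobi preconditioning and prints the residual norm
     at each iteration.
  */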
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  ierr = SLESSolve(sles,b,x,&its);CHKERRQ(ierr);
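
  /*
     On return, its holds the iteration count.  A hypothetical addition
     (not in the original example) to report it:
         ierr = PetscPrintf(PETSC_COMM_WORLD,"Converged in %d iterations\n",its);CHKERRQ(ierr);
  */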
  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  ierr = SLESDestroy(sles);CHKERRQ(ierr); ierr = VecDestroy(u);CHKERRQ(ierr);
  ierr = MatDestroy(B);CHKERRQ(ierr);     ierr = VecDestroy(x);CHKERRQ(ierr);
  ierr = MatDestroy(A);CHKERRQ(ierr);     ierr = VecDestroy(b);CHKERRQ(ierr);
  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}