Actual source code: ex2.c
static char help[] = "Demonstrates running several independent tasks in PETSc.\n\n";

/*T
   Concepts: KSP^solving linear equations
   Processors: n

   Comments: Demonstrates how to use PetscSetCommWorld() to tell a subset of
             processors (in this case each individual processor) to run
             as if it were all the processors that PETSc is using.  ADVANCED
             example, not for beginning PETSc users.
T*/

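/*
   A hypothetical invocation (the launcher name and flags depend on the local
   MPI installation; -m and -n are the grid options read below):

       mpirun -np 4 ./ex2 -m 10 -n 10

   Each of the 4 processes then assembles and solves its own independent
   linear system.
*/
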
/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include "petscksp.h"

EXTERN PetscErrorCode kspex(int,char**);

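/*
   MPI is initialized by the user in main() rather than being left to
   PetscInitialize(), because PetscSetCommWorld() is called before
   PetscInitialize() in kspex() and needs MPI (and its communicators) to
   already exist.
*/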
int main(int argc,char **argv)
{
  MPI_Init(&argc,&argv);
  kspex(argc,argv);
  MPI_Finalize();
  return 0;
}

PetscErrorCode kspex(int argc,char **args)
{
  Vec         x,b,u;    /* approx solution, RHS, exact solution */
  Mat         A;        /* linear system matrix */
  KSP         ksp;      /* linear solver context */
  PC          pc;       /* preconditioner context */
  PetscReal   norm;     /* norm of solution error */
  PetscInt    i,j,I,J,Istart,Iend,m = 8,n = 7,its;
  PetscScalar v,one = 1.0,none = -1.0;

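  /*
     PetscSetCommWorld() is called before PetscInitialize().  Here each
     process passes PETSC_COMM_SELF, so every process runs its own
     independent PETSc computation; in the rest of this routine,
     PETSC_COMM_WORLD therefore refers to just this one process.
  */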
  PetscSetCommWorld(PETSC_COMM_SELF);
  PetscInitialize(&argc,&args,(char *)0,help);

  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime.  Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.
  */
  MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,&A);
  MatSetFromOptions(A);

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A,&Istart,&Iend);

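  /*
     Note: since the communicator here is effectively PETSC_COMM_SELF, each
     process owns every row (Istart is 0 and Iend is m*n).  The loop below
     is nonetheless written in the general parallel style.
  */
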
  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
  */
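  /*
     Grid point (i,j) on the m x n grid maps to global row I = i*n + j, so
     the four stencil neighbors (i-1,j), (i+1,j), (i,j-1), (i,j+1) are
     columns I-n, I+n, I-1, I+1 respectively.
  */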
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0)   {J = I - n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = I + n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = I - 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = I + 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&I,1,&I,&v,INSERT_VALUES);
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition,
     by placing code between these two statements.
  */
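  /*
     A sketch of the overlap mentioned above (the intervening work is
     hypothetical; it must not touch A):

         MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
         ... unrelated local computation here ...
         MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  */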
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Create parallel vectors.
      - When using VecCreate(), VecSetSizes(), and VecSetFromOptions(),
        we specify only the vector's global dimension; the parallel
        partitioning is determined at runtime.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,m*n);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);

  /*
     Set exact solution; then compute right-hand-side vector.
  */
  VecSet(&one,u);
  MatMult(A,u,b);

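  /*
     u is filled with ones and b is set to A*u, so u is the exact solution
     of the system A x = b solved below; the error check at the end
     compares the computed x with u.
  */
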
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators.  Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);

  /*
     Set linear solver defaults for this problem (optional).
      - By extracting the KSP and PC contexts from the KSP context,
        we can then directly call any KSP and PC routines
        to set various options.
      - The following three statements are optional; all of these
        parameters could alternatively be specified at runtime via
        KSPSetFromOptions().
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCJACOBI);
  KSPSetTolerances(ksp,1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
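  /*
     The KSPSetTolerances() arguments are (ksp,rtol,abstol,dtol,maxits);
     PETSC_DEFAULT leaves a value at its built-in default, so only the
     relative tolerance is changed here.
  */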
  /*
     Set runtime options, e.g.,
         -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization
     routines.
  */
  KSPSetFromOptions(ksp);
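  /*
     For example, one might run (option names taken from the comment above;
     the exact monitor output depends on the PETSc build):

         ./ex2 -ksp_type gmres -pc_type jacobi -ksp_monitor -ksp_rtol 1.e-10
  */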

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                        Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  KSPSolve(ksp,b,x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                        Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Check the error
  */
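  /*
     With this (older) VecAXPY() calling sequence, VecAXPY(&alpha,u,x)
     computes x <- x + alpha*u; with alpha = -1.0 this leaves the error
     x - u in x, whose 2-norm is taken next.
  */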
  VecAXPY(&none,u,x);
  VecNorm(x,NORM_2,&norm);
  KSPGetIterationNumber(ksp,&its);
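  /*
     %A and %D below are the PETSc format specifiers for PetscReal and
     PetscInt in the (older) PetscPrintf() convention this example uses.
  */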
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %A iterations %D\n",norm,its);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(ksp);
  VecDestroy(u); VecDestroy(x);
  VecDestroy(b); MatDestroy(A);
  PetscFinalize();
  return 0;
}