Actual source code: samgmgpetsc.c
#include "global.h"
#include "petscfunc.h"
#include "petscksp.h"
#include "petscmg.h"

static char help[] = "Does PETSc multigrid cycling using a hierarchy built by SAMG\n\n";
PetscErrorCode samgmgpetsc(const int numnodes, double* Asky, int* ia,
                           int* ja, double* rhs, double* u_approx,
                           const OPTIONS *options)
{
  /*..PETSc variables..*/
  Vec    x, b;           /* approx solution and RHS */
  Mat    A;              /* linear system matrix */
  KSP    ksp;            /* linear solver context */
  PC     pc;             /* preconditioner context */
  PCType pctype;         /* preconditioning technique */
  int    its;            /* number of iterations */
  /*..Other variables for the PETSc interface..*/
  int *nnz_per_row;      /* integer vector to hold the number of nonzeros */
                         /* of each row. This vector will be used to */
                         /* allocate memory for the matrix, and to store */
                         /* elements in the matrix */
  int *cols;             /* vector of column indices used in */
                         /* assembling the PETSc rhs vector */
  PetscScalar *sol_array;/* used to pass the PETSc solution */
                         /* back to the calling program */
  /*..Variables used to customize the convergence criterion to */
  /*  ||res|| / ||b|| < tol ..*/
  double bnrm2;
  CONVHIST *convhist;
  /*..Context for the SAMG preconditioner..*/
  SamgShellPC *samg_ctx;
  /*..Variables to extract the SAMG hierarchy..*/
  int     k, levels, numnonzero;
  double  normdiff;
  GridCtx grid[MAX_LEVELS];
  char    pathfilename[80], basefilename[80];
  /*..Variables for intermediate levels..*/
  KSP ksp_pre, ksp_post;
  PC  pc_pre, pc_post;
  Mat FineLevelMatrix;
  int petsc_level, size;
  /*..Variables for the coarse grid solve..*/
  KSP coarsegridksp;
  PC  coarsegridpc;
  int coarsegrid_n;
  double coarsegrid_rnorm;
  /*..Variable that determines the behaviour of the code..*/
  int debug = *(options->DEBUG);
  /*..Other variables..*/
  int I;
  PetscTruth flg, issamg, issamg_print;
  /*..Variables for CPU timings..*/
  PetscLogDouble v1, v2, t_setup, t_solve;

  /*..Executable statements..*/
  PetscInitialize((int*)0, (char***)0, (char*)0, help);

  /*..Get start time of linear system setup..*/
  PetscGetTime(&v1);

  PetscMalloc(numnodes * sizeof(int), &nnz_per_row);

  /*..The number of nonzero entries in row I can be calculated as
      ia[I+1] - 1 - ia[I] + 1 = ia[I+1] - ia[I] ..*/
  for (I=0;I<numnodes;I++)
    nnz_per_row[I] = ia[I+1] - ia[I];
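  /*..Worked example (hypothetical data): for numnodes = 3 and
      ia = {0,2,5,7}, row 0 owns entries 0..1, row 1 owns entries 2..4
      and row 2 owns entries 5..6, so nnz_per_row = {2,3,2}..*/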

  /*..Allocate (create) SeqAIJ matrix for use within PETSc..*/
  MatCreate(PETSC_COMM_WORLD,numnodes,numnodes,numnodes,numnodes,&A);
  MatSetType(A,MATSEQAIJ);
  MatSeqAIJSetPreallocation(A,0,nnz_per_row);
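  /*..Preallocating with the exact row counts means MatSetValues below
      never has to allocate additional memory, which keeps the assembly
      cheap..*/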

  /*..Assemble matrix for use within PETSc..*/
  for (I=0;I<numnodes;I++){
    MatSetValues(A,
                 1,               /* number of rows */
                 &I,              /* pointer to global row number */
                 nnz_per_row[I],  /* number of columns = number of */
                                  /* nonzero entries in row I */
                 &(ja[ ia[I] ]),  /* vector of global column indices */
                 (PetscScalar *) &(Asky[ ia[I] ]),
                                  /* vector of coefficients */
                 INSERT_VALUES);
  }
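  /*..Because the input is in compressed row storage, the column indices
      and coefficients of row I are contiguous in ja and Asky, so each
      row can be inserted with a single MatSetValues call..*/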

  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  if (debug)
    MatView(A,PETSC_VIEWER_STDOUT_SELF);

  /*..Create solution and rhs vector. Note that we form the vector from
      scratch and then duplicate as needed..*/
  VecCreate(PETSC_COMM_WORLD,&x);
  VecSetSizes(x,PETSC_DECIDE,numnodes);
  VecSetType(x,VECSEQ);
  VecDuplicate(x,&b);

  PetscMalloc(numnodes * sizeof(int),&cols);
  for (I=0;I<numnodes;I++)
    cols[I] = I;

  /*..Assemble the right-hand side vector for use within PETSc..*/
  VecSetValues(b,numnodes,cols,(PetscScalar*)rhs,INSERT_VALUES);

  VecAssemblyBegin(b);
  VecAssemblyEnd(b);
  if (debug){
    printf("[PETSc]:The right-hand side \n");
    VecView(b,PETSC_VIEWER_STDOUT_SELF);
    printf("\n");
  }

  /*..Assemble the start solution vector for use within PETSc..*/
  VecSetValues(x,numnodes,cols,(PetscScalar*)u_approx,INSERT_VALUES);

  VecAssemblyBegin(x);
  VecAssemblyEnd(x);

  VecNorm(b,NORM_2,&bnrm2);
  if (debug)
    printf("[PETSc]:The right-hand side norm = %e \n",bnrm2);

  /*..Create linear solver context..*/
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*..Set operators. Here the matrix that defines the linear system
      also serves as the preconditioning matrix..*/
  KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);

  /*..Extract the pc context from the ksp context..*/
  KSPGetPC(ksp,&pc);

  /*..Customize tolerances..*/
  KSPSetTolerances(ksp,1e-12,1e-14,PETSC_DEFAULT,PETSC_DEFAULT);

  /*..Create a user defined context for the shell preconditioner..*/
  SamgShellPCCreate(&samg_ctx);

  /*..Do the setup for the SAMG preconditioner..*/
  SamgShellPCSetUp(samg_ctx,A);

  /*..Give the user defined preconditioner a name..*/
  PCShellSetName(pc,"SAMG (Scalar mode)");
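  /*..Note: SamgShellPCSetUp presumably triggers SAMG's setup phase,
      building the coarse levels and interpolation operators that are
      extracted into PETSc objects below..*/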

  /*..Parse the SAMG hierarchy to PETSc variables..*/
  levels     = samg_ctx->LEVELS;
  numnonzero = ia[numnodes];
  SamgGetGrid(levels, numnodes, numnonzero, grid, PETSC_NULL);
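  /*..After this call grid[k].A holds the coarse level operators
      (k=2..levels) and grid[k].Interp the interpolation operators, as
      used by the loops below..*/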

  /*..Print the coarser grid and interpolation operators to file..*/
  PetscOptionsHasName(PETSC_NULL,"-samg_print",&issamg_print);

  if (issamg_print){
    sprintf(pathfilename,"./");
    for (k=2;k<=levels;k++){
      sprintf(basefilename,"Pcoarsemat.%02u",k);
      PrintMatrix(grid[k].A, pathfilename, basefilename);
    }
    for (k=1;k<=levels-1;k++){
      sprintf(basefilename,"Pinterpol.%02u%02u",k,k-1);
      PrintMatrix(grid[k].Interp, pathfilename, basefilename);
    }
  }

  /*..Perform a check on the parsing..*/
  PetscOptionsHasName(PETSC_NULL,"-samg_check",&issamg_print);

  if (issamg_print)
    SamgCheckGalerkin(levels, A, grid, PETSC_NULL);

  /*..Set KSP solver type..*/
  KSPSetType(ksp,KSPRICHARDSON);
  KSPSetMonitor(ksp,KSPDefaultMonitor,PETSC_NULL,PETSC_NULL);
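  /*..Richardson iteration preconditioned by MG amounts to stand-alone
      multigrid cycling: every outer iteration applies one multigrid
      cycle to the current residual..*/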

  /*..Set MG preconditioner..*/
  PCSetType(pc,PCMG);
  MGSetLevels(pc,levels,PETSC_NULL);
  MGSetType(pc,MGMULTIPLICATIVE);
  MGSetCycles(pc,1);
  MGSetNumberSmoothUp(pc,1);
  MGSetNumberSmoothDown(pc,1);
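  /*..A cycle count of 1 selects V-cycles (2 would give W-cycles), with
      one pre- and one post-smoothing sweep on every level..*/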

  /*....Set smoother, work vectors and residual calculation on each
        level....*/
  for (k=1;k<=levels;k++){
    petsc_level = levels - k;
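    /*....SAMG numbers levels from 1 (finest) to levels (coarsest),
          whereas PETSc MG numbers them from 0 (coarsest) to levels-1
          (finest); hence the index flip above....*/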

    /*....Get the pre-smoothing KSP context....*/
    MGGetSmootherDown(pc,petsc_level,&grid[k].ksp_pre);

    /*....Get the post-smoothing KSP context....*/
    MGGetSmootherUp(pc,petsc_level,&grid[k].ksp_post);

    if (k==1)
      FineLevelMatrix = A;
    else
      FineLevelMatrix = grid[k].A;
    MatGetSize(FineLevelMatrix, &size, &size); /* square: rows == cols */
    VecCreate(PETSC_COMM_WORLD,&grid[k].x);
    VecSetSizes(grid[k].x,PETSC_DECIDE,size);
    VecSetType(grid[k].x,VECSEQ);
    VecDuplicate(grid[k].x,&grid[k].b);
    VecDuplicate(grid[k].x,&grid[k].r);

    /*....Set the ksp_pre context....*/
    KSPSetOperators(grid[k].ksp_pre, FineLevelMatrix,
                    FineLevelMatrix, DIFFERENT_NONZERO_PATTERN);

    KSPGetPC(grid[k].ksp_pre,&pc_pre);
    KSPSetType(grid[k].ksp_pre, KSPRICHARDSON);
    KSPSetTolerances(grid[k].ksp_pre, 1e-12, 1e-50, 1e7, 1);

    PCSetType(pc_pre, PCSOR);
    PCSORSetSymmetric(pc_pre,SOR_FORWARD_SWEEP);
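    /*....The maximum iteration count of 1 forces exactly one SOR sweep
          per smoothing step; the rtol/atol/dtol values are then
          irrelevant....*/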

    /*....Set the ksp_post context....*/
    KSPSetOperators(grid[k].ksp_post, FineLevelMatrix,
                    FineLevelMatrix, DIFFERENT_NONZERO_PATTERN);

    KSPGetPC(grid[k].ksp_post,&pc_post);
    KSPSetInitialGuessNonzero(grid[k].ksp_post, PETSC_TRUE);
    KSPSetType(grid[k].ksp_post, KSPRICHARDSON);
    KSPSetTolerances(grid[k].ksp_post, 1e-12, 1e-50, 1e7, 1);

    PCSetType(pc_post, PCSOR);
    PCSORSetSymmetric(pc_post,SOR_BACKWARD_SWEEP);
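    /*....Pairing a forward sweep before the coarse grid correction with
          a backward sweep after it makes the smoothing symmetric,
          presumably to keep the overall cycle symmetric....*/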

    MGSetX(pc,petsc_level,grid[k].x);
    MGSetRhs(pc,petsc_level,grid[k].b);
    MGSetR(pc,petsc_level,grid[k].r);
    MGSetResidual(pc,petsc_level,MGDefaultResidual,FineLevelMatrix);
  }

  /*....Create the interpolation between the levels....*/
  for (k=1;k<=levels-1;k++){
    petsc_level = levels - k;
    MGSetInterpolate(pc,petsc_level,grid[k].Interp);
    MGSetRestriction(pc,petsc_level,grid[k].Interp);
  }
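  /*..The same matrix is registered for interpolation and restriction;
      PETSc then applies the transpose when restricting residuals, so
      restriction = interpolation^T (the usual Galerkin choice)..*/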

  /*....Set the coarse grid solver....*/
  MGGetCoarseSolve(pc,&coarsegridksp);
  KSPSetFromOptions(coarsegridksp);
  KSPSetOperators(coarsegridksp, grid[levels].A, grid[levels].A,
                  DIFFERENT_NONZERO_PATTERN);
  KSPGetPC(coarsegridksp,&coarsegridpc);
  KSPSetType(coarsegridksp, KSPPREONLY);
  PCSetType(coarsegridpc, PCLU);
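  /*..KSPPREONLY combined with PCLU performs a single direct LU solve
      on the coarsest level in every cycle..*/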

  /*..Allow the above criteria to be overwritten from the command line..*/
  KSPSetFromOptions(ksp);

  /*..Indicate that we are going to use a non-zero initial solution..*/
  KSPSetInitialGuessNonzero(ksp, PETSC_TRUE);

  /*..Get end time of linear system setup..*/
  PetscGetTime(&v2);
  t_setup = v2 - v1;

  /*..Get start time of linear solve..*/
  PetscGetTime(&v1);

  /*..Solve linear system..*/
  KSPSolve(ksp,b,x);
  KSPGetIterationNumber(ksp,&its);

  /*..Print number of iterations..*/
  PetscPrintf(PETSC_COMM_WORLD,"\n** Number of iterations done = %d \n",
              its);

  /*..Get end time of linear solve..*/
  PetscGetTime(&v2);
  t_solve = v2 - v1;

  printf("\n[PETSc]:Time spent in setup = %e \n",t_setup);
  printf("[PETSc]:Time spent in solve = %e \n",t_solve);
  printf("[PETSc]:Total time = %e \n\n", t_setup + t_solve);

  /*..Copy the PETSc solution back to the calling program..*/
  VecGetArray(x, &sol_array);
  for (I=0;I<numnodes;I++){
    u_approx[I] = sol_array[I];
  }
  VecRestoreArray(x,&sol_array);

  if (debug){
    printf("[PETSc]:The solution \n");
    VecView(x,PETSC_VIEWER_STDOUT_SELF);
    printf("\n");
  }

  /*..Free work space..*/
  PetscFree(nnz_per_row);
  PetscFree(cols);
  SamgShellPCDestroy(samg_ctx);
  VecDestroy(x);
  VecDestroy(b);
  MatDestroy(A);
  KSPDestroy(ksp);
  for (k=2;k<=levels;k++){
    MatDestroy(grid[k].A);
  }
  for (k=1;k<=levels-1;k++){
    MatDestroy(grid[k].Interp);
  }
  for (k=1;k<=levels;k++){ /* work vectors were created on every level */
    VecDestroy(grid[k].b);
    VecDestroy(grid[k].x);
    VecDestroy(grid[k].r);
  }

  PetscFinalize();

  return 0;
}