Actual source code: redundant.c
/*$Id: redundant.c,v 1.29 2001/04/10 19:36:17 bsmith Exp $*/
/*
  This file defines a "solve the problem redundantly on each processor" preconditioner.
*/
#include "src/sles/pc/pcimpl.h"
#include "petscsles.h"
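/*
   Usage sketch (illustrative, not part of this source file): an application would
   normally select this preconditioner through the generic PC interface, e.g.

      SLES sles;
      PC   pc;
      ...
      ierr = SLESGetPC(sles,&pc);CHKERRQ(ierr);
      ierr = PCSetType(pc,PCREDUNDANT);CHKERRQ(ierr);

   The sequential solver created below on each processor can then be configured
   through the "redundant_" options prefix set in PCCreate_Redundant() at the end
   of this file.
*/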
typedef struct {
  PC         pc;                      /* actual preconditioner used on each processor */
  Vec        x,b;                     /* sequential vectors to hold parallel vectors */
  Mat        *mats,*pmats;            /* matrix and optional preconditioner matrix */
  VecScatter scatterin,scatterout;    /* scatter used to move all values to each processor */
  PetscTruth useparallelmat;
} PC_Redundant;
static int PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  int          ierr,rank;
  PetscTruth   isascii,isstring;
  PetscViewer  sviewer;

  ierr = MPI_Comm_rank(pc->comm,&rank);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_STRING,&isstring);CHKERRQ(ierr);
  if (isascii) {
    ierr = PetscViewerASCIIPrintf(viewer," Redundant solver preconditioner: Actual PC follows\n");CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (!rank) {
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = PCView(red->pc,sviewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (!rank) {
      ierr = PCView(red->pc,sviewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,"Viewer type %s not supported for PC redundant",((PetscObject)viewer)->type_name);
  }
  return(0);
}
static int PCSetUp_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  int          ierr,mstart,mlocal,m,size;
  IS           isl;
  MatReuse     reuse = MAT_INITIAL_MATRIX;
  MatStructure str   = DIFFERENT_NONZERO_PATTERN;
  MPI_Comm     comm;

  ierr = PCSetFromOptions(red->pc);CHKERRQ(ierr);
  ierr = VecGetSize(pc->vec,&m);CHKERRQ(ierr);
  if (!pc->setupcalled) {
    ierr = VecGetLocalSize(pc->vec,&mlocal);CHKERRQ(ierr);
    ierr = VecCreateSeq(PETSC_COMM_SELF,m,&red->x);CHKERRQ(ierr);
    ierr = VecDuplicate(red->x,&red->b);CHKERRQ(ierr);
    ierr = PCSetVector(red->pc,red->x);CHKERRQ(ierr);
    if (!red->scatterin) {
      /*
         Create the vectors and vector scatter to get the entire vector onto each processor
      */
      ierr = VecGetOwnershipRange(pc->vec,&mstart,PETSC_NULL);CHKERRQ(ierr);
      ierr = VecScatterCreate(pc->vec,0,red->x,0,&red->scatterin);CHKERRQ(ierr);
      ierr = ISCreateStride(pc->comm,mlocal,mstart,1,&isl);CHKERRQ(ierr);
      ierr = VecScatterCreate(red->x,isl,pc->vec,isl,&red->scatterout);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);
    }
  }

  /* if pmatrix set by the user is sequential then we do not need to gather the parallel matrix */
  ierr = PetscObjectGetComm((PetscObject)pc->pmat,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    red->useparallelmat = PETSC_FALSE;
  }

  if (red->useparallelmat) {
    if (pc->setupcalled == 1 && pc->flag == DIFFERENT_NONZERO_PATTERN) {
      /* destroy old matrices */
      if (red->pmats && red->pmats != red->mats) {
        ierr = MatDestroyMatrices(1,&red->pmats);CHKERRQ(ierr);
      }
      if (red->mats) {
        ierr = MatDestroyMatrices(1,&red->mats);CHKERRQ(ierr);
      }
    } else if (pc->setupcalled == 1) {
      reuse = MAT_REUSE_MATRIX;
      str   = SAME_NONZERO_PATTERN;
    }

    /*
       grab the parallel matrix and put it on each processor
    */
    ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
    ierr = MatGetSubMatrices(pc->mat,1,&isl,&isl,reuse,&red->mats);CHKERRQ(ierr);
    if (pc->pmat != pc->mat) {
      ierr = MatGetSubMatrices(pc->pmat,1,&isl,&isl,reuse,&red->pmats);CHKERRQ(ierr);
    } else {
      red->pmats = red->mats;
    }
    ierr = ISDestroy(isl);CHKERRQ(ierr);

    /* tell sequential PC its operators */
    ierr = PCSetOperators(red->pc,red->mats[0],red->pmats[0],str);CHKERRQ(ierr);
  } else {
    ierr = PCSetOperators(red->pc,pc->mat,pc->pmat,pc->flag);CHKERRQ(ierr);
  }
  ierr = PCSetFromOptions(red->pc);CHKERRQ(ierr);
  ierr = PCSetVector(red->pc,red->b);CHKERRQ(ierr);
  ierr = PCSetUp(red->pc);CHKERRQ(ierr);
  return(0);
}
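/*
   Illustrative sketch (not part of this file): the gather performed above can be
   reproduced in isolation for any assembled parallel matrix A.  An index set that
   selects every row and column, handed to MatGetSubMatrices() on each processor,
   produces a sequential copy of the whole matrix:

      Mat A,*Aseq;
      IS  isl;
      int m,n,ierr;

      ierr = MatGetSize(A,&m,&n);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr);
      ierr = MatGetSubMatrices(A,1,&isl,&isl,MAT_INITIAL_MATRIX,&Aseq);CHKERRQ(ierr);
      ierr = ISDestroy(isl);CHKERRQ(ierr);

   Aseq[0] then holds all of A on this processor, just as red->mats[0] does above
   (A is assumed square, which is why PCSetUp_Redundant() can use the global vector
   size for m).
*/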
static int PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  int          ierr;

  /* move all values to each processor */
  ierr = VecScatterBegin(x,red->b,INSERT_VALUES,SCATTER_FORWARD,red->scatterin);CHKERRQ(ierr);
  ierr = VecScatterEnd(x,red->b,INSERT_VALUES,SCATTER_FORWARD,red->scatterin);CHKERRQ(ierr);

  /* apply preconditioner on each processor */
  ierr = PCApply(red->pc,red->b,red->x);CHKERRQ(ierr);

  /* move local part of values into y vector */
  ierr = VecScatterBegin(red->x,y,INSERT_VALUES,SCATTER_FORWARD,red->scatterout);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->x,y,INSERT_VALUES,SCATTER_FORWARD,red->scatterout);CHKERRQ(ierr);
  return(0);
}
static int PCDestroy_Redundant(PC pc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  int          ierr;

  if (red->scatterin)  {ierr = VecScatterDestroy(red->scatterin);CHKERRQ(ierr);}
  if (red->scatterout) {ierr = VecScatterDestroy(red->scatterout);CHKERRQ(ierr);}
  if (red->x) {ierr = VecDestroy(red->x);CHKERRQ(ierr);}
  if (red->b) {ierr = VecDestroy(red->b);CHKERRQ(ierr);}
  if (red->pmats && red->pmats != red->mats) {
    ierr = MatDestroyMatrices(1,&red->pmats);CHKERRQ(ierr);
  }
  if (red->mats) {
    ierr = MatDestroyMatrices(1,&red->mats);CHKERRQ(ierr);
  }
  ierr = PCDestroy(red->pc);CHKERRQ(ierr);
  ierr = PetscFree(red);CHKERRQ(ierr);
  return(0);
}
static int PCSetFromOptions_Redundant(PC pc)
{
  return(0);
}
EXTERN_C_BEGIN
int PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;
  int          ierr;

  red->scatterin  = in;
  red->scatterout = out;
  ierr = PetscObjectReference((PetscObject)in);CHKERRQ(ierr);
  ierr = PetscObjectReference((PetscObject)out);CHKERRQ(ierr);
  return(0);
}
EXTERN_C_END
/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

.keywords: PC, redundant solve
@*/
int PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  int ierr,(*f)(PC,VecScatter,VecScatter);

  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantSetScatter_C",(void (**)())&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,in,out);CHKERRQ(ierr);
  }
  return(0);
}
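/*
   Usage sketch (illustrative): an application that wants its own gather/return
   pattern can build the two scatters itself and hand them to the PC before
   PCSetUp().  The vectors xpar (parallel) and xseq (sequential, global length)
   and the index set isl are hypothetical stand-ins, mirroring what
   PCSetUp_Redundant() builds above.

      VecScatter tolocal,toglobal;
      ...
      ierr = VecScatterCreate(xpar,0,xseq,0,&tolocal);CHKERRQ(ierr);
      ierr = VecScatterCreate(xseq,isl,xpar,isl,&toglobal);CHKERRQ(ierr);
      ierr = PCRedundantSetScatter(pc,tolocal,toglobal);CHKERRQ(ierr);

   PCRedundantSetScatter_Redundant() references both scatters, so the caller may
   destroy its own handles afterwards.
*/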
EXTERN_C_BEGIN
int PCRedundantGetPC_Redundant(PC pc,PC *innerpc)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  *innerpc = red->pc;
  return(0);
}
EXTERN_C_END
/*@
   PCRedundantGetPC - Gets the sequential PC created by the redundant PC.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerpc - the sequential PC

   Level: advanced

.keywords: PC, redundant solve
@*/
int PCRedundantGetPC(PC pc,PC *innerpc)
{
  int ierr,(*f)(PC,PC*);

  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetPC_C",(void (**)())&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,innerpc);CHKERRQ(ierr);
  }
  return(0);
}
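/*
   Usage sketch (illustrative): the inner sequential PC can be configured directly
   from code rather than through the "redundant_" options prefix, for example to
   force a direct solve on each processor:

      PC innerpc;
      ...
      ierr = PCRedundantGetPC(pc,&innerpc);CHKERRQ(ierr);
      ierr = PCSetType(innerpc,PCLU);CHKERRQ(ierr);
*/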
EXTERN_C_BEGIN
int PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  if (mat)  *mat  = red->mats[0];
  if (pmat) *pmat = red->pmats[0];
  return(0);
}
EXTERN_C_END
/*@
   PCRedundantGetOperators - Gets the sequential matrix and preconditioner matrix

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

.keywords: PC, redundant solve
@*/
int PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  int ierr,(*f)(PC,Mat*,Mat*);

  ierr = PetscObjectQueryFunction((PetscObject)pc,"PCRedundantGetOperators_C",(void (**)())&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(pc,mat,pmat);CHKERRQ(ierr);
  }
  return(0);
}
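/*
   Usage sketch (illustrative): after PCSetUp() the gathered sequential operators
   can be inspected, e.g. to view the redundant copy of the matrix on one processor:

      Mat seqmat,seqpmat;
      ...
      ierr = PCRedundantGetOperators(pc,&seqmat,&seqpmat);CHKERRQ(ierr);
      ierr = MatView(seqmat,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);

   These operators are only meaningful when the parallel matrix was actually
   gathered, i.e. when useparallelmat is PETSC_TRUE in PCSetUp_Redundant() above.
*/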
/* -------------------------------------------------------------------------------------*/
EXTERN_C_BEGIN
int PCCreate_Redundant(PC pc)
{
  int          ierr;
  PC_Redundant *red;
  char         *prefix;

  ierr = PetscNew(PC_Redundant,&red);CHKERRQ(ierr);
  PetscLogObjectMemory(pc,sizeof(PC_Redundant));
  ierr = PetscMemzero(red,sizeof(PC_Redundant));CHKERRQ(ierr);
  red->useparallelmat = PETSC_TRUE;

  /* create the sequential PC that each processor has a copy of */
  ierr = PCCreate(PETSC_COMM_SELF,&red->pc);CHKERRQ(ierr);
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = PCSetOptionsPrefix(red->pc,prefix);CHKERRQ(ierr);
  ierr = PCAppendOptionsPrefix(red->pc,"redundant_");CHKERRQ(ierr);

  pc->ops->apply           = PCApply_Redundant;
  pc->ops->applytranspose  = 0;
  pc->ops->setup           = PCSetUp_Redundant;
  pc->ops->destroy         = PCDestroy_Redundant;
  pc->ops->setfromoptions  = PCSetFromOptions_Redundant;
  pc->ops->setuponblocks   = 0;
  pc->ops->view            = PCView_Redundant;
  pc->ops->applyrichardson = 0;

  pc->data = (void*)red;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantSetScatter_C","PCRedundantSetScatter_Redundant",
                    PCRedundantSetScatter_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetPC_C","PCRedundantGetPC_Redundant",
                    PCRedundantGetPC_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCRedundantGetOperators_C","PCRedundantGetOperators_Redundant",
                    PCRedundantGetOperators_Redundant);CHKERRQ(ierr);

  return(0);
}
EXTERN_C_END
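/*
   Command-line sketch (illustrative): because PCCreate_Redundant() appends the
   "redundant_" prefix to the inner PC, and PCSetUp_Redundant() calls
   PCSetFromOptions() on it, that solver can be controlled from the options
   database, e.g.

      mpirun -np 4 ./app -pc_type redundant -redundant_pc_type ilu

   The program name and the choice of ilu here are hypothetical; the -pc_type
   redundant selection and the -redundant_ prefix follow from the registration
   and prefix set above.
*/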