Actual source code: telescope.c
petsc-3.7.0 2016-04-25
#include <petsc/private/matimpl.h>
#include <petsc/private/pcimpl.h>
#include <petscksp.h> /*I "petscksp.h" I*/
#include <petscdm.h>  /*I "petscdm.h" I*/
#include "telescope.h"
/*
  PCTelescopeSetUp_default()
  PCTelescopeMatCreate_default()

  default

  // scatter in
  x(comm) -> xtmp(comm)

  xred(subcomm) <- xtmp
  yred(subcomm)

  yred(subcomm) -> xtmp

  // scatter out
  xtmp(comm) -> y(comm)
*/
PetscBool isActiveRank(PetscSubcomm scomm)
{
  if (scomm->color == 0) { return PETSC_TRUE; }
  else { return PETSC_FALSE; }
}
DM private_PCTelescopeGetSubDM(PC_Telescope sred)
{
  DM subdm = NULL;

  if (!isActiveRank(sred->psubcomm)) { subdm = NULL; }
  else {
    switch (sred->sr_type) {
    case TELESCOPE_DEFAULT: subdm = NULL;
      break;
    case TELESCOPE_DMDA:    subdm = ((PC_Telescope_DMDACtx*)sred->dm_ctx)->dmrepart;
      break;
    case TELESCOPE_DMPLEX:  subdm = NULL;
      break;
    }
  }
  return(subdm);
}
PetscErrorCode PCTelescopeSetUp_default(PC pc,PC_Telescope sred)
{
  PetscInt   m,M,bs,st,ed;
  Vec        x,xred,yred,xtmp;
  Mat        B;
  MPI_Comm   comm,subcomm;
  VecScatter scatter;
  IS         isin;

  PetscInfo(pc,"PCTelescope: setup (default)\n");
  comm    = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  PCGetOperators(pc,NULL,&B);
  MatGetSize(B,&M,NULL);
  MatGetBlockSize(B,&bs);
  MatCreateVecs(B,&x,NULL);

  /* xred and yred live on the sub-communicator; they are non-NULL only on active ranks */
  xred = NULL;
  m    = 0;
  if (isActiveRank(sred->psubcomm)) {
    VecCreate(subcomm,&xred);
    VecSetSizes(xred,PETSC_DECIDE,M);
    VecSetBlockSize(xred,bs);
    VecSetFromOptions(xred);
    VecGetLocalSize(xred,&m);
  }

  yred = NULL;
  if (isActiveRank(sred->psubcomm)) {
    VecDuplicate(xred,&yred);
  }

  /* xtmp lives on the parent communicator, with a local size matching that of xred */
  VecCreate(comm,&xtmp);
  VecSetSizes(xtmp,m,PETSC_DECIDE);
  VecSetBlockSize(xtmp,bs);
  VecSetType(xtmp,((PetscObject)x)->type_name);

  /* active ranks select the entries owned by xred; idle ranks select nothing */
  if (isActiveRank(sred->psubcomm)) {
    VecGetOwnershipRange(xred,&st,&ed);
    ISCreateStride(comm,(ed-st),st,1,&isin);
  } else {
    VecGetOwnershipRange(x,&st,&ed);
    ISCreateStride(comm,0,st,1,&isin);
  }
  ISSetBlockSize(isin,bs);

  VecScatterCreate(x,isin,xtmp,NULL,&scatter);

  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  VecDestroy(&x);
  return(0);
}
PetscErrorCode PCTelescopeMatCreate_default(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
{
  MPI_Comm comm,subcomm;
  Mat      Bred,B;
  PetscInt nr,nc;
  IS       isrow,iscol;
  Mat      Blocal,*_Blocal;

  PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (default)\n");
  PetscObjectGetComm((PetscObject)pc,&comm);
  subcomm = PetscSubcommChild(sred->psubcomm);
  PCGetOperators(pc,NULL,&B);
  MatGetSize(B,&nr,&nc);
  isrow = sred->isin;
  ISCreateStride(comm,nc,0,1,&iscol);
  MatGetSubMatrices(B,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);
  Blocal = *_Blocal;
  PetscFree(_Blocal);
  Bred = NULL;
  if (isActiveRank(sred->psubcomm)) {
    PetscInt mm;

    if (reuse != MAT_INITIAL_MATRIX) { Bred = *A; }
    MatGetSize(Blocal,&mm,NULL);
    MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);
  }
  *A = Bred;
  ISDestroy(&iscol);
  MatDestroy(&Blocal);
  return(0);
}
PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc,PC_Telescope sred,Mat sub_mat)
{
  MatNullSpace nullspace,sub_nullspace;
  Mat          A,B;
  PetscBool    has_const;
  PetscInt     i,k,n = 0;
  const Vec    *vecs;
  Vec          *sub_vecs = NULL;
  MPI_Comm     subcomm;

  PCGetOperators(pc,&A,&B);
  MatGetNullSpace(B,&nullspace);
  if (!nullspace) return(0);

  PetscInfo(pc,"PCTelescope: generating nullspace (default)\n");
  subcomm = PetscSubcommChild(sred->psubcomm);
  MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);

  if (isActiveRank(sred->psubcomm)) {
    if (n) {
      VecDuplicateVecs(sred->xred,n,&sub_vecs);
    }
  }

  /* copy entries */
  for (k=0; k<n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt          st,ed;

    /* pull in vector x->xtmp */
    VecScatterBegin(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(sred->scatter,vecs[k],sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);
    /* copy vector entries into xred */
    VecGetArrayRead(sred->xtmp,&x_array);
    if (sub_vecs && sub_vecs[k]) { /* sub_vecs is NULL on ranks which are idle on the sub-communicator */
      VecGetOwnershipRange(sub_vecs[k],&st,&ed);
      VecGetArray(sub_vecs[k],&LA_sub_vec);
      for (i=0; i<ed-st; i++) {
        LA_sub_vec[i] = x_array[i];
      }
      VecRestoreArray(sub_vecs[k],&LA_sub_vec);
    }
    VecRestoreArrayRead(sred->xtmp,&x_array);
  }

  if (isActiveRank(sred->psubcomm)) {
    /* create new nullspace for redundant object */
    MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,&sub_nullspace);
    sub_nullspace->remove = nullspace->remove;
    sub_nullspace->rmctx  = nullspace->rmctx;

    /* attach redundant nullspace to Bred */
    MatSetNullSpace(sub_mat,sub_nullspace);
    VecDestroyVecs(n,&sub_vecs);
  }
  return(0);
}
static PetscErrorCode PCView_Telescope(PC pc,PetscViewer viewer)
{
  PC_Telescope sred = (PC_Telescope)pc->data;
  PetscBool    iascii,isstring;
  PetscViewer  subviewer;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
  if (iascii) {
    if (!sred->psubcomm) {
      PetscViewerASCIIPrintf(viewer," Telescope: preconditioner not yet setup\n");
    } else {
      MPI_Comm    comm,subcomm;
      PetscMPIInt comm_size,subcomm_size;
      DM          dm,subdm;

      PCGetDM(pc,&dm);
      subdm   = private_PCTelescopeGetSubDM(sred);
      comm    = PetscSubcommParent(sred->psubcomm);
      subcomm = PetscSubcommChild(sred->psubcomm);
      MPI_Comm_size(comm,&comm_size);
      MPI_Comm_size(subcomm,&subcomm_size);

      PetscViewerASCIIPrintf(viewer," Telescope: parent comm size reduction factor = %D\n",sred->redfactor);
      PetscViewerASCIIPrintf(viewer," Telescope: comm_size = %d , subcomm_size = %d\n",(int)comm_size,(int)subcomm_size);
      PetscViewerGetSubViewer(viewer,subcomm,&subviewer);
      if (isActiveRank(sred->psubcomm)) {
        PetscViewerASCIIPushTab(subviewer);

        if (dm && sred->ignore_dm) {
          PetscViewerASCIIPrintf(subviewer," Telescope: ignoring DM\n");
        }
        if (sred->ignore_kspcomputeoperators) {
          PetscViewerASCIIPrintf(subviewer," Telescope: ignoring KSPComputeOperators\n");
        }
        switch (sred->sr_type) {
        case TELESCOPE_DEFAULT:
          PetscViewerASCIIPrintf(subviewer," Telescope: using default setup\n");
          break;
        case TELESCOPE_DMDA:
          PetscViewerASCIIPrintf(subviewer," Telescope: DMDA detected\n");
          DMView_DMDAShort(subdm,subviewer);
          break;
        case TELESCOPE_DMPLEX:
          PetscViewerASCIIPrintf(subviewer," Telescope: DMPLEX detected\n");
          break;
        }

        KSPView(sred->ksp,subviewer);
        PetscViewerASCIIPopTab(subviewer);
      }
      PetscViewerRestoreSubViewer(viewer,subcomm,&subviewer);
    }
  }
  return(0);
}
static PetscErrorCode PCSetUp_Telescope(PC pc)
{
  PC_Telescope    sred = (PC_Telescope)pc->data;
  MPI_Comm        comm,subcomm=0;
  PCTelescopeType sr_type;

  PetscObjectGetComm((PetscObject)pc,&comm);

  /* subcomm definition */
  if (!pc->setupcalled) {
    if (!sred->psubcomm) {
      PetscSubcommCreate(comm,&sred->psubcomm);
      PetscSubcommSetNumber(sred->psubcomm,sred->redfactor);
      PetscSubcommSetType(sred->psubcomm,PETSC_SUBCOMM_INTERLACED);
      /* disable runtime switch of psubcomm type, e.g., '-psubcomm_type interlaced' */
      /* PetscSubcommSetFromOptions(sred->psubcomm); */
      PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));
      /* create a new PC that processors in each subcomm have copy of */
    }
  }
  subcomm = PetscSubcommChild(sred->psubcomm);

  /* internal KSP */
  if (!pc->setupcalled) {
    const char *prefix;

    if (isActiveRank(sred->psubcomm)) {
      KSPCreate(subcomm,&sred->ksp);
      KSPSetErrorIfNotConverged(sred->ksp,pc->erroriffailure);
      PetscObjectIncrementTabLevel((PetscObject)sred->ksp,(PetscObject)pc,1);
      PetscLogObjectParent((PetscObject)pc,(PetscObject)sred->ksp);
      KSPSetType(sred->ksp,KSPPREONLY);
      PCGetOptionsPrefix(pc,&prefix);
      KSPSetOptionsPrefix(sred->ksp,prefix);
      KSPAppendOptionsPrefix(sred->ksp,"telescope_");
    }
  }
  /* determine type of setup/update */
  if (!pc->setupcalled) {
    PetscBool has_dm,same;
    DM        dm;

    sr_type = TELESCOPE_DEFAULT;
    has_dm  = PETSC_FALSE;
    PCGetDM(pc,&dm);
    if (dm) { has_dm = PETSC_TRUE; }
    if (has_dm) {
      /* check for dmda */
      PetscObjectTypeCompare((PetscObject)dm,DMDA,&same);
      if (same) {
        PetscInfo(pc,"PCTelescope: found DMDA\n");
        sr_type = TELESCOPE_DMDA;
      }
      /* check for dmplex */
      PetscObjectTypeCompare((PetscObject)dm,DMPLEX,&same);
      if (same) {
        PetscInfo(pc,"PCTelescope: found DMPLEX\n");
        sr_type = TELESCOPE_DMPLEX;
      }
    }

    if (sred->ignore_dm) {
      PetscInfo(pc,"PCTelescope: ignore DM\n");
      sr_type = TELESCOPE_DEFAULT;
    }
    sred->sr_type = sr_type;
  } else {
    sr_type = sred->sr_type;
  }

  /* set function pointers for repartition setup, matrix creation/update, matrix nullspace and reset functionality */
  switch (sr_type) {
  case TELESCOPE_DEFAULT:
    sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
    sred->pctelescope_reset_type              = NULL;
    break;
  case TELESCOPE_DMDA:
    pc->ops->apply           = PCApply_Telescope_dmda;
    pc->ops->applyrichardson = PCApplyRichardson_Telescope_dmda;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
    sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
    break;
  case TELESCOPE_DMPLEX: SETERRQ(comm,PETSC_ERR_SUP,"Support for DMPLEX is currently not available");
    break;
  default: SETERRQ(comm,PETSC_ERR_SUP,"Only support for repartitioning DMDA is provided");
    break;
  }

  /* setup */
  if (sred->pctelescope_setup_type) {
    sred->pctelescope_setup_type(pc,sred);
  }
  /* update */
  if (!pc->setupcalled) {
    if (sred->pctelescope_matcreate_type) {
      sred->pctelescope_matcreate_type(pc,sred,MAT_INITIAL_MATRIX,&sred->Bred);
    }
    if (sred->pctelescope_matnullspacecreate_type) {
      sred->pctelescope_matnullspacecreate_type(pc,sred,sred->Bred);
    }
  } else {
    if (sred->pctelescope_matcreate_type) {
      sred->pctelescope_matcreate_type(pc,sred,MAT_REUSE_MATRIX,&sred->Bred);
    }
  }

  /* common - no construction */
  if (isActiveRank(sred->psubcomm)) {
    KSPSetOperators(sred->ksp,sred->Bred,sred->Bred);
    if (pc->setfromoptionscalled && !pc->setupcalled) {
      KSPSetFromOptions(sred->ksp);
    }
  }
  return(0);
}
static PetscErrorCode PCApply_Telescope(PC pc,Vec x,Vec y)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  Vec               xtmp,xred,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp */
  VecScatterBegin(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(scatter,x,xtmp,INSERT_VALUES,SCATTER_FORWARD);

  /* copy vector entries into xred */
  VecGetArrayRead(xtmp,&x_array);
  if (xred) {
    PetscScalar *LA_xred;
    VecGetOwnershipRange(xred,&st,&ed);
    VecGetArray(xred,&LA_xred);
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    VecRestoreArray(xred,&LA_xred);
  }
  VecRestoreArrayRead(xtmp,&x_array);
  /* solve */
  if (isActiveRank(sred->psubcomm)) {
    KSPSolve(sred->ksp,xred,yred);
  }
  /* return vector */
  VecGetArray(xtmp,&array);
  if (yred) {
    const PetscScalar *LA_yred;
    VecGetOwnershipRange(yred,&st,&ed);
    VecGetArrayRead(yred,&LA_yred);
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    VecRestoreArrayRead(yred,&LA_yred);
  }
  VecRestoreArray(xtmp,&array);
  VecScatterBegin(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);
  VecScatterEnd(scatter,xtmp,y,INSERT_VALUES,SCATTER_REVERSE);
  return(0);
}
static PetscErrorCode PCApplyRichardson_Telescope(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol,PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  PC_Telescope      sred = (PC_Telescope)pc->data;
  Vec               xtmp,yred;
  PetscInt          i,st,ed;
  VecScatter        scatter;
  const PetscScalar *x_array;
  PetscBool         default_init_guess_value;

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  yred    = sred->yred;

  if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope only supports max_it = 1");
  *reason = (PCRichardsonConvergedReason)0;

  if (!zeroguess) {
    PetscInfo(pc,"PCTelescope: Scattering y for non-zero initial guess\n");
    /* pull in vector y->xtmp */
    VecScatterBegin(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(scatter,y,xtmp,INSERT_VALUES,SCATTER_FORWARD);

    /* copy vector entries into yred */
    VecGetArrayRead(xtmp,&x_array);
    if (yred) {
      PetscScalar *LA_yred;
      VecGetOwnershipRange(yred,&st,&ed);
      VecGetArray(yred,&LA_yred);
      for (i=0; i<ed-st; i++) {
        LA_yred[i] = x_array[i];
      }
      VecRestoreArray(yred,&LA_yred);
    }
    VecRestoreArrayRead(xtmp,&x_array);
  }

  if (isActiveRank(sred->psubcomm)) {
    KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);
    if (!zeroguess) KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);
  }

  PCApply_Telescope(pc,x,y);

  if (isActiveRank(sred->psubcomm)) {
    KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);
  }

  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = 1;
  return(0);
}
static PetscErrorCode PCReset_Telescope(PC pc)
{
  PC_Telescope sred = (PC_Telescope)pc->data;

  ISDestroy(&sred->isin);
  VecScatterDestroy(&sred->scatter);
  VecDestroy(&sred->xred);
  VecDestroy(&sred->yred);
  VecDestroy(&sred->xtmp);
  MatDestroy(&sred->Bred);
  KSPReset(sred->ksp);
  if (sred->pctelescope_reset_type) {
    sred->pctelescope_reset_type(pc);
  }
  return(0);
}
static PetscErrorCode PCDestroy_Telescope(PC pc)
{
  PC_Telescope sred = (PC_Telescope)pc->data;

  PCReset_Telescope(pc);
  KSPDestroy(&sred->ksp);
  PetscSubcommDestroy(&sred->psubcomm);
  PetscFree(sred->dm_ctx);
  PetscFree(pc->data);
  return(0);
}
static PetscErrorCode PCSetFromOptions_Telescope(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_Telescope sred = (PC_Telescope)pc->data;
  MPI_Comm     comm;
  PetscMPIInt  size;

  PetscObjectGetComm((PetscObject)pc,&comm);
  MPI_Comm_size(comm,&size);
  PetscOptionsHead(PetscOptionsObject,"Telescope options");
  PetscOptionsInt("-pc_telescope_reduction_factor","Factor to reduce comm size by","PCTelescopeSetReductionFactor",sred->redfactor,&sred->redfactor,0);
  if (sred->redfactor > size) SETERRQ(comm,PETSC_ERR_ARG_WRONG,"-pc_telescope_reduction_factor <= comm size");
  PetscOptionsBool("-pc_telescope_ignore_dm","Ignore any DM attached to the PC","PCTelescopeSetIgnoreDM",sred->ignore_dm,&sred->ignore_dm,0);
  PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators","Ignore method used to compute A","PCTelescopeSetIgnoreKSPComputeOperators",sred->ignore_kspcomputeoperators,&sred->ignore_kspcomputeoperators,0);
  PetscOptionsTail();
  return(0);
}
/* PC implementation-specific APIs */
static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc,KSP *ksp)
{
  PC_Telescope red = (PC_Telescope)pc->data;

  if (ksp) *ksp = red->ksp;
  return(0);
}

static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc,PetscInt *fact)
{
  PC_Telescope red = (PC_Telescope)pc->data;

  if (fact) *fact = red->redfactor;
  return(0);
}

static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc,PetscInt fact)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscMPIInt  size;

  MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
  if (fact <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be positive",fact);
  if (fact > size) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Reduction factor of telescoping PC %D must be <= comm.size",fact);
  red->redfactor = fact;
  return(0);
}

static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc,PetscBool *v)
{
  PC_Telescope red = (PC_Telescope)pc->data;

  if (v) *v = red->ignore_dm;
  return(0);
}

static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc,PetscBool v)
{
  PC_Telescope red = (PC_Telescope)pc->data;

  red->ignore_dm = v;
  return(0);
}

static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool *v)
{
  PC_Telescope red = (PC_Telescope)pc->data;

  if (v) *v = red->ignore_kspcomputeoperators;
  return(0);
}

static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc,PetscBool v)
{
  PC_Telescope red = (PC_Telescope)pc->data;

  red->ignore_kspcomputeoperators = v;
  return(0);
}

static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc,DM *dm)
{
  PC_Telescope red = (PC_Telescope)pc->data;

  *dm = private_PCTelescopeGetSubDM(red);
  return(0);
}
/*@
   PCTelescopeGetKSP - Gets the KSP created by the telescoping PC.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  subksp - the KSP defined on the smaller set of processes

   Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeGetKSP(PC pc,KSP *subksp)
{
  PetscUseMethod(pc,"PCTelescopeGetKSP_C",(PC,KSP*),(pc,subksp));
  return(0);
}
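
/*
   Usage sketch (illustrative; the choice of KSPCG for the inner solver is an example, not a
   requirement). After PCSetUp(), the sub KSP can be retrieved and customised on the ranks
   which participate in the sub-communicator; subksp is NULL on the idle ranks.

     KSP subksp;
     PCTelescopeGetKSP(pc,&subksp);
     if (subksp) {
       KSPSetType(subksp,KSPCG);
     }
*/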
/*@
   PCTelescopeGetReductionFactor - Gets the factor by which the original number of processes has been reduced.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  fact - the reduction factor

   Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc,PetscInt *fact)
{
  PetscUseMethod(pc,"PCTelescopeGetReductionFactor_C",(PC,PetscInt*),(pc,fact));
  return(0);
}
/*@
   PCTelescopeSetReductionFactor - Sets the factor by which the original number of processes will be reduced.

   Not Collective

   Input Parameters:
+  pc - the preconditioner context
-  fact - the reduction factor

   Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc,PetscInt fact)
{
  PetscTryMethod(pc,"PCTelescopeSetReductionFactor_C",(PC,PetscInt),(pc,fact));
  return(0);
}
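
/*
   Usage sketch (illustrative values): reduce a 64 rank communicator to a 16 rank
   sub-communicator for the inner solve. This must be called before the PC is set up.

     PCSetType(pc,PCTELESCOPE);
     PCTelescopeSetReductionFactor(pc,4);  // sub-comm size = comm size / 4
*/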
/*@
   PCTelescopeGetIgnoreDM - Get the flag indicating if any DM attached to the PC will be used.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  v - the flag

   Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc,PetscBool *v)
{
  PetscUseMethod(pc,"PCTelescopeGetIgnoreDM_C",(PC,PetscBool*),(pc,v));
  return(0);
}
/*@
   PCTelescopeSetIgnoreDM - Set a flag to ignore any DM attached to the PC.

   Not Collective

   Input Parameters:
+  pc - the preconditioner context
-  v - use PETSC_TRUE to ignore any DM

   Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc,PetscBool v)
{
  PetscTryMethod(pc,"PCTelescopeSetIgnoreDM_C",(PC,PetscBool),(pc,v));
  return(0);
}
/*@
   PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if KSPComputeOperators will be used.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  v - the flag

   Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc,PetscBool *v)
{
  PetscUseMethod(pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",(PC,PetscBool*),(pc,v));
  return(0);
}
/*@
   PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore KSPComputeOperators.

   Not Collective

   Input Parameters:
+  pc - the preconditioner context
-  v - use PETSC_TRUE to ignore the method (if defined) set via KSPSetComputeOperators on pc

   Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc,PetscBool v)
{
  PetscTryMethod(pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",(PC,PetscBool),(pc,v));
  return(0);
}
/*@
   PCTelescopeGetDM - Get the re-partitioned DM attached to the sub KSP.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  subdm - the re-partitioned DM

   Level: advanced

.keywords: PC, telescoping solve
@*/
PetscErrorCode PCTelescopeGetDM(PC pc,DM *subdm)
{
  PetscUseMethod(pc,"PCTelescopeGetDM_C",(PC,DM*),(pc,subdm));
  return(0);
}
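
/*
   Usage sketch (illustrative): after PCSetUp() with a DMDA attached to the PC, the
   re-partitioned DM can be inspected on the active ranks; subdm is NULL on the idle
   ranks and in the default (no DM) setup.

     DM subdm;
     PCTelescopeGetDM(pc,&subdm);
     if (subdm) {
       DMView(subdm,PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)subdm)));
     }
*/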
/* -------------------------------------------------------------------------------------*/
/*MC
   PCTELESCOPE - Runs a KSP solver on a sub-group of processes. MPI processes not in the sub-communicator are idle during the solve.

   Options Database:
+  -pc_telescope_reduction_factor <n> - factor to reduce the communicator size by. For example, if you are using 64 MPI processes and
                                        an n of 4, the new sub-communicator will be defined with 64/4 = 16 processes.
-  -pc_telescope_ignore_dm <false> - flag to indicate whether an attached DM should be ignored.
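
   Example usage (illustrative; the outer and inner solver choices are arbitrary): run on 64
   processes and apply the preconditioner via block Jacobi on a 16 process sub-communicator,
.vb
   mpiexec -n 64 ./app -ksp_type fgmres -pc_type telescope \
       -pc_telescope_reduction_factor 4 \
       -telescope_ksp_type preonly -telescope_pc_type bjacobi
.ve
   Options for the sub KSP are prefixed with -telescope_.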
   Level: advanced

   Notes:
   The preconditioner is deemed telescopic as it only calls KSPSolve() on a single
   sub-communicator, in contrast with PCREDUNDANT which calls KSPSolve() on N sub-communicators.
   This means there will be MPI processes within c which are idle during the application of this preconditioner.

   The default KSP is PREONLY. If a DM is attached to the PC, it is re-partitioned on the sub-communicator.
   Both the Bmat operator and the right-hand side vector are permuted into the new DOF ordering defined by the re-partitioned DM.
   Currently only support for re-partitioning a DMDA is provided.
   Any nullspace attached to the original Bmat operator is extracted, re-partitioned and set on the re-partitioned Bmat operator.
   KSPSetComputeOperators() is not propagated to the sub KSP.
   Currently there is no support for the flag -pc_use_amat.
   Assuming that the parent preconditioner (PC) is defined on a communicator c, this implementation
   creates a child sub-communicator (c') containing fewer MPI processes than the original parent preconditioner (PC).
   Developer Notes:
   During PCSetUp(), the B operator is scattered onto c'.
   Within PCApply(), the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
   Then KSPSolve() is executed on the c' communicator.

   The communicator used within the telescoping preconditioner is defined by a PetscSubcomm using the INTERLACED
   creation routine. We run the sub KSP on only the ranks within the communicator which have a color equal to zero.
   The telescoping preconditioner is aware of nullspaces attached to the B operator.
   In the case where B has a nullspace attached, the nullspace vectors are extracted from B and mapped into
   a new nullspace (defined on the sub-communicator), which is then attached to B' (the B operator which was scattered to c').

   The telescoping preconditioner is aware of an attached DM. In the event that the DM is of type DMDA (2D or 3D;
   support for 1D DMDAs is not provided), a new DMDA is created on c' (i.e. it is re-partitioned), and this new DM
   is attached to the sub KSP. The design of telescope is such that it should be possible to extend support
   for re-partitioning other DMs (e.g. DMPLEX). The user can supply a flag to ignore attached DMs.
   By default, B' is defined by simply fusing rows from different MPI processes.

   When a DMDA is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
   into the ordering defined by the DMDA on c', (ii) extracting the local chunks via MatGetSubMatrices(), (iii) fusing the
   local (sequential) matrices defined on the ranks common to c and c' into B' using MatCreateMPIMatConcatenateSeqMat().
   Limitations/improvements:
   VecPlaceArray() could be used within PCApply() to improve efficiency and reduce memory usage, as sketched below.
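
   A minimal sketch of this idea (a possible improvement, not what the current implementation does);
   on the active ranks the local entries of xtmp and xred coincide, so the copy loop in
   PCApply_Telescope() could be replaced by aliasing the storage of xtmp,
.vb
   PetscScalar *a;
   VecGetArray(xtmp,&a);
   VecPlaceArray(xred,a);    // xred temporarily borrows xtmp's local storage; no copy
   KSPSolve(ksp,xred,yred);
   VecResetArray(xred);      // give xred back its own storage
   VecRestoreArray(xtmp,&a);
.ve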
   The symmetric permutation used when a DMDA is encountered is performed via explicitly assembling a permutation matrix P,
   and computing P^T.A.P. It might be more efficient to use MatPermute(); I opted for P^T.A.P as it appears
   VecPermute() is not supported for the use case required here. By computing P, I can permute both the operator and RHS in a
   consistent manner.
   Mapping of vectors is performed in the following way.
   Suppose the parent comm size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
   Using the interlaced creation routine, the ranks in c with color = 0 will be ranks 0 and 2.
   We perform the scatter to the sub-comm in the following way.
   [1] Given a vector x defined on comm c,

   rank(c)  : _________ 0 ______  ________ 1 ________  __________ 2 ___________  __________ 3 ___________
   x        : [0, 1, 2, 3, 4, 5]  [6, 7, 8, 9, 10, 11]  [12, 13, 14, 15, 16, 17]  [18, 19, 20, 21, 22, 23]

   scatter into xtmp, defined also on comm c, so that we have the following values

   rank(c)  : ___________________ 0 _________________  _1_  ______________________ 2 ________________________  _3_
   xtmp     : [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]   [ ]  [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]   [ ]

   The entries on ranks 1 and 3 (ranks which do not have color = 0 in c') have no values.

   [2] Copy the values from ranks 0 and 2 (indices with respect to comm c) into the vector xred, which is defined on communicator c'.
   Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.

   rank(c') : ___________________ 0 _________________  ______________________ 1 ________________________
   xred     : [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]   [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]
   Contributed by Dave May

.seealso: PCTelescopeGetKSP(), PCTelescopeGetDM(), PCTelescopeGetReductionFactor(), PCTelescopeSetReductionFactor(), PCTelescopeGetIgnoreDM(), PCTelescopeSetIgnoreDM(), PCREDUNDANT
M*/
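
/*
   End-to-end usage sketch (illustrative; the matrix A, vectors b and x, and the DMDA dm
   are hypothetical objects assumed to be created by the caller):

     KSP ksp;
     PC  pc;
     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetDM(ksp,dm);                     // optional; telescope will re-partition the DMDA
     KSPSetDMActive(ksp,PETSC_FALSE);      // the operator is supplied explicitly below
     KSPSetOperators(ksp,A,A);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCTELESCOPE);
     PCTelescopeSetReductionFactor(pc,4);  // inner solve runs on comm size / 4 ranks
     KSPSetFromOptions(ksp);
     KSPSolve(ksp,b,x);
     KSPDestroy(&ksp);
*/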
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
{
  struct _PC_Telescope *sred;

  PetscNewLog(pc,&sred);
  sred->redfactor                  = 1;
  sred->ignore_dm                  = PETSC_FALSE;
  sred->ignore_kspcomputeoperators = PETSC_FALSE;
  pc->data                         = (void*)sred;

  pc->ops->apply           = PCApply_Telescope;
  pc->ops->applytranspose  = NULL;
  pc->ops->applyrichardson = PCApplyRichardson_Telescope;
  pc->ops->setup           = PCSetUp_Telescope;
  pc->ops->destroy         = PCDestroy_Telescope;
  pc->ops->reset           = PCReset_Telescope;
  pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
  pc->ops->view            = PCView_Telescope;

  sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
  sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
  sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
  sred->pctelescope_reset_type              = NULL;

  PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetKSP_C",PCTelescopeGetKSP_Telescope);
  PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetReductionFactor_C",PCTelescopeGetReductionFactor_Telescope);
  PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetReductionFactor_C",PCTelescopeSetReductionFactor_Telescope);
  PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreDM_C",PCTelescopeGetIgnoreDM_Telescope);
  PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreDM_C",PCTelescopeSetIgnoreDM_Telescope);
  PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetIgnoreKSPComputeOperators_C",PCTelescopeGetIgnoreKSPComputeOperators_Telescope);
  PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeSetIgnoreKSPComputeOperators_C",PCTelescopeSetIgnoreKSPComputeOperators_Telescope);
  PetscObjectComposeFunction((PetscObject)pc,"PCTelescopeGetDM_C",PCTelescopeGetDM_Telescope);
  return(0);
}