Actual source code: precon.c
petsc-dev 2014-02-02
2: /*
3: The PC (preconditioner) interface routines, callable by users.
4: */
5: #include <petsc-private/pcimpl.h> /*I "petscksp.h" I*/
6: #include <petscdm.h>
8: /* Logging support */
9: PetscClassId PC_CLASSID;
10: PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
11: PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks, PC_ApplyOnMproc;
PetscErrorCode PCGetDefaultType_Private(PC pc,const char *type[])
{
  PetscMPIInt size;
  PetscBool   flg1,flg2,set,flg3;

  /* Choose a sensible default PC type from the matrix capabilities and the
     communicator size; called by PCSetUp() when the user never set a type. */
  MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
  if (pc->pmat) {
    PetscErrorCode (*f)(Mat,MatReuse,Mat*);
    /* A registered MatGetDiagonalBlock_C method indicates the matrix type can
       supply per-process diagonal blocks, i.e. block Jacobi is usable. */
    PetscObjectQueryFunction((PetscObject)pc->pmat,"MatGetDiagonalBlock_C",&f);
    if (size == 1) {
      MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);
      MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);
      MatIsSymmetricKnown(pc->pmat,&set,&flg3);
      /* Prefer ICC only when ILU is unavailable or the matrix is known symmetric. */
      if (flg1 && (!flg2 || (set && flg3))) {
        *type = PCICC;
      } else if (flg2) {
        *type = PCILU;
      } else if (f) { /* likely is a parallel matrix run on one processor */
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    } else {
      if (f) {
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    }
  } else {
    /* No matrix set yet: fall back to defaults based only on communicator size. */
    if (size == 1) {
      *type = PCILU;
    } else {
      *type = PCBJACOBI;
    }
  }
  return(0);
}
58: /*@
59: PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats
61: Collective on PC
63: Input Parameter:
64: . pc - the preconditioner context
66: Level: developer
68: Notes: This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC
70: .keywords: PC, destroy
72: .seealso: PCCreate(), PCSetUp()
73: @*/
PetscErrorCode PCReset(PC pc)
{
  /* Return the PC to the "never set up" state (setupcalled = 0) and release
     the vectors/matrices it holds, so it can be reused for a different-sized
     system while keeping any previously set options. */
  if (pc->ops->reset) {
    (*pc->ops->reset)(pc);   /* implementation-specific cleanup first */
  }
  VecDestroy(&pc->diagonalscaleright);
  VecDestroy(&pc->diagonalscaleleft);
  MatDestroy(&pc->pmat);
  MatDestroy(&pc->mat);

  pc->setupcalled = 0;       /* 0 == never set up; see the state table above PCSetUp() */
  return(0);
}
94: /*@
95: PCDestroy - Destroys PC context that was created with PCCreate().
97: Collective on PC
99: Input Parameter:
100: . pc - the preconditioner context
102: Level: developer
104: .keywords: PC, destroy
106: .seealso: PCCreate(), PCSetUp()
107: @*/
PetscErrorCode PCDestroy(PC *pc)
{
  if (!*pc) return(0);
  /* Only the last reference actually destroys the object; earlier callers
     just drop their reference and get a zeroed-out pointer back. */
  if (--((PetscObject)(*pc))->refct > 0) {*pc = 0; return(0);}

  PCReset(*pc);

  /* if memory was published with SAWs then destroy it */
  PetscObjectSAWsViewOff((PetscObject)*pc);
  if ((*pc)->ops->destroy) {(*(*pc)->ops->destroy)((*pc));}
  DMDestroy(&(*pc)->dm);
  PetscHeaderDestroy(pc);    /* frees the object header and nulls *pc */
  return(0);
}
129: /*@C
130: PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
131: scaling as needed by certain time-stepping codes.
133: Logically Collective on PC
135: Input Parameter:
136: . pc - the preconditioner context
138: Output Parameter:
139: . flag - PETSC_TRUE if it applies the scaling
141: Level: developer
143: Notes: If this returns PETSC_TRUE then the system solved via the Krylov method is
144: $ D M A D^{-1} y = D M b for left preconditioning or
145: $ D A M D^{-1} z = D b for right preconditioning
147: .keywords: PC
149: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale()
150: @*/
151: PetscErrorCode PCGetDiagonalScale(PC pc,PetscBool *flag)
152: {
156: *flag = pc->diagonalscale;
157: return(0);
158: }
162: /*@
163: PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
164: scaling as needed by certain time-stepping codes.
166: Logically Collective on PC
168: Input Parameters:
169: + pc - the preconditioner context
170: - s - scaling vector
172: Level: intermediate
174: Notes: The system solved via the Krylov method is
175: $ D M A D^{-1} y = D M b for left preconditioning or
176: $ D A M D^{-1} z = D b for right preconditioning
178: PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
180: .keywords: PC
182: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale()
183: @*/
PetscErrorCode PCSetDiagonalScale(PC pc,Vec s)
{
  pc->diagonalscale = PETSC_TRUE;

  /* Take a reference on s BEFORE destroying the previous scaling vector,
     so that passing the already-stored vector does not free it prematurely. */
  PetscObjectReference((PetscObject)s);
  VecDestroy(&pc->diagonalscaleleft);

  pc->diagonalscaleleft = s;

  /* The right scaling vector is the elementwise reciprocal D^{-1} of s. */
  VecDuplicate(s,&pc->diagonalscaleright);
  VecCopy(s,pc->diagonalscaleright);
  VecReciprocal(pc->diagonalscaleright);
  return(0);
}
206: /*@
207: PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.
209: Logically Collective on PC
211: Input Parameters:
212: + pc - the preconditioner context
213: . in - input vector
214: - out - scaled vector (may be the same as in)
216: Level: intermediate
218: Notes: The system solved via the Krylov method is
219: $ D M A D^{-1} y = D M b for left preconditioning or
220: $ D A M D^{-1} z = D b for right preconditioning
222: PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
224: If diagonal scaling is turned off and in is not out then in is copied to out
226: .keywords: PC
228: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleSet(), PCDiagonalScaleRight(), PCDiagonalScale()
229: @*/
230: PetscErrorCode PCDiagonalScaleLeft(PC pc,Vec in,Vec out)
231: {
238: if (pc->diagonalscale) {
239: VecPointwiseMult(out,pc->diagonalscaleleft,in);
240: } else if (in != out) {
241: VecCopy(in,out);
242: }
243: return(0);
244: }
248: /*@
249: PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.
251: Logically Collective on PC
253: Input Parameters:
254: + pc - the preconditioner context
255: . in - input vector
256: - out - scaled vector (may be the same as in)
258: Level: intermediate
260: Notes: The system solved via the Krylov method is
261: $ D M A D^{-1} y = D M b for left preconditioning or
262: $ D A M D^{-1} z = D b for right preconditioning
264: PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
266: If diagonal scaling is turned off and in is not out then in is copied to out
268: .keywords: PC
270: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleSet(), PCDiagonalScale()
271: @*/
272: PetscErrorCode PCDiagonalScaleRight(PC pc,Vec in,Vec out)
273: {
280: if (pc->diagonalscale) {
281: VecPointwiseMult(out,pc->diagonalscaleright,in);
282: } else if (in != out) {
283: VecCopy(in,out);
284: }
285: return(0);
286: }
290: /*@
291: PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the
292: operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
293: TSSetIJacobian(), SNESSetJacobian(), KSPSetOperator() or PCSetOperator() not the Pmat.
295: Logically Collective on PC
297: Input Parameters:
298: + pc - the preconditioner context
299: - flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)
301: Options Database Key:
302: . -pc_use_amat <true,false>
304: Notes:
305: For the common case in which the linear system matrix and the matrix used to construct the
306: preconditioner are identical, this routine does nothing.
308: Level: intermediate
310: .seealso: PCGetUseAmat(), PCBJACOBI, PCMG, PCFIELDSPLIT, PCCOMPOSITE
311: @*/
312: PetscErrorCode PCSetUseAmat(PC pc,PetscBool flg)
313: {
316: pc->useAmat = flg;
317: return(0);
318: }
322: /*@
323: PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
324: operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
325: TSSetIJacobian(), SNESSetJacobian(), KSPSetOperator() or PCSetOperator() not the Pmat.
327: Logically Collective on PC
329: Input Parameter:
330: . pc - the preconditioner context
332: Output Parameter:
333: . flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)
335: Notes:
336: For the common case in which the linear system matrix and the matrix used to construct the
337: preconditioner are identical, this routine does nothing.
339: Level: intermediate
341: .seealso: PCSetUseAmat(), PCBJACOBI, PCMG, PCFIELDSPLIT, PCCOMPOSITE
342: @*/
343: PetscErrorCode PCGetUseAmat(PC pc,PetscBool *flg)
344: {
347: *flg = pc->useAmat;
348: return(0);
349: }
353: /*@
354: PCCreate - Creates a preconditioner context.
356: Collective on MPI_Comm
358: Input Parameter:
359: . comm - MPI communicator
361: Output Parameter:
362: . pc - location to put the preconditioner context
364: Notes:
365: The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or ICC
366: in parallel. For dense matrices it is always PCNONE.
368: Level: developer
370: .keywords: PC, create, context
372: .seealso: PCSetUp(), PCApply(), PCDestroy()
373: @*/
PetscErrorCode PCCreate(MPI_Comm comm,PC *newpc)
{
  PC pc;

  *newpc = 0;
  PCInitializePackage();   /* ensure PC class id and log events are registered */

  PetscHeaderCreate(pc,_p_PC,struct _PCOps,PC_CLASSID,"PC","Preconditioner","PC",comm,PCDestroy,PCView);

  /* Start in a completely unconfigured state; the concrete type is chosen
     lazily in PCSetUp() via PCGetDefaultType_Private() if never set. */
  pc->mat                  = 0;
  pc->pmat                 = 0;
  pc->setupcalled          = 0;
  pc->setfromoptionscalled = 0;
  pc->data                 = 0;
  pc->diagonalscale        = PETSC_FALSE;
  pc->diagonalscaleleft    = 0;
  pc->diagonalscaleright   = 0;

  pc->modifysubmatrices  = 0;
  pc->modifysubmatricesP = 0;

  *newpc = pc;
  return(0);
}
403: /* -------------------------------------------------------------------------------*/
407: /*@
408: PCApply - Applies the preconditioner to a vector.
410: Collective on PC and Vec
412: Input Parameters:
413: + pc - the preconditioner context
414: - x - input vector
416: Output Parameter:
417: . y - output vector
419: Level: developer
421: .keywords: PC, apply
423: .seealso: PCApplyTranspose(), PCApplyBAorAB()
424: @*/
PetscErrorCode PCApply(PC pc,Vec x,Vec y)
{
  /* y = B x, where B is the preconditioner. */
  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  VecValidValues(x,2,PETSC_TRUE);    /* flag NaN/Inf in the input vector */
  if (pc->setupcalled < 2) {         /* 2 == fully set up; see PCSetUp() */
    PCSetUp(pc);
  }
  if (!pc->ops->apply) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply");
  PetscLogEventBegin(PC_Apply,pc,x,y,0);
  (*pc->ops->apply)(pc,x,y);
  PetscLogEventEnd(PC_Apply,pc,x,y,0);
  VecValidValues(y,3,PETSC_FALSE);   /* flag NaN/Inf produced by the apply */
  return(0);
}
448: /*@
449: PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.
451: Collective on PC and Vec
453: Input Parameters:
454: + pc - the preconditioner context
455: - x - input vector
457: Output Parameter:
458: . y - output vector
460: Notes:
461: Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
463: Level: developer
465: .keywords: PC, apply, symmetric, left
467: .seealso: PCApply(), PCApplySymmetricRight()
468: @*/
PetscErrorCode PCApplySymmetricLeft(PC pc,Vec x,Vec y)
{
  /* Apply the left factor of a symmetric preconditioner B = L*R. */
  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  VecValidValues(x,2,PETSC_TRUE);
  if (pc->setupcalled < 2) {   /* lazily set up before first application */
    PCSetUp(pc);
  }
  if (!pc->ops->applysymmetricleft) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply");
  PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);
  (*pc->ops->applysymmetricleft)(pc,x,y);
  PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);
  VecValidValues(y,3,PETSC_FALSE);
  return(0);
}
492: /*@
493: PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.
495: Collective on PC and Vec
497: Input Parameters:
498: + pc - the preconditioner context
499: - x - input vector
501: Output Parameter:
502: . y - output vector
504: Level: developer
506: Notes:
507: Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.
509: .keywords: PC, apply, symmetric, right
511: .seealso: PCApply(), PCApplySymmetricLeft()
512: @*/
513: PetscErrorCode PCApplySymmetricRight(PC pc,Vec x,Vec y)
514: {
521: if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
522: VecValidValues(x,2,PETSC_TRUE);
523: if (pc->setupcalled < 2) {
524: PCSetUp(pc);
525: }
526: if (!pc->ops->applysymmetricright) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply");
527: PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);
528: (*pc->ops->applysymmetricright)(pc,x,y);
529: PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);
530: VecValidValues(y,3,PETSC_FALSE);
531: return(0);
532: }
536: /*@
537: PCApplyTranspose - Applies the transpose of preconditioner to a vector.
539: Collective on PC and Vec
541: Input Parameters:
542: + pc - the preconditioner context
543: - x - input vector
545: Output Parameter:
546: . y - output vector
548: Notes: For complex numbers this applies the non-Hermitian transpose.
550: Developer Notes: We need to implement a PCApplyHermitianTranspose()
552: Level: developer
554: .keywords: PC, apply, transpose
556: .seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists()
557: @*/
PetscErrorCode PCApplyTranspose(PC pc,Vec x,Vec y)
{
  /* y = B^T x; for complex numbers this is the non-Hermitian transpose. */
  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  VecValidValues(x,2,PETSC_TRUE);
  if (pc->setupcalled < 2) {   /* lazily set up before first application */
    PCSetUp(pc);
  }
  if (!pc->ops->applytranspose) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply transpose");
  PetscLogEventBegin(PC_Apply,pc,x,y,0);   /* logged under the same event as PCApply */
  (*pc->ops->applytranspose)(pc,x,y);
  PetscLogEventEnd(PC_Apply,pc,x,y,0);
  VecValidValues(y,3,PETSC_FALSE);
  return(0);
}
581: /*@
582: PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation
584: Collective on PC and Vec
586: Input Parameters:
587: . pc - the preconditioner context
589: Output Parameter:
590: . flg - PETSC_TRUE if a transpose operation is defined
592: Level: developer
594: .keywords: PC, apply, transpose
596: .seealso: PCApplyTranspose()
597: @*/
598: PetscErrorCode PCApplyTransposeExists(PC pc,PetscBool *flg)
599: {
603: if (pc->ops->applytranspose) *flg = PETSC_TRUE;
604: else *flg = PETSC_FALSE;
605: return(0);
606: }
610: /*@
611: PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.
613: Collective on PC and Vec
615: Input Parameters:
616: + pc - the preconditioner context
617: . side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
618: . x - input vector
619: - work - work vector
621: Output Parameter:
622: . y - output vector
624: Level: developer
626: Notes: If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or D A M D^{-1} is actually applied. Note that the
627: specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling.
629: .keywords: PC, apply, operator
631: .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose()
632: @*/
PetscErrorCode PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  /* Compute y = B*A*x (left), y = A*B*x (right), or R*A*L*x (symmetric). */
  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  VecValidValues(x,3,PETSC_TRUE);
  if (side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
  if (pc->diagonalscale && side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");

  if (pc->setupcalled < 2) {   /* lazily set up before first application */
    PCSetUp(pc);
  }

  if (pc->diagonalscale) {
    /* With diagonal scaling the operator actually applied is D M A D^{-1}
       (left) or D A M D^{-1} (right); see PCSetDiagonalScale(). */
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      VecDuplicate(x,&work2);
      PCDiagonalScaleRight(pc,x,work2);
      (*pc->ops->applyBA)(pc,side,work2,y,work);
      PCDiagonalScaleLeft(pc,y,y);
      VecDestroy(&work2);
    } else if (side == PC_RIGHT) {
      PCDiagonalScaleRight(pc,x,y);
      PCApply(pc,y,work);
      MatMult(pc->mat,work,y);
      PCDiagonalScaleLeft(pc,y,y);
    } else if (side == PC_LEFT) {
      PCDiagonalScaleRight(pc,x,y);
      MatMult(pc->mat,y,work);
      PCApply(pc,work,y);
      PCDiagonalScaleLeft(pc,y,y);
    } else if (side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner");
  } else {
    /* No diagonal scaling: either the implementation fuses B and A, or we
       compose PCApply() and MatMult() in the order dictated by 'side'. */
    if (pc->ops->applyBA) {
      (*pc->ops->applyBA)(pc,side,x,y,work);
    } else if (side == PC_RIGHT) {   /* y = A*(B*x) */
      PCApply(pc,x,work);
      MatMult(pc->mat,work,y);
    } else if (side == PC_LEFT) {    /* y = B*(A*x) */
      MatMult(pc->mat,x,work);
      PCApply(pc,work,y);
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      PCApplySymmetricRight(pc,x,work);
      MatMult(pc->mat,work,y);
      VecCopy(y,work);
      PCApplySymmetricLeft(pc,work,y);
    }
  }
  VecValidValues(y,4,PETSC_FALSE);
  return(0);
}
693: /*@
694: PCApplyBAorABTranspose - Applies the transpose of the preconditioner
695: and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
696: NOT tr(B*A) = tr(A)*tr(B).
698: Collective on PC and Vec
700: Input Parameters:
701: + pc - the preconditioner context
702: . side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
703: . x - input vector
704: - work - work vector
706: Output Parameter:
707: . y - output vector
710: Notes: this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
711: defined by B'. This is why this has the funny form that it computes tr(B) * tr(A)
713: Level: developer
715: .keywords: PC, apply, operator, transpose
717: .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB()
718: @*/
PetscErrorCode PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work)
{
  /* Computes tr(B)*tr(A) (NOT tr(B*A)); used so the same Krylov code can
     solve A'x = b with a preconditioner defined by B'. */
  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  VecValidValues(x,3,PETSC_TRUE);
  /* Fast path: the implementation provides a fused transpose apply. */
  if (pc->ops->applyBAtranspose) {
    (*pc->ops->applyBAtranspose)(pc,side,x,y,work);
    VecValidValues(y,4,PETSC_FALSE);
    return(0);
  }
  if (side != PC_LEFT && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left");

  if (pc->setupcalled < 2) {   /* lazily set up before first application */
    PCSetUp(pc);
  }

  if (side == PC_RIGHT) {
    PCApplyTranspose(pc,x,work);
    MatMultTranspose(pc->mat,work,y);
  } else if (side == PC_LEFT) {
    MatMultTranspose(pc->mat,x,work);
    PCApplyTranspose(pc,work,y);
  }
  /* add support for PC_SYMMETRIC */
  VecValidValues(y,4,PETSC_FALSE);
  return(0);
}
753: /* -------------------------------------------------------------------------------*/
757: /*@
758: PCApplyRichardsonExists - Determines whether a particular preconditioner has a
759: built-in fast application of Richardson's method.
761: Not Collective
763: Input Parameter:
764: . pc - the preconditioner
766: Output Parameter:
767: . exists - PETSC_TRUE or PETSC_FALSE
769: Level: developer
771: .keywords: PC, apply, Richardson, exists
773: .seealso: PCApplyRichardson()
774: @*/
775: PetscErrorCode PCApplyRichardsonExists(PC pc,PetscBool *exists)
776: {
780: if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
781: else *exists = PETSC_FALSE;
782: return(0);
783: }
787: /*@
788: PCApplyRichardson - Applies several steps of Richardson iteration with
789: the particular preconditioner. This routine is usually used by the
790: Krylov solvers and not the application code directly.
792: Collective on PC
794: Input Parameters:
795: + pc - the preconditioner context
796: . b - the right hand side
797: . w - one work vector
798: . rtol - relative decrease in residual norm convergence criteria
799: . abstol - absolute residual norm convergence criteria
800: . dtol - divergence residual norm increase criteria
801: . its - the number of iterations to apply.
802: - guesszero - if the input x contains nonzero initial guess
804: Output Parameter:
805: + outits - number of iterations actually used (for SOR this always equals its)
806: . reason - the reason the apply terminated
807: - y - the solution (also contains initial guess if guesszero is PETSC_FALSE)
809: Notes:
810: Most preconditioners do not support this function. Use the command
811: PCApplyRichardsonExists() to determine if one does.
813: Except for the multigrid PC this routine ignores the convergence tolerances
814: and always runs for the number of iterations
816: Level: developer
818: .keywords: PC, apply, Richardson
820: .seealso: PCApplyRichardsonExists()
821: @*/
PetscErrorCode PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  /* Run the implementation's built-in Richardson iteration; most PC types do
     not provide one (check with PCApplyRichardsonExists()). */
  if (b == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"b and y must be different vectors");
  if (pc->setupcalled < 2) {   /* lazily set up before first application */
    PCSetUp(pc);
  }
  if (!pc->ops->applyrichardson) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply richardson");
  (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);
  return(0);
}
840: /*
841: a setupcall of 0 indicates never setup,
842: 1 needs to be resetup,
843: 2 does not need any changes.
844: */
847: /*@
848: PCSetUp - Prepares for the use of a preconditioner.
850: Collective on PC
852: Input Parameter:
853: . pc - the preconditioner context
855: Level: developer
857: .keywords: PC, setup
859: .seealso: PCCreate(), PCApply(), PCDestroy()
860: @*/
PetscErrorCode PCSetUp(PC pc)
{
  const char *def;

  if (!pc->mat) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");

  /* setupcalled: 0 = never set up, 1 = needs re-setup, 2 = up to date
     (see the comment block above this function). */
  if (pc->setupcalled > 1) {
    PetscInfo(pc,"Setting PC with identical preconditioner\n");
    return(0);
  } else if (!pc->setupcalled) {
    PetscInfo(pc,"Setting up new PC\n");
  } else if (pc->flag == SAME_NONZERO_PATTERN) {
    PetscInfo(pc,"Setting up PC with same nonzero pattern\n");
  } else {
    PetscInfo(pc,"Setting up PC with different nonzero pattern\n");
  }

  /* Pick a default type lazily if the user never called PCSetType(). */
  if (!((PetscObject)pc)->type_name) {
    PCGetDefaultType_Private(pc,&def);
    PCSetType(pc,def);
  }

  PetscLogEventBegin(PC_SetUp,pc,0,0,0);
  if (pc->ops->setup) {
    (*pc->ops->setup)(pc);
  }
  pc->setupcalled = 2;   /* mark fully set up */

  PetscLogEventEnd(PC_SetUp,pc,0,0,0);
  return(0);
}
898: /*@
899: PCSetUpOnBlocks - Sets up the preconditioner for each block in
900: the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
901: methods.
903: Collective on PC
905: Input Parameters:
906: . pc - the preconditioner context
908: Level: developer
910: .keywords: PC, setup, blocks
912: .seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp()
913: @*/
914: PetscErrorCode PCSetUpOnBlocks(PC pc)
915: {
920: if (!pc->ops->setuponblocks) return(0);
921: PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);
922: (*pc->ops->setuponblocks)(pc);
923: PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);
924: return(0);
925: }
929: /*@C
930: PCSetModifySubMatrices - Sets a user-defined routine for modifying the
931: submatrices that arise within certain subdomain-based preconditioners.
932: The basic submatrices are extracted from the preconditioner matrix as
933: usual; the user can then alter these (for example, to set different boundary
934: conditions for each submatrix) before they are used for the local solves.
936: Logically Collective on PC
938: Input Parameters:
939: + pc - the preconditioner context
940: . func - routine for modifying the submatrices
941: - ctx - optional user-defined context (may be null)
943: Calling sequence of func:
944: $ func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);
946: . row - an array of index sets that contain the global row numbers
947: that comprise each local submatrix
948: . col - an array of index sets that contain the global column numbers
949: that comprise each local submatrix
950: . submat - array of local submatrices
951: - ctx - optional user-defined context for private data for the
952: user-defined func routine (may be null)
954: Notes:
955: PCSetModifySubMatrices() MUST be called before KSPSetUp() and
956: KSPSolve().
958: A routine set by PCSetModifySubMatrices() is currently called within
959: the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
960: preconditioners. All other preconditioners ignore this routine.
962: Level: advanced
964: .keywords: PC, set, modify, submatrices
966: .seealso: PCModifySubMatrices(), PCASMGetSubMatrices()
967: @*/
968: PetscErrorCode PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
969: {
972: pc->modifysubmatrices = func;
973: pc->modifysubmatricesP = ctx;
974: return(0);
975: }
979: /*@C
980: PCModifySubMatrices - Calls an optional user-defined routine within
981: certain preconditioners if one has been set with PCSetModifySubMatrices().
983: Collective on PC
985: Input Parameters:
986: + pc - the preconditioner context
987: . nsub - the number of local submatrices
988: . row - an array of index sets that contain the global row numbers
989: that comprise each local submatrix
990: . col - an array of index sets that contain the global column numbers
991: that comprise each local submatrix
992: . submat - array of local submatrices
993: - ctx - optional user-defined context for private data for the
994: user-defined routine (may be null)
996: Output Parameter:
997: . submat - array of local submatrices (the entries of which may
998: have been modified)
1000: Notes:
1001: The user should NOT generally call this routine, as it will
1002: automatically be called within certain preconditioners (currently
1003: block Jacobi, additive Schwarz) if set.
1005: The basic submatrices are extracted from the preconditioner matrix
1006: as usual; the user can then alter these (for example, to set different
1007: boundary conditions for each submatrix) before they are used for the
1008: local solves.
1010: Level: developer
1012: .keywords: PC, modify, submatrices
1014: .seealso: PCSetModifySubMatrices()
1015: @*/
1016: PetscErrorCode PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
1017: {
1022: if (!pc->modifysubmatrices) return(0);
1023: PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);
1024: (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);
1025: PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);
1026: return(0);
1027: }
1031: /*@
1032: PCSetOperators - Sets the matrix associated with the linear system and
1033: a (possibly) different one associated with the preconditioner.
1035: Logically Collective on PC and Mat
1037: Input Parameters:
1038: + pc - the preconditioner context
1039: . Amat - the matrix that defines the linear system
1040: . Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.
1041: - flag - flag indicating information about the preconditioner matrix structure
1042: during successive linear solves. This flag is ignored the first time a
1043: linear system is solved, and thus is irrelevant when solving just one linear
1044: system.
1046: Notes:
1047: The flag can be used to eliminate unnecessary work in the preconditioner
1048: during the repeated solution of linear systems of the same size. The
1049: available options are
1050: + SAME_PRECONDITIONER -
1051: Pmat is identical during successive linear solves.
1052: This option is intended for folks who are using
1053: different Amat and Pmat matrices and wish to reuse the
1054: same preconditioner matrix. For example, this option
1055: saves work by not recomputing incomplete factorization
1056: for ILU/ICC preconditioners.
1057: . SAME_NONZERO_PATTERN -
1058: Pmat has the same nonzero structure during
1059: successive linear solves.
1060: - DIFFERENT_NONZERO_PATTERN -
1061: Pmat does not have the same nonzero structure.
1063: Passing a NULL for Amat or Pmat removes the matrix that is currently used.
1065: If you wish to replace either Amat or Pmat but leave the other one untouched then
1066: first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
1067: on it and then pass it back in in your call to KSPSetOperators().
1069: Caution:
1070: If you specify SAME_NONZERO_PATTERN, PETSc believes your assertion
1071: and does not check the structure of the matrix. If you erroneously
1072: claim that the structure is the same when it actually is not, the new
1073: preconditioner will not function correctly. Thus, use this optimization
1074: feature carefully!
1076: If in doubt about whether your preconditioner matrix has changed
1077: structure or not, use the flag DIFFERENT_NONZERO_PATTERN.
1079: More Notes about Repeated Solution of Linear Systems:
1080: PETSc does NOT reset the matrix entries of either Amat or Pmat
1081: to zero after a linear solve; the user is completely responsible for
1082: matrix assembly. See the routine MatZeroEntries() if desiring to
1083: zero all elements of a matrix.
1085: Level: intermediate
1087: .keywords: PC, set, operators, matrix, linear system
1089: .seealso: PCGetOperators(), MatZeroEntries()
1090: @*/
PetscErrorCode PCSetOperators(PC pc,Mat Amat,Mat Pmat,MatStructure flag)
{
  PetscInt m1,n1,m2,n2;

  /* Once the PC has been set up, the local sizes of the operators may not change. */
  if (pc->setupcalled && Amat && Pmat) {
    MatGetLocalSize(Amat,&m1,&n1);
    MatGetLocalSize(pc->mat,&m2,&n2);
    if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
    MatGetLocalSize(Pmat,&m1,&n1);
    MatGetLocalSize(pc->pmat,&m2,&n2);
    if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
  }

  /* reference first in case the matrices are the same */
  if (Amat) {PetscObjectReference((PetscObject)Amat);}
  MatDestroy(&pc->mat);
  if (Pmat) {PetscObjectReference((PetscObject)Pmat);}
  MatDestroy(&pc->pmat);
  pc->mat  = Amat;
  pc->pmat = Pmat;

  /* Mark for re-setup unless the caller declared the preconditioner unchanged. */
  if (pc->setupcalled == 2 && flag != SAME_PRECONDITIONER) {
    pc->setupcalled = 1;
  }
  pc->flag = flag;
  return(0);
}
1128: /*@C
1129: PCGetOperators - Gets the matrix associated with the linear system and
1130: possibly a different one associated with the preconditioner.
1132: Not collective, though parallel Mats are returned if the PC is parallel
1134: Input Parameter:
1135: . pc - the preconditioner context
1137: Output Parameters:
1138: + Amat - the matrix defining the linear system
1139: . Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.
1140: - flag - flag indicating information about the preconditioner
1141: matrix structure. See PCSetOperators() for details.
1143: Level: intermediate
1145: Notes: Does not increase the reference count of the matrices, so you should not destroy them
1147: Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
1148: are created in PC and returned to the user. In this case, if both operators
1149: mat and pmat are requested, two DIFFERENT operators will be returned. If
1150: only one is requested both operators in the PC will be the same (i.e. as
1151: if one had called KSP/PCSetOperators() with the same argument for both Mats).
1152: The user must set the sizes of the returned matrices and their type etc just
1153: as if the user created them with MatCreate(). For example,
1155: $ KSP/PCGetOperators(ksp/pc,&Amat,NULL,NULL); is equivalent to
1156: $ set size, type, etc of Amat
1158: $ MatCreate(comm,&Amat);
1159: $ KSP/PCSetOperators(ksp/pc,Amat,Amat,SAME_NONZERO_PATTERN);
1160: $ PetscObjectDereference((PetscObject)Amat);
1161: $ set size, type, etc of Amat
1163: and
1165: $ KSP/PCGetOperators(ksp/pc,&Amat,&Pmat,NULL); is equivalent to
1166: $ set size, type, etc of Amat and Pmat
1168: $ MatCreate(comm,&Amat);
1169: $ MatCreate(comm,&Pmat);
1170: $ KSP/PCSetOperators(ksp/pc,Amat,Pmat,SAME_NONZERO_PATTERN);
1171: $ PetscObjectDereference((PetscObject)Amat);
1172: $ PetscObjectDereference((PetscObject)Pmat);
1173: $ set size, type, etc of Amat and Pmat
1175: The rationale for this support is so that when creating a TS, SNES, or KSP the hierarchy
1176: of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
1177: managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look
1178: at this is when you create a SNES you do not NEED to create a KSP and attach it to
1179: the SNES object (the SNES object manages it for you). Similarly when you create a KSP
1180: you do not need to attach a PC to it (the KSP object manages the PC object for you).
1181: Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
1182: it can be created for you?
1185: .keywords: PC, get, operators, matrix, linear system
1187: .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet()
1188: @*/
1189: PetscErrorCode PCGetOperators(PC pc,Mat *Amat,Mat *Pmat,MatStructure *flag)
1190: {
1195: if (Amat) {
1196: if (!pc->mat) {
1197: if (pc->pmat && !Pmat) { /* Apmat has been set, but user did not request it, so use for Amat */
1198: pc->mat = pc->pmat;
1199: PetscObjectReference((PetscObject)pc->mat);
1200: } else { /* both Amat and Pmat are empty */
1201: MatCreate(PetscObjectComm((PetscObject)pc),&pc->mat);
1202: if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
1203: pc->pmat = pc->mat;
1204: PetscObjectReference((PetscObject)pc->pmat);
1205: }
1206: }
1207: }
1208: *Amat = pc->mat;
1209: }
1210: if (Pmat) {
1211: if (!pc->pmat) {
1212: if (pc->mat && !Amat) { /* Amat has been set but was not requested, so use for pmat */
1213: pc->pmat = pc->mat;
1214: PetscObjectReference((PetscObject)pc->pmat);
1215: } else {
1216: MatCreate(PetscObjectComm((PetscObject)pc),&pc->pmat);
1217: if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
1218: pc->mat = pc->pmat;
1219: PetscObjectReference((PetscObject)pc->mat);
1220: }
1221: }
1222: }
1223: *Pmat = pc->pmat;
1224: }
1225: if (flag) *flag = pc->flag;
1226: return(0);
1227: }
1231: /*@C
1232: PCGetOperatorsSet - Determines if the matrix associated with the linear system and
1233: possibly a different one associated with the preconditioner have been set in the PC.
1235: Not collective, though the results on all processes should be the same
1237: Input Parameter:
1238: . pc - the preconditioner context
1240: Output Parameters:
1241: + mat - the matrix associated with the linear system was set
1242: - pmat - matrix associated with the preconditioner was set, usually the same
1244: Level: intermediate
1246: .keywords: PC, get, operators, matrix, linear system
1248: .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators()
1249: @*/
1250: PetscErrorCode PCGetOperatorsSet(PC pc,PetscBool *mat,PetscBool *pmat)
1251: {
1254: if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE;
1255: if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
1256: return(0);
1257: }
1261: /*@
1262: PCFactorGetMatrix - Gets the factored matrix from the
1263: preconditioner context. This routine is valid only for the LU,
1264: incomplete LU, Cholesky, and incomplete Cholesky methods.
1266: Not Collective on PC though Mat is parallel if PC is parallel
1268: Input Parameters:
1269: . pc - the preconditioner context
1271: Output parameters:
1272: . mat - the factored matrix
1274: Level: advanced
1276: Notes: Does not increase the reference count for the matrix so DO NOT destroy it
1278: .keywords: PC, get, factored, matrix
1279: @*/
1280: PetscErrorCode PCFactorGetMatrix(PC pc,Mat *mat)
1281: {
1287: if (pc->ops->getfactoredmatrix) {
1288: (*pc->ops->getfactoredmatrix)(pc,mat);
1289: } else SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC type does not support getting factor matrix");
1290: return(0);
1291: }
1295: /*@C
1296: PCSetOptionsPrefix - Sets the prefix used for searching for all
1297: PC options in the database.
1299: Logically Collective on PC
1301: Input Parameters:
1302: + pc - the preconditioner context
1303: - prefix - the prefix string to prepend to all PC option requests
1305: Notes:
1306: A hyphen (-) must NOT be given at the beginning of the prefix name.
1307: The first character of all runtime options is AUTOMATICALLY the
1308: hyphen.
1310: Level: advanced
1312: .keywords: PC, set, options, prefix, database
1314: .seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix()
1315: @*/
1316: PetscErrorCode PCSetOptionsPrefix(PC pc,const char prefix[])
1317: {
1322: PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);
1323: return(0);
1324: }
1328: /*@C
1329: PCAppendOptionsPrefix - Appends to the prefix used for searching for all
1330: PC options in the database.
1332: Logically Collective on PC
1334: Input Parameters:
1335: + pc - the preconditioner context
1336: - prefix - the prefix string to prepend to all PC option requests
1338: Notes:
1339: A hyphen (-) must NOT be given at the beginning of the prefix name.
1340: The first character of all runtime options is AUTOMATICALLY the
1341: hyphen.
1343: Level: advanced
1345: .keywords: PC, append, options, prefix, database
1347: .seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix()
1348: @*/
1349: PetscErrorCode PCAppendOptionsPrefix(PC pc,const char prefix[])
1350: {
1355: PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);
1356: return(0);
1357: }
1361: /*@C
1362: PCGetOptionsPrefix - Gets the prefix used for searching for all
1363: PC options in the database.
1365: Not Collective
1367: Input Parameters:
1368: . pc - the preconditioner context
1370: Output Parameters:
1371: . prefix - pointer to the prefix string used, is returned
1373: Notes: On the fortran side, the user should pass in a string 'prefix' of
1374: sufficient length to hold the prefix.
1376: Level: advanced
1378: .keywords: PC, get, options, prefix, database
1380: .seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix()
1381: @*/
1382: PetscErrorCode PCGetOptionsPrefix(PC pc,const char *prefix[])
1383: {
1389: PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);
1390: return(0);
1391: }
1395: /*@
1396: PCPreSolve - Optional pre-solve phase, intended for any
1397: preconditioner-specific actions that must be performed before
1398: the iterative solve itself.
1400: Collective on PC
1402: Input Parameters:
1403: + pc - the preconditioner context
1404: - ksp - the Krylov subspace context
1406: Level: developer
1408: Sample of Usage:
1409: .vb
1410: PCPreSolve(pc,ksp);
1411: KSPSolve(ksp,b,x);
1412: PCPostSolve(pc,ksp);
1413: .ve
1415: Notes:
1416: The pre-solve phase is distinct from the PCSetUp() phase.
1418: KSPSolve() calls this directly, so is rarely called by the user.
1420: .keywords: PC, pre-solve
1422: .seealso: PCPostSolve()
1423: @*/
1424: PetscErrorCode PCPreSolve(PC pc,KSP ksp)
1425: {
1427: Vec x,rhs;
1432: pc->presolvedone++;
1433: if (pc->presolvedone > 2) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot embed PCPreSolve() more than twice");
1434: KSPGetSolution(ksp,&x);
1435: KSPGetRhs(ksp,&rhs);
1437: if (pc->ops->presolve) {
1438: (*pc->ops->presolve)(pc,ksp,rhs,x);
1439: }
1440: return(0);
1441: }
1445: /*@
1446: PCPostSolve - Optional post-solve phase, intended for any
1447: preconditioner-specific actions that must be performed after
1448: the iterative solve itself.
1450: Collective on PC
1452: Input Parameters:
1453: + pc - the preconditioner context
1454: - ksp - the Krylov subspace context
1456: Sample of Usage:
1457: .vb
1458: PCPreSolve(pc,ksp);
1459: KSPSolve(ksp,b,x);
1460: PCPostSolve(pc,ksp);
1461: .ve
1463: Note:
1464: KSPSolve() calls this routine directly, so it is rarely called by the user.
1466: Level: developer
1468: .keywords: PC, post-solve
1470: .seealso: PCPreSolve(), KSPSolve()
1471: @*/
1472: PetscErrorCode PCPostSolve(PC pc,KSP ksp)
1473: {
1475: Vec x,rhs;
1480: pc->presolvedone--;
1481: KSPGetSolution(ksp,&x);
1482: KSPGetRhs(ksp,&rhs);
1483: if (pc->ops->postsolve) {
1484: (*pc->ops->postsolve)(pc,ksp,rhs,x);
1485: }
1486: return(0);
1487: }
1491: /*@C
1492: PCLoad - Loads a PC that has been stored in binary with PCView().
1494: Collective on PetscViewer
1496: Input Parameters:
1497: + newdm - the newly loaded PC, this needs to have been created with PCCreate() or
1498: some related function before a call to PCLoad().
1499: - viewer - binary file viewer, obtained from PetscViewerBinaryOpen()
1501: Level: intermediate
1503: Notes:
1504: The type is determined by the data in the file, any type set into the PC before this call is ignored.
1506: Notes for advanced users:
1507: Most users should not need to know the details of the binary storage
1508: format, since PCLoad() and PCView() completely hide these details.
1509: But for anyone who's interested, the standard binary matrix storage
1510: format is
1511: .vb
1512: has not yet been determined
1513: .ve
1515: .seealso: PetscViewerBinaryOpen(), PCView(), MatLoad(), VecLoad()
1516: @*/
1517: PetscErrorCode PCLoad(PC newdm, PetscViewer viewer)
1518: {
1520: PetscBool isbinary;
1521: PetscInt classid;
1522: char type[256];
1527: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
1528: if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
1530: PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);
1531: if (classid != PC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not PC next in file");
1532: PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);
1533: PCSetType(newdm, type);
1534: if (newdm->ops->load) {
1535: (*newdm->ops->load)(newdm,viewer);
1536: }
1537: return(0);
1538: }
1540: #include <petscdraw.h>
1541: #if defined(PETSC_HAVE_SAWS)
1542: #include <petscviewersaws.h>
1543: #endif
1546: /*@C
1547: PCView - Prints the PC data structure.
1549: Collective on PC
1551: Input Parameters:
1552: + pc - the PC context
1553: - viewer - optional visualization context
1555: Note:
1556: The available visualization contexts include
1557: + PETSC_VIEWER_STDOUT_SELF - standard output (default)
1558: - PETSC_VIEWER_STDOUT_WORLD - synchronized standard
1559: output where only the first processor opens
1560: the file. All other processors send their
1561: data to the first processor to print.
1563: The user can open an alternative visualization contexts with
1564: PetscViewerASCIIOpen() (output to a specified file).
1566: Level: developer
1568: .keywords: PC, view
1570: .seealso: KSPView(), PetscViewerASCIIOpen()
1571: @*/
1572: PetscErrorCode PCView(PC pc,PetscViewer viewer)
1573: {
1574: PCType cstr;
1575: PetscErrorCode ierr;
1576: PetscBool iascii,isstring,isbinary,isdraw;
1577: PetscViewerFormat format;
1578: #if defined(PETSC_HAVE_SAWS)
1579: PetscBool isams;
1580: #endif
1584: if (!viewer) {
1585: PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer);
1586: }
1590: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
1591: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
1592: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
1593: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
1594: #if defined(PETSC_HAVE_SAWS)
1595: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS,&isams);
1596: #endif
1598: if (iascii) {
1599: PetscViewerGetFormat(viewer,&format);
1600: PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer);
1601: if (!pc->setupcalled) {
1602: PetscViewerASCIIPrintf(viewer," PC has not been set up so information may be incomplete\n");
1603: }
1604: if (pc->ops->view) {
1605: PetscViewerASCIIPushTab(viewer);
1606: (*pc->ops->view)(pc,viewer);
1607: PetscViewerASCIIPopTab(viewer);
1608: }
1609: if (pc->mat) {
1610: PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);
1611: if (pc->pmat == pc->mat) {
1612: PetscViewerASCIIPrintf(viewer," linear system matrix = precond matrix:\n");
1613: PetscViewerASCIIPushTab(viewer);
1614: MatView(pc->mat,viewer);
1615: PetscViewerASCIIPopTab(viewer);
1616: } else {
1617: if (pc->pmat) {
1618: PetscViewerASCIIPrintf(viewer," linear system matrix followed by preconditioner matrix:\n");
1619: } else {
1620: PetscViewerASCIIPrintf(viewer," linear system matrix:\n");
1621: }
1622: PetscViewerASCIIPushTab(viewer);
1623: MatView(pc->mat,viewer);
1624: if (pc->pmat) {MatView(pc->pmat,viewer);}
1625: PetscViewerASCIIPopTab(viewer);
1626: }
1627: PetscViewerPopFormat(viewer);
1628: }
1629: } else if (isstring) {
1630: PCGetType(pc,&cstr);
1631: PetscViewerStringSPrintf(viewer," %-7.7s",cstr);
1632: if (pc->ops->view) {(*pc->ops->view)(pc,viewer);}
1633: } else if (isbinary) {
1634: PetscInt classid = PC_FILE_CLASSID;
1635: MPI_Comm comm;
1636: PetscMPIInt rank;
1637: char type[256];
1639: PetscObjectGetComm((PetscObject)pc,&comm);
1640: MPI_Comm_rank(comm,&rank);
1641: if (!rank) {
1642: PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT,PETSC_FALSE);
1643: PetscStrncpy(type,((PetscObject)pc)->type_name,256);
1644: PetscViewerBinaryWrite(viewer,type,256,PETSC_CHAR,PETSC_FALSE);
1645: }
1646: if (pc->ops->view) {
1647: (*pc->ops->view)(pc,viewer);
1648: }
1649: } else if (isdraw) {
1650: PetscDraw draw;
1651: char str[25];
1652: PetscReal x,y,bottom,h;
1653: PetscInt n;
1655: PetscViewerDrawGetDraw(viewer,0,&draw);
1656: PetscDrawGetCurrentPoint(draw,&x,&y);
1657: if (pc->mat) {
1658: MatGetSize(pc->mat,&n,NULL);
1659: PetscSNPrintf(str,25,"PC: %s (%D)",((PetscObject)pc)->type_name,n);
1660: } else {
1661: PetscSNPrintf(str,25,"PC: %s",((PetscObject)pc)->type_name);
1662: }
1663: PetscDrawBoxedString(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);
1664: bottom = y - h;
1665: PetscDrawPushCurrentPoint(draw,x,bottom);
1666: if (pc->ops->view) {
1667: (*pc->ops->view)(pc,viewer);
1668: }
1669: PetscDrawPopCurrentPoint(draw);
1670: #if defined(PETSC_HAVE_SAWS)
1671: } else if (isams) {
1672: PetscMPIInt rank;
1674: PetscObjectName((PetscObject)pc);
1675: MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
1676: if (!((PetscObject)pc)->amsmem && !rank) {
1677: PetscObjectViewSAWs((PetscObject)pc,viewer);
1678: }
1679: if (pc->mat) {MatView(pc->mat,viewer);}
1680: if (pc->pmat && pc->pmat != pc->mat) {MatView(pc->pmat,viewer);}
1681: #endif
1682: }
1683: return(0);
1684: }
1689: /*@
1690: PCSetInitialGuessNonzero - Tells the iterative solver that the
1691: initial guess is nonzero; otherwise PC assumes the initial guess
1692: is to be zero (and thus zeros it out before solving).
1694: Logically Collective on PC
1696: Input Parameters:
1697: + pc - iterative context obtained from PCCreate()
1698: - flg - PETSC_TRUE indicates the guess is non-zero, PETSC_FALSE indicates the guess is zero
1700: Level: Developer
1702: Notes:
1703: This is a weird function. Since PC's are linear operators on the right hand side they
1704: CANNOT use an initial guess. This function is for the "pass-through" preconditioners
1705: PCKSP and PCREDUNDANT and causes the inner KSP object to use the nonzero
1706: initial guess. Not currently working for PCREDUNDANT, that has to be rewritten to use KSP.
1709: .keywords: PC, set, initial guess, nonzero
1711: .seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll()
1712: @*/
1713: PetscErrorCode PCSetInitialGuessNonzero(PC pc,PetscBool flg)
1714: {
1717: pc->nonzero_guess = flg;
1718: return(0);
1719: }
1723: /*@C
1724: PCRegister - Adds a method to the preconditioner package.
1726: Not collective
1728: Input Parameters:
1729: + name_solver - name of a new user-defined solver
1730: - routine_create - routine to create method context
1732: Notes:
1733: PCRegister() may be called multiple times to add several user-defined preconditioners.
1735: Sample usage:
1736: .vb
1737: PCRegister("my_solver", MySolverCreate);
1738: .ve
1740: Then, your solver can be chosen with the procedural interface via
1741: $ PCSetType(pc,"my_solver")
1742: or at runtime via the option
1743: $ -pc_type my_solver
1745: Level: advanced
1747: .keywords: PC, register
1749: .seealso: PCRegisterAll(), PCRegisterDestroy()
1750: @*/
1751: PetscErrorCode PCRegister(const char sname[],PetscErrorCode (*function)(PC))
1752: {
1756: PetscFunctionListAdd(&PCList,sname,function);
1757: return(0);
1758: }
1762: /*@
1763: PCComputeExplicitOperator - Computes the explicit preconditioned operator.
1765: Collective on PC
1767: Input Parameter:
1768: . pc - the preconditioner object
1770: Output Parameter:
1771: . mat - the explicit preconditioned operator
1773: Notes:
1774: This computation is done by applying the operators to columns of the
1775: identity matrix.
1777: Currently, this routine uses a dense matrix format when 1 processor
1778: is used and a sparse format otherwise. This routine is costly in general,
1779: and is recommended for use only with relatively small systems.
1781: Level: advanced
1783: .keywords: PC, compute, explicit, operator
1785: .seealso: KSPComputeExplicitOperator()
1787: @*/
1788: PetscErrorCode PCComputeExplicitOperator(PC pc,Mat *mat)
1789: {
1790: Vec in,out;
1792: PetscInt i,M,m,*rows,start,end;
1793: PetscMPIInt size;
1794: MPI_Comm comm;
1795: PetscScalar *array,one = 1.0;
1801: PetscObjectGetComm((PetscObject)pc,&comm);
1802: MPI_Comm_size(comm,&size);
1804: if (!pc->pmat) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"You must call KSPSetOperators() or PCSetOperators() before this call");
1805: MatGetVecs(pc->pmat,&in,0);
1806: VecDuplicate(in,&out);
1807: VecGetOwnershipRange(in,&start,&end);
1808: VecGetSize(in,&M);
1809: VecGetLocalSize(in,&m);
1810: PetscMalloc1((m+1),&rows);
1811: for (i=0; i<m; i++) rows[i] = start + i;
1813: MatCreate(comm,mat);
1814: MatSetSizes(*mat,m,m,M,M);
1815: if (size == 1) {
1816: MatSetType(*mat,MATSEQDENSE);
1817: MatSeqDenseSetPreallocation(*mat,NULL);
1818: } else {
1819: MatSetType(*mat,MATMPIAIJ);
1820: MatMPIAIJSetPreallocation(*mat,0,NULL,0,NULL);
1821: }
1823: for (i=0; i<M; i++) {
1825: VecSet(in,0.0);
1826: VecSetValues(in,1,&i,&one,INSERT_VALUES);
1827: VecAssemblyBegin(in);
1828: VecAssemblyEnd(in);
1830: /* should fix, allowing user to choose side */
1831: PCApply(pc,in,out);
1833: VecGetArray(out,&array);
1834: MatSetValues(*mat,m,rows,1,&i,array,INSERT_VALUES);
1835: VecRestoreArray(out,&array);
1837: }
1838: PetscFree(rows);
1839: VecDestroy(&out);
1840: MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);
1841: MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);
1842: return(0);
1843: }
1847: /*@
1848: PCSetCoordinates - sets the coordinates of all the nodes on the local process
1850: Collective on PC
1852: Input Parameters:
1853: + pc - the solver context
1854: . dim - the dimension of the coordinates 1, 2, or 3
1855: - coords - the coordinates
1857: Level: intermediate
1859: Notes: coords is an array of the 3D coordinates for the nodes on
1860: the local processor. So if there are 108 equations on a processor
1861: for a displacement finite element discretization of elasticity (so
1862: that there are 36 = 108/3 nodes) then the array must have 108
1863: double precision values (ie, 3 * 36). These x y z coordinates
1864: should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x,
1865: ... , N-1.z ].
1867: .seealso: MatSetNearNullSpace
1868: @*/
1869: PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords)
1870: {
1874: PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscInt,PetscReal*),(pc,dim,nloc,coords));
1875: return(0);
1876: }