Actual source code: itfunc.c
1: /*
2: Interface KSP routines that the user calls.
3: */
5: #include <petsc/private/kspimpl.h>
6: #include <petsc/private/matimpl.h>
7: #include <petscdm.h>
9: /* number of nested levels of KSPSetUp/Solve(). This is used to determine if KSP_DIVERGED_ITS should be fatal. */
10: static PetscInt level = 0;
12: static inline PetscErrorCode ObjectView(PetscObject obj, PetscViewer viewer, PetscViewerFormat format)
13: {
14: PetscCall(PetscViewerPushFormat(viewer, format));
15: PetscCall(PetscObjectView(obj, viewer));
16: PetscCall(PetscViewerPopFormat(viewer));
17: return PETSC_SUCCESS;
18: }
20: /*@
21: KSPComputeExtremeSingularValues - Computes the extreme singular values
22: for the preconditioned operator. Called after or during `KSPSolve()`.
24: Not Collective
26: Input Parameter:
27: . ksp - iterative context obtained from `KSPCreate()`
29: Output Parameters:
30: . emax, emin - maximum and minimum estimated singular values, respectively
32: Options Database Keys:
33: . -ksp_view_singularvalues - compute extreme singular values and print when `KSPSolve()` completes.
35: Notes:
36: One must call `KSPSetComputeSingularValues()` before calling `KSPSetUp()`
37: (or use the option -ksp_view_singularvalues) in order for this routine to work correctly.
39: Many users may just want to use the monitoring routine
40: `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
41: to print the extreme singular values at each iteration of the linear solve.
43: Estimates of the smallest singular value may be very inaccurate, especially if the Krylov method has not converged.
44: The largest singular value is usually accurate to within a few percent if the method has converged, but is still not
45: intended for eigenanalysis. Consider the excellent package `SLEPc` if accurate values are required.
47: Disable restarts if using `KSPGMRES`, otherwise this estimate will only use the iterations performed after the last
48: restart. See `KSPGMRESSetRestart()` for more details.
50: Level: advanced
52: .seealso: [](chapter_ksp), `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`, `KSPComputeEigenvalues()`, `KSP`
53: @*/
54: PetscErrorCode KSPComputeExtremeSingularValues(KSP ksp, PetscReal *emax, PetscReal *emin)
55: {
56: PetscFunctionBegin;
60: PetscCheck(ksp->calc_sings, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Singular values not requested before KSPSetUp()");
62: if (ksp->ops->computeextremesingularvalues) PetscUseTypeMethod(ksp, computeextremesingularvalues, emax, emin);
63: else {
64: *emin = -1.0;
65: *emax = -1.0;
66: }
67: PetscFunctionReturn(PETSC_SUCCESS);
68: }
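/*
   Illustrative usage sketch (not part of this file): the calling sequence described in the
   manual page above, assuming `ksp` already has its operators set and `b`, `x` are compatible
   vectors. The helper name is hypothetical.
*/
static PetscErrorCode ExampleViewExtremeSingularValues(KSP ksp, Vec b, Vec x)
{
  PetscReal emax, emin;

  PetscFunctionBegin;
  PetscCall(KSPSetComputeSingularValues(ksp, PETSC_TRUE)); /* must precede KSPSetUp() */
  PetscCall(KSPSetUp(ksp));
  PetscCall(KSPSolve(ksp, b, x));
  PetscCall(KSPComputeExtremeSingularValues(ksp, &emax, &emin));
  PetscCall(PetscPrintf(PetscObjectComm((PetscObject)ksp), "sigma_max %g sigma_min %g condition estimate %g\n", (double)emax, (double)emin, (double)(emax / emin)));
  PetscFunctionReturn(PETSC_SUCCESS);
}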
70: /*@
71: KSPComputeEigenvalues - Computes the extreme eigenvalues for the
72: preconditioned operator. Called after or during `KSPSolve()`.
74: Not Collective
76: Input Parameters:
77: + ksp - iterative context obtained from `KSPCreate()`
78: - n - size of arrays r and c. The number of eigenvalues computed (neig) will, in
79: general, be less than this.
81: Output Parameters:
82: + r - real part of computed eigenvalues, provided by user with a dimension of at least n
83: . c - complex part of computed eigenvalues, provided by user with a dimension of at least n
84: - neig - actual number of eigenvalues computed (will be less than or equal to n)
86: Options Database Keys:
87: . -ksp_view_eigenvalues - Prints eigenvalues to stdout
89: Notes:
90: The number of eigenvalues estimated depends on the size of the Krylov space
91: generated during the `KSPSolve()`; for example, with
92: CG it corresponds to the number of CG iterations, for GMRES it is the number
93: of GMRES iterations SINCE the last restart. Any extra space in r[] and c[]
94: will be ignored.
96: `KSPComputeEigenvalues()` does not usually provide accurate estimates; it is
97: intended only for assistance in understanding the convergence of iterative
98: methods, not for eigenanalysis. For accurate computation of eigenvalues we recommend using
99: the excellent package SLEPc.
101: One must call `KSPSetComputeEigenvalues()` before calling `KSPSetUp()`
102: in order for this routine to work correctly.
104: Many users may just want to use the monitoring routine
105: `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
106: to print the singular values at each iteration of the linear solve.
108: `KSPComputeRitz()` provides estimates for both the eigenvalues and their corresponding eigenvectors.
110: Level: advanced
112: .seealso: [](chapter_ksp), `KSPSetComputeEigenvalues()`, `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`, `KSPComputeExtremeSingularValues()`, `KSP`, `KSPComputeRitz()`
113: @*/
114: PetscErrorCode KSPComputeEigenvalues(KSP ksp, PetscInt n, PetscReal r[], PetscReal c[], PetscInt *neig)
115: {
116: PetscFunctionBegin;
120: PetscCheck(n >= 0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Requested < 0 Eigenvalues");
122: PetscCheck(ksp->calc_sings, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Eigenvalues not requested before KSPSetUp()");
124: if (n && ksp->ops->computeeigenvalues) PetscUseTypeMethod(ksp, computeeigenvalues, n, r, c, neig);
125: else *neig = 0;
126: PetscFunctionReturn(PETSC_SUCCESS);
127: }
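/*
   Illustrative usage sketch (not part of this file): allocating the work arrays and calling
   KSPComputeEigenvalues() after a solve, as described in the manual page above; this assumes
   KSPSetComputeEigenvalues() was called before KSPSetUp(). The helper name and the array-size
   slack are hypothetical.
*/
static PetscErrorCode ExampleViewEigenvalueEstimates(KSP ksp)
{
  PetscReal *r, *c;
  PetscInt   n, neig, i;

  PetscFunctionBegin;
  PetscCall(KSPGetIterationNumber(ksp, &n));
  n += 2; /* any extra space in r[] and c[] is ignored */
  PetscCall(PetscMalloc2(n, &r, n, &c));
  PetscCall(KSPComputeEigenvalues(ksp, n, r, c, &neig));
  for (i = 0; i < neig; i++) PetscCall(PetscPrintf(PetscObjectComm((PetscObject)ksp), "eigenvalue estimate %" PetscInt_FMT ": %g + %gi\n", i, (double)r[i], (double)c[i]));
  PetscCall(PetscFree2(r, c));
  PetscFunctionReturn(PETSC_SUCCESS);
}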
129: /*@
130: KSPComputeRitz - Computes the Ritz or harmonic Ritz pairs associated with the eigenvalues that are
131: smallest or largest in modulus, for the preconditioned operator.
133: Not Collective
135: Input Parameters:
136: + ksp - iterative context obtained from `KSPCreate()`
137: . ritz - `PETSC_TRUE` or `PETSC_FALSE` for Ritz pairs or harmonic Ritz pairs, respectively
138: - small - `PETSC_TRUE` or `PETSC_FALSE` for smallest or largest (harmonic) Ritz values, respectively
140: Output Parameters:
141: + nrit - On input number of (harmonic) Ritz pairs to compute; on output, actual number of computed (harmonic) Ritz pairs
142: . S - an array of the Ritz vectors, pass in an array of vectors of size nrit
143: . tetar - real part of the Ritz values, pass in an array of size nrit
144: - tetai - imaginary part of the Ritz values, pass in an array of size nrit
146: Notes:
147: This only works with a `KSPType` of `KSPGMRES`.
149: One must call `KSPSetComputeRitz()` before calling `KSPSetUp()` in order for this routine to work correctly.
151: This routine must be called after `KSPSolve()`.
153: In GMRES, the (harmonic) Ritz pairs are computed from the Hessenberg matrix obtained during
154: the last complete cycle of the GMRES solve, or during the partial cycle if the solve ended before
155: a restart (that is a complete GMRES cycle was never achieved).
157: The number of (harmonic) Ritz pairs actually computed is less than or equal to the restart
158: parameter for GMRES if a complete cycle has been performed, and otherwise less than or equal to the number of GMRES
159: iterations.
161: `KSPComputeEigenvalues()` provides estimates for only the eigenvalues (Ritz values).
163: For real matrices, the (harmonic) Ritz pairs can be complex-valued. In such a case,
164: the routine selects the complex (harmonic) Ritz value and its conjugate, and two successive entries of the
165: vectors S are equal to the real and the imaginary parts of the associated vectors.
166: When PETSc has been built with complex scalars, the real and imaginary parts of the Ritz
167: values are still returned in tetar and tetai, as is done in `KSPComputeEigenvalues()`, but
168: the Ritz vectors S are complex.
170: The (harmonic) Ritz pairs are given in order of increasing (harmonic) Ritz values in modulus.
172: The Ritz pairs do not necessarily accurately reflect the eigenvalues and eigenvectors of the operator, consider the
173: excellent package `SLEPc` if accurate values are required.
175: Level: advanced
177: .seealso: [](chapter_ksp), `KSPSetComputeRitz()`, `KSP`, `KSPGMRES`, `KSPComputeEigenvalues()`, `KSPSetComputeSingularValues()`, `KSPMonitorSingularValue()`
178: @*/
179: PetscErrorCode KSPComputeRitz(KSP ksp, PetscBool ritz, PetscBool small, PetscInt *nrit, Vec S[], PetscReal tetar[], PetscReal tetai[])
180: {
181: PetscFunctionBegin;
183: PetscCheck(ksp->calc_ritz, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Ritz pairs not requested before KSPSetUp()");
184: PetscTryTypeMethod(ksp, computeritz, ritz, small, nrit, S, tetar, tetai);
185: PetscFunctionReturn(PETSC_SUCCESS);
186: }
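/*
   Illustrative usage sketch (not part of this file): requesting and retrieving (harmonic) Ritz
   pairs with GMRES, following the restrictions listed above. The helper name and the fixed
   request of 10 pairs are hypothetical; `b` and `x` are assumed compatible with the operators.
*/
static PetscErrorCode ExampleComputeRitzPairs(KSP ksp, Vec b, Vec x)
{
  Vec      *S;
  PetscReal tetar[10], tetai[10];
  PetscInt  nrit = 10, i;

  PetscFunctionBegin;
  PetscCall(KSPSetType(ksp, KSPGMRES));
  PetscCall(KSPSetComputeRitz(ksp, PETSC_TRUE)); /* must precede KSPSetUp() */
  PetscCall(KSPSetUp(ksp));
  PetscCall(KSPSolve(ksp, b, x));
  PetscCall(VecDuplicateVecs(x, 10, &S));
  PetscCall(KSPComputeRitz(ksp, PETSC_TRUE, PETSC_TRUE, &nrit, S, tetar, tetai)); /* nrit is reduced to the number actually computed */
  for (i = 0; i < nrit; i++) PetscCall(PetscPrintf(PetscObjectComm((PetscObject)ksp), "Ritz value %" PetscInt_FMT ": %g + %gi\n", i, (double)tetar[i], (double)tetai[i]));
  PetscCall(VecDestroyVecs(10, &S));
  PetscFunctionReturn(PETSC_SUCCESS);
}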
187: /*@
188: KSPSetUpOnBlocks - Sets up the preconditioner for each block in
189: the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
190: methods.
192: Collective
194: Input Parameter:
195: . ksp - the `KSP` context
197: Notes:
198: `KSPSetUpOnBlocks()` is a routine that the user can optionally call for
199: more precise profiling (via -log_view) of the setup phase for these
200: block preconditioners. If the user does not call `KSPSetUpOnBlocks()`,
201: it will automatically be called from within `KSPSolve()`.
203: Calling `KSPSetUpOnBlocks()` is the same as calling `PCSetUpOnBlocks()`
204: on the PC context within the `KSP` context.
206: Level: advanced
208: .seealso: [](chapter_ksp), `PCSetUpOnBlocks()`, `KSPSetUp()`, `PCSetUp()`, `KSP`
209: @*/
210: PetscErrorCode KSPSetUpOnBlocks(KSP ksp)
211: {
212: PC pc;
213: PCFailedReason pcreason;
215: PetscFunctionBegin;
217: level++;
218: PetscCall(KSPGetPC(ksp, &pc));
219: PetscCall(PCSetUpOnBlocks(pc));
220: PetscCall(PCGetFailedReasonRank(pc, &pcreason));
221: level--;
222: /*
223: This is tricky since only a subset of MPI ranks may set this; each KSPSolve_*() is responsible for checking
224: this flag and initializing an appropriate vector with VecSetInf() so that the first norm computation can
225: produce a result at KSPCheckNorm() thus communicating the known problem to all MPI ranks so they may
226: terminate the Krylov solve. For many KSP implementations this is handled within KSPInitialResidual()
227: */
228: if (pcreason) ksp->reason = KSP_DIVERGED_PC_FAILED;
229: PetscFunctionReturn(PETSC_SUCCESS);
230: }
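/*
   Illustrative usage sketch (not part of this file): calling the setup phases explicitly so
   that -log_view attributes the block preconditioner setup separately, as described above.
   The helper name is hypothetical and `ksp` is assumed to be fully configured.
*/
static PetscErrorCode ExampleProfiledSolve(KSP ksp, Vec b, Vec x)
{
  PetscFunctionBegin;
  PetscCall(KSPSetUp(ksp));         /* overall setup */
  PetscCall(KSPSetUpOnBlocks(ksp)); /* per-block PC setup; otherwise called from within KSPSolve() */
  PetscCall(KSPSolve(ksp, b, x));
  PetscFunctionReturn(PETSC_SUCCESS);
}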
232: /*@
233: KSPSetReusePreconditioner - reuse the current preconditioner, do not construct a new one even if the operator changes
235: Collective
237: Input Parameters:
238: + ksp - iterative context obtained from `KSPCreate()`
239: - flag - `PETSC_TRUE` to reuse the current preconditioner
241: Level: intermediate
243: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `PCSetReusePreconditioner()`, `KSP`
244: @*/
245: PetscErrorCode KSPSetReusePreconditioner(KSP ksp, PetscBool flag)
246: {
247: PC pc;
249: PetscFunctionBegin;
251: PetscCall(KSPGetPC(ksp, &pc));
252: PetscCall(PCSetReusePreconditioner(pc, flag));
253: PetscFunctionReturn(PETSC_SUCCESS);
254: }
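/*
   Illustrative usage sketch (not part of this file): reusing a preconditioner across solves
   while the operator changes, as enabled by the routine above. The helper name is hypothetical;
   the entries of `A` are assumed to be modified between the two solves.
*/
static PetscErrorCode ExampleReusePreconditioner(KSP ksp, Mat A, Vec b, Vec x)
{
  PetscFunctionBegin;
  PetscCall(KSPSetOperators(ksp, A, A));
  PetscCall(KSPSolve(ksp, b, x)); /* builds the preconditioner */
  PetscCall(KSPSetReusePreconditioner(ksp, PETSC_TRUE));
  /* ... modify the entries of A here ... */
  PetscCall(KSPSetOperators(ksp, A, A));
  PetscCall(KSPSolve(ksp, b, x)); /* reuses the preconditioner built above */
  PetscFunctionReturn(PETSC_SUCCESS);
}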
256: /*@
257: KSPGetReusePreconditioner - Determines if the `KSP` reuses the current preconditioner even if the operator in the preconditioner has changed.
259: Collective
261: Input Parameter:
262: . ksp - iterative context obtained from `KSPCreate()`
264: Output Parameter:
265: . flag - the boolean flag
267: Level: intermediate
269: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `KSPSetReusePreconditioner()`, `KSP`
270: @*/
271: PetscErrorCode KSPGetReusePreconditioner(KSP ksp, PetscBool *flag)
272: {
273: PetscFunctionBegin;
276: *flag = PETSC_FALSE;
277: if (ksp->pc) PetscCall(PCGetReusePreconditioner(ksp->pc, flag));
278: PetscFunctionReturn(PETSC_SUCCESS);
279: }
281: /*@
282: KSPSetSkipPCSetFromOptions - prevents `KSPSetFromOptions()` from calling `PCSetFromOptions()`. This is used when the same `PC` is shared by more than one `KSP`, so that its options are not reset for each `KSP`
284: Collective
286: Input Parameters:
287: + ksp - iterative context obtained from `KSPCreate()`
288: - flag - `PETSC_TRUE` to skip calling `PCSetFromOptions()`
290: Level: intermediate
292: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `PCSetReusePreconditioner()`, `KSP`
293: @*/
294: PetscErrorCode KSPSetSkipPCSetFromOptions(KSP ksp, PetscBool flag)
295: {
296: PetscFunctionBegin;
298: ksp->skippcsetfromoptions = flag;
299: PetscFunctionReturn(PETSC_SUCCESS);
300: }
302: /*@
303: KSPSetUp - Sets up the internal data structures for the
304: later use of an iterative solver.
306: Collective
308: Input Parameter:
309: . ksp - iterative context obtained from `KSPCreate()`
311: Level: developer
313: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSolve()`, `KSPDestroy()`, `KSP`
314: @*/
315: PetscErrorCode KSPSetUp(KSP ksp)
316: {
317: Mat A, B;
318: Mat mat, pmat;
319: MatNullSpace nullsp;
320: PCFailedReason pcreason;
322: PetscFunctionBegin;
324: level++;
326: /* reset the convergence flag from the previous solves */
327: ksp->reason = KSP_CONVERGED_ITERATING;
329: if (!((PetscObject)ksp)->type_name) PetscCall(KSPSetType(ksp, KSPGMRES));
330: PetscCall(KSPSetUpNorms_Private(ksp, PETSC_TRUE, &ksp->normtype, &ksp->pc_side));
332: if (ksp->dmActive && !ksp->setupstage) {
333: /* first time in so build matrix and vector data structures using DM */
334: if (!ksp->vec_rhs) PetscCall(DMCreateGlobalVector(ksp->dm, &ksp->vec_rhs));
335: if (!ksp->vec_sol) PetscCall(DMCreateGlobalVector(ksp->dm, &ksp->vec_sol));
336: PetscCall(DMCreateMatrix(ksp->dm, &A));
337: PetscCall(KSPSetOperators(ksp, A, A));
338: PetscCall(PetscObjectDereference((PetscObject)A));
339: }
341: if (ksp->dmActive) {
342: DMKSP kdm;
343: PetscCall(DMGetDMKSP(ksp->dm, &kdm));
345: if (kdm->ops->computeinitialguess && ksp->setupstage != KSP_SETUP_NEWRHS) {
346: /* only computes initial guess the first time through */
347: PetscCallBack("KSP callback initial guess", (*kdm->ops->computeinitialguess)(ksp, ksp->vec_sol, kdm->initialguessctx));
348: PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
349: }
350: if (kdm->ops->computerhs) PetscCallBack("KSP callback rhs", (*kdm->ops->computerhs)(ksp, ksp->vec_rhs, kdm->rhsctx));
352: if (ksp->setupstage != KSP_SETUP_NEWRHS) {
353: PetscCheck(kdm->ops->computeoperators, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "You called KSPSetDM() but did not use DMKSPSetComputeOperators() or KSPSetDMActive(ksp,PETSC_FALSE);");
354: PetscCall(KSPGetOperators(ksp, &A, &B));
355: PetscCallBack("KSP callback operators", (*kdm->ops->computeoperators)(ksp, A, B, kdm->operatorsctx));
356: }
357: }
359: if (ksp->setupstage == KSP_SETUP_NEWRHS) {
360: level--;
361: PetscFunctionReturn(PETSC_SUCCESS);
362: }
363: PetscCall(PetscLogEventBegin(KSP_SetUp, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
365: switch (ksp->setupstage) {
366: case KSP_SETUP_NEW:
367: PetscUseTypeMethod(ksp, setup);
368: break;
369: case KSP_SETUP_NEWMATRIX: { /* This should be replaced with a more general mechanism */
370: if (ksp->setupnewmatrix) PetscUseTypeMethod(ksp, setup);
371: } break;
372: default:
373: break;
374: }
376: if (!ksp->pc) PetscCall(KSPGetPC(ksp, &ksp->pc));
377: PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
378: /* scale the matrix if requested */
379: if (ksp->dscale) {
380: PetscScalar *xx;
381: PetscInt i, n;
382: PetscBool zeroflag = PETSC_FALSE;
383: if (!ksp->pc) PetscCall(KSPGetPC(ksp, &ksp->pc));
384: if (!ksp->diagonal) { /* allocate vector to hold diagonal */
385: PetscCall(MatCreateVecs(pmat, &ksp->diagonal, NULL));
386: }
387: PetscCall(MatGetDiagonal(pmat, ksp->diagonal));
388: PetscCall(VecGetLocalSize(ksp->diagonal, &n));
389: PetscCall(VecGetArray(ksp->diagonal, &xx));
390: for (i = 0; i < n; i++) {
391: if (xx[i] != 0.0) xx[i] = 1.0 / PetscSqrtReal(PetscAbsScalar(xx[i]));
392: else {
393: xx[i] = 1.0;
394: zeroflag = PETSC_TRUE;
395: }
396: }
397: PetscCall(VecRestoreArray(ksp->diagonal, &xx));
398: if (zeroflag) PetscCall(PetscInfo(ksp, "Zero detected in diagonal of matrix, using 1 at those locations\n"));
399: PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
400: if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
401: ksp->dscalefix2 = PETSC_FALSE;
402: }
403: PetscCall(PetscLogEventEnd(KSP_SetUp, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
404: PetscCall(PCSetErrorIfFailure(ksp->pc, ksp->errorifnotconverged));
405: PetscCall(PCSetUp(ksp->pc));
406: PetscCall(PCGetFailedReasonRank(ksp->pc, &pcreason));
407: /* TODO: this code was wrong and is still wrong, there is no way to propagate the failure to all processes; there is no code to handle a ksp->reason on only some ranks */
408: if (pcreason) ksp->reason = KSP_DIVERGED_PC_FAILED;
410: PetscCall(MatGetNullSpace(mat, &nullsp));
411: if (nullsp) {
412: PetscBool test = PETSC_FALSE;
413: PetscCall(PetscOptionsGetBool(((PetscObject)ksp)->options, ((PetscObject)ksp)->prefix, "-ksp_test_null_space", &test, NULL));
414: if (test) PetscCall(MatNullSpaceTest(nullsp, mat, NULL));
415: }
416: ksp->setupstage = KSP_SETUP_NEWRHS;
417: level--;
418: PetscFunctionReturn(PETSC_SUCCESS);
419: }
421: /*@C
422: KSPConvergedReasonView - Displays the reason a `KSP` solve converged or diverged to a viewer
424: Collective
426: Input Parameters:
427: + ksp - iterative context obtained from `KSPCreate()`
428: - viewer - the viewer to display the reason
430: Options Database Keys:
431: + -ksp_converged_reason - print the reason for convergence or divergence, along with the number of iterations
432: - -ksp_converged_reason ::failed - print the reason and number of iterations only when the solve diverged
434: Notes:
435: To change the format of the output, call `PetscViewerPushFormat`(viewer,format) before this call. Use `PETSC_VIEWER_DEFAULT` for the default,
436: or `PETSC_VIEWER_FAILED` to display the reason only if the solve failed.
438: Level: beginner
440: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
441: `KSPSolveTranspose()`, `KSPGetIterationNumber()`, `KSP`, `KSPGetConvergedReason()`, `PetscViewerPushFormat()`, `PetscViewerPopFormat()`
442: @*/
443: PetscErrorCode KSPConvergedReasonView(KSP ksp, PetscViewer viewer)
444: {
445: PetscBool isAscii;
446: PetscViewerFormat format;
448: PetscFunctionBegin;
449: if (!viewer) viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
450: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isAscii));
451: if (isAscii) {
452: PetscCall(PetscViewerGetFormat(viewer, &format));
453: PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ksp)->tablevel));
454: if (ksp->reason > 0 && format != PETSC_VIEWER_FAILED) {
455: if (((PetscObject)ksp)->prefix) {
456: PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve converged due to %s iterations %" PetscInt_FMT "\n", ((PetscObject)ksp)->prefix, KSPConvergedReasons[ksp->reason], ksp->its));
457: } else {
458: PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve converged due to %s iterations %" PetscInt_FMT "\n", KSPConvergedReasons[ksp->reason], ksp->its));
459: }
460: } else if (ksp->reason <= 0) {
461: if (((PetscObject)ksp)->prefix) {
462: PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve did not converge due to %s iterations %" PetscInt_FMT "\n", ((PetscObject)ksp)->prefix, KSPConvergedReasons[ksp->reason], ksp->its));
463: } else {
464: PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve did not converge due to %s iterations %" PetscInt_FMT "\n", KSPConvergedReasons[ksp->reason], ksp->its));
465: }
466: if (ksp->reason == KSP_DIVERGED_PC_FAILED) {
467: PCFailedReason reason;
468: PetscCall(PCGetFailedReason(ksp->pc, &reason));
469: PetscCall(PetscViewerASCIIPrintf(viewer, " PC failed due to %s \n", PCFailedReasons[reason]));
470: }
471: }
472: PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ksp)->tablevel));
473: }
474: PetscFunctionReturn(PETSC_SUCCESS);
475: }
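/*
   Illustrative usage sketch (not part of this file): viewing the converged reason only on
   failure by pushing the PETSC_VIEWER_FAILED format, as mentioned in the notes above. The
   helper name is hypothetical.
*/
static PetscErrorCode ExampleReportFailuresOnly(KSP ksp)
{
  PetscViewer viewer;

  PetscFunctionBegin;
  viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
  PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_FAILED));
  PetscCall(KSPConvergedReasonView(ksp, viewer));
  PetscCall(PetscViewerPopFormat(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}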
477: /*@C
478: KSPConvergedReasonViewSet - Sets an ADDITIONAL function that is to be used at the
479: end of the linear solve to display the convergence reason of the linear solver.
481: Logically Collective
483: Input Parameters:
484: + ksp - the `KSP` context
485: . f - the ksp converged reason view function
486: . vctx - [optional] user-defined context for private data for the
487: ksp converged reason view routine (use NULL if no context is desired)
488: - reasonviewdestroy - [optional] routine that frees reasonview context
489: (may be NULL)
491: Options Database Keys:
492: + -ksp_converged_reason - sets a default `KSPConvergedReasonView()`
493: - -ksp_converged_reason_view_cancel - cancels all converged reason viewers that have
494: been hardwired into a code by
495: calls to `KSPConvergedReasonViewSet()`, but
496: does not cancel those set via
497: the options database.
499: Notes:
500: Several different converged reason view routines may be set by calling
501: `KSPConvergedReasonViewSet()` multiple times; all will be called in the
502: order in which they were set.
504: Level: intermediate
506: .seealso: [](chapter_ksp), `KSPConvergedReasonView()`, `KSPConvergedReasonViewCancel()`
507: @*/
508: PetscErrorCode KSPConvergedReasonViewSet(KSP ksp, PetscErrorCode (*f)(KSP, void *), void *vctx, PetscErrorCode (*reasonviewdestroy)(void **))
509: {
510: PetscInt i;
511: PetscBool identical;
513: PetscFunctionBegin;
515: for (i = 0; i < ksp->numberreasonviews; i++) {
516: PetscCall(PetscMonitorCompare((PetscErrorCode(*)(void))f, vctx, reasonviewdestroy, (PetscErrorCode(*)(void))ksp->reasonview[i], ksp->reasonviewcontext[i], ksp->reasonviewdestroy[i], &identical));
517: if (identical) PetscFunctionReturn(PETSC_SUCCESS);
518: }
519: PetscCheck(ksp->numberreasonviews < MAXKSPREASONVIEWS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many KSP reasonview set");
520: ksp->reasonview[ksp->numberreasonviews] = f;
521: ksp->reasonviewdestroy[ksp->numberreasonviews] = reasonviewdestroy;
522: ksp->reasonviewcontext[ksp->numberreasonviews++] = (void *)vctx;
523: PetscFunctionReturn(PETSC_SUCCESS);
524: }
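/*
   Illustrative usage sketch (not part of this file): registering an additional converged-reason
   viewer with the routine above. The callback and helper names are hypothetical.
*/
static PetscErrorCode MyReasonView(KSP ksp, void *ctx)
{
  KSPConvergedReason reason;

  PetscFunctionBegin;
  PetscCall(KSPGetConvergedReason(ksp, &reason));
  PetscCall(PetscPrintf(PetscObjectComm((PetscObject)ksp), "custom viewer: %s\n", KSPConvergedReasons[reason]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode ExampleRegisterReasonView(KSP ksp)
{
  PetscFunctionBegin;
  PetscCall(KSPConvergedReasonViewSet(ksp, MyReasonView, NULL, NULL)); /* no context, no destroy routine */
  PetscFunctionReturn(PETSC_SUCCESS);
}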
526: /*@
527: KSPConvergedReasonViewCancel - Clears all the reasonview functions for a `KSP` object.
529: Collective
531: Input Parameter:
532: . ksp - iterative context obtained from `KSPCreate()`
534: Level: intermediate
536: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPDestroy()`, `KSPReset()`
537: @*/
538: PetscErrorCode KSPConvergedReasonViewCancel(KSP ksp)
539: {
540: PetscInt i;
542: PetscFunctionBegin;
544: for (i = 0; i < ksp->numberreasonviews; i++) {
545: if (ksp->reasonviewdestroy[i]) PetscCall((*ksp->reasonviewdestroy[i])(&ksp->reasonviewcontext[i]));
546: }
547: ksp->numberreasonviews = 0;
548: PetscFunctionReturn(PETSC_SUCCESS);
549: }
551: /*@
552: KSPConvergedReasonViewFromOptions - Processes command line options to determine if/how the `KSPConvergedReason` is to be viewed.
554: Collective
556: Input Parameter:
557: . ksp - the `KSP` object
559: Level: intermediate
561: .seealso: [](chapter_ksp), `KSPConvergedReasonView()`
562: @*/
563: PetscErrorCode KSPConvergedReasonViewFromOptions(KSP ksp)
564: {
565: PetscViewer viewer;
566: PetscBool flg;
567: PetscViewerFormat format;
568: PetscInt i;
570: PetscFunctionBegin;
572: /* Call all user-provided reason view routines */
573: for (i = 0; i < ksp->numberreasonviews; i++) PetscCall((*ksp->reasonview[i])(ksp, ksp->reasonviewcontext[i]));
575: /* Call the default PETSc routine */
576: PetscCall(PetscOptionsGetViewer(PetscObjectComm((PetscObject)ksp), ((PetscObject)ksp)->options, ((PetscObject)ksp)->prefix, "-ksp_converged_reason", &viewer, &format, &flg));
577: if (flg) {
578: PetscCall(PetscViewerPushFormat(viewer, format));
579: PetscCall(KSPConvergedReasonView(ksp, viewer));
580: PetscCall(PetscViewerPopFormat(viewer));
581: PetscCall(PetscViewerDestroy(&viewer));
582: }
583: PetscFunctionReturn(PETSC_SUCCESS);
584: }
586: /*@C
587: KSPConvergedRateView - Displays the convergence rate, along with the reason a `KSP` solve converged or diverged, to a viewer
589: Collective
591: Input Parameters:
592: + ksp - iterative context obtained from `KSPCreate()`
593: - viewer - the viewer to display the reason
595: Options Database Keys:
596: . -ksp_converged_rate - print reason for convergence or divergence and the convergence rate (or 0.0 for divergence)
598: Notes:
599: To change the format of the output, call PetscViewerPushFormat(viewer,format) before this call.
601: Suppose that the residual is reduced linearly, $r_k = c^k r_0$, which means $\log r_k = \log r_0 + k \log c$. After linear regression,
602: the slope is $\log c$. The coefficient of determination is given by $1 - \frac{\sum_i (y_i - f(x_i))^2}{\sum_i (y_i - \bar y)^2}$,
603: see also https://en.wikipedia.org/wiki/Coefficient_of_determination
605: Level: intermediate
607: .seealso: [](chapter_ksp), `KSPConvergedReasonView()`, `KSPGetConvergedRate()`, `KSPSetTolerances()`, `KSPConvergedDefault()`
608: @*/
609: PetscErrorCode KSPConvergedRateView(KSP ksp, PetscViewer viewer)
610: {
611: PetscViewerFormat format;
612: PetscBool isAscii;
613: PetscReal rrate, rRsq, erate = 0.0, eRsq = 0.0;
614: PetscInt its;
615: const char *prefix, *reason = KSPConvergedReasons[ksp->reason];
617: PetscFunctionBegin;
618: PetscCall(KSPGetOptionsPrefix(ksp, &prefix));
619: PetscCall(KSPGetIterationNumber(ksp, &its));
620: PetscCall(KSPComputeConvergenceRate(ksp, &rrate, &rRsq, &erate, &eRsq));
621: if (!viewer) viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
622: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isAscii));
623: if (isAscii) {
624: PetscCall(PetscViewerGetFormat(viewer, &format));
625: PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ksp)->tablevel));
626: if (ksp->reason > 0) {
627: if (prefix) PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve converged due to %s iterations %" PetscInt_FMT, prefix, reason, its));
628: else PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve converged due to %s iterations %" PetscInt_FMT, reason, its));
629: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
630: if (rRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " res rate %g R^2 %g", (double)rrate, (double)rRsq));
631: if (eRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " error rate %g R^2 %g", (double)erate, (double)eRsq));
632: PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
633: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
634: } else if (ksp->reason <= 0) {
635: if (prefix) PetscCall(PetscViewerASCIIPrintf(viewer, "Linear %s solve did not converge due to %s iterations %" PetscInt_FMT, prefix, reason, its));
636: else PetscCall(PetscViewerASCIIPrintf(viewer, "Linear solve did not converge due to %s iterations %" PetscInt_FMT, reason, its));
637: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
638: if (rRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " res rate %g R^2 %g", (double)rrate, (double)rRsq));
639: if (eRsq >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " error rate %g R^2 %g", (double)erate, (double)eRsq));
640: PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
641: PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
642: if (ksp->reason == KSP_DIVERGED_PC_FAILED) {
643: PCFailedReason reason;
644: PetscCall(PCGetFailedReason(ksp->pc, &reason));
645: PetscCall(PetscViewerASCIIPrintf(viewer, " PC failed due to %s \n", PCFailedReasons[reason]));
646: }
647: }
648: PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ksp)->tablevel));
649: }
650: PetscFunctionReturn(PETSC_SUCCESS);
651: }
653: #include <petscdraw.h>
655: static PetscErrorCode KSPViewEigenvalues_Internal(KSP ksp, PetscBool isExplicit, PetscViewer viewer, PetscViewerFormat format)
656: {
657: PetscReal *r, *c;
658: PetscInt n, i, neig;
659: PetscBool isascii, isdraw;
660: PetscMPIInt rank;
662: PetscFunctionBegin;
663: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)ksp), &rank));
664: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
665: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
666: if (isExplicit) {
667: PetscCall(VecGetSize(ksp->vec_sol, &n));
668: PetscCall(PetscMalloc2(n, &r, n, &c));
669: PetscCall(KSPComputeEigenvaluesExplicitly(ksp, n, r, c));
670: neig = n;
671: } else {
672: PetscInt nits;
674: PetscCall(KSPGetIterationNumber(ksp, &nits));
675: n = nits + 2;
676: if (!nits) {
677: PetscCall(PetscViewerASCIIPrintf(viewer, "Zero iterations in solver, cannot approximate any eigenvalues\n"));
678: PetscFunctionReturn(PETSC_SUCCESS);
679: }
680: PetscCall(PetscMalloc2(n, &r, n, &c));
681: PetscCall(KSPComputeEigenvalues(ksp, n, r, c, &neig));
682: }
683: if (isascii) {
684: PetscCall(PetscViewerASCIIPrintf(viewer, "%s computed eigenvalues\n", isExplicit ? "Explicitly" : "Iteratively"));
685: for (i = 0; i < neig; ++i) {
686: if (c[i] >= 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, "%g + %gi\n", (double)r[i], (double)c[i]));
687: else PetscCall(PetscViewerASCIIPrintf(viewer, "%g - %gi\n", (double)r[i], -(double)c[i]));
688: }
689: } else if (isdraw && rank == 0) {
690: PetscDraw draw;
691: PetscDrawSP drawsp;
693: if (format == PETSC_VIEWER_DRAW_CONTOUR) {
694: PetscCall(KSPPlotEigenContours_Private(ksp, neig, r, c));
695: } else {
696: PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
697: PetscCall(PetscDrawSPCreate(draw, 1, &drawsp));
698: PetscCall(PetscDrawSPReset(drawsp));
699: for (i = 0; i < neig; ++i) PetscCall(PetscDrawSPAddPoint(drawsp, r + i, c + i));
700: PetscCall(PetscDrawSPDraw(drawsp, PETSC_TRUE));
701: PetscCall(PetscDrawSPSave(drawsp));
702: PetscCall(PetscDrawSPDestroy(&drawsp));
703: }
704: }
705: PetscCall(PetscFree2(r, c));
706: PetscFunctionReturn(PETSC_SUCCESS);
707: }
709: static PetscErrorCode KSPViewSingularvalues_Internal(KSP ksp, PetscViewer viewer, PetscViewerFormat format)
710: {
711: PetscReal smax, smin;
712: PetscInt nits;
713: PetscBool isascii;
715: PetscFunctionBegin;
716: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
717: PetscCall(KSPGetIterationNumber(ksp, &nits));
718: if (!nits) {
719: PetscCall(PetscViewerASCIIPrintf(viewer, "Zero iterations in solver, cannot approximate any singular values\n"));
720: PetscFunctionReturn(PETSC_SUCCESS);
721: }
722: PetscCall(KSPComputeExtremeSingularValues(ksp, &smax, &smin));
723: if (isascii) PetscCall(PetscViewerASCIIPrintf(viewer, "Iteratively computed extreme singular values: max %g min %g max/min %g\n", (double)smax, (double)smin, (double)(smax / smin)));
724: PetscFunctionReturn(PETSC_SUCCESS);
725: }
727: static PetscErrorCode KSPViewFinalResidual_Internal(KSP ksp, PetscViewer viewer, PetscViewerFormat format)
728: {
729: PetscBool isascii;
731: PetscFunctionBegin;
732: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
733: PetscCheck(!ksp->dscale || ksp->dscalefix, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONGSTATE, "Cannot compute final scale with -ksp_diagonal_scale except also with -ksp_diagonal_scale_fix");
734: if (isascii) {
735: Mat A;
736: Vec t;
737: PetscReal norm;
739: PetscCall(PCGetOperators(ksp->pc, &A, NULL));
740: PetscCall(VecDuplicate(ksp->vec_rhs, &t));
741: PetscCall(KSP_MatMult(ksp, A, ksp->vec_sol, t));
742: PetscCall(VecAYPX(t, -1.0, ksp->vec_rhs));
743: PetscCall(VecNorm(t, NORM_2, &norm));
744: PetscCall(VecDestroy(&t));
745: PetscCall(PetscViewerASCIIPrintf(viewer, "KSP final norm of residual %g\n", (double)norm));
746: }
747: PetscFunctionReturn(PETSC_SUCCESS);
748: }
750: static PetscErrorCode KSPMonitorPauseFinal_Internal(KSP ksp)
751: {
752: PetscInt i;
754: PetscFunctionBegin;
755: if (!ksp->pauseFinal) PetscFunctionReturn(PETSC_SUCCESS);
756: for (i = 0; i < ksp->numbermonitors; ++i) {
757: PetscViewerAndFormat *vf = (PetscViewerAndFormat *)ksp->monitorcontext[i];
758: PetscDraw draw;
759: PetscReal lpause;
761: if (!vf) continue;
762: if (vf->lg) {
763: if (!PetscCheckPointer(vf->lg, PETSC_OBJECT)) continue;
764: if (((PetscObject)vf->lg)->classid != PETSC_DRAWLG_CLASSID) continue;
765: PetscCall(PetscDrawLGGetDraw(vf->lg, &draw));
766: PetscCall(PetscDrawGetPause(draw, &lpause));
767: PetscCall(PetscDrawSetPause(draw, -1.0));
768: PetscCall(PetscDrawPause(draw));
769: PetscCall(PetscDrawSetPause(draw, lpause));
770: } else {
771: PetscBool isdraw;
773: if (!PetscCheckPointer(vf->viewer, PETSC_OBJECT)) continue;
774: if (((PetscObject)vf->viewer)->classid != PETSC_VIEWER_CLASSID) continue;
775: PetscCall(PetscObjectTypeCompare((PetscObject)vf->viewer, PETSCVIEWERDRAW, &isdraw));
776: if (!isdraw) continue;
777: PetscCall(PetscViewerDrawGetDraw(vf->viewer, 0, &draw));
778: PetscCall(PetscDrawGetPause(draw, &lpause));
779: PetscCall(PetscDrawSetPause(draw, -1.0));
780: PetscCall(PetscDrawPause(draw));
781: PetscCall(PetscDrawSetPause(draw, lpause));
782: }
783: }
784: PetscFunctionReturn(PETSC_SUCCESS);
785: }
787: static PetscErrorCode KSPSolve_Private(KSP ksp, Vec b, Vec x)
788: {
789: PetscBool flg = PETSC_FALSE, inXisinB = PETSC_FALSE, guess_zero;
790: Mat mat, pmat;
791: MPI_Comm comm;
792: MatNullSpace nullsp;
793: Vec btmp, vec_rhs = NULL;
795: PetscFunctionBegin;
796: level++;
797: comm = PetscObjectComm((PetscObject)ksp);
798: if (x && x == b) {
799: PetscCheck(ksp->guess_zero, comm, PETSC_ERR_ARG_INCOMP, "Cannot use x == b with nonzero initial guess");
800: PetscCall(VecDuplicate(b, &x));
801: inXisinB = PETSC_TRUE;
802: }
803: if (b) {
804: PetscCall(PetscObjectReference((PetscObject)b));
805: PetscCall(VecDestroy(&ksp->vec_rhs));
806: ksp->vec_rhs = b;
807: }
808: if (x) {
809: PetscCall(PetscObjectReference((PetscObject)x));
810: PetscCall(VecDestroy(&ksp->vec_sol));
811: ksp->vec_sol = x;
812: }
814: if (ksp->viewPre) PetscCall(ObjectView((PetscObject)ksp, ksp->viewerPre, ksp->formatPre));
816: if (ksp->presolve) PetscCall((*ksp->presolve)(ksp, ksp->vec_rhs, ksp->vec_sol, ksp->prectx));
818: /* reset the residual history list if requested */
819: if (ksp->res_hist_reset) ksp->res_hist_len = 0;
820: if (ksp->err_hist_reset) ksp->err_hist_len = 0;
822: /* KSPSetUp() scales the matrix if needed */
823: PetscCall(KSPSetUp(ksp));
824: PetscCall(KSPSetUpOnBlocks(ksp));
826: if (ksp->guess) {
827: PetscObjectState ostate, state;
829: PetscCall(KSPGuessSetUp(ksp->guess));
830: PetscCall(PetscObjectStateGet((PetscObject)ksp->vec_sol, &ostate));
831: PetscCall(KSPGuessFormGuess(ksp->guess, ksp->vec_rhs, ksp->vec_sol));
832: PetscCall(PetscObjectStateGet((PetscObject)ksp->vec_sol, &state));
833: if (state != ostate) {
834: ksp->guess_zero = PETSC_FALSE;
835: } else {
836: PetscCall(PetscInfo(ksp, "Using zero initial guess since the KSPGuess object did not change the vector\n"));
837: ksp->guess_zero = PETSC_TRUE;
838: }
839: }
841: PetscCall(VecSetErrorIfLocked(ksp->vec_sol, 3));
843: PetscCall(PetscLogEventBegin(KSP_Solve, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
844: PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
845: /* diagonal scale RHS if called for */
846: if (ksp->dscale) {
847: PetscCall(VecPointwiseMult(ksp->vec_rhs, ksp->vec_rhs, ksp->diagonal));
848: /* second time in, but matrix was scaled back to original */
849: if (ksp->dscalefix && ksp->dscalefix2) {
850: Mat mat, pmat;
852: PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
853: PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
854: if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
855: }
857: /* scale initial guess */
858: if (!ksp->guess_zero) {
859: if (!ksp->truediagonal) {
860: PetscCall(VecDuplicate(ksp->diagonal, &ksp->truediagonal));
861: PetscCall(VecCopy(ksp->diagonal, ksp->truediagonal));
862: PetscCall(VecReciprocal(ksp->truediagonal));
863: }
864: PetscCall(VecPointwiseMult(ksp->vec_sol, ksp->vec_sol, ksp->truediagonal));
865: }
866: }
867: PetscCall(PCPreSolve(ksp->pc, ksp));
869: if (ksp->guess_zero) PetscCall(VecSet(ksp->vec_sol, 0.0));
870: if (ksp->guess_knoll) { /* The Knoll trick is independent on the KSPGuess specified */
871: PetscCall(PCApply(ksp->pc, ksp->vec_rhs, ksp->vec_sol));
872: PetscCall(KSP_RemoveNullSpace(ksp, ksp->vec_sol));
873: ksp->guess_zero = PETSC_FALSE;
874: }
876: /* can we mark the initial guess as zero for this solve? */
877: guess_zero = ksp->guess_zero;
878: if (!ksp->guess_zero) {
879: PetscReal norm;
881: PetscCall(VecNormAvailable(ksp->vec_sol, NORM_2, &flg, &norm));
882: if (flg && !norm) ksp->guess_zero = PETSC_TRUE;
883: }
884: if (ksp->transpose_solve) {
885: PetscCall(MatGetNullSpace(pmat, &nullsp));
886: } else {
887: PetscCall(MatGetTransposeNullSpace(pmat, &nullsp));
888: }
889: if (nullsp) {
890: PetscCall(VecDuplicate(ksp->vec_rhs, &btmp));
891: PetscCall(VecCopy(ksp->vec_rhs, btmp));
892: PetscCall(MatNullSpaceRemove(nullsp, btmp));
893: vec_rhs = ksp->vec_rhs;
894: ksp->vec_rhs = btmp;
895: }
896: PetscCall(VecLockReadPush(ksp->vec_rhs));
897: PetscUseTypeMethod(ksp, solve);
898: PetscCall(KSPMonitorPauseFinal_Internal(ksp));
900: PetscCall(VecLockReadPop(ksp->vec_rhs));
901: if (nullsp) {
902: ksp->vec_rhs = vec_rhs;
903: PetscCall(VecDestroy(&btmp));
904: }
906: ksp->guess_zero = guess_zero;
908: PetscCheck(ksp->reason, comm, PETSC_ERR_PLIB, "Internal error, solver returned without setting converged reason");
909: ksp->totalits += ksp->its;
911: PetscCall(KSPConvergedReasonViewFromOptions(ksp));
913: if (ksp->viewRate) {
914: PetscCall(PetscViewerPushFormat(ksp->viewerRate, ksp->formatRate));
915: PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
916: PetscCall(PetscViewerPopFormat(ksp->viewerRate));
917: }
918: PetscCall(PCPostSolve(ksp->pc, ksp));
920: /* diagonal scale solution if called for */
921: if (ksp->dscale) {
922: PetscCall(VecPointwiseMult(ksp->vec_sol, ksp->vec_sol, ksp->diagonal));
923: /* unscale right hand side and matrix */
924: if (ksp->dscalefix) {
925: Mat mat, pmat;
927: PetscCall(VecReciprocal(ksp->diagonal));
928: PetscCall(VecPointwiseMult(ksp->vec_rhs, ksp->vec_rhs, ksp->diagonal));
929: PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
930: PetscCall(MatDiagonalScale(pmat, ksp->diagonal, ksp->diagonal));
931: if (mat != pmat) PetscCall(MatDiagonalScale(mat, ksp->diagonal, ksp->diagonal));
932: PetscCall(VecReciprocal(ksp->diagonal));
933: ksp->dscalefix2 = PETSC_TRUE;
934: }
935: }
936: PetscCall(PetscLogEventEnd(KSP_Solve, ksp, ksp->vec_rhs, ksp->vec_sol, 0));
937: if (ksp->guess) PetscCall(KSPGuessUpdate(ksp->guess, ksp->vec_rhs, ksp->vec_sol));
938: if (ksp->postsolve) PetscCall((*ksp->postsolve)(ksp, ksp->vec_rhs, ksp->vec_sol, ksp->postctx));
940: PetscCall(PCGetOperators(ksp->pc, &mat, &pmat));
941: if (ksp->viewEV) PetscCall(KSPViewEigenvalues_Internal(ksp, PETSC_FALSE, ksp->viewerEV, ksp->formatEV));
942: if (ksp->viewEVExp) PetscCall(KSPViewEigenvalues_Internal(ksp, PETSC_TRUE, ksp->viewerEVExp, ksp->formatEVExp));
943: if (ksp->viewSV) PetscCall(KSPViewSingularvalues_Internal(ksp, ksp->viewerSV, ksp->formatSV));
944: if (ksp->viewFinalRes) PetscCall(KSPViewFinalResidual_Internal(ksp, ksp->viewerFinalRes, ksp->formatFinalRes));
945: if (ksp->viewMat) PetscCall(ObjectView((PetscObject)mat, ksp->viewerMat, ksp->formatMat));
946: if (ksp->viewPMat) PetscCall(ObjectView((PetscObject)pmat, ksp->viewerPMat, ksp->formatPMat));
947: if (ksp->viewRhs) PetscCall(ObjectView((PetscObject)ksp->vec_rhs, ksp->viewerRhs, ksp->formatRhs));
948: if (ksp->viewSol) PetscCall(ObjectView((PetscObject)ksp->vec_sol, ksp->viewerSol, ksp->formatSol));
949: if (ksp->view) PetscCall(ObjectView((PetscObject)ksp, ksp->viewer, ksp->format));
950: if (ksp->viewDScale) PetscCall(ObjectView((PetscObject)ksp->diagonal, ksp->viewerDScale, ksp->formatDScale));
951: if (ksp->viewMatExp) {
952: Mat A, B;
954: PetscCall(PCGetOperators(ksp->pc, &A, NULL));
955: if (ksp->transpose_solve) {
956: Mat AT;
958: PetscCall(MatCreateTranspose(A, &AT));
959: PetscCall(MatComputeOperator(AT, MATAIJ, &B));
960: PetscCall(MatDestroy(&AT));
961: } else {
962: PetscCall(MatComputeOperator(A, MATAIJ, &B));
963: }
964: PetscCall(ObjectView((PetscObject)B, ksp->viewerMatExp, ksp->formatMatExp));
965: PetscCall(MatDestroy(&B));
966: }
967: if (ksp->viewPOpExp) {
968: Mat B;
970: PetscCall(KSPComputeOperator(ksp, MATAIJ, &B));
971: PetscCall(ObjectView((PetscObject)B, ksp->viewerPOpExp, ksp->formatPOpExp));
972: PetscCall(MatDestroy(&B));
973: }
975: if (inXisinB) {
976: PetscCall(VecCopy(x, b));
977: PetscCall(VecDestroy(&x));
978: }
979: PetscCall(PetscObjectSAWsBlock((PetscObject)ksp));
980: if (ksp->errorifnotconverged && ksp->reason < 0 && ((level == 1) || (ksp->reason != KSP_DIVERGED_ITS))) {
981: PCFailedReason reason;
983: PetscCheck(ksp->reason == KSP_DIVERGED_PC_FAILED, comm, PETSC_ERR_NOT_CONVERGED, "KSPSolve has not converged, reason %s", KSPConvergedReasons[ksp->reason]);
984: PetscCall(PCGetFailedReason(ksp->pc, &reason));
985: SETERRQ(comm, PETSC_ERR_NOT_CONVERGED, "KSPSolve has not converged, reason %s PC failed due to %s", KSPConvergedReasons[ksp->reason], PCFailedReasons[reason]);
986: }
987: level--;
988: PetscFunctionReturn(PETSC_SUCCESS);
989: }
991: /*@
992: KSPSolve - Solves a linear system.
994: Collective
996: Input Parameters:
997: + ksp - iterative context obtained from `KSPCreate()`
998: . b - the right hand side vector
999: - x - the solution (this may be the same vector as b, in which case b will be overwritten with the answer)
1001: Options Database Keys:
1002: + -ksp_view_eigenvalues - compute the preconditioned operator's eigenvalues
1003: . -ksp_view_eigenvalues_explicit - compute the eigenvalues by forming the dense operator and using LAPACK
1004: . -ksp_view_mat binary - save matrix to the default binary viewer
1005: . -ksp_view_pmat binary - save matrix used to build preconditioner to the default binary viewer
1006: . -ksp_view_rhs binary - save right hand side vector to the default binary viewer
1007: . -ksp_view_solution binary - save computed solution vector to the default binary viewer
1008: (can be read later with src/ksp/tutorials/ex10.c for testing solvers)
1009: . -ksp_view_mat_explicit - for matrix-free operators, computes the matrix entries and views them
1010: . -ksp_view_preconditioned_operator_explicit - computes the product of the preconditioner and matrix as an explicit matrix and views it
1011: . -ksp_converged_reason - print reason for converged or diverged, also prints number of iterations
1012: . -ksp_view_final_residual - print 2-norm of true linear system residual at the end of the solution process
1013: . -ksp_error_if_not_converged - stop the program as soon as an error is detected in a `KSPSolve()`
1014: - -ksp_view - print the ksp data structure at the end of the system solution
1016: Notes:
1018: If one uses `KSPSetDM()` then x or b need not be passed. Use `KSPGetSolution()` to access the solution in this case.
1020: The operator is specified with `KSPSetOperators()`.
1022: `KSPSolve()` will normally return without generating an error regardless of whether the linear system was solved or whether constructing the preconditioner failed.
1023: Call `KSPGetConvergedReason()` to determine if the solver converged or failed and why. The option -ksp_error_if_not_converged or the function `KSPSetErrorIfNotConverged()`
1024: will cause `KSPSolve()` to error as soon as an error occurs in the linear solver. In inner `KSPSolve()` calls, `KSP_DIVERGED_ITS` is not treated as an error because, when using nested solvers,
1025: it may be acceptable for the inner solvers in the preconditioner not to converge during the solution process.
1027: The number of iterations can be obtained from `KSPGetIterationNumber()`.
1029: If you provide a matrix for which `MatSetNullSpace()` and `MatSetTransposeNullSpace()` have been called, this will use that information to solve singular systems
1030: in the least squares sense with a norm-minimizing solution.
1032: A x = b, where b = b_p + b_t and b_t is not in the range of A (and hence, by the fundamental theorem of linear algebra, is in the nullspace(A')); see `MatSetNullSpace()`.
1034: `KSP` first removes b_t, producing the linear system A x = b_p (which has multiple solutions), and solves this to find the ||x||-minimizing solution (and hence
1035: the solution x orthogonal to the nullspace(A)). The algorithm simply removes the nullspace(A) from the search direction at each iteration of the Krylov method,
1036: so the solution, which is a linear combination of the search directions, has no component in the nullspace(A).
1038: We recommend always using `KSPGMRES` for such singular systems.
1039: If nullspace(A) = nullspace(A') (note that symmetric matrices always satisfy this property), then both left and right preconditioning will work.
1040: If nullspace(A) != nullspace(A'), then left preconditioning will work but right preconditioning may not (or it may).
1042: Developer Note: The reason we cannot always solve nullspace(A) != nullspace(A') systems with right preconditioning is that we would need to remove, at each iteration,
1043: the nullspace(AB) from the search direction. While we know the nullspace(A), the nullspace(AB) equals B^-1 times the nullspace(A); except for trivial preconditioners
1044: such as diagonal scaling, we cannot apply the inverse of the preconditioner to a vector and thus cannot compute the nullspace(AB).
1046: If using a direct method (e.g., via the `KSP` solver
1047: `KSPPREONLY` and a preconditioner such as `PCLU` or `PCILU`),
1048: then its=1. See `KSPSetTolerances()` and `KSPConvergedDefault()`
1049: for more details.
1051: Understanding Convergence:
1052: The routines `KSPMonitorSet()`, `KSPComputeEigenvalues()`, and
1053: `KSPComputeEigenvaluesExplicitly()` provide information on additional
1054: options to monitor convergence and print eigenvalue information.
1056: Level: beginner
1058: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
1059: `KSPSolveTranspose()`, `KSPGetIterationNumber()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatSetTransposeNullSpace()`, `KSP`,
1060: `KSPConvergedReasonView()`, `KSPCheckSolve()`, `KSPSetErrorIfNotConverged()`
1061: @*/
1062: PetscErrorCode KSPSolve(KSP ksp, Vec b, Vec x)
1063: {
1064: PetscFunctionBegin;
1068: ksp->transpose_solve = PETSC_FALSE;
1069: PetscCall(KSPSolve_Private(ksp, b, x));
1070: PetscFunctionReturn(PETSC_SUCCESS);
1071: }
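/*
   Illustrative usage sketch (not part of this file): attaching a constant null space so that
   KSPSolve() finds the minimum-norm least-squares solution described in the notes above. The
   helper name is hypothetical; `A` is assumed singular with the constant vector in its null
   space (e.g. a pure Neumann Laplacian).
*/
static PetscErrorCode ExampleSolveSingularSystem(KSP ksp, Mat A, Vec b, Vec x)
{
  MatNullSpace nullsp;

  PetscFunctionBegin;
  PetscCall(MatNullSpaceCreate(PetscObjectComm((PetscObject)A), PETSC_TRUE, 0, NULL, &nullsp));
  PetscCall(MatSetNullSpace(A, nullsp));
  PetscCall(MatNullSpaceDestroy(&nullsp)); /* the matrix keeps its own reference */
  PetscCall(KSPSetOperators(ksp, A, A));
  PetscCall(KSPSetType(ksp, KSPGMRES)); /* recommended above for singular systems */
  PetscCall(KSPSolve(ksp, b, x));
  PetscFunctionReturn(PETSC_SUCCESS);
}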
1073: /*@
1074: KSPSolveTranspose - Solves the transpose of a linear system.
1076: Collective
1078: Input Parameters:
1079: + ksp - iterative context obtained from `KSPCreate()`
1080: . b - right hand side vector
1081: - x - solution vector
1083: Notes:
1084: For complex numbers this solves the non-Hermitian transposed system.
1086: Developer Notes:
1087: We need to implement a `KSPSolveHermitianTranspose()`
1089: Level: developer
1091: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPDestroy()`, `KSPSetTolerances()`, `KSPConvergedDefault()`,
1092: `KSPSolve()`, `KSP`
1093: @*/
1094: PetscErrorCode KSPSolveTranspose(KSP ksp, Vec b, Vec x)
1095: {
1096: PetscFunctionBegin;
1100: if (ksp->transpose.use_explicittranspose) {
1101: Mat J, Jpre;
1102: PetscCall(KSPGetOperators(ksp, &J, &Jpre));
1103: if (!ksp->transpose.reuse_transpose) {
1104: PetscCall(MatTranspose(J, MAT_INITIAL_MATRIX, &ksp->transpose.AT));
1105: if (J != Jpre) PetscCall(MatTranspose(Jpre, MAT_INITIAL_MATRIX, &ksp->transpose.BT));
1106: ksp->transpose.reuse_transpose = PETSC_TRUE;
1107: } else {
1108: PetscCall(MatTranspose(J, MAT_REUSE_MATRIX, &ksp->transpose.AT));
1109: if (J != Jpre) PetscCall(MatTranspose(Jpre, MAT_REUSE_MATRIX, &ksp->transpose.BT));
1110: }
1111: if (J == Jpre && ksp->transpose.BT != ksp->transpose.AT) {
1112: PetscCall(PetscObjectReference((PetscObject)ksp->transpose.AT));
1113: ksp->transpose.BT = ksp->transpose.AT;
1114: }
1115: PetscCall(KSPSetOperators(ksp, ksp->transpose.AT, ksp->transpose.BT));
1116: } else {
1117: ksp->transpose_solve = PETSC_TRUE;
1118: }
1119: PetscCall(KSPSolve_Private(ksp, b, x));
1120: PetscFunctionReturn(PETSC_SUCCESS);
1121: }
1123: static PetscErrorCode KSPViewFinalMatResidual_Internal(KSP ksp, Mat B, Mat X, PetscViewer viewer, PetscViewerFormat format, PetscInt shift)
1124: {
1125: Mat A, R;
1126: PetscReal *norms;
1127: PetscInt i, N;
1128: PetscBool flg;
1130: PetscFunctionBegin;
1131: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &flg));
1132: if (flg) {
1133: PetscCall(PCGetOperators(ksp->pc, &A, NULL));
1134: PetscCall(MatMatMult(A, X, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &R));
1135: PetscCall(MatAYPX(R, -1.0, B, SAME_NONZERO_PATTERN));
1136: PetscCall(MatGetSize(R, NULL, &N));
1137: PetscCall(PetscMalloc1(N, &norms));
1138: PetscCall(MatGetColumnNorms(R, NORM_2, norms));
1139: PetscCall(MatDestroy(&R));
1140: for (i = 0; i < N; ++i) PetscCall(PetscViewerASCIIPrintf(viewer, "%s #%" PetscInt_FMT " %g\n", i == 0 ? "KSP final norm of residual" : " ", shift + i, (double)norms[i]));
1141: PetscCall(PetscFree(norms));
1142: }
1143: PetscFunctionReturn(PETSC_SUCCESS);
1144: }
1146: /*@
1147: KSPMatSolve - Solves a linear system with multiple right-hand sides stored as a MATDENSE. Unlike `KSPSolve()`, B and X must be different matrices.
1149: Input Parameters:
1150: + ksp - iterative context
1151: - B - block of right-hand sides
1153: Output Parameter:
1154: . X - block of solutions
1156: Notes:
1157: This is a stripped-down version of `KSPSolve()`, which only handles -ksp_view, -ksp_converged_reason, and -ksp_view_final_residual.
1159: Level: intermediate
1161: .seealso: [](chapter_ksp), `KSPSolve()`, `MatMatSolve()`, `MATDENSE`, `KSPHPDDM`, `PCBJACOBI`, `PCASM`
1162: @*/
1163: PetscErrorCode KSPMatSolve(KSP ksp, Mat B, Mat X)
1164: {
1165: Mat A, P, vB, vX;
1166: Vec cb, cx;
1167: PetscInt n1, N1, n2, N2, Bbn = PETSC_DECIDE;
1168: PetscBool match;
1170: PetscFunctionBegin;
1174: PetscCheckSameComm(ksp, 1, B, 2);
1175: PetscCheckSameComm(ksp, 1, X, 3);
1176: PetscCheckSameType(B, 2, X, 3);
1177: PetscCheck(B->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
1178: MatCheckPreallocated(X, 3);
1179: if (!X->assembled) {
1180: PetscCall(MatSetOption(X, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
1181: PetscCall(MatAssemblyBegin(X, MAT_FINAL_ASSEMBLY));
1182: PetscCall(MatAssemblyEnd(X, MAT_FINAL_ASSEMBLY));
1183: }
1184: PetscCheck(B != X, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_IDN, "B and X must be different matrices");
1185: PetscCall(KSPGetOperators(ksp, &A, &P));
1186: PetscCall(MatGetLocalSize(B, NULL, &n2));
1187: PetscCall(MatGetLocalSize(X, NULL, &n1));
1188: PetscCall(MatGetSize(B, NULL, &N2));
1189: PetscCall(MatGetSize(X, NULL, &N1));
1190: PetscCheck(n1 == n2 && N1 == N2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of right-hand sides (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and block of solutions (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1);
1191: PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)B, &match, MATSEQDENSE, MATMPIDENSE, ""));
1192: PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of right-hand sides not stored in a dense Mat");
1193: PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, ""));
1194: PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of solutions not stored in a dense Mat");
1195: PetscCall(KSPSetUp(ksp));
1196: PetscCall(KSPSetUpOnBlocks(ksp));
1197: if (ksp->ops->matsolve) {
1198: if (ksp->guess_zero) PetscCall(MatZeroEntries(X));
1199: PetscCall(PetscLogEventBegin(KSP_MatSolve, ksp, B, X, 0));
1200: PetscCall(KSPGetMatSolveBatchSize(ksp, &Bbn));
1201: /* by default, do a single solve with all columns */
1202: if (Bbn == PETSC_DECIDE) Bbn = N2;
1203: else PetscCheck(Bbn >= 1, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "KSPMatSolve() batch size %" PetscInt_FMT " must be positive", Bbn);
1204: PetscCall(PetscInfo(ksp, "KSP type %s solving using batches of width at most %" PetscInt_FMT "\n", ((PetscObject)ksp)->type_name, Bbn));
1205: /* if -ksp_matsolve_batch_size is greater than the actual number of columns, do a single solve with all columns */
1206: if (Bbn >= N2) {
1207: PetscUseTypeMethod(ksp, matsolve, B, X);
1208: if (ksp->viewFinalRes) PetscCall(KSPViewFinalMatResidual_Internal(ksp, B, X, ksp->viewerFinalRes, ksp->formatFinalRes, 0));
1210: PetscCall(KSPConvergedReasonViewFromOptions(ksp));
1212: if (ksp->viewRate) {
1213: PetscCall(PetscViewerPushFormat(ksp->viewerRate, PETSC_VIEWER_DEFAULT));
1214: PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
1215: PetscCall(PetscViewerPopFormat(ksp->viewerRate));
1216: }
1217: } else {
1218: for (n2 = 0; n2 < N2; n2 += Bbn) {
1219: PetscCall(MatDenseGetSubMatrix(B, PETSC_DECIDE, PETSC_DECIDE, n2, PetscMin(n2 + Bbn, N2), &vB));
1220: PetscCall(MatDenseGetSubMatrix(X, PETSC_DECIDE, PETSC_DECIDE, n2, PetscMin(n2 + Bbn, N2), &vX));
1221: PetscUseTypeMethod(ksp, matsolve, vB, vX);
1222: if (ksp->viewFinalRes) PetscCall(KSPViewFinalMatResidual_Internal(ksp, vB, vX, ksp->viewerFinalRes, ksp->formatFinalRes, n2));
1224: PetscCall(KSPConvergedReasonViewFromOptions(ksp));
1226: if (ksp->viewRate) {
1227: PetscCall(PetscViewerPushFormat(ksp->viewerRate, PETSC_VIEWER_DEFAULT));
1228: PetscCall(KSPConvergedRateView(ksp, ksp->viewerRate));
1229: PetscCall(PetscViewerPopFormat(ksp->viewerRate));
1230: }
1231: PetscCall(MatDenseRestoreSubMatrix(B, &vB));
1232: PetscCall(MatDenseRestoreSubMatrix(X, &vX));
1233: }
1234: }
1235: if (ksp->viewMat) PetscCall(ObjectView((PetscObject)A, ksp->viewerMat, ksp->formatMat));
1236: if (ksp->viewPMat) PetscCall(ObjectView((PetscObject)P, ksp->viewerPMat, ksp->formatPMat));
1237: if (ksp->viewRhs) PetscCall(ObjectView((PetscObject)B, ksp->viewerRhs, ksp->formatRhs));
1238: if (ksp->viewSol) PetscCall(ObjectView((PetscObject)X, ksp->viewerSol, ksp->formatSol));
1239: if (ksp->view) PetscCall(KSPView(ksp, ksp->viewer));
1240: PetscCall(PetscLogEventEnd(KSP_MatSolve, ksp, B, X, 0));
1241: } else {
1242: PetscCall(PetscInfo(ksp, "KSP type %s solving column by column\n", ((PetscObject)ksp)->type_name));
1243: for (n2 = 0; n2 < N2; ++n2) {
1244: PetscCall(MatDenseGetColumnVecRead(B, n2, &cb));
1245: PetscCall(MatDenseGetColumnVecWrite(X, n2, &cx));
1246: PetscCall(KSPSolve(ksp, cb, cx));
1247: PetscCall(MatDenseRestoreColumnVecWrite(X, n2, &cx));
1248: PetscCall(MatDenseRestoreColumnVecRead(B, n2, &cb));
1249: }
1250: }
1251: PetscFunctionReturn(PETSC_SUCCESS);
1252: }
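/*
   Illustrative usage sketch (not part of this file): solving for a block of right-hand sides
   with the routine above. The helper name and the batch size of 32 are hypothetical; `B` is
   assumed to be a MATDENSE whose row layout matches the operators already set on `ksp`.
*/
static PetscErrorCode ExampleBlockSolve(KSP ksp, Mat B)
{
  Mat X;

  PetscFunctionBegin;
  PetscCall(MatDuplicate(B, MAT_DO_NOT_COPY_VALUES, &X)); /* X must be a different dense Mat than B */
  PetscCall(KSPSetMatSolveBatchSize(ksp, 32));            /* optional: limit the number of columns per internal solve */
  PetscCall(KSPMatSolve(ksp, B, X));
  PetscCall(MatDestroy(&X));
  PetscFunctionReturn(PETSC_SUCCESS);
}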
1254: /*@
1255: KSPSetMatSolveBatchSize - Sets the maximum number of columns treated simultaneously in `KSPMatSolve()`.
1257: Logically Collective
1259: Input Parameters:
1260: + ksp - iterative context
1261: - bs - batch size
1263: Level: advanced
1265: .seealso: [](chapter_ksp), `KSPMatSolve()`, `KSPGetMatSolveBatchSize()`, `-mat_mumps_icntl_27`, `-matmatmult_Bbn`
1266: @*/
1267: PetscErrorCode KSPSetMatSolveBatchSize(KSP ksp, PetscInt bs)
1268: {
1269: PetscFunctionBegin;
1272: ksp->nmax = bs;
1273: PetscFunctionReturn(PETSC_SUCCESS);
1274: }
1276: /*@
1277: KSPGetMatSolveBatchSize - Gets the maximum number of columns treated simultaneously in `KSPMatSolve()`.
1279: Input Parameter:
1280: . ksp - iterative context
1282: Output Parameter:
1283: . bs - batch size
1285: Level: advanced
1287: .seealso: [](chapter_ksp), `KSPMatSolve()`, `KSPSetMatSolveBatchSize()`, `-mat_mumps_icntl_27`, `-matmatmult_Bbn`
1288: @*/
1289: PetscErrorCode KSPGetMatSolveBatchSize(KSP ksp, PetscInt *bs)
1290: {
1291: PetscFunctionBegin;
1294: *bs = ksp->nmax;
1295: PetscFunctionReturn(PETSC_SUCCESS);
1296: }
1298: /*@
1299: KSPResetViewers - Resets all the viewers set from the options database during `KSPSetFromOptions()`
1301: Collective
1303: Input Parameter:
1304: . ksp - iterative context obtained from `KSPCreate()`
1306: Level: beginner
1308: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSPSetFromOptions()`, `KSP`
1309: @*/
1310: PetscErrorCode KSPResetViewers(KSP ksp)
1311: {
1312: PetscFunctionBegin;
1314: if (!ksp) PetscFunctionReturn(PETSC_SUCCESS);
1315: PetscCall(PetscViewerDestroy(&ksp->viewer));
1316: PetscCall(PetscViewerDestroy(&ksp->viewerPre));
1317: PetscCall(PetscViewerDestroy(&ksp->viewerRate));
1318: PetscCall(PetscViewerDestroy(&ksp->viewerMat));
1319: PetscCall(PetscViewerDestroy(&ksp->viewerPMat));
1320: PetscCall(PetscViewerDestroy(&ksp->viewerRhs));
1321: PetscCall(PetscViewerDestroy(&ksp->viewerSol));
1322: PetscCall(PetscViewerDestroy(&ksp->viewerMatExp));
1323: PetscCall(PetscViewerDestroy(&ksp->viewerEV));
1324: PetscCall(PetscViewerDestroy(&ksp->viewerSV));
1325: PetscCall(PetscViewerDestroy(&ksp->viewerEVExp));
1326: PetscCall(PetscViewerDestroy(&ksp->viewerFinalRes));
1327: PetscCall(PetscViewerDestroy(&ksp->viewerPOpExp));
1328: PetscCall(PetscViewerDestroy(&ksp->viewerDScale));
1329: ksp->view = PETSC_FALSE;
1330: ksp->viewPre = PETSC_FALSE;
1331: ksp->viewMat = PETSC_FALSE;
1332: ksp->viewPMat = PETSC_FALSE;
1333: ksp->viewRhs = PETSC_FALSE;
1334: ksp->viewSol = PETSC_FALSE;
1335: ksp->viewMatExp = PETSC_FALSE;
1336: ksp->viewEV = PETSC_FALSE;
1337: ksp->viewSV = PETSC_FALSE;
1338: ksp->viewEVExp = PETSC_FALSE;
1339: ksp->viewFinalRes = PETSC_FALSE;
1340: ksp->viewPOpExp = PETSC_FALSE;
1341: ksp->viewDScale = PETSC_FALSE;
1342: PetscFunctionReturn(PETSC_SUCCESS);
1343: }
1345: /*@
1346: KSPReset - Resets a `KSP` context to the kspsetupcalled = 0 state and removes any allocated `Vec`s and `Mat`s
1348: Collective
1350: Input Parameter:
1351: . ksp - iterative context obtained from `KSPCreate()`
1353: Level: beginner
1355: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSP`
1356: @*/
1357: PetscErrorCode KSPReset(KSP ksp)
1358: {
1359: PetscFunctionBegin;
1361: if (!ksp) PetscFunctionReturn(PETSC_SUCCESS);
1362: PetscTryTypeMethod(ksp, reset);
1363: if (ksp->pc) PetscCall(PCReset(ksp->pc));
1364: if (ksp->guess) {
1365: KSPGuess guess = ksp->guess;
1366: PetscTryTypeMethod(guess, reset);
1367: }
1368: PetscCall(VecDestroyVecs(ksp->nwork, &ksp->work));
1369: PetscCall(VecDestroy(&ksp->vec_rhs));
1370: PetscCall(VecDestroy(&ksp->vec_sol));
1371: PetscCall(VecDestroy(&ksp->diagonal));
1372: PetscCall(VecDestroy(&ksp->truediagonal));
1374: PetscCall(KSPResetViewers(ksp));
1376: ksp->setupstage = KSP_SETUP_NEW;
1377: ksp->nmax = PETSC_DECIDE;
1378: PetscFunctionReturn(PETSC_SUCCESS);
1379: }
1381: /*@C
1382: KSPDestroy - Destroys `KSP` context.
1384: Collective
1386: Input Parameter:
1387: . ksp - iterative context obtained from `KSPCreate()`
1389: Level: beginner
1391: .seealso: [](chapter_ksp), `KSPCreate()`, `KSPSetUp()`, `KSPSolve()`, `KSP`
1392: @*/
1393: PetscErrorCode KSPDestroy(KSP *ksp)
1394: {
1395: PC pc;
1397: PetscFunctionBegin;
1398: if (!*ksp) PetscFunctionReturn(PETSC_SUCCESS);
1400: if (--((PetscObject)(*ksp))->refct > 0) {
1401: *ksp = NULL;
1402: PetscFunctionReturn(PETSC_SUCCESS);
1403: }
1405: PetscCall(PetscObjectSAWsViewOff((PetscObject)*ksp));
1407: /*
1408: Avoid a cascading call to PCReset(ksp->pc) from the following call:
1409: PCReset() shouldn't be called from KSPDestroy() as it is unprotected by pc's
1410: refcount (and may be shared, e.g., by other ksps).
1411: */
1412: pc = (*ksp)->pc;
1413: (*ksp)->pc = NULL;
1414: PetscCall(KSPReset((*ksp)));
1415: (*ksp)->pc = pc;
1416: PetscTryTypeMethod((*ksp), destroy);
1418: if ((*ksp)->transpose.use_explicittranspose) {
1419: PetscCall(MatDestroy(&(*ksp)->transpose.AT));
1420: PetscCall(MatDestroy(&(*ksp)->transpose.BT));
1421: (*ksp)->transpose.reuse_transpose = PETSC_FALSE;
1422: }
1424: PetscCall(KSPGuessDestroy(&(*ksp)->guess));
1425: PetscCall(DMDestroy(&(*ksp)->dm));
1426: PetscCall(PCDestroy(&(*ksp)->pc));
1427: PetscCall(PetscFree((*ksp)->res_hist_alloc));
1428: PetscCall(PetscFree((*ksp)->err_hist_alloc));
1429: if ((*ksp)->convergeddestroy) PetscCall((*(*ksp)->convergeddestroy)((*ksp)->cnvP));
1430: PetscCall(KSPMonitorCancel((*ksp)));
1431: PetscCall(KSPConvergedReasonViewCancel((*ksp)));
1432: PetscCall(PetscHeaderDestroy(ksp));
1433: PetscFunctionReturn(PETSC_SUCCESS);
1434: }
1436: /*@
1437: KSPSetPCSide - Sets the preconditioning side.
1439: Logically Collective
1441: Input Parameters:
1442: + ksp - iterative context obtained from `KSPCreate()`
1445: - side - the preconditioning side, where side is one of
1446: .vb
1447: PC_LEFT - left preconditioning (default)
1448: PC_RIGHT - right preconditioning
1449: PC_SYMMETRIC - symmetric preconditioning
1450: .ve
1452: Options Database Keys:
1453: . -ksp_pc_side <right,left,symmetric> - `KSP` preconditioner side
1455: Notes:
1456: Left preconditioning is used by default for most Krylov methods except `KSPFGMRES`, which only supports right preconditioning.
1458: For some methods, changing the side of the preconditioner also changes the norm type that is used; see `KSPSetNormType()`.
1460: Symmetric preconditioning is currently available only for the `KSPQCG` method. Note, however, that
1461: symmetric preconditioning can be emulated by using either right or left
1462: preconditioning together with a pre- or post-processing step.
1464: Setting the PC side often affects the default norm type. See `KSPSetNormType()` for details.
1466: Level: intermediate
1468: .seealso: [](chapter_ksp), `KSPGetPCSide()`, `KSPSetNormType()`, `KSPGetNormType()`, `KSP`
1469: @*/
1470: PetscErrorCode KSPSetPCSide(KSP ksp, PCSide side)
1471: {
1472: PetscFunctionBegin;
1475: ksp->pc_side = ksp->pc_side_set = side;
1476: PetscFunctionReturn(PETSC_SUCCESS);
1477: }
1479: /*@
1480: KSPGetPCSide - Gets the preconditioning side.
1482: Not Collective
1484: Input Parameter:
1485: . ksp - iterative context obtained from `KSPCreate()`
1487: Output Parameter:
1488: . side - the preconditioning side, where side is one of
1489: .vb
1490: PC_LEFT - left preconditioning (default)
1491: PC_RIGHT - right preconditioning
1492: PC_SYMMETRIC - symmetric preconditioning
1493: .ve
1495: Level: intermediate
1497: .seealso: [](chapter_ksp), `KSPSetPCSide()`, `KSP`
1498: @*/
1499: PetscErrorCode KSPGetPCSide(KSP ksp, PCSide *side)
1500: {
1501: PetscFunctionBegin;
1504: PetscCall(KSPSetUpNorms_Private(ksp, PETSC_TRUE, &ksp->normtype, &ksp->pc_side));
1505: *side = ksp->pc_side;
1506: PetscFunctionReturn(PETSC_SUCCESS);
1507: }
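/*
   Editor's note: a short sketch (not from the original source) of selecting right preconditioning
   before KSPSetUp()/KSPSolve(); `ksp` is assumed to exist already. The same choice can be made on
   the command line with -ksp_pc_side right.

     PCSide side;
     PetscCall(KSPSetPCSide(ksp, PC_RIGHT));
     PetscCall(KSPGetPCSide(ksp, &side)); // side == PC_RIGHT once the defaults have been resolved
*/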
1509: /*@
1510: KSPGetTolerances - Gets the relative, absolute, divergence, and maximum
1511: iteration tolerances used by the default `KSP` convergence tests.
1513: Not Collective
1515: Input Parameter:
1516: . ksp - the Krylov subspace context
1518: Output Parameters:
1519: + rtol - the relative convergence tolerance
1520: . abstol - the absolute convergence tolerance
1521: . dtol - the divergence tolerance
1522: - maxits - maximum number of iterations
1524: Notes:
1525: The user can specify NULL for any parameter that is not needed.
1527: Level: intermediate
1531: .seealso: [](chapter_ksp), `KSPSetTolerances()`, `KSP`
1532: @*/
1533: PetscErrorCode KSPGetTolerances(KSP ksp, PetscReal *rtol, PetscReal *abstol, PetscReal *dtol, PetscInt *maxits)
1534: {
1535: PetscFunctionBegin;
1537: if (abstol) *abstol = ksp->abstol;
1538: if (rtol) *rtol = ksp->rtol;
1539: if (dtol) *dtol = ksp->divtol;
1540: if (maxits) *maxits = ksp->max_it;
1541: PetscFunctionReturn(PETSC_SUCCESS);
1542: }
1544: /*@
1545: KSPSetTolerances - Sets the relative, absolute, divergence, and maximum
1546: iteration tolerances used by the default `KSP` convergence testers.
1548: Logically Collective
1550: Input Parameters:
1551: + ksp - the Krylov subspace context
1552: . rtol - the relative convergence tolerance, relative decrease in the (possibly preconditioned) residual norm
1553: . abstol - the absolute convergence tolerance absolute size of the (possibly preconditioned) residual norm
1554: . dtol - the divergence tolerance, amount (possibly preconditioned) residual norm can increase before `KSPConvergedDefault()` concludes that the method is diverging
1555: - maxits - maximum number of iterations to use
1557: Options Database Keys:
1558: + -ksp_atol <abstol> - Sets abstol
1559: . -ksp_rtol <rtol> - Sets rtol
1560: . -ksp_divtol <dtol> - Sets dtol
1561: - -ksp_max_it <maxits> - Sets maxits
1563: Notes:
1564: Use `PETSC_DEFAULT` to leave the current value of any of the tolerances unchanged.
1566: See `KSPConvergedDefault()` for details how these parameters are used in the default convergence test. See also `KSPSetConvergenceTest()`
1567: for setting user-defined stopping criteria.
1569: Level: intermediate
1573: .seealso: [](chapter_ksp), `KSPGetTolerances()`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSP`
1574: @*/
1575: PetscErrorCode KSPSetTolerances(KSP ksp, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt maxits)
1576: {
1577: PetscFunctionBegin;
1584: if (rtol != PETSC_DEFAULT) {
1585: PetscCheck(rtol >= 0.0 && rtol < 1.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Relative tolerance %g must be non-negative and less than 1.0", (double)rtol);
1586: ksp->rtol = rtol;
1587: }
1588: if (abstol != PETSC_DEFAULT) {
1589: PetscCheck(abstol >= 0.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Absolute tolerance %g must be non-negative", (double)abstol);
1590: ksp->abstol = abstol;
1591: }
1592: if (dtol != PETSC_DEFAULT) {
1593: PetscCheck(dtol >= 0.0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Divergence tolerance %g must be non-negative", (double)dtol);
1594: ksp->divtol = dtol;
1595: }
1596: if (maxits != PETSC_DEFAULT) {
1597: PetscCheck(maxits >= 0, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Maximum number of iterations %" PetscInt_FMT " must be non-negative", maxits);
1598: ksp->max_it = maxits;
1599: }
1600: PetscFunctionReturn(PETSC_SUCCESS);
1601: }
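/*
   Editor's note: a minimal sketch (not part of the original source) of adjusting only some tolerances
   while keeping the others unchanged via PETSC_DEFAULT; `ksp` is assumed to exist.

     PetscReal rtol, abstol, dtol;
     PetscInt  maxits;
     PetscCall(KSPSetTolerances(ksp, 1.e-8, PETSC_DEFAULT, PETSC_DEFAULT, 500));
     PetscCall(KSPGetTolerances(ksp, &rtol, &abstol, &dtol, &maxits)); // rtol == 1e-8, maxits == 500
*/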
1603: /*@
1604: KSPSetInitialGuessNonzero - Tells the iterative solver that the
1605: initial guess is nonzero; otherwise `KSP` assumes the initial guess
1606: is to be zero (and thus zeros it out before solving).
1608: Logically Collective
1610: Input Parameters:
1611: + ksp - iterative context obtained from `KSPCreate()`
1612: - flg - `PETSC_TRUE` indicates the guess is non-zero, `PETSC_FALSE` indicates the guess is zero
1614: Options Database Key:
1615: . -ksp_initial_guess_nonzero <true,false> - use nonzero initial guess
1617: Level: beginner
1619: Notes:
1620: If this is not called, the solution vector X is zeroed in the call to `KSPSolve()`.
1622: .seealso: [](chapter_ksp), `KSPGetInitialGuessNonzero()`, `KSPSetGuessType()`, `KSPGuessType`, `KSP`
1623: @*/
1624: PetscErrorCode KSPSetInitialGuessNonzero(KSP ksp, PetscBool flg)
1625: {
1626: PetscFunctionBegin;
1629: ksp->guess_zero = (PetscBool) !(int)flg;
1630: PetscFunctionReturn(PETSC_SUCCESS);
1631: }
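/*
   Editor's note: a brief sketch (not from the original source) of supplying a nonzero initial guess;
   `ksp`, `b`, and `x` are assumed to exist and `x` already holds the guess.

     PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE)); // keep the values already in x
     PetscCall(KSPSolve(ksp, b, x));
*/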
1633: /*@
1634: KSPGetInitialGuessNonzero - Determines whether the `KSP` solver is using
1635: a zero initial guess.
1637: Not Collective
1639: Input Parameter:
1640: . ksp - iterative context obtained from `KSPCreate()`
1642: Output Parameter:
1643: . flag - `PETSC_TRUE` if guess is nonzero, else `PETSC_FALSE`
1645: Level: intermediate
1647: .seealso: [](chapter_ksp), `KSPSetInitialGuessNonzero()`, `KSP`
1648: @*/
1649: PetscErrorCode KSPGetInitialGuessNonzero(KSP ksp, PetscBool *flag)
1650: {
1651: PetscFunctionBegin;
1654: if (ksp->guess_zero) *flag = PETSC_FALSE;
1655: else *flag = PETSC_TRUE;
1656: PetscFunctionReturn(PETSC_SUCCESS);
1657: }
1659: /*@
1660: KSPSetErrorIfNotConverged - Causes `KSPSolve()` to generate an error, as soon as the failure is detected, if the solver has not converged.
1662: Logically Collective
1664: Input Parameters:
1665: + ksp - iterative context obtained from `KSPCreate()`
1666: - flg - `PETSC_TRUE` indicates you want the error generated
1668: Options Database Key:
1669: . -ksp_error_if_not_converged <true,false> - generate an error and stop the program
1671: Level: intermediate
1673: Notes:
1674: Normally PETSc continues if a linear solver fails to converge; you can call `KSPGetConvergedReason()` after a `KSPSolve()`
1675: to determine if it has converged.
1677: A `KSP_DIVERGED_ITS` will not generate an error in a `KSPSolve()` inside a nested linear solver.
1679: .seealso: [](chapter_ksp), `KSPGetErrorIfNotConverged()`, `KSP`
1680: @*/
1681: PetscErrorCode KSPSetErrorIfNotConverged(KSP ksp, PetscBool flg)
1682: {
1683: PetscFunctionBegin;
1686: ksp->errorifnotconverged = flg;
1687: PetscFunctionReturn(PETSC_SUCCESS);
1688: }
1690: /*@
1691: KSPGetErrorIfNotConverged - Will `KSPSolve()` generate an error if the solver does not converge?
1693: Not Collective
1695: Input Parameter:
1696: . ksp - iterative context obtained from `KSPCreate()`
1698: Output Parameter:
1699: . flag - `PETSC_TRUE` if it will generate an error, else `PETSC_FALSE`
1701: Level: intermediate
1703: .seealso: [](chapter_ksp), `KSPSetErrorIfNotConverged()`, `KSP`
1704: @*/
1705: PetscErrorCode KSPGetErrorIfNotConverged(KSP ksp, PetscBool *flag)
1706: {
1707: PetscFunctionBegin;
1710: *flag = ksp->errorifnotconverged;
1711: PetscFunctionReturn(PETSC_SUCCESS);
1712: }
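/*
   Editor's note: a sketch (not part of the original source) of the default, non-erroring path:
   solve, then inspect the converged reason by hand; `ksp`, `b`, and `x` are assumed to exist.

     KSPConvergedReason reason;
     PetscCall(KSPSetErrorIfNotConverged(ksp, PETSC_FALSE)); // default: do not error out
     PetscCall(KSPSolve(ksp, b, x));
     PetscCall(KSPGetConvergedReason(ksp, &reason));
     if (reason < 0) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Solve diverged: %s\n", KSPConvergedReasons[reason]));
*/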
1714: /*@
1715: KSPSetInitialGuessKnoll - Tells the iterative solver to use `PCApply()` to compute the initial guess (The Knoll trick)
1717: Logically Collective
1719: Input Parameters:
1720: + ksp - iterative context obtained from `KSPCreate()`
1721: - flg - `PETSC_TRUE` or `PETSC_FALSE`
1723: Level: advanced
1725: Developer Note: the Knoll trick is not currently implemented using the `KSPGuess` class
1727: .seealso: [](chapter_ksp), `KSPGetInitialGuessKnoll()`, `KSPSetInitialGuessNonzero()`, `KSPGetInitialGuessNonzero()`, `KSP`
1728: @*/
1729: PetscErrorCode KSPSetInitialGuessKnoll(KSP ksp, PetscBool flg)
1730: {
1731: PetscFunctionBegin;
1734: ksp->guess_knoll = flg;
1735: PetscFunctionReturn(PETSC_SUCCESS);
1736: }
1738: /*@
1739: KSPGetInitialGuessKnoll - Determines whether the `KSP` solver is using the Knoll trick (using `PCApply(pc,b,...)` to compute
1740: the initial guess)
1742: Not Collective
1744: Input Parameter:
1745: . ksp - iterative context obtained from `KSPCreate()`
1747: Output Parameter:
1748: . flag - `PETSC_TRUE` if using Knoll trick, else `PETSC_FALSE`
1750: Level: advanced
1752: .seealso: [](chapter_ksp), `KSPSetInitialGuessKnoll()`, `KSPSetInitialGuessNonzero()`, `KSPGetInitialGuessNonzero()`, `KSP`
1753: @*/
1754: PetscErrorCode KSPGetInitialGuessKnoll(KSP ksp, PetscBool *flag)
1755: {
1756: PetscFunctionBegin;
1759: *flag = ksp->guess_knoll;
1760: PetscFunctionReturn(PETSC_SUCCESS);
1761: }
1763: /*@
1764: KSPGetComputeSingularValues - Gets the flag indicating whether the extreme singular
1765: values will be calculated via a Lanczos or Arnoldi process as the linear
1766: system is solved.
1768: Not Collective
1770: Input Parameter:
1771: . ksp - iterative context obtained from `KSPCreate()`
1773: Output Parameter:
1774: . flg - `PETSC_TRUE` or `PETSC_FALSE`
1776: Options Database Key:
1777: . -ksp_monitor_singular_value - Activates `KSPSetComputeSingularValues()`
1779: Notes:
1780: Currently this option is not valid for all iterative methods.
1782: Many users may just want to use the monitoring routine
1783: `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
1784: to print the singular values at each iteration of the linear solve.
1786: Level: advanced
1788: .seealso: [](chapter_ksp), `KSPComputeExtremeSingularValues()`, `KSPMonitorSingularValue()`, `KSP`
1789: @*/
1790: PetscErrorCode KSPGetComputeSingularValues(KSP ksp, PetscBool *flg)
1791: {
1792: PetscFunctionBegin;
1795: *flg = ksp->calc_sings;
1796: PetscFunctionReturn(PETSC_SUCCESS);
1797: }
1799: /*@
1800: KSPSetComputeSingularValues - Sets a flag so that the extreme singular
1801: values will be calculated via a Lanczos or Arnoldi process as the linear
1802: system is solved.
1804: Logically Collective
1806: Input Parameters:
1807: + ksp - iterative context obtained from `KSPCreate()`
1808: - flg - `PETSC_TRUE` or `PETSC_FALSE`
1810: Options Database Key:
1811: . -ksp_monitor_singular_value - Activates `KSPSetComputeSingularValues()`
1813: Notes:
1814: Currently this option is not valid for all iterative methods.
1816: Many users may just want to use the monitoring routine
1817: `KSPMonitorSingularValue()` (which can be set with option -ksp_monitor_singular_value)
1818: to print the singular values at each iteration of the linear solve.
1820: Level: advanced
1822: .seealso: [](chapter_ksp), `KSPComputeExtremeSingularValues()`, `KSPMonitorSingularValue()`, `KSP`
1823: @*/
1824: PetscErrorCode KSPSetComputeSingularValues(KSP ksp, PetscBool flg)
1825: {
1826: PetscFunctionBegin;
1829: ksp->calc_sings = flg;
1830: PetscFunctionReturn(PETSC_SUCCESS);
1831: }
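/*
   Editor's note: a minimal sketch (not from the original source) of requesting singular value
   estimates; the flag must be set before KSPSetUp(). `ksp`, `b`, and `x` are assumed to exist.

     PetscReal emax, emin;
     PetscCall(KSPSetComputeSingularValues(ksp, PETSC_TRUE)); // must precede KSPSetUp()
     PetscCall(KSPSetUp(ksp));
     PetscCall(KSPSolve(ksp, b, x));
     PetscCall(KSPComputeExtremeSingularValues(ksp, &emax, &emin));
     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "condition number estimate %g\n", (double)(emax / emin)));
*/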
1833: /*@
1834: KSPGetComputeEigenvalues - Gets the flag indicating whether the extreme eigenvalues
1835: will be calculated via a Lanczos or Arnoldi process as the linear
1836: system is solved.
1838: Not Collective
1840: Input Parameter:
1841: . ksp - iterative context obtained from `KSPCreate()`
1843: Output Parameter:
1844: . flg - `PETSC_TRUE` or `PETSC_FALSE`
1846: Notes:
1847: Currently this option is not valid for all iterative methods.
1849: Level: advanced
1851: .seealso: [](chapter_ksp), `KSPComputeEigenvalues()`, `KSPComputeEigenvaluesExplicitly()`, `KSP`
1852: @*/
1853: PetscErrorCode KSPGetComputeEigenvalues(KSP ksp, PetscBool *flg)
1854: {
1855: PetscFunctionBegin;
1858: *flg = ksp->calc_sings;
1859: PetscFunctionReturn(PETSC_SUCCESS);
1860: }
1862: /*@
1863: KSPSetComputeEigenvalues - Sets a flag so that the extreme eigenvalues
1864: will be calculated via a Lanczos or Arnoldi process as the linear
1865: system is solved.
1867: Logically Collective
1869: Input Parameters:
1870: + ksp - iterative context obtained from `KSPCreate()`
1871: - flg - `PETSC_TRUE` or `PETSC_FALSE`
1873: Notes:
1874: Currently this option is not valid for all iterative methods.
1876: Level: advanced
1878: .seealso: [](chapter_ksp), `KSPComputeEigenvalues()`, `KSPComputeEigenvaluesExplicitly()`, `KSP`
1879: @*/
1880: PetscErrorCode KSPSetComputeEigenvalues(KSP ksp, PetscBool flg)
1881: {
1882: PetscFunctionBegin;
1885: ksp->calc_sings = flg;
1886: PetscFunctionReturn(PETSC_SUCCESS);
1887: }
1889: /*@
1890: KSPSetComputeRitz - Sets a flag so that the Ritz or harmonic Ritz pairs
1891: will be calculated via a Lanczos or Arnoldi process as the linear
1892: system is solved.
1894: Logically Collective
1896: Input Parameters:
1897: + ksp - iterative context obtained from `KSPCreate()`
1898: - flg - `PETSC_TRUE` or `PETSC_FALSE`
1900: Notes:
1901: Currently this option is only valid for the GMRES method.
1903: Level: advanced
1905: .seealso: [](chapter_ksp), `KSPComputeRitz()`, `KSP`
1906: @*/
1907: PetscErrorCode KSPSetComputeRitz(KSP ksp, PetscBool flg)
1908: {
1909: PetscFunctionBegin;
1912: ksp->calc_ritz = flg;
1913: PetscFunctionReturn(PETSC_SUCCESS);
1914: }
1916: /*@
1917: KSPGetRhs - Gets the right-hand-side vector for the linear system to
1918: be solved.
1920: Not Collective
1922: Input Parameter:
1923: . ksp - iterative context obtained from `KSPCreate()`
1925: Output Parameter:
1926: . r - right-hand-side vector
1928: Level: developer
1930: .seealso: [](chapter_ksp), `KSPGetSolution()`, `KSPSolve()`, `KSP`
1931: @*/
1932: PetscErrorCode KSPGetRhs(KSP ksp, Vec *r)
1933: {
1934: PetscFunctionBegin;
1937: *r = ksp->vec_rhs;
1938: PetscFunctionReturn(PETSC_SUCCESS);
1939: }
1941: /*@
1942: KSPGetSolution - Gets the location of the solution for the
1943: linear system to be solved. Note that this may not be where the solution
1944: is stored during the iterative process; see `KSPBuildSolution()`.
1946: Not Collective
1948: Input Parameter:
1949: . ksp - iterative context obtained from `KSPCreate()`
1951: Output Parameter:
1952: . v - solution vector
1954: Level: developer
1956: .seealso: [](chapter_ksp), `KSPGetRhs()`, `KSPBuildSolution()`, `KSPSolve()`, `KSP`
1957: @*/
1958: PetscErrorCode KSPGetSolution(KSP ksp, Vec *v)
1959: {
1960: PetscFunctionBegin;
1963: *v = ksp->vec_sol;
1964: PetscFunctionReturn(PETSC_SUCCESS);
1965: }
1967: /*@
1968: KSPSetPC - Sets the preconditioner object to be used by the `KSP` solver to
1969: apply the preconditioner to a vector during the solve.
1971: Collective
1973: Input Parameters:
1974: + ksp - iterative context obtained from `KSPCreate()`
1975: - pc - the preconditioner object (can be NULL)
1977: Notes:
1978: Use `KSPGetPC()` to retrieve the preconditioner context.
1980: Level: developer
1982: .seealso: [](chapter_ksp), `KSPGetPC()`, `KSP`
1983: @*/
1984: PetscErrorCode KSPSetPC(KSP ksp, PC pc)
1985: {
1986: PetscFunctionBegin;
1988: if (pc) {
1990: PetscCheckSameComm(ksp, 1, pc, 2);
1991: }
1992: PetscCall(PetscObjectReference((PetscObject)pc));
1993: PetscCall(PCDestroy(&ksp->pc));
1994: ksp->pc = pc;
1995: PetscFunctionReturn(PETSC_SUCCESS);
1996: }
1998: /*@
1999: KSPGetPC - Returns a pointer to the preconditioner context
2000: set with `KSPSetPC()`.
2002: Not Collective
2004: Input Parameter:
2005: . ksp - iterative context obtained from `KSPCreate()`
2007: Output Parameter:
2008: . pc - preconditioner context
2010: Level: developer
2012: .seealso: [](chapter_ksp), `KSPSetPC()`, `KSP`
2013: @*/
2014: PetscErrorCode KSPGetPC(KSP ksp, PC *pc)
2015: {
2016: PetscFunctionBegin;
2019: if (!ksp->pc) {
2020: PetscCall(PCCreate(PetscObjectComm((PetscObject)ksp), &ksp->pc));
2021: PetscCall(PetscObjectIncrementTabLevel((PetscObject)ksp->pc, (PetscObject)ksp, 0));
2022: PetscCall(PetscObjectSetOptions((PetscObject)ksp->pc, ((PetscObject)ksp)->options));
2023: }
2024: *pc = ksp->pc;
2025: PetscFunctionReturn(PETSC_SUCCESS);
2026: }
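/*
   Editor's note: a short sketch (not part of the original source) of retrieving the inner PC and
   configuring it directly; `ksp` is assumed to exist. KSPGetPC() creates the PC on first use.

     PC pc;
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCJACOBI)); // the PC is still owned by the KSP; do not destroy it
*/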
2028: /*@
2029: KSPMonitor - runs the user-provided monitor routines, if they exist
2031: Collective
2033: Input Parameters:
2034: + ksp - iterative context obtained from `KSPCreate()`
2035: . it - iteration number
2036: - rnorm - relative norm of the residual
2038: Notes:
2039: This routine is called by the `KSP` implementations.
2040: It does not typically need to be called by the user.
2042: Level: developer
2044: .seealso: [](chapter_ksp), `KSPMonitorSet()`
2045: @*/
2046: PetscErrorCode KSPMonitor(KSP ksp, PetscInt it, PetscReal rnorm)
2047: {
2048: PetscInt i, n = ksp->numbermonitors;
2050: PetscFunctionBegin;
2051: for (i = 0; i < n; i++) PetscCall((*ksp->monitor[i])(ksp, it, rnorm, ksp->monitorcontext[i]));
2052: PetscFunctionReturn(PETSC_SUCCESS);
2053: }
2055: /*@C
2056: KSPMonitorSet - Sets an ADDITIONAL function to be called at every iteration to monitor
2057: the residual/error etc.
2059: Logically Collective
2061: Input Parameters:
2062: + ksp - iterative context obtained from `KSPCreate()`
2063: . monitor - pointer to function (if this is NULL, it turns off monitoring)
2064: . mctx - [optional] context for private data for the
2065: monitor routine (use NULL if no context is desired)
2066: - monitordestroy - [optional] routine that frees monitor context
2067: (may be NULL)
2069: Calling Sequence of monitor:
2070: $ monitor (KSP ksp, PetscInt it, PetscReal rnorm, void *mctx)
2072: + ksp - iterative context obtained from `KSPCreate()`
2073: . it - iteration number
2074: . rnorm - (estimated) 2-norm of (preconditioned) residual
2075: - mctx - optional monitoring context, as set by `KSPMonitorSet()`
2077: Options Database Keys:
2078: + -ksp_monitor - sets `KSPMonitorResidual()`
2079: . -ksp_monitor draw - sets `KSPMonitorResidualDraw()` and plots residual
2080: . -ksp_monitor draw::draw_lg - sets `KSPMonitorResidualDrawLG()` and plots residual
2081: . -ksp_monitor_pause_final - Pauses any graphics when the solve finishes (only works for internal monitors)
2082: . -ksp_monitor_true_residual - sets `KSPMonitorTrueResidual()`
2083: . -ksp_monitor_true_residual draw::draw_lg - sets `KSPMonitorTrueResidualDrawLG()` and plots residual
2084: . -ksp_monitor_max - sets `KSPMonitorTrueResidualMax()`
2085: . -ksp_monitor_singular_value - sets `KSPMonitorSingularValue()`
2086: - -ksp_monitor_cancel - cancels all monitors that have
2087: been hardwired into a code by
2088: calls to `KSPMonitorSet()`, but
2089: does not cancel those set via
2090: the options database.
2092: Notes:
2093: The default is to do nothing. To print the residual, or preconditioned
2094: residual if `KSPSetNormType`(ksp,`KSP_NORM_PRECONDITIONED`) was called, use
2095: `KSPMonitorResidual()` as the monitoring routine, with a `PETSCVIEWERASCII` as the
2096: context.
2098: Several different monitoring routines may be set by calling
2099: `KSPMonitorSet()` multiple times; all will be called in the
2100: order in which they were set.
2102: Fortran Notes:
2103: Only a single monitor function can be set for each `KSP` object.
2105: Level: beginner
2107: .seealso: [](chapter_ksp), `KSPMonitorResidual()`, `KSPMonitorCancel()`, `KSP`
2108: @*/
2109: PetscErrorCode KSPMonitorSet(KSP ksp, PetscErrorCode (*monitor)(KSP, PetscInt, PetscReal, void *), void *mctx, PetscErrorCode (*monitordestroy)(void **))
2110: {
2111: PetscInt i;
2112: PetscBool identical;
2114: PetscFunctionBegin;
2116: for (i = 0; i < ksp->numbermonitors; i++) {
2117: PetscCall(PetscMonitorCompare((PetscErrorCode(*)(void))monitor, mctx, monitordestroy, (PetscErrorCode(*)(void))ksp->monitor[i], ksp->monitorcontext[i], ksp->monitordestroy[i], &identical));
2118: if (identical) PetscFunctionReturn(PETSC_SUCCESS);
2119: }
2120: PetscCheck(ksp->numbermonitors < MAXKSPMONITORS, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_OUTOFRANGE, "Too many KSP monitors set");
2121: ksp->monitor[ksp->numbermonitors] = monitor;
2122: ksp->monitordestroy[ksp->numbermonitors] = monitordestroy;
2123: ksp->monitorcontext[ksp->numbermonitors++] = (void *)mctx;
2124: PetscFunctionReturn(PETSC_SUCCESS);
2125: }
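/*
   Editor's note: a sketch (not from the original source) of a user-defined monitor registered with
   KSPMonitorSet(); the function name MyMonitor is hypothetical.

     static PetscErrorCode MyMonitor(KSP ksp, PetscInt it, PetscReal rnorm, void *ctx)
     {
       PetscFunctionBeginUser;
       PetscCall(PetscPrintf(PetscObjectComm((PetscObject)ksp), "iteration %" PetscInt_FMT ": residual norm %g\n", it, (double)rnorm));
       PetscFunctionReturn(PETSC_SUCCESS);
     }

     // in the calling code, after KSPCreate():
     PetscCall(KSPMonitorSet(ksp, MyMonitor, NULL, NULL));
*/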
2127: /*@
2128: KSPMonitorCancel - Clears all monitors for a `KSP` object.
2130: Logically Collective
2132: Input Parameters:
2133: . ksp - iterative context obtained from `KSPCreate()`
2135: Options Database Key:
2136: . -ksp_monitor_cancel - Cancels all monitors that have been hardwired into a code by calls to `KSPMonitorSet()`, but does not cancel those set via the options database.
2138: Level: intermediate
2140: .seealso: [](chapter_ksp), `KSPMonitorResidual()`, `KSPMonitorSet()`, `KSP`
2141: @*/
2142: PetscErrorCode KSPMonitorCancel(KSP ksp)
2143: {
2144: PetscInt i;
2146: PetscFunctionBegin;
2148: for (i = 0; i < ksp->numbermonitors; i++) {
2149: if (ksp->monitordestroy[i]) PetscCall((*ksp->monitordestroy[i])(&ksp->monitorcontext[i]));
2150: }
2151: ksp->numbermonitors = 0;
2152: PetscFunctionReturn(PETSC_SUCCESS);
2153: }
2155: /*@C
2156: KSPGetMonitorContext - Gets the monitoring context, as set by `KSPMonitorSet()` for the FIRST monitor only.
2158: Not Collective
2160: Input Parameter:
2161: . ksp - iterative context obtained from `KSPCreate()`
2163: Output Parameter:
2164: . ctx - monitoring context
2166: Level: intermediate
2168: .seealso: [](chapter_ksp), `KSPMonitorResidual()`, `KSP`
2169: @*/
2170: PetscErrorCode KSPGetMonitorContext(KSP ksp, void *ctx)
2171: {
2172: PetscFunctionBegin;
2174: *(void **)ctx = ksp->monitorcontext[0];
2175: PetscFunctionReturn(PETSC_SUCCESS);
2176: }
2178: /*@
2179: KSPSetResidualHistory - Sets the array used to hold the residual history.
2180: If set, this array will contain the residual norms computed at each
2181: iteration of the solver.
2183: Not Collective
2185: Input Parameters:
2186: + ksp - iterative context obtained from `KSPCreate()`
2187: . a - array to hold history
2188: . na - size of a
2189: - reset - `PETSC_TRUE` indicates the history counter is reset to zero
2190: for each new linear solve
2192: Level: advanced
2194: Notes:
2195: If provided, the array is NOT freed by PETSc, so the user needs to keep track of it and destroy it once the `KSP` object is destroyed.
2196: If 'a' is NULL then space is allocated for the history. If 'na' is `PETSC_DECIDE` or `PETSC_DEFAULT` then a
2197: default array of length 10000 is allocated.
2199: If the array is not long enough, then once the number of iterations exceeds the array length `KSPSolve()` stops recording the history.
2201: .seealso: [](chapter_ksp), `KSPGetResidualHistory()`, `KSP`
2202: @*/
2203: PetscErrorCode KSPSetResidualHistory(KSP ksp, PetscReal a[], PetscInt na, PetscBool reset)
2204: {
2205: PetscFunctionBegin;
2208: PetscCall(PetscFree(ksp->res_hist_alloc));
2209: if (na != PETSC_DECIDE && na != PETSC_DEFAULT && a) {
2210: ksp->res_hist = a;
2211: ksp->res_hist_max = (size_t)na;
2212: } else {
2213: if (na != PETSC_DECIDE && na != PETSC_DEFAULT) ksp->res_hist_max = (size_t)na;
2214: else ksp->res_hist_max = 10000; /* like default ksp->max_it */
2215: PetscCall(PetscCalloc1(ksp->res_hist_max, &ksp->res_hist_alloc));
2217: ksp->res_hist = ksp->res_hist_alloc;
2218: }
2219: ksp->res_hist_len = 0;
2220: ksp->res_hist_reset = reset;
2221: PetscFunctionReturn(PETSC_SUCCESS);
2222: }
2224: /*@C
2225: KSPGetResidualHistory - Gets the array used to hold the residual history and the number of residuals it contains.
2227: Not Collective
2229: Input Parameter:
2230: . ksp - iterative context obtained from `KSPCreate()`
2232: Output Parameters:
2233: + a - pointer to array to hold history (or NULL)
2234: - na - number of used entries in a (or NULL)
2236: Level: advanced
2238: Note:
2239: This array is borrowed and should not be freed by the caller.
2241: Can only be called after `KSPSetResidualHistory()`; otherwise a and na are set to zero
2243: Fortran Note:
2244: The Fortran version of this routine has the calling sequence
2245: $ call `KSPGetResidualHistory`(`KSP` ksp, integer na, integer ierr)
2246: Note that you must have passed a Fortran array into `KSPSetResidualHistory()` and you
2247: access the residual values from that Fortran array you provided. Only na (the number of
2248: residual norms currently held) is set.
2250: .seealso: [](chapter_ksp), `KSPSetResidualHistory()`, `KSP`
2251: @*/
2252: PetscErrorCode KSPGetResidualHistory(KSP ksp, const PetscReal *a[], PetscInt *na)
2253: {
2254: PetscFunctionBegin;
2256: if (a) *a = ksp->res_hist;
2257: if (na) *na = (PetscInt)ksp->res_hist_len;
2258: PetscFunctionReturn(PETSC_SUCCESS);
2259: }
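/*
   Editor's note: a minimal sketch (not part of the original source) of recording and reading the
   residual history; `ksp`, `b`, and `x` are assumed to exist.

     const PetscReal *hist;
     PetscInt         nhist;
     PetscCall(KSPSetResidualHistory(ksp, NULL, PETSC_DECIDE, PETSC_TRUE)); // let PETSc allocate storage
     PetscCall(KSPSolve(ksp, b, x));
     PetscCall(KSPGetResidualHistory(ksp, &hist, &nhist));
     for (PetscInt i = 0; i < nhist; i++) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%" PetscInt_FMT " %g\n", i, (double)hist[i]));
*/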
2261: /*@
2262: KSPSetErrorHistory - Sets the array used to hold the error history. If set, this array will contain the error norms computed at each iteration of the solver.
2264: Not Collective
2266: Input Parameters:
2267: + ksp - iterative context obtained from `KSPCreate()`
2268: . a - array to hold history
2269: . na - size of a
2270: - reset - `PETSC_TRUE` indicates the history counter is reset to zero for each new linear solve
2272: Level: advanced
2274: Notes:
2275: If provided, the array is NOT freed by PETSc, so the user needs to keep track of it and destroy it once the `KSP` object is destroyed.
2276: If 'a' is NULL then space is allocated for the history. If 'na' is `PETSC_DECIDE` or `PETSC_DEFAULT` then a default array of length 10000 is allocated.
2278: If the array is not long enough, then once the number of iterations exceeds the array length `KSPSolve()` stops recording the history.
2280: .seealso: [](chapter_ksp), `KSPGetErrorHistory()`, `KSPSetResidualHistory()`, `KSP`
2281: @*/
2282: PetscErrorCode KSPSetErrorHistory(KSP ksp, PetscReal a[], PetscInt na, PetscBool reset)
2283: {
2284: PetscFunctionBegin;
2287: PetscCall(PetscFree(ksp->err_hist_alloc));
2288: if (na != PETSC_DECIDE && na != PETSC_DEFAULT && a) {
2289: ksp->err_hist = a;
2290: ksp->err_hist_max = (size_t)na;
2291: } else {
2292: if (na != PETSC_DECIDE && na != PETSC_DEFAULT) ksp->err_hist_max = (size_t)na;
2293: else ksp->err_hist_max = 10000; /* like default ksp->max_it */
2294: PetscCall(PetscCalloc1(ksp->err_hist_max, &ksp->err_hist_alloc));
2296: ksp->err_hist = ksp->err_hist_alloc;
2297: }
2298: ksp->err_hist_len = 0;
2299: ksp->err_hist_reset = reset;
2300: PetscFunctionReturn(PETSC_SUCCESS);
2301: }
2303: /*@C
2304: KSPGetErrorHistory - Gets the array used to hold the error history and the number of residuals it contains.
2306: Not Collective
2308: Input Parameter:
2309: . ksp - iterative context obtained from `KSPCreate()`
2311: Output Parameters:
2312: + a - pointer to array to hold history (or NULL)
2313: - na - number of used entries in a (or NULL)
2315: Level: advanced
2317: Notes:
2318: This array is borrowed and should not be freed by the caller.
2319: Can only be called after `KSPSetErrorHistory()`; otherwise a and na are set to zero
2321: Fortran Note:
2322: The Fortran version of this routine has the calling sequence
2323: $ call KSPGetErrorHistory(KSP ksp, integer na, integer ierr)
2324: Note that you must have passed a Fortran array into `KSPSetErrorHistory()` and you
2325: access the error values from that Fortran array you provided. Only na (the number of
2326: error norms currently held) is set.
2328: .seealso: [](chapter_ksp), `KSPSetErrorHistory()`, `KSPGetResidualHistory()`, `KSP`
2329: @*/
2330: PetscErrorCode KSPGetErrorHistory(KSP ksp, const PetscReal *a[], PetscInt *na)
2331: {
2332: PetscFunctionBegin;
2334: if (a) *a = ksp->err_hist;
2335: if (na) *na = (PetscInt)ksp->err_hist_len;
2336: PetscFunctionReturn(PETSC_SUCCESS);
2337: }
2339: /*
2340: KSPComputeConvergenceRate - Compute the convergence rate for the iteration
2342: Not collective
2344: Input Parameter:
2345: . ksp - The `KSP`
2347: Output Parameters:
2348: + cr - The residual contraction rate
2349: . rRsq - The coefficient of determination, R^2, indicating the linearity of the data
2350: . ce - The error contraction rate
2351: - eRsq - The coefficient of determination, R^2, indicating the linearity of the data
2353: Note:
2354: Suppose that the residual is reduced linearly, $r_k = c^k r_0$, which means $\log r_k = \log r_0 + k \log c$. After linear regression,
2355: the slope is $\log c$. The coefficient of determination is given by $1 - \frac{\sum_i (y_i - f(x_i))^2}{\sum_i (y_i - \bar y)^2}$,
2356: see also https://en.wikipedia.org/wiki/Coefficient_of_determination
2358: Level: advanced
2360: .seealso: [](chapter_ksp), `KSP`, `KSPConvergedRateView()`
2361: */
2362: PetscErrorCode KSPComputeConvergenceRate(KSP ksp, PetscReal *cr, PetscReal *rRsq, PetscReal *ce, PetscReal *eRsq)
2363: {
2364: PetscReal const *hist;
2365: PetscReal *x, *y, slope, intercept, mean = 0.0, var = 0.0, res = 0.0;
2366: PetscInt n, k;
2368: PetscFunctionBegin;
2369: if (cr || rRsq) {
2370: PetscCall(KSPGetResidualHistory(ksp, &hist, &n));
2371: if (!n) {
2372: if (cr) *cr = 0.0;
2373: if (rRsq) *rRsq = -1.0;
2374: } else {
2375: PetscCall(PetscMalloc2(n, &x, n, &y));
2376: for (k = 0; k < n; ++k) {
2377: x[k] = k;
2378: y[k] = PetscLogReal(hist[k]);
2379: mean += y[k];
2380: }
2381: mean /= n;
2382: PetscCall(PetscLinearRegression(n, x, y, &slope, &intercept));
2383: for (k = 0; k < n; ++k) {
2384: res += PetscSqr(y[k] - (slope * x[k] + intercept));
2385: var += PetscSqr(y[k] - mean);
2386: }
2387: PetscCall(PetscFree2(x, y));
2388: if (cr) *cr = PetscExpReal(slope);
2389: if (rRsq) *rRsq = var < PETSC_MACHINE_EPSILON ? 0.0 : 1.0 - (res / var);
2390: }
2391: }
2392: if (ce || eRsq) {
2393: PetscCall(KSPGetErrorHistory(ksp, &hist, &n));
2394: if (!n) {
2395: if (ce) *ce = 0.0;
2396: if (eRsq) *eRsq = -1.0;
2397: } else {
2398: PetscCall(PetscMalloc2(n, &x, n, &y));
2399: for (k = 0; k < n; ++k) {
2400: x[k] = k;
2401: y[k] = PetscLogReal(hist[k]);
2402: mean += y[k];
2403: }
2404: mean /= n;
2405: PetscCall(PetscLinearRegression(n, x, y, &slope, &intercept));
2406: for (k = 0; k < n; ++k) {
2407: res += PetscSqr(y[k] - (slope * x[k] + intercept));
2408: var += PetscSqr(y[k] - mean);
2409: }
2410: PetscCall(PetscFree2(x, y));
2411: if (ce) *ce = PetscExpReal(slope);
2412: if (eRsq) *eRsq = var < PETSC_MACHINE_EPSILON ? 0.0 : 1.0 - (res / var);
2413: }
2414: }
2415: PetscFunctionReturn(PETSC_SUCCESS);
2416: }
2418: /*@C
2419: KSPSetConvergenceTest - Sets the function to be used to determine convergence.
2421: Logically Collective
2423: Input Parameters:
2424: + ksp - iterative context obtained from `KSPCreate()`
2425: . converge - pointer to the function
2426: . cctx - context for private data for the convergence routine (may be null)
2427: - destroy - a routine for destroying the context (may be null)
2429: Calling sequence of converge:
2430: $ converge (`KSP` ksp, `PetscInt` it, `PetscReal` rnorm, `KSPConvergedReason` *reason,void *mctx)
2432: + ksp - iterative context obtained from `KSPCreate()`
2433: . it - iteration number
2434: . rnorm - (estimated) 2-norm of (preconditioned) residual
2435: . reason - the reason why it has converged or diverged
2436: - cctx - optional convergence context, as set by `KSPSetConvergenceTest()`
2438: Level: advanced
2440: Notes:
2441: Must be called after the `KSP` type has been set so put this after
2442: a call to `KSPSetType()`, or `KSPSetFromOptions()`.
2444: The default convergence test, `KSPConvergedDefault()`, aborts if the
2445: residual grows to more than 10000 times the initial residual.
2447: The default is a combination of relative and absolute tolerances.
2448: The residual value that is tested may be an approximation; routines
2449: that need exact values should compute them.
2451: In the default PETSc convergence test, the precise values of reason
2452: are macros such as `KSP_CONVERGED_RTOL`, which are defined in petscksp.h.
2454: .seealso: [](chapter_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSP`, `KSPGetConvergenceTest()`, `KSPGetAndClearConvergenceTest()`
2455: @*/
2456: PetscErrorCode KSPSetConvergenceTest(KSP ksp, PetscErrorCode (*converge)(KSP, PetscInt, PetscReal, KSPConvergedReason *, void *), void *cctx, PetscErrorCode (*destroy)(void *))
2457: {
2458: PetscFunctionBegin;
2460: if (ksp->convergeddestroy) PetscCall((*ksp->convergeddestroy)(ksp->cnvP));
2461: ksp->converged = converge;
2462: ksp->convergeddestroy = destroy;
2463: ksp->cnvP = (void *)cctx;
2464: PetscFunctionReturn(PETSC_SUCCESS);
2465: }
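/*
   Editor's note: a sketch (not from the original source) of a user-defined convergence test; the
   function name MyConverged and the fixed tolerance are hypothetical.

     static PetscErrorCode MyConverged(KSP ksp, PetscInt it, PetscReal rnorm, KSPConvergedReason *reason, void *ctx)
     {
       PetscFunctionBeginUser;
       *reason = KSP_CONVERGED_ITERATING;
       if (rnorm < 1.e-10) *reason = KSP_CONVERGED_RTOL;
       else if (it > 500) *reason = KSP_DIVERGED_ITS;
       PetscFunctionReturn(PETSC_SUCCESS);
     }

     // register it after the KSP type has been set:
     PetscCall(KSPSetConvergenceTest(ksp, MyConverged, NULL, NULL));
*/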
2467: /*@C
2468: KSPGetConvergenceTest - Gets the function to be used to determine convergence.
2470: Logically Collective
2472: Input Parameter:
2473: . ksp - iterative context obtained from `KSPCreate()`
2475: Output Parameters:
2476: + converge - pointer to convergence test function
2477: . cctx - context for private data for the convergence routine (may be null)
2478: - destroy - a routine for destroying the context (may be null)
2480: Calling sequence of converge:
2481: $ converge (`KSP` ksp, `PetscInt` it, `PetscReal` rnorm, `KSPConvergedReason` *reason,void *mctx)
2483: + ksp - iterative context obtained from `KSPCreate()`
2484: . it - iteration number
2485: . rnorm - (estimated) 2-norm of (preconditioned) residual
2486: . reason - the reason why it has converged or diverged
2487: - cctx - optional convergence context, as set by `KSPSetConvergenceTest()`
2489: Level: advanced
2491: .seealso: [](chapter_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSP`, `KSPSetConvergenceTest()`, `KSPGetAndClearConvergenceTest()`
2492: @*/
2493: PetscErrorCode KSPGetConvergenceTest(KSP ksp, PetscErrorCode (**converge)(KSP, PetscInt, PetscReal, KSPConvergedReason *, void *), void **cctx, PetscErrorCode (**destroy)(void *))
2494: {
2495: PetscFunctionBegin;
2497: if (converge) *converge = ksp->converged;
2498: if (destroy) *destroy = ksp->convergeddestroy;
2499: if (cctx) *cctx = ksp->cnvP;
2500: PetscFunctionReturn(PETSC_SUCCESS);
2501: }
2503: /*@C
2504: KSPGetAndClearConvergenceTest - Gets the function to be used to determine convergence. Removes the current test without calling destroy on the test context
2506: Logically Collective
2508: Input Parameter:
2509: . ksp - iterative context obtained from `KSPCreate()`
2511: Output Parameters:
2512: + converge - pointer to convergence test function
2513: . cctx - context for private data for the convergence routine
2514: - destroy - a routine for destroying the context
2516: Calling sequence of converge:
2517: $ converge (`KSP` ksp, `PetscInt` it, `PetscReal` rnorm, `KSPConvergedReason` *reason,void *mctx)
2519: + ksp - iterative context obtained from `KSPCreate()`
2520: . it - iteration number
2521: . rnorm - (estimated) 2-norm of (preconditioned) residual
2522: . reason - the reason why it has converged or diverged
2523: - cctx - optional convergence context, as set by `KSPSetConvergenceTest()`
2525: Level: advanced
2527: Note:
2528: This is intended to be used to allow transferring the convergence test (and its context) to another testing object (for example another `KSP`) and then calling
2529: `KSPSetConvergenceTest()` on this original `KSP`. If you just called `KSPGetConvergenceTest()` followed by `KSPSetConvergenceTest()` the original context information
2530: would be destroyed and hence the transferred context would be invalid and trigger a crash on use
2532: .seealso: [](chapter_ksp), `KSP`, `KSPConvergedDefault()`, `KSPGetConvergenceContext()`, `KSPSetTolerances()`, `KSP`, `KSPSetConvergenceTest()`, `KSPGetConvergenceTest()`
2533: @*/
2534: PetscErrorCode KSPGetAndClearConvergenceTest(KSP ksp, PetscErrorCode (**converge)(KSP, PetscInt, PetscReal, KSPConvergedReason *, void *), void **cctx, PetscErrorCode (**destroy)(void *))
2535: {
2536: PetscFunctionBegin;
2538: *converge = ksp->converged;
2539: *destroy = ksp->convergeddestroy;
2540: *cctx = ksp->cnvP;
2541: ksp->converged = NULL;
2542: ksp->cnvP = NULL;
2543: ksp->convergeddestroy = NULL;
2544: PetscFunctionReturn(PETSC_SUCCESS);
2545: }
2547: /*@C
2548: KSPGetConvergenceContext - Gets the convergence context set with `KSPSetConvergenceTest()`.
2550: Not Collective
2552: Input Parameter:
2553: . ksp - iterative context obtained from `KSPCreate()`
2555: Output Parameter:
2556: . ctx - monitoring context
2558: Level: advanced
2560: .seealso: [](chapter_ksp), `KSP`, `KSPConvergedDefault()`, `KSPSetConvergenceTest()`, `KSPGetConvergenceTest()`
2561: @*/
2562: PetscErrorCode KSPGetConvergenceContext(KSP ksp, void *ctx)
2563: {
2564: PetscFunctionBegin;
2566: *(void **)ctx = ksp->cnvP;
2567: PetscFunctionReturn(PETSC_SUCCESS);
2568: }
2570: /*@C
2571: KSPBuildSolution - Builds the approximate solution in a vector provided.
2573: Collective
2575: Input Parameter:
2576: . ksp - iterative context obtained from `KSPCreate()`
2578: Output Parameter:
2579: Provide exactly one of
2580: + v - location to stash solution.
2581: - V - the solution is returned in this location. This vector is created
2582: internally. This vector should NOT be destroyed by the user with
2583: `VecDestroy()`.
2585: Notes:
2586: This routine can be used in one of two ways
2587: .vb
2588: KSPBuildSolution(ksp,NULL,&V);
2589: or
2590: KSPBuildSolution(ksp,v,NULL); or KSPBuildSolution(ksp,v,&v);
2591: .ve
2592: In the first case an internal vector is allocated to store the solution
2593: (the user cannot destroy this vector). In the second case the solution
2594: is generated in the vector that the user provides. Note that for certain
2595: methods, such as `KSPCG`, the second case requires a copy of the solution,
2596: while in the first case the call is essentially free since it simply
2597: returns the vector where the solution already is stored. For some methods
2598: like `KSPGMRES` this is a reasonably expensive operation and should only be
2599: used if truly needed.
2601: Level: developer
2603: .seealso: [](chapter_ksp), `KSPGetSolution()`, `KSPBuildResidual()`, `KSP`
2604: @*/
2605: PetscErrorCode KSPBuildSolution(KSP ksp, Vec v, Vec *V)
2606: {
2607: PetscFunctionBegin;
2609: PetscCheck(V || v, PetscObjectComm((PetscObject)ksp), PETSC_ERR_ARG_WRONG, "Must provide either v or V");
2610: if (!V) V = &v;
2611: PetscUseTypeMethod(ksp, buildsolution, v, V);
2612: PetscFunctionReturn(PETSC_SUCCESS);
2613: }
2615: /*@C
2616: KSPBuildResidual - Builds the residual in a vector provided.
2618: Collective
2620: Input Parameter:
2621: . ksp - iterative context obtained from `KSPCreate()`
2623: Output Parameters:
2624: + v - optional location to stash residual. If v is not provided,
2625: then a location is generated.
2626: . t - work vector. If not provided then one is generated.
2627: - V - the residual
2629: Note:
2630: Regardless of whether or not v is provided, the residual is
2631: returned in V.
2633: Level: advanced
2635: .seealso: [](chapter_ksp), `KSP`, `KSPBuildSolution()`
2636: @*/
2637: PetscErrorCode KSPBuildResidual(KSP ksp, Vec t, Vec v, Vec *V)
2638: {
2639: PetscBool flag = PETSC_FALSE;
2640: Vec w = v, tt = t;
2642: PetscFunctionBegin;
2644: if (!w) PetscCall(VecDuplicate(ksp->vec_rhs, &w));
2645: if (!tt) {
2646: PetscCall(VecDuplicate(ksp->vec_sol, &tt));
2647: flag = PETSC_TRUE;
2648: }
2649: PetscUseTypeMethod(ksp, buildresidual, tt, w, V);
2650: if (flag) PetscCall(VecDestroy(&tt));
2651: PetscFunctionReturn(PETSC_SUCCESS);
2652: }
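/*
   Editor's note: a brief sketch (not part of the original source) of retrieving the current residual
   and its norm after a solve; `ksp` and the right-hand side vector `b` are assumed to exist.

     Vec       r, R;
     PetscReal rnorm;
     PetscCall(VecDuplicate(b, &r));
     PetscCall(KSPBuildResidual(ksp, NULL, r, &R)); // residual is returned in R (here R == r)
     PetscCall(VecNorm(R, NORM_2, &rnorm));
     PetscCall(VecDestroy(&r));
*/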
2654: /*@
2655: KSPSetDiagonalScale - Tells `KSP` to symmetrically diagonally scale the system
2656: before solving. This actually CHANGES the matrix (and right hand side).
2658: Logically Collective
2660: Input Parameters:
2661: + ksp - the `KSP` context
2662: - scale - `PETSC_TRUE` or `PETSC_FALSE`
2664: Options Database Keys:
2665: + -ksp_diagonal_scale - symmetrically diagonally scale the system before the solve
2666: - -ksp_diagonal_scale_fix - scale the matrix back AFTER the solve
2668: Level: advanced
2670: Notes:
2671: Scales the system to D^(-1/2) A D^(-1/2) [D^(1/2) x] = D^(-1/2) b,
2672: where D_{ii} is 1/abs(A_{ii}) unless A_{ii} is zero, in which case it is 1.
2674: BE CAREFUL with this routine: it actually scales the matrix and right
2675: hand side that define the system. After the system is solved the matrix
2676: and right hand side remain scaled unless you use `KSPSetDiagonalScaleFix()`
2678: This should NOT be used within the `SNES` solves if you are using a line
2679: search.
2681: If you use this with the `PCType` `PCEISENSTAT` preconditioner then you can
2682: use the `PCEisenstatSetNoDiagonalScaling()` option, or -pc_eisenstat_no_diagonal_scaling
2683: to save some unneeded, redundant flops.
2685: .seealso: [](chapter_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScaleFix()`, `KSP`
2686: @*/
2687: PetscErrorCode KSPSetDiagonalScale(KSP ksp, PetscBool scale)
2688: {
2689: PetscFunctionBegin;
2692: ksp->dscale = scale;
2693: PetscFunctionReturn(PETSC_SUCCESS);
2694: }
2696: /*@
2697: KSPGetDiagonalScale - Checks whether the `KSP` solver scales the matrix and right-hand side, that is, whether `KSPSetDiagonalScale()` has been called
2699: Not Collective
2701: Input Parameter:
2702: . ksp - the `KSP` context
2704: Output Parameter:
2705: . scale - `PETSC_TRUE` or `PETSC_FALSE`
2707: Level: intermediate
2709: .seealso: [](chapter_ksp), `KSP`, `KSPSetDiagonalScale()`, `KSPSetDiagonalScaleFix()`, `KSP`
2710: @*/
2711: PetscErrorCode KSPGetDiagonalScale(KSP ksp, PetscBool *scale)
2712: {
2713: PetscFunctionBegin;
2716: *scale = ksp->dscale;
2717: PetscFunctionReturn(PETSC_SUCCESS);
2718: }
2720: /*@
2721: KSPSetDiagonalScaleFix - Tells `KSP` to diagonally scale the system back after solving.
2723: Logically Collective
2725: Input Parameters:
2726: + ksp - the `KSP` context
2727: - fix - `PETSC_TRUE` to scale back after the system solve, `PETSC_FALSE` to not
2728: rescale (default)
2730: Notes:
2731: Must be called after `KSPSetDiagonalScale()`
2733: Using this will slow things down, because it rescales the matrix before and
2734: after each linear solve. This is intended mainly for testing to allow one
2735: to easily get back the original system to make sure the solution computed is
2736: accurate enough.
2738: Level: intermediate
2740: .seealso: [](chapter_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScale()`, `KSPGetDiagonalScaleFix()`, `KSP`
2741: @*/
2742: PetscErrorCode KSPSetDiagonalScaleFix(KSP ksp, PetscBool fix)
2743: {
2744: PetscFunctionBegin;
2747: ksp->dscalefix = fix;
2748: PetscFunctionReturn(PETSC_SUCCESS);
2749: }
2751: /*@
2752: KSPGetDiagonalScaleFix - Determines whether `KSP` diagonally scales the system back after solving, that is, whether `KSPSetDiagonalScaleFix()` has been called
2754: Not Collective
2756: Input Parameter:
2757: . ksp - the `KSP` context
2759: Output Parameter:
2760: . fix - `PETSC_TRUE` to scale back after the system solve, `PETSC_FALSE` to not
2761: rescale (default)
2763: Level: intermediate
2765: .seealso: [](chapter_ksp), `KSPGetDiagonalScale()`, `KSPSetDiagonalScale()`, `KSPSetDiagonalScaleFix()`, `KSP`
2766: @*/
2767: PetscErrorCode KSPGetDiagonalScaleFix(KSP ksp, PetscBool *fix)
2768: {
2769: PetscFunctionBegin;
2772: *fix = ksp->dscalefix;
2773: PetscFunctionReturn(PETSC_SUCCESS);
2774: }
2776: /*@C
2777: KSPSetComputeOperators - set routine to compute the linear operators
2779: Logically Collective
2781: Input Parameters:
2782: + ksp - the `KSP` context
2783: . func - function to compute the operators
2784: - ctx - optional context
2786: Calling sequence of func:
2787: $ func(KSP ksp,Mat A,Mat B,void *ctx)
2789: + ksp - the `KSP` context
2790: . A - the linear operator
2791: . B - preconditioning matrix
2792: - ctx - optional user-provided context
2794: Level: beginner
2796: Notes:
2797: The user-provided func() will be called automatically at the very next call to `KSPSolve()`. It will NOT be called at future `KSPSolve()` calls
2798: unless either `KSPSetComputeOperators()` or `KSPSetOperators()` is called before that `KSPSolve()` is called. This allows the same system to be solved several times
2799: with different right hand sides but is a confusing API since one might expect it to be called for each `KSPSolve()`.
2801: To reuse the same preconditioner for the next `KSPSolve()` and not compute a new one based on the most recently computed matrix call `KSPSetReusePreconditioner()`
2803: Developer Note:
2804: Perhaps this routine and `KSPSetComputeRHS()` could be combined into a new API that makes clear when new matrices are computed without requiring a call to this
2805: routine to indicate when the new matrix should be computed.
2807: .seealso: [](chapter_ksp), `KSP`, `KSPSetOperators()`, `KSPSetComputeRHS()`, `DMKSPSetComputeOperators()`, `KSPSetComputeInitialGuess()`
2808: @*/
2809: PetscErrorCode KSPSetComputeOperators(KSP ksp, PetscErrorCode (*func)(KSP, Mat, Mat, void *), void *ctx)
2810: {
2811: DM dm;
2813: PetscFunctionBegin;
2815: PetscCall(KSPGetDM(ksp, &dm));
2816: PetscCall(DMKSPSetComputeOperators(dm, func, ctx));
2817: if (ksp->setupstage == KSP_SETUP_NEWRHS) ksp->setupstage = KSP_SETUP_NEWMATRIX;
2818: PetscFunctionReturn(PETSC_SUCCESS);
2819: }
2821: /*@C
2822: KSPSetComputeRHS - set routine to compute the right hand side of the linear system
2824: Logically Collective
2826: Input Parameters:
2827: + ksp - the `KSP` context
2828: . func - function to compute the right hand side
2829: - ctx - optional context
2831: Calling sequence of func:
2832: $ func(KSP ksp,Vec b,void *ctx)
2834: + ksp - the `KSP` context
2835: . b - right hand side of linear system
2836: - ctx - optional user-provided context
2838: Notes:
2839: The routine you provide will be called EACH time you call `KSPSolve()` to prepare the new right hand side for that solve
2841: Level: beginner
2843: .seealso: [](chapter_ksp), `KSP`, `KSPSolve()`, `DMKSPSetComputeRHS()`, `KSPSetComputeOperators()`, `KSPSetOperators()`
2844: @*/
2845: PetscErrorCode KSPSetComputeRHS(KSP ksp, PetscErrorCode (*func)(KSP, Vec, void *), void *ctx)
2846: {
2847: DM dm;
2849: PetscFunctionBegin;
2851: PetscCall(KSPGetDM(ksp, &dm));
2852: PetscCall(DMKSPSetComputeRHS(dm, func, ctx));
2853: PetscFunctionReturn(PETSC_SUCCESS);
2854: }
2856: /*@C
2857: KSPSetComputeInitialGuess - set routine to compute the initial guess of the linear system
2859: Logically Collective
2861: Input Parameters:
2862: + ksp - the `KSP` context
2863: . func - function to compute the initial guess
2864: - ctx - optional context
2866: Calling sequence of func:
2867: $ func(KSP ksp,Vec x,void *ctx)
2869: + ksp - the `KSP` context
2870: . x - solution vector
2871: - ctx - optional user-provided context
2873: Notes:
2874: This should only be used in conjunction with `KSPSetComputeRHS()` and `KSPSetComputeOperators()`, otherwise
2875: call `KSPSetInitialGuessNonzero()` and set the initial guess values in the solution vector passed to `KSPSolve()` before calling the solver
2877: Level: beginner
2879: .seealso: [](chapter_ksp), `KSP`, `KSPSolve()`, `KSPSetComputeRHS()`, `KSPSetComputeOperators()`, `DMKSPSetComputeInitialGuess()`, `KSPSetInitialGuessNonzero()`
2880: @*/
2881: PetscErrorCode KSPSetComputeInitialGuess(KSP ksp, PetscErrorCode (*func)(KSP, Vec, void *), void *ctx)
2882: {
2883: DM dm;
2885: PetscFunctionBegin;
2887: PetscCall(KSPGetDM(ksp, &dm));
2888: PetscCall(DMKSPSetComputeInitialGuess(dm, func, ctx));
2889: PetscFunctionReturn(PETSC_SUCCESS);
2890: }
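/*
   Editor's note: a sketch (not from the original source) of the DM-based callback style used by
   KSPSetComputeOperators()/KSPSetComputeRHS(); the function names ComputeRHS and ComputeMatrix are
   hypothetical and would assemble the user's actual problem.

     static PetscErrorCode ComputeRHS(KSP ksp, Vec b, void *ctx)
     {
       PetscFunctionBeginUser;
       PetscCall(VecSet(b, 1.0)); // placeholder right-hand side
       PetscFunctionReturn(PETSC_SUCCESS);
     }

     // in the calling code:
     PetscCall(KSPSetComputeRHS(ksp, ComputeRHS, NULL));
     PetscCall(KSPSetComputeOperators(ksp, ComputeMatrix, NULL)); // ComputeMatrix(KSP, Mat, Mat, void*) assembles A
     PetscCall(KSPSolve(ksp, NULL, NULL));                        // rhs and solution are managed via the DM
*/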
2892: /*@
2893: KSPSetUseExplicitTranspose - Determines whether the explicit transpose of the operator is formed in `KSPSolveTranspose()`. In some configurations (like GPUs) it may
2894: be worthwhile to form it explicitly when possible, since the resulting solve is much more efficient.
2896: Logically Collective
2898: Input Parameters:
2899: + ksp - the `KSP` context
2902: - flg - `PETSC_TRUE` to transpose the system in `KSPSolveTranspose()`, `PETSC_FALSE` to not transpose (default)
2904: Level: advanced
2906: .seealso: [](chapter_ksp), `KSPSolveTranspose()`, `KSP`
2907: @*/
2908: PetscErrorCode KSPSetUseExplicitTranspose(KSP ksp, PetscBool flg)
2909: {
2910: PetscFunctionBegin;
2913: ksp->transpose.use_explicittranspose = flg;
2914: PetscFunctionReturn(PETSC_SUCCESS);
2915: }