Actual source code: taosolver_fg.c
#include <petsc/private/taoimpl.h>

/*@
  TaoSetSolution - Sets the vector holding the initial guess for the solve

  Logically collective on tao

  Input Parameters:
+ tao - the Tao context
- x0  - the initial guess

  Level: beginner
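
  Example:
  A minimal usage sketch; the global size `n` is illustrative, not part of this routine:
.vb
  Vec x0;
  VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, n, &x0);
  VecSet(x0, 1.0);         // start the solve from the all-ones vector
  TaoSetSolution(tao, x0);
  VecDestroy(&x0);         // Tao keeps its own reference
.ve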

.seealso: `Tao`, `TaoCreate()`, `TaoSolve()`, `TaoGetSolution()`
@*/
PetscErrorCode TaoSetSolution(Tao tao, Vec x0)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  PetscCall(PetscObjectReference((PetscObject)x0));
  PetscCall(VecDestroy(&tao->solution));
  tao->solution = x0;
  PetscFunctionReturn(0);
}
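
/* TaoTestGradient - Compares the hand-coded gradient, already computed and passed in as g1,
   against a finite difference approximation. It is driven entirely by the options database;
   a typical invocation (program name illustrative) is

     ./myapp -tao_test_gradient -tao_test_gradient_view

   which prints the norms of both gradients and, with the view option, their entry-wise
   difference. */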
PetscErrorCode TaoTestGradient(Tao tao, Vec x, Vec g1)
{
  Vec               g2, g3;
  PetscBool         complete_print = PETSC_FALSE, test = PETSC_FALSE;
  PetscReal         hcnorm, fdnorm, hcmax, fdmax, diffmax, diffnorm;
  PetscScalar       dot;
  MPI_Comm          comm;
  PetscViewer       viewer, mviewer;
  PetscViewerFormat format;
  PetscInt          tabs;
  static PetscBool  directionsprinted = PETSC_FALSE;

  PetscFunctionBegin;
  PetscObjectOptionsBegin((PetscObject)tao);
  PetscCall(PetscOptionsName("-tao_test_gradient", "Compare hand-coded and finite difference Gradients", "None", &test));
  PetscCall(PetscOptionsViewer("-tao_test_gradient_view", "View difference between hand-coded and finite difference Gradients element entries", "None", &mviewer, &format, &complete_print));
  PetscOptionsEnd();
  if (!test) {
    if (complete_print) PetscCall(PetscViewerDestroy(&mviewer));
    PetscFunctionReturn(0);
  }
  PetscCall(PetscObjectGetComm((PetscObject)tao, &comm));
  PetscCall(PetscViewerASCIIGetStdout(comm, &viewer));
  PetscCall(PetscViewerASCIIGetTab(viewer, &tabs));
  PetscCall(PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel));
  PetscCall(PetscViewerASCIIPrintf(viewer, " ---------- Testing Gradient -------------\n"));
  if (!complete_print && !directionsprinted) {
    PetscCall(PetscViewerASCIIPrintf(viewer, " Run with -tao_test_gradient_view and optionally -tao_test_gradient <threshold> to show difference\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " of hand-coded and finite difference gradient entries greater than <threshold>.\n"));
  }
  if (!directionsprinted) {
    PetscCall(PetscViewerASCIIPrintf(viewer, " Testing hand-coded Gradient, if (for double precision runs) ||G - Gfd||/||G|| is\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " O(1.e-8), the hand-coded Gradient is probably correct.\n"));
    directionsprinted = PETSC_TRUE;
  }
  if (complete_print) PetscCall(PetscViewerPushFormat(mviewer, format));

  PetscCall(VecDuplicate(x, &g2));
  PetscCall(VecDuplicate(x, &g3));

  /* Compute the finite difference gradient; the hand-coded gradient is assumed to have already been computed by TaoComputeGradient() and placed in g1 */
  PetscCall(TaoDefaultComputeGradient(tao, x, g2, NULL));

  PetscCall(VecNorm(g2, NORM_2, &fdnorm));
  PetscCall(VecNorm(g1, NORM_2, &hcnorm));
  PetscCall(VecNorm(g2, NORM_INFINITY, &fdmax));
  PetscCall(VecNorm(g1, NORM_INFINITY, &hcmax));
  PetscCall(VecDot(g1, g2, &dot));
  PetscCall(VecCopy(g1, g3));
  PetscCall(VecAXPY(g3, -1.0, g2));
  PetscCall(VecNorm(g3, NORM_2, &diffnorm));
  PetscCall(VecNorm(g3, NORM_INFINITY, &diffmax));
  PetscCall(PetscViewerASCIIPrintf(viewer, " ||Gfd|| = %g, ||G|| = %g, angle cosine = (Gfd'G)/||Gfd||||G|| = %g\n", (double)fdnorm, (double)hcnorm, (double)(PetscRealPart(dot) / (fdnorm * hcnorm))));
  PetscCall(PetscViewerASCIIPrintf(viewer, " 2-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n", (double)(diffnorm / PetscMax(hcnorm, fdnorm)), (double)diffnorm));
  PetscCall(PetscViewerASCIIPrintf(viewer, " max-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n", (double)(diffmax / PetscMax(hcmax, fdmax)), (double)diffmax));

  if (complete_print) {
    PetscCall(PetscViewerASCIIPrintf(viewer, " Hand-coded gradient ----------\n"));
    PetscCall(VecView(g1, mviewer));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Finite difference gradient ----------\n"));
    PetscCall(VecView(g2, mviewer));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Hand-coded minus finite-difference gradient ----------\n"));
    PetscCall(VecView(g3, mviewer));
  }
  PetscCall(VecDestroy(&g2));
  PetscCall(VecDestroy(&g3));

  if (complete_print) {
    PetscCall(PetscViewerPopFormat(mviewer));
    PetscCall(PetscViewerDestroy(&mviewer));
  }
  PetscCall(PetscViewerASCIISetTab(viewer, tabs));
  PetscFunctionReturn(0);
}

/*@
  TaoComputeGradient - Computes the gradient of the objective function

  Collective on tao

  Input Parameters:
+ tao - the Tao context
- X   - input vector

  Output Parameter:
. G - gradient vector

  Options Database Keys:
+ -tao_test_gradient      - compare the user-provided gradient with one computed via finite differences to check for errors
- -tao_test_gradient_view - display the user-provided gradient, the finite difference gradient, and the difference between them to help users detect the location of errors in the user-provided gradient

  Note:
  `TaoComputeGradient()` is typically used within the implementation of the optimization method,
  so most users would not generally call this routine themselves.

  Level: developer
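
  Example:
  A minimal sketch of direct use (solvers normally call this internally); the vector `G` is illustrative:
.vb
  Vec G;
  VecDuplicate(X, &G);
  TaoComputeGradient(tao, X, G); // fills G with the gradient of the objective at X
  VecDestroy(&G);
.ve
  The gradient test is enabled entirely from the options database, e.g. by running the
  application with -tao_test_gradient -tao_test_gradient_view.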

.seealso: `TaoComputeObjective()`, `TaoComputeObjectiveAndGradient()`, `TaoSetGradient()`
@*/
PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G)
{
  PetscReal dummy;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  PetscValidHeaderSpecific(X, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(G, VEC_CLASSID, 3);
  PetscCheckSameComm(tao, 1, X, 2);
  PetscCheckSameComm(tao, 1, G, 3);
  PetscCall(VecLockReadPush(X));
  if (tao->ops->computegradient) {
    PetscCall(PetscLogEventBegin(TAO_GradientEval, tao, X, G, NULL));
    PetscCallBack("Tao callback gradient", (*tao->ops->computegradient)(tao, X, G, tao->user_gradP));
    PetscCall(PetscLogEventEnd(TAO_GradientEval, tao, X, G, NULL));
    tao->ngrads++;
  } else if (tao->ops->computeobjectiveandgradient) {
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval, tao, X, G, NULL));
    PetscCallBack("Tao callback objective/gradient", (*tao->ops->computeobjectiveandgradient)(tao, X, &dummy, G, tao->user_objgradP));
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval, tao, X, G, NULL));
    tao->nfuncgrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetGradient() has not been called");
  PetscCall(VecLockReadPop(X));

  PetscCall(TaoTestGradient(tao, X, G));
  PetscFunctionReturn(0);
}

/*@
  TaoComputeObjective - Computes the objective function value at a given point

  Collective on tao

  Input Parameters:
+ tao - the Tao context
- X   - input vector

  Output Parameter:
. f - Objective value at X

  Note:
  `TaoComputeObjective()` is typically used within the implementation of the optimization algorithm,
  so most users would not generally call this routine themselves.

  Level: developer

.seealso: `Tao`, `TaoComputeGradient()`, `TaoComputeObjectiveAndGradient()`, `TaoSetObjective()`
@*/
PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f)
{
  Vec temp;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  PetscValidHeaderSpecific(X, VEC_CLASSID, 2);
  PetscCheckSameComm(tao, 1, X, 2);
  PetscCall(VecLockReadPush(X));
  if (tao->ops->computeobjective) {
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval, tao, X, NULL, NULL));
    PetscCallBack("Tao callback objective", (*tao->ops->computeobjective)(tao, X, f, tao->user_objP));
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval, tao, X, NULL, NULL));
    tao->nfuncs++;
  } else if (tao->ops->computeobjectiveandgradient) {
    PetscCall(PetscInfo(tao, "Duplicating variable vector in order to call func/grad routine\n"));
    PetscCall(VecDuplicate(X, &temp));
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval, tao, X, NULL, NULL));
    PetscCallBack("Tao callback objective/gradient", (*tao->ops->computeobjectiveandgradient)(tao, X, f, temp, tao->user_objgradP));
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval, tao, X, NULL, NULL));
    PetscCall(VecDestroy(&temp));
    tao->nfuncgrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetObjective() has not been called");
  PetscCall(PetscInfo(tao, "TAO Function evaluation: %20.19e\n", (double)(*f)));
  PetscCall(VecLockReadPop(X));
  PetscFunctionReturn(0);
}

/*@
  TaoComputeObjectiveAndGradient - Computes the objective function value and its gradient at a given point

  Collective on tao

  Input Parameters:
+ tao - the Tao context
- X   - input vector

  Output Parameters:
+ f - Objective value at X
- g - Gradient vector at X

  Note:
  `TaoComputeObjectiveAndGradient()` is typically used within the implementation of the optimization algorithm,
  so most users would not generally call this routine themselves.

  Level: developer

.seealso: `TaoComputeObjective()`, `TaoComputeGradient()`, `TaoSetObjective()`
@*/
PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  PetscValidHeaderSpecific(X, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(G, VEC_CLASSID, 4);
  PetscCheckSameComm(tao, 1, X, 2);
  PetscCall(VecLockReadPush(X));
  if (tao->ops->computeobjectiveandgradient) {
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval, tao, X, G, NULL));
    if (tao->ops->computegradient == TaoDefaultComputeGradient) {
      PetscCall(TaoComputeObjective(tao, X, f));
      PetscCall(TaoDefaultComputeGradient(tao, X, G, NULL));
    } else {
      PetscCallBack("Tao callback objective/gradient", (*tao->ops->computeobjectiveandgradient)(tao, X, f, G, tao->user_objgradP));
    }
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval, tao, X, G, NULL));
    tao->nfuncgrads++;
  } else if (tao->ops->computeobjective && tao->ops->computegradient) {
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval, tao, X, NULL, NULL));
    PetscCallBack("Tao callback objective", (*tao->ops->computeobjective)(tao, X, f, tao->user_objP));
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval, tao, X, NULL, NULL));
    tao->nfuncs++;
    PetscCall(PetscLogEventBegin(TAO_GradientEval, tao, X, G, NULL));
    PetscCallBack("Tao callback gradient", (*tao->ops->computegradient)(tao, X, G, tao->user_gradP));
    PetscCall(PetscLogEventEnd(TAO_GradientEval, tao, X, G, NULL));
    tao->ngrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetObjective() or TaoSetGradient() not set");
  PetscCall(PetscInfo(tao, "TAO Function evaluation: %20.19e\n", (double)(*f)));
  PetscCall(VecLockReadPop(X));

  PetscCall(TaoTestGradient(tao, X, G));
  PetscFunctionReturn(0);
}

/*@C
  TaoSetObjective - Sets the function evaluation routine for minimization

  Logically collective on tao

  Input Parameters:
+ tao  - the Tao context
. func - the objective function
- ctx  - [optional] user-defined context for private data for the function evaluation
         routine (may be NULL)

  Calling sequence of func:
$ func(Tao tao, Vec x, PetscReal *f, void *ctx);

+ x   - input vector
. f   - function value
- ctx - [optional] user-defined function context

  Level: beginner
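
  Example:
  A minimal sketch; `FormObjective()` is an illustrative user routine, not part of this file:
.vb
  PetscErrorCode FormObjective(Tao tao, Vec X, PetscReal *f, void *ctx)
  {
    PetscScalar xtx;

    PetscFunctionBegin;
    PetscCall(VecDot(X, X, &xtx)); // global dot product, so this also works in parallel
    *f = 0.5 * PetscRealPart(xtx); // f(x) = ||x||^2/2
    PetscFunctionReturn(0);
  }
  ...
  TaoSetObjective(tao, FormObjective, NULL);
.ve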

.seealso: `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoGetObjective()`
@*/
PetscErrorCode TaoSetObjective(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  if (ctx) tao->user_objP = ctx;
  if (func) tao->ops->computeobjective = func;
  PetscFunctionReturn(0);
}

/*@C
  TaoGetObjective - Gets the function evaluation routine for the function to be minimized

  Not collective

  Input Parameter:
. tao - the Tao context

  Output Parameters:
+ func - the objective function
- ctx  - the user-defined context for private data for the function evaluation

  Calling sequence of func:
$ func(Tao tao, Vec x, PetscReal *f, void *ctx);

+ x   - input vector
. f   - function value
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: `Tao`, `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjective()`
@*/
PetscErrorCode TaoGetObjective(Tao tao, PetscErrorCode (**func)(Tao, Vec, PetscReal *, void *), void **ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  if (func) *func = tao->ops->computeobjective;
  if (ctx) *ctx = tao->user_objP;
  PetscFunctionReturn(0);
}

/*@C
  TaoSetResidualRoutine - Sets the residual evaluation routine for least-squares applications

  Logically collective on tao

  Input Parameters:
+ tao  - the Tao context
. res  - the vector to internally hold the residual computation
. func - the residual evaluation routine
- ctx  - [optional] user-defined context for private data for the function evaluation
         routine (may be NULL)

  Calling sequence of func:
$ func(Tao tao, Vec x, Vec f, void *ctx);

+ x   - input vector
. f   - residual vector
- ctx - [optional] user-defined function context

  Level: beginner
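
  Example:
  A minimal sketch; `FormResidual()` is an illustrative user routine and, for simplicity, the
  residual has the same layout as the solution vector `x`:
.vb
  PetscErrorCode FormResidual(Tao tao, Vec X, Vec F, void *ctx)
  {
    PetscFunctionBegin;
    PetscCall(VecCopy(X, F));
    PetscCall(VecShift(F, -1.0)); // F(x) = x - 1, minimized by x = 1
    PetscFunctionReturn(0);
  }
  ...
  Vec res;
  VecDuplicate(x, &res);
  TaoSetResidualRoutine(tao, res, FormResidual, NULL);
  VecDestroy(&res); // Tao keeps its own reference
.ve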

.seealso: `Tao`, `TaoSetObjective()`, `TaoSetJacobianRoutine()`
@*/
PetscErrorCode TaoSetResidualRoutine(Tao tao, Vec res, PetscErrorCode (*func)(Tao, Vec, Vec, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  PetscValidHeaderSpecific(res, VEC_CLASSID, 2);
  PetscCall(PetscObjectReference((PetscObject)res));
  if (tao->ls_res) PetscCall(VecDestroy(&tao->ls_res));
  tao->ls_res               = res;
  tao->user_lsresP          = ctx;
  tao->ops->computeresidual = func;

  PetscFunctionReturn(0);
}

/*@
  TaoSetResidualWeights - Sets weights for the residual values. A vector can be used if only
  diagonal terms are needed; otherwise a matrix, given in coordinate form, can be provided.
  If this function is not called, or if both sigma_v and the matrix weights are NULL, then the
  identity matrix is used for the weights.

  Collective on tao

  Input Parameters:
+ tao     - the Tao context
. sigma_v - vector of weights (diagonal terms only)
. n       - the number of weights (if using off-diagonal terms)
. rows    - index list of rows for the matrix weights
. cols    - index list of columns for the matrix weights
- vals    - array of matrix weights

  Note:
  Either sigma_v or the matrix weights (rows, cols, vals), or both, should be NULL

  Level: intermediate
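
  Example:
  A sketch weighting every residual entry by 2; the vector `res` (matching the residual layout)
  is illustrative:
.vb
  Vec sigma;
  VecDuplicate(res, &sigma);
  VecSet(sigma, 2.0);
  TaoSetResidualWeights(tao, sigma, 0, NULL, NULL, NULL);
  VecDestroy(&sigma); // Tao keeps its own reference
.ve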

.seealso: `Tao`, `TaoSetResidualRoutine()`
@*/
PetscErrorCode TaoSetResidualWeights(Tao tao, Vec sigma_v, PetscInt n, PetscInt *rows, PetscInt *cols, PetscReal *vals)
{
  PetscInt i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  if (sigma_v) PetscValidHeaderSpecific(sigma_v, VEC_CLASSID, 2);
  PetscCall(PetscObjectReference((PetscObject)sigma_v));
  PetscCall(VecDestroy(&tao->res_weights_v));
  tao->res_weights_v = sigma_v;
  if (vals) {
    PetscCall(PetscFree(tao->res_weights_rows));
    PetscCall(PetscFree(tao->res_weights_cols));
    PetscCall(PetscFree(tao->res_weights_w));
    PetscCall(PetscMalloc1(n, &tao->res_weights_rows));
    PetscCall(PetscMalloc1(n, &tao->res_weights_cols));
    PetscCall(PetscMalloc1(n, &tao->res_weights_w));
    tao->res_weights_n = n;
    for (i = 0; i < n; i++) {
      tao->res_weights_rows[i] = rows[i];
      tao->res_weights_cols[i] = cols[i];
      tao->res_weights_w[i]    = vals[i];
    }
  } else {
    tao->res_weights_n    = 0;
    tao->res_weights_rows = NULL;
    tao->res_weights_cols = NULL;
  }
  PetscFunctionReturn(0);
}

/*@
  TaoComputeResidual - Computes a least-squares residual vector at a given point

  Collective on tao

  Input Parameters:
+ tao - the Tao context
- X   - input vector

  Output Parameter:
. F - residual vector at X

  Note:
  `TaoComputeResidual()` is typically used within the implementation of the optimization algorithm,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: `Tao`, `TaoSetResidualRoutine()`
@*/
PetscErrorCode TaoComputeResidual(Tao tao, Vec X, Vec F)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  PetscValidHeaderSpecific(X, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(F, VEC_CLASSID, 3);
  PetscCheckSameComm(tao, 1, X, 2);
  PetscCheckSameComm(tao, 1, F, 3);
  if (tao->ops->computeresidual) {
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval, tao, X, NULL, NULL));
    PetscCallBack("Tao callback least-squares residual", (*tao->ops->computeresidual)(tao, X, F, tao->user_lsresP));
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval, tao, X, NULL, NULL));
    tao->nfuncs++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetResidualRoutine() has not been called");
  PetscCall(PetscInfo(tao, "TAO least-squares residual evaluation.\n"));
  PetscFunctionReturn(0);
}

/*@C
  TaoSetGradient - Sets the gradient evaluation routine for the function to be optimized

  Logically collective on tao

  Input Parameters:
+ tao  - the Tao context
. g    - [optional] the vector to internally hold the gradient computation
. func - the gradient function
- ctx  - [optional] user-defined context for private data for the gradient evaluation
         routine (may be NULL)

  Calling sequence of func:
$ func(Tao tao, Vec x, Vec g, void *ctx);

+ x   - input vector
. g   - gradient value (output)
- ctx - [optional] user-defined function context

  Level: beginner
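
  Example:
  A minimal sketch; `FormGradient()` is an illustrative user routine, not part of this file:
.vb
  PetscErrorCode FormGradient(Tao tao, Vec X, Vec G, void *ctx)
  {
    PetscFunctionBegin;
    PetscCall(VecCopy(X, G)); // gradient of f(x) = ||x||^2/2 is x itself
    PetscFunctionReturn(0);
  }
  ...
  TaoSetGradient(tao, NULL, FormGradient, NULL); // with g = NULL, Tao holds the gradient vector internally
.ve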

.seealso: `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoGetGradient()`
@*/
PetscErrorCode TaoSetGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao, Vec, Vec, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  if (g) {
    PetscValidHeaderSpecific(g, VEC_CLASSID, 2);
    PetscCheckSameComm(tao, 1, g, 2);
    PetscCall(PetscObjectReference((PetscObject)g));
    PetscCall(VecDestroy(&tao->gradient));
    tao->gradient = g;
  }
  if (func) tao->ops->computegradient = func;
  if (ctx) tao->user_gradP = ctx;
  PetscFunctionReturn(0);
}

/*@C
  TaoGetGradient - Gets the gradient evaluation routine for the function being optimized

  Not collective

  Input Parameter:
. tao - the Tao context

  Output Parameters:
+ g    - the vector to internally hold the gradient computation
. func - the gradient function
- ctx  - user-defined context for private data for the gradient evaluation routine

  Calling sequence of func:
$ func(Tao tao, Vec x, Vec g, void *ctx);

+ x   - input vector
. g   - gradient value (output)
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: `Tao`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoSetGradient()`
@*/
PetscErrorCode TaoGetGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao, Vec, Vec, void *), void **ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  if (g) *g = tao->gradient;
  if (func) *func = tao->ops->computegradient;
  if (ctx) *ctx = tao->user_gradP;
  PetscFunctionReturn(0);
}

/*@C
  TaoSetObjectiveAndGradient - Sets a combined objective function and gradient evaluation routine for the function to be optimized

  Logically collective on tao

  Input Parameters:
+ tao  - the Tao context
. g    - [optional] the vector to internally hold the gradient computation
. func - the objective and gradient evaluation routine
- ctx  - [optional] user-defined context for private data for the evaluation
         routine (may be NULL)

  Calling sequence of func:
$ func(Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);

+ x   - input vector
. f   - objective value (output)
. g   - gradient value (output)
- ctx - [optional] user-defined function context

  Level: beginner

  Note:
  For some optimization methods using a combined function can be more efficient, since
  intermediate work can be shared between the two outputs, as the sketch below illustrates.
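
  Example:
  A minimal sketch of the combined form; `FormFunctionGradient()` is an illustrative user routine
  that shares the dot product between both outputs:
.vb
  PetscErrorCode FormFunctionGradient(Tao tao, Vec X, PetscReal *f, Vec G, void *ctx)
  {
    PetscScalar xtx;

    PetscFunctionBegin;
    PetscCall(VecDot(X, X, &xtx)); // computed once, used for both f and G
    *f = 0.5 * PetscRealPart(xtx); // f(x) = ||x||^2/2
    PetscCall(VecCopy(X, G));      // grad f(x) = x
    PetscFunctionReturn(0);
  }
  ...
  TaoSetObjectiveAndGradient(tao, NULL, FormFunctionGradient, NULL);
.ve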

.seealso: `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetGradient()`, `TaoGetObjectiveAndGradient()`
@*/
PetscErrorCode TaoSetObjectiveAndGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  if (g) {
    PetscValidHeaderSpecific(g, VEC_CLASSID, 2);
    PetscCheckSameComm(tao, 1, g, 2);
    PetscCall(PetscObjectReference((PetscObject)g));
    PetscCall(VecDestroy(&tao->gradient));
    tao->gradient = g;
  }
  if (ctx) tao->user_objgradP = ctx;
  if (func) tao->ops->computeobjectiveandgradient = func;
  PetscFunctionReturn(0);
}

/*@C
  TaoGetObjectiveAndGradient - Gets the combined objective function and gradient evaluation routine for the function to be optimized

  Not collective

  Input Parameter:
. tao - the Tao context

  Output Parameters:
+ g    - the vector to internally hold the gradient computation
. func - the objective and gradient evaluation routine
- ctx  - user-defined context for private data for the evaluation routine

  Calling sequence of func:
$ func(Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);

+ x   - input vector
. f   - objective value (output)
. g   - gradient value (output)
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`
@*/
PetscErrorCode TaoGetObjectiveAndGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao, Vec, PetscReal *, Vec, void *), void **ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  if (g) *g = tao->gradient;
  if (func) *func = tao->ops->computeobjectiveandgradient;
  if (ctx) *ctx = tao->user_objgradP;
  PetscFunctionReturn(0);
}

/*@
  TaoIsObjectiveDefined - Checks to see if the user has
  declared an objective-only routine. Useful for determining when
  it is appropriate to call `TaoComputeObjective()` or
  `TaoComputeObjectiveAndGradient()`

  Not collective

  Input Parameter:
. tao - the Tao context

  Output Parameter:
. flg - `PETSC_TRUE` if the routine is set by the user, `PETSC_FALSE` otherwise

  Level: developer
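
  Example:
  A sketch of the dispatch pattern inside a solver; `X`, `G`, and `f` are illustrative:
.vb
  PetscBool flg;
  TaoIsObjectiveDefined(tao, &flg);
  if (flg) TaoComputeObjective(tao, X, &f);           // objective-only callback available
  else TaoComputeObjectiveAndGradient(tao, X, &f, G); // fall back to the combined callback
.ve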

.seealso: `Tao`, `TaoSetObjective()`, `TaoIsGradientDefined()`, `TaoIsObjectiveAndGradientDefined()`
@*/
PetscErrorCode TaoIsObjectiveDefined(Tao tao, PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  if (tao->ops->computeobjective == NULL) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
  TaoIsGradientDefined - Checks to see if the user has
  declared a gradient-only routine. Useful for determining when
  it is appropriate to call `TaoComputeGradient()` or
  `TaoComputeObjectiveAndGradient()`

  Not Collective

  Input Parameter:
. tao - the Tao context

  Output Parameter:
. flg - `PETSC_TRUE` if the routine is set by the user, `PETSC_FALSE` otherwise

  Level: developer

.seealso: `TaoSetGradient()`, `TaoIsObjectiveDefined()`, `TaoIsObjectiveAndGradientDefined()`
@*/
PetscErrorCode TaoIsGradientDefined(Tao tao, PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  if (tao->ops->computegradient == NULL) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
  TaoIsObjectiveAndGradientDefined - Checks to see if the user has
  declared a joint objective/gradient routine. Useful for determining when
  it is appropriate to call `TaoComputeObjective()` or
  `TaoComputeObjectiveAndGradient()`

  Not Collective

  Input Parameter:
. tao - the Tao context

  Output Parameter:
. flg - `PETSC_TRUE` if the routine is set by the user, `PETSC_FALSE` otherwise

  Level: developer

.seealso: `TaoSetObjectiveAndGradient()`, `TaoIsObjectiveDefined()`, `TaoIsGradientDefined()`
@*/
PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao tao, PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  if (tao->ops->computeobjectiveandgradient == NULL) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  PetscFunctionReturn(0);
}