Actual source code: tao_fghj.c
#include "src/tao_impl.h" /*I "tao_solver.h" I*/

/* --------------------------------------------------------------- */

/*@C
   TaoComputeFunction - Computes the function that has been
   set with TaoSetFunction().

   Collective on TAO_SOLVER

   Input Parameters:
+  solver - the TAO_SOLVER solver context
-  xx - input vector

   Output Parameter:
.  y - function value

   Notes:
   TaoComputeFunction() is typically used within minimization
   implementations, so most users would not generally call this routine
   themselves.

   Level: developer

.keywords: TAO_SOLVER, compute, minimization, function

.seealso: TaoApplication::EvaluateObjectiveFunction(),
          TaoComputeFunctionGradient(), TaoComputeHessian()
@*/
int TaoComputeFunction(TAO_SOLVER solver, TaoVec *xx, double *y)
{
  int info;

  TaoFunctionBegin;
  TaoValidHeaderSpecific(solver,TAO_COOKIE,1);

  if (solver->taoappl){
    info = solver->taoappl->EvaluateObjectiveFunction(xx,y); CHKERRQ(info);
  }
  solver->nfuncs++;
  TaoFunctionReturn(0);
}

/*@
   TaoIncrementGradientsCounter - Increments the number of gradient
   evaluations counted by TAO.

   Not Collective

   Input Parameters:
+  solver - TAO_SOLVER context
-  nevals - number of gradient evaluations to be added

   Notes:
   This counter is reset to zero for each successive call to TaoSolve().

   Level: developer

.keywords: TAO_SOLVER, gradient, counter
@*/
int TaoIncrementGradientsCounter(TAO_SOLVER solver, int nevals)
{
  TaoFunctionBegin;
  TaoValidHeaderSpecific(solver,TAO_COOKIE,1);
  solver->ngrads += nevals;
  TaoFunctionReturn(0);
}

/*@C
   TaoComputeGradient - Computes the gradient of the objective function.

   Collective on TAO_SOLVER

   Input Parameters:
+  solver - the TAO_SOLVER solver context
-  xx - input vector

   Output Parameter:
.  gg - gradient vector

   Notes:
   TaoComputeGradient() is typically used within minimization
   implementations, so most users would not generally call this routine
   themselves.

   Level: developer

.keywords: TAO_SOLVER, compute, gradient

.seealso: TaoApplication::EvaluateGradient(), TaoComputeFunction()
@*/
int TaoComputeGradient(TAO_SOLVER solver, TaoVec *xx, TaoVec *gg)
{
  int info;

  TaoFunctionBegin;
  TaoValidHeaderSpecific(solver,TAO_COOKIE,1);

  if (solver->taoappl){
    info = solver->taoappl->EvaluateGradient(xx,gg); CHKERRQ(info);
  }
  solver->ngrads++;

  if (solver->viewgradient) {info=gg->View();CHKERRQ(info);}

  TaoFunctionReturn(0);
}

/*@C
   TaoComputeFunctionGradient - Computes the function value and its gradient.

   Collective on TAO_SOLVER

   Input Parameters:
+  solver - the TAO_SOLVER solver context
-  xx - input vector

   Output Parameters:
+  f - function value
-  gg - gradient vector

   Notes:
   TaoComputeFunctionGradient() is typically used within minimization
   implementations, so most users would not generally call this routine
   themselves.

   Level: developer

.keywords: TAO_SOLVER, compute, gradient

.seealso: TaoApplication::EvaluateObjectiveAndGradient(), TaoComputeFunction()
@*/
int TaoComputeFunctionGradient(TAO_SOLVER solver, TaoVec *xx, double *f, TaoVec *gg)
{
  int info;

  TaoFunctionBegin;
  TaoValidHeaderSpecific(solver,TAO_COOKIE,1);

  if (solver->taoappl){
    info = solver->taoappl->EvaluateObjectiveAndGradient(xx,f,gg); CHKERRQ(info);
  }
  solver->nfgrads++;

  if (solver->viewgradient) {info=gg->View();CHKERRQ(info);}

  TaoFunctionReturn(0);
}

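/* Editorial example (not part of the original tao_fghj.c): a minimal sketch of
   how a line-search based solver implementation might evaluate a trial point
   using the routines above.  The helper name MyEvaluateTrialPoint and the
   assumption that xt and gt were created elsewhere are illustrative only. */
int MyEvaluateTrialPoint(TAO_SOLVER solver, TaoVec *xt, TaoVec *gt, double *ft)
{
  int info;

  TaoFunctionBegin;
  /* Combined evaluation updates solver->nfgrads; calling TaoComputeFunction()
     and TaoComputeGradient() separately would update nfuncs and ngrads instead. */
  info = TaoComputeFunctionGradient(solver, xt, ft, gt); CHKERRQ(info);
  TaoFunctionReturn(0);
}
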
/*@C
   TaoComputeHessian - Computes the Hessian matrix that has been
   set with TaoSetHessian().

   Collective on TAO_SOLVER and Mat

   Input Parameters:
+  solver - the TAO_SOLVER solver context
-  xx - input vector

   Output Parameter:
.  HH - Hessian matrix

   Notes:
   TaoComputeHessian() is typically used within minimization
   implementations, so most users would not generally call this routine
   themselves.

   Level: developer

.keywords: TAO_SOLVER, compute, Hessian, matrix

.seealso: TaoApplication::EvaluateHessian(), TaoComputeFunctionGradient(),
          TaoComputeFunction()
@*/
int TaoComputeHessian(TAO_SOLVER solver, TaoVec *xx, TaoMat *HH)
{
  int info;
  TaoTruth flg;

  TaoFunctionBegin;
  TaoValidHeaderSpecific(solver,TAO_COOKIE,1);
  if (solver->taoappl){
    info = solver->taoappl->EvaluateHessian(xx,HH); CHKERRQ(info);
  }
  solver->nhesss++;

  info = HH->Compatible(xx,xx,&flg); CHKERRQ(info);
  if (flg==TAO_FALSE){
    SETERRQ(1,"Hessian Matrix not Compatible with solution vector");
  }

  if (solver->viewhessian){
    info=HH->View();CHKERRQ(info);
  }

  TaoFunctionReturn(0);
}

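/* Editorial example (not part of the original tao_fghj.c): a hedged sketch of
   the evaluate-then-solve pattern a Newton-type solver might use.  The helper
   name MyNewtonStep and the preallocated step vector dx are assumptions; note
   that not every TaoMat implementation provides a working Solve() method. */
int MyNewtonStep(TAO_SOLVER solver, TaoVec *xx, TaoVec *gg, TaoVec *dx)
{
  int info;
  double f;
  TaoMat *HH;
  TaoTruth success;

  TaoFunctionBegin;
  info = TaoComputeFunctionGradient(solver, xx, &f, gg); CHKERRQ(info);
  info = TaoGetHessian(solver, &HH); CHKERRQ(info);
  info = TaoComputeHessian(solver, xx, HH); CHKERRQ(info);
  /* Approximately solve HH * dx = gg; a descent step would then use -dx.
     'success' reports whether the matrix object considered the solve usable. */
  info = HH->Solve(gg, dx, &success); CHKERRQ(info);
  TaoFunctionReturn(0);
}
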
/*@C
   TaoGetHessian - Sets a pointer to a TaoMat to the address of the
   TaoMat containing the Hessian matrix.

   Input Parameters:
+  solver - the TAO_SOLVER solver context
-  HH - address of a pointer to a TaoMat

   Output Parameter:
.  HH - address of a pointer to the Hessian matrix (or TAO_NULL)

   Notes:
   This routine does not create a matrix.  It sets a pointer
   to the location of an existing matrix.

   Level: developer

.seealso: TaoApplication::EvaluateHessian(), TaoComputeHessian()

.keywords: TAO_SOLVER, get, Hessian
@*/
int TaoGetHessian(TAO_SOLVER solver, TaoMat **HH)
{
  int info;
  TaoFunctionBegin;
  TaoValidHeaderSpecific(solver,TAO_COOKIE,1);
  if (!solver->hessian){
    info = solver->taoappl->GetHessianMatrix(&solver->hessian); CHKERRQ(info);
  }
  if (HH) *HH = solver->hessian;
  TaoFunctionReturn(0);
}

/*@C
   TaoComputeJacobian - Computes the Jacobian matrix that has been
   set.

   Collective on TAO_SOLVER and Mat

   Input Parameters:
+  solver - the TAO_SOLVER solver context
-  xx - input vector

   Output Parameter:
.  JJ - Jacobian matrix

   Notes:
   TaoComputeJacobian() is typically used within minimization
   implementations, so most users would not generally call this routine
   themselves.

   Level: developer

.keywords: TAO_SOLVER, compute, Jacobian, matrix

.seealso: TaoApplication::EvaluateJacobian()
@*/
int TaoComputeJacobian(TAO_SOLVER solver, TaoVec *xx, TaoMat *JJ)
{
  int info;

  TaoFunctionBegin;
  TaoValidHeaderSpecific(solver,TAO_COOKIE,1);
  if (solver->taoappl){
    info = solver->taoappl->EvaluateJacobian(xx,JJ); CHKERRQ(info);
  }
  solver->njac++;

  if (solver->viewjacobian){ info=JJ->View();CHKERRQ(info);}

  TaoFunctionReturn(0);
}

/*@
   TaoEvaluateVariableBounds - Evaluates the lower and upper bounds on the variables.

   Collective on TAO_SOLVER

   Input Parameters:
+  tao - the TAO_SOLVER solver context
.  xxll - vector of lower bounds on the solution vector
-  xxuu - vector of upper bounds on the solution vector

   Level: developer

.keywords: bounds

.seealso: TaoGetVariableBounds()
@*/
int TaoEvaluateVariableBounds(TAO_SOLVER tao, TaoVec *xxll, TaoVec *xxuu)
{
  int info;
  TaoFunctionBegin;
  TaoValidHeaderSpecific(tao,TAO_COOKIE,1);
  info = tao->taoappl->EvaluateVariableBounds(xxll,xxuu); CHKERRQ(info);
  info = TaoCheckBounds(tao); CHKERRQ(info);
  TaoFunctionReturn(0);
}

/*
int TaoAppSetConstraintsBounds(TAO_SOLVER solver, TaoVec *RXL, TaoMat *AA, TaoVec *RXU)
{
  TaoFunctionBegin;
  TaoValidHeaderSpecific(solver,TAO_COOKIE,1);
  solver->RXL=RXL;
  solver->CA=AA;
  solver->RXU=RXU;
  TaoFunctionReturn(0);
}
*/

/*@C
   TaoComputeConstraints - Computes the constraint vector that has been
   set in the application.

   Collective on TAO_SOLVER and Mat

   Input Parameters:
+  solver - the TAO_SOLVER solver context
-  xx - input vector

   Output Parameter:
.  rr - constraint values

   Notes:
   TaoComputeConstraints() is typically used within minimization
   implementations, so most users would not generally call this routine
   themselves.

   Level: developer

.keywords: TAO_SOLVER, compute, constraint

.seealso: TaoAppSetConstraintRoutine()
@*/
int TaoComputeConstraints(TAO_SOLVER solver, TaoVec *xx, TaoVec *rr)
{
  int info;

  TaoFunctionBegin;
  TaoValidHeaderSpecific(solver,TAO_COOKIE,1);
  if (solver->taoappl){
    info = solver->taoappl->EvaluateConstraints(xx,rr); CHKERRQ(info);
  }
  solver->nvfunc++;
  if (solver->viewvfunc){ info=rr->View(); CHKERRQ(info); }

  TaoFunctionReturn(0);
}

/*@C
   TaoGetConstraints - Gets the constraint vector that has been
   set in the application.

   Collective on TAO_SOLVER and Mat

   Input Parameter:
.  solver - the TAO_SOLVER solver context

   Output Parameter:
.  rr - constraint values

   Level: developer

.keywords: TAO_SOLVER, compute, constraints

.seealso: TaoApplication::EvaluateConstraints()
@*/
int TaoGetConstraints(TAO_SOLVER solver, TaoVec **rr)
{
  TaoFunctionBegin;
  TaoValidHeaderSpecific(solver,TAO_COOKIE,1);
  *rr = solver->vfunc;
  TaoFunctionReturn(0);
}

/*@C
   TaoGetJacobian - Gets the Jacobian matrix.

   Collective on TAO_SOLVER and Mat

   Input Parameter:
.  solver - the TAO_SOLVER solver context

   Output Parameter:
.  JJ - Jacobian matrix

   Level: advanced

.keywords: TAO_SOLVER, compute, constraint

.seealso: TaoApplication::EvaluateJacobian(), TaoAppSetConstraintRoutine()
@*/
int TaoGetJacobian(TAO_SOLVER solver, TaoMat **JJ)
{
  TaoFunctionBegin;
  TaoValidHeaderSpecific(solver,TAO_COOKIE,1);
  *JJ = solver->jacobian;
  TaoFunctionReturn(0);
}

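/* Editorial example (not part of the original tao_fghj.c): a minimal sketch of
   how a constrained-solver implementation might refresh the constraint values
   and Jacobian at the current iterate before forming a step.  The helper name
   MyRefreshConstraintData is illustrative only. */
int MyRefreshConstraintData(TAO_SOLVER solver, TaoVec *xx)
{
  int info;
  TaoVec *rr;
  TaoMat *JJ;

  TaoFunctionBegin;
  info = TaoGetConstraints(solver, &rr); CHKERRQ(info);
  info = TaoGetJacobian(solver, &JJ); CHKERRQ(info);
  info = TaoComputeConstraints(solver, xx, rr); CHKERRQ(info);
  info = TaoComputeJacobian(solver, xx, JJ); CHKERRQ(info);
  TaoFunctionReturn(0);
}
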
/*@C
   TaoGetSolution - Sets a pointer to a TaoVec to the address of the
   vector containing the current solution.

   Input Parameters:
+  solver - the TAO_SOLVER solver context
-  xx - the address of a pointer to a TaoVec

   Output Parameter:
.  xx - the solution

   Notes:
   This routine does not create a vector.  It sets a pointer to the
   location of an existing vector, namely the vector set in the application
   and passed to TAO in TaoSetApplication().

   Level: advanced

.keywords: solve, solution

.seealso: TaoCreate(), TaoGetGradient(), TaoSetApplication()
@*/
int TaoGetSolution(TAO_SOLVER solver, TaoVec **xx)
{
  TaoFunctionBegin;
  TaoValidHeaderSpecific(solver,TAO_COOKIE,1);
  *xx=solver->vec_sol;
  TaoFunctionReturn(0);
}

/*@
   TaoSetLagrangianGradientVector - Sets a pointer to the address of a TaoVec that
   contains the gradient of the Lagrangian function.

   Input Parameters:
+  solver - the TAO_SOLVER solver context
-  gg - the gradient of the Lagrangian function

   Level: developer

   Notes:
   This routine does not create a vector.  The vector specified
   here will be returned whenever TaoGetGradient() is called.

.keywords: Gradient

.seealso: TaoGetGradient()

@*/
int TaoSetLagrangianGradientVector(TAO_SOLVER solver, TaoVec *gg)
{
  TaoFunctionBegin;
  TaoValidHeaderSpecific(solver,TAO_COOKIE,1);
  solver->vec_grad = gg;
  TaoFunctionReturn(0);
}

/*@C
   TaoGetGradient - Sets a pointer to the address of the TaoVec that
   contains the gradient of the Lagrangian function.

   Input Parameter:
.  solver - the TAO_SOLVER solver context

   Output Parameter:
.  gg - the gradient of the Lagrangian function

   Level: advanced

   Notes:
   This routine does not create a vector.  It sets a pointer
   to the location of an existing vector.

.keywords: Gradient

.seealso: TaoGetSolution(), TaoGetSolutionStatus(), TaoGetHessian(), TaoSetApplication()

@*/
int TaoGetGradient(TAO_SOLVER solver, TaoVec **gg)
{
  TaoFunctionBegin;
  TaoValidHeaderSpecific(solver,TAO_COOKIE,1);
  *gg = solver->vec_grad;
  TaoFunctionReturn(0);
}

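/* Editorial example (not part of the original tao_fghj.c): a minimal sketch of
   inspecting the current iterate, e.g. from a monitoring routine.  Both
   pointers reference vectors owned elsewhere (see the notes above); nothing is
   created or destroyed here, and the helper name MyInspectIterate is
   illustrative only. */
int MyInspectIterate(TAO_SOLVER solver)
{
  int info;
  TaoVec *xx, *gg;

  TaoFunctionBegin;
  info = TaoGetSolution(solver, &xx); CHKERRQ(info);
  info = TaoGetGradient(solver, &gg); CHKERRQ(info);
  info = xx->View(); CHKERRQ(info);   /* print the current solution           */
  info = gg->View(); CHKERRQ(info);   /* print the gradient of the Lagrangian */
  TaoFunctionReturn(0);
}
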
/* --------------------------------------------------------------- */

/*@C
   TaoComputeMeritFunction - Computes the function that has been
   set with TaoSetMeritFunction().

   Collective on TAO_SOLVER

   Input Parameters:
+  solver - the TAO_SOLVER solver context
-  xx - input vector

   Output Parameter:
.  y - merit function value

   Notes:
   TaoComputeMeritFunction() is typically used within minimization
   implementations, so most users would not generally call this routine
   themselves.

   Level: developer

.keywords: TAO_SOLVER, compute, minimization, function

.seealso: TaoSetMeritFunction()
@*/
int TaoComputeMeritFunction(TAO_SOLVER solver, TaoVec *xx, double *y)
{
  int info;

  TaoFunctionBegin;
  TaoValidHeaderSpecific(solver,TAO_COOKIE,1);

  if (solver->MeritFunctionApply){
    info = solver->MeritFunctionApply(solver,xx,y,solver->meritctx); CHKERRQ(info);
  }
  TaoFunctionReturn(0);
}

/*@C
   TaoSetMeritFunction - Sets the routines that evaluate the merit
   function.

   Collective on TAO_SOLVER

   Input Parameters:
+  solver - the TAO_SOLVER solver context
.  func - merit function evaluation routine
.  funcgrad - merit function and gradient evaluation routine
.  grad - merit gradient evaluation routine
.  destroy - context destructor routine
-  ctx - [optional] user-defined context for private data for the
         function and gradient evaluation routines (may be TAO_NULL)

   Calling sequence of func:
$     func (TAO_SOLVER solver,TaoVec *xx,double *f,void *ctx);

+  solver - the TAO_SOLVER solver context
.  xx - variable vector
.  f - objective function value
-  ctx - [optional] user-defined function context

   Calling sequence of funcgrad:
$     funcgrad (TAO_SOLVER solver,TaoVec *xx,double *f,TaoVec *gg, void *ctx);

+  solver - the TAO_SOLVER solver context
.  xx - variable vector
.  f - objective function value
.  gg - gradient vector
-  ctx - [optional] user-defined function context

   Calling sequence of grad:
$     grad (TAO_SOLVER solver,TaoVec *xx,TaoVec *gg,void *ctx);

+  solver - the TAO_SOLVER solver context
.  xx - variable vector
.  gg - gradient vector
-  ctx - [optional] user-defined function context

   Calling sequence of destroy:
$     destroy (TAO_SOLVER solver, void *ctx);

+  solver - the TAO_SOLVER solver context
-  ctx - [optional] user-defined function context

   Level: developer

.keywords: TAO_SOLVER, merit function

.seealso: TaoComputeMeritFunction()
@*/
int TaoSetMeritFunction(TAO_SOLVER solver, int (*func)(TAO_SOLVER,TaoVec*,double*,void*),
                        int (*funcgrad)(TAO_SOLVER,TaoVec*,double*,TaoVec*,void*),
                        int (*grad)(TAO_SOLVER,TaoVec*,TaoVec*,void*),
                        int (*destroy)(TAO_SOLVER,void*),
                        void *ctx)
{
  int info;
  TaoFunctionBegin;

  TaoValidHeaderSpecific(solver,TAO_COOKIE,1);
  info = TaoMeritFunctionDestroy(solver); CHKERRQ(info);
  solver->MeritFunctionApply         = func;
  solver->MeritFunctionGradientApply = funcgrad;
  solver->MeritGradientApply         = grad;
  solver->MeritFunctionDestroy       = destroy;
  solver->meritctx                   = ctx;

  TaoFunctionReturn(0);
}

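/* Editorial example (not part of the original tao_fghj.c): a sketch of
   registering a user-defined merit function with the calling sequences
   documented above.  The function names and the fallback to the plain
   objective are illustrative; they are not supplied by TAO. */
static int MyMerit(TAO_SOLVER solver, TaoVec *xx, double *f, void *ctx)
{
  int info;
  TaoFunctionBegin;
  info = TaoComputeFunction(solver, xx, f); CHKERRQ(info);  /* use the plain objective as the merit value */
  TaoFunctionReturn(0);
}

static int MyMeritAndGradient(TAO_SOLVER solver, TaoVec *xx, double *f, TaoVec *gg, void *ctx)
{
  int info;
  TaoFunctionBegin;
  info = TaoComputeFunctionGradient(solver, xx, f, gg); CHKERRQ(info);
  TaoFunctionReturn(0);
}

static int MyMeritDestroy(TAO_SOLVER solver, void *ctx)
{
  TaoFunctionBegin;
  /* nothing is allocated in this sketch, so there is nothing to free */
  TaoFunctionReturn(0);
}

/* A solver's setup routine could then register these with
     info = TaoSetMeritFunction(solver, MyMerit, MyMeritAndGradient,
                                0, MyMeritDestroy, TAO_NULL); CHKERRQ(info);
   Passing 0 for the gradient-only routine is acceptable here because
   TaoComputeMeritGradient() below falls back on the combined routine. */
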
/* --------------------------------------------------------------- */

/*@C
   TaoComputeMeritFunctionGradient - Computes the merit function value and its
   gradient, as set with TaoSetMeritFunction().

   Collective on TAO_SOLVER

   Input Parameters:
+  solver - the TAO_SOLVER solver context
-  xx - input vector

   Output Parameters:
+  y - merit function value
-  gg - merit gradient vector

   Notes:
   TaoComputeMeritFunctionGradient() is typically used within minimization
   implementations, so most users would not generally call this routine
   themselves.

   Level: developer

.keywords: TAO_SOLVER, compute, minimization, function

.seealso: TaoSetMeritFunction()
@*/
int TaoComputeMeritFunctionGradient(TAO_SOLVER solver, TaoVec *xx, double *y, TaoVec *gg)
{
  int info;

  TaoFunctionBegin;
  TaoValidHeaderSpecific(solver,TAO_COOKIE,1);

  if (solver->MeritFunctionGradientApply){
    info = solver->MeritFunctionGradientApply(solver,xx,y,gg,solver->meritctx); CHKERRQ(info);
  }
  TaoFunctionReturn(0);
}

/* --------------------------------------------------------------- */

/*@C
   TaoComputeMeritGradient - Computes the gradient of the merit function
   that has been set with TaoSetMeritFunction().

   Collective on TAO_SOLVER

   Input Parameters:
+  solver - the TAO_SOLVER solver context
-  xx - input vector

   Output Parameter:
.  gg - merit gradient vector

   Notes:
   TaoComputeMeritGradient() is typically used within minimization
   implementations, so most users would not generally call this routine
   themselves.  If no gradient-only routine has been set, the combined
   merit function and gradient routine is used instead.

   Level: developer

.keywords: TAO_SOLVER, compute, minimization, function

.seealso: TaoSetMeritFunction()
@*/
int TaoComputeMeritGradient(TAO_SOLVER solver, TaoVec *xx, TaoVec *gg)
{
  int info;
  double f;
  TaoFunctionBegin;
  TaoValidHeaderSpecific(solver,TAO_COOKIE,1);

  if (solver->MeritGradientApply){
    info = solver->MeritGradientApply(solver,xx,gg,solver->meritctx); CHKERRQ(info);
  } else if (solver->MeritFunctionGradientApply) {
    info = solver->MeritFunctionGradientApply(solver,xx,&f,gg,solver->meritctx); CHKERRQ(info);
  }
  TaoFunctionReturn(0);
}

/*@C
   TaoMeritFunctionDestroy - Destroys the data structures associated with
   the merit function and sets the associated function pointers to NULL.

   Collective on TAO_SOLVER

   Input Parameter:
.  solver - the TAO_SOLVER solver context

   Level: developer

.keywords: TAO_SOLVER, compute, minimization, function

.seealso: TaoSetMeritFunction()
@*/
int TaoMeritFunctionDestroy(TAO_SOLVER solver)
{
  int info;
  TaoFunctionBegin;
  if (solver->MeritFunctionDestroy){
    info=solver->MeritFunctionDestroy(solver,solver->meritctx); CHKERRQ(info);
  }
  solver->MeritFunctionApply=0;
  solver->MeritFunctionGradientApply=0;
  solver->MeritGradientApply=0;
  solver->MeritFunctionDestroy=0;
  solver->meritctx=0;
  TaoFunctionReturn(0);
}

/*@C
   TaoSetApplication - Sets the user-defined context for
   use by the optimization solver.  The application provides
   the solver with function and derivative information as
   well as the data structures used to store this information.

   Collective on TAO_SOLVER

   Input Parameters:
+  tao - the TAO_SOLVER solver context
-  taoapp - user application context

   Notes:
   For PETSc users the TaoApplication* object is actually a
   TAO_APPLICATION structure.

   Level: advanced

.keywords: application, context

@*/
int TaoSetApplication(TAO_SOLVER tao, TaoApplication *myapp)
{
  int info;
  TaoVec *xx,*RR;
  TaoMat *HH, *JJ;
  TaoVec *RXL, *RXU;
  TaoMat *CA;

  TaoFunctionBegin;
  tao->taoappl=myapp;
  info = myapp->GetVariableVector(&xx);CHKERRQ(info);
  info = myapp->GetHessianMatrix(&HH);CHKERRQ(info);
  info = myapp->GetJacobianMatrix(&JJ); CHKERRQ(info);
  info = myapp->GetConstraintVector(&RR);CHKERRQ(info);
  info = myapp->GetInequalityConstraints(&RXL,&CA,&RXU); CHKERRQ(info);

  tao->hessian=HH;
  tao->vec_sol=xx;
  tao->jacobian=JJ;
  tao->vfunc=RR;
  tao->RXL=RXL;
  tao->RXU=RXU;
  tao->CA=CA;

  info = TaoSetUp(tao);CHKERRQ(info);

  TaoFunctionReturn(0);
}

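/* Editorial example (not part of the original tao_fghj.c): a sketch of the
   driver-level pattern for hooking an application into a solver.  'myapp' is
   assumed to be a concrete subclass of TaoApplication created by the user (for
   PETSc users, the TAO_APPLICATION wrapper provides such an object), and 'tao'
   an already-created TAO_SOLVER; the helper name MyAttachAndSolve is
   illustrative only. */
int MyAttachAndSolve(TAO_SOLVER tao, TaoApplication *myapp)
{
  int info;
  TaoVec *xx;

  TaoFunctionBegin;
  info = TaoSetApplication(tao, myapp); CHKERRQ(info);  /* also calls TaoSetUp() */
  info = TaoSolve(tao); CHKERRQ(info);
  info = TaoGetSolution(tao, &xx); CHKERRQ(info);       /* xx references the application's solution vector */
  TaoFunctionReturn(0);
}
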
/* TaoH0Mat wraps the application object so that an application-provided
   HessianSolve() routine can serve as the initial Hessian approximation
   for the limited-memory (LMVM/BLMVM) solvers below. */
class TaoH0Mat: public TaoMat {
 protected:
  TaoApplication *H0;
 public:
  TaoH0Mat(TaoApplication*);
  ~TaoH0Mat();
  int Solve(TaoVec*, TaoVec*, TaoTruth*);
};

TaoH0Mat::TaoH0Mat(TaoApplication* theappobject){
  this->H0=theappobject;
  return;
}

TaoH0Mat::~TaoH0Mat(){
  return;
}

int TaoH0Mat::Solve(TaoVec* tb, TaoVec* dx, TaoTruth *tt){
  int info;
  info=this->H0->HessianSolve(tb,dx,tt); CHKERRQ(info);
  return 0;
}

#include "src/unconstrained/impls/lmvm/lmvm.h"

/* TaoLMVMSetH0 - Tells the LMVM solver whether to use the application-provided
   initial Hessian approximation (flag==TAO_TRUE) or the default (flag==TAO_FALSE). */
int TaoLMVMSetH0(TAO_SOLVER tao, TaoTruth flag)
{
  TAO_LMVM *lmvm;
  TaoMat *H0;
  int info;

  TaoFunctionBegin;
  info = TaoGetSolverContext(tao, "tao_lmvm", (void **)&lmvm); CHKERRQ(info);
  if (lmvm && lmvm->M) {
    if (TAO_TRUE == flag) {
      H0 = new TaoH0Mat(tao->taoappl);
      info = lmvm->M->SetH0(H0); CHKERRQ(info);
    }
    else {
      info = lmvm->M->SetH0(0); CHKERRQ(info);
    }
  }
  TaoFunctionReturn(0);
}

#include "src/bound/impls/blmvm/blmvm.h"

/* TaoBLMVMSetH0 - Tells the BLMVM solver whether to use the application-provided
   initial Hessian approximation (flag==TAO_TRUE) or the default (flag==TAO_FALSE). */
int TaoBLMVMSetH0(TAO_SOLVER tao, TaoTruth flag)
{
  TAO_BLMVM *blmvm;
  TaoMat *H0;
  int info;

  TaoFunctionBegin;
  info = TaoGetSolverContext(tao, "tao_blmvm", (void **)&blmvm); CHKERRQ(info);
  if (blmvm && blmvm->M) {
    if (TAO_TRUE == flag) {
      H0 = new TaoH0Mat(tao->taoappl);
      info = blmvm->M->SetH0(H0); CHKERRQ(info);
    }
    else {
      info = blmvm->M->SetH0(0); CHKERRQ(info);
    }
  }
  TaoFunctionReturn(0);
}

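/* Editorial example (not part of the original tao_fghj.c): enabling the
   application-provided initial Hessian approximation for the LMVM and BLMVM
   methods.  This assumes the application implements HessianSolve(); passing
   TAO_FALSE reverts to the default initial approximation, and each call is
   effectively a no-op unless the corresponding method is in use.  The helper
   name MyEnableUserH0 is illustrative only. */
int MyEnableUserH0(TAO_SOLVER tao)
{
  int info;
  TaoFunctionBegin;
  info = TaoLMVMSetH0(tao, TAO_TRUE);  CHKERRQ(info);   /* "tao_lmvm" method  */
  info = TaoBLMVMSetH0(tao, TAO_TRUE); CHKERRQ(info);   /* "tao_blmvm" method */
  TaoFunctionReturn(0);
}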