Actual source code: nls.c

  1: /*$Id$*/

  3: #include "nls.h"             /*I "tao_solver.h" I*/

  5: #ifdef TAO_USE_PETSC
  6: #include "petscksp.h"
  7: #include "petscpc.h"
  8: #include "src/petsctao/linearsolver/taolinearsolver_petsc.h"
  9: #include "src/petsctao/vector/taovec_petsc.h"

 11: #include "private/kspimpl.h"
 12: #include "private/pcimpl.h"

 14: #define NLS_KSP_CG      0
 15: #define NLS_KSP_STCG    1
 16: #define NLS_KSP_GLTR    2
 17: #define NLS_KSP_PETSC   3
 18: #define NLS_KSP_TYPES   4

 20: #define NLS_PC_NONE     0
 21: #define NLS_PC_AHESS    1
 22: #define NLS_PC_BFGS     2
 23: #define NLS_PC_PETSC    3
 24: #define NLS_PC_TYPES    4

 26: #define BFGS_SCALE_AHESS   0
 27: #define BFGS_SCALE_PHESS   1
 28: #define BFGS_SCALE_BFGS    2
 29: #define BFGS_SCALE_TYPES   3

 31: #define NLS_INIT_CONSTANT         0
 32: #define NLS_INIT_DIRECTION        1
 33: #define NLS_INIT_INTERPOLATION    2
 34: #define NLS_INIT_TYPES            3

 36: #define NLS_UPDATE_STEP           0
 37: #define NLS_UPDATE_REDUCTION      1
 38: #define NLS_UPDATE_INTERPOLATION  2
 39: #define NLS_UPDATE_TYPES          3

 41: static const char *NLS_KSP[64] = {
 42:   "cg", "stcg", "gltr", "petsc"
 43: };

 45: static const char *NLS_PC[64] = {
 46:   "none", "ahess", "bfgs", "petsc"
 47: };

 49: static const char *BFGS_SCALE[64] = {
 50:   "ahess", "phess", "bfgs"
 51: };

 53: static const char *NLS_INIT[64] = {
 54:   "constant", "direction", "interpolation"
 55: };

 57: static const char *NLS_UPDATE[64] = {
 58:   "step", "reduction", "interpolation"
 59: };

 61: // Routine for BFGS preconditioner
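     // The routine below is installed as the apply callback of a PCSHELL
     // preconditioner (see the PCShellSetContext/PCShellSetApply calls in
     // TaoSolve_NLS).  The shell context is the TaoLMVMMat holding the
     // limited-memory BFGS approximation; applying the preconditioner wraps
     // the raw PETSc vectors in TaoVecPetsc objects and solves with the
     // approximation via M->Solve.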

 65: static PetscErrorCode bfgs_apply(void *ptr, Vec xin, Vec xout)
 66: {
 67:   TaoLMVMMat *M = (TaoLMVMMat *)ptr;
 68:   TaoVecPetsc Xin(xin);
 69:   TaoVecPetsc Xout(xout);
 70:   TaoTruth info2;
 71:   int info;

 74:   info = M->Solve(&Xin, &Xout, &info2); CHKERRQ(info);
 75:   return(0);
 76: }

 78: // Implements Newton's method with a line search approach for solving
 79: // unconstrained minimization problems.  A Moré-Thuente line search
 80: // is used to guarantee that the BFGS preconditioner remains positive
 81: // definite.
 82: //
 83: // The method can shift the Hessian matrix.  The shifting procedure is
 84: // adapted from the PATH algorithm for solving complementarity
 85: // problems.
 86: //
 87: // The linear system solve should be done with a conjugate gradient
 88: // method, although any method can be used.
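     //
     // In outline, each iteration solves the (possibly shifted) system
     //
     //     (H + pert*I) d = -g,
     //
     // and then applies the line search along d.  The perturbation pert is
     // initialized from imfac*||g|| clamped to [imin, imax], increased when
     // the matrix appears indefinite or a step is rejected, and decreased
     // (eventually to zero) after successful Newton steps.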

 90: #define NLS_NEWTON           0
 91: #define NLS_BFGS             1
 92: #define NLS_SCALED_GRADIENT  2
 93: #define NLS_GRADIENT         3
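     // Step types, in the order they are attempted when a direction fails:
     // a full Newton step, a limited-memory BFGS step, a scaled gradient
     // step (obtained by resetting the BFGS matrix to its initial scaling),
     // and finally an unscaled gradient step.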

 97: static int TaoSolve_NLS(TAO_SOLVER tao, void *solver)
 98: {
 99:   TAO_NLS *ls = (TAO_NLS *)solver;
100:   TaoVec *X, *G = ls->G, *D = ls->D, *W = ls->W;
101:   TaoVec *Xold = ls->Xold, *Gold = ls->Gold, *Diag = ls->Diag;
102:   TaoMat *H;
103:   TaoLMVMMat *M = ls->M;

105:   TaoLinearSolver *tls;
106:   TaoLinearSolverPetsc *pls;

108:   KSP pksp;
109:   PC ppc;

111:   KSPConvergedReason ksp_reason;
112:   TaoTerminateReason reason;
113:   TaoTruth success;
114:   
115:   double fmin, ftrial, f_full, prered, actred, kappa, sigma;
116:   double tau, tau_1, tau_2, tau_max, tau_min, max_radius;
117:   double f, fold, gdx, gnorm, pert;
118:   double step = 1.0;

120:   double delta;
121:   double radius, norm_d = 0.0, e_min;

123:   int stepType;
124:   int iter = 0, status = 0, info;
125:   int bfgsUpdates = 0;
126:   int needH;

128:   int i_max = 5;
129:   int j_max = 1;
130:   int i, j;

132:   TaoFunctionBegin;

134:   // Initialize variables
135:   pert = ls->sval;

137:   // Initialize trust-region radius when using stcg or gltr
138:   // Will be reset during the first iteration
139:   if (NLS_KSP_STCG == ls->ksp_type || NLS_KSP_GLTR == ls->ksp_type) {
140:     info = TaoGetInitialTrustRegionRadius(tao, &radius); CHKERRQ(info);
141:     if (radius < 0.0) {
142:       SETERRQ(1, "Initial radius negative");
143:     }

145:     // Modify the radius if it is too large or small
146:     radius = TaoMax(radius, ls->min_radius);
147:     radius = TaoMin(radius, ls->max_radius);
148:   }

150:   // Get vectors we will need
151:   info = TaoGetSolution(tao, &X); CHKERRQ(info);
152:   info = TaoGetHessian(tao, &H); CHKERRQ(info);

154:   if (NLS_PC_BFGS == ls->pc_type && !M) {
155:     ls->M = new TaoLMVMMat(X);
156:     M = ls->M;
157:   }

159:   // Check convergence criteria
160:   info = TaoComputeFunctionGradient(tao, X, &f, G); CHKERRQ(info);
161:   info = G->Norm2(&gnorm); CHKERRQ(info);
162:   if (TaoInfOrNaN(f) || TaoInfOrNaN(gnorm)) {
163:     SETERRQ(1, "User provided compute function generated Inf or NaN");
164:   }
165:   needH = 1;

167:   info = TaoMonitor(tao, iter, f, gnorm, 0.0, 1.0, &reason); CHKERRQ(info);
168:   if (reason != TAO_CONTINUE_ITERATING) {
169:     TaoFunctionReturn(0);
170:   }

172:   // Create vectors for the limited memory preconditioner
173:   if ((NLS_PC_BFGS == ls->pc_type) && 
174:       (BFGS_SCALE_BFGS != ls->bfgs_scale_type)) {
175:     if (!Diag) {
176:       info = X->Clone(&ls->Diag); CHKERRQ(info);
177:       Diag = ls->Diag;
178:     }
179:   }

181:   // Modify the linear solver to use a conjugate gradient method
182:   info = TaoGetLinearSolver(tao, &tls); CHKERRQ(info);
183:   pls  = dynamic_cast <TaoLinearSolverPetsc *> (tls);

185:   pksp = pls->GetKSP();
186:   switch(ls->ksp_type) {
187:   case NLS_KSP_CG:
188:     info = KSPSetType(pksp, KSPCG); CHKERRQ(info);
189:     if (pksp->ops->setfromoptions) {
190:       (*pksp->ops->setfromoptions)(pksp);
191:     }
192:     break;

194:   case NLS_KSP_STCG:
195:     info = KSPSetType(pksp, KSPSTCG); CHKERRQ(info);
196:     if (pksp->ops->setfromoptions) {
197:       (*pksp->ops->setfromoptions)(pksp);
198:     }
199:     break;

201:   case NLS_KSP_GLTR:
202:     info = KSPSetType(pksp, KSPGLTR); CHKERRQ(info);
203:     if (pksp->ops->setfromoptions) {
204:       (*pksp->ops->setfromoptions)(pksp);
205:     }
206:     break;

208:   default:
209:     // Use the method set by the ksp_type
210:     break;
211:   }

213:   // Modify the preconditioner to use the bfgs approximation
214:   info = KSPGetPC(pksp, &ppc); CHKERRQ(info);
215:   switch(ls->pc_type) {
216:   case NLS_PC_NONE:
217:     info = PCSetType(ppc, PCNONE); CHKERRQ(info);
218:     if (ppc->ops->setfromoptions) {
219:       (*ppc->ops->setfromoptions)(ppc);
220:     }
221:     break;

223:   case NLS_PC_AHESS:
224:     info = PCSetType(ppc, PCJACOBI); CHKERRQ(info);
225:     if (ppc->ops->setfromoptions) {
226:       (*ppc->ops->setfromoptions)(ppc);
227:     }
228:     info = PCJacobiSetUseAbs(ppc); CHKERRQ(info);
229:     break;

231:   case NLS_PC_BFGS:
232:     info = PCSetType(ppc, PCSHELL); CHKERRQ(info);
233:     if (ppc->ops->setfromoptions) {
234:       (*ppc->ops->setfromoptions)(ppc);
235:     }
236:     info = PCShellSetName(ppc, "bfgs"); CHKERRQ(info);
237:     info = PCShellSetContext(ppc, M); CHKERRQ(info);
238:     info = PCShellSetApply(ppc, bfgs_apply); CHKERRQ(info);
239:     break;

241:   default:
242:     // Use the pc method set by pc_type
243:     break;
244:   }

246:   // Initialize trust-region radius.  The initialization is only performed 
247:   // when we are using Steihaug-Toint or the Generalized Lanczos method.
248:   if (NLS_KSP_STCG == ls->ksp_type || NLS_KSP_GLTR == ls->ksp_type) {
249:     switch(ls->init_type) {
250:     case NLS_INIT_CONSTANT:
251:       // Use the initial radius specified
252:       break;

254:     case NLS_INIT_INTERPOLATION:
255:       // Estimate an initial radius by interpolation along the gradient
256:       max_radius = 0.0;
257:   
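           // Each trial compares the actual reduction f - f(x - (radius/||g||) g)
           // with the reduction predicted by the quadratic model; their ratio
           // kappa and the interpolation factors tau_1, tau_2 determine how the
           // radius is rescaled for the next trial.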
258:       for (j = 0; j < j_max; ++j) {
259:         fmin = f;
260:         sigma = 0.0;
261:   
262:         if (needH) {
263:           info = TaoComputeHessian(tao, X, H); CHKERRQ(info);
264:           needH = 0;
265:         }
266:   
267:         for (i = 0; i < i_max; ++i) {
268:           info = W->Waxpby(1.0, X, -radius / gnorm, G); CHKERRQ(info);
269:   
270:           info = TaoComputeFunction(tao, W, &ftrial); CHKERRQ(info);
271:           if (TaoInfOrNaN(ftrial)) {
272:             tau = ls->gamma1_i;
273:           }
274:           else {
275:             if (ftrial < fmin) {
276:               fmin = ftrial;
277:               sigma = -radius / gnorm;
278:             }
279:   
280:             info = H->Multiply(G, D); CHKERRQ(info);
281:             info = D->Dot(G, &prered); CHKERRQ(info);
282:   
283:             prered = radius * (gnorm - 0.5 * radius * prered / (gnorm * gnorm));
284:             actred = f - ftrial;
285:             if ((fabs(actred) <= ls->epsilon) && 
286:                 (fabs(prered) <= ls->epsilon)) {
287:               kappa = 1.0;
288:             }
289:             else {
290:               kappa = actred / prered;
291:             }
292:   
293:             tau_1 = ls->theta_i * gnorm * radius / (ls->theta_i * gnorm * radius + (1.0 - ls->theta_i) * prered - actred);
294:             tau_2 = ls->theta_i * gnorm * radius / (ls->theta_i * gnorm * radius - (1.0 + ls->theta_i) * prered + actred);
295:             tau_min = TaoMin(tau_1, tau_2);
296:             tau_max = TaoMax(tau_1, tau_2);
297:   
298:             if (fabs(kappa - 1.0) <= ls->mu1_i) {
299:               // Great agreement
300:               max_radius = TaoMax(max_radius, radius);
301:   
302:               if (tau_max < 1.0) {
303:                 tau = ls->gamma3_i;
304:               }
305:               else if (tau_max > ls->gamma4_i) {
306:                 tau = ls->gamma4_i;
307:               }
308:               else if (tau_1 >= 1.0 && tau_1 <= ls->gamma4_i && tau_2 < 1.0) {
309:                 tau = tau_1;
310:               }
311:               else if (tau_2 >= 1.0 && tau_2 <= ls->gamma4_i && tau_1 < 1.0) {
312:                 tau = tau_2;
313:               }
314:               else {
315:                 tau = tau_max;
316:               }
317:             }
318:             else if (fabs(kappa - 1.0) <= ls->mu2_i) {
319:               // Good agreement
320:               max_radius = TaoMax(max_radius, radius);
321:   
322:               if (tau_max < ls->gamma2_i) {
323:                 tau = ls->gamma2_i;
324:               }
325:               else if (tau_max > ls->gamma3_i) {
326:                 tau = ls->gamma3_i;
327:               }
328:               else {
329:                 tau = tau_max;
330:               }
331:             }
332:             else {
333:               // Not good agreement
334:               if (tau_min > 1.0) {
335:                 tau = ls->gamma2_i;
336:               }
337:               else if (tau_max < ls->gamma1_i) {
338:                 tau = ls->gamma1_i;
339:               }
340:               else if ((tau_min < ls->gamma1_i) && (tau_max >= 1.0)) {
341:                 tau = ls->gamma1_i;
342:               }
343:               else if ((tau_1 >= ls->gamma1_i) && (tau_1 < 1.0) &&
344:                        ((tau_2 < ls->gamma1_i) || (tau_2 >= 1.0))) {
345:                 tau = tau_1;
346:               }
347:               else if ((tau_2 >= ls->gamma1_i) && (tau_2 < 1.0) &&
348:                      ((tau_1 < ls->gamma1_i) || (tau_1 >= 1.0))) {
349:                 tau = tau_2;
350:               }
351:               else {
352:                 tau = tau_max;
353:               }
354:             }
355:           }
356:           radius = tau * radius;
357:         }
358:   
359:         if (fmin < f) {
360:           f = fmin;
361:           info = X->Waxpby(1.0, X, sigma, G); CHKERRQ(info);
362:           info = TaoComputeGradient(tao, X, G); CHKERRQ(info);
363:   
364:           info = G->Norm2(&gnorm); CHKERRQ(info);
365:           if (TaoInfOrNaN(f) || TaoInfOrNaN(gnorm)) {
366:             SETERRQ(1, "User provided compute function generated Inf or NaN");
367:           }
368:           needH = 1;
369:   
370:           info = TaoMonitor(tao, iter, f, gnorm, 0.0, 1.0, &reason); CHKERRQ(info);
371:           if (reason != TAO_CONTINUE_ITERATING) {
372:             TaoFunctionReturn(0);
373:           }
374:         }
375:       }
376:       radius = TaoMax(radius, max_radius);

378:       // Modify the radius if it is too large or small
379:       radius = TaoMax(radius, ls->min_radius);
380:       radius = TaoMin(radius, ls->max_radius);
381:       break;

383:     default:
384:       // Norm of the first direction will initialize radius
385:       radius = 0.0;
386:       break;
387:     }
388:   } 

390:   // Set initial scaling for the BFGS preconditioner
391:   // This step is done after computing the initial trust-region radius
392:   // since the function value may have decreased
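       // The choice delta = 2 |f| / ||g||^2 is, roughly, the curvature scaling
       // suggested by a quadratic model that decreases from f to zero along the
       // gradient; it serves as the initial scaling of the LMVM matrix.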
393:   if (NLS_PC_BFGS == ls->pc_type) {
394:     if (f != 0.0) {
395:       delta = 2.0 * TaoAbsDouble(f) / (gnorm*gnorm);
396:     }
397:     else {
398:       delta = 2.0 / (gnorm*gnorm);
399:     }
400:     info = M->SetDelta(delta); CHKERRQ(info);
401:   }

403:   // Reset the step-type counters
404:   ls->newt = 0;
405:   ls->bfgs = 0;
406:   ls->sgrad = 0;
407:   ls->grad = 0;

409:   // Have not converged; continue with Newton method
410:   while (reason == TAO_CONTINUE_ITERATING) {
411:     ++iter;

413:     // Compute the Hessian
414:     if (needH) {
415:       info = TaoComputeHessian(tao, X, H); CHKERRQ(info);
416:       needH = 0;
417:     }

419:     if ((NLS_PC_BFGS == ls->pc_type) && 
420:         (BFGS_SCALE_AHESS == ls->bfgs_scale_type)) {
421:       // Obtain diagonal for the bfgs preconditioner 
422:       info = H->GetDiagonal(Diag); CHKERRQ(info);
423:       info = Diag->AbsoluteValue(); CHKERRQ(info);
424:       info = Diag->Reciprocal(); CHKERRQ(info);
425:       info = M->SetScale(Diag); CHKERRQ(info);
426:     }
427:  
428:     // Shift the Hessian matrix
429:     if (pert > 0) {
430:       info = H->ShiftDiagonal(pert); CHKERRQ(info);
431:     }
432:     
433:     if (NLS_PC_BFGS == ls->pc_type) {
434:       if (BFGS_SCALE_PHESS == ls->bfgs_scale_type) {
435:         // Obtain diagonal for the bfgs preconditioner 
436:         info = H->GetDiagonal(Diag); CHKERRQ(info);
437:         info = Diag->AbsoluteValue(); CHKERRQ(info);
438:         info = Diag->Reciprocal(); CHKERRQ(info);
439:         info = M->SetScale(Diag); CHKERRQ(info);
440:       }

442:       // Update the limited memory preconditioner
443:       info = M->Update(X, G); CHKERRQ(info);
444:       ++bfgsUpdates;
445:     }

447:     // Solve the Newton system of equations
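         // For the STCG and GLTR solvers the trust-region subproblem is solved
         // and the norm of the returned direction is used to (re)initialize the
         // radius; otherwise an ordinary linear solve is performed.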
448:     info = TaoPreLinearSolve(tao, H); CHKERRQ(info);
449:     if (NLS_KSP_STCG == ls->ksp_type || NLS_KSP_GLTR == ls->ksp_type) {
450:       info = TaoLinearSolveTrustRegion(tao, H, G, D, radius, &success); CHKERRQ(info);
451:       info = pls->GetNormDirection(&norm_d); CHKERRQ(info);
452:       if (0.0 == radius) {
453:         // Radius was uninitialized; use the norm of the direction
454:         if (norm_d > 0.0) {
455:           radius = norm_d;

457:           // Modify the radius if it is too large or small
458:           radius = TaoMax(radius, ls->min_radius);
459:           radius = TaoMin(radius, ls->max_radius);
460:         }
461:         else {
462:           // The direction was bad; set radius to default value and re-solve 
463:           // the trust-region subproblem to get a direction
464:           info = TaoGetInitialTrustRegionRadius(tao, &radius); CHKERRQ(info);

466:           // Modify the radius if it is too large or small
467:           radius = TaoMax(radius, ls->min_radius);
468:           radius = TaoMin(radius, ls->max_radius);

470:           info = TaoLinearSolveTrustRegion(tao, H, G, D, radius, &success); CHKERRQ(info);
471:           info = pls->GetNormDirection(&norm_d); CHKERRQ(info);
472:           if (norm_d == 0.0) {
473:             SETERRQ(1, "Initial direction zero");
474:           }
475:         }
476:       }
477:     }
478:     else {
479:       info = TaoLinearSolve(tao, H, G, D, &success); CHKERRQ(info);
480:     }
481:     info = D->Negate(); CHKERRQ(info);

483:     // Check for success (descent direction)
484:     info = D->Dot(G, &gdx); CHKERRQ(info);
485:     if ((gdx >= 0.0) || TaoInfOrNaN(gdx)) {
486:       // Newton step is not descent or direction produced Inf or NaN
487:       // Update the perturbation for next time
488:       if (pert <= 0.0) {
489:         // Initialize the perturbation
490:         pert = TaoMin(ls->imax, TaoMax(ls->imin, ls->imfac * gnorm));
491:         if (NLS_KSP_GLTR == ls->ksp_type) {
492:           info = pls->GetMinEig(&e_min); CHKERRQ(info);
493:           pert = TaoMax(pert, -e_min);
494:         }
495:       }
496:       else {
497:         // Increase the perturbation
498:         pert = TaoMin(ls->pmax, TaoMax(ls->pgfac * pert, ls->pmgfac * gnorm));
499:       }

501:       if (NLS_PC_BFGS != ls->pc_type) {
502:         // No BFGS approximation is available in this case;
503:         // must use the gradient direction
504:         info = D->CopyFrom(G); CHKERRQ(info);
505:         info = D->Negate(); CHKERRQ(info);

507:         ++ls->grad;
508:         stepType = NLS_GRADIENT;
509:       }
510:       else {
511:         // Attempt to use the BFGS direction
512:         info = M->Solve(G, D, &success); CHKERRQ(info);
513:         info = D->Negate(); CHKERRQ(info);

515:         // Check for success (descent direction)
516:         info = D->Dot(G, &gdx); CHKERRQ(info);
517:         if ((gdx >= 0) || TaoInfOrNaN(gdx)) {
518:           // BFGS direction is not descent or produced Inf or NaN
519:           // We can assert bfgsUpdates > 1 in this case because
520:           // the first solve produces the scaled gradient direction,
521:           // which is guaranteed to be descent
522:           //
523:           // Use steepest descent direction (scaled)

525:           if (f != 0.0) {
526:             delta = 2.0 * TaoAbsDouble(f) / (gnorm*gnorm);
527:           }
528:           else {
529:             delta = 2.0 / (gnorm*gnorm);
530:           }
531:           info = M->SetDelta(delta); CHKERRQ(info);
532:           info = M->Reset(); CHKERRQ(info);
533:           info = M->Update(X, G); CHKERRQ(info);
534:           info = M->Solve(G, D, &success); CHKERRQ(info);
535:           info = D->Negate(); CHKERRQ(info);
536:   
537:           bfgsUpdates = 1;
538:           ++ls->sgrad;
539:           stepType = NLS_SCALED_GRADIENT;
540:         }
541:         else {
542:           if (1 == bfgsUpdates) {
543:             // The first BFGS direction is always the scaled gradient
544:             ++ls->sgrad;
545:             stepType = NLS_SCALED_GRADIENT;
546:           }
547:           else {
548:             ++ls->bfgs;
549:             stepType = NLS_BFGS;
550:           }
551:         }
552:       }
553:     }
554:     else {
555:       // Computed Newton step is descent
556:       info = KSPGetConvergedReason(pksp, &ksp_reason); CHKERRQ(info);
557:       switch (ksp_reason) {
558:       case KSP_DIVERGED_NAN:
559:       case KSP_DIVERGED_BREAKDOWN:
560:       case KSP_DIVERGED_INDEFINITE_MAT:
561:       case KSP_DIVERGED_INDEFINITE_PC:
562:       case KSP_CONVERGED_CG_NEG_CURVE:
563:         // Matrix or preconditioner is indefinite; increase perturbation
564:         if (pert <= 0.0) {
565:           // Initialize the perturbation
566:           pert = TaoMin(ls->imax, TaoMax(ls->imin, ls->imfac * gnorm));
567:           if (NLS_KSP_GLTR == ls->ksp_type) {
568:             info = pls->GetMinEig(&e_min); CHKERRQ(info);
569:             pert = TaoMax(pert, -e_min);
570:           }
571:         }
572:         else {
573:           // Increase the perturbation
574:           pert = TaoMin(ls->pmax, TaoMax(ls->pgfac * pert, ls->pmgfac * gnorm));
575:         }
576:         break;

578:       default:
579:         // Newton step computation is good; decrease perturbation
580:         pert = TaoMin(ls->psfac * pert, ls->pmsfac * gnorm);
581:         if (pert < ls->pmin) {
582:           pert = 0.0;
583:         }
584:         break; 
585:       }

587:       ++ls->newt;
588:       stepType = NLS_NEWTON;
589:     }

591:     // Perform the linesearch
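         // If the line search fails, the loop below restores the old iterate and
         // gradient and retries with the next direction in the fallback hierarchy
         // (Newton -> BFGS -> scaled gradient -> gradient).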
592:     fold = f;
593:     info = Xold->CopyFrom(X); CHKERRQ(info);
594:     info = Gold->CopyFrom(G); CHKERRQ(info);

596:     step = 1.0;
597:     info = TaoLineSearchApply(tao, X, G, D, W, &f, &f_full, &step, &status); CHKERRQ(info);

599:     while (status && stepType != NLS_GRADIENT) {
600:       // Linesearch failed
601:       f = fold;
602:       info = X->CopyFrom(Xold); CHKERRQ(info);
603:       info = G->CopyFrom(Gold); CHKERRQ(info);

605:       switch(stepType) {
606:       case NLS_NEWTON:
607:         // Failed to obtain acceptable iterate with Newton step
608:         // Update the perturbation for next time
609:         if (pert <= 0.0) {
610:           // Initialize the perturbation
611:           pert = TaoMin(ls->imax, TaoMax(ls->imin, ls->imfac * gnorm));
612:           if (NLS_KSP_GLTR == ls->ksp_type) {
613:             info = pls->GetMinEig(&e_min); CHKERRQ(info);
614:             pert = TaoMax(pert, -e_min);
615:           }
616:         }
617:         else {
618:           // Increase the perturbation
619:           pert = TaoMin(ls->pmax, TaoMax(ls->pgfac * pert, ls->pmgfac * gnorm));
620:         }

622:         if (NLS_PC_BFGS != ls->pc_type) {
623:           // No BFGS approximation is available in this case;
624:           // must use the gradient direction
625:           info = D->CopyFrom(G); CHKERRQ(info);

627:           ++ls->grad;
628:           stepType = NLS_GRADIENT;
629:         }
630:         else {
631:           // Attempt to use the BFGS direction
632:           info = M->Solve(G, D, &success); CHKERRQ(info);

634:           // Check for success (descent direction)
635:           info = D->Dot(G, &gdx); CHKERRQ(info);
636:           if ((gdx <= 0) || TaoInfOrNaN(gdx)) {
637:             // BFGS direction is not descent or produced Inf or NaN
638:             // We can assert bfgsUpdates > 1 in this case
639:             // Use steepest descent direction (scaled)
640:     
641:             if (f != 0.0) {
642:               delta = 2.0 * TaoAbsDouble(f) / (gnorm*gnorm);
643:             }
644:             else {
645:               delta = 2.0 / (gnorm*gnorm);
646:             }
647:             info = M->SetDelta(delta); CHKERRQ(info);
648:             info = M->Reset(); CHKERRQ(info);
649:             info = M->Update(X, G); CHKERRQ(info);
650:             info = M->Solve(G, D, &success); CHKERRQ(info);
651:   
652:             bfgsUpdates = 1;
653:             ++ls->sgrad;
654:             stepType = NLS_SCALED_GRADIENT;
655:           }
656:           else {
657:             if (1 == bfgsUpdates) {
658:               // The first BFGS direction is always the scaled gradient
659:               ++ls->sgrad;
660:               stepType = NLS_SCALED_GRADIENT;
661:             }
662:             else {
663:               ++ls->bfgs;
664:               stepType = NLS_BFGS;
665:             }
666:           }
667:         }
668:         break;

670:       case NLS_BFGS:
671:         // Can only enter if pc_type == NLS_PC_BFGS
672:         // Failed to obtain acceptable iterate with BFGS step
673:         // Attempt to use the scaled gradient direction

675:         if (f != 0.0) {
676:           delta = 2.0 * TaoAbsDouble(f) / (gnorm*gnorm);
677:         }
678:         else {
679:           delta = 2.0 / (gnorm*gnorm);
680:         }
681:         info = M->SetDelta(delta); CHKERRQ(info);
682:         info = M->Reset(); CHKERRQ(info);
683:         info = M->Update(X, G); CHKERRQ(info);
684:         info = M->Solve(G, D, &success); CHKERRQ(info);

686:         bfgsUpdates = 1;
687:         ++ls->sgrad;
688:         stepType = NLS_SCALED_GRADIENT;
689:         break;

691:       case NLS_SCALED_GRADIENT:
692:         // Can only enter if pc_type == NLS_PC_BFGS
693:         // The scaled gradient step did not produce a new iterate;
694:         // attempt to use the gradient direction.
695:         // Need to make sure we are not using a different diagonal scaling
696:         info = M->SetScale(0); CHKERRQ(info);
697:         info = M->SetDelta(1.0); CHKERRQ(info);
698:         info = M->Reset(); CHKERRQ(info);
699:         info = M->Update(X, G); CHKERRQ(info);
700:         info = M->Solve(G, D, &success); CHKERRQ(info);

702:         bfgsUpdates = 1;
703:         ++ls->grad;
704:         stepType = NLS_GRADIENT;
705:         break;
706:       }
707:       info = D->Negate(); CHKERRQ(info);

709:       // This may be incorrect; linesearch has values for stepmax and stepmin
710:       // that should be reset.
711:       step = 1.0;
712:       info = TaoLineSearchApply(tao, X, G, D, W, &f, &f_full, &step, &status); CHKERRQ(info);
713:     }

715:     if (status) {
716:       // Failed to find an improving point
717:       f = fold;
718:       info = X->CopyFrom(Xold); CHKERRQ(info);
719:       info = G->CopyFrom(Gold); CHKERRQ(info);
720:       step = 0.0;
721:     }

723:     // Update trust region radius
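         // Three strategies are available: "step" scales the radius by omega1..omega5
         // according to where the accepted step length falls relative to nu1..nu4;
         // "reduction" and the interpolation default compare the actual reduction
         // fold - f_full with the reduction predicted by the subproblem and adjust
         // the radius using the alpha or gamma factors.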
724:     if (NLS_KSP_STCG == ls->ksp_type || NLS_KSP_GLTR == ls->ksp_type) {
725:       switch(ls->update_type) {
726:       case NLS_UPDATE_STEP:
727:         if (stepType == NLS_NEWTON) {
728:           if (step < ls->nu1) {
729:             // Very bad step taken; reduce radius
730:             radius = ls->omega1 * TaoMin(norm_d, radius);
731:           }
732:           else if (step < ls->nu2) {
733:             // Reasonably bad step taken; reduce radius
734:             radius = ls->omega2 * TaoMin(norm_d, radius);
735:           }
736:           else if (step < ls->nu3) {
737:             // Reasonable step was taken; leave radius alone
738:             if (ls->omega3 < 1.0) {
739:               radius = ls->omega3 * TaoMin(norm_d, radius);
740:             }
741:             else if (ls->omega3 > 1.0) {
742:               radius = TaoMax(ls->omega3 * norm_d, radius);  
743:             }
744:           }
745:           else if (step < ls->nu4) {
746:             // Full step taken; increase the radius
747:             radius = TaoMax(ls->omega4 * norm_d, radius);  
748:           }
749:           else {
750:             // More than full step taken; increase the radius
751:             radius = TaoMax(ls->omega5 * norm_d, radius);  
752:           }
753:         }
754:         else {
755:           // Newton step was not good; reduce the radius
756:           radius = ls->omega1 * TaoMin(norm_d, radius);
757:         }
758:         break;

760:       case NLS_UPDATE_REDUCTION:
761:         if (stepType == NLS_NEWTON) {
762:           // Get predicted reduction
763:           info = pls->GetObjFcn(&prered); CHKERRQ(info);

765:           if (prered >= 0.0) {
766:             // The predicted reduction has the wrong sign.  This cannot
767:             // happen in infinite precision arithmetic.  Step should
768:             // be rejected!
769:             radius = ls->alpha1 * TaoMin(radius, norm_d);
770:           }
771:           else {
772:             if (TaoInfOrNaN(f_full)) {
773:               radius = ls->alpha1 * TaoMin(radius, norm_d);
774:             }
775:             else {
776:               // Compute the actual reduction
777:               actred = fold - f_full;
778:               prered = -prered;
779:               if ((fabs(actred) <= ls->epsilon) && 
780:                   (fabs(prered) <= ls->epsilon)) {
781:                 kappa = 1.0;
782:               }
783:               else {
784:                 kappa = actred / prered;
785:               }
786:   
787:               // Accept or reject the step and update radius
788:               if (kappa < ls->eta1) {
789:                 // Very bad step
790:                 radius = ls->alpha1 * TaoMin(radius, norm_d);
791:               }
792:               else if (kappa < ls->eta2) {
793:                 // Marginally bad step
794:                 radius = ls->alpha2 * TaoMin(radius, norm_d);
795:               }
796:               else if (kappa < ls->eta3) {
797:                 // Reasonable step
798:                 if (ls->alpha3 < 1.0) {
799:                   radius = ls->alpha3 * TaoMin(norm_d, radius);
800:                 }
801:                 else if (ls->alpha3 > 1.0) {
802:                   radius = TaoMax(ls->alpha3 * norm_d, radius);  
803:                 }
804:               }
805:               else if (kappa < ls->eta4) {
806:                 // Good step
807:                 radius = TaoMax(ls->alpha4 * norm_d, radius);
808:               }
809:               else {
810:                 // Very good step
811:                 radius = TaoMax(ls->alpha5 * norm_d, radius);
812:               }
813:             }
814:           }
815:         }
816:         else {
817:           // Newton step was not good; reduce the radius
818:           radius = ls->alpha1 * TaoMin(norm_d, radius);
819:         }
820:         break;

822:       default:
823:         if (stepType == NLS_NEWTON) {
824:           // Get predicted reduction
825:           info = pls->GetObjFcn(&prered); CHKERRQ(info);

827:           if (prered >= 0.0) {
828:             // The predicted reduction has the wrong sign.  This cannot
829:             // happen in infinite precision arithmetic.  Step should
830:             // be rejected!
831:             radius = ls->gamma1 * TaoMin(radius, norm_d);
832:           }
833:           else {
834:             if (TaoInfOrNaN(f_full)) {
835:               radius = ls->gamma1 * TaoMin(radius, norm_d);
836:             }
837:             else {
838:               actred = fold - f_full;
839:               prered = -prered;
840:               if ((fabs(actred) <= ls->epsilon) && 
841:                   (fabs(prered) <= ls->epsilon)) {
842:                 kappa = 1.0;
843:               }
844:               else {
845:                 kappa = actred / prered;
846:               }

848:               tau_1 = ls->theta * gdx / (ls->theta * gdx - (1.0 - ls->theta) * prered + actred);
849:               tau_2 = ls->theta * gdx / (ls->theta * gdx + (1.0 + ls->theta) * prered - actred);
850:               tau_min = TaoMin(tau_1, tau_2);
851:               tau_max = TaoMax(tau_1, tau_2);

853:               if (kappa >= 1.0 - ls->mu1) {
854:                 // Great agreement
855:                 if (tau_max < 1.0) {
856:                   radius = TaoMax(radius, ls->gamma3 * norm_d);
857:                 }
858:                 else if (tau_max > ls->gamma4) {
859:                   radius = TaoMax(radius, ls->gamma4 * norm_d);
860:                 }
861:                 else {
862:                   radius = TaoMax(radius, tau_max * norm_d);
863:                 }
864:               }
865:               else if (kappa >= 1.0 - ls->mu2) {
866:                 // Good agreement

868:                 if (tau_max < ls->gamma2) {
869:                   radius = ls->gamma2 * TaoMin(radius, norm_d);
870:                 }
871:                 else if (tau_max > ls->gamma3) {
872:                   radius = TaoMax(radius, ls->gamma3 * norm_d);
873:                 }
874:                 else if (tau_max < 1.0) {
875:                   radius = tau_max * TaoMin(radius, norm_d);
876:                 }
877:                 else {
878:                   radius = TaoMax(radius, tau_max * norm_d);
879:                 }
880:               }
881:               else {
882:                 // Not good agreement
883:                 if (tau_min > 1.0) {
884:                   radius = ls->gamma2 * TaoMin(radius, norm_d);
885:                 }
886:                 else if (tau_max < ls->gamma1) {
887:                   radius = ls->gamma1 * TaoMin(radius, norm_d);
888:                 }
889:                 else if ((tau_min < ls->gamma1) && (tau_max >= 1.0)) {
890:                   radius = ls->gamma1 * TaoMin(radius, norm_d);
891:                 }
892:                 else if ((tau_1 >= ls->gamma1) && (tau_1 < 1.0) &&
893:                          ((tau_2 < ls->gamma1) || (tau_2 >= 1.0))) {
894:                   radius = tau_1 * TaoMin(radius, norm_d);
895:                 }
896:                 else if ((tau_2 >= ls->gamma1) && (tau_2 < 1.0) &&
897:                          ((tau_1 < ls->gamma1) || (tau_1 >= 1.0))) {
898:                   radius = tau_2 * TaoMin(radius, norm_d);
899:                 }
900:                 else {
901:                   radius = tau_max * TaoMin(radius, norm_d);
902:                 }
903:               }
904:             } 
905:           }
906:         }
907:         else {
908:           // Newton step was not good; reduce the radius
909:           radius = ls->gamma1 * TaoMin(norm_d, radius);
910:         }
911:         break;
912:       }

914:       // The radius may have been increased; modify if it is too large
915:       radius = TaoMin(radius, ls->max_radius);
916:     }

918:     // Check for termination
919:     info = G->Norm2(&gnorm); CHKERRQ(info);
920:     if (TaoInfOrNaN(f) || TaoInfOrNaN(gnorm)) {
921:       SETERRQ(1, "User provided compute function generated Inf or NaN");
922:     }
923:     needH = 1;

925:     info = TaoMonitor(tao, iter, f, gnorm, 0.0, step, &reason); CHKERRQ(info);
926:   }
927:   TaoFunctionReturn(0);
928: }

930: /* ---------------------------------------------------------- */
933: static int TaoSetUp_NLS(TAO_SOLVER tao, void *solver)
934: {
935:   TAO_NLS *ls = (TAO_NLS *)solver;
936:   TaoVec *X;
937:   TaoMat *H;
938:   int info;

940:   TaoFunctionBegin;

942:   info = TaoGetSolution(tao, &X); CHKERRQ(info);
943:   info = X->Clone(&ls->G); CHKERRQ(info);
944:   info = X->Clone(&ls->D); CHKERRQ(info);
945:   info = X->Clone(&ls->W); CHKERRQ(info);

947:   info = X->Clone(&ls->Xold); CHKERRQ(info);
948:   info = X->Clone(&ls->Gold); CHKERRQ(info);

950:   ls->Diag = 0;
951:   ls->M = 0;

953:   info = TaoSetLagrangianGradientVector(tao, ls->G); CHKERRQ(info);
954:   info = TaoSetStepDirectionVector(tao, ls->D); CHKERRQ(info);

956:   // Set linear solver to default for symmetric matrices
957:   info = TaoGetHessian(tao, &H); CHKERRQ(info);
958:   info = TaoCreateLinearSolver(tao, H, 200, 0); CHKERRQ(info);

960:   // Check sizes for compatibility
961:   info = TaoCheckFGH(tao); CHKERRQ(info);
962:   TaoFunctionReturn(0);
963: }

965: /*------------------------------------------------------------*/
968: static int TaoDestroy_NLS(TAO_SOLVER tao, void *solver)
969: {
970:   TAO_NLS *ls = (TAO_NLS *)solver;
971:   int info;

973:   TaoFunctionBegin;
974:   info = TaoVecDestroy(ls->G); CHKERRQ(info);
975:   info = TaoVecDestroy(ls->D); CHKERRQ(info);
976:   info = TaoVecDestroy(ls->W); CHKERRQ(info);

978:   info = TaoVecDestroy(ls->Xold); CHKERRQ(info);
979:   info = TaoVecDestroy(ls->Gold); CHKERRQ(info);

981:   info = TaoSetLagrangianGradientVector(tao, 0); CHKERRQ(info);
982:   info = TaoSetStepDirectionVector(tao, 0); CHKERRQ(info);

984:   if (ls->Diag) {
985:     info = TaoVecDestroy(ls->Diag); CHKERRQ(info);
986:     ls->Diag = 0;
987:   }

989:   if (ls->M) {
990:     info = TaoMatDestroy(ls->M); CHKERRQ(info);
991:     ls->M = 0;
992:   }
993:   TaoFunctionReturn(0);
994: }

996: /*------------------------------------------------------------*/
999: static int TaoSetOptions_NLS(TAO_SOLVER tao, void *solver)
1000: {
1001:   TAO_NLS *ls = (TAO_NLS *)solver;
1002:   int info;

1004:   TaoFunctionBegin;
1005:   info = TaoOptionsHead("Newton line search method for unconstrained optimization"); CHKERRQ(info);
1006:   info = TaoOptionList("-tao_nls_ksp_type", "ksp type", "", NLS_KSP, NLS_KSP_TYPES, NLS_KSP[ls->ksp_type], &ls->ksp_type, 0); CHKERRQ(info);
1007:   info = TaoOptionList("-tao_nls_pc_type", "pc type", "", NLS_PC, NLS_PC_TYPES, NLS_PC[ls->pc_type], &ls->pc_type, 0); CHKERRQ(info);
1008:   info = TaoOptionList("-tao_nls_bfgs_scale_type", "bfgs scale type", "", BFGS_SCALE, BFGS_SCALE_TYPES, BFGS_SCALE[ls->bfgs_scale_type], &ls->bfgs_scale_type, 0); CHKERRQ(info);
1009:   info = TaoOptionList("-tao_nls_init_type", "radius initialization type", "", NLS_INIT, NLS_INIT_TYPES, NLS_INIT[ls->init_type], &ls->init_type, 0); CHKERRQ(info);
1010:   info = TaoOptionList("-tao_nls_update_type", "radius update type", "", NLS_UPDATE, NLS_UPDATE_TYPES, NLS_UPDATE[ls->update_type], &ls->update_type, 0); CHKERRQ(info);
1011:  info = TaoOptionDouble("-tao_nls_sval", "perturbation starting value", "", ls->sval, &ls->sval, 0); CHKERRQ(info);
1012:   info = TaoOptionDouble("-tao_nls_imin", "minimum initial perturbation", "", ls->imin, &ls->imin, 0); CHKERRQ(info);
1013:   info = TaoOptionDouble("-tao_nls_imax", "maximum initial perturbation", "", ls->imax, &ls->imax, 0); CHKERRQ(info);
1014:   info = TaoOptionDouble("-tao_nls_imfac", "initial merit factor", "", ls->imfac, &ls->imfac, 0); CHKERRQ(info);
1015:   info = TaoOptionDouble("-tao_nls_pmin", "minimum perturbation", "", ls->pmin, &ls->pmin, 0); CHKERRQ(info);
1016:   info = TaoOptionDouble("-tao_nls_pmax", "maximum perturbation", "", ls->pmax, &ls->pmax, 0); CHKERRQ(info);
1017:   info = TaoOptionDouble("-tao_nls_pgfac", "growth factor", "", ls->pgfac, &ls->pgfac, 0); CHKERRQ(info);
1018:   info = TaoOptionDouble("-tao_nls_psfac", "shrink factor", "", ls->psfac, &ls->psfac, 0); CHKERRQ(info);
1019:   info = TaoOptionDouble("-tao_nls_pmgfac", "merit growth factor", "", ls->pmgfac, &ls->pmgfac, 0); CHKERRQ(info);
1020:   info = TaoOptionDouble("-tao_nls_pmsfac", "merit shrink factor", "", ls->pmsfac, &ls->pmsfac, 0); CHKERRQ(info);
1021:   info = TaoOptionDouble("-tao_nls_eta1", "poor steplength; reduce radius", "", ls->eta1, &ls->eta1, 0); CHKERRQ(info);
1022:   info = TaoOptionDouble("-tao_nls_eta2", "reasonable steplength; leave radius alone", "", ls->eta2, &ls->eta2, 0); CHKERRQ(info);
1023:   info = TaoOptionDouble("-tao_nls_eta3", "good steplength; increase radius", "", ls->eta3, &ls->eta3, 0); CHKERRQ(info);
1024:   info = TaoOptionDouble("-tao_nls_eta4", "excellent steplength; greatly increase radius", "", ls->eta4, &ls->eta4, 0); CHKERRQ(info);
1025:   info = TaoOptionDouble("-tao_nls_alpha1", "", "", ls->alpha1, &ls->alpha1, 0); CHKERRQ(info);
1026:   info = TaoOptionDouble("-tao_nls_alpha2", "", "", ls->alpha2, &ls->alpha2, 0); CHKERRQ(info);
1027:   info = TaoOptionDouble("-tao_nls_alpha3", "", "", ls->alpha3, &ls->alpha3, 0); CHKERRQ(info);
1028:   info = TaoOptionDouble("-tao_nls_alpha4", "", "", ls->alpha4, &ls->alpha4, 0); CHKERRQ(info);
1029:   info = TaoOptionDouble("-tao_nls_alpha5", "", "", ls->alpha5, &ls->alpha5, 0); CHKERRQ(info);
1030:   info = TaoOptionDouble("-tao_nls_nu1", "poor steplength; reduce radius", "", ls->nu1, &ls->nu1, 0); CHKERRQ(info);
1031:   info = TaoOptionDouble("-tao_nls_nu2", "reasonable steplength; leave radius alone", "", ls->nu2, &ls->nu2, 0); CHKERRQ(info);
1032:   info = TaoOptionDouble("-tao_nls_nu3", "good steplength; increase radius", "", ls->nu3, &ls->nu3, 0); CHKERRQ(info);
1033:   info = TaoOptionDouble("-tao_nls_nu4", "excellent steplength; greatly increase radius", "", ls->nu4, &ls->nu4, 0); CHKERRQ(info);
1034:   info = TaoOptionDouble("-tao_nls_omega1", "", "", ls->omega1, &ls->omega1, 0); CHKERRQ(info);
1035:   info = TaoOptionDouble("-tao_nls_omega2", "", "", ls->omega2, &ls->omega2, 0); CHKERRQ(info);
1036:   info = TaoOptionDouble("-tao_nls_omega3", "", "", ls->omega3, &ls->omega3, 0); CHKERRQ(info);
1037:   info = TaoOptionDouble("-tao_nls_omega4", "", "", ls->omega4, &ls->omega4, 0); CHKERRQ(info);
1038:   info = TaoOptionDouble("-tao_nls_omega5", "", "", ls->omega5, &ls->omega5, 0); CHKERRQ(info);
1039:   info = TaoOptionDouble("-tao_nls_mu1_i", "", "", ls->mu1_i, &ls->mu1_i, 0); CHKERRQ(info);
1040:   info = TaoOptionDouble("-tao_nls_mu2_i", "", "", ls->mu2_i, &ls->mu2_i, 0); CHKERRQ(info);
1041:   info = TaoOptionDouble("-tao_nls_gamma1_i", "", "", ls->gamma1_i, &ls->gamma1_i, 0); CHKERRQ(info);
1042:   info = TaoOptionDouble("-tao_nls_gamma2_i", "", "", ls->gamma2_i, &ls->gamma2_i, 0); CHKERRQ(info);
1043:   info = TaoOptionDouble("-tao_nls_gamma3_i", "", "", ls->gamma3_i, &ls->gamma3_i, 0); CHKERRQ(info);
1044:   info = TaoOptionDouble("-tao_nls_gamma4_i", "", "", ls->gamma4_i, &ls->gamma4_i, 0); CHKERRQ(info);
1045:   info = TaoOptionDouble("-tao_nls_theta_i", "", "", ls->theta_i, &ls->theta_i, 0); CHKERRQ(info);
1046:   info = TaoOptionDouble("-tao_nls_mu1", "", "", ls->mu1, &ls->mu1, 0); CHKERRQ(info);
1047:   info = TaoOptionDouble("-tao_nls_mu2", "", "", ls->mu2, &ls->mu2, 0); CHKERRQ(info);
1048:   info = TaoOptionDouble("-tao_nls_gamma1", "", "", ls->gamma1, &ls->gamma1, 0); CHKERRQ(info);
1049:   info = TaoOptionDouble("-tao_nls_gamma2", "", "", ls->gamma2, &ls->gamma2, 0); CHKERRQ(info);
1050:   info = TaoOptionDouble("-tao_nls_gamma3", "", "", ls->gamma3, &ls->gamma3, 0); CHKERRQ(info);
1051:   info = TaoOptionDouble("-tao_nls_gamma4", "", "", ls->gamma4, &ls->gamma4, 0); CHKERRQ(info);
1052:   info = TaoOptionDouble("-tao_nls_theta", "", "", ls->theta, &ls->theta, 0); CHKERRQ(info);
1053:   info = TaoOptionDouble("-tao_nls_min_radius", "lower bound on initial radius", "", ls->min_radius, &ls->min_radius, 0); CHKERRQ(info);
1054:   info = TaoOptionDouble("-tao_nls_max_radius", "upper bound on radius", "", ls->max_radius, &ls->max_radius, 0); CHKERRQ(info);
1055:   info = TaoOptionDouble("-tao_nls_epsilon", "tolerance used when computing actual and predicted reduction", "", ls->epsilon, &ls->epsilon, 0); CHKERRQ(info);
1056:   info = TaoLineSearchSetFromOptions(tao); CHKERRQ(info);
1057:   info = TaoOptionsTail(); CHKERRQ(info);
1058:   TaoFunctionReturn(0);
1059: }


1062: /*------------------------------------------------------------*/
1065: static int TaoView_NLS(TAO_SOLVER tao,void* solver)
1066: {
1067:   TAO_NLS *ls = (TAO_NLS *)solver;
1068:   int info;

1070:   TaoFunctionBegin;
1071:   if (NLS_PC_BFGS == ls->pc_type && ls->M) {
1072:     info = TaoPrintInt(tao, "  Rejected matrix updates: %d\n", ls->M->GetRejects()); CHKERRQ(info);
1073:   }
1074:   info = TaoPrintInt(tao, "  Newton steps: %d\n", ls->newt); CHKERRQ(info);
1075:   info = TaoPrintInt(tao, "  BFGS steps: %d\n", ls->bfgs); CHKERRQ(info);
1076:   info = TaoPrintInt(tao, "  Scaled gradient steps: %d\n", ls->sgrad); CHKERRQ(info);
1077:   info = TaoPrintInt(tao, "  Gradient steps: %d\n", ls->grad); CHKERRQ(info);
1078:   info = TaoLineSearchView(tao); CHKERRQ(info);
1079:   TaoFunctionReturn(0);
1080: }

1082: /* ---------------------------------------------------------- */
1086: int TaoCreate_NLS(TAO_SOLVER tao)
1087: {
1088:   TAO_NLS *ls;
1089:   int info;

1091:   TaoFunctionBegin;

1093:   info = TaoNew(TAO_NLS, &ls); CHKERRQ(info);
1094:   info = PetscLogObjectMemory(tao, sizeof(TAO_NLS)); CHKERRQ(info);

1096:   info = TaoSetTaoSolveRoutine(tao, TaoSolve_NLS, (void *)ls); CHKERRQ(info);
1097:   info = TaoSetTaoSetUpDownRoutines(tao, TaoSetUp_NLS, TaoDestroy_NLS); CHKERRQ(info);
1098:   info = TaoSetTaoOptionsRoutine(tao, TaoSetOptions_NLS); CHKERRQ(info);
1099:   info = TaoSetTaoViewRoutine(tao, TaoView_NLS); CHKERRQ(info);

1101:   info = TaoSetMaximumIterates(tao, 50); CHKERRQ(info);
1102:   info = TaoSetTolerances(tao, 1e-10, 1e-10, 0, 0); CHKERRQ(info);

1104:   info = TaoSetTrustRegionRadius(tao, 100.0); CHKERRQ(info);
1105:   info = TaoSetTrustRegionTolerance(tao, 1.0e-12); CHKERRQ(info);

1107:   ls->sval   = 0.0;
1108:   ls->imin   = 1.0e-4;
1109:   ls->imax   = 1.0e+2;
1110:   ls->imfac  = 1.0e-1;

1112:   ls->pmin   = 1.0e-12;
1113:   ls->pmax   = 1.0e+2;
1114:   ls->pgfac  = 1.0e+1;
1115:   ls->psfac  = 4.0e-1;
1116:   ls->pmgfac = 1.0e-1;
1117:   ls->pmsfac = 1.0e-1;

1119:   // Default values for trust-region radius update based on steplength
1120:   ls->nu1 = 0.25;
1121:   ls->nu2 = 0.50;
1122:   ls->nu3 = 1.00;
1123:   ls->nu4 = 1.25;

1125:   ls->omega1 = 0.25;
1126:   ls->omega2 = 0.50;
1127:   ls->omega3 = 1.00;
1128:   ls->omega4 = 2.00;
1129:   ls->omega5 = 4.00;

1131:   // Default values for trust-region radius update based on reduction
1132:   ls->eta1 = 1.0e-4;
1133:   ls->eta2 = 0.25;
1134:   ls->eta3 = 0.50;
1135:   ls->eta4 = 0.90;

1137:   ls->alpha1 = 0.25;
1138:   ls->alpha2 = 0.50;
1139:   ls->alpha3 = 1.00;
1140:   ls->alpha4 = 2.00;
1141:   ls->alpha5 = 4.00;

1143:   // Default values for trust-region radius update based on interpolation
1144:   ls->mu1 = 0.10;
1145:   ls->mu2 = 0.50;

1147:   ls->gamma1 = 0.25;
1148:   ls->gamma2 = 0.50;
1149:   ls->gamma3 = 2.00;
1150:   ls->gamma4 = 4.00;

1152:   ls->theta = 0.05;

1154:   // Default values for trust region initialization based on interpolation
1155:   ls->mu1_i = 0.35;
1156:   ls->mu2_i = 0.50;

1158:   ls->gamma1_i = 0.0625;
1159:   ls->gamma2_i = 0.5;
1160:   ls->gamma3_i = 2.0;
1161:   ls->gamma4_i = 5.0;
1162:   
1163:   ls->theta_i = 0.25;

1165:   // Remaining parameters
1166:   ls->min_radius = 1.0e-10;
1167:   ls->max_radius = 1.0e10;
1168:   ls->epsilon = 1.0e-6;

1170:   ls->ksp_type        = NLS_KSP_CG;
1171:   ls->pc_type         = NLS_PC_BFGS;
1172:   ls->bfgs_scale_type = BFGS_SCALE_PHESS;
1173:   ls->init_type       = NLS_INIT_INTERPOLATION;
1174:   ls->update_type     = NLS_UPDATE_STEP;

1176:   info = TaoCreateMoreThuenteLineSearch(tao, 0, 0); CHKERRQ(info);
1177:   TaoFunctionReturn(0);
1178: }
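      /*
         Example runtime selection of the variants implemented above, using the
         option names registered in TaoSetOptions_NLS (the solver itself is chosen
         through the usual TAO method mechanism, typically under the method name
         "tao_nls"):

           -tao_nls_ksp_type stcg -tao_nls_pc_type bfgs \
           -tao_nls_init_type interpolation -tao_nls_update_type reduction
      */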

1181: #endif