Actual source code: asils.c
#include "src/complementarity/impls/ssls/ssls.h"
// Context for ASXLS (X = I or F)
//   -- active-set (AS)  - reduced matrices formed
//                       - inherit properties of original system
//   -- semismooth (S)   - function not differentiable
//                       - merit function continuously differentiable
//                       - Fischer-Burmeister reformulation of complementarity
//                       - Billups composition for two finite bounds
//   -- infeasible (I)   - iterates not guaranteed to remain within bounds
//   -- feasible (F)     - iterates guaranteed to remain within bounds
//   -- linesearch (LS)  - Armijo rule on direction
//
// Many other reformulations are possible, as are combinations of
// feasible/infeasible and linesearch/trust-region strategies.
//
// Basic theory
// The Fischer-Burmeister reformulation is semismooth with a continuously
// differentiable merit function, and strongly semismooth if F has
// Lipschitz continuous derivatives.
//
// Every accumulation point generated by the algorithm is a stationary
// point for the merit function.  Stationary points of the merit function
// are solutions of the complementarity problem if
//   a.  the stationary point has a BD-regular subdifferential, or
//   b.  the Schur complement F'/F'_ff is a P_0-matrix, where ff is the
//       index set corresponding to the free variables.
//
// If one of the accumulation points has a BD-regular subdifferential then
//   a.  the entire sequence converges to this accumulation point at
//       a local q-superlinear rate
//   b.  if in addition the reformulation is strongly semismooth near
//       this accumulation point, then the algorithm converges at a
//       local q-quadratic rate.
//
// The theory for the feasible version follows from the feasible descent
// algorithm framework.
//
// References:
//   Billups, "Algorithms for Complementarity Problems and Generalized
//     Equations," Ph.D. thesis, University of Wisconsin - Madison, 1995.
//   De Luca, Facchinei, Kanzow, "A Semismooth Equation Approach to the
//     Solution of Nonlinear Complementarity Problems," Mathematical
//     Programming, 75, pages 407-439, 1996.
//   Ferris, Kanzow, Munson, "Feasible Descent Algorithms for Mixed
//     Complementarity Problems," Mathematical Programming, 86,
//     pages 475-497, 1999.
//   Fischer, "A Special Newton-type Optimization Method," Optimization,
//     24, pages 269-284, 1992.
//   Munson, Facchinei, Ferris, Fischer, Kanzow, "The Semismooth Algorithm
//     for Large Scale Complementarity Problems," Technical Report 99-06,
//     University of Wisconsin - Madison, 1999.
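//
// For reference, a minimal illustrative sketch (not used by the solver) of the
// scalar Fischer-Burmeister function mentioned above.  The reformulation
// actually applied to the mixed complementarity problem is computed by the
// shared SSLS routines; the helper name below is hypothetical and only records
// the formula
//     phi(a, b) = sqrt(a^2 + b^2) - a - b,
// which is zero if and only if a >= 0, b >= 0, and a*b = 0.

#include <math.h>

static double Tao_ASILS_FischerBurmeisterSketch(double a, double b)
{
  // Zeros of phi are exactly the complementary pairs (a >= 0, b >= 0, a*b = 0).
  return sqrt(a*a + b*b) - a - b;
}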
/*------------------------------------------------------------*/

static int TaoSolve_ASILS(TAO_SOLVER tao, void *solver)
{
  TAO_SSLS *asls = (TAO_SSLS *)solver;
  TaoVec *x, *l, *u, *ff, *dpsi, *d, *w, *t1, *t2, *da, *db;
  TaoMat *J, *Jsub;
  TaoVec *dxfree, *r1, *r2, *r3;
  TaoIndexSet *FreeVariableSet, *FixedVariableSet;
  // TaoLinearSolver *lsolver;
  double psi, psi_full, ndpsi, normd, innerd, t=0;
  double delta, rho;
  int iter=0, info, nn, nf;
  TaoTerminateReason reason;
  TaoTruth flag;

  TaoFunctionBegin;

  // Assume that Setup has been called!
  // Set the structure for the Jacobian and create a linear solver.

  delta = asls->delta;
  rho = asls->rho;

  info = TaoGetSolution(tao, &x); CHKERRQ(info);
  info = TaoGetVariableBounds(tao, &l, &u); CHKERRQ(info);
  info = TaoEvaluateVariableBounds(tao, l, u); CHKERRQ(info);
  info = TaoGetJacobian(tao, &J); CHKERRQ(info);

  info = x->GetDimension(&nn); CHKERRQ(info);

  info = x->CreateIndexSet(&FreeVariableSet); CHKERRQ(info);
  info = x->CreateIndexSet(&FixedVariableSet); CHKERRQ(info);
  info = J->CreateReducedMatrix(FreeVariableSet, FreeVariableSet, &Jsub); CHKERRQ(info);
  info = x->Clone(&dxfree); CHKERRQ(info);
  info = x->Clone(&r1); CHKERRQ(info);
  info = x->Clone(&r2); CHKERRQ(info);
  info = x->Clone(&r3); CHKERRQ(info);

  ff = asls->ff;
  dpsi = asls->dpsi;
  d = asls->d;
  w = asls->w;
  t1 = asls->t1;
  t2 = asls->t2;
  da = asls->da;
  db = asls->db;

  info = TaoSetMeritFunction(tao, Tao_SSLS_Function, Tao_ASLS_FunctionGradient,
                             TAO_NULL, TAO_NULL, asls); CHKERRQ(info);

  // Calculate the function value and Fischer function value at the
  // current iterate.

  info = TaoComputeMeritFunctionGradient(tao, x, &psi, dpsi); CHKERRQ(info);
  info = dpsi->Norm2(&ndpsi); CHKERRQ(info);

  while (1) {

    // Check the termination criteria

    info = TaoMonitor(tao, iter++, asls->merit, ndpsi, 0.0, t, &reason); CHKERRQ(info);
    if (TAO_CONTINUE_ITERATING != reason) break;

    // We are going to solve a linear system of equations.  We need to
    // set the tolerances for the solve so that we maintain an asymptotic
    // rate of convergence that is superlinear.
    // Note: these tolerances are for the reduced system.  We really need
    // to make sure that the full system satisfies the full-space conditions.

    // This rule gives superlinear asymptotic convergence
    // asls->atol = TaoMin(0.5, asls->merit*sqrt(asls->merit));
    // asls->rtol = 0.0;

    // This rule gives quadratic asymptotic convergence
    // asls->atol = TaoMin(0.5, asls->merit*asls->merit);
    // asls->rtol = 0.0;
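
    // Note: both rules above are currently commented out, as is the
    // TaoGetLinearSolver/SetTolerances block further below, so the reduced
    // linear systems are solved with the linear solver's default tolerances.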

    // Calculate a free and fixed set of variables.  The fixed set of
    // variables are those for which d_b is approximately equal to zero.
    // The definition of approximately changes as we approach the solution
    // to the problem.

    // No one rule is guaranteed to work in all cases.  The following
    // definition is based on the norm of the Jacobian matrix.  If the
    // norm is large, the tolerance becomes smaller.
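    // The rule used below marks variable i as fixed when
    //   |db_i| <= min(merit, 1e-2) / (1 + ||J||_1)
    // and places all remaining variables in the free set.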

    info = J->Norm1(&(asls->identifier)); CHKERRQ(info);
    // asls->identifier = asls->atol / (1 + asls->identifier);
    asls->identifier = TaoMin(asls->merit, 1e-2) / (1 + asls->identifier);
    // asls->identifier = 1e-4 / (1 + asls->identifier);
    // asls->identifier = 1e-4;
    // asls->identifier = 1e-8;

    info = t1->SetToConstant(-asls->identifier); CHKERRQ(info);
    info = t2->SetToConstant( asls->identifier); CHKERRQ(info);

    info = FixedVariableSet->WhichBetweenOrEqual(t1, db, t2); CHKERRQ(info);
    info = FreeVariableSet->ComplementOf(FixedVariableSet); CHKERRQ(info);

    info = FixedVariableSet->GetSize(&nf); CHKERRQ(info);
    printf("Fixed size: %d\n", nf);

    // We now have our partition.  Now calculate the direction in the
    // fixed variable space.
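    // Here ff holds the reformulated (Fischer-based) function and da, db the
    // diagonal parts of its generalized Jacobian D_a + D_b*J, as computed by
    // the shared SSLS routines.  On the fixed set db is approximately zero,
    // so the Newton equation (D_a + D_b*J) d = ff reduces there to the
    // diagonal update d_i = ff_i / da_i computed below.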

    info = r1->SetReducedVec(ff, FixedVariableSet); CHKERRQ(info);
    info = r2->SetReducedVec(da, FixedVariableSet); CHKERRQ(info);
    info = r1->PointwiseDivide(r1, r2); CHKERRQ(info);

    info = d->SetToZero(); CHKERRQ(info);
    info = d->ReducedXPY(r1, FixedVariableSet); CHKERRQ(info);

    // Our direction in the FixedVariableSet is fixed.  Calculate the
    // information needed for the step in the FreeVariableSet.  To
    // do this, we need to know the diagonal perturbation and the
    // right hand side.

    info = r1->SetReducedVec(da, FreeVariableSet); CHKERRQ(info);
    info = r2->SetReducedVec(ff, FreeVariableSet); CHKERRQ(info);
    info = r3->SetReducedVec(db, FreeVariableSet); CHKERRQ(info);

    info = r1->PointwiseDivide(r1, r3); CHKERRQ(info);
    info = r2->PointwiseDivide(r2, r3); CHKERRQ(info);

    // r1 is the diagonal perturbation
    // r2 is the right hand side
    // r3 is no longer needed

    // We now need to modify r2 for our direction choice in the fixed
    // variable set: calculate t1 = J*d, take the reduced vector
    // of t1, and modify r2.

    info = J->Multiply(d, t1); CHKERRQ(info);
    info = r3->SetReducedVec(t1, FreeVariableSet); CHKERRQ(info);
    info = r2->Axpy(-1.0, r3); CHKERRQ(info);

    // Calculate the reduced problem matrix and the direction

    info = Jsub->SetReducedMatrix(J, FreeVariableSet, FreeVariableSet); CHKERRQ(info);
    info = Jsub->AddDiagonal(r1); CHKERRQ(info);
    info = dxfree->SetReducedVec(d, FreeVariableSet); CHKERRQ(info);
    info = dxfree->SetToZero(); CHKERRQ(info);
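
    // The reduced system solved below is therefore
    //   (J_ff + diag(da_f / db_f)) dxfree = (ff / db)_f - (J*d)_f,
    // where the subscript f denotes restriction to the free variables.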

    // Set the convergence for the reduced problem solver.
    // info = TaoGetLinearSolver(tao, &lsolver); CHKERRQ(info);
    // info = lsolver->SetTolerances(0.1*asls->rtol, 0.1*asls->atol, 1e30, 10*nn); CHKERRQ(info);

    // Calculate the reduced direction.  (Really the negative of the Newton
    // direction; therefore, the rest of the code uses -d.)

    info = TaoPreLinearSolve(tao, Jsub); CHKERRQ(info);
    info = TaoLinearSolve(tao, Jsub, r2, dxfree, &flag); CHKERRQ(info);

    // Add the direction in the free variables back into the real direction.

    info = d->ReducedXPY(dxfree, FreeVariableSet); CHKERRQ(info);

    // Calculate the norm of the relative residual in the real space

    // info = t1->PointwiseMultiply(da, d); CHKERRQ(info);
    // info = J->Multiply(d, t2); CHKERRQ(info);
    // info = t2->PointwiseMultiply(db, t2); CHKERRQ(info);
    // info = t1->Axpy( 1.0, t2); CHKERRQ(info);
    // info = t1->Axpy(-1.0, ff); CHKERRQ(info);
    // info = t1->Norm2(&two_norm); CHKERRQ(info);

    // if (two_norm >= asls->atol) {
    //   printf("Bad absolute residual: actual: %5.4e needed: %5.4e\n",
    //          two_norm, asls->atol);
    // }

    // Check the real direction for descent; if it is not a descent
    // direction, use the negative gradient direction instead.
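    // At this point d is still the un-negated Newton direction and the step
    // eventually taken is -d, so we require dpsi'*d > delta*||d||^rho for
    // -d to be accepted as a sufficient descent direction.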

    info = d->Norm2(&normd); CHKERRQ(info);
    info = d->Dot(dpsi, &innerd); CHKERRQ(info);

    if (innerd <= delta*pow(normd, rho)) {
      printf("Gradient direction: %5.4e.\n", innerd);
      info = PetscInfo1(tao, "TaoSolve_ASILS: %d: newton direction not descent\n", iter); CHKERRQ(info);

      info = d->CopyFrom(dpsi); CHKERRQ(info);
      info = d->Dot(dpsi, &innerd); CHKERRQ(info);
    }

    info = d->Negate(); CHKERRQ(info);
    innerd = -innerd;

    // We now have a correct descent direction.  Apply a linesearch to
    // find the new iterate.

    t = 1;
    info = TaoLineSearchApply(tao, x, dpsi, d, w,
                              &psi, &psi_full, &t, &tao->lsflag); CHKERRQ(info);
    info = dpsi->Norm2(&ndpsi); CHKERRQ(info);
  }

  info = TaoMatDestroy(Jsub); CHKERRQ(info);
  info = TaoVecDestroy(dxfree); CHKERRQ(info);
  info = TaoVecDestroy(r1); CHKERRQ(info);
  info = TaoVecDestroy(r2); CHKERRQ(info);
  info = TaoVecDestroy(r3); CHKERRQ(info);
  info = TaoIndexSetDestroy(FreeVariableSet); CHKERRQ(info);
  info = TaoIndexSetDestroy(FixedVariableSet); CHKERRQ(info);

  TaoFunctionReturn(0);
}

/* ---------------------------------------------------------- */

int TaoCreate_ASILS(TAO_SOLVER tao)
{
  TAO_SSLS *asls;
  int info;

  TaoFunctionBegin;

  info = TaoNew(TAO_SSLS, &asls); CHKERRQ(info);
  info = PetscLogObjectMemory(tao, sizeof(TAO_SSLS)); CHKERRQ(info);
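
  // delta and rho parameterize the descent test (innerd <= delta*||d||^rho)
  // used in TaoSolve_ASILS; identifier is the initial active-set tolerance,
  // which is recomputed from the merit value and ||J||_1 at each iteration.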
  asls->delta = 1e-10;
  asls->rho = 2.1;

  asls->identifier = 1e-5;

  info = TaoSetTaoSolveRoutine(tao, TaoSolve_ASILS, (void *)asls); CHKERRQ(info);
  info = TaoSetTaoSetUpDownRoutines(tao, TaoSetUp_SSLS, TaoSetDown_SSLS); CHKERRQ(info);
  info = TaoSetTaoOptionsRoutine(tao, TaoSetOptions_SSLS); CHKERRQ(info);
  info = TaoSetTaoViewRoutine(tao, TaoView_SSLS); CHKERRQ(info);

  info = TaoCreateArmijoLineSearch(tao); CHKERRQ(info);

  info = TaoSetMaximumIterates(tao, 2000); CHKERRQ(info);
  info = TaoSetMaximumFunctionEvaluations(tao, 4000); CHKERRQ(info);

  info = TaoSetTolerances(tao, 0, 0, 0, 0); CHKERRQ(info);
  info = TaoSetGradientTolerances(tao, 1.0e-16, 0.0, 0.0); CHKERRQ(info);
  info = TaoSetFunctionLowerBound(tao, 1.0e-8); CHKERRQ(info);

  TaoFunctionReturn(0);
}