source: ntrip/trunk/BNC/newmat/newmatnl.cpp@8925

Last change on this file since 8925 was 8901, checked in by stuerze, 5 years ago

upgrade to newmat11 library

File size: 7.3 KB
/// \ingroup newmat
///@{

/// \file newmatnl.cpp
/// Non-linear optimisation.

// Copyright (C) 1993,4,5,6: R B Davies


#define WANT_MATH
#define WANT_STREAM

#include "newmatap.h"
#include "newmatnl.h"

#ifdef use_namespace
namespace NEWMAT {
#endif



void FindMaximum2::Fit(ColumnVector& Theta, int n_it)
{
   Tracer tr("FindMaximum2::Fit");
   enum State {Start, Restart, Continue, Interpolate, Extrapolate,
      Fail, Convergence};
   State TheState = Start;
   Real z,w,x,x2,g,l1,l2,l3,d1,d2=0,d3;
   ColumnVector Theta1, Theta2, Theta3;
   int np = Theta.Nrows();
   ColumnVector H1(np), H3, HP(np), K, K1(np);
   bool oorg, conv;
   int counter = 0;
   Theta1 = Theta; HP = 0.0; g = 0.0;

   // This is really a set of gotos and labels, but they do not work
   // correctly in AT&T C++ and Sun 4.01 C++.
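   // Each pass through the loop either asks NextPoint() for a new search
   // direction H1 (with directional derivative d1), evaluates the objective
   // at a trial point, or adjusts the step length by interpolation or
   // extrapolation until the objective stops improving.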

   for(;;)
   {
      switch (TheState)
      {
      case Start:
         tr.ReName("FindMaximum2::Fit/Start");
         Value(Theta1, true, l1, oorg);
         if (oorg) Throw(ProgramException("invalid starting value\n"));

      case Restart:
         tr.ReName("FindMaximum2::Fit/ReStart");
         conv = NextPoint(H1, d1);
         if (conv) { TheState = Convergence; break; }
         if (counter++ > n_it) { TheState = Fail; break; }

         z = 1.0 / sqrt(d1);
         H3 = H1 * z; K = (H3 - HP) * g; HP = H3;
         g = 0.0;                     // de-activate to use curved projection
         if ( g == 0.0 ) K1 = 0.0; else K1 = K * 0.2 + K1 * 0.6;
         // (K - K1) * alpha + K1 * (1 - alpha)
         //    = K * alpha + K1 * (1 - 2 * alpha)
         K = K1 * d1; g = z;

      case Continue:
         tr.ReName("FindMaximum2::Fit/Continue");
         Theta2 = Theta1 + H1 + K;
         Value(Theta2, false, l2, oorg);
         if (counter++ > n_it) { TheState = Fail; break; }
         if (oorg)
         {
            H1 *= 0.5; K *= 0.25; d1 *= 0.5; g *= 2.0;
            TheState = Continue; break;
         }
         d2 = LastDerivative(H1 + K * 2.0);

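      // Fit a cubic in the step length to the objective values l1, l2 and the
      // directional derivatives d1, d2 at the ends of the step; x is the
      // estimated maximising fraction of the step.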
      case Interpolate:
         tr.ReName("FindMaximum2::Fit/Interpolate");
         z = d1 + d2 - 3.0 * (l2 - l1);
         w = z * z - d1 * d2;
         if (w < 0.0) { TheState = Extrapolate; break; }
         w = z + sqrt(w);
         if (1.5 * w + d1 < 0.0)
            { TheState = Extrapolate; break; }
         if (d2 > 0.0 && l2 > l1 && w > 0.0)
            { TheState = Extrapolate; break; }
         x = d1 / (w + d1); x2 = x * x; g /= x;
         Theta3 = Theta1 + H1 * x + K * x2;
         Value(Theta3, true, l3, oorg);
         if (counter++ > n_it) { TheState = Fail; break; }
         if (oorg)
         {
            if (x <= 1.0)
               { x *= 0.5; x2 = x*x; g *= 2.0; d1 *= x; H1 *= x; K *= x2; }
            else
            {
               x = 0.5 * (x-1.0); x2 = x*x; Theta1 = Theta2;
               H1 = (H1 + K * 2.0) * x;
               K *= x2; g = 0.0; d1 = x * d2; l1 = l2;
            }
            TheState = Continue; break;
         }

         if (l3 >= l1 && l3 >= l2)
            { Theta1 = Theta3; l1 = l3; TheState = Restart; break; }

         d3 = LastDerivative(H1 + K * 2.0);
         if (l1 > l2)
            { H1 *= x; K *= x2; Theta2 = Theta3; d1 *= x; d2 = d3*x; }
         else
         {
            Theta1 = Theta2; Theta2 = Theta3;
            x -= 1.0; x2 = x*x; g = 0.0; H1 = (H1 + K * 2.0) * x;
            K *= x2; l1 = l2; l2 = l3; d1 = x*d2; d2 = x*d3;
            if (d1 <= 0.0) { TheState = Start; break; }
         }
         TheState = Interpolate; break;

      case Extrapolate:
         tr.ReName("FindMaximum2::Fit/Extrapolate");
         Theta1 = Theta2; g = 0.0; K *= 4.0; H1 = (H1 * 2.0 + K);
         d1 = 2.0 * d2; l1 = l2;
         TheState = Continue; break;

      case Fail:
         Throw(ConvergenceException(Theta));

      case Convergence:
         Theta = Theta1; return;
      }
   }
}



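// Evaluate the model at Parameters: the fitted values go into Y and the
// matrix of derivatives (the Jacobian) into X.  The returned objective is
// v = -0.5 * (residual sum of squares), so maximising v minimises the sum of
// squares; oorg is set if the parameters are out of range.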
void NonLinearLeastSquares::Value
   (const ColumnVector& Parameters, bool, Real& v, bool& oorg)
{
   Tracer tr("NonLinearLeastSquares::Value");
   Y.resize(n_obs); X.resize(n_obs,n_param);
   // put the fitted values in Y, the derivatives in X.
   Pred.Set(Parameters);
   if (!Pred.IsValid()) { oorg=true; return; }
   for (int i=1; i<=n_obs; i++)
   {
      Y(i) = Pred(i);
      X.Row(i) = Pred.Derivatives();
   }
   if (!Pred.IsValid()) { oorg=true; return; }  // check afterwards as well
   Y = *DataPointer - Y; Real ssq = Y.SumSquare();
   errorvar = ssq / (n_obs - n_param);
   cout << endl;
   cout << setw(15) << setprecision(10) << " " << errorvar;
   Derivs = Y.t() * X;                          // get the derivative and stash it
   oorg = false; v = -0.5 * ssq;
}

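// Compute the Gauss-Newton step.  QRZ factorises the Jacobian X (X = Z*U with
// Z having orthogonal columns and U upper triangular) and forms M = Z' * Y,
// so Adj = U.i() * M solves the linearised least-squares problem.  test, the
// sum of squares explained by the step, is compared with errorvar * criterion
// to decide convergence.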
bool NonLinearLeastSquares::NextPoint(ColumnVector& Adj, Real& test)
{
   Tracer tr("NonLinearLeastSquares::NextPoint");
   QRZ(X, U); QRZ(X, Y, M);      // do the QR decomposition
   test = M.SumSquare();
   cout << " " << setw(15) << setprecision(10)
      << test << " " << Y.SumSquare() / (n_obs - n_param);
   Adj = U.i() * M;
   if (test < errorvar * criterion) return true;
   else return false;
}

Real NonLinearLeastSquares::LastDerivative(const ColumnVector& H)
{ return (Derivs * H).AsScalar(); }

void NonLinearLeastSquares::Fit(const ColumnVector& Data,
   ColumnVector& Parameters)
{
   Tracer tr("NonLinearLeastSquares::Fit");
   n_param = Parameters.Nrows(); n_obs = Data.Nrows();
   DataPointer = &Data;
   FindMaximum2::Fit(Parameters, Lim);
   cout << "\nConverged" << endl;
}

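// The estimated parameter covariance is errorvar * (X'X).i(), computed from
// the R factor U of the QR decomposition (U.t()*U == X'X); SE holds the
// square roots of its diagonal.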
void NonLinearLeastSquares::MakeCovariance()
{
   if (Covariance.Nrows()==0)
   {
      UpperTriangularMatrix UI = U.i();
      Covariance << UI * UI.t() * errorvar;
      SE << Covariance;              // get diagonals
      for (int i = 1; i<=n_param; i++) SE(i) = sqrt(SE(i));
   }
}

void NonLinearLeastSquares::GetStandardErrors(ColumnVector& SEX)
   { MakeCovariance(); SEX = SE.AsColumn(); }

void NonLinearLeastSquares::GetCorrelations(SymmetricMatrix& Corr)
   { MakeCovariance(); Corr << SE.i() * Covariance * SE.i(); }

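// After the QRZ call X holds the orthogonalised design Z, so the sum of
// squares of row i is the leverage (hat-matrix diagonal) of observation i.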
void NonLinearLeastSquares::GetHatDiagonal(DiagonalMatrix& Hat) const
{
   Hat.resize(n_obs);
   for (int i = 1; i<=n_obs; i++) Hat(i) = X.Row(i).SumSquare();
}


// the MLE_D_FI routines

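// Evaluate the log-likelihood at Parameters and cache its derivative vector;
// oorg is set if the likelihood object reports the parameters as invalid.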
void MLE_D_FI::Value
   (const ColumnVector& Parameters, bool wg, Real& v, bool& oorg)
{
   Tracer tr("MLE_D_FI::Value");
   if (!LL.IsValid(Parameters,wg)) { oorg=true; return; }
   v = LL.LogLikelihood();
   if (!LL.IsValid()) { oorg=true; return; }    // check validity again
   cout << endl;
   cout << setw(20) << setprecision(10) << v;
   oorg = false;
   Derivs = LL.Derivatives();                   // Get derivatives
}

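// Fisher scoring step: with FI = LT * LT.t() (Cholesky), the adjustment
// Adj = FI.i() * Derivs is obtained by two triangular solves, and
// test = Derivs.t() * FI.i() * Derivs is the score statistic used as the
// convergence measure.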
bool MLE_D_FI::NextPoint(ColumnVector& Adj, Real& test)
{
   Tracer tr("MLE_D_FI::NextPoint");
   SymmetricMatrix FI = LL.FI();
   LT = Cholesky(FI);
   ColumnVector Adj1 = LT.i() * Derivs;
   Adj = LT.t().i() * Adj1;
   test = SumSquare(Adj1);
   cout << " " << setw(20) << setprecision(10) << test;
   return (test < Criterion);
}

Real MLE_D_FI::LastDerivative(const ColumnVector& H)
{ return (Derivs.t() * H).AsScalar(); }

void MLE_D_FI::Fit(ColumnVector& Parameters)
{
   Tracer tr("MLE_D_FI::Fit");
   FindMaximum2::Fit(Parameters,Lim);
   cout << "\nConverged" << endl;
}

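// The asymptotic covariance is the inverse Fisher information, recovered from
// the Cholesky factor of the final iteration: (LT*LT.t()).i() == LTI.t()*LTI;
// SE holds the square roots of its diagonal.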
void MLE_D_FI::MakeCovariance()
{
   if (Covariance.Nrows()==0)
   {
      LowerTriangularMatrix LTI = LT.i();
      Covariance << LTI.t() * LTI;
      SE << Covariance;              // get diagonal
      int n = Covariance.Nrows();
      for (int i=1; i <= n; i++) SE(i) = sqrt(SE(i));
   }
}

void MLE_D_FI::GetStandardErrors(ColumnVector& SEX)
{ MakeCovariance(); SEX = SE.AsColumn(); }

void MLE_D_FI::GetCorrelations(SymmetricMatrix& Corr)
{ MakeCovariance(); Corr << SE.i() * Covariance * SE.i(); }



#ifdef use_namespace
}
#endif


///@}