changeset 9631:00958d0c4e3c

split __dogleg__ > __doglegm__
author Jaroslav Hajek <highegg@gmail.com>
date Wed, 09 Sep 2009 14:43:06 +0200
parents d52e405df4f7
children 40acd13920e3
files scripts/ChangeLog scripts/optimization/__dogleg__.m scripts/optimization/__doglegm__.m scripts/optimization/fminunc.m
diffstat 4 files changed, 77 insertions(+), 26 deletions(-)
--- a/scripts/ChangeLog	Tue Sep 08 23:37:00 2009 -0400
+++ b/scripts/ChangeLog	Wed Sep 09 14:43:06 2009 +0200
@@ -1,3 +1,9 @@
+2009-09-06  Jaroslav Hajek  <highegg@gmail.com>
+
+	* optimization/__dogleg__.m: Revert to revision 22c8272af34b.
+	* optimization/__doglegm__.m: New source.
+	* optimization/fminunc.m: Use it.
+
 2009-09-08  John W. Eaton  <jwe@octave.org>
 
 	* io/dlmwrite.m: Fix typo.
--- a/scripts/optimization/__dogleg__.m	Tue Sep 08 23:37:00 2009 -0400
+++ b/scripts/optimization/__dogleg__.m	Wed Sep 09 14:43:06 2009 +0200
@@ -17,42 +17,24 @@
 ## <http://www.gnu.org/licenses/>.
 
 ## -*- texinfo -*-
-## @deftypefn{Function File} {@var{x}} = __dogleg__ (@var{r}, @var{b}, @var{x}, @var{d}, @var{delta}, @var{ismin})
-## Solve the double dogleg trust-region problem:
-## Minimize 
-## @example
-## norm(@var{r}*@var{x}-@var{b}) 
-## @end example
-## subject to the constraint 
-## @example
-## norm(@var{d}.*@var{x}) <= @var{delta} ,
-## @end example
-## x being a convex combination of the gauss-newton and scaled gradient.
-## If @var{ismin} is true (default false), minimizes instead
-## @example
-## norm(@var{r}*@var{x})^2-2*@var{b}'*@var{x} 
-## @end example
+## @deftypefn{Function File} {@var{x}} = __dogleg__ (@var{r}, @var{b}, @var{d}, @var{delta})
+## Undocumented internal function.
 ## @end deftypefn
 
+## Solve the double dogleg trust-region least-squares problem:
+## Minimize norm(r*x-b) subject to the constraint norm(d.*x) <= delta,
+## x being a convex combination of the gauss-newton and scaled gradient.
 
 ## TODO: error checks
 ## TODO: handle singularity, or leave it up to mldivide?
 
-function x = __dogleg__ (r, b, d, delta, ismin = false)
+function x = __dogleg__ (r, b, d, delta)
   ## Get Gauss-Newton direction.
-  if (ismin)
-    g = b;
-    b = r' \ g;
-  endif
   x = r \ b;
   xn = norm (d .* x);
   if (xn > delta)
     ## GN is too big, get scaled gradient.
-    if (ismin)
-      s = g ./ d;
-    else
-      s = (r' * b) ./ d;
-    endif
+    s = (r' * b) ./ d;
     sn = norm (s);
     if (sn > 0)
       ## Normalize and rescale.
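
For illustration, a minimal sketch of calling the reverted least-squares interface
directly; the matrix, right-hand side, scaling, and radius below are made-up values,
not taken from this changeset:

  r = triu (rand (3) + 3*eye (3));   # upper-triangular factor, kept well conditioned
  b = rand (3, 1);                   # least-squares right-hand side
  d = ones (3, 1);                   # scaling vector for the constraint
  delta = 0.5;                       # trust-region radius
  x = __dogleg__ (r, b, d, delta);
  norm (d .* x)                      # <= delta (up to rounding), by construction
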
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/optimization/__doglegm__.m	Wed Sep 09 14:43:06 2009 +0200
@@ -0,0 +1,63 @@
+## Copyright (C) 2008, 2009 Jaroslav Hajek
+##
+## This file is part of Octave.
+##
+## Octave is free software; you can redistribute it and/or modify it
+## under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 3 of the License, or (at
+## your option) any later version.
+##
+## Octave is distributed in the hope that it will be useful, but
+## WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+## General Public License for more details.
+##
+## You should have received a copy of the GNU General Public License
+## along with Octave; see the file COPYING.  If not, see
+## <http://www.gnu.org/licenses/>.
+
+## -*- texinfo -*-
+## @deftypefn{Function File} {@var{x}} = __doglegm__ (@var{r}, @var{g}, @var{d}, @var{delta})
+## @end deftypefn
+
+## Solve the double dogleg trust-region minimization problem:
+## Minimize 1/2*norm(r*x)^2 - g'*x  subject to the constraint norm(d.*x) <= delta,
+## x being a convex combination of the gauss-newton and scaled gradient.
+
+## TODO: error checks
+## TODO: handle singularity, or leave it up to mldivide?
+
+function x = __doglegm__ (r, g, d, delta)
+  ## Get Gauss-Newton direction.
+  b = r' \ g;
+  x = r \ b;
+  xn = norm (d .* x);
+  if (xn > delta)
+    ## GN is too big, get scaled gradient.
+    s = g ./ d;
+    sn = norm (s);
+    if (sn > 0)
+      ## Normalize and rescale.
+      s = (s / sn) ./ d;
+      ## Get the line minimizer in s direction.
+      tn = norm (r*s);
+      snm = (sn / tn) / tn;
+      if (snm < delta)
+        ## Get the dogleg path minimizer.
+        bn = norm (b);
+        dxn = delta/xn; snmd = snm/delta;
+        t = (bn/sn) * (bn/xn) * snmd;
+        t -= dxn * snmd^2 - sqrt ((t-dxn)^2 + (1-dxn^2)*(1-snmd^2));
+        alpha = dxn*(1-snmd^2) / t;
+      else
+        alpha = 0;
+      endif
+    else
+      alpha = delta / xn;
+      snm = 0;
+    endif
+    ## Form the appropriate convex combination.
+    x = alpha * x + ((1-alpha) * min (snm, delta)) * s;
+  endif
+endfunction
+
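
A minimal usage sketch for the new minimization helper; the Hessian factor, gradient,
scaling, and radius below are assumed values, not part of this changeset.  Here r is
an upper-triangular factor of the model Hessian (h = r'*r) and g is the gradient:

  a = rand (5);  h = a'*a + eye (5);  # made-up positive definite model Hessian
  r = chol (h);                       # upper-triangular factor, h = r'*r
  g = rand (5, 1);                    # model gradient
  d = ones (5, 1);                    # scaling used by the constraint
  delta = 0.1;                        # small radius, so the constraint is active
  x = __doglegm__ (r, g, d, delta);
  norm (d .* x)                       # stays at or below delta (up to rounding)
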
--- a/scripts/optimization/fminunc.m	Tue Sep 08 23:37:00 2009 -0400
+++ b/scripts/optimization/fminunc.m	Wed Sep 09 14:43:06 2009 +0200
@@ -209,7 +209,7 @@
     ## Inner loop.
     while (! suc && niter <= maxiter && nfev < maxfev && ! info)
 
-      s = - __dogleg__ (hesr, grad, dg, delta, true);
+      s = - __doglegm__ (hesr, grad, dg, delta);
 
       sn = norm (dg .* s);
       if (niter == 1)
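
The leading minus sign reflects what the helper computes: the unconstrained solution
inside __doglegm__ is (hesr'*hesr) \ grad, while the local quadratic model
m(s) = grad'*s + 1/2*s'*(hesr'*hesr)*s is minimized by the negated step.  An
illustrative check with assumed values (not from this changeset):

  a = rand (4);  h = a'*a + eye (4);  # made-up SPD model Hessian
  hesr = chol (h);                    # upper-triangular factor, h = hesr'*hesr
  grad = rand (4, 1);
  dg = ones (4, 1);
  delta = 1e6;                        # radius large enough to be inactive
  s = - __doglegm__ (hesr, grad, dg, delta);
  assert (s, - h \ grad, 1e-10)       # matches the full Newton step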