Added Newton-Raphson and changed Unidimensional#log to an Array
clbustos committed Apr 15, 2010
1 parent 3382768 commit 4698073
Showing 6 changed files with 97 additions and 72 deletions.
23 changes: 0 additions & 23 deletions .autotest

This file was deleted.

4 changes: 3 additions & 1 deletion Manifest.txt
@@ -4,4 +4,6 @@ Manifest.txt
README.txt
Rakefile
lib/minimization.rb
test/test_minimization.rb
spec/minimization_unidimensional_spec.rb
spec/spec.opts
spec/spec_helper.rb
1 change: 1 addition & 0 deletions README.txt
@@ -9,6 +9,7 @@ Minimization algorithms on pure Ruby.
== FEATURES/PROBLEMS:

Unidimensional:
* Newton-Raphson (requires first and second derivative)
* Golden Section
* Brent (Port of GSL code)

2 changes: 2 additions & 0 deletions Rakefile
@@ -3,12 +3,14 @@
require 'rubygems'
require 'hoe'
require './lib/minimization'
Hoe.plugin :git

Hoe.spec 'minimization' do
self.version=Minimization::VERSION
self.rubyforge_name = 'ruby-statsample' # if different than 'minimization'
self.developer('Claudio Bustos', 'clbustos_AT_gmail.com')
self.remote_rdoc_dir = 'minimization'
self.extra_deps << ['text-table', "~>1.2"]
end

# vim: syntax=ruby
111 changes: 91 additions & 20 deletions lib/minimization.rb
@@ -16,25 +16,29 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#

require 'text-table'
# Algorithms for unidimensional minimization
module Minimization

VERSION="0.1.1"
VERSION="0.3.0"
FailedIteration=Class.new(Exception)
# Base class for unidimensional minimizers
class Unidimensional
# Default value for error
# Default value for the error (tolerance on x)
EPSILON=1e-6
# Default number of maximum iterations
MAX_ITERATIONS=100
# x value of the minimum found
attr_reader :x_minimum
# f(x) value of the minimum found
attr_reader :f_minimum
# Log of iterations
# Log of iterations. An Array with one row per iteration
attr_reader :log
# Name of fields of log
attr_reader :log_header
# Absolute error on x
attr_accessor :epsilon
# Expected location of the minimum. Setting it speeds up the search
attr_reader :expected
# Create a new minimizer
def initialize(lower, upper, proc)
raise "first argument should be lower than second" if lower>=upper
Expand All @@ -46,26 +50,93 @@ def initialize(lower, upper, proc)
@max_iteration=MAX_ITERATIONS
@epsilon=EPSILON
@iterations=0
@log=""
@log=[]
@log_header=%w{I xl xh f(xl) f(xh) dx df(x)}
end
# Set expected value
def expected=(v)
@expected=v
end
# Iteration log joined into a single plain-text String
def log_summary
@log.join("\n")
end
# Convenience method to minimize
# Usage:
# == Parameters:
# * <tt>lower</tt>: Lower possible value
# * <tt>upper</tt>: Higher possible value
# * <tt>expected</tt>: Optional expected value. The search is faster when it is near the correct value.
# * <tt>&block</tt>: Block with function to minimize
# == Usage:
# minimizer=Minimization::GoldenSection.minimize(-1000, 1000) {|x|
# x**2 }
#
#
def self.minimize(lower,upper,expected=nil,&block)
minimizer=new(lower,upper,block)
minimizer.expected=expected unless expected.nil?
raise FailedIteration unless minimizer.iterate
minimizer
end
# Iterate to find the minimum
def iterate
raise "You should implement this"
end
def f(x)
@proc.call(x)
end
end
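
Since this commit turns Unidimensional#log into an Array of rows described by log_header, the log lends itself to tabular rendering. A minimal sketch, assuming the text-table gem added to the Rakefile above exposes a Text::Table constructor accepting :head and :rows (function and bounds are illustrative only):

  require 'minimization'
  require 'text-table'

  min = Minimization::GoldenSection.minimize(-1000, 1000) { |x| (x + 1)**2 }

  # one table row per iteration: I, xl, xh, f(xl), f(xh), dx, df(x)
  table = Text::Table.new(:head => min.log_header, :rows => min.log)
  puts table
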
# Classic Newton-Raphson minimization method.
# Requires the first and second derivatives
# == Usage
# f = lambda {|x| x**2}
# fd = lambda {|x| 2*x}
# fdd = lambda {|x| 2}
# min = Minimization::NewtonRaphson.new(-1000,1000, f,fd,fdd)
# min.iterate
# min.x_minimum
# min.f_minimum
#
class NewtonRaphson < Unidimensional
# == Parameters:
# * <tt>lower</tt>: Lower possible value
# * <tt>upper</tt>: Higher possible value
# * <tt>proc</tt>: Original function
# * <tt>proc_1d</tt>: First derivative
# * <tt>proc_2d</tt>: Second derivative
#
def initialize(lower, upper, proc, proc_1d, proc_2d)
super(lower,upper,proc)
@proc_1d=proc_1d
@proc_2d=proc_2d
end
# Raises an error
def self.minimize(*args)
raise "You should use #new and #iterate"
end
def iterate
# First approximation: start from the expected value
x_prev=@lower
x=@expected
failed=true
k=0
while (x-x_prev).abs > @epsilon and k<@max_iteration
k+=1
x_prev=x
x=x-(@proc_1d.call(x).quo(@proc_2d.call(x)))
f_prev=f(x_prev)
f=f(x)
x_min,x_max=[x,x_prev].min, [x,x_prev].max
f_min,f_max=[f,f_prev].min, [f,f_prev].max
@log << [k, x_min, x_max, f_min, f_max, (x_prev-x).abs, (f-f_prev).abs]
end
raise FailedIteration, "Not converged" if k>=@max_iteration
@x_minimum = x;
@f_minimum = f(x);
end
end
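
A minimal runnable sketch of the NewtonRaphson class added above. Because iterate starts from @expected, the expected value is set explicitly here; the function, bounds and starting guess are illustrative only:

  require 'minimization'

  f   = lambda { |x| (x - 2)**2 + 3 }   # minimum at x = 2, where f(x) = 3
  fd  = lambda { |x| 2 * (x - 2) }      # first derivative
  fdd = lambda { |x| 2 }                # second derivative

  min = Minimization::NewtonRaphson.new(-1000, 1000, f, fd, fdd)
  min.expected = 5                      # starting point for the update x - f'(x) / f''(x)
  min.iterate
  min.x_minimum   # ~ 2.0
  min.f_minimum   # ~ 3.0
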
# = Golden Section Minimizer.
# Basic minimization algorithm. Slow, but robust
# Use:
# Basic minimization algorithm. Slow, but robust.
# See Unidimensional for methods.
# == Usage
# require 'minimization'
# min=Minimization::GoldenSection.new(-1000, 20000, proc {|x| (x+1)**2})
# min.expected=1.5 # Expected value
@@ -74,10 +145,6 @@ def f(x)
# min.f_minimum
# min.log
class GoldenSection < Unidimensional
attr_accessor :expected
def initialize(lower,upper, proc)
super
end
# Start the iteration
def iterate
ax=@lower
@@ -103,7 +170,6 @@ def iterate


while (x3-x0).abs > @epsilon and k<@max_iteration
@log+=sprintf("k=%4d, |a-b|=%e\n", k, (x3-x0).abs)
if f2 < f1
x0 = x1;
x1 = x2;
@@ -117,6 +183,8 @@ def iterate
f2 = f1;
f1 = f(x1);
end
@log << [k, x3, x0, f1, f2, (x3-x0).abs, (f1-f2).abs]

k +=1;
end

@@ -132,7 +200,9 @@ def iterate

end

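For reference, the shrink step above follows the standard golden-section placement of the interior points; the sketch below shows the rule with the usual constant (the exact constants used by this class live in lines the diff does not expand):

  # golden-section placement: interior points sit at a fixed fraction of the bracket
  golden = (3 - Math.sqrt(5)) / 2.0   # ~ 0.381966
  x0, x3 = -10.0, 10.0                # current bracket [x0, x3]
  x1 = x0 + golden * (x3 - x0)        # lower interior point
  x2 = x3 - golden * (x3 - x0)        # upper interior point
  # keeping the sub-bracket that contains the smaller of f(x1), f(x2)
  # shrinks the bracket by a constant factor of about 0.618 per iteration
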
# Direct port of Brent algorithm found on GSL
# Direct port of Brent algorithm found on GSL.
# See Unidimensional for methods.
# == Usage
# min=Minimization::Brent.new(-1000, 20000, proc {|x| (x+1)**2})
# min.expected=1.5 # Expected value
# min.iterate
@@ -175,6 +245,7 @@ def expected=(v)
@f_minimum=f(v)
@do_bracketing=false
end

def bracketing
eval_max=10
f_left = @f_lower;
@@ -198,7 +269,7 @@ def bracketing


begin
@log+=sprintf("B%d: [%0.5f - %0.5f] -> [%0.5f - %0.5f] E: %0.6f\n", nb_eval, x_left, x_right, f_left, f_right, (x_left-x_right).abs)
@log << ["B#{nb_eval}", x_left, x_right, f_left, f_right, (x_left-x_right).abs, (f_left-f_right).abs]
if (f_center < f_left )
if (f_center < f_right)
@x_lower = x_left;
@@ -249,11 +320,11 @@ def iterate
while k<@max_iteration and (@x_lower-@x_upper).abs>@epsilon
k+=1
result=brent_iterate
raise "Error on iteration" if !result
raise FailedIteration,"Error on iteration" if !result
begin
@log+=sprintf("%d: [%0.5f - %0.5f] -> [%0.5f - %0.5f] E: %0.6f\n", k, @x_lower, @x_upper, @f_lower, @f_upper, (@x_lower-@x_upper).abs)
@log << [k, @x_lower, @x_upper, @f_lower, @f_upper, (@x_lower-@x_upper).abs, (@f_lower-@f_upper).abs]
rescue =>@e
@log+=@e.to_s
@log << [k, @e.to_s, nil, nil, nil, nil, nil]
end
end
@iterations=k
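
The Brent changes above keep the bracketing phase (which looks for a triple with a lower f(x) in the middle) but, per the expected= override, seed the minimizer and skip bracketing when an expected value is supplied. A minimal usage sketch mirroring the doc comment:

  require 'minimization'

  min = Minimization::Brent.new(-1000, 20000, proc { |x| (x + 1)**2 })
  min.expected = 1.5      # per the override above: sets @f_minimum = f(1.5) and @do_bracketing = false
  min.iterate
  min.x_minimum           # ~ -1.0
  min.f_minimum           # ~ 0.0
  puts min.log_summary    # plain-text dump of the iteration log
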
28 changes: 0 additions & 28 deletions test/test_minimization.rb

This file was deleted.
