Simplify.
pkofod committed Mar 17, 2017
1 parent f668230 commit a0f0685
Showing 4 changed files with 72 additions and 205 deletions.
3 changes: 0 additions & 3 deletions REQUIRE
@@ -1,4 +1 @@
julia 0.5
Calculus
ForwardDiff 0.3.0 0.5.0
ReverseDiff 0.0.3
1 change: 1 addition & 0 deletions src/NLSolversBase.jl
@@ -19,5 +19,6 @@ export NonDifferentiable,
hessian!

include("objective_types.jl")
include("interface.jl")

end # module
61 changes: 61 additions & 0 deletions src/interface.jl
@@ -0,0 +1,61 @@
function _unchecked_value!(obj, x)
    obj.f_calls .+= 1
    copy!(obj.last_x_f, x)
    obj.f_x = obj.f(x)
end
# Evaluates f(x) without updating the cache (the call is still counted)
function value(obj, x)
    if x != obj.last_x_f
        obj.f_calls .+= 1
        return obj.f(x)
    end
    obj.f_x
end
function value!(obj, x)
    if x != obj.last_x_f
        _unchecked_value!(obj, x)
    end
    obj.f_x
end
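
For illustration, a minimal sketch of the caching contract (the objective sq is hypothetical, not part of the package):

sq(x) = sum(x.^2)
od = OnceDifferentiable(sq, [0.0, 0.0])  # seed evaluation; f_calls == [1]
value!(od, [1.0, 2.0])  # x differs from last_x_f: evaluates and caches 5.0
value!(od, [1.0, 2.0])  # same x: cache hit, no extra call to sq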


function _unchecked_gradient!(obj, x)
    obj.g_calls .+= 1
    copy!(obj.last_x_g, x)
    obj.g!(x, obj.g)
end
function gradient!(obj::AbstractObjective, x)
    if x != obj.last_x_g
        _unchecked_gradient!(obj, x)
    end
end

function value_gradient!(obj::AbstractObjective, x)
    if x != obj.last_x_f && x != obj.last_x_g
        obj.f_calls .+= 1
        obj.g_calls .+= 1
        obj.last_x_f[:], obj.last_x_g[:] = copy(x), copy(x)
        obj.f_x = obj.fg!(x, obj.g)
    elseif x != obj.last_x_f
        _unchecked_value!(obj, x)
    elseif x != obj.last_x_g
        _unchecked_gradient!(obj, x)
    end
    obj.f_x
end
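
A sketch of the three-way dispatch above, continuing the hypothetical od from the earlier example:

value_gradient!(od, [3.0, 4.0])  # neither cache matches: one fg! call updates f_x and g
gradient!(od, [3.0, 4.0])        # last_x_g now matches: no g! call
value!(od, [3.0, 4.0])           # last_x_f now matches: returns cached f_x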

function _unchecked_hessian!(obj::AbstractObjective, x)
    obj.h_calls .+= 1
    copy!(obj.last_x_h, x)
    obj.h!(x, obj.H)
end
function hessian!(obj::AbstractObjective, x)
    if x != obj.last_x_h
        _unchecked_hessian!(obj, x)
    end
end

# Getters have no ! and take just an objective (plus optionally an index); they only read the cache
value(obj::AbstractObjective) = obj.f_x
gradient(obj::AbstractObjective) = obj.g
gradient(obj::AbstractObjective, i::Integer) = obj.g[i]
hessian(obj::AbstractObjective) = obj.H
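
For illustration, the getters never trigger an evaluation (again using the hypothetical od):

value(od)        # cached objective value od.f_x
gradient(od)     # cached gradient vector od.g
gradient(od, 1)  # first component of the cached gradient
# hessian(td) likewise returns the cached H of a TwiceDifferentiable td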
212 changes: 10 additions & 202 deletions src/objective_types.jl
@@ -1,12 +1,16 @@
@compat abstract type AbstractObjective end

# Used for objectives and solvers where no gradient is available/exists
type NonDifferentiable{T} <: AbstractObjective
    f
    f_x::T
    last_x_f::Array{T}
    f_calls::Vector{Int}
end
# The user friendly/short form NonDifferentiable constructor
NonDifferentiable{T}(f, x_seed::Array{T}) = NonDifferentiable(f, f(x_seed), copy(x_seed), [1])
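
A minimal usage sketch (the anonymous objective is illustrative):

nd = NonDifferentiable(x -> sum(abs, x), [1.0, -2.0])  # evaluates f once at the seed
value(nd)  # returns the cached 3.0 without calling f again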

# Used for objectives and solvers where the gradient is available/exists
type OnceDifferentiable{T, Tgrad} <: AbstractObjective
    f
    g!
@@ -18,57 +22,23 @@ type OnceDifferentiable{T, Tgrad} <: AbstractObjective
    f_calls::Vector{Int}
    g_calls::Vector{Int}
end
# The user friendly/short form OnceDifferentiable constructor
function OnceDifferentiable(f, g!, fg!, x_seed)
    g = similar(x_seed)
    g!(x_seed, g)
    OnceDifferentiable(f, g!, fg!, f(x_seed), g, copy(x_seed), copy(x_seed), [1], [1])
end

# Automatically create the fg! helper function if only f and g! are provided
function OnceDifferentiable(f, g!, x_seed)
    function fg!(x, storage)
        g!(x, storage)
        return f(x)
    end
    return OnceDifferentiable(f, g!, fg!, x_seed)
end
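
A sketch of this convenience path with a hand-written f and g! (both illustrative):

f(x) = x[1]^2 + 4x[2]^2
function g!(x, storage)
    storage[1] = 2x[1]
    storage[2] = 8x[2]
end
od2 = OnceDifferentiable(f, g!, [1.0, 1.0])  # fg! is synthesized as g! followed by f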
function OnceDifferentiable{T}(f, x_seed::Vector{T}; autodiff = :finite)
    n_x = length(x_seed)
    f_calls = [1]
    g_calls = [1]
    if autodiff == :finite
        function g!(x, storage)
            Calculus.finite_difference!(f, x, storage, :central)
            return
        end
        function fg!(x, storage)
            g!(x, storage)
            return f(x)
        end
    elseif autodiff == :forward
        gcfg = ForwardDiff.GradientConfig(x_seed)
        g! = (x, out) -> ForwardDiff.gradient!(out, f, x, gcfg)

        fg! = (x, out) -> begin
            gr_res = DiffBase.DiffResult(zero(T), out)
            ForwardDiff.gradient!(gr_res, f, x, gcfg)
            DiffBase.value(gr_res)
        end
    elseif autodiff == :reverse
        gcfg = ReverseDiff.GradientConfig(x_seed)
        g! = (x, out) -> ReverseDiff.gradient!(out, f, x, gcfg)

        fg! = (x, out) -> begin
            gr_res = DiffBase.DiffResult(zero(T), out)
            ReverseDiff.gradient!(gr_res, f, x, gcfg)
            DiffBase.value(gr_res)
        end
    else
        error("The autodiff value $autodiff is not supported. Use :finite, :forward or :reverse.")
    end
    g = similar(x_seed)
    g!(x_seed, g)
    return OnceDifferentiable(f, g!, fg!, f(x_seed), g, copy(x_seed), copy(x_seed), f_calls, g_calls)
end
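
The autodiff keyword selects the gradient backend; a usage sketch, reusing the illustrative f from above and assuming the backend packages are loaded:

od_fd = OnceDifferentiable(f, [1.0, 1.0])                       # default :finite (Calculus central differences)
od_ad = OnceDifferentiable(f, [1.0, 1.0]; autodiff = :forward)  # ForwardDiff-generated g! and fg!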

# Used for objectives and solvers where the gradient and Hessian are available/exist
type TwiceDifferentiable{T<:Real} <: AbstractObjective
    f
    g!
@@ -84,6 +54,7 @@ type TwiceDifferentiable{T<:Real} <: AbstractObjective
    g_calls::Vector{Int}
    h_calls::Vector{Int}
end
# The user friendly/short form TwiceDifferentiable constructor
function TwiceDifferentiable{T}(f, g!, fg!, h!, x_seed::Array{T})
    n_x = length(x_seed)
    g = similar(x_seed)
@@ -95,6 +66,7 @@ function TwiceDifferentiable{T}(f, g!, fg!, h!, x_seed::Array{T})
                               copy(x_seed), copy(x_seed), [1], [1], [1])
end

# Automatically create the fg! helper function if only f, g! and h! are provided
function TwiceDifferentiable{T}(f,
                                g!,
                                h!,
@@ -105,167 +77,3 @@ function TwiceDifferentiable{T}(f,
    end
    return TwiceDifferentiable(f, g!, fg!, h!, x_seed)
end
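
Analogous to the OnceDifferentiable case, a sketch with an explicit (illustrative) Hessian for the same f and g! as above:

function h!(x, storage)
    storage[1, 1] = 2.0; storage[1, 2] = 0.0
    storage[2, 1] = 0.0; storage[2, 2] = 8.0
end
td = TwiceDifferentiable(f, g!, h!, [1.0, 1.0])  # fg! is again synthesized from f and g!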
function TwiceDifferentiable{T}(f, x_seed::Vector{T}; autodiff = :finite)
    n_x = length(x_seed)
    f_calls = [1]
    g_calls = [1]
    h_calls = [1]
    if autodiff == :finite
        function g!(x::Vector, storage::Vector)
            Calculus.finite_difference!(f, x, storage, :central)
            return
        end
        function fg!(x::Vector, storage::Vector)
            g!(x, storage)
            return f(x)
        end
        function h!(x::Vector, storage::Matrix)
            Calculus.finite_difference_hessian!(f, x, storage)
            return
        end
    elseif autodiff == :forward
        gcfg = ForwardDiff.GradientConfig(x_seed)
        g! = (x, out) -> ForwardDiff.gradient!(out, f, x, gcfg)

        fg! = (x, out) -> begin
            gr_res = DiffBase.DiffResult(zero(T), out)
            ForwardDiff.gradient!(gr_res, f, x, gcfg)
            DiffBase.value(gr_res)
        end

        hcfg = ForwardDiff.HessianConfig(x_seed)
        h! = (x, out) -> ForwardDiff.hessian!(out, f, x, hcfg)
    elseif autodiff == :reverse
        gcfg = ReverseDiff.GradientConfig(x_seed)
        g! = (x, out) -> ReverseDiff.gradient!(out, f, x, gcfg)

        fg! = (x, out) -> begin
            gr_res = DiffBase.DiffResult(zero(T), out)
            ReverseDiff.gradient!(gr_res, f, x, gcfg)
            DiffBase.value(gr_res)
        end
        hcfg = ReverseDiff.HessianConfig(x_seed)
        h! = (x, out) -> ReverseDiff.hessian!(out, f, x, hcfg)
    else
        error("The autodiff value $(autodiff) is not supported. Use :finite, :forward or :reverse.")
    end
    g = similar(x_seed)
    H = Array{T}(n_x, n_x)
    g!(x_seed, g)
    h!(x_seed, H)
    return TwiceDifferentiable(f, g!, fg!, h!, f(x_seed),
                               g, H, copy(x_seed),
                               copy(x_seed), copy(x_seed), f_calls, g_calls, h_calls)
end
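
A sketch of this fully automatic variant, with every derivative generated from f alone:

td_ad = TwiceDifferentiable(f, [1.0, 1.0]; autodiff = :forward)  # g!, fg! and h! all via ForwardDiff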


function TwiceDifferentiable{T}(f, g!, x_seed::Array{T}; autodiff = :finite)
    n_x = length(x_seed)
    f_calls = [1]
    function fg!(x, storage)
        g!(x, storage)
        return f(x)
    end
    if autodiff == :finite
        function h!(x, storage)
            Calculus.finite_difference_hessian!(f, x, storage)
            return
        end
    elseif autodiff == :forward
        hcfg = ForwardDiff.HessianConfig(similar(x_seed))
        h! = (x, out) -> ForwardDiff.hessian!(out, f, x, hcfg)
    elseif autodiff == :reverse
        hcfg = ReverseDiff.HessianConfig(x_seed)
        h! = (x, out) -> ReverseDiff.hessian!(out, f, x, hcfg)
    else
        error("The autodiff value $(autodiff) is not supported. Use :finite, :forward or :reverse.")
    end
    g = similar(x_seed)
    H = Array{T}(n_x, n_x)
    g!(x_seed, g)
    h!(x_seed, H)
    return TwiceDifferentiable(f, g!, fg!, h!, f(x_seed),
                               g, H, copy(x_seed),
                               copy(x_seed), copy(x_seed), f_calls, [1], [1])
end
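
Here only fg! and h! are generated; a sketch reusing the hand-written g! from above:

td_h = TwiceDifferentiable(f, g!, [1.0, 1.0]; autodiff = :forward)  # h! via ForwardDiff, fg! synthesized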

function TwiceDifferentiable(d::OnceDifferentiable; autodiff = :finite)
    n_x = length(d.last_x_f)
    T = eltype(d.last_x_f)
    if autodiff == :finite
        function h!(x::Vector, storage::Matrix)
            Calculus.finite_difference_hessian!(d.f, x, storage)
            return
        end
    elseif autodiff == :forward
        hcfg = ForwardDiff.HessianConfig(similar(gradient(d)))
        h! = (x, out) -> ForwardDiff.hessian!(out, d.f, x, hcfg)
    end
    H = Array{T}(n_x, n_x)
    h!(d.last_x_g, H)
    return TwiceDifferentiable(d.f, d.g!, d.fg!, h!, d.f_x,
                               gradient(d), H, d.last_x_f,
                               d.last_x_g, copy(d.last_x_g), d.f_calls, d.g_calls, [1])
end
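
And a sketch of promoting an existing objective (note this method only handles :finite and :forward):

od3 = OnceDifferentiable(f, g!, [1.0, 1.0])
td2 = TwiceDifferentiable(od3; autodiff = :forward)  # reuses f, g!, fg! and caches; adds h! and H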

function _unchecked_value!(obj, x)
    obj.f_calls .+= 1
    copy!(obj.last_x_f, x)
    obj.f_x = obj.f(x)
end
# Evaluates f(x) without updating the cache (the call is still counted)
function value(obj, x)
    if x != obj.last_x_f
        obj.f_calls .+= 1
        return obj.f(x)
    end
    obj.f_x
end
function value!(obj, x)
    if x != obj.last_x_f
        _unchecked_value!(obj, x)
    end
    obj.f_x
end


function _unchecked_gradient!(obj, x)
    obj.g_calls .+= 1
    copy!(obj.last_x_g, x)
    obj.g!(x, obj.g)
end
function gradient!(obj::AbstractObjective, x)
    if x != obj.last_x_g
        _unchecked_gradient!(obj, x)
    end
end

function value_gradient!(obj::AbstractObjective, x)
    if x != obj.last_x_f && x != obj.last_x_g
        obj.f_calls .+= 1
        obj.g_calls .+= 1
        obj.last_x_f[:], obj.last_x_g[:] = copy(x), copy(x)
        obj.f_x = obj.fg!(x, obj.g)
    elseif x != obj.last_x_f
        _unchecked_value!(obj, x)
    elseif x != obj.last_x_g
        _unchecked_gradient!(obj, x)
    end
    obj.f_x
end

function _unchecked_hessian!(obj::AbstractObjective, x)
    obj.h_calls .+= 1
    copy!(obj.last_x_h, x)
    obj.h!(x, obj.H)
end
function hessian!(obj::AbstractObjective, x)
    if x != obj.last_x_h
        _unchecked_hessian!(obj, x)
    end
end

# Getters have no ! and take just an objective (plus optionally an index); they only read the cache
value(obj::AbstractObjective) = obj.f_x
gradient(obj::AbstractObjective) = obj.g
gradient(obj::AbstractObjective, i::Integer) = obj.g[i]
hessian(obj::AbstractObjective) = obj.H
