Commit

Merge pull request #761 from sjdaines/add_bfgs_grad_tests
Add tests for Optim.BFGS() with user-supplied grad and bounds
ChrisRackauckas authored May 26, 2024
2 parents a5d5709 + 19da423 commit 867ef04
Showing 2 changed files with 25 additions and 1 deletion.
2 changes: 1 addition & 1 deletion lib/OptimizationOptimJL/src/OptimizationOptimJL.jl
@@ -95,7 +95,7 @@ function SciMLBase.__init(prob::OptimizationProblem,
if prob.f isa OptimizationFunction && (!(prob.f.adtype isa SciMLBase.NoAD) || !isnothing(prob.f.grad))
opt = Optim.Fminbox(opt)
else
- throw(ArgumentError("Fminbox($opt) requires gradients, since you didn't use `OptimizationFunction` with a valid AD backend https://docs.sciml.ai/Optimization/stable/API/ad/ the lower and upper bounds thus will be ignored."))
+ throw(ArgumentError("Fminbox($opt) requires gradients; use `OptimizationFunction` either with a valid AD backend (https://docs.sciml.ai/Optimization/stable/API/ad/) or a provided 'grad' function."))
end
end
end
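The updated check above accepts a gradient from either source. A minimal sketch of the two ways to satisfy it, assuming the standard two-parameter Rosenbrock objective used elsewhere in this test suite (the objective, starting point, parameters, bounds, and the `AutoForwardDiff` choice below are illustrative, not part of this commit; the hand-written gradient mirrors `rosenbrock_grad!` from the new tests):

using Optimization, OptimizationOptimJL, Optim, ForwardDiff

rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2

# Option 1: a valid AD backend lets Optimization.jl build the gradient.
f_ad = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff())

# Option 2: a user-supplied in-place gradient via the `grad` keyword.
function rosenbrock_grad!(dx, x, p)
    dx[1] = -2 * (p[1] - x[1]) - 4 * p[2] * (x[2] - x[1]^2) * x[1]
    dx[2] = 2 * p[2] * (x[2] - x[1]^2)
    return nothing
end
f_grad = OptimizationFunction(rosenbrock; grad = rosenbrock_grad!)

# With lb/ub present, __init wraps the optimizer in Optim.Fminbox; either
# `f_ad` or `f_grad` passes the gradient check and avoids the ArgumentError.
prob = OptimizationProblem(f_grad, zeros(2), [1.0, 100.0]; lb = [-1.0, -1.0], ub = [0.8, 0.8])
sol = solve(prob, Optim.BFGS())
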
24 changes: 24 additions & 0 deletions lib/OptimizationOptimJL/test/runtests.jl
@@ -174,6 +174,30 @@ end
sol = solve(prob, BFGS())
@test 10 * sol.objective < l1

function rosenbrock_grad!(dx, x, p)
    dx[1] = -2 * (p[1] - x[1]) - 4 * p[2] * (x[2] - x[1]^2) * x[1]
    dx[2] = 2 * p[2] * (x[2] - x[1]^2)
    return nothing
end

# https://github.com/SciML/Optimization.jl/issues/754 Optim.BFGS() with explicit gradient function
optprob = OptimizationFunction(rosenbrock; grad=rosenbrock_grad!)
prob = OptimizationProblem(optprob, x0, _p)
@test (sol = solve(prob, Optim.BFGS())) isa Any # test exception not thrown
@test 10 * sol.objective < l1

# https://github.com/SciML/Optimization.jl/issues/754 Optim.BFGS() with bounds and explicit gradient function
optprob = OptimizationFunction(rosenbrock; grad=rosenbrock_grad!)
prob = OptimizationProblem(optprob, x0, _p; lb = [-1.0, -1.0], ub = [0.8, 0.8])
@test (sol = solve(prob, Optim.BFGS())) isa Any # test exception not thrown
@test 10 * sol.objective < l1

# test that Optim.BFGS() with bounds but no AD or user-supplied gradient fails
optprob = OptimizationFunction(rosenbrock, SciMLBase.NoAD())
prob = OptimizationProblem(optprob, x0, _p; lb = [-1.0, -1.0], ub = [0.8, 0.8])
@test_throws ArgumentError solve(prob, Optim.BFGS()) # test exception is thrown
@test 10 * sol.objective < l1

@testset "cache" begin
objective(x, p) = (p[1] - x[1])^2
x0 = zeros(1)

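As a sanity check on the hand-written gradient added above, the analytic partials can be compared against central finite differences. This is a standalone sketch: `rosenbrock`, `x0`, `_p`, and `l1` are defined earlier in runtests.jl and are not shown in this hunk, so the objective and parameter values below are assumptions consistent with the usual Rosenbrock test setup.

# Assumed to match the definitions earlier in runtests.jl.
rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
_p = [1.0, 100.0]

# Gradient as added in this commit.
function rosenbrock_grad!(dx, x, p)
    dx[1] = -2 * (p[1] - x[1]) - 4 * p[2] * (x[2] - x[1]^2) * x[1]
    dx[2] = 2 * p[2] * (x[2] - x[1]^2)
    return nothing
end

x = [0.3, -0.2]              # arbitrary test point
dx = zeros(2)
rosenbrock_grad!(dx, x, _p)  # analytic gradient

# Central finite differences for comparison.
h = 1e-6
fd = zeros(2)
for i in eachindex(x)
    xp = copy(x); xp[i] += h
    xm = copy(x); xm[i] -= h
    fd[i] = (rosenbrock(xp, _p) - rosenbrock(xm, _p)) / (2h)
end

@assert isapprox(dx, fd; atol = 1e-5)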