diff --git a/docs/Project.toml b/docs/Project.toml
index 812a447f2..6bbbb6290 100644
--- a/docs/Project.toml
+++ b/docs/Project.toml
@@ -10,6 +10,7 @@ Ipopt_jll = "9cc047cb-c261-5740-88fc-0cf96f7bdcc7"
 IterTools = "c8e1da08-722c-5040-9ed9-7db0dc04731e"
 Juniper = "2ddba703-00a4-53a7-87a5-e8b9971dde84"
 Manifolds = "1cead3c2-87b3-11e9-0ccd-23c62b72b94e"
+Manopt = "0fc0a36d-df90-57f3-8f93-d78a9fc72bb5"
 ModelingToolkit = "961ee093-0014-501f-94e3-6117800e7a78"
 NLopt = "76087f3c-5699-56af-9a33-bf431cd00edd"
 Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba"
diff --git a/docs/src/optimization_packages/manopt.md b/docs/src/optimization_packages/manopt.md
index 473a235a4..64cc69880 100644
--- a/docs/src/optimization_packages/manopt.md
+++ b/docs/src/optimization_packages/manopt.md
@@ -36,8 +36,8 @@ function or `OptimizationProblem`.
 
 The Rosenbrock function on the Euclidean manifold can be optimized using the `GradientDescentOptimizer` as follows:
 
-```@example Manopt1
-using Optimization, OptimizationManopt, Manifolds
+```@example Manopt
+using Optimization, OptimizationManopt, Manifolds, LinearAlgebra
 rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
 x0 = zeros(2)
 p = [1.0, 100.0]
@@ -57,7 +57,7 @@ sol = Optimization.solve(prob, opt)
 
 The box-constrained Karcher mean problem on the SPD manifold with the Frank-Wolfe algorithm can be solved as follows:
 
-```@example Manopt2
+```@example Manopt
 M = SymmetricPositiveDefinite(5)
 m = 100
 σ = 0.005
@@ -99,7 +99,7 @@ This example is based on the [example](https://juliamanifolds.github.io/ManoptEx
 
 The following example is adapted from the Rayleigh Quotient example in ManoptExamples.jl. We solve the Rayleigh quotient problem on the Sphere manifold:
 
-```@example Manopt3
+```@example Manopt
 using Optimization, OptimizationManopt
 using Manifolds, LinearAlgebra
 using Manopt
@@ -120,7 +120,7 @@ sol = solve(prob, GradientDescentOptimizer())
 
 Let's check that this indeed corresponds to the minimum eigenvalue of the matrix `A`.
 
-```@example Manopt3
+```@example Manopt
 @show eigmin(A)
 @show sol.objective
 ```
diff --git a/lib/OptimizationManopt/src/OptimizationManopt.jl b/lib/OptimizationManopt/src/OptimizationManopt.jl
index 102bf075c..3d97cc9b9 100644
--- a/lib/OptimizationManopt/src/OptimizationManopt.jl
+++ b/lib/OptimizationManopt/src/OptimizationManopt.jl
@@ -394,6 +394,8 @@ function SciMLBase.__solve(cache::OptimizationCache{
     local x, cur, state
 
     manifold = haskey(cache.solver_args, :manifold) ? cache.solver_args[:manifold] : nothing
+    gradF = haskey(cache.solver_args, :riemannian_grad) ? cache.solver_args[:riemannian_grad] : nothing
+    hessF = haskey(cache.solver_args, :riemannian_hess) ? cache.solver_args[:riemannian_hess] : nothing
 
     if manifold === nothing
         throw(ArgumentError("Manifold not specified in the problem for e.g. `OptimizationProblem(f, x, p; manifold = SymmetricPositiveDefinite(5))`."))
@@ -433,9 +435,13 @@ function SciMLBase.__solve(cache::OptimizationCache{
 
     _loss = build_loss(cache.f, cache, _cb)
 
-    gradF = build_gradF(cache.f, cur)
+    if gradF === nothing
+        gradF = build_gradF(cache.f, cur)
+    end
 
-    hessF = build_hessF(cache.f, cur)
+    if hessF === nothing
+        hessF = build_hessF(cache.f, cur)
+    end
 
     if haskey(solver_kwarg, :stopping_criterion)
         stopping_criterion = Manopt.StopWhenAny(solver_kwarg.stopping_criterion...)
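The hunks above wire optional user-supplied Riemannian derivatives through `cache.solver_args`. A minimal sketch of how that might be exercised, assuming extra keywords passed to `solve` reach `solver_args` the same way the documented `manifold` keyword does (the names `rayleigh` and `rgrad` are illustrative, not part of the package):

```julia
using Optimization, OptimizationManopt, Manifolds, LinearAlgebra

# Rayleigh quotient on the sphere, mirroring the docs example above.
n = 50
A = Symmetric(randn(n, n))
rayleigh(x, p = nothing) = x' * A * x

# Analytic Riemannian gradient in Manopt's (M, x) convention: the Euclidean
# gradient 2*A*x projected onto the tangent space at x.
rgrad(M, x) = project(M, x, 2 * A * x)

optf = OptimizationFunction(rayleigh, Optimization.AutoFiniteDiff())
prob = OptimizationProblem(optf, normalize(randn(n)); manifold = Sphere(n - 1))

# With `riemannian_grad` present in solver_args, the new branch skips build_gradF.
sol = solve(prob, GradientDescentOptimizer(); riemannian_grad = rgrad)
```

An analytic `riemannian_hess` could be passed the same way; when it is absent, the `build_hessF` fallback is kept, as before.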
diff --git a/lib/OptimizationManopt/test/runtests.jl b/lib/OptimizationManopt/test/runtests.jl
index 49bddb659..fac4e25ed 100644
--- a/lib/OptimizationManopt/test/runtests.jl
+++ b/lib/OptimizationManopt/test/runtests.jl
@@ -154,8 +154,8 @@ end
     optprob = OptimizationFunction(rosenbrock, AutoForwardDiff())
     prob = OptimizationProblem(optprob, x0, p; manifold = R2)
 
-    @test_broken Optimization.solve(prob, opt)
-    @test_broken sol.minimum < 0.1
+    sol = Optimization.solve(prob, opt)
+    @test sol.minimum < 0.1
 end
 
 @testset "TrustRegions" begin
@@ -207,7 +207,6 @@ end
     q = Matrix{Float64}(I, 5, 5) .+ 2.0
     data2 = [exp(M, q, σ * rand(M; vector_at = q)) for i in 1:m]
 
-    f(M, x, p = nothing) = sum(distance(M, x, data2[i])^2 for i in 1:m)
     f(x, p = nothing) = sum(distance(M, x, data2[i])^2 for i in 1:m)
 
     optf = OptimizationFunction(f, Optimization.AutoFiniteDiff())
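The Karcher-mean objective in the last hunk also has a closed-form Riemannian gradient (minus twice the sum of logarithmic maps), so the same test data could in principle feed the new `riemannian_grad` path instead of relying on `AutoFiniteDiff`. A hedged sketch under that assumption (`karcher_grad` is an illustrative name, not part of the test suite):

```julia
using Manifolds, LinearAlgebra

M = SymmetricPositiveDefinite(5)
# Data as in the test: small perturbations of q on the SPD manifold.
q = Matrix{Float64}(I, 5, 5) .+ 2.0
data2 = [exp(M, q, 0.005 * rand(M; vector_at = q)) for i in 1:100]

f(x, p = nothing) = sum(distance(M, x, d)^2 for d in data2)

# Riemannian gradient of the sum of squared distances:
# grad f(x) = -2 * sum_i log(M, x, data2[i])  (log = inverse exponential map).
karcher_grad(M, x) = -2 * sum(log(M, x, d) for d in data2)
```

Passing `riemannian_grad = karcher_grad` to `solve` would then bypass `build_gradF`'s conversion of the finite-difference Euclidean gradient.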