From 7913fce1bee5ef4517dbccea612ea20ad56e0fa7 Mon Sep 17 00:00:00 2001
From: pulsipher
Date: Mon, 17 Jun 2024 08:52:08 -0400
Subject: [PATCH 1/8] initial changes

---
 src/datatypes.jl    | 158 ++++--
 src/optimize.jl     | 901 ++++------------------------------
 src/optimize_old.jl | 963 ++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 1124 insertions(+), 898 deletions(-)
 create mode 100644 src/optimize_old.jl

diff --git a/src/datatypes.jl b/src/datatypes.jl
index fabfe80e..910d38f1 100644
--- a/src/datatypes.jl
+++ b/src/datatypes.jl
@@ -1270,6 +1270,29 @@ struct NLPOperator{F <: Function, G, H}
     end
 end
 
+################################################################################
+# TRANSFORMATION BACKEND
+################################################################################
+"""
+    AbstractTransformationBackend
+
+Abstract type for transformation backends to which an `InfiniteModel` is
+reformulated and solved.
+"""
+abstract type AbstractTransformationBackend end
+
+"""
+    AbstractJuMPTag
+
+Abstract type for tags that are used to dispatch on the different kinds of
+[`JuMPBackend`](@ref)s.
+"""
+abstract type AbstractJuMPTag end
+
+"""
+    JuMPBackend{T <: AbstractJuMPTag, D, C} <: AbstractTransformationBackend
+
+A transformation backend that uses a `JuMP.Model` to solve an `InfiniteModel`,
+where `tag` dispatches to the particular reformulation, `data` stores the
+mapping data between the two models, and `optimizer_constructor` stores the
+optimizer to be used.
+"""
+struct JuMPBackend{T <: AbstractJuMPTag, D, C} <: AbstractTransformationBackend
+    model::JuMP.Model
+    tag::T
+    data::D
+    optimizer_constructor::C
+end
+
 ################################################################################
 # INFINITE MODEL
 ################################################################################
@@ -1323,9 +1346,8 @@ mutable struct InfiniteModel <: JuMP.AbstractModel
     # Objects
     obj_dict::Dict{Symbol, Any}
 
-    # Optimize Data
-    optimizer_constructor::Any
-    optimizer_model::JuMP.Model
+    # Backend Data
+    backend::AbstractTransformationBackend
     ready_to_optimize::Bool
 
     # Extensions
@@ -1333,6 +1355,7 @@ mutable struct InfiniteModel <: JuMP.AbstractModel
     optimize_hook::Any
 end
 
+# TODO UPDATE THE DOCSTRING ONCE THE SYNTAX IS FINALIZED
 """
     InfiniteModel([optimizer_constructor];
                   [OptimizerModel::Function = TranscriptionModel,
@@ -1376,79 +1399,78 @@ CachingOptimizer state: EMPTY_OPTIMIZER
 Solver name: Ipopt
 ```
 """
-function InfiniteModel(;
-    OptimizerModel::Function = TranscriptionModel,
-    kwargs...
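+# NOTE (illustrative sketch): with the new syntax, the model wraps a
+# transformation backend instance, e.g., assuming some
+# `CustomBackend <: AbstractTransformationBackend` has been implemented:
+#
+#     model = InfiniteModel(CustomBackend())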
- )::InfiniteModel - return InfiniteModel(# Parameters - MOIUC.CleverDict{IndependentParameterIndex, ScalarParameterData{<:IndependentParameter}}(), - MOIUC.CleverDict{DependentParametersIndex, MultiParameterData}(), - MOIUC.CleverDict{FiniteParameterIndex, ScalarParameterData{FiniteParameter}}(), - nothing, 0, - Union{IndependentParameterIndex, DependentParametersIndex}[], - MOIUC.CleverDict{ParameterFunctionIndex, ParameterFunctionData{<:ParameterFunction}}(), - Dict{IndependentParameterIndex, Set{InfiniteVariableIndex}}(), - # Variables - MOIUC.CleverDict{InfiniteVariableIndex, VariableData{<:InfiniteVariable}}(), - MOIUC.CleverDict{SemiInfiniteVariableIndex, VariableData{SemiInfiniteVariable{GeneralVariableRef}}}(), - Dict{Tuple{GeneralVariableRef, Dict{Int, Float64}}, SemiInfiniteVariableIndex}(), - MOIUC.CleverDict{PointVariableIndex, VariableData{PointVariable{GeneralVariableRef}}}(), - Dict{Tuple{GeneralVariableRef, Vector{Float64}}, PointVariableIndex}(), - MOIUC.CleverDict{FiniteVariableIndex, VariableData{JuMP.ScalarVariable{Float64, Float64, Float64, Float64}}}(), - nothing, - # Derivatives - MOIUC.CleverDict{DerivativeIndex, VariableData{<:Derivative}}(), - Dict{Tuple{GeneralVariableRef, GeneralVariableRef, Int}, DerivativeIndex}(), - # Measures - MOIUC.CleverDict{MeasureIndex, MeasureData{<:Measure}}(), - # Constraints - MOIUC.CleverDict{InfOptConstraintIndex, ConstraintData{<:JuMP.AbstractConstraint}}(), - Dict{InfOptConstraintIndex, DomainRestrictions{GeneralVariableRef}}(), - nothing, - # Objective - MOI.FEASIBILITY_SENSE, - zero(JuMP.GenericAffExpr{Float64, GeneralVariableRef}), - false, - # registration - NLPOperator[], - Dict{Symbol, Tuple{Function, Int}}(), - # Object dictionary - Dict{Symbol, Any}(), - # Optimize data - nothing, OptimizerModel(; kwargs...), false, - # Extensions - Dict{Symbol, Any}(), - nothing - ) +function InfiniteModel(backend::AbstractTransformationBackend = TranscriptionModel()) + return InfiniteModel( + # Parameters + MOIUC.CleverDict{IndependentParameterIndex, ScalarParameterData{<:IndependentParameter}}(), + MOIUC.CleverDict{DependentParametersIndex, MultiParameterData}(), + MOIUC.CleverDict{FiniteParameterIndex, ScalarParameterData{FiniteParameter}}(), + nothing, 0, + Union{IndependentParameterIndex, DependentParametersIndex}[], + MOIUC.CleverDict{ParameterFunctionIndex, ParameterFunctionData{<:ParameterFunction}}(), + Dict{IndependentParameterIndex, Set{InfiniteVariableIndex}}(), + # Variables + MOIUC.CleverDict{InfiniteVariableIndex, VariableData{<:InfiniteVariable}}(), + MOIUC.CleverDict{SemiInfiniteVariableIndex, VariableData{SemiInfiniteVariable{GeneralVariableRef}}}(), + Dict{Tuple{GeneralVariableRef, Dict{Int, Float64}}, SemiInfiniteVariableIndex}(), + MOIUC.CleverDict{PointVariableIndex, VariableData{PointVariable{GeneralVariableRef}}}(), + Dict{Tuple{GeneralVariableRef, Vector{Float64}}, PointVariableIndex}(), + MOIUC.CleverDict{FiniteVariableIndex, VariableData{JuMP.ScalarVariable{Float64, Float64, Float64, Float64}}}(), + nothing, + # Derivatives + MOIUC.CleverDict{DerivativeIndex, VariableData{<:Derivative}}(), + Dict{Tuple{GeneralVariableRef, GeneralVariableRef, Int}, DerivativeIndex}(), + # Measures + MOIUC.CleverDict{MeasureIndex, MeasureData{<:Measure}}(), + # Constraints + MOIUC.CleverDict{InfOptConstraintIndex, ConstraintData{<:JuMP.AbstractConstraint}}(), + Dict{InfOptConstraintIndex, DomainRestrictions{GeneralVariableRef}}(), + nothing, + # Objective + MOI.FEASIBILITY_SENSE, + zero(JuMP.GenericAffExpr{Float64, 
GeneralVariableRef}), + false, + # registration + NLPOperator[], + Dict{Symbol, Tuple{Function, Int}}(), + # Object dictionary + Dict{Symbol, Any}(), + # Backend data + backend, + false, + # Extensions + Dict{Symbol, Any}(), + nothing + ) end ## Set the optimizer_constructor depending on what it is # MOI.OptimizerWithAttributes -function _set_optimizer_constructor( - model::InfiniteModel, - constructor::MOI.OptimizerWithAttributes - ) - model.optimizer_constructor = constructor.optimizer_constructor - return -end +# function _set_optimizer_constructor( +# model::InfiniteModel, +# constructor::MOI.OptimizerWithAttributes +# ) +# model.optimizer_constructor = constructor.optimizer_constructor +# return +# end # No attributes -function _set_optimizer_constructor(model::InfiniteModel, constructor) - model.optimizer_constructor = constructor - return -end +# function _set_optimizer_constructor(model::InfiniteModel, constructor) +# model.optimizer_constructor = constructor +# return +# end # Dispatch for InfiniteModel call with optimizer constructor -function InfiniteModel( - optimizer_constructor; - OptimizerModel::Function = TranscriptionModel, - kwargs... - ) - model = InfiniteModel() - model.optimizer_model = OptimizerModel(optimizer_constructor; kwargs...) - _set_optimizer_constructor(model, optimizer_constructor) - return model -end +# function InfiniteModel( +# optimizer_constructor; +# OptimizerModel::Function = TranscriptionModel, +# kwargs... +# ) +# model = InfiniteModel() +# model.optimizer_model = OptimizerModel(optimizer_constructor; kwargs...) +# _set_optimizer_constructor(model, optimizer_constructor) +# return model +# end # Define basic InfiniteModel extension functions Base.broadcastable(model::InfiniteModel) = Ref(model) diff --git a/src/optimize.jl b/src/optimize.jl index 01d0d84a..d4602f68 100644 --- a/src/optimize.jl +++ b/src/optimize.jl @@ -1,908 +1,121 @@ ################################################################################ -# OPTIMIZER MODEL BASICS +# CORE BACKEND API ################################################################################ """ - optimizer_model(model::InfiniteModel)::JuMP.Model - -Return the JuMP model stored in `model` that is used to solve it. + transformation_backend_ready(model::InfiniteModel)::Bool +Return `Bool` if the transformation backend model is up-to-date with `model` and +ready to be optimized. **Example** ```julia-repl -julia> opt_model = optimizer_model(model) -A JuMP Model -Feasibility problem with: -Variables: 0 -Model mode: AUTOMATIC -CachingOptimizer state: NO_OPTIMIZER -Solver name: No optimizer attached. -``` -""" -optimizer_model(model::InfiniteModel)::JuMP.Model = model.optimizer_model - -""" - JuMP.bridge_constraints(model::InfiniteModel)::Bool - -Extend `JuMP.bridge_constraints` to return if an infinite model `model` -has an optimizer model where the optimizer is set and unsupported constraints -are automatically bridged to equivalent supported constraints when an -appropriate transformation is available. 
-
-**Example**
-```julia-repl
-julia> bridge_constraints(model)
+julia> transformation_backend_ready(model)
 false
 ```
 """
-function JuMP.bridge_constraints(model::InfiniteModel)::Bool
-    return JuMP.bridge_constraints(optimizer_model(model))
-end
-
-"""
-    JuMP.add_bridge(model::InfiniteModel,
-                    BridgeType::Type{<:MOI.Bridges.AbstractBridge})
-
-Extend `JuMP.add_bridge` to add `BridgeType` to the list of bridges that can
-be used by the optimizer model to transform unsupported constraints into an
-equivalent formulation using only constraints supported by the optimizer.
-"""
-function JuMP.add_bridge(model::InfiniteModel,
-                         BridgeType::Type{<:MOI.Bridges.AbstractBridge})
-    JuMP.add_bridge(optimizer_model(model), BridgeType)
-    return
-end
+transformation_backend_ready(model::InfiniteModel) = model.ready_to_optimize
 
 """
-    optimizer_model_ready(model::InfiniteModel)::Bool
+    set_transformation_backend_ready(model::InfiniteModel, status::Bool)
 
-Return `Bool` if the optimizer model is up to date with `model`.
+Set the status of the transformation backend model to whether it is up-to-date
+or not. Note that this is intended more as an internal function, but it is
+useful for extensions.
 
 **Example**
 ```julia-repl
-julia> optimizer_model_ready(model)
-false
-```
-"""
-optimizer_model_ready(model::InfiniteModel)::Bool = model.ready_to_optimize
-
-"""
-    set_optimizer_model_ready(model::InfiniteModel, status::Bool)
-
-Set the status of the optimizer model to whether it is up to date or not. Note
-is more intended as an internal function, but is useful for extensions.
+julia> set_transformation_backend_ready(model, true)
 
-**Example**
-```julia-repl
-julia> set_optimizer_model_ready(model, true)
-
-julia> optimizer_model_ready(model)
+julia> transformation_backend_ready(model)
 true
 ```
 """
-function set_optimizer_model_ready(model::InfiniteModel, status::Bool)
+function set_transformation_backend_ready(model::InfiniteModel, status::Bool)
     model.ready_to_optimize = status
    return
end

 """
-    add_infinite_model_optimizer(opt_model::JuMP.Model, inf_model::InfiniteModel)
+    transform_model(backend::AbstractTransformationBackend)
 
-Parse the current optimizer and its attributes associated with `inf_model` and load
-them into `opt_model`. This is intended to be used as an internal method
-for [`set_optimizer_model`](@ref).
+Return the underlying model stored in `backend`. This serves as an extension
+point for new transformation backend types.
 """
-function add_infinite_model_optimizer(opt_model::JuMP.Model,
-                                      inf_model::InfiniteModel)
-    if !isa(inf_model.optimizer_constructor, Nothing)
-        bridge_constrs = JuMP.bridge_constraints(inf_model)
-        JuMP.set_optimizer(opt_model, inf_model.optimizer_constructor,
-                           add_bridges = bridge_constrs)
-    end
-    # parse the attributes (this is a hacky workaround)
-    for (attr, val) in JuMP.backend(inf_model).model_cache.optattr
-        MOI.set(opt_model, attr, val)
-    end
-    return
+function transform_model(backend::AbstractTransformationBackend)
+    error("`transform_model` not implemented for transformation backends " *
+          "of type `$(typeof(backend))`.")
 end
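+
+# For example (illustrative sketch), a `JuMPBackend` would simply return its
+# wrapped `JuMP.Model`:
+#
+#     transform_model(backend::JuMPBackend) = backend.model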
 
-"""
-    set_optimizer_model(inf_model::InfiniteModel, opt_model::JuMP.Model;
-                        inherit_optimizer::Bool = true)
-
-Specify the JuMP model that is used to solve `inf_model`. This is intended for
-internal use and extensions. Note that `opt_model` should contain extension
-data to allow it to map to `inf_model` in a manner similar to
-[`TranscriptionModel`](@ref). `inherit_optimizer` indicates whether
-[`add_infinite_model_optimizer`](@ref) should be invoked on the new optimizer
-mode to inherit the optimizer constuctor and attributes currently stored in
-`inf_model`.
-
-**Example**
-```julia-repl
-julia> set_optimizer_model(model, TranscriptionModel())
-
-julia> optimizer_model(model)
-A JuMP Model
-Feasibility problem with:
-Variables: 0
-Model mode: AUTOMATIC
-CachingOptimizer state: NO_OPTIMIZER
-Solver name: No optimizer attached.
-```
-"""
-function set_optimizer_model(
-    inf_model::InfiniteModel,
-    opt_model::JuMP.Model;
-    inherit_optimizer::Bool = true
-    )
-    if inherit_optimizer
-        add_infinite_model_optimizer(opt_model, inf_model)
-    end
-    inf_model.optimizer_model = opt_model
-    set_optimizer_model_ready(inf_model, false)
-    return
-end
-
 """
-    optimizer_model_key(model::JuMP.Model)::Any
+    transform_model(model::InfiniteModel)
 
-Return the extension key used in the optimizer model `model`. Errors if
-`model.ext` contains more than one key. This is intended for internal
-use and extensions. For extensions this is used to dispatch to the appropriate
-optmizer model functions such as extensions to [`build_optimizer_model!`](@ref).
-This is intended as an internal method. See
-[`optimizer_model_key`](@ref optimizer_model_key(::InfiniteModel))
-for the public method
+Return the underlying model stored in the transformation backend of `model`.
 """
-function optimizer_model_key(model::JuMP.Model)
-    if length(model.ext) != 1
-        error("Optimizer models should have 1 and only 1 extension key of the " *
-              "form `Model.ext[:my_ext_key] = MyExtData`.")
-    end
-    return first(keys(model.ext))
-end
+transform_model(model::InfiniteModel) = transform_model(model.backend)
 
 """
-    optimizer_model_key(model::InfiniteModel)::Any
-
-Return the extension key used in the optimizer model of `model`. Errors if
-`optimizer_model.ext` contains more than one key. This is intended for internal
-use and extensions. For extensions this is used to dispatch to the appropriate
-optmizer model functions such as extensions to [`build_optimizer_model!`](@ref).
-
-**Example**
-```julia-repl
-julia> optimizer_model_key(model)
-:TransData
-```
-"""
-function optimizer_model_key(model::InfiniteModel)::Any
-    return optimizer_model_key(optimizer_model(model))
-end
-
-################################################################################
-# OPTIMIZER METHOD EXTENSIONS
-################################################################################
-"""
-    JuMP.set_optimizer(model::InfiniteModel,
-                       [optimizer_constructor;
-                       add_bridges::Bool = true])
+    set_transformation_backend(
+        model::InfiniteModel,
+        backend::AbstractTransformationBackend
+        )
 
-Extend `JuMP.set_optimizer` to set optimizer of infinite models.
-Specifically, the optimizer of the optimizer model is modified.
-
-**Example**
-```julia-repl
-julia> set_optimizer(model, Clp.Optimizer)
-
-julia> optimizer_model(model)
-A JuMP Model
-Feasibility problem with:
-Variables: 0
-Model mode: AUTOMATIC
-CachingOptimizer state: EMPTY_OPTIMIZER
-Solver name: SolverName() attribute not implemented by the optimizer.
-```
+Specify a new transformation backend `backend` for `model` and reset its
+up-to-date status.
 """
-function JuMP.set_optimizer(
+function set_transformation_backend(
     model::InfiniteModel,
-    optimizer_constructor;
-    add_bridges::Bool = true
+    backend::AbstractTransformationBackend
     )
-    JuMP.set_optimizer(optimizer_model(model), optimizer_constructor,
-                       add_bridges = add_bridges)
-    _set_optimizer_constructor(model, optimizer_constructor)
+    model.backend = backend
+    set_transformation_backend_ready(model, false)
     return
 end
 
 """
-    JuMP.set_silent(model::InfiniteModel)
-
-Extend `JuMP.set_silent` for infinite models to take precedence over any other
-attribute controlling verbosity and requires the solver to produce no output.
-
-**Example**
-```julia-repl
-julia> set_silent(model)
-true
-```
-"""
-function JuMP.set_silent(model::InfiniteModel)
-    return JuMP.set_silent(optimizer_model(model))
-end
-
-"""
-    JuMP.unset_silent(model::InfiniteModel)
-
-Extend `JuMP.unset_silent` for infinite models to neutralize the effect of the
-`set_silent` function and let the solver attributes control the verbosity.
-
-**Example**
-```julia-repl
-julia> unset_silent(model)
-false
-```
-"""
-function JuMP.unset_silent(model::InfiniteModel)
-    return JuMP.unset_silent(optimizer_model(model))
-end
-
-"""
-    JuMP.set_time_limit_sec(model::InfiniteModel, limit)
-
-Extend `set_time_limit_sec` to set the time limit (in seconds) of the solver.
-Can be unset using `unset_time_limit_sec` or with `limit` set to `nothing`.
-
-**Example**
-```julia-repl
-julia> set_time_limit_sec(model, 100)
-100
-```
-"""
-function JuMP.set_time_limit_sec(model::InfiniteModel, limit)
-    return JuMP.set_time_limit_sec(optimizer_model(model), limit)
-end
-
-"""
-    JuMP.unset_time_limit_sec(model::InfiniteModel)
-
-Extend `unset_time_limit_sec` to unset the time limit of the solver. Can be set
-using `set_time_limit_sec`.
-
-**Example**
-```julia-repl
-julia> unset_time_limit_sec(model)
-```
-"""
-function JuMP.unset_time_limit_sec(model::InfiniteModel)
-    return JuMP.unset_time_limit_sec(optimizer_model(model))
-end
-
-"""
-    JuMP.time_limit_sec(model::InfiniteModel)
-
-Extend `time_limit_sec` to get the time limit (in seconds) of the solve used by
-the optimizer model (`nothing` if unset). Can be set using `set_time_limit_sec`.
-
-**Example**
-```julia-repl
-julia> time_limit_sec(model)
-100
-```
-"""
-function JuMP.time_limit_sec(model::InfiniteModel)
-    return JuMP.time_limit_sec(optimizer_model(model))
+"""
+    JuMP.get_attribute(backend::AbstractTransformationBackend, attr)
+
+Retrieve the attribute `attr` from `backend`. This is an extension point for
+new transformation backend types.
+"""
+function JuMP.get_attribute(
+    backend::AbstractTransformationBackend,
+    attr
+    )
+    error("`JuMP.get_attribute` not implemented for transformation backends " *
+          "of type `$(typeof(backend))` with attribute `$attr`.")
 end
 
 """
-    JuMP.set_optimizer_attribute(model::InfiniteModel, name::String, value)
+    JuMP.get_attribute(model::InfiniteModel, attr)
 
-Extend `set_optimizer_attribute` to specify a solver-specific attribute
-identified by `name` to `value`.
-
-**Example**
-```julia-repl
-julia> set_optimizer_attribute(model, "SolverSpecificAttributeName", true)
-true
-```
+Retrieve the attribute `attr` from the transformation backend of `model`.
 """
-function JuMP.set_optimizer_attribute(model::InfiniteModel, name::String, value)
-    return JuMP.set_optimizer_attribute(optimizer_model(model), name, value)
+function JuMP.get_attribute(model::InfiniteModel, attr)
+    return JuMP.get_attribute(model.backend, attr)
 end
 
 """
-    JuMP.set_optimizer_attribute(model::InfiniteModel,
-                                 attr::MOI.AbstractOptimizerAttribute,
-                                 value)
+    JuMP.set_attribute(backend::AbstractTransformationBackend, attr, value)
 
-Extend `set_optimizer_attribute` to set the solver-specific attribute `attr` in
-`model` to `value`.
-
-**Example**
-```julia-repl
-julia> set_optimizer_attribute(model, MOI.Silent(), true)
-true
-```
+Specify the attribute `attr` to `value` for `backend`. This is an extension
+point for new transformation backend types.
 """
-function JuMP.set_optimizer_attribute(
-    model::InfiniteModel,
-    attr::MOI.AbstractOptimizerAttribute,
+function JuMP.set_attribute(
+    backend::AbstractTransformationBackend,
+    attr,
     value
     )
-    return MOI.set(optimizer_model(model), attr, value)
+    error("`JuMP.set_attribute` not implemented for transformation backends " *
+          "of type `$(typeof(backend))` with attribute `$attr`.")
 end
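+
+# Usage sketch (illustrative): attributes of an `InfiniteModel` are simply
+# forwarded to its transformation backend, e.g.
+#
+#     set_attribute(model, MOI.TimeLimitSec(), 60.0)
+#     get_attribute(model, MOI.TimeLimitSec()) # 60.0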
 
 """
-    JuMP.set_optimizer_attributes(model::InfiniteModel, pairs::Pair...)
+    JuMP.set_attribute(model::InfiniteModel, attr, value)
 
-Extend `set_optimizer_attributes` to set multiple solver attributes given a
-list of `attribute => value` pairs. Calls
-`set_optimizer_attribute(model, attribute, value)` for each pair.
-
-**Example**
-```julia-repl
-julia> model = Model(Ipopt.Optimizer);
-
-julia> set_optimizer_attributes(model, "tol" => 1e-4, "max_iter" => 100)
-```
-is equivalent to:
-```julia-repl
-julia> set_optimizer_attribute(model, "tol", 1e-4);
-
-julia> set_optimizer_attribute(model, "max_iter", 100);
-```
+Specify the attribute `attr` to `value` for the transformation backend of
+`model`.
 """
-function JuMP.set_optimizer_attributes(model::InfiniteModel, pairs::Pair...)
-    for (name, value) in pairs
-        JuMP.set_optimizer_attribute(model, name, value)
-    end
-    return
-end
-
-"""
-    JuMP.get_optimizer_attribute(model::InfiniteModel, name::String)
-
-Extend `get_optimizer_attribute` to return the value associated with the
-solver-specific attribute named `name`.
-
-**Example**
-```julia-repl
-julia> get_optimizer_attribute(model, "tol")
-0.0001
-```
-"""
-function JuMP.get_optimizer_attribute(model::InfiniteModel, name::String)
-    return JuMP.get_optimizer_attribute(optimizer_model(model), name)
+function JuMP.set_attribute(model::InfiniteModel, attr, value)
+    return JuMP.set_attribute(model.backend, attr, value)
 end
 
 """
-    JuMP.get_optimizer_attribute(model::InfiniteModel,
-                                 attr::MOI.AbstractOptimizerAttribute)
+    build_transformation_model!(
+        model::InfiniteModel,
+        backend::AbstractTransformationBackend;
+        [kwargs...]
+        )
 
-Extend `get_optimizer_attribute` to return the value of the solver-specific
-attribute `attr` in `model`.
-
-**Example**
-```julia-repl
-julia> get_optimizer_attribute(model, MOI.Silent())
-true
-```
+Build out `backend` such that it contains a reformulation of `model` that is
+ready to solve. This is an extension point for new transformation backend
+types.
 """
-function JuMP.get_optimizer_attribute(
+function build_transformation_model!(
     model::InfiniteModel,
-    attr::MOI.AbstractOptimizerAttribute
+    backend::AbstractTransformationBackend;
+    kwargs...
     )
-    return MOI.get(optimizer_model(model), attr)
+    return error("`build_transformation_model!` not implemented for " *
+                 "transformation backends of type `$(typeof(backend))`.")
 end
 
-"""
-    JuMP.solver_name(model::InfiniteModel)
-
-Extend `solver_name` to return the name of the solver being used if there is an
-optimizer selected and it has a name attribute. Otherwise, an error is thrown.
-
-**Example**
-```julia-repl
-julia> solver_name(model)
-"Gurobi"
-```
-"""
-function JuMP.solver_name(model::InfiniteModel)
-    return JuMP.solver_name(optimizer_model(model))
-end
-
-"""
-    JuMP.backend(model::InfiniteModel)
-
-Extend `backend` to return the `MathOptInterface` backend associated with the
-optimizer model. Note this will be empty if the optimizer model has not been
-build yet.
-
-**Example**
-```julia-repl
-julia> moi_model = backend(model);
-```
-"""
-function JuMP.backend(model::InfiniteModel)
-    return JuMP.backend(optimizer_model(model))
-end
-
-"""
-    JuMP.mode(model::InfiniteModel)
-
-Extend `mode` to return the `MathOptInterface` mode the optimizer model is in.
-
-**Example**
-```julia-repl
-julia> mode(model)
-AUTOMATIC::ModelMode = 0
-```
-"""
-function JuMP.mode(model::InfiniteModel)
-    return JuMP.mode(optimizer_model(model))
-end
-
-"""
-    JuMP.result_count(model::InfiniteModel)
-
-Extend `result_count` to return the number of results available to query after a
-call to `optimize!`.
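+
+# Custom backends extend the above, e.g. (illustrative sketch):
+#
+#     function build_transformation_model!(
+#         model::InfiniteModel,
+#         backend::MyBackend;
+#         kwargs...
+#         )
+#         # translate the variables, measures, and constraints of `model`
+#         # into `backend`'s model
+#     end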
-
-**Example**
-```julia-repl
-julia> result_count(model)
-1
-```
-"""
-function JuMP.result_count(model::InfiniteModel)::Int
-    return MOI.get(optimizer_model(model), MOI.ResultCount())
-end
-
-################################################################################
-# OPTIMIZER MODEL BUILD METHODS
-################################################################################
-"""
-    build_optimizer_model!(model::InfiniteModel, key::Val{ext_key_name};
-                           [kwargs...])
-
-Build the optimizer model stored in `model` such that it can be
-treated as a normal JuMP model, where the `Model.ext` field contains a key
-that points to a datastructure that appropriately maps the data between the
-two models. The key argument should be be typed to `Val{ext_key_name}`. This
-should also use [`clear_optimizer_model_build!`](@ref) to empty the out the current
-optimizer model. Ultimately, [`set_optimizer_model`](@ref) should be called
-to insert the build optimizer model into `model` and [`set_optimizer_model_ready`](@ref)
-should be used to update the optimizer model's status.
-"""
-function build_optimizer_model! end
-
-"""
-    clear_optimizer_model_build!(model::JuMP.Model)::JuMP.Model
-
-Empty the optimizer model using appropriate calls of `Base.empty!`. This
-effectively resets `model` except the optimizer, its attributes, and an an emptied
-optimizer model data struct are maintained. This is intended as an internal
-method for use by [`build_optimizer_model!`](@ref).
-"""
-function clear_optimizer_model_build!(model::JuMP.Model)
-    key = optimizer_model_key(model)
-    data_type = typeof(model.ext[key])
-    empty!(model)
-    model.ext[key] = data_type()
-    model.operator_counter = 0
-    return model
-end
-
-"""
-    clear_optimizer_model_build!(model::InfiniteModel)::JuMP.Model
-
-Empty the optimizer model using appropriate calls of `Base.empty!`. This
-effectively resets `model.optimizer_model` except the optimizer, its attributes,
-and an an emptied optimizer model data struct are maintained. This is intended
-as an internal method for use by [`build_optimizer_model!`](@ref).
-"""
-function clear_optimizer_model_build!(model::InfiniteModel)::JuMP.Model
-    return clear_optimizer_model_build!(optimizer_model(model))
-end
-
-"""
-    build_optimizer_model!(model::InfiniteModel; [kwargs...])
-
-Build the optimizer model stored in `model` such that it can be
-treated as a normal JuMP model. Specifically, translate the variables and
-constraints stored in `model` into ones that are stored in the optimizer model
-and can be solved. This is provided generally to accomodate extensions that use
-custom optimizer model types in accordance with [`optimizer_model_key`](@ref).
-However, it may be useful in certain applications when the user desires to
-force a build without calling `optimize!`.
-Extensions will need to implement their own version of the function
-`build_optimizer_model!(model::InfiniteModel, key::Val{ext_key_name}; kwargs...)`.
-
-**Example**
-```julia-repl
-julia> build_optimizer_model!(model)
-
-julia> optimizer_model_ready(model)
-true
-```
-"""
-function build_optimizer_model!(model::InfiniteModel; kwargs...)
+"""
+    build_transformation_model!(model::InfiniteModel; [kwargs...])
+
+Build out the transformation backend of `model` such that it is ready to
+solve. The keyword arguments `kwargs` are passed on to the backend-specific
+[`build_transformation_model!`](@ref) method.
+"""
+function build_transformation_model!(model::InfiniteModel; kwargs...)
     if num_parameters(model, InfiniteParameter) == 0
         @warn("Finite models (i.e., `InfiniteModel`s with no infinite " *
               "parameters) should be modeled directly via a `Model` in JuMP.jl.")
     end
-    key = optimizer_model_key(model)
-    build_optimizer_model!(model, Val(key); kwargs...)
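+    # build via the backend-specific method, then flag the backend as up-to-date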
+    build_transformation_model!(model, model.backend; kwargs...)
+    set_transformation_backend_ready(model, true)
     return
 end
 
-################################################################################
-# OPTIMIZER MODEL MAPPING METHODS (VARIABLES)
-################################################################################
-"""
-    optimizer_model_variable(vref::GeneralVariableRef, key::Val{ext_key_name};
-                             [kwargs...])
-
-Return the reformulation variable(s) stored in the optimizer model that correspond
-to `vref`. This needs to be defined for extensions that implement a custom
-optimizer model type. Principally, this is accomplished by typed the `key`
-argument to `Val{ext_key_name}`. Keyword arguments can be added as needed.
-"""
-function optimizer_model_variable end
-
-# Fallback for unextended keys
-function optimizer_model_variable(vref::GeneralVariableRef, key; kwargs...)
-    error("`optimizer_model_variable` not implemented for optimizer model " *
-          "key `$(typeof(key).parameters[1])`.")
-end
-
-"""
-    optimizer_model_variable(vref::GeneralVariableRef;
-                             [label::Type{<:AbstractSupportLabel} = PublicLabel,
-                             ndarray::Bool = false,
-                             kwargs...])
-
-Return the reformulation variable(s) stored in the optimizer model that correspond
-to `vref`. Also errors if no such variable can be found in
-the optimizer model.
-
-The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ
-and `kwargs` denote extra ones that user extensions may employ in accordance with
-their implementation of [`optimizer_model_variable`](@ref). Errors if such an
-extension has not been written.
-
-By default only the variables associated with public supports are returned, the
-full set can be accessed via `label = All`. Moreover, infinite variables are
-returned as a list corresponding to their supports. However, a n-dimensional array
-can be obtained via `ndarray = true` which is handy when the variable has multiple
-infinite parameter dependencies. The corresponding supports are obtained via
-`supports` using the same keyword arguments.
-
-**Example**
-```julia-repl
-julia> optimizer_model_variable(x) # infinite variable
-2-element Array{VariableRef,1}:
- x(support: 1)
- x(support: 2)
-
-julia> optimizer_model_variable(z) # finite variable
-z
-```
-"""
-function optimizer_model_variable(vref::GeneralVariableRef; kwargs...)
-    key = optimizer_model_key(JuMP.owner_model(vref))
-    return optimizer_model_variable(vref, Val(key); kwargs...)
-end
-
-"""
-    variable_supports(optimizer_model::JuMP.Model, vref,
-                      key::Val{ext_key_name};
-                      [kwargs...])::Vector
-
-Return the supports associated with the mappings of `vref` in `optimizer_model`.
-This dispatches off of `key` which permits optimizer model extensions. This
-should throw an error if `vref` is not associated with the variable mappings
-stored in `optimizer_model`. Keyword arguments can be added as needed. Note that
-no extension is necessary for point or finite variables.
-"""
-function variable_supports end
-
-# fallback for unextended keys
-function variable_supports(optimizer_model::JuMP.Model, vref, key; kwargs...)
-    error("`variable_supports` not implemented for optimizer model key " *
-          "`$(typeof(key).parameters[1])` and/or variable type $(typeof(vref)).")
-end
-
-# FiniteRef
-function variable_supports(optimizer_model::JuMP.Model, vref::FiniteRef,
-                           key; kwargs...)
- return () -end - -""" - supports(vref::DecisionVariableRef; - [label::Type{<:AbstractSupportLabel} = PublicLabel, - ndarray::Bool = false, - kwargs...]) - -Return the supports associated with `vref` in the optimizer -model. Errors if [`InfiniteOpt.variable_supports`](@ref) has not been extended for the -optimizer model type or if `vref` is not be reformulated in the optimizer model. - -The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ -and `kwargs` denote extra ones that user extensions may employ in accordance with -their implementation of `variable_supports`. Errors if such an -extension has not been written. - -By default only the public supports are returned, the -full set can be accessed via `label = All`. Moreover, the supports of infinite -variables are returned as a list. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the variable has multiple -infinite parameter dependencies. - -**Example** -```julia-repl -julia> supports(vref) -2-element Array{Tuple{Float64},1}: - (0.0,) - (1.0,) -``` -""" -function supports( - vref::Union{DecisionVariableRef, MeasureRef, ParameterFunctionRef}; - kwargs... - ) - model = optimizer_model(JuMP.owner_model(vref)) - key = optimizer_model_key(JuMP.owner_model(vref)) - return variable_supports(model, vref, Val(key); kwargs...) -end - -################################################################################ -# OPTIMIZER MODEL MAPPING METHODS (EXPRESSIONS) -################################################################################ -""" - optimizer_model_expression(expr, key::Val{ext_key_name}; [kwargs...]) - -Return the reformulation expression(s) stored in the optimizer model that correspond -to `expr`. This needs to be defined for extensions that implement a custom -optimizer model type. Principally, this is accomplished by typed the `key` -argument to `Val{ext_key_name}`. Keyword arguments can be added as needed. -Note that if `expr` is a `GeneralVariableRef` this just dispatches to -`optimizer_model_variable`. -""" -function optimizer_model_expression end - -# Fallback for unextended keys -function optimizer_model_expression(expr, key; kwargs...) - error("`optimizer_model_expression` not defined for optimizer model " * - "key `$(typeof(key).parameters[1])` and expression type " * - "`$(typeof(expr))`.") -end - -# Define for variable reference expressions -function optimizer_model_expression(expr::GeneralVariableRef, key; kwargs...) - return optimizer_model_variable(expr, key; kwargs...) -end - -""" - optimizer_model_expression(expr::JuMP.AbstractJuMPScalar; - [label::Type{<:AbstractSupportLabel} = PublicLabel, - ndarray::Bool = false, - kwargs...]) - -Return the reformulation expression(s) stored in the optimizer model that correspond -to `expr`. Also errors if no such expression can be found in -the optimizer model (meaning one or more of the underlying variables have not -been transcribed). - -The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ -and `kwargs` denote extra ones that user extensions may employ in accordance with -their implementation of [`optimizer_model_expression`](@ref). Errors if such an -extension has not been written. - -By default only the expressions associated with public supports are returned, the -full set can be accessed via `label = All`. Moreover, infinite expressions are -returned as a list corresponding to their supports. 
However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the expression has multiple -infinite parameter dependencies. The corresponding supports are obtained via -`supports` using the same keyword arguments. - -**Example** -```julia-repl -julia> optimizer_model_expression(my_expr) # finite expression -x(support: 1) - y -``` -""" -function optimizer_model_expression(expr::JuMP.AbstractJuMPScalar; kwargs...) - model = JuMP.owner_model(expr) - if isnothing(model) - return zero(JuMP.AffExpr) + JuMP.constant(expr) - else - key = optimizer_model_key(model) - return optimizer_model_expression(expr, Val(key); kwargs...) - end -end - -""" - expression_supports(optimizer_model::JuMP.Model, expr, - key::Val{ext_key_name}; [kwargs...]) - -Return the supports associated with the mappings of `expr` in `optimizer_model`. -This dispatches off of `key` which permits optimizer model extensions. This -should throw an error if `expr` is not associated with the variable mappings -stored in `optimizer_model`. Keyword arguments can be added as needed. Note that -if `expr` is a `GeneralVariableRef` this just dispatches to `variable_supports`. -""" -function expression_supports end - -# fallback for unextended keys -function expression_supports(optimizer_model::JuMP.Model, expr, key; kwargs...) - error("`constraint_supports` not implemented for optimizer model key " * - "`$(typeof(key).parameters[1])` and/or expressions of type " * - "`$(typeof(expr))`.") -end - -# Variable reference expressions -function expression_supports(model::JuMP.Model, vref::GeneralVariableRef, key; - kwargs...) - return variable_supports(model, dispatch_variable_ref(vref), key; kwargs...) -end - -""" - supports(expr::JuMP.AbstractJuMPScalar; - [label::Type{<:AbstractSupportLabel} = PublicLabel, - ndarray::Bool = false, - kwargs...]) - -Return the support associated with `expr`. Errors if `expr` is -not associated with the constraint mappings stored in `optimizer_model`. - -The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ -and `kwargs` denote extra ones that user extensions may employ in accordance with -their implementation of `expression_supports`. Errors if such an -extension has not been written. - -By default only the public supports are returned, the -full set can be accessed via `label = All`. Moreover, the supports of infinite -expressions are returned as a list. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the expression has multiple -infinite parameter dependencies. - -**Example** -```julia-repl -julia> supports(cref) -2-element Array{Tuple{Float64},1}: - (0.0,) - (1.0,) -``` -""" -function supports(expr::JuMP.AbstractJuMPScalar; kwargs...) - model = JuMP.owner_model(expr) - if isnothing(model) - return () - else - key = optimizer_model_key(model) - opt_model = optimizer_model(model) - return expression_supports(opt_model, expr, Val(key); kwargs...) - end -end - -################################################################################ -# OPTIMIZER MODEL MAPPING METHODS (CONSTRAINTS) -################################################################################ -""" - optimizer_model_constraint(cref::InfOptConstraintRef, - key::Val{ext_key_name}; [kwargs...]) - -Return the reformulation constraint(s) stored in the optimizer model that correspond -to `cref`. This needs to be defined for extensions that implement a custom -optimizer model type. 
Principally, this is accomplished by typed the `key` -argument to `Val{ext_key_name}`. Keyword arguments can be added as needed. -""" -function optimizer_model_constraint end - -# Fallback for unextended keys -function optimizer_model_constraint( - cref::InfOptConstraintRef, - key; - kwargs... - ) - error("`optimizer_model_constraint` not implemented for optimizer model " * - "key `$(typeof(key).parameters[1])`.") -end - -""" - optimizer_model_constraint(cref::InfOptConstraintRef; - [label::Type{<:AbstractSupportLabel} = PublicLabel, - ndarray::Bool = false, - kwargs...]) - -Return the reformulation constraint(s) stored in the optimizer model that correspond -to `cref`. Errors if no such constraint can be found in -the optimizer model. - -The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ -and `kwargs` denote extra ones that user extensions may employ in accordance with -their implementation of [`optimizer_model_constraint`](@ref). Errors if such an -extension has not been written. - -By default only the constraints associated with public supports are returned, the -full set can be accessed via `label = All`. Moreover, infinite constraints are -returned as a list corresponding to their supports. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the constraint has multiple -infinite parameter dependencies. The corresponding supports are obtained via -`supports` using the same keyword arguments. - -**Example** -```julia-repl -julia> optimizer_model_constraint(c1) # finite constraint -c1 : x(support: 1) - y <= 3.0 -``` -""" -function optimizer_model_constraint( - cref::InfOptConstraintRef; - kwargs... - ) - key = optimizer_model_key(JuMP.owner_model(cref)) - return optimizer_model_constraint(cref, Val(key); kwargs...) -end - -""" - constraint_supports(optimizer_model::JuMP.Model, - cref::InfOptConstraintRef, - key::Val{ext_key_name}; [kwargs...]) - -Return the supports associated with the mappings of `cref` in `optimizer_model`. -This dispatches off of `key` which permits optimizer model extensions. This -should throw an error if `cref` is not associated with the variable mappings -stored in `optimizer_model`. Keyword arguments can be added as needed. -""" -function constraint_supports end - -# fallback for unextended keys -function constraint_supports(optimizer_model::JuMP.Model, - cref::InfOptConstraintRef, - key; kwargs...) - error("`constraint_supports` not implemented for optimizer model key " * - "`$(typeof(key).parameters[1])`.") -end - -""" - supports(cref::InfOptConstraintRef; - [label::Type{<:AbstractSupportLabel} = PublicLabel, - ndarray::Bool = false, - kwargs...]) - -Return the support associated with `cref`. Errors if `cref` is -not associated with the constraint mappings stored in `optimizer_model`. - -The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ -and `kwargs` denote extra ones that user extensions may employ in accordance with -their implementation of `constraint_supports`. Errors if such an -extension has not been written. - -By default only the public supports are returned, the -full set can be accessed via `label = All`. Moreover, the supports of infinite -constraints are returned as a list. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the constraint has multiple -infinite parameter dependencies. 
-
-**Example**
-```julia-repl
-julia> supports(cref)
-2-element Array{Tuple{Float64},1}:
- (0.0,)
- (1.0,)
-```
-"""
-function supports(cref::InfOptConstraintRef; kwargs...)
-    model = optimizer_model(JuMP.owner_model(cref))
-    key = optimizer_model_key(JuMP.owner_model(cref))
-    return constraint_supports(model, cref, Val(key); kwargs...)
-end
-
-################################################################################
-# OPTIMIZATION METHODS
-################################################################################
 """
     JuMP.set_optimize_hook(
         model::InfiniteModel,
@@ -910,7 +123,7 @@ end
     )::Nothing
 
 Set the function `hook` as the optimize hook for `model` where `hook` should
-have be of the form `hook(model::GenericModel; hook_specfic_kwargs..., kwargs...)`.
+be of the form `hook(model::InfiniteModel; hook_specific_kwargs..., kwargs...)`.
 The `kwargs` are those passed to [`optimize!`](@ref). The `hook_specifc_kwargs`
 are passed as additional keywords by the user when they call [`optimize!`](@ref).
@@ -930,6 +143,14 @@ function JuMP.set_optimize_hook(
     return
 end
 
+"""
+    JuMP.optimize!(backend::AbstractTransformationBackend)
+
+Optimize the underlying model of `backend`. This is an extension point that
+new transformation backend types must implement.
+"""
+function JuMP.optimize!(backend::AbstractTransformationBackend)
+    return error("`JuMP.optimize!` not implemented for transformation " *
+                 "backends of type `$(typeof(backend))`.")
+end
+
+
 """
     JuMP.optimize!(model::InfiniteModel; [kwargs...])
 
@@ -958,6 +179,26 @@ function JuMP.optimize!(
-    if !optimizer_model_ready(model)
-        build_optimizer_model!(model; kwargs...)
+    if !transformation_backend_ready(model)
+        build_transformation_model!(model; kwargs...)
     end
-    JuMP.optimize!(optimizer_model(model))
+    JuMP.optimize!(model.backend)
     return
 end
+
+"""
+    optimizer_model_variable(
+        vref::GeneralVariableRef,
+        backend::AbstractTransformationBackend;
+        [kwargs...]
+        )
+
+Return the reformulation variable(s) stored in `backend` that correspond to
+`vref`. This is an extension point for new transformation backend types.
+"""
+function optimizer_model_variable(
+    vref::GeneralVariableRef,
+    backend::AbstractTransformationBackend;
+    kwargs...
+    )
+    error("`optimizer_model_variable` not implemented for transformation " *
+          "backends of type `$(typeof(backend))`.")
+end
+
+"""
+    optimizer_model_variable(vref::GeneralVariableRef; [kwargs...])
+
+Return the reformulation variable(s) that correspond to `vref` in the
+transformation backend of its owner model.
+"""
+function optimizer_model_variable(vref::GeneralVariableRef; kwargs...)
+    model = JuMP.owner_model(vref)
+    return optimizer_model_variable(vref, model.backend; kwargs...)
+end
+
diff --git a/src/optimize_old.jl b/src/optimize_old.jl
new file mode 100644
index 00000000..01d0d84a
--- /dev/null
+++ b/src/optimize_old.jl
@@ -0,0 +1,963 @@
+################################################################################
+# OPTIMIZER MODEL BASICS
+################################################################################
+"""
+    optimizer_model(model::InfiniteModel)::JuMP.Model
+
+Return the JuMP model stored in `model` that is used to solve it.
+
+**Example**
+```julia-repl
+julia> opt_model = optimizer_model(model)
+A JuMP Model
+Feasibility problem with:
+Variables: 0
+Model mode: AUTOMATIC
+CachingOptimizer state: NO_OPTIMIZER
+Solver name: No optimizer attached.
+```
+"""
+optimizer_model(model::InfiniteModel)::JuMP.Model = model.optimizer_model
+
+"""
+    JuMP.bridge_constraints(model::InfiniteModel)::Bool
+
+Extend `JuMP.bridge_constraints` to return if an infinite model `model`
+has an optimizer model where the optimizer is set and unsupported constraints
+are automatically bridged to equivalent supported constraints when an
+appropriate transformation is available.
+
+**Example**
+```julia-repl
+julia> bridge_constraints(model)
+false
+```
+"""
+function JuMP.bridge_constraints(model::InfiniteModel)::Bool
+    return JuMP.bridge_constraints(optimizer_model(model))
+end
+
+"""
+    JuMP.add_bridge(model::InfiniteModel,
+                    BridgeType::Type{<:MOI.Bridges.AbstractBridge})
+
+Extend `JuMP.add_bridge` to add `BridgeType` to the list of bridges that can
+be used by the optimizer model to transform unsupported constraints into an
+equivalent formulation using only constraints supported by the optimizer.
+""" +function JuMP.add_bridge(model::InfiniteModel, + BridgeType::Type{<:MOI.Bridges.AbstractBridge}) + JuMP.add_bridge(optimizer_model(model), BridgeType) + return +end + +""" + optimizer_model_ready(model::InfiniteModel)::Bool + +Return `Bool` if the optimizer model is up to date with `model`. + +**Example** +```julia-repl +julia> optimizer_model_ready(model) +false +``` +""" +optimizer_model_ready(model::InfiniteModel)::Bool = model.ready_to_optimize + +""" + set_optimizer_model_ready(model::InfiniteModel, status::Bool) + +Set the status of the optimizer model to whether it is up to date or not. Note +is more intended as an internal function, but is useful for extensions. + +**Example** +```julia-repl +julia> set_optimizer_model_ready(model, true) + +julia> optimizer_model_ready(model) +true +``` +""" +function set_optimizer_model_ready(model::InfiniteModel, status::Bool) + model.ready_to_optimize = status + return +end + +""" + add_infinite_model_optimizer(opt_model::JuMP.Model, inf_model::InfiniteModel) + +Parse the current optimizer and its attributes associated with `inf_model` and load +them into `opt_model`. This is intended to be used as an internal method +for [`set_optimizer_model`](@ref). +""" +function add_infinite_model_optimizer(opt_model::JuMP.Model, + inf_model::InfiniteModel) + if !isa(inf_model.optimizer_constructor, Nothing) + bridge_constrs = JuMP.bridge_constraints(inf_model) + JuMP.set_optimizer(opt_model, inf_model.optimizer_constructor, + add_bridges = bridge_constrs) + end + # parse the attributes (this is a hacky workaround) + for (attr, val) in JuMP.backend(inf_model).model_cache.optattr + MOI.set(opt_model, attr, val) + end + return +end + +""" + set_optimizer_model(inf_model::InfiniteModel, opt_model::JuMP.Model; + inherit_optimizer::Bool = true) + +Specify the JuMP model that is used to solve `inf_model`. This is intended for +internal use and extensions. Note that `opt_model` should contain extension +data to allow it to map to `inf_model` in a manner similar to +[`TranscriptionModel`](@ref). `inherit_optimizer` indicates whether +[`add_infinite_model_optimizer`](@ref) should be invoked on the new optimizer +mode to inherit the optimizer constuctor and attributes currently stored in +`inf_model`. + +**Example** +```julia-repl +julia> set_optimizer_model(model, TranscriptionModel()) + +julia> optimizer_model(model) +A JuMP Model +Feasibility problem with: +Variables: 0 +Model mode: AUTOMATIC +CachingOptimizer state: NO_OPTIMIZER +Solver name: No optimizer attached. +``` +""" +function set_optimizer_model( + inf_model::InfiniteModel, + opt_model::JuMP.Model; + inherit_optimizer::Bool = true + ) + if inherit_optimizer + add_infinite_model_optimizer(opt_model, inf_model) + end + inf_model.optimizer_model = opt_model + set_optimizer_model_ready(inf_model, false) + return +end + +""" + optimizer_model_key(model::JuMP.Model)::Any + +Return the extension key used in the optimizer model `model`. Errors if +`model.ext` contains more than one key. This is intended for internal +use and extensions. For extensions this is used to dispatch to the appropriate +optmizer model functions such as extensions to [`build_optimizer_model!`](@ref). +This is intended as an internal method. 
See +[`optimizer_model_key`](@ref optimizer_model_key(::InfiniteModel)) +for the public method +""" +function optimizer_model_key(model::JuMP.Model) + if length(model.ext) != 1 + error("Optimizer models should have 1 and only 1 extension key of the " * + "form `Model.ext[:my_ext_key] = MyExtData`.") + end + return first(keys(model.ext)) +end + +""" + optimizer_model_key(model::InfiniteModel)::Any + +Return the extension key used in the optimizer model of `model`. Errors if +`optimizer_model.ext` contains more than one key. This is intended for internal +use and extensions. For extensions this is used to dispatch to the appropriate +optmizer model functions such as extensions to [`build_optimizer_model!`](@ref). + +**Example** +```julia-repl +julia> optimizer_model_key(model) +:TransData +``` +""" +function optimizer_model_key(model::InfiniteModel)::Any + return optimizer_model_key(optimizer_model(model)) +end + +################################################################################ +# OPTIMIZER METHOD EXTENSIONS +################################################################################ +""" + JuMP.set_optimizer(model::InfiniteModel, + [optimizer_constructor; + add_bridges::Bool = true]) + +Extend `JuMP.set_optimizer` to set optimizer of infinite models. +Specifically, the optimizer of the optimizer model is modified. + +**Example** +```julia-repl +julia> set_optimizer(model, Clp.Optimizer) + +julia> optimizer_model(model) +A JuMP Model +Feasibility problem with: +Variables: 0 +Model mode: AUTOMATIC +CachingOptimizer state: EMPTY_OPTIMIZER +Solver name: SolverName() attribute not implemented by the optimizer. +``` +""" +function JuMP.set_optimizer( + model::InfiniteModel, + optimizer_constructor; + add_bridges::Bool = true + ) + JuMP.set_optimizer(optimizer_model(model), optimizer_constructor, + add_bridges = add_bridges) + _set_optimizer_constructor(model, optimizer_constructor) + return +end + +""" + JuMP.set_silent(model::InfiniteModel) + +Extend `JuMP.set_silent` for infinite models to take precedence over any other +attribute controlling verbosity and requires the solver to produce no output. + +**Example** +```julia-repl +julia> set_silent(model) +true +``` +""" +function JuMP.set_silent(model::InfiniteModel) + return JuMP.set_silent(optimizer_model(model)) +end + +""" + JuMP.unset_silent(model::InfiniteModel) + +Extend `JuMP.unset_silent` for infinite models to neutralize the effect of the +`set_silent` function and let the solver attributes control the verbosity. + +**Example** +```julia-repl +julia> unset_silent(model) +false +``` +""" +function JuMP.unset_silent(model::InfiniteModel) + return JuMP.unset_silent(optimizer_model(model)) +end + +""" + JuMP.set_time_limit_sec(model::InfiniteModel, limit) + +Extend `set_time_limit_sec` to set the time limit (in seconds) of the solver. +Can be unset using `unset_time_limit_sec` or with `limit` set to `nothing`. + +**Example** +```julia-repl +julia> set_time_limit_sec(model, 100) +100 +``` +""" +function JuMP.set_time_limit_sec(model::InfiniteModel, limit) + return JuMP.set_time_limit_sec(optimizer_model(model), limit) +end + +""" + JuMP.unset_time_limit_sec(model::InfiniteModel) + +Extend `unset_time_limit_sec` to unset the time limit of the solver. Can be set +using `set_time_limit_sec`. 
+ +**Example** +```julia-repl +julia> unset_time_limit_sec(model) +``` +""" +function JuMP.unset_time_limit_sec(model::InfiniteModel) + return JuMP.unset_time_limit_sec(optimizer_model(model)) +end + +""" + JuMP.time_limit_sec(model::InfiniteModel) + +Extend `time_limit_sec` to get the time limit (in seconds) of the solve used by +the optimizer model (`nothing` if unset). Can be set using `set_time_limit_sec`. + +**Example** +```julia-repl +julia> time_limit_sec(model) +100 +``` +""" +function JuMP.time_limit_sec(model::InfiniteModel) + return JuMP.time_limit_sec(optimizer_model(model)) +end + +""" + JuMP.set_optimizer_attribute(model::InfiniteModel, name::String, value) + +Extend `set_optimizer_attribute` to specify a solver-specific attribute +identified by `name` to `value`. + +**Example** +```julia-repl +julia> set_optimizer_attribute(model, "SolverSpecificAttributeName", true) +true +``` +""" +function JuMP.set_optimizer_attribute(model::InfiniteModel, name::String, value) + return JuMP.set_optimizer_attribute(optimizer_model(model), name, value) +end + +""" + JuMP.set_optimizer_attribute(model::InfiniteModel, + attr::MOI.AbstractOptimizerAttribute, + value) + +Extend `set_optimizer_attribute` to set the solver-specific attribute `attr` in +`model` to `value`. + +**Example** +```julia-repl +julia> set_optimizer_attribute(model, MOI.Silent(), true) +true +``` +""" +function JuMP.set_optimizer_attribute( + model::InfiniteModel, + attr::MOI.AbstractOptimizerAttribute, + value + ) + return MOI.set(optimizer_model(model), attr, value) +end + +""" + JuMP.set_optimizer_attributes(model::InfiniteModel, pairs::Pair...) + +Extend `set_optimizer_attributes` to set multiple solver attributes given a +list of `attribute => value` pairs. Calls +`set_optimizer_attribute(model, attribute, value)` for each pair. + +**Example** +```julia-repl +julia> model = Model(Ipopt.Optimizer); + +julia> set_optimizer_attributes(model, "tol" => 1e-4, "max_iter" => 100) +``` +is equivalent to: +```julia-repl +julia> set_optimizer_attribute(model, "tol", 1e-4); + +julia> set_optimizer_attribute(model, "max_iter", 100); +``` +""" +function JuMP.set_optimizer_attributes(model::InfiniteModel, pairs::Pair...) + for (name, value) in pairs + JuMP.set_optimizer_attribute(model, name, value) + end + return +end + +""" + JuMP.get_optimizer_attribute(model::InfiniteModel, name::String) + +Extend `get_optimizer_attribute` to return the value associated with the +solver-specific attribute named `name`. + +**Example** +```julia-repl +julia> get_optimizer_attribute(model, "tol") +0.0001 +```` +""" +function JuMP.get_optimizer_attribute(model::InfiniteModel, name::String) + return JuMP.get_optimizer_attribute(optimizer_model(model), name) +end + +""" + JuMP.get_optimizer_attribute(model::InfiniteModel, + attr::MOI.AbstractOptimizerAttribute) + +Extend `get_optimizer_attribute` to return the value of the solver-specific +attribute `attr` in `model`. + +**Example** +```julia-repl +julia> get_optimizer_attribute(model, MOI.Silent()) +true +```` +""" +function JuMP.get_optimizer_attribute( + model::InfiniteModel, + attr::MOI.AbstractOptimizerAttribute + ) + return MOI.get(optimizer_model(model), attr) +end + +""" + JuMP.solver_name(model::InfiniteModel) + +Extend `solver_name` to return the name of the solver being used if there is an +optimizer selected and it has a name attribute. Otherwise, an error is thrown. 
+ +**Example** +```julia-repl +julia> solver_name(model) +"Gurobi" +``` +""" +function JuMP.solver_name(model::InfiniteModel) + return JuMP.solver_name(optimizer_model(model)) +end + +""" + JuMP.backend(model::InfiniteModel) + +Extend `backend` to return the `MathOptInterface` backend associated with the +optimizer model. Note this will be empty if the optimizer model has not been +build yet. + +**Example** +```julia-repl +julia> moi_model = backend(model); +``` +""" +function JuMP.backend(model::InfiniteModel) + return JuMP.backend(optimizer_model(model)) +end + +""" + JuMP.mode(model::InfiniteModel) + +Extend `mode` to return the `MathOptInterface` mode the optimizer model is in. + +**Example** +```julia-repl +julia> mode(model) +AUTOMATIC::ModelMode = 0 +``` +""" +function JuMP.mode(model::InfiniteModel) + return JuMP.mode(optimizer_model(model)) +end + +""" + JuMP.result_count(model::InfiniteModel) + +Extend `result_count` to return the number of results available to query after a +call to `optimize!`. + +**Example** +```julia-repla +julia> result_count(model) +1 +``` +""" +function JuMP.result_count(model::InfiniteModel)::Int + return MOI.get(optimizer_model(model), MOI.ResultCount()) +end + +################################################################################ +# OPTIMIZER MODEL BUILD METHODS +################################################################################ +""" + build_optimizer_model!(model::InfiniteModel, key::Val{ext_key_name}; + [kwargs...]) + +Build the optimizer model stored in `model` such that it can be +treated as a normal JuMP model, where the `Model.ext` field contains a key +that points to a datastructure that appropriately maps the data between the +two models. The key argument should be be typed to `Val{ext_key_name}`. This +should also use [`clear_optimizer_model_build!`](@ref) to empty the out the current +optimizer model. Ultimately, [`set_optimizer_model`](@ref) should be called +to insert the build optimizer model into `model` and [`set_optimizer_model_ready`](@ref) +should be used to update the optimizer model's status. +""" +function build_optimizer_model! end + +""" + clear_optimizer_model_build!(model::JuMP.Model)::JuMP.Model + +Empty the optimizer model using appropriate calls of `Base.empty!`. This +effectively resets `model` except the optimizer, its attributes, and an an emptied +optimizer model data struct are maintained. This is intended as an internal +method for use by [`build_optimizer_model!`](@ref). +""" +function clear_optimizer_model_build!(model::JuMP.Model) + key = optimizer_model_key(model) + data_type = typeof(model.ext[key]) + empty!(model) + model.ext[key] = data_type() + model.operator_counter = 0 + return model +end + +""" + clear_optimizer_model_build!(model::InfiniteModel)::JuMP.Model + +Empty the optimizer model using appropriate calls of `Base.empty!`. This +effectively resets `model.optimizer_model` except the optimizer, its attributes, +and an an emptied optimizer model data struct are maintained. This is intended +as an internal method for use by [`build_optimizer_model!`](@ref). +""" +function clear_optimizer_model_build!(model::InfiniteModel)::JuMP.Model + return clear_optimizer_model_build!(optimizer_model(model)) +end + +""" + build_optimizer_model!(model::InfiniteModel; [kwargs...]) + +Build the optimizer model stored in `model` such that it can be +treated as a normal JuMP model. 
Specifically, translate the variables and +constraints stored in `model` into ones that are stored in the optimizer model +and can be solved. This is provided generally to accomodate extensions that use +custom optimizer model types in accordance with [`optimizer_model_key`](@ref). +However, it may be useful in certain applications when the user desires to +force a build without calling `optimize!`. +Extensions will need to implement their own version of the function +`build_optimizer_model!(model::InfiniteModel, key::Val{ext_key_name}; kwargs...)`. + +**Example** +```julia-repl +julia> build_optimizer_model!(model) + +julia> optimizer_model_ready(model) +true +``` +""" +function build_optimizer_model!(model::InfiniteModel; kwargs...) + if num_parameters(model, InfiniteParameter) == 0 + @warn("Finite models (i.e., `InfiniteModel`s with no infinite " * + "parameters) should be modeled directly via a `Model` in JuMP.jl.") + end + key = optimizer_model_key(model) + build_optimizer_model!(model, Val(key); kwargs...) + return +end + +################################################################################ +# OPTIMIZER MODEL MAPPING METHODS (VARIABLES) +################################################################################ +""" + optimizer_model_variable(vref::GeneralVariableRef, key::Val{ext_key_name}; + [kwargs...]) + +Return the reformulation variable(s) stored in the optimizer model that correspond +to `vref`. This needs to be defined for extensions that implement a custom +optimizer model type. Principally, this is accomplished by typed the `key` +argument to `Val{ext_key_name}`. Keyword arguments can be added as needed. +""" +function optimizer_model_variable end + +# Fallback for unextended keys +function optimizer_model_variable(vref::GeneralVariableRef, key; kwargs...) + error("`optimizer_model_variable` not implemented for optimizer model " * + "key `$(typeof(key).parameters[1])`.") +end + +""" + optimizer_model_variable(vref::GeneralVariableRef; + [label::Type{<:AbstractSupportLabel} = PublicLabel, + ndarray::Bool = false, + kwargs...]) + +Return the reformulation variable(s) stored in the optimizer model that correspond +to `vref`. Also errors if no such variable can be found in +the optimizer model. + +The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ +and `kwargs` denote extra ones that user extensions may employ in accordance with +their implementation of [`optimizer_model_variable`](@ref). Errors if such an +extension has not been written. + +By default only the variables associated with public supports are returned, the +full set can be accessed via `label = All`. Moreover, infinite variables are +returned as a list corresponding to their supports. However, a n-dimensional array +can be obtained via `ndarray = true` which is handy when the variable has multiple +infinite parameter dependencies. The corresponding supports are obtained via +`supports` using the same keyword arguments. + +**Example** +```julia-repl +julia> optimizer_model_variable(x) # infinite variable +2-element Array{VariableRef,1}: + x(support: 1) + x(support: 2) + +julia> optimizer_model_variable(z) # finite variable +z +``` +""" +function optimizer_model_variable(vref::GeneralVariableRef; kwargs...) + key = optimizer_model_key(JuMP.owner_model(vref)) + return optimizer_model_variable(vref, Val(key); kwargs...) 
+end
+
+"""
+    variable_supports(optimizer_model::JuMP.Model, vref,
+                      key::Val{ext_key_name};
+                      [kwargs...])::Vector
+
+Return the supports associated with the mappings of `vref` in `optimizer_model`.
+This dispatches off of `key` which permits optimizer model extensions. This
+should throw an error if `vref` is not associated with the variable mappings
+stored in `optimizer_model`. Keyword arguments can be added as needed. Note that
+no extension is necessary for point or finite variables.
+"""
+function variable_supports end
+
+# fallback for unextended keys
+function variable_supports(optimizer_model::JuMP.Model, vref, key; kwargs...)
+    error("`variable_supports` not implemented for optimizer model key " *
+          "`$(typeof(key).parameters[1])` and/or variable type $(typeof(vref)).")
+end
+
+# FiniteRef
+function variable_supports(optimizer_model::JuMP.Model, vref::FiniteRef,
+                           key; kwargs...)
+    return ()
+end
+
+"""
+    supports(vref::DecisionVariableRef;
+             [label::Type{<:AbstractSupportLabel} = PublicLabel,
+             ndarray::Bool = false,
+             kwargs...])
+
+Return the supports associated with `vref` in the optimizer
+model. Errors if [`InfiniteOpt.variable_supports`](@ref) has not been extended for the
+optimizer model type or if `vref` is not reformulated in the optimizer model.
+
+The keyword arguments `label` and `ndarray` are what `TranscriptionOpt` employs
+and `kwargs` denote extra ones that user extensions may employ in accordance with
+their implementation of `variable_supports`. Errors if such an
+extension has not been written.
+
+By default only the public supports are returned; the
+full set can be accessed via `label = All`. Moreover, the supports of infinite
+variables are returned as a list. However, an n-dimensional array
+can be obtained via `ndarray = true` which is handy when the variable has multiple
+infinite parameter dependencies.
+
+**Example**
+```julia-repl
+julia> supports(vref)
+2-element Array{Tuple{Float64},1}:
+ (0.0,)
+ (1.0,)
+```
+"""
+function supports(
+    vref::Union{DecisionVariableRef, MeasureRef, ParameterFunctionRef};
+    kwargs...
+    )
+    model = optimizer_model(JuMP.owner_model(vref))
+    key = optimizer_model_key(JuMP.owner_model(vref))
+    return variable_supports(model, vref, Val(key); kwargs...)
+end
+
+################################################################################
+# OPTIMIZER MODEL MAPPING METHODS (EXPRESSIONS)
+################################################################################
+"""
+    optimizer_model_expression(expr, key::Val{ext_key_name}; [kwargs...])
+
+Return the reformulation expression(s) stored in the optimizer model that correspond
+to `expr`. This needs to be defined for extensions that implement a custom
+optimizer model type. Principally, this is accomplished by typing the `key`
+argument to `Val{ext_key_name}`. Keyword arguments can be added as needed.
+Note that if `expr` is a `GeneralVariableRef` this just dispatches to
+`optimizer_model_variable`.
+"""
+function optimizer_model_expression end
+
+# Fallback for unextended keys
+function optimizer_model_expression(expr, key; kwargs...)
+    error("`optimizer_model_expression` not defined for optimizer model " *
+          "key `$(typeof(key).parameters[1])` and expression type " *
+          "`$(typeof(expr))`.")
+end
+
+# Define for variable reference expressions
+function optimizer_model_expression(expr::GeneralVariableRef, key; kwargs...)
+    return optimizer_model_variable(expr, key; kwargs...)
+end
+
+"""
+    optimizer_model_expression(expr::JuMP.AbstractJuMPScalar;
+                               [label::Type{<:AbstractSupportLabel} = PublicLabel,
+                               ndarray::Bool = false,
+                               kwargs...])
+
+Return the reformulation expression(s) stored in the optimizer model that correspond
+to `expr`. Also errors if no such expression can be found in
+the optimizer model (meaning one or more of the underlying variables have not
+been transcribed).
+
+The keyword arguments `label` and `ndarray` are what `TranscriptionOpt` employs
+and `kwargs` denote extra ones that user extensions may employ in accordance with
+their implementation of [`optimizer_model_expression`](@ref). Errors if such an
+extension has not been written.
+
+By default only the expressions associated with public supports are returned; the
+full set can be accessed via `label = All`. Moreover, infinite expressions are
+returned as a list corresponding to their supports. However, an n-dimensional array
+can be obtained via `ndarray = true` which is handy when the expression has multiple
+infinite parameter dependencies. The corresponding supports are obtained via
+`supports` using the same keyword arguments.
+
+**Example**
+```julia-repl
+julia> optimizer_model_expression(my_expr) # finite expression
+x(support: 1) - y
+```
+"""
+function optimizer_model_expression(expr::JuMP.AbstractJuMPScalar; kwargs...)
+    model = JuMP.owner_model(expr)
+    if isnothing(model)
+        return zero(JuMP.AffExpr) + JuMP.constant(expr)
+    else
+        key = optimizer_model_key(model)
+        return optimizer_model_expression(expr, Val(key); kwargs...)
+    end
+end
+
+"""
+    expression_supports(optimizer_model::JuMP.Model, expr,
+                        key::Val{ext_key_name}; [kwargs...])
+
+Return the supports associated with the mappings of `expr` in `optimizer_model`.
+This dispatches off of `key` which permits optimizer model extensions. This
+should throw an error if `expr` is not associated with the variable mappings
+stored in `optimizer_model`. Keyword arguments can be added as needed. Note that
+if `expr` is a `GeneralVariableRef` this just dispatches to `variable_supports`.
+"""
+function expression_supports end
+
+# fallback for unextended keys
+function expression_supports(optimizer_model::JuMP.Model, expr, key; kwargs...)
+    error("`expression_supports` not implemented for optimizer model key " *
+          "`$(typeof(key).parameters[1])` and/or expressions of type " *
+          "`$(typeof(expr))`.")
+end
+
+# Variable reference expressions
+function expression_supports(model::JuMP.Model, vref::GeneralVariableRef, key;
+                             kwargs...)
+    return variable_supports(model, dispatch_variable_ref(vref), key; kwargs...)
+end
+
+"""
+    supports(expr::JuMP.AbstractJuMPScalar;
+             [label::Type{<:AbstractSupportLabel} = PublicLabel,
+             ndarray::Bool = false,
+             kwargs...])
+
+Return the supports associated with `expr`. Errors if `expr` is
+not associated with the expression mappings stored in `optimizer_model`.
+
+The keyword arguments `label` and `ndarray` are what `TranscriptionOpt` employs
+and `kwargs` denote extra ones that user extensions may employ in accordance with
+their implementation of `expression_supports`. Errors if such an
+extension has not been written.
+
+By default only the public supports are returned; the
+full set can be accessed via `label = All`. Moreover, the supports of infinite
+expressions are returned as a list. However, an n-dimensional array
+can be obtained via `ndarray = true` which is handy when the expression has multiple
+infinite parameter dependencies.
+
+**Example**
+```julia-repl
+julia> supports(cref)
+2-element Array{Tuple{Float64},1}:
+ (0.0,)
+ (1.0,)
+```
+"""
+function supports(expr::JuMP.AbstractJuMPScalar; kwargs...)
+    model = JuMP.owner_model(expr)
+    if isnothing(model)
+        return ()
+    else
+        key = optimizer_model_key(model)
+        opt_model = optimizer_model(model)
+        return expression_supports(opt_model, expr, Val(key); kwargs...)
+    end
+end
+
+################################################################################
+# OPTIMIZER MODEL MAPPING METHODS (CONSTRAINTS)
+################################################################################
+"""
+    optimizer_model_constraint(cref::InfOptConstraintRef,
+                               key::Val{ext_key_name}; [kwargs...])
+
+Return the reformulation constraint(s) stored in the optimizer model that correspond
+to `cref`. This needs to be defined for extensions that implement a custom
+optimizer model type. Principally, this is accomplished by typing the `key`
+argument to `Val{ext_key_name}`. Keyword arguments can be added as needed.
+"""
+function optimizer_model_constraint end
+
+# Fallback for unextended keys
+function optimizer_model_constraint(
+    cref::InfOptConstraintRef,
+    key;
+    kwargs...
+    )
+    error("`optimizer_model_constraint` not implemented for optimizer model " *
+          "key `$(typeof(key).parameters[1])`.")
+end
+
+"""
+    optimizer_model_constraint(cref::InfOptConstraintRef;
+                               [label::Type{<:AbstractSupportLabel} = PublicLabel,
+                               ndarray::Bool = false,
+                               kwargs...])
+
+Return the reformulation constraint(s) stored in the optimizer model that correspond
+to `cref`. Errors if no such constraint can be found in
+the optimizer model.
+
+The keyword arguments `label` and `ndarray` are what `TranscriptionOpt` employs
+and `kwargs` denote extra ones that user extensions may employ in accordance with
+their implementation of [`optimizer_model_constraint`](@ref). Errors if such an
+extension has not been written.
+
+By default only the constraints associated with public supports are returned; the
+full set can be accessed via `label = All`. Moreover, infinite constraints are
+returned as a list corresponding to their supports. However, an n-dimensional array
+can be obtained via `ndarray = true` which is handy when the constraint has multiple
+infinite parameter dependencies. The corresponding supports are obtained via
+`supports` using the same keyword arguments.
+
+**Example**
+```julia-repl
+julia> optimizer_model_constraint(c1) # finite constraint
+c1 : x(support: 1) - y <= 3.0
+```
+"""
+function optimizer_model_constraint(
+    cref::InfOptConstraintRef;
+    kwargs...
+    )
+    key = optimizer_model_key(JuMP.owner_model(cref))
+    return optimizer_model_constraint(cref, Val(key); kwargs...)
+end
+
+"""
+    constraint_supports(optimizer_model::JuMP.Model,
+                        cref::InfOptConstraintRef,
+                        key::Val{ext_key_name}; [kwargs...])
+
+Return the supports associated with the mappings of `cref` in `optimizer_model`.
+This dispatches off of `key` which permits optimizer model extensions. This
+should throw an error if `cref` is not associated with the variable mappings
+stored in `optimizer_model`. Keyword arguments can be added as needed.
+"""
+function constraint_supports end
+
+# fallback for unextended keys
+function constraint_supports(optimizer_model::JuMP.Model,
+                             cref::InfOptConstraintRef,
+                             key; kwargs...)
+    error("`constraint_supports` not implemented for optimizer model key " *
+          "`$(typeof(key).parameters[1])`.")
+end
+
+"""
+    supports(cref::InfOptConstraintRef;
+             [label::Type{<:AbstractSupportLabel} = PublicLabel,
+             ndarray::Bool = false,
+             kwargs...])
+
+Return the supports associated with `cref`. Errors if `cref` is
+not associated with the constraint mappings stored in `optimizer_model`.
+
+The keyword arguments `label` and `ndarray` are what `TranscriptionOpt` employs
+and `kwargs` denote extra ones that user extensions may employ in accordance with
+their implementation of `constraint_supports`. Errors if such an
+extension has not been written.
+
+By default only the public supports are returned; the
+full set can be accessed via `label = All`. Moreover, the supports of infinite
+constraints are returned as a list. However, an n-dimensional array
+can be obtained via `ndarray = true` which is handy when the constraint has multiple
+infinite parameter dependencies.
+
+**Example**
+```julia-repl
+julia> supports(cref)
+2-element Array{Tuple{Float64},1}:
+ (0.0,)
+ (1.0,)
+```
+"""
+function supports(cref::InfOptConstraintRef; kwargs...)
+    model = optimizer_model(JuMP.owner_model(cref))
+    key = optimizer_model_key(JuMP.owner_model(cref))
+    return constraint_supports(model, cref, Val(key); kwargs...)
+end
+
+################################################################################
+# OPTIMIZATION METHODS
+################################################################################
+"""
+    JuMP.set_optimize_hook(
+        model::InfiniteModel,
+        hook::Union{Function, Nothing}
+        )::Nothing
+
+Set the function `hook` as the optimize hook for `model` where `hook` should
+be of the form `hook(model::GenericModel; hook_specific_kwargs..., kwargs...)`.
+The `kwargs` are those passed to [`optimize!`](@ref). The `hook_specific_kwargs`
+are passed as additional keywords by the user when they call [`optimize!`](@ref).
+
+## Notes
+
+* The optimize hook should generally modify the model or some external state
+in some way, and then call `optimize!(model; ignore_optimize_hook = true)` to
+optimize the problem, bypassing the hook.
+* Use `set_optimize_hook(model, nothing)` to unset an optimize hook.
+"""
+function JuMP.set_optimize_hook(
+    model::InfiniteModel,
+    hook::Union{Function, Nothing}
+    )
+    model.optimize_hook = hook
+    set_optimizer_model_ready(model, false)
+    return
+end
+
+"""
+    JuMP.optimize!(model::InfiniteModel; [kwargs...])
+
+Extend `JuMP.optimize!` to optimize infinite models using the internal
+optimizer model. Calls [`build_optimizer_model!`](@ref) if the optimizer
+model isn't up to date. The `kwargs` correspond to keyword arguments passed to
+[`build_optimizer_model!`](@ref) if any are defined. The `kwargs` can also
+include arguments that are passed to an optimize hook if one was set with
+[`JuMP.set_optimize_hook`](@ref).
+
+**Example**
+```julia-repl
+julia> optimize!(model)
+
+julia> has_values(model)
+true
+```
+"""
+function JuMP.optimize!(
+    model::InfiniteModel;
+    ignore_optimize_hook = isnothing(model.optimize_hook),
+    kwargs...)
+    if !ignore_optimize_hook
+        return model.optimize_hook(model; kwargs...)
+    end
+    if !optimizer_model_ready(model)
+        build_optimizer_model!(model; kwargs...)
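+        # the optimizer model now reflects the current state of `model`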
+ end + JuMP.optimize!(optimizer_model(model)) + return +end From 3f8286124829c5c8a024750c04053276b7e9da93 Mon Sep 17 00:00:00 2001 From: pulsipher Date: Fri, 28 Jun 2024 17:52:21 -0400 Subject: [PATCH 2/8] More progress --- docs/src/develop/extensions.md | 4 +- docs/src/guide/optimize.md | 2 +- docs/src/manual/optimize.md | 4 +- src/InfiniteOpt.jl | 2 +- src/TranscriptionOpt/TranscriptionOpt.jl | 7 +- src/TranscriptionOpt/measures.jl | 49 +- src/TranscriptionOpt/model.jl | 656 ++++++++------ src/TranscriptionOpt/optimize.jl | 44 - src/TranscriptionOpt/transcribe.jl | 495 ++++++----- src/array_parameters.jl | 12 +- src/backends.jl | 899 +++++++++++++++++++ src/constraints.jl | 12 +- src/datatypes.jl | 127 +-- src/derivative_evaluations.jl | 39 +- src/derivatives.jl | 4 +- src/expressions.jl | 2 +- src/infinite_variables.jl | 4 +- src/measure_expansions.jl | 296 ++++--- src/measures.jl | 10 +- src/objective.jl | 6 +- src/optimize.jl | 204 ----- src/optimize_old.jl | 963 --------------------- src/point_variables.jl | 2 +- src/results.jl | 845 +++++++++--------- src/scalar_parameters.jl | 20 +- src/semi_infinite_variables.jl | 16 +- src/show.jl | 137 +-- src/variable_basics.jl | 10 +- test/TranscriptionOpt/measure.jl | 43 +- test/TranscriptionOpt/model.jl | 521 +++++------ test/TranscriptionOpt/optimize.jl | 43 - test/TranscriptionOpt/transcribe.jl | 489 ++++++----- test/array_parameters.jl | 4 +- test/{optimizer.jl => backend_mappings.jl} | 141 ++- test/backend_setup.jl | 122 +++ test/constraints.jl | 4 +- test/datatypes.jl | 13 +- test/derivatives.jl | 6 +- test/extensions.jl | 2 +- test/finite_variables.jl | 2 +- test/infinite_variables.jl | 4 +- test/measure_expansions.jl | 22 +- test/objective.jl | 4 +- test/optimizer_setup.jl | 174 ---- test/point_variables.jl | 4 +- test/results.jl | 2 +- test/runtests.jl | 9 +- test/show.jl | 117 ++- test/utilities.jl | 12 + test/variable_info.jl | 30 +- 50 files changed, 3188 insertions(+), 3451 deletions(-) delete mode 100644 src/TranscriptionOpt/optimize.jl create mode 100644 src/backends.jl delete mode 100644 src/optimize.jl delete mode 100644 src/optimize_old.jl delete mode 100644 test/TranscriptionOpt/optimize.jl rename test/{optimizer.jl => backend_mappings.jl} (57%) create mode 100644 test/backend_setup.jl delete mode 100644 test/optimizer_setup.jl diff --git a/docs/src/develop/extensions.md b/docs/src/develop/extensions.md index e295d3bd..16e9a715 100644 --- a/docs/src/develop/extensions.md +++ b/docs/src/develop/extensions.md @@ -938,7 +938,7 @@ Now let's extend [`build_optimizer_model!`](@ref) for `DeterministicModel`s. Such extensions should build an optimizer model in place and in general should employ the following: - [`clear_optimizer_model_build!`](@ref InfiniteOpt.clear_optimizer_model_build!(::InfiniteModel)) -- [`set_optimizer_model_ready`](@ref). +- [`set_transformation_backend_ready`](@ref). 
In place builds without the use of `clear_optimizer_model_build!` are also possible, but will require some sort of active mapping scheme to update in accordance with the `InfiniteModel` in the case that the @@ -989,7 +989,7 @@ function InfiniteOpt.build_optimizer_model!( end # update the status - set_optimizer_model_ready(model, true) + set_transformation_backend_ready(model, true) return end diff --git a/docs/src/guide/optimize.md b/docs/src/guide/optimize.md index f8abeb64..3305f312 100644 --- a/docs/src/guide/optimize.md +++ b/docs/src/guide/optimize.md @@ -90,7 +90,7 @@ steps: Here `build_optimizer_model!` creates a reformulated finite version of the `InfiniteModel`, stores it in `InfiniteModel.optimizer_model` via [`set_optimizer_model`](@ref), and indicates that the optimizer model is ready -via [`set_optimizer_model_ready`](@ref). These steps are all automated when +via [`set_transformation_backend_ready`](@ref). These steps are all automated when [`optimize!`](@ref JuMP.optimize!(::InfiniteModel)) is invoked on the `InfiniteModel`. diff --git a/docs/src/manual/optimize.md b/docs/src/manual/optimize.md index 5cf33b52..2d35f853 100644 --- a/docs/src/manual/optimize.md +++ b/docs/src/manual/optimize.md @@ -54,6 +54,6 @@ InfiniteOpt.optimizer_model_constraint(::InfOptConstraintRef) optimizer_model_constraint supports(::InfOptConstraintRef) InfiniteOpt.constraint_supports -optimizer_model_ready -set_optimizer_model_ready +transformation_backend_ready +set_transformation_backend_ready ``` diff --git a/src/InfiniteOpt.jl b/src/InfiniteOpt.jl index c70f9de0..49d29e99 100644 --- a/src/InfiniteOpt.jl +++ b/src/InfiniteOpt.jl @@ -46,7 +46,7 @@ include("constraints.jl") include("objective.jl") include("measure_expansions.jl") include("derivative_evaluations.jl") -include("optimize.jl") +include("backends.jl") include("results.jl") include("show.jl") include("utilities.jl") diff --git a/src/TranscriptionOpt/TranscriptionOpt.jl b/src/TranscriptionOpt/TranscriptionOpt.jl index 16b27109..9f335778 100644 --- a/src/TranscriptionOpt/TranscriptionOpt.jl +++ b/src/TranscriptionOpt/TranscriptionOpt.jl @@ -6,13 +6,8 @@ using ..InfiniteOpt include("model.jl") include("measures.jl") include("transcribe.jl") -include("optimize.jl") # Export transcription datatypes -export TranscriptionData, TranscriptionModel - -# Export transcription methods -export is_transcription_model, transcription_data, transcription_variable, -transcription_constraint, transcription_model, transcription_expression +export TranscriptionBackend end # end module diff --git a/src/TranscriptionOpt/measures.jl b/src/TranscriptionOpt/measures.jl index 8c4a1d3c..9fcf3dd1 100644 --- a/src/TranscriptionOpt/measures.jl +++ b/src/TranscriptionOpt/measures.jl @@ -1,22 +1,22 @@ """ - InfiniteOpt.add_point_variable(model::JuMP.Model, - var::InfiniteOpt.PointVariable, - key::Val{:TransData} - )::InfiniteOpt.GeneralVariableRef + InfiniteOpt.add_point_variable( + backend::TranscriptionBackend, + var::InfiniteOpt.PointVariable, + support::Vector{Float64} + )::InfiniteOpt.GeneralVariableRef Make a `PointVariableRef` and map it to the appropriate transcription variable and return the `GeneralVariableRef`. This is an extension of -[`add_point_variable`](@ref InfiniteOpt.add_point_variable(::JuMP.Model,::Any,::Any, ::Any)) +[`add_point_variable`](@ref InfiniteOpt.add_point_variable(::InfiniteOpt.AbstractTransformationBackend,::Any,::Any)) for `TranscriptionOpt`. 
""" function InfiniteOpt.add_point_variable( - model::JuMP.Model, + backend::TranscriptionBackend, ivref::InfiniteOpt.GeneralVariableRef, support::Vector{Float64}, - ::Val{:TransData} - )::InfiniteOpt.GeneralVariableRef + ) # check if an internal variable was already created - data = transcription_data(model) + data = transcription_data(backend) internal_vref = get(data.point_lookup, (ivref, support), nothing) if !isnothing(internal_vref) return internal_vref @@ -30,9 +30,8 @@ function InfiniteOpt.add_point_variable( # make negative index to not conflict with the InfiniteModel raw_index = data.last_point_index -= 1 # make the reference and map it to a transcription variable - pvref = InfiniteOpt.GeneralVariableRef(JuMP.owner_model(ivref), raw_index, - InfiniteOpt.PointVariableIndex) - trans_var = lookup_by_support(model, ivref, support) + pvref = InfiniteOpt.GeneralVariableRef(inf_model, raw_index, InfiniteOpt.PointVariableIndex) + trans_var = lookup_by_support(ivref, backend, support) data.finvar_mappings[pvref] = trans_var data.point_lookup[(ivref, support)] = pvref return pvref @@ -40,26 +39,25 @@ function InfiniteOpt.add_point_variable( end """ - InfiniteOpt.add_semi_infinite_variable(model::JuMP.Model, - var::InfiniteOpt.SemiInfiniteVariable, - key::Val{:TransData} - )::InfiniteOpt.GeneralVariableRef + InfiniteOpt.add_semi_infinite_variable( + backend::TranscriptionBackend, + var::InfiniteOpt.SemiInfiniteVariable + )::InfiniteOpt.GeneralVariableRef Make a `SemiInfiniteVariableRef` and add `var` to the transcription data and return the `GeneralVariableRef`. This is an extension of -[`add_semi_infinite_variable`](@ref InfiniteOpt.add_semi_infinite_variable(::JuMP.Model,::Any,::Any)) +[`add_semi_infinite_variable`](@ref InfiniteOpt.add_semi_infinite_variable(::InfiniteOpt.AbstractTransformationBackend,::Any)) for `TranscriptionOpt`. Note that `internal_semi_infinite_variable` is also extended to be able to access the `var`. 
""" function InfiniteOpt.add_semi_infinite_variable( - model::JuMP.Model, - var::InfiniteOpt.SemiInfiniteVariable, - ::Val{:TransData} - )::InfiniteOpt.GeneralVariableRef + backend::TranscriptionBackend, + var::InfiniteOpt.SemiInfiniteVariable + ) # check if an internal variable was already created ivref = var.infinite_variable_ref eval_supps = var.eval_supports - data = transcription_data(model) + data = transcription_data(backend) internal_vref = get(data.semi_lookup, (ivref, eval_supps), nothing) if !isnothing(internal_vref) return internal_vref @@ -71,13 +69,12 @@ function InfiniteOpt.add_semi_infinite_variable( return InfiniteOpt._make_variable_ref(inf_model, inf_model_index) else # make negative index to not conflict with the InfiniteModel - semi_infinite_vars = transcription_data(model).semi_infinite_vars + semi_infinite_vars = data.semi_infinite_vars raw_index = -1 * (length(semi_infinite_vars) + 1) # make the reference and map it to a transcription variable - rvref = InfiniteOpt.GeneralVariableRef(JuMP.owner_model(ivref), raw_index, - InfiniteOpt.SemiInfiniteVariableIndex) + rvref = InfiniteOpt.GeneralVariableRef(inf_model, raw_index, InfiniteOpt.SemiInfiniteVariableIndex) push!(semi_infinite_vars, var) - _set_semi_infinite_variable_mapping(model, var, rvref, InfiniteOpt._index_type(ivref)) + _set_semi_infinite_variable_mapping(backend, var, rvref, InfiniteOpt._index_type(ivref)) data.semi_lookup[(ivref, eval_supps)] = rvref return rvref end diff --git a/src/TranscriptionOpt/model.jl b/src/TranscriptionOpt/model.jl index beba7201..88e219d9 100644 --- a/src/TranscriptionOpt/model.jl +++ b/src/TranscriptionOpt/model.jl @@ -1,14 +1,14 @@ ################################################################################ -# BASIC MODEL DEFINITION +# BASIC BACKEND DEFINITION ################################################################################ """ TranscriptionData A DataType for storing the data mapping an [`InfiniteOpt.InfiniteModel`](@ref) that has been transcribed to a regular `JuMP.Model` that contains the -transcribed variables. This is stored in the `ext` field of a `JuMP.Model` to -make what is called a `TranscriptionModel` via the [`TranscriptionModel`](@ref) -constructor. +transcribed variables. This is stored in the `data` field of +[`InfiniteOpt.JuMPBackend`](@ref) to make what is called a `TranscriptionBackend` +via the [`TranscriptionBackend`](@ref) constructor. **Fields** - `infvar_lookup::Dict{InfiniteOpt.GeneralVariableRef, Dict{Vector{Float64}, Int}}`: @@ -102,79 +102,123 @@ mutable struct TranscriptionData Dict{InfiniteOpt.InfOptConstraintRef, Vector{Vector{Float64}}}(), Dict{InfiniteOpt.InfOptConstraintRef, Vector{Set{DataType}}}(), # support storage - (), (), false) + (), + (), + false + ) end end +# Extend Base.empty! 
+function Base.empty!(data::TranscriptionData) + empty!(data.infvar_lookup) + empty!(data.infvar_mappings) + empty!(data.infvar_supports) + empty!(data.infvar_support_labels) + empty!(data.finvar_mappings) + empty!(data.semi_infinite_vars) + empty!(data.semi_lookup) + data.last_point_index = 0 + empty!(data.point_lookup) + empty!(data.measure_lookup) + empty!(data.measure_mappings) + empty!(data.measure_supports) + empty!(data.measure_support_labels) + empty!(data.constr_mappings) + empty!(data.constr_supports) + empty!(data.constr_support_labels) + data.supports = () + data.support_labels = () + data.has_internal_supports = false + return data +end + """ - TranscriptionModel([optimizer_constructor; - caching_mode::MOIU.CachingOptimizerMode = MOIU.AUTOMATIC, - bridge_constraints::Bool = true])::JuMP.Model + Transcription <: InfiniteOpt.AbstractJuMPTag -Return a `JuMP.Model` with [`TranscriptionData`](@ref) included in the -`ext` data field. Accepts the same arguments as a typical JuMP `Model`. -More detailed variable and constraint naming can be enabled via `verbose_naming`. +Dispatch tag needed for [`TranscriptionBackend`](@ref) to be based on +[`InfiniteOpt.JuMPBackend`](@ref). +""" +struct Transcription <: InfiniteOpt.AbstractJuMPTag end + +""" + TranscriptionBackend( + [optimizer_constructor]; + [add_bridges::Bool = true] + )::InfiniteOpt.JuMPBackend{Transcription} + +Return an `InfiniteOpt.JuMPBackend` that uses [`TranscriptionData`](@ref) +and the [`Transcription`](@ref) tag. Accepts the same arguments as a typical +`JuMP.Model`. More detailed variable and constraint naming can be enabled +via `verbose_naming`. **Example** ```julia-repl -julia> TranscriptionModel() -A JuMP Model -Feasibility problem with: -Variables: 0 -Model mode: AUTOMATIC -CachingOptimizer state: NO_OPTIMIZER -Solver name: No optimizer attached. +julia> backend = TranscriptionBackend(); ``` """ -function TranscriptionModel(; kwargs...) +const TranscriptionBackend = InfiniteOpt.JuMPBackend{Transcription, Float64, TranscriptionData} + +# Constructors +function TranscriptionBackend(; kwargs...) model = JuMP.Model(; kwargs...) - model.ext[:TransData] = TranscriptionData() - return model + return InfiniteOpt.JuMPBackend{Transcription}(model, TranscriptionData()) end -# Accept optimizer constructors -function TranscriptionModel(optimizer_constructor; - kwargs...) +function TranscriptionBackend(optimizer_constructor; kwargs...) model = JuMP.Model(optimizer_constructor; kwargs...) - model.ext[:TransData] = TranscriptionData() - return model + return InfiniteOpt.JuMPBackend{Transcription}(model, TranscriptionData()) end -################################################################################ -# BASIC QUERIES -################################################################################ -""" - is_transcription_model(model::JuMP.Model)::Bool - -Return true if `model` is a `TranscriptionModel` or false otherwise. 
+# Printing +function JuMP.show_backend_summary( + io::IO, + model::InfiniteOpt.InfiniteModel, + backend::TranscriptionBackend + ) + println(io, " Backend type: TranscriptionBackend") + # reformulation information + data = transcription_data(backend) + supp_tuple = data.supports + obj_idxs = InfiniteOpt._param_object_indices(model) + for (i, supps) in enumerate(supp_tuple) + # support info + pref_group = InfiniteOpt._make_param_tuple_element(model, obj_idxs[i]) + param_name = InfiniteOpt._get_param_group_name(pref_group) + println(io, " `", param_name, "` transcribed over ", length(supps) - 1, " supports") + # TODO add approximation method info (requires InfiniteOpt refactoring) + end + # solver name + moi_summary = sprint(JuMP.show_backend_summary, backend.model) + solver_str = filter(startswith("Solver"), split(moi_summary, "\n"))[1] + println(io, " ", solver_str) + return +end -**Example** -```julia-repl -julia> is_transcription_model(model) -true -``` -""" -function is_transcription_model(model::JuMP.Model) - return haskey(model.ext, :TransData) +# Showing the backend +function Base.show(io::IO, backend::TranscriptionBackend) + println(io, "A TranscriptionBackend that uses a") + show(io, backend.model) end +################################################################################ +# BASIC QUERIES +################################################################################ """ - transcription_data(model::JuMP.Model)::TranscriptionData + transcription_data(backend::TranscriptionBackend)::TranscriptionData -Return the `TranscriptionData` from a `TranscriptionModel`. Errors if it is not -a `TranscriptionModel`. +Return the mapping data used by `backend`. """ -function transcription_data(model::JuMP.Model)::TranscriptionData - !is_transcription_model(model) && error("Model is not a transcription model.") - return model.ext[:TransData] +function transcription_data(backend::TranscriptionBackend) + return InfiniteOpt.transformation_data(backend) end """ - has_internal_supports(model::JuMP.Model)::Bool + has_internal_supports(backend::TranscriptionBackend)::Bool -Return a `Bool` whether `model` has any internal supports that were collected. +Return a `Bool` whether `backend` has any internal supports that were collected. """ -function has_internal_supports(model::JuMP.Model) - return transcription_data(model).has_internal_supports +function has_internal_supports(backend::TranscriptionBackend) + return transcription_data(backend).has_internal_supports end ################################################################################ @@ -182,28 +226,32 @@ end ################################################################################ # Define method for checking if the label needs to be accounted for function _ignore_label( - model::JuMP.Model, + backend::TranscriptionBackend, label::Type{<:InfiniteOpt.AbstractSupportLabel} ) return label == InfiniteOpt.All || - (!has_internal_supports(model) && + (!has_internal_supports(backend) && label == InfiniteOpt.PublicLabel) end """ - transcription_variable(model::JuMP.Model, - vref::InfiniteOpt.GeneralVariableRef; + transcription_variable( + vref::InfiniteOpt.GeneralVariableRef, + backend::TranscriptionBackend; [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false]) + ndarray::Bool = false] + ) Return the transcribed variable reference(s) corresponding to `vref`. Errors if no transcription variable is found. 
Also can query via the syntax: ```julia -transcription_variable(vref::InfiniteOpt.GeneralVariableRef; +transcription_variable( + vref::InfiniteOpt.GeneralVariableRef; [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false]) + ndarray::Bool = false] + ) ``` -If the infinite model contains a built transcription model. By default, this +If the infinite model contains a built `TranscriptionBackend`. By default, this method returns only transcribed variables associated with public supports. All the variables can be returned by setting `label = All`. @@ -215,12 +263,12 @@ considers the union. **Example** ```julia-repl -julia> transcription_variable(trans_model, infvar) +julia> transcription_variable(infvar, trans_backend) 2-element Array{VariableRef,1}: infvar(support: 1) infvar(support: 2) -julia> transcription_variable(trans_model, hdvar) +julia> transcription_variable(hdvar, trans_backend) hdvar julia> transcription_variable(infvar) @@ -233,56 +281,65 @@ hdvar ``` """ function transcription_variable( - model::JuMP.Model, - vref::InfiniteOpt.GeneralVariableRef; + vref::InfiniteOpt.GeneralVariableRef, + backend::TranscriptionBackend; label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, ndarray::Bool = false ) - return transcription_variable(model, vref, InfiniteOpt._index_type(vref), - label, ndarray) + return transcription_variable( + vref, + InfiniteOpt._index_type(vref), + backend, + label, + ndarray + ) end # define convenient aliases -const InfVarIndex = Union{InfiniteOpt.InfiniteVariableIndex, - InfiniteOpt.SemiInfiniteVariableIndex, - InfiniteOpt.DerivativeIndex} -const FinVarIndex = Union{InfiniteOpt.FiniteVariableIndex, - InfiniteOpt.PointVariableIndex} +const InfVarIndex = Union{ + InfiniteOpt.InfiniteVariableIndex, + InfiniteOpt.SemiInfiniteVariableIndex, + InfiniteOpt.DerivativeIndex + } +const FinVarIndex = Union{ + InfiniteOpt.FiniteVariableIndex, + InfiniteOpt.PointVariableIndex + } ## Define the variable mapping functions # FinVarIndex function transcription_variable( - model::JuMP.Model, vref::InfiniteOpt.GeneralVariableRef, - index_type::Type{V}, + ::Type{V}, + backend::TranscriptionBackend, label::Type{<:InfiniteOpt.AbstractSupportLabel}, ndarray::Bool ) where {V <: FinVarIndex} - var = get(transcription_data(model).finvar_mappings, vref, nothing) + var = get(transcription_data(backend).finvar_mappings, vref, nothing) if isnothing(var) - error("Variable reference $vref not used in transcription model.") + error("Variable reference $vref not used in transcription backend.") end return var end # InfVarIndex function transcription_variable( - model::JuMP.Model, vref::InfiniteOpt.GeneralVariableRef, - index_type::Type{V}, + ::Type{V}, + backend::TranscriptionBackend, label::Type{<:InfiniteOpt.AbstractSupportLabel}, ndarray::Bool ) where {V <: InfVarIndex} - vars = get(transcription_data(model).infvar_mappings, vref, nothing) + vars = get(transcription_data(backend).infvar_mappings, vref, nothing) if isnothing(vars) - error("Variable reference $vref not used in transcription model.") + error("Variable reference $vref not used in transcription backend.") end if ndarray - return make_ndarray(model, vref, vars, label) - elseif _ignore_label(model, label) + return make_ndarray(backend, vref, vars, label) + elseif _ignore_label(backend, label) return vars else - labels = transcription_data(model).infvar_support_labels[vref] + labels = transcription_data(backend).infvar_support_labels[vref] inds = map(s -> any(l -> l <: 
label, s), labels) return vars[inds] end @@ -290,38 +347,39 @@ end # ParameterFunctionIndex function transcription_variable( - model::JuMP.Model, fref::InfiniteOpt.GeneralVariableRef, - index_type::Type{InfiniteOpt.ParameterFunctionIndex}, + ::Type{InfiniteOpt.ParameterFunctionIndex}, + backend::TranscriptionBackend, label::Type{<:InfiniteOpt.AbstractSupportLabel}, ndarray::Bool ) # get the object numbers of the expression and form the support iterator obj_nums = InfiniteOpt._object_numbers(fref) - support_indices = support_index_iterator(model, obj_nums) + support_indices = support_index_iterator(backend, obj_nums) vals = Vector{Float64}(undef, length(support_indices)) - check_labels = length(vals) > 1 && !_ignore_label(model, label) + check_labels = length(vals) > 1 && !_ignore_label(backend, label) label_inds = ones(Bool, length(vals)) # iterate over the indices and compute the values for (i, idx) in enumerate(support_indices) - supp = index_to_support(model, idx) - if check_labels && !any(l -> l <: label, index_to_labels(model, idx)) + supp = index_to_support(backend, idx) + if check_labels && !any(l -> l <: label, index_to_labels(backend, idx)) @inbounds label_inds[i] = false end - @inbounds vals[i] = transcription_expression(model, fref, supp) + @inbounds vals[i] = transcription_expression(fref, backend, supp) end # return the values if ndarray - return make_ndarray(model, fref, vals, label) + return make_ndarray(backend, fref, vals, label) else return vals[label_inds] end end # Fallback -function transcription_variable(model::JuMP.Model, +function transcription_variable( vref::InfiniteOpt.GeneralVariableRef, index_type, + backend::TranscriptionBackend, label, ndarray ) @@ -329,74 +387,82 @@ function transcription_variable(model::JuMP.Model, "type $(index_type) and/or is not defined for labels of type $(label).") end -# Dispatch for internal models +# Dispatch for internal backends function transcription_variable( vref::InfiniteOpt.GeneralVariableRef; label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, ndarray::Bool = false ) - trans_model = InfiniteOpt.optimizer_model(JuMP.owner_model(vref)) - return transcription_variable(trans_model, vref, label = label, - ndarray = ndarray) + return transcription_variable( + vref, + JuMP.owner_model(vref).backend, + label = label, + ndarray = ndarray + ) end """ - InfiniteOpt.optimizer_model_variable(vref::InfiniteOpt.GeneralVariableRef, - ::Val{:TransData}; + InfiniteOpt.transformation_model_variable( + vref::InfiniteOpt.GeneralVariableRef, + [backend::TranscriptionBackend]; [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false]) + ndarray::Bool = false] + ) -Proper extension of [`InfiniteOpt.optimizer_model_variable`](@ref) for -`TranscriptionModel`s. This simply dispatches to [`transcription_variable`](@ref). +Proper extension of [`InfiniteOpt.transformation_model_variable`](@ref) for +`TranscriptionBackend`s. This simply dispatches to [`transcription_variable`](@ref). 
""" -function InfiniteOpt.optimizer_model_variable( +function InfiniteOpt.transformation_model_variable( vref::InfiniteOpt.GeneralVariableRef, - ::Val{:TransData}; + backend::TranscriptionBackend; label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, ndarray::Bool = false ) - return transcription_variable(vref, label = label, ndarray = ndarray) + return transcription_variable(vref, backend, label = label, ndarray = ndarray) end """ - InfiniteOpt.variable_supports(model::JuMP.Model, + InfiniteOpt.variable_supports( vref::InfiniteOpt.DecisionVariableRef, - key::Val{:TransData} = Val(:TransData); + backend::TranscriptionBackend; [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false]) + ndarray::Bool = false] + ) -Return the support alias mapping associated with `vref` in the transcription model. +Return the support alias mapping associated with `vref` in the transcription backend. Errors if `vref` does not have transcripted variables. See `transcription_variable` for an explanation of `ndarray`. """ function InfiniteOpt.variable_supports( - model::JuMP.Model, - dvref::Union{InfiniteOpt.InfiniteVariableRef, InfiniteOpt.SemiInfiniteVariableRef, - InfiniteOpt.DerivativeRef}, - key::Val{:TransData} = Val(:TransData); + dvref::Union{ + InfiniteOpt.InfiniteVariableRef, + InfiniteOpt.SemiInfiniteVariableRef, + InfiniteOpt.DerivativeRef + }, + backend::TranscriptionBackend; label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, ndarray::Bool = false ) vref = InfiniteOpt._make_variable_ref(JuMP.owner_model(dvref), JuMP.index(dvref)) - if !haskey(transcription_data(model).infvar_mappings, vref) - error("Variable reference $vref not used in transcription model.") - elseif !haskey(transcription_data(model).infvar_supports, vref) + if !haskey(transcription_data(backend).infvar_mappings, vref) + error("Variable reference $vref not used in transcription backend.") + elseif !haskey(transcription_data(backend).infvar_supports, vref) prefs = InfiniteOpt.raw_parameter_refs(dvref) - lookups = transcription_data(model).infvar_lookup[vref] + lookups = transcription_data(backend).infvar_lookup[vref] type = typeof(Tuple(first(keys(lookups)), prefs)) supps = Vector{type}(undef, length(lookups)) for (s, i) in lookups supps[i] = Tuple(s, prefs) end - transcription_data(model).infvar_supports[vref] = supps + transcription_data(backend).infvar_supports[vref] = supps end - supps = transcription_data(model).infvar_supports[vref] + supps = transcription_data(backend).infvar_supports[vref] if ndarray - return make_ndarray(model, dvref, supps, label) - elseif _ignore_label(model, label) + return make_ndarray(backend, dvref, supps, label) + elseif _ignore_label(backend, label) return supps else - labels = transcription_data(model).infvar_support_labels[vref] + labels = transcription_data(backend).infvar_support_labels[vref] inds = map(s -> any(l -> l <: label, s), labels) return supps[inds] end @@ -404,78 +470,79 @@ end # ParameterFunctionRef function InfiniteOpt.variable_supports( - model::JuMP.Model, dvref::InfiniteOpt.ParameterFunctionRef, - key::Val{:TransData} = Val(:TransData); + backend::TranscriptionBackend; label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, ndarray::Bool = false ) # get the object numbers of the expression and form the support iterator obj_nums = sort(InfiniteOpt._object_numbers(dvref)) - support_indices = support_index_iterator(model, obj_nums) + support_indices = 
support_index_iterator(backend, obj_nums) supps = Vector{Tuple}(undef, length(support_indices)) - check_labels = length(supps) > 1 && !_ignore_label(model, label) - param_supps = parameter_supports(model) + check_labels = length(supps) > 1 && !_ignore_label(backend, label) + param_supps = parameter_supports(backend) label_inds = ones(Bool, length(supps)) # iterate over the indices and compute the values for (i, idx) in enumerate(support_indices) - if check_labels && !any(l -> l <: label, index_to_labels(model, idx)) + if check_labels && !any(l -> l <: label, index_to_labels(backend, idx)) @inbounds label_inds[i] = false end @inbounds supps[i] = Tuple(param_supps[j][idx[j]] for j in obj_nums) end # return the supports if ndarray - return make_ndarray(model, dvref, supps, label) + return make_ndarray(backend, dvref, supps, label) else return supps[label_inds] end end """ - lookup_by_support(model::JuMP.Model, - vref::InfiniteOpt.GeneralVariableRef, - support::Vector) + lookup_by_support( + vref::InfiniteOpt.GeneralVariableRef, + backend::TranscriptionBackend, + support::Vector + ) Return the transcription expression of `vref` defined at its `support`. This is intended as a helper method for automated transcription. """ function lookup_by_support( - model::JuMP.Model, vref::InfiniteOpt.GeneralVariableRef, + backend::TranscriptionBackend, support::Vector ) - return lookup_by_support(model, vref, InfiniteOpt._index_type(vref), support) + return lookup_by_support(vref, InfiniteOpt._index_type(vref), backend, support) end # define error function for not being able to find a variable by its support -_supp_error() = error("Unable to locate transcription variable by support, ", - "consider rebuilding the infinite model with less ", - "significant digits. Note this might be due to partially ", - "evaluating dependent parameters which is not supported ", - "by TranscriptionOpt. Such is the case with " * - "derivatives/measures that dependent on single ", - "dependent parameters.") +_supp_error() = error(""" + Unable to locate transcription variable by support, consider rebuilding the + infinite model with less significant digits. Note this might be due to partially + evaluating dependent parameters which is not supported by TranscriptionOpt. Such + is the case with derivatives/measures that dependent on single dependent + parameters. 
+ """) # InfiniteIndex function lookup_by_support( - model::JuMP.Model, vref::InfiniteOpt.GeneralVariableRef, - index_type::Type{V}, + ::Type{V}, + backend::TranscriptionBackend, support::Vector ) where {V <: InfVarIndex} - if !haskey(transcription_data(model).infvar_lookup, vref) - error("Variable reference $vref not used in transcription model.") + if !haskey(transcription_data(backend).infvar_lookup, vref) + error("Variable reference $vref not used in transcription backend.") end - idx = get(_supp_error, transcription_data(model).infvar_lookup[vref], support) - return transcription_data(model).infvar_mappings[vref][idx] + idx = get(_supp_error, transcription_data(backend).infvar_lookup[vref], support) + return transcription_data(backend).infvar_mappings[vref][idx] end # ParameterFunctionIndex function lookup_by_support( - model::JuMP.Model, fref::InfiniteOpt.GeneralVariableRef, - index_type::Type{InfiniteOpt.ParameterFunctionIndex}, + ::Type{InfiniteOpt.ParameterFunctionIndex}, + backend::TranscriptionBackend, support::Vector ) prefs = InfiniteOpt.raw_parameter_refs(fref) @@ -485,21 +552,21 @@ end # FiniteIndex function lookup_by_support( - model::JuMP.Model, vref::InfiniteOpt.GeneralVariableRef, - index_type::Type{V}, + ::Type{V}, + backend::TranscriptionBackend, support::Vector ) where {V <: FinVarIndex} - if !haskey(transcription_data(model).finvar_mappings, vref) - error("Variable reference $vref not used in transcription model.") + if !haskey(transcription_data(backend).finvar_mappings, vref) + error("Variable reference $vref not used in transcription backend.") end - return transcription_data(model).finvar_mappings[vref] + return transcription_data(backend).finvar_mappings[vref] end """ InfiniteOpt.internal_semi_infinite_variable( vref::InfiniteOpt.SemiInfiniteVariableRef, - ::Val{:TransData} + backend::TranscriptionBackend )::InfiniteOpt.SemiInfiniteVariable{InfiniteOpt.GeneralVariableRef} Return the internal semi-infinite variable associated with `vref`, assuming it was @@ -509,10 +576,9 @@ docstring. Errors, if no such variable can be found. 
""" function InfiniteOpt.internal_semi_infinite_variable( vref::InfiniteOpt.SemiInfiniteVariableRef, - ::Val{:TransData} + backend::TranscriptionBackend ) - trans_model = InfiniteOpt.optimizer_model(JuMP.owner_model(vref)) - semi_infinite_vars = transcription_data(trans_model).semi_infinite_vars + semi_infinite_vars = transcription_data(backend).semi_infinite_vars idx = -1 * JuMP.index(vref).value if idx in keys(semi_infinite_vars) return semi_infinite_vars[idx] @@ -527,22 +593,22 @@ end ################################################################################ # MeasureIndex function transcription_variable( - model::JuMP.Model, mref::InfiniteOpt.GeneralVariableRef, - index_type::Type{InfiniteOpt.MeasureIndex}, + ::Type{InfiniteOpt.MeasureIndex}, + backend::TranscriptionBackend, label::Type{<:InfiniteOpt.AbstractSupportLabel}, ndarray::Bool = false ) - exprs = get(transcription_data(model).measure_mappings, mref, nothing) + exprs = get(transcription_data(backend).measure_mappings, mref, nothing) if isnothing(exprs) - error("Measure reference $mref not used in transcription model.") + error("Measure reference $mref not used in transcription backend.") end if ndarray - return make_ndarray(model, mref, exprs, label) - elseif length(exprs) > 1 && _ignore_label(model, label) + return make_ndarray(backend, mref, exprs, label) + elseif length(exprs) > 1 && _ignore_label(backend, label) return exprs elseif length(exprs) > 1 - labels = transcription_data(model).measure_support_labels[mref] + labels = transcription_data(backend).measure_support_labels[mref] inds = map(s -> any(l -> l <: label, s), labels) return exprs[inds] else @@ -552,31 +618,30 @@ end # Extend transcription_expression function lookup_by_support( - model::JuMP.Model, mref::InfiniteOpt.GeneralVariableRef, - index_type::Type{InfiniteOpt.MeasureIndex}, + ::Type{InfiniteOpt.MeasureIndex}, + backend::TranscriptionBackend, support::Vector ) - if !haskey(transcription_data(model).measure_lookup, mref) - error("Measure reference $mref not used in transcription model.") + if !haskey(transcription_data(backend).measure_lookup, mref) + error("Measure reference $mref not used in transcription backend.") end - idx = get(_supp_error, transcription_data(model).measure_lookup[mref], support) - return transcription_data(model).measure_mappings[mref][idx] + idx = get(_supp_error, transcription_data(backend).measure_lookup[mref], support) + return transcription_data(backend).measure_mappings[mref][idx] end # Extend variable_supports function InfiniteOpt.variable_supports( - model::JuMP.Model, dmref::InfiniteOpt.MeasureRef, - key::Val{:TransData} = Val(:TransData); + backend::TranscriptionBackend; label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, ndarray::Bool = false ) mref = InfiniteOpt._make_variable_ref(JuMP.owner_model(dmref), JuMP.index(dmref)) - if !haskey(transcription_data(model).measure_mappings, mref) - error("Measure reference $mref not used in transcription model.") - elseif !haskey(transcription_data(model).measure_supports, mref) - lookups = transcription_data(model).measure_lookup[mref] + if !haskey(transcription_data(backend).measure_mappings, mref) + error("Measure reference $mref not used in transcription backend.") + elseif !haskey(transcription_data(backend).measure_supports, mref) + lookups = transcription_data(backend).measure_lookup[mref] prefs = InfiniteOpt.parameter_refs(dmref) vt_prefs = InfiniteOpt.Collections.VectorTuple(prefs) type = typeof(Tuple(first(keys(lookups)), vt_prefs)) @@ -584,15 
+649,15 @@ function InfiniteOpt.variable_supports( for (supp, i) in lookups supps[i] = Tuple(supp, vt_prefs) end - transcription_data(model).measure_supports[mref] = supps + transcription_data(backend).measure_supports[mref] = supps end - supps = transcription_data(model).measure_supports[mref] + supps = transcription_data(backend).measure_supports[mref] if ndarray - return make_ndarray(model, dmref, supps, label) - elseif length(supps) > 1 && _ignore_label(model, label) + return make_ndarray(backend, dmref, supps, label) + elseif length(supps) > 1 && _ignore_label(backend, label) return supps elseif length(supps) > 1 - labels = transcription_data(model).measure_support_labels[mref] + labels = transcription_data(backend).measure_support_labels[mref] inds = map(s -> any(l -> l <: label, s), labels) return supps[inds] else @@ -605,19 +670,22 @@ end ################################################################################ """ transcription_expression( - model::JuMP.Model, - expr::JuMP.AbstractJuMPScalar; + expr::JuMP.AbstractJuMPScalar, + backend::TranscriptionBackend; [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false]) + ndarray::Bool = false] + ) Return the transcribed expression(s) corresponding to `expr`. Errors if `expr` cannot be transcribed. Also can query via the syntax: ```julia -transcription_expression(expr::JuMP.AbstractJuMPScalar; - [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false]) +transcription_expression( + expr::JuMP.AbstractJuMPScalar; + [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, + ndarray::Bool = false] + ) ``` -If the infinite model contains a built transcription model. By default, this +If the infinite model contains a built transcription backend. By default, this method returns only transcribed expressions associated with public supports. All the expressions can be returned by setting `label = All`. @@ -629,7 +697,7 @@ considers the union. 
**Example** ```julia-repl -julia> transcription_expression(trans_model, my_expr) +julia> transcription_expression(my_expr, backend) x(support: 1) - y julia> transcription_expression(my_expr) @@ -637,28 +705,28 @@ x(support: 1) - y ``` """ function transcription_expression( - model::JuMP.Model, - expr::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr, JuMP.GenericNonlinearExpr}; + expr::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr, JuMP.GenericNonlinearExpr}, + backend::TranscriptionBackend; label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, ndarray::Bool = false ) # get the object numbers of the expression and form the support iterator obj_nums = InfiniteOpt._object_numbers(expr) - support_indices = support_index_iterator(model, obj_nums) + support_indices = support_index_iterator(backend, obj_nums) exprs = Vector{JuMP.AbstractJuMPScalar}(undef, length(support_indices)) - check_labels = length(exprs) > 1 && !_ignore_label(model, label) + check_labels = length(exprs) > 1 && !_ignore_label(backend, label) label_inds = ones(Bool, length(exprs)) # iterate over the indices and compute the values for (i, idx) in enumerate(support_indices) - supp = index_to_support(model, idx) - if check_labels && !any(l -> l <: label, index_to_labels(model, idx)) + supp = index_to_support(backend, idx) + if check_labels && !any(l -> l <: label, index_to_labels(backend, idx)) @inbounds label_inds[i] = false end - @inbounds exprs[i] = transcription_expression(model, expr, supp) + @inbounds exprs[i] = transcription_expression(expr, backend, supp) end # return the expressions if ndarray - return make_ndarray(model, expr, exprs, label) + return make_ndarray(backend, expr, exprs, label) else exprs = exprs[label_inds] return length(support_indices) > 1 ? exprs : first(exprs) @@ -667,80 +735,83 @@ end # Define for variables function transcription_expression( - model::JuMP.Model, - vref::InfiniteOpt.GeneralVariableRef; + vref::InfiniteOpt.GeneralVariableRef, + backend::TranscriptionBackend; label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, ndarray::Bool = false) - return transcription_variable(model, vref, label = label, ndarray = ndarray) + return transcription_variable(vref, backend, label = label, ndarray = ndarray) end -# Dispatch for internal models +# Dispatch for internal backends function transcription_expression( expr::JuMP.AbstractJuMPScalar; label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, ndarray::Bool = false ) model = JuMP.owner_model(expr) - if isnothing(model) - return zero(JuMP.AffExpr) + JuMP.constant(expr) - else - trans_model = InfiniteOpt.optimizer_model(model) - end - return transcription_expression(trans_model, expr, label = label, - ndarray = ndarray) + isnothing(model) && return zero(JuMP.AffExpr) + JuMP.constant(expr) + return transcription_expression( + expr, + model.backend, + label = label, + ndarray = ndarray + ) end """ - InfiniteOpt.optimizer_model_expression(expr::JuMP.AbstractJuMPScalar, - ::Val{:TransData}; + InfiniteOpt.transformation_model_expression( + expr::JuMP.AbstractJuMPScalar, + backend::TranscriptionBackend; [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false]) + ndarray::Bool = false] + ) -Proper extension of [`InfiniteOpt.optimizer_model_expression`](@ref) for -`TranscriptionModel`s. This simply dispatches to [`transcription_expression`](@ref). +Proper extension of [`InfiniteOpt.transformation_model_expression`](@ref) for +`TranscriptionBackend`s. 
This simply dispatches to [`transcription_expression`](@ref). """ -function InfiniteOpt.optimizer_model_expression( +function InfiniteOpt.transformation_model_expression( expr::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr, JuMP.GenericNonlinearExpr}, - ::Val{:TransData}; + backend::TranscriptionBackend; label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false) - return transcription_expression(expr, label = label, ndarray = ndarray) + ndarray::Bool = false + ) + return transcription_expression(expr, backend, label = label, ndarray = ndarray) end """ - InfiniteOpt.expression_supports(model::JuMP.Model, + InfiniteOpt.expression_supports( expr::JuMP.AbstractJuMPScalar, - key::Val{:TransData} = Val(:TransData); + backend::TranscriptionBackend; [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false]) + ndarray::Bool = false] + ) Return the support alias mappings associated with `expr`. Errors if `expr` cannot be transcribed. """ function InfiniteOpt.expression_supports( - model::JuMP.Model, expr::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr, JuMP.GenericNonlinearExpr}, - key::Val{:TransData} = Val(:TransData); + backend::TranscriptionBackend; label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, ndarray::Bool = false ) # get the object numbers of the expression and form the support iterator obj_nums = sort(InfiniteOpt._object_numbers(expr)) - support_indices = support_index_iterator(model, obj_nums) + support_indices = support_index_iterator(backend, obj_nums) supps = Vector{Tuple}(undef, length(support_indices)) - check_labels = length(supps) > 1 && !_ignore_label(model, label) - param_supps = parameter_supports(model) + check_labels = length(supps) > 1 && !_ignore_label(backend, label) + param_supps = parameter_supports(backend) label_inds = ones(Bool, length(supps)) # iterate over the indices and compute the values for (i, idx) in enumerate(support_indices) - if check_labels && !any(l -> l <: label, index_to_labels(model, idx)) + if check_labels && !any(l -> l <: label, index_to_labels(backend, idx)) @inbounds label_inds[i] = false end @inbounds supps[i] = Tuple(param_supps[j][idx[j]] for j in obj_nums) end # return the supports if ndarray - return make_ndarray(model, expr, supps, label) + return make_ndarray(backend, expr, supps, label) else supps = supps[label_inds] return length(support_indices) > 1 ? supps : first(supps) @@ -751,19 +822,23 @@ end # CONSTRAINT QUERIES ################################################################################ """ - transcription_constraint(model::JuMP.Model, - cref::InfiniteOpt.InfOptConstraintRef; + transcription_constraint( + cref::InfiniteOpt.InfOptConstraintRef, + backend::TranscriptionBackend; [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false]) + ndarray::Bool = false] + ) Return the transcribed constraint reference(s) corresponding to `cref`. Errors if `cref` has not been transcribed. Also can query via the syntax: ```julia -transcription_constraint(cref::InfiniteOpt.InfOptConstraintRef; +transcription_constraint( + cref::InfiniteOpt.InfOptConstraintRef; [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false]) + ndarray::Bool = false] + ) ``` -If the infinite model contains a built transcription model. By default, this +If the infinite model contains a built transcription backend. 
By default, this method returns only transcribed constraints associated with public supports. All the constraints can be returned by setting `label = All`. @@ -775,7 +850,7 @@ considers the union. **Example** ```julia-repl -julia> transcription_constraint(trans_model, fin_con) +julia> transcription_constraint(fin_con, backend) fin_con : x(support: 1) - y <= 3.0 julia> transcription_constraint(fin_con) @@ -783,21 +858,21 @@ fin_con : x(support: 1) - y <= 3.0 ``` """ function transcription_constraint( - model::JuMP.Model, - cref::InfiniteOpt.InfOptConstraintRef; + cref::InfiniteOpt.InfOptConstraintRef, + backend::TranscriptionBackend; label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, ndarray::Bool = false ) - constr = get(transcription_data(model).constr_mappings, cref, nothing) + constr = get(transcription_data(backend).constr_mappings, cref, nothing) if isnothing(constr) - error("Constraint reference $cref not used in transcription model.") + error("Constraint reference $cref not used in transcription backend.") end if ndarray - return make_ndarray(model, cref, constr, label) - elseif length(constr) > 1 && _ignore_label(model, label) + return make_ndarray(backend, cref, constr, label) + elseif length(constr) > 1 && _ignore_label(backend, label) return constr elseif length(constr) > 1 - labels = transcription_data(model).constr_support_labels[cref] + labels = transcription_data(backend).constr_support_labels[cref] inds = map(s -> any(l -> l <: label, s), labels) return constr[inds] else @@ -805,40 +880,44 @@ function transcription_constraint( end end -# Dispatch for internal models +# Dispatch for internal backends function transcription_constraint( cref::InfiniteOpt.InfOptConstraintRef; label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, ndarray::Bool = false ) - trans_model = InfiniteOpt.optimizer_model(JuMP.owner_model(cref)) - return transcription_constraint(trans_model, cref, label = label, - ndarray = ndarray) + return transcription_constraint( + cref, + JuMP.owner_model(cref).backend, + label = label, + ndarray = ndarray + ) end """ - InfiniteOpt.optimizer_model_constraint( + InfiniteOpt.transformation_model_constraint( cref::InfiniteOpt.InfOptConstraintRef, - ::Val{:TransData}; + backend::TranscriptionBackend; [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel - ndarray::Bool = false]) + ndarray::Bool = false] + ) -Proper extension of [`InfiniteOpt.optimizer_model_constraint`](@ref) for -`TranscriptionModel`s. This simply dispatches to [`transcription_constraint`](@ref). +Proper extension of [`InfiniteOpt.transformation_model_constraint`](@ref) for +`TranscriptionBackend`s. This simply dispatches to [`transcription_constraint`](@ref). 
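+
+For example (a sketch; `fin_con` and `backend` are the constraint reference and
+`TranscriptionBackend` from the example above), the two calls below are equivalent:
+```julia-repl
+julia> InfiniteOpt.transformation_model_constraint(fin_con, backend)
+fin_con : x(support: 1) - y <= 3.0
+
+julia> transcription_constraint(fin_con, backend)
+fin_con : x(support: 1) - y <= 3.0
+```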
""" -function InfiniteOpt.optimizer_model_constraint( +function InfiniteOpt.transformation_model_constraint( cref::InfiniteOpt.InfOptConstraintRef, - ::Val{:TransData}; + backend::TranscriptionBackend; label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, ndarray::Bool = false ) - return transcription_constraint(cref, label = label, ndarray = ndarray) + return transcription_constraint(cref, backend, label = label, ndarray = ndarray) end """ - InfiniteOpt.constraint_supports(model::JuMP.Model, + InfiniteOpt.constraint_supports( cref::InfiniteOpt.InfOptConstraintRef, - key::Val{:TransData} = Val(:TransData); + backend::TranscriptionBackend; [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, ndarray::Bool = false]) @@ -846,22 +925,21 @@ Return the support alias mappings associated with `cref`. Errors if `cref` is not transcribed. """ function InfiniteOpt.constraint_supports( - model::JuMP.Model, cref::InfiniteOpt.InfOptConstraintRef, - key::Val{:TransData} = Val(:TransData); + backend::TranscriptionBackend; label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, ndarray::Bool = false ) - supps = get(transcription_data(model).constr_supports, cref, nothing) + supps = get(transcription_data(backend).constr_supports, cref, nothing) if isnothing(supps) - error("Constraint reference $cref not used in transcription model.") + error("Constraint reference $cref not used in transcription backend.") end if ndarray - return make_ndarray(model, cref, supps, label) - elseif length(supps) > 1 && _ignore_label(model, label) + return make_ndarray(backend, cref, supps, label) + elseif length(supps) > 1 && _ignore_label(backend, label) return supps elseif length(supps) > 1 - labels = transcription_data(model).constr_support_labels[cref] + labels = transcription_data(backend).constr_support_labels[cref] inds = map(s -> any(l -> l <: label, s), labels) return supps[inds] else @@ -873,20 +951,20 @@ end # OTHER QUERIES ################################################################################ """ - parameter_supports(model::JuMP.Model)::Tuple + parameter_supports(backend::TranscriptionBackend)::Tuple Return the collected parameter support tuple that is stored in `TranscriptionData.supports`. """ -function parameter_supports(model::JuMP.Model) - return transcription_data(model).supports +function parameter_supports(backend::TranscriptionBackend) + return transcription_data(backend).supports end """ - support_index_iterator(model::JuMP.Model, [obj_nums::Vector{Int}])::CartesianIndices + support_index_iterator(backend::TranscriptionBackend, [obj_nums::Vector{Int}])::CartesianIndices Return the `CartesianIndices` that determine the indices of the unique combinations -of `TranscriptionData.supports` stored in `model`. If `obj_nums` is specified, +of `TranscriptionData.supports` stored in `backend`. If `obj_nums` is specified, then the indices will only include the tuple elements uses indices are included in the object numbers `obj_nums` and all others will be assigned the last index which should correspond to an appropriately sized placeholder comprised of `NaN`s. @@ -894,17 +972,17 @@ Note this method assumes that [`set_parameter_supports`](@ref) has already been called and that the last elements of each support vector contains a placeholder value. 
""" -function support_index_iterator(model::JuMP.Model) - raw_supps = parameter_supports(model) +function support_index_iterator(backend::TranscriptionBackend) + raw_supps = parameter_supports(backend) return CartesianIndices(ntuple(i -> 1:length(raw_supps[i])-1, length(raw_supps))) end # Generate for a subset of object numbers (use last index as placeholder --> support with NaNs) function support_index_iterator( - model::JuMP.Model, + backend::TranscriptionBackend, obj_nums::Vector{Int} - )::CartesianIndices - raw_supps = parameter_supports(model) + ) + raw_supps = parameter_supports(backend) lens = map(i -> length(i), raw_supps) # prepare the indices of each support combo # note that the actual supports are from 1:length-1 and the placeholders are at the ends @@ -913,31 +991,31 @@ function support_index_iterator( end """ - index_to_support(model::JuMP.Model, index::CartesianIndex)::Vector{Float64} + index_to_support(backend::TranscriptionBackend, index::CartesianIndex)::Vector{Float64} Given a particular support `index` generated via [`support_index_iterator`](@ref) -using `model`, return the corresponding support from `TranscriptionData.supports` +using `backend`, return the corresponding support from `TranscriptionData.supports` using placeholder `NaN`s as appropriate for tuple elements that are unneeded. """ function index_to_support( - model::JuMP.Model, + backend::TranscriptionBackend, index::CartesianIndex )::Vector{Float64} - raw_supps = parameter_supports(model) + raw_supps = parameter_supports(backend) return [j for i in eachindex(index.I) for j in raw_supps[i][index[i]]] end """ - index_to_labels(model::JuMP.Model, index::CartesianIndex)::Set{DataType} + index_to_labels(backend::TranscriptionBackend, index::CartesianIndex)::Set{DataType} Given a particular support `index` generated via [`support_index_iterator`](@ref) -using `model`, return the corresponding support label set from `TranscriptionData.support_labels`. +using `backend`, return the corresponding support label set from `TranscriptionData.support_labels`. """ function index_to_labels( - model::JuMP.Model, + backend::TranscriptionBackend, index::CartesianIndex ) - raw_labels = transcription_data(model).support_labels + raw_labels = transcription_data(backend).support_labels labels = Set{DataType}() for (i, j) in enumerate(index.I) union!(labels, raw_labels[i][j]) @@ -967,19 +1045,21 @@ function _get_object_numbers( end """ - make_narray(model::JuMP.Model, - ref::Union{JuMP.AbstractJuMPScalar, InfiniteOpt.InfOptConstraintRef}, - info::Vector, - label::Type{<:InfiniteOpt.AbstractSupportLabel})::Array - -Take the results`info` associated with `ref` and rearrange them into an + make_narray( + backend::TranscriptionBackend, + ref::Union{JuMP.AbstractJuMPScalar, InfiniteOpt.InfOptConstraintRef}, + info::Vector, + label::Type{<:InfiniteOpt.AbstractSupportLabel} + )::Array + +Take the results `info` associated with `ref` and rearrange them into an n-dimensional array where the axes correspond to the infinite parameter dependencies in accordance with their creation. Note that this works by querying the object numbers. Thus, independent infinite parameters will each get their own dimension (even if they are defined at the same time in an array) and each dependent infinite parameter group will have its own dimension. 
""" -function make_ndarray(model::JuMP.Model, ref, info::Vector, label::DataType) +function make_ndarray(backend::TranscriptionBackend, ref, info::Vector, label::DataType) # get the object numbers obj_nums = _get_object_numbers(ref) # return result if it is from a finite object @@ -987,7 +1067,7 @@ function make_ndarray(model::JuMP.Model, ref, info::Vector, label::DataType) return info end # determine the dimensions of the new array - raw_supps = parameter_supports(model) + raw_supps = parameter_supports(backend) dims = Tuple(length(raw_supps[i]) - 1 for i in eachindex(raw_supps) if i in obj_nums) # check that the lengths match (otherwise we'll have some sparse set) # TODO add capability to avoid this problem (make reduced array by looking at the supports) @@ -1004,10 +1084,10 @@ function make_ndarray(model::JuMP.Model, ref, info::Vector, label::DataType) # rearrange the array as needed to match the object number order sorted_array = issorted(obj_nums) ? narray : permutedims(narray, sortperm(obj_nums)) # consider the label specified (this will enforce the intersection of labels) - if _ignore_label(model, label) + if _ignore_label(backend, label) return sorted_array else - labels = transcription_data(model).support_labels[obj_nums] + labels = transcription_data(backend).support_labels[obj_nums] inds = map(sets -> findall(s -> any(l -> l <: label, s), sets), labels) return sorted_array[inds...] end diff --git a/src/TranscriptionOpt/optimize.jl b/src/TranscriptionOpt/optimize.jl deleted file mode 100644 index 6094ac22..00000000 --- a/src/TranscriptionOpt/optimize.jl +++ /dev/null @@ -1,44 +0,0 @@ -""" - transcription_model(model::InfiniteOpt.InfiniteModel)::JuMP.Model - -Return the transcription model stored in `model` if that is what is stored in -`model.optimizer_model`. -""" -function transcription_model(model::InfiniteOpt.InfiniteModel) - trans_model = InfiniteOpt.optimizer_model(model) - if !is_transcription_model(trans_model) - error("The model does not contain a transcription model.") - end - return trans_model -end - -""" - InfiniteOpt.build_optimizer_model!(model::InfiniteOpt.InfiniteModel, - key::Val{:TransData}; - check_support_dims::Bool = true)::Nothing - -Transcribe `model` and store it as a `TranscriptionModel` in the -`model.optimizer_model` field which can be accessed with `transcription_model`. -Ths clears the existing `TranscriptionModel` via -[`InfiniteOpt.clear_optimizer_model_build!`](@ref) and then builds a new one -using [`build_transcription_model!`](@ref). -""" -function InfiniteOpt.build_optimizer_model!( - model::InfiniteOpt.InfiniteModel, - key::Val{:TransData}; - check_support_dims::Bool = true, - extra_kwargs... 
- ) - # throw error for extra keywords - for (kw, _) in extra_kwargs - error("Unrecognized keyword argument `$kw` for building transcription models.") - end - # clear the optimzier model contents - trans_model = InfiniteOpt.clear_optimizer_model_build!(model) - # build the transcription model based on model - build_transcription_model!(trans_model, model, - check_support_dims = check_support_dims) - # update the optimizer model status - InfiniteOpt.set_optimizer_model_ready(model, true) - return -end diff --git a/src/TranscriptionOpt/transcribe.jl b/src/TranscriptionOpt/transcribe.jl index e9105f7c..33c50c53 100644 --- a/src/TranscriptionOpt/transcribe.jl +++ b/src/TranscriptionOpt/transcribe.jl @@ -40,11 +40,13 @@ function _collected_support_labels( end """ - set_parameter_supports(trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel)::Nothing + set_parameter_supports( + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel + )::Nothing Collect the infinite parameter supports stored in their respective dictionaries -form `inf_model` and process them into a tuple of vectors where each vector +form `model` and process them into a tuple of vectors where each vector contains the collected supports of a particular infinite parameter. These support collections are ordered in accordance with the definition order of the parameters (i.e., their object numbers). A support collection assocciated with @@ -57,13 +59,13 @@ comprised of `NaN`s for convenience in generating support indices via Before this is all done, `InfiniteOpt.add_generative_supports` is invoked as needed. """ function set_parameter_supports( - trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel ) # gather the basic information - param_indices = InfiniteOpt._param_object_indices(inf_model) - prefs = map(idx -> _temp_parameter_ref(inf_model, idx), param_indices) - data = transcription_data(trans_model) + param_indices = InfiniteOpt._param_object_indices(model) + prefs = map(idx -> _temp_parameter_ref(model, idx), param_indices) + data = transcription_data(backend) # check and add supports to prefs as needed for pref in prefs InfiniteOpt.add_generative_supports(pref) @@ -84,24 +86,25 @@ end # VARIABLE INITIALIZATION METHODS ################################################################################ """ - transcribe_finite_variables!(trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel)::Nothing + transcribe_finite_variables!( + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel + )::Nothing Create a transcription variable (i.e., a JuMP variable) for each `FiniteVariable` -stored in `inf_model` and add it to `trans_model`. The variable mapping is +stored in `model` and add it to `backend`. The variable mapping is also stored in `TranscriptionData.finvar_mappings` which enables [`transcription_variable`](@ref) and [`lookup_by_support`](@ref). 
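
For illustration (a sketch; `z` is a hypothetical finite variable in `model`),
after this step the mapping can be queried directly:
```julia-repl
julia> transcribe_finite_variables!(backend, model)

julia> transcription_data(backend).finvar_mappings[z]
z
```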
""" function transcribe_finite_variables!( - trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel - ) - for (idx, object) in InfiniteOpt._data_dictionary(inf_model, InfiniteOpt.FiniteVariable) - hvref = InfiniteOpt._make_variable_ref(inf_model, idx) - vref = JuMP.add_variable(trans_model, - JuMP.ScalarVariable(object.variable.info), - object.name) - transcription_data(trans_model).finvar_mappings[hvref] = vref + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel + ) + for (idx, object) in InfiniteOpt._data_dictionary(model, InfiniteOpt.FiniteVariable) + hvref = InfiniteOpt._make_variable_ref(model, idx) + v = JuMP.ScalarVariable(object.variable.info) + vref = JuMP.add_variable(backend.model, v, object.name) + transcription_data(backend).finvar_mappings[hvref] = vref end return end @@ -125,11 +128,13 @@ function _format_infinite_info( end """ - transcribe_infinite_variables!(trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel)::Nothing + transcribe_infinite_variables!( + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel + )::Nothing Create transcription variables (i.e., JuMP variables) for each `InfiniteVariable` -stored in `inf_model` and add them to `trans_model`. The variable mappings are +stored in `model` and add them to `backend`. The variable mappings are also stored in `TranscriptionData.infvar_mappings` in accordance with `TranscriptionData.infvar_lookup` which enable [`transcription_variable`](@ref) and [`lookup_by_support`](@ref). Note that the supports will not be generated @@ -137,34 +142,33 @@ until `InfiniteOpt.variable_supports` is invoked via `InfiniteOpt.supports`. Note that `TranscriptionData.infvar_support_labels` is also populated. """ function transcribe_infinite_variables!( - trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel ) - for (idx, object) in InfiniteOpt._data_dictionary(inf_model, InfiniteOpt.InfiniteVariable) + for (idx, object) in InfiniteOpt._data_dictionary(model, InfiniteOpt.InfiniteVariable) # get the basic variable information var = object.variable base_name = object.name param_nums = var.parameter_nums # prepare for iterating over its supports - supp_indices = support_index_iterator(trans_model, var.object_nums) + supp_indices = support_index_iterator(backend, var.object_nums) vrefs = Vector{JuMP.VariableRef}(undef, length(supp_indices)) labels = Vector{Set{DataType}}(undef, length(supp_indices)) lookup_dict = Dict{Vector{Float64}, Int}() # create a variable for each support for (counter, i) in enumerate(supp_indices) - raw_supp = index_to_support(trans_model, i) + raw_supp = index_to_support(backend, i) supp = raw_supp[param_nums] info = _format_infinite_info(var, supp) - var_name = string(base_name, "(support: ", counter, ")") - @inbounds vrefs[counter] = JuMP.add_variable(trans_model, - JuMP.ScalarVariable(info), - var_name) + v_name = string(base_name, "(support: ", counter, ")") + v = JuMP.ScalarVariable(info) + @inbounds vrefs[counter] = JuMP.add_variable(backend.model, v, v_name) lookup_dict[supp] = counter - @inbounds labels[counter] = index_to_labels(trans_model, i) + @inbounds labels[counter] = index_to_labels(backend, i) end # save the transcription information - ivref = InfiniteOpt._make_variable_ref(inf_model, idx) - data = transcription_data(trans_model) + ivref = InfiniteOpt._make_variable_ref(model, idx) + data = transcription_data(backend) data.infvar_lookup[ivref] = lookup_dict 
data.infvar_mappings[ivref] = vrefs data.infvar_support_labels[ivref] = labels @@ -188,29 +192,28 @@ function _format_derivative_info(d::InfiniteOpt.Derivative, support::Vector{Floa info.has_start, start, info.binary, info.integer) end -function _transcribe_derivative_variable(dref, d, trans_model) +function _transcribe_derivative_variable(dref, d, backend) base_name = InfiniteOpt.variable_string(MIME("text/plain"), dispatch_variable_ref(dref)) param_nums = InfiniteOpt._parameter_numbers(d.variable_ref) obj_nums = InfiniteOpt._object_numbers(d.variable_ref) # prepare for iterating over its supports - supp_indices = support_index_iterator(trans_model, obj_nums) + supp_indices = support_index_iterator(backend, obj_nums) vrefs = Vector{JuMP.VariableRef}(undef, length(supp_indices)) labels = Vector{Set{DataType}}(undef, length(supp_indices)) lookup_dict = Dict{Vector{Float64}, Int}() # create a variable for each support for (counter, i) in enumerate(supp_indices) - raw_supp = index_to_support(trans_model, i) + raw_supp = index_to_support(backend, i) supp = raw_supp[param_nums] info = _format_derivative_info(d, supp) - deriv_name = string(base_name, "(support: ", counter, ")") - @inbounds vrefs[counter] = JuMP.add_variable(trans_model, - JuMP.ScalarVariable(info), - deriv_name) + d_name = string(base_name, "(support: ", counter, ")") + d_var = JuMP.ScalarVariable(info) + @inbounds vrefs[counter] = JuMP.add_variable(backend.model, d_var, d_name) lookup_dict[supp] = counter - @inbounds labels[counter] = index_to_labels(trans_model, i) + @inbounds labels[counter] = index_to_labels(backend, i) end # save the transcription information - data = transcription_data(trans_model) + data = transcription_data(backend) data.infvar_lookup[dref] = lookup_dict data.infvar_mappings[dref] = vrefs data.infvar_support_labels[dref] = labels @@ -218,11 +221,13 @@ function _transcribe_derivative_variable(dref, d, trans_model) end """ - transcribe_derivative_variables!(trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel)::Nothing + transcribe_derivative_variables!( + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel + )::Nothing Create transcription variables (i.e., JuMP variables) for each `Derivative` -stored in `inf_model` and add them to `trans_model`. The variable mappings are +stored in `model` and add them to `backend`. The variable mappings are also stored in `TranscriptionData.infvar_mappings` in accordance with `TranscriptionData.infvar_lookup` which enable [`transcription_variable`](@ref) and [`lookup_by_support`](@ref). Note that the supports will not be generated @@ -232,35 +237,35 @@ futher derivative evaluation constraints are added when `TranscriptionData.infvar_support_labels` is also populated. 
""" function transcribe_derivative_variables!( - trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel ) - for (idx, object) in InfiniteOpt._data_dictionary(inf_model, InfiniteOpt.Derivative) + for (idx, object) in InfiniteOpt._data_dictionary(model, InfiniteOpt.Derivative) # get the basic derivative information - dref = InfiniteOpt._make_variable_ref(inf_model, idx) + dref = InfiniteOpt._make_variable_ref(model, idx) d = object.variable method = InfiniteOpt.derivative_method(dref) # if needed process lower order derivatives if !InfiniteOpt.allows_high_order_derivatives(method) && d.order > 1 for o in d.order-1:-1:1 - if !haskey(inf_model.deriv_lookup, (d.variable_ref, d.parameter_ref, o)) + if !haskey(model.deriv_lookup, (d.variable_ref, d.parameter_ref, o)) info = JuMP.VariableInfo(false, NaN, false, NaN, false, NaN, false, s -> NaN, false, false) new_d = InfiniteOpt.Derivative(info, true, d.variable_ref, d.parameter_ref, o) - new_dref = InfiniteOpt.add_derivative(inf_model, new_d) - _transcribe_derivative_variable(new_dref, d, trans_model) + new_dref = InfiniteOpt.add_derivative(model, new_d) + _transcribe_derivative_variable(new_dref, d, backend) end end end # process the derivative - _transcribe_derivative_variable(dref, d, trans_model) + _transcribe_derivative_variable(dref, d, backend) end return end # Setup the mapping for a given semi_infinite variable function _set_semi_infinite_variable_mapping( - trans_model::JuMP.Model, + backend::TranscriptionBackend, var::InfiniteOpt.SemiInfiniteVariable, rvref::InfiniteOpt.GeneralVariableRef, index_type @@ -270,14 +275,14 @@ function _set_semi_infinite_variable_mapping( ivref_param_nums = InfiniteOpt._parameter_numbers(ivref) eval_supps = var.eval_supports # prepare for iterating over its supports - supp_indices = support_index_iterator(trans_model, var.object_nums) + supp_indices = support_index_iterator(backend, var.object_nums) vrefs = Vector{JuMP.VariableRef}(undef, length(supp_indices)) labels = Vector{Set{DataType}}(undef, length(supp_indices)) lookup_dict = Dict{Vector{Float64}, Int}() counter = 1 # map a variable for each support for i in supp_indices - raw_supp = index_to_support(trans_model, i) + raw_supp = index_to_support(backend, i) # ensure this support is valid with the reduced restriction if any(!isnan(raw_supp[ivref_param_nums[k]]) && raw_supp[ivref_param_nums[k]] != v for (k, v) in eval_supps) @@ -287,16 +292,16 @@ function _set_semi_infinite_variable_mapping( supp = raw_supp[param_nums] ivref_supp = [haskey(eval_supps, j) ? 
eval_supps[j] : raw_supp[k] for (j, k) in enumerate(ivref_param_nums)] - @inbounds vrefs[counter] = lookup_by_support(trans_model, ivref, ivref_supp) + @inbounds vrefs[counter] = lookup_by_support(ivref, backend, ivref_supp) lookup_dict[supp] = counter - @inbounds labels[counter] = index_to_labels(trans_model, i) + @inbounds labels[counter] = index_to_labels(backend, i) counter += 1 end # truncate vrefs if any supports were skipped because of dependent parameter supps deleteat!(vrefs, counter:length(vrefs)) deleteat!(labels, counter:length(vrefs)) # save the transcription information - data = transcription_data(trans_model) + data = transcription_data(backend) data.infvar_lookup[rvref] = lookup_dict data.infvar_mappings[rvref] = vrefs data.infvar_support_labels[rvref] = labels @@ -305,7 +310,7 @@ end # Empty mapping dispatch for infinite parameter functions function _set_semi_infinite_variable_mapping( - trans_model::JuMP.Model, + backend::TranscriptionBackend, var::InfiniteOpt.SemiInfiniteVariable, rvref::InfiniteOpt.GeneralVariableRef, index_type::Type{InfiniteOpt.ParameterFunctionIndex} @@ -314,11 +319,13 @@ function _set_semi_infinite_variable_mapping( end """ - transcribe_semi_infinite_variables!(trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel)::Nothing + transcribe_semi_infinite_variables!( + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel + )::Nothing -Map each `SemiInfiniteVariable` in `inf_model` to transcription variables stored in -`trans_model`. The variable mappings are also stored in +Map each `SemiInfiniteVariable` in `model` to transcription variables stored in +`backend`. The variable mappings are also stored in `TranscriptionData.infvar_mappings` in accordance with `TranscriptionData.infvar_lookup` which enable [`transcription_variable`](@ref) and [`lookup_by_support`](@ref). Note that [`transcribe_infinite_variables!`](@ref) @@ -327,17 +334,16 @@ until `InfiniteOpt.variable_supports` is invoked via `InfiniteOpt.supports`. Note that `TranscriptionData.infvar_support_labels` is also populated. """ function transcribe_semi_infinite_variables!( - trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel ) - for (idx, object) in InfiniteOpt._data_dictionary(inf_model, InfiniteOpt.SemiInfiniteVariable) + for (idx, object) in InfiniteOpt._data_dictionary(model, InfiniteOpt.SemiInfiniteVariable) # get the basic variable information var = object.variable - rvref = InfiniteOpt._make_variable_ref(inf_model, idx) + rvref = InfiniteOpt._make_variable_ref(model, idx) # setup the mappings - ivref = InfiniteOpt.infinite_variable_ref(rvref) - _set_semi_infinite_variable_mapping(trans_model, var, rvref, - InfiniteOpt._index_type(ivref)) + idx_type = InfiniteOpt._index_type(InfiniteOpt.infinite_variable_ref(rvref)) + _set_semi_infinite_variable_mapping(backend, var, rvref, idx_type) end return end @@ -381,11 +387,13 @@ function _update_point_info( end """ - transcribe_point_variables!(trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel)::Nothing + transcribe_point_variables!( + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel + )::Nothing -Map each `PointVariable` in `inf_model` to a transcription variable stored in -`trans_model`. The variable mapping is also stored in +Map each `PointVariable` in `model` to a transcription variable stored in +`backend`. 
The variable mapping is also stored in `TranscriptionData.finvar_mappings` which enables [`transcription_variable`](@ref) and [`lookup_by_support`](@ref). Note that [`transcribe_infinite_variables!`](@ref) must be called first and that the @@ -393,18 +401,18 @@ info constraints associated with the transcription variable will be updated in accordance with the point variable. """ function transcribe_point_variables!( - trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel ) - for (idx, object) in InfiniteOpt._data_dictionary(inf_model, InfiniteOpt.PointVariable) + for (idx, object) in InfiniteOpt._data_dictionary(model, InfiniteOpt.PointVariable) # get the basic variable information var = object.variable ivref = var.infinite_variable_ref supp = var.parameter_values # find the corresponding variable record the mapping - vref = lookup_by_support(trans_model, ivref, supp) - pvref = InfiniteOpt._make_variable_ref(inf_model, idx) - transcription_data(trans_model).finvar_mappings[pvref] = vref + vref = lookup_by_support(ivref, backend, supp) + pvref = InfiniteOpt._make_variable_ref(model, idx) + transcription_data(backend).finvar_mappings[pvref] = vref # update the info constraints as needed _update_point_info(pvref, vref) end @@ -415,44 +423,48 @@ end # TRANSCRIPTION EXPRESSION METHODS ################################################################################ """ - transcription_expression(trans_model::JuMP.Model, expr, support::Vector{Float64}) + transcription_expression( + expr, + backend::TranscriptionBackend, + support::Vector{Float64} + ) Given the `expr` from an `InfiniteModel`, form its transcripted version in -accordance with the variable mappings available in `trans_model` defined at +accordance with the variable mappings available in `backend` defined at `support`. This should only be used once all variables and measures have been transcribed (e.g., via [`transcribe_finite_variables!`](@ref)). 
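
For example (a sketch reusing `x` and `y` from the earlier examples, where `x`
is infinite and `y` is finite), evaluating an expression at a raw support
vector might give:
```julia-repl
julia> transcription_expression(x - y, backend, [0.0])
x(support: 1) - y
```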
""" -function transcription_expression(trans_model::JuMP.Model, expr, support) +function transcription_expression(expr, backend::TranscriptionBackend, support) error("Unsupported expression type `$(typeof(expr))` for automated " * "transcription.") end # GeneralVariableRef function transcription_expression( - trans_model::JuMP.Model, vref::InfiniteOpt.GeneralVariableRef, + backend::TranscriptionBackend, support::Vector{Float64} ) - return transcription_expression(trans_model, vref, - InfiniteOpt._index_type(vref), support) + idx_type = InfiniteOpt._index_type(vref) + return transcription_expression(vref, idx_type, backend, support) end # Infinite variables, infinite parameter functions, and measures function transcription_expression( - trans_model::JuMP.Model, vref::InfiniteOpt.GeneralVariableRef, index_type::Type{V}, + backend::TranscriptionBackend, support::Vector{Float64} ) where {V <: Union{InfVarIndex, InfiniteOpt.ParameterFunctionIndex, InfiniteOpt.MeasureIndex}} param_nums = InfiniteOpt._parameter_numbers(vref) - return lookup_by_support(trans_model, vref, index_type, support[param_nums]) + return lookup_by_support(vref, index_type, backend, support[param_nums]) end # Semi-Infinite variables function transcription_expression( - trans_model::JuMP.Model, vref::InfiniteOpt.GeneralVariableRef, index_type::Type{InfiniteOpt.SemiInfiniteVariableIndex}, + backend::TranscriptionBackend, support::Vector{Float64} ) ivref = InfiniteOpt.infinite_variable_ref(vref) @@ -463,25 +475,25 @@ function transcription_expression( return func(Tuple(support[param_nums], prefs)...) else param_nums = InfiniteOpt._parameter_numbers(vref) - return lookup_by_support(trans_model, vref, index_type, support[param_nums]) + return lookup_by_support(vref, index_type, backend, support[param_nums]) end end # Point variables and finite variables function transcription_expression( - trans_model::JuMP.Model, vref::InfiniteOpt.GeneralVariableRef, index_type::Type{V}, + backend::TranscriptionBackend, support::Vector{Float64} ) where {V <: FinVarIndex} - return lookup_by_support(trans_model, vref, index_type, support) + return lookup_by_support(vref, index_type, backend, support) end # Infinite parameters function transcription_expression( - trans_model::JuMP.Model, vref::InfiniteOpt.GeneralVariableRef, index_type::Type{V}, + backend::TranscriptionBackend, support::Vector{Float64} ) where {V <: InfiniteOpt.InfiniteParameterIndex} param_num = InfiniteOpt._parameter_number(vref) @@ -490,9 +502,9 @@ end # Finite parameters function transcription_expression( - trans_model::JuMP.Model, vref::InfiniteOpt.GeneralVariableRef, index_type::Type{InfiniteOpt.FiniteParameterIndex}, + backend::TranscriptionBackend, support::Vector{Float64} ) return InfiniteOpt.parameter_value(vref) @@ -500,19 +512,19 @@ end # AffExpr and QuadExpr and NonlinearExpr function transcription_expression( - trans_model::JuMP.Model, expr::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr, JuMP.GenericNonlinearExpr}, + backend::TranscriptionBackend, support::Vector{Float64} ) return InfiniteOpt.map_expression( - v -> transcription_expression(trans_model, v, support), + v -> transcription_expression(v, backend, support), expr) end # Real Number function transcription_expression( - trans_model::JuMP.Model, num::Real, + backend::TranscriptionBackend, support::Vector{Float64} ) return zero(JuMP.AffExpr) + num @@ -522,10 +534,12 @@ end # MEASURE TRANSCRIPTION METHODS ################################################################################ """ - 
transcribe_measures!(trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel)::Nothing + transcribe_measures!( + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel + )::Nothing -For each `Measure` in `inf_model` expand it via `InfiniteOpt.expand_measure` or +For each `Measure` in `model` expand it via `InfiniteOpt.expand_measure` or `analytic_expansion` as appropriate and transcribe the expanded expression via [`transcription_expression`](@ref). Then store the measure to transcripted expression mappings in `TranscriptionData.measure_mappings` and @@ -535,35 +549,34 @@ until `InfiniteOpt.variable_supports` is invoked via `InfiniteOpt.supports`. Note that `TranscriptionData.measure_support_labels` is also populated. """ function transcribe_measures!( - trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel ) - for (idx, object) in InfiniteOpt._data_dictionary(inf_model, InfiniteOpt.Measure) + for (idx, object) in InfiniteOpt._data_dictionary(model, InfiniteOpt.Measure) # get the basic information meas = object.measure # expand the measure if meas.constant_func - new_expr = InfiniteOpt.analytic_expansion(meas.func, meas.data, trans_model) + new_expr = InfiniteOpt.analytic_expansion(meas.func, meas.data, backend) else - new_expr = InfiniteOpt.expand_measure(meas.func, meas.data, trans_model) + new_expr = InfiniteOpt.expand_measure(meas.func, meas.data, backend) end # prepare to transcribe over the supports - supp_indices = support_index_iterator(trans_model, meas.object_nums) + supp_indices = support_index_iterator(backend, meas.object_nums) exprs = Vector{JuMP.AbstractJuMPScalar}(undef, length(supp_indices)) labels = Vector{Set{DataType}}(undef, length(supp_indices)) lookup_dict = Dict{Vector{Float64}, Int}() # map a variable for each support for (counter, i) in enumerate(supp_indices) - raw_supp = index_to_support(trans_model, i) - @inbounds exprs[counter] = transcription_expression(trans_model, - new_expr, raw_supp) + raw_supp = index_to_support(backend, i) + @inbounds exprs[counter] = transcription_expression(new_expr, backend, raw_supp) supp = raw_supp[meas.parameter_nums] lookup_dict[supp] = counter - @inbounds labels[counter] = index_to_labels(trans_model, i) + @inbounds labels[counter] = index_to_labels(backend, i) end # save the transcription information - mref = InfiniteOpt._make_variable_ref(inf_model, idx) - data = transcription_data(trans_model) + mref = InfiniteOpt._make_variable_ref(model, idx) + data = transcription_data(backend) data.measure_lookup[mref] = lookup_dict data.measure_mappings[mref] = exprs data.measure_support_labels[mref] = labels @@ -575,21 +588,23 @@ end # OBJECTIVE TRANSCRIPTION METHODS ################################################################################ """ - transcribe_objective!(trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel)::Nothing + transcribe_objective!( + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel + )::Nothing -Form the transcripted version of the objective stored in `inf_model` and add it -to `trans_model`. Note that all the variables and measures in `inf_model` must +Form the transcripted version of the objective stored in `model` and add it +to `backend`. Note that all the variables and measures in `model` must by transcripted first (e.g., via [`transcribe_infinite_variables!`](@ref)). 
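
As an illustrative sketch (with hypothetical variables `z` and `y`), a finite
objective such as `Min 2z + y` maps variable-by-variable, while any measures it
contains use the expansions produced by [`transcribe_measures!`](@ref):
```julia-repl
julia> transcribe_objective!(backend, model)

julia> JuMP.objective_function(backend.model)
2 z + y
```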
""" function transcribe_objective!( - trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel ) - expr = JuMP.objective_function(inf_model) - sense = JuMP.objective_sense(inf_model) - trans_expr = transcription_expression(trans_model, expr, Float64[]) - JuMP.set_objective(trans_model, sense, trans_expr) + expr = JuMP.objective_function(model) + sense = JuMP.objective_sense(model) + trans_expr = transcription_expression(expr, backend, Float64[]) + JuMP.set_objective(backend.model, sense, trans_expr) return end @@ -599,56 +614,56 @@ end ## Given a variable and its set from an info constraint, get the transcribed version # Lower bound constraint function _get_info_constr_from_var( - trans_model::JuMP.Model, + backend::TranscriptionBackend, vref::InfiniteOpt.GeneralVariableRef, set::MOI.GreaterThan, support::Vector{Float64} ) - trans_vref = transcription_expression(trans_model, vref, support) + trans_vref = transcription_expression(vref, backend, support) return JuMP.has_lower_bound(trans_vref) ? JuMP.LowerBoundRef(trans_vref) : nothing end # Upper bound constraint function _get_info_constr_from_var( - trans_model::JuMP.Model, + backend::TranscriptionBackend, vref::InfiniteOpt.GeneralVariableRef, set::MOI.LessThan, support::Vector{Float64} ) - trans_vref = transcription_expression(trans_model, vref, support) + trans_vref = transcription_expression(vref, backend, support) return JuMP.has_upper_bound(trans_vref) ? JuMP.UpperBoundRef(trans_vref) : nothing end # Fix constraint function _get_info_constr_from_var( - trans_model::JuMP.Model, + backend::TranscriptionBackend, vref::InfiniteOpt.GeneralVariableRef, set::MOI.EqualTo, support::Vector{Float64} ) - trans_vref = transcription_expression(trans_model, vref, support) + trans_vref = transcription_expression(vref, backend, support) return JuMP.is_fixed(trans_vref) ? JuMP.FixRef(trans_vref) : nothing end # Binary constraint function _get_info_constr_from_var( - trans_model::JuMP.Model, + backend::TranscriptionBackend, vref::InfiniteOpt.GeneralVariableRef, set::MOI.ZeroOne, support::Vector{Float64} ) - trans_vref = transcription_expression(trans_model, vref, support) + trans_vref = transcription_expression(vref, backend, support) return JuMP.is_binary(trans_vref) ? JuMP.BinaryRef(trans_vref) : nothing end # Integer constraint function _get_info_constr_from_var( - trans_model::JuMP.Model, + backend::TranscriptionBackend, vref::InfiniteOpt.GeneralVariableRef, set::MOI.Integer, support::Vector{Float64} ) - trans_vref = transcription_expression(trans_model, vref, support) + trans_vref = transcription_expression(vref, backend, support) return JuMP.is_integer(trans_vref) ? 
JuMP.IntegerRef(trans_vref) : nothing end @@ -672,38 +687,38 @@ end ## transcribed version # JuMP.ScalarConstraint function _process_constraint( - trans_model::JuMP.Model, + backend::TranscriptionBackend, constr::JuMP.ScalarConstraint, func::JuMP.AbstractJuMPScalar, set::MOI.AbstractScalarSet, raw_supp::Vector{Float64}, name::String ) - new_func = transcription_expression(trans_model, func, raw_supp) + new_func = transcription_expression(func, backend, raw_supp) trans_constr = JuMP.build_constraint(error, new_func, set) - return JuMP.add_constraint(trans_model, trans_constr, name) + return JuMP.add_constraint(backend.model, trans_constr, name) end # JuMP.VectorConstraint function _process_constraint( - trans_model::JuMP.Model, + backend::TranscriptionBackend, constr::JuMP.VectorConstraint, func::Vector{<:JuMP.AbstractJuMPScalar}, set::MOI.AbstractVectorSet, raw_supp::Vector{Float64}, name::String ) - new_func = map(f -> transcription_expression(trans_model, f, raw_supp), func) + new_func = map(f -> transcription_expression(f, backend, raw_supp), func) shape = JuMP.shape(constr) shaped_func = JuMP.reshape_vector(new_func, shape) shaped_set = JuMP.reshape_set(set, shape) trans_constr = JuMP.build_constraint(error, shaped_func, shaped_set) - return JuMP.add_constraint(trans_model, trans_constr, name) + return JuMP.add_constraint(backend.model, trans_constr, name) end # Fallback function _process_constraint( - trans_model::JuMP.Model, + backend::TranscriptionBackend, constr, func, set, @@ -715,11 +730,13 @@ function _process_constraint( end """ - transcribe_constraints!(trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel)::Nothing + transcribe_constraints!( + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel + )::Nothing -For each constraint in `inf_model` form its transcripted version(s) and add them -to `trans_model`. The mappings are stored in `TranscriptionData.constr_mappings` +For each constraint in `model` form its transcripted version(s) and add them +to `backend`. The mappings are stored in `TranscriptionData.constr_mappings` and the associated supports are stored in `TranscriptionData.constr_supports` to enable [`transcription_constraint`](@ref) and `InfiniteOpt.constraint_supports`. Note that variable info constraints are simply mapped to the existing info @@ -729,19 +746,19 @@ the variables and measures must all first be transcripted (e.g., via `TranscriptionData.constr_support_labels` is also populated. 
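
For illustration (a sketch; `c1` is a hypothetical constraint on an infinite
variable with two public supports, output trimmed for clarity), one transcribed
constraint is created per support and named accordingly:
```julia-repl
julia> transcription_constraint(c1, backend)
2-element Vector{ConstraintRef}:
 c1(support: 1) : x(support: 1) + y <= 2.0
 c1(support: 2) : x(support: 2) + y <= 2.0
```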
""" function transcribe_constraints!( - trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel ) - param_supps = parameter_supports(trans_model) - for (idx, object) in inf_model.constraints + param_supps = parameter_supports(backend) + for (idx, object) in model.constraints # get the basic information constr = object.constraint func = JuMP.jump_function(constr) set = JuMP.moi_set(constr) obj_nums = object.object_nums - cref = InfiniteOpt._make_constraint_ref(inf_model, idx) + cref = InfiniteOpt._make_constraint_ref(model, idx) # prepare the iteration helpers - supp_indices = support_index_iterator(trans_model, obj_nums) + supp_indices = support_index_iterator(backend, obj_nums) crefs = Vector{JuMP.ConstraintRef}(undef, length(supp_indices)) supps = Vector{Tuple}(undef, length(supp_indices)) labels = Vector{Set{DataType}}(undef, length(supp_indices)) @@ -749,15 +766,15 @@ function transcribe_constraints!( # iterate over the support indices for the info constraints if object.is_info_constraint for i in supp_indices - raw_supp = index_to_support(trans_model, i) - info_ref = _get_info_constr_from_var(trans_model, func, set, + raw_supp = index_to_support(backend, i) + info_ref = _get_info_constr_from_var(backend, func, set, raw_supp) # not all supports may be defined if overwritten by a point variable if !isnothing(info_ref) @inbounds crefs[counter] = info_ref @inbounds supps[counter] = Tuple(param_supps[j][i[j]] for j in obj_nums) - @inbounds labels[counter] = index_to_labels(trans_model, i) + @inbounds labels[counter] = index_to_labels(backend, i) counter += 1 end end @@ -770,17 +787,17 @@ function transcribe_constraints!( restrict_domains = map(p -> restrictions[p], prefs) name = object.name for i in supp_indices - raw_supp = index_to_support(trans_model, i) + raw_supp = index_to_support(backend, i) # ensure the support satisfies parameter bounds and then add it if _support_in_restrictions(raw_supp, restrict_indices, restrict_domains) new_name = isempty(name) ? "" : string(name, "(support: ", counter, ")") - new_cref = _process_constraint(trans_model, constr, func, + new_cref = _process_constraint(backend, constr, func, set, raw_supp, new_name) @inbounds crefs[counter] = new_cref @inbounds supps[counter] = Tuple(param_supps[j][i[j]] for j in obj_nums) - @inbounds labels[counter] = index_to_labels(trans_model, i) + @inbounds labels[counter] = index_to_labels(backend, i) counter += 1 end end @@ -790,7 +807,7 @@ function transcribe_constraints!( deleteat!(supps, counter:length(supps)) deleteat!(labels, counter:length(supps)) # add the constraint mappings to the trans model - data = transcription_data(trans_model) + data = transcription_data(backend) data.constr_mappings[cref] = crefs data.constr_supports[cref] = supps data.constr_support_labels[cref] = labels @@ -802,23 +819,25 @@ end # DERIVATIVE CONSTRAINT TRANSCRIPTION METHODS ################################################################################ """ - transcribe_derivative_evaluations!(trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel)::Nothing + transcribe_derivative_evaluations!( + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel + )::Nothing Generate the auxiliary derivative evaluation equations and transcribe them -appropriately for all the derivatives in `inf_model`. These are in turn added to -`trans_model`. 
Note that no mapping information is recorded since the InfiniteModel +appropriately for all the derivatives in `model`. These are in turn added to +`backend`. Note that no mapping information is recorded since the InfiniteModel won't have any constraints that correspond to these equations. Also note that the variables and measures must all first be transcripted (e.g., via [`transcribe_derivative_variables!`](@ref)). """ function transcribe_derivative_evaluations!( - trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel ) - for (idx, object) in InfiniteOpt._data_dictionary(inf_model, InfiniteOpt.Derivative) + for (idx, object) in InfiniteOpt._data_dictionary(model, InfiniteOpt.Derivative) # get the basic variable information - dref = InfiniteOpt._make_variable_ref(inf_model, idx) + dref = InfiniteOpt._make_variable_ref(model, idx) pref = dispatch_variable_ref(object.variable.parameter_ref) method = InfiniteOpt.derivative_method(pref) order = object.variable.order @@ -827,22 +846,22 @@ function transcribe_derivative_evaluations!( # generate the evaluation expressions vref = object.variable.variable_ref if !InfiniteOpt.allows_high_order_derivatives(method) && order > 1 - d_idx = inf_model.deriv_lookup[vref, object.variable.parameter_ref, order - 1] - vref = InfiniteOpt._make_variable_ref(inf_model, d_idx) + d_idx = model.deriv_lookup[vref, object.variable.parameter_ref, order - 1] + vref = InfiniteOpt._make_variable_ref(model, d_idx) end - exprs = InfiniteOpt.evaluate_derivative(dref, vref, method, trans_model) + exprs = InfiniteOpt.evaluate_derivative(dref, vref, method, backend) # prepare the iteration helpers param_obj_num = InfiniteOpt._object_number(pref) obj_nums = filter(!isequal(param_obj_num), InfiniteOpt._object_numbers(dref)) - supp_indices = support_index_iterator(trans_model, obj_nums) + supp_indices = support_index_iterator(backend, obj_nums) # transcribe the constraints set = MOI.EqualTo(0.0) for i in supp_indices - raw_supp = index_to_support(trans_model, i) + raw_supp = index_to_support(backend, i) for expr in exprs - new_expr = transcription_expression(trans_model, expr, raw_supp) + new_expr = transcription_expression(expr, backend, raw_supp) trans_constr = JuMP.build_constraint(error, new_expr, set) - JuMP.add_constraint(trans_model, trans_constr) # TODO maybe add name? + JuMP.add_constraint(backend.model, trans_constr) # TODO maybe add name? end end end @@ -851,21 +870,23 @@ function transcribe_derivative_evaluations!( end """ - transcribe_variable_collocation_restictions!(trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel)::Nothing + transcribe_variable_collocation_restictions!( + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel + )::Nothing -Add constraints to `trans_model` that make infinite variables constant over collocation +Add constraints to `backend` that make infinite variables constant over collocation points following the calls made to [`InfiniteOpt.constant_over_collocation`](@ref). Note that [`set_parameter_supports`](@ref) and [`transcribe_infinite_variables!`](@ref) must be called first. 
""" function transcribe_variable_collocation_restictions!( - trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel ) - data = transcription_data(trans_model) + data = transcription_data(backend) set = MOI.EqualTo(0.0) - for (pidx, vidxs) in inf_model.piecewise_vars - pref = InfiniteOpt._make_variable_ref(inf_model, pidx) + for (pidx, vidxs) in model.piecewise_vars + pref = InfiniteOpt._make_variable_ref(model, pidx) if !InfiniteOpt.has_generative_supports(pref) continue end @@ -873,21 +894,21 @@ function transcribe_variable_collocation_restictions!( supps = reverse!(data.supports[obj_num][1:end-1]) labels = reverse!(data.support_labels[obj_num][1:end-1]) @assert any(l -> l <: InfiniteOpt.PublicLabel, first(labels)) - v_manip = GeneralVariableRef(inf_model, -1, IndependentParameterIndex) # placeholder + v_manip = GeneralVariableRef(model, -1, IndependentParameterIndex) # placeholder for vidx in vidxs - vref = InfiniteOpt._make_variable_ref(inf_model, vidx) + vref = InfiniteOpt._make_variable_ref(model, vidx) obj_nums = filter(!isequal(obj_num), InfiniteOpt._object_numbers(vref)) - supp_indices = support_index_iterator(trans_model, obj_nums) + supp_indices = support_index_iterator(backend, obj_nums) for (s, ls) in zip(supps, labels) if any(l -> l <: InfiniteOpt.PublicLabel, ls) - v_manip = InfiniteOpt.make_reduced_expr(vref, pref, s, trans_model) + v_manip = InfiniteOpt.make_reduced_expr(vref, pref, s, backend) else - inf_expr = v_manip - InfiniteOpt.make_reduced_expr(vref, pref, s, trans_model) + inf_expr = v_manip - InfiniteOpt.make_reduced_expr(vref, pref, s, backend) for i in supp_indices - raw_supp = index_to_support(trans_model, i) - new_expr = transcription_expression(trans_model, inf_expr, raw_supp) + raw_supp = index_to_support(backend, i) + new_expr = transcription_expression(inf_expr, backend, raw_supp) trans_constr = JuMP.build_constraint(error, new_expr, set) - JuMP.add_constraint(trans_model, trans_constr) + JuMP.add_constraint(backend.model, trans_constr) end end end @@ -900,17 +921,19 @@ end # INFINITEMODEL TRANSCRIPTION METHODS ################################################################################ """ - build_transcription_model!(trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel; - [check_support_dims::Bool = true])::Nothing - -Given an empty `trans_model` build it using the information stored in `inf_model`. -This is intended for a `TranscriptionModel` that serves as a internal optimizer model -of `inf_model`. This detail is important to correctly enable internally generated -semi-infinite variables during the transcription process such that `inf_model` is not -modified. Note that this will add supports to `inf_model` via + build_transcription_backend!( + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel; + [check_support_dims::Bool = true] + )::Nothing + +Given an empty `backend` build it using the information stored in `model`. +This is intended for a `TranscriptionModel` that serves as a internal transformation backend +of `model`. This detail is important to correctly enable internally generated +semi-infinite variables during the transcription process such that `model` is not +modified. Note that this will add supports to `model` via [`InfiniteOpt.fill_in_supports!`](@ref) for infinite parameters that contain -no supports. Also a warning is thrown when the transcription model contains +no supports. 
Also a warning is thrown when the transcription backend contains more than 15,000 support points to alert users when they may naively have a few independent supports whose product quickly yields a very large grid. For example having 3 independent parameters with 100 supports each would result @@ -918,15 +941,16 @@ in 1,000,000 supports if all three are together in at least 1 constraint. This behavior can be overcome using dependent parameters. The warning can be turned off via `check_support_dims = false`. """ -function build_transcription_model!( - trans_model::JuMP.Model, - inf_model::InfiniteOpt.InfiniteModel; check_support_dims::Bool = true +function build_transcription_backend!( + backend::TranscriptionBackend, + model::InfiniteOpt.InfiniteModel; + check_support_dims::Bool = true ) # ensure there are supports to add and add them to the trans model - InfiniteOpt.fill_in_supports!(inf_model, modify = false) - set_parameter_supports(trans_model, inf_model) + InfiniteOpt.fill_in_supports!(model, modify = false) + set_parameter_supports(backend, model) # check that there isn't a crazy amount of supports from taking the product - supps = parameter_supports(trans_model) + supps = parameter_supports(backend) num_supps = isempty(supps) ? 0 : prod(length, supps) if check_support_dims && length(supps) > 1 && num_supps > 15000 # NOTE this is an arbitrary cutoff @warn("Due to necessarily considering the combinatorics of independent " * @@ -936,22 +960,59 @@ function build_transcription_model!( "This warning can be turned off via `check_support_dims = false`.") end # add nonlinear operators as needed - InfiniteOpt.add_operators_to_jump(trans_model, inf_model) + InfiniteOpt.add_operators_to_jump(backend.model, model) # define the variables - transcribe_finite_variables!(trans_model, inf_model) - transcribe_infinite_variables!(trans_model, inf_model) - transcribe_derivative_variables!(trans_model, inf_model) - transcribe_semi_infinite_variables!(trans_model, inf_model) - transcribe_point_variables!(trans_model, inf_model) - transcribe_measures!(trans_model, inf_model) + transcribe_finite_variables!(backend, model) + transcribe_infinite_variables!(backend, model) + transcribe_derivative_variables!(backend, model) + transcribe_semi_infinite_variables!(backend, model) + transcribe_point_variables!(backend, model) + transcribe_measures!(backend, model) # define the objective - transcribe_objective!(trans_model, inf_model) + transcribe_objective!(backend, model) # define the constraints - transcribe_constraints!(trans_model, inf_model) + transcribe_constraints!(backend, model) # define the derivative evaluation constraints - transcribe_derivative_evaluations!(trans_model, inf_model) + transcribe_derivative_evaluations!(backend, model) # define constraints for variables that are constant over collocation points - transcribe_variable_collocation_restictions!(trans_model, inf_model) + transcribe_variable_collocation_restictions!(backend, model) + return +end + +""" + InfiniteOpt.build_transformation_backend!( + model::InfiniteOpt.InfiniteModel, + backend::TranscriptionBackend; + check_support_dims::Bool = true + )::Nothing + +Build `backend` and set it as the transformation backend to `model`. +Ths clears out the existing `backend` and rebuilds it. Optionally, +the dimension check to through a warning if there is potentially +a very large number of supports can be turned off via +`check_support_dims = false`. 
+""" +function InfiniteOpt.build_transformation_backend!( + model::InfiniteOpt.InfiniteModel, + backend::TranscriptionBackend; + check_support_dims::Bool = true, + extra_kwargs... + ) + # throw error for extra keywords + for (kw, _) in extra_kwargs + error("Unrecognized keyword argument `$kw` for building transcription backends.") + end + # clear the the backend model contents + empty!(backend) + backend.model.operator_counter = 0 + # build the transcription backend based on model + build_transcription_backend!( + backend, + model, + check_support_dims = check_support_dims + ) + # update the transformation backend status + InfiniteOpt.set_transformation_backend_ready(model, true) return end diff --git a/src/array_parameters.jl b/src/array_parameters.jl index 0a20a3d9..866012ae 100644 --- a/src/array_parameters.jl +++ b/src/array_parameters.jl @@ -626,7 +626,7 @@ function set_derivative_method(pref::DependentParameterRef, _adaptive_method_update(pref, _core_variable_object(pref), method) _reset_derivative_constraints(pref) if is_used(pref) - set_optimizer_model_ready(JuMP.owner_model(pref), false) + set_transformation_backend_ready(JuMP.owner_model(pref), false) end return end @@ -743,7 +743,7 @@ function _update_parameter_domain(pref::DependentParameterRef, end _set_has_internal_supports(pref, false) if is_used(pref) - set_optimizer_model_ready(JuMP.owner_model(pref), false) + set_transformation_backend_ready(JuMP.owner_model(pref), false) end return end @@ -1156,7 +1156,7 @@ function _update_parameter_supports(prefs::AbstractArray{<:DependentParameterRef _reset_derivative_constraints(pref) end if any(is_used(pref) for pref in prefs) - set_optimizer_model_ready(JuMP.owner_model(first(prefs)), false) + set_transformation_backend_ready(JuMP.owner_model(first(prefs)), false) end return end @@ -1332,7 +1332,7 @@ function add_supports( _reset_derivative_constraints(pref) end if any(is_used(pref) for pref in prefs) - set_optimizer_model_ready(JuMP.owner_model(first(prefs)), false) + set_transformation_backend_ready(JuMP.owner_model(first(prefs)), false) end end return @@ -1384,7 +1384,7 @@ function delete_supports( end end if any(is_used(pref) for pref in prefs) - set_optimizer_model_ready(JuMP.owner_model(first(prefs)), false) + set_transformation_backend_ready(JuMP.owner_model(first(prefs)), false) end return end @@ -1750,7 +1750,7 @@ function JuMP.delete( end # update optimizer model status if any(is_used(pref) for pref in prefs) - set_optimizer_model_ready(model, false) + set_transformation_backend_ready(model, false) end # delete dependence of measures and constraints on prefs for pref in gvrefs diff --git a/src/backends.jl b/src/backends.jl new file mode 100644 index 00000000..da318599 --- /dev/null +++ b/src/backends.jl @@ -0,0 +1,899 @@ +################################################################################ +# CORE BACKEND API +################################################################################ +""" + transformation_backend_ready(model::InfiniteModel)::Bool + +Return `Bool` if the transformation backend model is up-to-date with `model` and +ready to be optimized. + +**Example** +```julia-repl +julia> transformation_backend_ready(model) +false +``` +""" +transformation_backend_ready(model::InfiniteModel) = model.ready_to_optimize + +""" + set_transformation_backend_ready(model::InfiniteModel, status::Bool)::Nothing + +Set the status of the transformation backend model to whether it is up-to-date or +not. 
+Note that this is intended more as an internal function, but it is useful for extensions.
+
+**Example**
+```julia-repl
+julia> set_transformation_backend_ready(model, true)
+
+julia> transformation_backend_ready(model)
+true
+```
+"""
+function set_transformation_backend_ready(model::InfiniteModel, status::Bool)
+    model.ready_to_optimize = status
+    return
+end
+
+"""
+    transformation_model(backend::AbstractTransformationBackend)
+
+Return the underlying model used by the `backend`. This serves as an
+extension point for new backend types. No extension is needed for
+[`JuMPBackend`](@ref)s.
+"""
+function transformation_model(backend::AbstractTransformationBackend)
+    error("`transformation_model` not implemented for transformation backends " *
+          "of type `$(typeof(backend))`.")
+end
+
+"""
+    transformation_model(model::InfiniteModel)
+
+Return the underlying model used by the transformation backend.
+"""
+transformation_model(model::InfiniteModel) = transformation_model(model.backend)
+
+"""
+    transformation_data(backend::AbstractTransformationBackend)
+
+Return the underlying data (typically mapping data) used by the `backend`.
+This serves as an extension point for new backend types. No extension
+is needed for [`JuMPBackend`](@ref)s.
+"""
+function transformation_data(backend::AbstractTransformationBackend)
+    error("`transformation_data` not implemented for transformation backends " *
+          "of type `$(typeof(backend))`.")
+end
+
+"""
+    transformation_data(model::InfiniteModel)
+
+Return the underlying data (typically mapping data) used by the
+transformation backend.
+"""
+function transformation_data(model::InfiniteModel)
+    return transformation_data(model.backend)
+end
+
+"""
+    set_transformation_backend(
+        model::InfiniteModel,
+        backend::AbstractTransformationBackend
+        )::Nothing
+
+Specify a new transformation backend `backend` for the `model`. Note
+that all data/settings/results associated with the previous backend
+will be removed.
+"""
+function set_transformation_backend(
+    model::InfiniteModel,
+    backend::AbstractTransformationBackend
+    )
+    model.backend = backend
+    set_transformation_backend_ready(model, false)
+    return
+end
+
+"""
+    JuMP.get_attribute(backend::AbstractTransformationBackend, attr)
+
+Retrieve some attribute `attr` from the `backend`. This is a general
+purpose method typically used to query optimizer related information.
+This serves as an extension point for new backend types. No extension
+is needed for [`JuMPBackend`](@ref)s.
+"""
+function JuMP.get_attribute(
+    backend::AbstractTransformationBackend,
+    attr
+    )
+    error("`JuMP.get_attribute` not implemented for transformation backends " *
+          "of type `$(typeof(backend))`.")
+end
+
+"""
+    JuMP.get_attribute(model::InfiniteModel, attr)
+
+Retrieve an attribute `attr` from the transformation backend of
+`model`. Typically, this corresponds to `MOI.AbstractOptimizerAttribute`s.
+"""
+function JuMP.get_attribute(model::InfiniteModel, attr)
+    return JuMP.get_attribute(model.backend, attr)
+end
+
+"""
+    JuMP.set_attribute(backend::AbstractTransformationBackend, attr, value)::Nothing
+
+Specify some attribute `attr` to the `backend`. This is a general
+purpose method typically used to set optimizer related information.
+This serves as an extension point for new backend types. No extension
+is needed for [`JuMPBackend`](@ref)s.
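+
+For example, a new backend type might support this with a sketch like the
+following (where `MyBackend` and its `settings` field are hypothetical):
+```julia
+function JuMP.set_attribute(backend::MyBackend, attr, value)
+    backend.settings[attr] = value # store the attribute for later retrieval
+    return
+end
+```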
+""" +function JuMP.set_attribute( + backend::AbstractTransformationBackend, + attr, + value + ) + error("`JuMP.set_attribute` not implemented for transformation backends " * + "of type `$(typeof(backend))`.") +end + +""" + JuMP.set_attribute(model::InfiniteModel, attr, value)::Nothing + +Specify an attribute `attr` to the transformation backend of +`model`. Typically, this corresponds to `MOI.AbstractOptimizerAttribute`s. +""" +function JuMP.set_attribute(model::InfiniteModel, attr, value) + return JuMP.set_attribute(model.backend, attr, value) +end + +""" + Base.empty!(backend::AbstractTransformationBackend) + +Empty `backend` of all its contents. For new backend types, this needs to +be defined such that `empty!(model::InfiniteModel)` works. For +[`JuMPBackend`](@ref)s this defaults to +```julia +empty!(transformation_model(backend)) +empty!(transformation_data(backend)) +``` +""" +function Base.empty!(backend::AbstractTransformationBackend) + error("`empty!` not implemented for transformation backends of type " * + "`$(typeof(backend))`.") +end + +""" + build_transformation_backend!( + model::InfiniteModel, + backend::AbstractTransformationBackend; + [kwargs...] + )::Nothing + +Given `model`, transform it into the representation used by `backend`. +Once completed, `backend` should be ready to be solved. This serves as +an extension point for new types of backends. If needed, keyword arguments +can be added. +""" +function build_transformation_backend!( + model::InfiniteModel, + backend::AbstractTransformationBackend; + kwargs... + ) + error("`build_transformation_backend!` not implemented for transformation backends " * + "of type `$(typeof(backend))`.") +end + +""" + build_transformation_backend!(model::InfiniteModel; [kwargs...])::Nothing + +Build the model used by the underlying transformation backend stored in `model` such +that it is ready to solve. Specifically, translate the InfiniteOpt formulation +stored in `model` into (typically an appoximate) formulation that is compatible +with the backend. This is called automatically by `optimize!`; however, it this +method can be used to build the transformation model without solving it. + +**Example** +```julia-repl +julia> build_transformation_backend!(model) + +julia> transformation_model_ready(model) +true +``` +""" +function build_transformation_backend!(model::InfiniteModel; kwargs...) + if num_parameters(model, InfiniteParameter) == 0 + @warn("Finite models (i.e., `InfiniteModel`s with no infinite " * + "parameters) should be modeled directly via a `Model` in JuMP.jl.") + end + build_transformation_backend!(model, model.backend; kwargs...) + set_transformation_backend_ready(model, true) + return +end + +""" + JuMP.set_optimize_hook( + model::InfiniteModel, + hook::Union{Function, Nothing} + )::Nothing + +Set the function `hook` as the optimize hook for `model` where `hook` should +have be of the form `hook(model::InfiniteModel; hook_specfic_kwargs..., kwargs...)`. +The `kwargs` are those passed to [`optimize!`](@ref). The `hook_specifc_kwargs` +are passed as additional keywords by the user when they call [`optimize!`](@ref). + +## Notes + +* The optimize hook should generally modify the model, or some external state +in some way, and then call `optimize!(model; ignore_optimize_hook = true)` to +optimize the problem, bypassing the hook. +* Use `set_optimize_hook(model, nothing)` to unset an optimize hook. 
+""" +function JuMP.set_optimize_hook( + model::InfiniteModel, + hook::Union{Function, Nothing} + ) + model.optimize_hook = hook + set_transformation_backend_ready(model, false) + return +end + +""" + JuMP.optimize!(backend::AbstractTransformationBackend)::Nothing + +Invoke the relevant routines to solve the underlying model used by +`backend`. Note that [`build_transformation_backend!`](@ref) will be +called before this method is. This needs to be extended for new +backend types, but no extension is needed for [`JuMPBackend`](@ref)s. +""" +function JuMP.optimize!(backend::AbstractTransformationBackend) + error("`JuMP.optimize!` not implemented for transformation backends " * + "of type `$(typeof(backend))`.") +end + + +""" + JuMP.optimize!(model::InfiniteModel; [kwargs...])::Nothing + +Extend `JuMP.optimize!` to optimize infinite models using the internal +optimizer model. Calls [`build_transformation_backend!`](@ref) if the optimizer +model isn't up-to-date. The `kwargs` correspond to keyword arguments passed to +[`build_transformation_backend!`](@ref) if any are defined. The `kwargs` can also +include arguments that are passed to an optimize hook if one was set with +[`JuMP.set_optimize_hook`](@ref). + +**Example** +```julia-repl +julia> optimize!(model) + +julia> has_values(model) +true +``` +""" +function JuMP.optimize!( + model::InfiniteModel; + ignore_optimize_hook = isnothing(model.optimize_hook), + kwargs... + ) + if !ignore_optimize_hook + return model.optimize_hook(model; kwargs...) + end + if !transformation_backend_ready(model) + build_transformation_backend!(model; kwargs...) + end + JuMP.optimize!(model.backend) + return +end + +################################################################################ +# JUMP-BASED OPTIMIZER API +################################################################################ +# Single argument methods +for func in (:set_silent, :unset_silent, :bridge_constraints, + :unset_time_limit_sec, :time_limit_sec, :solver_name, :backend, + :mode, :unsafe_backend, :compute_conflict!, :copy_conflict, + :set_string_names_on_creation) + @eval begin + @doc """ + JuMP.$($func)(backend::AbstractTransformationBackend) + + Implment `JuMP.$($func)` for transformation backends. If applicable, this + should be extended for new backend types. No extension is needed for + [`JuMPBackend`](@ref)s. + """ + function JuMP.$func(backend::AbstractTransformationBackend) + error("`JuMP.$($func)` not defined for backends of type " * + "`$(typeof(backend))`.") + end + + # Define for JuMPBackend + function JuMP.$func(backend::JuMPBackend) + return JuMP.$func(backend.model) + end + + @doc """ + JuMP.$($func)(model::InfiniteModel) + + Extend [`JuMP.$($func)`](https://jump.dev/JuMP.jl/v1/api/JuMP/#$($func)) + to accept `InfiniteModel`s. This relies on the underlying transformation + backend supporting `JuMP.$($func)`. + """ + function JuMP.$func(model::InfiniteModel) + return JuMP.$func(model.backend) + end + end +end + +# Two argument setters +for func in (:set_time_limit_sec, :set_string_names_on_creation, :add_bridge) + @eval begin + @doc """ + JuMP.$($func)(backend::AbstractTransformationBackend, value) + + Implment `JuMP.$($func)` for transformation backends. If applicable, this + should be extended for new backend types. No extension is needed for + [`JuMPBackend`](@ref)s. 
+        """
+        function JuMP.$func(backend::AbstractTransformationBackend, value)
+            error("`JuMP.$($func)` not defined for backends of type " *
+                  "`$(typeof(backend))`.")
+        end
+
+        # Define for JuMPBackend
+        function JuMP.$func(backend::JuMPBackend, value)
+            return JuMP.$func(backend.model, value)
+        end
+
+        @doc """
+            JuMP.$($func)(model::InfiniteModel, value)
+
+        Extend [`JuMP.$($func)`](https://jump.dev/JuMP.jl/v1/api/JuMP/#$($func))
+        to accept `InfiniteModel`s. This relies on the underlying transformation
+        backend supporting `JuMP.$($func)`.
+        """
+        function JuMP.$func(model::InfiniteModel, value)
+            return JuMP.$func(model.backend, value)
+        end
+    end
+end
+
+"""
+    JuMP.print_active_bridges(
+        io::IO,
+        backend::AbstractTransformationBackend,
+        args...
+        )
+
+Implement `JuMP.print_active_bridges` for transformation backends. If applicable, this
+should be extended for new backend types. No extension is needed for
+[`JuMPBackend`](@ref)s. Here, `args` can be one of the following:
+- empty (print all the bridges)
+- the objective type (print the objective bridges)
+- a function type and set type from a constraint
+- a constraint set type
+"""
+function JuMP.print_active_bridges(
+    io::IO,
+    backend::AbstractTransformationBackend,
+    args...
+    )
+    error("`JuMP.print_active_bridges` not defined for backends of type " *
+          "`$(typeof(backend))`.")
+end
+
+# Define for JuMPBackend
+function JuMP.print_active_bridges(io::IO, backend::JuMPBackend, args...)
+    return JuMP.print_active_bridges(io, backend.model, args...)
+end
+
+"""
+    JuMP.print_active_bridges([io::IO = stdout,] model::InfiniteModel)
+
+    JuMP.print_active_bridges([io::IO = stdout,] model::InfiniteModel, ::Type{<:JuMP.AbstractJuMPScalar})
+
+    JuMP.print_active_bridges([io::IO = stdout,] model::InfiniteModel, ::Type{<:JuMP.AbstractJuMPScalar}, ::Type{<:MOI.AbstractSet})
+
+    JuMP.print_active_bridges([io::IO = stdout,] model::InfiniteModel, ::Type{<:MOI.AbstractSet})
+
+Extend [`JuMP.print_active_bridges`](https://jump.dev/JuMP.jl/v1/api/JuMP/#print_active_bridges)
+to accept `InfiniteModel`s. This relies on the underlying transformation
+backend supporting `JuMP.print_active_bridges`.
+"""
+function JuMP.print_active_bridges(io::IO, model::InfiniteModel, args...)
+    return JuMP.print_active_bridges(io, model.backend, args...)
+end
+function JuMP.print_active_bridges(model::InfiniteModel, args...)
+    return JuMP.print_active_bridges(Base.stdout, model.backend, args...)
+end
+
+"""
+    JuMP.print_bridge_graph(
+        io::IO,
+        backend::AbstractTransformationBackend
+        )
+
+Implement `JuMP.print_bridge_graph` for transformation backends. If applicable, this
+should be extended for new backend types. No extension is needed for
+[`JuMPBackend`](@ref)s.
+"""
+function JuMP.print_bridge_graph(
+    io::IO,
+    backend::AbstractTransformationBackend,
+    )
+    error("`JuMP.print_bridge_graph` not defined for backends of type " *
+          "`$(typeof(backend))`.")
+end
+
+# Define for JuMPBackend
+function JuMP.print_bridge_graph(io::IO, backend::JuMPBackend)
+    return JuMP.print_bridge_graph(io, backend.model)
+end
+
+"""
+    JuMP.print_bridge_graph([io::IO = stdout,] model::InfiniteModel)
+
+Extend [`JuMP.print_bridge_graph`](https://jump.dev/JuMP.jl/v1/api/JuMP/#print_bridge_graph)
+to accept `InfiniteModel`s. This relies on the underlying transformation
+backend supporting `JuMP.print_bridge_graph`.
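+
+For example, the following prints the bridge graph of the backend's underlying
+JuMP model (assuming `model` uses a JuMP-based backend):
+```julia
+print_bridge_graph(model)
+```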
+""" +function JuMP.print_bridge_graph(io::IO, model::InfiniteModel) + return JuMP.print_bridge_graph(io, model.backend) +end +function JuMP.print_bridge_graph(model::InfiniteModel) + return JuMP.print_bridge_graph(Base.stdout, model.backend) +end + +""" + JuMP.set_optimizer( + backend::AbstractTransformationBackend, + optimizer_constructor; + [kwargs...] + )::Nothing + +Specify the optimizer `optimizer_constructor` that should be used by `backend`. +This is intended as an extension point for new transformation backend types. +Keyword arguments can be added as needed. No extension is necessary for +[`JuMPBackend`](@ref)s. +""" +function JuMP.set_optimizer( + backend::AbstractTransformationBackend, + optimizer_constructor; + kwargs... + ) + error("`JuMP.set_optimizer` not defined for transformation backends " * + "of type `$(typeof(backend))` with optimizer input " * + "`$(optimizer_constructor)`.") +end + +# JuMPBackend +function JuMP.set_optimizer( + backend::JuMPBackend, + optimizer_constructor; + add_bridges::Bool = true + ) + JuMP.set_optimizer(backend.model, optimizer_constructor, + add_bridges = add_bridges) + return +end + +""" + JuMP.set_optimizer( + model::InfiniteModel, + [optimizer_constructor; + add_bridges::Bool = true, + kwargs...] + ) + +Extend `JuMP.set_optimizer` to set optimizer used by the underlying +transformation backend associated with `model`. If a backend uses +`JuMP` then `add_bridges` can be used as a keyword argument. + +**Example** +```julia-repl +julia> set_optimizer(model, HiGHS.Optimizer) + +julia> transformation_model(model) +A JuMP Model +Feasibility problem with: +Variables: 0 +Model mode: AUTOMATIC +CachingOptimizer state: EMPTY_OPTIMIZER +Solver name: HiGHS +``` +""" +function JuMP.set_optimizer( + model::InfiniteModel, + optimizer_constructor; + kwargs... + ) + return JuMP.set_optimizer(model.backend, optimizer_constructor; kwargs...) +end + +# JuMPBackend dispatches +transformation_model(backend::JuMPBackend) = backend.model +transformation_data(backend::JuMPBackend) = backend.data +function JuMP.get_attribute(backend::JuMPBackend, attr) + return JuMP.get_attribute(backend.model, attr) +end +function JuMP.set_attribute(backend::JuMPBackend, attr, val) + return JuMP.set_attribute(backend.model, attr, val) +end +function Base.empty!(backend::JuMPBackend) + empty!(transformation_model(backend)) + empty!(transformation_data(backend)) + return backend +end +function JuMP.optimize!(backend::JuMPBackend) + return JuMP.optimize!(backend.model) +end + +################################################################################ +# VARIABLE MAPPING API +################################################################################ +""" + transformation_model_variable( + vref::GeneralVariableRef, + backend::AbstractTransformationBackend; + [kwargs...] + ) + +Return the variable(s) that map to `vref` used by `backend`. This serves as an +extension point for new backend types. If needed, keywords arguments can be +added. +""" +function transformation_model_variable( + vref::GeneralVariableRef, + backend::AbstractTransformationBackend; + kwargs... + ) + error("`transformation_model_variable` not defined for backends of type " * + "`$(typeof(backend))`.") +end + +""" + transformation_model_variable(vref::GeneralVariableRef; [kwargs...]) + +Returns the variable(s) used by the transformation backend to represent `vref`. +Certain backends may also allow the use of keyward arguments. 
+
+The default backend `TranscriptionOpt` uses the keyword arguments:
+- `label::Type{<:AbstractSupportLabel} = PublicLabel`
+- `ndarray::Bool = false`
+By default only variables corresponding to public supports are returned; the
+full set can be accessed via `label = All`. Moreover, all the transcribed variables
+of infinite variables are returned as a list. However, an n-dimensional array
+can be obtained via `ndarray = true` which is handy when the variable has multiple
+infinite parameter dependencies.
+
+**Example**
+```julia-repl
+julia> transformation_model_variable(x) # infinite variable
+2-element Array{VariableRef,1}:
+ x(support: 1)
+ x(support: 2)
+
+julia> transformation_model_variable(z) # finite variable
+z
+```
+"""
+function transformation_model_variable(vref::GeneralVariableRef; kwargs...)
+    model = JuMP.owner_model(vref)
+    return transformation_model_variable(vref, model.backend; kwargs...)
+end
+
+"""
+    variable_supports(
+        vref::DecisionVariableRef,
+        backend::AbstractTransformationBackend;
+        [kwargs...]
+        )
+
+Return the supports associated with the mappings of `vref` in `backend`.
+This dispatches off of `backend` which permits transformation backend extensions.
+This should throw an error if `vref` is not associated with the variable mappings
+stored in `backend`. Keyword arguments can be added as needed. Note that
+no extension is necessary for point or finite variables.
+"""
+function variable_supports(vref, backend::AbstractTransformationBackend; kwargs...)
+    error("`variable_supports` not implemented for transformation backends of type " *
+          "`$(typeof(backend))` and/or variable type $(typeof(vref)).")
+end
+
+# FiniteRef
+function variable_supports(
+    vref::FiniteRef,
+    backend::AbstractTransformationBackend;
+    kwargs...
+    )
+    return ()
+end
+
+"""
+    supports(
+        vref::DecisionVariableRef;
+        [label::Type{<:AbstractSupportLabel} = PublicLabel,
+        ndarray::Bool = false,
+        kwargs...]
+        )::Vector{<:Tuple}
+
+Return the supports associated with `vref` in the transformation
+model. Errors if [`InfiniteOpt.variable_supports`](@ref) has not been extended for the
+transformation backend type or if `vref` is not reformulated in the transformation model.
+
+The keyword arguments `label` and `ndarray` are what `TranscriptionOpt` employs
+and `kwargs` denote extra ones that user extensions may employ in accordance with
+their implementation of `variable_supports`. Errors if such an
+extension has not been written.
+
+By default only the public supports are returned; the
+full set can be accessed via `label = All`. Moreover, the supports of infinite
+variables are returned as a list. However, an n-dimensional array
+can be obtained via `ndarray = true` which is handy when the variable has multiple
+infinite parameter dependencies.
+
+**Example**
+```julia-repl
+julia> supports(vref)
+2-element Array{Tuple{Float64},1}:
+ (0.0,)
+ (1.0,)
+```
+"""
+function supports(
+    vref::Union{DecisionVariableRef, MeasureRef, ParameterFunctionRef};
+    kwargs...
+    )
+    backend = JuMP.owner_model(vref).backend
+    return variable_supports(vref, backend; kwargs...)
+end
+
+################################################################################
+#                           EXPRESSION MAPPING API
+################################################################################
+"""
+    transformation_model_expression(expr, backend::AbstractTransformationBackend; [kwargs...])
+
+Return the reformulation expression(s) stored in the transformation model that correspond
+to `expr`. This needs to be defined for extensions that implement a new
+[`AbstractTransformationBackend`](@ref). Keyword arguments can be added as needed.
+Note that if `expr` is a `GeneralVariableRef` this just dispatches to
+`transformation_model_variable`.
+"""
+function transformation_model_expression(
+    expr,
+    backend::AbstractTransformationBackend;
+    kwargs...
+    )
+    error("`transformation_model_expression` not defined for transformation backends " *
+          "of type `$(typeof(backend))` and expression type `$(typeof(expr))`.")
+end
+
+# Define for variable reference expressions
+function transformation_model_expression(
+    expr::GeneralVariableRef,
+    backend::AbstractTransformationBackend;
+    kwargs...
+    )
+    return transformation_model_variable(expr, backend; kwargs...)
+end
+
+"""
+    transformation_model_expression(
+        expr::JuMP.AbstractJuMPScalar;
+        [label::Type{<:AbstractSupportLabel} = PublicLabel,
+        ndarray::Bool = false,
+        kwargs...]
+        )
+
+Return the reformulation expression(s) stored in the transformation model that correspond
+to `expr`. Also errors if no such expression can be found in
+the transformation model (meaning one or more of the underlying variables have not
+been transformed).
+
+The keyword arguments `label` and `ndarray` are what `TranscriptionOpt` employs
+and `kwargs` denote extra ones that user extensions may employ in accordance with
+their implementation of [`transformation_model_expression`](@ref). Errors if such an
+extension has not been written.
+
+By default only the expressions associated with public supports are returned; the
+full set can be accessed via `label = All`. Moreover, infinite expressions are
+returned as a list corresponding to their supports. However, an n-dimensional array
+can be obtained via `ndarray = true` which is handy when the expression has multiple
+infinite parameter dependencies. The corresponding supports are obtained via
+`supports` using the same keyword arguments.
+
+**Example**
+```julia-repl
+julia> transformation_model_expression(my_expr) # finite expression
+x(support: 1) - y
+```
+"""
+function transformation_model_expression(expr::JuMP.AbstractJuMPScalar; kwargs...)
+    model = JuMP.owner_model(expr)
+    if isnothing(model)
+        return zero(JuMP.AffExpr) + JuMP.constant(expr)
+    else
+        return transformation_model_expression(expr, model.backend; kwargs...)
+    end
+end
+
+"""
+    expression_supports(
+        expr,
+        backend::AbstractTransformationBackend;
+        [kwargs...]
+        )
+
+Return the supports associated with the mappings of `expr` in `backend`.
+This should throw an error if `expr` is not associated with the expression mappings
+stored in `backend`. Keyword arguments can be added as needed. Note that
+if `expr` is a `GeneralVariableRef` this just dispatches to `variable_supports`.
+"""
+function expression_supports(expr, backend::AbstractTransformationBackend; kwargs...)
+    error("`expression_supports` not implemented for transformation backends of type " *
+          "`$(typeof(backend))` and/or expressions of type `$(typeof(expr))`.")
+end
+
+# Variable reference expressions
+function expression_supports(
+    vref::GeneralVariableRef,
+    backend::AbstractTransformationBackend;
+    kwargs...
+    )
+    return variable_supports(dispatch_variable_ref(vref), backend; kwargs...)
+end
+
+"""
+    supports(
+        expr::JuMP.AbstractJuMPScalar;
+        [label::Type{<:AbstractSupportLabel} = PublicLabel,
+        ndarray::Bool = false,
+        kwargs...]
+        )
+
+Return the support associated with `expr`. Errors if `expr` is
+not associated with the expression mappings stored in the transformation model.
+
+The keyword arguments `label` and `ndarray` are what `TranscriptionOpt` employs
+and `kwargs` denote extra ones that user extensions may employ in accordance with
+their implementation of `expression_supports`. Errors if such an
+extension has not been written.
+
+By default only the public supports are returned; the
+full set can be accessed via `label = All`. Moreover, the supports of infinite
+expressions are returned as a list. However, an n-dimensional array
+can be obtained via `ndarray = true` which is handy when the expression has multiple
+infinite parameter dependencies.
+
+**Example**
+```julia-repl
+julia> supports(expr)
+2-element Array{Tuple{Float64},1}:
+ (0.0,)
+ (1.0,)
+```
+"""
+function supports(expr::JuMP.AbstractJuMPScalar; kwargs...)
+    model = JuMP.owner_model(expr)
+    if isnothing(model)
+        return ()
+    else
+        return expression_supports(expr, model.backend; kwargs...)
+    end
+end
+
+################################################################################
+#                           CONSTRAINT MAPPING API
+################################################################################
+"""
+    transformation_model_constraint(
+        cref::InfOptConstraintRef,
+        backend::AbstractTransformationBackend;
+        [kwargs...]
+        )
+
+Return the reformulation constraint(s) stored in the transformation model
+that correspond to `cref`. This needs to be defined for extensions that
+implement a custom transformation model type. Keyword arguments can be
+added as needed.
+"""
+function transformation_model_constraint(
+    cref::InfOptConstraintRef,
+    backend::AbstractTransformationBackend;
+    kwargs...
+    )
+    error("`transformation_model_constraint` not implemented for " *
+          "transformation backends of type `$(typeof(backend))`.")
+end
+
+"""
+    transformation_model_constraint(
+        cref::InfOptConstraintRef;
+        [label::Type{<:AbstractSupportLabel} = PublicLabel,
+        ndarray::Bool = false,
+        kwargs...]
+        )
+
+Return the reformulation constraint(s) stored in the transformation model that
+correspond to `cref`. Errors if no such constraint can be found in
+the transformation model.
+
+The keyword arguments `label` and `ndarray` are what `TranscriptionOpt` employs
+and `kwargs` denote extra ones that user extensions may employ in accordance with
+their implementation of [`transformation_model_constraint`](@ref). Errors if such an
+extension has not been written.
+
+By default only the constraints associated with public supports are returned; the
+full set can be accessed via `label = All`. Moreover, infinite constraints are
+returned as a list corresponding to their supports. However, an n-dimensional array
+can be obtained via `ndarray = true` which is handy when the constraint has multiple
+infinite parameter dependencies. The corresponding supports are obtained via
+`supports` using the same keyword arguments.
+
+**Example**
+```julia-repl
+julia> transformation_model_constraint(c1) # finite constraint
+c1 : x(support: 1) - y <= 3.0
+```
+"""
+function transformation_model_constraint(
+    cref::InfOptConstraintRef;
+    kwargs...
+    )
+    backend = JuMP.owner_model(cref).backend
+    return transformation_model_constraint(cref, backend; kwargs...)
+end
+
+"""
+    constraint_supports(
+        cref::InfOptConstraintRef,
+        backend::AbstractTransformationBackend;
+        [kwargs...]
+        )
+
+Return the supports associated with the mappings of `cref` in `backend`.
+This should throw an error if `cref` is not associated with the constraint mappings
+stored in `backend`. Keyword arguments can be added as needed.
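+
+For example, an extension might implement this along these lines (where
+`MyBackend` and its `constraint_to_supports` mapping are hypothetical):
+```julia
+function InfiniteOpt.constraint_supports(
+    cref::InfOptConstraintRef,
+    backend::MyBackend;
+    kwargs...
+    )
+    # look up the supports that were stored when `cref` was transformed
+    return backend.constraint_to_supports[cref]
+end
+```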
+""" +function constraint_supports( + cref::InfOptConstraintRef, + backend::AbstractTransformationBackend; + kwargs... + ) + error("`constraint_supports` not implemented for transformation model backends " * + "of type `$(typeof(backend))`.") +end + +""" + supports(cref::InfOptConstraintRef; + [label::Type{<:AbstractSupportLabel} = PublicLabel, + ndarray::Bool = false, + kwargs...]) + +Return the support associated with `cref`. Errors if `cref` is +not associated with the constraint mappings stored in the transformation model. + +The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ +and `kwargs` denote extra ones that user extensions may employ in accordance with +their implementation of `constraint_supports`. Errors if such an +extension has not been written. + +By default only the public supports are returned, the +full set can be accessed via `label = All`. Moreover, the supports of infinite +constraints are returned as a list. However, a n-dimensional array +can be obtained via `ndarray = true` which is handy when the constraint has multiple +infinite parameter dependencies. + +**Example** +```julia-repl +julia> supports(cref) +2-element Array{Tuple{Float64},1}: + (0.0,) + (1.0,) +``` +""" +function supports(cref::InfOptConstraintRef; kwargs...) + model = JuMP.owner_model(cref) + return constraint_supports(cref, model.backend; kwargs...) +end diff --git a/src/constraints.jl b/src/constraints.jl index 6475aa61..498bd267 100644 --- a/src/constraints.jl +++ b/src/constraints.jl @@ -107,7 +107,7 @@ function _set_core_constraint_object( constr::JuMP.AbstractConstraint ) _adaptive_data_update(cref, constr, _data_object(cref)) - set_optimizer_model_ready(JuMP.owner_model(cref), false) + set_transformation_backend_ready(JuMP.owner_model(cref), false) return end @@ -303,7 +303,7 @@ function JuMP.add_constraint( cref = InfOptConstraintRef(model, cindex) # update the variable mappings and model status _update_var_constr_mapping(vrefs, cref) - set_optimizer_model_ready(model, false) + set_transformation_backend_ready(model, false) # clear out the name dictionary model.name_to_constr = nothing # return the constraint reference @@ -832,7 +832,7 @@ function set_domain_restrictions( # set the new restrictions model.constraint_restrictions[JuMP.index(cref)] = restrictions # update status - set_optimizer_model_ready(JuMP.owner_model(cref), false) + set_transformation_backend_ready(JuMP.owner_model(cref), false) return end @@ -898,7 +898,7 @@ function add_domain_restrictions( model.constraint_restrictions[JuMP.index(cref)] = new_restrictions end # update the optimizer model status - set_optimizer_model_ready(JuMP.owner_model(cref), false) + set_transformation_backend_ready(JuMP.owner_model(cref), false) return end @@ -923,7 +923,7 @@ function delete_domain_restrictions(cref::InfOptConstraintRef) # delete the restrictions if there are any delete!(JuMP.owner_model(cref).constraint_restrictions, JuMP.index(cref)) # update status - set_optimizer_model_ready(JuMP.owner_model(cref), false) + set_transformation_backend_ready(JuMP.owner_model(cref), false) return end @@ -966,6 +966,6 @@ function JuMP.delete(model::InfiniteModel, cref::InfOptConstraintRef) # delete constraint information _delete_data_object(cref) # reset optimizer model status - set_optimizer_model_ready(model, false) + set_transformation_backend_ready(model, false) return end diff --git a/src/datatypes.jl b/src/datatypes.jl index 910d38f1..b5fce1b3 100644 --- a/src/datatypes.jl +++ b/src/datatypes.jl @@ -1274,23 
 #                          TRANSFORMATION BACKEND
 ################################################################################
 """
+    AbstractTransformationBackend
 
+Abstract type for transformation backends to `InfiniteModel`s. Any user-defined
+backend type should inherit this type.
 """
 abstract type AbstractTransformationBackend end
 
 """
+    AbstractJuMPTag
 
+Abstract type to enable dispatch between different transformation backends that
+use the extension API provided by [`JuMPBackend`](@ref).
 """
 abstract type AbstractJuMPTag end
 
 """
+    JuMPBackend{TAG <: AbstractJuMPTag, T, D} <: AbstractTransformationBackend
 
+A transformation backend type for transformation backends that use JuMP `Model`s.
+This serves as the main extension point for defining new JuMP-based backends. In
+this case, a new [`AbstractJuMPTag`](@ref) should be made with which the
+`JuMPBackend` is created:
+```julia
+backend = JuMPBackend{MyTag}(model::JuMP.GenericModel, data)
+```
+where `data` stores information used by the backend (typically mapping information
+to the overlying `InfiniteModel`).
+
+The JuMP `Model` can be accessed by [`transformation_model`](@ref) and the `data`
+can be retrieved via [`transformation_data`](@ref).
 """
-struct JuMPBackend{T <: AbstractJuMPTag, D, C} <: AbstractTransformationBackend
-    model::JuMP.Model
-    tag::T
+struct JuMPBackend{TAG <: AbstractJuMPTag, T, D} <: AbstractTransformationBackend
+    model::JuMP.GenericModel{T}
     data::D
-    optimizer_constructor::C
+    # constructor
+    function JuMPBackend{TYPE}(
+        model::JuMP.GenericModel{T},
+        data::D
+        ) where {TYPE <: AbstractJuMPTag, T, D}
+        return new{TYPE, T, D}(model, data)
+    end
 end
 
 ################################################################################
@@ -1355,18 +1379,23 @@ mutable struct InfiniteModel <: JuMP.AbstractModel
     optimize_hook::Any
 end
 
-# TODO UPDATE THE DOCSTRING ONCE THE SYNTAX IS FINALIZED
 """
-    InfiniteModel([optimizer_constructor];
-                  [OptimizerModel::Function = TranscriptionModel,
-                  add_bridges::Bool = true, optimizer_model_kwargs...])
+    InfiniteModel([backend::AbstractTransformationBackend = TranscriptionBackend()])
 
-Return a new infinite model where an optimizer is specified if an
-`optimizer_constructor` is given. The optimizer
-can also later be set with the [`JuMP.set_optimizer`](@ref) call. By default
-the `optimizer_model` data field is initialized with a
-[`TranscriptionModel`](@ref), but a different type of model can be assigned via
-[`set_optimizer_model`](@ref) as can be required by extensions.
+Return a new infinite model that uses `backend`. For the default case with
+`TranscriptionBackend`, the `optimizer_constructor` and other arguments can be
+given directly:
+```julia
+InfiniteModel(
+    optimizer_constructor;
+    [add_bridges::Bool = true]
+    )
+```
+where `optimizer_constructor` and `add_bridges` are passed on to the underlying
+JuMP `Model`.
+
+A different transformation backend can be specified later on using
+[`set_transformation_backend`](@ref).
 
 **Example**
 ```jldoctest
 julia> using InfiniteOpt, JuMP, Ipopt;
 
 julia> model = InfiniteModel()
 An InfiniteOpt Model
 Feasibility problem with:
-Finite Parameters: 0
-Infinite Parameters: 0
-Variables: 0
-Measures: 0
-Derivatives: 0
-Optimizer model backend information:
-Model mode: AUTOMATIC
-CachingOptimizer state: NO_OPTIMIZER
-Solver name: No optimizer attached.
+ Finite parameters: 0 + Infinite parameters: 0 + Variables: 0 + Derivatives: 0 + Measures: 0 +Transformation backend information: + Backend type: TranscriptionBackend + Solver name: No optimizer attached. + Transformation built and up-to-date: false julia> model = InfiniteModel(Ipopt.Optimizer) An InfiniteOpt Model Feasibility problem with: -Finite Parameters: 0 -Infinite Parameters: 0 -Variables: 0 -Measures: 0 -Derivatives: 0 -Optimizer model backend information: -Model mode: AUTOMATIC -CachingOptimizer state: EMPTY_OPTIMIZER -Solver name: Ipopt + Finite parameters: 0 + Infinite parameters: 0 + Variables: 0 + Derivatives: 0 + Measures: 0 +Transformation backend information: + Backend type: TranscriptionBackend + Solver name: Ipopt + Transformation built and up-to-date: false ``` """ -function InfiniteModel(backend::AbstractTransformationBackend = TranscriptionModel()) +function InfiniteModel(backend::AbstractTransformationBackend = TranscriptionBackend()) return InfiniteModel( # Parameters MOIUC.CleverDict{IndependentParameterIndex, ScalarParameterData{<:IndependentParameter}}(), @@ -1444,33 +1473,11 @@ function InfiniteModel(backend::AbstractTransformationBackend = TranscriptionMod ) end -## Set the optimizer_constructor depending on what it is -# MOI.OptimizerWithAttributes -# function _set_optimizer_constructor( -# model::InfiniteModel, -# constructor::MOI.OptimizerWithAttributes -# ) -# model.optimizer_constructor = constructor.optimizer_constructor -# return -# end - -# No attributes -# function _set_optimizer_constructor(model::InfiniteModel, constructor) -# model.optimizer_constructor = constructor -# return -# end - # Dispatch for InfiniteModel call with optimizer constructor -# function InfiniteModel( -# optimizer_constructor; -# OptimizerModel::Function = TranscriptionModel, -# kwargs... -# ) -# model = InfiniteModel() -# model.optimizer_model = OptimizerModel(optimizer_constructor; kwargs...) -# _set_optimizer_constructor(model, optimizer_constructor) -# return model -# end +function InfiniteModel(optimizer_constructor; kwargs...) + backend = TranscriptionBackend(optimizer_constructor; kwargs...) 
+ return InfiniteModel(backend) +end # Define basic InfiniteModel extension functions Base.broadcastable(model::InfiniteModel) = Ref(model) @@ -1528,7 +1535,7 @@ function Base.empty!(model::InfiniteModel) empty!(model.operators) empty!(model.op_lookup) empty!(model.obj_dict) - empty!(model.optimizer_model) + empty!(model.backend) model.ready_to_optimize = false empty!(model.ext) model.optimize_hook = nothing diff --git a/src/derivative_evaluations.jl b/src/derivative_evaluations.jl index e3c3e67b..bab385c6 100644 --- a/src/derivative_evaluations.jl +++ b/src/derivative_evaluations.jl @@ -125,11 +125,20 @@ function support_label(method::GenerativeDerivativeMethod) end """ - make_reduced_expr(vref::GeneralVariableRef, pref::GeneralVariableRef, - support::Float64, write_model::Union{InfiniteModel, JuMP.Model}) + make_reduced_expr( + vref::GeneralVariableRef, + pref::GeneralVariableRef, + support::Float64, + write_model::Union{InfiniteModel, AbstractTransformationBackend} + ) - make_reduced_expr(vref::GeneralVariableRef, pref::GeneralVariableRef, - supports::Vector{Float64}, idx::Int, write_model::Union{InfiniteModel, JuMP.Model}) + make_reduced_expr( + vref::GeneralVariableRef, + pref::GeneralVariableRef, + supports::Vector{Float64}, + idx::Int, + write_model::Union{InfiniteModel, AbstractTransformationBackend} + ) Given the argument variable `vref` and the operator parameter `pref` from a derivative, build and return the reduced expression in accordance to the support @@ -143,7 +152,7 @@ function make_reduced_expr( vref::GeneralVariableRef, pref::GeneralVariableRef, support::Float64, - write_model::JuMP.AbstractModel + write_model::Union{InfiniteModel, AbstractTransformationBackend} ) return make_reduced_expr(vref, _index_type(vref), pref, support, write_model) end @@ -154,7 +163,7 @@ function make_reduced_expr( pref::GeneralVariableRef, supps::Vector{Float64}, idx, - write_model::JuMP.AbstractModel + write_model::Union{InfiniteModel, AbstractTransformationBackend} ) return make_reduced_expr(vref, pref, supps[idx], write_model) end @@ -233,7 +242,7 @@ end order::Int, idx, supps::Vector{Float64}, - write_model::JuMP.AbstractModel, + write_model::Union{InfiniteModel, AbstractTransformationBackend}, method::AbstractDerivativeMethod, expr_params... ) @@ -261,7 +270,7 @@ function make_indexed_derivative_expr( order::Int, idx, supps::Vector{Float64}, - write_model::JuMP.AbstractModel, + write_model::Union{InfiniteModel, AbstractTransformationBackend}, method::AbstractDerivativeMethod, constants... 
) @@ -325,7 +334,7 @@ function make_indexed_derivative_expr( order::Int, idx, supps::Vector{Float64}, # ordered - write_model::JuMP.AbstractModel, + write_model::Union{InfiniteModel, AbstractTransformationBackend}, ::FiniteDifference{Forward}, supp_product ) @@ -358,7 +367,7 @@ function make_indexed_derivative_expr( order::Int, idx, supps::Vector{Float64}, # ordered - write_model::JuMP.AbstractModel, + write_model::Union{InfiniteModel, AbstractTransformationBackend}, ::FiniteDifference{Central}, supp_product ) @@ -411,7 +420,7 @@ function make_indexed_derivative_expr( order::Int, idx, supps::Vector{Float64}, # ordered - write_model::JuMP.AbstractModel, + write_model::Union{InfiniteModel, AbstractTransformationBackend}, ::FiniteDifference{Backward}, supp_product ) @@ -444,7 +453,7 @@ function make_indexed_derivative_expr( order::Int, idx, supps::Vector{Float64}, # ordered - write_model::JuMP.AbstractModel, + write_model::Union{InfiniteModel, AbstractTransformationBackend}, ::OrthogonalCollocation{MeasureToolbox.GaussLobatto}, lb_idx, coeffs... @@ -494,7 +503,7 @@ end dref::GeneralVariableRef, vref::GeneralVariableRef, method::AbstractDerivativeMethod, - write_model::JuMP.AbstractModel + write_model::Union{InfiniteModel, AbstractTransformationBackend} )::Vector{JuMP.AbstractJuMPScalar} Build expressions for derivative `dref` evaluated in accordance with @@ -504,7 +513,7 @@ will be substituted with appropriate placeholder variables such that `dref` can be reformulated as a first derivative. The expressions are of the form `lhs - rhs`, where `lhs` is a function of derivatives evaluated at some supports for certain infinite parameter, and `rhs` is a function of the -derivative argumentsevaluated at some supports for certain infinite parameter. +derivative arguments evaluated at some supports for certain infinite parameter. For example, for finite difference methods at point `t = 1`, `lhs` is `Δt * ∂/∂t[T(1)]`, and `rhs` could be `T(1+Δt) - T(1)` in case of forward difference mode. 
This is intended as a helper function for `evaluate`, which @@ -523,7 +532,7 @@ function evaluate_derivative( dref::GeneralVariableRef, vref::GeneralVariableRef, method::AbstractDerivativeMethod, - write_model::JuMP.AbstractModel + write_model::Union{InfiniteModel, AbstractTransformationBackend} ) # gather the arugment and parameter pref = operator_parameter(dref) diff --git a/src/derivatives.jl b/src/derivatives.jl index 84af9f47..7133e14b 100644 --- a/src/derivatives.jl +++ b/src/derivatives.jl @@ -661,7 +661,7 @@ function set_start_value_function( start::Union{Real, Function} )::Nothing info = _variable_info(dref) - set_optimizer_model_ready(JuMP.owner_model(dref), false) + set_transformation_backend_ready(JuMP.owner_model(dref), false) prefs = raw_parameter_refs(dref) temp_info = JuMP.VariableInfo(info.has_lb, info.lower_bound, info.has_ub, info.upper_bound, info.has_fix, info.fixed_value, @@ -688,7 +688,7 @@ julia> reset_start_value_function(dref) """ function reset_start_value_function(dref::DerivativeRef)::Nothing info = _variable_info(dref) - set_optimizer_model_ready(JuMP.owner_model(dref), false) + set_transformation_backend_ready(JuMP.owner_model(dref), false) new_info = JuMP.VariableInfo(info.has_lb, info.lower_bound, info.has_ub, info.upper_bound, info.has_fix, info.fixed_value, false, s -> NaN, info.binary, info.integer) diff --git a/src/expressions.jl b/src/expressions.jl index d65df854..2c4b2c85 100644 --- a/src/expressions.jl +++ b/src/expressions.jl @@ -423,7 +423,7 @@ function JuMP.delete(model::InfiniteModel, fref::ParameterFunctionRef)::Nothing @assert JuMP.is_valid(model, fref) "Parameter function is invalid." # update the optimizer model status if is_used(fref) - set_optimizer_model_ready(model, false) + set_transformation_backend_ready(model, false) end # update parameter mapping all_prefs = parameter_list(fref) diff --git a/src/infinite_variables.jl b/src/infinite_variables.jl index f1045fa1..c8643a4a 100644 --- a/src/infinite_variables.jl +++ b/src/infinite_variables.jl @@ -704,7 +704,7 @@ function set_start_value_function( start::Union{Real, Function} ) info = _variable_info(vref) - set_optimizer_model_ready(JuMP.owner_model(vref), false) + set_transformation_backend_ready(JuMP.owner_model(vref), false) prefs = raw_parameter_refs(vref) temp_info = JuMP.VariableInfo(info.has_lb, info.lower_bound, info.has_ub, info.upper_bound, info.has_fix, info.fixed_value, @@ -731,7 +731,7 @@ julia> reset_start_value_function(vref) """ function reset_start_value_function(vref::InfiniteVariableRef) info = _variable_info(vref) - set_optimizer_model_ready(JuMP.owner_model(vref), false) + set_transformation_backend_ready(JuMP.owner_model(vref), false) start_func = (s::Vector{<:Real}) -> NaN new_info = JuMP.VariableInfo(info.has_lb, info.lower_bound, info.has_ub, info.upper_bound, info.has_fix, info.fixed_value, diff --git a/src/measure_expansions.jl b/src/measure_expansions.jl index 663b4285..874725e0 100644 --- a/src/measure_expansions.jl +++ b/src/measure_expansions.jl @@ -2,20 +2,21 @@ # MEASURE VARIABLE CREATION METHODS ################################################################################ """ - make_point_variable_ref(write_model::Union{InfiniteModel, JuMP.Model}, - ivref::GeneralVariableRef, - support::Vector{Float64} - )::GeneralVariableRef + make_point_variable_ref( + write_model::Union{InfiniteModel, AbstractTransformationBackend}, + ivref::GeneralVariableRef, + support::Vector{Float64} + )::GeneralVariableRef Make a point variable for infinite variable/derivative 
`ivref` at `support`, add it to the `write_model`, and return the
`GeneralVariableRef`. This is an internal method for point variables produced
by expanding measures via [`expand_measure`](@ref).
-This is also useful for those writing extension optimizer models and wish to
+This is also useful for those writing extension transformation backends who wish to
 expand measures without modifiying the `InfiniteModel`. In such cases, `write_model`
-should be the optimizer model and
-[`add_point_variable`](@ref add_point_variable(::JuMP.Model, ::Any, ::Any, ::Any))
+should be the transformation backend and
+[`add_point_variable`](@ref add_point_variable(::AbstractTransformationBackend, ::Any, ::Any))
 should be extended appropriately for point variables. Errors if `write_model` is
-an optimizer model and `add_point_variable` is not properly extended.
+a transformation backend and `add_point_variable` is not properly extended.
 
 Note this is also accomodates infinite parameter functions, in which case the
 infinite parameter function is called with the support as input.
@@ -34,7 +35,7 @@ function make_point_variable_ref(
     ivref,
     support,
     ::Union{Type{InfiniteVariableIndex}, Type{DerivativeIndex}}
-    )::GeneralVariableRef
+    )
     prefs = parameter_list(ivref)
     for i in eachindex(support)
         support[i] = round(support[i], sigdigits = significant_digits(prefs[i]))
@@ -57,7 +58,7 @@ function make_point_variable_ref(
     fref,
     support,
     ::Type{ParameterFunctionIndex}
-    )::Float64
+    )
     prefs = raw_parameter_refs(fref)
     supp_tuple = Tuple(support, prefs)
     func = raw_function(fref)
@@ -65,27 +66,29 @@ end
 
 """
-    add_point_variable(model::JuMP.Model, ivref::GeneralVariableRef,
-                       support::Vector{Float64}, key::Val{:ext_key_name}
-                       )::GeneralVariableRef
+    add_point_variable(
+        backend::AbstractTransformationBackend,
+        ivref::GeneralVariableRef,
+        support::Vector{Float64}
+        )::GeneralVariableRef
 
 Add a point variable (defined by restricting `ivref` to `support`) to the
-optimizer model `model` (with `key`) and return the correct `InfiniteOpt`
+transformation backend `backend` and return the correct `InfiniteOpt`
 variable reference. This is an internal method used by
 [`make_point_variable_ref`](@ref) to make point variables when the `write_model`
-is an optimizer model. This is useful for extensions that wish to expand
+is a transformation backend. This is useful for extensions that wish to expand
 measures, but without changing the original `InfiniteModel`. An error is thrown
-for unextended optimizer model types.
+for unextended backend types.
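+
+For example, an extension might define something along these lines (where
+`MyBackend` and its `point_lookup` dictionary are hypothetical):
+```julia
+function InfiniteOpt.add_point_variable(
+    backend::MyBackend,
+    ivref::GeneralVariableRef,
+    support::Vector{Float64}
+    )
+    # retrieve (or create) the point variable reference for this support
+    return backend.point_lookup[(ivref, support)]
+end
+```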
""" -function add_point_variable(model::JuMP.Model, ivref, supp, key) - error("`add_point_variable` not defined for an optimizer model with key ", - "`$(typeof(key).parameters[1])`.") +function add_point_variable(backend::AbstractTransformationBackend, ivref, supp) + error("`add_point_variable` not defined for transformation backends of type " * + "`$(typeof(backend))`.") end # Store/add the variable to the optimizer model via add_measure_variable # This avoids changing the InfiniteModel unexpectedly function make_point_variable_ref( - write_model::JuMP.Model, # this should be an optimizer model + write_model::AbstractTransformationBackend, ivref::GeneralVariableRef, support::Vector{Float64} ) @@ -94,45 +97,45 @@ end # Infinite variable index function make_point_variable_ref( - write_model::JuMP.Model, + write_model::AbstractTransformationBackend, ivref, support, ::Union{Type{InfiniteVariableIndex}, Type{DerivativeIndex}} - )::GeneralVariableRef + ) prefs = parameter_list(ivref) for i in eachindex(support) support[i] = round(support[i], sigdigits = significant_digits(prefs[i])) end - opt_key = optimizer_model_key(write_model) - return add_point_variable(write_model, ivref, support, Val(opt_key)) + return add_point_variable(write_model, ivref, support) end """ - make_semi_infinite_variable_ref(write_model::Union{InfiniteModel, JuMP.Model}, - ivref::GeneralVariableRef, - indices::Vector{Int}, - values::Vector{Float64} - )::GeneralVariableRef + make_semi_infinite_variable_ref( + write_model::Union{InfiniteModel, AbstractTransformationBackend}, + ivref::GeneralVariableRef, + indices::Vector{Int}, + values::Vector{Float64} + )::GeneralVariableRef Make a semi-infinite variable for infinite variable/derivative/parameter function `ivref` at `support`, add it to the `write_model`, and return the `GeneralVariableRef`. This is an internal method for semi-infinite variables produced by expanding measures via [`expand_measure`](@ref). This is also useful -for those writing extension optimizer models and wish to expand measures without +for those writing extension transformation backends and wish to expand measures without modifiying the `InfiniteModel`. In such cases, `write_model` should be the -optimizer model and -[`add_semi_infinite_variable`](@ref add_semi_infinite_variable(::JuMP.Model, ::Any, ::Any)) +transformation backend and +[`add_semi_infinite_variable`](@ref add_semi_infinite_variable(::AbstractTransformationBackend, ::Any)) should be extended appropriately for semi-infinite variables. Errors if -`write_model` is an optimizer model and `add_semi_infinite_variable` is not -properly extended. Note this is only intended for optimizer models that are -currently stored in `InfiniteModel.optimizer_model`. +`write_model` is an transformation backend and `add_semi_infinite_variable` is not +properly extended. Note this is only intended for transformation backends that are +currently stored in `InfiniteModel.backend`. 
""" function make_semi_infinite_variable_ref( write_model::InfiniteModel, ivref::GeneralVariableRef, indices::Vector{Int}, values::Vector{Float64} - )::GeneralVariableRef + ) eval_supps = Dict(indices[i] => values[i] for i in eachindex(indices)) existing_index = get(write_model.semi_lookup, (ivref, eval_supps), nothing) if isnothing(existing_index) @@ -144,36 +147,37 @@ function make_semi_infinite_variable_ref( end """ - add_semi_infinite_variable(model::JuMP.Model, var::SemiInfiniteVariable, - key::Val{:ext_key_name})::GeneralVariableRef + add_semi_infinite_variable( + backend::AbstractTransformationBackend, + var::SemiInfiniteVariable, + )::GeneralVariableRef -Add a semi-infinite variable `var` to the optimizer model `model` (with `key`) +Add a semi-infinite variable `var` to the transformation backend `backend` and return the correct `InfiniteOpt` variable reference. This is an internal method used by [`make_semi_infinite_variable_ref`](@ref) to make semi-infinite -variables when the `write_model` is an optimizer model. This is useful for +variables when the `write_model` is a transformation backend. This is useful for extensions that wish to expand measures, but without changing the original `InfiniteModel`. An error is thrown for optimizer model types. Note if this is extended, than [`internal_semi_infinite_variable`](@ref) should also be extended in order to direct semi-infinite variables references to the underlying [`SemiInfiniteVariable`](@ref). """ -function add_semi_infinite_variable(model::JuMP.Model, var, key) - error("`add_semi_infinite_variable` not defined for an optimizer model ", - "with key `$(typeof(key).parameters[1])`.") +function add_semi_infinite_variable(backend::AbstractTransformationBackend, var) + error("`add_semi_infinite_variable` not defined for transformation backends of type " * + "`$(typeof(backend))`.") end # Add semi-infinite infinite variables in the optimizer model without modifying the InfiniteModel function make_semi_infinite_variable_ref( - write_model::JuMP.Model, + write_model::AbstractTransformationBackend, ivref::GeneralVariableRef, indices::Vector{Int}, values::Vector{Float64} - )::GeneralVariableRef + ) eval_supps = Dict(indices[i] => values[i] for i in eachindex(indices)) var = JuMP.build_variable(error, ivref, eval_supps, check = false) - key = optimizer_model_key(write_model) - return add_semi_infinite_variable(write_model, var, Val(key)) + return add_semi_infinite_variable(write_model, var) end # Helper function for reducing singleton affine expressions @@ -201,8 +205,11 @@ end # EXPAND_MEASURE DEFINITIONS ################################################################################ """ - expand_measure(expr, data::AbstractMeasureData, - write_model::JuMP.AbstractModel)::JuMP.AbstractJuMPScalar + expand_measure( + expr, + data::AbstractMeasureData, + write_model::Union{InfiniteModel, AbstractTransformationBackend} + )::JuMP.AbstractJuMPScalar Return the finite reformulation of a measure containing a variable/parameter expression `expr` with measure data `data`. Here `write_model` is the target @@ -217,18 +224,21 @@ measure data types. 
Principally, this is leveraged to enable the user methods function expand_measure end # GeneralVariableRef -function expand_measure(vref::GeneralVariableRef, - data::DiscreteMeasureData, - write_model::JuMP.AbstractModel - )::Union{JuMP.AbstractJuMPScalar, Float64} +function expand_measure( + vref::GeneralVariableRef, + data::DiscreteMeasureData, + write_model::Union{InfiniteModel, AbstractTransformationBackend} + ) return expand_measure(vref, _index_type(vref), data, write_model) end # InfiniteVariableRef/DerivativeRef/ParameterFunctionRef (1D DiscreteMeasureData) -function expand_measure(ivref::GeneralVariableRef, - index_type::Union{Type{InfiniteVariableIndex}, Type{DerivativeIndex}, Type{ParameterFunctionIndex}}, - data::DiscreteMeasureData{GeneralVariableRef, 1}, - write_model::JuMP.AbstractModel) +function expand_measure( + ivref::GeneralVariableRef, + index_type::Union{Type{InfiniteVariableIndex}, Type{DerivativeIndex}, Type{ParameterFunctionIndex}}, + data::DiscreteMeasureData{GeneralVariableRef, 1}, + write_model::Union{InfiniteModel, AbstractTransformationBackend} + ) # pull in the needed information var_prefs = parameter_list(ivref) pref = parameter_refs(data) @@ -254,10 +264,12 @@ function expand_measure(ivref::GeneralVariableRef, end # InfiniteVariableRef/DerivativeRef/ParameterFunctionRef (Multi DiscreteMeasureData) -function expand_measure(ivref::GeneralVariableRef, - index_type::Union{Type{InfiniteVariableIndex}, Type{DerivativeIndex}, Type{ParameterFunctionIndex}}, - data::DiscreteMeasureData{Vector{GeneralVariableRef}, 2}, - write_model::JuMP.AbstractModel) +function expand_measure( + ivref::GeneralVariableRef, + index_type::Union{Type{InfiniteVariableIndex}, Type{DerivativeIndex}, Type{ParameterFunctionIndex}}, + data::DiscreteMeasureData{Vector{GeneralVariableRef}, 2}, + write_model::Union{InfiniteModel, AbstractTransformationBackend} + ) # pull in the needed information var_prefs = parameter_list(ivref) prefs = parameter_refs(data) @@ -302,15 +314,17 @@ function _make_point_support( support_dict::Dict{Int, Float64}, idx::Int, value::Float64 - )::Vector{Float64} + ) return [i == idx ? value : support_dict[i] for i in eachindex(orig_prefs)] end # SemiInfiniteVariableRef (1D DiscreteMeasureData) -function expand_measure(rvref::GeneralVariableRef, - index_type::Type{SemiInfiniteVariableIndex}, - data::DiscreteMeasureData{GeneralVariableRef, 1}, - write_model::JuMP.AbstractModel) +function expand_measure( + rvref::GeneralVariableRef, + index_type::Type{SemiInfiniteVariableIndex}, + data::DiscreteMeasureData{GeneralVariableRef, 1}, + write_model::Union{InfiniteModel, AbstractTransformationBackend} + ) # pull in the needed information drvref = dispatch_variable_ref(rvref) ivref = infinite_variable_ref(drvref) @@ -346,10 +360,12 @@ function expand_measure(rvref::GeneralVariableRef, end # Write point support given semi_infinite info and the support -function _make_point_support(orig_prefs::Vector{GeneralVariableRef}, - support_dict::Dict{Int, Float64}, - new_indices::Vector{Int}, - values::Vector{Float64})::Vector{Float64} +function _make_point_support( + orig_prefs::Vector{GeneralVariableRef}, + support_dict::Dict{Int, Float64}, + new_indices::Vector{Int}, + values::Vector{Float64} + ) # these might overlap with the old dict, so we favor the old dict new_dict = Dict(new_indices[i] => values[i] for i in eachindex(values)) return [haskey(support_dict, i) ? 
support_dict[i] : new_dict[i] @@ -357,10 +373,12 @@ function _make_point_support(orig_prefs::Vector{GeneralVariableRef}, end # SemiInfiniteVariableRef (Multi DiscreteMeasureData) -function expand_measure(rvref::GeneralVariableRef, - index_type::Type{SemiInfiniteVariableIndex}, - data::DiscreteMeasureData{Vector{GeneralVariableRef}, 2}, - write_model::JuMP.AbstractModel) +function expand_measure( + rvref::GeneralVariableRef, + index_type::Type{SemiInfiniteVariableIndex}, + data::DiscreteMeasureData{Vector{GeneralVariableRef}, 2}, + write_model::Union{InfiniteModel, AbstractTransformationBackend} + ) # pull in the needed information drvref = dispatch_variable_ref(rvref) ivref = infinite_variable_ref(drvref) @@ -413,11 +431,12 @@ function expand_measure(rvref::GeneralVariableRef, end # FiniteRef (1D DiscreteMeasureData) -function expand_measure(vref::GeneralVariableRef, - index_type::Type{V}, - data::DiscreteMeasureData{GeneralVariableRef, 1}, - write_model::JuMP.AbstractModel - )::JuMP.GenericAffExpr where {V <: FiniteIndex} +function expand_measure( + vref::GeneralVariableRef, + index_type::Type{V}, + data::DiscreteMeasureData{GeneralVariableRef, 1}, + write_model::Union{InfiniteModel, AbstractTransformationBackend} + ) where {V <: FiniteIndex} # pull in the needed information supps = supports(data) coeffs = coefficients(data) @@ -428,11 +447,12 @@ function expand_measure(vref::GeneralVariableRef, end # FiniteRef (Multi DiscreteMeasureData) -function expand_measure(vref::GeneralVariableRef, - index_type::Type{V}, - data::DiscreteMeasureData{Vector{GeneralVariableRef}, 2}, - write_model::JuMP.AbstractModel - )::JuMP.GenericAffExpr where {V <: FiniteIndex} +function expand_measure( + vref::GeneralVariableRef, + index_type::Type{V}, + data::DiscreteMeasureData{Vector{GeneralVariableRef}, 2}, + write_model::Union{InfiniteModel, AbstractTransformationBackend} + ) where {V <: FiniteIndex} # pull in the needed information supps = supports(data) coeffs = coefficients(data) @@ -443,11 +463,12 @@ function expand_measure(vref::GeneralVariableRef, end # InfiniteParameterRef (1D DiscreteMeasureData) -function expand_measure(pref::GeneralVariableRef, - index_type::Type{P}, - data::DiscreteMeasureData{GeneralVariableRef, 1}, - write_model::JuMP.AbstractModel - )::Union{JuMP.GenericAffExpr, Float64} where {P <: InfiniteParameterIndex} +function expand_measure( + pref::GeneralVariableRef, + index_type::Type{P}, + data::DiscreteMeasureData{GeneralVariableRef, 1}, + write_model::Union{InfiniteModel, AbstractTransformationBackend} + ) where {P <: InfiniteParameterIndex} # pull in the needed information meas_pref = parameter_refs(data) supps = supports(data) @@ -464,11 +485,12 @@ function expand_measure(pref::GeneralVariableRef, end # InfiniteParameterRef (Multi DiscreteMeasureData) -function expand_measure(pref::GeneralVariableRef, - index_type::Type{P}, - data::DiscreteMeasureData{Vector{GeneralVariableRef}, 2}, - write_model::JuMP.AbstractModel - )::Union{JuMP.GenericAffExpr, Float64} where {P <: InfiniteParameterIndex} +function expand_measure( + pref::GeneralVariableRef, + index_type::Type{P}, + data::DiscreteMeasureData{Vector{GeneralVariableRef}, 2}, + write_model::Union{InfiniteModel, AbstractTransformationBackend} + ) where {P <: InfiniteParameterIndex} # pull in the needed information prefs = parameter_refs(data) supps = supports(data) @@ -487,10 +509,11 @@ function expand_measure(pref::GeneralVariableRef, end # GenericAffExpr (1D DiscreteMeasureData) -function 
expand_measure(expr::JuMP.GenericAffExpr{C, GeneralVariableRef}, - data::DiscreteMeasureData{GeneralVariableRef, 1}, - write_model::JuMP.AbstractModel - )::Union{JuMP.AbstractJuMPScalar, Float64} where {C} +function expand_measure( + expr::JuMP.GenericAffExpr{C, GeneralVariableRef}, + data::DiscreteMeasureData{GeneralVariableRef, 1}, + write_model::Union{InfiniteModel, AbstractTransformationBackend} + ) where {C} # pull in the needed information supps = supports(data) coeffs = coefficients(data) @@ -503,10 +526,11 @@ function expand_measure(expr::JuMP.GenericAffExpr{C, GeneralVariableRef}, end # GenericAffExpr (Multi DiscreteMeasureData) -function expand_measure(expr::JuMP.GenericAffExpr{C, GeneralVariableRef}, - data::DiscreteMeasureData{Vector{GeneralVariableRef}, 2}, - write_model::JuMP.AbstractModel - )::Union{JuMP.AbstractJuMPScalar, Float64} where {C} +function expand_measure( + expr::JuMP.GenericAffExpr{C, GeneralVariableRef}, + data::DiscreteMeasureData{Vector{GeneralVariableRef}, 2}, + write_model::Union{InfiniteModel, AbstractTransformationBackend} + ) where {C} # pull in the needed information supps = supports(data) coeffs = coefficients(data) @@ -522,7 +546,7 @@ end function expand_measure( expr::JuMP.GenericQuadExpr, data::DiscreteMeasureData{GeneralVariableRef, 1}, - write_model::JuMP.AbstractModel + write_model::Union{InfiniteModel, AbstractTransformationBackend} ) # get needed info pref = parameter_refs(data) @@ -548,7 +572,7 @@ end function expand_measure( expr::JuMP.GenericQuadExpr, data::DiscreteMeasureData{Vector{GeneralVariableRef}, 2}, - write_model::JuMP.AbstractModel + write_model::Union{InfiniteModel, AbstractTransformationBackend} ) # get needed info prefs = parameter_refs(data) @@ -574,7 +598,7 @@ end function expand_measure( expr::JuMP.GenericNonlinearExpr, data::DiscreteMeasureData{GeneralVariableRef, 1}, - write_model::JuMP.AbstractModel + write_model::Union{InfiniteModel, AbstractTransformationBackend} ) # get needed info pref = parameter_refs(data) @@ -598,7 +622,7 @@ end function expand_measure( expr::JuMP.GenericNonlinearExpr, data::DiscreteMeasureData{Vector{GeneralVariableRef}, 2}, - write_model::JuMP.AbstractModel + write_model::Union{InfiniteModel, AbstractTransformationBackend} ) # get needed info prefs = parameter_refs(data) @@ -620,11 +644,12 @@ end # MeasureRef -function expand_measure(mref::GeneralVariableRef, - index_type::Type{MeasureIndex}, - data::DiscreteMeasureData, - write_model::JuMP.AbstractModel - )::Union{JuMP.AbstractJuMPScalar, Float64} +function expand_measure( + mref::GeneralVariableRef, + index_type::Type{MeasureIndex}, + data::DiscreteMeasureData, + write_model::Union{InfiniteModel, AbstractTransformationBackend} + ) # determine function and data of the inner measure deeper_func = measure_function(mref) deeper_data = measure_data(mref) @@ -635,10 +660,10 @@ function expand_measure(mref::GeneralVariableRef, end # Call add_generative_supports if needed -function _prep_generative_supps(prefs, info_type::Type{NoGenerativeSupports})::Nothing +function _prep_generative_supps(prefs, info_type::Type{NoGenerativeSupports}) return end -function _prep_generative_supps(pref, info_type)::Nothing +function _prep_generative_supps(pref, info_type) add_generative_supports(pref) return end @@ -647,7 +672,7 @@ end function expand_measure( expr, data::FunctionalDiscreteMeasureData{P, B, I}, - write_model::JuMP.AbstractModel + write_model::Union{InfiniteModel, AbstractTransformationBackend} ) where {P, B, I} # get the info prefs = parameter_refs(data) 
@@ -669,7 +694,7 @@ end function expand_measure( expr, data::AbstractMeasureData, - ::JuMP.AbstractModel + ::Union{InfiniteModel, AbstractTransformationBackend} ) expr_type = typeof(expr) data_type = typeof(data) @@ -682,8 +707,11 @@ end # ANALYTIC EXPANSION METHODS ################################################################################ """ - analytic_expansion(expr, data::AbstractMeasureData, - write_model::JuMP.AbstractModel)::JuMP.AbstractJuMPScalar + analytic_expansion( + expr, + data::AbstractMeasureData, + write_model::Union{InfiniteModel, AbstractTransformationBackend} + )::JuMP.AbstractJuMPScalar Analytically evaluate the measure in the simple case where the measure expression `expr` doesn't depend on `data` and thus `expr` can be treated as a constant in @@ -700,7 +728,7 @@ function analytic_expansion( expr::JuMP.AbstractJuMPScalar, data::Union{DiscreteMeasureData{GeneralVariableRef, 1}, FunctionalDiscreteMeasureData{GeneralVariableRef}}, - write_model::JuMP.AbstractModel # needed for fallback + write_model::Union{InfiniteModel, AbstractTransformationBackend} # needed for fallback ) # get the bounds and expect lb = JuMP.lower_bound(data) @@ -725,7 +753,7 @@ function analytic_expansion( expr::JuMP.AbstractJuMPScalar, data::Union{DiscreteMeasureData{Vector{GeneralVariableRef}, 2}, FunctionalDiscreteMeasureData{Vector{GeneralVariableRef}}}, - write_model::JuMP.AbstractModel # needed for fallback + write_model::Union{InfiniteModel, AbstractTransformationBackend} # needed for fallback ) # get the bounds and expect lbs = JuMP.lower_bound(data) @@ -749,7 +777,7 @@ end function analytic_expansion( expr, data::AbstractMeasureData, - write_model::JuMP.AbstractModel + write_model::Union{InfiniteModel, AbstractTransformationBackend} ) return expand_measure(expr, data, write_model) end @@ -768,7 +796,7 @@ expansion is undefined for the measure data and/or the measure expression. If desired this can be used in combination with [`measure`](@ref) to expand measures on the fly. -This is useful for extensions that employ a custom optimizer model since it +This is useful for extensions that employ a custom transformation backend since it can be used to evaluate measures before expressions are translated to the new model. This method can also be extended to handle custom measure data types by extending [`expand_measure`](@ref). Optionally, [`analytic_expansion`](@ref) can also @@ -795,12 +823,15 @@ end """ - expand_measures(expr, write_model::JuMP.AbstractModel)::JuMP.AbstractJuMPScalar + expand_measures( + expr, + write_model::Union{InfiniteModel, AbstractTransformationBackend} + ) Expand all `MeasureRef`s in `expr` in-place via [`expand_measure`](@ref) and return the expanded expression. This is an internal method used by [`expand_all_measures!`](@ref) and `TranscriptionOpt` but can be useful for -user-defined optimizer model extensions that add implement +user-defined transformation backend extensions that implement [`add_point_variable`](@ref)/[`add_semi_infinite_variable`](@ref) in combination with `expand_measure`. `write_model` is the model that the measure variables are added to as described in [`expand_measure`](@ref).
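As a point of reference for the expansion API documented above, here is a minimal usage sketch; it assumes only the public `InfiniteOpt` measure interface (`DiscreteMeasureData`, `measure`, and `expand`), and the model, parameter, and variable names are illustrative:

```julia
using InfiniteOpt

# an illustrative model with one infinite parameter and one infinite variable
model = InfiniteModel()
@infinite_parameter(model, t in [0, 1])
@variable(model, x, Infinite(t))

# a two-point discrete measure approximating an integral over t
data = DiscreteMeasureData(t, [0.5, 0.5], [0.25, 0.75])
mref = measure(x, data)

# expand on the fly; the needed point variables are written back to `model`
finite_expr = expand(mref)  # 0.5 x(0.25) + 0.5 x(0.75)
```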
@@ -811,7 +842,7 @@ function expand_measures end function expand_measures( mref::GeneralVariableRef, ::Type{MeasureIndex}, - write_model::JuMP.AbstractModel + write_model::Union{InfiniteModel, AbstractTransformationBackend} ) if is_analytic(mref) return analytic_expansion(measure_function(mref), measure_data(mref), @@ -826,7 +857,7 @@ end function expand_measures( vref::GeneralVariableRef, ::Type{V}, - ::JuMP.AbstractModel + ::Union{InfiniteModel, AbstractTransformationBackend} ) where {V <: AbstractInfOptIndex} return vref end @@ -834,7 +865,7 @@ end # GeneralVariableRef function expand_measures( vref::GeneralVariableRef, - write_model::JuMP.AbstractModel + write_model::Union{InfiniteModel, AbstractTransformationBackend} ) return expand_measures(vref, _index_type(vref), write_model) end @@ -842,20 +873,21 @@ end # Expressions function expand_measures( expr::JuMP.AbstractJuMPScalar, - write_model::JuMP.AbstractModel + write_model::Union{InfiniteModel, AbstractTransformationBackend} ) return map_expression(v -> expand_measures(v, write_model), expr) end # AbstractArray of expressions -function expand_measures(arr::AbstractArray, - write_model::JuMP.AbstractModel - ) +function expand_measures( + arr::AbstractArray, + write_model::Union{InfiniteModel, AbstractTransformationBackend} + ) return map(e -> expand_measures(e, write_model), arr) end # Fallback -function expand_measures(expr, ::JuMP.AbstractModel) +function expand_measures(expr, ::Union{InfiniteModel, AbstractTransformationBackend}) error("`expand_measures` not defined for expressions of type ", "`$(typeof(expr))`.") end @@ -869,7 +901,7 @@ variables are added to the model as necessary to accommodate the expansion (i.e., point variables and semi-infinite variables are made as needed). Errors if expansion is undefined for the measure data and/or the measure expression. -This is useful for extensions that employ a custom optimizer model since it +This is useful for extensions that employ a custom transformation backend since it can be used to evaluate measures before `model` is translated into the new model. This method can also be extended to handle custom measure data types by extending [`expand_measure`](@ref). Note that this method leverages `expand_measure` via diff --git a/src/measures.jl b/src/measures.jl index 41379489..6dec53f5 100644 --- a/src/measures.jl +++ b/src/measures.jl @@ -1160,7 +1160,7 @@ constructors can be used for `data`. The variable expression `expr` can contain variables, infinite parameters, other measures (meaning measures can be nested), and constants. Typically, this is called inside of `JuMP.@expression`, `JuMP.@objective`, and `JuMP.@constraint` in a manner similar to `sum`. Note measures are not explicitly evaluated until -[`build_optimizer_model!`](@ref) is called or unless they are expanded via +[`build_transformation_backend!`](@ref) is called or unless they are expanded via [`expand`](@ref) or [`expand_all_measures!`](@ref). **Example** @@ -1180,7 +1180,7 @@ function measure( expr::JuMP.AbstractJuMPScalar, data::AbstractMeasureData; name::String = "measure" - )::GeneralVariableRef + ) model = JuMP.owner_model(expr) if isnothing(model) error("Expression contains no variables or parameters.") @@ -1221,7 +1221,7 @@ end Extend `JuMP.name` to return the name associated with a measure reference. """ -function JuMP.name(mref::MeasureRef)::String +function JuMP.name(mref::MeasureRef) object = get(_data_dictionary(mref), JuMP.index(mref), nothing) return isnothing(object) ?
"" : object.name end @@ -1231,7 +1231,7 @@ end Extend `JuMP.set_name` to specify the name of a measure reference. """ -function JuMP.set_name(mref::MeasureRef, name::String)::Nothing +function JuMP.set_name(mref::MeasureRef, name::String) _data_object(mref).name = name return end @@ -1388,7 +1388,7 @@ function JuMP.delete(model::InfiniteModel, mref::MeasureRef)::Nothing @assert JuMP.is_valid(model, mref) "Invalid measure reference." # Reset the transcription status if is_used(mref) - set_optimizer_model_ready(model, false) + set_transformation_backend_ready(model, false) end gvref = _make_variable_ref(JuMP.owner_model(mref), JuMP.index(mref)) # Remove from dependent measures if there are any diff --git a/src/objective.jl b/src/objective.jl index 7b21fd0e..9d0d68eb 100644 --- a/src/objective.jl +++ b/src/objective.jl @@ -116,7 +116,7 @@ function JuMP.set_objective_function( model.objective_has_measures = true end end - set_optimizer_model_ready(model, false) + set_transformation_backend_ready(model, false) return end @@ -142,7 +142,7 @@ function JuMP.set_objective_function(model::InfiniteModel, func::Real)::Nothing end # update function model.objective_function = JuMP.GenericAffExpr{Float64, GeneralVariableRef}(func) - set_optimizer_model_ready(model, false) + set_transformation_backend_ready(model, false) return end @@ -166,7 +166,7 @@ function JuMP.set_objective_sense( sense::MOI.OptimizationSense )::Nothing model.objective_sense = sense - set_optimizer_model_ready(model, false) + set_transformation_backend_ready(model, false) return end diff --git a/src/optimize.jl b/src/optimize.jl deleted file mode 100644 index d4602f68..00000000 --- a/src/optimize.jl +++ /dev/null @@ -1,204 +0,0 @@ -################################################################################ -# CORE BACKEND API -################################################################################ -""" - transformation_backend_ready(model::InfiniteModel)::Bool - -Return `Bool` if the transformation backend model is up-to-date with `model` and -ready to be optimized. -**Example** -```julia-repl -julia> transformation_backend_ready(model) -false -``` -""" -transformation_backend_ready(model::InfiniteModel) = model.ready_to_optimize - -""" - set_transformation_backend_ready(model::InfiniteModel, status::Bool) - -Set the status of the transformation backend model to whether it is up-to-date or -not. Note is more intended as an internal function, but is useful for extensions. 
- -**Example** -```julia-repl -julia> set_transformation_backend_ready(model, true) - -julia> transformation_backend_ready(model) -true -``` -""" -function set_transformation_backend_ready(model::InfiniteModel, status::Bool) - model.ready_to_optimize = status - return -end - -""" - -""" -function transform_model(backend::AbstractTransformationBackend) - error("") -end - -""" - -""" -transform_model(model::InfiniteModel) = backend_model(model.backend) - -""" - -""" -function set_transformation_backend( - model::InfiniteModel, - backend::AbstractTransformationBackend - ) - model.backend = backend - set_transformation_backend_ready(model, false) - return -end - -""" - -""" -function JuMP.get_attribute( - backend::AbstractTransformationBackend, - attr - ) - error("") -end - -""" - -""" -function JuMP.get_attribute(model::InfiniteModel, attr) - return JuMP.get_attribute(model.backend, attr) -end - -""" - -""" -function JuMP.set_attribute( - backend::AbstractTransformationBackend, - attr, - value - ) - error("") -end - -""" - -""" -function JuMP.set_attribute(model::InfiniteModel, attr, value) - return JuMP.set_attribute(model.backend, attr, value) -end - -""" - -""" -function build_transformation_model!( - model::InfiniteModel, - backend::AbstractTransformationBackend; - kwargs... - ) - return error("") -end - -""" - -""" -function build_transformation_model!(model::InfiniteModel; kwargs...) - if num_parameters(model, InfiniteParameter) == 0 - @warn("Finite models (i.e., `InfiniteModel`s with no infinite " * - "parameters) should be modeled directly via a `Model` in JuMP.jl.") - end - build_optimizer_model!(model, model.backend; kwargs...) - set_transformation_backend_ready(model, true) - return -end - -""" - JuMP.set_optimize_hook( - model::InfiniteModel, - hook::Union{Function, Nothing} - )::Nothing - -Set the function `hook` as the optimize hook for `model` where `hook` should -have be of the form `hook(model::InfiniteModel; hook_specfic_kwargs..., kwargs...)`. -The `kwargs` are those passed to [`optimize!`](@ref). The `hook_specifc_kwargs` -are passed as additional keywords by the user when they call [`optimize!`](@ref). - -## Notes - -* The optimize hook should generally modify the model, or some external state -in some way, and then call `optimize!(model; ignore_optimize_hook = true)` to -optimize the problem, bypassing the hook. -* Use `set_optimize_hook(model, nothing)` to unset an optimize hook. -""" -function JuMP.set_optimize_hook( - model::InfiniteModel, - hook::Union{Function, Nothing} - ) - model.optimize_hook = hook - set_optimizer_model_ready(model, false) - return -end - -""" - -""" -function JuMP.optimize!(backend::AbstractTransformationBackend) - return error("") -end - - -""" - JuMP.optimize!(model::InfiniteModel; [kwargs...]) - -Extend `JuMP.optimize!` to optimize infinite models using the internal -optimizer model. Calls [`build_optimizer_model!`](@ref) if the optimizer -model isn't up to date. The `kwargs` correspond to keyword arguments passed to -[`build_optimizer_model!`](@ref) if any are defined. The `kwargs` can also -include arguments that are passed to an optimize hook if one was set with -[`JuMP.set_optimize_hook`](@ref). - -**Example** -```julia-repl -julia> optimize!(model) - -julia> has_values(model) -true -``` -""" -function JuMP.optimize!( - model::InfiniteModel; - ignore_optimize_hook = isnothing(model.optimize_hook), - kwargs...) - if !ignore_optimize_hook - return model.optimize_hook(model; kwargs...) 
- end - if !optimizer_model_ready(model) - build_optimizer_model!(model; kwargs...) - end - JuMP.optimize!(model.backend) - return -end - -""" - -""" -function optimizer_model_variable( - vref::GeneralVariableRef, - backend::AbstractTransformationBackend; - kwargs... - ) - error("") -end - -""" - -""" -function optimizer_model_variable(vref::GeneralVariableRef; kwargs...) - model = JuMP.owner_model(vref) - return optimizer_model_variable(vref, model.backend; kwargs...) -end - diff --git a/src/optimize_old.jl b/src/optimize_old.jl deleted file mode 100644 index 01d0d84a..00000000 --- a/src/optimize_old.jl +++ /dev/null @@ -1,963 +0,0 @@ -################################################################################ -# OPTIMIZER MODEL BASICS -################################################################################ -""" - optimizer_model(model::InfiniteModel)::JuMP.Model - -Return the JuMP model stored in `model` that is used to solve it. - -**Example** -```julia-repl -julia> opt_model = optimizer_model(model) -A JuMP Model -Feasibility problem with: -Variables: 0 -Model mode: AUTOMATIC -CachingOptimizer state: NO_OPTIMIZER -Solver name: No optimizer attached. -``` -""" -optimizer_model(model::InfiniteModel)::JuMP.Model = model.optimizer_model - -""" - JuMP.bridge_constraints(model::InfiniteModel)::Bool - -Extend `JuMP.bridge_constraints` to return if an infinite model `model` -has an optimizer model where the optimizer is set and unsupported constraints -are automatically bridged to equivalent supported constraints when an -appropriate transformation is available. - -**Example** -```julia-repl -julia> bridge_constraints(model) -false -``` -""" -function JuMP.bridge_constraints(model::InfiniteModel)::Bool - return JuMP.bridge_constraints(optimizer_model(model)) -end - -""" - JuMP.add_bridge(model::InfiniteModel, - BridgeType::Type{<:MOI.Bridges.AbstractBridge}) - -Extend `JuMP.add_bridge` to add `BridgeType` to the list of bridges that can -be used by the optimizer model to transform unsupported constraints into an -equivalent formulation using only constraints supported by the optimizer. -""" -function JuMP.add_bridge(model::InfiniteModel, - BridgeType::Type{<:MOI.Bridges.AbstractBridge}) - JuMP.add_bridge(optimizer_model(model), BridgeType) - return -end - -""" - optimizer_model_ready(model::InfiniteModel)::Bool - -Return `Bool` if the optimizer model is up to date with `model`. - -**Example** -```julia-repl -julia> optimizer_model_ready(model) -false -``` -""" -optimizer_model_ready(model::InfiniteModel)::Bool = model.ready_to_optimize - -""" - set_optimizer_model_ready(model::InfiniteModel, status::Bool) - -Set the status of the optimizer model to whether it is up to date or not. Note -is more intended as an internal function, but is useful for extensions. - -**Example** -```julia-repl -julia> set_optimizer_model_ready(model, true) - -julia> optimizer_model_ready(model) -true -``` -""" -function set_optimizer_model_ready(model::InfiniteModel, status::Bool) - model.ready_to_optimize = status - return -end - -""" - add_infinite_model_optimizer(opt_model::JuMP.Model, inf_model::InfiniteModel) - -Parse the current optimizer and its attributes associated with `inf_model` and load -them into `opt_model`. This is intended to be used as an internal method -for [`set_optimizer_model`](@ref). 
-""" -function add_infinite_model_optimizer(opt_model::JuMP.Model, - inf_model::InfiniteModel) - if !isa(inf_model.optimizer_constructor, Nothing) - bridge_constrs = JuMP.bridge_constraints(inf_model) - JuMP.set_optimizer(opt_model, inf_model.optimizer_constructor, - add_bridges = bridge_constrs) - end - # parse the attributes (this is a hacky workaround) - for (attr, val) in JuMP.backend(inf_model).model_cache.optattr - MOI.set(opt_model, attr, val) - end - return -end - -""" - set_optimizer_model(inf_model::InfiniteModel, opt_model::JuMP.Model; - inherit_optimizer::Bool = true) - -Specify the JuMP model that is used to solve `inf_model`. This is intended for -internal use and extensions. Note that `opt_model` should contain extension -data to allow it to map to `inf_model` in a manner similar to -[`TranscriptionModel`](@ref). `inherit_optimizer` indicates whether -[`add_infinite_model_optimizer`](@ref) should be invoked on the new optimizer -mode to inherit the optimizer constuctor and attributes currently stored in -`inf_model`. - -**Example** -```julia-repl -julia> set_optimizer_model(model, TranscriptionModel()) - -julia> optimizer_model(model) -A JuMP Model -Feasibility problem with: -Variables: 0 -Model mode: AUTOMATIC -CachingOptimizer state: NO_OPTIMIZER -Solver name: No optimizer attached. -``` -""" -function set_optimizer_model( - inf_model::InfiniteModel, - opt_model::JuMP.Model; - inherit_optimizer::Bool = true - ) - if inherit_optimizer - add_infinite_model_optimizer(opt_model, inf_model) - end - inf_model.optimizer_model = opt_model - set_optimizer_model_ready(inf_model, false) - return -end - -""" - optimizer_model_key(model::JuMP.Model)::Any - -Return the extension key used in the optimizer model `model`. Errors if -`model.ext` contains more than one key. This is intended for internal -use and extensions. For extensions this is used to dispatch to the appropriate -optmizer model functions such as extensions to [`build_optimizer_model!`](@ref). -This is intended as an internal method. See -[`optimizer_model_key`](@ref optimizer_model_key(::InfiniteModel)) -for the public method -""" -function optimizer_model_key(model::JuMP.Model) - if length(model.ext) != 1 - error("Optimizer models should have 1 and only 1 extension key of the " * - "form `Model.ext[:my_ext_key] = MyExtData`.") - end - return first(keys(model.ext)) -end - -""" - optimizer_model_key(model::InfiniteModel)::Any - -Return the extension key used in the optimizer model of `model`. Errors if -`optimizer_model.ext` contains more than one key. This is intended for internal -use and extensions. For extensions this is used to dispatch to the appropriate -optmizer model functions such as extensions to [`build_optimizer_model!`](@ref). - -**Example** -```julia-repl -julia> optimizer_model_key(model) -:TransData -``` -""" -function optimizer_model_key(model::InfiniteModel)::Any - return optimizer_model_key(optimizer_model(model)) -end - -################################################################################ -# OPTIMIZER METHOD EXTENSIONS -################################################################################ -""" - JuMP.set_optimizer(model::InfiniteModel, - [optimizer_constructor; - add_bridges::Bool = true]) - -Extend `JuMP.set_optimizer` to set optimizer of infinite models. -Specifically, the optimizer of the optimizer model is modified. 
- -**Example** -```julia-repl -julia> set_optimizer(model, Clp.Optimizer) - -julia> optimizer_model(model) -A JuMP Model -Feasibility problem with: -Variables: 0 -Model mode: AUTOMATIC -CachingOptimizer state: EMPTY_OPTIMIZER -Solver name: SolverName() attribute not implemented by the optimizer. -``` -""" -function JuMP.set_optimizer( - model::InfiniteModel, - optimizer_constructor; - add_bridges::Bool = true - ) - JuMP.set_optimizer(optimizer_model(model), optimizer_constructor, - add_bridges = add_bridges) - _set_optimizer_constructor(model, optimizer_constructor) - return -end - -""" - JuMP.set_silent(model::InfiniteModel) - -Extend `JuMP.set_silent` for infinite models to take precedence over any other -attribute controlling verbosity and requires the solver to produce no output. - -**Example** -```julia-repl -julia> set_silent(model) -true -``` -""" -function JuMP.set_silent(model::InfiniteModel) - return JuMP.set_silent(optimizer_model(model)) -end - -""" - JuMP.unset_silent(model::InfiniteModel) - -Extend `JuMP.unset_silent` for infinite models to neutralize the effect of the -`set_silent` function and let the solver attributes control the verbosity. - -**Example** -```julia-repl -julia> unset_silent(model) -false -``` -""" -function JuMP.unset_silent(model::InfiniteModel) - return JuMP.unset_silent(optimizer_model(model)) -end - -""" - JuMP.set_time_limit_sec(model::InfiniteModel, limit) - -Extend `set_time_limit_sec` to set the time limit (in seconds) of the solver. -Can be unset using `unset_time_limit_sec` or with `limit` set to `nothing`. - -**Example** -```julia-repl -julia> set_time_limit_sec(model, 100) -100 -``` -""" -function JuMP.set_time_limit_sec(model::InfiniteModel, limit) - return JuMP.set_time_limit_sec(optimizer_model(model), limit) -end - -""" - JuMP.unset_time_limit_sec(model::InfiniteModel) - -Extend `unset_time_limit_sec` to unset the time limit of the solver. Can be set -using `set_time_limit_sec`. - -**Example** -```julia-repl -julia> unset_time_limit_sec(model) -``` -""" -function JuMP.unset_time_limit_sec(model::InfiniteModel) - return JuMP.unset_time_limit_sec(optimizer_model(model)) -end - -""" - JuMP.time_limit_sec(model::InfiniteModel) - -Extend `time_limit_sec` to get the time limit (in seconds) of the solve used by -the optimizer model (`nothing` if unset). Can be set using `set_time_limit_sec`. - -**Example** -```julia-repl -julia> time_limit_sec(model) -100 -``` -""" -function JuMP.time_limit_sec(model::InfiniteModel) - return JuMP.time_limit_sec(optimizer_model(model)) -end - -""" - JuMP.set_optimizer_attribute(model::InfiniteModel, name::String, value) - -Extend `set_optimizer_attribute` to specify a solver-specific attribute -identified by `name` to `value`. - -**Example** -```julia-repl -julia> set_optimizer_attribute(model, "SolverSpecificAttributeName", true) -true -``` -""" -function JuMP.set_optimizer_attribute(model::InfiniteModel, name::String, value) - return JuMP.set_optimizer_attribute(optimizer_model(model), name, value) -end - -""" - JuMP.set_optimizer_attribute(model::InfiniteModel, - attr::MOI.AbstractOptimizerAttribute, - value) - -Extend `set_optimizer_attribute` to set the solver-specific attribute `attr` in -`model` to `value`. 
- -**Example** -```julia-repl -julia> set_optimizer_attribute(model, MOI.Silent(), true) -true -``` -""" -function JuMP.set_optimizer_attribute( - model::InfiniteModel, - attr::MOI.AbstractOptimizerAttribute, - value - ) - return MOI.set(optimizer_model(model), attr, value) -end - -""" - JuMP.set_optimizer_attributes(model::InfiniteModel, pairs::Pair...) - -Extend `set_optimizer_attributes` to set multiple solver attributes given a -list of `attribute => value` pairs. Calls -`set_optimizer_attribute(model, attribute, value)` for each pair. - -**Example** -```julia-repl -julia> model = Model(Ipopt.Optimizer); - -julia> set_optimizer_attributes(model, "tol" => 1e-4, "max_iter" => 100) -``` -is equivalent to: -```julia-repl -julia> set_optimizer_attribute(model, "tol", 1e-4); - -julia> set_optimizer_attribute(model, "max_iter", 100); -``` -""" -function JuMP.set_optimizer_attributes(model::InfiniteModel, pairs::Pair...) - for (name, value) in pairs - JuMP.set_optimizer_attribute(model, name, value) - end - return -end - -""" - JuMP.get_optimizer_attribute(model::InfiniteModel, name::String) - -Extend `get_optimizer_attribute` to return the value associated with the -solver-specific attribute named `name`. - -**Example** -```julia-repl -julia> get_optimizer_attribute(model, "tol") -0.0001 -```` -""" -function JuMP.get_optimizer_attribute(model::InfiniteModel, name::String) - return JuMP.get_optimizer_attribute(optimizer_model(model), name) -end - -""" - JuMP.get_optimizer_attribute(model::InfiniteModel, - attr::MOI.AbstractOptimizerAttribute) - -Extend `get_optimizer_attribute` to return the value of the solver-specific -attribute `attr` in `model`. - -**Example** -```julia-repl -julia> get_optimizer_attribute(model, MOI.Silent()) -true -```` -""" -function JuMP.get_optimizer_attribute( - model::InfiniteModel, - attr::MOI.AbstractOptimizerAttribute - ) - return MOI.get(optimizer_model(model), attr) -end - -""" - JuMP.solver_name(model::InfiniteModel) - -Extend `solver_name` to return the name of the solver being used if there is an -optimizer selected and it has a name attribute. Otherwise, an error is thrown. - -**Example** -```julia-repl -julia> solver_name(model) -"Gurobi" -``` -""" -function JuMP.solver_name(model::InfiniteModel) - return JuMP.solver_name(optimizer_model(model)) -end - -""" - JuMP.backend(model::InfiniteModel) - -Extend `backend` to return the `MathOptInterface` backend associated with the -optimizer model. Note this will be empty if the optimizer model has not been -build yet. - -**Example** -```julia-repl -julia> moi_model = backend(model); -``` -""" -function JuMP.backend(model::InfiniteModel) - return JuMP.backend(optimizer_model(model)) -end - -""" - JuMP.mode(model::InfiniteModel) - -Extend `mode` to return the `MathOptInterface` mode the optimizer model is in. - -**Example** -```julia-repl -julia> mode(model) -AUTOMATIC::ModelMode = 0 -``` -""" -function JuMP.mode(model::InfiniteModel) - return JuMP.mode(optimizer_model(model)) -end - -""" - JuMP.result_count(model::InfiniteModel) - -Extend `result_count` to return the number of results available to query after a -call to `optimize!`. 
- -**Example** -```julia-repla -julia> result_count(model) -1 -``` -""" -function JuMP.result_count(model::InfiniteModel)::Int - return MOI.get(optimizer_model(model), MOI.ResultCount()) -end - -################################################################################ -# OPTIMIZER MODEL BUILD METHODS -################################################################################ -""" - build_optimizer_model!(model::InfiniteModel, key::Val{ext_key_name}; - [kwargs...]) - -Build the optimizer model stored in `model` such that it can be -treated as a normal JuMP model, where the `Model.ext` field contains a key -that points to a datastructure that appropriately maps the data between the -two models. The key argument should be be typed to `Val{ext_key_name}`. This -should also use [`clear_optimizer_model_build!`](@ref) to empty the out the current -optimizer model. Ultimately, [`set_optimizer_model`](@ref) should be called -to insert the build optimizer model into `model` and [`set_optimizer_model_ready`](@ref) -should be used to update the optimizer model's status. -""" -function build_optimizer_model! end - -""" - clear_optimizer_model_build!(model::JuMP.Model)::JuMP.Model - -Empty the optimizer model using appropriate calls of `Base.empty!`. This -effectively resets `model` except the optimizer, its attributes, and an an emptied -optimizer model data struct are maintained. This is intended as an internal -method for use by [`build_optimizer_model!`](@ref). -""" -function clear_optimizer_model_build!(model::JuMP.Model) - key = optimizer_model_key(model) - data_type = typeof(model.ext[key]) - empty!(model) - model.ext[key] = data_type() - model.operator_counter = 0 - return model -end - -""" - clear_optimizer_model_build!(model::InfiniteModel)::JuMP.Model - -Empty the optimizer model using appropriate calls of `Base.empty!`. This -effectively resets `model.optimizer_model` except the optimizer, its attributes, -and an an emptied optimizer model data struct are maintained. This is intended -as an internal method for use by [`build_optimizer_model!`](@ref). -""" -function clear_optimizer_model_build!(model::InfiniteModel)::JuMP.Model - return clear_optimizer_model_build!(optimizer_model(model)) -end - -""" - build_optimizer_model!(model::InfiniteModel; [kwargs...]) - -Build the optimizer model stored in `model` such that it can be -treated as a normal JuMP model. Specifically, translate the variables and -constraints stored in `model` into ones that are stored in the optimizer model -and can be solved. This is provided generally to accomodate extensions that use -custom optimizer model types in accordance with [`optimizer_model_key`](@ref). -However, it may be useful in certain applications when the user desires to -force a build without calling `optimize!`. -Extensions will need to implement their own version of the function -`build_optimizer_model!(model::InfiniteModel, key::Val{ext_key_name}; kwargs...)`. - -**Example** -```julia-repl -julia> build_optimizer_model!(model) - -julia> optimizer_model_ready(model) -true -``` -""" -function build_optimizer_model!(model::InfiniteModel; kwargs...) - if num_parameters(model, InfiniteParameter) == 0 - @warn("Finite models (i.e., `InfiniteModel`s with no infinite " * - "parameters) should be modeled directly via a `Model` in JuMP.jl.") - end - key = optimizer_model_key(model) - build_optimizer_model!(model, Val(key); kwargs...) 
- return -end - -################################################################################ -# OPTIMIZER MODEL MAPPING METHODS (VARIABLES) -################################################################################ -""" - optimizer_model_variable(vref::GeneralVariableRef, key::Val{ext_key_name}; - [kwargs...]) - -Return the reformulation variable(s) stored in the optimizer model that correspond -to `vref`. This needs to be defined for extensions that implement a custom -optimizer model type. Principally, this is accomplished by typed the `key` -argument to `Val{ext_key_name}`. Keyword arguments can be added as needed. -""" -function optimizer_model_variable end - -# Fallback for unextended keys -function optimizer_model_variable(vref::GeneralVariableRef, key; kwargs...) - error("`optimizer_model_variable` not implemented for optimizer model " * - "key `$(typeof(key).parameters[1])`.") -end - -""" - optimizer_model_variable(vref::GeneralVariableRef; - [label::Type{<:AbstractSupportLabel} = PublicLabel, - ndarray::Bool = false, - kwargs...]) - -Return the reformulation variable(s) stored in the optimizer model that correspond -to `vref`. Also errors if no such variable can be found in -the optimizer model. - -The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ -and `kwargs` denote extra ones that user extensions may employ in accordance with -their implementation of [`optimizer_model_variable`](@ref). Errors if such an -extension has not been written. - -By default only the variables associated with public supports are returned, the -full set can be accessed via `label = All`. Moreover, infinite variables are -returned as a list corresponding to their supports. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the variable has multiple -infinite parameter dependencies. The corresponding supports are obtained via -`supports` using the same keyword arguments. - -**Example** -```julia-repl -julia> optimizer_model_variable(x) # infinite variable -2-element Array{VariableRef,1}: - x(support: 1) - x(support: 2) - -julia> optimizer_model_variable(z) # finite variable -z -``` -""" -function optimizer_model_variable(vref::GeneralVariableRef; kwargs...) - key = optimizer_model_key(JuMP.owner_model(vref)) - return optimizer_model_variable(vref, Val(key); kwargs...) -end - -""" - variable_supports(optimizer_model::JuMP.Model, vref, - key::Val{ext_key_name}; - [kwargs...])::Vector - -Return the supports associated with the mappings of `vref` in `optimizer_model`. -This dispatches off of `key` which permits optimizer model extensions. This -should throw an error if `vref` is not associated with the variable mappings -stored in `optimizer_model`. Keyword arguments can be added as needed. Note that -no extension is necessary for point or finite variables. -""" -function variable_supports end - -# fallback for unextended keys -function variable_supports(optimizer_model::JuMP.Model, vref, key; kwargs...) - error("`variable_supports` not implemented for optimizer model key " * - "`$(typeof(key).parameters[1])` and/or variable type $(typeof(vref)).") -end - -# FiniteRef -function variable_supports(optimizer_model::JuMP.Model, vref::FiniteRef, - key; kwargs...) - return () -end - -""" - supports(vref::DecisionVariableRef; - [label::Type{<:AbstractSupportLabel} = PublicLabel, - ndarray::Bool = false, - kwargs...]) - -Return the supports associated with `vref` in the optimizer -model. 
Errors if [`InfiniteOpt.variable_supports`](@ref) has not been extended for the -optimizer model type or if `vref` is not be reformulated in the optimizer model. - -The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ -and `kwargs` denote extra ones that user extensions may employ in accordance with -their implementation of `variable_supports`. Errors if such an -extension has not been written. - -By default only the public supports are returned, the -full set can be accessed via `label = All`. Moreover, the supports of infinite -variables are returned as a list. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the variable has multiple -infinite parameter dependencies. - -**Example** -```julia-repl -julia> supports(vref) -2-element Array{Tuple{Float64},1}: - (0.0,) - (1.0,) -``` -""" -function supports( - vref::Union{DecisionVariableRef, MeasureRef, ParameterFunctionRef}; - kwargs... - ) - model = optimizer_model(JuMP.owner_model(vref)) - key = optimizer_model_key(JuMP.owner_model(vref)) - return variable_supports(model, vref, Val(key); kwargs...) -end - -################################################################################ -# OPTIMIZER MODEL MAPPING METHODS (EXPRESSIONS) -################################################################################ -""" - optimizer_model_expression(expr, key::Val{ext_key_name}; [kwargs...]) - -Return the reformulation expression(s) stored in the optimizer model that correspond -to `expr`. This needs to be defined for extensions that implement a custom -optimizer model type. Principally, this is accomplished by typed the `key` -argument to `Val{ext_key_name}`. Keyword arguments can be added as needed. -Note that if `expr` is a `GeneralVariableRef` this just dispatches to -`optimizer_model_variable`. -""" -function optimizer_model_expression end - -# Fallback for unextended keys -function optimizer_model_expression(expr, key; kwargs...) - error("`optimizer_model_expression` not defined for optimizer model " * - "key `$(typeof(key).parameters[1])` and expression type " * - "`$(typeof(expr))`.") -end - -# Define for variable reference expressions -function optimizer_model_expression(expr::GeneralVariableRef, key; kwargs...) - return optimizer_model_variable(expr, key; kwargs...) -end - -""" - optimizer_model_expression(expr::JuMP.AbstractJuMPScalar; - [label::Type{<:AbstractSupportLabel} = PublicLabel, - ndarray::Bool = false, - kwargs...]) - -Return the reformulation expression(s) stored in the optimizer model that correspond -to `expr`. Also errors if no such expression can be found in -the optimizer model (meaning one or more of the underlying variables have not -been transcribed). - -The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ -and `kwargs` denote extra ones that user extensions may employ in accordance with -their implementation of [`optimizer_model_expression`](@ref). Errors if such an -extension has not been written. - -By default only the expressions associated with public supports are returned, the -full set can be accessed via `label = All`. Moreover, infinite expressions are -returned as a list corresponding to their supports. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the expression has multiple -infinite parameter dependencies. The corresponding supports are obtained via -`supports` using the same keyword arguments. 
- -**Example** -```julia-repl -julia> optimizer_model_expression(my_expr) # finite expression -x(support: 1) - y -``` -""" -function optimizer_model_expression(expr::JuMP.AbstractJuMPScalar; kwargs...) - model = JuMP.owner_model(expr) - if isnothing(model) - return zero(JuMP.AffExpr) + JuMP.constant(expr) - else - key = optimizer_model_key(model) - return optimizer_model_expression(expr, Val(key); kwargs...) - end -end - -""" - expression_supports(optimizer_model::JuMP.Model, expr, - key::Val{ext_key_name}; [kwargs...]) - -Return the supports associated with the mappings of `expr` in `optimizer_model`. -This dispatches off of `key` which permits optimizer model extensions. This -should throw an error if `expr` is not associated with the variable mappings -stored in `optimizer_model`. Keyword arguments can be added as needed. Note that -if `expr` is a `GeneralVariableRef` this just dispatches to `variable_supports`. -""" -function expression_supports end - -# fallback for unextended keys -function expression_supports(optimizer_model::JuMP.Model, expr, key; kwargs...) - error("`constraint_supports` not implemented for optimizer model key " * - "`$(typeof(key).parameters[1])` and/or expressions of type " * - "`$(typeof(expr))`.") -end - -# Variable reference expressions -function expression_supports(model::JuMP.Model, vref::GeneralVariableRef, key; - kwargs...) - return variable_supports(model, dispatch_variable_ref(vref), key; kwargs...) -end - -""" - supports(expr::JuMP.AbstractJuMPScalar; - [label::Type{<:AbstractSupportLabel} = PublicLabel, - ndarray::Bool = false, - kwargs...]) - -Return the support associated with `expr`. Errors if `expr` is -not associated with the constraint mappings stored in `optimizer_model`. - -The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ -and `kwargs` denote extra ones that user extensions may employ in accordance with -their implementation of `expression_supports`. Errors if such an -extension has not been written. - -By default only the public supports are returned, the -full set can be accessed via `label = All`. Moreover, the supports of infinite -expressions are returned as a list. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the expression has multiple -infinite parameter dependencies. - -**Example** -```julia-repl -julia> supports(cref) -2-element Array{Tuple{Float64},1}: - (0.0,) - (1.0,) -``` -""" -function supports(expr::JuMP.AbstractJuMPScalar; kwargs...) - model = JuMP.owner_model(expr) - if isnothing(model) - return () - else - key = optimizer_model_key(model) - opt_model = optimizer_model(model) - return expression_supports(opt_model, expr, Val(key); kwargs...) - end -end - -################################################################################ -# OPTIMIZER MODEL MAPPING METHODS (CONSTRAINTS) -################################################################################ -""" - optimizer_model_constraint(cref::InfOptConstraintRef, - key::Val{ext_key_name}; [kwargs...]) - -Return the reformulation constraint(s) stored in the optimizer model that correspond -to `cref`. This needs to be defined for extensions that implement a custom -optimizer model type. Principally, this is accomplished by typed the `key` -argument to `Val{ext_key_name}`. Keyword arguments can be added as needed. -""" -function optimizer_model_constraint end - -# Fallback for unextended keys -function optimizer_model_constraint( - cref::InfOptConstraintRef, - key; - kwargs... 
- ) - error("`optimizer_model_constraint` not implemented for optimizer model " * - "key `$(typeof(key).parameters[1])`.") -end - -""" - optimizer_model_constraint(cref::InfOptConstraintRef; - [label::Type{<:AbstractSupportLabel} = PublicLabel, - ndarray::Bool = false, - kwargs...]) - -Return the reformulation constraint(s) stored in the optimizer model that correspond -to `cref`. Errors if no such constraint can be found in -the optimizer model. - -The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ -and `kwargs` denote extra ones that user extensions may employ in accordance with -their implementation of [`optimizer_model_constraint`](@ref). Errors if such an -extension has not been written. - -By default only the constraints associated with public supports are returned, the -full set can be accessed via `label = All`. Moreover, infinite constraints are -returned as a list corresponding to their supports. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the constraint has multiple -infinite parameter dependencies. The corresponding supports are obtained via -`supports` using the same keyword arguments. - -**Example** -```julia-repl -julia> optimizer_model_constraint(c1) # finite constraint -c1 : x(support: 1) - y <= 3.0 -``` -""" -function optimizer_model_constraint( - cref::InfOptConstraintRef; - kwargs... - ) - key = optimizer_model_key(JuMP.owner_model(cref)) - return optimizer_model_constraint(cref, Val(key); kwargs...) -end - -""" - constraint_supports(optimizer_model::JuMP.Model, - cref::InfOptConstraintRef, - key::Val{ext_key_name}; [kwargs...]) - -Return the supports associated with the mappings of `cref` in `optimizer_model`. -This dispatches off of `key` which permits optimizer model extensions. This -should throw an error if `cref` is not associated with the variable mappings -stored in `optimizer_model`. Keyword arguments can be added as needed. -""" -function constraint_supports end - -# fallback for unextended keys -function constraint_supports(optimizer_model::JuMP.Model, - cref::InfOptConstraintRef, - key; kwargs...) - error("`constraint_supports` not implemented for optimizer model key " * - "`$(typeof(key).parameters[1])`.") -end - -""" - supports(cref::InfOptConstraintRef; - [label::Type{<:AbstractSupportLabel} = PublicLabel, - ndarray::Bool = false, - kwargs...]) - -Return the support associated with `cref`. Errors if `cref` is -not associated with the constraint mappings stored in `optimizer_model`. - -The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ -and `kwargs` denote extra ones that user extensions may employ in accordance with -their implementation of `constraint_supports`. Errors if such an -extension has not been written. - -By default only the public supports are returned, the -full set can be accessed via `label = All`. Moreover, the supports of infinite -constraints are returned as a list. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the constraint has multiple -infinite parameter dependencies. - -**Example** -```julia-repl -julia> supports(cref) -2-element Array{Tuple{Float64},1}: - (0.0,) - (1.0,) -``` -""" -function supports(cref::InfOptConstraintRef; kwargs...) - model = optimizer_model(JuMP.owner_model(cref)) - key = optimizer_model_key(JuMP.owner_model(cref)) - return constraint_supports(model, cref, Val(key); kwargs...) 
-end - -################################################################################ -# OPTIMIZATION METHODS -################################################################################ -""" - JuMP.set_optimize_hook( - model::InfiniteModel, - hook::Union{Function, Nothing} - )::Nothing - -Set the function `hook` as the optimize hook for `model` where `hook` should -have be of the form `hook(model::GenericModel; hook_specfic_kwargs..., kwargs...)`. -The `kwargs` are those passed to [`optimize!`](@ref). The `hook_specifc_kwargs` -are passed as additional keywords by the user when they call [`optimize!`](@ref). - -## Notes - -* The optimize hook should generally modify the model, or some external state -in some way, and then call `optimize!(model; ignore_optimize_hook = true)` to -optimize the problem, bypassing the hook. -* Use `set_optimize_hook(model, nothing)` to unset an optimize hook. -""" -function JuMP.set_optimize_hook( - model::InfiniteModel, - hook::Union{Function, Nothing} - ) - model.optimize_hook = hook - set_optimizer_model_ready(model, false) - return -end - -""" - JuMP.optimize!(model::InfiniteModel; [kwargs...]) - -Extend `JuMP.optimize!` to optimize infinite models using the internal -optimizer model. Calls [`build_optimizer_model!`](@ref) if the optimizer -model isn't up to date. The `kwargs` correspond to keyword arguments passed to -[`build_optimizer_model!`](@ref) if any are defined. The `kwargs` can also -include arguments that are passed to an optimize hook if one was set with -[`JuMP.set_optimize_hook`](@ref). - -**Example** -```julia-repl -julia> optimize!(model) - -julia> has_values(model) -true -``` -""" -function JuMP.optimize!( - model::InfiniteModel; - ignore_optimize_hook = isnothing(model.optimize_hook), - kwargs...) - if !ignore_optimize_hook - return model.optimize_hook(model; kwargs...) - end - if !optimizer_model_ready(model) - build_optimizer_model!(model; kwargs...) - end - JuMP.optimize!(optimizer_model(model)) - return -end diff --git a/src/point_variables.jl b/src/point_variables.jl index e191f522..688196e6 100644 --- a/src/point_variables.jl +++ b/src/point_variables.jl @@ -388,7 +388,7 @@ function _update_info_constraints(info::JuMP.VariableInfo, gvref, vref)::Nothing # finalize the update _update_variable_info(vref, info) - set_optimizer_model_ready(model, false) + set_transformation_backend_ready(model, false) return end diff --git a/src/results.jl b/src/results.jl index 6ef30839..47609ec3 100644 --- a/src/results.jl +++ b/src/results.jl @@ -2,38 +2,76 @@ # MODEL QUERIES ################################################################################ # Simple model queries -for op in (:termination_status, :raw_status, :solve_time, :simplex_iterations, - :barrier_iterations, :node_count, :objective_bound, :relative_gap) +for func in (:termination_status, :raw_status, :solve_time, :simplex_iterations, + :barrier_iterations, :node_count, :objective_bound, :relative_gap, + :result_count) @eval begin @doc """ - JuMP.$($op)(model::InfiniteModel) + JuMP.$($func)(backend::AbstractTransformationBackend) - Extend [`JuMP.$($op)`](https://jump.dev/JuMP.jl/v0.22/reference/solutions/#JuMP.$($op)) - for `InfiniteModel`s in accordance with that reported by its optimizer - model. Errors if such a query is not supported or if the optimizer model - hasn't be solved. + Implement `JuMP.$($func)` for transformation backends. If applicable, this + should be extended for new backend types. No extension is needed for + [`JuMPBackend`](@ref)s.
""" - function JuMP.$op(model::InfiniteModel) - return JuMP.$op(optimizer_model(model)) + function JuMP.$func(backend::AbstractTransformationBackend) + error("`JuMP.$($func)` not defined for backends of type " * + "`$(typeof(backend))`.") + end + + # Define for JuMPBackend + function JuMP.$func(backend::JuMPBackend) + return JuMP.$func(backend.model) + end + + @doc """ + JuMP.$($func)(model::InfiniteModel) + + Extend [`JuMP.$($func)`](https://jump.dev/JuMP.jl/v1/api/JuMP/#$($func)) + for `InfiniteModel`s in accordance with that reported by its + transformation backend. Errors if such a query is not supported or if + the transformation backend hasn't be solved. + """ + function JuMP.$func(model::InfiniteModel) + return JuMP.$func(model.backend) end end end # Simple result dependent model queries -for op in (:primal_status, :dual_status, :has_values, :has_duals, - :objective_value, :dual_objective_value) +for func in (:primal_status, :dual_status, :has_values, :has_duals, + :objective_value, :dual_objective_value) @eval begin @doc """ - JuMP.$($op)(model::InfiniteModel; [result::Int = 1]) + JuMP.$($func)(backend::AbstractTransformationBackend; [kwargs...]) + + Implment `JuMP.$($func)` for transformation backends. If applicable, this + should be extended for new backend types. No extension is needed for + [`JuMPBackend`](@ref)s. As needed keyword arguments can be added. + `JuMPBackend`s use the `result::Int = 1` keyword argument. + """ + function JuMP.$func(backend::AbstractTransformationBackend; kwargs...) + error("`JuMP.$($func)` not defined for backends of type " * + "`$(typeof(backend))`.") + end + + # Define for JuMPBackend + function JuMP.$func(backend::JuMPBackend; kwargs...) + return JuMP.$func(backend.model; kwargs...) + end - Extend [`JuMP.$($op)`](https://jump.dev/JuMP.jl/v0.22/reference/solutions/#JuMP.$($op)) - for `InfiniteModel`s in accordance with that reported by its optimizer - model and the result index `result` of the most recent solution obtained. - Errors if such a query is not supported or if the optimizer model hasn't - be solved. + @doc """ + JuMP.$($func)(model::InfiniteModel; [kwargs...]) + + Extend [`JuMP.$($func)`](https://jump.dev/JuMP.jl/v1/api/JuMP/#$($func)) + for `InfiniteModel`s in accordance with that reported by its + transformation backend. Errors if such a query is not supported or if the + transformation backend hasn't be solved. Accepts keywords depending + on the backend. JuMP-based backends use the `result::Int = 1` keyword + argument to access the solution index of interest (if the solver supports + multiple solutions). """ - function JuMP.$op(model::InfiniteModel; result::Int = 1) - return JuMP.$op(optimizer_model(model); result = result) + function JuMP.$func(model::InfiniteModel; kwargs...) + return JuMP.$func(model.backend; kwargs...) end end end @@ -42,100 +80,141 @@ end # VALUE QUERIES ################################################################################ """ - map_value([ref/expr], key::Val{ext_key_name}, result::Int; kwargs...) + map_value([ref/expr], backend::AbstractTransformationBackend; [kwargs...]) -Map the value(s) of `ref` to its counterpart in the optimizer model type that is -distininguished by its extension key `key` as type `Val{ext_key_name}`. +Map the value(s) of `ref` to its counterpart in the `backend`. Here `ref` need refer to methods for both variable references and constraint -references. 
This only needs to be defined for reformulation extensions that cannot -readily extend `optimizer_model_variable`, `optimizer_model_expression`, and/or -`optimizer_model_constraint`. Such as is the case with reformuations that do not -have a direct mapping between variables and/or constraints in the original -infinite form. Otherwise, `optimizer_model_variable`, -`optimizer_model_expression`, and `optimizer_model_constraint` are used to make -these mappings by default where `kwargs` are passed on these functions. Here -`result` is the result index used in `value`. +references. No extension is needed for [`JuMPBackend`](@ref)s that support +`transformation_model_variable`, `transformation_model_expression`, and +`transformation_model_constraint`. In this case, `transformation_model_variable`, +`transformation_model_expression`, and `transformation_model_constraint` are +used to make these mappings by default where `kwargs` are passed on to these functions. +For mapping the values of infinite parameters, refer to +[`map_infinite_parameter_value`](@ref). """ -function map_value end +function map_value(ref, backend::AbstractTransformationBackend; kwargs...) + error("Value queries are not supported for `$(typeof(ref))`s with a " * + "transformation backend of type `$(typeof(backend))`. If you are " * + "writing an extension be sure to extend `map_value`.") +end + +# Dispatch to deal with what is returned by parameter functions +_get_jump_value(v, result) = JuMP.value(v, result = result) +_get_jump_value(v::Real, result) = v -# Default method that depends on optimizer_model_variable --> making extensions easier -function map_value(vref::GeneralVariableRef, key, result::Int; kwargs...) - opt_vref = optimizer_model_variable(vref, key; kwargs...) +# Default method that depends on transformation_model_variable --> making extensions easier +function map_value( + vref::GeneralVariableRef, + backend::JuMPBackend; + result::Int = 1, + kwargs... + ) + opt_vref = transformation_model_variable(vref, backend; kwargs...) if opt_vref isa AbstractArray - return map(v -> JuMP.value(v; result = result), opt_vref) + return map(v -> _get_jump_value(v, result), opt_vref) else - return JuMP.value(opt_vref; result = result) + return _get_jump_value(opt_vref, result) end end -# Default method that depends on optimizer_model_constraint --> making extensions easier -function map_value(cref::InfOptConstraintRef, key, result::Int; kwargs...) - func = JuMP.jump_function(JuMP.constraint_object(cref)) - return map_value(func, key, result; kwargs...) +# Default method that depends on transformation_model_expression --> making extensions easier +function map_value( + expr::JuMP.AbstractJuMPScalar, + backend::JuMPBackend; + result::Int = 1, + kwargs... + ) + opt_expr = transformation_model_expression(expr, backend; kwargs...) + if opt_expr isa AbstractArray + return map(v -> _get_jump_value(v, result), opt_expr) + else + return _get_jump_value(opt_expr, result) + end end -# Default method that depends on optimizer_model_expression --> making extensions easier -function map_value(expr::JuMP.AbstractJuMPScalar, key, result::Int; kwargs...) - opt_expr = optimizer_model_expression(expr, key; kwargs...) - if opt_expr isa AbstractArray - return map(e -> JuMP.value(e; result = result), opt_expr) +# Default method that depends on transformation_model_constraint --> making extensions easier +function map_value( + cref::InfOptConstraintRef, + backend::JuMPBackend; + result::Int = 1, + kwargs...
-# Default method that depends on optimizer_model_expression --> making extensions easier
-function map_value(expr::JuMP.AbstractJuMPScalar, key, result::Int; kwargs...)
-    opt_expr = optimizer_model_expression(expr, key; kwargs...)
-    if opt_expr isa AbstractArray
-        return map(e -> JuMP.value(e; result = result), opt_expr)
+# Default method that depends on transformation_model_constraint --> making extensions easier
+function map_value(
+    cref::InfOptConstraintRef,
+    backend::JuMPBackend;
+    result::Int = 1,
+    kwargs...
+    )
+    opt_cref = transformation_model_constraint(cref, backend; kwargs...)
+    if opt_cref isa AbstractArray
+        return map(c -> _get_jump_value(c, result), opt_cref)
     else
-        return JuMP.value(opt_expr; result = result)
+        return _get_jump_value(opt_cref, result)
     end
 end
 
+"""
+    map_infinite_parameter_value(
+        pref::GeneralVariableRef,
+        backend::AbstractTransformationBackend;
+        [kwargs...]
+    )
+
+Return the mapped value of the infinite parameter `pref` according to the
+`backend`. This serves as an optional extension point for new types of
+backends that do not rely on using supports. Otherwise, this defaults
+to:
+```julia
+map_infinite_parameter_value(pref; [label = PublicLabel]) = supports(pref, label = label)
+```
+"""
+function map_infinite_parameter_value(
+    pref::GeneralVariableRef,
+    backend::AbstractTransformationBackend;
+    label = PublicLabel
+    )
+    return supports(pref, label = label)
+end
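Since `map_infinite_parameter_value` is an extension point, a backend that does not discretize over supports could report parameter values its own way. A hedged sketch, again reusing the hypothetical `MyBackend` (the `parameter_points` field is an assumption not defined in the earlier sketch):

```julia
# Hypothetical: report parameter values from the backend's own records
# instead of the default `supports` grid
function InfiniteOpt.map_infinite_parameter_value(
    pref::GeneralVariableRef,
    backend::MyBackend;
    kwargs...
    )
    return backend.parameter_points[pref]  # assumed field
end
```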
 ## Define dispatch methods to collect value of parameters
 # InfiniteParameter
-function _get_value(pref, ::Type{<:InfiniteParameterIndex}, result; kwargs...)
-    label = get(kwargs, :label, PublicLabel)
-    return supports(pref, label = label) # TODO generalize this once we decouple the supports
+function _get_value(pref, ::Type{<:InfiniteParameterIndex}; kwargs...)
+    backend = JuMP.owner_model(pref).backend
+    return map_infinite_parameter_value(pref, backend; kwargs...)
 end
 
 # FiniteParameter
-function _get_value(pref, ::Type{FiniteParameterIndex}, result; kwargs...)
+function _get_value(pref, ::Type{FiniteParameterIndex}; kwargs...)
     return parameter_value(pref)
 end
 
 # Others
-function _get_value(vref, index_type, result; kwargs...)
-    return map_value(vref, Val(optimizer_model_key(JuMP.owner_model(vref))),
-                     result; kwargs...)
-end
-
-# Extend JuMP.value to handle numbers as needed for parameter functions
-function JuMP.value(val::Real; result = 1)
-    return val
+function _get_value(vref, index_type; kwargs...)
+    return map_value(vref, JuMP.owner_model(vref).backend; kwargs...)
 end
 
 """
-    JuMP.value(vref::GeneralVariableRef; [result::Int = 1,
-        label::Type{<:AbstractSupportLabel} = PublicLabel,
-        ndarray::Bool = false, kwargs...])
+    JuMP.value(vref::GeneralVariableRef; [kwargs...])
 
 Extend `JuMP.value` to return the value(s) of `vref` in accordance with its
-reformulation variable(s) stored in the optimizer model and the result index
-`result` of the most recent solution obtained. Use
+reformulation variable(s) stored in the transformation backend. Use
 [`JuMP.has_values`](@ref JuMP.has_values(::InfiniteModel)) to check
-if a result exists before asking for values.
+whether a result exists before checking the values.
 
-The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ
-and `kwargs` denote extra ones that user extensions may employ.
-
-By default only the values associated with public supports are returned, the
-full set can be accessed via `label = All`. Moreover, the values of infinite
-variables are returned as a list. However, a n-dimensional array
+The keyword arguments `kwargs` depend on the transformation backend that is
+being used. The default backend `TranscriptionOpt` uses the keyword
+arguments:
+- `result::Int = 1`: indexes the solution result to be queried
+- `label::Type{<:AbstractSupportLabel} = PublicLabel`: the label of supports to be returned
+- `ndarray::Bool = false`: indicates whether the output should be formatted as an array
+By default only the values associated with public supports (i.e., `PublicLabel`s)
+are returned, the full set can be accessed via `label = All`. Moreover, the values
+of infinite variables are returned as a list. However, an n-dimensional array
 can be obtained via `ndarray = true` which is handy when the variable has
 multiple infinite parameter dependencies.
 
-To provide context for the
-results it may be helpful to also query the variable's `parameter_refs` and
-`supports` which will have a one-to-one correspondence with the value(s).
-It may also be helpful to query via [`optimizer_model_variable`](@ref) to
-retrieve the variables(s) that these values are based on. These functions should
-all be called with the same keyword arugments for consistency.
+To provide context for the values, it may be helpful to also query the variable's
+`parameter_refs` and `supports` which will have a one-to-one correspondence with
+the value(s). It may also be helpful to query via [`transformation_model_variable`](@ref)
+to retrieve the variable(s) that these values are based on. These functions should
+all be called with the same keyword arguments for consistency.
 
-For extensions, this only works if
-[`optimizer_model_variable`](@ref) has been extended correctly and/or
+For extensions, this only works if
+[`transformation_model_variable`](@ref) has been extended correctly and/or
 [`map_value`](@ref) has been extended for variables.
 
 **Example**
```julia-repl
julia> value(z)
42.0
```
"""
-function JuMP.value(vref::GeneralVariableRef; result::Int = 1, kwargs...)
-    return _get_value(vref, _index_type(vref), result; kwargs...)
+function JuMP.value(vref::GeneralVariableRef; kwargs...)
+    return _get_value(vref, _index_type(vref); kwargs...)
 end
 
 """
-    JuMP.value(cref::InfOptConstraintRef; [result::Int = 1,
-        label::Type{<:AbstractSupportLabel} = PublicLabel,
-        ndarray::Bool = false, kwargs...])
-
-Extend `JuMP.value` to return the value(s) of `cref` in accordance with its
-reformulation constraint(s) stored in the optimizer model and the result index
-`result` of the most recent solution obtained. Use
-[`JuMP.has_values`](@ref JuMP.has_values(::InfiniteModel)) to check if a result
-exists before asking for values.
-
-The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ
-and `kwargs` denote extra ones that user extensions may employ.
-
-By default only the values associated with public supports are returned, the
-full set can be accessed via `label = All`. Moreover, the values of infinite
-constraints are returned as a list. However, a n-dimensional array
-can be obtained via `ndarray = true` which is handy when the constraint has multiple
-infinite parameter dependencies.
-
-To provide context for
-the results it may be helpful to also query the constraint's `parameter_refs`
-and `supports` which will have a one-to-one correspondence with the value(s).
-It may also be helpful to query via [`optimizer_model_constraint`](@ref) to
-retrieve the constraint(s) that these values are based on. By default, only the
-values corresponding to public supports are returned. These functions should
-all be called with the same keyword arugments for consistency.
-
-For extensions, this only
-works if [`optimizer_model_constraint`](@ref) has been extended correctly and/or
-[`map_value`](@ref) has been extended for constraints.
-
-**Example**
-```julia-repl
-julia> value(c1)
-4-element Array{Float64,1}:
- -0.0
- 20.9
- 20.9
- 20.9
-```
-"""
-function JuMP.value(cref::InfOptConstraintRef; result::Int = 1,
-                    kwargs...)
-    return map_value(cref, Val(optimizer_model_key(JuMP.owner_model(cref))),
-                     result; kwargs...)
-end
+    JuMP.value(expr::JuMP.AbstractJuMPScalar; [kwargs...])
 
-"""
-    JuMP.value(expr::JuMP.AbstractJuMPScalar; [result::Int = 1,
-        label::Type{<:AbstractSupportLabel} = PublicLabel,
-        ndarray::Bool = false, kwargs...])
-
-Return the value(s) of `expr` in accordance with the optimized variable values
-the result index `result` of the most recent solution obtained. Use
-[`JuMP.has_values`](@ref JuMP.has_values(::InfiniteModel)) to check if a result
-exists before asking for values.
+Extend `JuMP.value` to return the value(s) of `expr` in accordance with its
+reformulation expression(s) stored in the transformation backend. Use
+[`JuMP.has_values`](@ref JuMP.has_values(::InfiniteModel)) to check
+whether a result exists before checking the values.
 
-The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ
-and `kwargs` denote extra ones that user extensions may employ.
-
-By default only the values associated with public supports are returned, the
-full set can be accessed via `label = All`. Moreover, the values of infinite
-expressions are returned as a list. However, a n-dimensional array
+The keyword arguments `kwargs` depend on the transformation backend that is
+being used. The default backend `TranscriptionOpt` uses the keyword
+arguments:
+- `result::Int = 1`: indexes the solution result to be queried
+- `label::Type{<:AbstractSupportLabel} = PublicLabel`: the label of supports to be returned
+- `ndarray::Bool = false`: indicates whether the output should be formatted as an array
+By default only the values associated with public supports (i.e., `PublicLabel`s)
+are returned, the full set can be accessed via `label = All`. Moreover, the values
+of infinite expressions are returned as a list. However, an n-dimensional array
 can be obtained via `ndarray = true` which is handy when the expression has multiple
 infinite parameter dependencies.
-
-To provide context for the results it may be
-helpful to also query the expression's `parameter_refs` and `supports` which
-will have a one-to-one correspondence with the value(s). It may also be helpful
-to query via [`optimizer_model_expression`](@ref) to retrieve the expression(s)
-that these values are based on. These should use the same keyword arguments for
-consistency.
 
-For extensions, this only works if [`optimizer_model_expression`](@ref) has been
-extended correctly and/or [`map_value`](@ref) has been extended for expressions.
+To provide context for the values, it may be helpful to also query the expression's
+`parameter_refs` and `supports` which will have a one-to-one correspondence with
+the value(s). It may also be helpful to query via [`transformation_model_expression`](@ref)
+to retrieve the expression(s) that these values are based on. These functions should
+all be called with the same keyword arguments for consistency.
+
+For extensions, this only works if
+[`transformation_model_expression`](@ref) has been extended correctly and/or
+[`map_value`](@ref) has been extended for expressions.
 **Example**
```julia-repl
@@ -239,8 +271,11 @@ julia> value(my_infinite_expr)
```
"""
 function JuMP.value(
-    expr::Union{JuMP.GenericAffExpr{Float64, GeneralVariableRef}, JuMP.GenericQuadExpr{Float64, GeneralVariableRef}, JuMP.GenericNonlinearExpr{GeneralVariableRef}};
-    result::Int = 1,
+    expr::Union{
+        JuMP.GenericAffExpr{Float64, GeneralVariableRef},
+        JuMP.GenericQuadExpr{Float64, GeneralVariableRef},
+        JuMP.GenericNonlinearExpr{GeneralVariableRef}
+    };
     kwargs...
     )
     # get the model
@@ -251,238 +286,199 @@ function JuMP.value(
         return JuMP.constant(expr)
     # otherwise let's call map_value
     else
-        key = optimizer_model_key(model)
-        return map_value(expr, Val(key), result; kwargs...)
+        return map_value(expr, model.backend; kwargs...)
     end
 end
 
-################################################################################
-#                                REDUCED COST
-################################################################################
-"""
-    map_reduced_cost(vref::GeneralVariableRef, key::Val{ext_key_name},
-                     result::Int; kwargs...)
-
-Map the reduced cost(s) of `vref` to its counterpart in the optimizer model type that is
-distininguished by its extension key `key` as type `Val{ext_key_name}`.
-This only needs to be defined for reformulation extensions that cannot
-readily extend `optimizer_model_variable`. Such as is the case with reformulations
-that do not have a direct mapping between variables in the original
-infinite form. Otherwise, `optimizer_model_variable`, is used to make
-these mappings by default where `kwargs` are passed on these functions. Here
-`result` is the result index used in `value`.
 """
-function map_reduced_cost end
+    JuMP.value(cref::InfOptConstraintRef; [kwargs...])
 
-# Default definition for when optimizer_model_variable is defined
-function map_reduced_cost(vref::GeneralVariableRef, key; kwargs...)
-    opt_vref = optimizer_model_variable(vref, key; kwargs...)
-    if opt_vref isa AbstractArray
-        return map(v -> JuMP.reduced_cost(v), opt_vref)
-    else
-        return JuMP.reduced_cost(opt_vref)
-    end
-end
+Extend `JuMP.value` to return the value(s) of `cref` in accordance with its
+reformulation constraint(s) stored in the transformation backend. Use
+[`JuMP.has_values`](@ref JuMP.has_values(::InfiniteModel)) to check
+whether a result exists before checking the values.
+
+The keyword arguments `kwargs` depend on the transformation backend that is
+being used. The default backend `TranscriptionOpt` uses the keyword
+arguments:
+- `result::Int = 1`: indexes the solution result to be queried
+- `label::Type{<:AbstractSupportLabel} = PublicLabel`: the label of supports to be returned
+- `ndarray::Bool = false`: indicates whether the output should be formatted as an array
+By default only the values associated with public supports (i.e., `PublicLabel`s)
+are returned, the full set can be accessed via `label = All`. Moreover, the values
+of infinite constraints are returned as a list. However, an n-dimensional array
+can be obtained via `ndarray = true` which is handy when the constraint has multiple
+infinite parameter dependencies.
 
-"""
-    JuMP.reduced_cost(vref::GeneralVariableRef)
+To provide context for the values, it may be helpful to also query the constraint's
+`parameter_refs` and `supports` which will have a one-to-one correspondence with
+the value(s). It may also be helpful to query via [`transformation_model_constraint`](@ref)
+to retrieve the constraint(s) that these values are based on. These functions should
+all be called with the same keyword arguments for consistency.
-Extend `JuMP.reduced_cost`. This returns the reduced cost(s) of a variable. This -will be a vector of scalar values for an infinite variable or will be a scalar -value for finite variables. +For extensions, this only works if +[`transformation_model_constraint`](@ref) has been extended correctly and/or +[`map_value`](@ref) has been extended for constraints. **Example** ```julia-repl -julia> reduced_cost(x) -12.81 +julia> value(c1) +4-element Array{Float64,1}: + -0.0 + 20.9 + 20.9 + 20.9 ``` """ -function JuMP.reduced_cost(vref::GeneralVariableRef; kwargs...) - return map_reduced_cost(vref, Val(optimizer_model_key(JuMP.owner_model(vref))); - kwargs...) +function JuMP.value(cref::InfOptConstraintRef; kwargs...) + return map_value(cref, JuMP.owner_model(cref).backend; kwargs...) end ################################################################################ -# OPTIMIZER INDEX QUERIES +# BOILERPLATE REF QUERIES ################################################################################ -""" - map_optimizer_index(ref, key::Val{ext_key_name}; kwargs...) - -Map the `MathOptInterface` index(es) of `ref` to its counterpart in the optimizer -model type that is distininguished by its extension key `key` as type `Val{ext_key_name}`. -Here `ref` need refer to methods for both variable references and constraint -references. This only needs to be defined for reformulation extensions that cannot -readily extend `optimizer_model_variable` and `optimizer_model_constraint`. -Such as is the case with reformuations that do not have a direct mapping between -variables and/or constraints in the original infinite form. Otherwise, -`optimizer_model_variable` and `optimizer_model_constraint` are used to make -these mappings by default where `kwargs` are passed on as well. -""" -function map_optimizer_index end +for (Ref, func, mapper) in ( + (:GeneralVariableRef, :reduced_cost, :transformation_model_variable), + (:GeneralVariableRef, :optimizer_index, :transformation_model_variable), + (:InfOptConstraintRef, :optimizer_index, :transformation_model_constraint), + (:InfOptConstraintRef, :shadow_price, :transformation_model_constraint) + ) + @eval begin + @doc """ + map_$($func)( + ref::$($Ref), + backend::AbstractTransformationBackend; + [kwargs...] + ) + + Map `JuMP.$($func)` of `ref` to its counterpart in the `backend`. + No extension is needed for [`JuMPBackend`](@ref)s that support + `$($mapper)`, in which case, `$($mapper)` is used to make these + mappings using `kwargs`. + """ + function $(Symbol(string("map_", func)))( + ref::$Ref, + backend::AbstractTransformationBackend; + kwargs... + ) + error("`$($func)` queries are not supported for a " * + "transformation backend of type `$(typeof(backend))`. If you " * + "are writing an extension be sure to extend `map_$($func)`.") + end -# Default method that depends on optimizer_model_variable --> making extensions easier -function map_optimizer_index(vref::GeneralVariableRef, key; kwargs...) - opt_vref = optimizer_model_variable(vref, key; kwargs...) - if opt_vref isa AbstractArray - return map(v -> JuMP.optimizer_index(v), opt_vref) - else - return JuMP.optimizer_index(opt_vref) - end -end + # JuMPBackend + function $(Symbol(string("map_", func)))( + ref::$Ref, + backend::JuMPBackend; + kwargs... + ) + opt_ref = $mapper(ref, backend; kwargs...) 
+
+            if opt_ref isa AbstractArray
+                return map(r -> JuMP.$func(r), opt_ref)
+            else
+                return JuMP.$func(opt_ref)
+            end
+        end
 
-# Default method that depends on optimizer_model_constraint --> making extensions easier
-function map_optimizer_index(cref::InfOptConstraintRef, key; kwargs...)
-    opt_cref = optimizer_model_constraint(cref, key; kwargs...)
-    if opt_cref isa AbstractArray
-        return map(c -> JuMP.optimizer_index(c), opt_cref)
-    else
-        return JuMP.optimizer_index(opt_cref)
+        @doc """
+            JuMP.$($func)(ref::$($Ref); [kwargs...])
+
+        Extend [`JuMP.$($func)`](https://jump.dev/JuMP.jl/v1/api/JuMP/#$($func))
+        for `ref`s in an `InfiniteModel`. The exact format of the output will depend
+        on the transformation backend that is being used.
+
+        The keyword arguments `kwargs` depend on the transformation backend that is
+        being used. The default backend `TranscriptionOpt` uses the keyword
+        arguments:
+        - `label::Type{<:AbstractSupportLabel} = PublicLabel`: the label of supports to be returned
+        - `ndarray::Bool = false`: indicates whether the output should be formatted as an array
+        By default only the values associated with public supports (i.e., `PublicLabel`s)
+        are returned, the full set can be accessed via `label = All`. Moreover, the values
+        of infinite variables/constraints are returned as a list. However, an n-dimensional array
+        can be obtained via `ndarray = true` which is handy when the variable/constraint has multiple
+        infinite parameter dependencies.
+
+        To provide context for the values, it may be helpful to also query the
+        `parameter_refs` and `supports` which will have a one-to-one correspondence with
+        the output(s) of this function. These functions should
+        all be called with the same keyword arguments for consistency.
+        """
+        function JuMP.$func(ref::$Ref; kwargs...)
+            backend = JuMP.owner_model(ref).backend
+            return $(Symbol(string("map_", func)))(ref, backend; kwargs...)
+        end
    end
end
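For concreteness, the `@eval` loop above expands (roughly) to methods like the following for its first triple. This is a sketch of the generated code for illustration only; the actual methods come from the loop, not from written-out definitions:

```julia
# Roughly what @eval produces for (GeneralVariableRef, reduced_cost,
# transformation_model_variable)
function map_reduced_cost(
    ref::GeneralVariableRef,
    backend::JuMPBackend;
    kwargs...
    )
    opt_ref = transformation_model_variable(ref, backend; kwargs...)
    if opt_ref isa AbstractArray
        return map(r -> JuMP.reduced_cost(r), opt_ref)
    else
        return JuMP.reduced_cost(opt_ref)
    end
end
```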
-"""
-    JuMP.optimizer_index(vref::GeneralVariableRef;
-        [label::Type{<:AbstractSupportLabel} = PublicLabel,
-        ndarray::Bool = false, kwargs...])
-
-Extend `JuMP.optimizer_index` to return the `MathOptInterface` index(es) of
-`vref` in accordance with its reformulation variable(s) stored in the optimizer
-model.
-
-The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ
-and `kwargs` denote extra ones that user extensions may employ.
-
-By default only the optimizer indices associated with public supports are returned, the
-full set can be accessed via `label = All`. Moreover, the indices of infinite
-variables are returned as a list. However, a n-dimensional array
-can be obtained via `ndarray = true` which is handy when the variable has multiple
-infinite parameter dependencies.
-
-It may also be helpful to query via [`optimizer_model_variable`](@ref) to
-retrieve the variables(s) that these indices are based on. These should use the
-same keyword arguments for consistency.
-
-For extensions, this
-only works if [`optimizer_model_variable`](@ref) has been extended correctly
-and/or [`map_optimizer_index`](@ref) has been extended for variables.
-
-**Example**
-```julia-repl
-julia> optimizer_index(x)
-4-element Array{MathOptInterface.VariableIndex,1}:
- MathOptInterface.VariableIndex(2)
- MathOptInterface.VariableIndex(3)
- MathOptInterface.VariableIndex(4)
- MathOptInterface.VariableIndex(5)
-```
-"""
-function JuMP.optimizer_index(vref::GeneralVariableRef; kwargs...)
-    return map_optimizer_index(vref, Val(optimizer_model_key(JuMP.owner_model(vref)));
-                               kwargs...)
-end
-
-"""
-    JuMP.optimizer_index(cref::InfOptConstraintRef;
-        [label::Type{<:AbstractSupportLabel} = PublicLabel,
-        ndarray::Bool = false, kwargs...])
-
-Extend `JuMP.optimizer_index` to return the `MathOptInterface` index(es) of
-`cref` in accordance with its reformulation constraints(s) stored in the
-optimizer model.
-
-The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ
-and `kwargs` denote extra ones that user extensions may employ.
-
-By default only the optimizer indices associated with public supports are returned, the
-full set can be accessed via `label = All`. Moreover, the indices of infinite
-constraints are returned as a list. However, a n-dimensional array
-can be obtained via `ndarray = true` which is handy when the constraint has multiple
-infinite parameter dependencies.
-
-It may also be helpful to query via [`optimizer_model_constraint`](@ref) to
-retrieve the constraints(s) that these indices are based on. The same keyword
-arguments should be used for consistency.
-
-For extensions, this
-only works if [`optimizer_model_constraint`](@ref) has been extended correctly
-and/or [`map_optimizer_index`](@ref) has been extended for constraints.
-
-**Example**
-```julia-repl
-julia> optimizer_index(c1)
-4-element Array{MathOptInterface.ConstraintIndex{MathOptInterface.ScalarAffineFunction{Float64},MathOptInterface.GreaterThan{Float64}},1}:
- MathOptInterface.ConstraintIndex{MathOptInterface.ScalarAffineFunction{Float64},MathOptInterface.GreaterThan{Float64}}(1)
- MathOptInterface.ConstraintIndex{MathOptInterface.ScalarAffineFunction{Float64},MathOptInterface.GreaterThan{Float64}}(2)
- MathOptInterface.ConstraintIndex{MathOptInterface.ScalarAffineFunction{Float64},MathOptInterface.GreaterThan{Float64}}(3)
- MathOptInterface.ConstraintIndex{MathOptInterface.ScalarAffineFunction{Float64},MathOptInterface.GreaterThan{Float64}}(4)
-```
-"""
-function JuMP.optimizer_index(cref::InfOptConstraintRef; kwargs...)
-    return map_optimizer_index(cref, Val(optimizer_model_key(JuMP.owner_model(cref)));
-                               kwargs...)
-end
-
 ################################################################################
 #                                DUAL QUERIES
 ################################################################################
 """
-    map_dual(cref::InfOptConstraintRef, key::Val{ext_key_name}, result::Int;
-             kwargs...)
+    map_dual(
+        cref::InfOptConstraintRef,
+        backend::AbstractTransformationBackend;
+        [kwargs...]
+    )
 
-Map the dual(s) of `cref` to its counterpart in the optimizer
-model type that is distininguished by its extension key `key` as type `Val{ext_key_name}`.
-Here `ref` need refer to methods for both variable references and constraint
-references. This only needs to be defined for reformulation extensions that cannot
-readily extend `optimizer_model_variable` and `optimizer_model_constraint`.
-Such as is the case with reformuations that do not have a direct mapping between
-variables and/or constraints in the original infinite form. Otherwise,
-`optimizer_model_variable` and `optimizer_model_constraint` are used to make
-these mappings by default where `kwargs` are also pass on to. Here `result` is
-the result index that is used in `dual`.
+Map the dual(s) of `cref` to its counterpart in the `backend`.
+No extension is needed for [`JuMPBackend`](@ref)s that support
+`transformation_model_constraint`. In this case, `transformation_model_constraint`
+is used to make these mappings by default where `kwargs` are passed on to these
+functions.
 """
-function map_dual end
-
-# Default method that depends on optimizer_model_constraint --> making extensions easier
-function map_dual(cref::InfOptConstraintRef, key, result::Int; kwargs...)
-    opt_cref = optimizer_model_constraint(cref, key; kwargs...)
-    if opt_cref isa AbstractArray && first(opt_cref) isa JuMP.NonlinearConstraintRef
-        return map(c -> JuMP.dual(c), opt_cref)
-    elseif opt_cref isa AbstractArray
-        return map(c -> JuMP.dual(c; result = result), opt_cref)
-    elseif opt_cref isa JuMP.NonlinearConstraintRef
-        return JuMP.dual(opt_cref)
+function map_dual(
+    cref::InfOptConstraintRef,
+    backend::AbstractTransformationBackend;
+    kwargs...
+    )
+    error("Dual queries are not supported for a " *
+          "transformation backend of type `$(typeof(backend))`. If you are " *
+          "writing an extension be sure to extend `map_dual`.")
+end
+
+# JuMPBackend default
+function map_dual(
+    cref::InfOptConstraintRef,
+    backend::JuMPBackend;
+    result::Int = 1,
+    kwargs...
+    )
+    opt_cref = transformation_model_constraint(cref, backend; kwargs...)
+    if opt_cref isa AbstractArray
+        return map(c -> JuMP.dual(c, result = result), opt_cref)
     else
-        return JuMP.dual(opt_cref; result = result)
+        return JuMP.dual(opt_cref, result = result)
     end
 end
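As with values, a backend that tracks duals itself could extend `map_dual` directly instead of `transformation_model_constraint`. A hedged sketch, reusing the hypothetical `MyBackend` from earlier:

```julia
# Hypothetical: duals stored by the backend, keyed by constraint reference
function InfiniteOpt.map_dual(
    cref::InfOptConstraintRef,
    backend::MyBackend;
    kwargs...
    )
    return backend.duals[cref]  # assumed lookup table
end
```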
""" -function map_dual end - -# Default method that depends on optimizer_model_constraint --> making extensions easier -function map_dual(cref::InfOptConstraintRef, key, result::Int; kwargs...) - opt_cref = optimizer_model_constraint(cref, key; kwargs...) - if opt_cref isa AbstractArray && first(opt_cref) isa JuMP.NonlinearConstraintRef - return map(c -> JuMP.dual(c), opt_cref) - elseif opt_cref isa AbstractArray - return map(c -> JuMP.dual(c; result = result), opt_cref) - elseif opt_cref isa JuMP.NonlinearConstraintRef - return JuMP.dual(opt_cref) +function map_dual( + cref::InfOptConstraintRef, + backend::AbstractTransformationBackend; + kwargs... + ) + error("Dual queries are not supported for a " * + "transformation backend of type `$(typeof(backend))`. If you are " * + "writing an extension be sure to extend `map_dual`.") +end + +# JuMPBackend default +function map_dual( + cref::InfOptConstraintRef, + backend::JuMPBackend; + result::Int = 1, + kwargs... + ) + opt_cref = transformation_model_constraint(cref, backend; kwargs...) + if opt_cref isa AbstractArray + return map(c -> JuMP.dual(c, result = result), opt_cref) else - return JuMP.dual(opt_cref; result = result) + return JuMP.dual(opt_cref, result = result) end end """ - JuMP.dual(cref::InfOptConstraintRef; [result::Int = 1, - label::Type{<:AbstractSupportLabel} = PublicLabel, - ndarray::Bool = false, kwargs...]) + JuMP.dual(cref::InfOptConstraintRef; [kwargs...]) Extend `JuMP.dual` to return the dual(s) of `cref` in accordance with its -reformulation constraint(s) stored in the optimizer model and the result index -`result` of the most recent solution obtained. Use -[`JuMP.has_duals`](@ref JuMP.has_duals(::InfiniteModel)) to check if a result -exists before asking for duals. - -The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ -and `kwargs` denote extra ones that user extensions may employ. - -By default only the duals associated with public supports are returned, the -full set can be accessed via `label = All`. Moreover, the duals of infinite -constraints are returned as a list. However, a n-dimensional array +reformulation constraint(s) stored in the transformation backend. Use +[`JuMP.has_duals`](@ref JuMP.has_duals(::InfiniteModel)) to check +whether a result exists before checking the duals. + +Thw keyword arguments `kwargs` depend on the transformation backend that is +being used. The default backend `TranscriptionOpt` uses the keyword +arguments: +- `result::Int = 1`: indexes the solution result to be queried +- `label::Type{<:AbstractSupportLabel} = PublicLabel`: the label of supports to be returned +- `ndarray::Bool = false`: indicates whether the output should be formatted as an array +By default only the values associated with public supports (i.e., `PublicLabel`s) +are returned, the full set can be accessed via `label = All`. Moreover, the duals +of infinite constraints are returned as a list. However, a n-dimensional array can be obtained via `ndarray = true` which is handy when the constraint has multiple infinite parameter dependencies. -It may also be helpful to -query via [`optimizer_model_constraint`](@ref) to retrieve the constraint(s) -that these duals are based on. Calling `parameter_refs` and `supports` may also -be insightful. Be sure to use the same keyword arguments for consistency. +To provide context for the duals, it may be helpful to also query the constraint's +`parameter_refs` and `supports` which will have a one-to-one correspondence with +the value(s). 
It may also be helpful to query via [`transformation_model_constraint`](@ref) +to retrieve the constraint(s) that these values are based on. These functions should +all be called with the same keyword arguments for consistency. -For extensions, this only -works if [`optimizer_model_constraint`](@ref) has been extended correctly and/or +For extensions, this only works if +[`transformation_model_constraint`](@ref) has been extended correctly and/or [`map_dual`](@ref) has been extended for constraints. **Example** @@ -495,106 +491,15 @@ julia> dual(c1) 0.0 ``` """ -function JuMP.dual(cref::InfOptConstraintRef; result::Int = 1, kwargs...) - return map_dual(cref, Val(optimizer_model_key(JuMP.owner_model(cref))), - result; kwargs...) +function JuMP.dual(cref::InfOptConstraintRef; kwargs...) + return map_dual(cref, JuMP.owner_model(cref).backend; kwargs...) end -# Error redriect for variable call +# Error redirect for variable call function JuMP.dual(vref::GeneralVariableRef; kwargs...) return JuMP.dual(JuMP.VariableRef(JuMP.Model(), MOI.VariableIndex(1))) end -################################################################################ -# SHADOW PRICE QUERIES -################################################################################ -# Dispatch functions for computing the shadow price -function _process_shadow_price(::MOI.LessThan, sense, duals) - if sense == MOI.MAX_SENSE - return -duals - elseif sense == MOI.MIN_SENSE - return duals - else - error( - "The shadow price is not available because the objective sense " * - "$sense is not minimization or maximization.", - ) - end -end -function _process_shadow_price(::MOI.GreaterThan, sense, duals) - if sense == MOI.MAX_SENSE - return duals - elseif sense == MOI.MIN_SENSE - return -duals - else - error( - "The shadow price is not available because the objective sense " * - "$sense is not minimization or maximization.", - ) - end -end -function _process_shadow_price(::MOI.EqualTo, sense, duals) - if sense == MOI.MAX_SENSE - return map(d -> d > 0 ? d : -d, duals) - elseif sense == MOI.MIN_SENSE - return map(d -> d > 0 ? -d : d, duals) - else - error( - "The shadow price is not available because the objective sense " * - "$sense is not minimization or maximization.", - ) - end -end - -""" - JuMP.shadow_price(cref::InfOptConstraintRef; - [label::Type{<:AbstractSupportLabel} = PublicLabel, - ndarray::Bool = false, kwargs...]) - -Extend `JuMP.shadow_price` to return the shadow price(s) of `cref` in accordance -with its reformulation constraint(s) stored in the optimizer model. Use -[`JuMP.has_duals`](@ref JuMP.has_duals(::InfiniteModel)) to check if a result -exists before asking for the shadow price (it uses the duals). - -The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ -and `kwargs` denote extra ones that user extensions may employ. - -By default only the shadow prices associated with public supports are returned, the -full set can be accessed via `label = All`. Moreover, the prices of infinite -constraints are returned as a list. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the constraint has multiple -infinite parameter dependencies. - -It may also be helpful to query via [`optimizer_model_constraint`](@ref) to -retrieve the constraint(s) that these shadow prices are based on. Calling -`parameter_refs` and `supports` may also be insightful. Be sure to use the same -keyword arguments for consistency. 
-
-For extensions, this only works if [`optimizer_model_constraint`](@ref) has been
-extended correctly and/or [`map_dual`](@ref) has been extended for constraints.
-
-**Example**
-```julia-repl
-julia> shadow_price(c1)
-4-element Array{Float64,1}:
- 42.0
- 42.0
- -32.3
- -0.0
-```
-"""
-function JuMP.shadow_price(cref::InfOptConstraintRef; kwargs...)
-    model = JuMP.owner_model(cref)
-    set = JuMP.moi_set(JuMP.constraint_object(cref))
-    sense = JuMP.objective_sense(model)
-    if !JuMP.has_duals(model)
-        error("The shadow price is not available because no dual result is " *
-              "available.")
-    end
-    duals = map_dual(cref, Val(optimizer_model_key(model)), 1; kwargs...)
-    return _process_shadow_price(set, sense, duals)
-end
-
 ################################################################################
 #                            LP SENSITIVITY ANALYSIS
 ################################################################################
 """
     InfOptSensitivityReport
 
 A wrapper `DataType` for `JuMP.SensitivityReport`s in `InfiniteOpt`.
-These are generated based on the optimizer model and should be made via the use of
-[`lp_sensitivity_report`](@ref JuMP.lp_sensitivity_report(::InfiniteModel)). Once
-made these can be indexed to get the sensitivies with respect to variables and/or
+These are generated based on the transformation backend and should be made via
+the use of [`lp_sensitivity_report`](@ref JuMP.lp_sensitivity_report(::InfiniteModel)).
+Once made, these can be indexed to get the sensitivities with respect to variables and/or
 constraints. The indexing syntax for these is:
```julia
report[ref::[GeneralVariableRef/InfOptConstraintRef];
    [label::Type{<:AbstractSupportLabel} = PublicLabel,
    ndarray::Bool = false, kwargs...]]
```
-This is enabled in user-defined optimizer model extensions by appropriately
-extending [`optimizer_model_variable`](@ref) and [`optimizer_model_constraint`](@ref).
+This is enabled for new transformation backends by appropriately
+extending [`transformation_model_variable`](@ref) and
+[`transformation_model_constraint`](@ref).
 
 **Fields**
-- `opt_report::JuMP.SensitivityReport`: The LP sensitivity captured from the optimizer model.
+- `opt_report::JuMP.SensitivityReport`: The LP sensitivity captured from the backend.
 """
 struct InfOptSensitivityReport
     opt_report::JuMP.SensitivityReport
 end
 
 # Extend Base.getindex for variables on InfOptSensitivityReport
 function Base.getindex(s::InfOptSensitivityReport, v::GeneralVariableRef; kwargs...)
-    key = Val(optimizer_model_key(JuMP.owner_model(v)))
-    opt_vref = optimizer_model_variable(v, key; kwargs...)
+    backend = JuMP.owner_model(v).backend
+    opt_vref = transformation_model_variable(v, backend; kwargs...)
     if opt_vref isa AbstractArray
         return map(v -> s.opt_report[v], opt_vref)
     else
@@ -635,8 +541,8 @@ end
 
 # Extend Base.getindex for constraints on InfOptSensitivityReport
 function Base.getindex(s::InfOptSensitivityReport, c::InfOptConstraintRef; kwargs...)
-    key = Val(optimizer_model_key(JuMP.owner_model(c)))
-    opt_cref = optimizer_model_constraint(c, key; kwargs...)
+    backend = JuMP.owner_model(c).backend
+    opt_cref = transformation_model_constraint(c, backend; kwargs...)
if opt_cref isa AbstractArray return map(c -> s.opt_report[c], opt_cref) else @@ -645,8 +551,34 @@ function Base.getindex(s::InfOptSensitivityReport, c::InfOptConstraintRef; kwarg end """ - JuMP.lp_sensitivity_report(model::InfiniteModel; - [atol::Float64 = 1e-8])::InfOptSensitivityReport + JuMP.lp_sensitivity_report( + backend::AbstractTransformationBackend; + [atol::Float64 = 1e-8] + )::InfOptSensitivityReport + +Extend `JuMP.lp_sensitivity_report` as appropriate for `backend`. This is +intended as an extension point. For [`JuMPBackend`](@ref)s, this simply +calls `JuMP.lp_sensitivity_report` on the underlying JuMP model. +""" +function JuMP.lp_sensitivity_report( + backend::AbstractTransformationBackend; + kwargs... + ) + error("`JuMP.lp_sensitivity_report` not defined for backends of type " * + "`$(typeof(backend))`.") +end + +# JuMPBackend +function JuMP.lp_sensitivity_report(backend::JuMPBackend; atol::Float64 = 1e-8) + report = JuMP.lp_sensitivity_report(backend.model, atol = atol) + return InfOptSensitivityReport(report) +end + +""" + JuMP.lp_sensitivity_report( + model::InfiniteModel; + [atol::Float64 = 1e-8] + )::InfOptSensitivityReport Extends `JuMP.lp_sensitivity_report` to generate and return an LP sensitivity report in accordance with the optimizer model. See @@ -663,11 +595,6 @@ julia> report[x] (0.0, 0.5) ``` """ -function JuMP.lp_sensitivity_report( - model::InfiniteModel; - atol::Float64 = 1e-8 - )::InfOptSensitivityReport - opt_model = optimizer_model(model) - opt_report = JuMP.lp_sensitivity_report(opt_model, atol = atol) - return InfOptSensitivityReport(opt_report) +function JuMP.lp_sensitivity_report(model::InfiniteModel; atol::Float64 = 1e-8) + return JuMP.lp_sensitivity_report(model.backend; atol = atol) end diff --git a/src/scalar_parameters.jl b/src/scalar_parameters.jl index 2c59bd17..66a460da 100644 --- a/src/scalar_parameters.jl +++ b/src/scalar_parameters.jl @@ -679,7 +679,7 @@ function _set_generative_support_info(pref::IndependentParameterRef, _reset_generative_supports(pref) _set_core_variable_object(pref, new_param) if is_used(pref) - set_optimizer_model_ready(JuMP.owner_model(pref), false) + set_transformation_backend_ready(JuMP.owner_model(pref), false) end return end @@ -833,7 +833,7 @@ function set_derivative_method(pref::IndependentParameterRef, _reset_derivative_constraints(pref) _set_core_variable_object(pref, new_param) if is_used(pref) - set_optimizer_model_ready(JuMP.owner_model(pref), false) + set_transformation_backend_ready(JuMP.owner_model(pref), false) end return end @@ -857,7 +857,7 @@ function set_derivative_method(pref::IndependentParameterRef, _reset_generative_supports(pref) _set_core_variable_object(pref, new_param) if is_used(pref) - set_optimizer_model_ready(JuMP.owner_model(pref), false) + set_transformation_backend_ready(JuMP.owner_model(pref), false) end return end @@ -882,7 +882,7 @@ function _update_parameter_domain(pref::IndependentParameterRef, _set_has_generative_supports(pref, false) _set_has_internal_supports(pref, false) if is_used(pref) - set_optimizer_model_ready(JuMP.owner_model(pref), false) + set_transformation_backend_ready(JuMP.owner_model(pref), false) end return end @@ -1079,7 +1079,7 @@ function _update_parameter_supports(pref::IndependentParameterRef, _reset_derivative_constraints(pref) _set_has_generative_supports(pref, false) if is_used(pref) - set_optimizer_model_ready(JuMP.owner_model(pref), false) + set_transformation_backend_ready(JuMP.owner_model(pref), false) end return end @@ -1311,7 +1311,7 @@ function 
add_supports(pref::IndependentParameterRef, _reset_derivative_constraints(pref) _reset_generative_supports(pref) if is_used(pref) - set_optimizer_model_ready(JuMP.owner_model(pref), false) + set_transformation_backend_ready(JuMP.owner_model(pref), false) end end return @@ -1365,7 +1365,7 @@ function delete_supports(pref::IndependentParameterRef; end end if is_used(pref) - set_optimizer_model_ready(JuMP.owner_model(pref), false) + set_transformation_backend_ready(JuMP.owner_model(pref), false) end return end @@ -1410,7 +1410,7 @@ julia> value(cost) function JuMP.set_value(pref::FiniteParameterRef, value::Real)::Nothing _data_object(pref).parameter = FiniteParameter(value) if is_used(pref) - set_optimizer_model_ready(JuMP.owner_model(pref), false) + set_transformation_backend_ready(JuMP.owner_model(pref), false) end return end @@ -1653,7 +1653,7 @@ function JuMP.delete( end # update optimizer model status if is_used(pref) - set_optimizer_model_ready(model, false) + set_transformation_backend_ready(model, false) end # delete dependence of measures on pref _update_measures(model, gvref) @@ -1677,7 +1677,7 @@ function JuMP.delete(model::InfiniteModel, pref::FiniteParameterRef)::Nothing @assert JuMP.is_valid(model, pref) "Parameter reference is invalid." # update optimizer model status if is_used(pref) - set_optimizer_model_ready(model, false) + set_transformation_backend_ready(model, false) end gvref = _make_parameter_ref(model, JuMP.index(pref)) # delete dependence of measures on pref diff --git a/src/semi_infinite_variables.jl b/src/semi_infinite_variables.jl index 68e4d613..1d6076bd 100644 --- a/src/semi_infinite_variables.jl +++ b/src/semi_infinite_variables.jl @@ -46,23 +46,23 @@ function _data_object( end """ - internal_semi_infinite_variable(vref::SemiInfiniteVariableRef, - key::Val{:my_ext_key})::SemiInfiniteVariable + internal_semi_infinite_variable( + vref::SemiInfiniteVariableRef, + backend::AbstractTransformationBackend + )::SemiInfiniteVariable Return the semi-infinite variable object of `vref` assuming it is an internal variable -made during measure expansion within an optimizer model. This will apply to -optimizer model extensions that utilize `add_measure_variable` in combination +made during measure expansion within a transformation backend. This will apply to +transformation backend extensions that utilize `add_measure_variable` in combination with `expand_measure`. """ function internal_semi_infinite_variable end # Extend _core_variable_object -function _core_variable_object( - vref::SemiInfiniteVariableRef - )::SemiInfiniteVariable{GeneralVariableRef} +function _core_variable_object(vref::SemiInfiniteVariableRef) if !haskey(_data_dictionary(vref), JuMP.index(vref)) model = JuMP.owner_model(vref) - return internal_semi_infinite_variable(vref, Val(optimizer_model_key(model))) + return internal_semi_infinite_variable(vref, model.backend) else return _data_object(vref).variable end diff --git a/src/show.jl b/src/show.jl index e11e9081..cc64f08c 100644 --- a/src/show.jl +++ b/src/show.jl @@ -2,7 +2,7 @@ # MATH SYMBOL METHODS ################################################################################ # Support additional math symbols beyond what JuMP does -function _math_symbol(::MIME"text/plain", name::Symbol)::String +function _math_symbol(::MIME"text/plain", name::Symbol) if name == :intersect return Sys.iswindows() ? 
"and" : "∩" elseif name == :prop @@ -59,7 +59,7 @@ function _math_symbol(::MIME"text/plain", name::Symbol)::String end # Support additional math symbols beyond what JuMP does -function _math_symbol(::MIME"text/latex", name::Symbol)::String +function _math_symbol(::MIME"text/latex", name::Symbol) if name == :intersect return "\\cap" elseif name == :prop @@ -119,30 +119,30 @@ end # INFINITE SET METHODS ################################################################################ # Return "s" if n is greater than one -_plural(n)::String = (isone(n) ? "" : "s") +_plural(n) = (isone(n) ? "" : "s") # Round numbers in strings -function _string_round(f::Float64)::String +function _string_round(f::Float64) iszero(f) && return "0" # strip sign off zero str = string(f) return length(str) >= 2 && str[end-1:end] == ".0" ? str[1:end-2] : str end -_string_round(f)::String = string(f) +_string_round(f) = string(f) ## Return the string of an infinite domain # IntervalDomain -function domain_string(print_mode, domain::IntervalDomain)::String +function domain_string(print_mode, domain::IntervalDomain) return string("[", _string_round(JuMP.lower_bound(domain)), ", ", _string_round(JuMP.upper_bound(domain)), "]") end # DistributionDomain -function domain_string(print_mode, domain::DistributionDomain)::String +function domain_string(print_mode, domain::DistributionDomain) return string(domain.distribution) end # CollectionDomain -function domain_string(print_mode, domain::CollectionDomain)::String +function domain_string(print_mode, domain::CollectionDomain) domains = collection_domains(domain) num_domains = length(domains) cs_string = string("CollectionDomain with ", num_domains, " domain", @@ -162,7 +162,7 @@ end ## Return in domain strings of infinite domains # Extend to return of in domain string for interval domains -function in_domain_string(print_mode, domain::IntervalDomain)::String +function in_domain_string(print_mode, domain::IntervalDomain) if JuMP.lower_bound(domain) != JuMP.upper_bound(domain) return string(_math_symbol(print_mode, :in), " ", domain_string(print_mode, domain)) @@ -173,7 +173,7 @@ function in_domain_string(print_mode, domain::IntervalDomain)::String end # Extend to return of in domain string for distribution domains -function in_domain_string(print_mode, domain::DistributionDomain)::String +function in_domain_string(print_mode, domain::DistributionDomain) dist = domain.distribution name = string(typeof(dist)) bracket_index = findfirst(isequal('{'), name) @@ -188,7 +188,7 @@ function in_domain_string(print_mode, domain::DistributionDomain)::String end # Extend to return of in domain string of other domains -function in_domain_string(print_mode, domain::AbstractInfiniteDomain)::String +function in_domain_string(print_mode, domain::AbstractInfiniteDomain) return string(_math_symbol(print_mode, :in), " ", domain_string(print_mode, domain)) end @@ -198,7 +198,7 @@ end function in_domain_string(print_mode, pref::GeneralVariableRef, domain::IntervalDomain, - restrictions::DomainRestrictions{GeneralVariableRef})::String + restrictions::DomainRestrictions{GeneralVariableRef}) # determine if in restrictions in_restrictions = haskey(restrictions, pref) # make the string @@ -210,7 +210,7 @@ end function in_domain_string(print_mode, pref::GeneralVariableRef, domain::InfiniteScalarDomain, - restrictions::DomainRestrictions{GeneralVariableRef})::String + restrictions::DomainRestrictions{GeneralVariableRef}) # determine if in restrictions if haskey(restrictions, pref) bound_domain = 
restrictions[pref] @@ -234,7 +234,7 @@ end function measure_data_string(print_mode, data::Union{DiscreteMeasureData{GeneralVariableRef}, FunctionalDiscreteMeasureData{GeneralVariableRef}} - )::String + ) pref = parameter_refs(data) lb = JuMP.lower_bound(data) ub = JuMP.upper_bound(data) @@ -252,7 +252,7 @@ end function measure_data_string(print_mode, data::Union{DiscreteMeasureData{Vector{GeneralVariableRef}}, FunctionalDiscreteMeasureData{Vector{GeneralVariableRef}}} - )::String + ) prefs = parameter_refs(data) lbs = JuMP.lower_bound(data) ubs = JuMP.upper_bound(data) @@ -278,7 +278,7 @@ function measure_data_string(print_mode, end # extract the most compact parameter name possible -function _get_root_parameter_name(data::AbstractMeasureData)::String +function _get_root_parameter_name(data::AbstractMeasureData) prefs = parameter_refs(data) names = map(p -> _remove_name_index(p), prefs) if _allequal(names) @@ -289,12 +289,12 @@ function _get_root_parameter_name(data::AbstractMeasureData)::String end # Fallback for measure_data_string -function measure_data_string(print_mode, data::AbstractMeasureData)::String +function measure_data_string(print_mode, data::AbstractMeasureData) return _get_root_parameter_name(data) end # Make strings to represent measures in REPLMode -function variable_string(m::MIME"text/plain", mref::MeasureRef)::String +function variable_string(m::MIME"text/plain", mref::MeasureRef) data = measure_data(mref) data_str = measure_data_string(m, data) func_str = JuMP.function_string(m, measure_function(mref)) @@ -308,7 +308,7 @@ function variable_string(m::MIME"text/plain", mref::MeasureRef)::String end # Make strings to represent measures in IJuliaMode -function variable_string(m::MIME"text/latex", mref::MeasureRef)::String +function variable_string(m::MIME"text/latex", mref::MeasureRef) data = measure_data(mref) data_str = measure_data_string(m, data) func_str = JuMP.function_string(m, measure_function(mref)) @@ -330,7 +330,7 @@ end ################################################################################ ## helper function for getting the variable names # REPLMode -function _get_base_name(::MIME"text/plain", vref)::String +function _get_base_name(::MIME"text/plain", vref) var_name = JuMP.name(vref) if !isempty(var_name) return var_name @@ -340,7 +340,7 @@ function _get_base_name(::MIME"text/plain", vref)::String end # IJuliaMode -function _get_base_name(::MIME"text/latex", vref)::String +function _get_base_name(::MIME"text/latex", vref) var_name = JuMP.name(vref) if !isempty(var_name) # TODO: This is wrong if variable name constains extra "]" @@ -350,28 +350,32 @@ function _get_base_name(::MIME"text/latex", vref)::String end end +function _get_param_group_name(element_prefs) + type = _index_type(first(element_prefs)) + if type == DependentParameterIndex + return _remove_name_index(first(element_prefs)) + elseif length(element_prefs) == 1 + return JuMP.name(first(element_prefs)) + else + # TODO this isn't quite right with a subset of an independent container + names = map(p -> _remove_name_index(p), element_prefs) + if _allequal(names) + return first(names) + else + return string("[", join(element_prefs, ", "), "]") + end + end +end + # Helper method for infinite variable construction function _add_on_parameter_refs( base_name::String, prefs::Collections.VectorTuple - )::String + ) param_name_tuple = "(" for i in 1:size(prefs, 1) element_prefs = prefs[i, :] - type = _index_type(first(element_prefs)) - if type == DependentParameterIndex - param_name = 
_remove_name_index(first(element_prefs)) - elseif length(element_prefs) == 1 - param_name = JuMP.name(first(element_prefs)) - else - # TODO this isn't quite right with a subset of an independent container - names = map(p -> _remove_name_index(p), element_prefs) - if _allequal(names) - param_name = first(names) - else - param_name = string("[", join(element_prefs, ", "), "]") - end - end + param_name = _get_param_group_name(element_prefs) if i != size(prefs, 1) param_name_tuple *= string(param_name, ", ") else @@ -382,7 +386,7 @@ function _add_on_parameter_refs( end # Make a string for InfiniteVariableRef -function variable_string(print_mode, vref::InfiniteVariableRef)::String +function variable_string(print_mode, vref::InfiniteVariableRef) base_name = _get_base_name(print_mode, vref) if !haskey(_data_dictionary(vref), JuMP.index(vref)) return base_name @@ -393,7 +397,7 @@ function variable_string(print_mode, vref::InfiniteVariableRef)::String end # Make a string for ParameterFunctionRef -function variable_string(print_mode, vref::ParameterFunctionRef)::String +function variable_string(print_mode, vref::ParameterFunctionRef) base_name = _get_base_name(print_mode, vref) if !haskey(_data_dictionary(vref), JuMP.index(vref)) return base_name @@ -456,12 +460,12 @@ end ## Return the parameter value as an appropriate string # Number -function _make_str_value(value)::String +function _make_str_value(value) return _string_round(value) end # Array{<:Number} -function _make_str_value(values::Array)::String +function _make_str_value(values::Array) if length(values) == 1 return _make_str_value(first(values)) end @@ -483,7 +487,7 @@ end # Make a string for PointVariableRef # TODO improve so numerator of derivative contains the point -function variable_string(print_mode, vref::PointVariableRef)::String +function variable_string(print_mode, vref::PointVariableRef) if !haskey(_data_dictionary(vref), JuMP.index(vref)) || !isempty(JuMP.name(vref)) return _get_base_name(print_mode, vref) else @@ -509,7 +513,7 @@ end # Make a string for SemiInfiniteVariableRef # TODO improve so numerator of derivative contains the parameter tuple -function variable_string(print_mode, vref::SemiInfiniteVariableRef)::String +function variable_string(print_mode, vref::SemiInfiniteVariableRef) if !haskey(_data_dictionary(vref), JuMP.index(vref)) || !isempty(JuMP.name(vref)) return _get_base_name(print_mode, vref) else @@ -537,31 +541,31 @@ function variable_string(print_mode, vref::SemiInfiniteVariableRef)::String end # Fallback -function variable_string(print_mode, vref::JuMP.AbstractVariableRef)::String +function variable_string(print_mode, vref::JuMP.AbstractVariableRef) return _get_base_name(print_mode, vref) end # Extend function string for DispatchVariableRefs (REPL) function JuMP.function_string(::MIME"text/plain", - vref::DispatchVariableRef)::String + vref::DispatchVariableRef) return variable_string(MIME("text/plain"), vref) end # Extend function string for DispatchVariableRefs (IJulia) function JuMP.function_string(::MIME"text/latex", - vref::DispatchVariableRef)::String + vref::DispatchVariableRef) return variable_string(MIME("text/latex"), vref) end # Extend function string for GeneralVariableRefs (REPL) function JuMP.function_string(::MIME"text/plain", - vref::GeneralVariableRef)::String + vref::GeneralVariableRef) return variable_string(MIME("text/plain"), dispatch_variable_ref(vref)) end # Extend function string for GeneralVariableRefs (IJulia) function JuMP.function_string(::MIME"text/latex", - 
vref::GeneralVariableRef)::String + vref::GeneralVariableRef) return variable_string(MIME("text/latex"), dispatch_variable_ref(vref)) end @@ -572,7 +576,7 @@ end function restrict_string( print_mode, restrictions::DomainRestrictions{GeneralVariableRef} - )::String + ) string_list = "" for (pref, domain) in restrictions string_list *= string(JuMP.function_string(print_mode, pref), " ", @@ -586,7 +590,7 @@ end function _param_domain_string(print_mode, model::InfiniteModel, index::IndependentParameterIndex, restrictions::DomainRestrictions{GeneralVariableRef} - )::String + ) pref = dispatch_variable_ref(model, index) domain = infinite_domain(pref) gvref = GeneralVariableRef(model, MOIUC.key_to_index(index), typeof(index)) @@ -598,7 +602,7 @@ end function _param_domain_string(print_mode, model::InfiniteModel, index::DependentParametersIndex, restrictions::DomainRestrictions{GeneralVariableRef} - )::String + ) # parse the infinite domain first_gvref = GeneralVariableRef(model, MOIUC.key_to_index(index), DependentParameterIndex, 1) @@ -638,7 +642,7 @@ end function JuMP.constraint_string(print_mode, cref::InfOptConstraintRef; in_math_mode = false - )::String + ) # get the function and set strings func_str = JuMP.function_string(print_mode, _core_constraint_object(cref)) in_set_str = JuMP.in_set_string(print_mode, _core_constraint_object(cref)) @@ -741,16 +745,27 @@ function Base.show(io::IO, ::MIME"text/latex", ref::InfOptConstraintRef) print(io, JuMP.constraint_string(MIME("text/latex"), ref)) end -# Show the backend information associated with the optimizer model +# Fallback for backend printing +function JuMP.show_backend_summary( + io::IO, + model::InfiniteModel, + backend::AbstractTransformationBackend + ) + println(io, " Backend type: ", typeof(backend)) + return +end + +# Show the backend information associated with the transformation backend function JuMP.show_backend_summary(io::IO, model::InfiniteModel) - println(io, "Optimizer model backend information: ") - JuMP.show_backend_summary(io, optimizer_model(model)) + println(io, "Transformation backend information: ") + JuMP.show_backend_summary(io, model, model.backend) + println(io, " Transformation built and up-to-date: ", transformation_backend_ready(model)) return end # Show the objective function type function JuMP.show_objective_function_summary(io::IO, model::InfiniteModel) - println(io, "Objective function type: ", + println(io, " Objective function type: ", JuMP.objective_function_type(model)) return end @@ -759,7 +774,7 @@ end function JuMP.show_constraints_summary(io::IO, model::InfiniteModel) for (F, S) in JuMP.list_of_constraint_types(model) n_constraints = JuMP.num_constraints(model, F, S) - println(io, "`$F`-in-`$S`: $n_constraints constraint", + println(io, " `$F`-in-`$S`: $n_constraints constraint", _plural(n_constraints)) end return @@ -781,21 +796,21 @@ function Base.show(io::IO, model::InfiniteModel) println(io, " problem with:") # show finite parameter info num_finite_params = num_parameters(model, FiniteParameter) - println(io, "Finite Parameter", _plural(num_finite_params), ": ", + println(io, " Finite parameter", _plural(num_finite_params), ": ", num_finite_params) # show infinite parameter info num_infinite_params = num_parameters(model, InfiniteParameter) - println(io, "Infinite Parameter", _plural(num_infinite_params), ": ", + println(io, " Infinite parameter", _plural(num_infinite_params), ": ", num_infinite_params) # show variable info num_vars = JuMP.num_variables(model) - println(io, "Variable", 
_plural(num_vars), ": ", num_vars) + println(io, " Variable", _plural(num_vars), ": ", num_vars) # show the derivative info num_derivs = num_derivatives(model) - println(io, "Derivative", _plural(num_derivs), ": ", num_derivs) + println(io, " Derivative", _plural(num_derivs), ": ", num_derivs) # show measure info num_meas = num_measures(model) - println(io, "Measure", _plural(num_meas), ": ", num_meas) + println(io, " Measure", _plural(num_meas), ": ", num_meas) # show objective function info if sense != MOI.FEASIBILITY_SENSE JuMP.show_objective_function_summary(io, model) diff --git a/src/variable_basics.jl b/src/variable_basics.jl index f0986287..aa2a5c19 100644 --- a/src/variable_basics.jl +++ b/src/variable_basics.jl @@ -362,7 +362,7 @@ function JuMP.set_lower_bound( cindex = _lower_bound_index(vref) cref = _make_constraint_ref(model, cindex) _set_core_constraint_object(cref, new_constr) - set_optimizer_model_ready(model, false) + set_transformation_backend_ready(model, false) else @assert !JuMP.is_fixed(vref) "$vref is fixed, cannot set lower bound." cref = JuMP.add_constraint(model, new_constr, is_info_constr = true) @@ -498,7 +498,7 @@ function JuMP.set_upper_bound( cindex = _upper_bound_index(vref) cref = _make_constraint_ref(model, cindex) _set_core_constraint_object(cref, new_constr) - set_optimizer_model_ready(model, false) + set_transformation_backend_ready(model, false) else @assert !JuMP.is_fixed(vref) "$vref is fixed, cannot set upper bound." cref = JuMP.add_constraint(model, new_constr, is_info_constr = true) @@ -640,7 +640,7 @@ function JuMP.fix( cindex = _fix_index(vref) cref = _make_constraint_ref(model, cindex) _set_core_constraint_object(cref, new_constr) - set_optimizer_model_ready(model, false) + set_transformation_backend_ready(model, false) else # Add a new fixing constraint. if JuMP.has_upper_bound(vref) || JuMP.has_lower_bound(vref) if !force @@ -750,7 +750,7 @@ function JuMP.set_start_value( value::Real )::Nothing info = _variable_info(vref) - set_optimizer_model_ready(JuMP.owner_model(vref), false) + set_transformation_backend_ready(JuMP.owner_model(vref), false) _update_variable_info(vref, JuMP.VariableInfo(info.has_lb, info.lower_bound, info.has_ub, info.upper_bound, @@ -1118,7 +1118,7 @@ function JuMP.delete(model::InfiniteModel, vref::DecisionVariableRef)::Nothing @assert JuMP.is_valid(model, vref) "Variable is invalid." 
- # update the optimizer model status + # update the transformation backend status if is_used(vref) - set_optimizer_model_ready(model, false) + set_transformation_backend_ready(model, false) end # delete attributes specific to the variable type _delete_variable_dependencies(vref) diff --git a/test/TranscriptionOpt/measure.jl b/test/TranscriptionOpt/measure.jl index da9df463..cf2b205a 100644 --- a/test/TranscriptionOpt/measure.jl +++ b/test/TranscriptionOpt/measure.jl @@ -8,12 +8,12 @@ @variable(m, y, Infinite(par, pars)) @variable(m, x0, Point(x, 0)) @variable(m, y0, SemiInfinite(y, 0, pars)) - tm = optimizer_model(m) - @variable(tm, a) - @variable(tm, b) - @variable(tm, c) - @variable(tm, d) - data = transcription_data(tm) + tb = m.backend + @variable(tb.model, a) + @variable(tb.model, b) + @variable(tb.model, c) + @variable(tb.model, d) + data = IOTO.transcription_data(tb) data.infvar_mappings[x] = [a, b, c] data.infvar_supports[x] = [(0.,), (0.5,), (1.,)] data.infvar_lookup[x] = Dict{Vector{Float64}, Int}([0] => 1, [0.5] => 2, [1] => 3) @@ -25,40 +25,39 @@ data.infvar_supports[y0] = [(0., [0., 0.]), (0., [1., 1.])] data.infvar_lookup[y0] = Dict{Vector{Float64}, Int}([0, 0, 0] => 1, [0, 1, 1] => 2) data.finvar_mappings[x0] = a - key = Val(:TransData) - IOTO.set_parameter_supports(tm, m) + IOTO.set_parameter_supports(tb, m) # test add_point_variable @testset "add_point_variable" begin # add one that was already added to the infinite model - @test isequal(InfiniteOpt.add_point_variable(tm, x, Float64[0], key), x0) - @test transcription_variable(x0) == a + @test isequal(InfiniteOpt.add_point_variable(tb, x, Float64[0]), x0) + @test IOTO.transcription_variable(x0) == a # add one that hasn't been added vref = GeneralVariableRef(m, -1, PointVariableIndex) - @test isequal(InfiniteOpt.add_point_variable(tm, x, Float64[1], key), vref) - @test transcription_variable(vref) == c + @test isequal(InfiniteOpt.add_point_variable(tb, x, Float64[1]), vref) + @test IOTO.transcription_variable(vref) == c # add one that has been added internally - @test isequal(InfiniteOpt.add_point_variable(tm, x, Float64[1], key), vref) - @test transcription_variable(vref) == c + @test isequal(InfiniteOpt.add_point_variable(tb, x, Float64[1]), vref) + @test IOTO.transcription_variable(vref) == c end # test add_semi_infinite_variable @testset "add_semi_infinite_variable" begin # add one that was already added to the infinite model var = SemiInfiniteVariable(y, Dict{Int, Float64}(1 => 0), [2, 3], [2]) - @test isequal(InfiniteOpt.add_semi_infinite_variable(tm, var, key), y0) - @test transcription_variable(y0) == [a, b] + @test isequal(InfiniteOpt.add_semi_infinite_variable(tb, var), y0) + @test IOTO.transcription_variable(y0) == [a, b] # add a new one var = SemiInfiniteVariable(y, Dict{Int, Float64}(1 => 1), [2, 3], [2]) vref = GeneralVariableRef(m, -1, SemiInfiniteVariableIndex) - @test isequal(InfiniteOpt.add_semi_infinite_variable(tm, var, key), vref) + @test isequal(InfiniteOpt.add_semi_infinite_variable(tb, var), vref) @test isequal(data.semi_infinite_vars, [var]) - @test c in transcription_variable(vref) - @test d in transcription_variable(vref) + @test c in IOTO.transcription_variable(vref) + @test d in IOTO.transcription_variable(vref) @test sort!(supports(vref)) == [([0., 0.], ), ([1., 1.], )] # add one that has already been added internally - @test isequal(InfiniteOpt.add_semi_infinite_variable(tm, var, key), vref) + @test isequal(InfiniteOpt.add_semi_infinite_variable(tb, var), vref) @test isequal(data.semi_infinite_vars, [var]) - @test c in
transcription_variable(vref) - @test d in transcription_variable(vref) + @test c in IOTO.transcription_variable(vref) + @test d in IOTO.transcription_variable(vref) @test sort!(supports(vref)) == [([0., 0.], ), ([1., 1.], )] end end diff --git a/test/TranscriptionOpt/model.jl b/test/TranscriptionOpt/model.jl index be5893fa..fc985f06 100644 --- a/test/TranscriptionOpt/model.jl +++ b/test/TranscriptionOpt/model.jl @@ -1,38 +1,49 @@ -# Test the TranscriptionData datatype -@testset "TranscriptionData" begin - @test TranscriptionData isa DataType - @test TranscriptionData().finvar_mappings isa Dict -end - -# Test basic definition and queries -@testset "Basic Definition and Queries" begin - # initialize needed data +# Test the datatypes +@testset "Data Structures" begin mockoptimizer = () -> MOIU.MockOptimizer(MOIU.UniversalFallback(MOIU.Model{Float64}()), eval_objective_value=false) - # test TranscriptionModel (no optimizer) - @testset "TranscriptionModel (Default)" begin - @test haskey(TranscriptionModel().ext, :TransData) - end - # test TranscriptionModel (with optimizer) - @testset "TranscriptionModel (Optimizer)" begin - @test haskey(TranscriptionModel(mockoptimizer).ext, :TransData) - end - # test is_transcription_model - @testset "is_transcription_model" begin - @test is_transcription_model(TranscriptionModel()) - @test !is_transcription_model(Model()) - end + @test IOTO.TranscriptionData isa DataType + @test IOTO.TranscriptionData().finvar_mappings isa Dict + @test JuMPBackend{IOTO.Transcription}(Model(), IOTO.TranscriptionData()) isa TranscriptionBackend + @test TranscriptionBackend() isa TranscriptionBackend + @test solver_name(TranscriptionBackend(mockoptimizer, add_bridges = true).model) == "Mock" +end + +# Test basic methods and extensions +@testset "Basic Methods and Extensions" begin + m = InfiniteModel() + @infinite_parameter(m, t in [0, 1], num_supports = 4) + @infinite_parameter(m, x[1:3] in [-1, 1], num_supports = 5) + tb = m.backend + tb.data.supports = ([NaN, supports(t)...], [NaN, eachcol(supports(x))...]) # test transcription_data @testset "transcription_data" begin - @test transcription_data(TranscriptionModel()).semi_infinite_vars isa Vector - @test_throws ErrorException transcription_data(Model()) + @test IOTO.transcription_data(tb) === tb.data + end + # test JuMP.show_backend_summary + @testset "JuMP.show_backend_summary" begin + expected = " Backend type: TranscriptionBackend\n `t` transcribed over 4 supports\n " * + "`x` transcribed over 5 supports\n Solver name: No optimizer attached.\n" + @test sprint(show_backend_summary, m, tb) == expected + end + # test show + @testset "Base.show (TranscriptionBackend)" begin + expected = "A TranscriptionBackend that uses a\nA JuMP Model\nFeasibility problem with:" * + "\nVariables: 0\nModel mode: AUTOMATIC\nCachingOptimizer state: NO_OPTIMIZER" * + "\nSolver name: No optimizer attached." + @test sprint(show, tb) == expected + end + # test empty! + @testset "Base.empty!" 
begin + @test empty!(tb) === tb + @test isempty(tb.data.supports) end # test has_internal_supports @testset "has_internal_supports" begin - m = TranscriptionModel() - @test !IOTO.has_internal_supports(m) - transcription_data(m).has_internal_supports = true - @test IOTO.has_internal_supports(m) + tb2 = TranscriptionBackend() + @test !IOTO.has_internal_supports(tb2) + tb2.data.has_internal_supports = true + @test IOTO.has_internal_supports(tb2) end end @@ -49,8 +60,8 @@ end @variable(m, y) add_supports(par, 0.5, label = InternalLabel) @variable(m, xrv, SemiInfinite(x, par, [1, pars[2]])) - tm = optimizer_model(m) - data = transcription_data(tm) + tb = m.backend + data = IOTO.transcription_data(tb) data.has_internal_supports = true data.supports = ([0., 0.5, 1., NaN], [[0., 0.], [1., 1.], [NaN, NaN]]) s1 = Set([UserDefined]) @@ -70,19 +81,19 @@ end # test make_ndarray @testset "make_ndarray" begin # test finite variable - @test IOTO.make_ndarray(tm, y, [1], PublicLabel) == [1] + @test IOTO.make_ndarray(tb, y, [1], PublicLabel) == [1] # test ordered infinite variable - @test IOTO.make_ndarray(tm, x, collect(1:6), All) == [1 4; 2 5; 3 6] - @test IOTO.make_ndarray(tm, x, collect(1:6), PublicLabel) == [1 4; 3 6] + @test IOTO.make_ndarray(tb, x, collect(1:6), All) == [1 4; 2 5; 3 6] + @test IOTO.make_ndarray(tb, x, collect(1:6), PublicLabel) == [1 4; 3 6] # test unordered infinite variable - @test IOTO.make_ndarray(tm, q, collect(1:6), All) == [1 2 3; 4 5 6] - @test IOTO.make_ndarray(tm, q, collect(1:6), PublicLabel) == [1 3; 4 6] + @test IOTO.make_ndarray(tb, q, collect(1:6), All) == [1 2 3; 4 5 6] + @test IOTO.make_ndarray(tb, q, collect(1:6), PublicLabel) == [1 3; 4 6] # test infinite variable with single parameter - @test IOTO.make_ndarray(tm, w, collect(1:3), All) == [1, 2, 3] + @test IOTO.make_ndarray(tb, w, collect(1:3), All) == [1, 2, 3] # test expression - @test IOTO.make_ndarray(tm, w + x, collect(1:6), All) == [1 4; 2 5; 3 6] + @test IOTO.make_ndarray(tb, w + x, collect(1:6), All) == [1 4; 2 5; 3 6] # test error - @test_throws ErrorException IOTO.make_ndarray(tm, q, collect(1:3), All) + @test_throws ErrorException IOTO.make_ndarray(tb, q, collect(1:3), All) end end @@ -100,8 +111,8 @@ end add_supports(par, 0.5, label = InternalLabel) supps = Dict{Int, Float64}(2 => 1) xrv = add_variable(m, build_variable(error, x, supps)) - tm = optimizer_model(m) - data = transcription_data(tm) + tb = m.backend + data = IOTO.transcription_data(tb) data.has_internal_supports = true data.supports = ([0., 0.5, 1., NaN], [[0., 0.], [1., 1.], [NaN, NaN]]) s1 = Set([UserDefined]) @@ -109,83 +120,83 @@ end s3 = union(s1, s2) sd = Set{DataType}() data.support_labels = ([s1, s2, s1, sd], [s1, s1, sd]) - @variable(tm, a) - @variable(tm, b) - @variable(tm, c) - @variable(tm, d) - @variable(tm, e) - @variable(tm, f) + @variable(tb.model, a) + @variable(tb.model, b) + @variable(tb.model, c) + @variable(tb.model, d) + @variable(tb.model, e) + @variable(tb.model, f) # test _ignore_label @testset "_ignore_label" begin - @test IOTO._ignore_label(tm, All) - @test !IOTO._ignore_label(tm, PublicLabel) - @test !IOTO._ignore_label(tm, UserDefined) + @test IOTO._ignore_label(tb, All) + @test !IOTO._ignore_label(tb, PublicLabel) + @test !IOTO._ignore_label(tb, UserDefined) end - # test transcription_variable (finite variables) - @testset "transcription_variable (Finite)" begin + # test IOTO.transcription_variable (finite variables) + @testset "IOTO.transcription_variable (Finite)" begin # test error - @test_throws 
ErrorException transcription_variable(tm, y) - @test_throws ErrorException transcription_variable(tm, x0) + @test_throws ErrorException IOTO.transcription_variable(y, tb) + @test_throws ErrorException IOTO.transcription_variable(x0, tb) # test normal data.finvar_mappings[y] = a - @test transcription_variable(tm, y) == a + @test IOTO.transcription_variable(y, tb) == a data.finvar_mappings[x0] = b - @test transcription_variable(tm, x0, ndarray = true, label = All) == b + @test IOTO.transcription_variable(x0, tb, ndarray = true, label = All) == b end - # test transcription_variable (Infinite, semi-infinite, and derivative) - @testset "transcription_variable (Infinite)" begin + # test IOTO.transcription_variable (Infinite, semi-infinite, and derivative) + @testset "IOTO.transcription_variable (Infinite)" begin # test error - @test_throws ErrorException transcription_variable(tm, x) - @test_throws ErrorException transcription_variable(tm, xrv) + @test_throws ErrorException IOTO.transcription_variable(x, tb) + @test_throws ErrorException IOTO.transcription_variable(xrv, tb) # test normal data.infvar_mappings[x] = [a, b, c, d, e, f] data.infvar_support_labels[x] = [s1, s3, s1, s1, s3, s1] - @test transcription_variable(tm, x) == [a, b, c, d, e, f] - @test transcription_variable(tm, x, label = All) == [a, b, c, d, e, f] - @test transcription_variable(tm, x, label = InternalLabel) == [b, e] + @test IOTO.transcription_variable(x, tb) == [a, b, c, d, e, f] + @test IOTO.transcription_variable(x, tb, label = All) == [a, b, c, d, e, f] + @test IOTO.transcription_variable(x, tb, label = InternalLabel) == [b, e] data.infvar_mappings[xrv] = [d, e, f] data.infvar_support_labels[xrv] = [s1, s3, s1] - @test transcription_variable(tm, xrv) == [d, e, f] - @test transcription_variable(tm, xrv, label = All) == [d, e, f] - @test transcription_variable(tm, xrv, label = InternalLabel) == [e] + @test IOTO.transcription_variable(xrv, tb) == [d, e, f] + @test IOTO.transcription_variable(xrv, tb, label = All) == [d, e, f] + @test IOTO.transcription_variable(xrv, tb, label = InternalLabel) == [e] # test ndarray - @test transcription_variable(tm, x, label = All, ndarray = true) == [a d; b e; c f] - @test transcription_variable(tm, x, ndarray = true) == [a d; c f] - @test_throws ErrorException transcription_variable(tm, xrv, ndarray = true) + @test IOTO.transcription_variable(x, tb, label = All, ndarray = true) == [a d; b e; c f] + @test IOTO.transcription_variable(x, tb, ndarray = true) == [a d; c f] + @test_throws ErrorException IOTO.transcription_variable(xrv, tb, ndarray = true) end - # test transcription_variable (Parameter Function) - @testset "transcription_variable (Parameter Function)" begin + # test IOTO.transcription_variable (Parameter Function) + @testset "IOTO.transcription_variable (Parameter Function)" begin # test normal - @test transcription_variable(tm, f1) == [sin(0), sin(1)] - @test transcription_variable(tm, f1, label = All) == sin.([0, 0.5, 1]) + @test IOTO.transcription_variable(f1, tb) == [sin(0), sin(1)] + @test IOTO.transcription_variable(f1, tb, label = All) == sin.([0, 0.5, 1]) # test ndarray - @test transcription_variable(tm, f1, label = All, ndarray = true) == sin.([0, 0.5, 1]) - @test transcription_variable(tm, f2, ndarray = true) == ones(2, 2) - end - # test transcription_variable (Fallback) - @testset "transcription_variable (Fallback)" begin - @test_throws ErrorException transcription_variable(tm, par) - end - # test transcription_variable (Single argument) - @testset 
"transcription_variable (Single)" begin - @test transcription_variable(y) == a - @test transcription_variable(x, label = All) == [a, b, c, d, e, f] - @test transcription_variable(x0) == b - @test transcription_variable(f2, ndarray = true) == ones(2, 2) - end - # test optimizer_model_variable extension - @testset "optimizer_model_variable" begin - @test optimizer_model_variable(y, Val(:TransData), label = All) == a - @test optimizer_model_variable(x, Val(:TransData), label = All) == [a, b, c, d, e, f] - @test optimizer_model_variable(x, Val(:TransData)) == [a, b, c, d, e, f] - @test optimizer_model_variable(x0, Val(:TransData)) == b + @test IOTO.transcription_variable(f1, tb, label = All, ndarray = true) == sin.([0, 0.5, 1]) + @test IOTO.transcription_variable(f2, tb, ndarray = true) == ones(2, 2) + end + # test IOTO.transcription_variable (Fallback) + @testset "IOTO.transcription_variable (Fallback)" begin + @test_throws ErrorException IOTO.transcription_variable(par, tb) + end + # test IOTO.transcription_variable (Single argument) + @testset "IOTO.transcription_variable (Single)" begin + @test IOTO.transcription_variable(y) == a + @test IOTO.transcription_variable(x, label = All) == [a, b, c, d, e, f] + @test IOTO.transcription_variable(x0) == b + @test IOTO.transcription_variable(f2, ndarray = true) == ones(2, 2) + end + # test transformation_model_variable extension + @testset "transformation_model_variable" begin + @test transformation_model_variable(y, tb, label = All) == a + @test transformation_model_variable(x, tb, label = All) == [a, b, c, d, e, f] + @test transformation_model_variable(x, tb) == [a, b, c, d, e, f] + @test transformation_model_variable(x0, tb) == b end # test variable_supports for infinite variable with 2 inputs - @testset "variable_supports (Model, Infinite)" begin + @testset "variable_supports (Backend, Infinite)" begin # test not in mappings error dvref = dispatch_variable_ref(x) delete!(data.infvar_mappings, x) - @test_throws ErrorException InfiniteOpt.variable_supports(tm, dvref) + @test_throws ErrorException InfiniteOpt.variable_supports(dvref, tb) data.infvar_mappings[x] = [a, b, c, d, e, f] # test supports are empty lookups = Dict{Vector{Float64}, Int}([0, 0, 0] => 1, [0.5, 0, 0] => 2, [1, 0, 0] => 3, @@ -193,33 +204,33 @@ end data.infvar_lookup[x] = lookups expected = [(0., [0., 0.]), (0.5, [0., 0.]), (1., [0., 0.]), (0., [1., 1.]), (0.5, [1., 1.]), (1., [1., 1.])] - @test InfiniteOpt.variable_supports(tm, dvref) == expected + @test InfiniteOpt.variable_supports(dvref, tb) == expected # test normal - @test InfiniteOpt.variable_supports(tm, dvref, label = All) == expected - @test InfiniteOpt.variable_supports(tm, dvref, label = InternalLabel) == expected[[2, 5]] + @test InfiniteOpt.variable_supports(dvref, tb, label = All) == expected + @test InfiniteOpt.variable_supports(dvref, tb, label = InternalLabel) == expected[[2, 5]] # test ndarray expected = permutedims([(0., [0., 0.]) (0.5, [0., 0.]) (1., [0., 0.]); (0., [1., 1.]) (0.5, [1., 1.]) (1., [1., 1.])], (2, 1)) - @test InfiniteOpt.variable_supports(tm, dvref, ndarray = true) == expected[[1, 3], :] - @test InfiniteOpt.variable_supports(tm, dvref, ndarray = true, label = All) == expected + @test InfiniteOpt.variable_supports(dvref, tb, ndarray = true) == expected[[1, 3], :] + @test InfiniteOpt.variable_supports(dvref, tb, ndarray = true, label = All) == expected # test with semi-infinite variable lookups = Dict{Vector{Float64}, Int}([0, 1] => 1, [0.5, 1] => 2, [1, 1] => 3) data.infvar_lookup[xrv] = lookups 
dvref = dispatch_variable_ref(xrv) expected = [(0., 1.), (0.5, 1.), (1., 1.)] - @test InfiniteOpt.variable_supports(tm, dvref) == expected - @test InfiniteOpt.variable_supports(tm, dvref, label = InternalLabel) == [expected[2]] + @test InfiniteOpt.variable_supports(dvref, tb) == expected + @test InfiniteOpt.variable_supports(dvref, tb, label = InternalLabel) == [expected[2]] end # test variable_supports for infinite parameter functions with 2 inputs - @testset "variable_supports (Model, Parameter Function)" begin + @testset "variable_supports (Backend, Parameter Function)" begin # test normal df1 = dispatch_variable_ref(f1) - @test InfiniteOpt.variable_supports(tm, df1) == [(0.,), (1.,)] - @test InfiniteOpt.variable_supports(tm, df1, label = All) == [(0.,), (0.5,), (1.,)] + @test InfiniteOpt.variable_supports(df1, tb) == [(0.,), (1.,)] + @test InfiniteOpt.variable_supports(df1, tb, label = All) == [(0.,), (0.5,), (1.,)] # test ndarray df2 = dispatch_variable_ref(f2) - @test InfiniteOpt.variable_supports(tm, df1, label = All, ndarray = true) == [(0.,), (0.5,), (1.,)] - @test InfiniteOpt.variable_supports(tm, df2, ndarray = true) isa Array + @test InfiniteOpt.variable_supports(df1, tb, label = All, ndarray = true) == [(0.,), (0.5,), (1.,)] + @test InfiniteOpt.variable_supports(df2, tb, ndarray = true) isa Array end # test supports for infinite variable @testset "supports (Infinite)" begin @@ -231,39 +242,39 @@ end @testset "lookup_by_support (Infinite)" begin # test errors @variable(m, x2, Infinite(par)) - @test_throws ErrorException IOTO.lookup_by_support(tm, x2, [0.]) - @test_throws ErrorException IOTO.lookup_by_support(tm, x, [0.9, 0., 0.]) - @test_throws ErrorException IOTO.lookup_by_support(tm, xrv, [0., 0., 0.]) + @test_throws ErrorException IOTO.lookup_by_support(x2, tb, [0.]) + @test_throws ErrorException IOTO.lookup_by_support(x, tb, [0.9, 0., 0.]) + @test_throws ErrorException IOTO.lookup_by_support(xrv, tb, [0., 0., 0.]) # test normal - @test IOTO.lookup_by_support(tm, x, [0., 0., 0.]) == a - @test IOTO.lookup_by_support(tm, x, [0., 1., 1.]) == d - @test IOTO.lookup_by_support(tm, x, [1., 1., 1.]) == f - @test IOTO.lookup_by_support(tm, xrv, [0., 1.]) == d - @test IOTO.lookup_by_support(tm, xrv, [1., 1.]) == f + @test IOTO.lookup_by_support(x, tb, [0., 0., 0.]) == a + @test IOTO.lookup_by_support(x, tb, [0., 1., 1.]) == d + @test IOTO.lookup_by_support(x, tb, [1., 1., 1.]) == f + @test IOTO.lookup_by_support(xrv, tb, [0., 1.]) == d + @test IOTO.lookup_by_support(xrv, tb, [1., 1.]) == f end # test lookup_by_support (infinite parameter functions) @testset "lookup_by_support (Parameter Function)" begin - @test IOTO.lookup_by_support(tm, f1, [0.]) == 0 - @test IOTO.lookup_by_support(tm, f2, [0., 0., 1.]) == 1 + @test IOTO.lookup_by_support(f1, tb, [0.]) == 0 + @test IOTO.lookup_by_support(f2, tb, [0., 0., 1.]) == 1 end # test lookup_by_support (finite vars) @testset "lookup_by_support (Finite)" begin # test errors @variable(m, z2) - @test_throws ErrorException IOTO.lookup_by_support(tm, z2, [0.]) + @test_throws ErrorException IOTO.lookup_by_support(z2, tb, [0.]) # test normal - @test IOTO.lookup_by_support(tm, x0, [0., 0., 0.]) == b - @test IOTO.lookup_by_support(tm, y, [0., 0., 1.]) == a + @test IOTO.lookup_by_support(x0, tb, [0., 0., 0.]) == b + @test IOTO.lookup_by_support(y, tb, [0., 0., 1.]) == a end # test internal_semi_infinite_variable @testset "internal_semi_infinite_variable" begin rv = SemiInfiniteVariableRef(m, SemiInfiniteVariableIndex(-1)) # test errors - @test_throws 
ErrorException InfiniteOpt.internal_semi_infinite_variable(rv, Val(:TransData)) + @test_throws ErrorException InfiniteOpt.internal_semi_infinite_variable(rv, tb) # test normal var = build_variable(error, x, supps) - push!(transcription_data(tm).semi_infinite_vars, var) - @test InfiniteOpt.internal_semi_infinite_variable(rv, Val(:TransData)) == var + push!(IOTO.transcription_data(tb).semi_infinite_vars, var) + @test InfiniteOpt.internal_semi_infinite_variable(rv, tb) == var eval_supports(rv) == supps end end @@ -279,65 +290,65 @@ end @variable(m, y) meas1 = support_sum(2par -2, par) meas2 = support_sum(x^2 - y, pars) - tm = optimizer_model(m) - data = transcription_data(tm) + tb = m.backend + data = IOTO.transcription_data(tb) data.has_internal_supports = true s1 = Set([UserDefined]) s2 = Set([InternalLabel]) sd = Set{DataType}() data.supports = ([0., 1., NaN], [[0., 0.], [1., 1.], [NaN, NaN]]) data.support_labels = ([s1, s2, sd], [s1, s1, sd]) - @variable(tm, a) - @variable(tm, b) - @variable(tm, c) - @variable(tm, d) - # test transcription_variable - @testset "transcription_variable" begin + @variable(tb.model, a) + @variable(tb.model, b) + @variable(tb.model, c) + @variable(tb.model, d) + # test IOTO.transcription_variable + @testset "IOTO.transcription_variable" begin # test errors - @test_throws ErrorException IOTO.transcription_variable(tm, meas1) - @test_throws ErrorException IOTO.transcription_variable(tm, meas2) + @test_throws ErrorException IOTO.transcription_variable(meas1, tb) + @test_throws ErrorException IOTO.transcription_variable(meas2, tb) # test normal data.measure_mappings[meas1] = [-2 * zero(AffExpr)] data.measure_support_labels[meas1] = [s1] data.measure_mappings[meas2] = [a^2 + c^2 - 2a, b^2 + d^2 - 2a] data.measure_support_labels[meas2] = [s1, s2] - @test IOTO.transcription_variable(tm, meas1) == -2 * zero(AffExpr) + @test IOTO.transcription_variable(meas1, tb) == -2 * zero(AffExpr) expected = [a^2 + c^2 - 2a, b^2 + d^2 - 2a] - @test IOTO.transcription_variable(tm, meas2) == [expected[1]] - @test IOTO.transcription_variable(tm, meas2, label = All) == expected + @test IOTO.transcription_variable(meas2, tb) == [expected[1]] + @test IOTO.transcription_variable(meas2, tb, label = All) == expected # test ndarray - @test IOTO.transcription_variable(tm, meas2, ndarray = true) == [expected[1]] - @test IOTO.transcription_variable(tm, meas2, label = All, ndarray = true) == expected + @test IOTO.transcription_variable(meas2, tb, ndarray = true) == [expected[1]] + @test IOTO.transcription_variable(meas2, tb, label = All, ndarray = true) == expected end # test lookup_by_support @testset "lookup_by_support" begin # test errors - @test_throws ErrorException IOTO.lookup_by_support(tm, meas1, Float64[]) - @test_throws ErrorException IOTO.lookup_by_support(tm, meas2, [0.]) + @test_throws ErrorException IOTO.lookup_by_support(meas1, tb, Float64[]) + @test_throws ErrorException IOTO.lookup_by_support(meas2, tb, [0.]) # test normal data.measure_lookup[meas1] = Dict(Float64[] => 1) data.measure_lookup[meas2] = Dict{Vector{Float64}, Int}([0] => 1, [1] => 2) - @test IOTO.lookup_by_support(tm, meas1, Float64[]) == -2 * zero(AffExpr) - @test IOTO.lookup_by_support(tm, meas2, [1.]) == b^2 + d^2 - 2a + @test IOTO.lookup_by_support(meas1, tb, Float64[]) == -2 * zero(AffExpr) + @test IOTO.lookup_by_support(meas2, tb, [1.]) == b^2 + d^2 - 2a end # test InfiniteOpt.variable_supports @testset "InfiniteOpt.variable_supports" begin # test not in mappings error dvref = dispatch_variable_ref(meas1) 
delete!(data.measure_mappings, meas1) - @test_throws ErrorException InfiniteOpt.variable_supports(tm, dvref) + @test_throws ErrorException InfiniteOpt.variable_supports(dvref, tb) data.measure_mappings[meas1] = [-2 * zero(AffExpr)] # test supports are empty - @test InfiniteOpt.variable_supports(tm, dvref) == () + @test InfiniteOpt.variable_supports(dvref, tb) == () dvref2 = dispatch_variable_ref(meas2) - @test InfiniteOpt.variable_supports(tm, dvref2, label = All) == [(0.,), (1.,)] + @test InfiniteOpt.variable_supports(dvref2, tb, label = All) == [(0.,), (1.,)] # test normal - @test InfiniteOpt.variable_supports(tm, dvref) == () - @test InfiniteOpt.variable_supports(tm, dvref2) == [(0.,)] + @test InfiniteOpt.variable_supports(dvref, tb) == () + @test InfiniteOpt.variable_supports(dvref2, tb) == [(0.,)] # test ndarray - @test InfiniteOpt.variable_supports(tm, dvref, ndarray = true) == [()] - @test InfiniteOpt.variable_supports(tm, dvref2, ndarray = true) == [(0.,)] - @test InfiniteOpt.variable_supports(tm, dvref2, label = All, ndarray = true) == [(0.,), (1.,)] + @test InfiniteOpt.variable_supports(dvref, tb, ndarray = true) == [()] + @test InfiniteOpt.variable_supports(dvref2, tb, ndarray = true) == [(0.,)] + @test InfiniteOpt.variable_supports(dvref2, tb, label = All, ndarray = true) == [(0.,), (1.,)] end end @@ -350,7 +361,7 @@ end derivative_method = OrthogonalCollocation(3)) @variable(m, y, Infinite(par)) d1 = @deriv(y, par) - tm = optimizer_model(m) + tb = m.backend # test _temp_parameter_ref @testset "_temp_parameter_ref" begin @test IOTO._temp_parameter_ref(m, index(par)) == dispatch_variable_ref(par) @@ -379,43 +390,43 @@ end # test set_parameter_supports @testset "set_parameter_supports" begin add_supports(par, 0.6, label = InternalLabel) - @test IOTO.set_parameter_supports(tm, m) isa Nothing + @test IOTO.set_parameter_supports(tb, m) isa Nothing expected = ([[0., 0.], [1., 1.], [NaN, NaN]], [0., 0.3, 0.6, 0.8, 1., NaN]) - @test isequal(sort.(transcription_data(tm).supports), expected) - @test IOTO.has_internal_supports(tm) + @test isequal(sort.(IOTO.transcription_data(tb).supports), expected) + @test IOTO.has_internal_supports(tb) expected = ([Set([UniformGrid]), Set([UniformGrid]), Set{DataType}()], [Set([UserDefined]), Set([InternalGaussLobatto]), Set([InternalLabel]), Set([InternalGaussLobatto]), Set([UserDefined]), Set{DataType}()]) - @test isequal(transcription_data(tm).support_labels, expected) + @test isequal(IOTO.transcription_data(tb).support_labels, expected) @test supports(par, label = All) == [0., 0.3, 0.6, 0.8, 1.] 
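The expected tuples above document the support layout the transcription machinery uses: one supports collection per infinite parameter group, each padded with a trailing `NaN` placeholder that stands in when an object does not depend on that group. A small illustration of reading that layout back, assuming the test suite's `IOTO` alias for `InfiniteOpt.TranscriptionOpt` and the default backend this patch gives `InfiniteModel`:

```julia
using InfiniteOpt
const IOTO = InfiniteOpt.TranscriptionOpt

m = InfiniteModel()  # defaults to a TranscriptionBackend in this patch
@infinite_parameter(m, t in [0, 1], num_supports = 3)
tb = m.backend
IOTO.set_parameter_supports(tb, m)
# One entry per infinite parameter group; the trailing NaN marks the
# placeholder index used for objects that do not depend on `t`.
IOTO.parameter_supports(tb)  # expected to resemble ([0.0, 0.5, 1.0, NaN],)
```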
@test has_generative_supports(par) end # test parameter_supports @testset "parameter_supports" begin expected = ([[0., 0.], [1., 1.], [NaN, NaN]], [0., 0.3, 0.6, 0.8, 1., NaN]) - @test isequal(sort.(IOTO.parameter_supports(tm)), expected) + @test isequal(sort.(IOTO.parameter_supports(tb)), expected) end # test support_index_iterator with 1 argument @testset "support_index_iterator (1 Arg)" begin - @test IOTO.support_index_iterator(tm) == CartesianIndices((1:2, 1:5)) + @test IOTO.support_index_iterator(tb) == CartesianIndices((1:2, 1:5)) end # test support_index_iterator with 2 argument2 @testset "support_index_iterator (2 Args)" begin - @test IOTO.support_index_iterator(tm, [1, 2]) == CartesianIndices((1:2, 1:5)) - @test IOTO.support_index_iterator(tm, [1]) == CartesianIndices((1:2, 6:6)) + @test IOTO.support_index_iterator(tb, [1, 2]) == CartesianIndices((1:2, 1:5)) + @test IOTO.support_index_iterator(tb, [1]) == CartesianIndices((1:2, 6:6)) end # test index_to_support @testset "index_to_support" begin - @test IOTO.index_to_support(tm, first(CartesianIndices((1:2, 1:5)))) isa Vector - @test isnan(IOTO.index_to_support(tm, last(IOTO.support_index_iterator(tm, [1])))[3]) + @test IOTO.index_to_support(tb, first(CartesianIndices((1:2, 1:5)))) isa Vector + @test isnan(IOTO.index_to_support(tb, last(IOTO.support_index_iterator(tb, [1])))[3]) end # test index_to_labels @testset "index_to_labels" begin idxs = CartesianIndices((1:2, 1:5)) - @test IOTO.index_to_labels(tm, first(idxs)) == Set([UserDefined, UniformGrid]) - @test IOTO.index_to_labels(tm, idxs[3]) == Set([UniformGrid, InternalGaussLobatto]) - @test IOTO.index_to_labels(tm, last(IOTO.support_index_iterator(tm, Int[]))) == Set{DataType}() + @test IOTO.index_to_labels(tb, first(idxs)) == Set([UserDefined, UniformGrid]) + @test IOTO.index_to_labels(tb, idxs[3]) == Set([UniformGrid, InternalGaussLobatto]) + @test IOTO.index_to_labels(tb, last(IOTO.support_index_iterator(tb, Int[]))) == Set{DataType}() end end @@ -433,13 +444,13 @@ end add_supports(par, 1, label = InternalLabel) meas1 = support_sum(2par -2, par) meas2 = support_sum(x^2 - y, pars) - tm = optimizer_model(m) - @variable(tm, a) - @variable(tm, b) - @variable(tm, c) - @variable(tm, d) + tb = m.backend + @variable(tb.model, a) + @variable(tb.model, b) + @variable(tb.model, c) + @variable(tb.model, d) # transcribe the variables and measures - data = transcription_data(tm) + data = IOTO.transcription_data(tb) data.has_internal_supports = true s1 = Set([UserDefined]) s2 = Set([InternalLabel]) @@ -455,104 +466,104 @@ end data.infvar_lookup[x] = lookups data.measure_lookup[meas1] = Dict(Float64[] => 1) data.measure_lookup[meas2] = Dict{Vector{Float64}, Int}([0] => 1, [1] => 2) - @test IOTO.set_parameter_supports(tm, m) isa Nothing - # test transcription_expression in accordance with the methods defined in transcribe.jl - @testset "transcription_expression (Fallback)" begin - @test_throws ErrorException IOTO.transcription_expression(tm, a, Float64[]) + @test IOTO.set_parameter_supports(tb, m) isa Nothing + # test IOTO.transcription_expression in accordance with the methods defined in transcribe.jl + @testset "IOTO.transcription_expression (Fallback)" begin + @test_throws ErrorException IOTO.transcription_expression(a, tb, Float64[]) end # test transcription expression for infinite variables with 3 args - @testset "transcription_expression (Infinite Variable)" begin - @test IOTO.transcription_expression(tm, x, [0., 1., 0.]) == c - @test IOTO.transcription_expression(tm, meas1, [0., 0., 
1.]) == -2 * zero(AffExpr) - @test IOTO.transcription_expression(tm, f, [0., 1., 0.]) == 1 + @testset "IOTO.transcription_expression (Infinite Variable)" begin + @test IOTO.transcription_expression(x, tb, [0., 1., 0.]) == c + @test IOTO.transcription_expression(meas1, tb, [0., 0., 1.]) == -2 * zero(AffExpr) + @test IOTO.transcription_expression(f, tb, [0., 1., 0.]) == 1 end # test transcription expression for semi_infinite variables with 3 args - @testset "transcription_expression (Semi-Infinite Variable)" begin + @testset "IOTO.transcription_expression (Semi-Infinite Variable)" begin # semi_infinite of parameter function rv = add_variable(m, build_variable(error, f, Dict(1=>1.)), add_support = false) - @test IOTO.transcription_expression(tm, rv, [0., 1., 0.]) == 1 + @test IOTO.transcription_expression(rv, tb, [0., 1., 0.]) == 1 # semi_infinite of infinite variable rv = add_variable(m, build_variable(error, x, Dict(1=>1.)), add_support = false) data.infvar_mappings[rv] = [b, c] lookups = Dict{Vector{Float64}, Int}([0, 0] => 1, [1, 0] => 2) data.infvar_lookup[rv] = lookups - @test IOTO.transcription_expression(tm, rv, [1., 0., 0.]) == c + @test IOTO.transcription_expression(rv, tb, [1., 0., 0.]) == c end # test transcription expression for finite variables with 3 args - @testset "transcription_expression (Finite Variable)" begin - @test IOTO.transcription_expression(tm, x0, [0., 1., 0.]) == b - @test IOTO.transcription_expression(tm, y, [0., 0., 1.]) == a + @testset "IOTO.transcription_expression (Finite Variable)" begin + @test IOTO.transcription_expression(x0, tb, [0., 1., 0.]) == b + @test IOTO.transcription_expression(y, tb, [0., 0., 1.]) == a end # test transcription expression for infinite parameters with 3 args - @testset "transcription_expression (Infinite Parameter)" begin - @test IOTO.transcription_expression(tm, par, [0.5, 1., 0.]) == 0. - @test IOTO.transcription_expression(tm, pars[1], [0.5, 0., 1.]) == 0.5 + @testset "IOTO.transcription_expression (Infinite Parameter)" begin + @test IOTO.transcription_expression(par, tb, [0.5, 1., 0.]) == 0. 
+ @test IOTO.transcription_expression(pars[1], tb, [0.5, 0., 1.]) == 0.5 end # test transcription expression for finite parameters with 3 args - @testset "transcription_expression (Finite Parameter)" begin - @test IOTO.transcription_expression(tm, finpar, [0.5, 1., 0.]) == 42 + @testset "IOTO.transcription_expression (Finite Parameter)" begin + @test IOTO.transcription_expression(finpar, tb, [0.5, 1., 0.]) == 42 end # test transcription expression for AffExprs with 3 args - @testset "transcription_expression (AffExpr)" begin - @test IOTO.transcription_expression(tm, x0 - y + 2x - 2.3, [1., 1., 1.]) == b - a + 2d - 2.3 + @testset "IOTO.transcription_expression (AffExpr)" begin + @test IOTO.transcription_expression(x0 - y + 2x - 2.3, tb, [1., 1., 1.]) == b - a + 2d - 2.3 end # test transcription expression for QuadExprs with 3 args - @testset "transcription_expression (QuadExpr)" begin + @testset "IOTO.transcription_expression (QuadExpr)" begin # test normal expr = meas2 - 3y^2 - x0 - 2.3 expected = b^2 + d^2 - 2a - 3a^2 - b - 2.3 - @test IOTO.transcription_expression(tm, expr, [1., 1., 1.]) == expected + @test IOTO.transcription_expression(expr, tb, [1., 1., 1.]) == expected # test becomes a nonlinear expression expr = meas2 * x0 expected = +((-2.0 * a + b * b + d * d) * b, 0.0) - @test isequal(IOTO.transcription_expression(tm, expr, [1., 1., 1.]), expected) + @test isequal(IOTO.transcription_expression(expr, tb, [1., 1., 1.]), expected) end # test transcription expression for NonlinearExprs with 3 args - @testset "transcription_expression (NonlinearExpr)" begin - @test isequal(IOTO.transcription_expression(tm, sin(y), [1., 1., 1.]), sin(a)) + @testset "IOTO.transcription_expression (NonlinearExpr)" begin + @test isequal(IOTO.transcription_expression(sin(y), tb, [1., 1., 1.]), sin(a)) end # test transcription expression for numbers with 3 args - @testset "transcription_expression (Real)" begin + @testset "IOTO.transcription_expression (Real)" begin expected = zero(AffExpr) + 42 - @test IOTO.transcription_expression(tm, 42, [1., 1., 1.]) == expected + @test IOTO.transcription_expression(42, tb, [1., 1., 1.]) == expected end # test transcription expression for Exprs with 2 args - @testset "transcription_expression (Expr 2 Args)" begin + @testset "IOTO.transcription_expression (Expr 2 Args)" begin # test infinite expr expr = meas2 - 3y^2 - x0 - 2.3 expected = [-2a^2 + c^2 - 2a - b - 2.3, -3a^2 + b^2 + d^2 - 2a - b - 2.3] - @test IOTO.transcription_expression(tm, expr, label = All) == expected - @test IOTO.transcription_expression(tm, expr, label = All, ndarray = true) == expected + @test IOTO.transcription_expression(expr, tb, label = All) == expected + @test IOTO.transcription_expression(expr, tb, label = All, ndarray = true) == expected expected = [-2a^2 + c^2 - 2a - b - 2.3] - @test IOTO.transcription_expression(tm, expr) == expected - @test IOTO.transcription_expression(tm, expr, ndarray = true) == expected + @test IOTO.transcription_expression(expr, tb) == expected + @test IOTO.transcription_expression(expr, tb, ndarray = true) == expected # test finite expr expr = 2x0 -y expected = 2b- a - @test IOTO.transcription_expression(tm, expr) == expected - @test IOTO.transcription_expression(tm, expr, ndarray = true) == [expected] + @test IOTO.transcription_expression(expr, tb) == expected + @test IOTO.transcription_expression(expr, tb, ndarray = true) == [expected] # test NonlinearExpr - @test isequal(IOTO.transcription_expression(tm, sin(x0)), sin(b)) + @test 
isequal(IOTO.transcription_expression(sin(x0), tb), sin(b)) end # test transcription expression for variables with 2 args - @testset "transcription_expression (Variable 2 Args)" begin - @test IOTO.transcription_expression(tm, x0) == b - @test IOTO.transcription_expression(tm, x) == [b, d] - @test IOTO.transcription_expression(tm, x, label = All) == [b, c, d] + @testset "IOTO.transcription_expression (Variable 2 Args)" begin + @test IOTO.transcription_expression(x0, tb) == b + @test IOTO.transcription_expression(x, tb) == [b, d] + @test IOTO.transcription_expression(x, tb, label = All) == [b, c, d] end # test transcription expression with 1 argument - @testset "transcription_expression (1 Arg)" begin + @testset "IOTO.transcription_expression (1 Arg)" begin @test IOTO.transcription_expression(x0) == b @test IOTO.transcription_expression(x0 - y) == b - a @test IOTO.transcription_expression(zero(QuadExpr) + 2) == zero(AffExpr) + 2 end - # test optimizer_model_expression - @testset "optimizer_model_expression" begin - @test optimizer_model_expression(x0) == b - @test optimizer_model_expression(x0 - y) == b - a - @test optimizer_model_expression(zero(QuadExpr) + 2) == zero(AffExpr) + 2 + # test transformation_model_expression + @testset "transformation_model_expression" begin + @test transformation_model_expression(x0) == b + @test transformation_model_expression(x0 - y) == b - a + @test transformation_model_expression(zero(QuadExpr) + 2) == zero(AffExpr) + 2 end # test expression_supports @testset "expression_supports" begin @@ -578,71 +589,71 @@ end @constraint(m, c1, x + y - 2 <= 0) @constraint(m, c2, support_sum(x, par) == 0) @constraint(m, c3, x0 + y == 5) - tm = optimizer_model(m) - data = transcription_data(tm) + tb = m.backend + data = IOTO.transcription_data(tb) data.has_internal_supports = true s1 = Set([UserDefined]) s2 = Set([InternalLabel]) - @variable(tm, a) - @variable(tm, b) - @variable(tm, c) - @constraint(tm, tc1, b + a <= 2) - @constraint(tm, tc2, c + a <= 2) - @constraint(tm, tc3, 0.5b + 0.5c == 0) - @constraint(tm, tc4, b + a == 5) - @test IOTO.set_parameter_supports(tm, m) isa Nothing - # test transcription_constraint (2 Args) - @testset "transcription_constraint (Infinite)" begin + @variable(tb.model, a) + @variable(tb.model, b) + @variable(tb.model, c) + @constraint(tb.model, tc1, b + a <= 2) + @constraint(tb.model, tc2, c + a <= 2) + @constraint(tb.model, tc3, 0.5b + 0.5c == 0) + @constraint(tb.model, tc4, b + a == 5) + @test IOTO.set_parameter_supports(tb, m) isa Nothing + # test IOTO.transcription_constraint (2 Args) + @testset "IOTO.transcription_constraint (Infinite)" begin # test error - @test_throws ErrorException transcription_constraint(tm, c1) + @test_throws ErrorException IOTO.transcription_constraint(c1, tb) # test normal data.constr_mappings[c1] = [tc1, tc2] data.constr_support_labels[c1] = [s1, s2] - @test transcription_constraint(tm, c1, label = All) == [tc1, tc2] - @test transcription_constraint(tm, c1) == [tc1] - @test transcription_constraint(tm, c1, label = All, ndarray = true) == [tc1, tc2] - @test transcription_constraint(tm, c1, ndarray = true) == [tc1] + @test IOTO.transcription_constraint(c1, tb, label = All) == [tc1, tc2] + @test IOTO.transcription_constraint(c1, tb) == [tc1] + @test IOTO.transcription_constraint(c1, tb, label = All, ndarray = true) == [tc1, tc2] + @test IOTO.transcription_constraint(c1, tb, ndarray = true) == [tc1] # test error - @test_throws ErrorException transcription_constraint(tm, c2) + @test_throws ErrorException 
IOTO.transcription_constraint(c2, tb) # test normal data.constr_mappings[c2] = [tc3] data.constr_support_labels[c2] = [s1] - @test transcription_constraint(tm, c2) == tc3 - @test transcription_constraint(tm, c2, ndarray = true) == [tc3] + @test IOTO.transcription_constraint(c2, tb) == tc3 + @test IOTO.transcription_constraint(c2, tb, ndarray = true) == [tc3] # test error - @test_throws ErrorException transcription_constraint(tm, c3) + @test_throws ErrorException IOTO.transcription_constraint(c3, tb) # test normal data.constr_mappings[c3] = [tc4] data.constr_support_labels[c3] = [s1] - @test transcription_constraint(tm, c3) == tc4 + @test IOTO.transcription_constraint(c3, tb) == tc4 end - # test transcription_constraint (Single argument) - @testset "transcription_constraint (1 Arg)" begin - @test transcription_constraint(c1) == [tc1] - @test transcription_constraint(c1, label = All) == [tc1, tc2] - @test transcription_constraint(c2) == tc3 - @test transcription_constraint(c3, label = All) == tc4 + # test IOTO.transcription_constraint (Single argument) + @testset "IOTO.transcription_constraint (1 Arg)" begin + @test IOTO.transcription_constraint(c1) == [tc1] + @test IOTO.transcription_constraint(c1, label = All) == [tc1, tc2] + @test IOTO.transcription_constraint(c2) == tc3 + @test IOTO.transcription_constraint(c3, label = All) == tc4 end - # test optimizer_model_constraint extension - @testset "optimizer_model_constraint" begin - @test optimizer_model_constraint(c1, Val(:TransData), label = All) == [tc1, tc2] - @test optimizer_model_constraint(c2, Val(:TransData)) == tc3 - @test optimizer_model_constraint(c3, Val(:TransData)) == tc4 + # test transformation_model_constraint extension + @testset "transformation_model_constraint" begin + @test transformation_model_constraint(c1, tb, label = All) == [tc1, tc2] + @test transformation_model_constraint(c2, tb) == tc3 + @test transformation_model_constraint(c3, tb) == tc4 end # test constraint_supports @testset "constraint_supports" begin # test error - @test_throws ErrorException InfiniteOpt.constraint_supports(tm, c1) + @test_throws ErrorException InfiniteOpt.constraint_supports(c1, tb) # test normal data.constr_supports[c1] = [(0.,), (1.,)] - @test InfiniteOpt.constraint_supports(tm, c1) == [(0.,)] - @test InfiniteOpt.constraint_supports(tm, c1, label = All) == [(0.,), (1.,)] - @test InfiniteOpt.constraint_supports(tm, c1, ndarray = true) == [(0.,)] - @test InfiniteOpt.constraint_supports(tm, c1, label = All, ndarray = true) == [(0.,), (1.,)] + @test InfiniteOpt.constraint_supports(c1, tb) == [(0.,)] + @test InfiniteOpt.constraint_supports(c1, tb, label = All) == [(0.,), (1.,)] + @test InfiniteOpt.constraint_supports(c1, tb, ndarray = true) == [(0.,)] + @test InfiniteOpt.constraint_supports(c1, tb, label = All, ndarray = true) == [(0.,), (1.,)] # test finite data.constr_supports[c3] = [()] - @test InfiniteOpt.constraint_supports(tm, c3) == () - @test InfiniteOpt.constraint_supports(tm, c3, ndarray = true) == [()] + @test InfiniteOpt.constraint_supports(c3, tb) == () + @test InfiniteOpt.constraint_supports(c3, tb, ndarray = true) == [()] end # test supports @testset "supports" begin diff --git a/test/TranscriptionOpt/optimize.jl b/test/TranscriptionOpt/optimize.jl deleted file mode 100644 index 39c62931..00000000 --- a/test/TranscriptionOpt/optimize.jl +++ /dev/null @@ -1,43 +0,0 @@ -# Test transcription_model -@testset "transcription_model" begin - # initialize model - m = InfiniteModel() - # test normal usage - @test isa(transcription_model(m), 
Model) - # test error - set_optimizer_model(m, Model()) - @test_throws ErrorException transcription_model(m) -end - -# Test build_optimizer_model! -@testset "build_optimizer_model!" begin - # initialize model - mockoptimizer = () -> MOIU.MockOptimizer(MOIU.UniversalFallback(MOIU.Model{Float64}()), - eval_objective_value=false) - m = InfiniteModel(mockoptimizer) - @infinite_parameter(m, par in [0, 1], num_supports = 3) - @infinite_parameter(m, pars[1:2] in [0, 1], supports = [0, 1]) - @variable(m, 1 >= x >= 0, Infinite(par), Int) - @variable(m, y == 2, Infinite(par, pars), Bin, start = 0) - @variable(m, x0, Point(x, 0)) - @variable(m, 0 <= y0 <= 1, Point(y, 0, [0, 0]), Int) - @variable(m, 0 <= z <= 1, Bin) - @variable(m, w == 1, Int, start = 1) - meas1 = support_sum(x - w, par) - meas2 = integral(y, pars) - @constraint(m, c1, x + par - z == 0) - @constraint(m, c2, z + x0 >= -3) - @constraint(m, c3, meas1 + z == 0) - @constraint(m, c4, meas2 - 2y0 + x <= 1, DomainRestrictions(par => [0.5, 1])) - @constraint(m, c5, meas2 == 0) - @constraint(m, @deriv(x, par) == 0) - @constraint(m, sin(w) + integral(x^3, par) == 0) - @objective(m, Min, x0 + meas1) - set_silent(m) - set_time_limit_sec(m, 42.) - # test normal usage - @test isa(build_optimizer_model!(m, Val(:TransData)), Nothing) - @test optimizer_model_ready(m) - @test num_variables(optimizer_model(m)) == 44 - @test time_limit_sec(optimizer_model(m)) == 42 -end diff --git a/test/TranscriptionOpt/transcribe.jl b/test/TranscriptionOpt/transcribe.jl index e8f7f561..d0996bbf 100644 --- a/test/TranscriptionOpt/transcribe.jl +++ b/test/TranscriptionOpt/transcribe.jl @@ -21,22 +21,22 @@ @constraint(m, c1, x + z - 2 <= 0) @constraint(m, c2, measure(x + y, data) - w == 0) @constraint(m, c3, x0 + y0 == 5) - tm = optimizer_model(m) - IOTO.set_parameter_supports(tm, m) + tb = m.backend + IOTO.set_parameter_supports(tb, m) # test transcribe_finite_variables! @testset "transcribe_finite_variables!" begin - @test isa(IOTO.transcribe_finite_variables!(tm, m), Nothing) - @test length(transcription_data(tm).finvar_mappings) == 2 - @test transcription_variable(tm, z) isa VariableRef - @test transcription_variable(tm, w) isa VariableRef - @test name(transcription_variable(tm, z)) == name(z) - @test name(transcription_variable(tm, w)) == name(w) - @test has_lower_bound(transcription_variable(tm, z)) - @test has_upper_bound(transcription_variable(tm, z)) - @test is_binary(transcription_variable(tm, z)) - @test is_fixed(transcription_variable(tm, w)) - @test is_integer(transcription_variable(tm, w)) - @test start_value(transcription_variable(tm, w)) == 1. + @test isa(IOTO.transcribe_finite_variables!(tb, m), Nothing) + @test length(IOTO.transcription_data(tb).finvar_mappings) == 2 + @test IOTO.transcription_variable(z, tb) isa VariableRef + @test IOTO.transcription_variable(w, tb) isa VariableRef + @test name(IOTO.transcription_variable(z, tb)) == name(z) + @test name(IOTO.transcription_variable(w, tb)) == name(w) + @test has_lower_bound(IOTO.transcription_variable(z, tb)) + @test has_upper_bound(IOTO.transcription_variable(z, tb)) + @test is_binary(IOTO.transcription_variable(z, tb)) + @test is_fixed(IOTO.transcription_variable(w, tb)) + @test is_integer(IOTO.transcription_variable(w, tb)) + @test start_value(IOTO.transcription_variable(w, tb)) == 1. end # test _format_infinite_info @testset "_format_infinite_info" begin @@ -60,17 +60,17 @@ end # test transcribe_infinite_variables! @testset "transcribe_infinite_variables!" 
begin - @test isa(IOTO.transcribe_infinite_variables!(tm, m), Nothing) - @test length(transcription_data(tm).infvar_mappings) == 2 - @test transcription_variable(tm, x) isa Vector{VariableRef} - @test transcription_variable(tm, y) isa Vector{VariableRef} - @test name(transcription_variable(tm, x)[1]) == "x(support: 1)" - @test name(transcription_variable(tm, y)[3]) == "y(support: 3)" - @test has_lower_bound(transcription_variable(tm, x)[1]) - @test is_binary(transcription_variable(tm, y)[2]) - @test is_fixed(transcription_variable(tm, y)[4]) - @test is_integer(transcription_variable(tm, x)[2]) - @test sort!(start_value.(transcription_variable(tm, y))) == [0., 1, 2, 3] + @test isa(IOTO.transcribe_infinite_variables!(tb, m), Nothing) + @test length(IOTO.transcription_data(tb).infvar_mappings) == 2 + @test IOTO.transcription_variable(x, tb) isa Vector{VariableRef} + @test IOTO.transcription_variable(y, tb) isa Vector{VariableRef} + @test name(IOTO.transcription_variable(x, tb)[1]) == "x(support: 1)" + @test name(IOTO.transcription_variable(y, tb)[3]) == "y(support: 3)" + @test has_lower_bound(IOTO.transcription_variable(x)[1]) + @test is_binary(IOTO.transcription_variable(y, tb)[2]) + @test is_fixed(IOTO.transcription_variable(y, tb)[4]) + @test is_integer(IOTO.transcription_variable(x, tb)[2]) + @test sort!(start_value.(IOTO.transcription_variable(y, tb))) == [0., 1, 2, 3] @test supports(x) == [(0,), (1,)] @test length(supports(y)) == 4 end @@ -95,18 +95,18 @@ end # test transcribe_derivative_variables! @testset "transcribe_derivative_variables!" begin - @test isa(IOTO.transcribe_derivative_variables!(tm, m), Nothing) - @test length(transcription_data(tm).infvar_mappings) == 6 + @test isa(IOTO.transcribe_derivative_variables!(tb, m), Nothing) + @test length(IOTO.transcription_data(tb).infvar_mappings) == 6 @test num_derivatives(m) == 4 - @test transcription_variable(tm, dx) isa Vector{VariableRef} - @test transcription_variable(tm, dy) isa Vector{VariableRef} - @test transcription_variable(tm, dx3) isa Vector{VariableRef} - @test name(transcription_variable(tm, dx)[1]) == "d/dpar[x(par)](support: 1)" - @test name(transcription_variable(tm, dx3)[1]) == "d^3/dpar^3[x(par)](support: 1)" - @test name(transcription_variable(tm, deriv(dx, par))[1]) == "d²/dpar²[x(par)](support: 1)" - @test name(transcription_variable(tm, dy)[3]) == (Sys.iswindows() ? "d/dpar[y(par, pars)](support: 3)" : "∂/∂par[y(par, pars)](support: 3)") - @test has_lower_bound(transcription_variable(tm, dx)[1]) - @test sort!(start_value.(transcription_variable(tm, dy))) == [0., 1, 2, 3] + @test IOTO.transcription_variable(dx, tb) isa Vector{VariableRef} + @test IOTO.transcription_variable(dy, tb) isa Vector{VariableRef} + @test IOTO.transcription_variable(dx3, tb) isa Vector{VariableRef} + @test name(IOTO.transcription_variable(dx, tb)[1]) == "d/dpar[x(par)](support: 1)" + @test name(IOTO.transcription_variable(dx3, tb)[1]) == "d^3/dpar^3[x(par)](support: 1)" + @test name(IOTO.transcription_variable(deriv(dx, par), tb)[1]) == "d²/dpar²[x(par)](support: 1)" + @test name(IOTO.transcription_variable(dy, tb)[3]) == (Sys.iswindows() ? 
"d/dpar[y(par, pars)](support: 3)" : "∂/∂par[y(par, pars)](support: 3)") + @test has_lower_bound(IOTO.transcription_variable(dx, tb)[1]) + @test sort!(start_value.(IOTO.transcription_variable(dy, tb))) == [0., 1, 2, 3] @test supports(dx) == [(0,), (1,)] @test length(supports(dy)) == 4 end @@ -114,22 +114,22 @@ @testset "_set_semi_infinite_variable_mapping" begin var = SemiInfiniteVariable(y, Dict{Int, Float64}(1 => 0), [1, 2], [1]) vref = GeneralVariableRef(m, -1, SemiInfiniteVariableIndex) - @test IOTO._set_semi_infinite_variable_mapping(tm, var, vref, SemiInfiniteVariableIndex) isa Nothing - @test transcription_variable(vref) isa Vector{VariableRef} - @test length(transcription_data(tm).infvar_mappings) == 7 - @test IOTO.lookup_by_support(tm, y, [0., 0, 0]) == IOTO.lookup_by_support(tm, vref, [0., 0]) - @test IOTO._set_semi_infinite_variable_mapping(tm, var, vref, ParameterFunctionIndex) isa Nothing + @test IOTO._set_semi_infinite_variable_mapping(tb, var, vref, SemiInfiniteVariableIndex) isa Nothing + @test IOTO.transcription_variable(vref) isa Vector{VariableRef} + @test length(IOTO.transcription_data(tb).infvar_mappings) == 7 + @test IOTO.lookup_by_support(y, tb, [0., 0, 0]) == IOTO.lookup_by_support(vref, tb, [0., 0]) + @test IOTO._set_semi_infinite_variable_mapping(tb, var, vref, ParameterFunctionIndex) isa Nothing end # test transcribe_semi_infinite_variables! @testset "transcribe_semi_infinite_variables!" begin - @test IOTO.transcribe_semi_infinite_variables!(tm, m) isa Nothing - @test transcription_variable(yrv) isa Vector{VariableRef} - @test length(transcription_data(tm).infvar_mappings) == 8 - @test IOTO.lookup_by_support(tm, y, [1., 0., 0.]) == IOTO.lookup_by_support(tm, yrv, [1., 0]) + @test IOTO.transcribe_semi_infinite_variables!(tb, m) isa Nothing + @test IOTO.transcription_variable(yrv) isa Vector{VariableRef} + @test length(IOTO.transcription_data(tb).infvar_mappings) == 8 + @test IOTO.lookup_by_support(y, tb, [1., 0., 0.]) == IOTO.lookup_by_support(yrv, tb, [1., 0]) end # test _update_point_info @testset "_update_point_info" begin - vref = @variable(tm) + vref = @variable(tb.model) # test lower bound update fix(vref, 0, force = true) set_lower_bound(x0, 0) @@ -166,19 +166,19 @@ end # test transcribe_point_variables! @testset "transcribe_point_variables!" begin - @test isa(IOTO.transcribe_point_variables!(tm, m), Nothing) - @test length(transcription_data(tm).finvar_mappings) == 4 - @test transcription_variable(tm, x0) == IOTO.lookup_by_support(tm, x, [0.]) - @test transcription_variable(tm, y0) == IOTO.lookup_by_support(tm, y, [0., 0., 0.]) - @test name(transcription_variable(tm, x0)) == "x(support: 1)" - @test name(transcription_variable(tm, y0))[1:end-2] == "y(support: " - @test lower_bound(transcription_variable(tm, x0)) == 0 - @test is_integer(transcription_variable(tm, x0)) - @test lower_bound(transcription_variable(tm, y0)) == 0 - @test upper_bound(transcription_variable(tm, y0)) == 1 - @test is_integer(transcription_variable(tm, y0)) - @test start_value(transcription_variable(tm, y0)) == 0. 
-        @test has_lower_bound(IOTO.lookup_by_support(tm, y, [0., 0., 0.]))
+        @test isa(IOTO.transcribe_point_variables!(tb, m), Nothing)
+        @test length(IOTO.transcription_data(tb).finvar_mappings) == 4
+        @test IOTO.transcription_variable(x0, tb) == IOTO.lookup_by_support(x, tb, [0.])
+        @test IOTO.transcription_variable(y0, tb) == IOTO.lookup_by_support(y, tb, [0., 0., 0.])
+        @test name(IOTO.transcription_variable(x0, tb)) == "x(support: 1)"
+        @test name(IOTO.transcription_variable(y0, tb))[1:end-2] == "y(support: "
+        @test lower_bound(IOTO.transcription_variable(x0, tb)) == 0
+        @test is_integer(IOTO.transcription_variable(x0, tb))
+        @test lower_bound(IOTO.transcription_variable(y0, tb)) == 0
+        @test upper_bound(IOTO.transcription_variable(y0, tb)) == 1
+        @test is_integer(IOTO.transcription_variable(y0, tb))
+        @test start_value(IOTO.transcription_variable(y0, tb)) == 0.
+        @test has_lower_bound(IOTO.lookup_by_support(y, tb, [0., 0., 0.]))
     end
 end
@@ -196,21 +196,21 @@ end
     meas2 = integral(w, par)
     meas3 = integral(y^2, pars)
     meas4 = integral(pars[1], pars[1])
-    tm = transcription_model(m)
-    IOTO.set_parameter_supports(tm, m)
-    IOTO.transcribe_finite_variables!(tm, m)
-    IOTO.transcribe_infinite_variables!(tm, m)
-    tx = transcription_variable(x)
-    ty = transcription_variable(y)
-    tz = transcription_variable(z)
-    tw = transcription_variable(w)
+    tb = m.backend
+    IOTO.set_parameter_supports(tb, m)
+    IOTO.transcribe_finite_variables!(tb, m)
+    IOTO.transcribe_infinite_variables!(tb, m)
+    tx = IOTO.transcription_variable(x)
+    ty = IOTO.transcription_variable(y)
+    tz = IOTO.transcription_variable(z)
+    tw = IOTO.transcription_variable(w)
     # test transcribe_measures!
     @testset "transcribe_measures!" begin
-        @test IOTO.transcribe_measures!(tm, m) isa Nothing
-        @test transcription_variable(meas1) == tx[1] + 4tw + tx[2]
-        @test transcription_variable(meas2) == tw + 0
-        @test transcription_variable(meas3) isa Vector
-        @test transcription_variable(meas4) isa AffExpr
+        @test IOTO.transcribe_measures!(tb, m) isa Nothing
+        @test IOTO.transcription_variable(meas1) == tx[1] + 4tw + tx[2]
+        @test IOTO.transcription_variable(meas2) == tw + 0
+        @test IOTO.transcription_variable(meas3) isa Vector
+        @test IOTO.transcription_variable(meas4) isa AffExpr
         @test supports(meas1) == ()
         @test supports(meas2) == ()
         @test sort!(supports(meas3)) == [(0.,), (1., )]
@@ -219,14 +219,14 @@ end
     @testset "transcribe_objective!" begin
         # normal
         @objective(m, Min, 2z^2 - meas1)
-        @test IOTO.transcribe_objective!(tm, m) isa Nothing
-        @test objective_sense(tm) == MOI.MIN_SENSE
-        @test objective_function(tm) == 2tz^2 - tx[1] - 4tw - tx[2]
+        @test IOTO.transcribe_objective!(tb, m) isa Nothing
+        @test objective_sense(tb.model) == MOI.MIN_SENSE
+        @test objective_function(tb.model) == 2tz^2 - tx[1] - 4tw - tx[2]
         # nonlinear objective
         @objective(m, Max, z^4)
-        @test IOTO.transcribe_objective!(tm, m) isa Nothing
-        @test objective_sense(tm) == MOI.MAX_SENSE
-        @test isequal(objective_function(tm), tz^4)
+        @test IOTO.transcribe_objective!(tb, m) isa Nothing
+        @test objective_sense(tb.model) == MOI.MAX_SENSE
+        @test isequal(objective_function(tb.model), tz^4)
     end
 end
@@ -249,54 +249,54 @@ end
     @constraint(m, c6, [z, x] in MOI.Zeros(2))
     @constraint(m, c7, sin(z) ^ x == 0)
     @constraint(m, c8, integral(sin(y), par) == 0)
-    tm = transcription_model(m)
-    IOTO.set_parameter_supports(tm, m)
-    IOTO.transcribe_finite_variables!(tm, m)
-    IOTO.transcribe_infinite_variables!(tm, m)
-    IOTO.transcribe_point_variables!(tm, m)
-    IOTO.transcribe_measures!(tm, m)
-    xt = transcription_variable(x)
-    yt = transcription_variable(y)
-    x0t = transcription_variable(x0)
-    yft = transcription_variable(yf)
-    zt = transcription_variable(z)
+    tb = m.backend
+    IOTO.set_parameter_supports(tb, m)
+    IOTO.transcribe_finite_variables!(tb, m)
+    IOTO.transcribe_infinite_variables!(tb, m)
+    IOTO.transcribe_point_variables!(tb, m)
+    IOTO.transcribe_measures!(tb, m)
+    xt = IOTO.transcription_variable(x)
+    yt = IOTO.transcription_variable(y)
+    x0t = IOTO.transcription_variable(x0)
+    yft = IOTO.transcription_variable(yf)
+    zt = IOTO.transcription_variable(z)
     # test _get_info_constr_from_var
     @testset "_get_info_constr_from_var" begin
         # lower bounds
         set = moi_set(InfiniteOpt._core_constraint_object(LowerBoundRef(x)))
-        expected = LowerBoundRef(IOTO.lookup_by_support(tm, x, [1., 1., 1.]))
-        @test IOTO._get_info_constr_from_var(tm, x, set, [1., 1., 1.]) == expected
-        @test IOTO._get_info_constr_from_var(tm, x, set, [0., 0., 0.]) isa Nothing
+        expected = LowerBoundRef(IOTO.lookup_by_support(x, tb, [1., 1., 1.]))
+        @test IOTO._get_info_constr_from_var(tb, x, set, [1., 1., 1.]) == expected
+        @test IOTO._get_info_constr_from_var(tb, x, set, [0., 0., 0.]) isa Nothing
         # upper bounds
         set = moi_set(InfiniteOpt._core_constraint_object(UpperBoundRef(x)))
-        expected = UpperBoundRef(IOTO.lookup_by_support(tm, x, [0., 1., 1.]))
-        @test IOTO._get_info_constr_from_var(tm, x, set, [1., 1., 0.]) == expected
-        @test IOTO._get_info_constr_from_var(tm, x, set, [0., 0., 0.]) isa Nothing
+        expected = UpperBoundRef(IOTO.lookup_by_support(x, tb, [0., 1., 1.]))
+        @test IOTO._get_info_constr_from_var(tb, x, set, [1., 1., 0.]) == expected
+        @test IOTO._get_info_constr_from_var(tb, x, set, [0., 0., 0.]) isa Nothing
         set = moi_set(InfiniteOpt._core_constraint_object(UpperBoundRef(yf)))
         expected = UpperBoundRef(yft)
-        @test IOTO._get_info_constr_from_var(tm, y, set, [1., 1., 1.]) == expected
+        @test IOTO._get_info_constr_from_var(tb, y, set, [1., 1., 1.]) == expected
         # fix
         set = moi_set(InfiniteOpt._core_constraint_object(FixRef(y)))
-        expected = FixRef(IOTO.lookup_by_support(tm, y, [0.]))
-        @test IOTO._get_info_constr_from_var(tm, y, set, [1., 1., 0.]) == expected
-        @test IOTO._get_info_constr_from_var(tm, y, set, [0., 0., 1.]) isa Nothing
+        expected = FixRef(IOTO.lookup_by_support(y, tb, [0.]))
+        @test IOTO._get_info_constr_from_var(tb, y, set, [1., 1., 0.]) == expected
+        @test IOTO._get_info_constr_from_var(tb, y, set, [0., 0., 1.]) isa Nothing
         set = moi_set(InfiniteOpt._core_constraint_object(FixRef(x0)))
         expected = FixRef(x0t)
-        @test IOTO._get_info_constr_from_var(tm, x, set, [0., 0., 0.]) == expected
+        @test IOTO._get_info_constr_from_var(tb, x, set, [0., 0., 0.]) == expected
         # binary
         set = moi_set(InfiniteOpt._core_constraint_object(BinaryRef(x0)))
-        expected = BinaryRef(IOTO.lookup_by_support(tm, x, [0., 0., 0.]))
-        @test IOTO._get_info_constr_from_var(tm, x, set, [0., 0., 0.]) == expected
-        @test IOTO._get_info_constr_from_var(tm, x, set, [1., 1., 1.]) isa Nothing
-        @test IOTO._get_info_constr_from_var(tm, z, set, [1., 1., 1.]) == BinaryRef(zt)
+        expected = BinaryRef(IOTO.lookup_by_support(x, tb, [0., 0., 0.]))
+        @test IOTO._get_info_constr_from_var(tb, x, set, [0., 0., 0.]) == expected
+        @test IOTO._get_info_constr_from_var(tb, x, set, [1., 1., 1.]) isa Nothing
+        @test IOTO._get_info_constr_from_var(tb, z, set, [1., 1., 1.]) == BinaryRef(zt)
        # integer
         set = moi_set(InfiniteOpt._core_constraint_object(IntegerRef(x)))
-        expected = IntegerRef(IOTO.lookup_by_support(tm, x, [0., 1., 1.]))
-        @test IOTO._get_info_constr_from_var(tm, x, set, [1., 1., 0.]) == expected
-        @test IOTO._get_info_constr_from_var(tm, x, set, [0., 0., 0.]) isa Nothing
+        expected = IntegerRef(IOTO.lookup_by_support(x, tb, [0., 1., 1.]))
+        @test IOTO._get_info_constr_from_var(tb, x, set, [1., 1., 0.]) == expected
+        @test IOTO._get_info_constr_from_var(tb, x, set, [0., 0., 0.]) isa Nothing
         set = moi_set(InfiniteOpt._core_constraint_object(IntegerRef(yf)))
         expected = IntegerRef(yft)
-        @test IOTO._get_info_constr_from_var(tm, yf, set, [1., 1., 1.]) == expected
+        @test IOTO._get_info_constr_from_var(tb, yf, set, [1., 1., 1.]) == expected
     end
     # test _support_in_restrictions
     @testset "_support_in_restrictions" begin
@@ -312,62 +312,62 @@ end
         con = constraint_object(c1)
         func = jump_function(con)
         set = moi_set(con)
-        @test IOTO._process_constraint(tm, con, func, set, zeros(3), "test1") isa ConstraintRef
-        @test num_constraints(tm, typeof(func), typeof(set)) == 1
-        cref = constraint_by_name(tm, "test1")
-        delete(tm, cref)
+        @test IOTO._process_constraint(tb, con, func, set, zeros(3), "test1") isa ConstraintRef
+        @test num_constraints(tb.model, typeof(func), typeof(set)) == 1
+        cref = constraint_by_name(tb.model, "test1")
+        delete(tb.model, cref)
         # nonlinear scalar constraint
         con = constraint_object(c7)
         func = jump_function(con)
         set = moi_set(con)
-        @test IOTO._process_constraint(tm, con, func, set, zeros(3), "test1") isa ConstraintRef
-        @test num_constraints(tm, typeof(func), typeof(set)) == 1
-        cref = constraint_by_name(tm, "test1")
-        delete(tm, cref)
+        @test IOTO._process_constraint(tb, con, func, set, zeros(3), "test1") isa ConstraintRef
+        @test num_constraints(tb.model, typeof(func), typeof(set)) == 1
+        cref = constraint_by_name(tb.model, "test1")
+        delete(tb.model, cref)
         # vector constraint
         con = constraint_object(c6)
         func = jump_function(con)
         set = moi_set(con)
-        @test IOTO._process_constraint(tm, con, func, set, zeros(3), "test2") isa ConstraintRef
-        @test num_constraints(tm, typeof(func), typeof(set)) == 1
-        cref = constraint_by_name(tm, "test2")
-        delete(tm, cref)
+        @test IOTO._process_constraint(tb, con, func, set, zeros(3), "test2") isa ConstraintRef
+        @test num_constraints(tb.model, typeof(func), typeof(set)) == 1
+        cref = constraint_by_name(tb.model, "test2")
+        delete(tb.model, cref)
         # test nonlinear vector constraint
         con = VectorConstraint([sin(z)], MOI.Zeros(1))
         func = [sin(z)]
         set = MOI.Zeros(1)
-        @test IOTO._process_constraint(tm, con, func, set, zeros(3), "test2") isa ConstraintRef
+        @test IOTO._process_constraint(tb, con, func, set, zeros(3), "test2") isa ConstraintRef
         # fallback
-        @test_throws ErrorException IOTO._process_constraint(tm, :bad, func, set,
+        @test_throws ErrorException IOTO._process_constraint(tb, :bad, func, set,
                                                              zeros(3), "bad")
     end
     # test transcribe_constraints!
     @testset "transcribe_constraints!" begin
-        @test IOTO.transcribe_constraints!(tm, m) isa Nothing
+        @test IOTO.transcribe_constraints!(tb, m) isa Nothing
         # test the info constraint transcriptions
-        @test transcription_constraint(LowerBoundRef(x)) isa Vector{ConstraintRef}
-        @test transcription_constraint(UpperBoundRef(x)) isa Vector{ConstraintRef}
-        @test transcription_constraint(IntegerRef(x)) isa Vector{ConstraintRef}
-        @test length(transcription_constraint(LowerBoundRef(x))) == 5
-        @test transcription_constraint(FixRef(x0)) == FixRef(x0t)
-        @test transcription_constraint(BinaryRef(x0)) == BinaryRef(x0t)
-        @test transcription_constraint(FixRef(y)) == [FixRef(yt[i]) for i in 1:2]
-        @test transcription_constraint(UpperBoundRef(yf)) == UpperBoundRef(yft)
-        @test transcription_constraint(BinaryRef(z)) == BinaryRef(zt)
+        @test IOTO.transcription_constraint(LowerBoundRef(x)) isa Vector{ConstraintRef}
+        @test IOTO.transcription_constraint(UpperBoundRef(x)) isa Vector{ConstraintRef}
+        @test IOTO.transcription_constraint(IntegerRef(x)) isa Vector{ConstraintRef}
+        @test length(IOTO.transcription_constraint(LowerBoundRef(x))) == 5
+        @test IOTO.transcription_constraint(FixRef(x0)) == FixRef(x0t)
+        @test IOTO.transcription_constraint(BinaryRef(x0)) == BinaryRef(x0t)
+        @test IOTO.transcription_constraint(FixRef(y)) == [FixRef(yt[i]) for i in 1:2]
+        @test IOTO.transcription_constraint(UpperBoundRef(yf)) == UpperBoundRef(yft)
+        @test IOTO.transcription_constraint(BinaryRef(z)) == BinaryRef(zt)
         # test constraint transcriptions
-        @test transcription_constraint(c1) isa Vector{ConstraintRef}
-        @test length(transcription_constraint(c1)) == 6
-        @test constraint_object(transcription_constraint(c2)).func == yt[1] - zt^2
-        xf = IOTO.lookup_by_support(tm, x, [1., 1., 1.])
-        @test constraint_object(transcription_constraint(c3)).func == 4xf
-        @test constraint_object(transcription_constraint(c3)).set == MOI.LessThan(5.)
+        @test IOTO.transcription_constraint(c1) isa Vector{ConstraintRef}
+        @test length(IOTO.transcription_constraint(c1)) == 6
+        @test constraint_object(IOTO.transcription_constraint(c2)).func == yt[1] - zt^2
+        xf = IOTO.lookup_by_support(x, tb, [1., 1., 1.])
+        @test constraint_object(IOTO.transcription_constraint(c3)).func == 4xf
+        @test constraint_object(IOTO.transcription_constraint(c3)).set == MOI.LessThan(5.)
         expected = [yt[1] + zt, yt[2] + zt]
-        @test jump_function.(constraint_object.(transcription_constraint(c4))) == expected
-        @test constraint_object(transcription_constraint(c5)).func == 2zt^2
-        @test length(transcription_constraint(c6)) == 6
-        @test moi_set(constraint_object(first(transcription_constraint(c6)))) == MOI.Zeros(2)
-        @test length(transcription_constraint(c7)) == 6
-        @test transcription_constraint(c8) isa ConstraintRef
+        @test jump_function.(constraint_object.(IOTO.transcription_constraint(c4))) == expected
+        @test constraint_object(IOTO.transcription_constraint(c5)).func == 2zt^2
+        @test length(IOTO.transcription_constraint(c6)) == 6
+        @test moi_set(constraint_object(first(IOTO.transcription_constraint(c6)))) == MOI.Zeros(2)
+        @test length(IOTO.transcription_constraint(c7)) == 6
+        @test IOTO.transcription_constraint(c8) isa ConstraintRef
         # test the info constraint supports
         expected = [([0., 0.], 0.5), ([0., 0.], 1.), ([1., 1.], 0.), ([1., 1.], 0.5), ([1., 1.], 1.)]
         @test sort(supports(LowerBoundRef(x))) == expected
@@ -398,18 +398,18 @@ end
     @variable(m, y, Infinite(t))
     d1 = @deriv(y, t)
     d2 = @deriv(y, t^2)
-    tm = transcription_model(m)
-    IOTO.set_parameter_supports(tm, m)
-    IOTO.transcribe_infinite_variables!(tm, m)
-    IOTO.transcribe_derivative_variables!(tm, m)
+    tb = m.backend
+    IOTO.set_parameter_supports(tb, m)
+    IOTO.transcribe_infinite_variables!(tb, m)
+    IOTO.transcribe_derivative_variables!(tb, m)
     # main test
-    @test IOTO.transcribe_derivative_evaluations!(tm, m) isa Nothing
-    @test num_constraints(tm, AffExpr, MOI.EqualTo{Float64}) == 18
+    @test IOTO.transcribe_derivative_evaluations!(tb, m) isa Nothing
+    @test num_constraints(tb.model, AffExpr, MOI.EqualTo{Float64}) == 18
     @test num_supports(t) == 4
     @test num_supports(t, label = All) == 10
     @test length(supports(d1)) == 4
     @test length(supports(d2, label = All)) == 10
-    @test IOTO.has_internal_supports(tm)
+    @test IOTO.has_internal_supports(tb)
     @test has_generative_supports(t)
 end
@@ -423,14 +423,14 @@ end
     @variable(m, y, Infinite(t, x))
     constant_over_collocation(y, t)
     constant_over_collocation(y, x)
-    tm = transcription_model(m)
-    IOTO.set_parameter_supports(tm, m)
-    IOTO.transcribe_infinite_variables!(tm, m)
+    tb = m.backend
+    IOTO.set_parameter_supports(tb, m)
+    IOTO.transcribe_infinite_variables!(tb, m)
     # main test
-    @test IOTO.transcribe_variable_collocation_restictions!(tm, m) isa Nothing
-    @test num_constraints(tm, count_variable_in_set_constraints = false) == 3 * 3
-    yt = transcription_variable(y, label = All, ndarray = true)
-    cons = all_constraints(tm, include_variable_in_set_constraints = false)
+    @test IOTO.transcribe_variable_collocation_restictions!(tb, m) isa Nothing
+    @test num_constraints(tb.model, count_variable_in_set_constraints = false) == 3 * 3
+    yt = IOTO.transcription_variable(y, label = All, ndarray = true)
+    cons = all_constraints(tb.model, include_variable_in_set_constraints = false)
     @test jump_function(constraint_object(first(cons))) == yt[7, 1] - yt[6, 1]
     # test assertion error
     m = InfiniteModel()
@@ -439,14 +439,14 @@ end
     @variable(m, y, Infinite(t))
     add_supports(t, 1, label = InternalGaussLobatto)
     constant_over_collocation(y, t)
-    tm = transcription_model(m)
-    IOTO.set_parameter_supports(tm, m)
-    IOTO.transcribe_infinite_variables!(tm, m)
-    @test_throws AssertionError IOTO.transcribe_variable_collocation_restictions!(tm, m)
+    tb = m.backend
+    IOTO.set_parameter_supports(tb, m)
+    IOTO.transcribe_infinite_variables!(tb, m)
+    @test_throws AssertionError IOTO.transcribe_variable_collocation_restictions!(tb, m)
 end

-# Test build_transcription_model!
-@testset "build_transcription_model!" begin
+# Test build_transcription_backend!
+@testset "build_transcription_backend!" begin
     # initialize model
     mockoptimizer = () -> MOIU.MockOptimizer(MOIU.UniversalFallback(MOIU.Model{Float64}()),
                                              eval_objective_value=false)
@@ -480,12 +480,12 @@ end
     @constraint(m, c7, gr(z) == 2)
     @objective(m, Min, x0 + meas1)
     # test basic usage
-    tm = optimizer_model(m)
-    @test IOTO.build_transcription_model!(tm, m,
+    tb = m.backend
+    @test IOTO.build_transcription_backend!(tb, m,
                                           check_support_dims = false) isa Nothing
     # test finite variables
-    zt = transcription_variable(z)
-    wt = transcription_variable(w)
+    zt = IOTO.transcription_variable(z)
+    wt = IOTO.transcription_variable(w)
     @test zt isa VariableRef
     @test wt isa VariableRef
     @test name(zt) == name(z)
@@ -497,84 +497,117 @@ end
     @test is_integer(wt)
     @test start_value(wt) == 1.
     # test infinite variables
-    @test transcription_variable(x) isa Vector{VariableRef}
-    @test transcription_variable(y) isa Vector{VariableRef}
-    @test name(transcription_variable(x)[1]) == "x(support: 1)"
-    @test name(transcription_variable(y)[3]) == "y(support: 3)"
-    @test has_lower_bound(transcription_variable(x)[1])
-    @test is_binary(transcription_variable(y)[2])
-    @test is_fixed(transcription_variable(y)[4])
-    @test is_integer(transcription_variable(x)[2])
-    @test start_value(transcription_variable(y)[1]) == 0.
+    @test IOTO.transcription_variable(x) isa Vector{VariableRef}
+    @test IOTO.transcription_variable(y) isa Vector{VariableRef}
+    @test name(IOTO.transcription_variable(x)[1]) == "x(support: 1)"
+    @test name(IOTO.transcription_variable(y)[3]) == "y(support: 3)"
+    @test has_lower_bound(IOTO.transcription_variable(x)[1])
+    @test is_binary(IOTO.transcription_variable(y)[2])
+    @test is_fixed(IOTO.transcription_variable(y)[4])
+    @test is_integer(IOTO.transcription_variable(x)[2])
+    @test start_value(IOTO.transcription_variable(y)[1]) == 0.
     @test supports(x) == [(0.,), (1.,)]
     @test length(supports(y)) == 4
     # test point variables
-    @test transcription_variable(x0) isa VariableRef
-    @test transcription_variable(y0) isa VariableRef
-    @test name(transcription_variable(x0)) == "x(support: 1)"
-    @test name(transcription_variable(y0))[1:end-2] == "y(support: "
-    @test has_lower_bound(transcription_variable(x0))
-    @test is_integer(transcription_variable(x0))
-    @test has_lower_bound(transcription_variable(y0))
-    @test has_upper_bound(transcription_variable(y0))
-    @test is_integer(transcription_variable(y0))
-    @test start_value(transcription_variable(y0)) == 0.
+    @test IOTO.transcription_variable(x0) isa VariableRef
+    @test IOTO.transcription_variable(y0) isa VariableRef
+    @test name(IOTO.transcription_variable(x0)) == "x(support: 1)"
+    @test name(IOTO.transcription_variable(y0))[1:end-2] == "y(support: "
+    @test has_lower_bound(IOTO.transcription_variable(x0))
+    @test is_integer(IOTO.transcription_variable(x0))
+    @test has_lower_bound(IOTO.transcription_variable(y0))
+    @test has_upper_bound(IOTO.transcription_variable(y0))
+    @test is_integer(IOTO.transcription_variable(y0))
+    @test start_value(IOTO.transcription_variable(y0)) == 0.
     # test derivatives
-    d1t = transcription_variable(d1)
-    d2t = transcription_variable(d2)
+    d1t = IOTO.transcription_variable(d1)
+    d2t = IOTO.transcription_variable(d2)
     @test length(d1t) == 4
     @test length(d2t) == 2
     @test upper_bound(d1t[1]) == 2
     @test supports(d2) == [(0.,), (1.,)]
     # test operators
-    attr_dict = backend(tm).model_cache.modattr
+    attr_dict = backend(tb).model_cache.modattr
     @test length(attr_dict) == 1
     @test attr_dict[MOI.UserDefinedFunction(:gr, 1)] == (g,)
     # test objective
-    xt = transcription_variable(tm, x)
-    @test objective_function(tm) == 2xt[1] + xt[2] - 2wt - d2t[1] - d2t[2]
-    @test objective_sense(tm) == MOI.MIN_SENSE
+    xt = IOTO.transcription_variable(x, tb)
+    @test objective_function(tb.model) == 2xt[1] + xt[2] - 2wt - d2t[1] - d2t[2]
+    @test objective_sense(tb.model) == MOI.MIN_SENSE
     # test constraints
-    yt = transcription_variable(y)
-    dt_c1 = IOTO.lookup_by_support(tm, d1, zeros(3))
-    @test constraint_object(transcription_constraint(c1)).func == -zt + xt[1] + dt_c1
-    @test constraint_object(transcription_constraint(c2)).func == zt + xt[1]
-    expected = transcription_variable(meas2)[2] - 2 * transcription_variable(y0) + xt[2]
-    @test constraint_object(transcription_constraint(c4)).func == expected
-    @test constraint_object(transcription_constraint(c3)).func == xt[1] - 2wt + xt[2] + zt - d2t[1] - d2t[2]
-    @test constraint_object(transcription_constraint(c6)).func == [zt, wt]
-    @test transcription_constraint(c5) isa Vector{ConstraintRef}
-    @test name(transcription_constraint(c2)) == "c2(support: 1)"
-    @test name(transcription_constraint(c1)) == "c1(support: 1)"
+    yt = IOTO.transcription_variable(y)
+    dt_c1 = IOTO.lookup_by_support(d1, tb, zeros(3))
+    @test constraint_object(IOTO.transcription_constraint(c1)).func == -zt + xt[1] + dt_c1
+    @test constraint_object(IOTO.transcription_constraint(c2)).func == zt + xt[1]
+    expected = IOTO.transcription_variable(meas2)[2] - 2 * IOTO.transcription_variable(y0) + xt[2]
+    @test constraint_object(IOTO.transcription_constraint(c4)).func == expected
+    @test constraint_object(IOTO.transcription_constraint(c3)).func == xt[1] - 2wt + xt[2] + zt - d2t[1] - d2t[2]
+    @test constraint_object(IOTO.transcription_constraint(c6)).func == [zt, wt]
+    @test IOTO.transcription_constraint(c5) isa Vector{ConstraintRef}
+    @test name(IOTO.transcription_constraint(c2)) == "c2(support: 1)"
+    @test name(IOTO.transcription_constraint(c1)) == "c1(support: 1)"
     @test supports(c1) == (0., [0., 0.])
-    @test transcription_constraint(c7) isa ConstraintRef
-    @test isequal(constraint_object(transcription_constraint(c7)).func, gr(zt) - 2.)
+    @test IOTO.transcription_constraint(c7) isa ConstraintRef
+    @test isequal(constraint_object(IOTO.transcription_constraint(c7)).func, gr(zt) - 2.)
     # test info constraints
-    @test transcription_constraint(LowerBoundRef(z)) == LowerBoundRef(zt)
-    @test transcription_constraint(UpperBoundRef(z)) == UpperBoundRef(zt)
-    @test transcription_constraint(BinaryRef(z)) == BinaryRef(zt)
-    @test transcription_constraint(FixRef(w)) == FixRef(wt)
-    @test transcription_constraint(IntegerRef(w)) == IntegerRef(wt)
-    @test transcription_constraint(LowerBoundRef(x0)) == LowerBoundRef(xt[1])
-    @test transcription_constraint(UpperBoundRef(x0)) == UpperBoundRef(xt[1])
-    @test transcription_constraint(IntegerRef(x0)) == IntegerRef(xt[1])
-    @test transcription_constraint(LowerBoundRef(y0)) isa ConstraintRef
-    @test transcription_constraint(UpperBoundRef(y0)) isa ConstraintRef
-    @test transcription_constraint(IntegerRef(y0)) isa ConstraintRef
-    @test transcription_constraint(LowerBoundRef(x)) == LowerBoundRef.(xt)
-    @test transcription_constraint(UpperBoundRef(x)) == UpperBoundRef.(xt)
-    @test transcription_constraint(IntegerRef(x)) == IntegerRef.(xt)
-    @test transcription_constraint(FixRef(y)) isa Vector{ConstraintRef}
-    @test transcription_constraint(BinaryRef(y)) isa Vector{ConstraintRef}
-    @test transcription_constraint(UpperBoundRef(d1)) == UpperBoundRef.(d1t)
+    @test IOTO.transcription_constraint(LowerBoundRef(z)) == LowerBoundRef(zt)
+    @test IOTO.transcription_constraint(UpperBoundRef(z)) == UpperBoundRef(zt)
+    @test IOTO.transcription_constraint(BinaryRef(z)) == BinaryRef(zt)
+    @test IOTO.transcription_constraint(FixRef(w)) == FixRef(wt)
+    @test IOTO.transcription_constraint(IntegerRef(w)) == IntegerRef(wt)
+    @test IOTO.transcription_constraint(LowerBoundRef(x0)) == LowerBoundRef(xt[1])
+    @test IOTO.transcription_constraint(UpperBoundRef(x0)) == UpperBoundRef(xt[1])
+    @test IOTO.transcription_constraint(IntegerRef(x0)) == IntegerRef(xt[1])
+    @test IOTO.transcription_constraint(LowerBoundRef(y0)) isa ConstraintRef
+    @test IOTO.transcription_constraint(UpperBoundRef(y0)) isa ConstraintRef
+    @test IOTO.transcription_constraint(IntegerRef(y0)) isa ConstraintRef
+    @test IOTO.transcription_constraint(LowerBoundRef(x)) == LowerBoundRef.(xt)
+    @test IOTO.transcription_constraint(UpperBoundRef(x)) == UpperBoundRef.(xt)
+    @test IOTO.transcription_constraint(IntegerRef(x)) == IntegerRef.(xt)
+    @test IOTO.transcription_constraint(FixRef(y)) isa Vector{ConstraintRef}
+    @test IOTO.transcription_constraint(BinaryRef(y)) isa Vector{ConstraintRef}
+    @test IOTO.transcription_constraint(UpperBoundRef(d1)) == UpperBoundRef.(d1t)
     # test a finite model
     m = InfiniteModel()
     @variable(m, y >= 0)
     @objective(m, Min, y)
-    tm = transcription_model(m)
-    @test IOTO.build_transcription_model!(tm, m) isa Nothing
-    @test transcription_variable(y) isa VariableRef
-    @test lower_bound(transcription_variable(y)) == 0
-    @test objective_sense(tm) == MOI.MIN_SENSE
+    tb = m.backend
+    @test IOTO.build_transcription_backend!(tb, m) isa Nothing
+    @test IOTO.transcription_variable(y) isa VariableRef
+    @test lower_bound(IOTO.transcription_variable(y)) == 0
+    @test objective_sense(tb.model) == MOI.MIN_SENSE
 end
+
+# Test build_transformation_backend!
+@testset "build_transformation_backend!" begin
+    # initialize model
+    mockoptimizer = () -> MOIU.MockOptimizer(MOIU.UniversalFallback(MOIU.Model{Float64}()),
+                                             eval_objective_value=false)
+    m = InfiniteModel(mockoptimizer)
+    @infinite_parameter(m, par in [0, 1], num_supports = 3)
+    @infinite_parameter(m, pars[1:2] in [0, 1], supports = [0, 1])
+    @variable(m, 1 >= x >= 0, Infinite(par), Int)
+    @variable(m, y == 2, Infinite(par, pars), Bin, start = 0)
+    @variable(m, x0, Point(x, 0))
+    @variable(m, 0 <= y0 <= 1, Point(y, 0, [0, 0]), Int)
+    @variable(m, 0 <= z <= 1, Bin)
+    @variable(m, w == 1, Int, start = 1)
+    meas1 = support_sum(x - w, par)
+    meas2 = integral(y, pars)
+    @constraint(m, c1, x + par - z == 0)
+    @constraint(m, c2, z + x0 >= -3)
+    @constraint(m, c3, meas1 + z == 0)
+    @constraint(m, c4, meas2 - 2y0 + x <= 1, DomainRestrictions(par => [0.5, 1]))
+    @constraint(m, c5, meas2 == 0)
+    @constraint(m, @deriv(x, par) == 0)
+    @constraint(m, sin(w) + integral(x^3, par) == 0)
+    @objective(m, Min, x0 + meas1)
+    set_silent(m)
+    set_time_limit_sec(m, 42.)
+    # test normal usage
+    @test isa(build_transformation_backend!(m, m.backend), Nothing)
+    @test transformation_backend_ready(m)
+    @test num_variables(m.backend.model) == 44
+    @test time_limit_sec(m.backend) == 42
+end
\ No newline at end of file
diff --git a/test/array_parameters.jl b/test/array_parameters.jl
index 66ff6c4b..57a3e477 100644
--- a/test/array_parameters.jl
+++ b/test/array_parameters.jl
@@ -602,7 +602,7 @@ end
     new_domain = CollectionDomain([IntervalDomain(0, 1), IntervalDomain(0, 2)])
     @test InfiniteOpt._update_parameter_domain(prefs1[1], new_domain) isa Nothing
     @test num_supports(prefs1) == 0
-    @test !optimizer_model_ready(m)
+    @test !transformation_backend_ready(m)
     @test infinite_domain(prefs1) == new_domain
     @test InfiniteOpt._update_parameter_domain(prefs1[1], old_domain) isa Nothing
 end
@@ -799,7 +799,7 @@ end
     @test InfiniteOpt._update_parameter_supports(prefs1, ones(Int, 2, 3),
                                                  UserDefined) isa Nothing
     @test supports(prefs1) == ones(Float64, 2, 1)
-    @test !optimizer_model_ready(m)
+    @test !transformation_backend_ready(m)
     @test InfiniteOpt._update_parameter_supports(prefs1, old_supports,
                                                  UniformGrid) isa Nothing
 end
diff --git a/test/optimizer.jl b/test/backend_mappings.jl
similarity index 57%
rename from test/optimizer.jl
rename to test/backend_mappings.jl
index 0db9b98d..37c03763 100644
--- a/test/optimizer.jl
+++ b/test/backend_mappings.jl
@@ -1,5 +1,5 @@
-# Test build_optimizer_model!
-@testset "build_optimizer_model!" begin
+# Test build_transformation_backend!
+@testset "build_transformation_backend!" begin
     # initialize model
     m = InfiniteModel()
     @infinite_parameter(m, par in [0, 1], supports = [0, 1],
@@ -22,25 +22,25 @@
     @constraint(m, c5, meas2 == 0)
     @objective(m, Min, x0 + meas1)
     # test extra keywords
-    @test_throws ErrorException build_optimizer_model!(m, bad = 42)
+    @test_throws ErrorException build_transformation_backend!(m, bad = 42)
     # test normal usage
-    @test isa(build_optimizer_model!(m), Nothing)
-    @test optimizer_model_ready(m)
-    @test num_variables(optimizer_model(m)) == 14
+    @test isa(build_transformation_backend!(m), Nothing)
+    @test transformation_backend_ready(m)
+    @test num_variables(m.backend.model) == 14
     # test repeated build
-    @test isa(build_optimizer_model!(m), Nothing)
-    @test optimizer_model_ready(m)
-    @test num_variables(optimizer_model(m)) == 14
+    @test isa(build_transformation_backend!(m), Nothing)
+    @test transformation_backend_ready(m)
+    @test num_variables(m.backend.model) == 14
     # test finite model
     m = InfiniteModel()
     @variable(m, y >= 0)
     @objective(m, Min, y)
     warn = "Finite models (i.e., `InfiniteModel`s with no infinite " *
            "parameters) should be modeled directly via a `Model` in JuMP.jl."
-    @test_logs (:warn, warn) build_optimizer_model!(m)
+    @test_logs (:warn, warn) build_transformation_backend!(m)
 end

-# Test optimizer model querying methods
+# Test transformation model querying methods
 @testset "Optimizer Model Queries" begin
     # initialize model
     m = InfiniteModel()
@@ -56,28 +56,28 @@ end
     @constraint(m, c2, z + x0 >= -3)
     @constraint(m, c3, meas1 + z == 0)
     f = parameter_function(sin, par)
-    build_optimizer_model!(m)
-    tm = optimizer_model(m)
-    tdata = transcription_data(tm)
-    # Test optimizer_model_variable
-    @testset "optimizer_model_variable" begin
+    build_transformation_backend!(m)
+    tb = m.backend
+    tdata = IOTO.transcription_data(tb)
+    # Test transformation_model_variable
+    @testset "transformation_model_variable" begin
         # test normal usage
-        @test optimizer_model_variable(x, label = All) == transcription_variable(x, label = All)
-        @test optimizer_model_variable(x, label = All, ndarray = true) == transcription_variable(x, label = All, ndarray = true)
-        @test optimizer_model_variable(x0) == transcription_variable(x0)
-        @test optimizer_model_variable(z) == transcription_variable(z)
-        @test optimizer_model_variable(d1, label = InternalLabel) == transcription_variable(d1, label = InternalLabel)
-        @test optimizer_model_variable(f) == [0, sin(1)]
+        @test transformation_model_variable(x, label = All) == IOTO.transcription_variable(x, label = All)
+        @test transformation_model_variable(x, label = All, ndarray = true) == IOTO.transcription_variable(x, label = All, ndarray = true)
+        @test transformation_model_variable(x0) == IOTO.transcription_variable(x0)
+        @test transformation_model_variable(z) == IOTO.transcription_variable(z)
+        @test transformation_model_variable(d1, label = InternalLabel) == IOTO.transcription_variable(d1, label = InternalLabel)
+        @test transformation_model_variable(f) == [0, sin(1)]
         # test fallback
-        @test_throws ErrorException optimizer_model_variable(x, Val(:Bad), my_key = true)
+        @test_throws ErrorException transformation_model_variable(x, TestBackend(), my_key = true)
     end
     # Test variable_supports
     @testset "variable_supports" begin
         # test finite variable fallback
-        @test InfiniteOpt.variable_supports(tm, dispatch_variable_ref(z), :key) == ()
-        @test InfiniteOpt.variable_supports(tm, dispatch_variable_ref(x0), :key, label = All) == ()
+        @test InfiniteOpt.variable_supports(dispatch_variable_ref(z), TestBackend()) == ()
+        @test InfiniteOpt.variable_supports(dispatch_variable_ref(x0), TestBackend(), label = All) == ()
         # test fallback
-        @test_throws ErrorException InfiniteOpt.variable_supports(tm, x, Val(:Bad))
+        @test_throws ErrorException InfiniteOpt.variable_supports(x, TestBackend())
     end
     # Test supports (variables)
     @testset "supports (Variables)" begin
@@ -89,35 +89,35 @@ end
         @test supports(d1, label = InternalLabel) == [(0.5,)]
         @test supports(f, label = All) == [(0.,), (0.5,), (1.,)]
     end
-    # Test optimizer_model_expression
-    @testset "optimizer_model_expression" begin
+    # Test transformation_model_expression
+    @testset "transformation_model_expression" begin
         # test variable references
-        @test optimizer_model_expression(x, label = All) == transcription_variable(x, label = All)
-        @test optimizer_model_expression(z) == transcription_variable(z)
-        @test optimizer_model_expression(x0) == transcription_variable(x0)
-        @test optimizer_model_expression(x0, ndarray = true) == transcription_variable(x0)
+        @test transformation_model_expression(x, label = All) == IOTO.transcription_variable(x, label = All)
+        @test transformation_model_expression(z) == IOTO.transcription_variable(z)
+        @test transformation_model_expression(x0) == IOTO.transcription_variable(x0)
+        @test transformation_model_expression(x0, ndarray = true) == IOTO.transcription_variable(x0)
         # test expression without variables
         expr = zero(JuMP.GenericAffExpr{Float64, GeneralVariableRef}) + 42
-        @test optimizer_model_expression(expr) == zero(AffExpr) + 42
+        @test transformation_model_expression(expr) == zero(AffExpr) + 42
         # test normal expressions
-        xt = transcription_variable(x, label = All)
-        zt = transcription_variable(z)
-        @test optimizer_model_expression(x^2 + z) == [xt[1]^2 + zt, xt[3]^2 + zt]
-        @test optimizer_model_expression(x^2 + z, ndarray = true) == [xt[1]^2 + zt, xt[3]^2 + zt]
-        @test optimizer_model_expression(x^2 + z, label = All) == [xt[1]^2 + zt, xt[2]^2 + zt, xt[3]^2 + zt]
-        @test optimizer_model_expression(2z - 3) == 2zt - 3
-        @test optimizer_model_expression(2 * f) == [zero(AffExpr), zero(AffExpr) + sin(1) * 2]
+        xt = IOTO.transcription_variable(x, label = All)
+        zt = IOTO.transcription_variable(z)
+        @test transformation_model_expression(x^2 + z) == [xt[1]^2 + zt, xt[3]^2 + zt]
+        @test transformation_model_expression(x^2 + z, ndarray = true) == [xt[1]^2 + zt, xt[3]^2 + zt]
+        @test transformation_model_expression(x^2 + z, label = All) == [xt[1]^2 + zt, xt[2]^2 + zt, xt[3]^2 + zt]
+        @test transformation_model_expression(2z - 3) == 2zt - 3
+        @test transformation_model_expression(2 * f) == [zero(AffExpr), zero(AffExpr) + sin(1) * 2]
         # test fallback
-        @test_throws ErrorException optimizer_model_expression(c1, Val(:Bad), my_key = true)
+        @test_throws ErrorException transformation_model_expression(c1, TestBackend(), my_key = true)
     end
     # Test expression_supports
     @testset "expression_supports" begin
         # test normal usage
-        @test InfiniteOpt.expression_supports(tm, x, Val(:TransData)) == [(0.,), (1.,)]
-        @test InfiniteOpt.expression_supports(tm, x^2 - x0, Val(:TransData)) == [(0.,), (1.,)]
-        @test InfiniteOpt.expression_supports(tm, x^2 - x0, Val(:TransData), label = All) == [(0.,), (0.5,), (1.,)]
+        @test InfiniteOpt.expression_supports(x, tb) == [(0.,), (1.,)]
+        @test InfiniteOpt.expression_supports(x^2 - x0, tb) == [(0.,), (1.,)]
+        @test InfiniteOpt.expression_supports(x^2 - x0, tb, label = All) == [(0.,), (0.5,), (1.,)]
         # test fallback
-        @test_throws ErrorException InfiniteOpt.expression_supports(tm, z^2, Val(:Bad))
+        @test_throws ErrorException InfiniteOpt.expression_supports(z^2, TestBackend())
     end
     # Test supports (expressions)
     @testset "supports (Expressions)" begin
@@ -128,24 +128,24 @@ end
         expr = zero(JuMP.GenericAffExpr{Float64, GeneralVariableRef}) + 42
         @test supports(expr, label = All) == ()
     end
-    # Test optimizer_model_constraint
-    @testset "optimizer_model_constraint" begin
+    # Test transformation_model_constraint
+    @testset "transformation_model_constraint" begin
         # test normal usage
-        @test optimizer_model_constraint(c1) == transcription_constraint(c1)
-        @test optimizer_model_constraint(c2, label = All) == transcription_constraint(c2, label = All)
-        @test optimizer_model_constraint(c2, label = All, ndarray = true) == transcription_constraint(c2, label = All, ndarray = true)
-        @test optimizer_model_constraint(c3) == transcription_constraint(c3)
+        @test transformation_model_constraint(c1) == IOTO.transcription_constraint(c1)
+        @test transformation_model_constraint(c2, label = All) == IOTO.transcription_constraint(c2, label = All)
+        @test transformation_model_constraint(c2, label = All, ndarray = true) == IOTO.transcription_constraint(c2, label = All, ndarray = true)
+        @test transformation_model_constraint(c3) == IOTO.transcription_constraint(c3)
         # test fallback
-        @test_throws ErrorException optimizer_model_constraint(c1, Val(:Bad), my_key = true)
+        @test_throws ErrorException transformation_model_constraint(c1, TestBackend(), my_key = true)
     end
     # Test constraint_supports
     @testset "constraint_supports" begin
         # test normal usage
-        @test InfiniteOpt.constraint_supports(tm, c1) == [(0.,), (1.,)]
-        @test InfiniteOpt.constraint_supports(tm, c1, label = All) == [(0.,), (0.5,), (1.,)]
-        @test InfiniteOpt.constraint_supports(tm, c1, label = All, ndarray = true) == [(0.,), (0.5,), (1.,)]
+        @test InfiniteOpt.constraint_supports(c1, tb) == [(0.,), (1.,)]
+        @test InfiniteOpt.constraint_supports(c1, tb, label = All) == [(0.,), (0.5,), (1.,)]
+        @test InfiniteOpt.constraint_supports(c1, tb, label = All, ndarray = true) == [(0.,), (0.5,), (1.,)]
         # test fallback
-        @test_throws ErrorException InfiniteOpt.constraint_supports(tm, c1, Val(:Bad))
+        @test_throws ErrorException InfiniteOpt.constraint_supports(c1, TestBackend())
     end
     # Test supports (constraints)
     @testset "supports (Constraints)" begin
@@ -181,8 +181,8 @@ end
     @objective(m, Min, x0 + meas1)
     # test normal usage
     @test isa(optimize!(m, check_support_dims = false), Nothing)
-    @test optimizer_model_ready(m)
-    @test num_variables(optimizer_model(m)) == 8
+    @test transformation_backend_ready(m)
+    @test num_variables(m.backend.model) == 8
     # test optimize hook
     function myhook(model; n = "", ub = 2, kwargs...)
         if !isempty(n)
@@ -194,29 +194,16 @@ end
     end
     @test set_optimize_hook(m, myhook) isa Nothing
     @test optimize!(m, n = "x", check_support_dims = false) isa Nothing
-    @test optimizer_model_ready(m)
-    @test num_variables(optimizer_model(m)) == 8
+    @test transformation_backend_ready(m)
+    @test num_variables(m.backend.model) == 8
     @test upper_bound(x) == 2
     @test set_optimize_hook(m, nothing) isa Nothing
     @test isnothing(m.optimize_hook)
     @test_throws ErrorException optimize!(m, n = "x")
     @test optimize!(m) isa Nothing
-    @test optimizer_model_ready(m)
-end
-
-# Test JuMP.result_count
-@testset "JuMP.result_count" begin
-    # Setup the infinite model
-    optimizer = () -> MOIU.MockOptimizer(MOIU.UniversalFallback(MOIU.Model{Float64}()),
-                                         eval_objective_value=false)
-    m = InfiniteModel(optimizer)
-    tm = optimizer_model(m)
-    model = MOIU.Model{Float64}()
-    JuMP.optimize!(tm)
-    mockoptimizer = JuMP.backend(tm).optimizer.model
-    MOI.set(mockoptimizer, MOI.ResultCount(), 2)
-    MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.OPTIMAL)
-    @test result_count(m) == 2
+    @test transformation_backend_ready(m)
+    set_transformation_backend(m, TestBackend())
+    @test_throws ErrorException optimize!(m)
 end

 # Test that we avoid world age problems with generated parameter functions
@@ -228,7 +215,7 @@ end
         myfunc(ts, a) = ts + a
         @parameter_function(m, d[i = 1:3] == (t) -> myfunc(t, i))
         @constraint(m, [i = 1:3], y >= d[i])
-        build_optimizer_model!(m)
+        build_transformation_backend!(m)
         return m
     end
     @test make_model() isa InfiniteModel
diff --git a/test/backend_setup.jl b/test/backend_setup.jl
new file mode 100644
index 00000000..7c9b1b1b
--- /dev/null
+++ b/test/backend_setup.jl
@@ -0,0 +1,122 @@
+# Test the backend methods
+@testset "Transformation Backends" begin
+    mockoptimizer = () -> MOIU.MockOptimizer(MOIU.UniversalFallback(MOIU.Model{Float64}()),
+                                             eval_objective_value=false)
+    m = InfiniteModel(mockoptimizer)
+    bmodel = Model(mockoptimizer)
+    jump_backend = JuMPBackend{TestJuMPTag}(bmodel, 42)
+    # transformation_backend_ready
+    @testset "transformation_backend_ready" begin
+        @test !transformation_backend_ready(m)
+        m.ready_to_optimize = true
+        @test transformation_backend_ready(m)
+    end
+    # set_transformation_backend_ready
+    @testset "set_transformation_backend_ready" begin
+        @test isa(set_transformation_backend_ready(m, false), Nothing)
+        @test !transformation_backend_ready(m)
+    end
+    # transformation_model
+    @testset "transformation_model" begin
+        @test isa(transformation_model(m), Model)
+        @test transformation_model(jump_backend) == bmodel
+        @test_throws ErrorException transformation_model(TestBackend())
+    end
+    # transformation_data
+    @testset "transformation_data" begin
+        @test isa(transformation_data(m), IOTO.TranscriptionData)
+        @test transformation_data(jump_backend) == 42
+        @test_throws ErrorException transformation_data(TestBackend())
+    end
+    # set_transformation_backend
+    @testset "set_transformation_backend" begin
+        current_backend = m.backend
+        @test set_transformation_backend(m, jump_backend) isa Nothing
+        @test transformation_model(m) == bmodel
+        @test set_transformation_backend(m, current_backend) isa Nothing
+        @test transformation_data(m) isa IOTO.TranscriptionData
+    end
+    # JuMP.get_attribute and JuMP.set_attribute
+    @testset "JuMP.[get/set]_attribute" begin
+        @test set_attribute(m, MOI.TimeLimitSec(), 10.) isa Nothing
+        @test get_attribute(m, MOI.TimeLimitSec()) == 10
+        @test set_attribute(jump_backend, MOI.TimeLimitSec(), 10.) isa Nothing
+        @test get_attribute(jump_backend, MOI.TimeLimitSec()) == 10
+        @test_throws ErrorException get_attribute(TestBackend(), MOI.TimeLimitSec())
+        @test_throws ErrorException set_attribute(TestBackend(), MOI.TimeLimitSec(), 10.)
+    end
+    # Base.empty!
+    @testset "Base.empty!" begin
+        @test empty!(JuMPBackend{TestJuMPTag}(Model(), [42])).data == []
+        @test_throws ErrorException empty!(TestBackend())
+    end
+end
+
+# Test JuMP extensions
+@testset "JuMP Extensions" begin
+    mockoptimizer = () -> MOIU.MockOptimizer(MOIU.UniversalFallback(MOIU.Model{Float64}()),
+                                             eval_objective_value=false)
+    m = InfiniteModel(mockoptimizer)
+    bmodel = Model(mockoptimizer)
+    jump_backend = JuMPBackend{TestJuMPTag}(bmodel, 42)
+    set_time_limit_sec(bmodel, 10)
+    @testset "Single Argument Methods" begin
+        for f in (set_silent, unset_silent, bridge_constraints,
+                  time_limit_sec, unset_time_limit_sec, solver_name, backend,
+                  JuMP.mode, unsafe_backend, compute_conflict!, copy_conflict,
+                  set_string_names_on_creation)
+            @test_throws ErrorException f(TestBackend())
+            if f != copy_conflict
+                @test f(jump_backend) == f(bmodel)
+                @test f(m) == f(m.backend)
+            else
+                @test f(jump_backend) isa Tuple
+                @test f(m) isa Tuple
+            end
+        end
+    end
+    @testset "JuMP.set_time_limit_sec" begin
+        @test_throws ErrorException set_time_limit_sec(TestBackend(), 42)
+        @test set_time_limit_sec(jump_backend, 42) isa Nothing
+        @test time_limit_sec(jump_backend) == 42
+        @test set_time_limit_sec(m, 42) isa Nothing
+        @test time_limit_sec(m) == 42
+    end
+    @testset "JuMP.set_string_names_on_creation" begin
+        @test_throws ErrorException set_string_names_on_creation(TestBackend(), false)
+        @test set_string_names_on_creation(jump_backend, false) isa Nothing
+        @test set_string_names_on_creation(jump_backend) == false
+        @test set_string_names_on_creation(m, true) isa Nothing
+        @test set_string_names_on_creation(m) == true
+    end
+    @testset "JuMP.add_bridge" begin
+        bridge = MOI.Bridges.Variable.VectorizeBridge
+        @test_throws ErrorException add_bridge(TestBackend(), bridge)
+        @test add_bridge(jump_backend, bridge) isa Nothing
+        @test add_bridge(m, bridge) isa Nothing
+    end
+    @testset "JuMP.print_active_bridges" begin
+        @test_throws ErrorException sprint(print_active_bridges, TestBackend())
+        expected = " * Supported objective: MOI.ScalarAffineFunction{Float64}\n"
+        @test sprint(print_active_bridges, jump_backend) == expected
+        @test sprint(print_active_bridges, m) == expected
+        stdout_test(print_active_bridges, expected, m)
+    end
+    @testset "print_bridge_graph" begin
+        @test_throws ErrorException sprint(print_bridge_graph, TestBackend())
+        expected = "Bridge graph with 0 variable nodes, 0 constraint nodes and 0 objective nodes.\n"
+        @test sprint(print_bridge_graph, jump_backend) == expected
+        @test sprint(print_bridge_graph, m) == expected
+        stdout_test(print_bridge_graph, expected, m)
+    end
+    @testset "JuMP.set_optimizer" begin
+        @test_throws ErrorException set_optimizer(TestBackend(), mockoptimizer)
+        bmodel2 = Model()
+        jump_backend2 = JuMPBackend{TestJuMPTag}(bmodel2, 42)
+        @test set_optimizer(jump_backend2, mockoptimizer) isa Nothing
+        @test solver_name(jump_backend2) == "Mock"
+        m2 = InfiniteModel()
+        @test set_optimizer(m2, mockoptimizer) isa Nothing
+        @test solver_name(m) == "Mock"
+    end
+end
diff --git a/test/constraints.jl b/test/constraints.jl
index 2dc15265..e7b92f5e 100644
--- a/test/constraints.jl
+++ b/test/constraints.jl
@@ -248,7 +248,7 @@ end
     @test add_constraint(m, con, "d") == cref
     @test name(cref) == "d"
     @test !InfiniteOpt._is_info_constraint(cref)
-    @test !optimizer_model_ready(m)
+    @test !transformation_backend_ready(m)
     @test used_by_constraint(pt)
     # test vector constraint
     con = VectorConstraint([inf + pt, 2inf], MOI.Zeros(2))
@@ -321,7 +321,7 @@ end
     # test normal
     @test set_domain_restrictions(c2, rs) isa Nothing
     @test domain_restrictions(c2) == rs
-    @test !optimizer_model_ready(m)
+    @test !transformation_backend_ready(m)
     # test test error with restrictions
     rs2 = DomainRestrictions(par => [-1, 1])
     @test_throws ErrorException set_domain_restrictions(c1, rs2, force = true)
diff --git a/test/datatypes.jl b/test/datatypes.jl
index 9e128c00..c01eaaf7 100644
--- a/test/datatypes.jl
+++ b/test/datatypes.jl
@@ -176,6 +176,11 @@ end
     @test MultiParameterData(params, 1, 1:1, ["par[1]"]) isa MultiParameterData
 end

+# Test Backends
+@testset "Backends" begin
+    @test JuMPBackend{TestJuMPTag}(Model(), Dict()) isa JuMPBackend{TestJuMPTag, Float64, Dict{Any, Any}}
+end
+
 # Test the InfiniteModel datatype
 @testset "InfiniteModel" begin
     # test basic
@@ -184,20 +189,18 @@ end
     # prepare optimizer constructor
     mockoptimizer = () -> MOIU.MockOptimizer(MOIU.UniversalFallback(MOIU.Model{Float64}()),
                                              eval_objective_value=false)
-    mockattributes = MOI.OptimizerWithAttributes(mockoptimizer, MOI.Silent() => true)
     # test optimizer constructors
-    @test InfiniteModel(mockoptimizer).optimizer_constructor == mockoptimizer
-    @test InfiniteModel(mockattributes).optimizer_constructor == mockoptimizer
+    @test solver_name(InfiniteModel(mockoptimizer).backend) == "Mock"
     m = InfiniteModel();
     @test isa(Base.broadcastable(m), Base.RefValue{InfiniteModel})
     @test length(JuMP.object_dictionary(m)) == 0
     @test InfiniteModel() isa JuMP.AbstractModel
-    @test InfiniteModel(mockoptimizer, add_bridges = false) isa JuMP.AbstractModel
+    @test InfiniteModel(mockoptimizer, add_bridges = false) isa InfiniteModel
     # test accessors
     @test InfiniteOpt._last_param_num(m) == 0
     @test InfiniteOpt._param_object_indices(m) isa Vector{Union{IndependentParameterIndex, DependentParametersIndex}}
     # test other methods
-    @test empty!(InfiniteModel(mockoptimizer)).optimizer_constructor == mockoptimizer
+    @test empty!(InfiniteModel(mockoptimizer)).backend isa TranscriptionBackend
     @test variable_ref_type(InfiniteModel) == GeneralVariableRef
     @test variable_ref_type(InfiniteModel()) == GeneralVariableRef
 end
diff --git a/test/derivatives.jl b/test/derivatives.jl
index 86464851..3dd34b6e 100644
--- a/test/derivatives.jl
+++ b/test/derivatives.jl
@@ -278,7 +278,7 @@ end
     dref = DerivativeRef(m, idx)
     gvref = InfiniteOpt._make_variable_ref(m, idx)
     @test isequal(add_derivative(m, d, "name"), gvref)
-    @test !optimizer_model_ready(m)
+    @test !transformation_backend_ready(m)
     @test InfiniteOpt._derivative_dependencies(prefs[1]) == [idx]
     @test InfiniteOpt._derivative_dependencies(x) == [DerivativeIndex(i) for i = 1:2]
     @test !InfiniteOpt._is_vector_start(dref)
@@ -535,7 +535,7 @@ end
     @test set_start_value_function(d1, 1.5) isa Nothing
     @test start_value_function(d1)([0]) == 1.5
     @test InfiniteOpt._is_vector_start(dref)
-    @test !optimizer_model_ready(m)
+    @test !transformation_backend_ready(m)
     func = (a) -> 1
     @test set_start_value_function(d1, func) isa Nothing
     @test start_value_function(d1)(0) == 1
@@ -546,7 +546,7 @@ end
     @test reset_start_value_function(d1) isa Nothing
     @test start_value_function(d1) isa Nothing
     @test InfiniteOpt._is_vector_start(dref)
-    @test !optimizer_model_ready(m)
+    @test !transformation_backend_ready(m)
 end
 end
diff --git a/test/extensions.jl b/test/extensions.jl
index 815dbf4a..d7a7e68e 100644
--- a/test/extensions.jl
+++ b/test/extensions.jl
@@ -242,7 +242,7 @@ end
     # test optimization with rebuild
     mockoptimizer = () -> MOIU.MockOptimizer(MOIU.UniversalFallback(MOIU.Model{Float64}()),
                                              eval_objective_value=false)
-    @test set_optimizer_model_ready(m, false) isa Nothing
+    @test set_transformation_backend_ready(m, false) isa Nothing
     @test set_optimizer(m, mockoptimizer) isa Nothing
     @test set_silent(m) isa Nothing
     @test set_time_limit_sec(m, 42.) isa Nothing
diff --git a/test/finite_variables.jl b/test/finite_variables.jl
index 44793b54..6e6ee7d4 100644
--- a/test/finite_variables.jl
+++ b/test/finite_variables.jl
@@ -149,7 +149,7 @@ end
     vref = FiniteVariableRef(m, idx)
     gvref = InfiniteOpt._make_variable_ref(m, idx)
     @test isequal(add_variable(m, v, "name"), gvref)
-    @test !optimizer_model_ready(m)
+    @test !transformation_backend_ready(m)
     # lower bound
     cindex = InfOptConstraintIndex(1)
     cref = InfOptConstraintRef(m, cindex)
diff --git a/test/infinite_variables.jl b/test/infinite_variables.jl
index 602a1372..ecaae5fe 100644
--- a/test/infinite_variables.jl
+++ b/test/infinite_variables.jl
@@ -346,7 +346,7 @@ end
     vref = InfiniteVariableRef(m, idx)
     gvref = InfiniteOpt._make_variable_ref(m, idx)
     @test isequal(add_variable(m, v, "name"), gvref)
-    @test !optimizer_model_ready(m)
+    @test !transformation_backend_ready(m)
     @test !InfiniteOpt._is_vector_start(vref)
     @test InfiniteOpt._variable_info(vref).start == func1
     # lower bound
@@ -390,7 +390,7 @@ end
     vref = InfiniteVariableRef(m, idx)
     gvref = InfiniteOpt._make_variable_ref(m, idx)
     @test isequal(add_variable(m, v, "name"), gvref)
-    @test !optimizer_model_ready(m)
+    @test !transformation_backend_ready(m)
     @test InfiniteOpt._is_vector_start(vref)
     @test InfiniteOpt._variable_info(vref).start isa Function
     cindex = InfOptConstraintIndex(8)
diff --git a/test/measure_expansions.jl b/test/measure_expansions.jl
index 06d66ce4..ce16fba1 100644
--- a/test/measure_expansions.jl
+++ b/test/measure_expansions.jl
@@ -39,17 +39,15 @@
    @testset "make_point_variable_ref (Parameter Function)" begin
         f = parameter_function(sin, par1)
         @test make_point_variable_ref(m, f, [0.]) == 0
-        @test make_point_variable_ref(Model(), f, [0.]) == 0
+        @test make_point_variable_ref(TestBackend(), f, [0.]) == 0
     end
     # test add_point_variable
     @testset "add_point_variable" begin
-        @test_throws ErrorException add_point_variable(Model(), d1, [0.], Val(:some_key))
+        @test_throws ErrorException add_point_variable(TestBackend(), d1, [0.])
     end
-    # test make_point_variable_ref (optmizer_model)
-    @testset "make_point_variable_ref (optimizer_model)" begin
-        opt_m = Model()
-        opt_m.ext[:my_key] = 42
-        @test_throws ErrorException make_point_variable_ref(opt_m, inf1, Float64[0])
+    # test make_point_variable_ref (backend)
+    @testset "make_point_variable_ref (backend)" begin
+        @test_throws ErrorException make_point_variable_ref(TestBackend(), inf1, Float64[0])
     end
     # test make_semi_infinite_variable_ref (InfiniteModel)
     @testset "make_semi_infinite_variable_ref (InfiniteModel)" begin
@@ -79,13 +77,11 @@ end
     # test add_semi_infinite_variable
     @testset "add_semi_infinite_variable" begin
-        @test_throws ErrorException add_semi_infinite_variable(Model(), Bad(), Val(:some_key))
+        @test_throws ErrorException add_semi_infinite_variable(TestBackend(), Bad())
     end
-    # test make_semi_infinite_variable_ref (optimizer_model)
-    @testset "make_semi_infinite_variable_ref (optimizer_model)" begin
-        opt_m = Model()
-        opt_m.ext[:my_key] = 42
-        @test_throws ErrorException make_semi_infinite_variable_ref(opt_m, inf2, [1], Float64[1])
+    # test make_semi_infinite_variable_ref (backend)
+    @testset "make_semi_infinite_variable_ref (backend)" begin
+        @test_throws ErrorException make_semi_infinite_variable_ref(TestBackend(), inf2, [1], Float64[1])
     end
     # test _process_aff_result
     @testset "_process_aff_result" begin
diff --git a/test/objective.jl b/test/objective.jl
index f0302bcb..d7c3f078 100644
--- a/test/objective.jl
+++ b/test/objective.jl
@@ -89,7 +89,7 @@ end
     @test used_by_objective(x)
     @test used_by_objective(pt)
     @test used_by_objective(meas)
-    @test !optimizer_model_ready(m)
+    @test !transformation_backend_ready(m)
     @test objective_has_measures(m)
     # test reset
     @test isa(set_objective_function(m, pt), Nothing)
@@ -122,7 +122,7 @@ end
     @test used_by_objective(x)
     @test used_by_objective(pt)
     @test used_by_objective(meas)
-    @test !optimizer_model_ready(m)
+    @test !transformation_backend_ready(m)
     # test reset
     @test isa(set_objective(m, MOI.MAX_SENSE, pt), Nothing)
     @test isequal(objective_function(m), pt)
diff --git a/test/optimizer_setup.jl b/test/optimizer_setup.jl
deleted file mode 100644
index 5a0ebb49..00000000
--- a/test/optimizer_setup.jl
+++ /dev/null
@@ -1,174 +0,0 @@
-# Test add_infinite_model_optimizer
-@testset "add_infinite_model_optimizer" begin
-    # initialize model
-    mockoptimizer = () -> MOIU.MockOptimizer(MOIU.UniversalFallback(MOIU.Model{Float64}()),
-                                             eval_objective_value=false)
-    m = InfiniteModel(mockoptimizer)
-    set_silent(m)
-    set_time_limit_sec(m, 42.)
-    # test normal
-    tm = Model()
-    @test InfiniteOpt.add_infinite_model_optimizer(tm, m) isa Nothing
-    @test time_limit_sec(tm) == 42
-    @test get_optimizer_attribute(tm, MOI.Silent())
-end
-
-# Test the optimizer model methods
-@testset "Optimizer Model" begin
-    mockoptimizer = () -> MOIU.MockOptimizer(MOIU.UniversalFallback(MOIU.Model{Float64}()),
-                                             eval_objective_value=false)
-    m = InfiniteModel(mockoptimizer)
-    # optimizer_model
-    @testset "optimizer_model" begin
-        @test isa(optimizer_model(m), Model)
-    end
-    # optimizer_model_ready
-    @testset "optimizer_model_ready" begin
-        @test !optimizer_model_ready(m)
-        m.ready_to_optimize = true
-        @test optimizer_model_ready(m)
-    end
-    # set_optimizer_model_ready
-    @testset "set_optimizer_model_ready" begin
-        @test isa(set_optimizer_model_ready(m, false), Nothing)
-        @test !optimizer_model_ready(m)
-    end
-    # set_optimizer_model
-    @testset "set_optimizer_model" begin
-        # test with inheritance
-        set_time_limit_sec(optimizer_model(m), 42.)
-        @test isa(set_optimizer_model(m, Model()), Nothing)
-        @test length(optimizer_model(m).ext) == 0
-        @test time_limit_sec(optimizer_model(m)) == 42
-        @test optimize!(optimizer_model(m)) isa Nothing
-        # test without inheritance
-        @test isa(set_optimizer_model(m, Model(), inherit_optimizer = false), Nothing)
-        @test length(optimizer_model(m).ext) == 0
-        @test_throws(
-            Union{ErrorException,MOI.GetAttributeNotAllowed{MOI.TimeLimitSec}},
-            time_limit_sec(optimizer_model(m)),
-        )
-        @test_throws NoOptimizer optimize!(optimizer_model(m))
-    end
-    # optimizer_model_key (optimizer models)
-    @testset "optimizer_model_key (Model)" begin
-        m = InfiniteModel()
-        @test optimizer_model_key(optimizer_model(m)) == :TransData
-        optimizer_model(m).ext[:extra] = 42
-        @test_throws ErrorException optimizer_model_key(optimizer_model(m))
-    end
-    # optimizer_model_key (InfiniteModel)
-    @testset "optimizer_model_key (InfiniteModel)" begin
-        m = InfiniteModel()
-        @test optimizer_model_key(m) == :TransData
-        optimizer_model(m).ext[:extra] = 42
-        @test_throws ErrorException optimizer_model_key(m)
-    end
-    # clear_optimizer_model_build! (optimizer models)
-    @testset "clear_optimizer_model_build! (Model)" begin
-        # setup
-        m = TranscriptionModel(mockoptimizer)
-        set_time_limit_sec(m, 42.)
-        @variable(m, t)
-        # test
-        @test clear_optimizer_model_build!(m) isa Model
-        @test num_variables(m) == 0
-        @test length(m.ext) == 1
-        @test time_limit_sec(m) == 42
-        # test add variable again
-        @test @variable(m, t) isa VariableRef
-    end
-    # clear_optimizer_model_build! (InfiniteModel)
-    @testset "clear_optimizer_model_build! (InfiniteModel)" begin
-        # setup
-        m = InfiniteModel(mockoptimizer)
-        set_time_limit_sec(optimizer_model(m), 42.)
-        @variable(optimizer_model(m), t)
-        # test
-        @test clear_optimizer_model_build!(m) isa Model
-        @test num_variables(optimizer_model(m)) == 0
-        @test length(optimizer_model(m).ext) == 1
-        @test time_limit_sec(optimizer_model(m)) == 42
-        # test add variable again
-        @test @variable(optimizer_model(m), t) isa VariableRef
-    end
-end
-
-# Test JuMP extensions
-@testset "JuMP Extensions" begin
-    m = InfiniteModel()
-    mockoptimizer = () -> MOIU.MockOptimizer(MOIU.UniversalFallback(MOIU.Model{Float64}()),
-                                             eval_objective_value=false)
-    # set_optimizer
-    @testset "JuMP.set_optimizer" begin
-        m2 = InfiniteModel()
-        @test isa(set_optimizer(m2, mockoptimizer), Nothing)
-        @test m2.optimizer_constructor == mockoptimizer
-    end
-    # bridge_constraints
-    @testset "JuMP.bridge_constraints" begin
-        @test !bridge_constraints(m)
-        set_optimizer(optimizer_model(m), mockoptimizer)
-        @test bridge_constraints(m)
-    end
-    # add_bridge
-    @testset "JuMP.add_bridge" begin
-        # @test isa(add_bridge(m, TestBridge), Nothing)
-        @test isa(add_bridge(m, MOI.Bridges.Variable.VectorizeBridge), Nothing)
-    end
-    # set_silent
-    @testset "JuMP.set_silent" begin
-        @test set_silent(m) isa Nothing
-    end
-    # unset_silent
-    @testset "JuMP.unset_silent" begin
-        @test unset_silent(m) isa Nothing
-    end
-    # set_time_limit_sec
-    @testset "JuMP.set_time_limit_sec" begin
-        @test set_time_limit_sec(m, 100.) isa Nothing
-        @test time_limit_sec(m) == 100
-    end
-    # unset_time_limit_sec
-    @testset "JuMP.unset_time_limit_sec" begin
-        @test isa(unset_time_limit_sec(m), Nothing)
-    end
-    # time_limit_sec
-    @testset "JuMP.time_limit_sec" begin
-        @test time_limit_sec(m) === nothing
-    end
-    # set_optimizer_attribute
-    @testset "JuMP.set_optimizer_attribute (String)" begin
-        @test set_optimizer_attribute(m, "mine", 42.) isa Nothing
-    end
-    # set_optimizer_attribute
-    @testset "JuMP.set_optimizer_attribute (MOI)" begin
-        @test set_optimizer_attribute(m, MOI.Silent(), true) isa Nothing
-    end
-    # set_optimizer_attributes
-    @testset "JuMP.set_optimizer_attributes" begin
-        @test isa(set_optimizer_attributes(m, MOI.Silent() => false, "mine" => 1), Nothing)
-        @test !MOI.get(optimizer_model(m), MOI.Silent())
-        @test MOI.get(optimizer_model(m), MOI.RawOptimizerAttribute("mine")) == 1
-    end
-    # get_optimizer_attribute
-    @testset "JuMP.get_optimizer_attribute (String)" begin
-        @test get_optimizer_attribute(m, "mine") == 1
-    end
-    # get_optimizer_attribute
-    @testset "JuMP.get_optimizer_attribute (MOI)" begin
-        @test !get_optimizer_attribute(m, MOI.Silent())
-    end
-    # solver_name
-    @testset "JuMP.solver_name" begin
-        @test solver_name(m) == "Mock"
-    end
-    # backend
-    @testset "JuMP.backend" begin
-        @test backend(m) == backend(optimizer_model(m))
-    end
-    # mode
-    @testset "JuMP.mode" begin
-        @test JuMP.mode(m) == JuMP.mode(optimizer_model(m))
-    end
-end
diff --git a/test/point_variables.jl b/test/point_variables.jl
index 785f61db..e61e9992 100644
--- a/test/point_variables.jl
+++ b/test/point_variables.jl
@@ -373,7 +373,7 @@ end
     vref = PointVariableRef(m, idx)
     gvref = InfiniteOpt._make_variable_ref(m, idx)
     @test isequal(add_variable(m, v, "name"), gvref)
-    @test !optimizer_model_ready(m)
+    @test !transformation_backend_ready(m)
     # lower bound
     cindex = InfOptConstraintIndex(1)
     cref = InfOptConstraintRef(m, cindex)
@@ -417,7 +417,7 @@ end
     vref = PointVariableRef(m, idx)
     gvref = InfiniteOpt._make_variable_ref(m, idx)
     @test isequal(add_variable(m, v, "name"), gvref)
-    @test !optimizer_model_ready(m)
+    @test !transformation_backend_ready(m)
     cindex = InfOptConstraintIndex(8)
     cref = InfOptConstraintRef(m, cindex)
     @test has_upper_bound(vref)
diff --git a/test/results.jl b/test/results.jl
index b345686b..1a06c866 100644
--- a/test/results.jl
+++ b/test/results.jl
@@ -436,4 +436,4 @@ end
     @test lp_sensitivity_report(m)[g] == (0, 0)
     @test lp_sensitivity_report(m)[inf, label = UserDefined] == [(0, 0), (0, 0)]
     @test lp_sensitivity_report(m)[inf, ndarray = true] == [(0, 0), (0, 0)]
-end
+end
\ No newline at end of file
diff --git a/test/runtests.jl b/test/runtests.jl
index 7886ed26..2ddd2531 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -40,7 +40,7 @@ println("")
     include("general_variables.jl")
 end
 println("")
-@time @testset "Optimizer Setup Methods" begin include("optimizer_setup.jl") end
+@time @testset "Backend Setup Methods" begin include("backend_setup.jl") end
 println("")
 @time @testset "Macro Utilities" begin include("macro_utilities.jl") end
 println("")
@@ -91,15 +91,14 @@ println("")
     @testset "Model" begin include("TranscriptionOpt/model.jl") end
     @testset "Measures" begin include("TranscriptionOpt/measure.jl") end
     @testset "Transcribe" begin include("TranscriptionOpt/transcribe.jl") end
-    @testset "Optimize" begin include("TranscriptionOpt/optimize.jl") end
 end
 println("")
-@time @testset "Solution Methods" begin include("optimizer.jl") end
+@time @testset "Solution Methods" begin include("backend_mappings.jl") end
 println("")
 @time @testset "Solution Queries" begin include("results.jl") end
 println("")
-@time @testset "Extensions" begin include("extensions.jl") end
-println("")
+# @time @testset "Extensions" begin include("extensions.jl") end
+# println("")
 println("----------------------------------------------------------------------------")
 println("-----------------------------TESTING COMPLETE!------------------------------")
 println("----------------------------------------------------------------------------")
diff --git a/test/show.jl b/test/show.jl
index 551a9471..be274c24 100644
--- a/test/show.jl
+++ b/test/show.jl
@@ -759,98 +759,79 @@ end
     # test show_backend_summary
     @testset "JuMP.show_backend_summary" begin
         # test without optimizer
-        str = "Optimizer model backend information: \nModel mode: AUTOMATIC\n" *
-              "CachingOptimizer state: NO_OPTIMIZER\nSolver name: No optimizer" *
-              " attached."
+        str = "Transformation backend information: \n  Backend type: TranscriptionBackend\n  " *
+              "Solver name: No optimizer attached.\n  Transformation built and up-to-date: false\n"
         io_test(show_backend_summary, str, m)
         # test with optimizer
-        set_optimizer(optimizer_model(m), mockoptimizer)
-        str = "Optimizer model backend information: \nModel mode: AUTOMATIC\n" *
-              "CachingOptimizer state: EMPTY_OPTIMIZER\nSolver name: Mock"
+        set_optimizer(m, mockoptimizer)
+        str = "Transformation backend information: \n  Backend type: TranscriptionBackend\n  " *
+              "Solver name: Mock\n  Transformation built and up-to-date: false\n"
        io_test(show_backend_summary, str, m)
     end
     # test show_objective_function_summary
     @testset "JuMP.show_objective_function_summary" begin
-        str = "Objective function type: GenericAffExpr{Float64,GeneralVariableRef}\n"
-        str2 = "Objective function type: GenericAffExpr{Float64, GeneralVariableRef}\n"
+        str = "  Objective function type: GenericAffExpr{Float64,GeneralVariableRef}\n"
+        str2 = "  Objective function type: GenericAffExpr{Float64, GeneralVariableRef}\n"
        io_test(show_objective_function_summary, [str, str2], m)
     end
     # test show_constraints_summary
     @testset "JuMP.show_constraints_summary" begin
         # test the main function
-        str = "`GenericAffExpr{Float64,GeneralVariableRef}`-in-`MathOptInter" *
-              "face.LessThan{Float64}`: 2 constraints\n"
-        str2 = "`GenericAffExpr{Float64, GeneralVariableRef}`-in-`MathOptInter" *
-               "face.LessThan{Float64}`: 2 constraints\n"
+        str = "  `GenericAffExpr{Float64,GeneralVariableRef}`-in-`MathOptInter" *
+              "face.LessThan{Float64}`: 2 constraints\n"
+        str2 = "  `GenericAffExpr{Float64, GeneralVariableRef}`-in-`MathOptInter" *
+               "face.LessThan{Float64}`: 2 constraints\n"
         io_test(show_constraints_summary, [str, str2], m)
     end
     # test show_objective_function_summary
     @testset "Base.show (InfiniteModel)" begin
         # test minimization
-        str = "An InfiniteOpt Model\nMinimization problem with:\nFinite " *
-              "Parameters: 0\nInfinite Parameters: 3\nVariables: 3" *
-              "\nDerivatives: 0\nMeasures: 0" *
-              "\nObjective function type: GenericAffExpr{Float64,General" *
-              "VariableRef}\n`GenericAffExpr{Float64,GeneralVariableRef}`-in-" *
-              "`MathOptInterface.LessThan{Float64}`: 2 constraints" *
-              "\nNames registered in the model: c1, c3, par1, " *
-              "pars, x, y, z\nOptimizer model backend information: \nModel " *
-              "mode: AUTOMATIC\nCachingOptimizer state: EMPTY_OPTIMIZER\n" *
-              "Solver name: Mock"
-        str2 = "An InfiniteOpt Model\nMinimization problem with:\nFinite " *
-               "Parameters: 0\nInfinite Parameters: 3\nVariables: 3" *
-               "\nDerivatives: 0\nMeasures: 0" *
-               "\nObjective function type: GenericAffExpr{Float64, General" *
-               "VariableRef}\n`GenericAffExpr{Float64, GeneralVariableRef}`-in-" *
-               "`MathOptInterface.LessThan{Float64}`: 2 constraints" *
-               "\nNames registered in the model: c1, c3, par1, " *
-               "pars, x, y, z\nOptimizer model backend information: \nModel " *
-               "mode: AUTOMATIC\nCachingOptimizer state: EMPTY_OPTIMIZER\n" *
-               "Solver name: Mock"
+        str = "An InfiniteOpt Model\nMinimization problem with:\n  Finite parameters: 0\n  " *
+              "Infinite parameters: 3\n  Variables: 3\n  Derivatives: 0\n  Measures: 0\n  " *
+              "Objective function type: GenericAffExpr{Float64, GeneralVariableRef}\n  " *
+              "`GenericAffExpr{Float64, GeneralVariableRef}`-in-`MathOptInterface.LessThan{Float64}`: " *
+              "2 constraints\nNames registered in the model: c1, c3, par1, pars, x, y, z\n" *
+              "Transformation backend information: \n  Backend type: TranscriptionBackend\n  " *
+              "Solver name: Mock\n  Transformation built and up-to-date: false\n"
+        str2 = "An InfiniteOpt Model\nMinimization problem with:\n  Finite parameters: 0\n  " *
+               "Infinite parameters: 3\n  Variables: 3\n  Derivatives: 0\n  Measures: 0\n  " *
+               "Objective function type: GenericAffExpr{Float64,GeneralVariableRef}\n  " *
+               "`GenericAffExpr{Float64,GeneralVariableRef}`-in-`MathOptInterface.LessThan{Float64}`: " *
+               "2 constraints\nNames registered in the model: c1, c3, par1, pars, x, y, z\n" *
+               "Transformation backend information: \n  Backend type: TranscriptionBackend\n  " *
+               "Solver name: Mock\n  Transformation built and up-to-date: false\n"
         show_test(MIME("text/plain"), m, [str, str2], repl=:show)
         # test maximization
         set_objective_sense(m, MOI.MAX_SENSE)
-        str = "An InfiniteOpt Model\nMaximization problem with:\nFinite " *
-              "Parameters: 0\nInfinite Parameters: 3\nVariables: 3" *
-              "\nDerivatives: 0\nMeasures: 0" *
-              "\nObjective function type: GenericAffExpr{Float64,General" *
-              "VariableRef}\n`GenericAffExpr{Float64,GeneralVariableRef}`-in-" *
-              "`MathOptInterface.LessThan{Float64}`: 2 constraints" *
-              "\nNames registered in the model: c1, c3, par1, " *
-              "pars, x, y, z\nOptimizer model backend information: \nModel " *
-              "mode: AUTOMATIC\nCachingOptimizer state: EMPTY_OPTIMIZER\n" *
-              "Solver name: Mock"
-        str2 = "An InfiniteOpt Model\nMaximization problem with:\nFinite " *
-               "Parameters: 0\nInfinite Parameters: 3\nVariables: 3" *
-               "\nDerivatives: 0\nMeasures: 0" *
-               "\nObjective function type: GenericAffExpr{Float64, General" *
-               "VariableRef}\n`GenericAffExpr{Float64, GeneralVariableRef}`-in-" *
-               "`MathOptInterface.LessThan{Float64}`: 2 constraints" *
-               "\nNames registered in the model: c1, c3, par1, " *
-               "pars, x, y, z\nOptimizer model backend information: \nModel " *
-               "mode: AUTOMATIC\nCachingOptimizer state: EMPTY_OPTIMIZER\n" *
-               "Solver name: Mock"
+        str = "An InfiniteOpt Model\nMaximization problem with:\n  Finite parameters: 0\n  " *
+              "Infinite parameters: 3\n  Variables: 3\n  Derivatives: 0\n  Measures: 0\n  " *
+              "Objective function type: GenericAffExpr{Float64, GeneralVariableRef}\n  " *
+              "`GenericAffExpr{Float64, GeneralVariableRef}`-in-`MathOptInterface.LessThan{Float64}`: " *
+              "2 constraints\nNames registered in the model: c1, c3, par1, pars, x, y, z\n" *
+              "Transformation backend information: \n  Backend type: TranscriptionBackend\n  " *
+              "Solver name: Mock\n  Transformation built and up-to-date: false\n"
+        str2 = "An InfiniteOpt Model\nMaximization problem with:\n  Finite parameters: 0\n  " *
+               "Infinite parameters: 3\n  Variables: 3\n  Derivatives: 0\n  Measures: 0\n  " *
+               "Objective function type: GenericAffExpr{Float64,GeneralVariableRef}\n  " *
+               "`GenericAffExpr{Float64,GeneralVariableRef}`-in-`MathOptInterface.LessThan{Float64}`: " *
+               "2 constraints\nNames registered in the model: c1, c3, par1, pars, x, y, z\n" *
+               "Transformation backend information: \n  Backend type: TranscriptionBackend\n  " *
+               "Solver name: Mock\n  Transformation built and up-to-date: false\n"
         show_test(MIME("text/plain"), m, [str, str2], repl=:show)
         # test feasibility
         set_objective_sense(m, MOI.FEASIBILITY_SENSE)
-        str = "An InfiniteOpt Model\nFeasibility problem with:\nFinite " *
-              "Parameters: 0\nInfinite Parameters: 3\nVariables: 3" *
-              "\nDerivatives: 0\nMeasures: 0" *
-              "\n`GenericAffExpr{Float64,GeneralVariableRef}`-in-`MathOpt" *
-              "Interface.LessThan{Float64}`: 2 constraints" *
-              "\nNames registered in the model: c1, c3, par1, " *
-              "pars, x, y, z\nOptimizer model backend information: \nModel " *
-              "mode: AUTOMATIC\nCachingOptimizer state: EMPTY_OPTIMIZER\n" *
-              "Solver name: Mock"
-        str2 = "An InfiniteOpt Model\nFeasibility problem with:\nFinite " *
-               "Parameters: 0\nInfinite Parameters: 3\nVariables: 3" *
-               "\nDerivatives: 0\nMeasures: 0" *
-               "\n`GenericAffExpr{Float64, GeneralVariableRef}`-in-`MathOpt" *
-               "Interface.LessThan{Float64}`: 2 constraints" *
-               "\nNames registered in the model: c1, c3, par1, " *
-               "pars, x, y, z\nOptimizer model backend information: \nModel " *
-               "mode: AUTOMATIC\nCachingOptimizer state: EMPTY_OPTIMIZER\n" *
-               "Solver name: Mock"
+        str = "An InfiniteOpt Model\nFeasibility problem with:\n  Finite parameters: 0\n  " *
+              "Infinite parameters: 3\n  Variables: 3\n  Derivatives: 0\n  Measures: 0\n  " *
+              "`GenericAffExpr{Float64, GeneralVariableRef}`-in-`MathOptInterface.LessThan{Float64}`: " *
+              "2 constraints\nNames registered in the model: c1, c3, par1, pars, x, y, z\n" *
+              "Transformation backend information: \n  Backend type: TranscriptionBackend\n  " *
+              "Solver name: Mock\n  Transformation built and up-to-date: false\n"
+        str2 = "An InfiniteOpt Model\nFeasibility problem with:\n  Finite parameters: 0\n  " *
+               "Infinite parameters: 3\n  Variables: 3\n  Derivatives: 0\n  Measures: 0\n  " *
+               "`GenericAffExpr{Float64,GeneralVariableRef}`-in-`MathOptInterface.LessThan{Float64}`: " *
+               "2 constraints\nNames registered in the model: c1, c3, par1, pars, x, y, z\n" *
+               "Transformation backend information: \n  Backend type: TranscriptionBackend\n  " *
+               "Solver name: Mock\n  Transformation built and up-to-date: false\n"
         show_test(MIME("text/plain"), m, [str, str2], repl=:show)
     end
 end
diff --git a/test/utilities.jl b/test/utilities.jl
index 580e13fe..0964673e 100644
--- a/test/utilities.jl
+++ b/test/utilities.jl
@@ -36,6 +36,8 @@ struct TestGenInfo <: AbstractGenerativeInfo end
 struct BadData <: AbstractMeasureData end
 struct Bad end
 struct NotADomainType end
+struct TestJuMPTag <: AbstractJuMPTag end
+struct TestBackend <: AbstractTransformationBackend end
 struct TestIndex <: ObjectIndex
     value::Int
 end
@@ -94,6 +96,16 @@ io_test(f::Function, exp_str::Vector{String}, args...) = begin
     @test String(take!(io)) in exp_str
 end

+# Test the output of a function that prints to stdout
+function stdout_test(f::Function, exp_str, args...)
+    original_stdout = stdout
+    (read_pipe, write_pipe) = redirect_stdout()
+    @test f(args...)
isa Nothing + redirect_stdout(original_stdout) + close(write_pipe) + @test read(read_pipe, String) == exp_str +end + # Make method for sorting matrices sortcols(A) = sortslices(A, dims=2, lt=(x,y)->isless(x[:],y[:])) diff --git a/test/variable_info.jl b/test/variable_info.jl index 67afb265..a1f55976 100644 --- a/test/variable_info.jl +++ b/test/variable_info.jl @@ -67,7 +67,7 @@ @test lower_bound(vref1) == 1 cref = LowerBoundRef(vref1) @test InfiniteOpt._data_object(cref).is_info_constraint - @test !optimizer_model_ready(m) + @test !transformation_backend_ready(m) @test InfiniteOpt._core_constraint_object(cref) == ScalarConstraint(gvref1, MOI.GreaterThan(1.)) # test changing lower bound @test isa(set_lower_bound(vref2, 1.5), Nothing) @@ -75,7 +75,7 @@ @test lower_bound(gvref2) == 1.5 cref = LowerBoundRef(vref2) @test InfiniteOpt._data_object(cref).is_info_constraint - @test !optimizer_model_ready(m) + @test !transformation_backend_ready(m) @test InfiniteOpt._core_constraint_object(cref) == ScalarConstraint(gvref2, MOI.GreaterThan(1.5)) # test fixed variable error @test_throws AssertionError set_lower_bound(vref3, 0) @@ -145,7 +145,7 @@ end @test upper_bound(vref1) == 1 cref = UpperBoundRef(vref1) @test InfiniteOpt._data_object(cref).is_info_constraint - @test !optimizer_model_ready(m) + @test !transformation_backend_ready(m) @test InfiniteOpt._core_constraint_object(cref) == ScalarConstraint(gvref1, MOI.LessThan(1.)) # test changing upper bound @test isa(set_upper_bound(vref2, 1.5), Nothing) @@ -153,7 +153,7 @@ end @test upper_bound(gvref2) == 1.5 cref = UpperBoundRef(vref2) @test InfiniteOpt._data_object(cref).is_info_constraint - @test !optimizer_model_ready(m) + @test !transformation_backend_ready(m) @test InfiniteOpt._core_constraint_object(cref) == ScalarConstraint(gvref2, MOI.LessThan(1.5)) # test fixed variable error @test_throws AssertionError set_upper_bound(vref3, 0) @@ -218,7 +218,7 @@ end @test fix_value(vref1) == 1 cref = FixRef(vref1) @test InfiniteOpt._data_object(cref).is_info_constraint - @test !optimizer_model_ready(m) + @test !transformation_backend_ready(m) @test InfiniteOpt._core_constraint_object(cref) == ScalarConstraint(gvref1, MOI.EqualTo(1.)) # test changing fix @test isa(fix(gvref2, 1.5), Nothing) @@ -226,7 +226,7 @@ end @test fix_value(vref2) == 1.5 cref = FixRef(vref2) @test InfiniteOpt._data_object(cref).is_info_constraint - @test !optimizer_model_ready(m) + @test !transformation_backend_ready(m) @test InfiniteOpt._core_constraint_object(cref) == ScalarConstraint(gvref2, MOI.EqualTo(1.5)) # add lower and upper bounds to vars 3 and 4 set_lower_bound(vref3, 0.0) @@ -240,7 +240,7 @@ end @test fix_value(vref3) == 1 cref = FixRef(vref3) @test InfiniteOpt._data_object(cref).is_info_constraint - @test !optimizer_model_ready(m) + @test !transformation_backend_ready(m) @test InfiniteOpt._core_constraint_object(cref) == ScalarConstraint(gvref3, MOI.EqualTo(1.)) # test forcing with upper @test isa(fix(gvref4, 1.5, force = true), Nothing) @@ -248,7 +248,7 @@ end @test fix_value(vref4) == 1.5 cref = FixRef(vref4) @test InfiniteOpt._data_object(cref).is_info_constraint - @test !optimizer_model_ready(m) + @test !transformation_backend_ready(m) @test InfiniteOpt._core_constraint_object(cref) == ScalarConstraint(gvref4, MOI.EqualTo(1.5)) end end @@ -279,7 +279,7 @@ end @testset "JuMP.set_start_value" begin @test isa(set_start_value(vref, 1.5), Nothing) @test start_value(vref) == 1.5 - @test !optimizer_model_ready(m) + @test !transformation_backend_ready(m) @test 
isa(set_start_value(gvref, 1), Nothing) @test_throws ErrorException set_start_value(inf, 0) @test_throws ErrorException set_start_value(dinf, 0) @@ -297,7 +297,7 @@ end @test set_start_value_function(inf, 1.5) isa Nothing @test start_value_function(inf)([0]) == 1.5 @test InfiniteOpt._is_vector_start(dinf) - @test !optimizer_model_ready(m) + @test !transformation_backend_ready(m) func = (a) -> 1 @test set_start_value_function(inf, func) isa Nothing @test start_value_function(inf)(0) == 1 @@ -308,7 +308,7 @@ end @test reset_start_value_function(inf) isa Nothing @test start_value_function(inf) isa Nothing @test InfiniteOpt._is_vector_start(dinf) - @test !optimizer_model_ready(m) + @test !transformation_backend_ready(m) end end @@ -361,14 +361,14 @@ end @test is_binary(vref1) cref = BinaryRef(vref1) @test InfiniteOpt._data_object(cref).is_info_constraint - @test !optimizer_model_ready(m) + @test !transformation_backend_ready(m) @test InfiniteOpt._core_constraint_object(cref) == ScalarConstraint(gvref1, MOI.ZeroOne()) # test setting binary again @test isa(set_binary(gvref2), Nothing) @test is_binary(vref2) cref = BinaryRef(vref2) @test InfiniteOpt._data_object(cref).is_info_constraint - @test !optimizer_model_ready(m) + @test !transformation_backend_ready(m) @test InfiniteOpt._core_constraint_object(cref) == ScalarConstraint(gvref2, MOI.ZeroOne()) # test integer variable error @test_throws ErrorException set_binary(vref3) @@ -424,14 +424,14 @@ end @test is_integer(vref1) cref = IntegerRef(vref1) @test InfiniteOpt._data_object(cref).is_info_constraint - @test !optimizer_model_ready(m) + @test !transformation_backend_ready(m) @test InfiniteOpt._core_constraint_object(cref) == ScalarConstraint(gvref1, MOI.Integer()) # test setting integer again @test isa(set_integer(gvref2), Nothing) @test is_integer(vref2) cref = IntegerRef(vref2) @test InfiniteOpt._data_object(cref).is_info_constraint - @test !optimizer_model_ready(m) + @test !transformation_backend_ready(m) @test InfiniteOpt._core_constraint_object(cref) == ScalarConstraint(gvref2, MOI.Integer()) # test integer variable error @test_throws ErrorException set_integer(vref3) From 66c4c99bb636df1b8638fc0574670568d4f95d50 Mon Sep 17 00:00:00 2001 From: pulsipher Date: Sat, 29 Jun 2024 22:12:36 -0400 Subject: [PATCH 3/8] completed tests and naming --- src/InfiniteOpt.jl | 4 + src/TranscriptionOpt/model.jl | 18 +- src/TranscriptionOpt/transcribe.jl | 2 - src/backends.jl | 176 +++++++++---- src/results.jl | 56 ++-- test/TranscriptionOpt/model.jl | 32 +-- test/TranscriptionOpt/transcribe.jl | 2 +- test/backend_mappings.jl | 68 ++--- test/backend_setup.jl | 7 + test/extensions.jl | 117 ++++----- test/extensions/backend.jl | 366 +++++++++++++++++++++++++++ test/extensions/derivative_method.jl | 2 +- test/extensions/generative_info.jl | 4 +- test/extensions/measure_data.jl | 21 +- test/extensions/measure_eval.jl | 4 +- test/extensions/optimizer_model.jl | 201 --------------- test/results.jl | 166 ++++++------ test/runtests.jl | 4 +- 18 files changed, 759 insertions(+), 491 deletions(-) create mode 100644 test/extensions/backend.jl delete mode 100644 test/extensions/optimizer_model.jl diff --git a/src/InfiniteOpt.jl b/src/InfiniteOpt.jl index 49d29e99..f7ec59c6 100644 --- a/src/InfiniteOpt.jl +++ b/src/InfiniteOpt.jl @@ -62,6 +62,10 @@ macro register(args...) "the nonlinear documenation page for details.") end Base.@deprecate map_nlp_to_ast(f, expr) map_expression_to_ast(f, expr) +Base.@deprecate optimizer_model_variable(v; kwargs...) 
transformation_variable(v; kwargs...) +Base.@deprecate optimizer_model_expression(e; kwargs...) transformation_expression(e; kwargs...) +Base.@deprecate optimizer_model_constraint(c; kwargs...) transformation_constraint(c; kwargs...) +Base.@deprecate optimizer_model(m) transformation_model(m) # Define additional stuff that should not be exported const _EXCLUDE_SYMBOLS = [ diff --git a/src/TranscriptionOpt/model.jl b/src/TranscriptionOpt/model.jl index 88e219d9..d24683aa 100644 --- a/src/TranscriptionOpt/model.jl +++ b/src/TranscriptionOpt/model.jl @@ -402,17 +402,17 @@ function transcription_variable( end """ - InfiniteOpt.transformation_model_variable( + InfiniteOpt.transformation_variable( vref::InfiniteOpt.GeneralVariableRef, [backend::TranscriptionBackend]; [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, ndarray::Bool = false] ) -Proper extension of [`InfiniteOpt.transformation_model_variable`](@ref) for +Proper extension of [`InfiniteOpt.transformation_variable`](@ref) for `TranscriptionBackend`s. This simply dispatches to [`transcription_variable`](@ref). """ -function InfiniteOpt.transformation_model_variable( +function InfiniteOpt.transformation_variable( vref::InfiniteOpt.GeneralVariableRef, backend::TranscriptionBackend; label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, @@ -759,17 +759,17 @@ function transcription_expression( end """ - InfiniteOpt.transformation_model_expression( + InfiniteOpt.transformation_expression( expr::JuMP.AbstractJuMPScalar, backend::TranscriptionBackend; [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, ndarray::Bool = false] ) -Proper extension of [`InfiniteOpt.transformation_model_expression`](@ref) for +Proper extension of [`InfiniteOpt.transformation_expression`](@ref) for `TranscriptionBackend`s. This simply dispatches to [`transcription_expression`](@ref). """ -function InfiniteOpt.transformation_model_expression( +function InfiniteOpt.transformation_expression( expr::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr, JuMP.GenericNonlinearExpr}, backend::TranscriptionBackend; label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, @@ -895,17 +895,17 @@ function transcription_constraint( end """ - InfiniteOpt.transformation_model_constraint( + InfiniteOpt.transformation_constraint( cref::InfiniteOpt.InfOptConstraintRef, backend::TranscriptionBackend; [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel ndarray::Bool = false] ) -Proper extension of [`InfiniteOpt.transformation_model_constraint`](@ref) for +Proper extension of [`InfiniteOpt.transformation_constraint`](@ref) for `TranscriptionBackend`s. This simply dispatches to [`transcription_constraint`](@ref). 
""" -function InfiniteOpt.transformation_model_constraint( +function InfiniteOpt.transformation_constraint( cref::InfiniteOpt.InfOptConstraintRef, backend::TranscriptionBackend; label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, diff --git a/src/TranscriptionOpt/transcribe.jl b/src/TranscriptionOpt/transcribe.jl index 33c50c53..e2b4e4d8 100644 --- a/src/TranscriptionOpt/transcribe.jl +++ b/src/TranscriptionOpt/transcribe.jl @@ -1011,8 +1011,6 @@ function InfiniteOpt.build_transformation_backend!( model, check_support_dims = check_support_dims ) - # update the transformation backend status - InfiniteOpt.set_transformation_backend_ready(model, true) return end diff --git a/src/backends.jl b/src/backends.jl index da318599..131b5413 100644 --- a/src/backends.jl +++ b/src/backends.jl @@ -50,6 +50,16 @@ end transformation_model(model::InfiniteModel) Return the underlying model used by the transformation backend. + +**Example** +```julia-repl +julia> trans_model = transformation_model(model) +A JuMP Model +Feasibility problem with: +Variables: 0 +Model mode: AUTOMATIC +CachingOptimizer state: NO_OPTIMIZER +Solver name: No optimizer attached. ``` """ transformation_model(model::InfiniteModel) = transformation_model(model.backend) @@ -71,6 +81,11 @@ end Return the underlying data (typically mapping data) used by the transformation backend. + +**Example** +```julia-repl +julia> mapping_data = transformation_data(model); +``` """ function transformation_data(model::InfiniteModel) return transformation_data(model.backend) @@ -85,6 +100,29 @@ end Specify a new transformation backend `backend` for the `model`. Note that all data/settings/results associated with the previous backend will be removed. + +**Example** +```julia-repl +julia> transformation_backend(model) +A TranscriptionBackend that uses a +A JuMP Model +Feasibility problem with: +Variables: 0 +Model mode: AUTOMATIC +CachingOptimizer state: NO_OPTIMIZER +Solver name: No optimizer attached. + +julia> set_transformation_backend(model, TranscriptionBackend(Ipopt.Optimizer)) + +julia> transformation_backend(model) +A TranscriptionBackend that uses a +A JuMP Model +Feasibility problem with: +Variables: 0 +Model mode: AUTOMATIC +CachingOptimizer state: EMPTY_OPTIMIZER +Solver name: Ipopt +``` """ function set_transformation_backend( model::InfiniteModel, @@ -95,6 +133,29 @@ function set_transformation_backend( return end +""" + transformation_backend( + model::InfiniteModel + )::AbstractTransformationBackend + +Retrieve the transformation backend used by the `model`. + +**Example** +```julia-repl +julia> transformation_backend(model) +A TranscriptionBackend that uses a +A JuMP Model +Feasibility problem with: +Variables: 0 +Model mode: AUTOMATIC +CachingOptimizer state: NO_OPTIMIZER +Solver name: No optimizer attached. +``` +""" +function transformation_backend(model::InfiniteModel) + return model.backend +end + """ JuMP.get_attribute(backend::AbstractTransformationBackend, attr) @@ -116,10 +177,19 @@ end Retrieve an attribute `attr` from the transformation backend of `model`. Typically, this corresponds to `MOI.AbstractOptimizerAttribute`s. 
+ +**Example** +```julia-repl +julia> get_attribute(model, MOI.TimeLimitSec()) +60.0 +``` +""" function JuMP.get_attribute(model::InfiniteModel, attr) return JuMP.get_attribute(model.backend, attr) end +function JuMP.get_optimizer_attribute(model::InfiniteModel, attr) + return JuMP.get_attribute(model, attr) +end """ JuMP.set_attribute(backend::AbstractTransformationBackend, attr, value)::Nothing @@ -143,10 +213,18 @@ end Specify an attribute `attr` to the transformation backend of `model`. Typically, this corresponds to `MOI.AbstractOptimizerAttribute`s. + +**Example** +```julia-repl +julia> set_attribute(model, MOI.TimeLimitSec(), 42.0) +``` """ function JuMP.set_attribute(model::InfiniteModel, attr, value) return JuMP.set_attribute(model.backend, attr, value) end +function JuMP.set_optimizer_attribute(model::InfiniteModel, attr, value) + return JuMP.set_attribute(model, attr, value) +end """ Base.empty!(backend::AbstractTransformationBackend) @@ -174,7 +252,8 @@ end Given `model`, transform it into the representation used by `backend`. Once completed, `backend` should be ready to be solved. This serves as an extension point for new types of backends. If needed, keyword arguments -can be added. +can be added. Typically, this should clear out the backend before reconstructing +it. """ function build_transformation_backend!( model::InfiniteModel, @@ -192,13 +271,13 @@ Build the model used by the underlying transformation backend stored in `model` that it is ready to solve. Specifically, translate the InfiniteOpt formulation stored in `model` into (typically an approximate) formulation that is compatible with the backend. This is called automatically by `optimize!`; however, this -method can be used to build the transformation model without solving it. +method can be used to build the transformation backend without solving it. **Example** ```julia-repl julia> build_transformation_backend!(model) -julia> transformation_model_ready(model) +julia> transformation_backend_ready(model) true ``` """ @@ -240,12 +319,13 @@ function JuMP.set_optimize_hook( end """ - JuMP.optimize!(backend::AbstractTransformationBackend)::Nothing + JuMP.optimize!(backend::AbstractTransformationBackend) Invoke the relevant routines to solve the underlying model used by `backend`. Note that [`build_transformation_backend!`](@ref) will be called before this method is. This needs to be extended for new backend types, but no extension is needed for [`JuMPBackend`](@ref)s. +Optionally, information can be returned if desired. """ function JuMP.optimize!(backend::AbstractTransformationBackend) error("`JuMP.optimize!` not implemented for transformation backends " * @@ -254,14 +334,15 @@ end """ - JuMP.optimize!(model::InfiniteModel; [kwargs...])::Nothing + JuMP.optimize!(model::InfiniteModel; [kwargs...]) Extend `JuMP.optimize!` to optimize infinite models using the internal transformation backend. Calls [`build_transformation_backend!`](@ref) if the backend isn't up-to-date. The `kwargs` correspond to keyword arguments passed to [`build_transformation_backend!`](@ref) if any are defined. The `kwargs` can also include arguments that are passed to an optimize hook if one was set with -[`JuMP.set_optimize_hook`](@ref). +[`JuMP.set_optimize_hook`](@ref). Typically, this returns `nothing`, but +certain backends may return something. **Example** ```julia-repl @@ -282,8 +363,7 @@ function JuMP.optimize!( if !transformation_backend_ready(model) build_transformation_backend!(model; kwargs...) end - JuMP.optimize!(model.backend) - return + return JuMP.optimize!(model.backend) end
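# NOTE (editor's sketch, not part of this patch): a hypothetical custom backend
# would implement the extension points above roughly as follows. `ExampleBackend`
# and its fields are assumptions for illustration only; a backend that simply
# wraps a JuMP model should use `JuMPBackend` instead.
struct ExampleBackend <: AbstractTransformationBackend
    model::JuMP.Model                                        # the transformed model
    var_mapping::Dict{GeneralVariableRef, JuMP.VariableRef}  # vref --> backend variable
    constr_mapping::Dict{InfOptConstraintRef, JuMP.ConstraintRef}
end

# Solve the underlying model (invoked after `build_transformation_backend!` has run)
function JuMP.optimize!(backend::ExampleBackend)
    return JuMP.optimize!(backend.model)
end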
################################################################################ # VARIABLE MAPPING API ################################################################################ """ - transformation_model_variable( + transformation_variable( vref::GeneralVariableRef, backend::AbstractTransformationBackend; [kwargs...] ) Return the variable(s) that map to `vref` used by `backend`. This serves as an extension point for new backend types. If needed, keyword arguments can be added. """ -function transformation_model_variable( +function transformation_variable( vref::GeneralVariableRef, backend::AbstractTransformationBackend; kwargs... ) - error("`transformation_model_variable` not defined for backends of type " * + error("`transformation_variable` not defined for backends of type " * "`$(typeof(backend))`.") end """ - transformation_model_variable(vref::GeneralVariableRef; [kwargs...]) + transformation_variable(vref::GeneralVariableRef; [kwargs...]) Returns the variable(s) used by the transformation backend to represent `vref`. Certain backends may also allow the use of keyword arguments. The default backend `TranscriptionOpt` uses the keyword arguments: - `label::Type{<:AbstractSupportLabel} = PublicLabel` - `ndarray::Bool = false` By default only the variables associated with public supports are returned, the full set can be accessed via `label = All`. Moreover, infinite variables are returned as a list corresponding to their supports. However, an n-dimensional array can be obtained via `ndarray = true` which is handy when the variable has multiple infinite parameter dependencies. **Example** ```julia-repl julia> transformation_variable(x) # infinite variable 2-element Array{VariableRef,1}: x(support: 1) x(support: 2) julia> transformation_variable(z) # finite variable z ``` """ -function transformation_model_variable(vref::GeneralVariableRef; kwargs...) +function transformation_variable(vref::GeneralVariableRef; kwargs...) model = JuMP.owner_model(vref) - return transformation_model_variable(vref, model.backend; kwargs...) + return transformation_variable(vref, model.backend; kwargs...) end """ @@ -619,7 +699,7 @@ end Return the supports associated with `vref` in the transformation backend. Errors if [`InfiniteOpt.variable_supports`](@ref) has not been extended for the -transformation backend type or if `vref` is not reformulated in the transformation model. +transformation backend type or if `vref` is not reformulated in the transformation backend. The keyword arguments `label` and `ndarray` are what `TranscriptionOpt` employ and `kwargs` denote extra ones that user extensions may employ in accordance with @@ -652,48 +732,48 @@ end
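# NOTE (editor's sketch, continuing the hypothetical `ExampleBackend` from above):
# with a stored mapping, the variable query extension reduces to a dictionary
# lookup. This assumes each `vref` maps to a single backend variable, unlike
# `TranscriptionOpt` where infinite variables map to vectors of variables.
function transformation_variable(
    vref::GeneralVariableRef,
    backend::ExampleBackend;
    kwargs...
    )
    return backend.var_mapping[vref]
end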
################################################################################ # EXPRESSION MAPPING API ################################################################################ """ - transformation_model_expression(expr, backend::AbstractTransformationBackend; [kwargs...]) + transformation_expression(expr, backend::AbstractTransformationBackend; [kwargs...]) -Return the reformulation expression(s) stored in the transformation model that correspond +Return the reformulation expression(s) stored in the transformation backend that correspond to `expr`. This needs to be defined for extensions that implement a new [`AbstractTransformationBackend`](@ref). Keyword arguments can be added as needed. Note that if `expr` is a `GeneralVariableRef` this just dispatches to -`transformation_model_variable`. +`transformation_variable`. """ -function transformation_model_expression( +function transformation_expression( expr, backend::AbstractTransformationBackend; kwargs... ) - error("`transformation_model_expression` not defined for transformation backends " * + error("`transformation_expression` not defined for transformation backends " * "of type `$(typeof(backend))` and expression type `$(typeof(expr))`.") end # Define for variable reference expressions -function transformation_model_expression( +function transformation_expression( expr::GeneralVariableRef, backend::AbstractTransformationBackend; kwargs... ) - return transformation_model_variable(expr, backend; kwargs...) + return transformation_variable(expr, backend; kwargs...) end """ - transformation_model_expression( + transformation_expression( expr::JuMP.AbstractJuMPScalar; [label::Type{<:AbstractSupportLabel} = PublicLabel, ndarray::Bool = false, kwargs...] ) -Return the reformulation expression(s) stored in the transformation model that correspond +Return the reformulation expression(s) stored in the transformation backend that correspond to `expr`. Also errors if no such expression can be found in -the transformation model (meaning one or more of the underlying variables have not +the transformation backend (meaning one or more of the underlying variables have not been transformed). The keyword arguments `label` and `ndarray` are what `TranscriptionOpt` employ and `kwargs` denote extra ones that user extensions may employ in accordance with -their implementation of [`transformation_model_expression`](@ref). Errors if such an +their implementation of [`transformation_expression`](@ref). Errors if such an extension has not been written. By default only the expressions associated with public supports are returned, the @@ -705,16 +785,16 @@ infinite parameter dependencies. The corresponding supports are obtained via **Example** ```julia-repl -julia> transformation_model_expression(my_expr) # finite expression +julia> transformation_expression(my_expr) # finite expression x(support: 1) - y ``` """ -function transformation_model_expression(expr::JuMP.AbstractJuMPScalar; kwargs...) +function transformation_expression(expr::JuMP.AbstractJuMPScalar; kwargs...) model = JuMP.owner_model(expr) if isnothing(model) return zero(JuMP.AffExpr) + JuMP.constant(expr) else - return transformation_model_expression(expr, model.backend; kwargs...) + return transformation_expression(expr, model.backend; kwargs...) end end @@ -731,7 +811,7 @@ stored in `backend`. Keyword arguments can be added as needed. Note that if `expr` is a `GeneralVariableRef` this just dispatches to `variable_supports`. """ function expression_supports(expr, backend::AbstractTransformationBackend; kwargs...) - error("`expression_supports` not implemented for transformation backend of type " * "`$(typeof(backend))` and/or expressions of type `$(typeof(expr))`.") end @@ -753,7 +833,7 @@ end ) Return the support associated with `expr`. Errors if `expr` is -not associated with the constraint mappings stored in the transformation model. +not associated with the expression mappings stored in the transformation backend. The keyword arguments `label` and `ndarray` are what `TranscriptionOpt` employ and `kwargs` denote extra ones that user extensions may employ in accordance with @@ -787,41 +867,41 @@ end
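# NOTE (editor's sketch, continuing the hypothetical `ExampleBackend`): affine
# expressions can be rebuilt term by term via the variable mapping; quadratic
# and nonlinear expressions would be handled analogously.
function transformation_expression(
    expr::JuMP.GenericAffExpr,
    backend::ExampleBackend;
    kwargs...
    )
    new_expr = zero(JuMP.AffExpr) + JuMP.constant(expr)
    for (coef, vref) in JuMP.linear_terms(expr)
        new_expr += coef * transformation_variable(vref, backend; kwargs...)
    end
    return new_expr
end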
################################################################################ # CONSTRAINT MAPPING API ################################################################################ """ - transformation_model_constraint( + transformation_constraint( cref::InfOptConstraintRef, backend::AbstractTransformationBackend; [kwargs...] ) -Return the reformulation constraint(s) stored in the transformation model +Return the reformulation constraint(s) stored in the transformation backend that correspond to `cref`. This needs to be defined for extensions that -implement a custom transformation model type. Keyword arguments can be +implement a custom transformation backend type. Keyword arguments can be added as needed. """ -function transformation_model_constraint( +function transformation_constraint( cref::InfOptConstraintRef, backend::AbstractTransformationBackend; kwargs... ) - error("`transformation_model_constraint` not implemented for " * - "transformation model backends of type `$(typeof(backend))`.") + error("`transformation_constraint` not implemented for " * + "transformation backends of type `$(typeof(backend))`.") end """ - transformation_model_constraint( + transformation_constraint( cref::InfOptConstraintRef; [label::Type{<:AbstractSupportLabel} = PublicLabel, ndarray::Bool = false, kwargs...] ) -Return the reformulation constraint(s) stored in the transformation model that +Return the reformulation constraint(s) stored in the transformation backend that correspond to `cref`. Errors if no such constraint can be found in -the transformation model. +the transformation backend. The keyword arguments `label` and `ndarray` are what `TranscriptionOpt` employ and `kwargs` denote extra ones that user extensions may employ in accordance with -their implementation of [`transformation_model_constraint`](@ref). Errors if such an +their implementation of [`transformation_constraint`](@ref). Errors if such an extension has not been written. By default only the constraints associated with public supports are returned, the @@ -833,16 +913,16 @@ infinite parameter dependencies. The corresponding supports are obtained via **Example** ```julia-repl -julia> transformation_model_constraint(c1) # finite constraint +julia> transformation_constraint(c1) # finite constraint c1 : x(support: 1) - y <= 3.0 ``` """ -function transformation_model_constraint( +function transformation_constraint( cref::InfOptConstraintRef; kwargs... ) backend = JuMP.owner_model(cref).backend - return transformation_model_constraint(cref, backend; kwargs...) + return transformation_constraint(cref, backend; kwargs...) end """ @@ -861,7 +941,7 @@ function constraint_supports( cref::InfOptConstraintRef, backend::AbstractTransformationBackend; kwargs... ) - error("`constraint_supports` not implemented for transformation model backends " * + error("`constraint_supports` not implemented for transformation backends " * "of type `$(typeof(backend))`.") end @@ -872,7 +952,7 @@ end kwargs...]) Return the support associated with `cref`. Errors if `cref` is -not associated with the constraint mappings stored in the transformation model. +not associated with the constraint mappings stored in the transformation backend. The keyword arguments `label` and `ndarray` are what `TranscriptionOpt` employ and `kwargs` denote extra ones that user extensions may employ in accordance with
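# NOTE (editor's sketch, continuing the hypothetical `ExampleBackend`): constraint
# queries mirror the variable case with a second lookup table.
function transformation_constraint(
    cref::InfOptConstraintRef,
    backend::ExampleBackend;
    kwargs...
    )
    return backend.constr_mapping[cref]
end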
diff --git a/src/results.jl b/src/results.jl index 47609ec3..5826dbe9 100644 --- a/src/results.jl +++ b/src/results.jl @@ -5,7 +5,7 @@ for func in (:termination_status, :raw_status, :solve_time, :simplex_iterations, :barrier_iterations, :node_count, :objective_bound, :relative_gap, :result_count) - @eval begin + @eval begin @doc """ JuMP.$($func)(backend::AbstractTransformationBackend) @@ -85,9 +85,9 @@ end Map the value(s) of `ref` to its counterpart in the `backend`. Here `ref` can be a variable reference or a constraint reference, and methods for both are needed. No extension is needed for [`JuMPBackend`](@ref)s that support -`transformation_model_variable`, `transformation_model_expression`, and -`transformation_model_constraint`. In this case, `transformation_model_variable`, -`transformation_model_expression`, and `transformation_model_constraint` are +`transformation_variable`, `transformation_expression`, and +`transformation_constraint`. In this case, `transformation_variable`, +`transformation_expression`, and `transformation_constraint` are used to make these mappings by default where `kwargs` are passed on to these functions. For mapping the values of infinite parameters, refer to [`map_infinite_parameter_value`](@ref). @@ -102,14 +102,14 @@ end _get_jump_value(v, result) = JuMP.value(v, result = result) _get_jump_value(v::Real, result) = v -# Default method that depends on transformation_model_variable --> making extensions easier +# Default method that depends on transformation_variable --> making extensions easier function map_value( vref::GeneralVariableRef, backend::JuMPBackend; result::Int = 1, kwargs... ) - opt_vref = transformation_model_variable(vref, backend; kwargs...) + opt_vref = transformation_variable(vref, backend; kwargs...) if opt_vref isa AbstractArray return map(v -> _get_jump_value(v, result), opt_vref) else @@ -117,14 +117,14 @@ function map_value( end end -# Default method that depends on transformation_model_expression --> making extensions easier +# Default method that depends on transformation_expression --> making extensions easier function map_value( expr::JuMP.AbstractJuMPScalar, backend::JuMPBackend; result::Int = 1, kwargs... ) - opt_expr = transformation_model_expression(expr, backend; kwargs...) + opt_expr = transformation_expression(expr, backend; kwargs...) if opt_expr isa AbstractArray return map(v -> _get_jump_value(v, result), opt_expr) else @@ -132,14 +132,14 @@ function map_value( end end -# Default method that depends on transformation_model_constraint --> making extensions easier +# Default method that depends on transformation_constraint --> making extensions easier function map_value( cref::InfOptConstraintRef, backend::JuMPBackend; result::Int = 1, kwargs... ) - opt_cref = transformation_model_constraint(cref, backend; kwargs...) + opt_cref = transformation_constraint(cref, backend; kwargs...) if opt_cref isa AbstractArray return map(c -> _get_jump_value(c, result), opt_cref) else
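# NOTE (editor's sketch, hypothetical): a backend that is not a `JuMPBackend`
# does not inherit the default methods above and would extend `map_value`
# itself, e.g. by reading results off its stored mapping. `ExampleBackend` is
# the assumed type from the sketches in src/backends.jl.
function map_value(vref::GeneralVariableRef, backend::ExampleBackend; kwargs...)
    return JuMP.value(backend.var_mapping[vref])
end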
@@ -209,12 +209,12 @@ infinite parameter dependencies. To provide context for the values, it may be helpful to also query the variable's `parameter_refs` and `supports` which will have a one-to-one correspondence with -the value(s). It may also be helpful to query via [`transformation_model_variable`](@ref) +the value(s). It may also be helpful to query via [`transformation_variable`](@ref) to retrieve the variable(s) that these values are based on. These functions should all be called with the same keyword arguments for consistency. For extensions, this only works if -[`transformation_model_variable`](@ref) has been extended correctly and/or +[`transformation_variable`](@ref) has been extended correctly and/or [`map_value`](@ref) has been extended for variables. **Example** @@ -249,12 +249,12 @@ infinite parameter dependencies. To provide context for the values, it may be helpful to also query the expression's `parameter_refs` and `supports` which will have a one-to-one correspondence with -the value(s). It may also be helpful to query via [`transformation_model_expression`](@ref) +the value(s). It may also be helpful to query via [`transformation_expression`](@ref) to retrieve the expression(s) that these values are based on. These functions should all be called with the same keyword arguments for consistency. For extensions, this only works if -[`transformation_model_expression`](@ref) has been extended correctly and/or +[`transformation_expression`](@ref) has been extended correctly and/or [`map_value`](@ref) has been extended for expressions. **Example** @@ -312,12 +312,12 @@ infinite parameter dependencies. To provide context for the values, it may be helpful to also query the constraint's `parameter_refs` and `supports` which will have a one-to-one correspondence with -the value(s). It may also be helpful to query via [`transformation_model_constraint`](@ref) +the value(s). It may also be helpful to query via [`transformation_constraint`](@ref) to retrieve the constraint(s) that these values are based on. These functions should all be called with the same keyword arguments for consistency. For extensions, this only works if -[`transformation_model_constraint`](@ref) has been extended correctly and/or +[`transformation_constraint`](@ref) has been extended correctly and/or [`map_value`](@ref) has been extended for constraints. **Example** @@ -338,10 +338,10 @@ end ################################################################################ # BOILERPLATE REF QUERIES ################################################################################ for (Ref, func, mapper) in ( - (:GeneralVariableRef, :reduced_cost, :transformation_model_variable), - (:GeneralVariableRef, :optimizer_index, :transformation_model_variable), - (:InfOptConstraintRef, :optimizer_index, :transformation_model_constraint), - (:InfOptConstraintRef, :shadow_price, :transformation_model_constraint) + (:GeneralVariableRef, :reduced_cost, :transformation_variable), + (:GeneralVariableRef, :optimizer_index, :transformation_variable), + (:InfOptConstraintRef, :optimizer_index, :transformation_constraint), + (:InfOptConstraintRef, :shadow_price, :transformation_constraint) ) @eval begin @doc """ @@ -422,7 +422,7 @@ end Map the dual(s) of `cref` to its counterpart in the `backend`. No extension is needed for [`JuMPBackend`](@ref)s that support -`transformation_model_constraint`. In this case, `transformation_model_constraint` +`transformation_constraint`. In this case, `transformation_constraint` is used to make these mappings by default where `kwargs` are passed on to these functions. """ @@ -443,7 +443,7 @@ function map_dual( result::Int = 1, kwargs... ) - opt_cref = transformation_model_constraint(cref, backend; kwargs...) + opt_cref = transformation_constraint(cref, backend; kwargs...) if opt_cref isa AbstractArray return map(c -> JuMP.dual(c, result = result), opt_cref) else @@ -473,12 +473,12 @@ infinite parameter dependencies. To provide context for the duals, it may be helpful to also query the constraint's `parameter_refs` and `supports` which will have a one-to-one correspondence with -the value(s). It may also be helpful to query via [`transformation_model_constraint`](@ref) +the value(s). It may also be helpful to query via [`transformation_constraint`](@ref) to retrieve the constraint(s) that these values are based on. These functions should all be called with the same keyword arguments for consistency.
For extensions, this only works if -[`transformation_model_constraint`](@ref) has been extended correctly and/or +[`transformation_constraint`](@ref) has been extended correctly and/or [`map_dual`](@ref) has been extended for constraints. **Example** @@ -518,8 +518,8 @@ report[ref::[GeneralVariableRef/InfOptConstraintRef]; ``` This is enabled for new transformation backends by appropriately -extending [`transformation_model_variable`](@ref) and -[`transformation_model_constraint`](@ref). +extending [`transformation_variable`](@ref) and +[`transformation_constraint`](@ref). **Fields** - `opt_report::JuMP.SensitivityReport`: The LP sensitivity captured from the backend. @@ -531,7 +531,7 @@ end # Extend Base.getindex for variables on InfOptSensitivityReport function Base.getindex(s::InfOptSensitivityReport, v::GeneralVariableRef; kwargs...) backend = JuMP.owner_model(v).backend - opt_vref = transformation_model_variable(v, backend; kwargs...) + opt_vref = transformation_variable(v, backend; kwargs...) if opt_vref isa AbstractArray return map(v -> s.opt_report[v], opt_vref) else @@ -542,7 +542,7 @@ end # Extend Base.getindex for constraints on InfOptSensitivityReport function Base.getindex(s::InfOptSensitivityReport, c::InfOptConstraintRef; kwargs...) backend = JuMP.owner_model(c).backend - opt_cref = transformation_model_constraint(c, backend; kwargs...) + opt_cref = transformation_constraint(c, backend; kwargs...) if opt_cref isa AbstractArray return map(c -> s.opt_report[c], opt_cref) else diff --git a/test/TranscriptionOpt/model.jl b/test/TranscriptionOpt/model.jl index fc985f06..7117c377 100644 --- a/test/TranscriptionOpt/model.jl +++ b/test/TranscriptionOpt/model.jl @@ -184,12 +184,12 @@ end @test IOTO.transcription_variable(x0) == b @test IOTO.transcription_variable(f2, ndarray = true) == ones(2, 2) end - # test transformation_model_variable extension - @testset "transformation_model_variable" begin - @test transformation_model_variable(y, tb, label = All) == a - @test transformation_model_variable(x, tb, label = All) == [a, b, c, d, e, f] - @test transformation_model_variable(x, tb) == [a, b, c, d, e, f] - @test transformation_model_variable(x0, tb) == b + # test transformation_variable extension + @testset "transformation_variable" begin + @test transformation_variable(y, tb, label = All) == a + @test transformation_variable(x, tb, label = All) == [a, b, c, d, e, f] + @test transformation_variable(x, tb) == [a, b, c, d, e, f] + @test transformation_variable(x0, tb) == b end # test variable_supports for infinite variable with 2 inputs @testset "variable_supports (Backend, Infinite)" begin @@ -559,11 +559,11 @@ end @test IOTO.transcription_expression(x0 - y) == b - a @test IOTO.transcription_expression(zero(QuadExpr) + 2) == zero(AffExpr) + 2 end - # test transformation_model_expression - @testset "transformation_model_expression" begin - @test transformation_model_expression(x0) == b - @test transformation_model_expression(x0 - y) == b - a - @test transformation_model_expression(zero(QuadExpr) + 2) == zero(AffExpr) + 2 + # test transformation_expression + @testset "transformation_expression" begin + @test transformation_expression(x0) == b + @test transformation_expression(x0 - y) == b - a + @test transformation_expression(zero(QuadExpr) + 2) == zero(AffExpr) + 2 end # test expression_supports @testset "expression_supports" begin @@ -634,11 +634,11 @@ end @test IOTO.transcription_constraint(c2) == tc3 @test IOTO.transcription_constraint(c3, label = All) == tc4 end - # test 
transformation_model_constraint extension - @testset "transformation_model_constraint" begin - @test transformation_model_constraint(c1, tb, label = All) == [tc1, tc2] - @test transformation_model_constraint(c2, tb) == tc3 - @test transformation_model_constraint(c3, tb) == tc4 + # test transformation_constraint extension + @testset "transformation_constraint" begin + @test transformation_constraint(c1, tb, label = All) == [tc1, tc2] + @test transformation_constraint(c2, tb) == tc3 + @test transformation_constraint(c3, tb) == tc4 end # test constraint_supports @testset "constraint_supports" begin diff --git a/test/TranscriptionOpt/transcribe.jl b/test/TranscriptionOpt/transcribe.jl index d0996bbf..38007b55 100644 --- a/test/TranscriptionOpt/transcribe.jl +++ b/test/TranscriptionOpt/transcribe.jl @@ -606,7 +606,7 @@ end set_silent(m) set_time_limit_sec(m, 42.) # test normal usage - @test isa(build_transformation_backend!(m, m.backend), Nothing) + @test isa(build_transformation_backend!(m), Nothing) @test transformation_backend_ready(m) @test num_variables(m.backend.model) == 44 @test time_limit_sec(m.backend) == 42 diff --git a/test/backend_mappings.jl b/test/backend_mappings.jl index 37c03763..27682b61 100644 --- a/test/backend_mappings.jl +++ b/test/backend_mappings.jl @@ -40,8 +40,8 @@ @test_logs (:warn, warn) build_transformation_backend!(m) end -# Test transformation model querying methods -@testset "Optimizer Model Queries" begin +# Test transformation backend querying methods +@testset "Transformation Backend Queries" begin # initialize model m = InfiniteModel() @infinite_parameter(m, par in [0, 1], supports = [0, 1], @@ -59,17 +59,19 @@ end build_transformation_backend!(m) tb = m.backend tdata = IOTO.transcription_data(tb) - # Test transformation_model_variable - @testset "transformation_model_variable" begin + # Test transformation_variable + @testset "transformation_variable" begin # test normal usage - @test transformation_model_variable(x, label = All) == IOTO.transcription_variable(x, label = All) - @test transformation_model_variable(x, label = All, ndarray = true) == IOTO.transcription_variable(x, label = All, ndarray = true) - @test transformation_model_variable(x0) == IOTO.transcription_variable(x0) - @test transformation_model_variable(z) == IOTO.transcription_variable(z) - @test transformation_model_variable(d1, label = InternalLabel) == IOTO.transcription_variable(d1, label = InternalLabel) - @test transformation_model_variable(f) == [0, sin(1)] + @test transformation_variable(x, label = All) == IOTO.transcription_variable(x, label = All) + @test transformation_variable(x, label = All, ndarray = true) == IOTO.transcription_variable(x, label = All, ndarray = true) + @test transformation_variable(x0) == IOTO.transcription_variable(x0) + @test transformation_variable(z) == IOTO.transcription_variable(z) + @test transformation_variable(d1, label = InternalLabel) == IOTO.transcription_variable(d1, label = InternalLabel) + @test transformation_variable(f) == [0, sin(1)] + # test deprecation + @test (@test_deprecated optimizer_model_variable(z)) == transformation_variable(z) # test fallback - @test_throws ErrorException transformation_model_variable(x, TestBackend(), my_key = true) + @test_throws ErrorException transformation_variable(x, TestBackend(), my_key = true) end # Test variable_supports @testset "variable_supports" begin @@ -89,26 +91,28 @@ end @test supports(d1, label = InternalLabel) == [(0.5,)] @test supports(f, label = All) == [(0.,), (0.5,), (1.,)] end - # Test 
transformation_model_expression - @testset "transformation_model_expression" begin + # Test transformation_expression + @testset "transformation_expression" begin # test variable references - @test transformation_model_expression(x, label = All) == IOTO.transcription_variable(x, label = All) - @test transformation_model_expression(z) == IOTO.transcription_variable(z) - @test transformation_model_expression(x0) == IOTO.transcription_variable(x0) - @test transformation_model_expression(x0, ndarray = true) == IOTO.transcription_variable(x0) + @test transformation_expression(x, label = All) == IOTO.transcription_variable(x, label = All) + @test transformation_expression(z) == IOTO.transcription_variable(z) + @test transformation_expression(x0) == IOTO.transcription_variable(x0) + @test transformation_expression(x0, ndarray = true) == IOTO.transcription_variable(x0) # test expression without variables expr = zero(JuMP.GenericAffExpr{Float64, GeneralVariableRef}) + 42 - @test transformation_model_expression(expr) == zero(AffExpr) + 42 + @test transformation_expression(expr) == zero(AffExpr) + 42 # test normal expressions xt = IOTO.transcription_variable(x, label = All) zt = IOTO.transcription_variable(z) - @test transformation_model_expression(x^2 + z) == [xt[1]^2 + zt, xt[3]^2 + zt] - @test transformation_model_expression(x^2 + z, ndarray = true) == [xt[1]^2 + zt, xt[3]^2 + zt] - @test transformation_model_expression(x^2 + z, label = All) == [xt[1]^2 + zt, xt[2]^2 + zt, xt[3]^2 + zt] - @test transformation_model_expression(2z - 3) == 2zt - 3 - @test transformation_model_expression(2 * f) == [zero(AffExpr), zero(AffExpr) + sin(1) * 2] + @test transformation_expression(x^2 + z) == [xt[1]^2 + zt, xt[3]^2 + zt] + @test transformation_expression(x^2 + z, ndarray = true) == [xt[1]^2 + zt, xt[3]^2 + zt] + @test transformation_expression(x^2 + z, label = All) == [xt[1]^2 + zt, xt[2]^2 + zt, xt[3]^2 + zt] + @test transformation_expression(2z - 3) == 2zt - 3 + @test transformation_expression(2 * f) == [zero(AffExpr), zero(AffExpr) + sin(1) * 2] + # test deprecation + @test (@test_deprecated optimizer_model_expression(2z-4)) == 2zt - 4 # test fallback - @test_throws ErrorException transformation_model_expression(c1, TestBackend(), my_key = true) + @test_throws ErrorException transformation_expression(c1, TestBackend(), my_key = true) end # Test expression_supports @testset "expression_supports" begin @@ -128,15 +132,17 @@ end expr = zero(JuMP.GenericAffExpr{Float64, GeneralVariableRef}) + 42 @test supports(expr, label = All) == () end - # Test transformation_model_constraint - @testset "transformation_model_constraint" begin + # Test transformation_constraint + @testset "transformation_constraint" begin # test normal usage - @test transformation_model_constraint(c1) == IOTO.transcription_constraint(c1) - @test transformation_model_constraint(c2, label = All) == IOTO.transcription_constraint(c2, label = All) - @test transformation_model_constraint(c2, label = All, ndarray = true) == IOTO.transcription_constraint(c2, label = All, ndarray = true) - @test transformation_model_constraint(c3) == IOTO.transcription_constraint(c3) + @test transformation_constraint(c1) == IOTO.transcription_constraint(c1) + @test transformation_constraint(c2, label = All) == IOTO.transcription_constraint(c2, label = All) + @test transformation_constraint(c2, label = All, ndarray = true) == IOTO.transcription_constraint(c2, label = All, ndarray = true) + @test transformation_constraint(c3) == IOTO.transcription_constraint(c3) + # 
test deprecation + @test (@test_deprecated optimizer_model_constraint(c1)) == transformation_constraint(c1) # test fallback - @test_throws ErrorException transformation_model_constraint(c1, TestBackend(), my_key = true) + @test_throws ErrorException transformation_constraint(c1, TestBackend(), my_key = true) end # Test constraint_supports @testset "constraint_supports" begin diff --git a/test/backend_setup.jl b/test/backend_setup.jl index 7c9b1b1b..e822c448 100644 --- a/test/backend_setup.jl +++ b/test/backend_setup.jl @@ -21,6 +21,7 @@ @test isa(transformation_model(m), Model) @test transformation_model(jump_backend) == bmodel @test_throws ErrorException transformation_model(TestBackend()) + @test (@test_deprecated optimizer_model(m)) == transformation_model(m) end # transformation_data @testset "transformation_data" begin @@ -28,6 +29,10 @@ @test transformation_data(jump_backend) == 42 @test_throws ErrorException transformation_data(TestBackend()) end + # transformation_backend + @testset "transformation_backend" begin + @test transformation_backend(m) isa TranscriptionBackend + end # set_transformation_backend @testset "set_transformation_backend" begin current_backend = m.backend @@ -44,6 +49,8 @@ @test get_attribute(jump_backend, MOI.TimeLimitSec()) == 10 @test_throws ErrorException get_attribute(TestBackend(), MOI.TimeLimitSec()) @test_throws ErrorException set_attribute(TestBackend(), MOI.TimeLimitSec(), 10.) + @test set_optimizer_attribute(m, MOI.TimeLimitSec(), 12.) isa Nothing + @test get_optimizer_attribute(m, MOI.TimeLimitSec()) == 12 end # Base.empty! @testset "Base.empty!" begin diff --git a/test/extensions.jl b/test/extensions.jl index d7a7e68e..5aeb9890 100644 --- a/test/extensions.jl +++ b/test/extensions.jl @@ -44,8 +44,8 @@ @test @constraint(m, x + par <= 0) isa InfOptConstraintRef # transcribe the model - @test build_optimizer_model!(m) isa Nothing - @test num_variables(optimizer_model(m)) == 4 + @test build_transformation_backend!(m) isa Nothing + @test num_variables(m.backend.model) == 4 end # test using the new derivative evaluation method @@ -117,8 +117,8 @@ end # test transcription @test @constraint(m, z == measure(x, new_data1)) isa InfOptConstraintRef - @test build_optimizer_model!(m) isa Nothing - @test num_variables(optimizer_model(m)) == 6 + @test build_transformation_backend!(m) isa Nothing + @test num_variables(m.backend.model) == 6 # test deletion @test_throws ErrorException delete(m, t) @@ -174,10 +174,10 @@ end @test num_supports(t, label = All) == 10 end -# Test otpimizer model extensions -@testset "Optimizer Model" begin +# Test Backend extensions +@testset "Backend" begin # load in the extension - include("./extensions/optimizer_model.jl") + include("./extensions/backend.jl") # setup the infinite model m = InfiniteModel() @@ -191,48 +191,49 @@ end @constraint(m, c3, x0 + y == 5) @objective(m, Min, y) - # test optimizer model constructor - @test NewReformModel() isa Model - @test NewReformModel().ext[:ReformData] isa NewReformData + # test backend constructor + @test NewReformBackend() isa NewReformBackend + @test NewReformBackend().data isa NewReformData - # test data extraction - @test_throws ErrorException reform_data(optimizer_model(m)) - @test reform_data(NewReformModel()) isa NewReformData + # test basic extraction + @test transformation_model(NewReformBackend()) isa Model + @test transformation_data(NewReformBackend()) isa NewReformData - # test replacing the optimizer model - @test set_optimizer_model(m, NewReformModel()) isa Nothing - @test 
haskey(optimizer_model(m).ext, :ReformData) + # test replacing the backend + b = NewReformBackend() + @test set_transformation_backend(m, b) isa Nothing + @test transformation_backend(m) === b # test making InfiniteModel with the new backend - @test InfiniteModel(OptimizerModel = NewReformModel) isa InfiniteModel - @test optimizer_model_key(InfiniteModel(OptimizerModel = NewReformModel)) == :ReformData + @test InfiniteModel(NewReformBackend()) isa InfiniteModel + @test transformation_backend(InfiniteModel(NewReformBackend())) isa NewReformBackend # test retrieval errors - @test_throws ErrorException optimizer_model_variable(x) - @test_throws ErrorException optimizer_model_variable(x0) - @test_throws ErrorException optimizer_model_variable(y) - @test_throws ErrorException optimizer_model_constraint(c1) - @test_throws ErrorException optimizer_model_constraint(c2) - @test_throws ErrorException optimizer_model_constraint(c3) + @test_throws ErrorException transformation_variable(x) + @test_throws ErrorException transformation_variable(x0) + @test_throws ErrorException transformation_variable(y) + @test_throws ErrorException transformation_constraint(c1) + @test_throws ErrorException transformation_constraint(c2) + @test_throws ErrorException transformation_constraint(c3) @test_throws ErrorException supports(x) @test_throws ErrorException supports(c1) @test_throws ErrorException supports(c2) @test_throws ErrorException supports(c3) - # test build_optimizer_model! - @test build_optimizer_model!(m, my_kwarg = true) isa Nothing - @test haskey(optimizer_model(m).ext, :ReformData) - @test num_variables(optimizer_model(m)) == 13 + # test build_transformation_backend! + @test build_transformation_backend!(m, my_kwarg = true) isa Nothing + @test transformation_backend_ready(m) + @test num_variables(transformation_model(m)) == 13 # test retrievals - @test optimizer_model_variable(x, my_kwarg = true) isa Vector{VariableRef} - @test optimizer_model_variable(x0, my_kwarg = true) isa VariableRef - @test optimizer_model_variable(y, my_kwarg = true) isa VariableRef - @test optimizer_model_variable(meas, my_kwarg = true) isa Vector{VariableRef} - @test optimizer_model_constraint(c1, my_kwarg = true) isa Vector{<:ConstraintRef} - @test optimizer_model_constraint(c2, my_kwarg = true) isa Vector{<:ConstraintRef} - @test optimizer_model_constraint(c3, my_kwarg = true) isa Vector{<:ConstraintRef} - @test optimizer_model_expression(x^2) == zero(AffExpr) + @test transformation_variable(x, my_kwarg = true) isa Vector{VariableRef} + @test transformation_variable(x0, my_kwarg = true) isa VariableRef + @test transformation_variable(y, my_kwarg = true) isa VariableRef + @test transformation_variable(meas, my_kwarg = true) isa Vector{VariableRef} + @test transformation_constraint(c1, my_kwarg = true) isa Vector{<:ConstraintRef} + @test transformation_constraint(c2, my_kwarg = true) isa Vector{<:ConstraintRef} + @test transformation_constraint(c3, my_kwarg = true) isa Vector{<:ConstraintRef} + @test transformation_expression(x^2) == zero(AffExpr) @test supports(x, my_kwarg = true) == [(0.,), (1.,)] @test supports(y) == () @test supports(meas) == [(-1.,), (-2.,)] @@ -247,7 +248,7 @@ end @test set_silent(m) isa Nothing @test set_time_limit_sec(m, 42.)
isa Nothing @test optimize!(m) isa Nothing - @test get_optimizer_attribute(m, MOI.Silent()) + @test get_attribute(m, MOI.Silent()) # prepare for result queries mockoptimizer = JuMP.backend(m).optimizer.model @@ -260,29 +261,29 @@ end MOI.set(mockoptimizer, MOI.ObjectiveValue(), -1.0) MOI.set(mockoptimizer, MOI.ObjectiveBound(), 2.0) MOI.set(mockoptimizer, MOI.VariablePrimal(), - JuMP.optimizer_index(optimizer_model_variable(x)[1]), -1.0) + JuMP.optimizer_index(transformation_variable(x)[1]), -1.0) MOI.set(mockoptimizer, MOI.VariablePrimal(), - JuMP.optimizer_index(optimizer_model_variable(x)[2]), 0.0) + JuMP.optimizer_index(transformation_variable(x)[2]), 0.0) MOI.set(mockoptimizer, MOI.VariablePrimal(), - JuMP.optimizer_index(optimizer_model_variable(y)), 1.0) + JuMP.optimizer_index(transformation_variable(y)), 1.0) MOI.set(mockoptimizer, MOI.VariablePrimal(), - JuMP.optimizer_index(optimizer_model_variable(x0)), 42.) + JuMP.optimizer_index(transformation_variable(x0)), 42.) MOI.set(mockoptimizer, MOI.VariablePrimal(), - JuMP.optimizer_index(optimizer_model_variable(meas)[1]), 2.0) + JuMP.optimizer_index(transformation_variable(meas)[1]), 2.0) MOI.set(mockoptimizer, MOI.VariablePrimal(), - JuMP.optimizer_index(optimizer_model_variable(meas)[2]), -2.0) + JuMP.optimizer_index(transformation_variable(meas)[2]), -2.0) MOI.set(mockoptimizer, MOI.ConstraintDual(), - JuMP.optimizer_index(optimizer_model_constraint(c1)[1]), -1.0) + JuMP.optimizer_index(transformation_constraint(c1)[1]), -1.0) MOI.set(mockoptimizer, MOI.ConstraintDual(), - JuMP.optimizer_index(optimizer_model_constraint(c1)[2]), -1.0) + JuMP.optimizer_index(transformation_constraint(c1)[2]), -1.0) MOI.set(mockoptimizer, MOI.ConstraintDual(), - JuMP.optimizer_index(optimizer_model_constraint(c2)[1]), 0.0) + JuMP.optimizer_index(transformation_constraint(c2)[1]), 0.0) MOI.set(mockoptimizer, MOI.ConstraintDual(), - JuMP.optimizer_index(optimizer_model_constraint(c2)[2]), -1.0) + JuMP.optimizer_index(transformation_constraint(c2)[2]), -1.0) MOI.set(mockoptimizer, MOI.ConstraintDual(), - JuMP.optimizer_index(optimizer_model_constraint(c3)[1]), 0.0) + JuMP.optimizer_index(transformation_constraint(c3)[1]), 0.0) MOI.set(mockoptimizer, MOI.ConstraintDual(), - JuMP.optimizer_index(optimizer_model_constraint(c3)[2]), -1.0) + JuMP.optimizer_index(transformation_constraint(c3)[2]), -1.0) # test result queries @test termination_status(m) == MOI.OPTIMAL @@ -303,13 +304,13 @@ end @test dual(c1) == [-1, -1] @test dual(c2) == [0., -1.] @test dual(c3) == [0., -1.] - @test optimizer_index(x) == optimizer_index.(optimizer_model_variable(x)) - @test optimizer_index(x0) == optimizer_index(optimizer_model_variable(x0)) - @test optimizer_index(y) == optimizer_index(optimizer_model_variable(y)) - @test optimizer_index(c1) == optimizer_index.(optimizer_model_constraint(c1)) - @test optimizer_index(c2) == optimizer_index.(optimizer_model_constraint(c2)) - @test optimizer_index(c3) == optimizer_index.(optimizer_model_constraint(c3)) - @test shadow_price(c1) == [-1, -1] - @test shadow_price(c2) == [0., -1.] - @test shadow_price(c3) == [0., -1.] 
+#    @test optimizer_index(x) == optimizer_index.(transformation_variable(x))
+#    @test optimizer_index(x0) == optimizer_index(transformation_variable(x0))
+#    @test optimizer_index(y) == optimizer_index(transformation_variable(y))
+#    @test optimizer_index(c1) == optimizer_index.(transformation_constraint(c1))
+#    @test optimizer_index(c2) == optimizer_index.(transformation_constraint(c2))
+#    @test optimizer_index(c3) == optimizer_index.(transformation_constraint(c3))
+#    @test shadow_price(c1) == [-1, -1]
+#    @test shadow_price(c2) == [0., -1.]
+#    @test shadow_price(c3) == [0., -1.]
 end
diff --git a/test/extensions/backend.jl b/test/extensions/backend.jl
new file mode 100644
index 00000000..34667be5
--- /dev/null
+++ b/test/extensions/backend.jl
@@ -0,0 +1,366 @@
+## A template for defining a new transformation backend
+
+# NOTE THAT USING `JuMPBackend` GREATLY SIMPLIFIES THIS (SEE DOCS)
+
+# Define a mutable struct for storing infinite model to backend mappings
+# plus other needed information
+mutable struct NewReformData
+    # Variable mapping (REPLACE AND REFORMAT BELOW AS NEEDED)
+    infvar_mappings::Dict{GeneralVariableRef, Vector{JuMP.VariableRef}}
+    finvar_mappings::Dict{GeneralVariableRef, JuMP.VariableRef}
+
+    # Map other variable info (REPLACE AND REFORMAT BELOW AS NEEDED)
+    infvar_to_supports::Dict{GeneralVariableRef, Vector{<:Tuple}}
+
+    # Measure mappings and other info (ONLY NEEDED TO ENABLE MEASURE QUERIES)
+    meas_mappings::Dict{GeneralVariableRef, Vector{JuMP.VariableRef}}
+    meas_to_supports::Dict{GeneralVariableRef, Vector{<:Tuple}}
+
+    # Constraint mapping (REPLACE AND REFORMAT BELOW AS NEEDED)
+    constr_mappings::Dict{InfOptConstraintRef, Vector{JuMP.ConstraintRef}}
+
+    # Map other constraint info (REPLACE AND REFORMAT BELOW AS NEEDED)
+    constr_to_supports::Dict{InfOptConstraintRef, Vector{<:Tuple}}
+
+    # ADD OTHER MAPPING/MODEL INFORMATION HERE
+
+    # default constructor
+    function NewReformData() # REFORMAT BELOW IN ACCORDANCE WITH ABOVE ATTRIBUTES
+        return new(Dict{GeneralVariableRef, Vector{JuMP.VariableRef}}(),
+                   Dict{GeneralVariableRef, JuMP.VariableRef}(),
+                   Dict{GeneralVariableRef, Vector{Tuple}}(),
+                   Dict{GeneralVariableRef, Vector{JuMP.VariableRef}}(),
+                   Dict{GeneralVariableRef, Vector{Tuple}}(),
+                   Dict{InfOptConstraintRef, Vector{JuMP.ConstraintRef}}(),
+                   Dict{InfOptConstraintRef, Vector{Tuple}}()
+                   )
+    end
+end
+
+# Extend Base.empty!
+function Base.empty!(data::NewReformData)
+    # REFORMAT BELOW AS APPROPRIATE
+    empty!(data.infvar_mappings)
+    empty!(data.finvar_mappings)
+    empty!(data.infvar_to_supports)
+    empty!(data.meas_mappings)
+    empty!(data.meas_to_supports)
+    empty!(data.constr_mappings)
+    empty!(data.constr_to_supports)
+    return data
+end
+
+# Define the new backend (Note that backends based on JuMP should actually use JuMPBackend)
+struct NewReformBackend <: AbstractTransformationBackend
+    model::JuMP.Model
+    data::NewReformData
+end
+
+# Make a constructor for the new backend
+function NewReformBackend(args...; kwargs...) # ADD EXPLICIT ARGS AS NEEDED
+    # CREATE THE BACKEND AS NEEDED
+    return NewReformBackend(JuMP.Model(args...; kwargs...), NewReformData())
+end
+
+# Extend Base.empty! for the backend (not needed for `JuMPBackend`s)
+function Base.empty!(backend::NewReformBackend)
+    # REPLACE WITH ACTUAL
+    empty!(backend.model)
+    empty!(backend.data)
+    return backend
+end
+
+# Extend transformation_model and transformation_data (not needed for `JuMPBackend`s)
+InfiniteOpt.transformation_model(backend::NewReformBackend) = backend.model
+InfiniteOpt.transformation_data(backend::NewReformBackend) = backend.data
+
+# Extend JuMP.[get/set]_attribute to pass and retrieve settings for the backend
+# Not necessary for `JuMPBackend`s
+function JuMP.get_attribute(backend::NewReformBackend, attr)
+    # REPLACE WITH ACTUAL
+    return JuMP.get_attribute(backend.model, attr)
+end
+function JuMP.set_attribute(backend::NewReformBackend, attr, val)
+    # REPLACE WITH ACTUAL
+    JuMP.set_attribute(backend.model, attr, val)
+    return
+end
+
+# Extend JuMP.show_backend_summary (optional for better printing)
+function JuMP.show_backend_summary(io::IO, model::InfiniteModel, backend::NewReformBackend)
+    # ADD DESIRED INFORMATION (BE SURE TO INDENT BY 2 SPACES)
+    println(io, "  Backend type: NewReformBackend")
+    println(io, "  Some useful info: 42")
+    return
+end
+
+# Extend build_transformation_backend!
+function InfiniteOpt.build_transformation_backend!(
+    model::InfiniteModel,
+    backend::NewReformBackend;
+    my_kwarg::Bool = true # ADD KEYWORD ARGUMENTS AS NEEDED
+    )
+    # clear backend for a build
+    empty!(backend)
+    backend.model.operator_counter = 0
+
+    # load in user defined nonlinear operators
+    # THIS HELPER FUNCTION IS ONLY FOR BACKENDS THAT USE JuMP
+    add_operators_to_jump(backend.model, model)
+
+    # IT MAY BE USEFUL TO CALL `expand_all_measures!` TO HANDLE MEASURES FIRST
+    # otherwise can extend `add_measure_variable` and `delete_semi_infinite_variable` to
+    # expand in place without modifying the infinite model
+
+    # IT MAY BE USEFUL TO CALL `evaluate_all_derivatives!` TO HANDLE DERIVATIVES FIRST
+    # otherwise can use `evaluate_derivative` or the combo of `derivative_expr_data` & `make_indexed_derivative_expr`
+
+    # REPLACE BELOW WITH OPERATIONS TO BUILD A `NewReformBackend` BASED ON `backend`
+    # these lines just generate artificial data, see `TranscriptionOpt` for a thorough example of implementation
+    data = backend.data
+    reform_model = backend.model
+    for vref in all_variables(model)
+        if index(vref) isa Union{InfiniteVariableIndex, SemiInfiniteVariableIndex}
+            data.infvar_mappings[vref] = [@variable(reform_model) for _ = 1:2]
+            data.infvar_to_supports[vref] = [(0.,), (1.,)]
+        else
+            data.finvar_mappings[vref] = @variable(reform_model)
+        end
+    end
+    # TODO add derivatives
+    for mref in all_measures(model)
+        data.meas_mappings[mref] = [@variable(reform_model) for _ = 1:2]
+        data.meas_to_supports[mref] = [(-1.,), (-2.,)]
+    end
+    for cref in all_constraints(model)
+        data.constr_mappings[cref] = [@constraint(reform_model, @variable(reform_model) >= 0) for _ = 1:2]
+        data.constr_to_supports[cref] = [(2.,), (3.,)]
+    end
+    set_objective(reform_model, objective_sense(model), @variable(reform_model))
+    return
+end
+
+# Extend JuMP.optimize! (not needed for `JuMPBackend`s)
+function JuMP.optimize!(backend::NewReformBackend)
+    # DO WHAT NEEDS TO BE DONE TO SOLVE THE TRANSFORMED MODEL
+    return JuMP.optimize!(backend.model)
+end
+
+# To the extent desired, extend any or all of the JuMP API below
+# Not needed for `JuMPBackend`s
+for func in (:set_silent, :unset_silent, :bridge_constraints,
+    :unset_time_limit_sec, :time_limit_sec, :solver_name, :backend,
+    :mode, :unsafe_backend, :compute_conflict!, :copy_conflict,
+    :set_string_names_on_creation)
+    @eval begin
+        # EXTEND FUNCTION AS APPROPRIATE
+        function JuMP.$func(backend::NewReformBackend)
+            return JuMP.$func(backend.model)
+        end
+    end
+end
+for func in (:set_time_limit_sec, :set_string_names_on_creation, :add_bridge)
+    @eval begin
+        # EXTEND FUNCTION AS APPROPRIATE
+        function JuMP.$func(backend::NewReformBackend, value)
+            return JuMP.$func(backend.model, value)
+        end
+    end
+end
+function JuMP.print_active_bridges(io::IO, backend::NewReformBackend, args...)
+    return JuMP.print_active_bridges(io, backend.model, args...)
+end
+function JuMP.print_bridge_graph(io::IO, backend::NewReformBackend)
+    return JuMP.print_bridge_graph(io, backend.model)
+end
+function JuMP.set_optimizer(
+    backend::NewReformBackend,
+    optimizer_constructor;
+    add_bridges::Bool = true # ADD KWARGS AS DESIRED
+    )
+    JuMP.set_optimizer(backend.model, optimizer_constructor, add_bridges = add_bridges)
+    return
+end
+
+# Extend transformation_variable if appropriate to enable variable related queries
+function InfiniteOpt.transformation_variable(
+    vref::GeneralVariableRef,
+    backend::NewReformBackend;
+    my_kwarg::Bool = true # ADD KEY ARGS AS NEEDED
+    )
+    # REPLACE BELOW WITH ACTUAL CORRESPONDENCE TO THE TRANSFORMATION BACKEND VARIABLE(S)
+    vindex = index(vref)
+    if vindex isa Union{InfiniteVariableIndex, SemiInfiniteVariableIndex, DerivativeIndex}
+        map_dict = backend.data.infvar_mappings
+    elseif vindex isa MeasureIndex
+        map_dict = backend.data.meas_mappings
+    else
+        map_dict = backend.data.finvar_mappings
+    end
+    haskey(map_dict, vref) || error("Variable $vref not used in the backend.")
+    return map_dict[vref]
+end
+
+# Extend transformation_expression if appropriate to enable expression related queries
+function InfiniteOpt.transformation_expression(
+    expr::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr, JuMP.GenericNonlinearExpr}, # POSSIBLY BREAK THESE UP INTO 3 SEPARATE FUNCTIONS
+    backend::NewReformBackend;
+    my_kwarg::Bool = true # ADD KEY ARGS AS NEEDED
+    )
+    # REPLACE BELOW WITH ACTUAL CORRESPONDENCE TO REFORMULATED EXPRESSIONS
+    reform_expr = zero(AffExpr)
+    return reform_expr
+end
+
+# Extend transformation_constraint if appropriate to enable constraint related queries
+function InfiniteOpt.transformation_constraint(
+    cref::InfOptConstraintRef,
+    backend::NewReformBackend;
+    my_kwarg::Bool = true) # ADD KEY ARGS AS NEEDED
+    # REPLACE BELOW WITH ACTUAL CORRESPONDENCE TO THE TRANSFORMATION BACKEND CONSTRAINT(S)
+    map_dict = backend.data.constr_mappings
+    haskey(map_dict, cref) || error("Constraint $cref not used in the backend.")
+    return map_dict[cref]
+end
+
+# If appropriate extend variable_supports (enables support queries of infinite variables)
+function InfiniteOpt.variable_supports(
+    vref::Union{InfiniteVariableRef, SemiInfiniteVariableRef, DerivativeRef},
+    backend::NewReformBackend;
+    my_kwarg::Bool = true # ADD KEY ARGS AS NEEDED
+    )
+    # REPLACE BELOW WITH ACTUAL CORRESPONDENCE TO THE INFINITE VARIABLE SUPPORT VALUES
+    map_dict = backend.data.infvar_to_supports
+    gvref = InfiniteOpt._make_variable_ref(owner_model(vref), index(vref))
+    haskey(map_dict, gvref) || error("Variable $gvref not used in the backend.")
+    return map_dict[gvref]
+end
+
+# If appropriate extend variable_supports for measures (enables support queries of measures)
+function InfiniteOpt.variable_supports(
+    mref::MeasureRef,
+    backend::NewReformBackend;
+    my_kwarg::Bool = true # ADD KEY ARGS AS NEEDED
+    )
+    # REPLACE BELOW WITH ACTUAL CORRESPONDENCE TO THE MEASURE SUPPORT VALUES
+    map_dict = backend.data.meas_to_supports
+    gvref = InfiniteOpt._make_variable_ref(owner_model(mref), index(mref))
+    haskey(map_dict, gvref) || error("Measure $gvref not used in the backend.")
+    return map_dict[gvref]
+end
+
+# If appropriate extend expression_supports (enables support queries of expressions)
+function InfiniteOpt.expression_supports(
+    expr::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr, JuMP.GenericNonlinearExpr},
+    backend::NewReformBackend;
+    my_kwarg::Bool = true # ADD KEY ARGS AS NEEDED
+    )
+    # REPLACE BELOW WITH ACTUAL CORRESPONDENCE TO SUPPORT(S) OF THE EXPRESSION(S)
+    supps = [(-42.,), (1.,)]
+    return supps
+end
+
+# If appropriate extend constraint_supports (enables support queries of constraints)
+function InfiniteOpt.constraint_supports(
+    cref::InfOptConstraintRef,
+    backend::NewReformBackend;
+    my_kwarg::Bool = true) # ADD KEY ARGS AS NEEDED
+    # REPLACE BELOW WITH ACTUAL CORRESPONDENCE TO THE CONSTRAINT SUPPORT VALUES
+    map_dict = backend.data.constr_to_supports
+    haskey(map_dict, cref) || error("Constraint $cref does not have supports in the backend.")
+    return length(map_dict[cref]) == 1 ? first(map_dict[cref]) : map_dict[cref]
+end
+
+# As desired and as appropriate extend any or all of the following JuMP query API
+# Not required for `JuMPBackend`s
+for func in (:termination_status, :raw_status, :solve_time, :simplex_iterations,
+    :barrier_iterations, :node_count, :objective_bound, :relative_gap,
+    :result_count)
+    @eval begin
+        # EXTEND AS NEEDED
+        function JuMP.$func(backend::NewReformBackend)
+            return JuMP.$func(backend.model)
+        end
+    end
+end
+# Simple result dependent model queries
+for func in (:primal_status, :dual_status, :has_values, :has_duals,
+    :objective_value, :dual_objective_value)
+    @eval begin
+        # EXTEND AS NEEDED
+        function JuMP.$func(backend::NewReformBackend; kwargs...)
+            return JuMP.$func(backend.model; kwargs...)
+        end
+    end
+end
+
+# Extend map_value to query optimal values
+# Not needed for `JuMPBackend`s that implement `transformation_[variable/expression/constraint]`
+function InfiniteOpt.map_value(
+    vref::GeneralVariableRef,
+    backend::NewReformBackend;
+    result::Int = 1, # ADD ANY DESIRED KEY ARGS
+    kwargs... # EXTRA CAN BE PASSED ON TO THE MAPPING
+    )
+    # REPLACE WITH ACTUAL MAPPING
+    opt_vref = transformation_variable(vref, backend; kwargs...)
+    if opt_vref isa AbstractArray
+        return map(v -> JuMP.value(v, result = result), opt_vref)
+    else
+        return JuMP.value(opt_vref, result = result)
+    end
+end
+function InfiniteOpt.map_value(
+    expr::JuMP.AbstractJuMPScalar,
+    backend::NewReformBackend;
+    result::Int = 1, # ADD ANY DESIRED KEY ARGS
+    kwargs... # EXTRA CAN BE PASSED ON TO THE MAPPING
+    )
+    # REPLACE WITH ACTUAL MAPPING
+    opt_expr = transformation_expression(expr, backend; kwargs...)
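+    # NOTE: like the variable mapping above, this mapping may return either a
+    # single reformulated expression or an array of them (e.g., one per
+    # support), so both cases are handled below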
+ if opt_expr isa AbstractArray + return map(v -> JuMP.value(v, result = result), opt_expr) + else + return JuMP.value(opt_expr, result = result) + end +end +function InfiniteOpt.map_value( + cref::InfOptConstraintRef, + backend::NewReformBackend; + result::Int = 1, # ADD ANY DESIRED KEY ARGS + kwargs... # EXTRA CAN BE PASSED ON TO THE MAPPING + ) + # REPLACE WITH ACTUAL MAPPING + opt_cref = transformation_constraint(cref, backend; kwargs...) + if opt_cref isa AbstractArray + return map(c -> JuMP.value(c, result = result), opt_cref) + else + return JuMP.value(opt_cref, result = result) + end +end + +# IF NEEDED, EXTEND `InfiniteOpt.map_infinite_parameter_value` (defaults to using `supports(pref)`) + +# Extend `map_dual` to be able to query duals +# Not needed for `JuMPBackend`s that implement `transformation_constraint` +function InfiniteOpt.map_dual( + cref::InfOptConstraintRef, + backend::NewReformBackend; + result::Int = 1, # ADD ANY DESIRED KEY ARGS + kwargs... # EXTRA CAN BE PASSED ON TO THE MAPPING + ) + # REPLACE WITH ACTUAL MAPPING + opt_cref = transformation_constraint(cref, backend; kwargs...) + if opt_cref isa AbstractArray + return map(c -> JuMP.dual(c, result = result), opt_cref) + else + return JuMP.dual(opt_cref, result = result) + end +end + +# IF APPROPRIATE, EXTEND THE FOLLOWING +# Not required for `JuMPBackend`s that use `transformation_[variable/constraint]` +# `InfiniteOpt.map_reduced_cost` +# `InfiniteOpt.map_optimizer_index` +# `InfiniteOpt.map_shadow_price` +# `JuMP.lp_sensitivity_report` diff --git a/test/extensions/derivative_method.jl b/test/extensions/derivative_method.jl index 3da91c9f..bf2d9f51 100644 --- a/test/extensions/derivative_method.jl +++ b/test/extensions/derivative_method.jl @@ -44,7 +44,7 @@ function InfiniteOpt.make_indexed_derivative_expr( order::Int, idx, supps::Vector{Float64}, # ordered - write_model::JuMP.AbstractModel, + write_model::Union{InfiniteModel, AbstractTransformationBackend}, ::MyDerivMethod, supp_diff ) diff --git a/test/extensions/generative_info.jl b/test/extensions/generative_info.jl index 093b9db3..24f5c925 100644 --- a/test/extensions/generative_info.jl +++ b/test/extensions/generative_info.jl @@ -10,7 +10,7 @@ end struct MyGenLabel <: InternalLabel end # Extend support_label -function InfiniteOpt.support_label(info::MyGenInfo)::Type{MyGenLabel} # REPLACE WITH ACTUAL INFO TYPE +function InfiniteOpt.support_label(info::MyGenInfo) return MyGenLabel # REPLACE WITH ACTUAL MAPPING end @@ -19,7 +19,7 @@ function InfiniteOpt.make_generative_supports( info::MyGenInfo, pref, supps - )::Vector{Float64} # REPLACE WITH ACTUAL INFO TYPE + ) # REPLACE BELOW WITH ACTUAL CODE TO CREATE THE GENERATIVE SUPPORTS BASED ON THE EXISTING num_existing = length(supps) num_existing <= 1 && error("`$pref` doesn't have enough supports.") diff --git a/test/extensions/measure_data.jl b/test/extensions/measure_data.jl index 16f12d27..8a46925f 100644 --- a/test/extensions/measure_data.jl +++ b/test/extensions/measure_data.jl @@ -11,15 +11,16 @@ struct NewMeasureData <: AbstractMeasureData end # Extend parameter_refs to return the parameter(s) being measured by a measure using NewMeasureData -function InfiniteOpt.parameter_refs(data::NewMeasureData)::Union{GeneralVariableRef, AbstractArray{<:GeneralVariableRef}} +function InfiniteOpt.parameter_refs(data::NewMeasureData) return data.attr2.parameter_refs # REPLACE WITH ACTUAL PARAMETER LOCATION end # Extend expand_measure to return the finite reformulation of a measure using NewMeasureData -function 
InfiniteOpt.expand_measure(expr::JuMP.AbstractJuMPScalar,
-                                   data::NewMeasureData,
-                                   write_model::JuMP.AbstractModel
-                                   )::JuMP.AbstractJuMPScalar
+function InfiniteOpt.expand_measure(
+    expr::JuMP.AbstractJuMPScalar,
+    data::NewMeasureData,
+    write_model::Union{InfiniteModel, AbstractTransformationBackend}
+    )
     # INSERT APPROPRIATE METHODS HERE
     # USING make_point_variable_ref AND make_semi_infinite_variable_ref MAY BE USEFUL
     return expand_measure(expr, data.attr2, write_model) # REPLACE WITH ACTUAL RESULT
@@ -33,25 +34,25 @@ end
 
 # Extend supports to return any infinite parameter supports employed by NewMeasureData
 # This is only optional if the new abstraction doesn't use supports at all
-function InfiniteOpt.supports(data::NewMeasureData)::Vector
+function InfiniteOpt.supports(data::NewMeasureData)
     return data.attr2.supports # REPLACE WITH ACTUAL LOCATION
 end
 
 # Extend support_label to return the label of supports employed by NewMeasureData
 # This is only optional if the new abstraction doesn't use supports at all
-function InfiniteOpt.support_label(data::NewMeasureData)::Type{<:AbstractSupportLabel}
+function InfiniteOpt.support_label(data::NewMeasureData)
     return data.attr2.label # REPLACE WITH ACTUAL LOCATION
 end
 
 # Extend coefficients to return the coefficients stored in NewMeasureData if appropriate
 # This is optional (returns empty vector otherwise)
-function InfiniteOpt.coefficients(data::NewMeasureData)::Vector{Float64}
+function InfiniteOpt.coefficients(data::NewMeasureData)
     return data.attr2.coefficients # REPLACE WITH ACTUAL LOCATION
 end
 
 # Extend weight_function to return the weight function stored in NewMeasureData if appropriate
 # This is optional (returns default_weight otherwise)
-function InfiniteOpt.weight_function(data::NewMeasureData)::Function
+function InfiniteOpt.weight_function(data::NewMeasureData)
     return data.attr2.weight_function # REPLACE WITH ACTUAL LOCATION
 end
 
@@ -64,7 +65,7 @@ function new_measure(
     ub::Number;
     name::String = "NewMeas",
     num_supports::Int = 10
-    )::GeneralVariableRef # REPLACE ARGS WITH ACTUAL DESIRED
+    ) # REPLACE ARGS WITH ACTUAL DESIRED
     # INSERT RELEVANT CHECKS AND OPERATIONS HERE
     # REPLACE BELOW WITH ACTUAL CONSTRUCTION
     attr2 = DiscreteMeasureData(param, ones(num_supports), [lb + (ub - lb) / (num_supports - 1) * i for i in 1:num_supports]) # just an example
diff --git a/test/extensions/measure_eval.jl b/test/extensions/measure_eval.jl
index fc046c75..bacfee5c 100644
--- a/test/extensions/measure_eval.jl
+++ b/test/extensions/measure_eval.jl
@@ -11,7 +11,7 @@ function InfiniteOpt.MeasureToolbox.generate_integral_data(
     method::NewUniEvalMethod;
     num_supports::Int = InfiniteOpt.DefaultNumSupports,
     weight_func::Function = InfiniteOpt.default_weight
-    )::InfiniteOpt.AbstractMeasureData # REPLACE WITH ACTUAL ALIAS
+    ) # REPLACE WITH ACTUAL ALIAS
     # REPLACE WITH ACTUAL FUNCTIONALITY
     # ADD CHECKS IF NECESSARY
     increment = (upper_bound - lower_bound) / (num_supports - 1)
@@ -33,7 +33,7 @@ function InfiniteOpt.MeasureToolbox.generate_integral_data(
     method::NewMultiEvalMethod;
     num_supports::Int = InfiniteOpt.DefaultNumSupports,
     weight_func::Function = InfiniteOpt.default_weight
-    )::InfiniteOpt.AbstractMeasureData # REPLACE WITH ACTUAL ARGUMENTS
+    ) # REPLACE WITH ACTUAL ARGUMENTS
     # REPLACE WITH ACTUAL FUNCTIONALITY
     # ADD CHECKS IF NECESSARY
     if !all(i -> (dispatch_variable_ref(i) isa InfiniteOpt.IndependentParameterRef), prefs)
diff --git a/test/extensions/optimizer_model.jl b/test/extensions/optimizer_model.jl
deleted file mode 100644
index c2af9f0b..00000000
--- a/test/extensions/optimizer_model.jl +++ /dev/null @@ -1,201 +0,0 @@ -## A template for defining a new optimizer model - -# Define a mutable struct for storing infinite model to optimizer model mappings -# plus other needed information -mutable struct NewReformData - # Variable mapping (REPLACE AND REFORMAT BELOW AS NEEDED) - infvar_mappings::Dict{GeneralVariableRef, Vector{JuMP.VariableRef}} - finvar_mappings::Dict{GeneralVariableRef, JuMP.VariableRef} - - # Map other variable info (REPLACE AND REFORMAT BELOW AS NEEDED) - infvar_to_supports::Dict{GeneralVariableRef, Vector{<:Tuple}} - - # Measure mappings and other info (ONLY NEEDED TO ENABLE MEASURE QUERIES) - meas_mappings::Dict{GeneralVariableRef, Vector{JuMP.VariableRef}} - meas_to_supports::Dict{GeneralVariableRef, Vector{<:Tuple}} - - # Constraint mapping (REPLACE AND REFORMAT BELOW AS NEEDED) - constr_mappings::Dict{InfOptConstraintRef, Vector{JuMP.ConstraintRef}} - - # Map other constraint info (REPLACE AND REFORMAT BELOW AS NEEDED) - constr_to_supports::Dict{InfOptConstraintRef, Vector{<:Tuple}} - - # ADD OTHER MAPPING/MODEL INFORMATION HERE - - # default constructor - function NewReformData() # REFORMAT BELOW IN ACCORDANCE WITH ABOVE ATTRIBUTES - return new(Dict{GeneralVariableRef, Vector{JuMP.VariableRef}}(), - Dict{GeneralVariableRef, JuMP.VariableRef}(), - Dict{GeneralVariableRef, Vector{Tuple}}(), - Dict{GeneralVariableRef, Vector{JuMP.VariableRef}}(), - Dict{GeneralVariableRef, Vector{Tuple}}(), - Dict{InfOptConstraintRef, Vector{JuMP.ConstraintRef}}(), - Dict{InfOptConstraintRef, Vector{Tuple}}() - ) - end -end - -# Define the optimizer model key -const OptKey = :ReformData # REPLACE WITH A DESIRED UNIQUE KEY - -# Make a constructor for new optimizer model type (extension of JuMP.Model) -function NewReformModel(args...; kwargs...) # ADD EXPLICT ARGS AS NEEDED - # initialize the JuMP Model - model = JuMP.Model(args...; kwargs...) - - # ADD ADDITIONAL OPERATIONS IF NEEDED - - # add the extension data with a chosen optimizer model key and return - model.ext[OptKey] = NewReformData() - return model -end - -# Make function for extracting the data from the model (optional) -function reform_data(model::JuMP.Model) - # UPDATE THE NOMENCLATURE AS NEEDED - haskey(model.ext, OptKey) || error("Model is not a NewReformModel.") - return model.ext[OptKey] -end - -# Extend build_optimizer_model! (enables build_optimizer_model! and optimize!) 
-function InfiniteOpt.build_optimizer_model!( - model::InfiniteModel, - key::Val{OptKey}; - my_kwarg::Bool = true # ADD KEYWORD ARGUMENTS AS NEEDED - ) - # clear the model for a build/rebuild - reform_model = clear_optimizer_model_build!(model) - - # load in nonlinear operators - add_operators_to_jump(reform_model, model) - - # IT MAY BE USEFUL TO CALL `expand_all_measures!` TO HANDLE MEASURES FIRST - # otherwise can extend `add_measure_variable` and `delete_semi_infinite_variable` to - # expand in place without modifying the infinite model - - # REPLACE LINES 73-89 WITH OPERATIONS TO BUILD A `NewReformModel` BASED ON `model` - # these lines just generate artificial data, see `TranscriptionOpt` for a thorough example of implementation - data = reform_data(reform_model) - for vref in all_variables(model) - if index(vref) isa Union{InfiniteVariableIndex, SemiInfiniteVariableIndex} - data.infvar_mappings[vref] = [@variable(reform_model) for i = 1:2] - data.infvar_to_supports[vref] = [(0.,), (1.,)] - else - data.finvar_mappings[vref] = @variable(reform_model) - end - end - for mref in all_measures(model) - data.meas_mappings[mref] = [@variable(reform_model) for i = 1:2] - data.meas_to_supports[mref] = [(-1.,), (-2.,)] - end - for cref in all_constraints(model) - data.constr_mappings[cref] = [@constraint(reform_model, @variable(reform_model) >= 0) for i = 1:2] - data.constr_to_supports[cref] = [(2.,), (3.,)] - end - set_objective(reform_model, objective_sense(model), @variable(reform_model)) - # update the optimizer model status - set_optimizer_model_ready(model, true) - return -end - -# Extend optimizer_model_variable if appropriate to enable variable related queries -function InfiniteOpt.optimizer_model_variable( - vref::GeneralVariableRef, - key::Val{OptKey}; - my_kwarg::Bool = true # ADD KEY ARGS AS NEEDED - ) - # REPLACE BELOW WITH ACTUAL CORRESPONDENCE TO THE OPTIMIZER MODEL VARIABLE(S) - model = optimizer_model(JuMP.owner_model(vref)) - vindex = index(vref) - if vindex isa Union{InfiniteVariableIndex, SemiInfiniteVariableIndex} - map_dict = reform_data(model).infvar_mappings - elseif vindex isa MeasureIndex - map_dict = reform_data(model).meas_mappings - else - map_dict = reform_data(model).finvar_mappings - end - haskey(map_dict, vref) || error("Variable $vref not used in the optimizer model.") - return map_dict[vref] -end - -# Extend optimizer_model_expression if appropriate to enable expression related queries -function InfiniteOpt.optimizer_model_expression( - expr::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr, JuMP.NonlinearExpr}, # POSSIBLY BREAK THESE UP INTO 3 SEPARATE FUNCTIONS - key::Val{OptKey}; - my_kwarg::Bool = true # ADD KEY ARGS AS NEEDED - ) - # REPLACE BELOW WITH ACTUAL CORRESPONDENCE TO REFORMULATED EXPRESSIONS - reform_expr = zero(AffExpr) - return reform_expr -end - -# Extend optimizer_model_constraint if appropriate to enable constraint related queries -function InfiniteOpt.optimizer_model_constraint( - cref::InfOptConstraintRef, - key::Val{OptKey}; # ADD KEY ARGS AS NEEDED - my_kwarg::Bool = true) - # REPLACE BELOW WITH ACTUAL CORRESPONDENCE TO THE OPTIMIZER MODEL CONSTRAINT(S) - model = optimizer_model(JuMP.owner_model(cref)) - map_dict = reform_data(model).constr_mappings - haskey(map_dict, cref) || error("Constraint $cref not used in the optimizer model.") - return map_dict[cref] -end - -# If appropriate extend variable_supports (enables support queries of infinite variables) -function InfiniteOpt.variable_supports( - model::JuMP.Model, - 
vref::Union{InfiniteVariableRef, SemiInfiniteVariableRef}, - key::Val{OptKey}; - my_kwarg::Bool = true # ADD KEY ARGS AS NEEDED - ) - # REPLACE BELOW WITH ACTUAL CORRESPONDENCE TO THE INFINITE VARIABLE SUPPORT VALUES - map_dict = reform_data(model).infvar_to_supports - gvref = InfiniteOpt._make_variable_ref(owner_model(vref), index(vref)) - haskey(map_dict, gvref) || error("Variable $gvref not used in the optimizer model.") - return map_dict[gvref] -end - -# If appropriate extend variable_supports for measures (enables support queries of measures) -function InfiniteOpt.variable_supports( - model::JuMP.Model, - mref::MeasureRef, - key::Val{OptKey}; - my_kwarg::Bool = true # ADD KEY ARGS AS NEEDED - ) - # REPLACE BELOW WITH ACTUAL CORRESPONDENCE TO THE INFINITE VARIABLE SUPPORT VALUES - map_dict = reform_data(model).meas_to_supports - gvref = InfiniteOpt._make_variable_ref(owner_model(mref), index(mref)) - haskey(map_dict, gvref) || error("Variable $gvref not used in the optimizer model.") - return map_dict[gvref] -end - -# If appropriate extend expression_supports (enables support queries of expressions) -function InfiniteOpt.expression_supports( - model::JuMP.Model, - expr::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr, JuMP.NonlinearExpr}, - key::Val{OptKey}; - my_kwarg::Bool = true # ADD KEY ARGS AS NEEDED - ) - # REPLACE BELOW WITH ACTUAL CORRESPONDENCE TO SUPPORT(S) OF THE EXPRESSION(S) - supps = [(-42.,), (1.,)] - return supps -end - -# If appropriate extend constraint_supports (enables support queries of constraints) -function InfiniteOpt.constraint_supports( - model::JuMP.Model, - cref::InfOptConstraintRef, - key::Val{OptKey}; # ADD KEY ARGS AS NEEDED - my_kwarg::Bool = true) - # REPLACE BELOW WITH ACTUAL CORRESPONDENCE TO THE INFINITE VARIABLE SUPPORT VALUES - map_dict = reform_data(model).constr_to_supports - haskey(map_dict, cref) || error("Constraint $cref does not have supports in the optimizer model.") - return length(map_dict[cref]) == 1 ? 
first(map_dict[cref]) : map_dict[cref] -end - -# If it doesn't make sense to extend optimizer_model_variable, -# optimizer_model_expression and/or optimizer_model_constraint then you'll need -# to extend the following: -# - InfiniteOpt.map_value to enable JuMP.value -# - InfiniteOpt.map_optimizer_index to enable JuMP.optimizer_index -# - InfiniteOpt.map_dual to enable JuMP.dual diff --git a/test/results.jl b/test/results.jl index 1a06c866..855ee084 100644 --- a/test/results.jl +++ b/test/results.jl @@ -9,10 +9,10 @@ @variable(m, g) @objective(m, Min, g^2) @constraint(m, c1, 2 * inf * g <= 1) - tm = transcription_model(m) + tb = m.backend JuMP.optimize!(m) # setup the results - mockoptimizer = JuMP.backend(tm).optimizer.model + mockoptimizer = JuMP.backend(tb).optimizer.model MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.OPTIMAL) MOI.set(mockoptimizer, MOI.RawStatusString(), "solver specific string") MOI.set(mockoptimizer, MOI.ResultCount(), Int64(2)) @@ -63,6 +63,17 @@ @testset "JuMP.relative_gap" begin @test relative_gap(m) == 9 end + # test result_count + @testset "JuMP.result_count" begin + @test result_count(m) == 2 + end + # test fallbacks + @testset "Fallbacks" begin + for f in (termination_status, raw_status, solve_time, simplex_iterations, + barrier_iterations, node_count, result_count) + @test_throws ErrorException f(TestBackend()) + end + end end # Test objective queries @@ -76,10 +87,10 @@ end @variable(m, g) @objective(m, Min, g^2) @constraint(m, c1, 2 * g <= 1) - tm = transcription_model(m) + tb = m.backend JuMP.optimize!(m) # setup the results - mockoptimizer = JuMP.backend(tm).optimizer.model + mockoptimizer = JuMP.backend(tb).optimizer.model MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.OPTIMAL) MOI.set(mockoptimizer, MOI.ObjectiveValue(), -1.0) MOI.set(mockoptimizer, MOI.DualObjectiveValue(), -2.0) @@ -99,6 +110,12 @@ end @testset "JuMP.dual_objective_value" begin @test dual_objective_value(m) == -2. end + # test fallbacks + @testset "Fallbacks" begin + for f in (objective_bound, objective_value, dual_objective_value) + @test_throws ErrorException f(TestBackend()) + end + end end # Test variable queries @@ -120,23 +137,23 @@ end rv = add_variable(m, var) @objective(m, Min, g^2) @constraint(m, c1, 2 * g <= 1) - tm = transcription_model(m) + tb = m.backend JuMP.optimize!(m) - inft = transcription_variable(inf, label = All) - gt = transcription_variable(g) - inf2t = transcription_variable(inf2, label = All) - d1t = transcription_variable(d1, label = All) - rvt = transcription_variable(rv, label = All) + inft = transformation_variable(inf, label = All) + gt = transformation_variable(g) + inf2t = transformation_variable(inf2, label = All) + d1t = transformation_variable(d1, label = All) + rvt = transformation_variable(rv, label = All) cref = UpperBoundRef(g) - creft = transcription_constraint(cref, label = All) + creft = transformation_constraint(cref, label = All) # setup the optimizer - mockoptimizer = JuMP.backend(tm).optimizer.model + mockoptimizer = JuMP.backend(tb).optimizer.model MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.OPTIMAL) MOI.set(mockoptimizer, MOI.ResultCount(), 1) MOI.set(mockoptimizer, MOI.PrimalStatus(1), MOI.FEASIBLE_POINT) MOI.set(mockoptimizer, MOI.DualStatus(1), MOI.FEASIBLE_POINT) - for t in list_of_constraint_types(tm) - for c in all_constraints(tm, t...) + for t in list_of_constraint_types(tb.model) + for c in all_constraints(tb.model, t...) 
MOI.set(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizer_index(c), -12.0) end end @@ -156,15 +173,16 @@ end end # test map_value @testset "map_value" begin - @test InfiniteOpt.map_value(inf, Val(:TransData), 1, label = All) == [2., 1., 2.] - @test InfiniteOpt.map_value(g, Val(:TransData), 1) == 1. - @test InfiniteOpt.map_value(rv, Val(:TransData), 1, label = All) == [-2., -1.] + @test InfiniteOpt.map_value(inf, tb, label = All) == [2., 1., 2.] + @test InfiniteOpt.map_value(g, tb) == 1. + @test InfiniteOpt.map_value(rv, tb, label = All) == [-2., -1.] + @test_throws ErrorException InfiniteOpt.map_value(g, TestBackend()) end # test _get_value @testset "_get_value " begin - @test InfiniteOpt._get_value(g, FiniteVariableIndex, 1) == 1. - @test InfiniteOpt._get_value(par, IndependentParameterIndex, 1, label = All) == [0., 0.5, 1.] - @test InfiniteOpt._get_value(fin, FiniteParameterIndex, 1) == 42 + @test InfiniteOpt._get_value(g, FiniteVariableIndex) == 1. + @test InfiniteOpt._get_value(par, IndependentParameterIndex, label = All) == [0., 0.5, 1.] + @test InfiniteOpt._get_value(fin, FiniteParameterIndex) == 42 end # test value @testset "JuMP.value" begin @@ -184,19 +202,21 @@ end end #test Reduced Cost @testset "map_reduced_cost" begin - @test InfiniteOpt.map_reduced_cost(inf, Val(:TransData), label = All) == [0.0, 0.0, 0.0] - @test InfiniteOpt.map_reduced_cost(g, Val(:TransData)) == 26.0 -end + @test InfiniteOpt.map_reduced_cost(inf, tb, label = All) == [0.0, 0.0, 0.0] + @test InfiniteOpt.map_reduced_cost(g, tb) == 26.0 + @test_throws ErrorException InfiniteOpt.map_reduced_cost(g, TestBackend()) + end #test Reduced Cost @testset "JuMP.reduced_cost" begin - @test JuMP.reduced_cost(inf, label = All) == [0.0, 0.0, 0.0] - @test JuMP.reduced_cost(g) == 26.0 - end + @test JuMP.reduced_cost(inf, label = All) == [0.0, 0.0, 0.0] + @test JuMP.reduced_cost(g) == 26.0 + end # test map_optimizer_index @testset "map_optimizer_index" begin - @test isa(InfiniteOpt.map_optimizer_index(g, Val(:TransData)), MOI.VariableIndex) - @test isa(InfiniteOpt.map_optimizer_index(inf, Val(:TransData)), Vector{MOI.VariableIndex}) - @test isa(InfiniteOpt.map_optimizer_index(rv, Val(:TransData)), Vector{MOI.VariableIndex}) + @test isa(InfiniteOpt.map_optimizer_index(g, tb), MOI.VariableIndex) + @test isa(InfiniteOpt.map_optimizer_index(inf, tb), Vector{MOI.VariableIndex}) + @test isa(InfiniteOpt.map_optimizer_index(rv, tb), Vector{MOI.VariableIndex}) + @test_throws ErrorException InfiniteOpt.map_optimizer_index(inf, TestBackend()) end # test optimizer_index @testset "JuMP.optimizer_index" begin @@ -225,13 +245,13 @@ end meas2 = support_sum(inf2, par2) @objective(m, Min, g^2) @constraint(m, c1, 2 * g <= 1) - tm = transcription_model(m) + tb = m.backend JuMP.optimize!(m) - inft = transcription_variable(inf) - gt = transcription_variable(g) - inf2t = transcription_variable(inf2) + inft = transformation_variable(inf) + gt = transformation_variable(g) + inf2t = transformation_variable(inf2) # setup the optimizer - mockoptimizer = JuMP.backend(tm).optimizer.model + mockoptimizer = JuMP.backend(tb).optimizer.model MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.OPTIMAL) MOI.set(mockoptimizer, MOI.ResultCount(), 1) MOI.set(mockoptimizer, MOI.PrimalStatus(1), MOI.FEASIBLE_POINT) @@ -248,11 +268,11 @@ end end # test map_value @testset "map_value" begin - @test InfiniteOpt.map_value(meas1, Val(:TransData), 1) == 4. - @test InfiniteOpt.map_value(meas2, Val(:TransData), 1) == [0., -3.] 
- @test InfiniteOpt.map_value(3g - 1, Val(:TransData), 1) == 2. - @test InfiniteOpt.map_value(inf^2 + g, Val(:TransData), 1) == [5., 1.] - @test InfiniteOpt.map_value(zero(AffExpr) + 1, Val(:TransData), 1) == 1. + @test InfiniteOpt.map_value(meas1, tb) == 4. + @test InfiniteOpt.map_value(meas2, tb) == [0., -3.] + @test InfiniteOpt.map_value(3g - 1, tb) == 2. + @test InfiniteOpt.map_value(inf^2 + g, tb) == [5., 1.] + @test InfiniteOpt.map_value(zero(AffExpr) + 1, tb) == 1. end # test value @testset "JuMP.value" begin @@ -284,18 +304,17 @@ end @constraint(m, c2, inf >= 0) @constraint(m, c3, sin(g) == 0) @constraint(m, c4, sin(inf) == 0) - tm = transcription_model(m) + @objective(m, Min, 42) + tb = m.backend JuMP.optimize!(m) - inft = transcription_variable(inf) - gt = transcription_variable(g) - c1t = transcription_constraint(c1) - c2t = transcription_constraint(c2) - c3t = transcription_constraint(c3) - c4t = transcription_constraint(c4) + inft = transformation_variable(inf) + gt = transformation_variable(g) + c1t = transformation_constraint(c1) + c2t = transformation_constraint(c2) + c3t = transformation_constraint(c3) + c4t = transformation_constraint(c4) # setup optimizer info - mockoptimizer = JuMP.backend(tm).optimizer.model - # block = MOI.get(tm, MOI.NLPBlock()) - # MOI.initialize(block.evaluator, Symbol[]) + mockoptimizer = JuMP.backend(tb).optimizer.model MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.OPTIMAL) MOI.set(mockoptimizer, MOI.ResultCount(), 1) MOI.set(mockoptimizer, MOI.PrimalStatus(), MOI.FEASIBLE_POINT) @@ -309,11 +328,10 @@ end MOI.set(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizer_index(c3t), 4.0) MOI.set(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizer_index(c4t[1]), 2.0) MOI.set(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizer_index(c4t[2]), 3.0) - # MOI.set(mockoptimizer, MOI.NLPBlockDual(1), [4.0, 2., 3.]) # test map_value @testset "map_value" begin - @test InfiniteOpt.map_value(c1, Val(:TransData), 1) == 1. - @test InfiniteOpt.map_value(c2, Val(:TransData), 1) == [-1., 0.] + @test InfiniteOpt.map_value(c1, tb) == 1. + @test InfiniteOpt.map_value(c2, tb) == [-1., 0.] end # test value @testset "JuMP.value" begin @@ -325,8 +343,9 @@ end end # test map_optimizer_index @testset "map_optimizer_index" begin - @test isa(InfiniteOpt.map_optimizer_index(c1, Val(:TransData)), MOI.ConstraintIndex) - @test isa(InfiniteOpt.map_optimizer_index(c2, Val(:TransData)), Vector{<:MOI.ConstraintIndex}) + @test isa(InfiniteOpt.map_optimizer_index(c1, tb), MOI.ConstraintIndex) + @test isa(InfiniteOpt.map_optimizer_index(c2, tb), Vector{<:MOI.ConstraintIndex}) + @test_throws ErrorException InfiniteOpt.map_optimizer_index(c1, TestBackend()) end # test optimizer_index @testset "JuMP.optimizer_index" begin @@ -342,8 +361,9 @@ end end # test map_dual @testset "map_dual" begin - @test InfiniteOpt.map_dual(c1, Val(:TransData), 1) == -1. - @test InfiniteOpt.map_dual(c2, Val(:TransData), 1) == [0., 1.] + @test InfiniteOpt.map_dual(c1, tb) == -1. + @test InfiniteOpt.map_dual(c2, tb) == [0., 1.] + @test_throws ErrorException InfiniteOpt.map_dual(c1, TestBackend()) end # test dual @testset "JuMP.dual" begin @@ -355,27 +375,12 @@ end end # test shadow_price @testset "JuMP.shadow_price" begin - # test with FEASIBILITY_SENSE - @test_throws ErrorException shadow_price(c1) - @test_throws ErrorException shadow_price(c2) - @test_throws ErrorException shadow_price(c3) - # test with MIN_SENSE - set_objective_sense(m, MOI.MIN_SENSE) @test shadow_price(c1) == -1. 
@test shadow_price(c1, ndarray = true) == [-1.] @test shadow_price(c2, label = PublicLabel) == [-0., -1.] @test shadow_price(c3) == -4 @test shadow_price(c4) == [-2, -3] - # test with MAX_SENSE - set_objective_sense(m, MOI.MAX_SENSE) - @test shadow_price(c1) == 1. - @test shadow_price(c1, ndarray = true) == [1.] - @test shadow_price(c2, label = PublicLabel) == [0., 1.] - @test shadow_price(c3) == 4 - @test shadow_price(c4) == [2, 3] - # test no duals error - MOI.set(mockoptimizer, MOI.DualStatus(), MOI.NO_SOLUTION) - @test_throws ErrorException shadow_price(c1) + @test_throws ErrorException InfiniteOpt.map_shadow_price(c1, TestBackend()) end end @@ -393,16 +398,16 @@ end @constraint(m, c3, g <= 1) @constraint(m, c4, inf <= 1) @objective(m, Max, 2g) - tm = transcription_model(m) + tb = m.backend optimize!(m) - inft = transcription_variable(inf) - gt = transcription_variable(g) - c1t = transcription_constraint(c1) - c2t = transcription_constraint(c2) - c3t = transcription_constraint(c3) - c4t = transcription_constraint(c4) + inft = transformation_variable(inf) + gt = transformation_variable(g) + c1t = transformation_constraint(c1) + c2t = transformation_constraint(c2) + c3t = transformation_constraint(c3) + c4t = transformation_constraint(c4) # setup the optimizer info - mockoptimizer = JuMP.backend(tm).optimizer.model; + mockoptimizer = JuMP.backend(tb).optimizer.model; MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.OPTIMAL) MOI.set(mockoptimizer, MOI.ResultCount(), 1) MOI.set(mockoptimizer, MOI.PrimalStatus(), MOI.FEASIBLE_POINT) @@ -428,6 +433,7 @@ end MOI.set(mockoptimizer, MOI.ConstraintBasisStatus(), JuMP.optimizer_index(c4t[2]), MOI.BASIC) # test making the report @test lp_sensitivity_report(m, atol = 1e-6) isa InfOptSensitivityReport + @test_throws ErrorException lp_sensitivity_report(TestBackend()) # test constraint queries @test lp_sensitivity_report(m)[c1] == (-Inf, 0) @test lp_sensitivity_report(m)[c2, label = All] == [(-Inf, 0), (-Inf, 0)] diff --git a/test/runtests.jl b/test/runtests.jl index 2ddd2531..db9702f5 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -97,8 +97,8 @@ println("") println("") @time @testset "Solution Queries" begin include("results.jl") end println("") -# @time @testset "Extensions" begin include("extensions.jl") end -# println("") +@time @testset "Extensions" begin include("extensions.jl") end +println("") println("----------------------------------------------------------------------------") println("-----------------------------TESTING COMPLETE!------------------------------") println("----------------------------------------------------------------------------") From 50edde45177665fb378d7e258054b16be6ee761b Mon Sep 17 00:00:00 2001 From: pulsipher Date: Wed, 3 Jul 2024 12:04:27 -0400 Subject: [PATCH 4/8] Updated Docs --- Project.toml | 2 +- README.md | 9 +- docs/README.md | 2 +- docs/jump/README.md | 1 - docs/make.jl | 16 +- docs/src/develop/extensions.md | 330 ++++++++++++++++------------- docs/src/guide/constraint.md | 2 +- docs/src/guide/derivative.md | 5 +- docs/src/guide/expression.md | 37 ++-- docs/src/guide/finite_parameter.md | 11 +- docs/src/guide/measure.md | 65 +++--- docs/src/guide/model.md | 204 +++++++++++------- docs/src/guide/objective.md | 14 +- docs/src/guide/optimize.md | 122 ++++++----- docs/src/guide/parameter.md | 21 +- docs/src/guide/result.md | 46 ++-- docs/src/guide/transcribe.md | 160 ++++++-------- docs/src/guide/variable.md | 2 +- docs/src/index.md | 9 +- docs/src/install.md | 22 +- docs/src/manual/backend.md | 
85 ++++++++
 docs/src/manual/expression.md | 3 +-
 docs/src/manual/measure.md | 5 +-
 docs/src/manual/optimize.md | 59 ------
 docs/src/manual/parameter.md | 2 +
 docs/src/manual/result.md | 34 ++-
 docs/src/manual/transcribe.md | 43 ++--
 docs/src/manual/variable.md | 5 +
 docs/src/tutorials/quick_start.md | 22 +-
 src/array_parameters.jl | 2 +-
 src/backends.jl | 22 +-
 src/constraints.jl | 4 +-
 src/derivative_evaluations.jl | 2 +-
 src/derivatives.jl | 2 +-
 src/expressions.jl | 2 +-
 src/measure_expansions.jl | 6 +-
 src/nlp.jl | 4 +-
 src/results.jl | 2 +-
 src/scalar_parameters.jl | 6 +-
 src/variable_basics.jl | 2 +-
 test/backend_setup.jl | 2 +
 41 files changed, 761 insertions(+), 633 deletions(-)
 create mode 100644 docs/src/manual/backend.md
 delete mode 100644 docs/src/manual/optimize.md

diff --git a/Project.toml b/Project.toml
index a6a92dae..53585864 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "InfiniteOpt"
 uuid = "20393b10-9daf-11e9-18c9-8db751c92c57"
 authors = ["Joshua Pulsipher and Weiqi Zhang"]
-version = "0.5.8"
+version = "0.6.0"
 
 [deps]
 DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
diff --git a/README.md b/README.md
index 477a906a..0e7fe02c 100644
--- a/README.md
+++ b/README.md
@@ -9,10 +9,6 @@ interface for these advanced problem types that can be used by those with
 little to no background in these areas. It also contains a wealth of
 capabilities making it a powerful and convenient tool for advanced users.
 
-:tada: **`v0.5` introduces general nonlinear modeling!**: See the documentation for details.
-
-:mega: **`v0.5.1` now requires Julia 1.6 or newer**
-
 **Current Version:** [![](https://docs.juliahub.com/InfiniteOpt/version.svg)](https://juliahub.com/ui/Packages/InfiniteOpt/p3GvY) [![](https://docs.juliahub.com/InfiniteOpt/pkgeval.svg)](https://juliahub.com/ui/Packages/InfiniteOpt/p3GvY)
 
 | **Documentation** | **Build Status** | **Citation** |
@@ -27,7 +23,6 @@ include:
 - Infinite variables (decision functions) (e.g., `y(t, x)`)
 - Derivatives (e.g., `∂y(t, x)/∂t`)
 - Measures (e.g., `∫y(t,x)dt`, `𝔼[y(ξ)]`)
-- **1st class nonlinear modeling**
 
 The unifying modeling abstraction behind `InfiniteOpt` captures a wide spectrum
 of disciplines which include dynamic, PDE, stochastic, and semi-infinite
@@ -48,7 +43,7 @@ can be installed by entering the following in the REPL.
 
 ```julia
 julia> ]
-(v1.9) pkg> add InfiniteOpt
+(v1.10) pkg> add InfiniteOpt
 ```
 
 ## Documentation
@@ -83,7 +78,7 @@ citing it. A pre-print version is freely available through
 [arXiv](https://arxiv.org/abs/2106.12689).
 
 ## Project Status
-The package is tested against Julia `1.6` and `1.9` on Linux, Mac, and Windows.
+The package is tested against Julia `1.6` and `1.10` on Linux, Mac, and Windows.
 
 ## Contributing
 `InfiniteOpt` is being actively developed and suggestions or other forms of
 contribution are encouraged.
diff --git a/docs/README.md b/docs/README.md index ef69e1a2..19c185a7 100644 --- a/docs/README.md +++ b/docs/README.md @@ -5,7 +5,7 @@ To install it, run the following command in a Julia session: ```julia julia> ] -(v1.8) pkg> add Documenter +(v1.10) pkg> add Documenter ``` You'll also need to make sure you have `JuMP.jl`, `Distributions.jl`, and `Ipopt.jl` diff --git a/docs/jump/README.md b/docs/jump/README.md index 08d79698..4e8a8b8f 100644 --- a/docs/jump/README.md +++ b/docs/jump/README.md @@ -19,7 +19,6 @@ include: - Infinite variables (decision functions) (for example, ``y(t, x)``) - Derivatives (for example, ``\frac{\partial y(t, x)}{\partial t}``) - Measures (for example, ``\int_{t \in T}y(t, x)dt`` and ``\mathbb{E}[y(\xi)]``) -- First class nonlinear modeling The unifying modeling abstraction behind `InfiniteOpt` captures a wide spectrum of disciplines which include dynamic, PDE, stochastic, and semi-infinite diff --git a/docs/make.jl b/docs/make.jl index b1f63a23..798337c1 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -1,4 +1,4 @@ -using Documenter, InfiniteOpt, Distributions, Literate, Random +using Documenter, InfiniteOpt, Distributions, Literate, Random, Pkg if !@isdefined(EXAMPLE_DIR) const EXAMPLE_DIR = joinpath(@__DIR__, "src", "examples") @@ -84,8 +84,8 @@ makedocs(; "Measures" => "manual/measure.md", "Objectives" => "manual/objective.md", "Constraints" => "manual/constraint.md", - "Model Transcription" => "manual/transcribe.md", - "Optimization" => "manual/optimize.md", + "Backends" => "manual/backend.md", + "TranscriptionOpt" => "manual/transcribe.md", "Results" => "manual/result.md" ], "Development" => [ @@ -94,21 +94,21 @@ makedocs(; "Style Guide" => "develop/style.md" ], ], - repo = "https://github.com/infiniteopt/InfiniteOpt.jl/blob/{commit}{path}#L{line}", + # repo = "https://github.com/infiniteopt/InfiniteOpt.jl/blob/{commit}{path}#L{line}", sitename = "InfiniteOpt.jl", authors = "Joshua Pulsipher and Weiqi Zhang", - doctest = true, + modules = [InfiniteOpt], checkdocs = :exports, linkcheck = true, - linkcheck_ignore = [r"https://www.youtube.com/.*"], - strict = true, + linkcheck_ignore = [r"https://www.youtube.com/.*", "https://github.com/infiniteopt/InfiniteOpt.jl/blob/master/test/extensions/backend.jl"], # TODO remove before merging format = Documenter.HTML( # See https://github.com/JuliaDocs/Documenter.jl/issues/868 prettyurls = get(ENV, "CI", nothing) == "true", analytics = "UA-178297470-1", collapselevel = 1, assets = ["assets/extra_styles.css"], - + size_threshold = 250 * 2^10, # KiB + size_threshold_warn = 150 * 2^10 # KiB ) ) diff --git a/docs/src/develop/extensions.md b/docs/src/develop/extensions.md index 16e9a715..ab9ce8bf 100644 --- a/docs/src/develop/extensions.md +++ b/docs/src/develop/extensions.md @@ -303,7 +303,7 @@ function InfiniteOpt.make_indexed_derivative_expr( order::Int, idx, supps::Vector{Float64}, # ordered - write_model::JuMP.AbstractModel, + write_model::Union{InfiniteModel, AbstractTransformationBackend}, ::ExplicitEuler, # put extra data args here (none in this case) ) @@ -441,7 +441,7 @@ function InfiniteOpt.MeasureToolbox.generate_integral_data( method::UnifGrid; num_supports::Int = InfiniteOpt.DefaultNumSupports, weight_func::Function = InfiniteOpt.default_weight - )::InfiniteOpt.DiscreteMeasureData + ) increment = (upper_bound - lower_bound) / (num_supports - 1) supports = [lower_bound + (i - 1) * increment for i in 1:num_supports] coeffs = ones(num_supports) / num_supports * (upper_bound - lower_bound) @@ -544,11 +544,11 @@ 
function InfiniteOpt.parameter_refs(data::DiscreteVarianceData) return data.parameter_refs end -function InfiniteOpt.supports(data::DiscreteVarianceData)::Vector +function InfiniteOpt.supports(data::DiscreteVarianceData) return data.supports end -function InfiniteOpt.support_label(data::DiscreteVarianceData)::DataType +function InfiniteOpt.support_label(data::DiscreteVarianceData) return data.label end @@ -560,7 +560,7 @@ end We also need to extend [`InfiniteOpt.add_supports_to_parameters`](@ref) since support points will be used for measure evaluation later: ```jldoctest measure_data; output = false -function InfiniteOpt.add_supports_to_parameters(data::DiscreteVarianceData)::Nothing +function InfiniteOpt.add_supports_to_parameters(data::DiscreteVarianceData) pref = parameter_refs(data) supps = supports(data) label = support_label(data) @@ -599,8 +599,8 @@ We can define variance measures now, but now let's extend function InfiniteOpt.expand_measure( expr::JuMP.AbstractJuMPScalar, data::DiscreteVarianceData, - write_model::JuMP.AbstractModel - )::JuMP.AbstractJuMPScalar + write_model::Union{InfiniteModel, AbstractTransformationBackend} + ) # define the expectation data expect_data = DiscreteMeasureData( data.parameter_refs, @@ -635,7 +635,7 @@ function variance( name::String = "Var", num_supports::Int = 10, use_existing::Bool = false - )::GeneralVariableRef + ) # get the supports if use_existing supps = supports.(params) @@ -703,7 +703,7 @@ types of supports and extend `support_label`: ```jldoctest info_model; output = false struct RandomInternal <: InternalLabel end -function InfiniteOpt.support_label(info::RandomGenerativeInfo)::Type{RandomInternal} +function InfiniteOpt.support_label(info::RandomGenerativeInfo) return RandomInternal end @@ -714,7 +714,7 @@ Finally, let's extend `make_generative_supports` to create a vector of the generative supports based on a `RandomGenerativeInfo` and the existing model supports which are passed in the function as input: ```jldoctest info_model; output = false -function InfiniteOpt.make_generative_supports(info::RandomGenerativeInfo, pref, supps)::Vector{Float64} +function InfiniteOpt.make_generative_supports(info::RandomGenerativeInfo, pref, supps) num_existing = length(supps) num_existing <= 1 && error("`$pref` doesn't have enough supports.") num_internal = info.attr @@ -734,44 +734,88 @@ Our extension is done and now `RandomGenerativeInfo` can be incorporated by a `GenerativeDerivativeMethod` we create or an `AbstractMeasureData` object of our choice like `FunctionalDiscreteMeasureData`. -## [Optimizer Models] (@id extend_optimizer_model) +## [Transformation Backends] (@id extend_backends) `InfiniteOpt` provides a convenient interface and abstraction for modeling infinite-dimensional optimization problems. By default, `InfiniteModel`s are -reformulated into a solvable `JuMP.Model` (referred to as an optimizer model) -via `TranscriptionOpt` which discretizes the model in accordance with the -infinite parameter supports. However, users may wish to employ some other -reformulation method to produce the optimizer model. This section will explain -how this can be done in `InfiniteOpt`. A template for implementing this -extension is provided in -[`./test/extensions/optimizer_model.jl`](https://github.com/infiniteopt/InfiniteOpt.jl/blob/master/test/extensions/optimizer_model.jl). +reformulated into a solvable `JuMP.Model` via `TranscriptionOpt.TranscriptionBackend` +which discretizes the model in accordance with the infinite parameter supports. 
+However, users may wish to employ some other transformation method to produce +the transformation backend. This section will explain how this can be done in +`InfiniteOpt`. A template for implementing this extension is provided in +[`./test/extensions/backend.jl`](https://github.com/infiniteopt/InfiniteOpt.jl/blob/master/test/extensions/backend.jl). Our default sub-module `InfiniteOpt.TranscriptionOpt` also serves as a good example. -!!! note - We are currently working on a fundamental overhaul of the optimizer model - interface. The new interface will be much more modular, will permit non-JuMP - backends, and should generally make extending more intuitive. Track the progress - [here](https://github.com/infiniteopt/InfiniteOpt.jl/issues/105). - -A new reformulation method and its corresponding optimizer model can be +A new transformation approach and its corresponding transformation backend can be extended using the following steps: 1. Define a `mutable struct` for variable/constraint mappings and other needed info (required) -2. Define a `JuMP.Model` constructor that uses (1.) in `Model.ext[:my_ext_key]` (recommended) -3. Extend [`build_optimizer_model!`](@ref) to in accordance with the new optimizer model (required) -4. Extend [`optimizer_model_variable`](@ref) if possible (enables result queries) -5. Extend [`optimizer_model_expression`](@ref) if possible (enables result queries) -6. Extend [`optimizer_model_constraint`](@ref) if possible (enables result queries) -7. Extend [`InfiniteOpt.variable_supports`](@ref) if appropriate -8. Extend [`InfiniteOpt.expression_supports`](@ref) if appropriate -9. Extend [`InfiniteOpt.constraint_supports`](@ref) if appropriate -10. If steps 4-6 are skipped then extend the following: +2. Define an [`AbstractTransformationBackend`](@ref) (required) +3. Extend [`Base.empty!`](@ref Base.empty!(::AbstractTransformationBackend)) for the backend (required) +4. Extend [`build_transformation_backend!`](@ref build_transformation_backend!(::InfiniteModel, ::AbstractTransformationBackend)) (required) +5. 
If appropriate and NOT a [`JuMPBackend`](@ref), extend the following:
+   - [`transformation_model`](@ref transformation_model(::AbstractTransformationBackend))
+   - [`transformation_data`](@ref transformation_data(::AbstractTransformationBackend))
+   - [`JuMP.set_attribute`](@ref JuMP.set_attribute(::AbstractTransformationBackend, ::Any, ::Any))
+   - [`JuMP.get_attribute`](@ref JuMP.get_attribute(::AbstractTransformationBackend, ::Any))
+   - [`JuMP.optimize!`](@ref JuMP.optimize!(::AbstractTransformationBackend))
+   - [`JuMP.set_optimizer`](@ref JuMP.set_optimizer(::AbstractTransformationBackend, ::Any))
+   - [`JuMP.set_silent`](@ref JuMP.set_silent(::AbstractTransformationBackend))
+   - [`JuMP.unset_silent`](@ref JuMP.unset_silent(::AbstractTransformationBackend))
+   - [`JuMP.set_time_limit_sec`](@ref JuMP.set_time_limit_sec(::AbstractTransformationBackend, ::Any))
+   - [`JuMP.time_limit_sec`](@ref JuMP.time_limit_sec(::AbstractTransformationBackend))
+   - [`JuMP.unset_time_limit_sec`](@ref JuMP.unset_time_limit_sec(::AbstractTransformationBackend))
+   - [`JuMP.solver_name`](@ref JuMP.solver_name(::AbstractTransformationBackend))
+   - [`JuMP.bridge_constraints`](@ref JuMP.bridge_constraints(::AbstractTransformationBackend))
+   - [`JuMP.add_bridge`](@ref JuMP.add_bridge(::AbstractTransformationBackend, ::Any))
+   - [`JuMP.print_active_bridges`](@ref JuMP.print_active_bridges(::IO,::AbstractTransformationBackend))
+   - [`JuMP.set_string_names_on_creation`](@ref JuMP.set_string_names_on_creation(::AbstractTransformationBackend))
+   - [`JuMP.set_string_names_on_creation`](@ref JuMP.set_string_names_on_creation(::AbstractTransformationBackend, ::Any))
+   - [`JuMP.compute_conflict!`](@ref JuMP.compute_conflict!(::AbstractTransformationBackend))
+   - [`JuMP.copy_conflict`](@ref JuMP.copy_conflict(::AbstractTransformationBackend))
+   - [`JuMP.mode`](@ref JuMP.mode(::AbstractTransformationBackend))
+   - [`JuMP.backend`](@ref JuMP.backend(::AbstractTransformationBackend))
+   - [`JuMP.unsafe_backend`](@ref JuMP.unsafe_backend(::AbstractTransformationBackend))
+6. Extend the following, if possible (also enables result queries for `JuMPBackend`s):
+   - [`transformation_variable`](@ref transformation_variable(::GeneralVariableRef, ::AbstractTransformationBackend))
+   - [`transformation_expression`](@ref transformation_expression(::Any, ::AbstractTransformationBackend))
+   - [`transformation_constraint`](@ref transformation_constraint(::InfOptConstraintRef, ::AbstractTransformationBackend))
+7. Extend the following, if appropriate:
+   - [`InfiniteOpt.variable_supports`](@ref)
+   - [`InfiniteOpt.expression_supports`](@ref)
+   - [`InfiniteOpt.constraint_supports`](@ref)
+8. 
As appropriate and if NOT a `JuMPBackend`, extend the following:
+   - [`JuMP.termination_status`](@ref JuMP.termination_status(::AbstractTransformationBackend))
+   - [`JuMP.raw_status`](@ref JuMP.raw_status(::AbstractTransformationBackend))
+   - [`JuMP.solve_time`](@ref JuMP.solve_time(::AbstractTransformationBackend))
+   - [`JuMP.simplex_iterations`](@ref JuMP.simplex_iterations(::AbstractTransformationBackend))
+   - [`JuMP.barrier_iterations`](@ref JuMP.barrier_iterations(::AbstractTransformationBackend))
+   - [`JuMP.node_count`](@ref JuMP.node_count(::AbstractTransformationBackend))
+   - [`JuMP.objective_bound`](@ref JuMP.objective_bound(::AbstractTransformationBackend))
+   - [`JuMP.relative_gap`](@ref JuMP.relative_gap(::AbstractTransformationBackend))
+   - [`JuMP.result_count`](@ref JuMP.result_count(::AbstractTransformationBackend))
+   - [`JuMP.primal_status`](@ref JuMP.primal_status(::AbstractTransformationBackend))
+   - [`JuMP.dual_status`](@ref JuMP.dual_status(::AbstractTransformationBackend))
+   - [`JuMP.has_values`](@ref JuMP.has_values(::AbstractTransformationBackend))
+   - [`JuMP.has_duals`](@ref JuMP.has_duals(::AbstractTransformationBackend))
+   - [`JuMP.objective_value`](@ref JuMP.objective_value(::AbstractTransformationBackend))
+   - [`JuMP.dual_objective_value`](@ref JuMP.dual_objective_value(::AbstractTransformationBackend))
+   - [`JuMP.lp_sensitivity_report`](@ref JuMP.lp_sensitivity_report(::AbstractTransformationBackend))
+9. If Step 6 was skipped and/or the backend is NOT a `JuMPBackend` then extend the following:
    - [`InfiniteOpt.map_value`](@ref) (enables `JuMP.value`)
+   - [`InfiniteOpt.map_infinite_parameter_value`](@ref) (enables `JuMP.value` for infinite parameters)
    - [`InfiniteOpt.map_optimizer_index`](@ref) (enables `JuMP.optimizer_index`)
+   - [`InfiniteOpt.map_reduced_cost`](@ref) (enables `JuMP.reduced_cost`)
+   - [`InfiniteOpt.map_shadow_price`](@ref) (enables `JuMP.shadow_price`)
    - [`InfiniteOpt.map_dual`](@ref) (enables `JuMP.dual`)
-11. Extend [`InfiniteOpt.add_point_variable`](@ref) and
+10. Extend [`InfiniteOpt.add_point_variable`](@ref) and
    [`InfiniteOpt.add_semi_infinite_variable`](@ref) to use
    [`expand_measure`](@ref) without modifying the infinite model.
 
+This may seem like a lot of work, but the majority of the above steps can be
+skipped for [`JuMPBackend`](@ref)s as exemplified below. A complete extension
+showing all of the above is provided in the extension template file.
+
 For the sake of example, let's suppose we want to define a reformulation method
 for `InfiniteModel`s that are 2-stage stochastic programs (i.e., only
 `DistributionDomain`s are used, infinite variables are random 2nd stage variables,
 and finite variables are 1st stage variables). In particular, let's make a simple
 method that replaces the infinite parameters with their mean values, giving us
 the deterministic mean-valued problem.
 
-First, let's define the `mutable struct` that will be used to store our variable
-and constraint mappings. This case it is quite simple since our deterministic
-model will have a 1-to-1 mapping:
+First, let's define the (potentially mutable) `struct` that will be used to store
+our variable and constraint mappings. 
In this case, it is quite simple since our
+deterministic model will have a 1-to-1 mapping:
 ```jldoctest opt_model; output = false
 using InfiniteOpt, Distributions
 
-mutable struct DeterministicData
+struct DeterministicData
     # variable and constraint mapping
     infvar_to_detvar::Dict{GeneralVariableRef, VariableRef}
     infconstr_to_detconstr::Dict{InfOptConstraintRef, ConstraintRef}
@@ -800,52 +844,46 @@ end
 
 ```
 
-Now let's define a constructor for optimizer models that will use
-`DeterministicData` and let's define a method to access that data:
+Now let's define the transformation backend based on [`JuMPBackend`](@ref)
+that will use a tag `Deterministic`:
 ```jldoctest opt_model; output = false
-const DetermKey = :DetermData
+struct Deterministic <: AbstractJuMPTag end
 
-function DeterministicModel(args...; kwargs...)::Model
-    # initialize the JuMP Model
-    model = Model(args...; kwargs...)
-    model.ext[DetermKey] = DeterministicData()
-    return model
-end
+const DeterministicBackend = JuMPBackend{Deterministic, Float64, DeterministicData}
 
-function deterministic_data(model::Model)::DeterministicData
-    haskey(model.ext, DetermKey) || error("Model is not a DeterministicModel.")
-    return model.ext[DetermKey]
+# Constructor
+function DeterministicBackend(; kwargs...)
+    return JuMPBackend{Deterministic}(Model(; kwargs...), DeterministicData())
+end
+function DeterministicBackend(optimizer_constructor; kwargs...)
+    backend = DeterministicBackend(; kwargs...)
+    set_optimizer(backend.model, optimizer_constructor)
+    return backend
end

 # output
 
-deterministic_data (generic function with 1 method)
+JuMPBackend{Deterministic, Float64, DeterministicData}
 ```
-
-!!! note
-    The use of an extension key such as `DetermKey` is required since it used to
-    dispatch reformulation and querying methods making optimizer model
-    extensions possible.
-
 With the constructor, we can now specify that a given `InfiniteModel` uses a
-`DeterministicModel` instead of a `TranscriptionModel` using the `OptimizerModel`
-keyword argument or via [`set_optimizer_model`](@ref):
+`DeterministicBackend` instead of a `TranscriptionBackend` by passing it to the
+`InfiniteModel` constructor or via [`set_transformation_backend`](@ref):
 ```jldoctest opt_model; output = false
 using Ipopt
 
-# Make model using Ipopt and DeterministicModels
-model = InfiniteModel(optimizer_with_attributes(Ipopt.Optimizer, "print_level" => 0),
-                      OptimizerModel = DeterministicModel)
+# Make a model using Ipopt and a DeterministicBackend
+dbackend = DeterministicBackend(optimizer_with_attributes(Ipopt.Optimizer, "print_level" => 0))
+model = InfiniteModel(dbackend)
 
 # Or equivalently
 model = InfiniteModel()
-set_optimizer_model(model, DeterministicModel())
+set_transformation_backend(model, DeterministicBackend())
 set_optimizer(model, optimizer_with_attributes(Ipopt.Optimizer, "print_level" => 0))
 
 # output
 
 ```
-Now `model` uses a `DeterministicModel` as its optimizer model! With that we can
+Now `model` uses a `DeterministicBackend` as its transformation backend! With that we can
 build our `InfiniteModel` as normal, for example:
 ```jldoctest opt_model
 @infinite_parameter(model, ξ ~ Uniform())
@@ -868,20 +906,34 @@ Subject to
 ```
 
 We have defined our `InfiniteModel`, but now we need to specify how to
-reformulate it into a `DeterministicModel`. This is accomplished by extending
-[`build_optimizer_model!`](@ref). This will enable the use of `optimize!`. 
First,
-let's define an internal function `_make_expression` that will use dispatch to
-convert and `InfiniteOpt` expression into a `JuMP` expression using the mappings
-stored in `opt_model` in its `DeterministicData`:
+reformulate it into a `DeterministicBackend`. This is accomplished by extending
+[`build_transformation_backend!`](@ref build_transformation_backend!(::InfiniteModel, ::AbstractTransformationBackend))
+which will enable the use of `optimize!`. A necessary preliminary step, though, is to
+define `Base.empty!` for `DeterministicData`:
+```jldoctest opt_model; output = false
+function Base.empty!(data::DeterministicData)
+    empty!(data.infvar_to_detvar)
+    empty!(data.infconstr_to_detconstr)
+    return data
+end
+
+# output
+
+```
+This enables the backend to be cleared out before it is rebuilt, which is
+necessary to allow for modifications to the model.
+Now, let's define an internal function `_make_expression` that will use dispatch to
+convert an `InfiniteOpt` expression into a `JuMP` expression using the mappings
+stored in `backend`'s `DeterministicData`:
 ```jldoctest opt_model; output = false
 ## Make dispatch methods for converting InfiniteOpt expressions
 # GeneralVariableRef
-function _make_expression(opt_model::Model, expr::GeneralVariableRef)
-    return _make_expression(opt_model, expr, index(expr))
+function _make_expression(backend::DeterministicBackend, expr::GeneralVariableRef)
+    return _make_expression(backend, expr, index(expr))
 end
 # IndependentParameterRef
 function _make_expression(
-    opt_model::Model, 
+    backend::DeterministicBackend, 
     expr::GeneralVariableRef, 
     ::IndependentParameterIndex
     )
@@ -889,7 +941,7 @@ function _make_expression(
 end
 # FiniteParameterRef
 function _make_expression(
-    opt_model::Model, 
+    backend::DeterministicBackend, 
    expr::GeneralVariableRef, 
     ::FiniteParameterIndex
     )
@@ -897,31 +949,31 @@ function _make_expression(
 end
 # DependentParameterRef
 function _make_expression(
-    opt_model::Model, 
+    backend::DeterministicBackend, 
     expr::GeneralVariableRef, 
     ::DependentParameterIndex
    )
-    return mean(infinite_domain(expr).distribution) # assuming valid dist.
+    return mean(infinite_domain(expr).distribution) # assuming valid distribution
 end
 # DecisionVariableRef
 function _make_expression(
-    opt_model::Model, 
+    backend::DeterministicBackend, 
     expr::GeneralVariableRef, 
     ::Union{InfiniteVariableIndex, FiniteVariableIndex}
    )
-    return deterministic_data(opt_model).infvar_to_detvar[expr]
+    return backend.data.infvar_to_detvar[expr]
 end
 # MeasureRef --> assume is expectation
 function _make_expression(
-    opt_model::Model, 
+    backend::DeterministicBackend, 
     expr::GeneralVariableRef, 
     ::MeasureIndex
    )
-    return _make_expression(opt_model, measure_function(expr))
+    return _make_expression(backend, measure_function(expr))
end
-# AffExpr/QuadExpr
-function _make_expression(opt_model::Model, expr::Union{GenericAffExpr, GenericQuadExpr, GenericNonlinearExpr})
-    return map_expression(v -> _make_expression(opt_model, v), expr)
+# AffExpr/QuadExpr/NonlinearExpr
+function _make_expression(backend::DeterministicBackend, expr::Union{GenericAffExpr, GenericQuadExpr, GenericNonlinearExpr})
+    return map_expression(v -> _make_expression(backend, v), expr)
 end
 
 # output
 
@@ -934,27 +986,22 @@ expectations. Naturally, a full extension should include checks to enforce that
 such assumptions hold. Notice that [`map_expression`](@ref) is useful
 for converting expressions.
 
-Now let's extend [`build_optimizer_model!`](@ref) for `DeterministicModel`s. 
-Such extensions should build an optimizer model in place and in general should
-employ the following:
-- [`clear_optimizer_model_build!`](@ref InfiniteOpt.clear_optimizer_model_build!(::InfiniteModel))
-- [`set_transformation_backend_ready`](@ref).
-In place builds without the use of `clear_optimizer_model_build!` are also
-possible, but will require some sort of active mapping scheme to update in
-accordance with the `InfiniteModel` in the case that the
-optimizer model is built more than once. Thus, for simplicity we extend
-`build_optimizer_model!` below using an initial clearing scheme:
+Now let's extend
+[`build_transformation_backend!`](@ref build_transformation_backend!(::InfiniteModel, ::AbstractTransformationBackend))
+for `DeterministicBackend`s. This should build the backend in-place, so we should
+also be sure it clears out any previous build with `Base.empty!`:
 ```jldoctest opt_model; output = false
-function InfiniteOpt.build_optimizer_model!(
+function InfiniteOpt.build_transformation_backend!(
     model::InfiniteModel,
-    key::Val{DetermKey}
-    )::Nothing
+    backend::DeterministicBackend
+    )
     # TODO check that `model` is a stochastic model
 
-    # clear the model for a build/rebuild
-    determ_model = InfiniteOpt.clear_optimizer_model_build!(model)
+    # empty the model for a build/rebuild
+    empty!(backend)
+    backend.model.operator_counter = 0 # clears out any previous user-defined operators
 
     # add user-defined nonlinear operators if there are any
-    add_operators_to_jump(determ_model, model)
+    add_operators_to_jump(backend.model, model)
 
     # add variables
     for vref in all_variables(model)
@@ -971,36 +1018,34 @@ function InfiniteOpt.build_optimizer_model!(
         end
         info = VariableInfo(!isnan(lb), lb, !isnan(ub), ub, is_fixed(vref), lb,
                             !isnan(start), start, is_binary(vref), is_integer(vref))
-        new_vref = add_variable(determ_model, ScalarVariable(info), name(vref))
-        deterministic_data(determ_model).infvar_to_detvar[vref] = new_vref
+        new_vref = add_variable(backend.model, ScalarVariable(info), name(vref))
+        backend.data.infvar_to_detvar[vref] = new_vref
     end
 
     # add the objective
-    obj_func = _make_expression(determ_model, objective_function(model))
-    set_objective(determ_model, objective_sense(model), obj_func)
+    obj_func = _make_expression(backend, objective_function(model))
+    set_objective(backend.model, objective_sense(model), obj_func)
 
     # add the constraints
     for cref in all_constraints(model, Union{GenericAffExpr, GenericQuadExpr, GenericNonlinearExpr})
         constr = constraint_object(cref)
-        new_func = _make_expression(determ_model, constr.func)
+        new_func = _make_expression(backend, constr.func)
         new_constr = build_constraint(error, new_func, constr.set)
-        new_cref = add_constraint(determ_model, new_constr, name(cref))
-        deterministic_data(determ_model).infconstr_to_detconstr[cref] = new_cref
+        new_cref = add_constraint(backend.model, new_constr, name(cref))
+        backend.data.infconstr_to_detconstr[cref] = new_cref
     end
-
-    # update the status
-    set_transformation_backend_ready(model, true)
     return
 end
 
# output
 
 ```
 
-Now we can build our optimizer model to obtain a `DeterministicModel` which can
-be leveraged to call `optimize!`
+Note that Step 5 can be skipped since we are using the `JuMPBackend` API, which
+already provides all the needed methods. 
Now the backend is built automatically when we invoke
+`optimize!`:
 ```jldoctest opt_model
 optimize!(model)
-print(optimizer_model(model))
+print(transformation_model(model))
 
 # output
 Min z + y[1] + y[2]
 Subject to
@@ -1012,51 +1057,50 @@ Subject to
 y[2] ≥ 0
```
Note that better variable naming could be used with the reformulated infinite
-variables. Moreover, in general extensions of [`build_optimizer_model!`](@ref)
-should account for the possibility that `InfiniteModel` contains constraints wiht
+variables. Moreover, in general extensions of [`build_transformation_backend!`](@ref)
+should account for the possibility that `InfiniteModel` contains constraints with
 [`DomainRestrictions`](@ref) as accessed via [`domain_restrictions`](@ref).
 
-Now that we have optimized out `InfiniteModel` via the use the of a
-`DeterministicModel`, we probably will want to access the results. All queries
-are enabled when we extend [`optimizer_model_variable`](@ref),
-[`optimizer_model_expression`](@ref), and [`optimizer_model_constraint`](@ref)
-to return the variable(s)/expression(s)/constraint(s) in the
-optimizer model corresponding to their `InfiniteModel` counterparts. These will
-use the `mutable struct` of mapping data and should error if no mapping can be
-found, Let's continue our example using `DeterministicModel`s:
+Now that we have optimized our `InfiniteModel` via the use of a
+`DeterministicBackend`, we probably want to access the results. All queries
+are enabled via Step 6 where we extend:
+- [`transformation_variable`](@ref transformation_variable(::GeneralVariableRef, ::AbstractTransformationBackend))
+- [`transformation_expression`](@ref transformation_expression(::Any, ::AbstractTransformationBackend))
+- [`transformation_constraint`](@ref transformation_constraint(::InfOptConstraintRef, ::AbstractTransformationBackend))
+These return the corresponding variable(s)/expression(s)/constraint(s) in the
+backend. They use the `DeterministicData` and should error if no mapping can be
+found. 
```jldoctest opt_model; output = false
-function InfiniteOpt.optimizer_model_variable(
+function InfiniteOpt.transformation_variable(
    vref::GeneralVariableRef,
-    key::Val{DetermKey}
+    backend::DeterministicBackend
    )
-    model = optimizer_model(JuMP.owner_model(vref))
-    map_dict = deterministic_data(model).infvar_to_detvar
-    haskey(map_dict, vref) || error("Variable $vref not used in the optimizer model.")
+    map_dict = backend.data.infvar_to_detvar
+    haskey(map_dict, vref) || error("Variable $vref not used in the transformation backend.")
    return map_dict[vref]
end

-function InfiniteOpt.optimizer_model_expression(
+function InfiniteOpt.transformation_expression(
    expr::JuMP.AbstractJuMPScalar,
-    key::Val{DetermKey}
+    backend::DeterministicBackend
    )
-    model = optimizer_model(InfiniteOpt._model_from_expr(expr))
-    return _make_expression(model, expr)
+    return _make_expression(backend, expr)
end

-function InfiniteOpt.optimizer_model_constraint(
+function InfiniteOpt.transformation_constraint(
    cref::InfOptConstraintRef,
-    key::Val{DetermKey}
+    backend::DeterministicBackend
    )
-    model = optimizer_model(JuMP.owner_model(cref))
-    map_dict = deterministic_data(model).infconstr_to_detconstr
-    haskey(map_dict, cref) || error("Constraint $cref not used in the optimizer model.")
+    map_dict = backend.data.infconstr_to_detconstr
+    haskey(map_dict, cref) || error("Constraint $cref not used in the transformation backend.")
    return map_dict[cref]
end

# output


```
-With these extensions we can now access all the result queries. For example:
+With these extensions we can now access all the result queries (Steps 8 and 9 can be skipped for `JuMPBackend`s). For example:
```jldoctest opt_model
julia> termination_status(model)
LOCALLY_SOLVED::TerminationStatusCode = 4
@@ -1072,18 +1116,8 @@ julia> value.(y)
 julia> optimizer_index(z)
 MathOptInterface.VariableIndex(3)
 ```
-
-!!! note
-    If [`optimizer_model_variable`](@ref), [`optimizer_model_expression`](@ref),
-    and/or [`optimizer_model_constraint`](@ref) cannot be extended due to the
-    nature of the reformulation then please refer to step 10 of the extension
-    steps listed at the beginning of this section.
-
-Furthermore, if appropriate for the given reformulation the following should be
-extended:
-- [`InfiniteOpt.variable_supports`](@ref) to enable `supports` on variables)
-- [`InfiniteOpt.expression_supports`](@ref) to enable `supports` on expressions)
-- [`InfiniteOpt.constraint_supports`](@ref) to enable `supports` on constraints)
+We also skip Steps 7 and 10 since they are not applicable to this particular
+example.
 
 That's it!
diff --git a/docs/src/guide/constraint.md b/docs/src/guide/constraint.md
index 5a026fcb..6fa2a1e1 100644
--- a/docs/src/guide/constraint.md
+++ b/docs/src/guide/constraint.md
@@ -38,7 +38,7 @@ julia> @variable(model, z[1:2]);
 ```
 
 !!! note
-    Unlike previous versions, `InfiniteOpt` now supports all the constraints
+    `InfiniteOpt` supports all the constraint
     types offered by `JuMP`, including vector and semi-definite constraints!
     Please see [JuMP's constraint documentation](https://jump.dev/JuMP.jl/v1/manual/constraints/#Constraints)
     for a thorough explanation of the supported types and syntax.
diff --git a/docs/src/guide/derivative.md b/docs/src/guide/derivative.md
index 414dd9c9..2b58e651 100644
--- a/docs/src/guide/derivative.md
+++ b/docs/src/guide/derivative.md
@@ -101,7 +101,8 @@ Here, the default method is backward finite difference. These are enforced on
 an infinite parameter basis (i.e., the parameter the differential operator
 is taken with respect to). 
Unlike `FiniteDifference`, which directly handles derivatives
 of any order, `OrthogonalCollocation` is limited to 1st order derivatives and higher
-order derivatives will be reformulated, accordingly. In the above examples,
+order derivatives are automatically reformulated into a system of 1st order derivatives.
+In the above examples,
 any derivatives taken with respect to `t` will use orthogonal collocation on
 finite elements since that is what we specified as our derivative method. More
 information is provided in the [Derivative Methods](@ref) Section below. However, we
@@ -450,7 +451,7 @@ solutions can be done efficiently and seamlessly. This is also the recommended
 workflow. However, we do provide user-accessible derivative evaluation methods
 that generate the auxiliary derivative equations and add them to the
 `InfiniteModel`. This can be useful for visualizing how these techniques work and
 can be helpful for
-user-defined reformulation extensions (i.e., optimizer model extensions).
+user-defined reformulation extensions (i.e., transformation backend extensions).
 
 We can build these relations for a particular derivative via [`evaluate`](@ref).
 For example, let's build evaluation equations for `d1`:
diff --git a/docs/src/guide/expression.md b/docs/src/guide/expression.md
index 06a0efc7..73f6b8d6 100644
--- a/docs/src/guide/expression.md
+++ b/docs/src/guide/expression.md
@@ -18,20 +18,17 @@ Expressions in `InfiniteOpt` (also called functions) refer to mathematical
 statements involving variables and numbers. Thus, these comprise the
 mathematical expressions that are used in measures, objectives, and
 constraints. Programmatically, `InfiniteOpt` simply extends `JuMP` expression
-types and methods principally pertaining to affine and quadratic mathematical
-expressions. A natively supported abstraction for general nonlinear expressions
-is planned for development since that of `JuMP` is not readily extendable.
+types and methods.
 
 ## [Parameter Functions](@id par_func_docs)
-As described further below, InfiniteOpt.jl only supports affine and quadratic
-expressions in its current rendition. However, there several use cases where we
-might want to provide a more complex known function of infinite parameter(s) (e.g.,
+Rather than construct an explicit symbolic expression, we
+might want to provide a more complex function of infinite parameter(s) (e.g.,
 nonlinear setpoint tracking). Thus, we provide parameter function objects
-that given a particular realization of infinite parameters will output a scalar
-value. Note that this can be interpreted as an infinite variable that is
-constrained to a particular known function. This is accomplished via
+that wrap arbitrary Julia functions that take infinite parameters as input and output
+a scalar value. Mathematically, these can be interpreted as infinite variables
+constrained to a particular known function. These are created via
-[`@parameter_function`](@ref) or [`parameter_function`](@ref) and is exemplified
-by defining a parameter function `f(t)` that uses `sin(t)`:
+[`@parameter_function`](@ref) or [`parameter_function`](@ref), as exemplified
+below by defining a parameter function `f(t)` that uses `sin(t)`:
 ```jldoctest param_func
 julia> using InfiniteOpt;
 
 julia> model = InfiniteModel();
 
 julia> @infinite_parameter(model, t in [0, 10]);
 
 julia> @parameter_function(model, f == sin(t))
 f(t)
 ```
-Here we created a parameter function object, added it to `model`, and
+Here, we created a parameter function object, added it to `model`, and
 then created a Julia variable `f` that serves as a `GeneralVariableRef` that
 points to it. 
From here we can treat `f` as a normal infinite variable and use it with
 measures, derivatives, and constraints. For example, we can do the following:
@@ -59,7 +56,7 @@ julia> @constraint(model, y - f <= 0)
 y(t) - f(t) ≤ 0, ∀ t ∈ [0, 10]
 ```
 We can also define parameter functions that depend on multiple infinite
-parameters even use an anonymous function if preferred:
+parameters and even use an anonymous Julia function if desired:
 ```jldoctest param_func
 julia> @infinite_parameter(model, x[1:2] in [-1, 1]);
 
@@ -77,9 +74,8 @@ julia> @parameter_function(model, pfunc_alt[i = 1:3] == t -> mysin(t, as[i], b =
 pfunc_alt[2](t)
 pfunc_alt[3](t)
 ```
-The main recommended use case for [`parameter_function`](@ref) is that it is
-amenable to define complex anonymous functions via a do-block which is useful
-for applications like defining a time-varied setpoint:
+The use of [`parameter_function`](@ref) is convenient for enabling do-block syntax,
+which is often handy. For instance, when defining a time-varied setpoint for optimal control:
 ```jldoctest param_func
 julia> setpoint = parameter_function(t, name = "setpoint") do t_supp
           if t_supp <= 5
@@ -95,7 +91,7 @@ functions:
 [`@parameter_function`](@ref) and [`parameter_function`](@ref). Beyond this,
 there are a number of query and modification methods that can be employed for
 parameter functions and these are detailed in the
-[technical manual](@ref par_func_manual) Section below.
+[technical manual](@ref par_func_manual).
 
 ## Variable Hierarchy
 Expressions employ variable reference types inherited from
@@ -110,13 +106,10 @@ green and the concrete types are shown blue.
 
-In consistently with `JuMP` expression support, [`GeneralVariableRef`](@ref)
+In consistency with `JuMP` expression support, [`GeneralVariableRef`](@ref)
 exists as a variable reference type that is able to represent any of the above
 concrete subtypes of [`DispatchVariableRef`](@ref). This allows the expression
-containers to be homogeneous in variable type. This is a paradigm shift from
-previous versions of `InfiniteOpt` that used the hierarchy of types directly
-to construct expressions. This behavior led to stability and performance
-limitations and thus a has been discontinued.
+containers to be homogeneous in variable type, which improves performance.
 
-However, the variable hierarchy is still used to create for variable methods.
-To accomplish this appropriate `GeneralVariableRef` dispatch methods are implemented
+However, the variable hierarchy is still used to implement variable methods.
+To accomplish this, appropriate `GeneralVariableRef` dispatch methods are implemented
 (which are detailed in User Methods section at the bottom of this page) that
 utilize [`dispatch_variable_ref`](@ref) to create the appropriate concrete
 subtype of `DispatchVariableRef` and call the appropriate underlying method.
diff --git a/docs/src/guide/finite_parameter.md b/docs/src/guide/finite_parameter.md
index 256815c5..11170339 100644
--- a/docs/src/guide/finite_parameter.md
+++ b/docs/src/guide/finite_parameter.md
@@ -12,16 +12,11 @@ as needed. Furthermore, at the optimization step these parameters are replaced
-with their numeric values. Thus, not adding unnecessary decision variables as is
-typically done in `JuMP` models using `JuMP.fix` on placeholder variables.
+with their numeric values. Thus, unnecessary decision variables are not added, as
+is typically done in `JuMP` models using `JuMP.fix` on placeholder variables.
 
-!!! note
-    The syntax of [`@finite_parameter`](@ref) has changed with from previous
-    versions for enhanced long term support. Please consult the documentation
-    below for the updated syntax.
-
 !!! 
warning
     In some cases, using [`@finite_parameter`](@ref) can unexpectedly make
-    the underlying `JuMP` model contain nonlinear constraints/objectives. This
-    occurs when a quadratic expression is mutliplied by a finite parameter
-    (making a `NLPExpr`):
+    the underlying transformation backend contain nonlinear constraints/objectives. This
+    occurs when a quadratic expression is multiplied by a finite parameter
+    (making a `GenericNonlinearExpr`):
     ```julia-repl
     julia> model = InfiniteModel(); @variable(model, z); @finite_parameter(model, p == 2);
@@ -88,4 +83,4 @@ converted into the appropriate affine expression when transcribed.
     `InfiniteOpt`'s implementation of finite parameters should not be a reason
     to use `InfiniteOpt` to model non-infinite-dimensional problems, since the
     added overhead will make it slower than just iteratively building `JuMP`
     models. For
-    this behavior, we recommend looking into using `ParameterJuMP`.
+    this behavior, we recommend looking into using `ParametricOptInterface`.
diff --git a/docs/src/guide/measure.md b/docs/src/guide/measure.md
index 94c8cf18..4bd05614 100644
--- a/docs/src/guide/measure.md
+++ b/docs/src/guide/measure.md
@@ -8,9 +8,9 @@ A guide for measure operators in `InfiniteOpt`. See the respective
 
 ## Overview
 Measure operators are objects that capture the evaluation of an expression with respect
-to parameters, which is a distinct feature of optimization problems with
-infinite decision spaces. In dynamic optimization measures can represent integral
-terms such as the total cost over time, and in stochastic optimization measures
+to infinite parameters, which is a distinct feature of optimization problems with
+infinite decision spaces. In dynamic optimization, measures typically are integral
+terms such as the total cost over time, and in stochastic optimization, measures
 can represent integrals over the uncertain parameters, such as expectations. In
 `InfiniteOpt`, measures are general operators that can be uni-variate or
-multi-variate. Natively we employ measure abstractions that employ discretization
+multi-variate. Natively, we employ measure abstractions that use discretization
@@ -161,7 +161,8 @@ the integrals by taking a discretization scheme
 \int_{\tau \in \mathcal{T}} f(\tau)w(\tau) d\tau \approx \sum_{i=1}^N \alpha_i f(\tau_i) w(\tau_i)
 ```
 where ``\tau_i`` are the grid points where the expression ``f(\tau)`` is
-evaluated, and ``N`` is the total number of points taken.
+evaluated, and ``N`` is the total number of points taken (assuming the transformation
+backend depends on discretization).
 
 This is the abstraction behind both [`DiscreteMeasureData`](@ref) and
 [`FunctionalDiscreteMeasureData`](@ref) which are the native measure data types
@@ -328,16 +329,16 @@ specified elsewhere in the model. 
Consider the following example with 3 equidistant
supports and an integral objective function that uses `UniTrapezoid()` (the default):
 ```jldoctest support_manage; setup = :(using InfiniteOpt), output = false
 # Create a model, with one variable and an infinite parameter with a given number of supports
-m = InfiniteModel()
-@infinite_parameter(m, t in [0, 2], num_supports = 3)
-@variable(m, u, Infinite(t))
+model = InfiniteModel()
+@infinite_parameter(model, t in [0, 2], num_supports = 3)
+@variable(model, u, Infinite(t))
 
 # Create an objective function with the default trapezoid integration
-@objective(m, Min, integral(u^2, t))
+@objective(model, Min, integral(u^2, t))
 
 # Get the transcribed model to check how the supports are taken into account
-build_optimizer_model!(m)
-trans_m = optimizer_model(m);
+build_transformation_backend!(model)
+tmodel = transformation_model(model);
 
 # output
 A JuMP Model
@@ -361,25 +362,25 @@ julia> supports(t)
  1.0
  2.0
 
-julia> transcription_variable(u)
+julia> transformation_variable(u)
 3-element Vector{VariableRef}:
  u(support: 1)
  u(support: 2)
 u(support: 3)
 
-julia> objective_function(trans_m)
+julia> objective_function(tmodel)
 0.5 u(support: 1)² + u(support: 2)² + 0.5 u(support: 3)²
 ```
 Thus, the integral incorporates the 3 supports generated outside the `integral`
 declaration.
 
-Then we readjust the model to use Gauss-Legendre quadrature via `GaussLegendre()`
+Now let's readjust the model to use Gauss-Legendre quadrature via `GaussLegendre()`
 that uses 2 quadrature nodes:
 ```jldoctest support_manage; output = false
-# Set the new objective and update the TranscriptionModel
-set_objective_function(m, integral(u^2, t, eval_method = GaussLegendre(), num_nodes = 2))
-build_optimizer_model!(m)
-trans_m = optimizer_model(m);
+# Set the new objective and update the transformation backend
+set_objective_function(model, integral(u^2, t, eval_method = GaussLegendre(), num_nodes = 2))
+build_transformation_backend!(model)
+tmodel = transformation_model(model);
 
 # output
 A JuMP Model
@@ -402,7 +403,7 @@ julia> supports(t)
  1.57735026919
  2.0
 
-julia> transcription_variable(u)
+julia> transformation_variable(u)
 5-element Vector{VariableRef}:
  u(support: 1)
  u(support: 2)
@@ -410,7 +411,7 @@ julia> transcription_variable(u)
  u(support: 4)
  u(support: 5)
 
-julia> objective_function(trans_m)
+julia> objective_function(tmodel)
 u(support: 2)² + u(support: 4)²
 ```
 The supports used in the objective function are different from the supports used
@@ -422,22 +423,22 @@ be excluded from the objective function which will affect the behavior of the
 optimization and lead to unexpected results. 
However, this behavior is avoided if we let the integral add the supports and
-not add supports elsewhere (for convenience we'll use `set_uni_integral_defaults`):
+not add supports elsewhere (for convenience we'll use [`set_uni_integral_defaults`](@ref)):
 ```jldoctest support_manage; output = false
 # Define a new model, parameter, and variable
-m = InfiniteModel()
-@infinite_parameter(m, t in [0, 2])
-@variable(m, u, Infinite(t))
+model = InfiniteModel()
+@infinite_parameter(model, t in [0, 2])
+@variable(model, u, Infinite(t))
 
 # Update the integral default keyword arguments for convenience
 set_uni_integral_defaults(eval_method = GaussLegendre(), num_nodes = 2)
 
 # Set the objective with our desired integral
-@objective(m, Min, integral(u^2, t))
+@objective(model, Min, integral(u^2, t))
 
 # Build the transcribed model
-build_optimizer_model!(m)
-trans_m = optimizer_model(m);
+build_transformation_backend!(model)
+tmodel = transformation_model(model);
 
 # output
 A JuMP Model
@@ -456,12 +457,12 @@ julia> supports(t)
  0.42264973081
  1.57735026919
 
-julia> transcription_variable(u)
+julia> transformation_variable(u)
 2-element Vector{VariableRef}:
 u(support: 1)
 u(support: 2)
 
-julia> objective_function(trans_m)
+julia> objective_function(tmodel)
 u(support: 1)² + u(support: 2)²
 ```
 Therefore, using quadratures other than `UniTrapezoid()` or `FEGaussLobatto()`
 requires careful analysis if there are user-defined supports in the problem.
 
 ## Expansion
 In a model, each measure records the integrand expression and an evaluation
 scheme that details the discretization scheme to approximate the integral.
-The model will not expand the measures until the transcription stage, at which
+The model will not expand the measures until the transformation stage, when
 a `JuMP.AbstractJuMPScalar` is created for each measure to represent how
-the measure is modeled in a transcription model based on the stored
+the measure is modeled in a transformed model based on the stored
 discretization scheme (see [Model Transcription](@ref transcription_docs) for
 details on transcription). Additional point variables will be created in the
 expansion process if the measure is evaluated at infinite parameter points that
 do not have corresponding point variables yet.
 
 Sometimes for extension purposes, one might want to expand a specific measure
-before reaching the transcription stage. Alternatively, one might want to use
-custom reformulation instead of the transcription encoded in this package, in
-which expanding measures will also be useful. This can be done using the [`expand`](@ref)
+before reaching the transformation stage. Alternatively, one might want to use
+custom reformulations instead of those natively provided by `InfiniteOpt`, in
+which case expanding measures will also be useful. This can be done using the [`expand`](@ref)
 function, which takes a [`MeasureRef`](@ref) object and returns a
 `JuMP.AbstractJuMPScalar` based on the [`AbstractMeasureData`](@ref). For
 example, suppose we want to integrate ``y^2`` in ``t``, with two supports
 ``t = 2.5`` and ``t = 7.5``. 
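+As a rough, illustrative sketch of the pieces involved (assuming an infinite
+parameter `t` and an infinite variable `y(t)` like those used earlier on this
+page, and using hypothetical equal coefficients of `5.0` purely for illustration;
+the actual expanded form depends on the measure data used):
+```julia
+# Encode the supports τ = [2.5, 7.5] with coefficients α = [5.0, 5.0]
+tdata = DiscreteMeasureData(t, [5.0, 5.0], [2.5, 7.5])
+
+# Create the measure of y² with respect to t and expand it
+meas = measure(y^2, tdata)
+expanded = expand(meas) # yields an expression like 5 y(2.5)² + 5 y(7.5)²
+```
+Note that `expand` creates the needed point variables (e.g., `y(2.5)`) if they
+do not exist yet.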
diff --git a/docs/src/guide/model.md b/docs/src/guide/model.md
index 16dbad80..56217af1 100644
--- a/docs/src/guide/model.md
+++ b/docs/src/guide/model.md
@@ -25,38 +25,58 @@ julia> using InfiniteOpt
 julia> model = InfiniteModel()
 An InfiniteOpt Model
 Feasibility problem with:
-Finite Parameters: 0
-Infinite Parameters: 0
-Variables: 0
-Derivatives: 0
-Measures: 0
-Optimizer model backend information:
-Model mode: AUTOMATIC
-CachingOptimizer state: NO_OPTIMIZER
-Solver name: No optimizer attached.
+  Finite parameters: 0
+  Infinite parameters: 0
+  Variables: 0
+  Derivatives: 0
+  Measures: 0
+Transformation backend information:
+  Backend type: TranscriptionBackend
+  Solver name: No optimizer attached.
+  Transformation built and up-to-date: false
+```
+Ultimately, `model` will be solved via a transformation backend. By default,
+we see that a [`TranscriptionBackend`](@ref) (a backend built on a JuMP `Model`
+that will contain a transcribed, i.e., discretized, model) is used. To specify
+a backend of our choice, we use the syntax:
+```jldoctest
+julia> using InfiniteOpt
+
+julia> model = InfiniteModel(TranscriptionBackend())
+An InfiniteOpt Model
+Feasibility problem with:
+  Finite parameters: 0
+  Infinite parameters: 0
+  Variables: 0
+  Derivatives: 0
+  Measures: 0
+Transformation backend information:
+  Backend type: TranscriptionBackend
+  Solver name: No optimizer attached.
+  Transformation built and up-to-date: false
 ```
-The optimizer that will be used to solve the model can also be specified at
-model definition:
+Since `TranscriptionBackend`s are a common choice, we can just pass a JuMP
+compatible optimizer (i.e., solver) to the model, and a `TranscriptionBackend`
+that uses that optimizer will be initialized:
```jldoctest
julia> using InfiniteOpt, Ipopt

julia> model = InfiniteModel(Ipopt.Optimizer)
An InfiniteOpt Model
Feasibility problem with:
-Finite Parameters: 0
-Infinite Parameters: 0
-Variables: 0
-Derivatives: 0
-Measures: 0
-Optimizer model backend information:
-Model mode: AUTOMATIC
-CachingOptimizer state: EMPTY_OPTIMIZER
-Solver name: Ipopt
+  Finite parameters: 0
+  Infinite parameters: 0
+  Variables: 0
+  Derivatives: 0
+  Measures: 0
+Transformation backend information:
+  Backend type: TranscriptionBackend
+  Solver name: Ipopt
+  Transformation built and up-to-date: false
```
-Note that any optimizer currently supported by `JuMP v0.19.0` or newer is
-supported for use in `InfiniteOpt`. For completeness, the table of currently
-supported optimizers is provided below in [Supported Optimizers](@ref).
+For completeness, the table of currently supported JuMP compatible optimizers
+is provided below in [Supported Optimizers](@ref). 
We can also specify optimizer attributes via
[`optimizer_with_attributes`](https://jump.dev/JuMP.jl/v1/api/JuMP/#JuMP.optimizer_with_attributes)
@@ -64,85 +84,121 @@ which allows us to append as many attributes as we like, for example:
```jldoctest
julia> using InfiniteOpt, Ipopt

-julia> model = InfiniteModel(optimizer_with_attributes(Ipopt.Optimizer,
-                                                       "output_level" => 0))
+julia> jump_opt = optimizer_with_attributes(Ipopt.Optimizer, "output_level" => 0);
+
+julia> model = InfiniteModel(jump_opt)
An InfiniteOpt Model
Feasibility problem with:
-Finite Parameters: 0
-Infinite Parameters: 0
-Variables: 0
-Derivatives: 0
-Measures: 0
-Optimizer model backend information:
-Model mode: AUTOMATIC
-CachingOptimizer state: EMPTY_OPTIMIZER
-Solver name: Ipopt
+  Finite parameters: 0
+  Infinite parameters: 0
+  Variables: 0
+  Derivatives: 0
+  Measures: 0
+Transformation backend information:
+  Backend type: TranscriptionBackend
+  Solver name: Ipopt
+  Transformation built and up-to-date: false
```
Now you have an initialized `InfiniteModel` that is ready for your mathematical
model to be defined and optimized!

## Advanced Definition Information
-As you may have noticed in the above examples, `InfiniteModel`s contain an
-optimizer model backend which simply corresponds to a `JuMP.Model` that
-will be used to store and optimize the reformulation of the infinite mathematical
-model stored in `InfiniteModel`. It also will contain a mapping between its
-optimization model and that of the `InfiniteModel` (e.g., a mapping between the
-variables and constraints). By default, `InfiniteModel`s use a
-[`TranscriptionModel`](@ref) optimizer model backend which will store a
-transcribed (discretized) version of the infinite model. More information on
-the internal use of `TranscriptionModel`s is provided in
-[Model Transcription](@ref transcription_docs).
-
-All the arguments used with the `InfiniteModel` constructor (e.g., the optimizer)
-are simply passed on and stored in the optimizer model backend. Thus, any
-argument supported by `JuMP.Model` can be passed on to the optimizer
-model by including it in the `InfiniteModel` constructor. For example, we can
-specify the `add_bridges` keyword argument in the `InfiniteModel` call to use
-in the definition of the optimizer model:
+As noted above, `InfiniteModel`s contain a transformation backend that will ultimately
+be used to optimize the `InfiniteModel` via a transformed version of it. Such backends
+typically provide methods to transform an `InfiniteModel` into a model that can be
+optimized; moreover, they store the data necessary to map results back to the `InfiniteModel`.
+
+By default, `InfiniteModel`s use a [`TranscriptionBackend`](@ref), which will store a
+transcribed (i.e., discretized) version of the infinite model. More information on
+`TranscriptionBackend`s is provided in [Model Transcription](@ref transcription_docs). 
+Notably, the main argument of `TranscriptionBackend` is an appropriate JuMP compatible
+optimizer:
```jldoctest
julia> using InfiniteOpt, Ipopt

-julia> model = InfiniteModel(Ipopt.Optimizer,
-                             add_bridges = false)
-An InfiniteOpt Model
+julia> backend = TranscriptionBackend(Ipopt.Optimizer)
+A TranscriptionBackend that uses a
+A JuMP Model
Feasibility problem with:
-Finite Parameters: 0
-Infinite Parameters: 0
Variables: 0
-Derivatives: 0
-Measures: 0
-Optimizer model backend information:
Model mode: AUTOMATIC
CachingOptimizer state: EMPTY_OPTIMIZER
Solver name: Ipopt
```
-Moreover, alternative optimizer model types (i.e., not a `TranscriptionModel`) can be
-specified via the `OptimizerModel` keyword argument when initializing the
-`InfiniteModel`. Thus, to redundantly specify a `TranscriptionModel` we would call:
-```jldoctest model_fun
-julia> using InfiniteOpt
+We can query the underlying transformation backend, transformation model, and transformation
+data via [`transformation_backend`](@ref),
+[`transformation_model`](@ref transformation_model(::InfiniteModel)), and
+[`transformation_data`](@ref transformation_data(::InfiniteModel)), respectively:
+```jldoctest
+julia> using InfiniteOpt; model = InfiniteModel();

-julia> model = InfiniteModel(OptimizerModel = TranscriptionModel)
-An InfiniteOpt Model
+julia> tbackend = transformation_backend(model)
+A TranscriptionBackend that uses a
+A JuMP Model
+Feasibility problem with:
+Variables: 0
+Model mode: AUTOMATIC
+CachingOptimizer state: NO_OPTIMIZER
+Solver name: No optimizer attached.
+
+julia> tmodel = transformation_model(model)
+A JuMP Model
Feasibility problem with:
-Finite Parameters: 0
-Infinite Parameters: 0
Variables: 0
-Derivatives: 0
-Measures: 0
-Optimizer model backend information:
Model mode: AUTOMATIC
CachingOptimizer state: NO_OPTIMIZER
Solver name: No optimizer attached.
+
+julia> data = transformation_data(model);
+```
+
+A new transformation backend is specified via [`set_transformation_backend`](@ref):
+```jldoctest
+julia> using InfiniteOpt, Ipopt; model = InfiniteModel();
+
+julia> set_transformation_backend(model, TranscriptionBackend(Ipopt.Optimizer))
+
+julia> tbackend = transformation_backend(model)
+A TranscriptionBackend that uses a
+A JuMP Model
+Feasibility problem with:
+Variables: 0
+Model mode: AUTOMATIC
+CachingOptimizer state: EMPTY_OPTIMIZER
+Solver name: Ipopt
+```
+Again, since `TranscriptionBackend` is the default, the following models are equivalent:
+```jldoctest
+julia> using InfiniteOpt, Ipopt;
+
+julia> model1 = InfiniteModel();
+
+julia> set_transformation_backend(model1, TranscriptionBackend(Ipopt.Optimizer, add_bridges = false))
+
+julia> model2 = InfiniteModel(Ipopt.Optimizer, add_bridges = false)
+An InfiniteOpt Model
+Feasibility problem with:
+  Finite parameters: 0
+  Infinite parameters: 0
+  Variables: 0
+  Derivatives: 0
+  Measures: 0
+Transformation backend information:
+  Backend type: TranscriptionBackend
+  Solver name: Ipopt
+  Transformation built and up-to-date: false
 ```
 
-More information on implementing custom optimizer models is located on the
+More information on implementing custom transformation backends is located on the
 Extensions page.
 
 ## Supported Optimizers
-`InfiniteOpt` can use any optimizer that is supported by `JuMP v0.19.0` or newer
-(i.e., has a `MathOptInterface` implementation). Please refer to `JuMP`'s current
+Supported optimizers (i.e., solvers) depend on the transformation backend being
+used. 
For [`JuMPBackend`](@ref)s such as [`TranscriptionBackend`](@ref), any
+JuMP compatible optimizer (i.e., one that has a `MathOptInterface` implementation) can be
+used. Please refer to `JuMP`'s current
 [solver documentation](https://jump.dev/JuMP.jl/v1/installation/#Supported-solvers)
 to learn what solvers are supported and how to install them.
diff --git a/docs/src/guide/objective.md b/docs/src/guide/objective.md
index 8901d8cf..6d434eee 100644
--- a/docs/src/guide/objective.md
+++ b/docs/src/guide/objective.md
@@ -10,10 +10,9 @@ respective [technical manual](@ref obj_manual) for more details.
 Naturally, objective functions serve as a key aspect of optimization problems
 in general and this is certainly the case with infinite dimensional ones. In
 `InfiniteOpt` objectives are defined in much the same way they are in `JuMP`.
-One key idea to keep in mind is that the objective must evaluate to a finite
-expression. Note this means that objectives can only explicitly contain
-finite variables and point variables. Infinite expressions must be evaluated in a
-measure to be included (e.g., evaluate the expectation of a random variable).
+One key idea is that the objective must evaluate to a finite expression, which means
+it must only explicitly contain finite variables and point variables. Infinite
+expressions must be summarized by a measure (e.g., taking the expectation of a random variable).
 
 ## [Basic Usage] (@id obj_basic)
 Principally, the objective function is specified via
@@ -35,13 +34,12 @@ julia> @variable(model, x[1:2])
 
 julia> @objective(model, Min, 0.5x[1] + 0.5x[2] + 𝔼(y^2 - y, ξ))
 0.5 x[1] + 0.5 x[2] + 𝔼{ξ}[y(ξ)² - y(ξ)]
 ```
-Thus, we have defined an objective using `InfiniteOpt`'s straightforward syntax.
 Note that the second argument indicates the objective sense which can be
-expressed `Min` for minimization problems and `Max` for maximization problems.
+expressed as `Min` for minimization problems and `Max` for maximization problems.
 The objective function (expression) must be finite containing only finite variables,
-point variables, and/or measures. Also, any included measures must fully
-integrate over all the infinite parameters contained in its input function.
-For example, if we define had an infinite variable `z(ξ, t)` then the measure
+point variables, and/or measures. Also, any included measures must fully summarize
+all infinite parameters contained in the expressions they operate on.
+For example, if we define an infinite variable `z(ξ, t)`, then the measure
 `𝔼(z, ξ)` could not be included since the resulting expression would still be
 infinite with respect to `t`. However, adding a measure for `t` would result in
 a valid object to add to an objective: `∫(𝔼(z, ξ), t)`.
diff --git a/docs/src/guide/optimize.md b/docs/src/guide/optimize.md
index 3305f312..9ec9e4b9 100644
--- a/docs/src/guide/optimize.md
+++ b/docs/src/guide/optimize.md
@@ -7,15 +7,12 @@ A guide for optimizing (solving) `InfiniteOpt` models. See the respective
 [technical manual](@ref opt_manual) for more details.
 
 ## Overview
-Fundamentally, we seek to optimize a given infinite optimization model that
-we have defined and this is the very reason why `InfiniteOpt` was created. Thus,
-`InfiniteOpt` offers a general and intuitive platform to do just this. This
-is made up of transforming the `InfiniteModel` into a standard optimization
-problem stored as a `JuMP.Model` (referred to as the `optimizer_model`) that is
-then optimized via a compatible optimizer. By default, this is done via a
-`TranscriptionModel` as described on the previous page. 
However, user-defined
+`InfiniteOpt` offers a general and intuitive platform to optimize infinite
+optimization models. This is accomplished by applying a *transformation* to
+build a transformation backend that can be solved. By default, this is done via a
+[`TranscriptionBackend`](@ref) as described on the previous page. However, user-defined
 reformulation strategies can readily be implemented as described in the
-[Optimizer Models](@ref extend_optimizer_model) section on the extensions page.
+[Transformation Backends](@ref extend_backends) section on the extensions page.
 
 ## Basic Usage
 For most users, [`optimize!`](@ref JuMP.optimize!(::InfiniteModel)) is the only
@@ -27,7 +24,7 @@ julia> using InfiniteOpt, Ipopt;
 
 julia> model = InfiniteModel(Ipopt.Optimizer);
 
-julia> set_optimizer_attribute(model, "print_level", 0);
+julia> set_attribute(model, "print_level", 0);
 
 julia> @infinite_parameter(model, t in [0, 10], num_supports = 10);
@@ -57,10 +54,10 @@ julia> optimize!(model);
 
 julia> termination_status(model)
 LOCALLY_SOLVED::TerminationStatusCode = 4
 ```
-Now our model has been solved and we can query the solution. How to query the
+Now our model has been solved, and we can query the solution. How to query the
 solution is explained on the [Results](@ref result_docs) page.
 
-If no optimizer has been specified for the `InfiniteModel`, one can be provided
+If no optimizer has been specified for the transformation backend, one can be provided
 via [`set_optimizer`](@ref):
 ```jldoctest; setup = :(using InfiniteOpt, Ipopt; model = InfiniteModel())
 julia> set_optimizer(model, Ipopt.Optimizer)
 ```
 A number of methods also exist to adjust the optimizer settings such as
 suppressing output. This is explained below in the
 [Optimizer Settings](@ref opt_settings) section.
 
-## Optimizer Models
-As discussed previously, `InfiniteModel`s contain an `optimizer_model` field
-which stores a transformed finite version of the model in a `JuMP.Model` that
+## [Transformation Backends](@id opt_transform_backends)
+As discussed previously, `InfiniteModel`s contain a transformation backend to
+store and solve a transformed version of the model. This backend typically
 contains a data object (that stores a mapping between the transformed model and
-the infinite model) in the `Model.ext` dictionary with an associated key. By
-default a `JuMP.Model` using [`TranscriptionData`](@ref) stored under the key
-`:TransData` is used and is referred to as a `TranscriptionModel`. The
-optimizer model is then what is used to optimize the infinite model, and it provides
-the information exacted by solution queries mapped back to the infinite
-model using the mapping data structure.
+the infinite model). By default, a `TranscriptionBackend` is used, which creates
+a discretized `JuMP.Model`. This transformation backend is used to optimize the
+infinite model, and it provides the information extracted by solution queries
+mapped back to the infinite model using the mapping data structure.
 
 The process for optimizing an `InfiniteModel` is summarized in the following
 steps:
  1. fully define the `InfiniteModel`
- 2. build the optimizer model via [`build_optimizer_model!`](@ref)
- 3. optimize the `optimizer_model` via [`optimize!`](@ref JuMP.optimize!(::JuMP.Model)). 
-
-Here `build_optimizer_model!` creates a reformulated finite version of the
-`InfiniteModel`, stores it in `InfiniteModel.optimizer_model` via
-[`set_optimizer_model`](@ref), and indicates that the optimizer model is ready
-via [`set_transformation_backend_ready`](@ref). These steps are all automated when
-[`optimize!`](@ref JuMP.optimize!(::InfiniteModel)) is invoked on the
+ 2. build the transformation backend via [`build_transformation_backend!`](@ref)
+ 3. optimize the `transformation_backend` via [`optimize!`](@ref JuMP.optimize!(::AbstractTransformationBackend)).
+
+Here, `build_transformation_backend!` empties any existing backend information via
+[`empty!`](@ref Base.empty!(::AbstractTransformationBackend)), creates a reformulated
+version of the `InfiniteModel`, stores it in-place, and indicates that the transformation
+backend is ready via [`set_transformation_backend_ready`](@ref). These steps are all
+automated when [`optimize!`](@ref JuMP.optimize!(::InfiniteModel)) is invoked on the
 `InfiniteModel`.
 
-The `optimizer_model` can be queried/extracted at any time from an `InfiniteModel`
-via [`optimizer_model`](@ref). For example, let's extract the optimizer model
-from the example above in the basic usage section:
+The `transformation_backend` can be queried/extracted at any time from an `InfiniteModel`
+via [`transformation_backend`](@ref), and its underlying model (if there is one) is extracted
+via [`transformation_model`](@ref transformation_model(::InfiniteModel)). For example,
+let's extract the transformation backend from the example above in the basic usage section:
 ```jldoctest optimize
-julia> trans_model = optimizer_model(model)
+julia> backend = transformation_backend(model)
+A TranscriptionBackend that uses a
+A JuMP Model
+Minimization problem with:
+Variables: 11
+Objective function type: AffExpr
+`AffExpr`-in-`MathOptInterface.EqualTo{Float64}`: 1 constraint
+`AffExpr`-in-`MathOptInterface.GreaterThan{Float64}`: 10 constraints
+`VariableRef`-in-`MathOptInterface.GreaterThan{Float64}`: 11 constraints
+Model mode: AUTOMATIC
+CachingOptimizer state: ATTACHED_OPTIMIZER
+Solver name: Ipopt
+
+julia> tmodel = transformation_model(model)
 A JuMP Model
 Minimization problem with:
 Variables: 11
@@ -111,13 +120,12 @@ CachingOptimizer state: ATTACHED_OPTIMIZER
 Solver name: Ipopt
 ```
 
-The `JuMP` variable(s) stored in the optimizer model that correspond to a
+The `JuMP` variable(s) stored in the transformation backend that correspond to a
 particular `InfiniteOpt` variable can be queried via
-[`optimizer_model_variable`](@ref optimizer_model_variable(::GeneralVariableRef)).
-Using a `TranscriptionModel` this equivalent to calling
-[`transcription_variable`](@ref). Thus, using the going example we get:
+[`transformation_variable`](@ref transformation_variable(::GeneralVariableRef)).
+Thus, using the ongoing example we get:
 ```jldoctest optimize
-julia> optimizer_model_variable(y) # infinite variable
+julia> transformation_variable(y) # infinite variable
 10-element Vector{VariableRef}:
  y(support: 1)
 y(support: 2)
...
 y(support: 9)
 y(support: 10)
 
-julia> optimizer_model_variable(z) # finite variable
+julia> transformation_variable(z) # finite variable
 z
 ```
 In like manner, we get the `JuMP` constraints corresponding to a particular
 `InfiniteOpt` constraint via
-[`optimizer_model_constraint`](@ref optimizer_model_constraint(::InfOptConstraintRef)). 
-Using a `TranscriptionModel` this equivalent to calling
-[`transcription_constraint`](@ref). Thus, using the going example we get:
+[`transformation_constraint`](@ref transformation_constraint(::InfOptConstraintRef)).
+Thus, using the ongoing example we get:
 ```jldoctest optimize
-julia> optimizer_model_constraint(c1) # infinite constraint
+julia> transformation_constraint(c1) # infinite constraint
 10-element Vector{ConstraintRef}:
  c1(support: 1) : z - y(support: 1) ≥ 0
  c1(support: 2) : z - y(support: 2) ≥ 0
...
 c1(support: 10) : z - y(support: 10) ≥ 0
 ```
 We can also query the expressions via
-[`optimizer_model_expression`](@ref optimizer_model_expression(::JuMP.AbstractJuMPScalar)):
+[`transformation_expression`](@ref transformation_expression(::JuMP.AbstractJuMPScalar)):
 ```jldoctest optimize
-julia> optimizer_model_expression(z - y^2 + 3) # infinite expression
+julia> transformation_expression(z - y^2 + 3) # infinite expression
 10-element Vector{AbstractJuMPScalar}:
 -y(support: 1)² + z + 3
 -y(support: 2)² + z + 3
@@ -170,28 +177,28 @@ julia> optimizer_model_expression(z - y^2 + 3) # infinite expression
 ```
 
 !!! note
-    1. Like `supports` the `optimizer_model_[obj]` methods also employ the
+    1. Like `supports`, the `transformation_[obj]` methods also employ the
        `label::Type{AbstractSupportLabel} = PublicLabel` keyword argument that by
       default will return variables/expressions/constraints associated with public
      supports. The full set (e.g., ones corresponding to internal collocation nodes)
      is obtained via `label = All`.
-    2. These methods also employ the `ndarray::Bool` keyword argument that will cause the
-       output to be formatted as a n-dimensional array where the dimensions
+    2. These methods also employ the `ndarray::Bool` keyword argument that will cause the
+       output to be formatted as an n-dimensional array where the dimensions
       correspond to the infinite parameter dependencies. For example, if we have an
-       infinite variable `y(t, ξ)` and we invoke a query method with `ndarray = true`
+       infinite variable `y(t, ξ)`, and we invoke a query method with `ndarray = true`
       then we'll get a matrix whose dimensions correspond to the supports of `t` and
      `ξ`, respectively. Also, if `ndarray = true` then `label` correspond to the
      intersection of supports labels in contrast to its default of invoking the
       union of the labels.
 
-The purpose of this `optimizer_model` abstraction is to readily enable user-defined
+The purpose of this `transformation_backend` abstraction is to readily enable user-defined
 reformulation extensions (e.g., using polynomial chaos expansion theory). However,
 this is all handled behind the scenes such that most users can interact with
 `InfiniteModel`s like any `JuMP.Model`.
 
 ## [Optimizer Settings](@id opt_settings)
-A few optimizer settings can be set in a consistent way agnostic of particular
-solver keywords. One such setting is that of suppressing and unsuppressing
+A few optimizer settings can be set in a solver-agnostic manner. One such
+setting is that of suppressing
 optimizer verbose output. This is accomplished via
 [`set_silent`](@ref JuMP.set_silent(::InfiniteModel)) and
 [`unset_silent`](@ref JuMP.unset_silent(::InfiniteModel)). The syntax is
@@ -214,22 +221,21 @@ julia> time_limit_sec(model)
 
 julia> unset_time_limit_sec(model)
 ```
-Other optimizer specific settings can be set via
-[`set_optimizer_attribute`](@ref). For example, let's set the maximum CPU time
-for Ipopt:
+Other optimizer-specific settings can be set via [`set_attribute`](@ref). 
+For example, let's set the maximum CPU time for Ipopt: ```jldoctest optimize -julia> set_optimizer_attribute(model, "max_cpu_time", 60.) +julia> set_attribute(model, "max_cpu_time", 60.) ``` -Multiple settings can be specified via [`set_optimizer_attributes`](@ref). For +Multiple settings can be specified via [`set_attributes`](@ref). For example, let's specify the tolerance and the maximum number of iterations: ```jldoctest optimize -julia> set_optimizer_attributes(model, "tol" => 1e-4, "max_iter" => 100) +julia> set_attributes(model, "tol" => 1e-4, "max_iter" => 100) ``` -Finally, we can query optimizer settings via [`get_optimizer_attribute`](@ref). +Finally, we can query optimizer settings via [`get_attribute`](@ref). For example, let's query the maximum number of iterations: ```jldoctest optimize -julia> get_optimizer_attribute(model, "max_iter") +julia> get_attribute(model, "max_iter") 100 ``` Note this only works if the attribute has been previously specified. diff --git a/docs/src/guide/parameter.md b/docs/src/guide/parameter.md index af003e3c..0682861e 100644 --- a/docs/src/guide/parameter.md +++ b/docs/src/guide/parameter.md @@ -13,12 +13,6 @@ added by defining a user-defined type). These can be used to parameterize infinite variables, semi-infinite variables, point variables, derivatives, measures, and can be used directly inside constraints. -!!! note - Previous versions of `InfiniteOpt` used the syntax - `@infinite_parameter(model, ξ in distribution)` for defining random infinite - parameters. This has been updated to - `@infinite_parameter(model, ξ ~ distribution)`. - ## Basic Usage First, we need to initialize and add infinite parameters to our `InfiniteModel`. This can be accomplished using [`@infinite_parameter`](@ref). For example, let's @@ -37,7 +31,8 @@ infinite variables, derivatives, measures, and constraints as described in their respective user guide sections. When the model is optimized, `t` will be transcribed (discretized) over its domain -following its support points. Users can specify support points via the +following its support points (assuming the transformation backend relies on +discretization). Users can specify support points via the `num_supports` or `supports` keyword arguments. For example, if we desire to have only 10 equidistant supports then we could have instead defined `t`: ```jldoctest; setup = :(using InfiniteOpt; model = InfiniteModel()) @@ -87,7 +82,7 @@ julia> @infinite_parameter(model, ξ[i = 1:3] ~ Normal(), independent = true) ξ[2] ξ[3] ``` -Note that we use `~` instead of `in` when specifying distributions. We could have +Note that we use `~` instead of `in` when specifying distributions. We also could have used `i` as an index to assign a different distribution to each parameter. Supports can also be specified for each parameter as shown above. Similarly, the `num_supports` keyword is used to generate random supports. @@ -317,9 +312,11 @@ for more information. ## Supports For an infinite parameter, its supports are a finite set of points that the -parameter will take (or possibly take, if the parameter is random). During the -transcription stage, the supports specified will become part of the grid points -that approximate all functions parameterized by the infinite parameter. +parameter will take (or possibly take, if the parameter is random). 
Assuming +the underlying transformation backend relies on a discretization strategy (as is +the case with [`TranscriptionBackend`](@ref)), these supports will be used +when building the transcription backend to approximate all variables/expressions +parameterized by the infinite parameter over a grid of points. Once an infinite parameter is defined, users can access the supports using [`supports`](@ref) function: @@ -545,7 +542,7 @@ false ``` This function checks if the parameter is used by any constraint, measure, or -variable. In a similar way, functions [`used_by_constraint`](@ref), +variable. Similarly, functions [`used_by_constraint`](@ref), [`used_by_measure`](@ref) and [`used_by_infinite_variable`](@ref) can be applied to find out any dependency of specific types on the infinite parameter. diff --git a/docs/src/guide/result.md b/docs/src/guide/result.md index b7f4da66..a00874b4 100644 --- a/docs/src/guide/result.md +++ b/docs/src/guide/result.md @@ -11,7 +11,7 @@ A guide for querying optimized `InfiniteOpt` models. See the respective So far we have covered defining, transforming, and optimizing `InfiniteModel`s. Now comes the point to extract information from our optimized model. This is done following extended versions of `JuMP`s querying functions in combination with -the mapping information stored in the optimizer model. Thus, this page will +the mapping information stored in the transformation backend. Thus, this page will walk through the use of these result query functions. ## Basic Usage @@ -21,7 +21,7 @@ julia> using InfiniteOpt, Ipopt; julia> model = InfiniteModel(Ipopt.Optimizer); -julia> set_optimizer_attribute(model, "print_level", 0); +julia> set_attribute(model, "print_level", 0); julia> @infinite_parameter(model, t in [0, 10], num_supports = 10); @@ -183,7 +183,7 @@ These again all have a 1-to-1 correspondence. In the case that our variables/constraints depend on multiple infinite parameter it is typically convenient to add the keyword statement `ndarray = true` when calling any variable/constraint queries (e.g., `value` - and `dual`). This will reformat the output vector into a n-dimensional array + and `dual`). This will reformat the output vector into an n-dimensional array whose dimensions correspond to the supports of the infinite parameters. ## Termination Queries @@ -197,7 +197,7 @@ The commonly used queries include [`termination_status`](@ref), exemplified in the Basic Usage section above and are helpful in quickly understanding the optimality status of a given model following the many possible statuses reported by `MathOptInterface` which are documented -[here](https://jump.dev/MathOptInterface.jl/v0.9.22/manual/solutions/#Solutions). +[here](https://jump.dev/MathOptInterface.jl/v1/manual/solutions/#Solutions). We use `result_count` to determine how many solutions are recorded in the optimizer. ```jldoctest results @@ -235,9 +235,9 @@ if the optimizer supplies this information which again Ipopt does not. ## Variable Queries Information about the optimized variables is gathered consistently in comparison to typical `JuMP` models. With `InfiniteModel`s this is done by querying the -optimizer model and using its stored variable mappings to return the correct +transformation backend and using its stored variable mappings to return the correct information. Thus, here the queries are extended to work with the specifics of -the optimizer model to return the appropriate info. +the transformation backend to return the appropriate info. !!! note 1. 
Like `supports` the all variable based query methods below also employ the
@@ -246,7 +246,7 @@ the optimizer model to return the appropriate info.
       supports. The full set (e.g., ones corresponding to internal collocation
       nodes) is obtained via `label = All`.
    2. These methods also employ the `ndarray::Bool` keyword argument that will cause the
-      output to be formatted as a n-dimensional array where the dimensions
+      output to be formatted as an n-dimensional array where the dimensions
      correspond to the infinite parameter dependencies. For example, if we have an
      infinite variable `y(t, ξ)` and we invoke a query method with `ndarray = true`
      then we'll get a matrix whose dimensions correspond to the supports of `t` and
@@ -254,7 +254,7 @@ the optimizer model to return the appropriate info.
      intersection of supports labels in contrast to its default of invoking the
      union of the labels.

-First, we should verify that the optimized model in fact has variable values
+First, we should verify that the transformed model in fact has variable values
via [`has_values`](@ref). In our example, we have:
```jldoctest results
julia> has_values(model)
true
```
So we have values readily available to be extracted. Now
[`value`](@ref JuMP.value(::GeneralVariableRef)) can be used to query the
values as shown above in the Basic Usage section. This works by calling the
-appropriate [`map_value`](@ref InfiniteOpt.map_value) defined by the optimizer
-model. By default this, employs the `map_value` fallback which uses
-`optimizer_model_variable` to do the mapping. Details on how to extend these
-methods for user-defined optimizer models is explained on the Extensions page.
+appropriate [`map_value`](@ref InfiniteOpt.map_value) defined by the transformation
+backend. By default, this employs the `map_value` fallback which uses
+`transformation_variable` to do the mapping. Details on how to extend these
+methods for user-defined transformation backends is explained on the Extensions page.

We also, support call to `value` that use an expression of variables as input.
@@ -300,15 +300,15 @@ appropriate versions of [`map_optimizer_index`](@ref InfiniteOpt.map_optimizer_i
Like variables, a variety of information can be queried about constraints.

!!! note
-    1. Like `supports` the all constraint query methods below also employ the
+    1. Like `supports`, all the constraint query methods below also employ the
       `label::Type{AbstractSupportLabel} = PublicLabel` keyword argument that by
       default will return the desired information associated with public
      supports. The full set (e.g., ones corresponding to internal collocation
      nodes) is obtained via `label = All`.
    2. These methods also employ the `ndarray::Bool` keyword argument that will cause the
-      output to be formatted as a n-dimensional array where the dimensions
+      output to be formatted as an n-dimensional array where the dimensions
      correspond to the infinite parameter dependencies. For example, if we have an
-      infinite constraint that depends on `t` and `ξ)` and we invoke a query method
+      infinite constraint that depends on `t` and `ξ`, and we invoke a query method
      with `ndarray = true` then we'll get a matrix whose dimensions correspond to
      the supports of `t` and `ξ`, respectively. Also, if `ndarray = true` then
      `label` correspond to the intersection of supports labels in contrast to its
@@ -361,9 +361,6 @@ Here 10 indices are given in accordance with the transcription constraints.
The mapping between these and the original infinite constraints is managed
via the appropriate extensions of
[`map_optimizer_index`](@ref InfiniteOpt.map_optimizer_index).

-!!! note
-    `optimizer_index` does not work for constraints that contain `NLPExprs`.
-
We can also query dual information from our constraints if it is available.
First, we should verify that dual information is available via
[`has_duals`](@ref):
@@ -413,7 +410,7 @@ This is computed via interrogating the duals and the objective sense.

We also conduct sensitivity analysis for linear problems using
[`lp_sensitivity_report`](@ref JuMP.lp_sensitivity_report(::InfiniteModel)). This
will generate a [`InfOptSensitivityReport`](@ref) which contains mapping to the
-ranges indicating how much a constraint RHS constant or a objective
+ranges indicating how much a constraint RHS constant or an objective
coefficient can be changed without violating the feasibility of the solution.
This is further explained in the `JuMP` documentation
[here](https://jump.dev/JuMP.jl/v1/manual/solutions/#Sensitivity-analysis-for-LP).
@@ -458,12 +455,13 @@ julia> report[c1, label = All]
(-Inf, 42.0)
```

-## Other Queries
-Any other queries supported by `JuMP` can be accessed by simply interrogating the
-optimizer model directly using [`optimizer_model`](@ref) to access it. For
-example, we can get the solution summary of the optimizer model:
+## Direct Queries
+We can directly interrogate the transformation backend to get more
+information. For instance, with `TranscriptionBackend`s we can get
+the underlying `JuMP.Model` via [`transformation_model`](@ref) and
+then call whatever `JuMP` query we want:
```julia-repl
-julia> solution_summary(optimizer_model(model))
+julia> solution_summary(transformation_model(model))

* Solver : Ipopt

* Status
diff --git a/docs/src/guide/transcribe.md b/docs/src/guide/transcribe.md
index 089b9fed..949af16d 100644
--- a/docs/src/guide/transcribe.md
+++ b/docs/src/guide/transcribe.md
@@ -8,28 +8,24 @@ A guide for transcribing infinite models in `InfiniteOpt`. See the respective
[technical manual](@ref transcription_manual) for more details.

## Overview
-All infinite models need to be reformulated in such a way that they can be solved
-using traditional optimization methods. Typically, this involves discretization
-of the infinite domain via particular parameter support points. By default,
-`InfiniteOpt` employs this methodology via the use of transcription models (which
-comprise the `optimizer_model` as discussed in the
-[Infinite Models](@ref infinite_model_docs) section). `InfiniteOpt` is built
-modularly to readily accept other user defined techniques and this is discussed
-in further detail on the [Extensions](@ref) page. This page will detail
-transcription models based in `InfiniteOpt.TranscriptionOpt` which provide the
-default transcription (reformulation) capabilities of `InfiniteOpt`.
+All infinite models need to be transformed into a form that can be solved. A
+common approach is direct transcription (e.g., discretize-then-optimize) where
+the domain of an infinite parameter is approximated by a collection of support
+points. This is the idea behind `TranscriptionOpt`, which implements
+[`TranscriptionBackend`](@ref), InfiniteOpt's default transformation
+backend, as discussed in the [Infinite Models](@ref infinite_model_docs) section.
+This page will detail the transcription models implemented in `InfiniteOpt.TranscriptionOpt`.
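+
+As a rough sketch of what discretize-then-optimize means here, consider a toy
+problem (hand-transcribed with plain `JuMP` purely for illustration; the
+problem and its supports are hypothetical):
+```julia
+using JuMP
+
+# Conceptual infinite problem: min ∫ y(t)² dt  s.t.  y(t) ≥ sin(t), t ∈ [0, 1]
+model = Model()
+ts = range(0, 1, length = 4)  # supports that discretize t ∈ [0, 1]
+@variable(model, y[1:4])  # one finite variable per support
+@constraint(model, [i = 1:4], y[i] >= sin(ts[i]))  # enforce the constraint at each support
+@objective(model, Min, sum(y[i]^2 for i in 1:4) / 4)  # crude quadrature of the integral
+```
+`TranscriptionOpt` automates exactly this kind of reformulation (with proper
+support management and measure evaluation) for any `InfiniteModel`.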
## Basic Usage
Most users will not need to employ the capabilities of `TranscriptionOpt`
directly since they are employed implicitly with the call of
[`optimize!`](@ref JuMP.optimize!(::InfiniteModel)) on an infinite model. This
-occurs since `TranscriptionModel`s are the default optimizer model type that is
-employed.
+occurs since `TranscriptionBackend`s are the default backend.

However, some users may wish to use `TranscriptionOpt` to extract a fully
discretized/transcribed version of an infinite model that is conveniently output
as a typical `JuMP` model and can then be treated as such. This is principally
-accomplished via [`build_optimizer_model!`](@ref). To illustrate how this is done,
+accomplished via [`build_transformation_backend!`](@ref). To illustrate how this is done,
let's first define a basic infinite model with a simple support structure for
the sake of example:
```jldoctest transcribe
@@ -65,12 +61,12 @@ Subject to
 constr : y(t)² - z ≤ 42, ∀ t ∈ [0, 10]
```
Now we can make `JuMP` model containing the transcribed version of `inf_model`
-via [`build_optimizer_model!`](@ref) and then extract it via
-[`optimizer_model`](@ref):
+via [`build_transformation_backend!`](@ref) and then extract it via
+[`transformation_model`](@ref transformation_model(::InfiniteModel)):
```jldoctest transcribe
-julia> build_optimizer_model!(inf_model)
+julia> build_transformation_backend!(inf_model)

-julia> trans_model = optimizer_model(inf_model)
+julia> trans_model = transformation_model(inf_model)
A JuMP Model
Minimization problem with:
Variables: 4
@@ -95,19 +91,12 @@ Subject to
 y(support: 3) ≥ 0
 z binary
```
-!!! note
-    Previous versions of InfiniteOpt, employed a `TranscriptionModel(model::InfiniteModel)`
-    constructor to build transcription models independently of the optimizer model.
-    This has functionality has been removed in favor of internal optimizer model
-    based builds for efficiency reasons and to properly manage MOI optimizer
-    attributes.
-
-Thus, we have a transcribed `JuMP` model. To be precise this is actually a
-`TranscriptionModel` which is a `JuMP.Model` with some extra data stored in the
-`ext` field that retains the mapping between the transcribed variables/constraints
-and their infinite counterparts. Notice, that multiple finite variables
-have been introduced to discretize `y(t)` at supports 1, 2, and 3 which correspond
-to 0, 5, and 10 as can be queried by `supports`:
+
+Thus, we have a transcribed `JuMP` model. To be precise, data on the mapping between
+the transcribed variables/constraints and their infinite counterparts is also generated
+as part of the `TranscriptionBackend` that contains `trans_model`. Notice that
+multiple finite variables have been introduced to discretize `y(t)` at supports 1, 2,
+and 3 which correspond to 0, 5, and 10 as can be queried by `supports`:
```jldoctest transcribe
julia> supports(y)
3-element Vector{Tuple}:
 (0.0,)
 (5.0,)
 (10.0,)
```
Also, notice how the constraints are transcribed in accordance with these supports
except the initial condition which naturally is only invoked for the first support
point.
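+
+Since `trans_model` is an ordinary `JuMP` model, it can also be handed straight
+to a solver (a usage sketch; Ipopt is only a placeholder here, so in practice
+pick a solver suited to the transcribed problem class):
+```julia
+using Ipopt  # hypothetical solver choice
+
+set_optimizer(trans_model, Ipopt.Optimizer)
+optimize!(trans_model)  # solve the transcribed model like any JuMP model
+```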
Furthermore, the transcription variable(s) of any variable associated with
-the infinite model can be determined via [`transcription_variable`](@ref):
+the infinite model can be determined via [`transformation_variable`](@ref):
```jldoctest transcribe
-julia> transcription_variable(y)
+julia> transformation_variable(y)
3-element Vector{VariableRef}:
 y(support: 1)
 y(support: 2)
 y(support: 3)

-julia> transcription_variable(trans_model, z)
+julia> transformation_variable(z)
z
```
Similarly, the transcription constraints associated with infinite model constraints
-can be queried via [`transcription_constraint`](@ref) and the associated supports
+can be queried via [`transformation_constraint`](@ref) and the associated supports
and infinite parameters can be found via `supports` and `parameter_refs`:
```jldoctest transcribe
-julia> transcription_constraint(initial)
+julia> transformation_constraint(initial)
initial(support: 1) : y(support: 1) = 1

-julia> transcription_constraint(constr)
+julia> transformation_constraint(constr)
3-element Vector{ConstraintRef}:
 constr(support: 1) : y(support: 1)² - z ≤ 42
 constr(support: 2) : y(support: 2)² - z ≤ 42
@@ -154,7 +143,7 @@ julia> parameter_refs(constr)
Note the parameter reference tuple corresponds to the support tuples.

!!! note
-    Method that query the transcription surrogates (e.g., `transcription_variable`)
+    Methods that query the transcription objects (e.g., `transformation_variable`)
    and the respective supports via `supports` also accept the keyword argument
    `label` to specify which that transcription objects are desired in accordance
    to the support labels that are inherited from and/or are equal to `label`. By
@@ -162,9 +151,9 @@ Note the parameter reference tuple corresponds to the support tuples.
    solely associated with internal supports). The full query response can always
    be obtained via `label = All`.

-Now we have a transcribed `JuMP` model that can be optimized via traditional
-`JuMP` methods whose variables and constraints can be accessed using the methods
-mentioned above.
+Now we have a transcribed JuMP model that can be optimized via traditional
+JuMP-compatible optimizers whose variables and constraints can be accessed using
+the methods mentioned above.

## Transcription Theory
A given infinite-dimensional optimization problem is parameterized according to
@@ -196,7 +185,7 @@ For example, let's consider a space-time optimization problem of the form:
\end{aligned}
```
Thus, we have an optimization problem whose decision space is infinite with
-respect to time ``t`` and position ``x``. Now let's transcript it following the
+respect to time ``t`` and position ``x``. Now let's transcribe it following the
above steps. First, we need to specify the infinite parameter supports and for
simplicity let's choose the following sparse sets:
- ``t \in \{0, 5, 10\}``
@@ -311,11 +300,11 @@ Subject to
Thus, we obtain the infinite problem in `InfiniteOpt`. As previously noted,
transcription would be handled automatically behind the scenes when the model
is optimized.
However, we can directly extract the transcribed version by building a
-`TranscriptionModel`:
+`TranscriptionBackend`:
```jldoctest trans_example
-julia> build_optimizer_model!(inf_model)
+julia> build_transformation_backend!(inf_model)

-julia> trans_model = optimizer_model(inf_model);
+julia> trans_model = transformation_model(inf_model);

julia> print(trans_model)
Min y(support: 1)² + y(support: 2)² + y(support: 3)²
@@ -389,18 +378,20 @@ julia> supports(g, ndarray = true) # format it as an n-dimensional array (t by x

## TranscriptionOpt
`InfiniteOpt.TranscriptionOpt` is a sub-module which principally implements
-`TranscriptionModel`s and its related access/modification methods. Thus,
+[`TranscriptionBackend`](@ref)s and its related access/modification methods. Thus,
this section will detail what these are and how they work.

-### TranscriptionModels
-A `TranscriptionModel` is simply a `JuMP.Model` whose `ext` field contains
-[`TranscriptionData`](@ref) which acts to map the transcribed model back to the
-original infinite model (e.g., map the variables and constraints). Such models
-are constructed via a default version of
-[`build_optimizer_model!`](@ref InfiniteOpt.build_optimizer_model!(::InfiniteOpt.InfiniteModel,::Val{:TransData}))
-which wraps [`build_transcription_model!`](@ref InfiniteOpt.TranscriptionOpt.build_transcription_model!):
+### TranscriptionBackends
+A `TranscriptionBackend` is simply a [`JuMPBackend`](@ref) that uses the
+[`Transcription`](@ref InfiniteOpt.TranscriptionOpt.Transcription) `AbstractJuMPTag`
+and [`TranscriptionData`](@ref InfiniteOpt.TranscriptionOpt.TranscriptionData) which
+acts to map the transcribed model back to the original infinite model (e.g., map the
+variables and constraints). Such models are constructed via a default version of
+[`build_transformation_backend!`](@ref InfiniteOpt.build_transformation_backend!(::InfiniteOpt.InfiniteModel,::TranscriptionBackend))
+which wraps [`build_transcription_backend!`](@ref InfiniteOpt.TranscriptionOpt.build_transcription_backend!):
```jldoctest transcribe
-julia> model1 = TranscriptionModel() # make an empty model
+julia> backend1 = TranscriptionBackend() # make an empty backend
+A TranscriptionBackend that uses a
A JuMP Model
Feasibility problem with:
Variables: 0
Model mode: AUTOMATIC
CachingOptimizer state: NO_OPTIMIZER
Solver name: No optimizer attached.

-julia> build_optimizer_model!(inf_model);
+julia> build_transformation_backend!(inf_model);

-julia> model2 = optimizer_model(inf_model) # generate from an InfiniteModel
+julia> backend2 = transformation_backend(inf_model) # generate from an InfiniteModel
+A TranscriptionBackend that uses a
A JuMP Model
Minimization problem with:
Variables: 4
@@ -423,20 +415,17 @@ Model mode: AUTOMATIC
CachingOptimizer state: NO_OPTIMIZER
Solver name: No optimizer attached.
```
-Note that the all the normal `JuMP.Model` arguments can be used with both
-constructor when making an empty model, and they are simply inherited from those
-specified in the `InfiniteModel`. The call to `build_optimizer_model!` is the backbone
-behind infinite model transcription and is what encapsulates all the methods to
+The call to `build_transformation_backend!` is the backbone
+behind infinite model transformation and is what encapsulates all the methods to
transcribe measures, variables, derivatives, and constraints. This is also the
method that enables the use of
[`optimize!`](@ref JuMP.optimize!(::InfiniteModel)).
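+
+As a usage sketch (with `inf_model` from above), this means a stale backend can
+always be refreshed with a single call:
+```julia
+# re-transcribe inf_model from scratch; any existing backend
+# contents are emptied automatically before the rebuild
+build_transformation_backend!(inf_model)
+```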
### Queries
In this section we highlight a number of query methods that pertain to
-`TranscriptionModel`s and their mappings. First, if the `optimizer_model` of an
-`InfiniteModel` is a `TranscriptionModel` it can be extracted via
-[`transcription_model`](@ref):
-```jldoctest transcribe; setup = :(clear_optimizer_model_build!(inf_model))
-julia> transcription_model(inf_model)
+`TranscriptionBackend`s and their mappings. First, we can retrieve the underlying `JuMP` model
+via [`transformation_model`](@ref):
+```jldoctest transcribe; setup = :(empty!(inf_model.backend))
+julia> transformation_model(inf_model)
A JuMP Model
Feasibility problem with:
Variables: 0
@@ -445,42 +434,27 @@ CachingOptimizer state: NO_OPTIMIZER
Solver name: No optimizer attached.
```
Here we observe that such a model is currently empty and hasn't been populated
-yet. Furthermore, we check that a `Model` is an `TranscriptionModel` via
-[`is_transcription_model`](@ref):
-```jldoctest transcribe
-julia> is_transcription_model(optimizer_model(inf_model))
-true
-
-julia> is_transcription_model(Model())
-false
-```
-We can also extract the raw [`TranscriptionData`](@ref) object from a
-`TranscriptionModel` via [`transcription_data`](@ref).
-```jldoctest transcribe
-julia> transcription_data(trans_model);
-```
+yet.

Next we can retrieve the `JuMP` variable(s) for a particular `InfiniteOpt`
-variable via [`transcription_variable`](@ref). For finite variables, this will
+variable via [`transformation_variable`](@ref). For finite variables, this will
be a one to one mapping, and for infinite variables a list of supported variables
will be returned in the order of the supports. Following the initial example in
the basic usage section, this is done:
```jldoctest transcribe
-julia> build_optimizer_model!(inf_model); trans_model = optimizer_model(inf_model);
+julia> build_transformation_backend!(inf_model); backend = transformation_backend(inf_model);

-julia> transcription_variable(trans_model, y)
+julia> transformation_variable(y, backend)
3-element Vector{VariableRef}:
 y(support: 1)
 y(support: 2)
 y(support: 3)

-julia> transcription_variable(trans_model, z)
+julia> transformation_variable(z, backend)
z
```
-Note that if the `TranscriptionModel` is stored as the current `optimizer_model`
-then the first argument (specifying the `TranscriptionModel` can be omitted). Thus,
-in this case the first argument can be omitted as it was above, but is shown for
-completeness.
+Note that if the `TranscriptionBackend` is the current backend,
+then the second argument can be omitted (as it was above).

Similarly, the parameter supports corresponding to the transcription variables
(in the case of transcribed infinite variables) can be queried via
@@ -494,7 +468,7 @@ julia> supports(y)
```

!!! note
-    1. Note that like `supports` the `transcription_[obj]` methods also employ the
+    1. Note that like `supports`, the `transformation_[obj]` methods also employ the
      `label::Type{AbstractSupportLabel} = PublicLabel` keyword argument that by
      default will return variables/expressions/constraints associated with public
      supports. The full set (e.g., ones corresponding to internal collocation nodes)
@@ -502,14 +476,14 @@ julia> supports(y)
    2. These methods also employ the `ndarray::Bool` keyword argument that will cause the
      output to be formatted as an n-dimensional array where the dimensions
      correspond to the infinite parameter dependencies.
For example, if we have an - infinite variable `y(t, ξ)` and we invoke a query method with `ndarray = true` + infinite variable `y(t, ξ)`, and we invoke a query method with `ndarray = true` then we'll get a matrix whose dimensions correspond to the supports of `t` and `ξ`, respectively. Also, if `ndarray = true` then `label` correspond to the intersection of supports labels in contrast to its default of invoking the union of the labels. -Likewise, [`transcription_constraint`](@ref) and -`supports`(@ref) can be used with constraints to find their transcribed +Likewise, [`transformation_constraint`](@ref transformation_constraint(::InfOptConstraintRef)) and +[`supports`](@ref supports(::InfOptConstraintRef)) can be used with constraints to find their transcribed equivalents in the `JuMP` model and determine their supports. We can also do this with measures and expressions: @@ -517,15 +491,15 @@ We can also do this with measures and expressions: julia> meas = support_sum(y^2, t) support_sum{t}[y(t)²] -julia> build_optimizer_model!(inf_model) +julia> build_transformation_backend!(inf_model) -julia> transcription_variable(meas) +julia> transformation_variable(meas) y(support: 1)² + y(support: 2)² + y(support: 3)² julia> supports(meas) () -julia> transcription_expression(y^2 + z - 42) +julia> transformation_expression(y^2 + z - 42) 3-element Vector{AbstractJuMPScalar}: y(support: 1)² + z - 42 y(support: 2)² + z - 42 diff --git a/docs/src/guide/variable.md b/docs/src/guide/variable.md index 2406aaa7..6da97864 100644 --- a/docs/src/guide/variable.md +++ b/docs/src/guide/variable.md @@ -89,7 +89,7 @@ julia> @variable(model, w0[i = 1:3], SemiInfinite(w[i], 0, x)) w0[2] w0[3] ``` -Thus we create a Julia array variable `w0` whose elements `w0[i]` point to their +Thus, we create a Julia array variable `w0` whose elements `w0[i]` point to their respective semi-infinite variables `w[i](0, x)` stored in `model`. Alternatively, we can make a semi-infinite variable via our restriction syntax: ```jldoctest var_basic diff --git a/docs/src/index.md b/docs/src/index.md index acce681e..60b68f82 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -7,8 +7,9 @@ A `JuMP` extension for expressing and solving infinite-dimensional optimization problems. !!! note - `InfiniteOpt v0.6` introduces `JuMP`'s new general nonlinear modeling to `InfiniteOpt`! - Please see [Nonlinear Expressions](@ref nlp_guide) for more information. + `InfiniteOpt v0.6` introduces quite a few new features and makes some + breaking changes to the underlying data structures. The core API remains + largely the same. ## What is InfiniteOpt? `InfiniteOpt.jl` provides a general mathematical abstraction to express and solve @@ -79,7 +80,7 @@ capabilities from JuliaCon 2021 (note that nonlinear support has since been adde `InfiniteOpt.jl` is a registered `Julia` package and can be added simply by inputting the following in the package manager: ```julia -(v1.9) pkg> add InfiniteOpt +(v1.10) pkg> add InfiniteOpt ``` Please visit our [Installation Guide](@ref) for more details and information on how to get started. @@ -153,5 +154,5 @@ citing it. A pre-print version is freely available though [arXiv](https://arxiv.org/abs/2106.12689). ## Acknowledgements -We acknowledge our support from the Department of Energy under grant +We acknowledge previous support from the Department of Energy under grant DE-SC0014114. 
diff --git a/docs/src/install.md b/docs/src/install.md index 16fd98a7..cd5e9f54 100644 --- a/docs/src/install.md +++ b/docs/src/install.md @@ -21,7 +21,7 @@ Julia terminal and enter the package manager: ```julia-repl julia> ] -(v1.9) pkg> +(v1.10) pkg> ``` !!! tip @@ -36,12 +36,12 @@ Use the `add` command in the package to manager to add the following packages: For example, to install `Distributions` we would enter: ```julia-repl -(v1.9) pkg> add Distributions +(v1.10) pkg> add Distributions ``` Now let's install `InfiniteOpt`: ```julia-repl -(v1.9) pkg> add InfiniteOpt +(v1.10) pkg> add InfiniteOpt ``` !!! info @@ -51,26 +51,28 @@ Now let's install `InfiniteOpt`: Alternatively, we can install the current experimental version of `InfiniteOpt` via: ```julia-repl -(v1.9) pkg> add https://github.com/infiniteopt/InfiniteOpt.jl +(v1.10) pkg> add https://github.com/infiniteopt/InfiniteOpt.jl ``` ## Install Optimization Solvers -`InfiniteOpt` relies on solvers to solve optimization problems. Many solvers are +`InfiniteOpt` relies on [transformation backends](@ref opt_transform_backends) to +solve problems. These backends typically depend on optimization solvers which often are not native to Julia and might require commercial licenses. A list of currently -supported solvers and their corresponding Julia packages is provided in +supported solvers for [`JuMPBackend`](@ref)s (i.e., transformation backends +that use JuMP) and their corresponding Julia packages is provided in [Supported Optimizers](@ref). For example, we can install Ipopt which is an open-source nonlinear solver: ```julia-repl -(v1.9) pkg> add Ipopt +(v1.10) pkg> add Ipopt ``` Now Ipopt can be used as the optimizer (solver) for an infinite model by running: ```julia-repl julia> using InfiniteOpt, Ipopt -julia> model = InfiniteModel(Ipopt.Optimizer) +julia> model = InfiniteModel(Ipopt.Optimizer) # uses TranscriptionBackend by default ``` -Most solver packages follow the `ModuleName.Optimizer` naming convention, but +Most JuMP solver packages follow the `ModuleName.Optimizer` naming convention, but this may not always be the case. See [Infinite Models](@ref infinite_model_docs) for more information on defining infinite models and specifying solvers. @@ -86,7 +88,7 @@ form `vX.Y.Z`. You can check which versions you have installed with: ```julia-repl julia> ] -(v1.9) pkg> status +(v1.10) pkg> status ``` This should almost always be the most-recent release. You can check the releases of a package by going to the relevant GitHub page, and navigating to the diff --git a/docs/src/manual/backend.md b/docs/src/manual/backend.md new file mode 100644 index 00000000..d26d6ab6 --- /dev/null +++ b/docs/src/manual/backend.md @@ -0,0 +1,85 @@ +# [Transformation Backends](@id opt_manual) +A technical manual for optimizing (solving) `InfiniteOpt` models via +transformation backends. See the respective [guide](@ref opt_docs) +for more information. 
+ +## Optimize +```@docs +JuMP.optimize!(::InfiniteModel) +``` + +## Backend Settings/Queries +```@docs +transformation_model(::InfiniteModel) +transformation_data(::InfiniteModel) +transformation_backend +set_transformation_backend +build_transformation_backend!(::InfiniteModel) +transformation_variable(::GeneralVariableRef) +transformation_expression(::JuMP.AbstractJuMPScalar) +transformation_constraint(::InfOptConstraintRef) +supports(::Union{DecisionVariableRef, MeasureRef, ParameterFunctionRef}) +supports(::JuMP.AbstractJuMPScalar) +supports(::InfOptConstraintRef) +JuMP.set_optimizer(::InfiniteModel, ::Any) +JuMP.set_silent(::InfiniteModel) +JuMP.unset_silent(::InfiniteModel) +JuMP.set_time_limit_sec(::InfiniteModel, ::Any) +JuMP.unset_time_limit_sec(::InfiniteModel) +JuMP.time_limit_sec(::InfiniteModel) +JuMP.solver_name(model::InfiniteModel) +JuMP.mode(::InfiniteModel) +JuMP.compute_conflict!(::InfiniteModel) +JuMP.copy_conflict(::InfiniteModel) +JuMP.set_string_names_on_creation(::InfiniteModel) +JuMP.set_string_names_on_creation(::InfiniteModel, ::Any) +JuMP.bridge_constraints(::InfiniteModel) +JuMP.add_bridge(::InfiniteModel, ::Any) +JuMP.print_active_bridges(::IO, ::InfiniteModel, ::Vararg{Any}) +JuMP.print_bridge_graph(::IO, ::InfiniteModel) +JuMP.set_attribute(::InfiniteModel, ::Any, ::Any) +JuMP.set_attributes +JuMP.get_attribute(::InfiniteModel, ::Any) +JuMP.backend(::InfiniteModel) +JuMP.unsafe_backend(::InfiniteModel) +``` + +## Transformation Backend API +```@docs +AbstractTransformationBackend +JuMPBackend +AbstractJuMPTag +transformation_model(::AbstractTransformationBackend) +transformation_data(::AbstractTransformationBackend) +JuMP.get_attribute(::AbstractTransformationBackend, ::Any) +JuMP.set_attribute(::AbstractTransformationBackend, ::Any, ::Any) +Base.empty!(::AbstractTransformationBackend) +build_transformation_backend!(::InfiniteModel, ::AbstractTransformationBackend) +JuMP.optimize!(::AbstractTransformationBackend) +JuMP.set_optimizer(::AbstractTransformationBackend, ::Any) +JuMP.set_silent(::AbstractTransformationBackend) +JuMP.unset_silent(::AbstractTransformationBackend) +JuMP.set_time_limit_sec(::AbstractTransformationBackend, ::Any) +JuMP.unset_time_limit_sec(::AbstractTransformationBackend) +JuMP.time_limit_sec(::AbstractTransformationBackend) +JuMP.solver_name(model::AbstractTransformationBackend) +JuMP.mode(::AbstractTransformationBackend) +JuMP.compute_conflict!(::AbstractTransformationBackend) +JuMP.copy_conflict(::AbstractTransformationBackend) +JuMP.set_string_names_on_creation(::AbstractTransformationBackend) +JuMP.set_string_names_on_creation(::AbstractTransformationBackend, ::Any) +JuMP.bridge_constraints(::AbstractTransformationBackend) +JuMP.add_bridge(::AbstractTransformationBackend, ::Any) +JuMP.print_active_bridges(::IO, ::AbstractTransformationBackend, ::Vararg{Any}) +JuMP.print_bridge_graph(::IO, ::AbstractTransformationBackend) +JuMP.backend(::AbstractTransformationBackend) +JuMP.unsafe_backend(::AbstractTransformationBackend) +transformation_variable(::GeneralVariableRef, ::AbstractTransformationBackend) +transformation_expression(::JuMP.AbstractJuMPScalar, ::AbstractTransformationBackend) +transformation_constraint(::InfOptConstraintRef, ::AbstractTransformationBackend) +variable_supports(::Any, ::AbstractTransformationBackend) +expression_supports(::Any, ::AbstractTransformationBackend) +constraint_supports(::InfOptConstraintRef, ::AbstractTransformationBackend) +transformation_backend_ready +set_transformation_backend_ready +``` 
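+
+As a rough extension sketch (not a complete implementation), a custom backend
+defines a subtype of [`AbstractTransformationBackend`](@ref) and implements the
+applicable methods above; the name `NewBackend` and its field are hypothetical:
+```julia
+using InfiniteOpt, JuMP
+
+struct NewBackend <: AbstractTransformationBackend
+    model::JuMP.Model  # storage for the reformulated problem (illustrative)
+end
+
+# map an InfiniteModel into the backend's reformulated model
+function InfiniteOpt.build_transformation_backend!(
+    model::InfiniteModel,
+    backend::NewBackend
+    )
+    # ... populate backend.model from model ...
+    return
+end
+
+# solve the reformulated model
+function JuMP.optimize!(backend::NewBackend)
+    return JuMP.optimize!(backend.model)
+end
+```
+See the Extensions page for guidance on the full set of methods a backend
+should implement.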
diff --git a/docs/src/manual/expression.md b/docs/src/manual/expression.md index 0c5f5c86..6323be12 100644 --- a/docs/src/manual/expression.md +++ b/docs/src/manual/expression.md @@ -53,7 +53,7 @@ add_operators_to_jump ## Expression Methods ```@docs -parameter_refs(::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr, JuMP.NonlinearExpr}) +parameter_refs(::Union{JuMP.GenericAffExpr, JuMP.GenericNonlinearExpr, JuMP.GenericQuadExpr}) map_expression map_expression_to_ast ``` @@ -76,6 +76,7 @@ JuMP.is_valid(::InfiniteModel, ::DispatchVariableRef) used_by_infinite_variable(::GeneralVariableRef) used_by_point_variable(::GeneralVariableRef) used_by_semi_infinite_variable(::GeneralVariableRef) +used_by_parameter_function(::GeneralVariableRef) used_by_derivative(::GeneralVariableRef) used_by_measure(::GeneralVariableRef) used_by_objective(::GeneralVariableRef) diff --git a/docs/src/manual/measure.md b/docs/src/manual/measure.md index 4648d064..73107f2a 100644 --- a/docs/src/manual/measure.md +++ b/docs/src/manual/measure.md @@ -99,6 +99,7 @@ measure_data is_analytic parameter_refs(::MeasureRef) is_used(::MeasureRef) +used_by_derivative(::MeasureRef) used_by_constraint(::MeasureRef) used_by_measure(::MeasureRef) used_by_objective(::MeasureRef) @@ -119,7 +120,7 @@ InfiniteOpt.analytic_expansion InfiniteOpt.expand_measures make_point_variable_ref make_semi_infinite_variable_ref -add_point_variable(::JuMP.Model, ::Any, ::Any, ::Any) -add_semi_infinite_variable(::JuMP.Model, ::Any, ::Any) +add_point_variable(::AbstractTransformationBackend, ::Any, ::Any) +add_semi_infinite_variable(::AbstractTransformationBackend, ::Any) internal_semi_infinite_variable ``` diff --git a/docs/src/manual/optimize.md b/docs/src/manual/optimize.md deleted file mode 100644 index 2d35f853..00000000 --- a/docs/src/manual/optimize.md +++ /dev/null @@ -1,59 +0,0 @@ -# [Optimization](@id opt_manual) -A technical manual for optimizing (solving) `InfiniteOpt` models. See the -respective [guide](@ref opt_docs) for more information. - -## Optimize -```@docs -JuMP.optimize!(::InfiniteModel) -``` - -## Optimizer Settings -```@docs -JuMP.set_optimizer(::InfiniteModel, ::Any) -JuMP.set_silent(::InfiniteModel) -JuMP.unset_silent(::InfiniteModel) -JuMP.set_time_limit_sec(::InfiniteModel, ::Any) -JuMP.unset_time_limit_sec(::InfiniteModel) -JuMP.time_limit_sec(::InfiniteModel) -JuMP.set_optimizer_attribute(::InfiniteModel, ::String, ::Any) -JuMP.set_optimizer_attribute(::InfiniteModel,::MOI.AbstractOptimizerAttribute,::Any) -JuMP.set_optimizer_attributes(::InfiniteModel, ::Pair) -JuMP.get_optimizer_attribute(::InfiniteModel, ::String) -JuMP.get_optimizer_attribute(::InfiniteModel,::MOI.AbstractOptimizerAttribute) -JuMP.add_bridge(::InfiniteModel, ::Type{<:MOI.Bridges.AbstractBridge}) -``` - -## Optimizer Queries -```@docs -JuMP.solver_name(model::InfiniteModel) -JuMP.backend(model::InfiniteModel) -JuMP.mode(model::InfiniteModel) -JuMP.bridge_constraints(::InfiniteModel) -``` - -## Optimizer Model API -```@docs -optimizer_model -set_optimizer_model -optimizer_model_key(::InfiniteModel) -optimizer_model_key(::JuMP.Model) -build_optimizer_model!(::InfiniteModel) -build_optimizer_model! 
-clear_optimizer_model_build!(::InfiniteModel) -clear_optimizer_model_build!(::JuMP.Model) -InfiniteOpt.add_infinite_model_optimizer -optimizer_model_variable(::GeneralVariableRef) -optimizer_model_variable -supports(::Union{DecisionVariableRef, MeasureRef}) -InfiniteOpt.variable_supports -optimizer_model_expression(::JuMP.AbstractJuMPScalar) -optimizer_model_expression -supports(::JuMP.AbstractJuMPScalar) -InfiniteOpt.expression_supports -InfiniteOpt.optimizer_model_constraint(::InfOptConstraintRef) -optimizer_model_constraint -supports(::InfOptConstraintRef) -InfiniteOpt.constraint_supports -transformation_backend_ready -set_transformation_backend_ready -``` diff --git a/docs/src/manual/parameter.md b/docs/src/manual/parameter.md index 98421e35..e0db04bd 100644 --- a/docs/src/manual/parameter.md +++ b/docs/src/manual/parameter.md @@ -55,6 +55,7 @@ derivative_method(::IndependentParameterRef) is_used(::ScalarParameterRef) used_by_infinite_variable(::IndependentParameterRef) used_by_parameter_function(::IndependentParameterRef) +used_by_derivative(::IndependentParameterRef) used_by_measure(::ScalarParameterRef) used_by_constraint(::ScalarParameterRef) ``` @@ -79,6 +80,7 @@ derivative_method(::DependentParameterRef) is_used(::DependentParameterRef) used_by_infinite_variable(::DependentParameterRef) used_by_parameter_function(::DependentParameterRef) +used_by_derivative(::DependentParameterRef) used_by_measure(::DependentParameterRef) used_by_constraint(::DependentParameterRef) ``` diff --git a/docs/src/manual/result.md b/docs/src/manual/result.md index 03bcc9ce..a314a810 100644 --- a/docs/src/manual/result.md +++ b/docs/src/manual/result.md @@ -13,6 +13,7 @@ JuMP.dual_status(::InfiniteModel) ## General ```@docs JuMP.solve_time(::InfiniteModel) +JuMP.relative_gap(::InfiniteModel) JuMP.simplex_iterations(::InfiniteModel) JuMP.barrier_iterations(::InfiniteModel) JuMP.node_count(::InfiniteModel) @@ -32,9 +33,6 @@ JuMP.has_values(::InfiniteModel) JuMP.value(::GeneralVariableRef) JuMP.reduced_cost(::GeneralVariableRef) JuMP.optimizer_index(::GeneralVariableRef) -InfiniteOpt.map_value -InfiniteOpt.map_reduced_cost -InfiniteOpt.map_optimizer_index ``` ## Constraints @@ -44,12 +42,11 @@ JuMP.value(::InfOptConstraintRef) JuMP.optimizer_index(::InfOptConstraintRef) JuMP.dual(::InfOptConstraintRef) JuMP.shadow_price(::InfOptConstraintRef) -InfiniteOpt.map_dual ``` ## Expressions ```@docs -JuMP.value(::Union{JuMP.GenericAffExpr{<:Any, <:GeneralVariableRef}, JuMP.GenericQuadExpr{<:Any, <:GeneralVariableRef}}) +JuMP.value(::Union{JuMP.GenericAffExpr{Float64, GeneralVariableRef}, JuMP.GenericQuadExpr{Float64, GeneralVariableRef}, JuMP.GenericNonlinearExpr{GeneralVariableRef}}) ``` ## LP Sensitivity @@ -57,3 +54,30 @@ JuMP.value(::Union{JuMP.GenericAffExpr{<:Any, <:GeneralVariableRef}, JuMP.Generi JuMP.lp_sensitivity_report(::InfiniteModel) InfOptSensitivityReport ``` + +## Transformation Backend Extension API +```@docs +JuMP.termination_status(::AbstractTransformationBackend) +JuMP.raw_status(::AbstractTransformationBackend) +JuMP.primal_status(::AbstractTransformationBackend) +JuMP.dual_status(::AbstractTransformationBackend) +JuMP.solve_time(::AbstractTransformationBackend) +JuMP.relative_gap(::AbstractTransformationBackend) +JuMP.simplex_iterations(::AbstractTransformationBackend) +JuMP.barrier_iterations(::AbstractTransformationBackend) +JuMP.node_count(::AbstractTransformationBackend) +JuMP.result_count(::AbstractTransformationBackend) +JuMP.objective_bound(::AbstractTransformationBackend) 
+JuMP.objective_value(::AbstractTransformationBackend) +JuMP.dual_objective_value(::AbstractTransformationBackend) +JuMP.has_values(::AbstractTransformationBackend) +map_value(::Any, ::AbstractTransformationBackend) +map_infinite_parameter_value +map_reduced_cost(::GeneralVariableRef, ::AbstractTransformationBackend) +map_optimizer_index(::GeneralVariableRef, ::AbstractTransformationBackend) +JuMP.has_duals(::AbstractTransformationBackend) +map_dual(::InfOptConstraintRef, ::AbstractTransformationBackend) +map_shadow_price(::InfOptConstraintRef, ::AbstractTransformationBackend) +map_optimizer_index(::InfOptConstraintRef, ::AbstractTransformationBackend) +JuMP.lp_sensitivity_report(::AbstractTransformationBackend) +``` \ No newline at end of file diff --git a/docs/src/manual/transcribe.md b/docs/src/manual/transcribe.md index 1cecdeb7..9b833b52 100644 --- a/docs/src/manual/transcribe.md +++ b/docs/src/manual/transcribe.md @@ -1,11 +1,12 @@ -# [Model Transcription](@id transcription_manual) -A technical manual for `InfiniteOpt.TranscriptionOpt`. See the respective -[guide](@ref transcription_docs) for more information. +# [TranscriptionOpt](@id transcription_manual) +A technical manual for `InfiniteOpt.TranscriptionOpt` (the default transformation +backend). See the respective [guide](@ref transcription_docs) for more information. ## Definition ```@docs -InfiniteOpt.TranscriptionOpt.TranscriptionModel +InfiniteOpt.TranscriptionOpt.TranscriptionBackend InfiniteOpt.TranscriptionOpt.TranscriptionData +InfiniteOpt.TranscriptionOpt.Transcription InfiniteOpt.TranscriptionOpt.set_parameter_supports InfiniteOpt.TranscriptionOpt.transcribe_finite_variables! InfiniteOpt.TranscriptionOpt.transcribe_infinite_variables! @@ -18,30 +19,28 @@ InfiniteOpt.TranscriptionOpt.transcribe_objective! InfiniteOpt.TranscriptionOpt.transcribe_constraints! InfiniteOpt.TranscriptionOpt.transcribe_derivative_evaluations! InfiniteOpt.TranscriptionOpt.transcribe_variable_collocation_restictions! -InfiniteOpt.TranscriptionOpt.build_transcription_model! -InfiniteOpt.add_point_variable(::JuMP.Model,::InfiniteOpt.GeneralVariableRef,::Vector{Float64},::Val{:TransData}) -InfiniteOpt.add_semi_infinite_variable(::JuMP.Model,::InfiniteOpt.SemiInfiniteVariable,::Val{:TransData}) -InfiniteOpt.build_optimizer_model!(::InfiniteOpt.InfiniteModel,::Val{:TransData}) +InfiniteOpt.TranscriptionOpt.build_transcription_backend! 
+InfiniteOpt.add_point_variable(::InfiniteOpt.TranscriptionOpt.TranscriptionBackend,::InfiniteOpt.GeneralVariableRef,::Vector{Float64}) +InfiniteOpt.add_semi_infinite_variable(::InfiniteOpt.TranscriptionOpt.TranscriptionBackend,::InfiniteOpt.SemiInfiniteVariable) +InfiniteOpt.build_transformation_backend!(::InfiniteOpt.InfiniteModel,::InfiniteOpt.TranscriptionOpt.TranscriptionBackend) ``` ## Queries ```@docs -InfiniteOpt.TranscriptionOpt.is_transcription_model InfiniteOpt.TranscriptionOpt.transcription_data InfiniteOpt.TranscriptionOpt.has_internal_supports -InfiniteOpt.TranscriptionOpt.transcription_model -InfiniteOpt.TranscriptionOpt.transcription_variable(::JuMP.Model,::InfiniteOpt.GeneralVariableRef) -InfiniteOpt.optimizer_model_variable(::InfiniteOpt.GeneralVariableRef,::Val{:TransData}) -InfiniteOpt.variable_supports(::JuMP.Model,::Union{InfiniteOpt.InfiniteVariableRef, InfiniteOpt.SemiInfiniteVariableRef},::Val{:TransData}) -InfiniteOpt.TranscriptionOpt.lookup_by_support(::JuMP.Model,::InfiniteOpt.GeneralVariableRef,::Vector) -InfiniteOpt.internal_semi_infinite_variable(::InfiniteOpt.SemiInfiniteVariableRef,::Val{:TransData}) -InfiniteOpt.TranscriptionOpt.transcription_expression(::JuMP.Model,::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr}) -InfiniteOpt.optimizer_model_expression(::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr},::Val{:TransData}) -InfiniteOpt.expression_supports(::JuMP.Model,::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr}, ::Val{:TransData}) -InfiniteOpt.TranscriptionOpt.transcription_constraint(::JuMP.Model,::InfiniteOpt.InfOptConstraintRef) -InfiniteOpt.optimizer_model_constraint(::InfiniteOpt.InfOptConstraintRef,::Val{:TransData}) -InfiniteOpt.constraint_supports(::JuMP.Model,::InfiniteOpt.InfOptConstraintRef,::Val{:TransData}) -InfiniteOpt.TranscriptionOpt.parameter_supports(::JuMP.Model) +InfiniteOpt.TranscriptionOpt.transcription_variable(::InfiniteOpt.GeneralVariableRef, ::InfiniteOpt.TranscriptionOpt.TranscriptionBackend) +InfiniteOpt.transformation_variable(::InfiniteOpt.GeneralVariableRef,::InfiniteOpt.TranscriptionOpt.TranscriptionBackend) +InfiniteOpt.variable_supports(::Union{InfiniteOpt.InfiniteVariableRef, InfiniteOpt.SemiInfiniteVariableRef},::InfiniteOpt.TranscriptionOpt.TranscriptionBackend) +InfiniteOpt.TranscriptionOpt.lookup_by_support(::InfiniteOpt.GeneralVariableRef,::InfiniteOpt.TranscriptionOpt.TranscriptionBackend,::Vector) +InfiniteOpt.internal_semi_infinite_variable(::InfiniteOpt.SemiInfiniteVariableRef,::InfiniteOpt.TranscriptionOpt.TranscriptionBackend) +InfiniteOpt.TranscriptionOpt.transcription_expression(::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr}, ::InfiniteOpt.TranscriptionOpt.TranscriptionBackend) +InfiniteOpt.transformation_expression(::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr, JuMP.GenericNonlinearExpr},::InfiniteOpt.TranscriptionOpt.TranscriptionBackend) +InfiniteOpt.expression_supports(::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr}, ::InfiniteOpt.TranscriptionOpt.TranscriptionBackend) +InfiniteOpt.TranscriptionOpt.transcription_constraint(::InfiniteOpt.InfOptConstraintRef, ::InfiniteOpt.TranscriptionOpt.TranscriptionBackend) +InfiniteOpt.transformation_constraint(::InfiniteOpt.InfOptConstraintRef,::InfiniteOpt.TranscriptionOpt.TranscriptionBackend) +InfiniteOpt.constraint_supports(::InfiniteOpt.InfOptConstraintRef,::InfiniteOpt.TranscriptionOpt.TranscriptionBackend) +InfiniteOpt.TranscriptionOpt.parameter_supports(::InfiniteOpt.TranscriptionOpt.TranscriptionBackend) ``` ## Utilities diff --git 
a/docs/src/manual/variable.md b/docs/src/manual/variable.md index f4cf2a71..196c1019 100644 --- a/docs/src/manual/variable.md +++ b/docs/src/manual/variable.md @@ -10,6 +10,7 @@ which originates from `JuMP.jl`. ### Infinite ```@docs +JuMP.@variable InfOptVariableType Infinite JuMP.build_variable(::Function, ::JuMP.VariableInfo, ::Infinite) @@ -88,12 +89,14 @@ parameter_refs(::InfiniteVariableRef) parameter_list(::InfiniteVariableRef) raw_parameter_refs(::InfiniteVariableRef) is_used(::Union{InfiniteVariableRef, DerivativeRef}) +used_by_derivative(::Union{DerivativeRef, InfiniteVariableRef}) used_by_point_variable(::Union{InfiniteVariableRef, DerivativeRef}) used_by_semi_infinite_variable(::Union{InfiniteVariableRef, DerivativeRef}) ``` ### Semi-Infinite ```@docs +JuMP.has_lower_bound(::SemiInfiniteVariableRef) JuMP.lower_bound(::SemiInfiniteVariableRef) JuMP.LowerBoundRef(::SemiInfiniteVariableRef) JuMP.has_upper_bound(::SemiInfiniteVariableRef) @@ -112,6 +115,8 @@ parameter_refs(::SemiInfiniteVariableRef) parameter_list(::SemiInfiniteVariableRef) raw_parameter_refs(::SemiInfiniteVariableRef) eval_supports(::SemiInfiniteVariableRef) +is_used(::SemiInfiniteVariableRef) +used_by_derivative(::SemiInfiniteVariableRef) ``` ### Point diff --git a/docs/src/tutorials/quick_start.md b/docs/src/tutorials/quick_start.md index b94f65c5..1314ab8b 100644 --- a/docs/src/tutorials/quick_start.md +++ b/docs/src/tutorials/quick_start.md @@ -7,7 +7,7 @@ DocTestFilters = [r"≤|<=", r" == | = ", r" ∈ | in ", r" for all | ∀ ", r"d Below we exemplify and briefly explain the very basics behind defining and solving an infinite-dimensional optimization problem in `InfiniteOpt`. Please refer to the Guide on our subsequent pages for more complete information. The Basic Usage sections -on the each guide page are good places to start from. Also, the syntax of `InfiniteOpt` +on each guide page are good places to start from. Also, the syntax of `InfiniteOpt` is inspired by `JuMP` thus we recommend new users that haven't used `JuMP`, first consult their tutorials starting [here](https://jump.dev/JuMP.jl/v1/tutorials/getting_started/getting_started_with_JuMP/). @@ -65,15 +65,15 @@ julia> using InfiniteOpt, Distributions, Ipopt; julia> model = InfiniteModel(Ipopt.Optimizer) An InfiniteOpt Model Feasibility problem with: -Finite Parameters: 0 -Infinite Parameters: 0 -Variables: 0 -Derivatives: 0 -Measures: 0 -Optimizer model backend information: -Model mode: AUTOMATIC -CachingOptimizer state: EMPTY_OPTIMIZER -Solver name: Ipopt + Finite parameters: 0 + Infinite parameters: 0 + Variables: 0 + Derivatives: 0 + Measures: 0 +Transformation backend information: + Backend type: TranscriptionBackend + Solver name: Ipopt + Transformation built and up-to-date: false ``` Learn more about `InfiniteModel`s and optimizers on our [Infinite Models](@ref infinite_model_docs) page. @@ -241,7 +241,7 @@ That's it, now we have our problem defined in `InfiniteOpt`! 
## Solution & Queries
### Optimize
Now that our model is defined, let's optimize it via [`optimize!`](@ref):
-```jldoctest quick; setup = :(set_optimizer_attribute(model, "print_level", 0))
+```jldoctest quick; setup = :(set_attribute(model, "print_level", 0))
julia> optimize!(model)
```
diff --git a/src/array_parameters.jl b/src/array_parameters.jl
index 866012ae..627cd70a 100644
--- a/src/array_parameters.jl
+++ b/src/array_parameters.jl
@@ -1748,7 +1748,7 @@ function JuMP.delete(
         error("Cannot delete `$prefs` since they are used by an infinite " *
               "parameter function(s).")
     end
-    # update optimizer model status
+    # update transformation backend status
     if any(is_used(pref) for pref in prefs)
         set_transformation_backend_ready(model, false)
     end
diff --git a/src/backends.jl b/src/backends.jl
index 131b5413..4a0a671c 100644
--- a/src/backends.jl
+++ b/src/backends.jl
@@ -226,6 +226,24 @@ function JuMP.set_optimizer_attribute(model::InfiniteModel, attr, value)
     return JuMP.set_attribute(model, attr, value)
 end
 
+"""
+    JuMP.set_attributes(model::InfiniteModel, pairs::Pair...)::Nothing
+
+Specify multiple transformation backend attributes as `Pair`s
+of the form `attr => value` which are used for `set_attribute(model, attr, value)`.
+
+**Example**
+```julia-repl
+julia> set_attributes(model, "tol" => 1e-4, "max_iter" => 100)
+```
+"""
+function JuMP.set_attributes(model::InfiniteModel, pairs::Pair...)
+    for (attr, value) in pairs
+        JuMP.set_attribute(model.backend, attr, value)
+    end
+    return
+end
+
 """
     Base.empty!(backend::AbstractTransformationBackend)
@@ -337,7 +355,7 @@ end
    JuMP.optimize!(model::InfiniteModel; [kwargs...])

Extend `JuMP.optimize!` to optimize infinite models using the internal
-optimizer model. Calls [`build_transformation_backend!`](@ref) if the optimizer
+transformation backend. Calls [`build_transformation_backend!`](@ref) if the transformation
model isn't up-to-date. The `kwargs` correspond to keyword arguments passed to
[`build_transformation_backend!`](@ref) if any are defined. The `kwargs` can also
include arguments that are passed to an optimize hook if one was set with
@@ -670,7 +688,7 @@ end
)

Return the supports associated with the mappings of `vref` in `backend`.
-This dispatches off of `backend` which permits optimizer model extensions. This
+This dispatches off of `backend` which permits transformation backend extensions. This
should throw an error if `vref` is not associated with the variable mappings
stored in `backend`. Keyword arguments can be added as needed. Note that no
extension is necessary for point or finite variables.
diff --git a/src/constraints.jl b/src/constraints.jl index 498bd267..76fd8a23 100644 --- a/src/constraints.jl +++ b/src/constraints.jl @@ -897,7 +897,7 @@ function add_domain_restrictions( else model.constraint_restrictions[JuMP.index(cref)] = new_restrictions end - # update the optimizer model status + # update the backend status set_transformation_backend_ready(JuMP.owner_model(cref), false) return end @@ -965,7 +965,7 @@ function JuMP.delete(model::InfiniteModel, cref::InfOptConstraintRef) delete_domain_restrictions(cref) # delete constraint information _delete_data_object(cref) - # reset optimizer model status + # reset transformation backend status set_transformation_backend_ready(model, false) return end diff --git a/src/derivative_evaluations.jl b/src/derivative_evaluations.jl index bab385c6..42e742e5 100644 --- a/src/derivative_evaluations.jl +++ b/src/derivative_evaluations.jl @@ -320,7 +320,7 @@ function derivative_expr_data( method::AbstractDerivativeMethod, ) error("`derivative_expr_data` not defined for derivative method of type " * - "`$(typeof(method))`. It might be because the optimizer model doesn't " * + "`$(typeof(method))`. It might be because the transformation backend doesn't " * "support this particular derivative method. If you are extending InfiniteOpt" * "to have a new derivative method, please extend `derivative_expr_data` if " * "possible. See the documentation for details.") diff --git a/src/derivatives.jl b/src/derivatives.jl index 7133e14b..27fab67d 100644 --- a/src/derivatives.jl +++ b/src/derivatives.jl @@ -140,7 +140,7 @@ end Return a `Bool` whether `dref` has been evaluated within the `InfiniteModel` and has derivative constraints that have been added to the `InfiniteModel`. Note this -does not indicate if such constraints have been added to the optimizer model. Thus, +does not indicate if such constraints have been added to the transformation backend. Thus, with normal usage (i.e., not using `evaluate`) this should always return `false`. """ function has_derivative_constraints(dref::DerivativeRef) diff --git a/src/expressions.jl b/src/expressions.jl index 2c4b2c85..06fe5347 100644 --- a/src/expressions.jl +++ b/src/expressions.jl @@ -421,7 +421,7 @@ another model. """ function JuMP.delete(model::InfiniteModel, fref::ParameterFunctionRef)::Nothing @assert JuMP.is_valid(model, fref) "Parameter function is invalid." - # update the optimizer model status + # update the transformation backend status if is_used(fref) set_transformation_backend_ready(model, false) end diff --git a/src/measure_expansions.jl b/src/measure_expansions.jl index 874725e0..c8eff25a 100644 --- a/src/measure_expansions.jl +++ b/src/measure_expansions.jl @@ -85,7 +85,7 @@ function add_point_variable(backend::AbstractTransformationBackend, ivref, supp) "`$(typeof(backend))`.") end -# Store/add the variable to the optimizer model via add_measure_variable +# Store/add the variable to the transformation backend via add_measure_variable # This avoids changing the InfiniteModel unexpectedly function make_point_variable_ref( write_model::AbstractTransformationBackend, @@ -157,7 +157,7 @@ and return the correct `InfiniteOpt` variable reference. This is an internal method used by [`make_semi_infinite_variable_ref`](@ref) to make semi-infinite variables when the `write_model` is a transformation backend. This is useful for extensions that wish to expand measures, but without changing the original -`InfiniteModel`. An error is thrown for optimizer model types. Note if this is +`InfiniteModel`. 
An error is thrown for new transformation backend types. Note if this is +extended, then [`internal_semi_infinite_variable`](@ref) should also be extended +in order to direct semi-infinite variable references to the underlying [`SemiInfiniteVariable`](@ref). @@ -168,7 +168,7 @@ function add_semi_infinite_variable(backend::AbstractTransformationBackend, var) end -# Add semi-infinite infinite variables in the optimizer model without modifying the InfiniteModel +# Add semi-infinite infinite variables in the transformation backend without modifying the InfiniteModel function make_semi_infinite_variable_ref( write_model::AbstractTransformationBackend, ivref::GeneralVariableRef, diff --git a/src/nlp.jl b/src/nlp.jl index 0055ead8..8f0090f4 100644 --- a/src/nlp.jl +++ b/src/nlp.jl @@ -53,7 +53,7 @@ function JuMP.add_nonlinear_operator( end push!(model.operators, NLPOperator(name, dim, f, funcs...)) model.op_lookup[name] = (f, dim) - # TODO should we set the optimizer model to be out of date? + # TODO should we set the transformation backend to be out of date? return JuMP.NonlinearOperator(f, name) end @@ -120,7 +120,7 @@ end Add the additional nonlinear operators in `inf_model` to a `JuMP` model `opt_model`. This is intended as an internal method, but it is provided for developers that -extend `InfiniteOpt` to use other optimizer models. +extend `InfiniteOpt` to use new [`JuMPBackend`](@ref)s. """ function add_operators_to_jump(opt_model::JuMP.Model, inf_model::InfiniteModel) for data in added_nonlinear_operators(inf_model) diff --git a/src/results.jl b/src/results.jl index 5826dbe9..45d7fcd1 100644 --- a/src/results.jl +++ b/src/results.jl @@ -581,7 +581,7 @@ end )::InfOptSensitivityReport Extends `JuMP.lp_sensitivity_report` to generate and return an LP sensitivity -report in accordance with the optimizer model. See +report in accordance with the transformation backend. See [`InfOptSensitivityReport`](@ref) for syntax details on how to query it. `atol` denotes the optimality tolerance and should match that used by the solver to compute the basis. Please refer to `JuMP`'s documentation for more technical diff --git a/src/scalar_parameters.jl b/src/scalar_parameters.jl index 66a460da..76c5d839 100644 --- a/src/scalar_parameters.jl +++ b/src/scalar_parameters.jl @@ -744,7 +744,7 @@ end Create generative supports for `pref` if needed in accordance with its generative support info using [`make_generative_supports`](@ref) and add them -to `pref`. This is intended as an internal function, but can be useful user defined -optimizer model extensions that utlize our support system. +to `pref`. This is intended as an internal function, but can be useful for user-defined +transformation backend extensions that utilize our support system. """ function add_generative_supports(pref::IndependentParameterRef)::Nothing info = generative_support_info(pref) @@ -1651,7 +1651,7 @@ function JuMP.delete( -error("Cannot delete `$pref` since it is used by an parameter ", +error("Cannot delete `$pref` since it is used by a parameter ", "function(s).") end - # update optimizer model status + # update transformation backend status if is_used(pref) set_transformation_backend_ready(model, false) end @@ -1675,7 +1675,7 @@ end # FiniteParameterRef function JuMP.delete(model::InfiniteModel, pref::FiniteParameterRef)::Nothing @assert JuMP.is_valid(model, pref) "Parameter reference is invalid."
- # update optimizer model status + # update transformation backend status if is_used(pref) set_transformation_backend_ready(model, false) end diff --git a/src/variable_basics.jl b/src/variable_basics.jl index aa2a5c19..11c2b3de 100644 --- a/src/variable_basics.jl +++ b/src/variable_basics.jl @@ -1116,7 +1116,7 @@ Subject to """ function JuMP.delete(model::InfiniteModel, vref::DecisionVariableRef)::Nothing @assert JuMP.is_valid(model, vref) "Variable is invalid." - # update the optimizer model status + # update the transformation backend status if is_used(vref) set_transformation_backend_ready(model, false) end diff --git a/test/backend_setup.jl b/test/backend_setup.jl index e822c448..e19c1734 100644 --- a/test/backend_setup.jl +++ b/test/backend_setup.jl @@ -51,6 +51,8 @@ @test_throws ErrorException set_attribute(TestBackend(), MOI.TimeLimitSec(), 10.) @test set_optimizer_attribute(m, MOI.TimeLimitSec(), 12.) isa Nothing @test get_optimizer_attribute(m, MOI.TimeLimitSec()) == 12 + @test set_attributes(m, MOI.TimeLimitSec() => 10.) isa Nothing + @test get_attribute(m, MOI.TimeLimitSec()) == 10 end # Base.empty! @testset "Base.empty!" begin From 319bb878298244d6d14e10433c87c625d45abf16 Mon Sep 17 00:00:00 2001 From: pulsipher Date: Wed, 3 Jul 2024 12:06:13 -0400 Subject: [PATCH 5/8] project updates --- Project.toml | 2 +- docs/Project.toml | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Project.toml b/Project.toml index 53585864..cd5e0d3c 100644 --- a/Project.toml +++ b/Project.toml @@ -1,7 +1,7 @@ name = "InfiniteOpt" uuid = "20393b10-9daf-11e9-18c9-8db751c92c57" authors = ["Joshua Pulsipher and Weiqi Zhang"] -version = "0.6.0" +version = "0.5.9" [deps] DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8" diff --git a/docs/Project.toml b/docs/Project.toml index 091907c4..1e7c869d 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -14,12 +14,12 @@ Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" [compat] Distributions = "0.25" -Documenter = "0.27" +Documenter = "1.5" InfiniteOpt = "0.5" -Ipopt = "1.4" +Ipopt = "1.6" HiGHS = "1" julia = "1.6" -JuMP = "1.15" -Literate = "2.14" +JuMP = "1.22" +Literate = "2.18" Plots = "1" SpecialFunctions = "2" From 95f45dbbc4f05d5500803400848fadc40889b71a Mon Sep 17 00:00:00 2001 From: pulsipher Date: Wed, 3 Jul 2024 13:25:26 -0400 Subject: [PATCH 6/8] fix tests --- src/infinite_domains.jl | 12 ++++++------ src/scalar_parameters.jl | 2 +- test/TranscriptionOpt/transcribe.jl | 2 ++ test/backend_mappings.jl | 1 + test/show.jl | 3 +++ 5 files changed, 13 insertions(+), 7 deletions(-) diff --git a/src/infinite_domains.jl b/src/infinite_domains.jl index 68c5ecf5..d8d33319 100644 --- a/src/infinite_domains.jl +++ b/src/infinite_domains.jl @@ -128,7 +128,7 @@ for unrecognized domain types. **Example** ```jldoctest; setup = :(using InfiniteOpt, JuMP) -julia> domain = InfiniteDomain(0, 1); +julia> domain = IntervalDomain(0, 1); julia> has_lower_bound(domain) true @@ -164,7 +164,7 @@ returns `false`. Extensions are enabled by `JuMP.has_lower_bound(domain)` and **Example** ```jldoctest; setup = :(using InfiniteOpt, JuMP) -julia> domain = InfiniteDomain(0, 1); +julia> domain = IntervalDomain(0, 1); julia> lower_bound(domain) 0.0 @@ -198,7 +198,7 @@ User-defined domain types should extend this if appropriate. 
**Example** ```jldoctest; setup = :(using InfiniteOpt, JuMP) -julia> domain = InfiniteDomain(0, 1); +julia> domain = IntervalDomain(0, 1); julia> set_lower_bound(domain, 0.5) [0.5, 1] @@ -245,7 +245,7 @@ for unrecognized domain types. **Example** ```jldoctest; setup = :(using InfiniteOpt, JuMP) -julia> domain = InfiniteDomain(0, 1); +julia> domain = IntervalDomain(0, 1); julia> has_upper_bound(domain) true @@ -281,7 +281,7 @@ returns `false`. Extensions are enabled by `JuMP.has_upper_bound(domain)` and **Example** ```jldoctest; setup = :(using InfiniteOpt, JuMP) -julia> domain = InfiniteDomain(0, 1); +julia> domain = IntervalDomain(0, 1); julia> upper_bound(domain) 1.0 @@ -316,7 +316,7 @@ User-defined domain types should extend this if appropriate. **Example** ```jldoctest; setup = :(using InfiniteOpt, JuMP) -julia> domain = InfiniteDomain(0, 1); +julia> domain = IntervalDomain(0, 1); julia> set_upper_bound(domain, 0.5) [0, 0.5] diff --git a/src/scalar_parameters.jl b/src/scalar_parameters.jl index 76c5d839..424ad781 100644 --- a/src/scalar_parameters.jl +++ b/src/scalar_parameters.jl @@ -225,7 +225,7 @@ a helper method for [`@finite_parameter`](@ref). **Example** ```jldoctest; setup = :(using InfiniteOpt) -julia> build_finite_parameter(error, 1) +julia> build_parameter(error, 1) FiniteParameter(1.0) ``` """ diff --git a/test/TranscriptionOpt/transcribe.jl b/test/TranscriptionOpt/transcribe.jl index 38007b55..684e1ed8 100644 --- a/test/TranscriptionOpt/transcribe.jl +++ b/test/TranscriptionOpt/transcribe.jl @@ -610,4 +610,6 @@ end @test transformation_backend_ready(m) @test num_variables(m.backend.model) == 44 @test time_limit_sec(m.backend) == 42 + # test bad keyword + @test_throws ErrorException build_transformation_backend!(m, bad = 42) end \ No newline at end of file diff --git a/test/backend_mappings.jl b/test/backend_mappings.jl index 27682b61..46eb6fbb 100644 --- a/test/backend_mappings.jl +++ b/test/backend_mappings.jl @@ -210,6 +210,7 @@ end @test transformation_backend_ready(m) set_transformation_backend(m, TestBackend()) @test_throws ErrorException optimize!(m) + @test_throws ErrorException optimize!(TestBackend()) end # Test that we avoid world age problems with generated parameter functions diff --git a/test/show.jl b/test/show.jl index be274c24..f0854155 100644 --- a/test/show.jl +++ b/test/show.jl @@ -767,6 +767,9 @@ end str = "Transformation backend information: \n Backend type: TranscriptionBackend\n " * "Solver name: Mock\n Transformation built and up-to-date: false\n" io_test(show_backend_summary, str, m) + # test fallback + str = " Backend type: TestBackend\n" + io_test(show_backend_summary, str, InfiniteModel(), TestBackend()) end # test show_objective_function_summary @testset "JuMP.show_objective_function_summary" begin From 9b78285090d94e8d4006443df15994c2c332617b Mon Sep 17 00:00:00 2001 From: pulsipher Date: Wed, 3 Jul 2024 13:41:44 -0400 Subject: [PATCH 7/8] fix/debug docs --- docs/make.jl | 1 + docs/src/manual/variable.md | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/make.jl b/docs/make.jl index 798337c1..4ea1c6c1 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -101,6 +101,7 @@ makedocs(; checkdocs = :exports, linkcheck = true, linkcheck_ignore = [r"https://www.youtube.com/.*", "https://github.com/infiniteopt/InfiniteOpt.jl/blob/master/test/extensions/backend.jl"], # TODO remove before merging + warnonly = true, format = Documenter.HTML( # See https://github.com/JuliaDocs/Documenter.jl/issues/868 prettyurls = get(ENV, "CI", nothing) == 
"true", diff --git a/docs/src/manual/variable.md b/docs/src/manual/variable.md index 196c1019..04693eaa 100644 --- a/docs/src/manual/variable.md +++ b/docs/src/manual/variable.md @@ -10,7 +10,6 @@ which originates from `JuMP.jl`. ### Infinite ```@docs -JuMP.@variable InfOptVariableType Infinite JuMP.build_variable(::Function, ::JuMP.VariableInfo, ::Infinite) From 85c3ab5c15999de6b301fd2722acf786d4c0e979 Mon Sep 17 00:00:00 2001 From: pulsipher Date: Wed, 3 Jul 2024 14:34:45 -0400 Subject: [PATCH 8/8] remove checkdocs --- docs/make.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/make.jl b/docs/make.jl index 4ea1c6c1..42f0916d 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -98,10 +98,10 @@ makedocs(; sitename = "InfiniteOpt.jl", authors = "Joshua Pulsipher and Weiqi Zhang", modules = [InfiniteOpt], - checkdocs = :exports, + checkdocs = :none, linkcheck = true, linkcheck_ignore = [r"https://www.youtube.com/.*", "https://github.com/infiniteopt/InfiniteOpt.jl/blob/master/test/extensions/backend.jl"], # TODO remove before merging - warnonly = true, + # warnonly = true, format = Documenter.HTML( # See https://github.com/JuliaDocs/Documenter.jl/issues/868 prettyurls = get(ENV, "CI", nothing) == "true",