From bc0d9c83d6a9b0447d631c36ddb9181436741359 Mon Sep 17 00:00:00 2001 From: Oscar Dowson Date: Fri, 25 Oct 2024 13:58:30 +1300 Subject: [PATCH] Add bound constraints to the Formulation object (#155) --- src/predictors/Affine.jl | 11 +++-- src/predictors/Pipeline.jl | 4 +- src/predictors/ReLU.jl | 84 +++++++++++++++++++++++--------------- src/predictors/Scale.jl | 16 ++++++-- src/predictors/Sigmoid.jl | 17 ++++---- src/predictors/SoftMax.jl | 19 ++++----- src/predictors/SoftPlus.jl | 18 ++++---- src/predictors/Tanh.jl | 17 ++++---- src/utilities.jl | 16 +++++++- test/test_predictors.jl | 56 ++++++++++++++++++++++++- 10 files changed, 175 insertions(+), 83 deletions(-) diff --git a/src/predictors/Affine.jl b/src/predictors/Affine.jl index a4b9f074..31aec105 100644 --- a/src/predictors/Affine.jl +++ b/src/predictors/Affine.jl @@ -22,7 +22,7 @@ julia> using JuMP, MathOptAI julia> model = Model(); -julia> @variable(model, x[1:2]); +julia> @variable(model, 0 <= x[i in 1:2] <= i); julia> f = MathOptAI.Affine([2.0 3.0], [4.0]) Affine(A, b) [input: 2, output: 1] @@ -37,7 +37,9 @@ julia> formulation Affine(A, b) [input: 2, output: 1] ├ variables [1] │ └ moai_Affine[1] -└ constraints [1] +└ constraints [3] + ├ moai_Affine[1] ≥ 4 + ├ moai_Affine[1] ≤ 12 └ 2 x[1] + 3 x[2] - moai_Affine[1] = -4 julia> y, formulation = @@ -75,6 +77,7 @@ function add_predictor(model::JuMP.AbstractModel, predictor::Affine, x::Vector) m = size(predictor.A, 1) y = JuMP.@variable(model, [1:m], base_name = "moai_Affine") bounds = _get_variable_bounds.(x) + cons = Any[] for i in 1:size(predictor.A, 1) y_lb, y_ub = predictor.b[i], predictor.b[i] for j in 1:size(predictor.A, 2) @@ -83,9 +86,9 @@ function add_predictor(model::JuMP.AbstractModel, predictor::Affine, x::Vector) y_ub += a_ij * ifelse(a_ij >= 0, ub, lb) y_lb += a_ij * ifelse(a_ij >= 0, lb, ub) end - _set_bounds_if_finite(y[i], y_lb, y_ub) + _set_bounds_if_finite(cons, y[i], y_lb, y_ub) end - cons = JuMP.@constraint(model, predictor.A * x .+ predictor.b .== y) + append!(cons, JuMP.@constraint(model, predictor.A * x .+ predictor.b .== y)) return y, Formulation(predictor, y, cons) end diff --git a/src/predictors/Pipeline.jl b/src/predictors/Pipeline.jl index 795d2640..e00412c9 100644 --- a/src/predictors/Pipeline.jl +++ b/src/predictors/Pipeline.jl @@ -46,7 +46,9 @@ ReLUQuadratic() ├ variables [2] │ ├ moai_ReLU[1] │ └ moai_z[1] -└ constraints [2] +└ constraints [4] + ├ moai_ReLU[1] ≥ 0 + ├ moai_z[1] ≥ 0 ├ moai_Affine[1] - moai_ReLU[1] + moai_z[1] = 0 └ moai_ReLU[1]*moai_z[1] = 0 ``` diff --git a/src/predictors/ReLU.jl b/src/predictors/ReLU.jl index becabdc2..3bd0a444 100644 --- a/src/predictors/ReLU.jl +++ b/src/predictors/ReLU.jl @@ -17,7 +17,7 @@ julia> using JuMP, MathOptAI julia> model = Model(); -julia> @variable(model, x[1:2]); +julia> @variable(model, -1 <= x[i in 1:2] <= i); julia> f = MathOptAI.ReLU() ReLU() @@ -34,9 +34,11 @@ ReLU() ├ variables [2] │ ├ moai_ReLU[1] │ └ moai_ReLU[2] -└ constraints [4] +└ constraints [6] ├ moai_ReLU[1] ≥ 0 + ├ moai_ReLU[1] ≤ 1 ├ moai_ReLU[2] ≥ 0 + ├ moai_ReLU[2] ≤ 2 ├ moai_ReLU[1] - max(0.0, x[1]) = 0 └ moai_ReLU[2] - max(0.0, x[2]) = 0 @@ -57,12 +59,10 @@ ReducedSpace(ReLU()) struct ReLU <: AbstractPredictor end function add_predictor(model::JuMP.AbstractModel, predictor::ReLU, x::Vector) - ub = last.(_get_variable_bounds.(x)) y = JuMP.@variable(model, [1:length(x)], base_name = "moai_ReLU") - _set_bounds_if_finite.(y, 0, max.(0, ub)) - cons = JuMP.@constraint(model, y .== max.(0, x)) - constraints = Any[JuMP.LowerBoundRef.(y); 
cons] - return y, Formulation(predictor, y, constraints) + cons = _set_direct_bounds(x -> max(0, x), 0, nothing, x, y) + append!(cons, JuMP.@constraint(model, y .== max.(0, x))) + return y, Formulation(predictor, y, cons) end function add_predictor( @@ -103,17 +103,21 @@ ReLUBigM(100.0) ├ variables [4] │ ├ moai_ReLU[1] │ ├ moai_ReLU[2] -│ ├ _[5] -│ └ _[6] -└ constraints [8] - ├ _[5] binary +│ ├ moai_z[1] +│ └ moai_z[2] +└ constraints [12] + ├ moai_ReLU[1] ≥ 0 + ├ moai_ReLU[1] ≤ 1 + ├ moai_ReLU[2] ≥ 0 + ├ moai_ReLU[2] ≤ 2 + ├ moai_z[1] binary ├ -x[1] + moai_ReLU[1] ≥ 0 - ├ moai_ReLU[1] - _[5] ≤ 0 - ├ -x[1] + moai_ReLU[1] + 3 _[5] ≤ 3 - ├ _[6] binary + ├ moai_ReLU[1] - moai_z[1] ≤ 0 + ├ -x[1] + moai_ReLU[1] + 3 moai_z[1] ≤ 3 + ├ moai_z[2] binary ├ -x[2] + moai_ReLU[2] ≥ 0 - ├ moai_ReLU[2] - 2 _[6] ≤ 0 - └ -x[2] + moai_ReLU[2] + 3 _[6] ≤ 3 + ├ moai_ReLU[2] - 2 moai_z[2] ≤ 0 + └ -x[2] + moai_ReLU[2] + 3 moai_z[2] ≤ 3 ``` """ struct ReLUBigM <: AbstractPredictor @@ -126,14 +130,14 @@ function add_predictor( x::Vector, ) m = length(x) - bounds = _get_variable_bounds.(x) y = JuMP.@variable(model, [1:m], base_name = "moai_ReLU") - _set_bounds_if_finite.(y, 0, max.(0, last.(bounds))) - formulation = Formulation(predictor) + cons = _set_direct_bounds(x -> max(0, x), 0, nothing, x, y) + formulation = Formulation(predictor, Any[], cons) append!(formulation.variables, y) for i in 1:m - lb, ub = bounds[i] + lb, ub = _get_variable_bounds(x[i]) z = JuMP.@variable(model, binary = true) + JuMP.set_name(z, "moai_z[$i]") push!(formulation.variables, z) push!(formulation.constraints, JuMP.BinaryRef(z)) c = JuMP.@constraint(model, y[i] >= x[i]) @@ -167,7 +171,7 @@ julia> using JuMP, MathOptAI julia> model = Model(); -julia> @variable(model, x[1:2] >= -1); +julia> @variable(model, -1 <= x[i in 1:2] <= i); julia> f = MathOptAI.ReLUSOS1() ReLUSOS1() @@ -186,7 +190,13 @@ ReLUSOS1() │ ├ moai_ReLU[2] │ ├ moai_z[1] │ └ moai_z[2] -└ constraints [4] +└ constraints [10] + ├ moai_ReLU[1] ≥ 0 + ├ moai_ReLU[1] ≤ 1 + ├ moai_ReLU[2] ≥ 0 + ├ moai_ReLU[2] ≤ 2 + ├ moai_z[1] ≤ 1 + ├ moai_z[2] ≤ 1 ├ x[1] - moai_ReLU[1] + moai_z[1] = 0 ├ x[2] - moai_ReLU[2] + moai_z[2] = 0 ├ [moai_ReLU[1], moai_z[1]] ∈ MathOptInterface.SOS1{Float64}([1.0, 2.0]) @@ -203,11 +213,11 @@ function add_predictor( m = length(x) bounds = _get_variable_bounds.(x) y = JuMP.@variable(model, [i in 1:m], base_name = "moai_ReLU") - _set_bounds_if_finite.(y, 0, max.(0, last.(bounds))) + cons = _set_direct_bounds(x -> max(0, x), 0, nothing, x, y) z = JuMP.@variable(model, [1:m], lower_bound = 0, base_name = "moai_z") - _set_bounds_if_finite.(z, nothing, -first.(bounds)) - cons = JuMP.@constraint(model, x .== y - z) - formulation = Formulation(predictor, Any[y; z], Any[cons;]) + _set_bounds_if_finite.(Ref(cons), z, nothing, -first.(bounds)) + append!(cons, JuMP.@constraint(model, x .== y - z)) + formulation = Formulation(predictor, Any[y; z], cons) for i in 1:m c = JuMP.@constraint(model, [y[i], z[i]] in MOI.SOS1([1.0, 2.0])) push!(formulation.constraints, c) @@ -235,7 +245,7 @@ julia> using JuMP, MathOptAI julia> model = Model(); -julia> @variable(model, x[1:2] >= -1); +julia> @variable(model, -1 <= x[i in 1:2] <= i); julia> f = MathOptAI.ReLUQuadratic() ReLUQuadratic() @@ -254,7 +264,15 @@ ReLUQuadratic() │ ├ moai_ReLU[2] │ ├ moai_z[1] │ └ moai_z[2] -└ constraints [4] +└ constraints [12] + ├ moai_ReLU[1] ≥ 0 + ├ moai_ReLU[1] ≤ 1 + ├ moai_ReLU[2] ≥ 0 + ├ moai_ReLU[2] ≤ 2 + ├ moai_z[1] ≥ 0 + ├ moai_z[1] ≤ 1 + ├ moai_z[2] ≥ 0 + ├ moai_z[2] ≤ 1 ├ x[1] - moai_ReLU[1] + 
moai_z[1] = 0 ├ x[2] - moai_ReLU[2] + moai_z[2] = 0 ├ moai_ReLU[1]*moai_z[1] = 0 @@ -271,10 +289,10 @@ function add_predictor( m = length(x) bounds = _get_variable_bounds.(x) y = JuMP.@variable(model, [1:m], base_name = "moai_ReLU") - _set_bounds_if_finite.(y, 0, max.(0, last.(bounds))) + cons = _set_direct_bounds(x -> max(0, x), 0, nothing, x, y) z = JuMP.@variable(model, [1:m], base_name = "moai_z") - _set_bounds_if_finite.(z, 0, max.(0, -first.(bounds))) - c1 = JuMP.@constraint(model, x .== y - z) - c2 = JuMP.@constraint(model, y .* z .== 0) - return y, Formulation(predictor, Any[y; z], Any[c1; c2]) + _set_bounds_if_finite.(Ref(cons), z, 0, max.(0, -first.(bounds))) + append!(cons, JuMP.@constraint(model, x .== y - z)) + append!(cons, JuMP.@constraint(model, y .* z .== 0)) + return y, Formulation(predictor, Any[y; z], cons) end diff --git a/src/predictors/Scale.jl b/src/predictors/Scale.jl index 481db88a..9e1f74f4 100644 --- a/src/predictors/Scale.jl +++ b/src/predictors/Scale.jl @@ -22,7 +22,7 @@ julia> using JuMP, MathOptAI julia> model = Model(); -julia> @variable(model, x[1:2]); +julia> @variable(model, 0 <= x[i in 1:2] <= i); julia> f = MathOptAI.Scale([2.0, 3.0], [4.0, 5.0]) Scale(scale, bias) @@ -39,7 +39,11 @@ Scale(scale, bias) ├ variables [2] │ ├ moai_Scale[1] │ └ moai_Scale[2] -└ constraints [2] +└ constraints [6] + ├ moai_Scale[1] ≥ 4 + ├ moai_Scale[1] ≤ 6 + ├ moai_Scale[2] ≥ 5 + ├ moai_Scale[2] ≤ 11 ├ 2 x[1] - moai_Scale[1] = -4 └ 3 x[2] - moai_Scale[2] = -5 @@ -70,14 +74,18 @@ function add_predictor(model::JuMP.AbstractModel, predictor::Scale, x::Vector) m = length(predictor.scale) y = JuMP.@variable(model, [1:m], base_name = "moai_Scale") bounds = _get_variable_bounds.(x) + cons = Any[] for (i, scale) in enumerate(predictor.scale) y_lb = y_ub = predictor.bias[i] lb, ub = bounds[i] y_ub += scale * ifelse(scale >= 0, ub, lb) y_lb += scale * ifelse(scale >= 0, lb, ub) - _set_bounds_if_finite(y[i], y_lb, y_ub) + _set_bounds_if_finite(cons, y[i], y_lb, y_ub) end - cons = JuMP.@constraint(model, predictor.scale .* x .+ predictor.bias .== y) + append!( + cons, + JuMP.@constraint(model, predictor.scale .* x .+ predictor.bias .== y), + ) return y, Formulation(predictor, y, cons) end diff --git a/src/predictors/Sigmoid.jl b/src/predictors/Sigmoid.jl index e7ada8f9..5ed6cbef 100644 --- a/src/predictors/Sigmoid.jl +++ b/src/predictors/Sigmoid.jl @@ -17,7 +17,7 @@ julia> using JuMP, MathOptAI julia> model = Model(); -julia> @variable(model, x[1:2]); +julia> @variable(model, -1 <= x[i in 1:2] <= i); julia> f = MathOptAI.Sigmoid() Sigmoid() @@ -35,10 +35,10 @@ Sigmoid() │ ├ moai_Sigmoid[1] │ └ moai_Sigmoid[2] └ constraints [6] - ├ moai_Sigmoid[1] ≥ 0 - ├ moai_Sigmoid[2] ≥ 0 - ├ moai_Sigmoid[1] ≤ 1 - ├ moai_Sigmoid[2] ≤ 1 + ├ moai_Sigmoid[1] ≥ 0.2689414213699951 + ├ moai_Sigmoid[1] ≤ 0.7310585786300049 + ├ moai_Sigmoid[2] ≥ 0.2689414213699951 + ├ moai_Sigmoid[2] ≤ 0.8807970779778823 ├ moai_Sigmoid[1] - (1.0 / (1.0 + exp(-x[1]))) = 0 └ moai_Sigmoid[2] - (1.0 / (1.0 + exp(-x[2]))) = 0 @@ -60,10 +60,9 @@ struct Sigmoid <: AbstractPredictor end function add_predictor(model::JuMP.AbstractModel, predictor::Sigmoid, x::Vector) y = JuMP.@variable(model, [1:length(x)], base_name = "moai_Sigmoid") - _set_bounds_if_finite.(y, 0, 1) - cons = JuMP.@constraint(model, y .== 1 ./ (1 .+ exp.(-x))) - constraints = Any[JuMP.LowerBoundRef.(y); JuMP.UpperBoundRef.(y); cons] - return y, Formulation(predictor, y, constraints) + cons = _set_direct_bounds(x -> 1 / (1 + exp(-x)), 0, 1, x, y) + append!(cons, 
JuMP.@constraint(model, y .== 1 ./ (1 .+ exp.(-x)))) + return y, Formulation(predictor, y, cons) end function add_predictor( diff --git a/src/predictors/SoftMax.jl b/src/predictors/SoftMax.jl index 5fcae811..cafa830b 100644 --- a/src/predictors/SoftMax.jl +++ b/src/predictors/SoftMax.jl @@ -37,8 +37,8 @@ SoftMax() │ └ moai_SoftMax[2] └ constraints [8] ├ moai_SoftMax[1] ≥ 0 - ├ moai_SoftMax[2] ≥ 0 ├ moai_SoftMax[1] ≤ 1 + ├ moai_SoftMax[2] ≥ 0 ├ moai_SoftMax[2] ≤ 1 ├ moai_SoftMax_denom ≥ 0 ├ moai_SoftMax_denom - (0.0 + exp(x[2]) + exp(x[1])) = 0 @@ -66,19 +66,14 @@ struct SoftMax <: AbstractPredictor end function add_predictor(model::JuMP.AbstractModel, predictor::SoftMax, x::Vector) y = JuMP.@variable(model, [1:length(x)], base_name = "moai_SoftMax") - _set_bounds_if_finite.(y, 0, 1) + cons = Any[] + _set_bounds_if_finite.(Ref(cons), y, 0, 1) denom = JuMP.@variable(model, base_name = "moai_SoftMax_denom") JuMP.set_lower_bound(denom, 0) - d_con = JuMP.@constraint(model, denom == sum(exp.(x))) - cons = JuMP.@constraint(model, y .== exp.(x) ./ denom) - constraints = [ - JuMP.LowerBoundRef.(y) - JuMP.UpperBoundRef.(y) - JuMP.LowerBoundRef(denom) - d_con - cons - ] - return y, Formulation(predictor, [denom; y], constraints) + push!(cons, JuMP.LowerBoundRef(denom)) + push!(cons, JuMP.@constraint(model, denom == sum(exp.(x)))) + append!(cons, JuMP.@constraint(model, y .== exp.(x) ./ denom)) + return y, Formulation(predictor, [denom; y], cons) end function add_predictor( diff --git a/src/predictors/SoftPlus.jl b/src/predictors/SoftPlus.jl index 1902d1cd..b48d99db 100644 --- a/src/predictors/SoftPlus.jl +++ b/src/predictors/SoftPlus.jl @@ -17,7 +17,7 @@ julia> using JuMP, MathOptAI julia> model = Model(); -julia> @variable(model, x[1:2]); +julia> @variable(model, -1 <= x[i in 1:2] <= i); julia> f = MathOptAI.SoftPlus(; beta = 2.0) SoftPlus(2.0) @@ -34,9 +34,11 @@ SoftPlus(2.0) ├ variables [2] │ ├ moai_SoftPlus[1] │ └ moai_SoftPlus[2] -└ constraints [4] - ├ moai_SoftPlus[1] ≥ 0 - ├ moai_SoftPlus[2] ≥ 0 +└ constraints [6] + ├ moai_SoftPlus[1] ≥ 0.0634640055214863 + ├ moai_SoftPlus[1] ≤ 1.0634640055214863 + ├ moai_SoftPlus[2] ≥ 0.0634640055214863 + ├ moai_SoftPlus[2] ≤ 2.0090749639589047 ├ moai_SoftPlus[1] - (log(1.0 + exp(2 x[1])) / 2.0) = 0 └ moai_SoftPlus[2] - (log(1.0 + exp(2 x[2])) / 2.0) = 0 @@ -64,11 +66,11 @@ function add_predictor( predictor::SoftPlus, x::Vector, ) + β = predictor.beta y = JuMP.@variable(model, [1:length(x)], base_name = "moai_SoftPlus") - _set_bounds_if_finite.(y, 0, nothing) - beta = predictor.beta - cons = JuMP.@constraint(model, y .== log.(1 .+ exp.(beta .* x)) ./ beta) - return y, Formulation(predictor, y, Any[JuMP.LowerBoundRef.(y); cons]) + cons = _set_direct_bounds(xi -> log(1 + exp(β * xi)) / β, 0, nothing, x, y) + append!(cons, JuMP.@constraint(model, y .== log.(1 .+ exp.(β .* x)) ./ β)) + return y, Formulation(predictor, y, cons) end function add_predictor( diff --git a/src/predictors/Tanh.jl b/src/predictors/Tanh.jl index 58efbe7c..6954f21b 100644 --- a/src/predictors/Tanh.jl +++ b/src/predictors/Tanh.jl @@ -17,7 +17,7 @@ julia> using JuMP, MathOptAI julia> model = Model(); -julia> @variable(model, x[1:2]); +julia> @variable(model, -1 <= x[i in 1:2] <= i); julia> f = MathOptAI.Tanh() Tanh() @@ -35,10 +35,10 @@ Tanh() │ ├ moai_Tanh[1] │ └ moai_Tanh[2] └ constraints [6] - ├ moai_Tanh[1] ≥ -1 - ├ moai_Tanh[2] ≥ -1 - ├ moai_Tanh[1] ≤ 1 - ├ moai_Tanh[2] ≤ 1 + ├ moai_Tanh[1] ≥ -0.7615941559557649 + ├ moai_Tanh[1] ≤ 0.7615941559557649 + ├ moai_Tanh[2] ≥ 
-0.7615941559557649 + ├ moai_Tanh[2] ≤ 0.9640275800758169 ├ moai_Tanh[1] - tanh(x[1]) = 0 └ moai_Tanh[2] - tanh(x[2]) = 0 @@ -60,10 +60,9 @@ struct Tanh <: AbstractPredictor end function add_predictor(model::JuMP.AbstractModel, predictor::Tanh, x::Vector) y = JuMP.@variable(model, [1:length(x)], base_name = "moai_Tanh") - _set_bounds_if_finite.(y, -1, 1) - cons = JuMP.@constraint(model, y .== tanh.(x)) - constraints = Any[JuMP.LowerBoundRef.(y); JuMP.UpperBoundRef.(y); cons] - return y, Formulation(predictor, y, constraints) + cons = _set_direct_bounds(tanh, -1, 1, x, y) + append!(cons, JuMP.@constraint(model, y .== tanh.(x))) + return y, Formulation(predictor, y, cons) end function add_predictor( diff --git a/src/utilities.jl b/src/utilities.jl index 3c9d5382..6eb9e885 100644 --- a/src/utilities.jl +++ b/src/utilities.jl @@ -22,15 +22,18 @@ function _get_variable_bounds(x::JuMP.GenericVariableRef{T}) where {T} end function _set_bounds_if_finite( + cons::Vector, x::JuMP.GenericVariableRef{T}, l::Union{Nothing,Real}, u::Union{Nothing,Real}, ) where {T} if l !== nothing && l > typemin(T) JuMP.set_lower_bound(x, l) + push!(cons, JuMP.LowerBoundRef(x)) end if u !== nothing && u < typemax(T) JuMP.set_upper_bound(x, u) + push!(cons, JuMP.UpperBoundRef(x)) end return end @@ -39,4 +42,15 @@ end _get_variable_bounds(::Any) = -Inf, Inf # Default fallback: skip setting variable bound -_set_bounds_if_finite(::Any, ::Any, ::Any) = nothing +_set_bounds_if_finite(::Vector, ::Any, ::Any, ::Any) = nothing + +function _set_direct_bounds(f::F, l, u, x::Vector, y::Vector) where {F} + cons = Any[] + for (xi, yi) in zip(x, y) + x_l, x_u = _get_variable_bounds(xi) + y_l = x_l === nothing ? l : f(x_l) + y_u = x_u === nothing ? u : f(x_u) + _set_bounds_if_finite(cons, yi, y_l, y_u) + end + return cons +end diff --git a/test/test_predictors.jl b/test/test_predictors.jl index 7412962d..f94f152a 100644 --- a/test/test_predictors.jl +++ b/test/test_predictors.jl @@ -147,7 +147,7 @@ function test_ReLU_bounds() model = Model() @variable(model, lb <= x <= ub) y, _ = MathOptAI.add_predictor(model, f, [x]) - @test lower_bound.(y) == [0.0] + @test lower_bound.(y) == [max(0.0, lb)] @test upper_bound.(y) == [max(0.0, ub)] end end @@ -254,6 +254,22 @@ function test_Sigmoid() return end +function test_Sigmoid_bounds() + f(x) = 1 / (1 + exp(-x)) + values = [-Inf, -2, 0, 2, Inf] + for lb in values, ub in values + if lb == Inf || ub == -Inf || lb > ub + continue + end + model = Model() + @variable(model, lb <= x <= ub) + y, _ = MathOptAI.add_predictor(model, MathOptAI.Sigmoid(), [x]) + @test lower_bound(y[1]) == f(lb) + @test upper_bound(y[1]) == f(ub) + end + return +end + function test_ReducedSpace_Sigmoid() model = Model(Ipopt.Optimizer) set_silent(model) @@ -324,6 +340,26 @@ function test_SoftPlus() return end +function test_SoftPlus_bounds() + f(x, beta) = log(1 + exp(beta * x)) / beta + values = [-Inf, -2, 0, 2, Inf] + for beta in [1.0, 1.5, 2.0], lb in values, ub in values + if lb == Inf || ub == -Inf || lb > ub + continue + end + model = Model() + @variable(model, lb <= x <= ub) + y, _ = MathOptAI.add_predictor(model, MathOptAI.SoftPlus(; beta), [x]) + @test lower_bound(y[1]) == f(lb, beta) + if isfinite(ub) + @test upper_bound(y[1]) == f(ub, beta) + else + @test !has_upper_bound(y[1]) + end + end + return +end + function test_ReducedSpace_SoftPlus() model = Model(Ipopt.Optimizer) set_silent(model) @@ -359,6 +395,21 @@ function test_Tanh() return end +function test_Tanh_bounds() + values = [-Inf, -2, 0, 2, Inf] + for lb in 
values, ub in values + if lb == Inf || ub == -Inf || lb > ub + continue + end + model = Model() + @variable(model, lb <= x <= ub) + y, _ = MathOptAI.add_predictor(model, MathOptAI.Tanh(), [x]) + @test lower_bound.(y) == [tanh(lb)] + @test upper_bound.(y) == [tanh(ub)] + end + return +end + function test_ReducedSpace_Tanh() model = Model(Ipopt.Optimizer) set_silent(model) @@ -408,7 +459,8 @@ function test_fallback_bound_methods() fake_variable = "x" l, u = MathOptAI._get_variable_bounds(fake_variable) @test (l, u) == (-Inf, Inf) - @test MathOptAI._set_bounds_if_finite(fake_variable, l, u) === nothing + cons = Any[] + @test MathOptAI._set_bounds_if_finite(cons, fake_variable, l, u) === nothing return end
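
Notes on the patch:

The common pattern across the activation predictors (`ReLU`, `Sigmoid`, `SoftPlus`, `Tanh`) is the new `_set_direct_bounds(f, l, u, x, y)` helper in `src/utilities.jl`: each of these activations is nondecreasing, so the image of an input interval `[x_l, x_u]` is exactly `[f(x_l), f(x_u)]`, and any finite bound that results is set on the output variable with its `LowerBoundRef`/`UpperBoundRef` pushed into the `Formulation`'s constraint list. A minimal sketch of the idea, assuming only JuMP; the helper name `direct_bounds!` is hypothetical, and unlike `_set_direct_bounds` it neither records the bound constraints nor uses the `l`/`u` fallbacks:

```julia
using JuMP

# Hypothetical helper, not part of the patch: for a nondecreasing scalar f,
# y = f(x) inherits the bounds [f(lb), f(ub)] from x's bounds [lb, ub].
# Infinite results are skipped, mirroring _set_bounds_if_finite.
function direct_bounds!(f::Function, x::VariableRef, y::VariableRef)
    lb = has_lower_bound(x) ? lower_bound(x) : -Inf
    ub = has_upper_bound(x) ? upper_bound(x) : Inf
    y_lb, y_ub = f(lb), f(ub)  # valid only because f is nondecreasing
    isfinite(y_lb) && set_lower_bound(y, y_lb)
    isfinite(y_ub) && set_upper_bound(y, y_ub)
    return
end

model = Model()
@variable(model, -1 <= x <= 2)
@variable(model, y)
direct_bounds!(tanh, x, y)
(lower_bound(y), upper_bound(y))  # ≈ (-0.76159, 0.96403), as in the Tanh doctest
```

Because `tanh(-Inf) == -1.0` and `tanh(Inf) == 1.0` in Julia, an unbounded input still yields the natural `[-1, 1]` bounds, which is why the patched `Tanh` code can replace the old unconditional `_set_bounds_if_finite.(y, -1, 1)` without losing anything.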
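
`Affine` and `Scale` cannot use `_set_direct_bounds` because each output mixes several inputs, so they keep the row-wise interval arithmetic in which every coefficient contributes the input bound matching its sign (`ifelse(a_ij >= 0, ub, lb)` for the upper bound, and the mirror image for the lower). A worked instance, using the data from the `Affine` doctest (`A = [2.0 3.0]`, `b = [4.0]`, `x[1] ∈ [0, 1]`, `x[2] ∈ [0, 2]`):

```julia
# Row-wise interval arithmetic as in add_predictor(model, ::Affine, x):
# with all-nonnegative coefficients the lower bound uses lb and the upper
# bound uses ub; a negative coefficient would swap them.
A, b = [2.0 3.0], [4.0]
lb, ub = [0.0, 0.0], [1.0, 2.0]
y_lb = b[1] + sum(A[1, j] * ifelse(A[1, j] >= 0, lb[j], ub[j]) for j in 1:2)
y_ub = b[1] + sum(A[1, j] * ifelse(A[1, j] >= 0, ub[j], lb[j]) for j in 1:2)
(y_lb, y_ub)  # (4.0, 12.0): the moai_Affine[1] ≥ 4 and ≤ 12 rows in the doctest
```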