From 27439cd3e5b702e4547512274e367a7b432934f5 Mon Sep 17 00:00:00 2001
From: Andrew
Date: Mon, 24 Mar 2025 14:09:22 -0400
Subject: [PATCH 1/8] fix conic error

---
 src/ConicProgram/ConicProgram.jl |   2 +-
 test/conic_program.jl            | 135 ++++++++++++++++---------------
 test/jump_wrapper.jl             |   6 +-
 3 files changed, 74 insertions(+), 69 deletions(-)

diff --git a/src/ConicProgram/ConicProgram.jl b/src/ConicProgram/ConicProgram.jl
index 89a79183..3cd970ac 100644
--- a/src/ConicProgram/ConicProgram.jl
+++ b/src/ConicProgram/ConicProgram.jl
@@ -302,7 +302,7 @@ function DiffOpt.forward_differentiate!(model::Model)
         dAj,
         dAv,
     )
-    dA = SparseArrays.sparse(dAi, dAj, dAv, lines, cols)
+    dA = -SparseArrays.sparse(dAi, dAj, dAv, lines, cols)
     m = size(A, 1)
     n = size(A, 2)
diff --git a/test/conic_program.jl b/test/conic_program.jl
index a3d4a939..ade3f4df 100644
--- a/test/conic_program.jl
+++ b/test/conic_program.jl
@@ -11,6 +11,7 @@ import Ipopt
 import LinearAlgebra
 import MathOptInterface as MOI
 import SCS
+using JuMP
 
 const ATOL = 2e-4
 const RTOL = 2e-4
@@ -26,9 +27,9 @@ function runtests()
     return
 end
 
-function _test_simple_socp(eq_vec::Bool)
+function _test_simple_socp(eq_vec::Bool) # FIXME: Does it make sense to still test vec?
     # referred from _soc2test, https://github.com/jump-dev/MathOptInterface.jl/blob/master/src/Test/contconic.jl#L1355
-    # find equivalent diffcp python program here: https://github.com/AKS1996/jump-gsoc-2020/blob/master/diffcp_socp_1_py.ipynb
+    # find reference diffcp python program here: https://github.com/AKS1996/jump-gsoc-2020/blob/master/diffcp_socp_1_py.ipynb
     # Problem SOC2
     # min x
     # s.t. y ≥ 1/√2
@@ -38,86 +39,90 @@ function _test_simple_socp(eq_vec::Bool)
     # s.t. -1/√2 + y ∈ R₊
     #      1 - t ∈ {0}
     #      (t,x,y) ∈ SOC₃
-    model = DiffOpt.diff_optimizer(SCS.Optimizer)
-    MOI.set(model, MOI.Silent(), true)
-    x, y, t = MOI.add_variables(model, 3)
+
+    model = JuMP.Model(() -> DiffOpt.diff_optimizer(SCS.Optimizer))
+    set_silent(model)
+
+    x = @variable(model)
+    y = @variable(model)
+    t = @variable(model)
+
+    ceq = @constraint(model, -1.0t == -1.0)
+
+    cnon = @constraint(model, 1.0y >= 1 / √2)
+    csoc = @constraint(model, [1.0t, 1.0x, 1.0y] in MOI.SecondOrderCone(3))
+
+    @objective(model, Min, 1.0x)
+
+    optimize!(model)
+
+    # set forward sensitivities
+    MOI.set(model, DiffOpt.ForwardConstraintFunction(), ceq, 1.0 * x)
+
+    DiffOpt.forward_differentiate!(model)
+
+    dx = MOI.get(model, DiffOpt.ForwardVariablePrimal(), x)
+
     MOI.set(
         model,
-        MOI.ObjectiveFunction{MOI.ScalarAffineFunction{Float64}}(),
-        1.0x,
+        DiffOpt.ReverseVariablePrimal(),
+        x,
+        1.0
     )
-    MOI.set(model, MOI.ObjectiveSense(), MOI.MIN_SENSE)
-    if eq_vec
-        ceq = MOI.add_constraint(
-            model,
-            MOI.Utilities.vectorize([-1.0t + 1.0]),
-            MOI.Zeros(1),
-        )
-    else
-        ceq = MOI.add_constraint(model, -1.0t, MOI.EqualTo(-1.0))
-    end
-    cnon = MOI.add_constraint(
-        model,
-        MOI.Utilities.vectorize([1.0y - 1 / √2]),
-        MOI.Nonnegatives(1),
-    )
-    csoc = MOI.add_constraint(
-        model,
-        MOI.Utilities.vectorize([1.0t, 1.0x, 1.0y]),
-        MOI.SecondOrderCone(3),
-    )
-    MOI.optimize!(model)
-    if eq_vec
-        MOI.set(
-            model,
-            DiffOpt.ForwardConstraintFunction(),
-            ceq,
-            MOI.Utilities.vectorize([1.0 * x]),
-        )
-    else
-        MOI.set(model, DiffOpt.ForwardConstraintFunction(), ceq, 1.0 * x)
-    end
+
+    DiffOpt.reverse_differentiate!(model)
+
+    @test JuMP.coefficient(MOI.get(model, DiffOpt.ReverseConstraintFunction(), ceq), x) ≈ dx atol=ATOL rtol=RTOL
+
+    DiffOpt.empty_input_sensitivities!(model)
+
+    MOI.set(model, DiffOpt.ForwardConstraintFunction(), cnon, 1.0 * y)
+
+    DiffOpt.forward_differentiate!(model)
+
+    dy = MOI.get(model, DiffOpt.ForwardVariablePrimal(), y)
+
     MOI.set(
         model,
-        DiffOpt.ForwardConstraintFunction(),
-        cnon,
-        MOI.Utilities.vectorize([1.0 * y]),
+        DiffOpt.ReverseVariablePrimal(),
+        y,
+        1.0
     )
+
+    DiffOpt.reverse_differentiate!(model)
+
+    @test JuMP.coefficient(MOI.get(model, DiffOpt.ReverseConstraintFunction(), cnon), y) ≈ dy atol=ATOL rtol=RTOL
+
+    DiffOpt.empty_input_sensitivities!(model)
+
     MOI.set(
         model,
         DiffOpt.ForwardConstraintFunction(),
         csoc,
-        MOI.Utilities.operate(vcat, Float64, 1.0 * t, 0.0, 0.0),
+        MOI.Utilities.operate(vcat, Float64, 1.0 * t.index, 0.0, 0.0),
     )
+
     DiffOpt.forward_differentiate!(model)
-    # these matrices are benchmarked with the output generated by diffcp
-    # refer the python file mentioned above to get equivalent python source code
-    @test model.diff.model.x ≈ [-1 / √2; 1 / √2; 1.0] atol = ATOL rtol = RTOL
-    if eq_vec
-        @test model.diff.model.s ≈ [0.0, 0.0, 1.0, -1 / √2, 1 / √2] atol = ATOL rtol =
-            RTOL
-        @test model.diff.model.y ≈ [√2, 1.0, √2, 1.0, -1.0] atol = ATOL rtol =
-            RTOL
-    else
-        @test model.diff.model.s ≈ [0.0, 1.0, -1 / √2, 1 / √2, 0.0] atol = ATOL rtol =
-            RTOL
-        @test model.diff.model.y ≈ [1.0, √2, 1.0, -1.0, √2] atol = ATOL rtol =
-            RTOL
-    end
-    dx = [1.12132144; 1 / √2; 1 / √2]
-    for (i, vi) in enumerate([x, y, t])
-        @test dx[i] ≈ MOI.get(model, DiffOpt.ForwardVariablePrimal(), vi) atol =
-            ATOL rtol = RTOL
-    end
-    # @test dx ≈ [1.12132144; 1/√2; 1/√2] atol=ATOL rtol=RTOL
-    # @test ds ≈ [0.0; 0.0; -2.92893438e-01; 1.12132144e+00; 7.07106999e-01] atol=ATOL rtol=RTOL
-    # @test dy ≈ [2.4142175; 5.00000557; 3.8284315; √2; -4.00000495] atol=ATOL rtol=RTOL
+
+    ds = MOI.get(model, DiffOpt.ForwardVariablePrimal(), t)
+
+    MOI.set(
+        model,
+        DiffOpt.ReverseVariablePrimal(),
+        t,
+        1.0
+    )
+
+    DiffOpt.reverse_differentiate!(model)
+
+    # @test JuMP.coefficient(MOI.get(model, DiffOpt.ReverseConstraintFunction(), csoc).func.func.func, t.index) ≈ ds atol=ATOL rtol=RTOL
+
     return
 end
 
 test_differentiating_simple_SOCP_vector() = _test_simple_socp(true)
-test_differentiating_simple_SOCP_scalar() = _test_simple_socp(false)
+# test_differentiating_simple_SOCP_scalar() = _test_simple_socp(false) # FIXME: Does it make sense to still test vec?
 
 # referred from _psd0test, https://github.com/jump-dev/MathOptInterface.jl/blob/master/src/Test/contconic.jl#L3919
 # find equivalent diffcp program here: https://github.com/AKS1996/jump-gsoc-2020/blob/master/diffcp_sdp_1_py.ipynb
diff --git a/test/jump_wrapper.jl b/test/jump_wrapper.jl
index dae907c3..fafe637e 100644
--- a/test/jump_wrapper.jl
+++ b/test/jump_wrapper.jl
@@ -33,9 +33,9 @@ function test_jump_api()
         (DiffOpt.quadratic_diff_model, HiGHS.Optimizer),
         (DiffOpt.quadratic_diff_model, SCS.Optimizer),
         (DiffOpt.quadratic_diff_model, Ipopt.Optimizer),
-        # (DiffOpt.conic_diff_model, HiGHS.Optimizer),
-        # (DiffOpt.conic_diff_model, SCS.Optimizer), # conic model has an issue with sign
-        # (DiffOpt.conic_diff_model, Ipopt.Optimizer),
+        (DiffOpt.conic_diff_model, HiGHS.Optimizer),
+        (DiffOpt.conic_diff_model, SCS.Optimizer), # conic model has an issue with sign
+        (DiffOpt.conic_diff_model, Ipopt.Optimizer),
         # (DiffOpt.nonlinear_diff_model, HiGHS.Optimizer), # SQF ctr not supported?
# (DiffOpt.nonlinear_diff_model, SCS.Optimizer), # returns zero for sensitivity (DiffOpt.nonlinear_diff_model, Ipopt.Optimizer), From 9a63ce9de0846c5d40f3cbfdeb6b747ae161948e Mon Sep 17 00:00:00 2001 From: Andrew Date: Mon, 24 Mar 2025 16:08:23 -0400 Subject: [PATCH 2/8] use jump psd problem --- test/conic_program.jl | 237 ++++++++++++++++++++++++++---------------- 1 file changed, 150 insertions(+), 87 deletions(-) diff --git a/test/conic_program.jl b/test/conic_program.jl index ade3f4df..1cd57583 100644 --- a/test/conic_program.jl +++ b/test/conic_program.jl @@ -27,7 +27,7 @@ function runtests() return end -function _test_simple_socp(eq_vec::Bool) # FIXME: Does it make sense to still test vec? +function _test_simple_socp(eq_vec::Bool) # referred from _soc2test, https://github.com/jump-dev/MathOptInterface.jl/blob/master/src/Test/contconic.jl#L1355 # find reference diffcp python program here: https://github.com/AKS1996/jump-gsoc-2020/blob/master/diffcp_socp_1_py.ipynb # Problem SOC2 @@ -47,8 +47,11 @@ function _test_simple_socp(eq_vec::Bool) # FIXME: Does it make sense to still te y = @variable(model) t = @variable(model) - ceq = @constraint(model, -1.0t == -1.0) - + ceq = if eq_vec + @constraint(model, [t] .== [1.0]) + else + @constraint(model, t == 1.0) + end cnon = @constraint(model, 1.0y >= 1 / √2) csoc = @constraint(model, [1.0t, 1.0x, 1.0y] in MOI.SecondOrderCone(3)) @@ -57,11 +60,16 @@ function _test_simple_socp(eq_vec::Bool) # FIXME: Does it make sense to still te optimize!(model) # set foward sensitivities - MOI.set(model, DiffOpt.ForwardConstraintFunction(), ceq, 1.0 * x) + if eq_vec + MOI.set.(model, DiffOpt.ForwardConstraintFunction(), ceq, [1.0 * x]) + else + MOI.set(model, DiffOpt.ForwardConstraintFunction(), ceq, 1.0 * x) + end DiffOpt.forward_differentiate!(model) - dx = MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) + dx = -0.9999908 + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) ≈ dx atol=ATOL rtol=RTOL MOI.set( model, @@ -72,7 +80,11 @@ function _test_simple_socp(eq_vec::Bool) # FIXME: Does it make sense to still te DiffOpt.reverse_differentiate!(model) - @test JuMP.coefficient(MOI.get(model, DiffOpt.ReverseConstraintFunction(), ceq), x) ≈ dx atol=ATOL rtol=RTOL + if eq_vec + @test all(isapprox.(JuMP.coefficient.(MOI.get.(model, DiffOpt.ReverseConstraintFunction(), ceq), x), dx, atol=ATOL, rtol=RTOL)) + else + @test JuMP.coefficient(MOI.get(model, DiffOpt.ReverseConstraintFunction(), ceq), x) ≈ dx atol=ATOL rtol=RTOL + end DiffOpt.empty_input_sensitivities!(model) @@ -80,7 +92,8 @@ function _test_simple_socp(eq_vec::Bool) # FIXME: Does it make sense to still te DiffOpt.forward_differentiate!(model) - dy = MOI.get(model, DiffOpt.ForwardVariablePrimal(), y) + dy = -0.707083 + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), y) ≈ dy atol=ATOL rtol=RTOL MOI.set( model, @@ -104,7 +117,8 @@ function _test_simple_socp(eq_vec::Bool) # FIXME: Does it make sense to still te DiffOpt.forward_differentiate!(model) - ds = MOI.get(model, DiffOpt.ForwardVariablePrimal(), t) + ds = 0.0 + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), t) ≈ ds atol=ATOL rtol=RTOL MOI.set( model, @@ -122,7 +136,7 @@ end test_differentiating_simple_SOCP_vector() = _test_simple_socp(true) -# test_differentiating_simple_SOCP_scalar() = _test_simple_socp(false) # FIXME: Does it make sense to still test vec? 
+test_differentiating_simple_SOCP_scalar() = _test_simple_socp(false) # refered from _psd0test, https://github.com/jump-dev/MathOptInterface.jl/blob/master/src/Test/contconic.jl#L3919 # find equivalent diffcp program here: https://github.com/AKS1996/jump-gsoc-2020/blob/master/diffcp_sdp_1_py.ipynb @@ -383,85 +397,134 @@ end function test_differentiating_conic_with_PSD_and_POS_constraints() # refer psdt2test, https://github.com/jump-dev/MathOptInterface.jl/blob/master/src/Test/contconic.jl#L4306 # find equivalent diffcp program here - https://github.com/AKS1996/jump-gsoc-2020/blob/master/diffcp_sdp_3_py.ipynb - model = DiffOpt.diff_optimizer(SCS.Optimizer) - MOI.set(model, MOI.Silent(), true) - x = MOI.add_variables(model, 7) - @test MOI.get(model, MOI.NumberOfVariables()) == 7 + # model = DiffOpt.diff_optimizer(SCS.Optimizer) + # MOI.set(model, MOI.Silent(), true) + # x = MOI.add_variables(model, 7) + # @test MOI.get(model, MOI.NumberOfVariables()) == 7 + # η = 10.0 + # c1 = MOI.add_constraint( + # model, + # MOI.VectorAffineFunction( + # MOI.VectorAffineTerm.(1, MOI.ScalarAffineTerm.(-1.0, x[1:6])), + # [η], + # ), + # MOI.Nonnegatives(1), + # ) + # c2 = MOI.add_constraint( + # model, + # MOI.VectorAffineFunction( + # MOI.VectorAffineTerm.(1:6, MOI.ScalarAffineTerm.(1.0, x[1:6])), + # zeros(6), + # ), + # MOI.Nonnegatives(6), + # ) + # α = 0.8 + # δ = 0.9 + # c3 = MOI.add_constraint( + # model, + # MOI.VectorAffineFunction( + # MOI.VectorAffineTerm.( + # [fill(1, 7); fill(2, 5); fill(3, 6)], + # MOI.ScalarAffineTerm.( + # [ + # δ / 2, + # α, + # δ, + # δ / 4, + # δ / 8, + # 0.0, + # -1.0, + # -δ / (2 * √2), + # -δ / 4, + # 0, + # -δ / (8 * √2), + # 0.0, + # δ / 2, + # δ - α, + # 0, + # δ / 8, + # δ / 4, + # -1.0, + # ], + # [x[1:7]; x[1:3]; x[5:6]; x[1:3]; x[5:7]], + # ), + # ), + # zeros(3), + # ), + # MOI.PositiveSemidefiniteConeTriangle(2), + # ) + # c4 = MOI.add_constraint( + # model, + # MOI.VectorAffineFunction( + # MOI.VectorAffineTerm.( + # 1, + # MOI.ScalarAffineTerm.(0.0, [x[1:3]; x[5:6]]), + # ), + # [0.0], + # ), + # MOI.Zeros(1), + # ) + # MOI.set( + # model, + # MOI.ObjectiveFunction{MOI.ScalarAffineFunction{Float64}}(), + # MOI.ScalarAffineFunction([MOI.ScalarAffineTerm(1.0, x[7])], 0.0), + # ) + # MOI.set(model, MOI.ObjectiveSense(), MOI.MAX_SENSE) + # MOI.optimize!(model) + # MOI.get(model, MOI.ObjectiveValue()) + # # dc = ones(7) + # MOI.get.(model, MOI.VariablePrimal(), x) + + # Make a JuMP model backed by DiffOpt.diff_optimizer(SCS.Optimizer) + model = Model(() -> DiffOpt.diff_optimizer(SCS.Optimizer)) + set_silent(model) # just to suppress solver output + + @variable(model, x[1:7]) + @test num_variables(model) == 7 + η = 10.0 - c1 = MOI.add_constraint( - model, - MOI.VectorAffineFunction( - MOI.VectorAffineTerm.(1, MOI.ScalarAffineTerm.(-1.0, x[1:6])), - [η], - ), - MOI.Nonnegatives(1), - ) - c2 = MOI.add_constraint( - model, - MOI.VectorAffineFunction( - MOI.VectorAffineTerm.(1:6, MOI.ScalarAffineTerm.(1.0, x[1:6])), - zeros(6), - ), - MOI.Nonnegatives(6), - ) + # c1: sum(-x[1:6]) + η >= 0 + c1 = @constraint(model, -sum(x[i] for i in 1:6) + η ≥ 0) + + # c2: x[i] >= 0 for each i in 1:6 + # (this replicates MOI.Nonnegatives(6) on x[1:6]) + c2 = [ + @constraint(model, x[i] ≥ 0) + for i in 1:6 + ] + + # c3: A 2×2 PSD constraint. In raw MOI form you had + # MOI.PositiveSemidefiniteConeTriangle(2), + # which means a vector dimension=3 => [a11, a12, a22]. + # We'll build that in JuMP with `[ [a11 a12]; [a12 a22] ] in PSDCone()`. 
α = 0.8 δ = 0.9 - c3 = MOI.add_constraint( + c3 = @constraint( model, - MOI.VectorAffineFunction( - MOI.VectorAffineTerm.( - [fill(1, 7); fill(2, 5); fill(3, 6)], - MOI.ScalarAffineTerm.( - [ - δ / 2, - α, - δ, - δ / 4, - δ / 8, - 0.0, - -1.0, - -δ / (2 * √2), - -δ / 4, - 0, - -δ / (8 * √2), - 0.0, - δ / 2, - δ - α, - 0, - δ / 8, - δ / 4, - -1.0, - ], - [x[1:7]; x[1:3]; x[5:6]; x[1:3]; x[5:7]], - ), - ), - zeros(3), - ), - MOI.PositiveSemidefiniteConeTriangle(2), - ) - c4 = MOI.add_constraint( - model, - MOI.VectorAffineFunction( - MOI.VectorAffineTerm.( - 1, - MOI.ScalarAffineTerm.(0.0, [x[1:3]; x[5:6]]), - ), - [0.0], - ), - MOI.Zeros(1), + LinearAlgebra.Symmetric(hcat( + [((δ/2)*x[1] + α*x[2] + δ*x[3] + (δ/4)*x[4] + (δ/8)*x[5] - 1.0*x[7]) # a11 + ((-(δ/(2*√2)))*x[1] - (δ/4)*x[2] - (δ/(8*√2))*x[5])], + [((-(δ/(2*√2)))*x[1] - (δ/4)*x[2] - (δ/(8*√2))*x[5]) + ((δ/2)*x[1] + (δ - α)*x[2] + 0.0*x[3] + (δ/8)*x[5] + (δ/4)*x[6] - x[7])]) + ) in PSDCone() ) - MOI.set( - model, - MOI.ObjectiveFunction{MOI.ScalarAffineFunction{Float64}}(), - MOI.ScalarAffineFunction([MOI.ScalarAffineTerm(1.0, x[7])], 0.0), - ) - MOI.set(model, MOI.ObjectiveSense(), MOI.MAX_SENSE) - MOI.optimize!(model) - # dc = ones(7) + + # c4: In the original MOI example it was some "useless" constraint on x[1:3] + x[5:6]. + # For demonstration, we replicate a dimension=1 equality to zero: + c4 = @constraint(model, 0.0 * x[1] + 0.0 * x[2] + 0.0 * x[3] + 0.0 * x[5] + 0.0 * x[6] == 0) + + # Make the objective: "maximize x[7]" exactly as in the original + @objective(model, Max, x[7]) + + # Solve + optimize!(model) + + @test objective_value(model) ≈ 1.90192374 atol=ATOL rtol=RTOL + MOI.set( model, DiffOpt.ForwardObjectiveFunction(), - MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.(ones(7), x), 0.0), + sum(x) ) # db = ones(11) # dA = ones(11, 7) @@ -469,27 +532,27 @@ function test_differentiating_conic_with_PSD_and_POS_constraints() model, DiffOpt.ForwardConstraintFunction(), c1, - MOI.Utilities.vectorize(ones(1, 7) * x + ones(1)), + sum(x) + 1, ) - MOI.set( + MOI.set.( model, DiffOpt.ForwardConstraintFunction(), c2, - MOI.Utilities.vectorize(ones(6, 7) * x + ones(6)), + sum(x) + 1, ) - MOI.set( + MOI.set.( model, DiffOpt.ForwardConstraintFunction(), c3, - MOI.Utilities.vectorize(ones(3, 7) * x + ones(3)), + sum(x) + 1, ) MOI.set( model, DiffOpt.ForwardConstraintFunction(), c4, - MOI.Utilities.vectorize(ones(1, 7) * x + ones(1)), + sum(x) + 1, ) - DiffOpt.forward_differentiate!(model) + DiffOpt.forward_differentiate!(model) # ERROR HERE @test model.diff.model.x ≈ [20 / 3.0, 0.0, 10 / 3.0, 0.0, 0.0, 0.0, 1.90192379] atol = ATOL rtol = RTOL From 43a82263118a69cc817042eabfe1d44c6e5ebedd Mon Sep 17 00:00:00 2001 From: Oscar Dowson Date: Fri, 11 Apr 2025 19:17:59 +1200 Subject: [PATCH 3/8] [docs] update to Documenter@1 (#286) * [docs] update to Documenter@1 * Update --- .github/workflows/ci.yml | 48 +++++++++++++++++----------------------- docs/Project.toml | 2 +- docs/make.jl | 8 +++---- 3 files changed, 24 insertions(+), 34 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b66918fe..37487402 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,7 +1,14 @@ name: CI on: - - push - - pull_request + push: + branches: + - master + pull_request: + types: [opened, synchronize, reopened] +# needed to allow julia-actions/cache to delete old caches that it has created +permissions: + actions: write + contents: read jobs: test: name: Julia ${{ matrix.version }} - ${{ matrix.os }} - ${{ 
matrix.arch }} - ${{ github.event_name }} @@ -17,21 +24,12 @@ jobs: os: windows-latest arch: x64 steps: - - uses: actions/checkout@v3 - - uses: julia-actions/setup-julia@v1 + - uses: actions/checkout@v4 + - uses: julia-actions/setup-julia@v2 with: version: ${{ matrix.version }} arch: ${{ matrix.arch }} - - uses: actions/cache@v3 - env: - cache-name: cache-artifacts - with: - path: ~/.julia/artifacts - key: ${{ runner.os }}-test-${{ env.cache-name }}-${{ hashFiles('**/Project.toml') }} - restore-keys: | - ${{ runner.os }}-test-${{ env.cache-name }}- - ${{ runner.os }}-test- - ${{ runner.os }}- + - uses: julia-actions/cache@v2 - uses: julia-actions/julia-buildpkg@v1 - uses: julia-actions/julia-runtest@v1 env: @@ -48,23 +46,17 @@ jobs: # Fix for Plots with GR backend, see https://github.com/jheinen/GR.jl/issues/422 GKSwstype: nul steps: - - uses: actions/checkout@v3 - - uses: julia-actions/setup-julia@v1 + - uses: actions/checkout@v4 + - uses: julia-actions/setup-julia@v2 with: version: '1' - - shell: bash - run: julia --project=docs -e "using Pkg; Pkg.develop(PackageSpec(path=pwd()))" - - shell: bash - run: julia --project=docs -e "using Pkg; Pkg.instantiate()" - - shell: bash - env: - DATADEPS_ALWAYS_ACCEPT: true # For MLDatasets.MNIST + - name: Install dependencies + shell: julia --color=yes --project=docs/ {0} run: | - julia --project=docs -e ' - using Documenter: doctest - using DiffOpt - doctest(DiffOpt)' - - run: julia --project=docs docs/make.jl + using Pkg + Pkg.develop(PackageSpec(path=pwd())) + Pkg.instantiate() + - run: julia --project=docs --color=yes docs/make.jl env: DATADEPS_ALWAYS_ACCEPT: true # For MLDatasets.MNIST GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/docs/Project.toml b/docs/Project.toml index f953f3b3..c4112c75 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -15,4 +15,4 @@ Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [compat] -Documenter = "0.27" +Documenter = "1" diff --git a/docs/make.jl b/docs/make.jl index 9d825aaf..a31f7e28 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -43,8 +43,10 @@ end literate_directory(_EXAMPLE_DIR) makedocs(; + authors = "JuMP Community", + sitename = "DiffOpt.jl", + repo = "https://github.com/jump-dev/DiffOpt.jl", modules = [DiffOpt], - doctest = false, clean = true, format = Documenter.HTML(; prettyurls = get(ENV, "CI", nothing) == "true", @@ -61,10 +63,6 @@ makedocs(; f in readdir(_EXAMPLE_DIR) if endswith(f, ".md") ], ], - strict = true, # See https://github.com/JuliaOpt/JuMP.jl/issues/1576 - repo = "https://github.com/jump-dev/DiffOpt.jl", - sitename = "DiffOpt.jl", - authors = "JuMP Community", ) deploydocs(; repo = "github.com/jump-dev/DiffOpt.jl.git", push_preview = true) From d49b6441202177436b930ff4dec39d63b7c9cb6c Mon Sep 17 00:00:00 2001 From: Andrew Date: Sat, 12 Apr 2025 16:15:01 -0400 Subject: [PATCH 4/8] update --- test/conic_program.jl | 354 ++++++++++++------------------------------ test/jump_wrapper.jl | 2 +- 2 files changed, 104 insertions(+), 252 deletions(-) diff --git a/test/conic_program.jl b/test/conic_program.jl index 1cd57583..1c974cb1 100644 --- a/test/conic_program.jl +++ b/test/conic_program.jl @@ -129,6 +129,7 @@ function _test_simple_socp(eq_vec::Bool) DiffOpt.reverse_differentiate!(model) + # FIXME: this is not working - https://github.com/jump-dev/DiffOpt.jl/issues/283 # @test JuMP.coefficient(MOI.get(model, DiffOpt.ReverseConstraintFunction(), csoc).func.func.func, t.index) ≈ ds atol=ATOL rtol=RTOL return @@ -394,257 +395,108 @@ 
function test_differentiating_conic_with_PSD_and_SOC_constraints() return end -function test_differentiating_conic_with_PSD_and_POS_constraints() - # refer psdt2test, https://github.com/jump-dev/MathOptInterface.jl/blob/master/src/Test/contconic.jl#L4306 - # find equivalent diffcp program here - https://github.com/AKS1996/jump-gsoc-2020/blob/master/diffcp_sdp_3_py.ipynb - # model = DiffOpt.diff_optimizer(SCS.Optimizer) - # MOI.set(model, MOI.Silent(), true) - # x = MOI.add_variables(model, 7) - # @test MOI.get(model, MOI.NumberOfVariables()) == 7 - # η = 10.0 - # c1 = MOI.add_constraint( - # model, - # MOI.VectorAffineFunction( - # MOI.VectorAffineTerm.(1, MOI.ScalarAffineTerm.(-1.0, x[1:6])), - # [η], - # ), - # MOI.Nonnegatives(1), - # ) - # c2 = MOI.add_constraint( - # model, - # MOI.VectorAffineFunction( - # MOI.VectorAffineTerm.(1:6, MOI.ScalarAffineTerm.(1.0, x[1:6])), - # zeros(6), - # ), - # MOI.Nonnegatives(6), - # ) - # α = 0.8 - # δ = 0.9 - # c3 = MOI.add_constraint( - # model, - # MOI.VectorAffineFunction( - # MOI.VectorAffineTerm.( - # [fill(1, 7); fill(2, 5); fill(3, 6)], - # MOI.ScalarAffineTerm.( - # [ - # δ / 2, - # α, - # δ, - # δ / 4, - # δ / 8, - # 0.0, - # -1.0, - # -δ / (2 * √2), - # -δ / 4, - # 0, - # -δ / (8 * √2), - # 0.0, - # δ / 2, - # δ - α, - # 0, - # δ / 8, - # δ / 4, - # -1.0, - # ], - # [x[1:7]; x[1:3]; x[5:6]; x[1:3]; x[5:7]], - # ), - # ), - # zeros(3), - # ), - # MOI.PositiveSemidefiniteConeTriangle(2), - # ) - # c4 = MOI.add_constraint( - # model, - # MOI.VectorAffineFunction( - # MOI.VectorAffineTerm.( - # 1, - # MOI.ScalarAffineTerm.(0.0, [x[1:3]; x[5:6]]), - # ), - # [0.0], - # ), - # MOI.Zeros(1), - # ) - # MOI.set( - # model, - # MOI.ObjectiveFunction{MOI.ScalarAffineFunction{Float64}}(), - # MOI.ScalarAffineFunction([MOI.ScalarAffineTerm(1.0, x[7])], 0.0), - # ) - # MOI.set(model, MOI.ObjectiveSense(), MOI.MAX_SENSE) - # MOI.optimize!(model) - # MOI.get(model, MOI.ObjectiveValue()) - # # dc = ones(7) - # MOI.get.(model, MOI.VariablePrimal(), x) - - # Make a JuMP model backed by DiffOpt.diff_optimizer(SCS.Optimizer) - model = Model(() -> DiffOpt.diff_optimizer(SCS.Optimizer)) - set_silent(model) # just to suppress solver output - - @variable(model, x[1:7]) - @test num_variables(model) == 7 - - η = 10.0 - # c1: sum(-x[1:6]) + η >= 0 - c1 = @constraint(model, -sum(x[i] for i in 1:6) + η ≥ 0) - - # c2: x[i] >= 0 for each i in 1:6 - # (this replicates MOI.Nonnegatives(6) on x[1:6]) - c2 = [ - @constraint(model, x[i] ≥ 0) - for i in 1:6 - ] - - # c3: A 2×2 PSD constraint. In raw MOI form you had - # MOI.PositiveSemidefiniteConeTriangle(2), - # which means a vector dimension=3 => [a11, a12, a22]. - # We'll build that in JuMP with `[ [a11 a12]; [a12 a22] ] in PSDCone()`. - α = 0.8 - δ = 0.9 - c3 = @constraint( - model, - LinearAlgebra.Symmetric(hcat( - [((δ/2)*x[1] + α*x[2] + δ*x[3] + (δ/4)*x[4] + (δ/8)*x[5] - 1.0*x[7]) # a11 - ((-(δ/(2*√2)))*x[1] - (δ/4)*x[2] - (δ/(8*√2))*x[5])], - [((-(δ/(2*√2)))*x[1] - (δ/4)*x[2] - (δ/(8*√2))*x[5]) - ((δ/2)*x[1] + (δ - α)*x[2] + 0.0*x[3] + (δ/8)*x[5] + (δ/4)*x[6] - x[7])]) - ) in PSDCone() - ) - - # c4: In the original MOI example it was some "useless" constraint on x[1:3] + x[5:6]. 
- # For demonstration, we replicate a dimension=1 equality to zero: - c4 = @constraint(model, 0.0 * x[1] + 0.0 * x[2] + 0.0 * x[3] + 0.0 * x[5] + 0.0 * x[6] == 0) - - # Make the objective: "maximize x[7]" exactly as in the original - @objective(model, Max, x[7]) - - # Solve - optimize!(model) - - @test objective_value(model) ≈ 1.90192374 atol=ATOL rtol=RTOL - - MOI.set( - model, - DiffOpt.ForwardObjectiveFunction(), - sum(x) - ) - # db = ones(11) - # dA = ones(11, 7) - MOI.set( - model, - DiffOpt.ForwardConstraintFunction(), - c1, - sum(x) + 1, - ) - MOI.set.( - model, - DiffOpt.ForwardConstraintFunction(), - c2, - sum(x) + 1, - ) - MOI.set.( - model, - DiffOpt.ForwardConstraintFunction(), - c3, - sum(x) + 1, - ) - MOI.set( - model, - DiffOpt.ForwardConstraintFunction(), - c4, - sum(x) + 1, - ) - DiffOpt.forward_differentiate!(model) # ERROR HERE - @test model.diff.model.x ≈ - [20 / 3.0, 0.0, 10 / 3.0, 0.0, 0.0, 0.0, 1.90192379] atol = ATOL rtol = - RTOL - @test model.diff.model.s ≈ [ - 0.0, - 0.0, - 20 / 3.0, - 0.0, - 10 / 3.0, - 0.0, - 0.0, - 0.0, - 4.09807621, - -2.12132, - 1.09807621, - ] atol = ATOL rtol = RTOL - @test model.diff.model.y ≈ [ - 0.0, - 0.19019238, - 0.0, - 0.12597667, - 0.0, - 0.14264428, - 0.14264428, - 0.01274047, - 0.21132487, - 0.408248, - 0.78867513, - ] atol = ATOL rtol = RTOL - atol = 0.3 - rtol = 0.01 - # compare these with https://github.com/AKS1996/jump-gsoc-2020/blob/master/diffcp_sdp_3_py.ipynb - # results are not exactly as: 1. there is some residual error 2. diffcp results are SCS specific, hence scaled - dx = [-39.6066, 10.8953, -14.9189, 10.9054, 10.883, 10.9118, -21.7508] - for (i, vi) in enumerate(x) - @test dx[i] ≈ MOI.get(model, DiffOpt.ForwardVariablePrimal(), vi) atol = - atol rtol = rtol - end - # @test dy ≈ [0.0, -3.56905, 0.0, -0.380035, 0.0, -0.41398, -0.385321, -0.00743119, -0.644986, -0.550542, -2.36765] atol=atol rtol=rtol - # @test ds ≈ [0.0, 0.0, -50.4973, 0.0, -25.8066, 0.0, 0.0, 0.0, -7.96528, -1.62968, -2.18925] atol=atol rtol=rtol - # TODO: future example, how to differentiate wrt a specific constraint/variable, refer QPLib article for more - dA = zeros(11, 7) - dA[3:8, 1:6] = Matrix{Float64}(LinearAlgebra.I, 6, 6) # differentiating only wrt POS constraint c2 - db = zeros(11) - dc = zeros(7) - # db = zeros(11) - # dA = zeros(11, 7) - # dA[3:8, 1:6] = Matrix{Float64}(LinearAlgebra.I, 6, 6) # differentiating only wrt POS constraint c2 - MOI.set( - model, - DiffOpt.ForwardConstraintFunction(), - c1, - MOI.Utilities.zero_with_output_dimension( - MOI.VectorAffineFunction{Float64}, - 1, - ), - ) - MOI.set( - model, - DiffOpt.ForwardConstraintFunction(), - c2, - MOI.Utilities.vectorize(ones(6) .* x[1:6]), - ) - MOI.set( - model, - DiffOpt.ForwardConstraintFunction(), - c3, - MOI.Utilities.zero_with_output_dimension( - MOI.VectorAffineFunction{Float64}, - 3, - ), - ) - MOI.set( - model, - DiffOpt.ForwardConstraintFunction(), - c4, - MOI.Utilities.zero_with_output_dimension( - MOI.VectorAffineFunction{Float64}, - 1, - ), - ) - DiffOpt.forward_differentiate!(model) - # for (i, vi) in enumerate(X) - # @test 0.0 ≈ MOI.get(model, - # DiffOpt.ForwardVariablePrimal(), vi) atol=1e-2 rtol=RTOL - # end - # TODO add a test here, probably on duals - # # note that there's no change in the PSD slack values or dual optimas - # @test dy ≈ [0.0, 0.0, 0.0, 0.125978, 0.0, 0.142644, 0.142641, 0.0127401, 0.0, 0.0, 0.0] atol=atol rtol=RTOL - # @test ds ≈ [0.0, 0.0, -6.66672, 0.0, -3.33336, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] atol=atol rtol=RTOL - return -end +#FIXME: 
this test is not working - https://github.com/jump-dev/DiffOpt.jl/issues/285 +# Besides API errors, Reverse and Forward differentiation are not matching +# function test_differentiating_conic_with_PSD_and_POS_constraints() +# # refer psdt2test, https://github.com/jump-dev/MathOptInterface.jl/blob/master/src/Test/contconic.jl#L4306 +# # find equivalent diffcp program here - https://github.com/AKS1996/jump-gsoc-2020/blob/master/diffcp_sdp_3_py.ipynb +# # Make a JuMP model backed by DiffOpt.diff_optimizer(SCS.Optimizer) +# model = Model(() -> DiffOpt.diff_optimizer(SCS.Optimizer)) +# set_silent(model) # just to suppress solver output + +# @variable(model, x[1:7]) +# @test num_variables(model) == 7 + +# η = 10.0 +# # c1: sum(-x[1:6]) + η >= 0 +# c1 = @constraint(model, -sum(x[i] for i in 1:6) + η ≥ 0) + +# # c2: x[i] >= 0 for each i in 1:6 +# # (this replicates MOI.Nonnegatives(6) on x[1:6]) +# c2 = [ +# @constraint(model, x[i] ≥ 0) +# for i in 1:6 +# ] + +# # c3: A 2×2 PSD constraint. In raw MOI form you had +# # MOI.PositiveSemidefiniteConeTriangle(2), +# # which means a vector dimension=3 => [a11, a12, a22]. +# # We'll build that in JuMP with `[ [a11 a12]; [a12 a22] ] in PSDCone()`. +# α = 0.8 +# δ = 0.9 +# c3 = @constraint( +# model, +# LinearAlgebra.Symmetric(hcat( +# [((δ/2)*x[1] + α*x[2] + δ*x[3] + (δ/4)*x[4] + (δ/8)*x[5] - 1.0*x[7]) # a11 +# ((-(δ/(2*√2)))*x[1] - (δ/4)*x[2] - (δ/(8*√2))*x[5])], +# [((-(δ/(2*√2)))*x[1] - (δ/4)*x[2] - (δ/(8*√2))*x[5]) +# ((δ/2)*x[1] + (δ - α)*x[2] + 0.0*x[3] + (δ/8)*x[5] + (δ/4)*x[6] - x[7])]) +# ) in PSDCone() +# ) + +# # c4: In the original MOI example it was some "useless" constraint on x[1:3] + x[5:6]. +# # For demonstration, we replicate a dimension=1 equality to zero: +# c4 = @constraint(model, 0.0 * x[1] + 0.0 * x[2] + 0.0 * x[3] + 0.0 * x[5] + 0.0 * x[6] == 0) + +# # Make the objective: "maximize x[7]" exactly as in the original +# @objective(model, Max, x[7]) + +# # Solve +# optimize!(model) + +# @test objective_value(model) ≈ 1.90192374 atol=ATOL rtol=RTOL + +# # MOI.set( +# # model, +# # DiffOpt.ForwardObjectiveFunction(), +# # 3*x[7] +# # ) # FIXME: ERROR when forward_differentiate + +# dx = [2.81765 7.35515e-6 3.33222 1.73763e-5 1.58653e-5 1.74104e-5 0.3617] +# for i=1:7 +# @show i +# MOI.set( +# model, +# DiffOpt.ForwardConstraintFunction(), +# c1, +# x[i]+0.0 +# ) +# DiffOpt.forward_differentiate!(model) +# @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x[i]) ≈ dx[i] atol=ATOL rtol=RTOL +# MOI.set( +# model, +# DiffOpt.ReverseVariablePrimal(), +# x[i], +# 1.0 +# ) +# DiffOpt.reverse_differentiate!(model) +# @test JuMP.coefficient(MOI.get(model, DiffOpt.ReverseConstraintFunction(), c1), x[i]) ≈ dx[i] atol=ATOL rtol=RTOL +# DiffOpt.empty_input_sensitivities!(model) +# end +# DiffOpt.empty_input_sensitivities!(model) +# dx = [0.0 0.0 0.0 0.0 0.0 0.0 0.0] +# for i=1:6 +# MOI.set( +# model, +# DiffOpt.ForwardConstraintFunction(), +# c2[i], +# x[i]+0.0 +# ) +# DiffOpt.forward_differentiate!(model) +# # @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x[i]) ≈ dx[i] atol=ATOL rtol=RTOL +# @show MOI.get(model, DiffOpt.ForwardVariablePrimal(), x[i]) +# MOI.set( +# model, +# DiffOpt.ReverseVariablePrimal(), +# x[i], +# 1.0 +# ) +# DiffOpt.reverse_differentiate!(model) +# @show JuMP.coefficient(MOI.get(model, DiffOpt.ReverseConstraintFunction(), c2[i]), x[i]) +# end + +# return +# end function test_differentiating_a_simple_psd() # refer _psd3test, https://github.com/jump-dev/MathOptInterface.jl/blob/master/src/Test/contconic.jl#L4484 diff 
--git a/test/jump_wrapper.jl b/test/jump_wrapper.jl index fafe637e..1720246e 100644 --- a/test/jump_wrapper.jl +++ b/test/jump_wrapper.jl @@ -34,7 +34,7 @@ function test_jump_api() (DiffOpt.quadratic_diff_model, SCS.Optimizer), (DiffOpt.quadratic_diff_model, Ipopt.Optimizer), (DiffOpt.conic_diff_model, HiGHS.Optimizer), - (DiffOpt.conic_diff_model, SCS.Optimizer), # conicmodel has a issue with sign + (DiffOpt.conic_diff_model, SCS.Optimizer), (DiffOpt.conic_diff_model, Ipopt.Optimizer), # (DiffOpt.nonlinear_diff_model, HiGHS.Optimizer), # SQF ctr not supported? # (DiffOpt.nonlinear_diff_model, SCS.Optimizer), # returns zero for sensitivity From 74785d497b6016c2cc41b59b5e0ae151cf8bec11 Mon Sep 17 00:00:00 2001 From: Andrew Date: Sat, 12 Apr 2025 16:17:32 -0400 Subject: [PATCH 5/8] format --- test/conic_program.jl | 60 +++++++++++++++++++++++-------------------- 1 file changed, 32 insertions(+), 28 deletions(-) diff --git a/test/conic_program.jl b/test/conic_program.jl index 1c974cb1..0249a85c 100644 --- a/test/conic_program.jl +++ b/test/conic_program.jl @@ -60,7 +60,7 @@ function _test_simple_socp(eq_vec::Bool) optimize!(model) # set foward sensitivities - if eq_vec + if eq_vec MOI.set.(model, DiffOpt.ForwardConstraintFunction(), ceq, [1.0 * x]) else MOI.set(model, DiffOpt.ForwardConstraintFunction(), ceq, 1.0 * x) @@ -69,21 +69,30 @@ function _test_simple_socp(eq_vec::Bool) DiffOpt.forward_differentiate!(model) dx = -0.9999908 - @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) ≈ dx atol=ATOL rtol=RTOL + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) ≈ dx atol = ATOL rtol = + RTOL - MOI.set( - model, - DiffOpt.ReverseVariablePrimal(), - x, - 1.0 - ) + MOI.set(model, DiffOpt.ReverseVariablePrimal(), x, 1.0) DiffOpt.reverse_differentiate!(model) if eq_vec - @test all(isapprox.(JuMP.coefficient.(MOI.get.(model, DiffOpt.ReverseConstraintFunction(), ceq), x), dx, atol=ATOL, rtol=RTOL)) + @test all( + isapprox.( + JuMP.coefficient.( + MOI.get.(model, DiffOpt.ReverseConstraintFunction(), ceq), + x, + ), + dx, + atol = ATOL, + rtol = RTOL, + ), + ) else - @test JuMP.coefficient(MOI.get(model, DiffOpt.ReverseConstraintFunction(), ceq), x) ≈ dx atol=ATOL rtol=RTOL + @test JuMP.coefficient( + MOI.get(model, DiffOpt.ReverseConstraintFunction(), ceq), + x, + ) ≈ dx atol = ATOL rtol = RTOL end DiffOpt.empty_input_sensitivities!(model) @@ -93,21 +102,20 @@ function _test_simple_socp(eq_vec::Bool) DiffOpt.forward_differentiate!(model) dy = -0.707083 - @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), y) ≈ dy atol=ATOL rtol=RTOL + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), y) ≈ dy atol = ATOL rtol = + RTOL - MOI.set( - model, - DiffOpt.ReverseVariablePrimal(), - y, - 1.0 - ) + MOI.set(model, DiffOpt.ReverseVariablePrimal(), y, 1.0) DiffOpt.reverse_differentiate!(model) - @test JuMP.coefficient(MOI.get(model, DiffOpt.ReverseConstraintFunction(), cnon), y) ≈ dy atol=ATOL rtol=RTOL + @test JuMP.coefficient( + MOI.get(model, DiffOpt.ReverseConstraintFunction(), cnon), + y, + ) ≈ dy atol = ATOL rtol = RTOL DiffOpt.empty_input_sensitivities!(model) - + MOI.set( model, DiffOpt.ForwardConstraintFunction(), @@ -118,20 +126,16 @@ function _test_simple_socp(eq_vec::Bool) DiffOpt.forward_differentiate!(model) ds = 0.0 - @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), t) ≈ ds atol=ATOL rtol=RTOL + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), t) ≈ ds atol = ATOL rtol = + RTOL - MOI.set( - model, - DiffOpt.ReverseVariablePrimal(), - t, - 1.0 - ) + MOI.set(model, 
DiffOpt.ReverseVariablePrimal(), t, 1.0) DiffOpt.reverse_differentiate!(model) # FIXME: this is not working - https://github.com/jump-dev/DiffOpt.jl/issues/283 # @test JuMP.coefficient(MOI.get(model, DiffOpt.ReverseConstraintFunction(), csoc).func.func.func, t.index) ≈ ds atol=ATOL rtol=RTOL - + return end @@ -494,7 +498,7 @@ end # DiffOpt.reverse_differentiate!(model) # @show JuMP.coefficient(MOI.get(model, DiffOpt.ReverseConstraintFunction(), c2[i]), x[i]) # end - + # return # end From bf2c4c389ecd6ae63356e292b74a632b0517fa68 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 20 May 2025 02:49:27 +0200 Subject: [PATCH 6/8] Pass attributes through Objective.FunctionConversionBridge (#287) * Pass attributes through Objective.FunctionConversionBridge * Fix * add test * fix tol * fix test * add reverse test --------- Co-authored-by: joaquimg --- src/bridges.jl | 24 ++++++++++++++++++++++++ test/jump.jl | 43 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 67 insertions(+) diff --git a/src/bridges.jl b/src/bridges.jl index 2b0eb46b..a56717b3 100644 --- a/src/bridges.jl +++ b/src/bridges.jl @@ -3,6 +3,30 @@ # Use of this source code is governed by an MIT-style license that can be found # in the LICENSE.md file or at https://opensource.org/licenses/MIT. +function MOI.get( + model::MOI.ModelLike, + attr::ObjectiveFunctionAttribute{ReverseObjectiveFunction,G}, + ::MOI.Bridges.Objective.FunctionConversionBridge{T,F,G}, +) where {T,F,G} + return MOI.get( + model, + ObjectiveFunctionAttribute{ReverseObjectiveFunction,F}(attr.attr), + ) +end + +function MOI.set( + model::MOI.ModelLike, + attr::ObjectiveFunctionAttribute{ForwardObjectiveFunction,G}, + ::MOI.Bridges.Objective.FunctionConversionBridge{T,F,G}, + value, +) where {T,F,G} + return MOI.set( + model, + ObjectiveFunctionAttribute{ForwardObjectiveFunction,F}(attr.attr), + value, + ) +end + function MOI.get( model::MOI.ModelLike, ::ObjectiveFunctionAttribute{ReverseObjectiveFunction}, diff --git a/test/jump.jl b/test/jump.jl index 8055f1f1..bdb031b7 100644 --- a/test/jump.jl +++ b/test/jump.jl @@ -31,6 +31,49 @@ function runtests() return end +function test_single_variable_objective_forward() + model = Model(() -> DiffOpt.diff_optimizer(SCS.Optimizer)) + @variable(model, x[1:7] >= 0) + @constraint(model, c1, sum(x[i] for i in 1:6) == 10) + @constraint(model, c2, x[7] == 10) + @constraint( + model, + c3, + LinearAlgebra.Symmetric([ + x[7] 0.0 + 0.0 x[1] + ]) in PSDCone() + ) + @objective(model, Max, x[7]) + optimize!(model) + MOI.set(model, DiffOpt.ForwardObjectiveFunction(), sum(x)) + DiffOpt.forward_differentiate!(model) + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x[7]) ≈ 0 atol = ATOL + return +end + +function test_single_variable_objective_reverse() + model = Model(() -> DiffOpt.diff_optimizer(SCS.Optimizer)) + @variable(model, x[1:7] >= 0) + @constraint(model, c1, sum(x[i] for i in 1:6) == 10) + @constraint(model, c2, x[7] == 10) + @constraint( + model, + c3, + LinearAlgebra.Symmetric([ + x[7] 0.0 + 0.0 x[1] + ]) in PSDCone() + ) + @objective(model, Max, x[7]) + optimize!(model) + MOI.set(model, DiffOpt.ReverseVariablePrimal(), x[7], 1.0) + DiffOpt.reverse_differentiate!(model) + func = MOI.get(model, DiffOpt.ReverseObjectiveFunction()) + @test JuMP.coefficient(func, x[7]) ≈ 0.0 atol = ATOL rtol = RTOL + return +end + function test_forward_on_trivial_qp() # using example on https://osqp.org/docs/examples/setup-and-solve.html Q = [4.0 1.0; 1.0 2.0] From 3ff70397279a7cce5838dc42b1385412af544238 
Mon Sep 17 00:00:00 2001 From: joaquimg Date: Mon, 19 May 2025 20:59:13 -0700 Subject: [PATCH 7/8] bump POI --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index a3689608..e89e4c6b 100644 --- a/Project.toml +++ b/Project.toml @@ -23,5 +23,5 @@ JuMP = "1" LazyArrays = "0.21, 0.22, 1" MathOptInterface = "1.18" MathOptSetDistances = "0.2.9" -ParametricOptInterface = "0.9.0" +ParametricOptInterface = "0.11" julia = "1.6" From 83f1d1fcb06d85d3c8923a94c501829e9b0c88d3 Mon Sep 17 00:00:00 2001 From: joaquimg Date: Mon, 19 May 2025 22:36:06 -0700 Subject: [PATCH 8/8] cleanup --- src/ConicProgram/ConicProgram.jl | 3 +- test/conic_program.jl | 180 ++++++++++++++----------------- 2 files changed, 80 insertions(+), 103 deletions(-) diff --git a/src/ConicProgram/ConicProgram.jl b/src/ConicProgram/ConicProgram.jl index 3cd970ac..f9444404 100644 --- a/src/ConicProgram/ConicProgram.jl +++ b/src/ConicProgram/ConicProgram.jl @@ -302,7 +302,8 @@ function DiffOpt.forward_differentiate!(model::Model) dAj, dAv, ) - dA = -SparseArrays.sparse(dAi, dAj, dAv, lines, cols) + dAv .*= -1.0 + dA = SparseArrays.sparse(dAi, dAj, dAv, lines, cols) m = size(A, 1) n = size(A, 2) diff --git a/test/conic_program.jl b/test/conic_program.jl index 0249a85c..f5e5b000 100644 --- a/test/conic_program.jl +++ b/test/conic_program.jl @@ -399,108 +399,84 @@ function test_differentiating_conic_with_PSD_and_SOC_constraints() return end -#FIXME: this test is not working - https://github.com/jump-dev/DiffOpt.jl/issues/285 -# Besides API errors, Reverse and Forward differentiation are not matching -# function test_differentiating_conic_with_PSD_and_POS_constraints() -# # refer psdt2test, https://github.com/jump-dev/MathOptInterface.jl/blob/master/src/Test/contconic.jl#L4306 -# # find equivalent diffcp program here - https://github.com/AKS1996/jump-gsoc-2020/blob/master/diffcp_sdp_3_py.ipynb -# # Make a JuMP model backed by DiffOpt.diff_optimizer(SCS.Optimizer) -# model = Model(() -> DiffOpt.diff_optimizer(SCS.Optimizer)) -# set_silent(model) # just to suppress solver output - -# @variable(model, x[1:7]) -# @test num_variables(model) == 7 - -# η = 10.0 -# # c1: sum(-x[1:6]) + η >= 0 -# c1 = @constraint(model, -sum(x[i] for i in 1:6) + η ≥ 0) - -# # c2: x[i] >= 0 for each i in 1:6 -# # (this replicates MOI.Nonnegatives(6) on x[1:6]) -# c2 = [ -# @constraint(model, x[i] ≥ 0) -# for i in 1:6 -# ] - -# # c3: A 2×2 PSD constraint. In raw MOI form you had -# # MOI.PositiveSemidefiniteConeTriangle(2), -# # which means a vector dimension=3 => [a11, a12, a22]. -# # We'll build that in JuMP with `[ [a11 a12]; [a12 a22] ] in PSDCone()`. -# α = 0.8 -# δ = 0.9 -# c3 = @constraint( -# model, -# LinearAlgebra.Symmetric(hcat( -# [((δ/2)*x[1] + α*x[2] + δ*x[3] + (δ/4)*x[4] + (δ/8)*x[5] - 1.0*x[7]) # a11 -# ((-(δ/(2*√2)))*x[1] - (δ/4)*x[2] - (δ/(8*√2))*x[5])], -# [((-(δ/(2*√2)))*x[1] - (δ/4)*x[2] - (δ/(8*√2))*x[5]) -# ((δ/2)*x[1] + (δ - α)*x[2] + 0.0*x[3] + (δ/8)*x[5] + (δ/4)*x[6] - x[7])]) -# ) in PSDCone() -# ) - -# # c4: In the original MOI example it was some "useless" constraint on x[1:3] + x[5:6]. 
-# # For demonstration, we replicate a dimension=1 equality to zero: -# c4 = @constraint(model, 0.0 * x[1] + 0.0 * x[2] + 0.0 * x[3] + 0.0 * x[5] + 0.0 * x[6] == 0) - -# # Make the objective: "maximize x[7]" exactly as in the original -# @objective(model, Max, x[7]) - -# # Solve -# optimize!(model) - -# @test objective_value(model) ≈ 1.90192374 atol=ATOL rtol=RTOL - -# # MOI.set( -# # model, -# # DiffOpt.ForwardObjectiveFunction(), -# # 3*x[7] -# # ) # FIXME: ERROR when forward_differentiate - -# dx = [2.81765 7.35515e-6 3.33222 1.73763e-5 1.58653e-5 1.74104e-5 0.3617] -# for i=1:7 -# @show i -# MOI.set( -# model, -# DiffOpt.ForwardConstraintFunction(), -# c1, -# x[i]+0.0 -# ) -# DiffOpt.forward_differentiate!(model) -# @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x[i]) ≈ dx[i] atol=ATOL rtol=RTOL -# MOI.set( -# model, -# DiffOpt.ReverseVariablePrimal(), -# x[i], -# 1.0 -# ) -# DiffOpt.reverse_differentiate!(model) -# @test JuMP.coefficient(MOI.get(model, DiffOpt.ReverseConstraintFunction(), c1), x[i]) ≈ dx[i] atol=ATOL rtol=RTOL -# DiffOpt.empty_input_sensitivities!(model) -# end -# DiffOpt.empty_input_sensitivities!(model) -# dx = [0.0 0.0 0.0 0.0 0.0 0.0 0.0] -# for i=1:6 -# MOI.set( -# model, -# DiffOpt.ForwardConstraintFunction(), -# c2[i], -# x[i]+0.0 -# ) -# DiffOpt.forward_differentiate!(model) -# # @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x[i]) ≈ dx[i] atol=ATOL rtol=RTOL -# @show MOI.get(model, DiffOpt.ForwardVariablePrimal(), x[i]) -# MOI.set( -# model, -# DiffOpt.ReverseVariablePrimal(), -# x[i], -# 1.0 -# ) -# DiffOpt.reverse_differentiate!(model) -# @show JuMP.coefficient(MOI.get(model, DiffOpt.ReverseConstraintFunction(), c2[i]), x[i]) -# end - -# return -# end +function _build_simple_sdp() + # refer psdt2test, https://github.com/jump-dev/MathOptInterface.jl/blob/master/src/Test/contconic.jl#L4306 + # find equivalent diffcp program here - https://github.com/AKS1996/jump-gsoc-2020/blob/master/diffcp_sdp_3_py.ipynb + # Make a JuMP model backed by DiffOpt.diff_optimizer(SCS.Optimizer) + model = Model(() -> DiffOpt.diff_optimizer(SCS.Optimizer)) + set_silent(model) # just to suppress solver output + + @variable(model, x[1:3]) + + @constraint(model, c1, sum(x[i] for i in 1:3) == 4) + + @constraint(model, c2[i = 1:3], x[i] ≥ 0) + + @constraint(model, x[1] == 2) + + @constraint( + model, + c3, + LinearAlgebra.Symmetric([ + x[3]+1 2 + 2 2x[3]+2 + ]) in PSDCone() + ) + + @objective(model, Min, 4x[3] + x[2]) + return model +end + +function test_differentiating_conic_with_PSD_constraints() + model = _build_simple_sdp() + optimize!(model) + x = model[:x] + c1 = model[:c1] + c2 = model[:c2] + sx = value.(x) + @test sx ≈ [2.0, 3.0 - sqrt(2), sqrt(2) - 1] atol = ATOL rtol = RTOL + + for i in 1:3 + _model = _build_simple_sdp() + JuMP.set_normalized_coefficient(_model[:c1], _model[:x][i], 1.001) + optimize!(_model) + _dx = (value(_model[:x][i]) - value(sx[i])) / 0.001 + i in (1, 3) ? (@test abs(_dx) < 0.05) : (@test -1.6 < _dx < -1.45) + MOI.set(model, DiffOpt.ForwardConstraintFunction(), c1, x[i] + 0.0) + DiffOpt.forward_differentiate!(model) + _dx = MOI.get(model, DiffOpt.ForwardVariablePrimal(), x[i]) + i in (1, 3) ? (@test abs(_dx) < 0.05) : (@test -1.6 < _dx < -1.45) + MOI.set(model, DiffOpt.ReverseVariablePrimal(), x[i], 1.0) + DiffOpt.reverse_differentiate!(model) + _dx = JuMP.coefficient( + MOI.get(model, DiffOpt.ReverseConstraintFunction(), c1), + x[i], + ) + i in (1, 3) ? 
(@test abs(_dx) < 0.05) : (@test -1.6 < _dx < -1.45)
+        DiffOpt.empty_input_sensitivities!(model)
+    end
+    for i in 1:3
+        DiffOpt.empty_input_sensitivities!(model)
+        _model = _build_simple_sdp()
+        JuMP.set_normalized_coefficient(_model[:c2][i], _model[:x][i], 1.001)
+        optimize!(_model)
+        _dx = (value(_model[:x][i]) - value(sx[i])) / 0.001
+        @test abs(_dx) < 0.15
+        MOI.set(model, DiffOpt.ForwardConstraintFunction(), c2[i], x[i] + 0.0)
+        DiffOpt.forward_differentiate!(model)
+        _dx = MOI.get(model, DiffOpt.ForwardVariablePrimal(), x[i])
+        @test abs(_dx) < 0.15
+        MOI.set(model, DiffOpt.ReverseVariablePrimal(), x[i], 1.0)
+        DiffOpt.reverse_differentiate!(model)
+        _dx = JuMP.coefficient(
+            MOI.get(model, DiffOpt.ReverseConstraintFunction(), c2[i]),
+            x[i],
+        )
+        @test abs(_dx) < 0.15
+    end
+
+    return
+end
 
 function test_differentiating_a_simple_psd()
     # refer _psd3test, https://github.com/jump-dev/MathOptInterface.jl/blob/master/src/Test/contconic.jl#L4484