From b4d1765cb7e759705e2739b7a00d784fbdc68399 Mon Sep 17 00:00:00 2001
From: Michel Schanen
Date: Tue, 26 Nov 2024 14:26:57 -0600
Subject: [PATCH 1/4] Enzyme WIP

---
 src/enzyme.jl | 167 +++++++++++++++++++++++++++++++++++++++++++++++---
 1 file changed, 158 insertions(+), 9 deletions(-)

diff --git a/src/enzyme.jl b/src/enzyme.jl
index db5133fe..69b983ba 100644
--- a/src/enzyme.jl
+++ b/src/enzyme.jl
@@ -1,6 +1,9 @@
-struct EnzymeADGradient <: ADNLPModels.ADBackend end
+struct EnzymeReverseADJacobian <: ADBackend end
+struct EnzymeReverseADHessian <: ADBackend end
 
-function EnzymeADGradient(
+struct EnzymeReverseADGradient <: ADNLPModels.ADBackend end
+
+function EnzymeReverseADGradient(
   nvar::Integer,
   f,
   ncon::Integer = 0,
@@ -8,14 +11,160 @@ function EnzymeADGradient(
   x0::AbstractVector = rand(nvar),
   kwargs...,
 )
-  return EnzymeADGradient()
+  return EnzymeReverseADGradient()
+end
+
+function ADNLPModels.gradient!(::EnzymeReverseADGradient, g, f, x)
+  Enzyme.autodiff(Enzyme.Reverse, f, Enzyme.Duplicated(x, g)) # gradient!(Reverse, g, f, x)
+  return g
+end
+
+function EnzymeReverseADJacobian(
+  nvar::Integer,
+  f,
+  ncon::Integer = 0,
+  c::Function = (args...) -> [];
+  kwargs...,
+)
+  return EnzymeReverseADJacobian()
+end
+
+jacobian(::EnzymeReverseADJacobian, f, x) = Enzyme.jacobian(Enzyme.Reverse, f, x)
+
+function EnzymeReverseADHessian(
+  nvar::Integer,
+  f,
+  ncon::Integer = 0,
+  c::Function = (args...) -> [];
+  kwargs...,
+)
+  @assert nvar > 0
+  nnzh = nvar * (nvar + 1) / 2
+  return EnzymeReverseADHessian()
 end
 
-@init begin
-  @require Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9" begin
-    function ADNLPModels.gradient!(::EnzymeADGradient, g, f, x)
-      Enzyme.autodiff(Enzyme.Reverse, f, Enzyme.Duplicated(x, g)) # gradient!(Reverse, g, f, x)
-      return g
-    end
+function hessian(::EnzymeReverseADHessian, f, x)
+  seed = similar(x)
+  hess = zeros(eltype(x), length(x), length(x))
+  fill!(seed, zero(x))
+  for i in 1:length(x)
+    seed[i] = one(x)
+    Enzyme.hvp!(view(hess, i, :), f, x, seed)
+    seed[i] = zero(x)
   end
+  return hess
+end
+
+struct EnzymeReverseADJprod <: InPlaceADBackend
+  x::Vector{Float64}
 end
+
+function EnzymeReverseADJprod(
+  nvar::Integer,
+  f,
+  ncon::Integer = 0,
+  c::Function = (args...) -> [];
+  kwargs...,
+)
+  x = zeros(nvar)
+  return EnzymeReverseADJprod(x)
+end
+
+function Jprod!(b::EnzymeReverseADJprod, Jv, c!, x, v, ::Val)
+  Enzyme.autodiff(Enzyme.Forward, c!, Duplicated(b.x, Jv), Enzyme.Duplicated(x, v))
+  return Jv
+end
+
+struct EnzymeReverseADJtprod <: InPlaceADBackend
+  x::Vector{Float64}
+end
+
+function EnzymeReverseADJtprod(
+  nvar::Integer,
+  f,
+  ncon::Integer = 0,
+  c::Function = (args...) -> [];
+  kwargs...,
+)
+  x = zeros(nvar)
+  return EnzymeReverseADJtprod(x)
+end
+
+function Jtvprod!(b::EnzymeReverseADJtprod, Jtv, c!, x, v, ::Val)
+  Enzyme.autodiff(Enzyme.Reverse, c!, Duplicated(b.x, Jtv), Enzyme.Duplicated(x, v))
+  return Jtv
+end
+
+struct EnzymeReverseADHprod <: InPlaceADBackend
+  grad::Vector{Float64}
+end
+
+function EnzymeReverseADHvprod(
+  nvar::Integer,
+  f,
+  ncon::Integer = 0,
+  c!::Function = (args...) -> [];
+  x0::AbstractVector{T} = rand(nvar),
+  kwargs...,
+) where {T}
+  grad = zeros(nvar)
+  return EnzymeReverseADHprod(grad)
+end
+
+function Hvprod!(b::EnzymeReverseADHvprod, Hv, x, v, f, args...)
+  # What to do with args?
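The gradient! backend above leans on Enzyme's shadow-argument convention: wrapping the input as Duplicated(x, g) makes the reverse pass accumulate df/dx into g. A minimal standalone sketch of the same call, written with the Const/Active annotations that later patches in this series adopt (the objective function here is illustrative):

    using Enzyme

    f(x) = sum(abs2, x)

    x = [1.0, 2.0, 3.0]
    g = zero(x)  # shadow buffer; reverse mode accumulates into it, so start at zero
    Enzyme.autodiff(Enzyme.Reverse, Const(f), Active, Enzyme.Duplicated(x, g))
    # g == [2.0, 4.0, 6.0] == ∇f(x)

Because the shadow is accumulated rather than overwritten, reusing g across calls without zeroing it first silently produces wrong gradients.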
+  Enzyme.autodiff(
+    Forward,
+    gradient!,
+    Const(Reverse),
+    DuplicatedNoNeed(b.grad, Hv),
+    Const(f),
+    Duplicated(x, v),
+  )
+  return Hv
+end
+
+function Hvprod!(
+  b::EnzymeReverseADHvprod,
+  Hv,
+  x::AbstractVector{T},
+  v,
+  ℓ,
+  ::Val{:lag},
+  y,
+  obj_weight::Real = one(T),
+)
+  Enzyme.autodiff(
+    Forward,
+    gradient!,
+    Const(Reverse),
+    DuplicatedNoNeed(b.grad, Hv),
+    Const(ℓ),
+    Duplicated(x, v),
+    Const(y),
+  )
+
+  return Hv
+end
+
+function Hvprod!(
+  b::EnzymeReverseADHvprod{T, S, Tagf},
+  Hv,
+  x,
+  v,
+  f,
+  ::Val{:obj},
+  obj_weight::Real = one(T),
+)
+  Enzyme.autodiff(
+    Forward,
+    gradient!,
+    Const(Reverse),
+    DuplicatedNoNeed(b.grad, Hv),
+    Const(f),
+    Duplicated(x, v),
+    Const(y),
+  )
+  return Hv
+end

From 2981213fdc22f37a809a2e85728b007c9d04717d Mon Sep 17 00:00:00 2001
From: Michel Schanen
Date: Tue, 26 Nov 2024 14:42:10 -0600
Subject: [PATCH 2/4] cont.

---
 Project.toml   |  1 +
 src/enzyme.jl  | 16 ++++++++--------
 test/enzyme.jl |  8 ++++++--
 3 files changed, 15 insertions(+), 10 deletions(-)

diff --git a/Project.toml b/Project.toml
index f7b60817..2aef3b97 100644
--- a/Project.toml
+++ b/Project.toml
@@ -4,6 +4,7 @@ version = "0.8.7"
 
 [deps]
 ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
+Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
 ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 NLPModels = "a4795742-8479-5a88-8948-cc11e1c8c1a6"

diff --git a/src/enzyme.jl b/src/enzyme.jl
index 69b983ba..1750e689 100644
--- a/src/enzyme.jl
+++ b/src/enzyme.jl
@@ -56,7 +56,7 @@ function hessian(::EnzymeReverseADHessian, f, x)
   return hess
 end
 
-struct EnzymeReverseADJprod <: InPlaceADBackend
+struct EnzymeReverseADJprod <: InPlaceADbackend
   x::Vector{Float64}
 end
 
@@ -76,7 +76,7 @@ function Jprod!(b::EnzymeReverseADJprod, Jv, c!, x, v, ::Val)
   return Jv
 end
 
-struct EnzymeReverseADJtprod <: InPlaceADBackend
+struct EnzymeReverseADJtprod <: InPlaceADbackend
   x::Vector{Float64}
 end
 
@@ -96,7 +96,7 @@ function Jtvprod!(b::EnzymeReverseADJtprod, Jtv, c!, x, v, ::Val)
   return Jtv
 end
 
-struct EnzymeReverseADHprod <: InPlaceADBackend
+struct EnzymeReverseADHvprod <: InPlaceADbackend
   grad::Vector{Float64}
 end
 
@@ -109,7 +109,7 @@ function EnzymeReverseADHvprod(
   kwargs...,
 ) where {T}
   grad = zeros(nvar)
-  return EnzymeReverseADHprod(grad)
+  return EnzymeReverseADHvprod(grad)
 end
 
 function Hvprod!(b::EnzymeReverseADHvprod, Hv, x, v, f, args...)
@@ -128,12 +128,12 @@ end
 function Hvprod!(
   b::EnzymeReverseADHvprod,
   Hv,
-  x::AbstractVector{T},
+  x,
   v,
   ℓ,
   ::Val{:lag},
   y,
-  obj_weight::Real = one(T),
+  obj_weight::Real = one(eltype(x)),
 )
   Enzyme.autodiff(
     Forward,
@@ -149,13 +149,13 @@ end
 
 function Hvprod!(
-  b::EnzymeReverseADHvprod{T, S, Tagf},
+  b::EnzymeReverseADHvprod,
   Hv,
   x,
   v,
   f,
   ::Val{:obj},
-  obj_weight::Real = one(T),
+  obj_weight::Real = one(eltype(x)),
 )
   Enzyme.autodiff(
     Forward,

diff --git a/test/enzyme.jl b/test/enzyme.jl
index 504557a0..5654cec7 100644
--- a/test/enzyme.jl
+++ b/test/enzyme.jl
@@ -3,8 +3,12 @@ using ADNLPModels, ManualNLPModels, NLPModels, NLPModelsModifiers, NLPModelsTest
 using ADNLPModels:
   gradient, gradient!, jacobian, hessian, Jprod!, Jtprod!, directional_second_derivative, Hvprod!
 
-# Automatically loads the code for Enzyme with Requires
-import Enzyme
+for problem in NLPModelsTest.nlp_problems ∪ ["GENROSE"]
+  include("nlp/problems/$(lowercase(problem)).jl")
+end
+for problem in NLPModelsTest.nls_problems
+  include("nls/problems/$(lowercase(problem)).jl")
+end
 
 #=
 ADNLPModels.EmptyADbackend(args...; kwargs...) = ADNLPModels.EmptyADbackend()
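Jprod!, introduced in the first patch and left untouched here, applies the same shadow convention in forward mode: for an in-place constraint c!(y, x), seeding the input shadow with a tangent v makes the output shadow come back holding J(x) * v. A standalone sketch with an illustrative constraint:

    using Enzyme

    c!(y, x) = (y[1] = x[1]^2 + x[2]; y[2] = x[1] * x[2]; nothing)

    x = [1.0, 2.0]
    v = [1.0, 0.0]   # tangent pushed through c!
    y = zeros(2)
    Jv = zeros(2)    # shadow of y; receives J(x) * v
    Enzyme.autodiff(Enzyme.Forward, Const(c!), Duplicated(y, Jv), Duplicated(x, v))
    # J(x) = [2x₁ 1; x₂ x₁], so Jv == [2.0, 2.0]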
From 2e283708288be99ef18e71b69e46e6a1d2ffa23c Mon Sep 17 00:00:00 2001
From: Michel Schanen
Date: Tue, 26 Nov 2024 15:56:30 -0600
Subject: [PATCH 3/4] cont.

---
 Project.toml       |  3 +++
 src/ADNLPModels.jl |  2 +-
 src/enzyme.jl      | 22 +++++++++++-----
 test/enzyme.jl     | 64 +++++++++++++++++++++++++++++++++++++++++++++-
 4 files changed, 83 insertions(+), 8 deletions(-)

diff --git a/Project.toml b/Project.toml
index 2aef3b97..1e669177 100644
--- a/Project.toml
+++ b/Project.toml
@@ -7,7 +7,10 @@ ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
 Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
 ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+ManualNLPModels = "30dfa513-9b2f-4fb3-9796-781eabac1617"
 NLPModels = "a4795742-8479-5a88-8948-cc11e1c8c1a6"
+NLPModelsModifiers = "e01155f1-5c6f-4375-a9d8-616dd036575f"
+NLPModelsTest = "7998695d-6960-4d3a-85c4-e1bceb8cd856"
 Requires = "ae029012-a4dd-5104-9daa-d747884805df"
 ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
 SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"

diff --git a/src/ADNLPModels.jl b/src/ADNLPModels.jl
index a50d1005..8a474401 100644
--- a/src/ADNLPModels.jl
+++ b/src/ADNLPModels.jl
@@ -7,7 +7,7 @@ using LinearAlgebra, SparseArrays
 using ADTypes: ADTypes, AbstractColoringAlgorithm, AbstractSparsityDetector
 using SparseConnectivityTracer: TracerSparsityDetector
 using SparseMatrixColorings
-using ForwardDiff, ReverseDiff
+using ForwardDiff, ReverseDiff, Enzyme
 
 # JSO
 using NLPModels

diff --git a/src/enzyme.jl b/src/enzyme.jl
index 1750e689..6227648f 100644
--- a/src/enzyme.jl
+++ b/src/enzyme.jl
@@ -14,6 +14,12 @@ function EnzymeReverseADGradient(
   return EnzymeReverseADGradient()
 end
 
+function ADNLPModels.gradient(::EnzymeReverseADGradient, f, x)
+  g = similar(x)
+  Enzyme.autodiff(Enzyme.Reverse, f, Enzyme.Duplicated(x, g)) # gradient!(Reverse, g, f, x)
+  return g
+end
+
 function ADNLPModels.gradient!(::EnzymeReverseADGradient, g, f, x)
   Enzyme.autodiff(Enzyme.Reverse, f, Enzyme.Duplicated(x, g)) # gradient!(Reverse, g, f, x)
   return g
@@ -47,11 +53,13 @@ end
 function hessian(::EnzymeReverseADHessian, f, x)
   seed = similar(x)
   hess = zeros(eltype(x), length(x), length(x))
-  fill!(seed, zero(x))
+  fill!(seed, zero(eltype(x)))
+  tmp = similar(x)
   for i in 1:length(x)
-    seed[i] = one(x)
-    Enzyme.hvp!(view(hess, i, :), f, x, seed)
-    seed[i] = zero(x)
+    seed[i] = one(eltype(seed))
+    Enzyme.hvp!(tmp, f, x, seed)
+    hess[:, i] .= tmp
+    seed[i] = zero(eltype(seed))
   end
   return hess
 end
@@ -72,7 +80,9 @@ end
 
 function Jprod!(b::EnzymeReverseADJprod, Jv, c!, x, v, ::Val)
-  Enzyme.autodiff(Enzyme.Forward, c!, Duplicated(b.x, Jv), Enzyme.Duplicated(x, v))
+  @show c!(x)
+  @show Enzyme.autodiff(Enzyme.Forward, Const(c!), Duplicated(x, v))
+  error("This is BAD")
   return Jv
 end
 
@@ -91,7 +101,7 @@ function EnzymeReverseADJtprod(
   return EnzymeReverseADJtprod(x)
 end
 
-function Jtvprod!(b::EnzymeReverseADJtprod, Jtv, c!, x, v, ::Val)
+function Jtprod!(b::EnzymeReverseADJtprod, Jtv, c!, x, v, ::Val)
   Enzyme.autodiff(Enzyme.Reverse, c!, Duplicated(b.x, Jtv), Enzyme.Duplicated(x, v))
   return Jtv
 end
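The reworked hessian backend assembles the dense Hessian one Hessian-vector product at a time: seeding the unit vector eᵢ yields H * eᵢ, the i-th column, now stored explicitly via hess[:, i] .= tmp. The first revision wrote the same vector into row i instead, which agrees only because the Hessian of a twice continuously differentiable f is symmetric. A standalone version of the column loop, assuming (as the patch does) an Enzyme version that provides hvp!; the objective is illustrative:

    using Enzyme, LinearAlgebra

    f(x) = x[1]^2 * x[2] + x[2]^3

    x = [1.0, 2.0]
    n = length(x)
    H = zeros(n, n)
    e = zeros(n)
    col = zeros(n)
    for i in 1:n
      e[i] = 1.0
      Enzyme.hvp!(col, Const(f), x, e)  # H * eᵢ, the i-th column of H
      H[:, i] .= col
      e[i] = 0.0
    end
    # H == [2x₂ 2x₁; 2x₁ 6x₂] == [4.0 2.0; 2.0 12.0], and issymmetric(H) holds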
diff --git a/test/enzyme.jl b/test/enzyme.jl
index 5654cec7..60932fa5 100644
--- a/test/enzyme.jl
+++ b/test/enzyme.jl
@@ -10,8 +10,70 @@ for problem in NLPModelsTest.nls_problems
   include("nls/problems/$(lowercase(problem)).jl")
 end
 
+EnzymeReverseAD() = ADNLPModels.ADModelBackend(
+  ADNLPModels.EnzymeReverseADGradient(),
+  ADNLPModels.EnzymeReverseADHvprod(zeros(1)),
+  ADNLPModels.EnzymeReverseADJprod(zeros(1)),
+  ADNLPModels.EnzymeReverseADJtprod(zeros(1)),
+  ADNLPModels.EnzymeReverseADJacobian(),
+  ADNLPModels.EnzymeReverseADHessian(),
+  ADNLPModels.EnzymeReverseADHvprod(zeros(1)),
+  ADNLPModels.EmptyADbackend(),
+  ADNLPModels.EmptyADbackend(),
+  ADNLPModels.EmptyADbackend(),
+  ADNLPModels.EmptyADbackend(),
+  ADNLPModels.EmptyADbackend(),
+)
+
+function test_autodiff_backend_error()
+  @testset "Error without loading package - $backend" for backend in [:EnzymeReverseAD]
+    adbackend = eval(backend)()
+    # @test_throws ArgumentError gradient(adbackend.gradient_backend, sum, [1.0])
+    # @test_throws ArgumentError gradient!(adbackend.gradient_backend, [1.0], sum, [1.0])
+    # @test_throws ArgumentError jacobian(adbackend.jacobian_backend, identity, [1.0])
+    # @test_throws ArgumentError hessian(adbackend.hessian_backend, sum, [1.0])
+    # @test_throws ArgumentError Jprod!(
+    #   adbackend.jprod_backend,
+    #   [1.0],
+    #   [1.0],
+    #   identity,
+    #   [1.0],
+    #   Val(:c),
+    # )
+    # @test_throws ArgumentError Jtprod!(
+    #   adbackend.jtprod_backend,
+    #   [1.0],
+    #   [1.0],
+    #   identity,
+    #   [1.0],
+    #   Val(:c),
+    # )
+    gradient(adbackend.gradient_backend, sum, [1.0])
+    gradient!(adbackend.gradient_backend, [1.0], sum, [1.0])
+    jacobian(adbackend.jacobian_backend, identity, [1.0])
+    hessian(adbackend.hessian_backend, sum, [1.0])
+    Jprod!(
+      adbackend.jprod_backend,
+      [1.0],
+      identity,
+      [1.0],
+      Val(:c),
+    )
+    # Jtprod!(
+    #   adbackend.jtprod_backend,
+    #   [1.0],
+    #   identity,
+    #   [1.0],
+    #   [1.0],
+    #   Val(:c),
+    # )
+  end
+end
+
+test_autodiff_backend_error()
 
 #=
-ADNLPModels.EmptyADbackend(args...; kwargs...) = ADNLPModels.EmptyADbackend()
+# ADNLPModels.EmptyADbackend(args...; kwargs...) = ADNLPModels.EmptyADbackend()
 names = OptimizationProblems.meta[!, :name]
 list_excluded_enzyme = [
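For the transposed product the shadow roles flip: reverse mode consumes the output shadow as the seed and accumulates Jᵀ * w into the input shadow. A standalone sketch of that seeding, independent of the Jtprod! code above (which this WIP series does not finalize); the constraint is the same illustrative one as before:

    using Enzyme

    c!(y, x) = (y[1] = x[1]^2 + x[2]; y[2] = x[1] * x[2]; nothing)

    x = [1.0, 2.0]
    w = [1.0, 0.0]   # output cotangent; the reverse pass consumes (zeroes) it
    y = zeros(2)
    Jtw = zeros(2)   # input shadow; must start at zero, accumulates Jᵀ * w
    Enzyme.autodiff(Enzyme.Reverse, Const(c!), Duplicated(y, w), Duplicated(x, Jtw))
    # Jᵀ(x) = [2x₁ x₂; 1 x₁], so Jtw == [2.0, 1.0]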
From 80f437b8b82446bd8687abc4c607737a177a899f Mon Sep 17 00:00:00 2001
From: Michel Schanen
Date: Wed, 27 Nov 2024 12:03:11 -0600
Subject: [PATCH 4/4] Enzyme WIP

---
 src/enzyme.jl             |  21 ++++---
 test/enzyme.jl            | 121 ++++++++++++--------------------------
 test/nlp/basic.jl         |  14 ++---
 test/nlp/nlpmodelstest.jl |  29 ++++-----
 4 files changed, 69 insertions(+), 116 deletions(-)

diff --git a/src/enzyme.jl b/src/enzyme.jl
index 6227648f..c2371d70 100644
--- a/src/enzyme.jl
+++ b/src/enzyme.jl
@@ -1,7 +1,7 @@
 struct EnzymeReverseADJacobian <: ADBackend end
 struct EnzymeReverseADHessian <: ADBackend end
 
-struct EnzymeReverseADGradient <: ADNLPModels.ADBackend end
+struct EnzymeReverseADGradient <: InPlaceADbackend end
 
 function EnzymeReverseADGradient(
   nvar::Integer,
@@ -16,12 +16,13 @@ end
 
 function ADNLPModels.gradient(::EnzymeReverseADGradient, f, x)
   g = similar(x)
-  Enzyme.autodiff(Enzyme.Reverse, f, Enzyme.Duplicated(x, g)) # gradient!(Reverse, g, f, x)
+  # Enzyme.autodiff(Enzyme.Reverse, Const(f), Active, Enzyme.Duplicated(x, g)) # gradient!(Reverse, g, f, x)
+  Enzyme.gradient!(Reverse, g, Const(f), x)
   return g
 end
 
 function ADNLPModels.gradient!(::EnzymeReverseADGradient, g, f, x)
-  Enzyme.autodiff(Enzyme.Reverse, f, Enzyme.Duplicated(x, g)) # gradient!(Reverse, g, f, x)
+  Enzyme.autodiff(Enzyme.Reverse, Const(f), Active, Enzyme.Duplicated(x, g)) # gradient!(Reverse, g, f, x)
   return g
 end
 
@@ -57,7 +58,7 @@ function hessian(::EnzymeReverseADHessian, f, x)
   tmp = similar(x)
   for i in 1:length(x)
     seed[i] = one(eltype(seed))
-    Enzyme.hvp!(tmp, f, x, seed)
+    Enzyme.hvp!(tmp, Const(f), x, seed)
     hess[:, i] .= tmp
     seed[i] = zero(eltype(seed))
   end
@@ -80,9 +81,7 @@ end
 
 function Jprod!(b::EnzymeReverseADJprod, Jv, c!, x, v, ::Val)
-  @show c!(x)
-  @show Enzyme.autodiff(Enzyme.Forward, Const(c!), Duplicated(x, v))
-  error("This is BAD")
+  Enzyme.autodiff(Enzyme.Forward, Const(c!), Duplicated(b.x, Jv), Duplicated(x, v))
   return Jv
 end
 
@@ -102,7 +101,7 @@ end
 
 function Jtprod!(b::EnzymeReverseADJtprod, Jtv, c!, x, v, ::Val)
-  Enzyme.autodiff(Enzyme.Reverse, c!, Duplicated(b.x, Jtv), Enzyme.Duplicated(x, v))
+  Enzyme.autodiff(Enzyme.Reverse, Const(c!), Duplicated(b.x, Jtv), Enzyme.Duplicated(x, v))
   return Jtv
 end
 
@@ -126,7 +125,7 @@ end
 function Hvprod!(b::EnzymeReverseADHvprod, Hv, x, v, f, args...)
   # What to do with args?
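The Hvprod! methods below nest the two modes: Enzyme.gradient! evaluates ∇f in reverse mode, and differentiating that call in forward mode along v gives Hv = d/dε ∇f(x + εv) at ε = 0. DuplicatedNoNeed(b.grad, Hv) declares that only the directional derivative is wanted, not the primal gradient itself. A standalone distillation of the call pattern — whether this exact nesting compiles is Enzyme-version dependent, so treat it as a sketch:

    using Enzyme

    f(x) = x[1]^2 * x[2] + x[2]^3

    x = [1.0, 2.0]
    v = [0.0, 1.0]
    grad = zeros(2)  # primal gradient buffer (contents unused)
    Hv = zeros(2)    # receives ∇²f(x) * v
    Enzyme.autodiff(
      Enzyme.Forward,
      Const(Enzyme.gradient!),
      Const(Enzyme.Reverse),
      DuplicatedNoNeed(grad, Hv),
      Const(f),
      Duplicated(x, v),
    )
    # H = [2x₂ 2x₁; 2x₁ 6x₂], so Hv == [2.0, 12.0]

Enzyme also packages this forward-over-reverse nesting as hvp!, which is what the dense Hessian backend calls.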
   Enzyme.autodiff(
     Forward,
-    gradient!,
+    Const(Enzyme.gradient!),
     Const(Reverse),
     DuplicatedNoNeed(b.grad, Hv),
     Const(f),
     Duplicated(x, v),
@@ -147,7 +146,7 @@
   )
   Enzyme.autodiff(
     Forward,
-    gradient!,
+    Const(Enzyme.gradient!),
     Const(Reverse),
     DuplicatedNoNeed(b.grad, Hv),
     Const(ℓ),
     Duplicated(x, v),
     Const(y),
@@ -169,7 +168,7 @@
   )
   Enzyme.autodiff(
     Forward,
-    gradient!,
+    Const(Enzyme.gradient!),
     Const(Reverse),
     DuplicatedNoNeed(b.grad, Hv),
     Const(f),
     Duplicated(x, v),
     Const(y),

diff --git a/test/enzyme.jl b/test/enzyme.jl
index 60932fa5..51c87ec8 100644
--- a/test/enzyme.jl
+++ b/test/enzyme.jl
@@ -24,7 +24,10 @@ EnzymeReverseAD() = ADNLPModels.ADModelBackend(
   ADNLPModels.EmptyADbackend(),
   ADNLPModels.EmptyADbackend(),
 )
-
+function mysum!(y, x)
+  sum!(y, x)
+  return nothing
+end
 function test_autodiff_backend_error()
   @testset "Error without loading package - $backend" for backend in [:EnzymeReverseAD]
     adbackend = eval(backend)()
@@ -50,100 +53,50 @@ function test_autodiff_backend_error()
     # )
     gradient(adbackend.gradient_backend, sum, [1.0])
     gradient!(adbackend.gradient_backend, [1.0], sum, [1.0])
-    jacobian(adbackend.jacobian_backend, identity, [1.0])
+    jacobian(adbackend.jacobian_backend, sum, [1.0])
     hessian(adbackend.hessian_backend, sum, [1.0])
     Jprod!(
       adbackend.jprod_backend,
       [1.0],
-      identity,
+      sum!,
+      [1.0],
       [1.0],
       Val(:c),
     )
-    # Jtprod!(
-    #   adbackend.jtprod_backend,
-    #   [1.0],
-    #   identity,
-    #   [1.0],
-    #   [1.0],
-    #   Val(:c),
-    # )
+    Jtprod!(
+      adbackend.jtprod_backend,
+      [1.0],
+      mysum!,
+      [1.0],
+      [1.0],
+      Val(:c),
+    )
   end
 end
 
 test_autodiff_backend_error()
 
-#=
-# ADNLPModels.EmptyADbackend(args...; kwargs...) = ADNLPModels.EmptyADbackend()
-names = OptimizationProblems.meta[!, :name]
-list_excluded_enzyme = [
-  "brybnd",
-  "clplatea",
-  "clplateb",
-  "clplatec",
-  "curly",
-  "curly10",
-  "curly20",
-  "curly30",
-  "elec",
-  "fminsrf2",
-  "hs101",
-  "hs117",
-  "hs119",
-  "hs86",
-  "integreq",
-  "ncb20",
-  "ncb20b",
-  "palmer1c",
-  "palmer1d",
-  "palmer2c",
-  "palmer3c",
-  "palmer4c",
-  "palmer5c",
-  "palmer5d",
-  "palmer6c",
-  "palmer7c",
-  "palmer8c",
-  "sbrybnd",
-  "tetra",
-  "tetra_duct12",
-  "tetra_duct15",
-  "tetra_duct20",
-  "tetra_foam5",
-  "tetra_gear",
-  "tetra_hook",
-  "threepk",
-  "triangle",
-  "triangle_deer",
-  "triangle_pacman",
-  "triangle_turtle",
-  "watson",
-]
-for pb in names
-  @info pb
-  (pb in list_excluded_enzyme) && continue
-  nlp = eval(Meta.parse(pb))(
-    gradient_backend = ADNLPModels.EnzymeADGradient,
-    jacobian_backend = ADNLPModels.EmptyADbackend,
-    hessian_backend = ADNLPModels.EmptyADbackend,
-  )
-  grad(nlp, get_x0(nlp))
-end
-=#
+push!(
+  ADNLPModels.predefined_backend,
+  :enzyme_backend => Dict(
+    :gradient_backend => ADNLPModels.EnzymeReverseADGradient,
+    :jprod_backend => ADNLPModels.EnzymeReverseADJprod,
+    :jtprod_backend => ADNLPModels.EnzymeReverseADJtprod,
+    :hprod_backend => ADNLPModels.EnzymeReverseADHvprod,
+    :jacobian_backend => ADNLPModels.EnzymeReverseADJacobian,
+    :hessian_backend => ADNLPModels.EnzymeReverseADHessian,
+    :ghjvprod_backend => ADNLPModels.ForwardDiffADGHjvprod,
+    :jprod_residual_backend => ADNLPModels.EnzymeReverseADJprod,
+    :jtprod_residual_backend => ADNLPModels.EnzymeReverseADJtprod,
+    :hprod_residual_backend => ADNLPModels.EnzymeReverseADHvprod,
+    :jacobian_residual_backend => ADNLPModels.EnzymeReverseADJacobian,
+    :hessian_residual_backend => ADNLPModels.EnzymeReverseADHessian,
+  ),
+)
+
+include("utils.jl")
+include("nlp/basic.jl")
+include("nls/basic.jl")
+include("nlp/nlpmodelstest.jl")
+include("nls/nlpmodelstest.jl")
 
-#=
-ERROR: Duplicated Returns not yet handled
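Registering :enzyme_backend in ADNLPModels.predefined_backend lets the whole backend family be selected through the backend keyword of the model constructors. A sketch of the intended use once the push! above has run (the objective here is illustrative):

    using ADNLPModels, NLPModels

    f(x) = (x[1] - 1)^2 + 100 * (x[2] - x[1]^2)^2
    nlp = ADNLPModel(f, [-1.2, 1.0], backend = :enzyme_backend)
    grad(nlp, nlp.meta.x0)  # dispatches to EnzymeReverseADGradient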
-Stacktrace:
- [1] autodiff
-   @ .julia\packages\Enzyme\DIkTv\src\Enzyme.jl:209 [inlined]
- [2] autodiff(mode::EnzymeCore.ReverseMode, f::OptimizationProblems.ADNLPProblems.var"#f#254"{OptimizationProblems.ADNLPProblems.var"#f#250#255"}, args::Duplicated{Vector{Float64}})
-   @ Enzyme .julia\packages\Enzyme\DIkTv\src\Enzyme.jl:248
- [3] gradient!(#unused#::ADNLPModels.EnzymeADGradient, g::Vector{Float64}, f::Function, x::Vector{Float64})
-   @ ADNLPModels Documents\cvs\ADNLPModels.jl\src\enzyme.jl:17
- [4] grad!(nlp::ADNLPModel{Float64, Vector{Float64}, Vector{Int64}}, x::Vector{Float64}, g::Vector{Float64})
-   @ ADNLPModels Documents\cvs\ADNLPModels.jl\src\nlp.jl:542
- [5] grad(nlp::ADNLPModel{Float64, Vector{Float64}, Vector{Int64}}, x::Vector{Float64})
-   @ NLPModels .julia\packages\NLPModels\XBcWL\src\nlp\api.jl:31
- [6] top-level scope
-   @ .\REPL[7]:5
-=#

diff --git a/test/nlp/basic.jl b/test/nlp/basic.jl
index 741c3cb8..d09d2912 100644
--- a/test/nlp/basic.jl
+++ b/test/nlp/basic.jl
@@ -17,13 +17,13 @@ function test_autodiff_model(name; kwargs...)
   nlp = ADNLPModel(f, x0, c, [0.0], [0.0]; kwargs...)
   @test obj(nlp, x0) == f(x0)
 
-  x = range(-1, stop = 1, length = 100)
-  y = 2x .+ 3 + randn(100) * 0.1
-  regr = LinearRegression(x, y)
-  nlp = ADNLPModel(regr, ones(2); kwargs...)
-  β = [ones(100) x] \ y
-  @test abs(obj(nlp, β) - norm(y .- β[1] - β[2] * x)^2 / 2) < 1e-12
-  @test norm(grad(nlp, β)) < 1e-12
+  # x = range(-1, stop = 1, length = 100)
+  # y = 2x .+ 3 + randn(100) * 0.1
+  # regr = LinearRegression(x, y)
+  # nlp = ADNLPModel(regr, ones(2); kwargs...)
+  # β = [ones(100) x] \ y
+  # @test abs(obj(nlp, β) - norm(y .- β[1] - β[2] * x)^2 / 2) < 1e-12
+  # @test norm(grad(nlp, β)) < 1e-12
 
   test_getter_setter(nlp)

diff --git a/test/nlp/nlpmodelstest.jl b/test/nlp/nlpmodelstest.jl
index 78bf56ec..cd3dc31d 100644
--- a/test/nlp/nlpmodelstest.jl
+++ b/test/nlp/nlpmodelstest.jl
@@ -1,5 +1,6 @@
-@testset "Checking NLPModelsTest (NLP) tests with $backend" for backend in
-                                                                keys(ADNLPModels.predefined_backend)
+# @testset "Checking NLPModelsTest (NLP) tests with $backend" for backend in
+#                                                                 keys(ADNLPModels.predefined_backend)
+backend = :enzyme_backend
 @testset "Checking NLPModelsTest tests on problem $problem" for problem in
                                                                 NLPModelsTest.nlp_problems
   nlp_from_T = eval(Meta.parse(lowercase(problem) * "_autodiff"))
@@ -12,17 +13,17 @@
     @testset "Check Consistency" begin
       consistent_nlps(nlps, exclude = [], linear_api = true, reimplemented = ["jtprod"])
     end
-    @testset "Check dimensions" begin
-      check_nlp_dimensions(nlp_ad, exclude = [], linear_api = true)
-    end
-    @testset "Check multiple precision" begin
-      multiple_precision_nlp(nlp_from_T, exclude = [], linear_api = true)
-    end
-    @testset "Check view subarray" begin
-      view_subarray_nlp(nlp_ad, exclude = [])
-    end
-    @testset "Check coordinate memory" begin
-      coord_memory_nlp(nlp_ad, exclude = [], linear_api = true)
-    end
+    # @testset "Check dimensions" begin
+    #   check_nlp_dimensions(nlp_ad, exclude = [], linear_api = true)
+    # end
+    # @testset "Check multiple precision" begin
+    #   multiple_precision_nlp(nlp_from_T, exclude = [], linear_api = true)
+    # end
+    # @testset "Check view subarray" begin
+    #   view_subarray_nlp(nlp_ad, exclude = [])
+    # end
+    # @testset "Check coordinate memory" begin
+    #   coord_memory_nlp(nlp_ad, exclude = [], linear_api = true)
+    # end
   end
 end