Enzyme WIP

michel2323 committed Nov 27, 2024
1 parent 2e28370 commit 80f437b
Showing 4 changed files with 69 additions and 116 deletions.
21 changes: 10 additions & 11 deletions src/enzyme.jl
@@ -1,7 +1,7 @@
struct EnzymeReverseADJacobian <: ADBackend end
struct EnzymeReverseADHessian <: ADBackend end

-struct EnzymeReverseADGradient <: ADNLPModels.ADBackend end
+struct EnzymeReverseADGradient <: InPlaceADbackend end

function EnzymeReverseADGradient(
nvar::Integer,
@@ -16,12 +16,13 @@ end

function ADNLPModels.gradient(::EnzymeReverseADGradient, f, x)
g = similar(x)
-Enzyme.autodiff(Enzyme.Reverse, f, Enzyme.Duplicated(x, g)) # gradient!(Reverse, g, f, x)
+# Enzyme.autodiff(Enzyme.Reverse, Const(f), Active, Enzyme.Duplicated(x, g)) # gradient!(Reverse, g, f, x)
+Enzyme.gradient!(Reverse, g, Const(f), x)
return g
end

function ADNLPModels.gradient!(::EnzymeReverseADGradient, g, f, x)
-Enzyme.autodiff(Enzyme.Reverse, f, Enzyme.Duplicated(x, g)) # gradient!(Reverse, g, f, x)
+Enzyme.autodiff(Enzyme.Reverse, Const(f), Active, Enzyme.Duplicated(x, g)) # gradient!(Reverse, g, f, x)
return g
end
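
For reference, a minimal sketch of the gradient call pattern these hunks converge on, assuming Enzyme.jl is loaded; the objective is illustrative:

using Enzyme

f(x) = sum(abs2, x)                # illustrative objective, ∇f(x) = 2x
x = [1.0, 2.0, 3.0]
g = similar(x)

# High-level form: reverse-mode gradient into g; Const marks f as inactive data.
Enzyme.gradient!(Reverse, g, Const(f), x)
@assert g ≈ 2 .* x

# Low-level form: Active annotates the scalar return as the differentiated
# output, and the shadow of the Duplicated input accumulates the gradient.
fill!(g, 0)
Enzyme.autodiff(Enzyme.Reverse, Const(f), Active, Enzyme.Duplicated(x, g))
@assert g ≈ 2 .* x
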

@@ -57,7 +58,7 @@ function hessian(::EnzymeReverseADHessian, f, x)
tmp = similar(x)
for i in 1:length(x)
seed[i] = one(eltype(seed))
-Enzyme.hvp!(tmp, f, x, seed)
+Enzyme.hvp!(tmp, Const(f), x, seed)
hess[:, i] .= tmp
seed[i] = zero(eltype(seed))
end
@@ -80,9 +81,7 @@ function EnzymeReverseADJprod(
end

function Jprod!(b::EnzymeReverseADJprod, Jv, c!, x, v, ::Val)
-@show c!(x)
-@show Enzyme.autodiff(Enzyme.Forward, Const(c!), Duplicated(x, v))
-error("This is BAD")
+Enzyme.autodiff(Enzyme.Forward, Const(c!), Duplicated(b.x,Jv), Duplicated(x, v))
return Jv
end
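
A sketch of this forward-mode Jacobian-vector product pattern with a hypothetical two-constraint c! (in-place and returning nothing, so Enzyme sees no active return):

using Enzyme

function c!(y, x)                  # hypothetical constraints
    y[1] = x[1] + x[2]
    y[2] = x[1] * x[2]
    return nothing
end

x = [1.0, 2.0]
v = [1.0, 0.0]                     # direction
y = zeros(2)                       # primal output buffer (the role of b.x)
Jv = zeros(2)                      # receives J(x) * v

Enzyme.autodiff(Enzyme.Forward, Const(c!), Duplicated(y, Jv), Duplicated(x, v))
@assert Jv ≈ [1.0, 2.0]            # first column of J(x) = [1 1; x[2] x[1]]
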

@@ -102,7 +101,7 @@ function EnzymeReverseADJtprod(
end

function Jtprod!(b::EnzymeReverseADJtprod, Jtv, c!, x, v, ::Val)
-Enzyme.autodiff(Enzyme.Reverse, c!, Duplicated(b.x, Jtv), Enzyme.Duplicated(x, v))
+Enzyme.autodiff(Enzyme.Reverse, Const(c!), Duplicated(b.x, Jtv), Enzyme.Duplicated(x, v))
return Jtv
end
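
The reverse-mode transpose product has the same shape: the shadow of the output carries the seed v (and is consumed by the reverse pass), while the shadow of the input accumulates Jᵀv. A sketch with the same hypothetical c!:

x = [1.0, 2.0]
v = [1.0, 0.0]
y = zeros(2)
Jtv = zeros(2)                     # accumulates Jᵀ * v

# copy(v): reverse mode zeroes the output shadow while propagating it back.
Enzyme.autodiff(Enzyme.Reverse, Const(c!), Duplicated(y, copy(v)), Duplicated(x, Jtv))
@assert Jtv ≈ [1.0, 1.0]           # first row of J(x), since v selects output 1
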

@@ -126,7 +125,7 @@ function Hvprod!(b::EnzymeReverseADHvprod, Hv, x, v, f, args...)
# What to do with args?
Enzyme.autodiff(
Forward,
-gradient!,
+Const(Enzyme.gradient!),
Const(Reverse),
DuplicatedNoNeed(b.grad, Hv),
Const(f),
@@ -147,7 +146,7 @@ function Hvprod!(
)
Enzyme.autodiff(
Forward,
-gradient!,
+Const(Enzyme.gradient!),
Const(Reverse),
DuplicatedNoNeed(b.grad, Hv),
Const(ℓ),
@@ -169,7 +168,7 @@ function Hvprod!(
)
Enzyme.autodiff(
Forward,
-gradient!,
+Const(Enzyme.gradient!),
Const(Reverse),
DuplicatedNoNeed(b.grad, Hv),
Const(f),
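
All three Hvprod! methods use the same forward-over-reverse composition: forward-mode differentiation of a reverse-mode gradient! call. A self-contained sketch of that pattern, with an illustrative objective and assumed buffer names:

using Enzyme

f(x) = sum(abs2, x) / 2            # ∇²f(x) = I
x = [1.0, 2.0, 3.0]
v = [0.0, 1.0, 0.0]                # direction
g = zeros(3)                       # gradient buffer (the role of b.grad)
Hv = zeros(3)                      # receives ∇²f(x) * v

Enzyme.autodiff(
  Forward,
  Const(Enzyme.gradient!),         # differentiate the gradient computation itself
  Const(Reverse),
  DuplicatedNoNeed(g, Hv),         # primal gradient not needed, only its tangent
  Const(f),
  Duplicated(x, v),
)
@assert Hv ≈ v
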
121 changes: 37 additions & 84 deletions test/enzyme.jl
@@ -24,7 +24,10 @@ EnzymeReverseAD() = ADNLPModels.ADModelBackend(
ADNLPModels.EmptyADbackend(),
ADNLPModels.EmptyADbackend(),
)

+function mysum!(y, x)
+sum!(y, x)
+return nothing
+end
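
The wrapper matters because sum! returns its destination array, and reverse-mode Enzyme rejects functions whose return value is a Duplicated array ("Duplicated Returns not yet handled", the error preserved in the stacktrace at the end of this file's diff); returning nothing sidesteps that.
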
function test_autodiff_backend_error()
@testset "Error without loading package - $backend" for backend in [:EnzymeReverseAD]
adbackend = eval(backend)()
@@ -50,100 +53,50 @@ function test_autodiff_backend_error()
# )
gradient(adbackend.gradient_backend, sum, [1.0])
gradient!(adbackend.gradient_backend, [1.0], sum, [1.0])
-jacobian(adbackend.jacobian_backend, identity, [1.0])
+jacobian(adbackend.jacobian_backend, sum, [1.0])
hessian(adbackend.hessian_backend, sum, [1.0])
Jprod!(
adbackend.jprod_backend,
[1.0],
-identity,
+sum!,
[1.0],
[1.0],
Val(:c),
)
+Jtprod!(
+adbackend.jtprod_backend,
+[1.0],
+mysum!,
+[1.0],
+[1.0],
+Val(:c),
+)
+# Jtprod!(
+# adbackend.jtprod_backend,
+# [1.0],
+# identity,
+# [1.0],
+# [1.0],
+# Val(:c),
+# )
end
end

test_autodiff_backend_error()
-#=
-# ADNLPModels.EmptyADbackend(args...; kwargs...) = ADNLPModels.EmptyADbackend()
-
-names = OptimizationProblems.meta[!, :name]
-list_excluded_enzyme = [
-"brybnd",
-"clplatea",
-"clplateb",
-"clplatec",
-"curly",
-"curly10",
-"curly20",
-"curly30",
-"elec",
-"fminsrf2",
-"hs101",
-"hs117",
-"hs119",
-"hs86",
-"integreq",
-"ncb20",
-"ncb20b",
-"palmer1c",
-"palmer1d",
-"palmer2c",
-"palmer3c",
-"palmer4c",
-"palmer5c",
-"palmer5d",
-"palmer6c",
-"palmer7c",
-"palmer8c",
-"sbrybnd",
-"tetra",
-"tetra_duct12",
-"tetra_duct15",
-"tetra_duct20",
-"tetra_foam5",
-"tetra_gear",
-"tetra_hook",
-"threepk",
-"triangle",
-"triangle_deer",
-"triangle_pacman",
-"triangle_turtle",
-"watson",
-]
-for pb in names
-@info pb
-(pb in list_excluded_enzyme) && continue
-nlp = eval(Meta.parse(pb))(
-gradient_backend = ADNLPModels.EnzymeADGradient,
-jacobian_backend = ADNLPModels.EmptyADbackend,
-hessian_backend = ADNLPModels.EmptyADbackend,
-)
-grad(nlp, get_x0(nlp))
-end
-=#
+push!(
+ADNLPModels.predefined_backend,
+:enzyme_backend => Dict(
+:gradient_backend => ADNLPModels.EnzymeReverseADGradient,
+:jprod_backend => ADNLPModels.EnzymeReverseADJprod,
+:jtprod_backend => ADNLPModels.EnzymeReverseADJtprod,
+:hprod_backend => ADNLPModels.EnzymeReverseADHvprod,
+:jacobian_backend => ADNLPModels.EnzymeReverseADJacobian,
+:hessian_backend => ADNLPModels.EnzymeReverseADHessian,
+:ghjvprod_backend => ADNLPModels.ForwardDiffADGHjvprod,
+:jprod_residual_backend => ADNLPModels.EnzymeReverseADJprod,
+:jtprod_residual_backend => ADNLPModels.EnzymeReverseADJtprod,
+:hprod_residual_backend => ADNLPModels.EnzymeReverseADHvprod,
+:jacobian_residual_backend => ADNLPModels.EnzymeReverseADJacobian,
+:hessian_residual_backend => ADNLPModels.EnzymeReverseADHessian,
+),
+)
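
With the entry registered, a model can presumably be built against it by name, assuming ADNLPModels resolves the backend keyword through predefined_backend; the objective here is illustrative:

using ADNLPModels, NLPModels

f(x) = (x[1] - 1)^2 + 100 * (x[2] - x[1]^2)^2
nlp = ADNLPModel(f, [-1.2, 1.0]; backend = :enzyme_backend)
grad(nlp, nlp.meta.x0)             # gradient via EnzymeReverseADGradient
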

include("utils.jl")
include("nlp/basic.jl")
include("nls/basic.jl")
include("nlp/nlpmodelstest.jl")
include("nls/nlpmodelstest.jl")

-#=
-ERROR: Duplicated Returns not yet handled
-Stacktrace:
- [1] autodiff
-   @ .julia\packages\Enzyme\DIkTv\src\Enzyme.jl:209 [inlined]
- [2] autodiff(mode::EnzymeCore.ReverseMode, f::OptimizationProblems.ADNLPProblems.var"#f#254"{OptimizationProblems.ADNLPProblems.var"#f#250#255"}, args::Duplicated{Vector{Float64}})
-   @ Enzyme .julia\packages\Enzyme\DIkTv\src\Enzyme.jl:248
- [3] gradient!(#unused#::ADNLPModels.EnzymeADGradient, g::Vector{Float64}, f::Function, x::Vector{Float64})
-   @ ADNLPModels Documents\cvs\ADNLPModels.jl\src\enzyme.jl:17
- [4] grad!(nlp::ADNLPModel{Float64, Vector{Float64}, Vector{Int64}}, x::Vector{Float64}, g::Vector{Float64})
-   @ ADNLPModels Documents\cvs\ADNLPModels.jl\src\nlp.jl:542
- [5] grad(nlp::ADNLPModel{Float64, Vector{Float64}, Vector{Int64}}, x::Vector{Float64})
-   @ NLPModels .julia\packages\NLPModels\XBcWL\src\nlp\api.jl:31
- [6] top-level scope
-   @ .\REPL[7]:5
-=#
14 changes: 7 additions & 7 deletions test/nlp/basic.jl
@@ -17,13 +17,13 @@ function test_autodiff_model(name; kwargs...)
nlp = ADNLPModel(f, x0, c, [0.0], [0.0]; kwargs...)
@test obj(nlp, x0) == f(x0)

-x = range(-1, stop = 1, length = 100)
-y = 2x .+ 3 + randn(100) * 0.1
-regr = LinearRegression(x, y)
-nlp = ADNLPModel(regr, ones(2); kwargs...)
-β = [ones(100) x] \ y
-@test abs(obj(nlp, β) - norm(y .- β[1] - β[2] * x)^2 / 2) < 1e-12
-@test norm(grad(nlp, β)) < 1e-12
+# x = range(-1, stop = 1, length = 100)
+# y = 2x .+ 3 + randn(100) * 0.1
+# regr = LinearRegression(x, y)
+# nlp = ADNLPModel(regr, ones(2); kwargs...)
+# β = [ones(100) x] \ y
+# @test abs(obj(nlp, β) - norm(y .- β[1] - β[2] * x)^2 / 2) < 1e-12
+# @test norm(grad(nlp, β)) < 1e-12

test_getter_setter(nlp)

29 changes: 15 additions & 14 deletions test/nlp/nlpmodelstest.jl
@@ -1,5 +1,6 @@
@testset "Checking NLPModelsTest (NLP) tests with $backend" for backend in
keys(ADNLPModels.predefined_backend)
# @testset "Checking NLPModelsTest (NLP) tests with $backend" for backend in
# keys(ADNLPModels.predefined_backend)
backend = :enzyme_backend
@testset "Checking NLPModelsTest tests on problem $problem" for problem in
NLPModelsTest.nlp_problems
nlp_from_T = eval(Meta.parse(lowercase(problem) * "_autodiff"))
@@ -12,17 +13,17 @@
@testset "Check Consistency" begin
consistent_nlps(nlps, exclude = [], linear_api = true, reimplemented = ["jtprod"])
end
@testset "Check dimensions" begin
check_nlp_dimensions(nlp_ad, exclude = [], linear_api = true)
end
@testset "Check multiple precision" begin
multiple_precision_nlp(nlp_from_T, exclude = [], linear_api = true)
end
@testset "Check view subarray" begin
view_subarray_nlp(nlp_ad, exclude = [])
end
@testset "Check coordinate memory" begin
coord_memory_nlp(nlp_ad, exclude = [], linear_api = true)
end
# @testset "Check dimensions" begin
# check_nlp_dimensions(nlp_ad, exclude = [], linear_api = true)
# end
# @testset "Check multiple precision" begin
# multiple_precision_nlp(nlp_from_T, exclude = [], linear_api = true)
# end
# @testset "Check view subarray" begin
# view_subarray_nlp(nlp_ad, exclude = [])
# end
# @testset "Check coordinate memory" begin
# coord_memory_nlp(nlp_ad, exclude = [], linear_api = true)
# end
end
end
