From ece89c0fac2ac52ded96d763dc4f0afc7f2f4bc2 Mon Sep 17 00:00:00 2001
From: adrhill
Date: Fri, 17 May 2024 16:58:08 +0200
Subject: [PATCH 1/6] Update README

---
 README.md | 32 +++++++++++++++++++++++++++++---
 docs/Project.toml | 4 +++-
 docs/make.jl | 1 +
 3 files changed, 33 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index 28cdab3f..a1978eb3 100644
--- a/README.md
+++ b/README.md
@@ -24,7 +24,7 @@ julia> ]add SparseConnectivityTracer

For functions `y = f(x)` and `f!(y, x)`, the sparsity pattern of the Jacobian of $f$ can be obtained by computing a single forward-pass through `f`:

-```julia-repl
+```jldoctest
julia> using SparseConnectivityTracer

julia> x = rand(3);

julia> f(x) = [x[1]^2, 2 * x[1] * x[2]^2, sin(x[3])];

julia> jacobian_pattern(f, x)
@@ -39,7 +39,7 @@ julia> jacobian_pattern(f, x)
```

As a larger example, let's compute the sparsity pattern of a convolutional layer from [Flux.jl](https://github.com/FluxML/Flux.jl):
-```julia-repl
+```jldoctest
julia> using SparseConnectivityTracer, Flux

julia> x = rand(28, 28, 3, 1);

julia> layer = Conv((3, 3), 3 => 2);
@@ -71,7 +71,7 @@ For high-dimensional functions, `Set{UInt64}` can be more efficient.

For scalar functions `y = f(x)`, the sparsity pattern of the Hessian of $f$ can be obtained by computing a single forward-pass through `f`:

-```julia-repl
+```jldoctest
julia> x = rand(5);

julia> f(x) = x[1] + x[2]*x[3] + 1/x[4] + 1*x[5];
@@ -97,6 +97,32 @@ julia> hessian_pattern(g, x)

For more detailed examples, take a look at the [documentation](https://adrianhill.de/SparseConnectivityTracer.jl/dev).

+### Global function tracing
+
+The functions `jacobian_pattern`, `hessian_pattern` and `connectivity_pattern` return conservative sparsity patterns over the entire input domain of `x`.
+They are not compatible with functions that require information about the primal values of a computation (e.g. `iszero`, `>`, `==`).
+
+To compute a less conservative sparsity pattern at an input point `x`, use `local_jacobian_pattern`, `local_hessian_pattern` and `local_connectivity_pattern` instead.
+Note that these patterns depend on the input `x`:
+
+```jldoctest
+julia> f(x) = ifelse(x[2] < x[3], x[1] ^ x[2], x[3] * x[4]);
+
+julia> local_hessian_pattern(f, [1 2 3 4])
+4×4 SparseArrays.SparseMatrixCSC{Bool, Int64} with 4 stored entries:
+ 1  1  ⋅  ⋅
+ 1  1  ⋅  ⋅
+ ⋅  ⋅  ⋅  ⋅
+ ⋅  ⋅  ⋅  ⋅
+
+julia> local_hessian_pattern(f, [1 3 2 4])
+4×4 SparseArrays.SparseMatrixCSC{Bool, Int64} with 2 stored entries:
+ ⋅  ⋅  ⋅  ⋅
+ ⋅  ⋅  ⋅  ⋅
+ ⋅  ⋅  ⋅  1
+ ⋅  ⋅  1  ⋅
+```
+
## Related packages
* [SparseDiffTools.jl](https://github.com/JuliaDiff/SparseDiffTools.jl): automatic sparsity detection via Symbolics.jl and Cassette.jl
* [SparsityTracing.jl](https://github.com/PALEOtoolkit/SparsityTracing.jl): automatic Jacobian sparsity detection using an algorithm based on SparsLinC by Bischof et al.
(1996)

diff --git a/docs/Project.toml b/docs/Project.toml
index cf62baaa..40476eba 100644
--- a/docs/Project.toml
+++ b/docs/Project.toml
@@ -1,7 +1,9 @@
[deps]
ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
+Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
SparseConnectivityTracer = "9f842d2f-2579-4b1d-911e-f412cf18a3f5"

[compat]
-ADTypes = "1"
\ No newline at end of file
+ADTypes = "1"
+Flux = "0.14"

diff --git a/docs/make.jl b/docs/make.jl
index 69154d14..80e9a514 100644
--- a/docs/make.jl
+++ b/docs/make.jl
@@ -1,5 +1,6 @@
using SparseConnectivityTracer
using Documenter
+using Flux: Conv, relu # used in README examples

# Create index.md from README
cp(joinpath(@__DIR__, "..", "README.md"), joinpath(@__DIR__, "src", "index.md"); force=true)

From 86137d9569fd270ddc786a5666aa7ea8b28ad82b Mon Sep 17 00:00:00 2001
From: adrhill
Date: Fri, 17 May 2024 17:00:45 +0200
Subject: [PATCH 2/6] Turn NNlib tests into Flux tests

---
 README.md | 10 +++++-----
 test/Project.toml | 2 +-
 test/flux.jl | 46 ++++++++++++++++++++++++++++++++++++++++++++++
 test/nnlib.jl | 26 --------------------------
 test/runtests.jl | 5 ++---
 5 files changed, 54 insertions(+), 35 deletions(-)
 create mode 100644 test/flux.jl
 delete mode 100644 test/nnlib.jl

diff --git a/README.md b/README.md
index a1978eb3..79cac544 100644
--- a/README.md
+++ b/README.md
@@ -32,7 +32,7 @@ julia> x = rand(3);
julia> f(x) = [x[1]^2, 2 * x[1] * x[2]^2, sin(x[3])];

julia> jacobian_pattern(f, x)
-3×3 SparseArrays.SparseMatrixCSC{Bool, UInt64} with 4 stored entries:
+3×3 SparseArrays.SparseMatrixCSC{Bool, Int64} with 4 stored entries:
 1  ⋅  ⋅
 1  1  ⋅
 ⋅  ⋅  1
@@ -47,7 +47,7 @@ julia> x = rand(28, 28, 3, 1);
julia> layer = Conv((3, 3), 3 => 2);

julia> jacobian_pattern(layer, x)
-1352×2352 SparseArrays.SparseMatrixCSC{Bool, UInt64} with 36504 stored entries:
+1352×2352 SparseArrays.SparseMatrixCSC{Bool, Int64} with 36504 stored entries:
⎡⠙⢿⣦⡀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠻⣷⣤⡀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠈⠻⣷⣄⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⎤
⎢⠀⠀⠙⢿⣦⡀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠈⠙⢿⣦⡀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠈⠻⣷⣤⡀⠀⠀⠀⠀⠀⠀⠀⠀⠀⎥
⎢⠀⠀⠀⠀⠙⢿⣦⣀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠙⢿⣦⡀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠈⠙⢿⣦⡀⠀⠀⠀⠀⠀⠀⠀⎥
@@ -64,7 +64,7 @@ julia> jacobian_pattern(layer, x)
```

The type of index set `S` that is internally used to keep track of connectivity can be specified via `jacobian_pattern(f, x, S)`, defaulting to `BitSet`.
-For high-dimensional functions, `Set{UInt64}` can be more efficient.
+For high-dimensional functions, `Set{Int64}` can be more efficient.
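
A minimal sketch of that three-argument call (assuming the `jacobian_pattern(f, x, S)` method named above; output is omitted here since the printed index type depends on `S`):

```julia-repl
julia> f(x) = [x[1]^2, 2 * x[1] * x[2]^2, sin(x[3])];

julia> jacobian_pattern(f, rand(3), Set{Int64})  # same 3×3 pattern, tracked in a hash set
```
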
### Hessian @@ -77,7 +77,7 @@ julia> x = rand(5); julia> f(x) = x[1] + x[2]*x[3] + 1/x[4] + 1*x[5]; julia> hessian_pattern(f, x) -5×5 SparseArrays.SparseMatrixCSC{Bool, UInt64} with 3 stored entries: +5×5 SparseArrays.SparseMatrixCSC{Bool, Int64} with 3 stored entries: ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ 1 ⋅ ⋅ ⋅ 1 ⋅ ⋅ ⋅ @@ -87,7 +87,7 @@ julia> hessian_pattern(f, x) julia> g(x) = f(x) + x[2]^x[5]; julia> hessian_pattern(g, x) -5×5 SparseArrays.SparseMatrixCSC{Bool, UInt64} with 7 stored entries: +5×5 SparseArrays.SparseMatrixCSC{Bool, Int64} with 7 stored entries: ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ 1 1 ⋅ 1 ⋅ 1 ⋅ ⋅ ⋅ diff --git a/test/Project.toml b/test/Project.toml index c2ec2061..8197cbed 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -2,11 +2,11 @@ ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b" Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595" Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4" +Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c" ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" JET = "c3a54625-cd67-489e-a8e7-0a5a0ff4e31b" JuliaFormatter = "98e50ef6-434e-11e9-1051-2b60c6c9e899" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" -NNlib = "872c559c-99b0-510c-b3b7-b6c96a88d5cd" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" ReferenceTests = "324d217c-45ce-50fc-942e-d289b448e8cf" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" diff --git a/test/flux.jl b/test/flux.jl new file mode 100644 index 00000000..bab50b8f --- /dev/null +++ b/test/flux.jl @@ -0,0 +1,46 @@ +using ADTypes +using ADTypes: AbstractSparsityDetector +using Flux: Conv, relu +using ReferenceTests +using SparseConnectivityTracer +using SparseConnectivityTracer: DuplicateVector, RecursiveSet, SortedVector +using Test + +function test_flux_conv(method::AbstractSparsityDetector) + x = rand(3, 3, 2, 1) # WHCN + weights = reshape( + [ + 0.98367139 + 0.61198703 + 0.01781049 + 0.79373138 + 0.01697244 + 0.16676845 + 0.15130460 + 0.17703203 + ], + 2, + 2, + 2, + 1, + ) + bias = [0.83478294] + + layer = Conv(weights, bias) # Conv((2, 2), 2 => 1) + J = ADTypes.jacobian_sparsity(layer, x, method) + @test_reference "references/pattern/jacobian/NNlib/conv.txt" BitMatrix(J) + + layer = Conv(weights, bias, relu) + @test_broken J = ADTypes.jacobian_sparsity(layer, x, method) + # @test_reference "references/pattern/jacobian/NNlib/conv_relu.txt" BitMatrix(J) +end + +@testset "$method" for method in ( + TracerSparsityDetector(BitSet), + TracerSparsityDetector(Set{UInt64}), + TracerSparsityDetector(DuplicateVector{UInt64}), + TracerSparsityDetector(RecursiveSet{UInt64}), + TracerSparsityDetector(SortedVector{UInt64}), +) + test_flux_conv(method) +end diff --git a/test/nnlib.jl b/test/nnlib.jl deleted file mode 100644 index 1853e1d9..00000000 --- a/test/nnlib.jl +++ /dev/null @@ -1,26 +0,0 @@ -using ADTypes -using ADTypes: AbstractSparsityDetector -using NNlib -using ReferenceTests -using SparseConnectivityTracer -using SparseConnectivityTracer: DuplicateVector, RecursiveSet, SortedVector -using Test - -function test_nnlib_conv(method::AbstractSparsityDetector) - x = rand(3, 3, 2, 1) # WHCN - w = rand(2, 2, 2, 1) # Conv((2, 2), 2 => 1) - f(x) = NNlib.conv(x, w) - - J = ADTypes.jacobian_sparsity(f, x, method) - @test_reference "references/pattern/jacobian/NNlib/conv.txt" BitMatrix(J) -end - -@testset "$method" for method in ( - TracerSparsityDetector(BitSet), - TracerSparsityDetector(Set{UInt64}), - TracerSparsityDetector(DuplicateVector{UInt64}), - TracerSparsityDetector(RecursiveSet{UInt64}), - TracerSparsityDetector(SortedVector{UInt64}), -) - 
test_nnlib_conv(method)
-end
diff --git a/test/runtests.jl b/test/runtests.jl
index 41375cbe..bebe1104 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -9,7 +9,6 @@
using Documenter
using LinearAlgebra
using Random
-using NNlib

DocMeta.setdocmeta!(
    SparseConnectivityTracer,
@@ -70,8 +69,8 @@ DocMeta.setdocmeta!(
    @testset "Brusselator" begin
        include("brusselator.jl")
    end
-    @testset "NNlib" begin
-        include("nnlib.jl")
+    @testset "Flux.jl" begin
+        include("flux.jl")
    end
end

From 9c775c456ecf4a98795b6bb69859a45da62ab6f1 Mon Sep 17 00:00:00 2001
From: adrhill
Date: Fri, 17 May 2024 17:18:31 +0200
Subject: [PATCH 3/6] Undo README doctests

---
 README.md | 9 +++++----
 docs/Project.toml | 2 --
 docs/make.jl | 1 -
 3 files changed, 5 insertions(+), 7 deletions(-)

diff --git a/README.md b/README.md
index 79cac544..af18a89f 100644
--- a/README.md
+++ b/README.md
@@ -24,7 +24,7 @@ julia> ]add SparseConnectivityTracer

For functions `y = f(x)` and `f!(y, x)`, the sparsity pattern of the Jacobian of $f$ can be obtained by computing a single forward-pass through `f`:

-```jldoctest
+```julia-repl
julia> using SparseConnectivityTracer

julia> x = rand(3);
@@ -39,7 +39,8 @@ julia> jacobian_pattern(f, x)
```

As a larger example, let's compute the sparsity pattern of a convolutional layer from [Flux.jl](https://github.com/FluxML/Flux.jl):
-```jldoctest
+
+```julia-repl
julia> using SparseConnectivityTracer, Flux

julia> x = rand(28, 28, 3, 1);
@@ -71,7 +72,7 @@
For scalar functions `y = f(x)`, the sparsity pattern of the Hessian of $f$ can be obtained by computing a single forward-pass through `f`:

-```jldoctest
+```julia-repl
julia> x = rand(5);

julia> f(x) = x[1] + x[2]*x[3] + 1/x[4] + 1*x[5];
@@ -105,7 +106,7 @@
They are not compatible with functions that require information about the primal values of a computation (e.g. `iszero`, `>`, `==`).

To compute a less conservative sparsity pattern at an input point `x`, use `local_jacobian_pattern`, `local_hessian_pattern` and `local_connectivity_pattern` instead.
Note that these patterns depend on the input `x`: -```jldoctest +```julia-repl julia> f(x) = ifelse(x[2] < x[3], x[1] ^ x[2], x[3] * x[4]); julia> local_hessian_pattern(f, [1 2 3 4]) diff --git a/docs/Project.toml b/docs/Project.toml index 40476eba..46474468 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -1,9 +1,7 @@ [deps] ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b" Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4" -Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c" SparseConnectivityTracer = "9f842d2f-2579-4b1d-911e-f412cf18a3f5" [compat] ADTypes = "1" -Flux = "0.14" diff --git a/docs/make.jl b/docs/make.jl index 80e9a514..69154d14 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -1,6 +1,5 @@ using SparseConnectivityTracer using Documenter -using Flux: Conv, relu # used in README examples # Create index.md from README cp(joinpath(@__DIR__, "..", "README.md"), joinpath(@__DIR__, "src", "index.md"); force=true) From d281f1cd3eec40af23bbcb10ed69d44152281cd3 Mon Sep 17 00:00:00 2001 From: adrhill Date: Fri, 17 May 2024 17:25:41 +0200 Subject: [PATCH 4/6] Support comparisons with numbers --- src/overload_dual.jl | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/src/overload_dual.jl b/src/overload_dual.jl index f442c302..ce8f9f55 100644 --- a/src/overload_dual.jl +++ b/src/overload_dual.jl @@ -22,7 +22,17 @@ end for fn in (:isequal, :isapprox, :isless, :(==), :(<), :(>), :(<=), :(>=)) @eval Base.$fn(dx::D, dy::D) where {D<:Dual} = $fn(primal(dx), primal(dy)) - @eval function Base.$fn(t1::T, t2::T) where {T<:AbstractTracer} - throw(MissingPrimalError($fn, t1)) + @eval Base.$fn(dx::D, y::Number) where {D<:Dual} = $fn(primal(dx), y) + @eval Base.$fn(x::Number, dy::D) where {D<:Dual} = $fn(x, primal(dy)) + + # Error on non-dual tracers + @eval function Base.$fn(tx::T, ty::T) where {T<:AbstractTracer} + return throw(MissingPrimalError($fn, tx)) + end + @eval function Base.$fn(tx::T, y::Number) where {T<:AbstractTracer} + return throw(MissingPrimalError($fn, tx)) + end + @eval function Base.$fn(x::Number, ty::T) where {T<:AbstractTracer} + return throw(MissingPrimalError($fn, ty)) end end From 0e61788d07aadd2fa79c5f89216cbcb0ea6acd53 Mon Sep 17 00:00:00 2001 From: adrhill Date: Fri, 17 May 2024 17:51:57 +0200 Subject: [PATCH 5/6] Fix type promotion on duals --- src/conversion.jl | 27 +++++--- src/tracers.jl | 4 ++ test/flux.jl | 65 ++++++++++++------- .../pattern/jacobian/NNlib/conv_relu.txt | 1 + 4 files changed, 65 insertions(+), 32 deletions(-) create mode 100644 test/references/pattern/jacobian/NNlib/conv_relu.txt diff --git a/src/conversion.jl b/src/conversion.jl index 4ef98db4..a5a33812 100644 --- a/src/conversion.jl +++ b/src/conversion.jl @@ -48,15 +48,18 @@ Base.similar(::Array, ::Type{ConnectivityTracer{C}}, dims::Dims{N}) where {C,N} Base.similar(::Array, ::Type{GradientTracer{G}}, dims::Dims{N}) where {G,N} = zeros(GradientTracer{G}, dims) Base.similar(::Array, ::Type{HessianTracer{G,H}}, dims::Dims{N}) where {G,H,N} = zeros(HessianTracer{G,H}, dims) - ## Duals -function Base.promote_rule(::Type{D}, ::Type{N}) where {P,T,D<:Dual{P,T},N<:Number} - PP = Base.promote_rule(P, N) # TODO: possible method call error? - return D{PP,T} +function Base.promote_rule(::Type{Dual{P1, T}}, ::Type{Dual{P2, T}}) where {P1,P2,T} + PP = Base.promote_type(P1, P2) # TODO: possible method call error? 
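+    # promotes the primal types only; the shared tracer type `T` is carried over unchanged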
+ return Dual{PP,T} +end +function Base.promote_rule(::Type{Dual{P, T}}, ::Type{N}) where {P,T,N<:Number} + PP = Base.promote_type(P, N) # TODO: possible method call error? + return Dual{PP,T} end -function Base.promote_rule(::Type{N}, ::Type{D}) where {P,T,D<:Dual{P,T},N<:Number} - PP = Base.promote_rule(P, N) # TODO: possible method call error? - return D{PP,T} +function Base.promote_rule(::Type{N}, ::Type{Dual{P, T}}) where {P,T,N<:Number} + PP = Base.promote_type(P, N) # TODO: possible method call error? + return Dual{PP,T} end Base.big(::Type{D}) where {P,T,D<:Dual{P,T}} = Dual{big(P),T} @@ -64,9 +67,13 @@ Base.widen(::Type{D}) where {P,T,D<:Dual{P,T}} = Dual{widen(P),T} Base.big(d::D) where {P,T,D<:Dual{P,T}} = Dual(big(primal(d)), tracer(d)) Base.widen(d::D) where {P,T,D<:Dual{P,T}} = Dual(widen(primal(d)), tracer(d)) -Base.convert(::Type{D}, x::Number) where {P,T,D<:Dual{P,T}} = Dual(x, empty(T)) -Base.convert(::Type{D}, d::D) where {D<:Dual} = d -Base.convert(::Type{T}, d::D) where {T<:Number,D<:Dual} = Dual(convert(T, primal(d)), tracer(d)) +Base.convert(::Type{D}, x::Number) where {P,T,D<:Dual{P,T}} = Dual(x, empty(T)) +Base.convert(::Type{D}, d::D) where {P,T,D<:Dual{P,T}} = d +Base.convert(::Type{N}, d::D) where {N<:Number,P,T,D<:Dual{P,T}} = Dual(convert(T, primal(d)), tracer(d)) + +function Base.convert(::Type{Dual{P1,T}}, d::Dual{P2,T}) where {P1,P2,T} + return Dual(convert(P1, primal(d)), tracer(d)) +end ## Constants Base.zero(::Type{D}) where {P,T,D<:Dual{P,T}} = D(zero(P), empty(T)) diff --git a/src/tracers.jl b/src/tracers.jl index 503b94f6..2412c405 100644 --- a/src/tracers.jl +++ b/src/tracers.jl @@ -211,6 +211,10 @@ gradient(d::Dual{P,T}) where {P,T<:GradientTracer} = gradient(d.tracer) gradient(d::Dual{P,T}) where {P,T<:HessianTracer} = gradient(d.tracer) hessian(d::Dual{P,T}) where {P,T<:HessianTracer} = hessian(d.tracer) +function Dual{P,T}(x::Number) where {P<:Number,T<:AbstractTracer} + return Dual(convert(P, x), empty(T)) +end + #===========# # Utilities # #===========# diff --git a/test/flux.jl b/test/flux.jl index bab50b8f..ee5c0fa1 100644 --- a/test/flux.jl +++ b/test/flux.jl @@ -6,41 +6,62 @@ using SparseConnectivityTracer using SparseConnectivityTracer: DuplicateVector, RecursiveSet, SortedVector using Test -function test_flux_conv(method::AbstractSparsityDetector) - x = rand(3, 3, 2, 1) # WHCN +function test_flux_conv(S::Type) + x = reshape( + [ + 0.2677768300138966 + 1.1934917429169245 + -1.0496617141319355 + 0.456668782925957 + 0.09678342859916624 + -0.7962039825333248 + -0.6138709208787495 + -0.6809396498148278 + 0.4938230574627916 + 0.7847107012511034 + 0.7423059724033608 + -0.6914378396432983 + 1.2062310319178624 + -0.19647670394840708 + 0.10708057449244994 + -0.4787927739226245 + 0.045072020113458774 + -1.219617669693635 + ], + 3, + 3, + 2, + 1, + ) # WHCN weights = reshape( [ - 0.98367139 - 0.61198703 - 0.01781049 - 0.79373138 - 0.01697244 - 0.16676845 - 0.15130460 - 0.17703203 + 0.311843398150865 + 0.488663701947109 + 0.648497438559604 + -0.41742794246238 + 0.174865988551499 + 1.061745573803265 + -0.72434245370475 + -0.05213963181095 ], 2, 2, 2, 1, ) - bias = [0.83478294] + bias = [0.1] layer = Conv(weights, bias) # Conv((2, 2), 2 => 1) - J = ADTypes.jacobian_sparsity(layer, x, method) - @test_reference "references/pattern/jacobian/NNlib/conv.txt" BitMatrix(J) + J1 = jacobian_pattern(layer, x, S) + @test_reference "references/pattern/jacobian/NNlib/conv.txt" BitMatrix(J1) layer = Conv(weights, bias, relu) - @test_broken J = 
ADTypes.jacobian_sparsity(layer, x, method) - # @test_reference "references/pattern/jacobian/NNlib/conv_relu.txt" BitMatrix(J) + J2 = local_jacobian_pattern(layer, x, S) + @test_reference "references/pattern/jacobian/NNlib/conv_relu.txt" BitMatrix(J2) end -@testset "$method" for method in ( - TracerSparsityDetector(BitSet), - TracerSparsityDetector(Set{UInt64}), - TracerSparsityDetector(DuplicateVector{UInt64}), - TracerSparsityDetector(RecursiveSet{UInt64}), - TracerSparsityDetector(SortedVector{UInt64}), +@testset "$S" for S in ( + BitSet, Set{UInt64}, DuplicateVector{UInt64}, RecursiveSet{UInt64}, SortedVector{UInt64} ) - test_flux_conv(method) + test_flux_conv(S) end diff --git a/test/references/pattern/jacobian/NNlib/conv_relu.txt b/test/references/pattern/jacobian/NNlib/conv_relu.txt new file mode 100644 index 00000000..aa460c09 --- /dev/null +++ b/test/references/pattern/jacobian/NNlib/conv_relu.txt @@ -0,0 +1 @@ +Bool[1 1 0 1 1 0 0 0 0 1 1 0 1 1 0 0 0 0; 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0; 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0; 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0] \ No newline at end of file From fded9f1779198850728afea4b0e4977eb057c48a Mon Sep 17 00:00:00 2001 From: adrhill Date: Fri, 17 May 2024 17:58:35 +0200 Subject: [PATCH 6/6] Add `oneunit` --- src/conversion.jl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/conversion.jl b/src/conversion.jl index a5a33812..925e1d14 100644 --- a/src/conversion.jl +++ b/src/conversion.jl @@ -17,6 +17,7 @@ for TT in (GradientTracer, ConnectivityTracer, HessianTracer) ## Constants Base.zero(::Type{T}) where {T<:TT} = empty(T) Base.one(::Type{T}) where {T<:TT} = empty(T) + Base.oneunit(::Type{T}) where {T<:TT} = empty(T) Base.typemin(::Type{T}) where {T<:TT} = empty(T) Base.typemax(::Type{T}) where {T<:TT} = empty(T) Base.eps(::Type{T}) where {T<:TT} = empty(T) @@ -27,6 +28,7 @@ for TT in (GradientTracer, ConnectivityTracer, HessianTracer) Base.zero(::T) where {T<:TT} = empty(T) Base.one(::T) where {T<:TT} = empty(T) + Base.oneunit(::T) where {T<:TT} = empty(T) Base.typemin(::T) where {T<:TT} = empty(T) Base.typemax(::T) where {T<:TT} = empty(T) Base.eps(::T) where {T<:TT} = empty(T) @@ -78,6 +80,7 @@ end ## Constants Base.zero(::Type{D}) where {P,T,D<:Dual{P,T}} = D(zero(P), empty(T)) Base.one(::Type{D}) where {P,T,D<:Dual{P,T}} = D(one(P), empty(T)) +Base.oneunit(::Type{D}) where {P,T,D<:Dual{P,T}} = D(oneunit(P), empty(T)) Base.typemin(::Type{D}) where {P,T,D<:Dual{P,T}} = D(typemin(P), empty(T)) Base.typemax(::Type{D}) where {P,T,D<:Dual{P,T}} = D(typemax(P), empty(T)) Base.eps(::Type{D}) where {P,T,D<:Dual{P,T}} = D(eps(P), empty(T)) @@ -88,6 +91,7 @@ Base.maxintfloat(::Type{D}) where {P,T,D<:Dual{P,T}} = D(maxintfloat(P), empty(T Base.zero(d::D) where {P,T,D<:Dual{P,T}} = D(zero(primal(d)), empty(T)) Base.one(d::D) where {P,T,D<:Dual{P,T}} = D(one(primal(d)), empty(T)) +Base.oneunit(d::D) where {P,T,D<:Dual{P,T}} = D(oneunit(primal(d)), empty(T)) Base.typemin(d::D) where {P,T,D<:Dual{P,T}} = D(typemin(primal(d)), empty(T)) Base.typemax(d::D) where {P,T,D<:Dual{P,T}} = D(typemax(primal(d)), empty(T)) Base.eps(d::D) where {P,T,D<:Dual{P,T}} = D(eps(primal(d)), empty(T))
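
Taken together, patches 4-6 make tracers and duals behave like ordinary numbers in generic Julia code. A minimal sketch of what the constant overloads above enable (the toy function `f` is hypothetical; `jacobian_pattern` is the entry point shown in the README):

```julia
using SparseConnectivityTracer

# `one`, `zero`, and now `oneunit` return constant (empty) tracers,
# so they contribute no input indices to the traced pattern:
f(x) = x[1] * oneunit(x[1]) + x[2]

jacobian_pattern(f, rand(2))  # 1×2 pattern; only the true dependencies on x[1] and x[2] appear
```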