From bd2f0e0ca525c6ee93098f28b0f1e15a6c94162c Mon Sep 17 00:00:00 2001
From: Simone Carlo Surace
Date: Thu, 16 Jun 2022 15:53:46 +0200
Subject: [PATCH 1/9] Add type inference tests

---
 test/expectations.jl | 62 ++++++++++++++++++++++++++++----------------
 1 file changed, 39 insertions(+), 23 deletions(-)

diff --git a/test/expectations.jl b/test/expectations.jl
index 65fe96e..c207c24 100644
--- a/test/expectations.jl
+++ b/test/expectations.jl
@@ -1,6 +1,4 @@
 @testset "expectations" begin
-    # Test that the various methods of computing expectations return the same
-    # result.
     rng = MersenneTwister(123456)
     q_f = Normal.(zeros(10), ones(10))
 
@@ -30,30 +28,48 @@
         end
     end
 
-    @testset "$(nameof(typeof(lik)))" for lik in likelihoods_to_test
-        methods = [
-            GaussHermiteExpectation(100),
-            MonteCarloExpectation(1e7),
-            GPLikelihoods.DefaultExpectationMethod(),
-        ]
-        def = GPLikelihoods.default_expectation_method(lik)
-        if def isa GPLikelihoods.AnalyticExpectation
-            push!(methods, def)
-        end
-        y = rand.(rng, lik.(zeros(10)))
+    @testset "testing consistency of different expectation methods" begin
+        @testset "$(nameof(typeof(lik)))" for lik in likelihoods_to_test
+            # Test that the various methods of computing expectations return the same
+            # result.
+            methods = [
+                GaussHermiteExpectation(100),
+                MonteCarloExpectation(1e7),
+                GPLikelihoods.DefaultExpectationMethod(),
+            ]
+            def = GPLikelihoods.default_expectation_method(lik)
+            if def isa GPLikelihoods.AnalyticExpectation
+                push!(methods, def)
+            end
+            y = rand.(rng, lik.(zeros(10)))
 
-        results = map(m -> GPLikelihoods.expected_loglikelihood(m, lik, q_f, y), methods)
-        @test all(x -> isapprox(x, results[end]; atol=1e-6, rtol=1e-3), results)
+            results = map(m -> GPLikelihoods.expected_loglikelihood(m, lik, q_f, y), methods)
+            @test all(x -> isapprox(x, results[end]; atol=1e-6, rtol=1e-3), results)
+        end
     end
 
-    @test GPLikelihoods.expected_loglikelihood(
-        MonteCarloExpectation(1), GaussianLikelihood(), q_f, zeros(10)
-    ) isa Real
-    @test GPLikelihoods.expected_loglikelihood(
-        GaussHermiteExpectation(1), GaussianLikelihood(), q_f, zeros(10)
-    ) isa Real
-    @test GPLikelihoods.default_expectation_method(θ -> Normal(0, θ)) isa
-        GaussHermiteExpectation
+    @testset "testing return types and type stability" begin
+        @test GPLikelihoods.expected_loglikelihood(
+            MonteCarloExpectation(1), GaussianLikelihood(), q_f, zeros(10)
+        ) isa Real
+        @test GPLikelihoods.expected_loglikelihood(
+            GaussHermiteExpectation(1), GaussianLikelihood(), q_f, zeros(10)
+        ) isa Real
+        @test GPLikelihoods.default_expectation_method(θ -> Normal(0, θ)) isa
+            GaussHermiteExpectation
+
+        @testset "$(nameof(typeof(lik)))" for lik in likelihoods_to_test
+            # Test that `expected_loglikelihood` is type-stable
+            y = rand.(rng, lik.(zeros(10)))
+            for method in [
+                MonteCarloExpectation(100),
+                GaussHermiteExpectation(100),
+                GPLikelihoods.DefaultExpectationMethod(),
+            ]
+                @test (@inferred expected_loglikelihood(method, lik, q_f, y)) isa Real
+            end
+        end
+    end
 
     # see https://github.com/JuliaGaussianProcesses/ApproximateGPs.jl/issues/82
     @testset "testing Zygote compatibility with GaussHermiteExpectation" begin

From f71f7c8354380090353541779ecb46985432c692 Mon Sep 17 00:00:00 2001
From: Simone Surace
Date: Wed, 22 Jun 2022 18:21:40 +0200
Subject: [PATCH 2/9] Make `expected_loglikelihood` type stable

---
 src/expectations.jl | 28 ++++++++--------------------
 1 file changed, 8 insertions(+), 20 deletions(-)

diff --git a/src/expectations.jl b/src/expectations.jl
index 3de986b..5be8de3 100644
--- a/src/expectations.jl
+++ b/src/expectations.jl
@@ -81,31 +81,19 @@ function expected_loglikelihood(
 end
 
 # Compute the expected_loglikelihood over a collection of observations and marginal distributions
-function expected_loglikelihood(
+function GPLikelihoods.expected_loglikelihood(
     gh::GaussHermiteExpectation, lik, q_f::AbstractVector{<:Normal}, y::AbstractVector
 )
     # Compute the expectation via Gauss-Hermite quadrature
     # using a reparameterisation by change of variable
     # (see e.g. en.wikipedia.org/wiki/Gauss%E2%80%93Hermite_quadrature)
-    return sum(Broadcast.instantiate(
-        Broadcast.broadcasted(y, q_f) do yᵢ, q_fᵢ # Loop over every pair
-            # of marginal distribution q(fᵢ) and observation yᵢ
-            expected_loglikelihood(gh, lik, q_fᵢ, yᵢ)
-        end,
-    ))
-end
-
-# Compute the expected_loglikelihood for one observation and a marginal distributions
-function expected_loglikelihood(gh::GaussHermiteExpectation, lik, q_f::Normal, y)
-    μ = mean(q_f)
-    σ̃ = sqrt2 * std(q_f)
-    return invsqrtπ * sum(Broadcast.instantiate(
-        Broadcast.broadcasted(gh.xs, gh.ws) do x, w # Loop over every
-            # pair of Gauss-Hermite point x with weight w
-            f = σ̃ * x + μ
-            loglikelihood(lik(f), y) * w
-        end,
-    ))
+    # PR #86 introduces eager instead of lazy broadcast over observations
+    # and Gauss-Hermite points and weights in order to make the function
+    # type stable. Compared to other type stable implementations, e.g.
+    # using a custom two-argument pairwise sum, this is faster to
+    # differentiate using Zygote.
+    A = loglikelihood.(lik.(sqrt2 .* std.(q_f) .* gh.xs' .+ mean.(q_f)), y) .* gh.ws'
+    return invsqrtπ * sum(A)
 end
 
 function expected_loglikelihood(

From f547674531f846bbf26ad112434394dd85fb0af7 Mon Sep 17 00:00:00 2001
From: Simone Surace
Date: Wed, 22 Jun 2022 18:21:46 +0200
Subject: [PATCH 3/9] Bump version

---
 Project.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Project.toml b/Project.toml
index ffc57b2..2f44d8e 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "GPLikelihoods"
 uuid = "6031954c-0455-49d7-b3b9-3e1c99afaf40"
 authors = ["JuliaGaussianProcesses Team"]
-version = "0.4.3"
+version = "0.4.4"
 
 [deps]
 ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"

From 9c17fc11dfc9d02b13065c654a0751f7294ac0bb Mon Sep 17 00:00:00 2001
From: Simone Surace
Date: Wed, 22 Jun 2022 18:22:17 +0200
Subject: [PATCH 4/9] Simplify parameter conversions

---
 src/likelihoods/negativebinomial.jl | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/src/likelihoods/negativebinomial.jl b/src/likelihoods/negativebinomial.jl
index 1c2ea40..2f20290 100644
--- a/src/likelihoods/negativebinomial.jl
+++ b/src/likelihoods/negativebinomial.jl
@@ -147,9 +147,10 @@ struct NBParamII{T} <: NBParamMean
 end
 
 function (l::NegativeBinomialLikelihood{<:NBParamII})(f::Real)
-    μ = l.invlink(f)
-    ev = l.params.α * μ
-    return NegativeBinomial(_nb_mean_excessvar_to_r_p(μ, ev)...)
+    # Simplify parameter conversions and avoid splatting
+    r = inv(l.params.α)
+    p = inv(one(l.params.α) + l.params.α * l.invlink(f))
+    return NegativeBinomial(r, p)
 end
 
 """

From 75cd106c07416420ad0d459086ded83607cbaef5 Mon Sep 17 00:00:00 2001
From: Simone Surace
Date: Wed, 22 Jun 2022 18:42:55 +0200
Subject: [PATCH 5/9] Add likelihoods to test

---
 test/expectations.jl | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/test/expectations.jl b/test/expectations.jl
index c207c24..b315433 100644
--- a/test/expectations.jl
+++ b/test/expectations.jl
@@ -3,10 +3,15 @@
     q_f = Normal.(zeros(10), ones(10))
 
     likelihoods_to_test = [
+        BernoulliLikelihood(),
         ExponentialLikelihood(),
         GammaLikelihood(),
-        PoissonLikelihood(),
         GaussianLikelihood(),
+        NegativeBinomialLikelihood(NBParamSuccess(1.)),
+        NegativeBinomialLikelihood(NBParamFailure(1.)),
+        NegativeBinomialLikelihood(NBParamI(1.)),
+        NegativeBinomialLikelihood(NBParamII(1.)),
+        PoissonLikelihood(),
     ]
 

From e6b9afbcb633c012657318bd015caee521b1efea Mon Sep 17 00:00:00 2001
From: Simone Carlo Surace <51025924+simsurace@users.noreply.github.com>
Date: Wed, 22 Jun 2022 18:51:10 +0200
Subject: [PATCH 6/9] Remove unnecessary module prefix

---
 src/expectations.jl | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/expectations.jl b/src/expectations.jl
index 5be8de3..55e904e 100644
--- a/src/expectations.jl
+++ b/src/expectations.jl
@@ -81,7 +81,7 @@
 end
 
 # Compute the expected_loglikelihood over a collection of observations and marginal distributions
-function GPLikelihoods.expected_loglikelihood(
+function expected_loglikelihood(
     gh::GaussHermiteExpectation, lik, q_f::AbstractVector{<:Normal}, y::AbstractVector
 )
     # Compute the expectation via Gauss-Hermite quadrature

From d7eb3a6f53dd0e4d87bbda1d6d49d14771870897 Mon Sep 17 00:00:00 2001
From: Simone Surace
Date: Wed, 22 Jun 2022 18:55:28 +0200
Subject: [PATCH 7/9] Implement reviewdog suggestions

---
 test/expectations.jl | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/test/expectations.jl b/test/expectations.jl
index b315433..9063e7d 100644
--- a/test/expectations.jl
+++ b/test/expectations.jl
@@ -7,10 +7,10 @@
         ExponentialLikelihood(),
         GammaLikelihood(),
         GaussianLikelihood(),
-        NegativeBinomialLikelihood(NBParamSuccess(1.)),
-        NegativeBinomialLikelihood(NBParamFailure(1.)),
-        NegativeBinomialLikelihood(NBParamI(1.)),
-        NegativeBinomialLikelihood(NBParamII(1.)),
+        NegativeBinomialLikelihood(NBParamSuccess(1.0)),
+        NegativeBinomialLikelihood(NBParamFailure(1.0)),
+        NegativeBinomialLikelihood(NBParamI(1.0)),
+        NegativeBinomialLikelihood(NBParamII(1.0)),
         PoissonLikelihood(),
     ]
 
@@ -48,7 +48,9 @@
             end
             y = rand.(rng, lik.(zeros(10)))
 
-            results = map(m -> GPLikelihoods.expected_loglikelihood(m, lik, q_f, y), methods)
+            results = map(
+                m -> GPLikelihoods.expected_loglikelihood(m, lik, q_f, y), methods
+            )
             @test all(x -> isapprox(x, results[end]; atol=1e-6, rtol=1e-3), results)
         end
     end

From 51e411ff8c836d7fec1180d0195e4367f0ec030a Mon Sep 17 00:00:00 2001
From: Simone Carlo Surace <51025924+simsurace@users.noreply.github.com>
Date: Wed, 22 Jun 2022 19:08:44 +0200
Subject: [PATCH 8/9] Unpack alpha

---
 src/likelihoods/negativebinomial.jl | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/likelihoods/negativebinomial.jl b/src/likelihoods/negativebinomial.jl
index 2f20290..f663d4c 100644
--- a/src/likelihoods/negativebinomial.jl
+++ b/src/likelihoods/negativebinomial.jl
@@ -148,8 +148,9 @@ end
 
 function (l::NegativeBinomialLikelihood{<:NBParamII})(f::Real)
     # Simplify parameter conversions and avoid splatting
-    r = inv(l.params.α)
-    p = inv(one(l.params.α) + l.params.α * l.invlink(f))
+    α = l.params.α
+    r = inv(α)
+    p = inv(one(α) + α * l.invlink(f))
     return NegativeBinomial(r, p)
 end
 

From 82f58e048ae8fb07487f2397020c7e4bd15583b2 Mon Sep 17 00:00:00 2001
From: Simone Carlo Surace <51025924+simsurace@users.noreply.github.com>
Date: Tue, 5 Jul 2022 16:52:36 +0200
Subject: [PATCH 9/9] Correct the PR number

---
 src/expectations.jl | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/expectations.jl b/src/expectations.jl
index 55e904e..85db8c3 100644
--- a/src/expectations.jl
+++ b/src/expectations.jl
@@ -87,7 +87,7 @@ function expected_loglikelihood(
     # Compute the expectation via Gauss-Hermite quadrature
     # using a reparameterisation by change of variable
     # (see e.g. en.wikipedia.org/wiki/Gauss%E2%80%93Hermite_quadrature)
-    # PR #86 introduces eager instead of lazy broadcast over observations
+    # PR #90 introduces eager instead of lazy broadcast over observations
     # and Gauss-Hermite points and weights in order to make the function
     # type stable. Compared to other type stable implementations, e.g.
     # using a custom two-argument pairwise sum, this is faster to
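
Note on the quadrature rewrite in PATCH 2/9: the eager broadcast materialises a length(y) × n matrix of log-likelihood evaluations, so every intermediate is concretely typed and the return type of `sum` is inferable, unlike the lazy `Broadcast.broadcasted` closure it replaces. The sketch below reproduces the same computation outside the package so it can be checked in isolation with `@inferred`. It is a minimal illustration under assumptions, not GPLikelihoods code: the helper name `gauss_hermite_expectation` and the exp-link Poisson closure `lik` are made up for the example, which relies only on Distributions.jl, FastGaussQuadrature.jl, and Test.

    using Distributions, FastGaussQuadrature, Test

    # Gauss-Hermite approximation of Σᵢ E_{q(fᵢ)}[log p(yᵢ | fᵢ)] with q(fᵢ) = Normal(μᵢ, σᵢ):
    # after the change of variable f = √2 σ x + μ, the N(μ, σ²) density matches the e^{-x²}
    # weight of the rule, giving E[log p(y|f)] ≈ (1/√π) Σⱼ wⱼ log p(y | √2 σ xⱼ + μ).
    function gauss_hermite_expectation(lik, q_f::AbstractVector{<:Normal}, y::AbstractVector; n=100)
        xs, ws = gausshermite(n)  # nodes xⱼ and weights wⱼ of the n-point rule
        # Eager broadcast over a length(y) × n matrix; every element is concretely typed,
        # which is what makes the result of `sum` inferable.
        A = loglikelihood.(lik.(sqrt(2) .* std.(q_f) .* xs' .+ mean.(q_f)), y) .* ws'
        return sum(A) / sqrt(π)
    end

    lik(f) = Poisson(exp(f))            # hypothetical exp-link Poisson stand-in for the demo
    q_f = Normal.(zeros(10), ones(10))
    y = rand.(lik.(zeros(10)))
    @inferred gauss_hermite_expectation(lik, q_f, y)  # the return type should be inferred as Float64

A second, smaller check concerns the NBParamII conversion simplified in PATCH 4/9 and PATCH 8/9: with r = 1/α and p = 1/(1 + α·μ), the resulting NegativeBinomial(r, p) keeps μ as its mean and has variance μ + αμ², i.e. the usual quadratic-overdispersion (NB2) form. The numbers below are arbitrary illustration values, not taken from the package tests.

    using Distributions
    α, μ = 0.7, 2.3
    d = NegativeBinomial(inv(α), inv(one(α) + α * μ))
    mean(d) ≈ μ           # true: the mean parameter is preserved
    var(d) ≈ μ + α * μ^2  # true: variance grows quadratically with the mean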