diff --git a/demo/Distributions.hs b/demo/Distributions.hs
new file mode 100644
index 0000000..2b709ec
--- /dev/null
+++ b/demo/Distributions.hs
@@ -0,0 +1,39 @@
+module Distributions where
+
+import Sampler
+
+import qualified System.Random.MWC as MWC
+
+import Control.Monad
+import Control.Monad.Reader
+
+-- | Prob in range [0,1]
+type Prob = Double
+
+-- | Uniform standard continuous
+random :: Sampler Double
+random = Sampler $ ask >>= MWC.uniform
+
+uniform :: Double -> Double -> Sampler Double
+uniform a b | a > b = uniform b a
+uniform a b | a <= b = (\x -> (b - a) * x + a) <$> random
+
+bernoulli :: Prob -> Sampler Bool
+bernoulli p = (<= p) <$> random
+
+binomial :: Int -> Prob -> Sampler Int
+binomial n p = length . filter id <$> replicateM n (bernoulli p)
+
+-- | Box-Muller transform; σ is the standard deviation
+gaussian :: Double -> Double -> Sampler Double
+gaussian μ σ =
+  (\u1 u2 -> μ + σ * sqrt (-2 * log u1) * cos (2 * pi * u2))
+  <$> random <*> random
+
+-- | Tukey's g-and-k distribution, via its quantile transform of a standard normal
+gk :: Double -> Double -> Double -> Double -> Sampler Double
+gk a b g k = let c = 0.8 in
+  (\z -> a
+    + b
+    * (1 + c * tanh (g * z / 2))
+    * z
+    * (1 + z**2)**k)
+  <$> gaussian 0 1
diff --git a/demo/Examples.hs b/demo/Examples.hs
new file mode 100644
index 0000000..73afaff
--- /dev/null
+++ b/demo/Examples.hs
@@ -0,0 +1,68 @@
+{-# LANGUAGE DuplicateRecordFields #-}
+
+module Examples where
+
+import Distributions
+import Rejection
+import Metropolis
+import Sampler
+
+import qualified System.Random.MWC as MWC
+
+import Control.Parallel.Strategies
+import Control.Monad
+
+-- run the example named on stdin, for an external script to graph the results
+main :: IO ()
+main = do
+  model <- getLine
+  case model of
+    "weibull" -> approxWeibull 1 1.8 >>= print
+    "gk" -> abcGK >>= \pars -> print (fst <$> pars) >> print (snd <$> pars)
+    _ -> error "unknown model"
+
+approxWeibull :: Double -> Double -> IO [Double]
+approxWeibull λ k = let
+  kernel = RSMC
+    { prior = uniform 0 5
+    , priorDensity = const 2
+    , targetDensity = \x -> (k / λ) * (x / λ)**(k-1) * exp (-(x / λ)**k)
+    }
+  in MWC.createSystemRandom >>= sample (rs 10000 kernel)
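+
+-- ABC for Tukey's g-and-k distribution: recover (g, k) from observations that
+-- were generated once with known parameters (see the comment below), running
+-- six Metropolis-Hastings chains and concatenating their samples.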
+abcGK :: IO [(Double, Double)]
+abcGK = let
+  -- y <- gk 0 1 (-0.3) 0.8
+  y :: [Double]
+  y = [-0.5104415370506095,-0.7036434632465107,-0.2668368363498056,0.6815278539148982,1.8065750007642016,-1.1482634213252663,-0.10660853352738332,1.2469280832101104,0.7656001636856136,-0.7320043142261864,-4.028497402676845,1.0387799211913844,-1.8660457828367103,-4.0299248430163175,-4.741513290215539,0.6530403315829644,0.24479825013251938,2.6113731175552144,-0.25192550180978834,-5.460086525801049e-2,1.1772787229049015,-5.169184991582197,0.2460051914736727,2.850651742166059,2.803350122147847,-2.5138331249271295,-0.16021357084783153,7.95598130026318e-2,0.1579509935011216,1.2485569713553373,-0.7026650541206256,-6.356494098001902,-1.1067147258067545,-0.3656293496022067,2.5930550010502995,1.4006864549753912,1.3287563023623362e-2,-0.4934879835848968,-0.1152751265700261,4.124238713981147,-0.3274281061664352,17.815877356198563,2.439324175744774,-0.3556365214539682,4.184379956930912,0.5211002573326591,-7.544726539658729,1.1432160816505454,0.10474592715483252,2.2598574690278124,-5.982209564872222e-2,0.3970662237111481,-0.1226321303550215,-2.833925560844585,5.606732231516373,0.4775251542596893,5.521384007685723,-1.3339762548390766,-0.5803242310454019,2.036198920878609,1.011632562481338,-6.607706277661513e-2,3.8510553864065047,-0.11955527847164106,1.6288674013209972,-1.197855950997007,-0.9812973027028926,-3.3243812608680865,-0.9287618414540904,0.6247003134293596,-0.3608071093058975,-1.6574586764389634,0.8422068782827897,1.2963993547489352,3.210441947634901e-2,1.0709150621786345,0.24485693966354982,-1.682055190609811,-1.151283428155862,7.170554007117898e-2,4.8061093617134505,-1.5299907999218874,-2.9428392355646134,0.48157705348963215,2.5763918848022368e-2,0.7333915815410592,-15.221663665916477,-1.202260148652221,-0.2840124914985552,2.3367624660280866,-0.7582762004885446,-4.103347263586696,1.1093339134558795,-1.1724787237984193,0.2366040973823823,-2.1128076576945514,-0.8438843816125938,3.0540896846195116,2.5287449193874108,1.4229384570569787,-1.6480220917225217,-4.134782467746005,0.39280543241777094,0.8511886602772839,2.7602672336570637,0.43392287493375675,-0.4682323987521255,0.7613470128077908,-11.930262800184837,-6.431491814607961,3.0805223447668006,1.9298178856672217,3.2534332707055844,4.065319796389563,-1.7722270288855324,-0.5834553617246786,2.5598942267685425,3.8319128595273138,-1.2609803136050555,-1.5906702843789262,8.158738166147467e-2,-0.7764482576234684,0.6998190710622543,-1.1476144230138676,-1.370813552369474,2.5052687698461074,-0.5323437463478694,0.759077074826166,-0.11759656272783872,-0.24720087400075288,3.72415007937246,1.7881388423706739,1.149159194536326,0.3400441025811831,-0.6689134211264488,0.7618204570110713,-0.7033980200595047,0.2540934346295764,-5.240906405876261,0.41994043930730424,2.2754579486301916,-12.254575244919574,-0.25598032216004896,1.4874270863529166,1.2377717637918372e-3,-0.27220660238528827,0.23363929554286147,-0.574466434237023,-4.759971356174461,0.24492525256681869]
+  -- summary statistics: mean, standard deviation, skewness, kurtosis
+  summarise x = let
+    n = length x
+    mean = sum x / fromIntegral n
+    sd = sqrt (sum (map (\v -> (v - mean)**2) x) / (fromIntegral n - 1))
+    in ( mean
+       , sd
+       , (1 / (fromIntegral n * sd**3)) * sum (map (\v -> (v - mean)**3) x)
+       , (1 / (fromIntegral n * sd**4)) * sum (map (\v -> (v - mean)**4) x)
+       )
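+  -- M-H ABC kernel: indicator box prior over (g, k) in [-2, 0] x [0, 2],
+  -- symmetric Gaussian random-walk proposals, and moment summaries matched
+  -- to within tolerance 1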
+  kernel = MABC
+    { observations = summarise y
+    , model = \(g, k) -> summarise <$> replicateM 50 (gk 0 1 g k)
+    , prior = \(g, k) -> (if -2 <= g && g <= 0 then 1 else 0) * (if 0 <= k && k <= 2 then 1 else 0)
+    , transition = \(g, k) -> (,) <$> gaussian g 2.36 <*> gaussian k 2.36
+    , distance = \(x0, x1, x2, x3) (y0, y1, y2, y3) ->
+        (x0 - y0)**2 + (x1 - y1)**2 + (x2 - y2)**2 + (x3 - y3)**2
+    , tolerance = 1.0
+    }
+  in do
+    gen <- MWC.createSystemRandom
+    (g0:g1:g2:g3:g4:g5:_) <- sample (replicateM 6 $ uniform (-2) 0) gen
+    (k0:k1:k2:k3:k4:k5:_) <- sample (replicateM 6 $ uniform 0 2) gen
+    -- NB: rpar only sparks these IO actions to weak head normal form; it does
+    -- not actually run them in parallel (cf. "Possible Developments" slide)
+    runEval $ do
+      pars0 <- rpar (sample (mh 10000 kernel (g0,k0)) gen)
+      pars1 <- rpar (sample (mh 10000 kernel (g1,k1)) gen)
+      pars2 <- rpar (sample (mh 10000 kernel (g2,k2)) gen)
+      pars3 <- rpar (sample (mh 10000 kernel (g3,k3)) gen)
+      pars4 <- rpar (sample (mh 10000 kernel (g4,k4)) gen)
+      pars5 <- rpar (sample (mh 10000 kernel (g5,k5)) gen)
+      return $ do
+        a <- pars0 ; b <- pars1 ; c <- pars2 ; d <- pars3 ; e <- pars4 ; f <- pars5
+        return $ a <> b <> c <> d <> e <> f
diff --git a/demo/Metropolis.hs b/demo/Metropolis.hs
new file mode 100644
index 0000000..7963977
--- /dev/null
+++ b/demo/Metropolis.hs
@@ -0,0 +1,43 @@
+-- | Metropolis Sampling
+
+{-# LANGUAGE FunctionalDependencies #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE InstanceSigs #-}
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE DuplicateRecordFields #-}
+
+module Metropolis where
+
+import Sampler
+import Distributions
+
+mh :: MHKernel k a => Int -> k -> a -> Sampler [a]
+mh 0 _ _ = return []
+mh n k x_0 = do
+  x_1 <- k `perturb` x_0
+  a <- accepts k x_0 x_1
+  if a
+    then (x_1:) <$> mh (n-1) k x_1
+    else (x_0:) <$> mh (n-1) k x_0
+
+class MHKernel k a | k -> a where
+  perturb :: k -> a -> Sampler a
+  accepts :: k -> a -> a -> Sampler Bool
+
+data MABC θ ω = MABC
+  { observations :: ω
+  , model :: θ -> Sampler ω
+  , prior :: θ -> Double -- ^ density
+  , transition :: θ -> Sampler θ -- ^ assumed symmetrical
+  , distance :: ω -> ω -> Double
+  , tolerance :: Double
+  }
+
+instance MHKernel (MABC θ ω) θ where
+  perturb :: MABC θ ω -> θ -> Sampler θ
+  perturb MABC{..} = transition
+
+  accepts :: MABC θ ω -> θ -> θ -> Sampler Bool
+  accepts MABC{..} θ θ' = do
+    x <- model θ'
+    if distance x observations <= tolerance
+      then bernoulli $ min 1 (prior θ' / prior θ)
+      else return False
diff --git a/demo/Rejection.hs b/demo/Rejection.hs
new file mode 100644
index 0000000..6479b07
--- /dev/null
+++ b/demo/Rejection.hs
@@ -0,0 +1,61 @@
+-- | Rejection Sampling
+
+{-# LANGUAGE FunctionalDependencies #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE InstanceSigs #-}
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE DuplicateRecordFields #-}
+
+module Rejection where
+
+import Sampler
+import Distributions
+
+class RSKernel k a | k -> a where
+  propose :: k -> Sampler a
+  accepts :: k -> a -> Sampler Bool
+
+rs :: RSKernel k a => Int -> k -> Sampler [a]
+rs 0 _ = return []
+rs n k = do
+  x <- propose k
+  a <- k `accepts` x
+  if a
+    then (x:) <$> rs (n-1) k
+    else rs (n-1) k
+
+data RSMC ω = RSMC
+  { prior :: Sampler ω
+  , priorDensity :: ω -> Double -- ^ scaled by M
+  , targetDensity :: ω -> Double
+  }
+
+instance RSKernel (RSMC ω) ω where
+  propose :: RSMC ω -> Sampler ω
+  propose RSMC{..} = prior
+
+  accepts :: RSMC ω -> ω -> Sampler Bool
+  accepts RSMC{..} x = let
+    α = targetDensity x / priorDensity x
+    in (bernoulli $ min 1 α)
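+
+-- | Rejection-ABC kernel: a θ proposed from the prior is accepted when the
+-- data simulated from it lands within `tolerance` of the observations.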
+data RSABC θ ω = RSABC
+  { observations :: ω
+  , model :: θ -> Sampler ω
+  , prior :: Sampler θ
+  , distance :: ω -> ω -> Double
+  , tolerance :: Double
+  }
+
+instance RSKernel (RSABC θ ω) θ where
+  propose :: RSABC θ ω -> Sampler θ
+  propose RSABC{..} = prior
+
+  accepts :: RSABC θ ω -> θ -> Sampler Bool
+  accepts RSABC{..} θ = do
+    x <- model θ
+    return $ distance x observations <= tolerance
diff --git a/demo/Sampler.hs b/demo/Sampler.hs
new file mode 100644
index 0000000..eb77cd7
--- /dev/null
+++ b/demo/Sampler.hs
@@ -0,0 +1,15 @@
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+
+module Sampler where
+
+import qualified System.Random.MWC as MWC
+
+import Control.Monad.Reader
+
+type Gen = MWC.GenIO
+
+newtype Sampler a = Sampler { runSampler :: ReaderT Gen IO a }
+  deriving (Functor, Applicative, Monad, MonadIO)
+
+sample :: Sampler a -> Gen -> IO a
+sample = runReaderT . runSampler
diff --git a/demo/visualiser.py b/demo/visualiser.py
new file mode 100755
index 0000000..4d60181
--- /dev/null
+++ b/demo/visualiser.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python3
+
+import sys
+import subprocess
+import matplotlib.pyplot as plt
+
+def main(model):
+    if model == "weibull":
+        result = subprocess.run(["stack", "runhaskell", "Examples.hs"], input="weibull\n".encode(), stdout=subprocess.PIPE).stdout.decode()
+        result = [float(i) for i in result[1:-2].split(",")]  # parse the printed Haskell list
+        plt.hist(result, bins=35, range=(-1, 8))
+        plt.show()
+    elif model == "gk":
+        result = subprocess.run(["stack", "runhaskell", "Examples.hs"], input="gk\n".encode(), stdout=subprocess.PIPE).stdout.decode()
+        result = result[0:-1].split("\n")  # two printed lists: g samples, then k samples
+        g = [float(i) for i in result[0][1:-1].split(",")]
+        k = [float(i) for i in result[1][1:-1].split(",")]
+        plt.hist(g, bins=35, range=(-3, 3), alpha=0.8)
+        plt.hist(k, bins=35, range=(-3, 3), alpha=0.8)
+        plt.show()
+    else:
+        print("bad argument")
+
+if __name__ == '__main__':
+    main(sys.argv[1])
diff --git a/presentations/inference.org b/presentations/inference.org
index 973be46..87efce4 100644
--- a/presentations/inference.org
+++ b/presentations/inference.org
@@ -4,7 +4,8 @@
 #+options: broken-links:nil c:nil creator:nil d:(not "LOGBOOK") date:nil e:t
 #+options: email:nil f:t inline:t num:t p:nil pri:nil prop:nil stat:t tags:t
 #+options: tasks:t tex:t timestamp:t title:t toc:nil todo:t |:t
-#+title: Inference in Haskell for ABC
+#+title: Inference in Haskell for Approximate Bayesian Computation
+#+beamer_header: \title[Inference in Haskell for ABC]{Inference in Haskell for Approximate Bayesian Computation}
 #+author: Piotr Kozicki
 #+email: piotr.kozicki.2022@bristol.ac.uk
 #+language: en
@@ -17,7 +18,7 @@
 #+latex_compiler: xelatex
 #+latex_header: \usepackage{fontspec}
 #+latex_header: \setsansfont{Fira Sans}
-#+latex_header: \setmonofont{Fira Code}[Contextuals=Alternate]
+#+latex_header: \setmonofont{Fira Mono}
 #+latex_header: \usepackage{pgfplots}
 #+columns: %45ITEM %10BEAMER_env(Env) %10BEAMER_act(Act) %4BEAMER_col(Col) %8BEAMER_opt(Opt)
 #+beamer_theme: CambridgeUS
@@ -49,6 +50,10 @@ Often rewritten with hypotheses \theta and evidence \(\mathbf x\)
    \overbrace{p(\theta)}^{\text{prior}}.
 \]
 
+- The /posterior/ is the target
+- The /likelihood/ is the model
+- The /prior/ is an assumption
+
 ** Example
 
 Suppose \(\mathbf x = (3~1~7~5~1~2~3~4~3~2)\) comes from \(Poi(\theta)\)
@@ -102,19 +107,24 @@ assuming nothing about \(\theta\).
 
 ** Motivation
 
 In this example we used a simple distribution.
-In reality, the likelihood may be unavailable or intractable.
+In reality, the likelihood may be unavailable (e.g. Tukey's g-and-k distribution[fn:1]) or intractable (e.g. many convolutions).
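+
+The g-and-k distribution, for instance, is defined by a quantile transform of a
+standard normal \(z\) (with \(c = 0.8\) fixed), as in the demo's =gk= sampler:
+\[ Q(z) = a + b\left(1 + c\tanh\tfrac{gz}{2}\right) z \left(1 + z^2\right)^k \]
+It is therefore easy to simulate from, but its density has no closed form.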
 
 #+beamer: \pause
 
 \hfill
 
-To avoid this problem, we avoid likelihoods all together and instead consider
-only generative models.
+Avoid likelihoods! Instead use simulation with numerical and Monte-Carlo methods.
+
+\hfill
+
+Write a program to simulate data. Requires a source of randomness.
 
 #+beamer: \pause
 
 #+begin_src haskell
-newtype Sampler ω = Sampler {runSampler :: ReaderT Gen IO ω}
+type Gen = MWC.GenIO
+
+newtype Sampler ω = Sampler { runSampler :: ReaderT Gen IO ω }
   deriving (Functor, Applicative, Monad)
 
 sample :: Sampler ω -> Gen -> IO ω
@@ -123,40 +133,37 @@ sample = runReaderT . runSampler
 
 ** Example Samplers
 
-We can define our own Samplers by transforming existing Samplers.
+We can define the base sampler =random= straight from =MWC=:
 
 #+begin_src haskell
-uniform :: Double -> Double -> Sampler Double
-uniform a b | a <= b = (\x -> (b - a) * x + a) <$> random
+random :: Sampler Double
+random = Sampler $ ask >>= MWC.uniform -- U(0, 1)
 #+end_src
 
 #+beamer: \pause
 
-#+begin_src haskell
-bernoulli :: Prob -> Sampler Bool
-bernoulli p = (<=p) <$> uniform 0 1
-#+end_src
-
-#+beamer: \pause
+Then we can reuse existing samplers to make new distributions:
 
 #+begin_src haskell
-binomial :: Int -> Prob -> Sampler Int
-binomial n p = length . filter id
-  <$> replicateM n (bernoulli p)
-#+end_src
+uniform :: Double -> Double -> Sampler Double
+uniform a b | a <= b = (\x -> (b - a) * x + a) <$> random
 
-#+beamer: \pause
+bernoulli :: Double -> Sampler Bool
+bernoulli p | 0 <= p && p <= 1 = (<=p) <$> random
 
-#+begin_src haskell
 gaussian :: Double -> Double -> Sampler Double
-gaussian μ σ² = (\u₁ u₂ ->
-  μ + σ² * sqrt (-2 * log u₁) * cos (2 * pi * u₂))
+gaussian μ σ = (\u1 u2 ->
+  μ + σ * sqrt (-2 * log u1) * cos (2 * pi * u2))
   <$> random <*> random
 #+end_src
 
-** ABC Description
+** Approximate Bayesian Computation
+
+ABC is a /likelihood-free/ method usable with =Sampler=.
+
+\hfill
 
-To be able to use ABC we need:
+To use ABC we need:
 
 #+beamer: \pause
 - A generative model \(\mu : \theta \to \texttt{Sampler}~\omega\)
 #+beamer: \pause
@@ -181,27 +188,24 @@ to apply a weight to \(\theta_0\).
 
 \hfill
 
 To approximate the posterior, this is repeated many times for different
-\(\theta\) using a Monte Carlo method.
-
+\(\theta\) using a Monte-Carlo method.
 
 * Rejection Sampling
 
 ** Rejection Sampling
 
-A simple Monte-Carlo sampling method.
+A simple Monte-Carlo method.
 
 Sample \(\mathbf x\) from the prior and see how "good" it is.
 
-Proposals are taken from a prior, then we have to decide whether or not to accept them.
+Either \(\mathbf x\) is fully accepted or not, i.e. no weights besides 0 and 1.
 
 #+beamer: \pause
 
-#+begin_src haskell
-class RSKernel k a | k -> a where
-  propose :: k -> Sampler a
-  accepts :: k -> a -> Sampler Bool
-#+end_src
+\hfill
 
-#+beamer: \pause
+We abstract the key operations into a "handler" class, =RSKernel=.
+
+Now the algorithm can be written /generically/:
 
 #+begin_src haskell
-rs :: RSKernel k a => Int -> k -> IO [a]
+rs :: RSKernel k a => Int -> k -> Sampler [a]
 rs 0 _ = return []
 rs n k = do
   x <- propose k
   a <- k `accepts` x
   if a
     then (x:) <$> rs (n-1) k
     else rs (n-1) k
 #+end_src
 
 ** Distribution Approximation
 
 To sample from a target density \(f\), we provide a prior \(g\) we can sample from directly such that \(f \leq M \cdot g\).
-
 #+begin_src haskell
 data RSMC ω = RSMC
   { prior :: Sampler ω
   , priorDensity :: ω -> Double -- ^ scaled by M
   , targetDensity :: ω -> Double }
 #+end_src
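+
+For example, take a Weibull(1, 1.8) target with a \(U(0, 5)\) proposal (a sketch
+of the demo's =approxWeibull= kernel; the constant \(M \cdot g = 2\) bounds the
+target density):
+
+#+begin_src haskell
+weibullKernel :: RSMC Double
+weibullKernel = RSMC
+  { prior = uniform 0 5
+  , priorDensity = const 2 -- M = 10, g = 1/5 on [0, 5]
+  , targetDensity = \x -> 1.8 * x**0.8 * exp (-x**1.8) }
+#+end_src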
 
 #+beamer: \pause
 
 #+begin_src haskell
 instance RSKernel (RSMC ω) ω where
   propose :: RSMC ω -> Sampler ω
   propose RSMC{..} = prior
 
   accepts :: RSMC ω -> ω -> Sampler Bool
   accepts RSMC{..} x = let
     α = targetDensity x / priorDensity x
     in bernoulli $ min 1 α
 #+end_src
 
 ** Approximate Bayesian Computation
 
+To do ABC via rejection sampling, we just need to provide a handler:
+
 #+begin_src haskell
 data RSABC θ ω = RSABC
   { observations :: ω
   , model :: θ -> Sampler ω
   , prior :: Sampler θ }
-#+end_src
-
-#+beamer: \pause
-
-#+begin_src haskell
-instance RSKernel (RSABC θ ω) θ where
+instance Eq ω => RSKernel (RSABC θ ω) θ where
   propose :: RSABC θ ω -> Sampler θ
   propose RSABC{..} = prior
 
   accepts :: RSABC θ ω -> θ -> Sampler Bool
   accepts RSABC{..} θ = do
     x <- model θ
+    return $ x == observations
 #+end_src
 
-* Improvements
+* Necessary Improvements
 
 ** Tolerance
 
 To increase the acceptance rate, we usually use a weaker condition:
 
 #+begin_src haskell
 data RSABC θ ω = RSABC
   { distance :: ω -> ω -> Double
-  , tolerance :: Double
+  , ϵ :: Double
   , ... }
 
 instance RSKernel (RSABC θ ω) θ where
-  accepts :: RSABC θ ω -> θ -> Sampler Bool
+  ...
   accepts RSABC{..} θ = do
     x <- model θ
-    return $ x `distance` observations <= tolerance
+    return $ x `distance` observations <= ϵ
 #+end_src
 
 #+beamer: \pause
 
-A sensible =distance= might be a sum of [weighted] squared distances.
+This is only /strictly/ necessary for continuous distributions.
 
-** Summary Statistics
+** Dimension Reduction
 
-We rarely compare only one sample at a time, so usually the sample space \(\omega\) will have many dimensions.
-
-#+beamer: \pause
+We rarely use only one observation, so \(\mathbf y\) is a long vector.
 
-This introduces the "curse of dimensionality", which here means any two samples we generate might seem to be far apart.
+\hfill
 
-#+beamer: \pause
+We are affected by the "Curse of Dimensionality": \(\mathbf x\) and \(\mathbf y\) are almost always far apart.
 
 \hfill
 
-We try to solve this by replacing raw data with summary statistics[fn:2].
+Solve this with /dimension reduction methods/[fn:2], e.g. *summary statistics*.
 
-#+beamer: \pause
+** Summary Statistics
 
-Ideally we have /sufficient/ summary statistics, i.e. \(S\) such that \(p(\theta | S(y)) = p(\theta | y)\).
+Ideally, the summary \(S\) is "sufficient", i.e. \(p(\theta|S(\mathbf y)) = p(\theta|\mathbf y)\).
 
-#+beamer: \pause
+\hfill
 
-But this almost never happens, so we instead look for "informative" summaries[fn:1], e.g. quantiles are usually fine.
+Sufficient summaries are hard to find. In practice \(S\) is "informative".
+
+\hfill
+
+Often \(S\) maps raw data to e.g. mean, variance, quantiles...
 
 * Metropolis-Hastings
 
 ** Metropolis-Hastings
 
-An improvement on rejection sampling. Stay near to accepted samples by carrying
-out a /random walk/ over values of \(\theta\), rather than resampling from the
-prior.
+An improvement on rejection sampling, by staying near accepted samples.
+
+Since we stay in "good" regions and move out of "bad" regions, we approximate the posterior sooner.
 
 #+beamer: \pause
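+
+With a symmetric transition kernel, the Metropolis-Hastings acceptance
+probability reduces to
+\[ \alpha = \min\left(1, \frac{p(\theta')}{p(\theta)}\right), \]
+which the ABC kernel below applies to the prior density, and only when the
+simulated data falls within \(\epsilon\) of the observations.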
 
 #+begin_src haskell
 class MHKernel k a | k -> a where
   perturb :: k -> a -> Sampler a
   accepts :: k -> a -> a -> Sampler Bool
-#+end_src
-
-#+beamer: \pause
-
-#+begin_src haskell
+
 mh :: MHKernel k a => Int -> k -> a -> Sampler [a]
 mh 0 _ _ = return []
-mh n k x_0 = do
-  x_1 <- k `perturb` x_0
-  a <- accepts k x_0 x_1
+mh n k prev = do
+  proposed <- k `perturb` prev
+  a <- accepts k prev proposed
   if a
-    then (x_1:) <$> mh (n-1) k x_1
-    else (x_0:) <$> mh (n-1) k x_0
+    then (proposed:) <$> mh (n-1) k proposed
+    else (prev:) <$> mh (n-1) k prev
 #+end_src
 
 ** Approximate Bayesian Computation
 
 #+begin_src haskell
 data MHABC θ ω = MHABC
   { observations :: ω
   , model :: θ -> Sampler ω
-  , prior :: θ -> Double -- ^ density
+  , priorD :: θ -> Double -- ^ density
   , transition :: θ -> Sampler θ -- ^ assumed symmetrical
-  , distance :: ω -> ω -> Double
-  , tolerance :: Double }
-#+end_src
-
-#+beamer: \pause
-
-#+begin_src haskell
+  , distance :: ω -> ω -> Double
+  , ϵ :: Double }
+
 instance MHKernel (MHABC θ ω) θ where
   perturb :: MHABC θ ω -> θ -> Sampler θ
   perturb MHABC{..} = transition
 
   accepts :: MHABC θ ω -> θ -> θ -> Sampler Bool
   accepts MHABC{..} θ θ' = do
     x <- model θ'
-    if distance x observations <= tolerance
-      then bernoulli $ min 1 (prior θ' / prior θ)
+    if x `distance` observations <= ϵ
+      then bernoulli $ min 1 (priorD θ' / priorD θ)
       else return False
 #+end_src
 
-** Possible Improvements
-
-1. A tuning system, for adaptive Metropolis
-2. Multiple particles for MCMC
-
-If we start on a bad point, we are not likely to move. Adaptive Metropolis may
-change that by increasing/decreasing the variance.
-
-Having multiple particles should also offset this problem.
-
-* Reading
-** Reading
+* Conclusion
+** Summary
+
+- Implemented Approximate Bayesian Computation, particularly for the Tukey g-and-k distribution.
+
+- Learnt some Monte-Carlo methods and tried to implement them generally.
+
+** Possible Developments
+
+- Parallelisation[fn:4]
+
+\hfill
+
+- Adaptive Metropolis
+- Particle Filter and other algorithms
+- Allow p.d.f.-based distributions[fn:3]
+
+\hfill
+
+- Use a kernel density estimate with the approximated posterior
+- Find peaks with AD/stochastic gradient ascent
+
+* Further Reading
+** Further Reading
 
 - [[https://www.pnas.org/doi/10.1073/pnas.0306899100][Marjoram et al]]
-- [[https://www.maths.lu.se/fileadmin/maths/forskning_research/InferPartObsProcess/abc_slides.pdf][Umberto Picchini's slides on ABC]]
+- [[https://www.maths.lu.se/fileadmin/maths/forskning_research/InferPartObsProcess/abc_slides.pdf][Umberto Picchini's slides]]
 - [[https://arxiv.org/abs/1004.1112][Fearnhead and Prangle --- Constructing Summary Statistics]]
 - [[https://projecteuclid.org/journals/statistical-science/volume-28/issue-2/A-Comparative-Review-of-Dimension-Reduction-Methods-in-Approximate-Bayesian/10.1214/12-STS406.full][Blum et al --- Comparative Review of Dimension Reduction Methods]]
 - [[https://link.springer.com/article/10.1007/s11222-022-10092-4][Drovandi and Frazier --- Comparison with Full Data Methods]]
 
 * Footnotes
 
-[fn:2] Some (recent?) approaches to full-data ABC
-[fn:1] Key problem in ABC!
+[fn:1] Like a Gaussian distribution, with extra skew and kurtosis parameters.
+
+[fn:3] Possible via Monte-Carlo methods
+
+[fn:2] Some full-data approaches, with recent interest
+
+[fn:4] Attempts were made... anyone?