refactor: sample conditional test.
janfb committed Aug 1, 2024
1 parent fc434b7 commit 77f6017
Showing 1 changed file with 20 additions and 12 deletions.
tests/linearGaussian_snpe_test.py
@@ -493,39 +493,45 @@ def simulator(theta):
 @pytest.mark.mcmc
 def test_sample_conditional(mcmc_params_accurate: dict):
     """
-    Test whether sampling from the conditional gives the same results as evaluating.
+    Test whether sampling from the conditional gives the same results as
+    evaluating.
 
-    This compares samples that get smoothed with a Gaussian kde to evaluating the
-    conditional log-probability with `eval_conditional_density`.
+    This compares samples that get smoothed with a Gaussian kde to evaluating
+    the conditional log-probability with `eval_conditional_density`.
 
-    `eval_conditional_density` is itself tested in `sbiutils_test.py`. Here, we use
-    a bimodal posterior to test the conditional.
+    `eval_conditional_density` is itself tested in `sbiutils_test.py`. Here, we
+    use a bimodal posterior to test the conditional.
 
+    NOTE: The comparison between conditional log_probs obtained from the MCMC
+    posterior and from analysis.eval_conditional_density can be gamed by
+    underfitting the posterior estimator, i.e., by using a small number of
+    simulations.
     """
 
     num_dim = 3
     dim_to_sample_1 = 0
     dim_to_sample_2 = 2
-    num_simulations = 5000
+    num_simulations = 5500
     num_conditional_samples = 1000
     num_conditions = 50
 
     x_o = zeros(1, num_dim)
 
     likelihood_shift = -1.0 * ones(num_dim)
-    likelihood_cov = 0.1 * eye(num_dim)
+    likelihood_cov = 0.05 * eye(num_dim)
 
     prior = utils.BoxUniform(-2.0 * ones(num_dim), 2.0 * ones(num_dim))
 
-    # TODO: janfb does not see how this setup results in a bi-model posterior.
     def simulator(theta):
-        if torch.rand(1) > 0.5:
-            return linear_gaussian(theta, likelihood_shift, likelihood_cov)
-        else:
-            return linear_gaussian(theta, -likelihood_shift, likelihood_cov)
+        batch_size, _ = theta.shape
+        # create -1 1 mask for bimodality
+        mask = torch.ones(batch_size, 1)
+        # set mask to -1 randomly across the batch
+        mask = mask * 2 * (torch.rand(batch_size, 1) > 0.5) - 1
+
+        # Sample bi-modally by applying a 1-(-1) mask to the likelihood shift.
+        return linear_gaussian(theta, mask * likelihood_shift, likelihood_cov)
 
     # Test whether SNPE works properly with structured z-scoring.
     net = posterior_nn("maf", z_score_x="structured", hidden_features=20)
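Note (not part of the commit): the refactor replaces a single per-call coin flip, which pushed an entire simulation batch into the same mode, with a per-sample ±1 mask, so every batch covers both modes. A minimal standalone sketch of the mask trick, assuming a stand-in linear_gaussian that draws x ~ N(theta + shift, cov):

    import torch

    def linear_gaussian(theta, shift, cov):
        # Stand-in for sbi's linear_gaussian: draw x ~ N(theta + shift, cov).
        noise = torch.randn_like(theta) @ torch.linalg.cholesky(cov).T
        return theta + shift + noise

    num_dim, batch_size = 3, 1000
    likelihood_shift = -1.0 * torch.ones(num_dim)
    likelihood_cov = 0.05 * torch.eye(num_dim)
    theta = torch.zeros(batch_size, num_dim)

    # Per-sample +/-1 mask: roughly half of each batch lands in each mode.
    mask = torch.ones(batch_size, 1) * 2 * (torch.rand(batch_size, 1) > 0.5) - 1
    x = linear_gaussian(theta, mask * likelihood_shift, likelihood_cov)

    # Each dimension of x is now bimodal around -1 and +1 within one batch.
    print(x[:, 0].mean(), x[:, 0].std())  # mean near 0, std near 1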
@@ -572,6 +578,7 @@ def simulator(theta):
 
     limits = [[-2, 2], [-2, 2], [-2, 2]]
 
+    # Fit a Gaussian KDE to the conditional samples and get log-probs.
     density = gaussian_kde(cond_samples.numpy().T, bw_method="scott")
 
     X, Y = np.meshgrid(
@@ -581,7 +588,7 @@ def simulator(theta):
     positions = np.vstack([X.ravel(), Y.ravel()])
     sample_kde_grid = np.reshape(density(positions).T, X.shape)
 
-    # Evaluate the conditional with eval_conditional_density.
+    # Get conditional log probs with eval_conditional_density.
     eval_grid = analysis.eval_conditional_density(
         posterior,
         condition=samples[0],
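Note (not part of the commit): the surrounding test smooths the MCMC conditional samples with a Gaussian KDE, evaluates both the KDE and eval_conditional_density on the same grid, and compares the two grids pointwise. A minimal sketch of that comparison pattern with scipy's gaussian_kde, using hypothetical synthetic draws in place of cond_samples and reusing the KDE as a stand-in for the eval_conditional_density grid; normalizing both grids before differencing is an assumption about how `error` is computed:

    import numpy as np
    from scipy.stats import gaussian_kde

    rng = np.random.default_rng(0)
    # Hypothetical stand-in for cond_samples: bimodal 2D draws.
    cond_samples = np.concatenate(
        [rng.normal(-1.0, 0.2, (500, 2)), rng.normal(1.0, 0.2, (500, 2))]
    )

    density = gaussian_kde(cond_samples.T, bw_method="scott")

    grid = np.linspace(-2, 2, 50)
    X, Y = np.meshgrid(grid, grid)
    positions = np.vstack([X.ravel(), Y.ravel()])
    sample_kde_grid = np.reshape(density(positions).T, X.shape)

    # Stand-in for the eval_conditional_density grid; in the test it comes
    # from the sbi posterior, so the error would be small but nonzero.
    eval_grid = np.reshape(density(positions).T, X.shape)

    # Compare the normalized grids by their maximum pointwise error.
    error = np.abs(
        sample_kde_grid / sample_kde_grid.sum() - eval_grid / eval_grid.sum()
    )
    print(f"Max error: {np.max(error)}")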
@@ -598,6 +605,7 @@ def simulator(theta):
 
     max_err = np.max(error)
     assert max_err < 0.0027
+    print(f"Max error: {max_err}")
 
 
 def test_mdn_conditional_density(num_dim: int = 3, cond_dim: int = 1):
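Note (not part of the commit): the added print runs after the assert, so it only executes on passing runs, and pytest captures stdout by default. A hypothetical invocation to see the printed value (assuming the repo's pytest config runs mcmc-marked tests; -s disables output capture so the print is visible):

    pytest tests/linearGaussian_snpe_test.py -k test_sample_conditional -s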
