Skip to content

Commit b92fe19

Browse files
committed
fix remaining bugs in tutorials
1 parent 9efa645 commit b92fe19

File tree

3 files changed

+11
-8
lines changed

3 files changed

+11
-8
lines changed

tutorials/12_iid_data_and_permutation_invariant_embeddings.ipynb

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -226,8 +226,8 @@
226226
}
227227
],
228228
"source": [
229-
"# Train SNLE.\n",
230-
"inferer = SNLE(prior, show_progress_bars=True, density_estimator=\"mdn\")\n",
229+
"# Train NLE.\n",
230+
"inferer = NLE(prior, show_progress_bars=True, density_estimator=\"mdn\")\n",
231231
"theta, x = simulate_for_sbi(simulator, prior, 10000, simulation_batch_size=1000)\n",
232232
"inferer.append_simulations(theta, x).train(training_batch_size=1000);"
233233
]
@@ -310,9 +310,9 @@
310310
"cell_type": "markdown",
311311
"metadata": {},
312312
"source": [
313-
"The pairplot above already indicates that (S)NLE is well able to obtain accurate posterior samples also for increasing number of trials (note that we trained the single-round version of SNLE so that we did not have to re-train it for new $x_o$).\n",
313+
"The pairplot above already indicates that (S)NLE is able to obtain accurate posterior samples even for an increasing number of trials (note that we trained the single-round version of NLE so that we did not have to re-train it for new $x_o$).\n",
314314
"\n",
315-
"Quantitatively we can measure the accuracy of SNLE by calculating the `c2st` score between SNLE and the true posterior samples, where the best accuracy is perfect for `0.5`:\n"
315+
"Quantitatively, we can measure the accuracy of NLE by calculating the `c2st` score between NLE and the true posterior samples, where a score of `0.5` indicates perfect accuracy:\n"
316316
]
317317
},
318318
{

tutorials/15_importance_sampled_posteriors.ipynb

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -161,7 +161,11 @@
161161
"print(\"observations.shape\", observation.shape)\n",
162162
"\n",
163163
"# sample from posterior\n",
164-
"theta_inferred = posterior.sample((10_000,))"
164+
"theta_inferred = posterior.sample((10_000,))\n",
165+
"\n",
166+
"# get samples from ground-truth posterior\n",
167+
"gt_samples = MultivariateNormal(observation, eye(2)).sample((len(theta_inferred) * 5,))\n",
168+
"gt_samples = gt_samples[prior.support.check(gt_samples)][:len(theta_inferred)]"
165169
]
166170
},
167171
{

tutorials/18_training_interface.ipynb

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -104,7 +104,7 @@
104104
"metadata": {},
105105
"outputs": [],
106106
"source": [
107-
"from sbi.neural_nets.build_nets import build_nsf\n",
107+
"from sbi.neural_nets.net_builders import build_nsf\n",
108108
"\n",
109109
"density_estimator = build_nsf(theta, x)"
110110
]
@@ -352,7 +352,6 @@
352352
"metadata": {},
353353
"outputs": [],
354354
"source": [
355-
"from sbi.neural_nets.flow import build_nsf\n",
356355
"from sbi.inference.posteriors import MCMCPosterior\n",
357356
"from sbi.inference.potentials import likelihood_estimator_based_potential"
358357
]
@@ -440,7 +439,7 @@
440439
"metadata": {},
441440
"outputs": [],
442441
"source": [
443-
"from sbi.neural_nets.classifier import build_resnet_classifier\n",
442+
"from sbi.neural_nets.net_builders import build_resnet_classifier\n",
444443
"from sbi.inference.posteriors import MCMCPosterior\n",
445444
"from sbi.inference.potentials import ratio_estimator_based_potential\n",
446445
"from sbi import utils as utils"

0 commit comments

Comments
 (0)