Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
19 commits
Select commit Hold shift + click to select a range
43069cb
refactor: update RatioEstimator class to inherit from ConditionalEsti…
abelaba Aug 18, 2025
a62053f
refactor: update type hints from Union to ConditionalEstimator type
abelaba Aug 18, 2025
49333fe
reafactor(nre): remove RatioEstimatorBuilder protocol and update type…
abelaba Aug 18, 2025
662e1f8
refactor(npe, nle): update estimator builder type hints to not includ…
abelaba Aug 18, 2025
81d066c
refactor: remove VectorFieldEstimatorBuilder and update estimator typ…
abelaba Aug 18, 2025
7dcf58a
docs: remove empty line
abelaba Aug 18, 2025
6db3110
test: add FMPE and NPSE for testing valid and invalid estimator builders
abelaba Aug 23, 2025
5815889
chore: add loss function to RatioEstimator
abelaba Aug 23, 2025
bef2ec7
chore(nle, npe): remove ConditionalVectorFieldEstimator builder type
abelaba Aug 24, 2025
6d95f0a
test: update parameters for checking invalid builder
abelaba Aug 24, 2025
ff84a69
chore(fmpe): assign default value for prior
abelaba Aug 24, 2025
3a71ad8
test(npse, fmpe): correct test methods
abelaba Aug 24, 2025
bf8f482
chore: rename DensityEstimatorBuilder to ConditionalEstimatorBuilder
abelaba Sep 1, 2025
d344471
chore(nle, npe): update density_estimator type annotation
abelaba Sep 1, 2025
5fb2fe1
test: check embedding_net property is not None
abelaba Sep 1, 2025
0951b30
chore: update density_estimator type annotation for mnle and mnpe
abelaba Sep 1, 2025
fe76480
Update sbi/diagnostics/misspecification.py
abelaba Sep 1, 2025
ba28a91
chore(nle): update import structure
abelaba Sep 1, 2025
7e3965c
chore: update formatting
abelaba Sep 1, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions sbi/diagnostics/misspecification.py
Original file line number Diff line number Diff line change
Expand Up @@ -155,6 +155,12 @@ def calc_misspecification_mmd(
"in that case the MMD is computed in the x-space.",
stacklevel=2,
)
if inference._neural_net.embedding_net is None:
raise AttributeError(
"embedding_net attribute is None but is required for misspecification"
" detection."
)

z_obs = inference._neural_net.embedding_net(x_obs).detach()
z = inference._neural_net.embedding_net(x).detach()
else:
Expand Down
15 changes: 7 additions & 8 deletions sbi/inference/trainers/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,6 @@
ConditionalEstimator,
ConditionalVectorFieldEstimator,
)
from sbi.neural_nets.ratio_estimators import RatioEstimator
from sbi.sbi_types import TorchTransform
from sbi.utils import (
check_prior,
Expand Down Expand Up @@ -314,7 +313,7 @@ def train(
def _get_potential_function(
self,
prior: Distribution,
estimator: Union[RatioEstimator, ConditionalEstimator],
estimator: ConditionalEstimator,
) -> Tuple[BasePotential, TorchTransform]:
"""Subclass-specific potential creation"""
...
Expand Down Expand Up @@ -416,7 +415,7 @@ def get_dataloaders(

def build_posterior(
self,
estimator: Optional[Union[RatioEstimator, ConditionalEstimator]],
estimator: Optional[ConditionalEstimator],
prior: Optional[Distribution],
sample_with: Literal[
"mcmc", "rejection", "vi", "importance", "direct", "sde", "ode"
Expand Down Expand Up @@ -501,8 +500,8 @@ def _resolve_prior(self, prior: Optional[Distribution]) -> Distribution:
return prior

def _resolve_estimator(
self, estimator: Optional[Union[RatioEstimator, ConditionalEstimator]]
) -> Tuple[Union[RatioEstimator, ConditionalEstimator], str]:
self, estimator: Optional[ConditionalEstimator]
) -> Tuple[ConditionalEstimator, str]:
"""
Resolves the estimator and determines its device.

Expand All @@ -525,9 +524,9 @@ def _resolve_estimator(
# If internal net is used device is defined.
device = self._device
else:
if not isinstance(estimator, (ConditionalEstimator, RatioEstimator)):
if not isinstance(estimator, ConditionalEstimator):
raise TypeError(
"estimator must be ConditionalEstimator or RatioEstimator,"
"estimator must be ConditionalEstimator,"
f" got {type(estimator).__name__}",
)
# Otherwise, infer it from the device of the net parameters.
Expand Down Expand Up @@ -759,7 +758,7 @@ def _validate_posterior_parameters_consistency(

def _create_posterior(
self,
estimator: Union[RatioEstimator, ConditionalEstimator],
estimator: ConditionalEstimator,
prior: Distribution,
sample_with: Literal[
"mcmc", "rejection", "vi", "importance", "direct", "sde", "ode"
Expand Down
13 changes: 8 additions & 5 deletions sbi/inference/trainers/nle/mnle.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
)
from sbi.inference.trainers.nle.nle_base import LikelihoodEstimatorTrainer
from sbi.neural_nets.estimators import MixedDensityEstimator
from sbi.neural_nets.estimators.base import DensityEstimatorBuilder
from sbi.neural_nets.estimators.base import ConditionalEstimatorBuilder
from sbi.sbi_types import TensorBoardSummaryWriter
from sbi.utils.sbiutils import del_entries

Expand All @@ -34,7 +34,10 @@ class MNLE(LikelihoodEstimatorTrainer):
def __init__(
self,
prior: Optional[Distribution] = None,
density_estimator: Union[Literal["mnle"], DensityEstimatorBuilder] = "mnle",
density_estimator: Union[
Literal["mnle"],
ConditionalEstimatorBuilder[MixedDensityEstimator],
] = "mnle",
device: str = "cpu",
logging_level: Union[int, str] = "WARNING",
summary_writer: Optional[TensorBoardSummaryWriter] = None,
Expand All @@ -49,11 +52,11 @@ def __init__(
density_estimator: If it is a string, it must be "mnle" to use the
preconfigured neural nets for MNLE. Alternatively, a function
that builds a custom neural network, which adheres to
`DensityEstimatorBuilder` protocol can be provided. The function will
be called with the first batch of simulations (theta, x), which can
`ConditionalEstimatorBuilder` protocol can be provided. The function
will be called with the first batch of simulations (theta, x), which can
thus be used for shape inference and potentially for z-scoring. The
density estimator needs to provide the methods `.log_prob` and
`.sample()`.
`.sample()` and must return a `MixedDensityEstimator`.
device: Training device, e.g., "cpu", "cuda" or "cuda:{0, 1, ...}".
logging_level: Minimum severity of messages to log. One of the strings
INFO, WARNING, DEBUG, ERROR and CRITICAL.
Expand Down
14 changes: 9 additions & 5 deletions sbi/inference/trainers/nle/nle_a.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,10 @@
from torch.distributions import Distribution

from sbi.inference.trainers.nle.nle_base import LikelihoodEstimatorTrainer
from sbi.neural_nets.estimators.base import DensityEstimatorBuilder
from sbi.neural_nets.estimators.base import (
ConditionalDensityEstimator,
ConditionalEstimatorBuilder,
)
from sbi.sbi_types import TensorBoardSummaryWriter
from sbi.utils.sbiutils import del_entries

Expand All @@ -23,7 +26,8 @@ def __init__(
self,
prior: Optional[Distribution] = None,
density_estimator: Union[
Literal["nsf", "maf", "mdn", "made"], DensityEstimatorBuilder
Literal["nsf", "maf", "mdn", "made"],
ConditionalEstimatorBuilder[ConditionalDensityEstimator],
] = "maf",
device: str = "cpu",
logging_level: Union[int, str] = "WARNING",
Expand All @@ -39,11 +43,11 @@ def __init__(
density_estimator: If it is a string, use a pre-configured network of the
provided type (one of nsf, maf, mdn, made). Alternatively, a function
that builds a custom neural network, which adheres to
`DensityEstimatorBuilder` protocol can be provided. The function will
be called with the first batch of simulations (theta, x), which can
`ConditionalEstimatorBuilder` protocol can be provided. The function
will be called with the first batch of simulations (theta, x), which can
thus be used for shape inference and potentially for z-scoring. The
density estimator needs to provide the methods `.log_prob` and
`.sample()`.
`.sample()` and must return a `ConditionalDensityEstimator`.
device: Training device, e.g., "cpu", "cuda" or "cuda:{0, 1, ...}".
logging_level: Minimum severity of messages to log. One of the strings
INFO, WARNING, DEBUG, ERROR and CRITICAL.
Expand Down
11 changes: 6 additions & 5 deletions sbi/inference/trainers/nle/nle_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
from sbi.inference.trainers.base import NeuralInference
from sbi.neural_nets import likelihood_nn
from sbi.neural_nets.estimators import ConditionalDensityEstimator
from sbi.neural_nets.estimators.base import DensityEstimatorBuilder
from sbi.neural_nets.estimators.base import ConditionalEstimatorBuilder
from sbi.neural_nets.estimators.shape_handling import (
reshape_to_batch_event,
)
Expand All @@ -40,7 +40,8 @@ def __init__(
self,
prior: Optional[Distribution] = None,
density_estimator: Union[
Literal["nsf", "maf", "mdn", "made"], DensityEstimatorBuilder
Literal["nsf", "maf", "mdn", "made"],
ConditionalEstimatorBuilder[ConditionalDensityEstimator],
] = "maf",
device: str = "cpu",
logging_level: Union[int, str] = "WARNING",
Expand All @@ -57,11 +58,11 @@ def __init__(
density_estimator: If it is a string, use a pre-configured network of the
provided type (one of nsf, maf, mdn, made). Alternatively, a function
that builds a custom neural network, which adheres to
`DensityEstimatorBuilder` protocol can be provided. The function will
be called with the first batch of simulations (theta, x), which can
`ConditionalEstimatorBuilder` protocol can be provided. The function
will be called with the first batch of simulations (theta, x), which can
thus be used for shape inference and potentially for z-scoring. The
density estimator needs to provide the methods `.log_prob` and
`.sample()`.
`.sample()` and must return a `ConditionalDensityEstimator`.

See docstring of `NeuralInference` class for all other arguments.
"""
Expand Down
13 changes: 8 additions & 5 deletions sbi/inference/trainers/npe/mnpe.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
)
from sbi.inference.trainers.npe.npe_c import NPE_C
from sbi.neural_nets.estimators import MixedDensityEstimator
from sbi.neural_nets.estimators.base import DensityEstimatorBuilder
from sbi.neural_nets.estimators.base import ConditionalEstimatorBuilder
from sbi.sbi_types import TensorBoardSummaryWriter
from sbi.utils.sbiutils import del_entries

Expand All @@ -32,7 +32,10 @@ class MNPE(NPE_C):
def __init__(
self,
prior: Optional[Distribution] = None,
density_estimator: Union[Literal["mnpe"], DensityEstimatorBuilder] = "mnpe",
density_estimator: Union[
Literal["mnpe"],
ConditionalEstimatorBuilder[MixedDensityEstimator],
] = "mnpe",
device: str = "cpu",
logging_level: Union[int, str] = "WARNING",
summary_writer: Optional[TensorBoardSummaryWriter] = None,
Expand All @@ -47,11 +50,11 @@ def __init__(
density_estimator: If it is a string, it must be "mnpe" to use the
preconfigured neural nets for MNPE. Alternatively, a function
that builds a custom neural network, which adheres to
`DensityEstimatorBuilder` protocol can be provided. The function will
be called with the first batch of simulations (theta, x), which can
`ConditionalEstimatorBuilder` protocol can be provided. The function
will be called with the first batch of simulations (theta, x), which can
thus be used for shape inference and potentially for z-scoring. The
density estimator needs to provide the methods `.log_prob` and
`.sample()`.
`.sample()` and must return a `MixedDensityEstimator`.
device: Training device, e.g., "cpu", "cuda" or "cuda:{0, 1, ...}".
logging_level: Minimum severity of messages to log. One of the strings
INFO, WARNING, DEBUG, ERROR and CRITICAL.
Expand Down
18 changes: 10 additions & 8 deletions sbi/inference/trainers/npe/npe_a.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
)
from sbi.neural_nets.estimators.base import (
ConditionalDensityEstimator,
DensityEstimatorBuilder,
ConditionalEstimatorBuilder,
)
from sbi.sbi_types import TensorBoardSummaryWriter
from sbi.utils import torchutils
Expand Down Expand Up @@ -53,7 +53,8 @@ def __init__(
self,
prior: Optional[Distribution] = None,
density_estimator: Union[
Literal["mdn_snpe_a"], DensityEstimatorBuilder
Literal["mdn_snpe_a"],
ConditionalEstimatorBuilder[ConditionalDensityEstimator],
] = "mdn_snpe_a",
num_components: int = 10,
device: str = "cpu",
Expand All @@ -71,14 +72,15 @@ def __init__(
density_estimator: If it is a string (only "mdn_snpe_a" is valid), use a
pre-configured mixture of densities network. Alternatively, a function
that builds a custom neural network, which adheres to
`DensityEstimatorBuilder` protocol can be provided. The function will
be called with the first batch of simulations (theta, x), which can
`ConditionalEstimatorBuilder` protocol can be provided. The function
will be called with the first batch of simulations (theta, x), which can
thus be used for shape inference and potentially for z-scoring. The
density estimator needs to provide the methods `.log_prob` and
`.sample()`. Note that until the last round only a single (multivariate)
Gaussian component is used for training (see Algorithm 1 in [1]). In the
last round, this component is replicated `num_components` times, its
parameters are perturbed with a very small noise, and then the last
`.sample()` and must return a `ConditionalDensityEstimator`.
Note that until the last round only a single (multivariate) Gaussian
component is used for training (see Algorithm 1 in [1]). In the last
round, this component is replicated `num_components` times,
its parameters are perturbed with a very small noise, and then the last
training round is done with the expanded Gaussian mixture as estimator
for the proposal posterior.
num_components: Number of components of the mixture of Gaussians in the
Expand Down
14 changes: 9 additions & 5 deletions sbi/inference/trainers/npe/npe_b.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,10 @@
from sbi.inference.trainers.npe.npe_base import (
PosteriorEstimatorTrainer,
)
from sbi.neural_nets.estimators.base import DensityEstimatorBuilder
from sbi.neural_nets.estimators.base import (
ConditionalDensityEstimator,
ConditionalEstimatorBuilder,
)
from sbi.neural_nets.estimators.shape_handling import reshape_to_sample_batch_event
from sbi.sbi_types import TensorBoardSummaryWriter
from sbi.utils.sbiutils import del_entries
Expand All @@ -37,7 +40,8 @@ def __init__(
self,
prior: Optional[Distribution] = None,
density_estimator: Union[
Literal["nsf", "maf", "mdn", "made"], DensityEstimatorBuilder
Literal["nsf", "maf", "mdn", "made"],
ConditionalEstimatorBuilder[ConditionalDensityEstimator],
] = "maf",
device: str = "cpu",
logging_level: Union[int, str] = "WARNING",
Expand All @@ -52,11 +56,11 @@ def __init__(
density_estimator: If it is a string, use a pre-configured network of the
provided type (one of nsf, maf, mdn, made). Alternatively, a function
that builds a custom neural network, which adheres to
`DensityEstimatorBuilder` protocol can be provided. The function will
be called with the first batch of simulations (theta, x), which can
`ConditionalEstimatorBuilder` protocol can be provided. The function
will be called with the first batch of simulations (theta, x), which can
thus be used for shape inference and potentially for z-scoring. The
density estimator needs to provide the methods `.log_prob` and
`.sample()`.
`.sample()` and must return a `ConditionalDensityEstimator`.
device: Training device, e.g., "cpu", "cuda" or "cuda:{0, 1, ...}".
logging_level: Minimum severity of messages to log. One of the strings
INFO, WARNING, DEBUG, ERROR and CRITICAL.
Expand Down
15 changes: 7 additions & 8 deletions sbi/inference/trainers/npe/npe_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,7 @@
from torch.utils.tensorboard.writer import SummaryWriter
from typing_extensions import Self

from sbi.inference.posteriors import (
DirectPosterior,
)
from sbi.inference.posteriors import DirectPosterior
from sbi.inference.posteriors.base_posterior import NeuralPosterior
from sbi.inference.posteriors.posterior_parameters import (
DirectPosteriorParameters,
Expand All @@ -34,7 +32,7 @@
)
from sbi.neural_nets import posterior_nn
from sbi.neural_nets.estimators import ConditionalDensityEstimator
from sbi.neural_nets.estimators.base import DensityEstimatorBuilder
from sbi.neural_nets.estimators.base import ConditionalEstimatorBuilder
from sbi.neural_nets.estimators.shape_handling import (
reshape_to_batch_event,
reshape_to_sample_batch_event,
Expand All @@ -59,7 +57,8 @@ def __init__(
self,
prior: Optional[Distribution] = None,
density_estimator: Union[
Literal["nsf", "maf", "mdn", "made"], DensityEstimatorBuilder
Literal["nsf", "maf", "mdn", "made"],
ConditionalEstimatorBuilder[ConditionalDensityEstimator],
] = "maf",
device: str = "cpu",
logging_level: Union[int, str] = "WARNING",
Expand All @@ -76,11 +75,11 @@ def __init__(
density_estimator: If it is a string, use a pre-configured network of the
provided type (one of nsf, maf, mdn, made). Alternatively, a function
that builds a custom neural network, which adheres to
`DensityEstimatorBuilder` protocol can be provided. The function will
be called with the first batch of simulations (theta, x), which can
`ConditionalEstimatorBuilder` protocol can be provided. The function
will be called with the first batch of simulations (theta, x), which can
thus be used for shape inference and potentially for z-scoring. The
density estimator needs to provide the methods `.log_prob` and
`.sample()`.
`.sample()` and must return a `ConditionalDensityEstimator`.

See docstring of `NeuralInference` class for all other arguments.
"""
Expand Down
12 changes: 6 additions & 6 deletions sbi/inference/trainers/npe/npe_c.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
)
from sbi.neural_nets.estimators.base import (
ConditionalDensityEstimator,
DensityEstimatorBuilder,
ConditionalEstimatorBuilder,
)
from sbi.neural_nets.estimators.shape_handling import (
reshape_to_batch_event,
Expand Down Expand Up @@ -73,7 +73,8 @@ def __init__(
self,
prior: Optional[Distribution] = None,
density_estimator: Union[
Literal["nsf", "maf", "mdn", "made"], DensityEstimatorBuilder
Literal["nsf", "maf", "mdn", "made"],
ConditionalEstimatorBuilder[ConditionalDensityEstimator],
] = "maf",
device: str = "cpu",
logging_level: Union[int, str] = "WARNING",
Expand All @@ -88,12 +89,11 @@ def __init__(
density_estimator: If it is a string, use a pre-configured network of the
provided type (one of nsf, maf, mdn, made). Alternatively, a function
that builds a custom neural network, which adheres to
`DensityEstimatorBuilder` protocol can be provided. The function will
be called with the first batch of simulations (theta, x), which can
`ConditionalEstimatorBuilder` protocol can be provided. The function
will be called with the first batch of simulations (theta, x), which can
thus be used for shape inference and potentially for z-scoring. The
density estimator needs to provide the methods `.log_prob` and
`.sample()`.

`.sample()` and must return a `ConditionalDensityEstimator`.
device: Training device, e.g., "cpu", "cuda" or "cuda:{0, 1, ...}".
logging_level: Minimum severity of messages to log. One of the strings
INFO, WARNING, DEBUG, ERROR and CRITICAL.
Expand Down
Loading