Skip to content

Commit

Permalink
Add docstring
Browse files Browse the repository at this point in the history
  • Loading branch information
stefanradev93 committed Jun 12, 2024
1 parent 2a7f4cf commit 5152b5c
Show file tree
Hide file tree
Showing 2 changed files with 54 additions and 2 deletions.
52 changes: 51 additions & 1 deletion bayesflow/experimental/backend_approximators/approximator.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,57 @@


class Approximator(BaseApproximator):
    def __init__(
        self,
        inference_variables: list[str],
        inference_conditions: list[str] = None,
        summary_variables: list[str] = None,
        summary_conditions: list[str] = None,
        **kwargs,
    ):
        """The main workhorse for learning amortized neural approximators for
        distributions arising in inverse problems and Bayesian inference
        (e.g., posterior distributions, likelihoods, marginal likelihoods).

        The complete semantics of this class allow for flexible estimation of
        the following distribution:

            Q(inference_variables | H(summary_variables; summary_conditions), inference_conditions)

        where all quantities to the right of the "given" symbol | are optional
        and H refers to the optional summary / embedding network used to
        compress high-dimensional data into lower-dimensional summary vectors.

        # TODO: express the above in proper math notation
        # TODO: add citations

        Parameters
        ----------
        inference_variables : list[str]
            A list of variable names indicating the quantities to be inferred /
            learned by the approximator, e.g., model parameters when
            approximating the Bayesian posterior or observables when
            approximating a likelihood density.
        inference_conditions : list[str], optional
            A list of variable names indicating quantities that will be used to
            condition (i.e., inform) the distribution over inference variables
            directly, that is, without passing through the summary network.
        summary_variables : list[str], optional
            A list of variable names indicating quantities that will be used to
            condition (i.e., inform) the distribution over inference variables
            after passing through the summary network (i.e., undergoing a
            learnable transformation / dimensionality reduction). For instance,
            non-vector quantities (e.g., sets or time-series) in posterior
            inference will typically qualify as summary variables. In addition,
            these quantities may involve learnable distributions on their own.
        summary_conditions : list[str], optional
            A list of variable names indicating quantities that will be used to
            condition (i.e., inform) the optional summary network, e.g., when
            the summary network accepts further conditions that do not conform
            to the semantics of summary variables (i.e., need not be embedded
            or have their distribution learned).
        **kwargs
            Forwarded to ``BaseApproximator.__init__``; ``summary_network``
            defaults to ``None`` when not provided.

        Examples
        --------
        # TODO
        """

        # The configurator routes named variables from the data dict into the
        # inference / summary networks according to the lists given above.
        configurator = Configurator(inference_variables, inference_conditions, summary_variables, summary_conditions)
        # A summary network is optional; default to None so the base class can
        # skip the summary step entirely.
        kwargs.setdefault("summary_network", None)
        super().__init__(configurator=configurator, **kwargs)
Original file line number Diff line number Diff line change
Expand Up @@ -44,18 +44,20 @@ def evaluate(self, *args, **kwargs):

# noinspection PyMethodOverriding
def compute_metrics(self, data: dict[str, Tensor], stage: str = "training") -> dict[str, Tensor]:
    """Compute the (summary and) inference metrics for a batch of data.

    The configurator calls presumably rewrite ``data`` in place so that the
    summary / inference networks find their expected inputs — TODO confirm
    against Configurator's implementation.

    Parameters
    ----------
    data : dict[str, Tensor]
        Batch of named tensors; mutated by the configurator calls below.
    stage : str, optional
        Forwarded to the sub-networks' ``compute_metrics`` (default "training").

    Returns
    -------
    dict[str, Tensor]
        Metrics dictionary containing at least a "loss" entry.
    """

    if self.summary_network is None:
        # No summary network configured: prepare the inference inputs and
        # delegate entirely to the inference network.
        self.configurator.configure_inference_variables(data)
        self.configurator.configure_inference_conditions(data)

        return self.inference_network.compute_metrics(data, stage=stage)

    # Summary path: first prepare and embed the summary inputs ...
    self.configurator.configure_summary_variables(data)
    self.configurator.configure_summary_conditions(data)

    summary_metrics = self.summary_network.compute_metrics(data, stage=stage)

    # ... then prepare the inference inputs (which may depend on the summary
    # outputs written into `data` above — NOTE(review): verify ordering).
    self.configurator.configure_inference_variables(data)
    self.configurator.configure_inference_conditions(data)

    inference_metrics = self.inference_network.compute_metrics(data, stage=stage)

    # Total training loss is the sum of the summary and inference losses.
    metrics = {"loss": summary_metrics["loss"] + inference_metrics["loss"]}
Expand Down

0 comments on commit 5152b5c

Please sign in to comment.