Added Documentation to Adapter Transforms #268

Merged: 13 commits, Dec 9, 2024
21 changes: 15 additions & 6 deletions bayesflow/adapters/transforms/concatenate.py
@@ -12,13 +12,22 @@

@serializable(package="bayesflow.adapters")
class Concatenate(Transform):
"""Concatenate multiple arrays into a new key.
Parameters:

keys:

into:
"""Concatenate multiple arrays into a new key. Used to specify how data variables should be treated by the network.

Parameters:
keys: Input a list of strings, where the strings are the names of data variables.
into: A string telling the network how to use the variables named in keys.
axis: integer specifing along which axis to concatonate the keys. The last axis is used by default.

Example:
Suppose you have a simulator that generates variables "beta" and "sigma" from priors and then observation
variables "x" and "y". We can then use concatonate in the following way

adapter = (
bf.Adapter()
.concatenate(["beta", "sigma"], into="inference_variables")
.concatenate(["x", "y"], into="summary_variables")
)
"""

def __init__(self, keys: Sequence[str], *, into: str, axis: int = -1):
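As a minimal sketch of the effect (assuming, as in the Drop example further down, that a transform's forward method can be applied directly to a data dict; the shapes here are illustrative):

    import bayesflow as bf
    import numpy as np

    data = {
        "beta": np.zeros((32, 2)),   # 32 draws of a 2-dimensional parameter
        "sigma": np.ones((32, 1)),   # 32 draws of a scalar parameter
    }
    concat = bf.adapters.transforms.Concatenate(["beta", "sigma"], into="inference_variables")
    result = concat.forward(data)
    # result["inference_variables"] should have shape (32, 3): the arrays are
    # joined along the last axis, matching the default axis=-1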
6 changes: 3 additions & 3 deletions bayesflow/adapters/transforms/constrain.py
@@ -32,7 +32,7 @@ class Constrain(ElementwiseTransform):


Examples:
1) Let sigma be the standard deviation of a normal distribution,
then sigma should always be greater than zero.

Usage:
@@ -41,8 +41,8 @@ class Constrain(ElementwiseTransform):
.constrain("sigma", lower=0)
)

2) Suppose p is the parameter for a binomial distribution, where p must be in
[0, 1]; then we would constrain the neural network to estimate p in the following way.

Usage:
adapter = (
bf.Adapter()
.constrain("p", lower=0, upper=1)
)
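Conceptually, such constraints are enforced by mapping the network's unconstrained output through a smooth bijection into the allowed range. The functions below are a generic sketch of that technique, not necessarily the exact mappings this library uses:

    import numpy as np

    def lower_bounded(u, lower=0.0):
        # softplus maps any real u to a value strictly above lower, e.g. sigma > 0
        return lower + np.log1p(np.exp(u))

    def interval(u, lower=0.0, upper=1.0):
        # a scaled sigmoid maps any real u into (lower, upper), e.g. p in (0, 1)
        return lower + (upper - lower) / (1.0 + np.exp(-u))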
20 changes: 20 additions & 0 deletions bayesflow/adapters/transforms/drop.py
@@ -11,6 +11,26 @@

@serializable(package="bayesflow.adapters")
class Drop(Transform):
"""
Transform to drop variables from further calculation.

Parameters:
keys: A list of strings containing the names of the data variables to be dropped.

Example:

>>> import bayesflow as bf
>>> a = [1, 2, 3, 4]
>>> b = [[1, 2], [3, 4]]
>>> c = [[5, 6, 7, 8]]
>>> dat = dict(a=a, b=b, c=c)
>>> dat
{'a': [1, 2, 3, 4], 'b': [[1, 2], [3, 4]], 'c': [[5, 6, 7, 8]]}
>>> drop = bf.adapters.transforms.Drop(("b", "c"))
>>> drop.forward(dat)
{'a': [1, 2, 3, 4]}
"""

def __init__(self, keys: Sequence[str]):
self.keys = keys

2 changes: 2 additions & 0 deletions bayesflow/adapters/transforms/elementwise_transform.py
@@ -4,6 +4,8 @@

@serializable(package="bayesflow.adapters")
class ElementwiseTransform:
"""Base class on which other transforms are based"""

def __call__(self, data: np.ndarray, inverse: bool = False, **kwargs) -> np.ndarray:
if inverse:
return self.inverse(data, **kwargs)
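As a hypothetical illustration of the contract (the `__call__` dispatch above suggests subclasses implement forward and inverse on raw arrays; the serialization decorator is omitted here for brevity):

    import numpy as np
    from bayesflow.adapters.transforms.elementwise_transform import ElementwiseTransform

    class Log(ElementwiseTransform):
        """Hypothetical elementwise transform: log on the way in, exp on the way out."""

        def forward(self, data: np.ndarray, **kwargs) -> np.ndarray:
            return np.log(data)

        def inverse(self, data: np.ndarray, **kwargs) -> np.ndarray:
            return np.exp(data)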
5 changes: 3 additions & 2 deletions bayesflow/adapters/transforms/keep.py
@@ -25,8 +25,9 @@ class Keep(Transform):

adapter = (
bf.adapters.Adapter()
# drop data from unneeded priors alpha and r
# only keep theta and x
.keep(("theta", "x"))
)

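A sketch of Keep applied directly to a data dict, mirroring the Drop example above (the values are hypothetical):

    >>> import bayesflow as bf
    >>> dat = dict(theta=[0.5, 0.7], x=[1, 2, 3], alpha=[0.1], r=[2.0])
    >>> keep = bf.adapters.transforms.Keep(("theta", "x"))
    >>> keep.forward(dat)
    {'theta': [0.5, 0.7], 'x': [1, 2, 3]}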
4 changes: 4 additions & 0 deletions bayesflow/adapters/transforms/one_hot.py
@@ -9,6 +9,10 @@

@serializable(package="bayesflow.adapters")
class OneHot(ElementwiseTransform):
"""
One-hot encodes categorical (integer) data into vectors of length num_classes.
"""

def __init__(self, num_classes: int):
super().__init__()
self.num_classes = num_classes
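A short sketch of the assumed behavior (the exact output dtype is an assumption):

    import bayesflow as bf
    import numpy as np

    one_hot = bf.adapters.transforms.OneHot(num_classes=3)
    encoded = one_hot.forward(np.array([0, 2, 1]))
    # expected, up to dtype: [[1, 0, 0], [0, 0, 1], [0, 1, 0]]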
15 changes: 15 additions & 0 deletions bayesflow/adapters/transforms/standardize.py
@@ -10,6 +10,21 @@

@serializable(package="bayesflow.adapters")
class Standardize(ElementwiseTransform):
"""
Transform that standardizes data using the usual z-score standardization, i.e., for unstandardized
data x, the standardized version z is

z = (x - mean(x)) / std(x)

Parameters:
mean: Integer or float specifying a known mean; estimated from the data when not provided.
std: Integer or float specifying a known standard deviation; estimated from the data when not provided.
axis: Integer specifying an axis along which standardization should take place. By default,
standardization happens individually for each dimension.
momentum: Float in (0, 1) specifying the momentum of the running estimates during training.

"""

def __init__(
self,
mean: int | float | np.ndarray = None,
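A small sketch of the formula in action, with mean and standard deviation estimated from the data (the running-estimate and momentum behavior during training is not shown):

    import bayesflow as bf
    import numpy as np

    x = np.array([[1.0, 10.0], [3.0, 30.0]])
    standardize = bf.adapters.transforms.Standardize()
    z = standardize.forward(x)
    # per dimension this should correspond to (x - x.mean(axis=0)) / x.std(axis=0),
    # i.e. each column of z has mean 0 and standard deviation 1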
4 changes: 4 additions & 0 deletions bayesflow/adapters/transforms/transform.py
@@ -4,6 +4,10 @@

@serializable(package="bayesflow.adapters")
class Transform:
"""
Base class on which other transforms are based
"""

def __call__(self, data: dict[str, np.ndarray], *, inverse: bool = False, **kwargs) -> dict[str, np.ndarray]:
if inverse:
return self.inverse(data, **kwargs)
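As a hypothetical illustration of the dict-level contract (assuming, symmetrically to ElementwiseTransform, that subclasses implement forward and inverse on the full data dict; the serialization decorator is again omitted):

    import numpy as np
    from bayesflow.adapters.transforms.transform import Transform

    class Rename(Transform):
        """Hypothetical transform: move an array from one key to another."""

        def __init__(self, from_key: str, to_key: str):
            self.from_key = from_key
            self.to_key = to_key

        def forward(self, data: dict[str, np.ndarray], **kwargs) -> dict[str, np.ndarray]:
            data = data.copy()
            data[self.to_key] = data.pop(self.from_key)
            return data

        def inverse(self, data: dict[str, np.ndarray], **kwargs) -> dict[str, np.ndarray]:
            data = data.copy()
            data[self.from_key] = data.pop(self.to_key)
            return data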