Skip to content

Commit 469104d

Browse files
committed
Cleanup
ruff formatting changes
1 parent 46bc669 commit 469104d

File tree

4 files changed

+21
-7
lines changed

4 files changed

+21
-7
lines changed

bayesflow/benchmarks/gaussian_linear.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ def observation_model(params: np.ndarray, n_obs: int = None, scale: float = 0.1,
3737
"""Generates batched draws from a D-dimensional Gaussian distribution given a batch of
3838
location (mean) parameters of D dimensions. Assumes a spherical covariance matrix given
3939
by scale * I_D.
40-
40+
4141
See Task T.1 from the paper https://arxiv.org/pdf/2101.04653.pdf
4242
NOTE: The paper description uses a variance of 0.1 for the prior and likelihood
4343
but the implementation uses scale = 0.1. Our implementation uses a default scale

bayesflow/benchmarks/gaussian_linear_uniform.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -39,12 +39,12 @@ def observation_model(params: np.ndarray, n_obs: int = None, scale: float = 0.1,
3939
"""Generates batched draws from a D-dimensional Gaussian distribution given a batch of
4040
location (mean) parameters of D dimensions. Assumes a spherical covariance matrix given
4141
by scale * I_D.
42-
42+
4343
See Task T.2 from the paper https://arxiv.org/pdf/2101.04653.pdf
4444
NOTE: The paper description uses a variance of 0.1 for likelihood function
4545
but the implementation uses scale = 0.1. Our implementation uses a default scale
4646
of 0.1 for consistency with the implementation.
47-
47+
4848
Parameters
4949
----------
5050
params : np.ndarray of shape (params, D)

bayesflow/benchmarks/gaussian_mixture.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,9 @@ def prior(lower_bound: float = -10.0, upper_bound: float = 10.0, D: int = 2, rng
3535
return rng.uniform(low=lower_bound, high=upper_bound, size=D)
3636

3737

38-
def observation_model(params: np.ndarray, prob: float = 0.5, scale_c1: float = 1.0, scale_c2: float = 0.1, rng: np.random.Generator = None):
38+
def observation_model(
39+
params: np.ndarray, prob: float = 0.5, scale_c1: float = 1.0, scale_c2: float = 0.1, rng: np.random.Generator = None
40+
):
3941
"""Simulates data from the Gaussian mixture model (GMM) with
4042
shared location vector. For more details, see
4143

bayesflow/benchmarks/slcp_distractors.py

Lines changed: 15 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,9 @@ def simulator():
99
return dict(parameters=prior_draws, observables=observables)
1010

1111

12-
def get_random_student_t(dim: int = 2, mu_scale: float = 15., shape_scale: float = 0.01, rng: np.random.Generator = None):
12+
def get_random_student_t(
13+
dim: int = 2, mu_scale: float = 15.0, shape_scale: float = 0.01, rng: np.random.Generator = None
14+
):
1315
"""A helper function to create a "frozen" multivariate student-t distribution of dimensions `dim`.
1416
1517
Parameters
@@ -42,7 +44,9 @@ def get_random_student_t(dim: int = 2, mu_scale: float = 15., shape_scale: float
4244
return multivariate_t(loc=mu, shape=shape_scale, df=2, allow_singular=True, seed=rng)
4345

4446

45-
def draw_mixture_student_t(num_students: int, n_draws: int = 46, dim: int = 2, mu_scale: float = 15.0, rng: np.random.Generator = None):
47+
def draw_mixture_student_t(
48+
num_students: int, n_draws: int = 46, dim: int = 2, mu_scale: float = 15.0, rng: np.random.Generator = None
49+
):
4650
"""Helper function to generate `n_draws` random draws from a mixture of `num_students`
4751
multivariate Student-t distributions.
4852
@@ -105,7 +109,15 @@ def prior(lower_bound: float = -3.0, upper_bound: float = 3.0, rng: np.random.Ge
105109
return rng.uniform(low=lower_bound, high=upper_bound, size=5)
106110

107111

108-
def observation_model(params: np.ndarray, n_obs: int = 4, n_dist: int = 46, dim: int = 2, mu_scale: float = 15.0, flatten: bool = True, rng: np.random.Generator = None):
112+
def observation_model(
113+
params: np.ndarray,
114+
n_obs: int = 4,
115+
n_dist: int = 46,
116+
dim: int = 2,
117+
mu_scale: float = 15.0,
118+
flatten: bool = True,
119+
rng: np.random.Generator = None,
120+
):
109121
"""Generates data from the SLCP model designed as a benchmark for a simple likelihood
110122
and a complex posterior due to a non-linear pushforward params -> x. In addition, it
111123
outputs uninformative distractor data.

0 commit comments

Comments
 (0)