Improve docs and references
Radev committed May 14, 2024
1 parent bbea8a6 commit 7597e45
Showing 2 changed files with 18 additions and 9 deletions.
16 changes: 8 additions & 8 deletions bayesflow/experimental/networks/coupling_flow/actnorm.py
@@ -16,14 +16,14 @@ class ActNorm(keras.Layer):
References
----------
-.. [1] Kingma, Diederik P., and Prafulla Dhariwal.
-    "Glow: Generative flow with invertible 1x1 convolutions."
-    arXiv preprint arXiv:1807.03039 (2018).
-.. [2] Salimans, Tim, and Durk P. Kingma.
-    "Weight normalization: A simple reparameterization to accelerate
-    training of deep neural networks."
-    Advances in neural information processing systems 29 (2016): 901-909.
+.. [1] Kingma, D. P., & Dhariwal, P. (2018).
+    Glow: Generative flow with invertible 1x1 convolutions.
+    Advances in Neural Information Processing Systems, 31.
+.. [2] Salimans, T., & Kingma, D. P. (2016).
+    Weight normalization: A simple reparameterization to accelerate
+    training of deep neural networks.
+    Advances in Neural Information Processing Systems, 29.
"""

def __init__(self, target_dim: int, **kwargs):
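As a rough illustration of what activation normalization does, here is a minimal, hypothetical sketch of the ActNorm idea from reference [1], not the layer in this repository; the class name ActNormSketch, its inverse method, and the ones/zeros initialization are assumptions (the paper initializes scale and bias from the statistics of the first batch so post-layer activations start with zero mean and unit variance).

import keras
from keras import ops

class ActNormSketch(keras.Layer):
    """Per-dimension affine transform z = scale * x + bias."""

    def __init__(self, target_dim: int, **kwargs):
        super().__init__(**kwargs)
        # Kingma & Dhariwal (2018) use data-dependent initialization;
        # ones/zeros are used here only for brevity.
        self.scale = self.add_weight(shape=(target_dim,), initializer="ones", name="scale")
        self.bias = self.add_weight(shape=(target_dim,), initializer="zeros", name="bias")

    def call(self, x):
        z = self.scale * x + self.bias
        # Log-determinant of the Jacobian: sum over dimensions of log|scale|
        log_det = ops.sum(ops.log(ops.abs(self.scale)))
        return z, log_det

    def inverse(self, z):
        # Exact inverse of the elementwise affine transform
        return (z - self.bias) / self.scale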
11 changes: 10 additions & 1 deletion
@@ -8,7 +8,16 @@


class AllInOneCoupling(keras.Layer):
""" Implements a single coupling layer, followed by a permutation. """
"""
Implements a single coupling layer, preceeeded by an optional activation normalization,
followed by a permutation [1]. The layer implements two coupling transformations, such that
the entire input is transformed following a forward / inverse call.
[1] Kingma, D. P., & Dhariwal, P. (2018).
Glow: Generative flow with invertible 1x1 convolutions.
Advances in Neural Information Processing Systems, 31.
"""

def __init__(
self,
subnet: keras.Model | keras.layers.Layer,
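To make the "two coupling transformations" remark concrete, here is a minimal, hypothetical sketch of a two-pass additive coupling; it is not the AllInOneCoupling implementation, and the helper name, the additive transform, and the subnet1 / subnet2 arguments are assumptions (an affine coupling would additionally track a log-determinant term).

import keras
from keras import ops

def two_pass_coupling(x, subnet1, subnet2):
    # Split the input into two halves along the last axis
    x1, x2 = ops.split(x, 2, axis=-1)
    # First coupling: transform x2 conditioned on x1
    z2 = x2 + subnet1(x1)
    # Second coupling: transform x1 conditioned on the updated half,
    # so every dimension is transformed after one call
    z1 = x1 + subnet2(z2)
    return ops.concatenate([z1, z2], axis=-1)

# Illustrative usage with half-dimension Dense subnets on 8-dim inputs:
# z = two_pass_coupling(x, keras.layers.Dense(4), keras.layers.Dense(4))

Inversion runs the same two steps in reverse order with subtraction, which is what makes the coupling construction cheap to invert.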
