
Commit 2b70ae5

Authored Aug 8, 2023
Added some docstrings ops/nn.py (#677)
* Small typo
* Added some docstrings
* Remove extra lines
* Unindent examples // use floats in example section
1 parent cb95f39 commit 2b70ae5

2 files changed: +212, -1 lines changed

keras_core/activations/activations.py (+1, -1)
@@ -429,7 +429,7 @@ def log_softmax(x, axis=-1):
     is applied along.
 
     Args:
-        x : Input tensor.
+        x: Input tensor.
         axis: Integer, axis along which the softmax is applied.
     """
     return ops.log_softmax(x, axis=axis)

keras_core/ops/nn.py (+211)
@@ -22,6 +22,24 @@ def compute_output_spec(self, x):
 
 @keras_core_export(["keras_core.ops.relu", "keras_core.ops.nn.relu"])
 def relu(x):
+    """Rectified linear unit activation function.
+
+    It is defined as `f(x) = max(0, x)`.
+
+    Args:
+        x: Input tensor.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_relu = keras_core.ops.relu(x)
+    >>> print(x_relu)
+    array([0., 0., 1.], shape=(3,), dtype=float64)
+
+    """
     if any_symbolic_tensors((x,)):
         return Relu().symbolic_call(x)
     return backend.nn.relu(x)
@@ -37,6 +55,24 @@ def compute_output_spec(self, x):
 
 @keras_core_export(["keras_core.ops.relu6", "keras_core.ops.nn.relu6"])
 def relu6(x):
+    """Rectified linear unit activation function with upper bound of 6.
+
+    It is defined as `f(x) = np.clip(x, 0, 6)`.
+
+    Args:
+        x: Input tensor.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1., 6., 7.])
+    >>> x_relu6 = keras_core.ops.relu6(x)
+    >>> print(x_relu6)
+    array([0., 0., 1., 6., 6.], shape=(5,), dtype=float64)
+
+    """
     if any_symbolic_tensors((x,)):
         return Relu6().symbolic_call(x)
     return backend.nn.relu6(x)
@@ -52,6 +88,24 @@ def compute_output_spec(self, x):
 
 @keras_core_export(["keras_core.ops.sigmoid", "keras_core.ops.nn.sigmoid"])
 def sigmoid(x):
+    """Sigmoid activation function.
+
+    It is defined as `f(x) = 1 / (1 + exp(-x))`.
+
+    Args:
+        x: Input tensor.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_sigmoid = keras_core.ops.sigmoid(x)
+    >>> print(x_sigmoid)
+    array([0.26894143, 0.5, 0.7310586 ], dtype=float64)
+
+    """
     if any_symbolic_tensors((x,)):
         return Sigmoid().symbolic_call(x)
     return backend.nn.sigmoid(x)
@@ -67,6 +121,25 @@ def compute_output_spec(self, x):
 
 @keras_core_export(["keras_core.ops.softplus", "keras_core.ops.nn.softplus"])
 def softplus(x):
+    """Softplus activation function.
+
+    It is defined as `f(x) = log(exp(x) + 1)`, where `log` is the natural
+    logarithm and `exp` is the exponential function.
+
+    Args:
+        x: Input tensor.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_softplus = keras_core.ops.softplus(x)
+    >>> print(x_softplus)
+    array([0.31326166, 0.6931472 , 1.3132616 ], shape=(3,), dtype=float64)
+
+    """
     if any_symbolic_tensors((x,)):
         return Softplus().symbolic_call(x)
     return backend.nn.softplus(x)
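
Note: the values in the example above can be checked against the stated formula with plain NumPy; the second `print` below is a common rearrangement (not part of this commit) that keeps `exp()` bounded for large positive inputs.

import numpy as np

x = np.array([-1.0, 0.0, 1.0])
# Softplus exactly as stated in the docstring: log(exp(x) + 1).
print(np.log(np.exp(x) + 1.0))  # ~[0.31326169 0.69314718 1.31326169]
# Equivalent form that avoids overflow of exp(x) for large positive x.
print(np.maximum(x, 0.0) + np.log1p(np.exp(-np.abs(x))))  # same values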
@@ -82,6 +155,24 @@ def compute_output_spec(self, x):
 
 @keras_core_export(["keras_core.ops.softsign", "keras_core.ops.nn.softsign"])
 def softsign(x):
+    """Softsign activation function.
+
+    It is defined as `f(x) = x / (abs(x) + 1)`.
+
+    Args:
+        x: Input tensor.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_softsign = keras_core.ops.softsign(x)
+    >>> print(x_softsign)
+    array([-0.5, 0., 0.5], shape=(3,), dtype=float64)
+
+    """
     if any_symbolic_tensors((x,)):
         return Softsign().symbolic_call(x)
     return backend.nn.softsign(x)
@@ -97,6 +188,24 @@ def compute_output_spec(self, x):
 
 @keras_core_export(["keras_core.ops.silu", "keras_core.ops.nn.silu"])
 def silu(x):
+    """Sigmoid-weighted linear unit activation function.
+
+    It is defined as `f(x) = x * sigmoid(x)`.
+
+    Args:
+        x: Input tensor.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_silu = keras_core.ops.silu(x)
+    >>> print(x_silu)
+    array([-0.26894143, 0., 0.7310586], shape=(3,), dtype=float64)
+
+    """
     if any_symbolic_tensors((x,)):
         return Silu().symbolic_call(x)
     return backend.nn.silu(x)
@@ -132,6 +241,24 @@ def compute_output_spec(self, x):
     ]
 )
 def log_sigmoid(x):
+    """Logarithm of the sigmoid activation function.
+
+    It is defined as `f(x) = log(1 / (1 + exp(-x)))`.
+
+    Args:
+        x: Input tensor.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_log_sigmoid = keras_core.ops.log_sigmoid(x)
+    >>> print(x_log_sigmoid)
+    array([-1.3132616, -0.6931472, -0.3132616], shape=(3,), dtype=float64)
+
+    """
     if any_symbolic_tensors((x,)):
         return LogSigmoid().symbolic_call(x)
     return backend.nn.log_sigmoid(x)
@@ -153,6 +280,28 @@ def compute_output_spec(self, x):
     ["keras_core.ops.leaky_relu", "keras_core.ops.nn.leaky_relu"]
 )
 def leaky_relu(x, negative_slope=0.2):
+    """Leaky version of a Rectified Linear Unit.
+
+    It allows a small gradient when the unit is not active, it is defined as:
+
+    `f(x) = alpha * x for x < 0` or `f(x) = x for x >= 0`.
+
+    Args:
+        x: Input tensor.
+        negative_slope: Slope of the activation function at x < 0.
+            Defaults to `0.2`.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_leaky_relu = keras_core.ops.leaky_relu(x)
+    >>> print(x_leaky_relu)
+    array([-0.2, 0. , 1. ], shape=(3,), dtype=float64)
+
+    """
     if any_symbolic_tensors((x,)):
         return LeakyRelu(negative_slope).symbolic_call(x)
     return backend.nn.leaky_relu(x, negative_slope=negative_slope)
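
Note: the `alpha` in the formula above corresponds to the `negative_slope` argument in the signature. A minimal NumPy sketch (an illustration, not the Keras Core implementation) that reproduces the example output:

import numpy as np

def leaky_relu_ref(x, negative_slope=0.2):
    # negative_slope plays the role of `alpha` in the docstring formula.
    x = np.asarray(x, dtype=np.float64)
    return np.where(x >= 0, x, negative_slope * x)

print(leaky_relu_ref([-1.0, 0.0, 1.0]))  # [-0.2  0.   1. ]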
@@ -173,6 +322,26 @@ def compute_output_spec(self, x):
     ]
 )
 def hard_sigmoid(x):
+    """Hard sigmoid activation function.
+
+    It is defined as:
+
+    `0 if x < -2.5`, `1 if x > 2.5`, `(0.2 * x) + 0.5 if -2.5 <= x <= 2.5`.
+
+    Args:
+        x: Input tensor.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_hard_sigmoid = keras_core.ops.hard_sigmoid(x)
+    >>> print(x_hard_sigmoid)
+    array([0.3, 0.5, 0.7], shape=(3,), dtype=float64)
+
+    """
     if any_symbolic_tensors((x,)):
        return HardSigmoid().symbolic_call(x)
     return backend.nn.hard_sigmoid(x)
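
Note: the piecewise definition above is equivalent to clipping the ramp `0.2 * x + 0.5` to the interval `[0, 1]`. A minimal NumPy sketch (an illustration only, not the backend implementation):

import numpy as np

def hard_sigmoid_ref(x):
    # 0 for x < -2.5, 1 for x > 2.5, and the ramp 0.2 * x + 0.5 in between.
    x = np.asarray(x, dtype=np.float64)
    return np.clip(0.2 * x + 0.5, 0.0, 1.0)

print(hard_sigmoid_ref([-1.0, 0.0, 1.0]))  # [0.3 0.5 0.7]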
@@ -192,6 +361,27 @@ def compute_output_spec(self, x):
 
 @keras_core_export(["keras_core.ops.elu", "keras_core.ops.nn.elu"])
 def elu(x, alpha=1.0):
+    """Exponential Linear Unit.
+
+    It is defined as:
+
+    `f(x) = alpha * (exp(x) - 1.) for x < 0`, `f(x) = x for x >= 0`.
+
+    Args:
+        x: Input tensor.
+        alpha: A scalar, slope of positive section. Defaults to `1.0`.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_elu = keras_core.ops.elu(x)
+    >>> print(x_elu)
+    array([-0.63212055, 0., 1.], shape=(3,), dtype=float64)
+
+    """
     if any_symbolic_tensors((x,)):
         return Elu(alpha).symbolic_call(x)
     return backend.nn.elu(x, alpha=alpha)
@@ -207,6 +397,27 @@ def compute_output_spec(self, x):
 
 @keras_core_export(["keras_core.ops.selu", "keras_core.ops.nn.selu"])
 def selu(x):
+    """Scaled Exponential Linear Unit (SELU).
+
+    It is defined as:
+
+    `f(x) = scale * alpha * (exp(x) - 1.) for x < 0`,
+    `f(x) = scale * x for x >= 0`.
+
+    Args:
+        x: Input tensor.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_selu = keras_core.ops.selu(x)
+    >>> print(x_selu)
+    array([-1.11133055, 0., 1.05070098], shape=(3,), dtype=float64)
+
+    """
     if any_symbolic_tensors((x,)):
         return Selu().symbolic_call(x)
     return backend.nn.selu(x)
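
Note: the docstring leaves `scale` and `alpha` unspecified; SELU conventionally uses the self-normalizing constants `scale ≈ 1.05070098` and `alpha ≈ 1.67326324`, which are assumed in the NumPy sketch below (an illustration, not the backend implementation) and reproduce the example output:

import numpy as np

def selu_ref(x, scale=1.05070098, alpha=1.67326324):
    # scale * x for x >= 0, scale * alpha * (exp(x) - 1) for x < 0.
    x = np.asarray(x, dtype=np.float64)
    return scale * np.where(x >= 0, x, alpha * (np.exp(x) - 1.0))

print(selu_ref([-1.0, 0.0, 1.0]))  # ~[-1.11133  0.  1.05070098]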
