@@ -22,6 +22,24 @@ def compute_output_spec(self, x):

@keras_core_export(["keras_core.ops.relu", "keras_core.ops.nn.relu"])
def relu(x):
+    """Rectified linear unit activation function.
+
+    It is defined as `f(x) = max(0, x)`.
+
+    Args:
+        x: Input tensor.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_relu = keras_core.ops.relu(x)
+    >>> print(x_relu)
+    array([0., 0., 1.], shape=(3,), dtype=float64)
+
+    """
    if any_symbolic_tensors((x,)):
        return Relu().symbolic_call(x)
    return backend.nn.relu(x)
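
The `max(0, x)` definition above can be sanity-checked with a NumPy-only sketch (the helper name `relu_reference` is illustrative, not part of keras_core):

    import numpy as np

    def relu_reference(x):
        # Elementwise max(0, x), matching the documented definition.
        return np.maximum(x, 0.0)

    print(relu_reference(np.array([-1., 0., 1.])))  # -> [0., 0., 1.]
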
@@ -37,6 +55,24 @@ def compute_output_spec(self, x):

@keras_core_export(["keras_core.ops.relu6", "keras_core.ops.nn.relu6"])
def relu6(x):
+    """Rectified linear unit activation function with upper bound of 6.
+
+    It is defined as `f(x) = np.clip(x, 0, 6)`.
+
+    Args:
+        x: Input tensor.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1., 6., 7.])
+    >>> x_relu6 = keras_core.ops.relu6(x)
+    >>> print(x_relu6)
+    array([0., 0., 1., 6., 6.], shape=(5,), dtype=float64)
+
+    """
    if any_symbolic_tensors((x,)):
        return Relu6().symbolic_call(x)
    return backend.nn.relu6(x)
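
Similarly, a hedged NumPy sketch of the clipped variant (`relu6_reference` is an illustrative name):

    import numpy as np

    def relu6_reference(x):
        # Clip to [0, 6], as in the documented np.clip(x, 0, 6).
        return np.clip(x, 0.0, 6.0)

    print(relu6_reference(np.array([-1., 0., 1., 6., 7.])))  # -> [0., 0., 1., 6., 6.]
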
@@ -52,6 +88,24 @@ def compute_output_spec(self, x):

@keras_core_export(["keras_core.ops.sigmoid", "keras_core.ops.nn.sigmoid"])
def sigmoid(x):
+    """Sigmoid activation function.
+
+    It is defined as `f(x) = 1 / (1 + exp(-x))`.
+
+    Args:
+        x: Input tensor.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_sigmoid = keras_core.ops.sigmoid(x)
+    >>> print(x_sigmoid)
+    array([0.26894143, 0.5, 0.7310586], dtype=float64)
+
+    """
    if any_symbolic_tensors((x,)):
        return Sigmoid().symbolic_call(x)
    return backend.nn.sigmoid(x)
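
A minimal NumPy sketch of the documented sigmoid formula (`sigmoid_reference` is illustrative only):

    import numpy as np

    def sigmoid_reference(x):
        # 1 / (1 + exp(-x)), matching the documented definition.
        return 1.0 / (1.0 + np.exp(-x))

    print(sigmoid_reference(np.array([-1., 0., 1.])))  # -> approx [0.2689, 0.5, 0.7311]
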
@@ -67,6 +121,25 @@ def compute_output_spec(self, x):

@keras_core_export(["keras_core.ops.softplus", "keras_core.ops.nn.softplus"])
def softplus(x):
+    """Softplus activation function.
+
+    It is defined as `f(x) = log(exp(x) + 1)`, where `log` is the natural
+    logarithm and `exp` is the exponential function.
+
+    Args:
+        x: Input tensor.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_softplus = keras_core.ops.softplus(x)
+    >>> print(x_softplus)
+    array([0.31326166, 0.6931472, 1.3132616], shape=(3,), dtype=float64)
+
+    """
    if any_symbolic_tensors((x,)):
        return Softplus().symbolic_call(x)
    return backend.nn.softplus(x)
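
The `log(exp(x) + 1)` definition can be reproduced in NumPy; `np.log1p` is used here only as a slightly more stable spelling of the same formula, and `softplus_reference` is an illustrative name:

    import numpy as np

    def softplus_reference(x):
        # log(exp(x) + 1), written via log1p for numerical stability.
        return np.log1p(np.exp(x))

    print(softplus_reference(np.array([-1., 0., 1.])))  # -> approx [0.3133, 0.6931, 1.3133]
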
@@ -82,6 +155,24 @@ def compute_output_spec(self, x):

@keras_core_export(["keras_core.ops.softsign", "keras_core.ops.nn.softsign"])
def softsign(x):
+    """Softsign activation function.
+
+    It is defined as `f(x) = x / (abs(x) + 1)`.
+
+    Args:
+        x: Input tensor.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_softsign = keras_core.ops.softsign(x)
+    >>> print(x_softsign)
+    array([-0.5, 0., 0.5], shape=(3,), dtype=float64)
+
+    """
    if any_symbolic_tensors((x,)):
        return Softsign().symbolic_call(x)
    return backend.nn.softsign(x)
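
A NumPy sketch of the softsign formula above (`softsign_reference` is illustrative):

    import numpy as np

    def softsign_reference(x):
        # x / (|x| + 1), matching the documented definition.
        return x / (np.abs(x) + 1.0)

    print(softsign_reference(np.array([-1., 0., 1.])))  # -> [-0.5, 0., 0.5]
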
@@ -97,6 +188,24 @@ def compute_output_spec(self, x):

@keras_core_export(["keras_core.ops.silu", "keras_core.ops.nn.silu"])
def silu(x):
+    """Sigmoid-weighted linear unit activation function.
+
+    It is defined as `f(x) = x * sigmoid(x)`.
+
+    Args:
+        x: Input tensor.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_silu = keras_core.ops.silu(x)
+    >>> print(x_silu)
+    array([-0.26894143, 0., 0.7310586], shape=(3,), dtype=float64)
+
+    """
    if any_symbolic_tensors((x,)):
        return Silu().symbolic_call(x)
    return backend.nn.silu(x)
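
Since `x * sigmoid(x)` simplifies to `x / (1 + exp(-x))`, the silu formula can be checked with a small NumPy sketch (`silu_reference` is an illustrative name):

    import numpy as np

    def silu_reference(x):
        # x * sigmoid(x), written as x / (1 + exp(-x)).
        return x / (1.0 + np.exp(-x))

    print(silu_reference(np.array([-1., 0., 1.])))  # -> approx [-0.2689, 0., 0.7311]
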
@@ -132,6 +241,24 @@ def compute_output_spec(self, x):
    ]
)
def log_sigmoid(x):
+    """Logarithm of the sigmoid activation function.
+
+    It is defined as `f(x) = log(1 / (1 + exp(-x)))`.
+
+    Args:
+        x: Input tensor.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_log_sigmoid = keras_core.ops.log_sigmoid(x)
+    >>> print(x_log_sigmoid)
+    array([-1.3132616, -0.6931472, -0.3132616], shape=(3,), dtype=float64)
+
+    """
    if any_symbolic_tensors((x,)):
        return LogSigmoid().symbolic_call(x)
    return backend.nn.log_sigmoid(x)
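
Because `log(1 / (1 + exp(-x)))` equals `-log(1 + exp(-x))`, the formula can be checked with a `log1p`-based NumPy sketch (`log_sigmoid_reference` is illustrative):

    import numpy as np

    def log_sigmoid_reference(x):
        # log(1 / (1 + exp(-x))) == -log(1 + exp(-x)).
        return -np.log1p(np.exp(-x))

    print(log_sigmoid_reference(np.array([-1., 0., 1.])))  # -> approx [-1.3133, -0.6931, -0.3133]
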
@@ -153,6 +280,28 @@ def compute_output_spec(self, x):
    ["keras_core.ops.leaky_relu", "keras_core.ops.nn.leaky_relu"]
)
def leaky_relu(x, negative_slope=0.2):
+    """Leaky version of a Rectified Linear Unit.
+
+    It allows a small gradient when the unit is not active. It is defined as:
+
+    `f(x) = negative_slope * x for x < 0` or `f(x) = x for x >= 0`.
+
+    Args:
+        x: Input tensor.
+        negative_slope: Slope of the activation function at x < 0.
+            Defaults to `0.2`.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_leaky_relu = keras_core.ops.leaky_relu(x)
+    >>> print(x_leaky_relu)
+    array([-0.2, 0. , 1. ], shape=(3,), dtype=float64)
+
+    """
    if any_symbolic_tensors((x,)):
        return LeakyRelu(negative_slope).symbolic_call(x)
    return backend.nn.leaky_relu(x, negative_slope=negative_slope)
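
The piecewise definition with its `negative_slope` parameter can be mirrored in NumPy (`leaky_relu_reference` is an illustrative name; the 0.2 default matches the op's signature):

    import numpy as np

    def leaky_relu_reference(x, negative_slope=0.2):
        # x for x >= 0, negative_slope * x otherwise.
        return np.where(x >= 0.0, x, negative_slope * x)

    print(leaky_relu_reference(np.array([-1., 0., 1.])))  # -> [-0.2, 0., 1.]
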
@@ -173,6 +322,26 @@ def compute_output_spec(self, x):
    ]
)
def hard_sigmoid(x):
+    """Hard sigmoid activation function.
+
+    It is defined as:
+
+    `0 if x < -2.5`, `1 if x > 2.5`, `(0.2 * x) + 0.5 if -2.5 <= x <= 2.5`.
+
+    Args:
+        x: Input tensor.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_hard_sigmoid = keras_core.ops.hard_sigmoid(x)
+    >>> print(x_hard_sigmoid)
+    array([0.3, 0.5, 0.7], shape=(3,), dtype=float64)
+
+    """
    if any_symbolic_tensors((x,)):
        return HardSigmoid().symbolic_call(x)
    return backend.nn.hard_sigmoid(x)
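
The three-branch definition above collapses to a single clipped linear expression, which a NumPy sketch makes explicit (`hard_sigmoid_reference` is illustrative):

    import numpy as np

    def hard_sigmoid_reference(x):
        # Equivalent piecewise-linear form: clip(0.2 * x + 0.5, 0, 1).
        return np.clip(0.2 * x + 0.5, 0.0, 1.0)

    print(hard_sigmoid_reference(np.array([-1., 0., 1.])))  # -> [0.3, 0.5, 0.7]
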
@@ -192,6 +361,27 @@ def compute_output_spec(self, x):

@keras_core_export(["keras_core.ops.elu", "keras_core.ops.nn.elu"])
def elu(x, alpha=1.0):
+    """Exponential Linear Unit.
+
+    It is defined as:
+
+    `f(x) = alpha * (exp(x) - 1.) for x < 0`, `f(x) = x for x >= 0`.
+
+    Args:
+        x: Input tensor.
+        alpha: A scalar, scaling factor of the negative section.
+            Defaults to `1.0`.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_elu = keras_core.ops.elu(x)
+    >>> print(x_elu)
+    array([-0.63212055, 0., 1.], shape=(3,), dtype=float64)
+
+    """
    if any_symbolic_tensors((x,)):
        return Elu(alpha).symbolic_call(x)
    return backend.nn.elu(x, alpha=alpha)
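
A NumPy sketch of the two-branch ELU formula (`elu_reference` is an illustrative name; `alpha` scales the negative branch as in the docstring):

    import numpy as np

    def elu_reference(x, alpha=1.0):
        # x for x >= 0, alpha * (exp(x) - 1) otherwise.
        return np.where(x >= 0.0, x, alpha * (np.exp(x) - 1.0))

    print(elu_reference(np.array([-1., 0., 1.])))  # -> approx [-0.6321, 0., 1.]
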
@@ -207,6 +397,27 @@ def compute_output_spec(self, x):

@keras_core_export(["keras_core.ops.selu", "keras_core.ops.nn.selu"])
def selu(x):
+    """Scaled Exponential Linear Unit (SELU).
+
+    It is defined as:
+
+    `f(x) = scale * alpha * (exp(x) - 1.) for x < 0`,
+    `f(x) = scale * x for x >= 0`.
+
+    Args:
+        x: Input tensor.
+
+    Returns:
+        A tensor with the same shape as `x`.
+
+    Example:
+
+    >>> x = np.array([-1., 0., 1.])
+    >>> x_selu = keras_core.ops.selu(x)
+    >>> print(x_selu)
+    array([-1.11133055, 0., 1.05070098], shape=(3,), dtype=float64)
+
+    """
    if any_symbolic_tensors((x,)):
        return Selu().symbolic_call(x)
    return backend.nn.selu(x)
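
The `scale` and `alpha` constants are not spelled out in the docstring; the standard SELU values from Klambauer et al. (2017) reproduce the example output in a NumPy sketch (`selu_reference` is an illustrative name):

    import numpy as np

    # Standard SELU constants (Klambauer et al., 2017).
    SELU_ALPHA = 1.6732632423543772
    SELU_SCALE = 1.0507009873554805

    def selu_reference(x):
        # scale * x for x >= 0, scale * alpha * (exp(x) - 1) otherwise.
        return SELU_SCALE * np.where(x >= 0.0, x, SELU_ALPHA * (np.exp(x) - 1.0))

    print(selu_reference(np.array([-1., 0., 1.])))  # -> approx [-1.1113, 0., 1.0507]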