
Commit 3e4f4f7

unify code samples in paddle.nn.functional.silu and paddle.nn.Silu
1 parent a5cd974 commit 3e4f4f7

File tree

2 files changed: +5 additions, -6 deletions

python/paddle/nn/functional/activation.py

Lines changed: 3 additions & 4 deletions

@@ -777,10 +777,9 @@ def silu(x, name=None):
         .. code-block:: python
             import paddle
             import paddle.nn.functional as F
-            import numpy as np
-            paddle.disable_static()
-            x = paddle.to_tensor(np.array([1.0, 2.0, 3.0, 4.0]))
-            out = F.silu(x) # [ 0.7310586 1.7615942 2.8577224, 3.9280552 ]
+
+            x = paddle.to_tensor([1.0, 2.0, 3.0, 4.0])
+            out = F.silu(x) # [ 0.731059, 1.761594, 2.857722, 3.928055 ]
     """

     if in_dygraph_mode():
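As a sanity check on the unified sample, the commented output values follow directly from the SiLU definition, silu(x) = x / (1 + e^(-x)). The sketch below is a minimal, standard-library-only verification; the silu helper here is illustrative, not Paddle's implementation:

    import math

    def silu(x):
        # SiLU (a.k.a. swish): x * sigmoid(x) = x / (1 + e^(-x))
        return x / (1.0 + math.exp(-x))

    for v in [1.0, 2.0, 3.0, 4.0]:
        print(f"silu({v}) = {silu(v):.6f}")
    # silu(1.0) = 0.731059
    # silu(2.0) = 1.761594
    # silu(3.0) = 2.857722
    # silu(4.0) = 3.928055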

python/paddle/nn/layer/activation.py

Lines changed: 2 additions & 2 deletions

@@ -925,7 +925,7 @@ class Silu(layers.Layer):
     Silu Activation.
     .. math::

-        Silu(x) = \\frac{x}{1 + e^{-x}}
+        Silu(x) = \frac{x}{1 + e^{-x}}

     Parameters:
         x (Tensor): The input Tensor with data type float32, or float64.

@@ -943,7 +943,7 @@ class Silu(layers.Layer):

             x = paddle.to_tensor([1.0, 2.0, 3.0, 4.0])
             m = paddle.nn.Silu()
-            out = m(x) # [ 0.7310586 1.7615942 2.8577224, 3.9280552 ]
+            out = m(x) # [ 0.731059, 1.761594, 2.857722, 3.928055 ]
     """

     def __init__(self, name=None):
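Since the point of the commit is that the functional and layer forms document identical behavior, a quick equivalence check can confirm it. This is a sketch assuming a Paddle 2.x install where both paddle.nn.Silu and paddle.nn.functional.silu are available, as in this commit:

    import paddle
    import paddle.nn.functional as F

    x = paddle.to_tensor([1.0, 2.0, 3.0, 4.0])
    m = paddle.nn.Silu()
    # paddle.allclose returns a boolean Tensor; cast it for the assert.
    assert bool(paddle.allclose(m(x), F.silu(x)))
    print(m(x).numpy())  # [0.731059 1.761594 2.857722 3.928055]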
