python/paddle/nn/functional/activation.py (10 changes: 5 additions & 5 deletions)
@@ -712,7 +712,7 @@ def rrelu(x, lower=1.0 / 8.0, upper=1.0 / 3.0, training=True, name=None):
 
     is_test = not training
 
-    if in_dynamic_mode():
+    if in_dynamic_or_pir_mode():
         return _C_ops.rrelu(x, lower, upper, is_test)
     else:
         check_variable_and_dtype(
@@ -886,7 +886,7 @@ def maxout(x, groups, axis=1, name=None):
              [0.42400089, 0.40641287, 0.97020894, 0.74437362],
              [0.51785129, 0.73292869, 0.97786582, 0.92382854]]]])
     """
-    if in_dynamic_mode():
+    if in_dynamic_or_pir_mode():
         return _C_ops.maxout(x, groups, axis)
     else:
         check_variable_and_dtype(
@@ -1007,7 +1007,7 @@ def selu(
             f"The alpha must be no less than zero. Received: {alpha}."
         )
 
-    if in_dynamic_mode():
+    if in_dynamic_or_pir_mode():
         return _C_ops.selu(x, scale, alpha)
     else:
         check_variable_and_dtype(
@@ -1533,7 +1533,7 @@ def tanhshrink(x, name=None):
            Tensor(shape=[4], dtype=float32, place=Place(cpu), stop_gradient=True,
            [-0.02005100, -0.00262472, 0.00033201, 0.00868741])
     """
-    if in_dynamic_mode():
+    if in_dynamic_or_pir_mode():
         return _C_ops.tanh_shrink(x)
     else:
         check_variable_and_dtype(
@@ -1583,7 +1583,7 @@ def thresholded_relu(x, threshold=1.0, name=None):
            [2., 0., 0.])
     """
 
-    if in_dynamic_mode():
+    if in_dynamic_or_pir_mode():
         return _C_ops.thresholded_relu(x, threshold)
     else:
         check_variable_and_dtype(
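Note on the hunks above: each touched function keeps the same two-branch shape; the `_C_ops` fast path is now taken whenever `in_dynamic_or_pir_mode()` is true (dynamic graph and the new PIR static graph), while the legacy static graph still goes through `check_variable_and_dtype` and the old append-op path. The snippet below is only an illustrative round-trip over one of the touched APIs using standard public Paddle calls; it is not part of this PR, and the values and tolerance are arbitrary.

# Illustrative only, not part of the diff: exercise thresholded_relu in both
# dynamic and static mode and check the two paths agree.
import numpy as np

import paddle
import paddle.nn.functional as F

x_np = np.array([-1.0, 0.5, 2.0], dtype="float32")

# Dynamic graph path (in_dynamic_or_pir_mode() is true, _C_ops branch runs).
dyn_out = F.thresholded_relu(paddle.to_tensor(x_np), threshold=1.0)

# Static graph path (PIR or legacy, depending on the build/flags).
paddle.enable_static()
with paddle.static.program_guard(paddle.static.Program()):
    x = paddle.static.data("X", x_np.shape, x_np.dtype)
    out = F.thresholded_relu(x, threshold=1.0)
    exe = paddle.static.Executor(paddle.CPUPlace())
    (static_out,) = exe.run(feed={"X": x_np}, fetch_list=[out])
paddle.disable_static()

np.testing.assert_allclose(dyn_out.numpy(), static_out, rtol=1e-6)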
test/legacy_test/test_activation_op.py (16 changes: 12 additions & 4 deletions)
@@ -1155,7 +1155,10 @@ def setUp(self):
     def test_check_grad(self):
         if self.dtype == np.float16:
             return
-        self.check_grad(['X'], 'Out')
+        self.check_grad(['X'], 'Out', check_pir=True)
+
+    def test_check_output(self):
+        self.check_output(check_pir=True)
 
 
 class TestTanhshrink_ZeroDim(TestTanhshrink):
@@ -1174,6 +1177,7 @@ def setUp(self):
             else paddle.CPUPlace()
         )
 
+    @test_with_pir_api
     def test_static_api(self):
         with static_guard():
             with paddle.static.program_guard(paddle.static.Program()):
@@ -4302,7 +4306,10 @@ def init_shape(self):
     def test_check_grad(self):
         if self.dtype == np.float16:
             return
-        self.check_grad(['X'], 'Out')
+        self.check_grad(['X'], 'Out', check_pir=True)
+
+    def test_check_output(self):
+        self.check_output(check_pir=True)
 
 
 class TestThresholdedRelu_ZeroDim(TestThresholdedRelu):
@@ -4323,6 +4330,7 @@ def setUp(self):
             else paddle.CPUPlace()
         )
 
+    @test_with_pir_api
     def test_static_api(self):
         with static_guard():
             with paddle.static.program_guard(paddle.static.Program()):
@@ -4790,7 +4798,7 @@ def test_check_grad(self):
 create_test_act_fp16_class(
     TestTanh, check_prim=True, check_prim_pir=True, enable_cinn=True
 )
-create_test_act_fp16_class(TestTanhshrink)
+create_test_act_fp16_class(TestTanhshrink, check_pir=True)
 create_test_act_fp16_class(TestHardShrink, check_pir=True)
 create_test_act_fp16_class(TestSoftshrink, check_pir=True)
 create_test_act_fp16_class(
@@ -4965,7 +4973,7 @@ def test_check_grad(self):
 create_test_act_bf16_class(TestSilu, check_prim=True, check_prim_pir=True)
 create_test_act_bf16_class(TestLogSigmoid)
 create_test_act_bf16_class(TestTanh, check_prim=True, check_prim_pir=True)
-create_test_act_bf16_class(TestTanhshrink)
+create_test_act_bf16_class(TestTanhshrink, check_pir=True)
 create_test_act_bf16_class(TestHardShrink, check_pir=True)
 create_test_act_bf16_class(TestSoftshrink, check_pir=True)
 create_test_act_bf16_class(
test/legacy_test/test_maxout_op.py (7 changes: 5 additions & 2 deletions)
@@ -20,6 +20,7 @@
 import paddle
 import paddle.nn.functional as F
 from paddle.base import core
+from paddle.pir_utils import test_with_pir_api
 
 paddle.enable_static()
 np.random.seed(1)
@@ -57,10 +58,10 @@ def set_attrs(self):
         pass
 
     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_pir=True)
 
     def test_check_grad(self):
-        self.check_grad(['X'], 'Out')
+        self.check_grad(['X'], 'Out', check_pir=True)
 
 
 class TestMaxOutOpAxis0(TestMaxOutOp):
@@ -95,6 +96,7 @@ def setUp(self):
             else paddle.CPUPlace()
         )
 
+    @test_with_pir_api
     def test_static_api(self):
         with paddle.static.program_guard(paddle.static.Program()):
             x = paddle.static.data('X', self.x_np.shape, self.x_np.dtype)
@@ -161,6 +163,7 @@ def setUp(self):
         self.axis = 1
         self.place = paddle.CUDAPlace(0)
 
+    @test_with_pir_api
     def test_static_api(self):
         with paddle.static.program_guard(paddle.static.Program()):
             x = paddle.static.data('X', self.x_np.shape, self.x_np.dtype)
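For reference, the decorator imported above is applied to plain unittest methods that build static programs. The sketch below is a hypothetical, self-contained test in the same style as the cases this PR annotates; the class name, shapes, and shape check are made up for illustration, and it assumes that `test_with_pir_api` re-runs the decorated body under both the legacy static program and the PIR program (that description of its mechanics is an assumption, not something shown in this diff).

# Hypothetical example only; mirrors the @test_with_pir_api usage added above.
import unittest

import numpy as np

import paddle
import paddle.nn.functional as F
from paddle.pir_utils import test_with_pir_api

paddle.enable_static()


class TestMaxoutStaticExample(unittest.TestCase):
    def setUp(self):
        self.x_np = np.random.uniform(-1.0, 1.0, [2, 6, 5, 4]).astype("float32")

    @test_with_pir_api
    def test_static_api(self):
        with paddle.static.program_guard(paddle.static.Program()):
            x = paddle.static.data("X", self.x_np.shape, self.x_np.dtype)
            out = F.maxout(x, groups=2, axis=1)  # 6 channels -> 3 after maxout
            exe = paddle.static.Executor(paddle.CPUPlace())
            (res,) = exe.run(feed={"X": self.x_np}, fetch_list=[out])
        self.assertEqual(tuple(res.shape), (2, 3, 5, 4))


if __name__ == "__main__":
    unittest.main()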