diff --git a/python/paddle/tensor/math.py b/python/paddle/tensor/math.py
index ae3d6121061a64..055fa20144e901 100644
--- a/python/paddle/tensor/math.py
+++ b/python/paddle/tensor/math.py
@@ -6828,7 +6828,7 @@ def polygamma(x, n, name=None):
     if n == 0:
         return digamma(x)
     else:
-        if in_dynamic_mode():
+        if in_dynamic_or_pir_mode():
             return _C_ops.polygamma(x, n)
         else:
             check_variable_and_dtype(
diff --git a/test/legacy_test/test_polygamma_op.py b/test/legacy_test/test_polygamma_op.py
index 9c9b0416ba4f20..5edd092f58610d 100644
--- a/test/legacy_test/test_polygamma_op.py
+++ b/test/legacy_test/test_polygamma_op.py
@@ -20,6 +20,7 @@

 import paddle
 from paddle.base import core
+from paddle.pir_utils import test_with_pir_api

 np.random.seed(100)
 paddle.seed(100)
@@ -64,6 +65,7 @@ def setUp(self):
         if core.is_compiled_with_cuda():
             self.place.append(paddle.CUDAPlace(0))

+    @test_with_pir_api
     def test_api_static(self):
         def run(place):
             paddle.enable_static()
@@ -197,7 +199,7 @@ def init_config(self):
         self.target = ref_polygamma(self.inputs['x'], self.order)

     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_pir=True)

     def test_check_grad(self):
         self.check_grad(
@@ -206,6 +208,7 @@
             user_defined_grads=[
                 ref_polygamma_grad(self.case, 1 / self.case.size, self.order)
             ],
+            check_pir=True,
         )
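
For reviewers, a minimal smoke-test sketch (not part of the diff) of the static-graph path that `@test_with_pir_api` re-runs under the new IR. It uses only public `paddle.static` APIs; the `FLAGS_enable_pir_api` toggle mentioned in the comment is an assumption about how the decorator switches between the legacy Program and the PIR program.

```python
import numpy as np

import paddle

# Minimal static-graph smoke test for the path this PR touches.
# @test_with_pir_api re-runs code like this twice: once under the legacy
# Program and once under PIR (assumption: the decorator flips
# FLAGS_enable_pir_api, which makes in_dynamic_or_pir_mode() return True,
# so polygamma dispatches to _C_ops.polygamma instead of the legacy
# append-op path).
paddle.enable_static()
with paddle.static.program_guard(paddle.static.Program()):
    x = paddle.static.data('x', shape=[4], dtype='float64')
    out = paddle.polygamma(x, n=1)  # n > 0 takes the _C_ops branch
    exe = paddle.static.Executor(paddle.CPUPlace())
    (res,) = exe.run(
        feed={'x': np.array([2.0, 3.0, 4.0, 5.0])},
        fetch_list=[out],
    )
    print(res)  # trigamma values at 2..5
```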