From e17f0eef70ec018c899ae022099aa79b3ab15da5 Mon Sep 17 00:00:00 2001 From: longranger2 <836253168@qq.com> Date: Sat, 4 Nov 2023 01:52:58 +0000 Subject: [PATCH 01/12] migrate crop, cross, softmax_with_cross_entropy into pir --- python/paddle/nn/functional/loss.py | 8 ++++-- python/paddle/tensor/linalg.py | 2 +- python/paddle/tensor/manipulation.py | 2 +- test/legacy_test/test_crop_op.py | 4 +-- test/legacy_test/test_cross_op.py | 12 ++++++--- .../test_softmax_with_cross_entropy_op.py | 26 ++++++++++++------- 6 files changed, 34 insertions(+), 20 deletions(-) diff --git a/python/paddle/nn/functional/loss.py b/python/paddle/nn/functional/loss.py index 5faba8b2f31310..3a2e31f8c97254 100644 --- a/python/paddle/nn/functional/loss.py +++ b/python/paddle/nn/functional/loss.py @@ -22,7 +22,11 @@ from paddle.utils import deprecated from ...base.data_feeder import check_variable_and_dtype -from ...base.framework import _current_expected_place, in_pir_mode +from ...base.framework import ( + _current_expected_place, + in_dynamic_or_pir_mode, + in_pir_mode, +) from ...base.layer_helper import LayerHelper from ...common_ops_import import Variable from ...tensor.manipulation import reshape @@ -267,7 +271,7 @@ def base_softmax_with_cross_entropy( ) if input_dims - 1 == label_dims: label = paddle.unsqueeze(label, axis=axis) - if in_dynamic_mode(): + if in_dynamic_or_pir_mode(): softmax, loss = _C_ops.cross_entropy_with_softmax( logits, label, diff --git a/python/paddle/tensor/linalg.py b/python/paddle/tensor/linalg.py index 86a2188c332255..c136fac33c255d 100644 --- a/python/paddle/tensor/linalg.py +++ b/python/paddle/tensor/linalg.py @@ -1466,7 +1466,7 @@ def cross(x, y, axis=9, name=None): [0., 0., 0.], [0., 0., 0.]]) """ - if in_dynamic_mode(): + if in_dynamic_or_pir_mode(): axis = K_DEFAULT_DIM if axis is None else axis return _C_ops.cross(x, y, axis) else: diff --git a/python/paddle/tensor/manipulation.py b/python/paddle/tensor/manipulation.py index 40856399238ae2..ff619ba09aa3bd 100644 --- a/python/paddle/tensor/manipulation.py +++ b/python/paddle/tensor/manipulation.py @@ -792,7 +792,7 @@ def crop(x, shape=None, offsets=None, name=None): if shape is None: shape = x.shape - if in_dynamic_mode(): + if in_dynamic_or_pir_mode(): return _C_ops.crop(x, shape, offsets) out = helper.create_variable_for_type_inference(x.dtype) diff --git a/test/legacy_test/test_crop_op.py b/test/legacy_test/test_crop_op.py index 858fd89fc7e998..9353f6eda71823 100644 --- a/test/legacy_test/test_crop_op.py +++ b/test/legacy_test/test_crop_op.py @@ -80,10 +80,10 @@ def initTestCase(self): self.offsets = [1, 2] def test_check_output(self): - self.check_output() + self.check_output(check_pir=True) def test_check_grad_normal(self): - self.check_grad(['X'], 'Out') + self.check_grad(['X'], 'Out', check_pir=True) class TestCase1(TestCropOp): diff --git a/test/legacy_test/test_cross_op.py b/test/legacy_test/test_cross_op.py index cd13ea10f45106..6c88f6dc6cfb26 100644 --- a/test/legacy_test/test_cross_op.py +++ b/test/legacy_test/test_cross_op.py @@ -20,6 +20,7 @@ import paddle from paddle import base from paddle.base import Program, core, program_guard +from paddle.pir_utils import test_with_pir_api class TestCrossOp(OpTest): @@ -47,10 +48,10 @@ def init_output(self): self.outputs = {'Out': np.array(z_list).reshape(self.shape)} def test_check_output(self): - self.check_output() + self.check_output(check_pir=True) def test_check_grad_normal(self): - self.check_grad(['X', 'Y'], 'Out') + self.check_grad(['X', 'Y'], 'Out', check_pir=True) 
class TestCrossOpCase1(TestCrossOp): @@ -116,13 +117,15 @@ def test_check_output(self): if core.is_compiled_with_cuda(): place = core.CUDAPlace(0) if core.is_bfloat16_supported(place): - self.check_output_with_place(place) + self.check_output_with_place(place, check_pir=True) def test_check_grad_normal(self): if core.is_compiled_with_cuda(): place = core.CUDAPlace(0) if core.is_bfloat16_supported(place): - self.check_grad_with_place(place, ['X', 'Y'], 'Out') + self.check_grad_with_place( + place, ['X', 'Y'], 'Out', check_pir=True + ) class TestCrossAPI(unittest.TestCase): @@ -134,6 +137,7 @@ def input_data(self): [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]] ).astype('float32') + @test_with_pir_api def test_cross_api(self): self.input_data() diff --git a/test/legacy_test/test_softmax_with_cross_entropy_op.py b/test/legacy_test/test_softmax_with_cross_entropy_op.py index 0515c7c78e9d70..413824a8c1cd94 100644 --- a/test/legacy_test/test_softmax_with_cross_entropy_op.py +++ b/test/legacy_test/test_softmax_with_cross_entropy_op.py @@ -153,8 +153,8 @@ def setUp(self): def test_check_output(self): if self.python_api is not None: - self.check_output() - self.check_output() + self.check_output(check_pir=True) + self.check_output(check_pir=True) def test_check_grad(self): if core.is_compiled_with_rocm(): @@ -165,7 +165,9 @@ def test_check_grad(self): max_relative_error=5e-1, ) # HIP will have accuracy fail when using float32 in CPU place - self.check_grad(["Logits"], "Loss", max_relative_error=5e-1) + self.check_grad( + ["Logits"], "Loss", max_relative_error=5e-1, check_pir=True + ) else: if self.python_api is not None: self.check_grad( @@ -173,7 +175,9 @@ def test_check_grad(self): "Loss", numeric_grad_delta=0.001, ) - self.check_grad(["Logits"], "Loss", numeric_grad_delta=0.001) + self.check_grad( + ["Logits"], "Loss", numeric_grad_delta=0.001, check_pir=True + ) class TestSoftmaxWithCrossEntropyOpInt32(TestSoftmaxWithCrossEntropyOp): @@ -509,13 +513,15 @@ def setUp(self): def test_check_output(self): if self.python_api is not None: - self.check_output() - self.check_output() + self.check_output(check_pir=True) + self.check_output(check_pir=True) def test_check_grad(self): if self.python_api is not None: - self.check_grad(["Logits"], "Loss") - self.check_grad(["Logits"], "Loss", max_relative_error=0.1) + self.check_grad(["Logits"], "Loss", check_pir=True) + self.check_grad( + ["Logits"], "Loss", max_relative_error=0.1, check_pir=True + ) class TestSoftmaxWithCrossEntropyOpNoCudnnFp16( @@ -557,8 +563,8 @@ def initParams(self): def test_check_output(self): if self.python_api is not None: - self.check_output() - self.check_output() + self.check_output(check_pir=True) + self.check_output(check_pir=True) def test_check_grad(self): if core.is_compiled_with_rocm(): From 2876ba5c994c6065dda2e36f44802e1bd6de0cb1 Mon Sep 17 00:00:00 2001 From: LoneRanger <836253168@qq.com> Date: Tue, 7 Nov 2023 09:32:07 +0800 Subject: [PATCH 02/12] Update test_crop_op.py --- test/legacy_test/test_crop_op.py | 1 + 1 file changed, 1 insertion(+) diff --git a/test/legacy_test/test_crop_op.py b/test/legacy_test/test_crop_op.py index 9353f6eda71823..6ef5e8fd90fd80 100644 --- a/test/legacy_test/test_crop_op.py +++ b/test/legacy_test/test_crop_op.py @@ -47,6 +47,7 @@ def indexOf(shape, index): class TestCropOp(OpTest): def setUp(self): self.op_type = "crop" + self.python_api = paddle.crop self.crop_by_input = False self.offset_by_input = False self.attrs = {} From 8d68a01755fa18336c861ebca3321733ae4b0cf0 Mon Sep 17 00:00:00 
2001 From: LoneRanger <836253168@qq.com> Date: Tue, 7 Nov 2023 09:33:56 +0800 Subject: [PATCH 03/12] Update test_cross_op.py --- test/legacy_test/test_cross_op.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/legacy_test/test_cross_op.py b/test/legacy_test/test_cross_op.py index 6c88f6dc6cfb26..45c79f14a801f3 100644 --- a/test/legacy_test/test_cross_op.py +++ b/test/legacy_test/test_cross_op.py @@ -149,7 +149,7 @@ def test_cross_api(self): exe = base.Executor(base.CPUPlace()) (res,) = exe.run( feed={'x': self.data_x, 'y': self.data_y}, - fetch_list=[z.name], + fetch_list=[z], return_numpy=False, ) expect_out = np.array( From 82721e8cbe57c77d3d4cdc4f2d2387d723f3f81c Mon Sep 17 00:00:00 2001 From: LoneRanger <836253168@qq.com> Date: Tue, 7 Nov 2023 11:28:26 +0800 Subject: [PATCH 04/12] Update test_cross_op.py --- test/legacy_test/test_cross_op.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/legacy_test/test_cross_op.py b/test/legacy_test/test_cross_op.py index 45c79f14a801f3..4d5a802aba5e4e 100644 --- a/test/legacy_test/test_cross_op.py +++ b/test/legacy_test/test_cross_op.py @@ -165,7 +165,7 @@ def test_cross_api(self): exe = base.Executor(base.CPUPlace()) (res,) = exe.run( feed={'x': self.data_x, 'y': self.data_y}, - fetch_list=[z.name], + fetch_list=[z], return_numpy=False, ) expect_out = np.array( From 7495a01665496b3486f7021554cabec8af5088e5 Mon Sep 17 00:00:00 2001 From: longranger2 <836253168@qq.com> Date: Wed, 8 Nov 2023 23:37:33 +0800 Subject: [PATCH 05/12] fix bug --- test/legacy_test/test_crop_op.py | 5 ++--- test/legacy_test/test_crop_tensor_op.py | 8 ++++---- test/legacy_test/test_cross_op.py | 12 ++++++++---- 3 files changed, 14 insertions(+), 11 deletions(-) diff --git a/test/legacy_test/test_crop_op.py b/test/legacy_test/test_crop_op.py index 9353f6eda71823..9635d99e44024a 100644 --- a/test/legacy_test/test_crop_op.py +++ b/test/legacy_test/test_crop_op.py @@ -46,7 +46,6 @@ def indexOf(shape, index): class TestCropOp(OpTest): def setUp(self): - self.op_type = "crop" self.crop_by_input = False self.offset_by_input = False self.attrs = {} @@ -80,10 +79,10 @@ def initTestCase(self): self.offsets = [1, 2] def test_check_output(self): - self.check_output(check_pir=True) + self.check_output() def test_check_grad_normal(self): - self.check_grad(['X'], 'Out', check_pir=True) + self.check_grad(['X'], 'Out') class TestCase1(TestCropOp): diff --git a/test/legacy_test/test_crop_tensor_op.py b/test/legacy_test/test_crop_tensor_op.py index 8d9743a55171c4..ab8a6466d97a52 100644 --- a/test/legacy_test/test_crop_tensor_op.py +++ b/test/legacy_test/test_crop_tensor_op.py @@ -81,10 +81,10 @@ def initTestCase(self): self.offsets = [1, 2] def test_check_output(self): - self.check_output() + self.check_output(check_pir=True) def test_check_grad_normal(self): - self.check_grad(['X'], 'Out') + self.check_grad(['X'], 'Out', check_pir=True) class TestCase1(TestCropTensorOp): @@ -182,10 +182,10 @@ def initTestCase(self): self.shape_attr = [0, 0] def test_check_output(self): - self.check_output() + self.check_output(check_pir=True) def test_check_grad_normal(self): - self.check_grad(["X"], "Out") + self.check_grad(["X"], "Out", check_pir=True) class TestCropTensorOpTensorAttrCase1(TestCropTensorOpTensorAttr): diff --git a/test/legacy_test/test_cross_op.py b/test/legacy_test/test_cross_op.py index 6c88f6dc6cfb26..536c833a7e6679 100644 --- a/test/legacy_test/test_cross_op.py +++ b/test/legacy_test/test_cross_op.py @@ -19,7 +19,7 @@ import paddle from 
paddle import base -from paddle.base import Program, core, program_guard +from paddle.base import core from paddle.pir_utils import test_with_pir_api @@ -141,13 +141,16 @@ def input_data(self): def test_cross_api(self): self.input_data() + main = paddle.static.Program() + startup = paddle.static.Program() # case 1: - with program_guard(Program(), Program()): + with paddle.static.program_guard(main, startup): x = paddle.static.data(name='x', shape=[-1, 3], dtype="float32") y = paddle.static.data(name='y', shape=[-1, 3], dtype="float32") z = paddle.cross(x, y, axis=1) exe = base.Executor(base.CPUPlace()) (res,) = exe.run( + main, feed={'x': self.data_x, 'y': self.data_y}, fetch_list=[z.name], return_numpy=False, @@ -158,12 +161,13 @@ def test_cross_api(self): np.testing.assert_allclose(expect_out, np.array(res), rtol=1e-05) # case 2: - with program_guard(Program(), Program()): + with paddle.static.program_guard(main, startup): x = paddle.static.data(name='x', shape=[-1, 3], dtype="float32") y = paddle.static.data(name='y', shape=[-1, 3], dtype="float32") z = paddle.cross(x, y) exe = base.Executor(base.CPUPlace()) (res,) = exe.run( + main, feed={'x': self.data_x, 'y': self.data_y}, fetch_list=[z.name], return_numpy=False, @@ -174,7 +178,7 @@ def test_cross_api(self): np.testing.assert_allclose(expect_out, np.array(res), rtol=1e-05) # case 3: - with program_guard(Program(), Program()): + with paddle.static.program_guard(main, startup): x = paddle.static.data(name="x", shape=[-1, 3], dtype="float32") y = paddle.static.data(name='y', shape=[-1, 3], dtype='float32') From b02c830f2fa739a3b2ceae465134abad1692049c Mon Sep 17 00:00:00 2001 From: longranger2 <836253168@qq.com> Date: Wed, 8 Nov 2023 23:39:58 +0800 Subject: [PATCH 06/12] fix bug --- test/legacy_test/test_crop_op.py | 1 + 1 file changed, 1 insertion(+) diff --git a/test/legacy_test/test_crop_op.py b/test/legacy_test/test_crop_op.py index 9635d99e44024a..858fd89fc7e998 100644 --- a/test/legacy_test/test_crop_op.py +++ b/test/legacy_test/test_crop_op.py @@ -46,6 +46,7 @@ def indexOf(shape, index): class TestCropOp(OpTest): def setUp(self): + self.op_type = "crop" self.crop_by_input = False self.offset_by_input = False self.attrs = {} From 5607cd66591edf62988f2a14775e974f196bc55b Mon Sep 17 00:00:00 2001 From: longranger2 <836253168@qq.com> Date: Wed, 8 Nov 2023 23:43:48 +0800 Subject: [PATCH 07/12] fix bug --- .../test_softmax_with_cross_entropy_op.py | 28 +++++++++++-------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/test/legacy_test/test_softmax_with_cross_entropy_op.py b/test/legacy_test/test_softmax_with_cross_entropy_op.py index 413824a8c1cd94..8b77c1ff59432f 100644 --- a/test/legacy_test/test_softmax_with_cross_entropy_op.py +++ b/test/legacy_test/test_softmax_with_cross_entropy_op.py @@ -160,9 +160,7 @@ def test_check_grad(self): if core.is_compiled_with_rocm(): if self.python_api is not None: self.check_grad( - ["Logits"], - "Loss", - max_relative_error=5e-1, + ["Logits"], "Loss", max_relative_error=5e-1, check_pir=True ) # HIP will have accuracy fail when using float32 in CPU place self.check_grad( @@ -171,9 +169,7 @@ def test_check_grad(self): else: if self.python_api is not None: self.check_grad( - ["Logits"], - "Loss", - numeric_grad_delta=0.001, + ["Logits"], "Loss", numeric_grad_delta=0.001, check_pir=True ) self.check_grad( ["Logits"], "Loss", numeric_grad_delta=0.001, check_pir=True @@ -540,8 +536,12 @@ def initParams(self): def test_check_grad(self): if self.python_api is not None: - 
self.check_grad(["Logits"], "Loss", max_relative_error=0.1) - self.check_grad(["Logits"], "Loss", max_relative_error=0.1) + self.check_grad( + ["Logits"], "Loss", max_relative_error=0.1, check_pir=True + ) + self.check_grad( + ["Logits"], "Loss", max_relative_error=0.1, check_pir=True + ) class TestSoftmaxWithCrossEntropyOp2(TestSoftmaxWithCrossEntropyOp): @@ -570,12 +570,16 @@ def test_check_grad(self): if core.is_compiled_with_rocm(): # HIP will have accuracy fail when using float32 in CPU place if self.python_api is not None: - self.check_grad(["Logits"], "Loss", max_relative_error=0.1) - self.check_grad(["Logits"], "Loss", max_relative_error=0.1) + self.check_grad( + ["Logits"], "Loss", max_relative_error=0.1, check_pir=True + ) + self.check_grad( + ["Logits"], "Loss", max_relative_error=0.1, check_pir=True + ) else: if self.python_api is not None: - self.check_grad(["Logits"], "Loss") - self.check_grad(["Logits"], "Loss") + self.check_grad(["Logits"], "Loss", check_pir=True) + self.check_grad(["Logits"], "Loss", check_pir=True) class TestSoftmaxWithCrossEntropyOp3(TestSoftmaxWithCrossEntropyOp): From 416c252e7f27386fc6daf1733cb399e594922b86 Mon Sep 17 00:00:00 2001 From: longranger2 <836253168@qq.com> Date: Mon, 13 Nov 2023 21:42:44 +0800 Subject: [PATCH 08/12] fix bug --- python/paddle/tensor/manipulation.py | 5 ++++- test/legacy_test/test_cross_op.py | 2 ++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/python/paddle/tensor/manipulation.py b/python/paddle/tensor/manipulation.py index ff619ba09aa3bd..07f8ab9af574f5 100644 --- a/python/paddle/tensor/manipulation.py +++ b/python/paddle/tensor/manipulation.py @@ -780,7 +780,10 @@ def crop(x, shape=None, offsets=None, name=None): x, 'x', ['float32', 'float64', 'int32', 'int64'], 'crop_tensor' ) check_type( - shape, 'shape', (list, tuple, Variable, type(None)), 'crop_tensor' + shape, + 'shape', + (list, tuple, Variable, type(None), paddle.pir.OpResult), + 'crop_tensor', ) check_type( offsets, 'offsets', (list, tuple, Variable, type(None)), 'crop_tensor' diff --git a/test/legacy_test/test_cross_op.py b/test/legacy_test/test_cross_op.py index 98845758576a5a..afb261cc4d91c2 100644 --- a/test/legacy_test/test_cross_op.py +++ b/test/legacy_test/test_cross_op.py @@ -160,6 +160,8 @@ def test_cross_api(self): ) np.testing.assert_allclose(expect_out, np.array(res), rtol=1e-05) + main = paddle.static.Program() + startup = paddle.static.Program() # case 2: with paddle.static.program_guard(main, startup): x = paddle.static.data(name='x', shape=[-1, 3], dtype="float32") From f170160f78ed603111594fbc30ea3a46e130a9a1 Mon Sep 17 00:00:00 2001 From: longranger2 <836253168@qq.com> Date: Tue, 14 Nov 2023 22:57:00 +0800 Subject: [PATCH 09/12] fix bug --- python/paddle/tensor/manipulation.py | 5 ++++- test/legacy_test/test_cross_op.py | 8 +++++++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/python/paddle/tensor/manipulation.py b/python/paddle/tensor/manipulation.py index 07f8ab9af574f5..e659136ec4f16f 100644 --- a/python/paddle/tensor/manipulation.py +++ b/python/paddle/tensor/manipulation.py @@ -786,7 +786,10 @@ def crop(x, shape=None, offsets=None, name=None): 'crop_tensor', ) check_type( - offsets, 'offsets', (list, tuple, Variable, type(None)), 'crop_tensor' + offsets, + 'offsets', + (list, tuple, Variable, type(None), paddle.pir.OpResult), + 'crop_tensor', ) if offsets is None: diff --git a/test/legacy_test/test_cross_op.py b/test/legacy_test/test_cross_op.py index afb261cc4d91c2..6aeab30d6c42f7 100644 --- 
a/test/legacy_test/test_cross_op.py +++ b/test/legacy_test/test_cross_op.py @@ -179,7 +179,13 @@ def test_cross_api(self): ) np.testing.assert_allclose(expect_out, np.array(res), rtol=1e-05) - # case 3: + def test_cross_api1(self): + self.input_data() + + main = paddle.static.Program() + startup = paddle.static.Program() + + # case 1: with paddle.static.program_guard(main, startup): x = paddle.static.data(name="x", shape=[-1, 3], dtype="float32") y = paddle.static.data(name='y', shape=[-1, 3], dtype='float32') From fd31de5f84f6fa4e535d14f6833295e2f857eddc Mon Sep 17 00:00:00 2001 From: LoneRanger <836253168@qq.com> Date: Thu, 16 Nov 2023 07:37:17 +0800 Subject: [PATCH 10/12] Update test_softmax_with_cross_entropy_op.py --- test/legacy_test/test_softmax_with_cross_entropy_op.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/test/legacy_test/test_softmax_with_cross_entropy_op.py b/test/legacy_test/test_softmax_with_cross_entropy_op.py index 8b77c1ff59432f..abd903a69196c8 100644 --- a/test/legacy_test/test_softmax_with_cross_entropy_op.py +++ b/test/legacy_test/test_softmax_with_cross_entropy_op.py @@ -160,19 +160,19 @@ def test_check_grad(self): if core.is_compiled_with_rocm(): if self.python_api is not None: self.check_grad( - ["Logits"], "Loss", max_relative_error=5e-1, check_pir=True + ["Logits"], "Loss", max_relative_error=5e-1, check_pir=False ) # HIP will have accuracy fail when using float32 in CPU place self.check_grad( - ["Logits"], "Loss", max_relative_error=5e-1, check_pir=True + ["Logits"], "Loss", max_relative_error=5e-1, check_pir=False ) else: if self.python_api is not None: self.check_grad( - ["Logits"], "Loss", numeric_grad_delta=0.001, check_pir=True + ["Logits"], "Loss", numeric_grad_delta=0.001, check_pir=False ) self.check_grad( - ["Logits"], "Loss", numeric_grad_delta=0.001, check_pir=True + ["Logits"], "Loss", numeric_grad_delta=0.001, check_pir=False ) From 5a152a17c00bb2e9332a63ef63ce040b350d68ce Mon Sep 17 00:00:00 2001 From: LoneRanger <836253168@qq.com> Date: Thu, 16 Nov 2023 12:49:38 +0800 Subject: [PATCH 11/12] Update test_softmax_with_cross_entropy_op.py --- test/legacy_test/test_softmax_with_cross_entropy_op.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/test/legacy_test/test_softmax_with_cross_entropy_op.py b/test/legacy_test/test_softmax_with_cross_entropy_op.py index abd903a69196c8..7a3320626c9bcb 100644 --- a/test/legacy_test/test_softmax_with_cross_entropy_op.py +++ b/test/legacy_test/test_softmax_with_cross_entropy_op.py @@ -169,7 +169,10 @@ def test_check_grad(self): else: if self.python_api is not None: self.check_grad( - ["Logits"], "Loss", numeric_grad_delta=0.001, check_pir=False + ["Logits"], + "Loss", + numeric_grad_delta=0.001, + check_pir=False ) self.check_grad( ["Logits"], "Loss", numeric_grad_delta=0.001, check_pir=False From a8a26cf58f4d016dad63d76170800d9aafd3de7c Mon Sep 17 00:00:00 2001 From: longranger2 <836253168@qq.com> Date: Thu, 16 Nov 2023 20:57:35 +0800 Subject: [PATCH 12/12] fix bug --- .../test_softmax_with_cross_entropy_op.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/test/legacy_test/test_softmax_with_cross_entropy_op.py b/test/legacy_test/test_softmax_with_cross_entropy_op.py index 7a3320626c9bcb..e2d512707e57de 100644 --- a/test/legacy_test/test_softmax_with_cross_entropy_op.py +++ b/test/legacy_test/test_softmax_with_cross_entropy_op.py @@ -172,7 +172,7 @@ def test_check_grad(self): ["Logits"], "Loss", numeric_grad_delta=0.001, - 
check_pir=False + check_pir=False, ) self.check_grad( ["Logits"], "Loss", numeric_grad_delta=0.001, check_pir=False @@ -517,9 +517,9 @@ def test_check_output(self): def test_check_grad(self): if self.python_api is not None: - self.check_grad(["Logits"], "Loss", check_pir=True) + self.check_grad(["Logits"], "Loss", check_pir=False) self.check_grad( - ["Logits"], "Loss", max_relative_error=0.1, check_pir=True + ["Logits"], "Loss", max_relative_error=0.1, check_pir=False ) @@ -540,10 +540,10 @@ def initParams(self): def test_check_grad(self): if self.python_api is not None: self.check_grad( - ["Logits"], "Loss", max_relative_error=0.1, check_pir=True + ["Logits"], "Loss", max_relative_error=0.1, check_pir=False ) self.check_grad( - ["Logits"], "Loss", max_relative_error=0.1, check_pir=True + ["Logits"], "Loss", max_relative_error=0.1, check_pir=False ) @@ -574,15 +574,15 @@ def test_check_grad(self): # HIP will have accuracy fail when using float32 in CPU place if self.python_api is not None: self.check_grad( - ["Logits"], "Loss", max_relative_error=0.1, check_pir=True + ["Logits"], "Loss", max_relative_error=0.1, check_pir=False ) self.check_grad( - ["Logits"], "Loss", max_relative_error=0.1, check_pir=True + ["Logits"], "Loss", max_relative_error=0.1, check_pir=False ) else: if self.python_api is not None: - self.check_grad(["Logits"], "Loss", check_pir=True) - self.check_grad(["Logits"], "Loss", check_pir=True) + self.check_grad(["Logits"], "Loss", check_pir=False) + self.check_grad(["Logits"], "Loss", check_pir=False) class TestSoftmaxWithCrossEntropyOp3(TestSoftmaxWithCrossEntropyOp):
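
The recurring pattern across these twelve patches: the Python API layer swaps in_dynamic_mode() for in_dynamic_or_pir_mode() so that PIR programs take the same _C_ops fast path as eager mode, the legacy LayerHelper branch stays in place for old-style static graphs, and the OpTest suites opt in with check_pir=True (or are walked back to check_pir=False, as PATCHES 10-12 do for the softmax-with-cross-entropy gradient checks that are not yet verified under PIR). Below is a minimal sketch of the API-side dispatch, mirroring the final state of paddle.tensor.linalg.cross from PATCH 01; it assumes Paddle's internal helpers (import paths taken from the hunks above) and elides the legacy branch body, so it is illustrative rather than a drop-in file.

# Sketch only: the dispatch pattern applied to crop, cross, and
# cross_entropy_with_softmax by this series. Runs only inside the Paddle
# source tree, since it uses internal helpers.
from paddle import _C_ops
from paddle.base.framework import in_dynamic_or_pir_mode

K_DEFAULT_DIM = 9  # sentinel axis value used by paddle.cross


def cross(x, y, axis=9, name=None):
    if in_dynamic_or_pir_mode():
        # Eager mode and PIR static graphs both call the C++ op directly;
        # before this series only in_dynamic_mode() took this branch.
        axis = K_DEFAULT_DIM if axis is None else axis
        return _C_ops.cross(x, y, axis)
    else:
        # The legacy static graph keeps the pre-existing
        # LayerHelper/append_op path, unchanged by the series and
        # omitted from this sketch.
        ...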
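
On the test side, static-graph API tests pick up PIR through the test_with_pir_api decorator, build programs with paddle.static.Program instead of base.Program, and fetch values directly rather than by name (PATCHES 03-04 drop z.name, which a PIR OpResult does not carry). A condensed sketch of that shape follows; the random feed arrays and the class name are hypothetical placeholders, not part of the series.

# Sketch only: a PIR-aware static API test, condensed from the
# TestCrossAPI changes in PATCHES 01 and 05.
import unittest

import numpy as np

import paddle
from paddle.pir_utils import test_with_pir_api


class TestCrossAPISketch(unittest.TestCase):
    @test_with_pir_api  # runs the body under both the legacy IR and PIR
    def test_cross_api(self):
        paddle.enable_static()
        data_x = np.random.rand(3, 3).astype('float32')  # placeholder feed
        data_y = np.random.rand(3, 3).astype('float32')  # placeholder feed
        main = paddle.static.Program()
        startup = paddle.static.Program()
        with paddle.static.program_guard(main, startup):
            x = paddle.static.data(name='x', shape=[-1, 3], dtype='float32')
            y = paddle.static.data(name='y', shape=[-1, 3], dtype='float32')
            z = paddle.cross(x, y, axis=1)
            exe = paddle.static.Executor(paddle.CPUPlace())
            (res,) = exe.run(
                main,
                feed={'x': data_x, 'y': data_y},
                # Fetch the value itself, not z.name: PIR OpResults carry
                # no name attribute, which is what PATCHES 03-04 address.
                fetch_list=[z],
                return_numpy=False,
            )
        self.assertEqual(np.array(res).shape, (3, 3))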