test/legacy_test/test_cumsum_op.py (5 changes: 4 additions & 1 deletion)

@@ -496,9 +496,12 @@ def test_check_grad(self):


 class BadInputTest(unittest.TestCase):
+    @test_with_pir_api
     def test_error(self):
         paddle.enable_static()
-        with base.program_guard(base.Program()):
+        with paddle.static.program_guard(
+            paddle.static.Program(), paddle.static.Program()
+        ):

             def test_bad_x():
                 data = [1, 2, 4]
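The hunk above replaces the legacy base.program_guard(base.Program()) with the public paddle.static.program_guard(main, startup) and opts the test into the new IR via @test_with_pir_api. A minimal self-contained sketch of the resulting pattern (the import path and the cumsum call mirror the test file; treat the class and method names as illustrative):

    import unittest

    import paddle
    from paddle.pir_utils import test_with_pir_api


    class ProgramGuardExample(unittest.TestCase):
        @test_with_pir_api  # runs the body under both the legacy IR and PIR
        def test_error(self):
            paddle.enable_static()
            # Fresh main/startup programs keep the test from mutating the
            # process-wide default programs.
            with paddle.static.program_guard(
                paddle.static.Program(), paddle.static.Program()
            ):

                def test_bad_x():
                    data = [1, 2, 4]
                    # a plain Python list is not a Tensor, so this raises
                    paddle.cumsum(data, axis=0)

                self.assertRaises(TypeError, test_bad_x)

Passing both programs to program_guard matters for error tests: any startup-time ops land in the throwaway startup program instead of the global default one.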
test/legacy_test/test_pad_op.py (65 changes: 33 additions & 32 deletions)

@@ -27,7 +27,7 @@

 def pad_wrapper(x, paddings, pad_value):
     return paddle.nn.functional.pad(
-        x, pad=list(paddings), mode='constant', value=pad_value
+        x, pad=list(paddings), mode="constant", value=pad_value
     )


@@ -38,16 +38,16 @@ def setUp(self):
         self.op_type = "pad"
         self.python_api = pad_wrapper
         self.inputs = {
-            'X': np.random.random(self.shape).astype(self.dtype),
+            "X": np.random.random(self.shape).astype(self.dtype),
         }
         self.attrs = {}
-        self.attrs['paddings'] = list(np.array(self.paddings).flatten())
-        self.attrs['pad_value'] = self.pad_value
+        self.attrs["paddings"] = list(np.array(self.paddings).flatten())
+        self.attrs["pad_value"] = self.pad_value
         self.outputs = {
-            'Out': np.pad(
-                self.inputs['X'],
+            "Out": np.pad(
+                self.inputs["X"],
                 self.paddings,
-                mode='constant',
+                mode="constant",
                 constant_values=self.pad_value,
             )
         }
@@ -62,8 +62,8 @@ def test_check_output(self):

     def test_check_grad_normal(self):
         self.check_grad(
-            ['X'],
-            'Out',
+            ["X"],
+            "Out",
             check_prim=True,
             check_pir=True,
             check_prim_pir=True,
@@ -109,8 +109,8 @@ def get_dtype(self):

     def test_check_grad_normal(self):
         self.check_grad(
-            ['X'],
-            'Out',
+            ["X"],
+            "Out",
             check_prim=True,
             check_pir=True,
             check_prim_pir=True,
@@ -128,6 +128,7 @@ def test_check_grad_normal(self):


 class TestPadOpError(unittest.TestCase):
+    @test_with_pir_api
     def test_errors(self):
         with static_guard():
             with paddle.static.program_guard(
@@ -139,11 +139,11 @@ def test_Variable():
                     paddle.nn.functional.pad(x=input_data, pad=[1, 1, 1, 1])

                 self.assertRaises(TypeError, test_Variable)
-
-                data = paddle.static.data(
-                    name='data', shape=[4], dtype='float16'
-                )
-                paddle.nn.functional.pad(x=data, pad=[0, 1])
+                if core.is_compiled_with_cuda():
+                    data = paddle.static.data(
+                        name="data", shape=[4], dtype="float16"
+                    )
+                    paddle.nn.functional.pad(x=data, pad=[0, 1])


 class TestPaddingValueTensor(UnittestBase):
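The float16 branch above is now gated on core.is_compiled_with_cuda(): fp16 kernels are generally registered only in GPU builds, so a CPU-only run would fail while constructing the fp16 pad rather than test anything useful. A standalone sketch of the guarded path (the import layout follows current Paddle, where core lives under paddle.base; adjust for older trees):

    import paddle
    from paddle.base import core

    paddle.enable_static()
    with paddle.static.program_guard(
        paddle.static.Program(), paddle.static.Program()
    ):
        if core.is_compiled_with_cuda():
            # only meaningful where an fp16 pad kernel exists
            data = paddle.static.data(name="data", shape=[4], dtype="float16")
            paddle.nn.functional.pad(x=data, pad=[0, 1])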
@@ -171,7 +172,7 @@ def test_static(self):
             exe.run(startup_prog)
             res = exe.run(fetch_list=[feat, out])
             gt = np.pad(
-                res[0], [1, 1], 'constant', constant_values=[1.0, 1.0]
+                res[0], [1, 1], "constant", constant_values=[1.0, 1.0]
             )
             np.testing.assert_allclose(res[1], gt)

@@ -183,7 +184,7 @@ def test_static(self):
             gt = np.pad(
                 infer_outs[0],
                 [1, 1],
-                'constant',
+                "constant",
                 constant_values=[1.0, 1.0],
             )
             np.testing.assert_allclose(infer_outs[1], gt)
@@ -207,20 +208,20 @@ def test_pir_static(self):
             exe.run(startup_prog)
             res = exe.run(fetch_list=[feat, out])
             gt = np.pad(
-                res[0], [1, 1], 'constant', constant_values=[1.0, 1.0]
+                res[0], [1, 1], "constant", constant_values=[1.0, 1.0]
             )
             np.testing.assert_allclose(res[1], gt)

     def path_prefix(self):
-        return 'padding_value'
+        return "padding_value"

     def var_prefix(self):
         return "Var["

     def call_func(self, x):
         padding_value = paddle.assign([1.0])
         out = paddle.nn.functional.pad(
-            x, pad=[1, 1, 1, 1], value=padding_value, mode='constant'
+            x, pad=[1, 1, 1, 1], value=padding_value, mode="constant"
         )
         return out

@@ -238,12 +239,12 @@ class TestPaddingValueTensor3(unittest.TestCase):
     @test_with_pir_api
     def test_static(self):
         with static_guard():
-            np_x = np.random.random((16, 16)).astype('float32')
+            np_x = np.random.random((16, 16)).astype("float32")
             main_prog = paddle.static.Program()
             startup_prog = paddle.static.Program()
             with paddle.static.program_guard(main_prog, startup_prog):
-                x = paddle.assign(np_x).astype('float32')
-                pad_value = paddle.assign([0.0]).astype('float64')
+                x = paddle.assign(np_x).astype("float32")
+                pad_value = paddle.assign([0.0]).astype("float64")
                 y = paddle.nn.functional.pad(x, [0, 1, 2, 3], value=pad_value)
                 loss = y.sum()
                 optimize_ops, params_grads = paddle.optimizer.SGD(
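TestPaddingValueTensor3 pins down a subtle contract: the value argument of paddle.nn.functional.pad may itself be a Tensor, even one whose dtype (float64) differs from x (float32). A runnable distillation of the forward path, minus the SGD step (a sketch; the shape comment assumes constant-mode padding applied from the first dimension to the last, as Paddle documents for pad lists of length 2 * ndim):

    import numpy as np

    import paddle

    paddle.enable_static()
    main_prog = paddle.static.Program()
    startup_prog = paddle.static.Program()
    with paddle.static.program_guard(main_prog, startup_prog):
        x = paddle.assign(np.random.random((16, 16)).astype("float32"))
        # deliberately mismatched dtype: pad must cast the fill value
        pad_value = paddle.assign([0.0]).astype("float64")
        y = paddle.nn.functional.pad(x, [0, 1, 2, 3], value=pad_value)

    exe = paddle.static.Executor()
    exe.run(startup_prog)
    (out,) = exe.run(main_prog, fetch_list=[y])
    print(out.shape)  # (17, 21): dim 0 padded by (0, 1), dim 1 by (2, 3)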
@@ -273,13 +274,13 @@ def setUp(self):
         self.python_api = pad_wrapper
         x = np.random.random(self.shape).astype(np.float32)
         self.attrs = {}
-        self.attrs['paddings'] = list(np.array(self.paddings).flatten())
-        self.attrs['pad_value'] = self.pad_value
+        self.attrs["paddings"] = list(np.array(self.paddings).flatten())
+        self.attrs["pad_value"] = self.pad_value
         out = np.pad(
-            x, self.paddings, mode='constant', constant_values=self.pad_value
+            x, self.paddings, mode="constant", constant_values=self.pad_value
         )
-        self.inputs = {'X': convert_float_to_uint16(x)}
-        self.outputs = {'Out': convert_float_to_uint16(out)}
+        self.inputs = {"X": convert_float_to_uint16(x)}
+        self.outputs = {"Out": convert_float_to_uint16(out)}
         self.prim_op_type = "prim"
         self.public_python_api = pad_wrapper
         self.if_enable_cinn()
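The bf16 variant keeps OpTest's convention of feeding bfloat16 data as raw uint16 bit patterns via convert_float_to_uint16, since NumPy has no native bfloat16 dtype. Conceptually the helper reinterprets float32 and keeps the upper half-word; a sketch of that idea (assuming plain truncation rather than round-to-nearest):

    import numpy as np


    def to_bfloat16_bits(x: np.ndarray) -> np.ndarray:
        # float32 is 1 sign + 8 exponent + 23 mantissa bits; bfloat16 keeps
        # the top 16, so view the bits as uint32 and take the high half.
        return (x.astype(np.float32).view(np.uint32) >> 16).astype(np.uint16)


    x = np.random.random((4,)).astype(np.float32)
    print(to_bfloat16_bits(x))  # uint16 bit patterns, as OpTest stores them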
@@ -300,14 +301,14 @@ def test_check_grad(self):
         place = core.CUDAPlace(0)
         self.check_grad_with_place(
             place,
-            ['X'],
-            'Out',
+            ["X"],
+            "Out",
             check_prim=True,
             check_pir=True,
             check_prim_pir=True,
         )


-if __name__ == '__main__':
+if __name__ == "__main__":
     # paddle.enable_static()
     unittest.main()
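To exercise the touched suites locally (assuming a Paddle development build; the fp16/bf16 branches additionally require CUDA):

    python -m pytest test/legacy_test/test_pad_op.py -q
    python -m pytest test/legacy_test/test_cumsum_op.py -q -k BadInputTest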