diff --git a/test/dygraph_to_static/test_to_tensor.py b/test/dygraph_to_static/test_to_tensor.py
index 5b7dc6144845d0..c56d5f9ad9887f 100644
--- a/test/dygraph_to_static/test_to_tensor.py
+++ b/test/dygraph_to_static/test_to_tensor.py
@@ -186,8 +186,8 @@ class TestStatic(Dy2StTestBase):
     def test_static(self):
         paddle.enable_static()
         main_prog = paddle.static.Program()
-        starup_prog = paddle.static.Program()
-        with paddle.static.program_guard(main_prog, starup_prog):
+        startup_prog = paddle.static.Program()
+        with paddle.static.program_guard(main_prog, startup_prog):
             if core.is_compiled_with_cuda():
                 place = paddle.CUDAPlace(0)
             else:
@@ -208,7 +208,7 @@ def test_static(self):
             sgd.minimize(paddle.mean(out))

             exe = paddle.static.Executor()
-            exe.run(starup_prog)
+            exe.run(startup_prog)

             res = exe.run(fetch_list=[x, out])

diff --git a/test/legacy_test/test_activation_nn_grad.py b/test/legacy_test/test_activation_nn_grad.py
index d88d52f46be1c9..7bdcc6fcf30346 100644
--- a/test/legacy_test/test_activation_nn_grad.py
+++ b/test/legacy_test/test_activation_nn_grad.py
@@ -567,8 +567,8 @@ def _check_cos_double_static(self, place):
         x_data = np.random.randn(64, 64).astype("float32")
         with static_guard():
             main_prog = paddle.static.Program()
-            starup_prog = paddle.static.Program()
-            with paddle.static.program_guard(main_prog, starup_prog):
+            startup_prog = paddle.static.Program()
+            with paddle.static.program_guard(main_prog, startup_prog):
                 x = paddle.assign(x_data)
                 x.stop_gradient = False
                 y = paddle.cos(x)
@@ -576,7 +576,7 @@ def _check_cos_double_static(self, place):
                 dxx = paddle.static.gradients(dx, x)[0]

                 exe = paddle.static.Executor(place)
-                exe.run(starup_prog)
+                exe.run(startup_prog)
                 (dxx_result,) = exe.run(main_prog, fetch_list=[dxx])
                 dxx_expected = -np.cos(x_data)
                 np.testing.assert_allclose(dxx_result, dxx_expected, 1e-6, 1e-6)
diff --git a/test/legacy_test/test_adaptive_avg_pool2d.py b/test/legacy_test/test_adaptive_avg_pool2d.py
index 137e943fa5e892..880a7cf949a62e 100644
--- a/test/legacy_test/test_adaptive_avg_pool2d.py
+++ b/test/legacy_test/test_adaptive_avg_pool2d.py
@@ -353,8 +353,8 @@ def init_info(self):
     def test_static(self):
         paddle.enable_static()
         main_prog = Program()
-        starup_prog = Program()
-        with program_guard(main_prog, starup_prog):
+        startup_prog = Program()
+        with program_guard(main_prog, startup_prog):
             fc = paddle.nn.Linear(6, 6)
             x = paddle.randn(self.shapes[0])
             x.stop_gradient = False
@@ -367,7 +367,7 @@ def test_static(self):
             self.assertTrue(self.var_prefix() in str(main_prog))

             exe = paddle.static.Executor()
-            exe.run(starup_prog)
+            exe.run(startup_prog)
             res = exe.run(fetch_list=[out1, out2])
             np.testing.assert_allclose(res[0], res[1])
             paddle.static.save_inference_model(
diff --git a/test/legacy_test/test_arg_min_max_op.py b/test/legacy_test/test_arg_min_max_op.py
index 8eb1076c709d51..c35fa9f8f7d39b 100644
--- a/test/legacy_test/test_arg_min_max_op.py
+++ b/test/legacy_test/test_arg_min_max_op.py
@@ -296,8 +296,8 @@ def init_info(self):

     def test_static(self):
         main_prog = Program()
-        starup_prog = Program()
-        with program_guard(main_prog, starup_prog):
+        startup_prog = Program()
+        with program_guard(main_prog, startup_prog):
             fc = paddle.nn.Linear(4, 10)
             x = paddle.randn([2, 3, 4])
             x.stop_gradient = False
@@ -310,7 +310,7 @@ def test_static(self):
             self.assertTrue(self.var_prefix() in str(main_prog))

             exe = paddle.static.Executor()
-            exe.run(starup_prog)
+            exe.run(startup_prog)
             res = exe.run(fetch_list=[feat, out])
             paddle.static.save_inference_model(
                 self.save_path, [x], [feat, out], exe
@@ -338,8 +338,8 @@ def call_func(self, x):
 class TestArgMinTensorAxis(TestArgMaxTensorAxis):
     def test_static(self):
         main_prog = Program()
-        starup_prog = Program()
-        with program_guard(main_prog, starup_prog):
+        startup_prog = Program()
+        with program_guard(main_prog, startup_prog):
             fc = paddle.nn.Linear(4, 10)
             x = paddle.randn([2, 3, 4])
             x.stop_gradient = False
@@ -352,7 +352,7 @@ def test_static(self):
             self.assertTrue(self.var_prefix() in str(main_prog))

             exe = paddle.static.Executor()
-            exe.run(starup_prog)
+            exe.run(startup_prog)
             res = exe.run(fetch_list=[feat, out])
             paddle.static.save_inference_model(
                 self.save_path, [x], [feat, out], exe
diff --git a/test/legacy_test/test_attribute_var.py b/test/legacy_test/test_attribute_var.py
index e058816bf5da5b..e06e8a3d80d509 100644
--- a/test/legacy_test/test_attribute_var.py
+++ b/test/legacy_test/test_attribute_var.py
@@ -73,8 +73,8 @@ def init_info(self):

     def test_static(self):
         main_prog = Program()
-        starup_prog = Program()
-        with program_guard(main_prog, starup_prog):
+        startup_prog = Program()
+        with program_guard(main_prog, startup_prog):
             fc = paddle.nn.Linear(10, 10)
             x = paddle.randn(self.shapes[0])
             x.stop_gradient = False
@@ -88,7 +88,7 @@ def test_static(self):
             self.assertTrue("Var[" in str(main_prog))

             exe = paddle.static.Executor()
-            exe.run(starup_prog)
+            exe.run(startup_prog)
             res = exe.run(fetch_list=[x, out])
             # export model
             paddle.static.save_inference_model(self.save_path, [x], [out], exe)
@@ -110,8 +110,8 @@ def init_info(self):

     def _test_static(self):
         main_prog = Program()
-        starup_prog = Program()
-        with program_guard(main_prog, starup_prog):
+        startup_prog = Program()
+        with program_guard(main_prog, startup_prog):
             fc = paddle.nn.Linear(4, 10)
             x = paddle.randn([2, 3, 4])
             x.stop_gradient = False
@@ -126,7 +126,7 @@ def _test_static(self):
             self.assertTrue("Vars[" in str(main_prog))

             exe = paddle.static.Executor()
-            exe.run(starup_prog)
+            exe.run(startup_prog)
             res = exe.run(fetch_list=[x, out])
             self.assertEqual(res[1].shape, (6, 6, 10))

@@ -143,8 +143,8 @@ def init_info(self):

     def _test_static(self):
         main_prog = Program()
-        starup_prog = Program()
-        with program_guard(main_prog, starup_prog):
+        startup_prog = Program()
+        with program_guard(main_prog, startup_prog):
             fc = paddle.nn.Linear(4, 10)
             x = paddle.randn([2, 3, 4])
             x.stop_gradient = False
@@ -158,7 +158,7 @@ def _test_static(self):
             self.assertTrue("Var[" in str(main_prog))

             exe = paddle.static.Executor()
-            exe.run(starup_prog)
+            exe.run(startup_prog)
             res = exe.run(fetch_list=[x, out])
             self.assertEqual(res[1].shape, (6, 6, 10))

diff --git a/test/legacy_test/test_bincount_op.py b/test/legacy_test/test_bincount_op.py
index 9a6a6a1fbb4b02..f5e53c8d4b85c7 100644
--- a/test/legacy_test/test_bincount_op.py
+++ b/test/legacy_test/test_bincount_op.py
@@ -259,8 +259,8 @@ def test_static_and_infer(self):
         paddle.enable_static()
         np_x = np.random.randn(100).astype('float32')
         main_prog = paddle.static.Program()
-        starup_prog = paddle.static.Program()
-        with paddle.static.program_guard(main_prog, starup_prog):
+        startup_prog = paddle.static.Program()
+        with paddle.static.program_guard(main_prog, startup_prog):
             # run static
             x = paddle.static.data(shape=np_x.shape, name='x', dtype=np_x.dtype)
             linear = paddle.nn.Linear(np_x.shape[0], np_x.shape[0])
@@ -272,7 +272,7 @@ def test_static_and_infer(self):
             )

             exe = paddle.static.Executor(self.place)
-            exe.run(starup_prog)
+            exe.run(startup_prog)
             static_out = exe.run(feed={'x': np_x}, fetch_list=[out])

             # run infer
diff --git a/test/legacy_test/test_conv2d_transpose_op.py b/test/legacy_test/test_conv2d_transpose_op.py
index 5ffbc5e9c39f8e..3bb2a6f09f4d26 100644
--- a/test/legacy_test/test_conv2d_transpose_op.py
+++ b/test/legacy_test/test_conv2d_transpose_op.py
@@ -1356,8 +1356,8 @@ def call_func(self, x):

     def test_static(self):
         main_prog = Program()
-        starup_prog = Program()
-        with program_guard(main_prog, starup_prog):
+        startup_prog = Program()
+        with program_guard(main_prog, startup_prog):
             fc = paddle.nn.Linear(8, 8)
             x = paddle.randn([2, 3, 8, 8])
             x.stop_gradient = False
@@ -1369,7 +1369,7 @@ def test_static(self):
             self.assertTrue(self.var_prefix() in str(main_prog))

             exe = paddle.static.Executor()
-            exe.run(starup_prog)
+            exe.run(startup_prog)
             res = exe.run(fetch_list=[feat, out])
             np.testing.assert_allclose(res[1].shape, (2, 6, 17, 17))

diff --git a/test/legacy_test/test_cumsum_op.py b/test/legacy_test/test_cumsum_op.py
index 0a2df2aaa8158a..1f223ba05d0d56 100644
--- a/test/legacy_test/test_cumsum_op.py
+++ b/test/legacy_test/test_cumsum_op.py
@@ -534,8 +534,8 @@ def test_static_and_infer(self):
         paddle.enable_static()
         np_x = np.random.randn(9, 10, 11).astype('float32')
         main_prog = paddle.static.Program()
-        starup_prog = paddle.static.Program()
-        with paddle.static.program_guard(main_prog, starup_prog):
+        startup_prog = paddle.static.Program()
+        with paddle.static.program_guard(main_prog, startup_prog):
             # run static
             x = paddle.static.data(shape=np_x.shape, name='x', dtype=np_x.dtype)
             linear = paddle.nn.Linear(np_x.shape[-1], np_x.shape[-1])
@@ -548,7 +548,7 @@ def test_static_and_infer(self):
             sgd.minimize(paddle.mean(out))

             exe = paddle.static.Executor(self.place)
-            exe.run(starup_prog)
+            exe.run(startup_prog)
             static_out = exe.run(feed={'x': np_x}, fetch_list=[out])

             # run infer
diff --git a/test/legacy_test/test_eye_op.py b/test/legacy_test/test_eye_op.py
index 0b0afe07f968a5..a7a4d503a9ef40 100644
--- a/test/legacy_test/test_eye_op.py
+++ b/test/legacy_test/test_eye_op.py
@@ -150,8 +150,8 @@ def init_info(self):

     def test_static(self):
         main_prog = Program()
-        starup_prog = Program()
-        with program_guard(main_prog, starup_prog):
+        startup_prog = Program()
+        with program_guard(main_prog, startup_prog):
             fc = paddle.nn.Linear(4, 10)
             x = paddle.randn([2, 3, 4])
             x.stop_gradient = False
@@ -165,7 +165,7 @@ def test_static(self):
             self.assertTrue(self.var_prefix() in str(main_prog))

             exe = paddle.static.Executor()
-            exe.run(starup_prog)
+            exe.run(startup_prog)
             res = exe.run(fetch_list=[tmp, out])
             gt = np.eye(3, 10)
             np.testing.assert_allclose(res[0], gt)
diff --git a/test/legacy_test/test_multinomial_op.py b/test/legacy_test/test_multinomial_op.py
index e886876b27583a..2f512533543de7 100644
--- a/test/legacy_test/test_multinomial_op.py
+++ b/test/legacy_test/test_multinomial_op.py
@@ -457,8 +457,8 @@ def call_func(self, x):
     def test_static(self):
         paddle.enable_static()
         main_prog = Program()
-        starup_prog = Program()
-        with program_guard(main_prog, starup_prog):
+        startup_prog = Program()
+        with program_guard(main_prog, startup_prog):
             fc = paddle.nn.Linear(4, 10)
             x = paddle.randn([3, 4])
             x.stop_gradient = False
@@ -469,7 +469,7 @@ def test_static(self):
             self.assertTrue(self.var_prefix() in str(main_prog))

             exe = paddle.static.Executor()
-            exe.run(starup_prog)
+            exe.run(startup_prog)
             res = exe.run(fetch_list=[feat, out])
             paddle.static.save_inference_model(
                 self.save_path, [x], [feat, out], exe
diff --git a/test/legacy_test/test_pad_op.py b/test/legacy_test/test_pad_op.py
index b48271fb93b602..5912e57bef649c 100644
--- a/test/legacy_test/test_pad_op.py
+++ b/test/legacy_test/test_pad_op.py
@@ -154,8 +154,8 @@ def init_info(self):
     def test_static(self):
         with static_guard():
             main_prog = paddle.static.Program()
-            starup_prog = paddle.static.Program()
-            with paddle.static.program_guard(main_prog, starup_prog):
+            startup_prog = paddle.static.Program()
+            with paddle.static.program_guard(main_prog, startup_prog):
                 fc = paddle.nn.Linear(4, 10)
                 x = paddle.randn([2, 4])
                 x.stop_gradient = False
@@ -168,7 +168,7 @@ def test_static(self):
                 self.assertTrue(self.var_prefix() in str(main_prog))

                 exe = paddle.static.Executor()
-                exe.run(starup_prog)
+                exe.run(startup_prog)
                 res = exe.run(fetch_list=[feat, out])
                 gt = np.pad(
                     res[0], [1, 1], 'constant', constant_values=[1.0, 1.0]
@@ -191,8 +191,8 @@ def test_static(self):
     def test_pir_static(self):
         with paddle.pir_utils.IrGuard():
             main_prog = paddle.static.Program()
-            starup_prog = paddle.static.Program()
-            with paddle.static.program_guard(main_prog, starup_prog):
+            startup_prog = paddle.static.Program()
+            with paddle.static.program_guard(main_prog, startup_prog):
                 fc = paddle.nn.Linear(4, 10)
                 x = paddle.randn([2, 4])
                 x.stop_gradient = False
@@ -204,7 +204,7 @@ def test_pir_static(self):
                 sgd.minimize(paddle.mean(out))

                 exe = paddle.static.Executor()
-                exe.run(starup_prog)
+                exe.run(startup_prog)
                 res = exe.run(fetch_list=[feat, out])
                 gt = np.pad(
                     res[0], [1, 1], 'constant', constant_values=[1.0, 1.0]
@@ -240,8 +240,8 @@ def test_static(self):
         with static_guard():
             np_x = np.random.random((16, 16)).astype('float32')
             main_prog = paddle.static.Program()
-            starup_prog = paddle.static.Program()
-            with paddle.static.program_guard(main_prog, starup_prog):
+            startup_prog = paddle.static.Program()
+            with paddle.static.program_guard(main_prog, startup_prog):
                 x = paddle.assign(np_x).astype('float32')
                 pad_value = paddle.assign([0.0]).astype('float64')
                 y = paddle.nn.functional.pad(x, [0, 1, 2, 3], value=pad_value)
@@ -251,7 +251,7 @@ def test_static(self):
                 ).minimize(loss)

                 exe = paddle.static.Executor(paddle.CPUPlace())
-                exe.run(starup_prog)
+                exe.run(startup_prog)
                 res = exe.run(
                     main_prog, fetch_list=[y] + [g for p, g in params_grads]
                 )
diff --git a/test/legacy_test/test_squeeze2_op.py b/test/legacy_test/test_squeeze2_op.py
index 6b856d3c721eb7..e634e609b7f610 100755
--- a/test/legacy_test/test_squeeze2_op.py
+++ b/test/legacy_test/test_squeeze2_op.py
@@ -197,8 +197,8 @@ def init_info(self):

     def test_static(self):
         main_prog = Program()
-        starup_prog = Program()
-        with program_guard(main_prog, starup_prog):
+        startup_prog = Program()
+        with program_guard(main_prog, startup_prog):
             fc = paddle.nn.Linear(4, 10)
             x = paddle.randn([2, 3, 4])
             x.stop_gradient = False
@@ -214,7 +214,7 @@ def test_static(self):
             self.assertTrue("Var[" in str(main_prog))

             exe = paddle.static.Executor()
-            exe.run(starup_prog)
+            exe.run(startup_prog)
             res = exe.run(fetch_list=[feat, out, out2])
             self.assertEqual(res[0].shape, (1, 2, 1, 3, 10))
             self.assertEqual(res[1].shape, (2, 3, 10))
@@ -233,8 +233,8 @@ def init_info(self):

     def test_static(self):
         main_prog = Program()
-        starup_prog = Program()
-        with program_guard(main_prog, starup_prog):
+        startup_prog = Program()
+        with program_guard(main_prog, startup_prog):
             fc = paddle.nn.Linear(4, 10)
             x = paddle.randn([2, 3, 4])
             x.stop_gradient = False
@@ -253,7 +253,7 @@ def test_static(self):
             self.assertTrue("Vars[" in str(main_prog))

             exe = paddle.static.Executor()
-            exe.run(starup_prog)
+            exe.run(startup_prog)
             res = exe.run(fetch_list=[feat, out, out2])
             self.assertEqual(res[0].shape, (1, 2, 1, 3, 10))
             self.assertEqual(res[1].shape, (2, 3, 10))
diff --git a/test/legacy_test/test_sum_op.py b/test/legacy_test/test_sum_op.py
index ddc2f0ec71de31..a2ea104378bd1a 100644
--- a/test/legacy_test/test_sum_op.py
+++ b/test/legacy_test/test_sum_op.py
@@ -583,8 +583,8 @@ def test_dygraph(self):
     def test_static_and_infer(self):
         paddle.enable_static()
         main_prog = paddle.static.Program()
-        starup_prog = paddle.static.Program()
-        with paddle.static.program_guard(main_prog, starup_prog):
+        startup_prog = paddle.static.Program()
+        with paddle.static.program_guard(main_prog, startup_prog):
             # run static
             x = paddle.static.data(
                 shape=self.x.shape, name='x', dtype='float32'
             )
@@ -606,7 +606,7 @@ def test_static_and_infer(self):
             sgd = paddle.optimizer.SGD(learning_rate=0.0)
             sgd.minimize(paddle.mean(out))
             exe = paddle.static.Executor(self.place)
-            exe.run(starup_prog)
+            exe.run(startup_prog)
             static_out = exe.run(
                 feed={'x': self.x.numpy().astype('float32')}, fetch_list=[out]
             )
diff --git a/test/legacy_test/test_uniform_random_op.py b/test/legacy_test/test_uniform_random_op.py
index bdddc141f34c80..b7424d4cf2e302 100644
--- a/test/legacy_test/test_uniform_random_op.py
+++ b/test/legacy_test/test_uniform_random_op.py
@@ -740,8 +740,8 @@ def init_info(self):

     def test_static(self):
         main_prog = Program()
-        starup_prog = Program()
-        with program_guard(main_prog, starup_prog):
+        startup_prog = Program()
+        with program_guard(main_prog, startup_prog):
             fc = paddle.nn.Linear(4, 10)
             x = paddle.randn([2, 3, 4])
             x.stop_gradient = False
@@ -758,7 +758,7 @@ def test_static(self):
             self.assertTrue(self.var_prefix() in str(main_prog))

             exe = paddle.static.Executor()
-            exe.run(starup_prog)
+            exe.run(startup_prog)
             res = exe.run(fetch_list=[out])
             np.testing.assert_array_equal(res[0].shape, [2, 3, 10])

diff --git a/test/legacy_test/test_unpool_op.py b/test/legacy_test/test_unpool_op.py
index 1f8f72f3ae08d6..7b431b7fdc1be5 100644
--- a/test/legacy_test/test_unpool_op.py
+++ b/test/legacy_test/test_unpool_op.py
@@ -450,8 +450,8 @@ def init_info(self):
     def test_static(self):
         paddle.enable_static()
         main_prog = paddle.static.Program()
-        starup_prog = paddle.static.Program()
-        with paddle.static.program_guard(main_prog, starup_prog):
+        startup_prog = paddle.static.Program()
+        with paddle.static.program_guard(main_prog, startup_prog):
             fc = paddle.nn.Linear(6, 6)
             x = paddle.randn(self.shapes[0])
             x.stop_gradient = False
@@ -466,7 +466,7 @@ def test_static(self):
             self.assertTrue(self.var_prefix() in str(main_prog))

             exe = paddle.static.Executor()
-            exe.run(starup_prog)
+            exe.run(startup_prog)
             res = exe.run(fetch_list=[out])
             np.testing.assert_array_equal(res[0].shape, [1, 3, 7, 7])
             if not in_pir_mode():
diff --git a/test/xpu/test_pad_op_xpu.py b/test/xpu/test_pad_op_xpu.py
index 23adee084f594a..bc86a74bb236b6 100644
--- a/test/xpu/test_pad_op_xpu.py
+++ b/test/xpu/test_pad_op_xpu.py
@@ -126,8 +126,8 @@ def init_info(self):
     def test_static(self):
         with static_guard():
             main_prog = Program()
-            starup_prog = Program()
-            with program_guard(main_prog, starup_prog):
+            startup_prog = Program()
+            with program_guard(main_prog, startup_prog):
                 fc = paddle.nn.Linear(4, 10)
                 x = paddle.randn([2, 4])
                 x.stop_gradient = False
@@ -139,7 +139,7 @@ def test_static(self):
                 sgd.minimize(paddle.mean(out))
                 self.assertTrue(self.var_prefix() in str(main_prog))
                 exe = paddle.static.Executor(paddle.XPUPlace(0))
-                exe.run(starup_prog)
+                exe.run(startup_prog)
                 res = exe.run(fetch_list=[feat, out])
                 gt = np.pad(
                     res[0], [1, 1], 'constant', constant_values=[1.0, 1.0]
@@ -186,8 +186,8 @@ def test_static(self):
         with static_guard():
             np_x = np.random.random((16, 16)).astype('float32')
             main_prog = Program()
-            starup_prog = Program()
-            with program_guard(main_prog, starup_prog):
+            startup_prog = Program()
+            with program_guard(main_prog, startup_prog):
                 x = paddle.assign(np_x).astype('float32')
                 pad_value = paddle.assign([0.0]).astype('float64')
                 y = paddle.nn.functional.pad(