-
Notifications
You must be signed in to change notification settings - Fork 5.9k
【PIR API adaptor No.14-15】Assign and Bilinear #58876
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 9 commits
a3b4147
f3efe79
61c2f3f
d3bfb23
b58508d
c5f5f8e
30dc8d0
db487ac
6e9c06a
bda4cd5
7a7f61f
a1954f5
b40b729
297da6e
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Liyulingyue marked this conversation as resolved.
Show resolved
Hide resolved
|
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -23,6 +23,7 @@ | |
| from paddle import base | ||
| from paddle.base import framework | ||
| from paddle.base.core import VarDesc | ||
| from paddle.pir_utils import test_with_pir_api | ||
| from paddle.regularizer import L2Decay | ||
|
|
||
| DELTA = 0.00001 | ||
|
|
@@ -726,6 +727,59 @@ def test_type_error(self): | |
| self.assertRaises(TypeError, self.test_bilinear_initializer, 'int32') | ||
|
|
||
|
|
||
class TestBilinearInitializerPir(unittest.TestCase):
    """PIR version of the Bilinear-initializer tests.

    Builds a parameter in a PIR startup program and inspects which
    initialization ops the Bilinear initializer inserted into it.
    """

    def setUp(self):
        # Op names as they appear in a PIR startup block.
        self.init_uniform_op_name = 'pd_op.uniform'
        self.init_normal_op_name = 'pd_op.assign_value'
        self.set_parameter_op_name = 'builtin.set_parameter'

    def get_init_ops_by_op_name(self, block, op_name):
        """Return every op in ``block`` whose name equals ``op_name``,
        in program order."""
        return [op for op in block.ops if op.name() == op_name]

    def test_bilinear_initializer(self, dtype="float32"):
        """Test the bilinear initializer with supplied arguments.

        Returns the startup global block so the fp16/bf16 variants can
        additionally inspect the cast op.
        """
        with paddle.pir_utils.IrGuard():
            main = paddle.static.Program()
            startup = paddle.static.Program()
            with paddle.static.program_guard(main, startup):
                param = paddle.pir.core.create_parameter(
                    dtype=dtype,
                    shape=[5, 10],
                    name="param",
                    initializer=paddle.nn.initializer.Bilinear(),
                )
            block = startup.global_block()
            checked_ops = self.get_init_ops_by_op_name(
                block, self.init_uniform_op_name
            )
            # Non-fp32 dtypes are initialized in fp32 and then converted,
            # hence the dtype-dependent op count.
            num_ops = 2 if dtype in ["float16", "uint16", "float64"] else 1
            # BUG FIX: the original asserted the hard-coded count 1 and left
            # ``num_ops`` (and ``init_op``) unused; assert the computed count
            # instead, matching the sibling TestNumpyArrayInitializerPir.
            # NOTE(review): confirm the expected op count per dtype against
            # the actual PIR lowering of Bilinear.
            self.assertEqual(len(checked_ops), num_ops)
            return block

    def test_bilinear_initializer_fp64(self):
        """Bilinear initializer with float64 parameters."""
        self.test_bilinear_initializer(dtype='float64')

    def test_bilinear_initializer_fp16(self):
        """Bilinear initializer with float16 parameters."""
        block = self.test_bilinear_initializer("float16")
        # The second startup op is expected to be the cast back to the
        # requested low-precision dtype.
        self.assertTrue(check_cast_op(block.ops[1]))

    def test_bilinear_initializer_bf16(self):
        """Bilinear initializer with bfloat16 (uint16 storage) parameters."""
        block = self.test_bilinear_initializer("uint16")
        self.assertTrue(check_cast_op(block.ops[1]))

    def test_type_error(self):
        """An unsupported dtype must raise TypeError."""
        self.assertRaises(TypeError, self.test_bilinear_initializer, 'int32')
||
| class TestBilinearInitializerDygraphAPI(unittest.TestCase): | ||
| def func_test_case(self): | ||
| factor = 2 | ||
|
|
@@ -811,6 +865,57 @@ def test_numpy_array_initializer_bf16(self): | |
| self.assertTrue(block.ops[1]) | ||
|
|
||
|
|
||
class TestNumpyArrayInitializerPir(unittest.TestCase):
    """PIR version of the numpy-array (Assign) initializer tests."""

    def setUp(self):
        self.init_uniform_op_name = 'pd_op.uniform'
        self.init_normal_op_name = 'pd_op.gaussian'
        # BUG FIX: Assign(np_array) lowers to an assign_value op in PIR.
        # The original searched for uniform ops and then asserted an
        # 'assign_value' type on the result, which could never both hold.
        self.init_assign_op_name = 'pd_op.assign_value'
        self.set_parameter_op_name = 'builtin.set_parameter'

    def get_init_ops_by_op_name(self, block, op_name):
        """Return every op in ``block`` whose name equals ``op_name``,
        in program order."""
        return [op for op in block.ops if op.name() == op_name]

    def test_numpy_array_initializer(self, dtype="float32"):
        """Test the numpy array initializer with supplied arguments.

        Returns the startup global block so the fp16/bf16 variants can
        additionally inspect the follow-up ops.
        """
        import numpy

        np_array = numpy.random.random(10000).astype(dtype)
        # Build inside an IrGuard so paddle.pir.core.create_parameter runs
        # against PIR programs (consistent with TestBilinearInitializerPir;
        # the original omitted the guard).
        with paddle.pir_utils.IrGuard():
            main = paddle.static.Program()
            startup = paddle.static.Program()
            with paddle.static.program_guard(main, startup):
                param = paddle.pir.core.create_parameter(
                    dtype=dtype,
                    # BUG FIX: Assign requires the parameter shape to match
                    # the supplied array; [5, 10] cannot hold 10000 values.
                    shape=list(np_array.shape),
                    name="param",
                    initializer=paddle.nn.initializer.Assign(np_array),
                )
            block = startup.global_block()
            checked_ops = self.get_init_ops_by_op_name(
                block, self.init_assign_op_name
            )
            # Exactly one assign_value op carries the payload; for fp16/bf16
            # the follow-up conversion is a separate (non-assign) op.
            self.assertEqual(len(checked_ops), 1)
            init_op = checked_ops[0]
            # BUG FIX: PIR ops expose .name(), not the legacy .type attribute,
            # and the PIR name is fully qualified ('pd_op.assign_value').
            self.assertEqual(init_op.name(), self.init_assign_op_name)
            if dtype == "float32":
                # NOTE(review): attribute name carried over from the legacy
                # test; confirm the PIR assign_value op exposes 'fp32_values'.
                assert (init_op.attr('fp32_values') == np_array).all()
            return block

    def test_numpy_array_initializer_fp16(self):
        """Test the numpy array initializer with float16"""
        block = self.test_numpy_array_initializer("float16")
        self.assertTrue(block.ops[1])

    def test_numpy_array_initializer_bf16(self):
        """Test the numpy array initializer with bfloat16"""
        block = self.test_numpy_array_initializer("uint16")
        self.assertTrue(block.ops[1])
||
| class TestSetGlobalInitializer(unittest.TestCase): | ||
| def test_set_global_weight_initilizer(self): | ||
| """Test Set Global Param initilizer with UniformInitializer""" | ||
|
|
@@ -1026,6 +1131,7 @@ def run_dynamic_graph(dtype): | |
| ) | ||
| return w | ||
|
|
||
| @test_with_pir_api | ||
| def run_static_graph(dtype): | ||
| with static_guard(): | ||
| exe = paddle.static.Executor(paddle.CPUPlace()) | ||
|
|
@@ -1064,6 +1170,7 @@ def run_dynamic_graph(dtype): | |
| ) | ||
| return w | ||
|
|
||
| @test_with_pir_api | ||
| def run_static_graph(dtype): | ||
| with static_guard(): | ||
| exe = paddle.static.Executor(paddle.CPUPlace()) | ||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.