From feb60e7bcbd9e7e545e880b7e0f5dcfc0d3607d7 Mon Sep 17 00:00:00 2001
From: cyber-pioneer
Date: Mon, 15 Apr 2024 12:58:36 +0000
Subject: [PATCH 1/2] bind tile

---
 .../decomp_interface_gen_op_list.py          |  2 -
 paddle/fluid/primitive/composite/composite.h | 86 -------------------
 2 files changed, 88 deletions(-)

diff --git a/paddle/fluid/pir/dialect/op_generator/decomp_interface_gen_op_list.py b/paddle/fluid/pir/dialect/op_generator/decomp_interface_gen_op_list.py
index 80b198c8d533a3..d616d9b9c76d36 100644
--- a/paddle/fluid/pir/dialect/op_generator/decomp_interface_gen_op_list.py
+++ b/paddle/fluid/pir/dialect/op_generator/decomp_interface_gen_op_list.py
@@ -50,7 +50,6 @@
     "squeeze",
     "stack",
     "unsqueeze",
-    "tile",
 ]
 
 # come into effect in generated file op_decomp.cc
@@ -84,7 +83,6 @@
     "squeeze",
     "stack",
     "unsqueeze",
-    "tile",
 ]
 
 
diff --git a/paddle/fluid/primitive/composite/composite.h b/paddle/fluid/primitive/composite/composite.h
index ca602dfb2ea893..c2203320ddf825 100644
--- a/paddle/fluid/primitive/composite/composite.h
+++ b/paddle/fluid/primitive/composite/composite.h
@@ -990,92 +990,6 @@ std::tuple<Tensor, Tensor, Tensor> group_norm_decomp(
   return std::make_tuple(out, mean_out, var_out);
 }
 
-template <typename T>
-Tensor tile_decomp(const Tensor& x, const IntArray& repeat_times) {
-  // x.shape = [3,4] repeat_time=(a,b,c)
-  // shape1 = [1,3,4]
-  // shape2 = [1,1,1,3,1,4]
-  // shape3 = [a,1,b,3,c,4]
-  // shape4 = shape1 -> [a, b*3, c*4]
-  // t1 = x.reshape(shape1)
-  // t2 = t1.reshape(shape2)
-  // t3 = t2.expand(shape3)
-  // res = t3.reshape(t3)
-  std::vector<int64_t> repeat_times_ = repeat_times.GetData();
-  std::vector<int64_t> shape1 = x.shape();
-  auto diff = int64_t(repeat_times_.size()) - int64_t(shape1.size());
-  Tensor t1;
-  if (has_dynamic_shape(shape1)) {
-    size_t repeat_time_length = repeat_times_.size();
-    std::vector<int64_t> unsqueeze_idx2;
-    if (diff > 0) {
-      std::vector<int64_t> unsqueeze_idx1(diff);
-      std::iota(unsqueeze_idx1.begin(), unsqueeze_idx1.end(), 0);
-      t1 = unsqueeze<T>(x, unsqueeze_idx1);
-    } else {
-      t1 = x;
-    }
-    auto length2 = t1.dims().size();
-    for (size_t i = 0; i < repeat_times_.size(); i++) {
-      unsqueeze_idx2.push_back(length2 - repeat_times_.size() + i * 2);
-    }
-
-    Tensor t2 = unsqueeze<T>(t1, unsqueeze_idx2);
-    std::vector<int64_t> ref_shape(t2.dims().size(), 1);
-    for (size_t i = 0; i < unsqueeze_idx2.size(); i++) {
-      ref_shape[unsqueeze_idx2[i]] = repeat_times_[i];
-    }
-    Tensor ref_t = full<T>(ref_shape, 1.0, t2.dtype());
-    Tensor t3 = t2 * ref_t;
-    Tensor origin_shape_t = shape<T>(t1);
-    std::vector<Tensor> res_s;
-    for (int64_t i = int64_t(length2) - 1; i >= 0; i--) {
-      auto relative_idx =
-          int64_t(repeat_time_length) - 1 - int64_t(length2 - i - 1);
-
-      if (relative_idx >= 0) {
-        res_s.insert(
-            res_s.begin(),
-            get_slice<T>(origin_shape_t, i) * repeat_times_[relative_idx]);
-      } else {
-        res_s.insert(res_s.begin(), get_slice<T>(origin_shape_t, i));
-      }
-    }
-    Tensor s4 = concat<T>(res_s, 0);
-    return backend::reshape_with_tensor<T>(t3, s4);
-
-  } else {
-    if (diff > 0) {
-      for (int64_t i = 0; i < diff; i++) {
-        shape1.insert(shape1.begin(), 1);
-      }
-    }
-
-    auto length = int64_t(shape1.size());
-    std::vector<int64_t> shape2 = shape1;
-    std::vector<int64_t> shape3 = shape1;
-    std::vector<int64_t> final_shape = shape1;
-    auto r_length = repeat_times_.size();
-    for (size_t j = 0; j < repeat_times_.size(); j++) {
-      int64_t i = int64_t(j);
-
-      shape2.insert(shape2.begin() + (length - 1 - i), 1);
-      shape3.insert(shape3.begin() + (length - 1 - i),
-                    repeat_times_[r_length - i - 1]);
-
-      final_shape[length - i - 1] =
-          final_shape[length - i - 1] * repeat_times_[r_length - i - 1];
-    }
-
-    t1 = reshape<T>(x, shape1);
-
-    auto t2 = reshape<T>(t1, shape2);
-    auto t3 = t2.expand(shape3);
-    auto res = reshape<T>(t3, final_shape);
-    return res;
-  }
-}
-
 template <typename T>
 Tensor square_decomp(const Tensor& x) {
   auto org_dtype = x.dtype();

From cc7e51c82d83265f7ed5c4bc0cd4bd1ee8c1ca92 Mon Sep 17 00:00:00 2001
From: cyber-pioneer
Date: Tue, 16 Apr 2024 02:07:20 +0000
Subject: [PATCH 2/2] fix test case

---
 .../test_prim_sub_graph_dynamic_shape.py | 36 -------------------
 1 file changed, 36 deletions(-)

diff --git a/test/prim/pir_prim/test_prim_sub_graph_dynamic_shape.py b/test/prim/pir_prim/test_prim_sub_graph_dynamic_shape.py
index 846c29d657fa1c..c204d3f949d83c 100644
--- a/test/prim/pir_prim/test_prim_sub_graph_dynamic_shape.py
+++ b/test/prim/pir_prim/test_prim_sub_graph_dynamic_shape.py
@@ -70,16 +70,6 @@ def stack_net(x):
     return paddle.stack([x, y], axis=0)
 
 
-def tile_net1(x):
-    y = paddle.tile(x, repeat_times=[2, 5])
-    return y
-
-
-def tile_net2(x):
-    y = paddle.tile(x, repeat_times=[3, 2, 5])
-    return y
-
-
 def index_sample_net(x, index):
     return paddle.index_sample(x, index)
 
@@ -251,32 +241,6 @@ def setUp(self):
         self.tol = 1e-6
 
 
-class TestPrimTile(TestPrimBase):
-    def setUp(self):
-        np.random.seed(2023)
-        self.dtype = "float32"
-        self.x_shape = [1, 300, 4096]
-        self.init_x_shape = [None, None, 4096]
-        self.x = np.random.random(self.x_shape).astype(self.dtype)
-        self.net = tile_net1
-        self.necessary_ops = "pd_op.tile"
-        self.enable_cinn = False
-        self.tol = 1e-6
-
-
-class TestPrimTile2(TestPrimBase):
-    def setUp(self):
-        np.random.seed(2023)
-        self.dtype = "float32"
-        self.x_shape = [300, 4096]
-        self.init_x_shape = [None, 4096]
-        self.x = np.random.random(self.x_shape).astype(self.dtype)
-        self.net = tile_net2
-        self.necessary_ops = "pd_op.tile"
-        self.enable_cinn = False
-        self.tol = 1e-6
-
-
 class TestPrimTwo(unittest.TestCase):
     def setUp(self):
         np.random.seed(2023)
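
Note on the removed decomposition: the deleted tile_decomp rewrites tile as
reshape -> expand -> reshape, per the comment block at the top of the removed
function: left-pad the input rank, insert a size-1 axis before each dimension,
broadcast each inserted axis up to its repeat count, then collapse each
(repeat, dim) pair. Below is a minimal NumPy sketch of the static-shape branch;
the helper name tile_via_reshape_expand is illustrative only, not a Paddle API.

import numpy as np


def tile_via_reshape_expand(x, repeat_times):
    # Illustrative sketch of the static-shape branch of the deleted
    # tile_decomp; not Paddle code.
    x = np.asarray(x)
    diff = len(repeat_times) - x.ndim
    # shape1: left-pad ranks so input and repeats align, e.g. [3,4] -> [1,3,4]
    shape1 = [1] * max(diff, 0) + list(x.shape)
    reps = [1] * max(-diff, 0) + list(repeat_times)
    t1 = x.reshape(shape1)
    # shape2 inserts a size-1 axis before every dim: [1,3,4] -> [1,1,1,3,1,4];
    # shape3 broadcasts each inserted axis to its repeat: -> [a,1,b,3,c,4]
    shape2, shape3 = [], []
    for r, d in zip(reps, shape1):
        shape2 += [1, d]
        shape3 += [r, d]
    t3 = np.broadcast_to(t1.reshape(shape2), shape3)
    # Collapse each (repeat, dim) pair: [a,1,b,3,c,4] -> [a*1, b*3, c*4]
    final_shape = [r * d for r, d in zip(reps, shape1)]
    return t3.reshape(final_shape)


x = np.arange(12).reshape(3, 4)
assert np.array_equal(tile_via_reshape_expand(x, [3, 2, 5]),
                      np.tile(x, (3, 2, 5)))

The dynamic-shape branch in the diff applies the same interleave-and-broadcast
trick, but since the static dims are unknown it assembles the output shape at
runtime from shape/get_slice/concat and reshapes with that shape tensor.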