Merged

Changes from 5 commits
1 change: 1 addition & 0 deletions paddle/fluid/pir/dialect/op_generator/ops_api_gen.py
@@ -159,6 +159,7 @@
'max_pool2d_v2',
'partial_sum',
'random_routing',
'rank_attention',
'recv_v2',
'rnn_',
'row_conv',
10 changes: 10 additions & 0 deletions paddle/fluid/pir/dialect/operator/ir/ops.yaml
@@ -1342,6 +1342,16 @@
data_type : dtype
backend : place

- op : rank_attention
args : (Tensor x, Tensor rank_offset, Tensor rank_param, int max_rank = 3, int max_size = 0)
output : Tensor(input_help), Tensor(out), Tensor(ins_rank)
infer_meta :
func : RankAttentionInferMeta
kernel :
func : rank_attention
data_type : x
backward : rank_attention_grad

- op : read_file
args : (str filename = "", DataType dtype=DataType::UINT8, Place place=CPUPlace())
output : Tensor(out)
10 changes: 10 additions & 0 deletions paddle/fluid/pir/dialect/operator/ir/ops_backward.yaml
@@ -636,6 +636,16 @@
func : prod_grad
composite: prod_grad(x, out, out_grad, dims, keep_dim, reduce_all, x_grad)

- backward_op : rank_attention_grad
forward : rank_attention (Tensor x, Tensor rank_offset, Tensor rank_param, int max_rank = 3, int max_size = 0) -> Tensor(input_help), Tensor(out), Tensor(ins_rank)
args : (Tensor x, Tensor rank_offset, Tensor rank_param, Tensor input_help, Tensor ins_rank, Tensor out_grad, int max_rank = 3, int max_size = 0)
output : Tensor(rank_param_grad)
infer_meta :
func : RankAttentionGradInferMeta
kernel :
func : rank_attention_grad
data_type : out_grad

- backward_op : repeat_interleave_grad
forward : repeat_interleave(Tensor x, int repeats, int axis) -> Tensor(out)
args : (Tensor x, Tensor out_grad, int repeats, int axis)
2 changes: 2 additions & 0 deletions paddle/fluid/pir/dialect/operator/utils/utils.cc
@@ -67,6 +67,8 @@ const std::unordered_set<std::string> LegacyOpList = {
SparseMomentumOp::name(),
GetTensorFromSelectedRowsOp::name(),
TdmSamplerOp::name(),
RankAttentionOp::name(),
RankAttentionGradOp::name(),
RowConvOp::name(),
RowConvGradOp::name(),
SoftReluOp::name(),
9 changes: 9 additions & 0 deletions paddle/phi/api/yaml/op_compat.yaml
@@ -3866,6 +3866,15 @@
outputs:
out : Out

- op: rank_attention
backward: rank_attention_grad
inputs:
{x : X, rank_offset : RankOffset, rank_param : RankParam}
outputs:
{input_help : InputHelp, out : Out, ins_rank: InsRank}
attrs:
{max_rank : MaxRank, max_size : MaxSize}

- op: read_from_array
inputs:
array : X
12 changes: 12 additions & 0 deletions paddle/phi/infermeta/backward.cc
@@ -1019,6 +1019,18 @@ void PsroiPoolGradInferMeta(const MetaTensor& x,
dx->share_meta(x);
}

void RankAttentionGradInferMeta(const MetaTensor& x,
const MetaTensor& rank_offset,
const MetaTensor& rank_param,
const MetaTensor& input_help,
const MetaTensor& ins_rank,
const MetaTensor& out_grad,
int max_rank,
int max_size,
MetaTensor* rank_param_grad) {
rank_param_grad->set_dims(rank_param.dims());
Contributor: The dtype of rank_param_grad also needs to be set here.
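A minimal sketch of that suggestion (taking the dtype from rank_param is an assumption, by analogy with RealAndImagGradInferMeta below, which sets both dims and dtype):

    // Assumed fix: also propagate the dtype so the grad meta tensor is fully
    // described; rank_param is used as the source dtype here.
    rank_param_grad->set_dtype(rank_param.dtype());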

}

void RealAndImagGradInferMeta(const MetaTensor& out_grad, MetaTensor* dx) {
dx->set_dims(out_grad.dims());
dx->set_dtype(dtype::ToComplex(out_grad.dtype()));
10 changes: 10 additions & 0 deletions paddle/phi/infermeta/backward.h
@@ -419,6 +419,16 @@ void PsroiPoolGradInferMeta(const MetaTensor& x,
float spatial_scale,
MetaTensor* dx);

void RankAttentionGradInferMeta(const MetaTensor& x,
const MetaTensor& rank_offset,
const MetaTensor& rank_param,
const MetaTensor& input_help,
const MetaTensor& ins_rank,
const MetaTensor& out_grad,
int max_rank,
int max_size,
MetaTensor* rank_param_grad);

void RealAndImagGradInferMeta(const MetaTensor& out_grad, MetaTensor* dx);

void ReshapeDoubleGradInferMeta(const MetaTensor& out_grad,
39 changes: 39 additions & 0 deletions paddle/phi/infermeta/ternary.cc
@@ -1093,6 +1093,45 @@ void RandomRoutingInferMeta(const MetaTensor& prob,
out->share_lod(topk_idx);
}

void RankAttentionInferMeta(const MetaTensor& x,
const MetaTensor& rank_offset,
const MetaTensor& rank_param,
int max_rank,
int max_size,
MetaTensor* input_help,
MetaTensor* out,
MetaTensor* ins_rank) {
auto x_dims = x.dims();
auto ins_num = x_dims[0];
auto param_dims = rank_param.dims();
auto para_col = param_dims[1];
auto rank_offset_dims = rank_offset.dims();
auto x_fea_dim = x_dims[1];
auto block_matrix_row = max_rank * x_fea_dim;

PADDLE_ENFORCE_EQ(
(rank_offset_dims[1] - 1) / 2,
max_rank,
phi::errors::InvalidArgument("Input(RankOffset) has wrong columns, "
"except columns to be %d, but got %d",
max_rank,
(rank_offset_dims[1] - 1) / 2));

std::vector<int64_t> out_dims({ins_num, para_col});
out->set_dims(common::make_ddim(out_dims));
out->set_dtype(x.dtype());

std::vector<int64_t> input_help_dims({ins_num, block_matrix_row});
input_help->set_dims(common::make_ddim(input_help_dims));
input_help->set_dtype(x.dtype());

std::vector<int64_t> ins_rank_dims({ins_num, 1});
ins_rank->set_dims(common::make_ddim(ins_rank_dims));
ins_rank->set_dtype(x.dtype());

out->share_lod(x);
}

void RoiAlignInferMeta(const MetaTensor& x,
const MetaTensor& boxes,
const MetaTensor& boxes_num,
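For reference, a standalone C++ sketch of the shape arithmetic implemented by RankAttentionInferMeta above; the concrete sizes are illustrative only and not taken from the patch:

    #include <cassert>
    #include <cstdio>

    int main() {
      // x is [ins_num, x_fea_dim], rank_param is [*, para_col], and
      // rank_offset must have 2 * max_rank + 1 columns (enforced above).
      const int ins_num = 4, x_fea_dim = 16, para_col = 8, max_rank = 3;
      const int rank_offset_cols = 2 * max_rank + 1;
      assert((rank_offset_cols - 1) / 2 == max_rank);
      const int block_matrix_row = max_rank * x_fea_dim;
      std::printf("out:        [%d, %d]\n", ins_num, para_col);         // [4, 8]
      std::printf("input_help: [%d, %d]\n", ins_num, block_matrix_row); // [4, 48]
      std::printf("ins_rank:   [%d, %d]\n", ins_num, 1);                // [4, 1]
      return 0;
    }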
9 changes: 9 additions & 0 deletions paddle/phi/infermeta/ternary.h
@@ -205,6 +205,15 @@ void RandomRoutingInferMeta(const MetaTensor& prob,
const MetaTensor& topk_idx,
MetaTensor* out);

void RankAttentionInferMeta(const MetaTensor& x,
const MetaTensor& rank_offset,
const MetaTensor& rank_param,
int max_rank,
int max_size,
MetaTensor* input_help,
MetaTensor* out,
MetaTensor* ins_rank);

void RoiAlignInferMeta(const MetaTensor& x,
const MetaTensor& boxes,
const MetaTensor& boxes_num,
1 change: 1 addition & 0 deletions test/white_list/pir_op_test_white_list
@@ -219,6 +219,7 @@ test_qr_op
test_randint_op
test_randperm_op
test_range
test_rank_attention_op
test_reduce_op
test_reduce_op_static_build
test_repeat_interleave_op