
Commit 85e4f45

[Paddle Inference]Add Roi_align op TRT converter unittest (#35549)
* add_roi_align
* add_roi_align
* add_roi_align_teller
* add_roi_align
* add-roi_align
1 parent 5928856 commit 85e4f45

2 files changed: 223 additions and 0 deletions

paddle/fluid/inference/tensorrt/op_teller.cc

Lines changed: 30 additions & 0 deletions
@@ -793,6 +793,36 @@ bool OpTeller::Tell(const framework::ir::Node* node, bool use_no_calib_int8,
                    "the roi_align will change the batch size.";
         return false;
       }
+      std::vector<std::string> attrs{"pooled_height", "pooled_width",
+                                     "spatial_scale", "sampling_ratio"};
+      for (auto const attr : attrs) {
+        if (!desc.HasAttr(attr)) return false;
+      }
+
+      const auto pooled_height =
+          BOOST_GET_CONST(int, desc.GetAttr("pooled_height"));
+      if (pooled_height <= 0) return false;
+
+      const auto pooled_width =
+          BOOST_GET_CONST(int, desc.GetAttr("pooled_width"));
+      if (pooled_width <= 0) return false;
+
+      const auto spatial_scale =
+          BOOST_GET_CONST(float, desc.GetAttr("spatial_scale"));
+      if (spatial_scale <= 0.f) return false;
+
+      const auto sampling_ratio =
+          BOOST_GET_CONST(int, desc.GetAttr("sampling_ratio"));
+      const auto aligned = BOOST_GET_CONST(bool, desc.GetAttr("aligned"));
+
+      if (sampling_ratio == -1 && aligned == true) return false;
+
+      auto roi_align_inputs = desc.Inputs();
+      if (roi_align_inputs.find("RoisNum") != roi_align_inputs.end()) {
+        if (desc.Input("RoisNum").size() >= 1) {
+          return false;
+        }
+      }
     }
 
     if (op_type == "shuffle_channel") {
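
Taken together, the teller additions above only hand roi_align off to the TensorRT converter when all four pooling attributes are declared with usable values, the unsupported sampling_ratio == -1 / aligned == true combination is avoided, and no RoisNum input is wired to the op. The Python sketch below restates that acceptance rule for quick reference; the helper name and argument layout are illustrative only and are not part of this commit or of the Paddle API.

def roi_align_convertible(attrs, wired_inputs):
    # Mirrors the op_teller.cc checks above (illustrative, not Paddle code).
    required = ["pooled_height", "pooled_width", "spatial_scale", "sampling_ratio"]
    if any(name not in attrs for name in required):
        return False
    if attrs["pooled_height"] <= 0 or attrs["pooled_width"] <= 0:
        return False
    if attrs["spatial_scale"] <= 0.0:
        return False
    if attrs["sampling_ratio"] == -1 and attrs.get("aligned", False):
        return False
    if "RoisNum" in wired_inputs:  # a connected RoisNum input is rejected
        return False
    return True

For example, attrs = {"pooled_height": 7, "pooled_width": 7, "spatial_scale": 0.5, "sampling_ratio": 4, "aligned": True} with wired_inputs = ["X", "ROIs"] passes, while the same attrs with sampling_ratio = -1 do not.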
Lines changed: 193 additions & 0 deletions
@@ -0,0 +1,193 @@
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from trt_layer_auto_scan_test import TrtLayerAutoScanTest, SkipReasons
from program_config import TensorConfig, ProgramConfig
import numpy as np
import paddle.inference as paddle_infer
from functools import partial
from typing import Optional, List, Callable, Dict, Any, Set
import unittest


class TrtConvertRoiAlignTest(TrtLayerAutoScanTest):
    def is_program_valid(self, program_config: ProgramConfig) -> bool:
        return True

    def sample_program_configs(self):
        def generate_input1(attrs: List[Dict[str, Any]], batch):
            return np.ones([batch, 256, 32, 32]).astype(np.float32)

        def generate_input2(attrs: List[Dict[str, Any]], batch):
            return np.random.random([3, 4]).astype(np.float32)

        def generate_input3(attrs: List[Dict[str, Any]], batch):
            return np.random.random([batch]).astype(np.int32)

        for num_input in [0, 1]:
            for batch in [1, 2, 4]:
                for spatial_scale in [0.5, 0.6]:
                    for pooled_height in [7, 1]:
                        for pooled_width in [7, 1]:
                            for sampling_ratio in [-1, 4, 8]:
                                for aligned in [True, False]:
                                    self.num_input = num_input
                                    if num_input == 1:
                                        batch = 1
                                    dics = [{
                                        "spatial_scale": spatial_scale,
                                        "pooled_height": pooled_height,
                                        "pooled_width": pooled_width,
                                        "sampling_ratio": sampling_ratio,
                                        "aligned": aligned
                                    }, {}]
                                    dics_input = [{
                                        "X": ["roi_align_input"],
                                        "ROIs": ["ROIs"],
                                        "RoisNum": ["RoisNum"]
                                    }, {
                                        "X": ["roi_align_input"],
                                        "ROIs": ["ROIs"]
                                    }]
                                    program_input = [{
                                        "roi_align_input": TensorConfig(
                                            data_gen=partial(generate_input1,
                                                             dics, batch)),
                                        "ROIs": TensorConfig(data_gen=partial(
                                            generate_input2, dics, batch)),
                                        "RoisNum": TensorConfig(
                                            data_gen=partial(generate_input3,
                                                             dics, batch))
                                    }, {
                                        "roi_align_input": TensorConfig(
                                            data_gen=partial(generate_input1,
                                                             dics, batch)),
                                        "ROIs": TensorConfig(
                                            data_gen=partial(generate_input2,
                                                             dics, batch),
                                            lod=[[32, 3]])
                                    }]
                                    ops_config = [{
                                        "op_type": "roi_align",
                                        "op_inputs": dics_input[num_input],
                                        "op_outputs": {
                                            "Out": ["roi_align_out"]
                                        },
                                        "op_attrs": dics[0]
                                    }]
                                    ops = self.generate_op_config(ops_config)
                                    program_config = ProgramConfig(
                                        ops=ops,
                                        weights={},
                                        inputs=program_input[num_input],
                                        outputs=["roi_align_out"])

                                    yield program_config

    def sample_predictor_configs(
            self, program_config) -> (paddle_infer.Config, List[int], float):
        def generate_dynamic_shape(attrs):
            if self.num_input == 0:
                self.dynamic_shape.min_input_shape = {
                    "roi_align_input": [1, 256, 32, 32],
                    "ROIs": [3, 4],
                    "RoisNum": [1]
                }
                self.dynamic_shape.max_input_shape = {
                    "roi_align_input": [1, 256, 64, 64],
                    "ROIs": [3, 4],
                    "RoisNum": [1]
                }
                self.dynamic_shape.opt_input_shape = {
                    "roi_align_input": [1, 256, 64, 64],
                    "ROIs": [3, 4],
                    "RoisNum": [1]
                }
            elif self.num_input == 1:
                self.dynamic_shape.min_input_shape = {
                    "roi_align_input": [1, 256, 32, 32],
                    "ROIs": [3, 4]
                }
                self.dynamic_shape.max_input_shape = {
                    "roi_align_input": [1, 256, 64, 64],
                    "ROIs": [3, 4]
                }
                self.dynamic_shape.opt_input_shape = {
                    "roi_align_input": [1, 256, 64, 64],
                    "ROIs": [3, 4]
                }

        def clear_dynamic_shape():
            self.dynamic_shape.min_input_shape = {}
            self.dynamic_shape.max_input_shape = {}
            self.dynamic_shape.opt_input_shape = {}

        def generate_trt_nodes_num(attrs, dynamic_shape):
            if self.num_input == 0:
                if dynamic_shape == True:
                    return 0, 5
            elif self.num_input == 1:
                if dynamic_shape == True:
                    return 1, 3
                else:
                    return 0, 4

        attrs = [
            program_config.ops[i].attrs
            for i in range(len(program_config.ops))
        ]

        # for static_shape
        clear_dynamic_shape()
        self.trt_param.precision = paddle_infer.PrecisionType.Float32
        yield self.create_inference_config(), generate_trt_nodes_num(
            attrs, False), 1e-5
        self.trt_param.precision = paddle_infer.PrecisionType.Half
        yield self.create_inference_config(), generate_trt_nodes_num(
            attrs, False), 1e-5

        # for dynamic_shape
        generate_dynamic_shape(attrs)
        self.trt_param.precision = paddle_infer.PrecisionType.Float32
        yield self.create_inference_config(), generate_trt_nodes_num(
            attrs, True), 1e-5
        self.trt_param.precision = paddle_infer.PrecisionType.Half
        yield self.create_inference_config(), generate_trt_nodes_num(
            attrs, True), 1e-5

    def add_skip_trt_case(self):
        def teller1(program_config, predictor_config):
            if len(program_config.inputs) == 3:
                return True
            return False

        self.add_skip_case(teller1, SkipReasons.TRT_NOT_SUPPORT,
                           "INPUT RoisNum NOT SUPPORTED")

        def teller2(program_config, predictor_config):
            if (program_config.ops[0].attrs['sampling_ratio'] == -1 and
                    program_config.ops[0].attrs['aligned'] == True):
                return True
            return False

        self.add_skip_case(
            teller2, SkipReasons.TRT_NOT_SUPPORT,
            "SAMPLING_RATIO EQUAL TO -1 WHEN ALIGNED IS TRUE IS NOT SUPPORTED")

    def test(self):
        self.add_skip_trt_case()
        self.run_test()


if __name__ == "__main__":
    unittest.main()
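
For scale, the nested loops in sample_program_configs sweep every combination of the listed values, and the two skip tellers then filter out the cases the converter rejects (a RoisNum input, or sampling_ratio == -1 together with aligned == True), mirroring the op_teller.cc checks in the first file. A standalone count of that sweep, not part of the committed test, just to make the coverage concrete:

from itertools import product

grid = list(
    product(
        [0, 1],          # num_input: 0 feeds X/ROIs/RoisNum, 1 feeds X/ROIs
        [1, 2, 4],       # batch
        [0.5, 0.6],      # spatial_scale
        [7, 1],          # pooled_height
        [7, 1],          # pooled_width
        [-1, 4, 8],      # sampling_ratio
        [True, False]))  # aligned

print(len(grid))                                    # 288 generated programs
print(sum(1 for g in grid if g[0] == 0))            # 144 hit teller1 (RoisNum input)
print(sum(1 for g in grid if g[5] == -1 and g[6]))  # 48 hit teller2 (sampling_ratio -1 with aligned)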
