Skip to content

Commit 4b41dd4

Browse files
authored
Support PaddleDetection 2.0 (#252)
* support ppyolo, ppyolo-tiny * Support PaddleDetection 2.0 models * fix name for greater than * fix output_dim_idx constraint
1 parent 90be4c3 commit 4b41dd4

File tree

9 files changed

+397
-102
lines changed

9 files changed

+397
-102
lines changed

paddle2onnx/op_mapper/__init__.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,4 +36,3 @@
3636
from .custom_paddle_op import collect_fpn_proposals
3737
from .custom_paddle_op import distribute_fpn_proposals
3838
from .custom_paddle_op import box_clip
39-
from .custom_paddle_op import fill_constant_batch_size_like

paddle2onnx/op_mapper/activation.py

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -52,6 +52,25 @@ def opset_1(cls, graph, node, **kw):
5252
alpha=node.attr('alpha'))
5353

5454

55+
@op_mapper('softplus')
56+
class Softplus():
57+
support_opset_verison_range = (1, 12)
58+
59+
@classmethod
60+
def opset_1(cls, graph, node, **kw):
61+
beta = node.attr('beta')
62+
threshold = node.attr('threshold')
63+
if np.isclose(beta, 1.0, 1e-06, 1e-06) and \
64+
np.isclose(threshold, 20.0, 1e-06, 1e-06):
65+
onnx_node = graph.make_node(
66+
'Softplus',
67+
inputs=[node.input('X')[0]],
68+
outputs=node.output('Out'))
69+
else:
70+
raise Exception("[ERROR] Operator softplus " \
71+
"only supported while beta==1.0 and threshold==20.0")
72+
73+
5574
@op_mapper('prelu')
5675
class PRelu():
5776
support_opset_verison_range = (9, 13)

paddle2onnx/op_mapper/custom_paddle_op/fill_constant_batch_size_like.py

Lines changed: 0 additions & 44 deletions
This file was deleted.

paddle2onnx/op_mapper/detection/multiclass_nms.py

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,7 @@ def nms(cls, graph, node, scores, bboxes, class_id=None):
7777
iou_threshold = 0.5
7878
logging.warning(
7979
"Operator:{} is not supported completely, so we use traditional"
80-
" NMS (iou_theshold={}) to instead it, which introduce some difference.".
80+
" NMS (nms_theshold={}) to instead it, which introduce some difference.".
8181
format(node.type, str(iou_threshold)))
8282
else:
8383
iou_threshold = node.attr('nms_threshold')
@@ -162,14 +162,12 @@ def keep_top_k(cls,
162162
if background == 0:
163163
nonzero = graph.make_node('NonZero', inputs=[squeezed_class_id])
164164
else:
165-
thresh = graph.make_node(
166-
'Constant', inputs=[], dtype=dtypes.ONNX.INT32, value=[-1])
167-
168-
cast = graph.make_node('Cast', inputs=[squeezed_class_id], to=6)
169-
170-
greater = graph.make_node('Greater', inputs=[cast, thresh])
171-
172-
nonzero = graph.make_node('NonZero', inputs=[greater])
165+
filter_cls_id = graph.make_node(
166+
'Constant', dtype=dtypes.ONNX.INT32, value=[background])
167+
cast = graph.make_node(
168+
'Cast', inputs=[squeezed_class_id], to=dtypes.ONNX.INT32)
169+
filter_index = graph.make_node('Sub', inputs=[cast, filter_cls_id])
170+
nonzero = graph.make_node('NonZero', inputs=[filter_index])
173171

174172
class_id = graph.make_node('Gather', inputs=[class_id, nonzero], axis=0)
175173

@@ -295,7 +293,9 @@ def keep_top_k(cls,
295293
axes=[0])
296294
if node.type in ['matrix_nms', 'multiclass_nms3']:
297295
select_bboxes_shape = graph.make_node(
298-
'Shape', inputs=[final_indices])
296+
'Shape', inputs=[concat_final_results])
297+
select_bboxes_shape1 = graph.make_node(
298+
'Cast', inputs=[select_bboxes_shape], to=dtypes.ONNX.INT32)
299299
indices = graph.make_node(
300300
'Constant', dtype=dtypes.ONNX.INT64, value=[0])
301301
rois_num = None
@@ -306,5 +306,5 @@ def keep_top_k(cls,
306306
if rois_num is not None:
307307
graph.make_node(
308308
"Gather",
309-
inputs=[select_bboxes_shape, indices],
309+
inputs=[select_bboxes_shape1, indices],
310310
outputs=rois_num)

paddle2onnx/op_mapper/logic.py

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,30 @@ def opset_12(cls, graph, node, **kw):
3131
outputs=node.output('Out'))
3232

3333

34+
@op_mapper('equal')
35+
class Equal():
36+
support_opset_verison_range = (12, )
37+
38+
@classmethod
39+
def opset_1(cls, graph, node, **kw):
40+
onnx_node = graph.make_node(
41+
'Equal',
42+
inputs=[node.input('X', 0), node.input('Y', 0)],
43+
outputs=node.output('Out'))
44+
45+
46+
@op_mapper('greater_than')
47+
class GreaterThan():
48+
support_opset_verison_range = (1, )
49+
50+
@classmethod
51+
def opset_1(cls, graph, node, **kw):
52+
onnx_node = graph.make_node(
53+
'Greater',
54+
inputs=[node.input('X', 0), node.input('Y', 0)],
55+
outputs=node.output('Out'))
56+
57+
3458
@op_mapper('logical_and')
3559
class LogicalAnd():
3660
support_opset_verison_range = (1, )

paddle2onnx/op_mapper/math.py

Lines changed: 174 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -88,9 +88,14 @@ def opset_1(cls, graph, node, **kw):
8888

8989
@op_mapper(
9090
[
91-
'elementwise_add', 'elementwise_sub', 'elementwise_div',
92-
'elementwise_mul', 'elementwise_min', 'elementwise_max',
93-
'elementwise_pow'
91+
'elementwise_add',
92+
'elementwise_sub',
93+
'elementwise_div',
94+
'elementwise_mul',
95+
'elementwise_min',
96+
'elementwise_max',
97+
'elementwise_pow',
98+
'elementwise_mod',
9499
],
95100
mapper_dict={
96101
'elementwise_add': 'Add',
@@ -100,12 +105,13 @@ def opset_1(cls, graph, node, **kw):
100105
'elementwise_min': 'Min',
101106
'elementwise_max': 'Max',
102107
'elementwise_pow': 'Pow',
108+
'elementwise_mod': 'Mod',
103109
})
104110
class ElementwiseOps():
105111
support_opset_version_range = (7, 12)
106112

107113
@classmethod
108-
def opset_7(cls, graph, node, **kw):
114+
def opset_9(cls, graph, node, **kw):
109115
op_type = kw['mapper_dict'][node.type]
110116
axis = node.attr('axis')
111117
x = node.input('X', 0)
@@ -130,6 +136,51 @@ def opset_7(cls, graph, node, **kw):
130136
op_type, inputs=[x, y_node], outputs=node.output('Out'))
131137

132138

139+
@op_mapper('elementwise_floordiv')
class ElementWiseFloorDiv():
    """Map Paddle `elementwise_floordiv` onto ONNX `Div` (+ `Floor`).

    Integer operands get a plain `Div`; floating-point operands get
    `Div` followed by `Floor`. When `axis` requires it, Y is first
    reshaped so it broadcasts against X starting at that axis.

    NOTE(review): ONNX integer `Div` truncates toward zero, which
    differs from floor division for negative operands — confirm this
    matches the intended Paddle semantics.
    """
    support_opset_version_range = (11, 12)

    @classmethod
    def opset_7(cls, graph, node, **kw):
        x = node.input('X', 0)
        y = node.input('Y', 0)
        axis = node.attr('axis')
        x_shape = node.input_shape('X', 0)
        y_shape = node.input_shape('Y', 0)
        # Map Paddle dtype enums to string names to test integer-ness.
        x_dtype = dtypes.DTYPE_PADDLE_STR_MAP[node.input_dtype('X', 0)]
        y_dtype = dtypes.DTYPE_PADDLE_STR_MAP[node.input_dtype('Y', 0)]
        is_int = 'int' in x_dtype and 'int' in y_dtype

        aligned = (axis == -1 or axis == len(x_shape) - 1 or
                   len(x_shape) == len(y_shape))
        if aligned:
            divisor = y
        else:
            # Pad Y's shape with leading/trailing 1s so it lines up with
            # X's rank at `axis`, then reshape Y to that broadcast shape.
            broadcast_shape = [1] * len(x_shape)
            broadcast_shape[axis:axis + len(y_shape)] = y_shape
            shape_const = graph.make_node(
                'Constant',
                dtype=dtypes.ONNX.INT64,
                value=list(broadcast_shape))
            divisor = graph.make_node('Reshape', inputs=[y, shape_const])

        if is_int:
            graph.make_node(
                'Div', inputs=[x, divisor], outputs=node.output('Out'))
        else:
            quotient = graph.make_node('Div', inputs=[x, divisor])
            graph.make_node(
                'Floor', inputs=[quotient], outputs=node.output('Out'))
182+
183+
133184
@op_mapper('pow')
134185
class Pow():
135186
support_opset_version_range = (8, 12)
@@ -233,6 +284,65 @@ def opset_1(cls, graph, node, **kw):
233284
'MatMul', inputs=[x, y], outputs=node.output('Out'))
234285

235286

287+
@op_mapper('p_norm')
288+
class PNorm():
289+
support_opset_version_range = (1, 12)
290+
291+
@classmethod
292+
def opset_1(cls, graph, node, **kw):
293+
x = node.input('X', 0)
294+
axis = node.attr('axis')
295+
p = node.attr('porder')
296+
keepdim = node.attr('keepdim')
297+
epsilon = node.attr('epsilon')
298+
assert axis == 1, "Only axis == 1 is supported for p_norm"
299+
if p == 1 or p == 2 and not keepdim:
300+
graph.make_node(
301+
'LpNormalization',
302+
inputs=[x],
303+
outputs=node.output('Out'),
304+
axis=1,
305+
p=p)
306+
else:
307+
pnode = graph.make_node(
308+
'Constant', dtype=dtypes.ONNX.FLOAT, value=[p])
309+
mul = graph.make_node('Pow', inputs=[x, pnode])
310+
reduce_sum = graph.make_node(
311+
'ReduceSum', inputs=[mul], axes=[1], keepdims=keepdim)
312+
pnode1 = graph.make_node(
313+
'Constant', dtype=dtypes.ONNX.FLOAT, value=[1.0 / p])
314+
graph.make_node(
315+
'Pow', inputs=[reduce_sum, pnode1], outputs=node.output('Out'))
316+
317+
@classmethod
318+
def opset_13(cls, graph, node, **kw):
319+
x = node.input('X', 0)
320+
axis = node.attr('axis')
321+
p = node.attr('porder')
322+
keepdim = node.attr('keepdim')
323+
epsilon = node.attr('epsilon')
324+
assert axis == 1, "Only axis == 1 is supported for p_norm"
325+
if (p == 1 or p == 2) and not keepdim:
326+
graph.make_node(
327+
'LpNormalization',
328+
inputs=[x],
329+
outputs=node.output('Out'),
330+
axis=1,
331+
p=p)
332+
else:
333+
pnode = graph.make_node(
334+
'Constant', dtype=dtypes.ONNX.FLOAT, value=[p])
335+
mul = graph.make_node('Pow', inputs=[x, pnode])
336+
axes = graph.make_node(
337+
'Constant', dtype=dtypes.ONNX.INT64, value=[1])
338+
reduce_sum = graph.make_node(
339+
'ReduceSum', inputs=[mul, axes], keepdims=keepdim)
340+
pnode1 = graph.make_node(
341+
'Constant', dtype=dtypes.ONNX.FLOAT, value=[1.0 / p])
342+
graph.make_node(
343+
'Pow', inputs=[reduce_sum, pnode1], outputs=node.output('Out'))
344+
345+
236346
@op_mapper('sum')
237347
class Sum():
238348
support_opset_version_range = (1, 12)
@@ -325,6 +435,66 @@ def opset_1(cls, graph, node, **kw):
325435
'keepdims': 0})
326436

327437

438+
#
439+
#@op_mapper('scale')
440+
#class Scale():
441+
# support_opset_version_range = (1, 12)
442+
#
443+
# @classmethod
444+
# def opset_1(cls, graph, node, **kw):
445+
# scale = node.attr('scale')
446+
# bias = node.attr('bias')
447+
# if np.fabs(scale - 1.0) < 1e-06 and np.fabs(bias - 0.0) < 1e-06:
448+
# graph.make_node(
449+
# 'Identity', inputs=node.input('X'), outputs=node.output('Out'))
450+
# else:
451+
# raise Exception(
452+
# "please try to convert OP:scale with opset_version >= 7.")
453+
#
454+
# @classmethod
455+
# def opset_7(cls, graph, node, **kw):
456+
# scale = node.attr('scale')
457+
# bias = node.attr('bias')
458+
# if np.fabs(scale - 1.0) < 1e-06 and np.fabs(bias - 0.0) < 1e-06:
459+
# graph.make_node(
460+
# 'Identity', inputs=node.input('X'), outputs=node.output('Out'))
461+
# else:
462+
# cast_node = graph.make_node(
463+
# 'Cast', inputs=node.input('X'),
464+
# attrs={'to': dtypes.ONNX.FLOAT})
465+
# if np.fabs(scale - 1.0) < 1e-06:
466+
# bias_node = graph.make_node(
467+
# 'Constant',
468+
# attrs={'dtype': dtypes.ONNX.FLOAT,
469+
# 'value': [bias]})
470+
# graph.make_node('Add', inputs=[cast_node, bias_node], outputs=node.output('Out'))
471+
# elif np.fabs(bias - 1.0) < 1e-06:
472+
# scale_node = graph.make_node(
473+
# 'Constant',
474+
# attrs={'dtype': dtypes.ONNX.FLOAT,
475+
# 'value': [scale]})
476+
# graph.make_node('Mul', inputs=[cast_node, scale_node], outputs=node.output('Out'))
477+
# else:
478+
# scale_node = graph.make_node(
479+
# 'Constant',
480+
# attrs={'dtype': dtypes.ONNX.FLOAT,
481+
# 'value': [scale]})
482+
# bias_node = graph.make_node(
483+
# 'Constant',
484+
# attrs={'dtype': dtypes.ONNX.FLOAT,
485+
# 'value': [bias]})
486+
# if node.attr('bias_after_scale'):
487+
# node1 = graph.make_node('Mul', inputs=[cast_node, scale_node])
488+
# node2 = graph.make_node(
489+
# 'Add',
490+
# inputs=[node1, bias_node],
491+
# outputs=node.output('Out'))
492+
# else:
493+
# node1 = graph.make_node('Add', inputs=[cast_node, bias_node])
494+
# node2 = graph.make_node(
495+
# 'Mul',
496+
# inputs=[node1, scale_node],
497+
# outputs=[node.output('Out', 0)])
328498
@op_mapper('scale')
329499
class Scale():
330500
support_opset_version_range = (1, 12)

0 commit comments

Comments
 (0)