Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion paddle2onnx/op_mapper/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,4 +36,3 @@
from .custom_paddle_op import collect_fpn_proposals
from .custom_paddle_op import distribute_fpn_proposals
from .custom_paddle_op import box_clip
from .custom_paddle_op import fill_constant_batch_size_like
19 changes: 19 additions & 0 deletions paddle2onnx/op_mapper/activation.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,25 @@ def opset_1(cls, graph, node, **kw):
alpha=node.attr('alpha'))


@op_mapper('softplus')
class Softplus():
    support_opset_verison_range = (1, 12)

    @classmethod
    def opset_1(cls, graph, node, **kw):
        # ONNX Softplus has no beta/threshold attributes, so this mapping is
        # only valid when Paddle's softplus uses its default configuration
        # (beta == 1.0, threshold == 20.0); anything else is rejected.
        beta = node.attr('beta')
        threshold = node.attr('threshold')
        is_default_beta = np.isclose(beta, 1.0, 1e-06, 1e-06)
        is_default_threshold = np.isclose(threshold, 20.0, 1e-06, 1e-06)
        if not (is_default_beta and is_default_threshold):
            raise Exception("[ERROR] Operator softplus " \
                "only supported while beta==1.0 and threshold==20.0")
        graph.make_node(
            'Softplus',
            inputs=[node.input('X')[0]],
            outputs=node.output('Out'))


@op_mapper('prelu')
class PRelu():
support_opset_verison_range = (9, 13)
Expand Down

This file was deleted.

22 changes: 11 additions & 11 deletions paddle2onnx/op_mapper/detection/multiclass_nms.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ def nms(cls, graph, node, scores, bboxes, class_id=None):
iou_threshold = 0.5
logging.warning(
"Operator:{} is not supported completely, so we use traditional"
" NMS (iou_theshold={}) to instead it, which introduce some difference.".
" NMS (nms_theshold={}) to instead it, which introduce some difference.".
format(node.type, str(iou_threshold)))
else:
iou_threshold = node.attr('nms_threshold')
Expand Down Expand Up @@ -162,14 +162,12 @@ def keep_top_k(cls,
if background == 0:
nonzero = graph.make_node('NonZero', inputs=[squeezed_class_id])
else:
thresh = graph.make_node(
'Constant', inputs=[], dtype=dtypes.ONNX.INT32, value=[-1])

cast = graph.make_node('Cast', inputs=[squeezed_class_id], to=6)

greater = graph.make_node('Greater', inputs=[cast, thresh])

nonzero = graph.make_node('NonZero', inputs=[greater])
filter_cls_id = graph.make_node(
'Constant', dtype=dtypes.ONNX.INT32, value=[background])
cast = graph.make_node(
'Cast', inputs=[squeezed_class_id], to=dtypes.ONNX.INT32)
filter_index = graph.make_node('Sub', inputs=[cast, filter_cls_id])
nonzero = graph.make_node('NonZero', inputs=[filter_index])

class_id = graph.make_node('Gather', inputs=[class_id, nonzero], axis=0)

Expand Down Expand Up @@ -295,7 +293,9 @@ def keep_top_k(cls,
axes=[0])
if node.type in ['matrix_nms', 'multiclass_nms3']:
select_bboxes_shape = graph.make_node(
'Shape', inputs=[final_indices])
'Shape', inputs=[concat_final_results])
select_bboxes_shape1 = graph.make_node(
'Cast', inputs=[select_bboxes_shape], to=dtypes.ONNX.INT32)
indices = graph.make_node(
'Constant', dtype=dtypes.ONNX.INT64, value=[0])
rois_num = None
Expand All @@ -306,5 +306,5 @@ def keep_top_k(cls,
if rois_num is not None:
graph.make_node(
"Gather",
inputs=[select_bboxes_shape, indices],
inputs=[select_bboxes_shape1, indices],
outputs=rois_num)
24 changes: 24 additions & 0 deletions paddle2onnx/op_mapper/logic.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,30 @@ def opset_12(cls, graph, node, **kw):
outputs=node.output('Out'))


@op_mapper('equal')
class Equal():
    support_opset_verison_range = (12, )

    @classmethod
    def opset_1(cls, graph, node, **kw):
        # Paddle 'equal' maps one-to-one onto the ONNX Equal operator.
        lhs = node.input('X', 0)
        rhs = node.input('Y', 0)
        graph.make_node(
            'Equal', inputs=[lhs, rhs], outputs=node.output('Out'))


@op_mapper('greater_than')
class GreaterThan():
    support_opset_verison_range = (1, )

    @classmethod
    def opset_1(cls, graph, node, **kw):
        # Paddle 'greater_than' maps one-to-one onto the ONNX Greater
        # operator.
        lhs = node.input('X', 0)
        rhs = node.input('Y', 0)
        graph.make_node(
            'Greater', inputs=[lhs, rhs], outputs=node.output('Out'))


@op_mapper('logical_and')
class LogicalAnd():
support_opset_verison_range = (1, )
Expand Down
178 changes: 174 additions & 4 deletions paddle2onnx/op_mapper/math.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,9 +88,14 @@ def opset_1(cls, graph, node, **kw):

@op_mapper(
[
'elementwise_add', 'elementwise_sub', 'elementwise_div',
'elementwise_mul', 'elementwise_min', 'elementwise_max',
'elementwise_pow'
'elementwise_add',
'elementwise_sub',
'elementwise_div',
'elementwise_mul',
'elementwise_min',
'elementwise_max',
'elementwise_pow',
'elementwise_mod',
],
mapper_dict={
'elementwise_add': 'Add',
Expand All @@ -100,12 +105,13 @@ def opset_1(cls, graph, node, **kw):
'elementwise_min': 'Min',
'elementwise_max': 'Max',
'elementwise_pow': 'Pow',
'elementwise_mod': 'Mod',
})
class ElementwiseOps():
support_opset_version_range = (7, 12)

@classmethod
def opset_7(cls, graph, node, **kw):
def opset_9(cls, graph, node, **kw):
op_type = kw['mapper_dict'][node.type]
axis = node.attr('axis')
x = node.input('X', 0)
Expand All @@ -130,6 +136,51 @@ def opset_7(cls, graph, node, **kw):
op_type, inputs=[x, y_node], outputs=node.output('Out'))


@op_mapper('elementwise_floordiv')
class ElementWiseFloorDiv():
    support_opset_version_range = (11, 12)

    @classmethod
    def opset_7(cls, graph, node, **kw):
        """Map Paddle elementwise_floordiv onto ONNX Div (+ Floor).

        Integer inputs emit a bare Div; floating-point inputs floor the
        quotient explicitly. When `axis` does not already line Y up with X,
        Y is reshaped to a broadcast-compatible shape first.

        NOTE(review): ONNX integer Div truncates toward zero while Paddle
        floordiv floors, so results differ for negative integer operands —
        kept as-is to preserve the original mapping's behavior.
        """
        x = node.input('X', 0)
        y = node.input('Y', 0)
        axis = node.attr('axis')
        x_shape = node.input_shape('X', 0)
        y_shape = node.input_shape('Y', 0)
        x_dtype = dtypes.DTYPE_PADDLE_STR_MAP[node.input_dtype('X', 0)]
        y_dtype = dtypes.DTYPE_PADDLE_STR_MAP[node.input_dtype('Y', 0)]
        is_int = x_dtype.count('int') > 0 and y_dtype.count('int') > 0

        aligned = (axis == -1 or axis == len(x_shape) - 1 or
                   len(x_shape) == len(y_shape))
        if not aligned:
            # Pad Y's shape with 1s so its dims sit at `axis` within X's
            # rank, enabling implicit ONNX broadcasting.
            broadcast_shape = [1] * len(x_shape)
            broadcast_shape[axis:axis + len(y_shape)] = y_shape
            shape_const = graph.make_node(
                'Constant',
                dtype=dtypes.ONNX.INT64,
                value=list(broadcast_shape))
            y = graph.make_node('Reshape', inputs=[y, shape_const])

        if is_int:
            graph.make_node('Div', inputs=[x, y], outputs=node.output('Out'))
        else:
            quotient = graph.make_node('Div', inputs=[x, y])
            graph.make_node(
                'Floor', inputs=[quotient], outputs=node.output('Out'))


@op_mapper('pow')
class Pow():
support_opset_version_range = (8, 12)
Expand Down Expand Up @@ -233,6 +284,65 @@ def opset_1(cls, graph, node, **kw):
'MatMul', inputs=[x, y], outputs=node.output('Out'))


@op_mapper('p_norm')
class PNorm():
    support_opset_version_range = (1, 12)

    @classmethod
    def opset_1(cls, graph, node, **kw):
        """Map Paddle p_norm (p-order reduction over axis 1) to ONNX.

        Only axis == 1 is supported. For p in {1, 2} without keepdim the
        ONNX LpNormalization op is emitted; otherwise the norm is built
        explicitly as (sum(x**p, axis=1))**(1/p).
        """
        x = node.input('X', 0)
        axis = node.attr('axis')
        p = node.attr('porder')
        keepdim = node.attr('keepdim')
        # NOTE: 'epsilon' is read but not used by this mapping.
        epsilon = node.attr('epsilon')
        assert axis == 1, "Only axis == 1 is supported for p_norm"
        # BUGFIX: was `p == 1 or p == 2 and not keepdim`, which parses as
        # `p == 1 or (p == 2 and not keepdim)` and wrongly took the
        # LpNormalization path for p == 1 with keepdim=True. Parenthesized
        # to match the opset_13 implementation below.
        if (p == 1 or p == 2) and not keepdim:
            graph.make_node(
                'LpNormalization',
                inputs=[x],
                outputs=node.output('Out'),
                axis=1,
                p=p)
        else:
            # x ** p
            p_const = graph.make_node(
                'Constant', dtype=dtypes.ONNX.FLOAT, value=[p])
            powed = graph.make_node('Pow', inputs=[x, p_const])
            # sum over axis 1 (ReduceSum takes 'axes' as an attribute
            # before opset 13)
            reduce_sum = graph.make_node(
                'ReduceSum', inputs=[powed], axes=[1], keepdims=keepdim)
            # (...) ** (1/p)
            inv_p_const = graph.make_node(
                'Constant', dtype=dtypes.ONNX.FLOAT, value=[1.0 / p])
            graph.make_node(
                'Pow',
                inputs=[reduce_sum, inv_p_const],
                outputs=node.output('Out'))

    @classmethod
    def opset_13(cls, graph, node, **kw):
        """Same mapping as opset_1, but ReduceSum's axes is an input
        tensor from opset 13 onward."""
        x = node.input('X', 0)
        axis = node.attr('axis')
        p = node.attr('porder')
        keepdim = node.attr('keepdim')
        # NOTE: 'epsilon' is read but not used by this mapping.
        epsilon = node.attr('epsilon')
        assert axis == 1, "Only axis == 1 is supported for p_norm"
        if (p == 1 or p == 2) and not keepdim:
            graph.make_node(
                'LpNormalization',
                inputs=[x],
                outputs=node.output('Out'),
                axis=1,
                p=p)
        else:
            p_const = graph.make_node(
                'Constant', dtype=dtypes.ONNX.FLOAT, value=[p])
            powed = graph.make_node('Pow', inputs=[x, p_const])
            # opset 13+: axes passed as a tensor input, not an attribute
            axes = graph.make_node(
                'Constant', dtype=dtypes.ONNX.INT64, value=[1])
            reduce_sum = graph.make_node(
                'ReduceSum', inputs=[powed, axes], keepdims=keepdim)
            inv_p_const = graph.make_node(
                'Constant', dtype=dtypes.ONNX.FLOAT, value=[1.0 / p])
            graph.make_node(
                'Pow',
                inputs=[reduce_sum, inv_p_const],
                outputs=node.output('Out'))


@op_mapper('sum')
class Sum():
support_opset_version_range = (1, 12)
Expand Down Expand Up @@ -325,6 +435,66 @@ def opset_1(cls, graph, node, **kw):
'keepdims': 0})


#
#@op_mapper('scale')
#class Scale():
# support_opset_version_range = (1, 12)
#
# @classmethod
# def opset_1(cls, graph, node, **kw):
# scale = node.attr('scale')
# bias = node.attr('bias')
# if np.fabs(scale - 1.0) < 1e-06 and np.fabs(bias - 0.0) < 1e-06:
# graph.make_node(
# 'Identity', inputs=node.input('X'), outputs=node.output('Out'))
# else:
# raise Exception(
# "please try to convert OP:scale with opset_version >= 7.")
#
# @classmethod
# def opset_7(cls, graph, node, **kw):
# scale = node.attr('scale')
# bias = node.attr('bias')
# if np.fabs(scale - 1.0) < 1e-06 and np.fabs(bias - 0.0) < 1e-06:
# graph.make_node(
# 'Identity', inputs=node.input('X'), outputs=node.output('Out'))
# else:
# cast_node = graph.make_node(
# 'Cast', inputs=node.input('X'),
# attrs={'to': dtypes.ONNX.FLOAT})
# if np.fabs(scale - 1.0) < 1e-06:
# bias_node = graph.make_node(
# 'Constant',
# attrs={'dtype': dtypes.ONNX.FLOAT,
# 'value': [bias]})
# graph.make_node('Add', inputs=[cast_node, bias_node], outputs=node.output('Out'))
# elif np.fabs(bias - 1.0) < 1e-06:
# scale_node = graph.make_node(
# 'Constant',
# attrs={'dtype': dtypes.ONNX.FLOAT,
# 'value': [scale]})
# graph.make_node('Mul', inputs=[cast_node, scale_node], outputs=node.output('Out'))
# else:
# scale_node = graph.make_node(
# 'Constant',
# attrs={'dtype': dtypes.ONNX.FLOAT,
# 'value': [scale]})
# bias_node = graph.make_node(
# 'Constant',
# attrs={'dtype': dtypes.ONNX.FLOAT,
# 'value': [bias]})
# if node.attr('bias_after_scale'):
# node1 = graph.make_node('Mul', inputs=[cast_node, scale_node])
# node2 = graph.make_node(
# 'Add',
# inputs=[node1, bias_node],
# outputs=node.output('Out'))
# else:
# node1 = graph.make_node('Add', inputs=[cast_node, bias_node])
# node2 = graph.make_node(
# 'Mul',
# inputs=[node1, scale_node],
# outputs=[node.output('Out', 0)])
@op_mapper('scale')
class Scale():
support_opset_version_range = (1, 12)
Expand Down
Loading