Skip to content

Commit 78260ff

Browse files
authored
fix output_padding in conv (#33585)
* fix output padding in conv
* add repr unittest for conv
1 parent e6c5282 commit 78260ff

File tree

2 files changed

+19
-6
lines changed

2 files changed

+19
-6
lines changed

python/paddle/fluid/tests/unittests/test_conv2d_transpose_op.py

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
import numpy as np
1919

2020
import paddle
21+
import paddle.nn as nn
2122
paddle.enable_static()
2223
import paddle.fluid.core as core
2324
import paddle.fluid as fluid
@@ -898,5 +899,17 @@ def attr_padding_with_data_format():
898899
self.assertRaises(ValueError, attr_padding_with_data_format)
899900

900901

902+
class TestConv2DTransposeRepr(unittest.TestCase):
    """Smoke test: a Conv2DTranspose built with ``output_padding`` can be
    printed (exercising ``extra_repr``) and run in dynamic-graph mode."""

    def test_case(self):
        # Switch to eager execution so the layer can be called directly.
        paddle.disable_static()
        sample = paddle.uniform((2, 4, 8, 8), dtype='float32', min=-1., max=1.)
        transpose_conv = nn.Conv2DTranspose(4, 6, (3, 3), output_padding=1, stride=2)
        # Printing the layer formats output_padding via extra_repr.
        print(transpose_conv)
        result = transpose_conv(sample)
        self.assertIsNotNone(result.numpy())
        # Restore static mode for the rest of the test suite.
        paddle.enable_static()
912+
913+
901914
if __name__ == '__main__':
902915
unittest.main()

python/paddle/nn/layer/conv.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -98,7 +98,7 @@ def __init__(self,
9898
'kernel_size')
9999
self._padding = padding
100100
self._padding_mode = padding_mode
101-
self._output_padding = output_padding
101+
self.output_padding = output_padding
102102
if dims != 1:
103103
self._updated_padding, self._padding_algorithm = _update_padding_nd(
104104
padding, channel_last, dims)
@@ -163,8 +163,8 @@ def extra_repr(self):
163163
main_str += ', padding={_padding}'
164164
if self._padding_mode is not 'zeros':
165165
main_str += ', padding_mode={_padding_mode}'
166-
if self._output_padding != 0:
167-
main_str += ', output_padding={_output_padding}'
166+
if self.output_padding != 0:
167+
main_str += ', output_padding={output_padding}'
168168
if self._dilation != [1] * len(self._dilation):
169169
main_str += ', dilation={_dilation}'
170170
if self._groups != 1:
@@ -508,7 +508,7 @@ def forward(self, x, output_size=None):
508508
self.weight,
509509
bias=self.bias,
510510
output_size=output_size,
511-
output_padding=self._output_padding,
511+
output_padding=self.output_padding,
512512
padding=self._padding,
513513
stride=self._stride,
514514
dilation=self._dilation,
@@ -824,7 +824,7 @@ def __init__(self,
824824

825825
def forward(self, x, output_size=None):
826826
if output_size is None:
827-
output_padding = self._output_padding
827+
output_padding = self.output_padding
828828
else:
829829
output_padding = 0
830830

@@ -1161,7 +1161,7 @@ def __init__(self,
11611161

11621162
def forward(self, x, output_size=None):
11631163
if output_size is None:
1164-
output_padding = self._output_padding
1164+
output_padding = self.output_padding
11651165
else:
11661166
output_padding = 0
11671167

0 commit comments

Comments (0)