paddle/fluid/API.spec (2 changes: 1 addition & 1 deletion)
@@ -684,7 +684,7 @@ paddle.fluid.dygraph.LayerNorm.state_dict (ArgSpec(args=['self', 'destination',
paddle.fluid.dygraph.LayerNorm.sublayers (ArgSpec(args=['self', 'include_sublayers'], varargs=None, keywords=None, defaults=(True,)), ('document', '00a881005ecbc96578faf94513bf0d62'))
paddle.fluid.dygraph.LayerNorm.train (ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None), ('document', '6adf97f83acf6453d4a6a4b1070f3754'))
paddle.fluid.dygraph.NCE ('paddle.fluid.dygraph.nn.NCE', ('document', '47eb439a5568468fad70235f1e61ead9'))
-paddle.fluid.dygraph.NCE.__init__ (ArgSpec(args=['self', 'name_scope', 'num_total_classes', 'param_attr', 'bias_attr', 'num_neg_samples', 'sampler', 'custom_dist', 'seed', 'is_sparse'], varargs=None, keywords=None, defaults=(None, None, None, 'uniform', None, 0, False)), ('document', '6adf97f83acf6453d4a6a4b1070f3754'))
+paddle.fluid.dygraph.NCE.__init__ (ArgSpec(args=['self', 'name_scope', 'num_total_classes', 'sample_weight', 'param_attr', 'bias_attr', 'num_neg_samples', 'sampler', 'custom_dist', 'seed', 'is_sparse'], varargs=None, keywords=None, defaults=(None, None, None, None, 'uniform', None, 0, False)), ('document', '6adf97f83acf6453d4a6a4b1070f3754'))
paddle.fluid.dygraph.NCE.add_parameter (ArgSpec(args=['self', 'name', 'parameter'], varargs=None, keywords=None, defaults=None), ('document', 'f35ab374c7d5165c3daf3bd64a5a2ec1'))
paddle.fluid.dygraph.NCE.add_sublayer (ArgSpec(args=['self', 'name', 'sublayer'], varargs=None, keywords=None, defaults=None), ('document', '839ff3c0534677ba6ad8735c3fd4e995'))
paddle.fluid.dygraph.NCE.backward (ArgSpec(args=['self'], varargs='inputs', keywords=None, defaults=None), ('document', '6adf97f83acf6453d4a6a4b1070f3754'))
python/paddle/fluid/dygraph/nn.py (65 changes: 56 additions & 9 deletions)
@@ -23,6 +23,7 @@
from ..param_attr import ParamAttr
from ..initializer import Normal, Constant, NumpyArrayInitializer
import numpy as np
+import logging

__all__ = [
'Conv2D', 'Conv3D', 'Pool2D', 'FC', 'BatchNorm', 'Embedding', 'GRUUnit',
@@ -1374,13 +1375,20 @@ def _build_once(self, input):
shape=param_shape,
dtype=self._dtype,
default_initializer=Constant(1.0))
+        else:
+            if self._param_attr:
+                logging.warning(
+                    "param_attr is only available when scale is True")

if self._shift:
assert self._bias_attr is not False
self._bias_w = self.create_parameter(
attr=self._bias_attr,
shape=param_shape,
dtype=self._dtype,
is_bias=True)
+        else:
+            if self._bias_attr:
+                logging.warning(
+                    "bias_attr is only available when shift is True")

def forward(self, input):
inputs = dict()
@@ -1410,7 +1418,7 @@ def forward(self, input):
"begin_norm_axis": self._begin_norm_axis
})

-        return self._helper.append_activation(layer_norm_out)
+        return self._helper.append_activation(layer_norm_out, act=self._act)
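Two behavioral fixes land in LayerNorm here: a param_attr or bias_attr passed alongside scale=False or shift=False is no longer silently ignored (it now logs a warning), and the constructor's act argument is finally forwarded to append_activation. A minimal dygraph sketch under the usual fluid convention of name_scope as the first argument; the input shape is illustrative, not taken from this diff:

    import numpy as np
    import paddle.fluid as fluid

    with fluid.dygraph.guard():
        x = fluid.dygraph.to_variable(
            np.random.random([2, 3, 32]).astype('float32'))
        # act='relu' is honored now that forward passes act=self._act;
        # combining param_attr with scale=False would log a warning instead.
        ln = fluid.dygraph.LayerNorm('layer_norm', act='relu')
        y = ln(x)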


class GRUUnit(layers.Layer):
@@ -1648,6 +1656,7 @@ class NCE(layers.Layer):
def __init__(self,
name_scope,
num_total_classes,
+                 sample_weight=None,
param_attr=None,
bias_attr=None,
num_neg_samples=None,
@@ -1661,7 +1670,7 @@ def __init__(self,
self._num_total_classes = num_total_classes

        self._inputs = dict()
+        self._inputs['SampleWeight'] = sample_weight if sample_weight is not None else []
if sampler == "uniform":
sampler = 0
elif sampler == "log_uniform":
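The new sample_weight argument rides along as the op's 'SampleWeight' input, defaulting to an empty list, which the nce op presumably treats as all-ones weights. A construction sketch matching the new ArgSpec in API.spec; the [batch, 1] weight shape is an assumption, not something this diff pins down:

    import numpy as np
    import paddle.fluid as fluid

    with fluid.dygraph.guard():
        # hypothetical per-example weights for a batch of 4
        sw = fluid.dygraph.to_variable(
            np.ones([4, 1]).astype('float32'))
        nce = fluid.dygraph.NCE('nce', num_total_classes=10000,
                                sample_weight=sw, num_neg_samples=5,
                                sampler='uniform')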
@@ -1941,15 +1950,15 @@ def _build_once(self, x, y):

if self._bias_attr:
bias_size = [1, self._size]
-            bias = self.create_parameter(
+            self._bias_param = self.create_parameter(
attr=self._bias_attr,
shape=bias_size,
dtype=self._dtype,
is_bias=True)
self._inputs["Bias"] = bias

def forward(self, x, y):
self._inputs = {"X": x, "Y": y, "Weight": self._w}
self._inputs["Bias"] = self._bias_param
if self._name is not None:
out = self._helper.create_variable(
name=".".join([self.full_name(), self._name]),
@@ -1964,7 +1973,7 @@ def forward(self, x, y):
outputs={"Out": out})

# add activation
return self._helper.append_activation(out)
return self._helper.append_activation(out, act=self._act)
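Judging by the forward(self, x, y) signature and the X/Y/Weight/Bias inputs, these hunks sit in BilinearTensorProduct. The point of the change: forward rebuilds self._inputs from scratch on every call, so a Bias stashed there by _build_once (as the old code did) was silently dropped; caching it as self._bias_param and re-attaching it behind the same `if self._bias_attr` check used in _build_once keeps the two methods consistent and avoids touching a parameter that was never created. A usage sketch, assuming the constructor mirrors fluid.layers.bilinear_tensor_product with (name_scope, size, ..., act, param_attr, bias_attr):

    import numpy as np
    import paddle.fluid as fluid

    with fluid.dygraph.guard():
        x = fluid.dygraph.to_variable(
            np.random.random([4, 8]).astype('float32'))
        y = fluid.dygraph.to_variable(
            np.random.random([4, 6]).astype('float32'))
        # bias is created once in _build_once, cached as _bias_param,
        # and re-attached to self._inputs on every forward call
        btp = fluid.dygraph.BilinearTensorProduct(
            'btp', size=16, act='relu', bias_attr=fluid.ParamAttr())
        out = btp(x, y)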


class Conv2DTranspose(layers.Layer):
@@ -2099,6 +2108,7 @@ def __init__(self,
assert param_attr is not False, "param_attr should not be False in conv2d_transpose."
self._param_attr = param_attr
self._bias_attr = bias_attr
+        self._act = act
self._groups = groups
self._num_filters = num_filters
self._use_cudnn = use_cudnn
@@ -2162,6 +2172,12 @@ def _build_once(self, input):
self._img_filter = self.create_parameter(
dtype=input.dtype, shape=filter_shape, attr=self._param_attr)

+        self._bias_param = self.create_parameter(
+            attr=self._bias_attr,
+            shape=[self._num_filters],
+            dtype=self._dtype,
+            is_bias=True)

def forward(self, input):
pre_bias = self._helper.create_variable_for_type_inference(
dtype=input.dtype)
@@ -2179,8 +2195,19 @@ def forward(self, input):
'use_cudnn': self._use_cudnn
})

-        pre_act = self._helper.append_bias_op(pre_bias, dim_start=1, dim_end=2)
-        out = self._helper.append_activation(pre_act)
+        if self._bias_param is not None:
+            pre_act = self._helper.create_variable_for_type_inference(
+                dtype=self._dtype)
+            self._helper.append_op(
+                type='elementwise_add',
+                inputs={'X': [pre_bias],
+                        'Y': [self._bias_param]},
+                outputs={'Out': [pre_act]},
+                attrs={'axis': 1})
+        else:
+            pre_act = pre_bias
+
+        out = self._helper.append_activation(pre_act, act=self._act)
return out
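The inlined replacement for append_bias_op relies on elementwise_add broadcasting: axis=1 aligns the [num_filters] bias with dimension 1 of the NCHW output and broadcasts it over the batch and spatial dimensions. The same arithmetic in plain numpy, as a sanity sketch:

    import numpy as np

    pre_bias = np.random.random([2, 3, 4, 4]).astype('float32')  # NCHW output
    bias = np.random.random([3]).astype('float32')               # one value per filter
    # elementwise_add(X, Y, axis=1): align Y with X's dim 1, broadcast the rest
    pre_act = pre_bias + bias.reshape((1, 3, 1, 1))
    assert pre_act.shape == (2, 3, 4, 4)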


@@ -2230,13 +2257,20 @@ def __init__(self,
self._padding = padding
self._bias_attr = bias_attr
self._param_attr = param_attr
+        self._act = act

def _build_once(self, input):
self._dtype = self._helper.input_dtype(input)
filter_shape = [self._filter_size * input.shape[1], self._num_filters]
self._filter_param = self.create_parameter(
attr=self._param_attr, shape=filter_shape, dtype=self._dtype)

+        self._bias_param = self.create_parameter(
+            attr=self._bias_attr,
+            shape=[self._num_filters],
+            dtype=self._dtype,
+            is_bias=True)

def forward(self, input):
pre_bias = self._helper.create_variable_for_type_inference(self._dtype)
self._helper.append_op(
@@ -2251,8 +2285,20 @@
'contextStart': -int(self._filter_size // 2),
'contextLength': self._filter_size
})
-        pre_act = self._helper.append_bias_op(pre_bias)
-        return self._helper.append_activation(pre_act)
+
+        if self._bias_param is not None:
+            pre_act = self._helper.create_variable_for_type_inference(
+                dtype=self._dtype)
+            self._helper.append_op(
+                type='elementwise_add',
+                inputs={'X': [pre_bias],
+                        'Y': [self._bias_param]},
+                outputs={'Out': [pre_act]},
+                attrs={'axis': 1})
+        else:
+            pre_act = pre_bias
+
+        return self._helper.append_activation(pre_act, act=self._act)
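SequenceConv gets the same inlining. Note the contract both forwards rely on: create_parameter is expected to return None when the attr is False (that is what the `is not None` checks depend on), so bias_attr=False now simply skips the elementwise_add. A construction sketch; the signature is inferred from the attributes __init__ stores, not confirmed by this diff:

    import paddle.fluid as fluid

    with fluid.dygraph.guard():
        # inferred signature: bias_attr=False disables the bias add entirely
        conv = fluid.dygraph.SequenceConv(
            'seq_conv', num_filters=32, filter_size=3,
            bias_attr=False, act='sigmoid')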


class RowConv(layers.Layer):
@@ -2614,6 +2660,7 @@ def forward(self, nodes_vector, edge_set):
out = self.create_variable(
name=self._name, dtype=self._dtype, persistable=False)
        else:
+
            out = self._helper.create_variable_for_type_inference(
dtype=self._dtype)
