1 change: 0 additions & 1 deletion python/paddle/common_ops_import.py
@@ -24,7 +24,6 @@
OpProtoHolder,
Variable,
_dygraph_tracer,
- _in_legacy_dygraph,
_non_static_mode,
_varbase_creator,
convert_np_dtype_to_dtype_,
2 changes: 1 addition & 1 deletion python/paddle/fluid/dygraph/tracer.py
@@ -306,7 +306,7 @@ def trace_op(
stop_gradient=False,
inplace_map=None,
):
- if not framework._in_legacy_dygraph():
+ if framework.in_dygraph_mode():
# inputs : {"sum": [tensor], ...}
# outputs : {"sum": [tensor], ...}
if type in name_mapping.keys():
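Read in isolation, the swapped check above amounts to the pattern sketched below; trace_dispatch is a made-up helper for illustration only, not Paddle API, and it assumes a Paddle build of the same era as this PR.

# Sketch only: once legacy dygraph is gone, the tracer's branch reduces to a
# single in_dygraph_mode() test instead of the old negated _in_legacy_dygraph().
from paddle.fluid import framework

def trace_dispatch(run_eager, run_static):
    if framework.in_dygraph_mode():   # eager tracer is active
        return run_eager()
    return run_static()               # static-graph path otherwise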
40 changes: 2 additions & 38 deletions python/paddle/fluid/framework.py
@@ -237,9 +237,6 @@ def in_dygraph_mode():
return (_dygraph_tracer_ is not None) and _in_eager_mode_


- def _in_legacy_dygraph():
-     return (not _in_eager_mode_) and (_dygraph_tracer_ is not None)


def _non_static_mode():
return _dygraph_tracer_ is not None
@@ -255,7 +252,8 @@ def _test_eager_guard(place=None):
try:
yield
finally:
- pass
+ if not already_fallback:
+     _enable_legacy_dygraph()


global_ipu_index = -1
@@ -1334,8 +1332,6 @@ def __instancecheck__(cls, instance):
if in_dygraph_mode():
return issubclass(t, core.eager.Tensor)
else:
- if _in_legacy_dygraph():
-     return issubclass(t, core.VarBase)
return issubclass(t, Variable)


@@ -1346,8 +1342,6 @@ def __instancecheck__(cls, instance):
if in_dygraph_mode():
return issubclass(t, EagerParamBase)
else:
- if _in_legacy_dygraph():
-     return issubclass(t, ParamBase)
return issubclass(t, Parameter)


@@ -3893,19 +3887,6 @@ def _rename_var(self, name, new_name):
error_clip=error_clip,
)
else:
- if _in_legacy_dygraph():
-     var = ParamBase(
-         d.shape(),
-         d.dtype(),
-         type=orig_var_type,
-         name=new_name,
-         stop_gradient=stop_gradient,
-         trainable=trainable,
-         optimize_attr=optimize_attr,
-         regularizer=regularizer,
-         error_clip=error_clip,
-     )
- else:
var = Parameter(
self,
d.shape(),
@@ -3946,9 +3927,6 @@ def create_parameter(self, *args, **kwargs):
if in_dygraph_mode():
param = EagerParamBase(*args, **kwargs)
else:
- if _in_legacy_dygraph():
-     param = ParamBase(*args, **kwargs)
- else:
param = Parameter(global_block, *args, **kwargs)

if 'initializer' in kwargs:
@@ -4262,20 +4240,6 @@ def _copy_param_info_from(self, other):
name=v.name,
)
else:
- if _in_legacy_dygraph():
-     new_p = ParamBase(
-         shape=v.shape,
-         dtype=v.dtype,
-         type=v.type,
-         lod_level=v.lod_level,
-         stop_gradient=p.stop_gradient,
-         trainable=p.trainable,
-         optimize_attr=p.optimize_attr,
-         regularizer=p.regularizer,
-         error_clip=p.error_clip,
-         name=v.name,
-     )
- else:
new_p = Parameter(
block=self,
shape=v.shape,
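As a rough illustration of what the trimmed metaclass checks above do at runtime (an assumption about an eager-mode Paddle build of this era, not part of the change): an eager Tensor still passes isinstance() against the legacy Variable type, and static-graph variables keep going through the plain Variable branch.

# Illustrative only; relies on the __instancecheck__ fallbacks shown in this hunk.
import paddle
from paddle.fluid import framework

paddle.disable_static()                     # eager dygraph
t = paddle.to_tensor([1.0, 2.0])
print(framework.in_dygraph_mode())          # True
print(isinstance(t, framework.Variable))    # True via the core.eager.Tensor check

paddle.enable_static()                      # static graph
x = paddle.static.data(name="x", shape=[2], dtype="float32")
print(isinstance(x, framework.Variable))    # True via the plain Variable branch
paddle.disable_static()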
1 change: 0 additions & 1 deletion python/paddle/fluid/layers/layer_function_generator.py
@@ -272,7 +272,6 @@ def func(x, name=None):
op = getattr(_C_ops, op_type)
return op(x)
# TODO(dev): Because some ops' yaml has not been migrated.
- # Replace it with _in_legacy_dygraph while all yaml work is done.
if in_dygraph_mode() and hasattr(_legacy_C_ops, op_type):
op = getattr(_legacy_C_ops, op_type)
return op(x)
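The dispatch this file keeps after the stale comment is dropped is roughly the pattern below; call_unary is a hypothetical helper written for illustration, not Paddle API.

# Sketch: prefer the migrated _C_ops binding in eager mode, fall back to
# _legacy_C_ops for ops whose yaml has not been migrated yet.
from paddle import _C_ops, _legacy_C_ops
from paddle.fluid.framework import in_dygraph_mode

def call_unary(op_type, x):
    if in_dygraph_mode() and hasattr(_C_ops, op_type):
        return getattr(_C_ops, op_type)(x)
    if in_dygraph_mode() and hasattr(_legacy_C_ops, op_type):
        return getattr(_legacy_C_ops, op_type)(x)
    raise NotImplementedError(f"{op_type} has no dygraph binding here")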
19 changes: 5 additions & 14 deletions python/paddle/fluid/tests/unittests/op_test.py
@@ -38,7 +38,6 @@
_dygraph_tracer,
_enable_legacy_dygraph,
_in_eager_without_dygraph_check,
- _in_legacy_dygraph,
_test_eager_guard,
)
from paddle.fluid.op import Operator
@@ -716,7 +715,7 @@ def create_var(np_value, name, is_input, if_return_inputs_grad_dict):

if if_return_inputs_grad_dict:
v.stop_gradient = False
- if not _in_legacy_dygraph():
+ if hasattr(v, "retain_grads"):
v.retain_grads()

if has_lod:
Expand Down Expand Up @@ -2515,22 +2514,14 @@ def _get_dygraph_grad(
for no_grad_val in no_grad_set:
del inputs[no_grad_val]

- if not _in_legacy_dygraph():
-     core.eager.run_backward(
+ core.eager.run_backward(
fluid.layers.utils.flatten(outputs), grad_outputs, False
)
-     grad_inputs = []
-     for inputs_list in inputs.values():
+ grad_inputs = []
+ for inputs_list in inputs.values():
for inp in inputs_list:
grad_inputs.append(inp.grad.numpy())
-     return grad_inputs
- else:
-     grad_inputs = paddle.grad(
-         outputs=fluid.layers.utils.flatten(outputs),
-         inputs=fluid.layers.utils.flatten(inputs),
-         grad_outputs=grad_outputs,
-     )
-     return [grad.numpy() for grad in grad_inputs]
+ return grad_inputs

@staticmethod
def _numpy_to_lod_tensor(np_value, lod, place):
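A small stand-alone illustration (not part of op_test.py) of the gradient pattern the helper now follows: run backward once under eager dygraph and read each input's gradient from .grad, with no separate legacy-dygraph branch.

import paddle

# d(sum(x*x))/dx = 2x, so the expected gradient is [2., 4., 6.]
x = paddle.to_tensor([1.0, 2.0, 3.0], stop_gradient=False)
y = (x * x).sum()
y.backward()
print(x.grad.numpy())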
1 change: 0 additions & 1 deletion python/paddle/framework/__init__.py
@@ -64,7 +64,6 @@

from ..fluid.layer_helper import LayerHelper # noqa: F401
from ..fluid.framework import in_dygraph_mode # noqa: F401
- from ..fluid.framework import _in_legacy_dygraph # noqa: F401
from ..fluid.framework import _global_flags # noqa: F401
from ..fluid.framework import _apply_pass # noqa: F401
from ..fluid.framework import switch_main_program
85 changes: 37 additions & 48 deletions python/paddle/nn/functional/norm.py
@@ -17,8 +17,8 @@
# TODO: define normalization api
import paddle
import paddle.fluid as fluid
- from paddle import _C_ops, _legacy_C_ops, in_dynamic_mode
- from paddle.fluid.framework import _in_legacy_dygraph, in_dygraph_mode
+ from paddle import _C_ops, in_dynamic_mode
+ from paddle.fluid.framework import in_dygraph_mode

from ...fluid import dygraph_utils
from ...fluid.data_feeder import check_type, check_variable_and_dtype
@@ -336,54 +336,43 @@ def layer_norm(
out, _, _ = _C_ops.layer_norm(x, weight, bias, epsilon, begin_norm_axis)
return out

- if _in_legacy_dygraph():
-     out, _, _ = _legacy_C_ops.layer_norm(
-         x,
-         weight,
-         bias,
-         'epsilon',
-         epsilon,
-         'begin_norm_axis',
-         begin_norm_axis,
+ else:
+     check_variable_and_dtype(
+         x, 'input', ['float16', 'float32', 'float64'], 'LayerNorm'
+     )

+     inputs = dict()
+     inputs['X'] = [x]
+     if weight:
+         inputs['Scale'] = [weight]
+     if bias:
+         inputs['Bias'] = [bias]
+     attrs = {"epsilon": epsilon, "begin_norm_axis": begin_norm_axis}

+     # create output
+     helper = LayerHelper('layer_norm', **locals())

+     dtype = x.dtype
+     mean_out = helper.create_variable_for_type_inference(
+         dtype=dtype, stop_gradient=True
+     )
+     variance_out = helper.create_variable_for_type_inference(
+         dtype=dtype, stop_gradient=True
+     )
+     layer_norm_out = helper.create_variable_for_type_inference(dtype)

+     helper.append_op(
+         type="layer_norm",
+         inputs=inputs,
+         outputs={
+             "Y": layer_norm_out,
+             "Mean": mean_out,
+             "Variance": variance_out,
+         },
+         attrs={"epsilon": epsilon, "begin_norm_axis": begin_norm_axis},
+     )
-     return out

- check_variable_and_dtype(
-     x, 'input', ['float16', 'float32', 'float64'], 'LayerNorm'
- )

- inputs = dict()
- inputs['X'] = [x]
- if weight:
-     inputs['Scale'] = [weight]
- if bias:
-     inputs['Bias'] = [bias]
- attrs = {"epsilon": epsilon, "begin_norm_axis": begin_norm_axis}

- # create output
- helper = LayerHelper('layer_norm', **locals())

- dtype = x.dtype
- mean_out = helper.create_variable_for_type_inference(
-     dtype=dtype, stop_gradient=True
- )
- variance_out = helper.create_variable_for_type_inference(
-     dtype=dtype, stop_gradient=True
- )
- layer_norm_out = helper.create_variable_for_type_inference(dtype)

- helper.append_op(
-     type="layer_norm",
-     inputs=inputs,
-     outputs={
-         "Y": layer_norm_out,
-         "Mean": mean_out,
-         "Variance": variance_out,
-     },
-     attrs={"epsilon": epsilon, "begin_norm_axis": begin_norm_axis},
- )

- return helper.append_activation(layer_norm_out)
+     return helper.append_activation(layer_norm_out)


def instance_norm(
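For context, a usage sketch of the public API whose implementation is reorganized above; the shapes and values are arbitrary assumptions, not taken from the PR. In eager dygraph the call goes through the _C_ops fast path, while under paddle.enable_static() it builds a layer_norm op via LayerHelper as in the new else branch.

import paddle
import paddle.nn.functional as F

x = paddle.rand([4, 8])
weight = paddle.ones([8])   # scale over the last (normalized) dimension
bias = paddle.zeros([8])
out = F.layer_norm(x, normalized_shape=8, weight=weight, bias=bias, epsilon=1e-5)
print(out.shape)            # [4, 8]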