10 changes: 5 additions & 5 deletions python/paddle/base/backward.py
@@ -830,7 +830,7 @@ class Var:
def __init__(self, var_name):
self.var_name = var_name
self.gen_op = None
- self.pendding_ops = []
+ self.pending_ops = []

def set_gen_op(self, gen_op):
assert isinstance(gen_op, Op)
@@ -839,7 +839,7 @@ def set_gen_op(self, gen_op):

def add_pending_op(self, op):
assert isinstance(op, Op)
- self.pendding_ops.append(op)
+ self.pending_ops.append(op)

class Op:
def __init__(self, op_desc):
@@ -916,8 +916,8 @@ def _create_op_node(op_desc):
op_node = candidate_ops.pop(0)
if _all_in_set_(op_node.inputs, ready_vars):
for out_var in op_node.outputs:
- candidate_ops.extend(out_var.pendding_ops)
- op_list.extend(out_var.pendding_ops)
+ candidate_ops.extend(out_var.pending_ops)
+ op_list.extend(out_var.pending_ops)
ready_vars.update(op_node.outputs)
else:
remove_ops = False
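Note: a minimal sketch of the readiness-driven traversal this rename touches, with simplified names rather than the exact Paddle implementation. Each Var records the ops that consume it in pending_ops, and an op is emitted once every one of its input vars is ready:

class Var:
    def __init__(self, var_name):
        self.var_name = var_name
        self.pending_ops = []          # ops that consume this variable

    def add_pending_op(self, op):
        self.pending_ops.append(op)


class Op:
    def __init__(self, name, inputs, outputs):
        self.name, self.inputs, self.outputs = name, inputs, outputs
        for var in inputs:
            var.add_pending_op(self)


def topo_order(ready_vars, candidate_ops):
    # Emit ops in an order where every input var is produced first.
    # Assumes every queued op eventually becomes ready (a well-formed DAG).
    ready_vars, candidate_ops = set(ready_vars), list(candidate_ops)
    op_list, seen = [], set()
    while candidate_ops:
        op = candidate_ops.pop(0)
        if op in seen:
            continue
        if all(v in ready_vars for v in op.inputs):
            seen.add(op)
            op_list.append(op)
            ready_vars.update(op.outputs)
            for out_var in op.outputs:
                candidate_ops.extend(out_var.pending_ops)
        else:
            candidate_ops.append(op)   # not ready yet, revisit later
    return op_list


a, b, c = Var("a"), Var("b"), Var("c")
ops = [Op("mul", [a], [b]), Op("add", [b], [c])]
print([op.name for op in topo_order([a], ops[:1])])  # ['mul', 'add']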
@@ -1571,7 +1571,7 @@ def find_op_index(block_desc, cur_op_desc):
# NOTE: In primitive mode, the intermediate variable generated by
# decompositing raw grad op are not satisfied the rule of 'XX@GRAD',
# which will cause it be pruned according to current pruning logic.
- # For simplicity, we treate all prmitive operators as one raw
+ # For simplicity, we treat all primitive operators as one raw
# operator, and keep the pruning logic consistent with currently
# logic. The drawback of this solution is may lead to some primitive
# operators are not pruned, which is needed to fixed.
6 changes: 3 additions & 3 deletions python/paddle/base/executor.py
@@ -739,7 +739,7 @@ def _as_lodtensor(data, place, dtype=None):
data = np.array(data)
if data.dtype == np.object_:
raise TypeError(
"\n\tFaild to convert input data to a regular ndarray :\n\t* Usually "
"\n\tFailed to convert input data to a regular ndarray :\n\t* Usually "
"this means the input data contains nested lists with different lengths. "
"Please consider using 'base.create_lod_tensor' to convert it to a LoD-Tensor."
)
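Note: a small illustration of when this branch triggers, assuming plain NumPy behaviour (exact behaviour differs across NumPy versions):

import numpy as np

# Ragged nested lists cannot form a regular ndarray. Older NumPy silently
# produces an object-dtype array (which the check above rejects); NumPy
# >= 1.24 refuses outright unless dtype=object is passed explicitly.
ragged = [[1, 2, 3], [4, 5]]
arr = np.array(ragged, dtype=object)
print(arr.dtype)  # object -> _as_lodtensor would raise the TypeError above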
@@ -1675,7 +1675,7 @@ def run(
needed to generate :code:`fetch_list` will be pruned. The default is False, which means the
program will not pruned and all the operators and variables will be executed during running.
Note that if the tuple returned from :code:`Optimizer.minimize()` is passed to :code:`fetch_list`,
- :code:`use_prune` will be overrided to True, and the program will be pruned.
+ :code:`use_prune` will be overridden to True, and the program will be pruned.

Returns:

@@ -1880,7 +1880,7 @@ def _run_impl(
if scope is None:
scope = global_scope()

- # use_prune can be overrided by putting optimize_ops in fetch_list
+ # use_prune can be overridden by putting optimize_ops in fetch_list
_origin_fetch_list = fetch_list
_origin_program = program
fetch_list, optimize_ops = self._split_optimize_ops_in_fetch_list(
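Note: a hedged usage sketch of the pruning behaviour documented in the run() docstring above. The program is illustrative, and including the ops returned by Optimizer.minimize() in fetch_list is one plausible form of "passing the minimize() tuple to fetch_list":

import numpy as np
import paddle

paddle.enable_static()
main_prog, startup_prog = paddle.static.Program(), paddle.static.Program()
with paddle.static.program_guard(main_prog, startup_prog):
    x = paddle.static.data(name="x", shape=[None, 4], dtype="float32")
    loss = paddle.mean(paddle.static.nn.fc(x, size=1))
    optimize_ops, params_grads = paddle.optimizer.SGD(
        learning_rate=0.01
    ).minimize(loss)

exe = paddle.static.Executor(paddle.CPUPlace())
exe.run(startup_prog)
feed = {"x": np.random.rand(2, 4).astype("float32")}
# Because fetch_list carries the ops from minimize(), use_prune is
# overridden to True and unreachable operators are pruned before running.
results = exe.run(main_prog, feed=feed, fetch_list=[loss] + optimize_ops)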
2 changes: 1 addition & 1 deletion python/paddle/base/incubate/checkpoint/auto_checkpoint.py
@@ -293,7 +293,7 @@ def __init__(
self._save_checkpoint_inter = self._checker.save_checkpoint_inter
assert (
self._save_checkpoint_inter >= 0
), f"checkpointer:{self._save_checkpoint_inter} must >=0"
), f"checkpoint inter:{self._save_checkpoint_inter} must >=0"
self._last_checkpoint_time = time.time()

self._load_cp_nos = None
22 changes: 11 additions & 11 deletions python/paddle/nn/layer/layers.py
@@ -302,7 +302,7 @@ def is_instance(self, param, cls):
)


- class LayerOpsRecoder:
+ class LayerOpsRecorder:
"""
Record generated operators information in nn.Layer.
"""
@@ -405,7 +405,7 @@ def __init__(self, name_scope=None, dtype="float32"):
self._loaddict_holder = collections.OrderedDict()

# Record generated op_descs in this layer
- self._op_recorder = LayerOpsRecoder(ops=[], hooks=[])
+ self._op_recorder = LayerOpsRecorder(ops=[], hooks=[])
self._customized_attrs = {}

self._forward_pre_hooks = collections.OrderedDict()
@@ -636,7 +636,7 @@ def register_forward_post_hook(self, hook):

>>> # the forward_post_hook change the output of the layer: output = output * 2
>>> def forward_post_hook(layer, input, output):
- ... # user can use layer, input and output for information statistis tasks
+ ... # user can use layer, input and output for information statistics tasks
...
... # change the output
... return output * 2
@@ -690,7 +690,7 @@ def register_forward_pre_hook(self, hook):

>>> # the forward_pre_hook change the input of the layer: input = input * 2
>>> def forward_pre_hook(layer, input):
- ... # user can use layer and input for information statistis tasks
+ ... # user can use layer and input for information statistics tasks
...
... # change the input
... input_return = (input[0] * 2)
@@ -998,7 +998,7 @@ def astype(self, dtype=None):
return self
else:
raise ValueError(
"dtype value error, must be 'bfloat16', 'float16', 'float32', 'float64', 'int8', 'int16', 'int32', 'int64', 'uint8', 'complex64', 'complex128', 'bool', or paddle.dtype, numpy.dtype, but recieve "
"dtype value error, must be 'bfloat16', 'float16', 'float32', 'float64', 'int8', 'int16', 'int32', 'int64', 'uint8', 'complex64', 'complex128', 'bool', or paddle.dtype, numpy.dtype, but receive "
+ str(dtype)
)
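Note: a brief, illustrative use of the astype validation above; the dtype strings are assumptions taken from the accepted list in the message:

import paddle

linear = paddle.nn.Linear(4, 2)
linear = linear.astype("float16")   # accepted string, astype returns the layer

try:
    linear.astype("float128")       # not in the accepted list
except ValueError as err:
    print(err)                      # "dtype value error, must be 'bfloat16', ..."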

@@ -1951,7 +1951,7 @@ def to_static_state_dict(
include_sublayers(bool, optional) : If true, also include the parameters and persistable buffers from sublayers. Default: True.
use_hook(bool, optional) : If true, the operations contained in _state_dict_hooks will be appended to the destination. Default: True.

- Retruns:
+ Returns:
dict, a dict contains all the parameters and persistable buffers.

Examples:
@@ -1988,7 +1988,7 @@ def state_dict(
include_sublayers(bool, optional) : If true, also include the parameters and persistable buffers from sublayers. Default: True.
use_hook(bool, optional) : If true, the operations contained in _state_dict_hooks will be appended to the destination. Default: True.

- Retruns:
+ Returns:
dict: a dict contains all the parameters and persistable buffers.

Examples:
@@ -2049,7 +2049,7 @@ def _check_match(key, param):
if len(state) != len(param):
missing_keys.append(key)
raise ValueError(
f"{key} receieves the length of {len(state)}, "
f"{key} receives the length of {len(state)}, "
f"but the expected shape is {len(param)}"
)
else:
@@ -2126,7 +2126,7 @@ def _set_var(var, ndarray):
_set_var(param, state)
except ValueError as e:
raise ValueError(
"This error might happens in dy2static, while calling 'set_state_dict' dynamicly in 'forward', which is not supported. If you only need call 'set_state_dict' once, move it to '__init__'."
"This error might happens in dy2static, while calling 'set_state_dict' dynamically in 'forward', which is not supported. If you only need call 'set_state_dict' once, move it to '__init__'."
)

return missing_keys, unexpected_keys
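Note: a hedged sketch of the pattern the message warns about; the weight path is hypothetical. Load the state dict once in __init__ rather than inside a to_static-decorated forward:

import paddle

class MyNet(paddle.nn.Layer):
    def __init__(self, weight_path="my_net.pdparams"):  # hypothetical file
        super().__init__()
        self.fc = paddle.nn.Linear(4, 2)
        # Correct: load weights once, before any static translation of forward.
        self.set_state_dict(paddle.load(weight_path))

    @paddle.jit.to_static
    def forward(self, x):
        # Calling self.set_state_dict(...) here would raise the ValueError
        # above while the method is being translated to a static graph.
        return self.fc(x)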
@@ -2230,7 +2230,7 @@ def _transform(self, t, device, dtype, blocking):
if t.place.is_gpu_place():
# for gpu, minimum memory allocation unit is 256 bytes.
size_dtype = core.size_of_dtype(dtype)
- # Note(zhangbo): Paddle GPU minimum memory allocation unit is 256 bytes, waiting_alloc_memory will comput ‘t’ occupied memory space.
+ # Note(zhangbo): Paddle GPU minimum memory allocation unit is 256 bytes, waiting_alloc_memory will compute ‘t’ occupied memory space.
# Coefficient 1.2 is used to avoid OOM that may occur in this critical state when the memory is just enough.
waiting_alloc_memory = (
((np.prod(t.shape) * size_dtype) / 256 + 1) * 256 * 1.2
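Note: a worked instance of the estimate above, using illustrative numbers: a float32 tensor of shape (1000, 10) occupies 40000 bytes, which is padded in 256-byte units and then scaled by the 1.2 safety factor.

import numpy as np

shape, size_dtype = (1000, 10), 4            # float32 -> 4 bytes per element
raw_bytes = np.prod(shape) * size_dtype      # 40000 bytes
waiting_alloc_memory = (raw_bytes / 256 + 1) * 256 * 1.2
print(waiting_alloc_memory)                  # ~48307.2 bytes reserved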
@@ -2345,7 +2345,7 @@ def transform(t, device, dtype, blocking):

def _startup_program(self):
"""
- Return starup program containing initialization operations of all parameters.
+ Return startup program containing initialization operations of all parameters.

NOTE(dev): This is a very low level API and only for inner developer.
"""
2 changes: 1 addition & 1 deletion test/dygraph_to_static/test_legacy_error.py
@@ -453,7 +453,7 @@ def test_set_state_dict_err(self):
error_message = str(new_exception)

self.assertIn(
"This error might happens in dy2static, while calling 'set_state_dict' dynamicly in 'forward', which is not supported. If you only need call 'set_state_dict' once, move it to '__init__'.",
"This error might happens in dy2static, while calling 'set_state_dict' dynamically in 'forward', which is not supported. If you only need call 'set_state_dict' once, move it to '__init__'.",
error_message,
)
