4 changes: 2 additions & 2 deletions python/paddle/jit/dy2static/convert_call_func.py
@@ -220,7 +220,7 @@ def convert_call(func):

if inspect.isgeneratorfunction(func):
# NOTE(xiongkun03): inspect.isfunction() will return True even though func is a generator function.
-# If we don't deal generatorfunction here, we will regard it as normal function and get errors in some
+# If we don't deal generator function here, we will regard it as normal function and get errors in some
# occasion.
number_of_stars = 30
translator_logger.warn(
@@ -304,7 +304,7 @@ def convert_call(func):
_, forward_func = unwrap_decorators(func.forward)
func._original_funcs['forward'] = forward_func.__func__
forward_func = convert_to_static(forward_func)
-# Bound mothod will be convert into plain function after `convert_to_static`.
+# Bound method will be convert into plain function after `convert_to_static`.
# So descriptor mechanism is used to bound `self` instance on function to
# keep it as bound method.
func.forward = forward_func.__get__(func)
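Aside: the descriptor mechanism referenced in this hunk is plain Python. Any function is a descriptor, so calling its __get__ with an instance produces a bound method. A minimal sketch with illustrative names (not Paddle's API):

class Net:
    pass

def forward(self, x):
    return x * 2

net = Net()
# Rebind `self` so the converted plain function behaves like a bound method
# again, mirroring `func.forward = forward_func.__get__(func)` above.
net.forward = forward.__get__(net)
print(net.forward(3))  # prints 6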
8 changes: 4 additions & 4 deletions python/paddle/jit/dy2static/function_spec.py
@@ -134,7 +134,7 @@ def args_to_input_spec(self, args, kwargs):
# So we don't support to deal this case while specifying `input_spec` currently.
if kwargs:
raise ValueError(
"{} got unexpected keyword arguments: {}. Cannot trace the function when `input_spec` is specificed.".format(
"{} got unexpected keyword arguments: {}. Cannot trace the function when `input_spec` is specified.".format(
self._dygraph_function.__name__, kwargs
)
)
@@ -420,7 +420,7 @@ def check_type_and_len(input, spec, check_length=False):
for rest_input in inputs[len(input_spec) :]:
if isinstance(rest_input, (core.eager.Tensor, np.ndarray)):
logging_utils.warn(
"The inputs constain `{}` without specifying InputSpec, its shape and dtype will be treated immutable. "
"The inputs contain `{}` without specifying InputSpec, its shape and dtype will be treated immutable. "
"Please specific InputSpec information in `@to_static` if you expect them as mutable inputs.".format(
type_name(rest_input)
)
@@ -452,7 +452,7 @@ def check_type_and_len(input, spec, check_length=False):
real_spec.shape = input_spec.shape
else:
logging_utils.warn(
f"input spec is not compatitable with real inputs. input_spec: {input_spec} , real_spec: {real_spec} "
f"input spec is not compatible with real inputs. input_spec: {input_spec} , real_spec: {real_spec} "
)
return real_spec
else:
@@ -520,7 +520,7 @@ def _replace_spec_name(name, input_spec):

def _hash_spec_names(args_specs, kwargs_specs):
"""
-Generater hash spec with args/kwargs InputSpec names.
+Generator hash spec with args/kwargs InputSpec names.
Consider the following InputSpecs with same shape/dtype except for name:
1. [InputSpec([3,3], 'float32', 'x'), InputSpec([3,3], 'float32', 'x')]
2. [InputSpec([3,3], 'float32', 'x'), InputSpec([3,3], 'float32', 'y')]
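Aside: the docstring above motivates hashing InputSpec names so that two spec lists identical in shape and dtype but differing in name map to different cache keys. A hedged sketch of that idea, assuming only that each spec exposes a .name attribute as paddle.static.InputSpec does (the normalization is illustrative, not Paddle's implementation):

from paddle.static import InputSpec

def hash_spec_names(args_specs, kwargs_specs):
    # Collect positional spec names, then keyword spec names in a stable order.
    names = [spec.name for spec in args_specs]
    names += [f"{key}@{spec.name}" for key, spec in sorted(kwargs_specs.items())]
    return hash(tuple(names))

case1 = [InputSpec([3, 3], 'float32', 'x'), InputSpec([3, 3], 'float32', 'x')]
case2 = [InputSpec([3, 3], 'float32', 'x'), InputSpec([3, 3], 'float32', 'y')]
assert hash_spec_names(case1, {}) != hash_spec_names(case2, {})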
4 changes: 2 additions & 2 deletions python/paddle/jit/dy2static/logging_utils.py
@@ -39,7 +39,7 @@ def wrapper(*args, **kwargs):

class TranslatorLogger:
"""
-class for Logging and debugging during the tranformation from dygraph to static graph.
+class for Logging and debugging during the transformation from dygraph to static graph.
The object of this class is a singleton.
"""

@@ -199,7 +199,7 @@ def set_verbosity(level=0, also_to_stdout=False):
`set_verbosity` has a higher priority than the environment variable.

Args:
-level(int): The verbosity level. The larger value idicates more verbosity.
+level(int): The verbosity level. The larger value indicates more verbosity.
The default value is 0, which means no logging.
also_to_stdout(bool): Whether to also output log messages to `sys.stdout`.

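Aside: the hunk above documents the public paddle.jit.set_verbosity API, which takes priority over the corresponding environment variable. A short usage example:

import paddle

# Larger levels mean more verbose dy2static logging; 0 disables it.
# Messages are also mirrored to sys.stdout here.
paddle.jit.set_verbosity(3, also_to_stdout=True)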
10 changes: 5 additions & 5 deletions python/paddle/jit/dy2static/partial_program.py
@@ -112,7 +112,7 @@ def __get__(self, instance, cls):

class ProgramInfo:
"""
-A helper class to recoder Program information
+A helper class to record Program information
"""

def __init__(self):
@@ -126,7 +126,7 @@ def __init__(self):

def __call__(self, key, prog_creator):
"""
-Recoder infer program and op size.
+Record infer program and op size.
"""
assert key in ['fp32', 'amp', 'fp16']
if key not in self.programs:
@@ -564,7 +564,7 @@ def backward_program(self):
else:
"""
Can't just return paddle.static.Program(), because self.backward_program is a property,
-whenever we call this method, a tmp Program() object is created and is gc immediatly
+whenever we call this method, a tmp Program() object is created and is gc immediately
after executed the following line in PartialProgramLayer.__call__.

>>> self.backward_program.desc.block(0),
@@ -597,7 +597,7 @@ def forward(self, in):
return x, y

loss = forward(in)[0].sum()
-loss.backward() # <----- x@grad will be overwrited by elementwise_add_grad Op
+loss.backward() # <----- x@grad will be overwritten by elementwise_add_grad Op
"""

def _need_aggregation(var):
@@ -1116,7 +1116,7 @@ def _check_params_all_inited(self, main_program):

param_and_buffer_names_set = set()
for i, var in enumerate(self._params):
-# self._params constains parameters and buffers with persistable=True.
+# self._params contains parameters and buffers with persistable=True.
if not isinstance(var, core.eager.Tensor):
raise TypeError(
'Type of self._params[{}] in PartialProgramLayer should be Parameter or Variable, but received {}.'.format(
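Aside: the backward_program comment above describes a lifetime pitfall: a temporary Program returned from a property is garbage-collected as soon as the expression using it finishes, so anything borrowed from its desc dangles. A minimal sketch of the usual keep-alive pattern, caching on the instance (an illustration, not Paddle's actual fix):

import paddle

class Example:
    @property
    def backward_program(self):
        # Hypothetical cache attribute: holding a reference on `self`
        # keeps the Program alive beyond the property access.
        if getattr(self, '_cached_backward_program', None) is None:
            self._cached_backward_program = paddle.static.Program()
        return self._cached_backward_program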
4 changes: 2 additions & 2 deletions python/paddle/jit/dy2static/pir_partial_program.py
@@ -665,7 +665,7 @@ def forward(self, in):
return x, y

loss = forward(in)[0].sum()
-loss.backward() # <----- x@grad will be overwrited by elementwise_add_grad Op
+loss.backward() # <----- x@grad will be overwritten by elementwise_add_grad Op
"""

def _need_aggregation(var):
@@ -1044,7 +1044,7 @@ def _check_params_all_inited(self, main_program):

param_and_buffer_names_set = set()
for i, var in enumerate(self._params):
-# self._params constains parameters and buffers with persistable=True.
+# self._params contains parameters and buffers with persistable=True.
if not isinstance(var, core.eager.Tensor):
raise TypeError(
'Type of self._params[{}] in PartialProgramLayer should be Parameter or Variable, but received {}.'.format(
2 changes: 1 addition & 1 deletion python/paddle/jit/dy2static/transformers/transform.py
@@ -46,7 +46,7 @@

def apply_optimization(transformers):
"""
-Judge wheter to apply optimized transformation, such as BreakTransformOptimizer.
+Judge whether to apply optimized transformation, such as BreakTransformOptimizer.
And not all optimized transformations are applied by default. It's controlled by
'export FLAGS_optim_transformation=1'
"""
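Aside: per the docstring above, optimized transformations are gated by 'export FLAGS_optim_transformation=1'. A hedged sketch of such an environment-variable check (the flag name comes from the docstring; the parsing logic is an assumption):

import os

def optim_transformation_enabled() -> bool:
    # Treat the flag as off unless it is explicitly set to a truthy value.
    return os.getenv('FLAGS_optim_transformation', '0') in ('1', 'true', 'True')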
(next file)
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

-# This file stores the customed function that will be called by the dispatch mechanism.
+# This file stores the customized function that will be called by the dispatch mechanism.

from ...utils import BreakGraphError, FallbackError

(next file)
@@ -205,7 +205,7 @@ def start_translate(frame: types.FrameType, **kwargs) -> GuardedFunction:
raise InnerError(
f"{simulator._code.co_name} should not fallback, but got '{e}'"
)
-# if disable_eval_frame is True, it means we want fallback to speedup rather than error occured
+# if disable_eval_frame is True, it means we want fallback to speedup rather than error occurred
if is_strict_mode() and e.disable_eval_frame is False:
raise
log(
(next file)
@@ -405,12 +405,12 @@ def start_compile(self, *ret_vars: VariableBase):
]

tensor_items = self._find_tensor_outputs(ret_items)
-compiled_fn, statment_ir = self.sir_ctx.compile_fn(
+compiled_fn, statement_ir = self.sir_ctx.compile_fn(
[Symbol(tensor_var.var_name) for tensor_var in tensor_items],
**self._kwargs,
)
-input_names = statment_ir.inputs
-compiled_fn_name = f"__compiled_fn_{statment_ir.name}"
+input_names = statement_ir.inputs
+compiled_fn_name = f"__compiled_fn_{statement_ir.name}"
# prepare function and inputs
self.pycode_gen.gen_load_object(compiled_fn, compiled_fn_name)
for name in input_names:
@@ -463,7 +463,7 @@ def call_paddle_api(
"""
assert is_paddle_api(func)
# not fallback api, start symbolic trace.
-# TODO(xiokgun): may have python buildin object inside metas.
+# TODO(xiokgun): may have python builtin object inside metas.
# TODO(xiokgun): 4 kinds of python arguments. support it !!
log(3, f"call paddle.api : {func.__name__}", "\n")

@@ -550,7 +550,7 @@ def compute_fn(static_function, inputs, outputs, stacks):
)

def message_handler(*args, **kwargs):
return "Call ast faild"
return "Call ast failed"

try:
return inner_error_default_handler(
@@ -633,7 +633,7 @@ def get_opcode_executor_stack():
from .opcode_executor import OpcodeExecutorBase

if len(OpcodeExecutorBase.call_stack) == 0:
-# In test case, we can meet this senario.
+# In test case, we can meet this scenario.
return []
current_executor = OpcodeExecutorBase.call_stack[-1]
current_line = current_executor._current_line
(next file)
@@ -311,7 +311,7 @@ def FOR_ITER(self, instr: Instruction):

self._graph.add_global_guarded_variable(iterator)

-# simplely get next
+# simply get next
if isinstance(
iterator,
SequenceIterVariable,
(next file)
@@ -158,7 +158,7 @@ def gen_new_opcode(
for key, val in code_options.items():
if isinstance(val, list):
code_options[key] = tuple(val)
-# code_options is a dict, use keys to makesure the input order
+# code_options is a dict, use keys to make sure the input order
return types.CodeType(*[code_options[k] for k in keys])


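Aside: the hunk above rebuilds a code object positionally, which is why the key order of code_options must match CodeType's constructor. Since Python 3.8, the replace method on code objects is a version-safe alternative that sidesteps the ordering problem; a minimal illustration:

def f():
    return 1

# Same bytecode, new name; no need to know CodeType's positional layout.
g_code = f.__code__.replace(co_name='g')
print(g_code.co_name)  # prints 'g'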
(next file)
@@ -242,7 +242,7 @@ def _find_var(key: str = "default") -> VariableBase | None:
class VariableBase:
"""
VariableBase is a basic concept and each symbols in VM stack is regarded as
-an Variable Object in symblic tracing process.
+an Variable Object in symbolic tracing process.

There are two key data structures during Python runtime:
PyFrameObject, which provides the instance for function logical lock usage,
(next file)
@@ -209,7 +209,7 @@ def _reconstruct(self, codegen: PyCodeGen):
self.graph.add_global_guarded_variable(var)
for var in self.kwargs.values():
self.graph.add_global_guarded_variable(var)
-# currently dont' consider kwargs
+# currently don't consider kwargs
codegen.gen_load_global("print", push_null=True)
for var in self.args:
var.reconstruct(codegen)
@@ -232,7 +232,7 @@ class DataVariable(VariableBase):
"""
A value only object.
If it's all magic method don't change the function_graph state, [tensor op, guard, side_effect]
-we will call it a ValueObjectVariable, we directy call python operator on it.
+we will call it a ValueObjectVariable, we directly call python operator on it.
"""

def __init__(
@@ -885,18 +885,18 @@ def keys(self):
def get(self, key):
if isinstance(key, VariableBase):
raise InnerError(
f"[{self.__class__.__name__}]: recieved {key} to get value."
f"[{self.__class__.__name__}]: received {key} to get value."
)
return self.proxy.get(key)

def set(self, key, value):
if isinstance(key, VariableBase):
raise InnerError(
f"[{self.__class__.__name__}]: recieved {key} as key."
f"[{self.__class__.__name__}]: received {key} as key."
)
if not isinstance(value, VariableBase):
raise InnerError(
f"[{self.__class__.__name__}]: recieved {value} to set value."
f"[{self.__class__.__name__}]: received {value} to set value."
)
self.proxy.set(key, value)
self.graph.side_effects.record_proxy_variable(self)
(next file)
@@ -533,7 +533,7 @@ def __init__(
def call_function(self, /, *args, **kwargs):
self.graph.add_global_guarded_variable(self)
# when layer is created in forward function, we use strong ref because it can't have
-# weigths and buffers, see PaddleLayerClassVariable for details.
+# weights and buffers, see PaddleLayerClassVariable for details.
weak_ref = not isinstance(self.tracker, CreateLayerTracker)
return self.graph.call_layer(self, weak_ref, *args, **kwargs)

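Aside: the comment above distinguishes layers created inside the forward function, which are held with a strong reference (per the comment, they cannot have weights and buffers), from pre-built layers, which are tracked weakly. A hypothetical sketch of that referencing choice using the stdlib weakref module:

import weakref

def make_layer_ref(layer, created_in_forward: bool):
    # Temporaries built inside forward need a strong reference to stay
    # alive; long-lived layers can be held weakly to avoid leaks.
    return layer if created_in_forward else weakref.ref(layer)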
(next file)
@@ -796,7 +796,7 @@ def _reconstruct(self, codegen: PyCodeGen):
for key in self.proxy.get_all().keys():
if not isinstance(key, ConstTypes):
raise InnerError(
f"[{self.__class__.__name__}]: recieved {key} as key."
f"[{self.__class__.__name__}]: received {key} as key."
)
key_var = ConstantVariable.wrap_literal(key, self.graph)
value_var = self[key]
@@ -809,7 +809,7 @@ def get_items(self):
for key in self.proxy.get_all().keys():
if not isinstance(key, ConstTypes):
raise InnerError(
f"[{self.__class__.__name__}]: recieved {key} as key."
f"[{self.__class__.__name__}]: received {key} as key."
)
key_var = VariableFactory.from_value(
key, self.graph, tracker=ConstTracker(key)
@@ -823,7 +823,7 @@ def get_wrapped_items(self):
for key in self.proxy.get_all().keys():
if not isinstance(key, ConstTypes):
raise InnerError(
f"[{self.__class__.__name__}]: recieved {key} as key."
f"[{self.__class__.__name__}]: received {key} as key."
)
items[key] = self[key]
return items
@@ -843,7 +843,7 @@ def __len__(self):
def get(self, key, default=None):
if isinstance(key, VariableBase):
raise InnerError(
f"[{self.__class__.__name__}]: recieved {key} to get value."
f"[{self.__class__.__name__}]: received {key} to get value."
)

if default is None:
@@ -863,12 +863,12 @@ def getitem(self, key):
def setitem(self, key, value):
if isinstance(key, VariableBase):
raise InnerError(
f"[{self.__class__.__name__}]: recieved {key} as key."
f"[{self.__class__.__name__}]: received {key} as key."
)

if not isinstance(value, VariableBase):
raise InnerError(
f"[{self.__class__.__name__}]: recieved {value} to set value."
f"[{self.__class__.__name__}]: received {value} to set value."
)

self.proxy.set(key, value)
@@ -889,7 +889,7 @@ def __delitem__(self, key):
def delitem(self, key):
if isinstance(key, VariableBase):
raise InnerError(
f"[{self.__class__.__name__}]: recieved {key} as key to delete."
f"[{self.__class__.__name__}]: received {key} as key to delete."
)
self.proxy.delete(key)
self.graph.side_effects.record_proxy_variable(self)
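Aside: the hunks above all follow one proxy-backed mutation pattern: reject keys or values that are still wrapped VariableBase objects, mutate through the proxy, then record the variable so the side effect can be replayed later. A hypothetical, self-contained sketch of that shape (simplified stand-ins, not the actual sot classes):

class Proxy:
    def __init__(self):
        self.data = {}

    def set(self, key, value):
        self.data[key] = value

    def delete(self, key):
        del self.data[key]

class SideEffects:
    def __init__(self):
        self.mutated_vars = []

    def record_proxy_variable(self, var):
        self.mutated_vars.append(var)

class DictVariable:
    def __init__(self, proxy, side_effects):
        self.proxy = proxy
        self.side_effects = side_effects

    def setitem(self, key, value):
        if isinstance(key, DictVariable):
            # Mirrors the InnerError guards above: keys must be unwrapped values.
            raise TypeError(f'received {key} as key.')
        self.proxy.set(key, value)
        self.side_effects.record_proxy_variable(self)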