
Commit 555d4b8

Fix words
1 parent 6cdfedd commit 555d4b8

10 files changed: 31 additions & 31 deletions

test/ir/inference/auto_scan_test.py

Lines changed: 7 additions & 7 deletions
@@ -278,9 +278,9 @@ def run_test(self, quant=False, *args, **kwargs):
                     model, params, prog_config, base_config, feed_data
                 )
             )
-            self.success_log(f"basline program_config: {prog_config}")
+            self.success_log(f"baseline program_config: {prog_config}")
             self.success_log(
-                f"basline predictor_config: {self.inference_config_str(base_config)}"
+                f"baseline predictor_config: {self.inference_config_str(base_config)}"
             )

             for pred_config, (atol, rtol) in self.sample_predictor_configs(
@@ -561,11 +561,11 @@ def inference_config_str(self, config) -> str:
             dic["passes"] = self.passes

         enable_trt = config.tensorrt_engine_enabled()
-        trt_precison = config.tensorrt_precision_mode()
+        trt_precision = config.tensorrt_precision_mode()
         trt_dynamic_shape = config.tensorrt_dynamic_shape_enabled()
         if enable_trt:
             dic["use_trt"] = True
-            dic["trt_precision"] = trt_precison
+            dic["trt_precision"] = trt_precision
             dic["use_dynamic_shape"] = trt_dynamic_shape
         else:
             dic["use_trt"] = False
@@ -713,11 +713,11 @@ def assert_op_size(self, trt_engine_num, paddle_op_num):
     def inference_config_str(self, config: paddle_infer.Config) -> str:
         dic = {}
         enable_trt = config.tensorrt_engine_enabled()
-        trt_precison = config.tensorrt_precision_mode()
+        trt_precision = config.tensorrt_precision_mode()
         trt_dynamic_shape = config.tensorrt_dynamic_shape_enabled()
         if enable_trt:
             dic["use_trt"] = True
-            dic["trt_precision"] = trt_precison
+            dic["trt_precision"] = trt_precision
             dic["use_dynamic_shape"] = trt_dynamic_shape
         else:
             dic["use_trt"] = False
@@ -755,7 +755,7 @@ def random_to_skip():
                     gpu_config,
                     prog_config.get_feed_data(),
                 )
-                self.success_log(f"basline program_config: {prog_config}")
+                self.success_log(f"baseline program_config: {prog_config}")

                 for (
                     pred_config,
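Aside (not part of the diff): the renamed trt_precision local only feeds the textual config summary. A minimal standalone sketch of the pattern the two patched inference_config_str methods share, assuming a paddle_infer.Config built by the caller:

import paddle.inference as paddle_infer

def trt_summary(config: paddle_infer.Config) -> dict:
    # Collect only the TensorRT fields touched by this commit; the real
    # helpers record more keys (passes, CPU/GPU settings, and so on).
    dic = {}
    if config.tensorrt_engine_enabled():
        dic["use_trt"] = True
        dic["trt_precision"] = config.tensorrt_precision_mode()
        dic["use_dynamic_shape"] = config.tensorrt_dynamic_shape_enabled()
    else:
        dic["use_trt"] = False
    return dic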

test/ir/inference/program_config.py

Lines changed: 1 addition & 1 deletion
@@ -260,7 +260,7 @@ def __init__(
         no_cast_list: Optional[List[str]] = None,
     ):
         self.ops = ops
-        # if no weight need to save, we create a place_holder to help seriazlie params.
+        # if no weight need to save, we create a place_holder to help serialize params.
         if not weights:

             def generate_weight():
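Aside: the corrected comment describes a placeholder weight. A hypothetical, simplified reconstruction of the idea (the real __init__ wraps the array in that file's TensorConfig helper before storing it):

import numpy as np

def generate_weight():
    # A single dummy value keeps the serialized parameter file non-empty.
    return np.array([1]).astype(np.float32)

weights = {}
if not weights:
    weights["place_holder_weight"] = generate_weight()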

test/ir/inference/test_conv_elementwise_add2_act_fuse_pass.py

Lines changed: 1 addition & 1 deletion
@@ -190,7 +190,7 @@ def sample_program_config(self, draw):
             )
         )

-        # 9. Generate legal elemntwise_add: X of conv2d
+        # 9. Generate legal elementwise_add: X of conv2d
         bias_2_dict = {}
         bias_2_dict[1] = [
             x_shape[0],

test/ir/inference/test_trt_convert_pad.py

Lines changed: 1 addition & 1 deletion
@@ -137,7 +137,7 @@ def teller1(program_config, predictor_config):
         self.add_skip_case(
             teller1,
             SkipReasons.TRT_NOT_IMPLEMENTED,
-            "NOT Implemented: we need to add support pad not only inplement on h or w, such as paddings = [0, 0, 1, 1, 1, 1, 1, 1]",
+            "NOT Implemented: we need to add support pad not only implement on h or w, such as paddings = [0, 0, 1, 1, 1, 1, 1, 1]",
         )

     def test(self):
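Aside: the skip message concerns full-rank paddings. A small hypothetical illustration, assuming paddle.nn.functional.pad pads from the first to the last axis when given a 2*ndim list on a 4-D NCHW tensor:

import paddle

x = paddle.ones([1, 1, 2, 2])
# paddings = [0, 0, 1, 1, 1, 1, 1, 1]: (before, after) pairs for N, C, H, W.
y = paddle.nn.functional.pad(x, [0, 0, 1, 1, 1, 1, 1, 1], mode="constant", value=0.0)
print(y.shape)  # [1, 3, 4, 4]: padding hits C as well as H and W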

test/ir/inference/test_trt_convert_reshape.py

Lines changed: 8 additions & 8 deletions
@@ -88,12 +88,12 @@ def generate_shapeT2_data(attrs: List[Dict[str, Any]]):
                 },
             ]
             self.dims = dims
-            dics_intput = [{"X": ["reshape_input"]}]
+            dics_input = [{"X": ["reshape_input"]}]

             ops_config = [
                 {
                     "op_type": "reshape",
-                    "op_inputs": dics_intput[0],
+                    "op_inputs": dics_input[0],
                     "op_outputs": {"Out": ["reshape_out"]},
                     "op_attrs": dics[0],
                 }
@@ -228,7 +228,7 @@ def generate_input1(attrs: List[Dict[str, Any]]):
                 {},
             ]
             self.dims = dims
-            dics_intput = [
+            dics_input = [
                 {
                     "X": ["reshape_input"],
                     "ShapeTensor": ["shapeT1_data", "shapeT2_data"],
@@ -257,7 +257,7 @@ def generate_input1(attrs: List[Dict[str, Any]]):
                 },
                 {
                     "op_type": "reshape",
-                    "op_inputs": dics_intput[0],
+                    "op_inputs": dics_input[0],
                     "op_outputs": {"Out": ["reshape_out"]},
                     "op_attrs": dics[0],
                 },
@@ -351,7 +351,7 @@ def generate_input1(attrs: List[Dict[str, Any]]):
                 {},
             ]
             self.dims = dims
-            dics_intput = [
+            dics_input = [
                 {
                     "X": ["reshape_input"],
                     "shape_data": ["shape_data"],
@@ -370,7 +370,7 @@ def generate_input1(attrs: List[Dict[str, Any]]):
                 },
                 {
                     "op_type": "reshape",
-                    "op_inputs": dics_intput[0],
+                    "op_inputs": dics_input[0],
                     "op_outputs": {"Out": ["reshape_out"]},
                     "op_attrs": dics[0],
                 },
@@ -463,12 +463,12 @@ def generate_input1(attrs: List[Dict[str, Any]]):
                 },
             ]
             self.dims = dims
-            dics_intput = [{"X": ["reshape_input"]}]
+            dics_input = [{"X": ["reshape_input"]}]

             ops_config = [
                 {
                     "op_type": "reshape",
-                    "op_inputs": dics_intput[0],
+                    "op_inputs": dics_input[0],
                     "op_outputs": {"Out": ["reshape_out"]},
                     "op_attrs": dics[0],
                 }
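Aside: every hunk above renames the same helper variable. The dict it feeds follows the auto-scan op schema visible in the diff; a minimal hypothetical instance (the "shape" attribute value here is made up for illustration):

dics_input = [{"X": ["reshape_input"]}]
dics = [{"shape": [1, 6, 8]}]  # assumed attrs; the test draws many variants

ops_config = [
    {
        "op_type": "reshape",
        "op_inputs": dics_input[0],
        "op_outputs": {"Out": ["reshape_out"]},
        "op_attrs": dics[0],
    }
]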

test/ir/inference/test_trt_convert_rnn.py

Lines changed: 1 addition & 1 deletion
@@ -46,7 +46,7 @@ def sample_program_configs(self):
                 "is_bidirec": is_bidirec,
                 "is_test": True,
                 "dropout_prob": 0.0,
-                # for my convience
+                # for my convenience
                 "batch": batch,
                 "seq_len": seq_len,
             }

test/ir/pass_test.py

Lines changed: 5 additions & 5 deletions
@@ -187,16 +187,16 @@ def _check_fused_ops(self, program):
         if program is None or program == self.main_program:
             program = self._apply_ir_passes()

-        acctual_num_fused_ops = 0
-        # Ir passes can only be applyed to block 0.
+        actual_num_fused_ops = 0
+        # Ir passes can only be applied to block 0.
         for op in program.block(0).ops:
             if op.type == self.fused_op_type:
-                acctual_num_fused_ops += 1
+                actual_num_fused_ops += 1
         self.assertTrue(
-            self.num_fused_ops == acctual_num_fused_ops,
+            self.num_fused_ops == actual_num_fused_ops,
             "Checking of the number of fused operator < {} > failed. "
             "Expected: {}, Received: {}".format(
-                self.fused_op_type, self.num_fused_ops, acctual_num_fused_ops
+                self.fused_op_type, self.num_fused_ops, actual_num_fused_ops
             ),
         )
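Aside: the renamed counter implements a plain tally over block 0. A self-contained sketch of the same pattern on a toy static Program (op type "relu" stands in for self.fused_op_type):

import paddle

paddle.enable_static()

main = paddle.static.Program()
with paddle.static.program_guard(main):
    x = paddle.static.data("x", shape=[2, 3], dtype="float32")
    y = paddle.nn.functional.relu(x)

# IR passes only touch block 0, so that is the only block worth scanning.
actual_num_fused_ops = sum(1 for op in main.block(0).ops if op.type == "relu")
print(actual_num_fused_ops)  # 1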

test/ir/pir/test_if_api.py

Lines changed: 4 additions & 4 deletions
@@ -68,7 +68,7 @@ def test_if_with_multiple_output(self):
         self.assertEqual(last_op.name(), "pd_op.if")
         self.assertEqual(len(out), 2)

-        # check Operaion::as_if_op interface
+        # check Operation::as_if_op interface
         if_op = last_op.as_if_op()
         true_block = if_op.true_block()
         self.assertEqual(len(true_block), 3)
@@ -77,7 +77,7 @@ def test_if_with_multiple_output(self):
         build_pipe_for_block(true_block)
         self.assertEqual(len(true_block), 4)

-        # check Operaion::blocks interface
+        # check Operation::blocks interface
         block_list = []
         for block in out[0].get_defining_op().blocks():
             block_list.append(block)
@@ -94,7 +94,7 @@ def test_if_op_vjp_interface(self):
         out_grad = paddle.full(shape=[6, 1], dtype='float32', fill_value=3)
         # check vjp interface for if_op
         if_input = [[input] for input in get_used_external_value(if_op)]
-        if_input_stop_graditents = [[True], [False], [False], [True]]
+        if_input_stop_gradients = [[True], [False], [False], [True]]
         if_output = [if_op.results()]
         if_output_grad = [[out_grad]]
         self.assertEqual(has_vjp(if_op), True)
@@ -103,7 +103,7 @@ def test_if_op_vjp_interface(self):
             if_input,
             if_output,
             if_output_grad,
-            if_input_stop_graditents,
+            if_input_stop_gradients,
         )

         self.assertEqual(grad_outs[0][0], None)
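Aside: the renamed list mirrors the nesting of if_input: one inner list per external input, each flag marking whether that input's gradient is stopped. A paddle-free illustration of the shape invariant (input names are hypothetical stand-ins):

if_input = [["cond"], ["x"], ["y"], ["z"]]
if_input_stop_gradients = [[True], [False], [False], [True]]
assert len(if_input) == len(if_input_stop_gradients)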

test/ir/pir/test_while_api.py

Lines changed: 2 additions & 2 deletions
@@ -104,7 +104,7 @@ def test_while_op_vjp_interface(self):
             [input] for input in get_used_external_value(body_block)
         ]
         self.assertEqual(len(while_input), 4)
-        while_input_stop_graditents = [[True], [False], [True], [True]]
+        while_input_stop_gradients = [[True], [False], [True], [True]]
         while_output = [[value] for value in while_op.results()]
         while_output_grad = [[out_grad], [out_grad], [out_grad]]
         self.assertEqual(has_vjp(while_op), True)
@@ -113,7 +113,7 @@ def test_while_op_vjp_interface(self):
             while_input,
             while_output,
             while_output_grad,
-            while_input_stop_graditents,
+            while_input_stop_gradients,
         )

         self.assertEqual(grad_outs[0][0], None)

test/ir/test_fuse_resnet_unit.py

Lines changed: 1 addition & 1 deletion
@@ -33,7 +33,7 @@
     "and device's compute capability is at least 7.0 and less than 9.0",
 )
 class TestFuseResNetUnit(unittest.TestCase):
-    def test_fuse_resenet_unit(self):
+    def test_fuse_resnet_unit(self):
         place = paddle.CUDAPlace(0)
         program = paddle.static.Program()
         startup_program = paddle.static.Program()
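Aside: the rename keeps the test prefix, so unittest discovery still collects the method; only the spelling changes. A generic illustration:

import unittest

class Demo(unittest.TestCase):
    # unittest collects any method whose name starts with "test".
    def test_fuse_resnet_unit(self):
        self.assertTrue(True)

if __name__ == "__main__":
    unittest.main()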
