
Commit 25adfc2

gouzil authored and Luckycheng222 committed
[CodeStyle] black -> ruff format migration - part 35 (PaddlePaddle#74786)
1 parent 52c15cd commit 25adfc2

36 files changed: +250 -223 lines changed

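Every hunk below is the same mechanical change: where black parenthesized a long assert (or if) condition and split it across lines, ruff format keeps the condition on one line and parenthesizes the assertion message instead, or collapses a needlessly wrapped condition back onto a single line. A minimal sketch of the two layouts (illustrative only; the names and values are made up, not taken from the commit):

    # Illustrative sketch only; names and values are made up.
    expected = [16, 4, 4]
    actual = [16, 4, 4]

    # black layout: the condition is parenthesized and split across lines.
    assert (
        actual == expected
    ), f"{actual} vs {expected}"

    # ruff format layout: the condition stays on one line and the
    # message is parenthesized instead.
    assert actual == expected, (
        f"{actual} vs {expected}"
    )

The parentheses around the message are plain grouping, so both spellings raise the same AssertionError with the same text on failure.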
.pre-commit-config.yaml

Lines changed: 2 additions & 2 deletions

@@ -97,7 +97,7 @@ repos:
           | python/_.+
-          # | test/a.+
+          | test/a.+
           # | test/[b-h].+

@@ -153,7 +153,7 @@ repos:
           # | python/_.+
-          | test/a.+
+          # | test/a.+
           | test/[b-h].+
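
Read together, the two hunks hand every file under test/ whose path continues with "a" from one formatter hook's filter to the other's: the `test/a.+` entry is enabled in the first regex block and commented out in the second, matching the black -> ruff format handoff in the commit title. A small hypothetical check (not part of the commit) of what that pattern matches, using Python's re with the same (?x) verbose syntax the config relies on:

    import re

    # (?x) enables verbose mode, as in the pre-commit config's
    # multi-line filter blocks; whitespace in the pattern is ignored.
    pattern = re.compile(
        r"""(?x)^(
            test/a.+
        )$"""
    )

    assert pattern.match("test/auto_parallel/dtensor_from_local_api.py")
    # A made-up counterexample outside test/a.+:
    assert not pattern.match("test/base/test_case.py")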

test/auto_parallel/PP_Schedules_demo.py

Lines changed: 1 addition & 3 deletions

@@ -508,9 +508,7 @@ def test_FthenB_align_mode_of_GradientClipByGlobalNorm(self):
             parameters=self.model.parameters(),
             grad_clip=paddle.nn.ClipGradByGlobalNorm(1.0),
         )
-        if (
-            dist.in_auto_parallel_align_mode()
-        ):  # When in auto parallel align mode, patching the optimizer step function
+        if dist.in_auto_parallel_align_mode():  # When in auto parallel align mode, patching the optimizer step function
             orig_step = (
                 opt.step.__func__ if hasattr(opt.step, "__func__") else opt.step
             )

test/auto_parallel/custom_op/semi_auto_parallel_for_custom_op.py

Lines changed: 3 additions & 3 deletions

@@ -40,9 +40,9 @@ def __init__(self):
        self._seed = eval(os.getenv("seed"))

    def check_placements(self, output, expected_placements):
-        assert (
-            output.placements == expected_placements
-        ), f"{output.placements} vs {expected_placements}"
+        assert output.placements == expected_placements, (
+            f"{output.placements} vs {expected_placements}"
+        )

    def test_custom_relu(self):
        shapes = [16, 4, 4]

test/auto_parallel/dtensor_from_local_api.py

Lines changed: 6 additions & 6 deletions

@@ -63,12 +63,12 @@ def _check_mesh(grad):
        if mesh is None and placements is None:
            assert not grad.is_dist(), "grad.is_dist() is not False"
        else:
-            assert (
-                grad.process_mesh == mesh
-            ), "grad.process_mesh is not equal to mesh"
-            assert (
-                grad.placements == placements
-            ), "grad.placements is not equal to placements"
+            assert grad.process_mesh == mesh, (
+                "grad.process_mesh is not equal to mesh"
+            )
+            assert grad.placements == placements, (
+                "grad.placements is not equal to placements"
+            )

    return _check_mesh
test/auto_parallel/hybrid_strategy/parallel_api.py

Lines changed: 6 additions & 4 deletions

@@ -178,7 +178,9 @@ def __init__(self):
        ) or (
            self.config.context_parallel is False
            and self.config.sep_parallel is True
-        ), "when sep > 1, either context_parallel or sep_parallel should be true"
+        ), (
+            "when sep > 1, either context_parallel or sep_parallel should be true"
+        )
        num_hidden_layers = os.getenv("num_hidden_layers")
        if num_hidden_layers:
            self.config.num_hidden_layers = int(num_hidden_layers)

@@ -299,9 +301,9 @@ def check_lora(self, layer):
        ) and not self.share_embedding:
            assert sub_layer.weight.stop_gradient
        if 'o_proj' in name:
-            assert (
-                sub_layer.weight.stop_gradient
-            ), f'{name} , {sub_layer.weight.name} , {sub_layer.weight}'
+            assert sub_layer.weight.stop_gradient, (
+                f'{name} , {sub_layer.weight.name} , {sub_layer.weight}'
+            )
            assert not sub_layer.lora_A.stop_gradient
            assert not sub_layer.lora_B.stop_gradient
            # assert sub_layer.bias.stop_gradient is None

test/auto_parallel/hybrid_strategy/semi_auto_llama.py

Lines changed: 3 additions & 1 deletion

@@ -137,7 +137,9 @@ def __init__(self):
        assert (
            self.config.sep_parallel_degree
            != self.config.context_parallel_degree
-        ), f"only one of the context_parallel and sep_parallel can be True, but get context_parallel_degree = {self.config.context_parallel_degree} and sep_parallel_degree = {self.config.sep_parallel_degree}, please check your env"
+        ), (
+            f"only one of the context_parallel and sep_parallel can be True, but get context_parallel_degree = {self.config.context_parallel_degree} and sep_parallel_degree = {self.config.sep_parallel_degree}, please check your env"
+        )

        self.init_dist_env()

test/auto_parallel/hybrid_strategy/semi_auto_llama_acc_align.py

Lines changed: 3 additions & 1 deletion

@@ -159,7 +159,9 @@ def __init__(self):
        assert (
            self.config.sep_parallel_degree
            != self.config.context_parallel_degree
-        ), f"only one of the context_parallel and sep_parallel can be True, but get context_parallel_degree = {self.config.context_parallel_degree} and sep_parallel_degree = {self.config.sep_parallel_degree}, please check your env"
+        ), (
+            f"only one of the context_parallel and sep_parallel can be True, but get context_parallel_degree = {self.config.context_parallel_degree} and sep_parallel_degree = {self.config.sep_parallel_degree}, please check your env"
+        )

        self.run_step = 10
        self.run_step_dy2static = (

test/auto_parallel/hybrid_strategy/semi_auto_llama_dataloader.py

Lines changed: 3 additions & 1 deletion

@@ -152,7 +152,9 @@ def __init__(self):
        assert (
            self.config.sep_parallel_degree
            != self.config.context_parallel_degree
-        ), f"only one of the context_parallel and sep_parallel can be True, but get context_parallel_degree = {self.config.context_parallel_degree} and sep_parallel_degree = {self.config.sep_parallel_degree}, please check your env"
+        ), (
+            f"only one of the context_parallel and sep_parallel can be True, but get context_parallel_degree = {self.config.context_parallel_degree} and sep_parallel_degree = {self.config.sep_parallel_degree}, please check your env"
+        )

        self.init_dist_env()

test/auto_parallel/hybrid_strategy/semi_auto_llama_pp_gradmerge.py

Lines changed: 3 additions & 1 deletion

@@ -133,7 +133,9 @@ def __init__(self):
        assert (
            self.config.sep_parallel_degree
            != self.config.context_parallel_degree
-        ), f"only one of the context_parallel and sep_parallel can be True, but get context_parallel_degree = {self.config.context_parallel_degree} and sep_parallel_degree = {self.config.sep_parallel_degree}, please check your env"
+        ), (
+            f"only one of the context_parallel and sep_parallel can be True, but get context_parallel_degree = {self.config.context_parallel_degree} and sep_parallel_degree = {self.config.sep_parallel_degree}, please check your env"
+        )

        self.init_dist_env()

test/auto_parallel/hybrid_strategy/semi_auto_llama_save_load.py

Lines changed: 24 additions & 20 deletions

@@ -111,7 +111,9 @@ def __init__(self):
        assert (
            self.config.sep_parallel_degree
            != self.config.context_parallel_degree
-        ), f"only one of the context_parallel and sep_parallel can be True, but get context_parallel_degree = {self.config.context_parallel_degree} and sep_parallel_degree = {self.config.sep_parallel_degree}, please check your env"
+        ), (
+            f"only one of the context_parallel and sep_parallel can be True, but get context_parallel_degree = {self.config.context_parallel_degree} and sep_parallel_degree = {self.config.sep_parallel_degree}, please check your env"
+        )

        self.init_dist_env()

@@ -136,41 +138,43 @@ def init_dist_env(self):
        random.seed(1024)

    def check_program_equal(self, program_a, program_b):
-        assert (
-            program_a.num_ops() == program_b.num_ops()
-        ), f'The number of ops between two programs is different: {program_a.num_ops()} vs {program_b.num_ops()}.'
+        assert program_a.num_ops() == program_b.num_ops(), (
+            f'The number of ops between two programs is different: {program_a.num_ops()} vs {program_b.num_ops()}.'
+        )
        for i in range(program_a.num_ops()):
            a_op = program_a.global_block().ops[i]
            b_op = program_a.global_block().ops[i]
            # check op name
-            assert (
-                a_op.name() == b_op.name()
-            ), f'The name of {i} op in program is different: {a_op.name()} vs {b_op.name()}.'
+            assert a_op.name() == b_op.name(), (
+                f'The name of {i} op in program is different: {a_op.name()} vs {b_op.name()}.'
+            )
            # check op inputs
            for index in range(a_op.num_operands()):
                assert (
                    a_op.operand(index)
                    .source()
                    .is_same(b_op.operand(index).source())
-                ), f'The type of {index} operand is different: {a_op.operand(index).source()} vs {b_op.operand(index).source()}'
+                ), (
+                    f'The type of {index} operand is different: {a_op.operand(index).source()} vs {b_op.operand(index).source()}'
+                )
            # check op outputs
            for index in range(a_op.num_results()):
-                assert a_op.result(index).is_same(
-                    b_op.result(index)
-                ), f'The type of {index} result is different: {a_op.result(index)} vs {b_op.result(index)}'
+                assert a_op.result(index).is_same(b_op.result(index)), (
+                    f'The type of {index} result is different: {a_op.result(index)} vs {b_op.result(index)}'
+                )
            # check op attrs
            for k, v in a_op.attrs().items():
-                assert (
-                    k in b_op.attrs()
-                ), f'Can not find key of {k} attribute in other program'
+                assert k in b_op.attrs(), (
+                    f'Can not find key of {k} attribute in other program'
+                )
                if k == 'place':
-                    assert type(v) == type(
-                        b_op.attrs()[k]
-                    ), f'The attribute of {k} is different: {type(v)} vs {type(b_op.attrs()[k])}'
+                    assert type(v) == type(b_op.attrs()[k]), (
+                        f'The attribute of {k} is different: {type(v)} vs {type(b_op.attrs()[k])}'
+                    )
                else:
-                    assert (
-                        v == b_op.attrs()[k]
-                    ), f'The attribute of {k} is different: {v} vs {b_op.attrs()[k]}'
+                    assert v == b_op.attrs()[k], (
+                        f'The attribute of {k} is different: {v} vs {b_op.attrs()[k]}'
+                    )

    def run_dy2static(self, tmp_ckpt_path):
        model = LlamaForCausalLMAuto(self.config)
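
The check_program_equal rewrite is the same transformation applied repeatedly: each `), f'...'` tail becomes `), (` followed by the f-string and a closing `)`. A standalone sanity check (a sketch, no Paddle required; check_attr_equal is a made-up stand-in for the asserts above) that the reflowed layout still surfaces the formatted message:

    # check_attr_equal is hypothetical, modeled on the asserts above.
    def check_attr_equal(k, v, other):
        # ruff format layout: condition on one line, message parenthesized.
        assert v == other, (
            f'The attribute of {k} is different: {v} vs {other}'
        )

    try:
        check_attr_equal('place', 'cpu', 'gpu')
    except AssertionError as e:
        print(e)  # The attribute of place is different: cpu vs gpu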
