Skip to content

Commit a35a4a5

Browse files
authored
[CodeStyle][E711] use is/is not for comparison with None (#47452)
* [CodeStyle][E711] use `is`/`is not` for comparison with `None` * `self.assertTrue($A is None)` -> `self.assertIsNone($A)` * `self.assertTrue($A is not None)` -> `self.assertIsNotNone($A)` * `self.assertFalse($A is None)` -> `self.assertIsNotNone($A)` * `self.assertEqual($A, None)` -> `self.assertIsNone($A)` * `self.assertNotEqual($A, None)` -> `self.assertIsNotNone($A)`
1 parent 9d80185 commit a35a4a5

File tree

137 files changed

+357
-352
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

137 files changed

+357
-352
lines changed

paddle/scripts/conda_build.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -171,7 +171,7 @@ def meta_build_mac(var, python_str, paddle_version, build_var, build_name_str):
171171
def meta_build_linux(
172172
var, python_str, paddle_version, build_var, build_name_str, cuda_str=None
173173
):
174-
if cuda_str == None:
174+
if cuda_str is None:
175175
package_str = (
176176
"""
177177
package:
@@ -192,7 +192,7 @@ def meta_build_linux(
192192
)
193193
meta_build = var.build + build_name_str
194194
meta_str = package_str + meta_build + requirement
195-
if not (cuda_str == None):
195+
if not (cuda_str is None):
196196
meta_str = meta_str + cuda_str
197197
meta_str = meta_str + var.test + var.about
198198

@@ -209,7 +209,7 @@ def meta_build_linux(
209209
def meta_build_windows(
210210
var, python_str, paddle_version, blt_var, build_name_str, cuda_str=None
211211
):
212-
if cuda_str == None:
212+
if cuda_str is None:
213213
package_str = (
214214
"""
215215
package:
@@ -235,7 +235,7 @@ def meta_build_windows(
235235
meta_build = var.build + build_name_str
236236
meta_str = package_str + meta_build + requirement
237237

238-
if not (cuda_str == None):
238+
if not (cuda_str is None):
239239
meta_str = meta_str + cuda_str
240240

241241
blt_str = var.blt_const + blt_var

python/paddle/cost_model/cost_model.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,7 @@ def static_cost_data(self):
7474

7575
def get_static_op_time(self, op_name, forward=True, dtype="float32"):
7676
# if forward is True, return op forward time, otherwise return op backward time.
77-
if op_name == None:
77+
if op_name is None:
7878
raise ValueError(
7979
'op_name should not be empty when you want to get static op time'
8080
)

python/paddle/dataset/imdb.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ def tokenize(pattern):
4545
# tarfile.extractfile, which does random access and might
4646
# destroy hard disks.
4747
tf = tarf.next()
48-
while tf != None:
48+
while tf is not None:
4949
if bool(pattern.match(tf.name)):
5050
# newline and punctuations removal and ad-hoc tokenization.
5151
yield tarf.extractfile(tf).read().rstrip(b'\n\r').translate(

python/paddle/dataset/tests/imdb_test.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -31,13 +31,13 @@ class TestIMDB(unittest.TestCase):
3131
word_idx = None
3232

3333
def test_build_dict(self):
34-
if self.word_idx == None:
34+
if self.word_idx is None:
3535
self.word_idx = paddle.dataset.imdb.build_dict(TRAIN_PATTERN, 150)
3636

3737
self.assertEqual(len(self.word_idx), 7036)
3838

3939
def check_dataset(self, dataset, expected_size):
40-
if self.word_idx == None:
40+
if self.word_idx is None:
4141
self.word_idx = paddle.dataset.imdb.build_dict(TRAIN_PATTERN, 150)
4242

4343
sum = 0

python/paddle/distributed/auto_parallel/cost/base_cost.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -587,7 +587,7 @@ def get_max_beta(self, ranks):
587587
if forward_order_beta > backward_order_beta
588588
else backward_order_beta
589589
)
590-
if max_beta == None:
590+
if max_beta is None:
591591
max_beta = beta
592592
else:
593593
if beta > max_beta:

python/paddle/distributed/auto_parallel/partitioner.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ def partition(
8484
dist_op_context.rank_id = self._rank_id
8585

8686
# partition startup program
87-
if serial_startup_program == None:
87+
if serial_startup_program is None:
8888
partitioned_startup_prog = None
8989
else:
9090
partitioned_startup_prog = self.partition_startup_program(

python/paddle/distributed/auto_parallel/process_group.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@ def new_process_group(ranks, group_id=None):
6161
num_groups = len(_g_process_group_map)
6262
# Note: our process group may interfere with the original implementation
6363
# so the created group id should start from the original _new_ring_id()
64-
if group_id == None:
64+
if group_id is None:
6565
group_id = _new_ring_id() + num_groups + 1
6666

6767
new_pg = ProcessGroup(group_id, ranks)

python/paddle/distributed/auto_parallel/tuner/optimization_tuner.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -530,7 +530,7 @@ def _update(self, i, trial, results):
530530
self._finished_trials.append(trial)
531531

532532
cur_mertic = get_metric(results)
533-
if self._best_metric == None or cur_mertic > self._best_metric:
533+
if self._best_metric is None or cur_mertic > self._best_metric:
534534
self._best_metric = cur_mertic
535535
self._best_iter = i
536536

python/paddle/distributed/elastic.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@ def set_np(self, np):
3131
self.etcd.put(self.np_path, '{}'.format(np).encode('latin-1'))
3232

3333
def scale_np(self, np):
34-
if self.etcd.get(self.np_path)[0] != None:
34+
if self.etcd.get(self.np_path)[0] is not None:
3535
self.set_np(np)
3636
return True
3737
return False

python/paddle/distributed/fleet/base/role_maker.py

Lines changed: 18 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -293,7 +293,7 @@ def __get_default_iface_from_gateway(self):
293293
if "Gateway" in item and "Iface" in item:
294294
gateway_idx = item.index("Gateway")
295295
iface_idx = item.index("Iface")
296-
elif gateway_idx != None and iface_idx != None:
296+
elif gateway_idx is not None and iface_idx is not None:
297297
gateway = None
298298
if len(item) > gateway_idx:
299299
gateway = item[gateway_idx]
@@ -845,7 +845,7 @@ def _ps_env(self): # each role will execute it
845845
self._server_endpoints = self._server_endpoints.split(",")
846846

847847
self._worker_endpoints = os.getenv("PADDLE_TRAINER_ENDPOINTS", None)
848-
if self._worker_endpoints != None:
848+
if self._worker_endpoints is not None:
849849
self._worker_endpoints = self._worker_endpoints.split(",")
850850
else:
851851
self._worker_endpoints = []
@@ -860,14 +860,14 @@ def _ps_env(self): # each role will execute it
860860
self._coordinator_endpoints = self._coordinator_endpoints.split(",")
861861

862862
trainers_num = os.getenv("PADDLE_TRAINERS_NUM", None)
863-
if trainers_num == None:
863+
if trainers_num is None:
864864
raise ValueError(
865865
"Can not find PADDLE_TRAINERS_NUM, please check your environment."
866866
)
867867
trainers_num = int(trainers_num)
868868

869869
training_role = os.getenv("TRAINING_ROLE", None)
870-
if training_role == None:
870+
if training_role is None:
871871
raise ValueError(
872872
"Can not find TRAINING_ROLE, please check your environment."
873873
)
@@ -937,39 +937,39 @@ def _ps_env(self): # each role will execute it
937937
if training_role == "TRAINER":
938938
role = Role.WORKER
939939
current_id = os.getenv("PADDLE_TRAINER_ID", None)
940-
if current_id == None:
940+
if current_id is None:
941941
raise ValueError(
942942
"Can not find PADDLE_TRAINER_ID, please check your environment."
943943
)
944944
current_id = int(current_id)
945945
if self._is_heter_parameter_server_mode:
946946
self._stage_id = os.getenv("STAGE_ID", None)
947-
if self._stage_id == None:
947+
if self._stage_id is None:
948948
raise ValueError(
949949
"Can not find STAGE_ID, please check your environment."
950950
)
951951
self._stage_id = int(self._stage_id)
952952
self._stage_num = os.getenv("STAGE_NUM", None)
953-
if self._stage_num == None:
953+
if self._stage_num is None:
954954
raise ValueError(
955955
"Can not find STAGE_NUM, please check your environment."
956956
)
957957
self._stage_num = int(self._stage_num)
958958
self._stage_trainers = os.getenv(
959959
"PADDLE_STAGE_TRAINERS_NUM", None
960960
)
961-
if self._stage_trainers == None:
961+
if self._stage_trainers is None:
962962
raise ValueError(
963963
"Can not find PADDLE_STAGE_TRAINERS_NUM, please check your environment."
964964
)
965965
self._stage_trainers = eval(self._stage_trainers)
966966
cur_port = os.getenv("PADDLE_PORT", None)
967-
if cur_port == None:
967+
if cur_port is None:
968968
raise ValueError(
969969
"Can not find PADDLE_PORT, please check your environment."
970970
)
971971
cur_ip = os.getenv("POD_IP", None)
972-
if cur_ip == None:
972+
if cur_ip is None:
973973
raise ValueError(
974974
"Can not find POD_IP, please check your environment."
975975
)
@@ -982,12 +982,12 @@ def _ps_env(self): # each role will execute it
982982
elif training_role == "PSERVER":
983983
role = Role.SERVER
984984
cur_port = os.getenv("PADDLE_PORT", None)
985-
if cur_port == None:
985+
if cur_port is None:
986986
raise ValueError(
987987
"Can not find PADDLE_PORT, please check your environment."
988988
)
989989
cur_ip = os.getenv("POD_IP", None)
990-
if cur_ip == None:
990+
if cur_ip is None:
991991
raise ValueError(
992992
"Can not find POD_IP, please check your environment."
993993
)
@@ -997,20 +997,20 @@ def _ps_env(self): # each role will execute it
997997
elif training_role == "HETER_TRAINER":
998998
role = Role.HETER_WORKER
999999
self._stage_id = os.getenv("STAGE_ID", None)
1000-
if self._stage_id == None:
1000+
if self._stage_id is None:
10011001
raise ValueError(
10021002
"Can not find STAGE_ID, please check your environment."
10031003
)
10041004
self._stage_id = int(self._stage_id)
10051005
self._stage_num = os.getenv("STAGE_NUM", None)
1006-
if self._stage_num == None:
1006+
if self._stage_num is None:
10071007
raise ValueError(
10081008
"Can not find STAGE_NUM, please check your environment."
10091009
)
10101010
self._stage_num = int(self._stage_num)
10111011

10121012
self._stage_trainers = os.getenv("PADDLE_STAGE_TRAINERS_NUM", None)
1013-
if self._stage_trainers == None:
1013+
if self._stage_trainers is None:
10141014
raise ValueError(
10151015
"Can not find PADDLE_STAGE_TRAINERS_NUM, please check your environment."
10161016
)
@@ -1019,7 +1019,7 @@ def _ps_env(self): # each role will execute it
10191019
self._heter_trainer_device_type = os.getenv(
10201020
"HETER_DEVICE_TYPE", None
10211021
)
1022-
if self._heter_trainer_device_type == None:
1022+
if self._heter_trainer_device_type is None:
10231023
raise ValueError(
10241024
"Can not find HETER_DEVICE_TYPE, please check your environment."
10251025
)
@@ -1040,12 +1040,12 @@ def _ps_env(self): # each role will execute it
10401040
)
10411041

10421042
cur_port = os.getenv("PADDLE_PORT", None)
1043-
if cur_port == None:
1043+
if cur_port is None:
10441044
raise ValueError(
10451045
"Can not find PADDLE_PORT, please check your environment."
10461046
)
10471047
cur_ip = os.getenv("POD_IP", None)
1048-
if cur_ip == None:
1048+
if cur_ip is None:
10491049
raise ValueError(
10501050
"Can not find POD_IP, please check your environment."
10511051
)

0 commit comments

Comments (0)