@@ -368,13 +368,13 @@ def mode(self, value):
368368 self .model .mode = value
369369
370370 def train_batch (self , inputs , labels = None , update = True ):
371-        assert (
372-            self.model._optimizer
373-        ), "model not ready, please call `model.prepare()` first"
371+        assert self.model._optimizer, (
372+            "model not ready, please call `model.prepare()` first"
373+        )
374374 self .mode = 'train'
375-        assert (
376-            update is True
377-        ), "Does not support `update == False` in static graph mode by now."
375+        assert update is True, (
376+            "Does not support `update == False` in static graph mode by now."
377+        )
378378 return self ._run (inputs , labels )
379379
380380 def eval_batch (self , inputs , labels = None ):
@@ -500,16 +500,16 @@ def _load_optimizer(self, state, executor):
500500 # However, dygraph wouldn't save it.
501501 if var .name not in state :
502502 continue
503-            assert (
504-                var.name in converted_state
505-            ), f"variable [{var.name}] is not in optimizer state file"
503+            assert var.name in converted_state, (
504+                f"variable [{var.name}] is not in optimizer state file"
505+            )
506506 self ._set_var (var .name , converted_state [var .name ])
507507
508508 def _run (self , inputs , labels = None ):
509509 compiled_prog = self ._compiled_progs .get (self .mode , None )
510-        assert (
511-            compiled_prog
512-        ), "Model is not ready, please call `model.prepare()` first"
510+        assert compiled_prog, (
511+            "Model is not ready, please call `model.prepare()` first"
512+        )
513513
514514 inputs = to_list (inputs )
515515 if labels is not None :
@@ -689,9 +689,9 @@ def _make_program(self, mode):
689689 }
690690
691691 def _initialize (self , prog , mode ):
692-        assert (
693-            self.model._place is not None
694-        ), "device is not set, please call `model.prepare()` first"
692+        assert self.model._place is not None, (
693+            "device is not set, please call `model.prepare()` first"
694+        )
695695
696696 place = self .model ._place
697697
@@ -756,13 +756,13 @@ def mode(self, value):
756756 self .model .mode = value
757757
758758 def train_batch (self , inputs , labels = None , update = True ):
759-        assert (
760-            self.model._optimizer
761-        ), "model not ready, please call `model.prepare()` first"
759+        assert self.model._optimizer, (
760+            "model not ready, please call `model.prepare()` first"
761+        )
762762 self .mode = 'train'
763-        assert (
764-            update is True
765-        ), "Does not support `update == False` in static graph mode by now."
763+        assert update is True, (
764+            "Does not support `update == False` in static graph mode by now."
765+        )
766766 return self ._run (inputs , labels )
767767
768768 def eval_batch (self , inputs , labels = None ):
@@ -919,9 +919,9 @@ def _load_optimizer(self, state, executor):
919919 converted_state .pop (dy_state_name )
920920 )
921921
922-            assert (
923-                var.name in converted_state
924-            ), f"variable [{var.name}] is not in optimizer state file"
922+            assert var.name in converted_state, (
923+                f"variable [{var.name}] is not in optimizer state file"
924+            )
925925 self ._set_var (var , converted_state [var .name ])
926926
927927 def _set_var (self , var , ndarray ):
@@ -940,9 +940,9 @@ def _set_var(self, var, ndarray):
940940
941941 def _run (self , inputs , labels = None ):
942942 compiled_prog = self ._compiled_progs .get (self .mode , None )
943-        assert (
944-            compiled_prog
945-        ), "Model is not ready, please call `model.prepare()` first"
943+        assert compiled_prog, (
944+            "Model is not ready, please call `model.prepare()` first"
945+        )
946946
947947 inputs = to_list (inputs )
948948 if labels is not None :
@@ -1141,9 +1141,9 @@ def _compile_and_initialize(self, prog, mode):
11411141 if compiled_prog is not None :
11421142 return compiled_prog
11431143
1144-        assert (
1145-            self.model._place is not None
1146-        ), "device is not set, please call `model.prepare()` first"
1144+        assert self.model._place is not None, (
1145+            "device is not set, please call `model.prepare()` first"
1146+        )
11471147
11481148 place = self .model ._place
11491149
@@ -1234,9 +1234,9 @@ def mode(self, value):
12341234
12351235 # TODO multi device in dygraph mode not implemented at present time
12361236 def train_batch (self , inputs , labels = None , update = True ):
1237-        assert (
1238-            self.model._optimizer
1239-        ), "model not ready, please call `model.prepare()` first"
1237+        assert self.model._optimizer, (
1238+            "model not ready, please call `model.prepare()` first"
1239+        )
12401240 self .model .network .train ()
12411241 self .mode = 'train'
12421242 inputs = to_list (inputs )
@@ -2031,7 +2031,9 @@ def _check_pure_fp16_configs():
20312031 assert isinstance (
20322032 self ._optimizer ._grad_clip ,
20332033 (paddle .nn .ClipGradByGlobalNorm , paddle .nn .ClipGradByNorm ),
2034-            ), "Only ClipGradByNorm and ClipGradByGlobalNorm are supported in amp training with level=O2 currently."
2034+            ), (
2035+                "Only ClipGradByNorm and ClipGradByGlobalNorm are supported in amp training with level=O2 currently."
2036+            )
20352037
20362038 self ._adapter ._amp_custom_lists = {}
20372039 self ._adapter ._amp_configs = {}
@@ -2188,9 +2190,9 @@ def prepare(
21882190
21892191 metrics = metrics or []
21902192 for metric in to_list (metrics ):
2191-            assert isinstance(
2192-                metric, Metric
2193-            ), f"{metric.__class__.__name__} is not sub class of Metric"
2193+            assert isinstance(metric, Metric), (
2194+                f"{metric.__class__.__name__} is not sub class of Metric"
2195+            )
21942196 self ._metrics = to_list (metrics )
21952197 self ._prepare_amp (amp_configs )
21962198
@@ -2353,9 +2355,9 @@ def fit(
23532355 if isinstance (batch_size , (tuple , list )) and all (
23542356 isinstance (x , int ) for x in batch_size
23552357 ):
2356-            assert (
2357-                len(batch_size) == 2
2358-            ), "batch_size length error, expected train_batch_size and eval_batch_size."
2358+            assert len(batch_size) == 2, (
2359+                "batch_size length error, expected train_batch_size and eval_batch_size."
2360+            )
23592361 train_batch_size , eval_batch_size = batch_size
23602362 elif isinstance (batch_size , int ):
23612363 train_batch_size , eval_batch_size = batch_size , batch_size
@@ -2748,9 +2750,9 @@ def _save_inference_model(self, path: str) -> None:
27482750 params_filename = file_prefix + INFER_PARAMS_SUFFIX
27492751
27502752 prog = self ._adapter ._progs .get ('test' , None )
2751-        assert (
2752-            prog
2753-        ), "Model is not ready, please call `model.prepare()` first"
2753+        assert prog, (
2754+            "Model is not ready, please call `model.prepare()` first"
2755+        )
27542756
27552757 if in_pir_mode ():
27562758 infer_prog = prog
@@ -2914,9 +2916,9 @@ def summary(
29142916 {'total_params': 61610, 'trainable_params': 61610}
29152917
29162918 """
2917-        assert (
2918-            input_size is not None or self._inputs is not None
2919-        ), "'input_size' or 'self._input' must be set"
2919+        assert input_size is not None or self._inputs is not None, (
2920+            "'input_size' or 'self._input' must be set"
2921+        )
29202922 if input_size is not None :
29212923 _input_size = input_size
29222924 else :
0 commit comments