diff --git a/gliner/training/trainer.py b/gliner/training/trainer.py index 1a5e1e2..8f5ffb9 100644 --- a/gliner/training/trainer.py +++ b/gliner/training/trainer.py @@ -8,8 +8,9 @@ from transformers.trainer import ( is_sagemaker_mp_enabled, get_parameter_names, - ALL_LAYERNORM_LAYERS, + ) +from transformers.pytorch_utils import ALL_LAYERNORM_LAYERS  # fix: ALL_LAYERNORM_LAYERS is no longer exported by transformers.trainer; import it from transformers.pytorch_utils instead from transformers.trainer_utils import seed_worker if transformers.utils.is_apex_available():