
Commit bc6a09d

echarlaix authored and young-developer committed
Fix training args transformers compatibility (huggingface#1770)
* Fix training args compatibility with transformers v4.38.0 or lower
* format
1 parent 88fbcff commit bc6a09d

File tree

1 file changed (+4, -2 lines)

optimum/onnxruntime/training_args.py

@@ -44,11 +44,13 @@
 )
 from transformers.utils.generic import strtobool
 
+from ..utils.import_utils import check_if_transformers_greater
+
 
 if is_torch_available():
     import torch
 
-if is_accelerate_available():
+if is_accelerate_available() and check_if_transformers_greater("4.38.0"):
     from transformers.trainer_pt_utils import AcceleratorConfig
 
 
@@ -449,7 +451,7 @@ def __post_init__(self):
             os.environ[f"{prefix}SYNC_MODULE_STATES"] = self.fsdp_config.get("sync_module_states", "true")
             os.environ[f"{prefix}USE_ORIG_PARAMS"] = self.fsdp_config.get("use_orig_params", "false")
 
-        if is_accelerate_available():
+        if is_accelerate_available() and check_if_transformers_greater("4.38.0"):
             if not isinstance(self.accelerator_config, (AcceleratorConfig)):
                 if self.accelerator_config is None:
                     self.accelerator_config = AcceleratorConfig()
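
The guard matters because AcceleratorConfig (in transformers.trainer_pt_utils) only exists in transformers v4.38.0 and later, so importing or instantiating it unconditionally raises an ImportError on older releases. Below is a minimal, self-contained sketch of the same version-guarded import pattern. The helper check_if_transformers_greater and its module optimum.utils.import_utils come from the diff above; the None fallback and the maybe_build_accelerator_config helper are illustrative assumptions, not part of the commit.

# Sketch of the version-guarded import used in this commit (assumptions noted above).
from transformers.utils import is_accelerate_available

from optimum.utils.import_utils import check_if_transformers_greater

AcceleratorConfig = None  # illustrative fallback, not part of the actual change
# AcceleratorConfig only exists in transformers >= 4.38.0, so import it only when
# both accelerate and a recent enough transformers are installed.
if is_accelerate_available() and check_if_transformers_greater("4.38.0"):
    from transformers.trainer_pt_utils import AcceleratorConfig


def maybe_build_accelerator_config(accelerator_config=None):
    # Hypothetical helper mirroring the guarded __post_init__ logic: only touch
    # AcceleratorConfig when the import above actually succeeded.
    if AcceleratorConfig is not None and not isinstance(accelerator_config, AcceleratorConfig):
        if accelerator_config is None:
            accelerator_config = AcceleratorConfig()
    return accelerator_config

With transformers older than v4.38.0 the guarded import is simply skipped, so the training arguments can still be constructed without the accelerator-config plumbing.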
