Skip to content

Commit

Permalink
Bump pinned accelerate version (huggingface#24089)
Browse files Browse the repository at this point in the history
* Min accelerate

* Also min version

* Min accelerate

* Also min version

* To different minor version

* Empty
  • Loading branch information
muellerzr authored and novice03 committed Jun 23, 2023
1 parent ee88c6f commit fa08da9
Show file tree
Hide file tree
Showing 4 changed files with 7 additions and 7 deletions.
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@
# 2. once modified, run: `make deps_table_update` to update src/transformers/dependency_versions_table.py
_deps = [
"Pillow",
"accelerate>=0.19.0",
"accelerate>=0.20.1",
"av==9.2.0", # Latest version of PyAV (10.0.0) has issues with audio stream.
"beautifulsoup4",
"black~=23.1",
Expand Down
2 changes: 1 addition & 1 deletion src/transformers/dependency_versions_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
# 2. run `make deps_table_update``
deps = {
"Pillow": "Pillow",
"accelerate": "accelerate>=0.19.0",
"accelerate": "accelerate>=0.20.1",
"av": "av==9.2.0",
"beautifulsoup4": "beautifulsoup4",
"black": "black~=23.1",
Expand Down
4 changes: 2 additions & 2 deletions src/transformers/training_args.py
Original file line number Diff line number Diff line change
Expand Up @@ -1668,9 +1668,9 @@ def _setup_devices(self) -> "torch.device":
requires_backends(self, ["torch"])
logger.info("PyTorch: setting up devices")
if not is_sagemaker_mp_enabled():
if not is_accelerate_available(check_partial_state=True):
if not is_accelerate_available(min_version="0.20.1"):
raise ImportError(
"Using the `Trainer` with `PyTorch` requires `accelerate>=0.19.0`: Please run `pip install transformers[torch]` or `pip install accelerate -U`"
"Using the `Trainer` with `PyTorch` requires `accelerate>=0.20.1`: Please run `pip install transformers[torch]` or `pip install accelerate -U`"
)
AcceleratorState._reset_state(reset_partial_state=True)
self.distributed_state = None
Expand Down
6 changes: 3 additions & 3 deletions src/transformers/utils/import_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -508,9 +508,9 @@ def is_protobuf_available():
return importlib.util.find_spec("google.protobuf") is not None


def is_accelerate_available(check_partial_state=False):
if check_partial_state:
return _accelerate_available and version.parse(_accelerate_version) >= version.parse("0.19.0")
def is_accelerate_available(min_version: str = None):
    """Return whether `accelerate` is installed.

    When `min_version` is given, additionally require that the installed
    `accelerate` version is at least `min_version`.
    """
    # No version constraint requested: a plain availability check suffices.
    if min_version is None:
        return _accelerate_available
    # Compare parsed versions so e.g. "0.20.1" >= "0.19.0" works correctly.
    installed_ok = version.parse(_accelerate_version) >= version.parse(min_version)
    return _accelerate_available and installed_ok


Expand Down

0 comments on commit fa08da9

Please sign in to comment.