Skip to content

Commit e28d93a

Browse files
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 60650eb commit e28d93a

File tree

5 files changed

+8
-6
lines changed

5 files changed

+8
-6
lines changed

src/lightning/fabric/cli.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,8 +29,8 @@
2929
from lightning.fabric.utilities.consolidate_checkpoint import _process_cli_args
3030
from lightning.fabric.utilities.device_parser import _parse_gpu_ids
3131
from lightning.fabric.utilities.distributed import _suggested_max_num_threads
32-
from lightning.fabric.utilities.load import _load_distributed_checkpoint
3332
from lightning.fabric.utilities.imports import _lightning_xpu_available
33+
from lightning.fabric.utilities.load import _load_distributed_checkpoint
3434

3535
_log = logging.getLogger(__name__)
3636

src/lightning/fabric/utilities/imports.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,13 +29,16 @@
2929
_TORCH_GREATER_EQUAL_2_1 = compare_version("torch", operator.ge, "2.1.0", use_base_version=True)
3030
_TORCH_GREATER_EQUAL_2_2 = compare_version("torch", operator.ge, "2.2.0", use_base_version=True)
3131
_TORCH_GREATER_EQUAL_2_3 = compare_version("torch", operator.ge, "2.3.0", use_base_version=True)
32-
_TORCH_EQUAL_2_0 = compare_version("torch", operator.ge, "2.0.0", use_base_version=True) and not _TORCH_GREATER_EQUAL_2_1
32+
_TORCH_EQUAL_2_0 = (
33+
compare_version("torch", operator.ge, "2.0.0", use_base_version=True) and not _TORCH_GREATER_EQUAL_2_1
34+
)
3335

3436
_PYTHON_GREATER_EQUAL_3_8_0 = (sys.version_info.major, sys.version_info.minor) >= (3, 8)
3537
_PYTHON_GREATER_EQUAL_3_10_0 = (sys.version_info.major, sys.version_info.minor) >= (3, 10)
3638

3739
_UTILITIES_GREATER_EQUAL_0_10 = compare_version("lightning_utilities", operator.ge, "0.10.0")
3840

41+
3942
@functools.lru_cache(maxsize=128)
4043
def _try_import_module(module_name: str) -> bool:
4144
try:

src/lightning/pytorch/trainer/connectors/accelerator_connector.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -62,8 +62,7 @@
6262
)
6363
from lightning.pytorch.strategies.ddp import _DDP_FORK_ALIASES
6464
from lightning.pytorch.utilities.exceptions import MisconfigurationException
65-
from lightning.pytorch.utilities.imports import _habana_available_and_importable
66-
from lightning.pytorch.utilities.imports import _lightning_xpu_available
65+
from lightning.pytorch.utilities.imports import _habana_available_and_importable, _lightning_xpu_available
6766
from lightning.pytorch.utilities.rank_zero import rank_zero_info, rank_zero_warn
6867

6968
log = logging.getLogger(__name__)

src/lightning/pytorch/trainer/setup.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,8 +28,7 @@
2828
XLAProfiler,
2929
)
3030
from lightning.pytorch.utilities.exceptions import MisconfigurationException
31-
from lightning.pytorch.utilities.imports import _habana_available_and_importable
32-
from lightning.pytorch.utilities.imports import _lightning_xpu_available
31+
from lightning.pytorch.utilities.imports import _habana_available_and_importable, _lightning_xpu_available
3332
from lightning.pytorch.utilities.rank_zero import rank_zero_info, rank_zero_warn
3433

3534

src/lightning/pytorch/utilities/imports.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,7 @@ def _habana_available_and_importable() -> bool:
4949
# also imports Lightning
5050
return bool(_LIGHTNING_HABANA_AVAILABLE) and _try_import_module("lightning_habana")
5151

52+
5253
_LIGHTNING_XPU_AVAILABLE = RequirementCache("lightning-xpu")
5354

5455

0 commit comments

Comments (0)