@@ -472,27 +472,27 @@ def _set_devices_to_cpu_num_processes(self) -> None:
         self._map_devices_to_accelerator(DeviceType.CPU)
 
     def _map_devices_to_accelerator(self, accelerator: str) -> bool:
-        if self.devices is None:
-            return False
         if accelerator == DeviceType.TPU and _TPU_AVAILABLE:
             if self.devices == "auto":
                 self.devices = TPUAccelerator.auto_device_count()
+            self.devices = self.devices or 1
             self.tpu_cores = device_parser.parse_tpu_cores(self.devices)
             return True
         if accelerator == DeviceType.IPU and _IPU_AVAILABLE:
             if self.devices == "auto":
                 self.devices = IPUAccelerator.auto_device_count()
-            self.ipus = self.devices
+            self.ipus = self.devices = self.devices or 1
             return True
         if accelerator == DeviceType.GPU and torch.cuda.is_available():
             if self.devices == "auto":
                 self.devices = GPUAccelerator.auto_device_count()
-            self.gpus = self.devices
+            self.gpus = self.devices = self.devices or 1
             self.parallel_device_ids = device_parser.parse_gpu_ids(self.devices)
             return True
         if accelerator == DeviceType.CPU:
             if self.devices == "auto":
                 self.devices = CPUAccelerator.auto_device_count()
+            self.devices = self.devices or 1
             if not isinstance(self.devices, int):
                 raise MisconfigurationException(
                     "The flag `devices` must be an int with `accelerator='cpu'`,"
17 changes: 17 additions & 0 deletions tests/accelerators/test_accelerator_connector.py
@@ -1026,6 +1026,23 @@ def test_unsupported_ipu_choice(monkeypatch):
Trainer(accelerator="ipu", precision=64)


@mock.patch("torch.cuda.is_available", return_value=False)
@mock.patch("pytorch_lightning.utilities.imports._TPU_AVAILABLE", return_value=False)
@mock.patch("pytorch_lightning.utilities.imports._IPU_AVAILABLE", return_value=False)
def test_accelerator_auto_choice_and_devices_cpu(is_ipu_available_mock, is_tpu_available_mock, is_gpu_available_mock):
trainer = Trainer(accelerator="auto")
assert trainer.devices == 1
assert trainer.num_processes == 1


@mock.patch("torch.cuda.is_available", return_value=True)
@mock.patch("torch.cuda.device_count", return_value=2)
def test_accelerator_auto_choice_and_devices_gpu(is_gpu_available_mock, device_count_mock):
trainer = Trainer(accelerator="auto")
assert trainer.devices == 1
assert trainer.gpus == 1


@mock.patch("torch.cuda.is_available", return_value=False)
@mock.patch("pytorch_lightning.utilities.imports._TPU_AVAILABLE", return_value=False)
@mock.patch("pytorch_lightning.utilities.imports._IPU_AVAILABLE", return_value=False)
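A note on the new CPU test's signature: `unittest.mock` injects patches bottom-up, so the decorator closest to the function provides the first parameter, which is why `is_ipu_available_mock` comes first while the `torch.cuda.is_available` patch arrives last. A self-contained sketch of that convention, independent of Lightning:

from unittest import mock

@mock.patch("os.path.exists", return_value=True)  # outermost patch -> last parameter
@mock.patch("os.getcwd", return_value="/fake")    # innermost patch -> first parameter
def check(getcwd_mock, exists_mock):
    import os
    assert os.getcwd() == "/fake"       # the mock, not the real cwd
    assert os.path.exists("anything")   # always True while patched
    assert getcwd_mock.call_count == 1

check()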
7 changes: 7 additions & 0 deletions tests/accelerators/test_ipu.py
@@ -604,3 +604,10 @@ def test_devices_auto_choice_ipu():
     trainer = Trainer(accelerator="auto", devices="auto")
     assert trainer.devices == 4
     assert trainer.ipus == 4
+
+
+@RunIf(ipu=True)
+def test_accelerator_auto_choice_and_devices_ipu():
+    trainer = Trainer(accelerator="auto")
+    assert trainer.devices == 1
+    assert trainer.ipus == 1
7 changes: 7 additions & 0 deletions tests/accelerators/test_tpu.py
@@ -317,3 +317,10 @@ def test_devices_auto_choice_tpu():
     trainer = Trainer(accelerator="auto", devices="auto")
     assert trainer.devices == 8
     assert trainer.tpu_cores == 8
+
+
+@RunIf(tpu=True)
+def test_accelerator_auto_choice_and_devices_tpu():
+    trainer = Trainer(accelerator="auto")
+    assert trainer.devices == 1
+    assert trainer.tpu_cores == 1
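All four backends now pin down the same contract: passing only the `accelerator` flag selects a single device instead of leaving `devices` unset. A usage sketch of the resulting behavior, assuming a CPU-only machine so that `accelerator="auto"` falls through to the CPU branch:

from pytorch_lightning import Trainer

trainer = Trainer(accelerator="auto")  # `devices` flag omitted
assert trainer.devices == 1            # coerced by the new `or 1` fallback
assert trainer.num_processes == 1      # CPU branch maps devices to processes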