
Commit d11f1bc

Remove deprecated code in pl.utilities.distributed
1 parent 051d316 · commit d11f1bc

File tree

2 files changed (+1, -142 lines)

src/pytorch_lightning/utilities/distributed.py

Lines changed: 1 addition & 86 deletions
@@ -17,16 +17,8 @@
 import torch
 from torch.nn.parallel.distributed import DistributedDataParallel
 
-from lightning_fabric.utilities.distributed import _all_gather_ddp_if_available as new_all_gather_ddp_if_available
 from lightning_fabric.utilities.distributed import _distributed_available as new_distributed_available
-from lightning_fabric.utilities.distributed import _gather_all_tensors as new_gather_all_tensors
-from lightning_fabric.utilities.distributed import (
-    _get_default_process_group_backend_for_device as new_get_default_process_group_backend_for_device,
-)
-from lightning_fabric.utilities.distributed import _init_dist_connection as new_init_dist_connection
-from lightning_fabric.utilities.distributed import _sync_ddp as new_sync_ddp
-from lightning_fabric.utilities.distributed import _sync_ddp_if_available as new_sync_ddp_if_available
-from pytorch_lightning.utilities.rank_zero import rank_zero_debug, rank_zero_deprecation, rank_zero_info
+from pytorch_lightning.utilities.rank_zero import rank_zero_debug, rank_zero_info
 
 
 def register_ddp_comm_hook(
@@ -150,80 +142,3 @@ def _collect_states_on_rank_zero(state: Dict[str, Any]) -> Dict[int, Any]:
     if not new_distributed_available():
         return {0: state}
     return {rank: _broadcast_object_list(state, rank) for rank in range(torch.distributed.get_world_size())}
-
-
-def all_gather_ddp_if_available(*args: Any, **kwargs: Any) -> Any:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.distributed.all_gather_ddp_if_available` has been deprecated in v1.8.0 and will"
-        " be removed in v2.0.0. This function is internal but you can copy over its implementation."
-    )
-    return new_all_gather_ddp_if_available(*args, **kwargs)
-
-
-def distributed_available() -> Any:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.distributed.distributed_available` has been deprecated in v1.8.0 and will"
-        " be removed in v2.0.0. This function is internal but you can copy over its implementation."
-    )
-    return new_distributed_available()
-
-
-def gather_all_tensors(*args: Any, **kwargs: Any) -> Any:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.distributed.gather_all_tensors` has been deprecated in v1.8.0 and will"
-        " be removed in v2.0.0. This function is internal but you can copy over its implementation."
-    )
-    return new_gather_all_tensors(*args, **kwargs)
-
-
-def get_default_process_group_backend_for_device(*args: Any, **kwargs: Any) -> Any:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.distributed.get_default_process_group_backend_for_device` has been deprecated"
-        " in v1.8.0 and will be removed in v2.0.0. This function is internal but you can copy over its implementation."
-        " `lightning_fabric.utilities.distributed.get_default_process_group_backend_for_device` instead."
-    )
-    return new_get_default_process_group_backend_for_device(*args, **kwargs)
-
-
-def init_dist_connection(*args: Any, **kwargs: Any) -> Any:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.distributed.init_dist_connection` has been deprecated in v1.8.0 and will"
-        " be removed in v2.0.0. This function is internal but you can copy over its implementation."
-    )
-    return new_init_dist_connection(*args, **kwargs)
-
-
-def sync_ddp(*args: Any, **kwargs: Any) -> Any:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.distributed.sync_ddp` has been deprecated in v1.8.0 and will"
-        " be removed in v2.0.0. This function is internal but you can copy over its implementation."
-    )
-    return new_sync_ddp(*args, **kwargs)
-
-
-def sync_ddp_if_available(*args: Any, **kwargs: Any) -> Any:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.distributed.sync_ddp_if_available` has been deprecated in v1.8.0 and will"
-        " be removed in v2.0.0. This function is internal but you can copy over its implementation."
-    )
-    return new_sync_ddp_if_available(*args, **kwargs)
-
-
-def tpu_distributed() -> bool:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.distributed.tpu_distributed` has been deprecated in v1.8.0 and will"
-        " be removed in v2.0.0. This function is internal but you can copy over its implementation."
-    )
-    from lightning_fabric.accelerators.tpu import _tpu_distributed
-
-    return _tpu_distributed()
-
-
-def rank_zero_only(*args: Any, **kwargs: Any) -> Any:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.distributed.rank_zero_only` has been deprecated in v1.8.1 and will"
-        " be removed in v2.0.0. You can import it from `pytorch_lightning.utilities` instead."
-    )
-    from pytorch_lightning.utilities.rank_zero import rank_zero_only as new_rank_zero_only
-
-    return new_rank_zero_only(*args, **kwargs)
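The removed functions were thin deprecation shims over `lightning_fabric` internals. For downstream code that still imports them, here is a minimal migration sketch (assuming Lightning 2.x; the underscore-prefixed fabric helpers used below are internal and may change without notice, as the deprecation messages themselves warn):

```python
# Migration sketch for code that used the removed wrappers. Assumes
# pytorch-lightning / lightning-fabric 2.x. The private "_"-prefixed
# names are internal API; copying their implementation into your own
# code, as the deprecation messages suggest, is the safer option.

import torch

# Before (removed in this commit):
#   from pytorch_lightning.utilities.distributed import rank_zero_only
# After -- the public decorator lives here:
from pytorch_lightning.utilities import rank_zero_only

# Private equivalent of the removed distributed_available() shim:
from lightning_fabric.utilities.distributed import _distributed_available


@rank_zero_only
def log_once(msg: str) -> None:
    # Runs only on global rank 0 in a distributed job.
    print(msg)


if _distributed_available():  # replaces the removed distributed_available()
    log_once(f"world size: {torch.distributed.get_world_size()}")
else:
    log_once("running without torch.distributed")
```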
tests/tests_pytorch/deprecated_api/test_remove_2-0.py

Lines changed: 0 additions & 56 deletions
@@ -26,7 +26,6 @@
 from pytorch_lightning.overrides import LightningDistributedModule, LightningParallelModule
 from pytorch_lightning.overrides.base import unwrap_lightning_module
 from pytorch_lightning.overrides.fairscale import LightningShardedDataParallel, unwrap_lightning_module_sharded
-from pytorch_lightning.plugins.environments import LightningEnvironment
 from pytorch_lightning.strategies.bagua import LightningBaguaModule
 from pytorch_lightning.strategies.utils import on_colab_kaggle
 from pytorch_lightning.utilities.apply_func import (
@@ -49,16 +48,6 @@
     parse_hpus,
     parse_tpu_cores,
 )
-from pytorch_lightning.utilities.distributed import (
-    all_gather_ddp_if_available,
-    distributed_available,
-    gather_all_tensors,
-    get_default_process_group_backend_for_device,
-    init_dist_connection,
-    sync_ddp,
-    sync_ddp_if_available,
-    tpu_distributed,
-)
 from pytorch_lightning.utilities.optimizer import optimizer_to_device, optimizers_to_device
 from pytorch_lightning.utilities.seed import pl_worker_init_function, reset_seed, seed_everything
 from pytorch_lightning.utilities.xla_device import inner_f, pl_multi_process, XLADeviceUtils
@@ -126,11 +115,6 @@ def test_v1_10_deprecated_xla_device_utilities():
     with pytest.deprecated_call(match="xla_device.XLADeviceUtils.tpu_device_exists` has been deprecated in v1.8.0"):
         XLADeviceUtils.tpu_device_exists()
 
-    from pytorch_lightning.utilities.distributed import tpu_distributed
-
-    with pytest.deprecated_call(match="tpu_distributed` has been deprecated in v1.8.0"):
-        tpu_distributed()
-
 
 def test_v1_10_deprecated_apply_func_utilities():
     with pytest.deprecated_call(match="apply_func.apply_to_collection` has been deprecated in v1.8.0"):
@@ -200,39 +184,6 @@ def test_v1_10_deprecated_device_parser_utilities():
         parse_tpu_cores(None)
 
 
-def test_v1_10_deprecated_distributed_utilities():
-    with pytest.deprecated_call(match="distributed.all_gather_ddp_if_available` has been deprecated in v1.8.0"):
-        all_gather_ddp_if_available(torch.tensor(1))
-
-    with pytest.deprecated_call(match="distributed.distributed_available` has been deprecated in v1.8.0"):
-        distributed_available()
-
-    with mock.patch("torch.distributed.get_world_size", return_value=2), mock.patch(
-        "torch.distributed.barrier"
-    ), mock.patch("torch.distributed.all_gather"):
-        with pytest.deprecated_call(match="distributed.gather_all_tensors` has been deprecated in v1.8.0"):
-            gather_all_tensors(torch.tensor(1))
-
-    with pytest.deprecated_call(
-        match="distributed.get_default_process_group_backend_for_device` has been deprecated in v1.8.0"
-    ):
-        get_default_process_group_backend_for_device(torch.device("cpu"))
-
-    with mock.patch("torch.distributed.is_initialized", return_value=True):
-        with pytest.deprecated_call(match="distributed.init_dist_connection` has been deprecated in v1.8.0"):
-            init_dist_connection(LightningEnvironment(), "gloo")
-
-    with pytest.deprecated_call(match="distributed.sync_ddp_if_available` has been deprecated in v1.8.0"):
-        sync_ddp_if_available(torch.tensor(1))
-
-    with mock.patch("torch.distributed.barrier"), mock.patch("torch.distributed.all_reduce"):
-        with pytest.deprecated_call(match="distributed.sync_ddp` has been deprecated in v1.8.0"):
-            sync_ddp(torch.tensor(1))
-
-    with pytest.deprecated_call(match="distributed.tpu_distributed` has been deprecated in v1.8.0"):
-        tpu_distributed()
-
-
 def test_v1_10_deprecated_optimizer_utilities():
     with pytest.deprecated_call(match="optimizer.optimizers_to_device` has been deprecated in v1.8.0"):
         optimizers_to_device([torch.optim.Adam(torch.nn.Linear(1, 1).parameters())], "cpu")
@@ -255,10 +206,3 @@ def test_v1_10_deprecated_seed_utilities():
 def test_v1_10_deprecated_accelerator_setup_environment_method():
     with pytest.deprecated_call(match="`Accelerator.setup_environment` has been deprecated in deprecated in v1.8.0"):
         CPUAccelerator().setup_environment(torch.device("cpu"))
-
-
-def test_v1_8_1_deprecated_rank_zero_only():
-    from pytorch_lightning.utilities.distributed import rank_zero_only
-
-    with pytest.deprecated_call(match="rank_zero_only` has been deprecated in v1.8.1"):
-        rank_zero_only(lambda: None)
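The deleted tests all follow one pattern: call the deprecated shim and assert the warning with `pytest.deprecated_call`. A self-contained sketch of that pattern, using hypothetical names (`old_helper`, `_new_helper`) rather than Lightning's own helpers, which emit their warnings via `rank_zero_deprecation`:

```python
# Sketch of the deprecation-test pattern used above. Names are
# hypothetical; Lightning's shims warn via rank_zero_deprecation,
# while this sketch uses a plain DeprecationWarning.
import warnings

import pytest


def _new_helper() -> int:
    return 42


def old_helper() -> int:
    # Thin shim: warn, then delegate to the replacement.
    warnings.warn("`old_helper` has been deprecated in v1.8.0", DeprecationWarning)
    return _new_helper()


def test_old_helper_is_deprecated() -> None:
    # deprecated_call fails the test unless a DeprecationWarning matching
    # the given regex is raised inside the block.
    with pytest.deprecated_call(match="old_helper` has been deprecated in v1.8.0"):
        assert old_helper() == 42
```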
