diff --git a/src/lightning_lite/lite.py b/src/lightning_lite/lite.py
index 163791cb1959f..c1c60bec97ffb 100644
--- a/src/lightning_lite/lite.py
+++ b/src/lightning_lite/lite.py
@@ -22,11 +22,12 @@
 import torch.nn as nn
 from lightning_utilities.core.apply_func import apply_to_collection
 from lightning_utilities.core.overrides import is_overridden
-from lightning_utilities.core.rank_zero import rank_zero_warn
 from torch import Tensor
 from torch.optim import Optimizer
 from torch.utils.data import BatchSampler, DataLoader, DistributedSampler, RandomSampler, SequentialSampler
 
+from lightning_lite.utilities.rank_zero import rank_zero_warn
+
 from lightning_lite.plugins import Precision  # avoid circular imports: # isort: split
 from lightning_lite.accelerators.accelerator import Accelerator
 from lightning_lite.connector import _Connector, _PLUGIN_INPUT, _PRECISION_INPUT
diff --git a/src/lightning_lite/plugins/environments/slurm.py b/src/lightning_lite/plugins/environments/slurm.py
index b7a0a1247bedb..0b72db21b8741 100644
--- a/src/lightning_lite/plugins/environments/slurm.py
+++ b/src/lightning_lite/plugins/environments/slurm.py
@@ -20,10 +20,9 @@
 import sys
 from typing import Optional
 
-from lightning_utilities.core.rank_zero import rank_zero_warn
-
 from lightning_lite.plugins.environments.cluster_environment import ClusterEnvironment
 from lightning_lite.utilities.imports import _IS_WINDOWS
+from lightning_lite.utilities.rank_zero import rank_zero_warn
 from lightning_lite.utilities.warnings import PossibleUserWarning
 
 log = logging.getLogger(__name__)
diff --git a/src/lightning_lite/strategies/deepspeed.py b/src/lightning_lite/strategies/deepspeed.py
index eb5967b230295..6e98b6e862666 100644
--- a/src/lightning_lite/strategies/deepspeed.py
+++ b/src/lightning_lite/strategies/deepspeed.py
@@ -22,7 +22,6 @@
 
 import torch
 from lightning_utilities.core.imports import RequirementCache
-from lightning_utilities.core.rank_zero import rank_zero_only
 from torch.nn import Module
 from torch.optim import Optimizer
 
@@ -33,7 +32,7 @@
 from lightning_lite.strategies.strategy import _Sharded
 from lightning_lite.utilities.distributed import log
 from lightning_lite.utilities.enums import PrecisionType
-from lightning_lite.utilities.rank_zero import rank_zero_info
+from lightning_lite.utilities.rank_zero import rank_zero_info, rank_zero_only
 from lightning_lite.utilities.seed import reset_seed
 from lightning_lite.utilities.types import _PATH
 
diff --git a/src/lightning_lite/utilities/rank_zero.py b/src/lightning_lite/utilities/rank_zero.py
index 6424b38854673..f1cbd91eb6846 100644
--- a/src/lightning_lite/utilities/rank_zero.py
+++ b/src/lightning_lite/utilities/rank_zero.py
@@ -21,11 +21,13 @@
 
 # note: we want to keep these indirections so the `rank_zero_only.rank` is set on import
 from lightning_utilities.core.rank_zero import (  # noqa: F401
+    rank_prefixed_message,
     rank_zero_debug,
     rank_zero_deprecation,
     rank_zero_info,
     rank_zero_only,
     rank_zero_warn,
+    WarningCache,
 )
 
 import lightning_lite
diff --git a/src/lightning_lite/utilities/seed.py b/src/lightning_lite/utilities/seed.py
index a55b5e3dd84dc..631e33caa6cb6 100644
--- a/src/lightning_lite/utilities/seed.py
+++ b/src/lightning_lite/utilities/seed.py
@@ -7,9 +7,8 @@
 
 import numpy as np
 import torch
-from lightning_utilities.core.rank_zero import rank_prefixed_message
 
-from lightning_lite.utilities.rank_zero import _get_rank, rank_zero_only, rank_zero_warn
+from lightning_lite.utilities.rank_zero import _get_rank, rank_prefixed_message, rank_zero_only, rank_zero_warn
 
 log = logging.getLogger(__name__)
 
diff --git a/src/pytorch_lightning/accelerators/accelerator.py b/src/pytorch_lightning/accelerators/accelerator.py
index 448f8e87951be..54c43a9e52b98 100644
--- a/src/pytorch_lightning/accelerators/accelerator.py
+++ b/src/pytorch_lightning/accelerators/accelerator.py
@@ -15,11 +15,11 @@
 from typing import Any, Dict
 
 import torch
-from lightning_utilities.core.rank_zero import rank_zero_deprecation
 
 import pytorch_lightning as pl
 from lightning_lite.accelerators.accelerator import Accelerator as _Accelerator
 from lightning_lite.utilities.types import _DEVICE
+from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation
 
 
 class Accelerator(_Accelerator, ABC):
diff --git a/src/pytorch_lightning/callbacks/early_stopping.py b/src/pytorch_lightning/callbacks/early_stopping.py
index 31e12d508dce2..de6662779cbc9 100644
--- a/src/pytorch_lightning/callbacks/early_stopping.py
+++ b/src/pytorch_lightning/callbacks/early_stopping.py
@@ -23,14 +23,13 @@
 
 import numpy as np
 import torch
-from lightning_utilities.core.rank_zero import rank_prefixed_message
 from torch import Tensor
 
 import pytorch_lightning as pl
 from lightning_lite.utilities.rank_zero import _get_rank
 from pytorch_lightning.callbacks.callback import Callback
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
-from pytorch_lightning.utilities.rank_zero import rank_zero_warn
+from pytorch_lightning.utilities.rank_zero import rank_prefixed_message, rank_zero_warn
 
 log = logging.getLogger(__name__)
 
diff --git a/src/pytorch_lightning/callbacks/model_checkpoint.py b/src/pytorch_lightning/callbacks/model_checkpoint.py
index d7227c78f4e4a..72ad02e5d943a 100644
--- a/src/pytorch_lightning/callbacks/model_checkpoint.py
+++ b/src/pytorch_lightning/callbacks/model_checkpoint.py
@@ -31,7 +31,6 @@
 import numpy as np
 import torch
 import yaml
-from lightning_utilities.core.rank_zero import WarningCache
 from torch import Tensor
 
 import pytorch_lightning as pl
@@ -39,7 +38,7 @@
 from lightning_lite.utilities.types import _PATH
 from pytorch_lightning.callbacks import Checkpoint
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
-from pytorch_lightning.utilities.rank_zero import rank_zero_info, rank_zero_warn
+from pytorch_lightning.utilities.rank_zero import rank_zero_info, rank_zero_warn, WarningCache
 from pytorch_lightning.utilities.types import STEP_OUTPUT
 
 log = logging.getLogger(__name__)
diff --git a/src/pytorch_lightning/core/module.py b/src/pytorch_lightning/core/module.py
index 6a2a79e717c0e..e0b5c226c6215 100644
--- a/src/pytorch_lightning/core/module.py
+++ b/src/pytorch_lightning/core/module.py
@@ -25,7 +25,6 @@
 
 import torch
 from lightning_utilities.core.apply_func import apply_to_collection
-from lightning_utilities.core.rank_zero import WarningCache
 from torch import ScriptModule, Tensor
 from torch.nn import Module
 from torch.optim.optimizer import Optimizer
@@ -49,7 +48,7 @@
 from pytorch_lightning.utilities import GradClipAlgorithmType
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _TORCH_GREATER_EQUAL_1_13
-from pytorch_lightning.utilities.rank_zero import rank_zero_debug, rank_zero_warn
+from pytorch_lightning.utilities.rank_zero import rank_zero_debug, rank_zero_warn, WarningCache
 from pytorch_lightning.utilities.signature_utils import is_param_in_hook_signature
 from pytorch_lightning.utilities.types import (
     _METRIC_COLLECTION,
diff --git a/src/pytorch_lightning/lite/lite.py b/src/pytorch_lightning/lite/lite.py
index 683c94446d1e1..428ace3d5f52f 100644
--- a/src/pytorch_lightning/lite/lite.py
+++ b/src/pytorch_lightning/lite/lite.py
@@ -15,8 +15,6 @@
 from abc import ABC
 from typing import List, Optional, Tuple, Union
 
-from lightning_utilities.core.rank_zero import rank_zero_deprecation, rank_zero_warn
-
 from lightning_lite.connector import _PLUGIN_INPUT as _LITE_PLUGIN_INPUT
 from lightning_lite.connector import _PRECISION_INPUT
 from lightning_lite.lite import LightningLite as _NewLightningLite
@@ -52,6 +50,7 @@
 from pytorch_lightning.strategies import SingleTPUStrategy as PLSingleTPUStrategy
 from pytorch_lightning.strategies import Strategy as PLStrategy
 from pytorch_lightning.strategies import TPUSpawnStrategy as PLTPUSpawnStrategy
+from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation, rank_zero_warn
 
 _PL_PLUGIN = Union[PLPrecisionPlugin, ClusterEnvironment, CheckpointIO]
 _PL_PLUGIN_INPUT = Union[_PL_PLUGIN, str]
diff --git a/src/pytorch_lightning/loops/epoch/prediction_epoch_loop.py b/src/pytorch_lightning/loops/epoch/prediction_epoch_loop.py
index cafc492ae52f5..7596b2ca70c12 100644
--- a/src/pytorch_lightning/loops/epoch/prediction_epoch_loop.py
+++ b/src/pytorch_lightning/loops/epoch/prediction_epoch_loop.py
@@ -2,12 +2,12 @@
 from typing import Any, Dict, Iterator, List, Tuple
 
 import torch
-from lightning_utilities.core.rank_zero import WarningCache
 
 from lightning_lite.utilities import move_data_to_device
 from pytorch_lightning.loops.loop import Loop
 from pytorch_lightning.overrides.distributed import IndexBatchSamplerWrapper
 from pytorch_lightning.trainer.progress import Progress
+from pytorch_lightning.utilities.rank_zero import WarningCache
 
 warning_cache = WarningCache()
 
diff --git a/src/pytorch_lightning/loops/epoch/training_epoch_loop.py b/src/pytorch_lightning/loops/epoch/training_epoch_loop.py
index 777fa01b04847..5bdec1b55226b 100644
--- a/src/pytorch_lightning/loops/epoch/training_epoch_loop.py
+++ b/src/pytorch_lightning/loops/epoch/training_epoch_loop.py
@@ -18,7 +18,6 @@
 import numpy as np
 import torch
 from lightning_utilities.core.apply_func import apply_to_collection
-from lightning_utilities.core.rank_zero import WarningCache
 
 import pytorch_lightning as pl
 from pytorch_lightning import loops  # import as loops to avoid circular imports
@@ -32,7 +31,7 @@
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.fetching import AbstractDataFetcher, DataLoaderIterDataFetcher
 from pytorch_lightning.utilities.model_helpers import is_overridden
-from pytorch_lightning.utilities.rank_zero import rank_zero_warn
+from pytorch_lightning.utilities.rank_zero import rank_zero_warn, WarningCache
 from pytorch_lightning.utilities.signature_utils import is_param_in_hook_signature
 
 _OUTPUTS_TYPE = List[_BATCH_OUTPUTS_TYPE]
diff --git a/src/pytorch_lightning/loops/optimization/optimizer_loop.py b/src/pytorch_lightning/loops/optimization/optimizer_loop.py
index 62f8980d28625..0add3e48cbc94 100644
--- a/src/pytorch_lightning/loops/optimization/optimizer_loop.py
+++ b/src/pytorch_lightning/loops/optimization/optimizer_loop.py
@@ -16,7 +16,6 @@
 from typing import Any, Callable, Dict, List, Optional, Tuple, Union
 
 import torch
-from lightning_utilities.core.rank_zero import WarningCache
 from torch import Tensor
 from torch.optim import Optimizer
 from typing_extensions import OrderedDict
@@ -34,7 +33,7 @@
 from pytorch_lightning.plugins.precision.native_amp import MixedPrecisionPlugin
 from pytorch_lightning.trainer.progress import OptimizationProgress
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
-from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation
+from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation, WarningCache
 from pytorch_lightning.utilities.signature_utils import is_param_in_hook_signature
 from pytorch_lightning.utilities.types import STEP_OUTPUT
 
diff --git a/src/pytorch_lightning/plugins/precision/colossalai.py b/src/pytorch_lightning/plugins/precision/colossalai.py
index caf8c3bfab4f2..3b1198e77260d 100644
--- a/src/pytorch_lightning/plugins/precision/colossalai.py
+++ b/src/pytorch_lightning/plugins/precision/colossalai.py
@@ -13,7 +13,6 @@
 # limitations under the License.
 from typing import Any, Callable, Optional, Union
 
-from lightning_utilities.core.rank_zero import WarningCache
 from torch import Tensor
 from torch.optim import Optimizer
 
@@ -21,6 +20,7 @@
 from lightning_lite.utilities.types import Steppable
 from pytorch_lightning.plugins.precision.precision_plugin import PrecisionPlugin
 from pytorch_lightning.utilities.enums import PrecisionType
+from pytorch_lightning.utilities.rank_zero import WarningCache
 
 warning_cache = WarningCache()
 
diff --git a/src/pytorch_lightning/plugins/precision/deepspeed.py b/src/pytorch_lightning/plugins/precision/deepspeed.py
index 8cafcd20af169..003f299281710 100644
--- a/src/pytorch_lightning/plugins/precision/deepspeed.py
+++ b/src/pytorch_lightning/plugins/precision/deepspeed.py
@@ -14,7 +14,6 @@
 from typing import Any, Callable, Optional, TYPE_CHECKING, Union
 
 from lightning_utilities.core.imports import RequirementCache
-from lightning_utilities.core.rank_zero import WarningCache
 from torch import Tensor
 from torch.optim import LBFGS, Optimizer
 
@@ -26,7 +25,7 @@
 from pytorch_lightning.utilities import GradClipAlgorithmType
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.model_helpers import is_overridden
-from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation
+from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation, WarningCache
 
 _DEEPSPEED_AVAILABLE = RequirementCache("deepspeed")
 if TYPE_CHECKING and _DEEPSPEED_AVAILABLE:
diff --git a/src/pytorch_lightning/plugins/precision/ipu.py b/src/pytorch_lightning/plugins/precision/ipu.py
index 92a15a8711efe..6dd6882ce208c 100644
--- a/src/pytorch_lightning/plugins/precision/ipu.py
+++ b/src/pytorch_lightning/plugins/precision/ipu.py
@@ -13,7 +13,6 @@
 # limitations under the License.
 from typing import Any, Callable, Union
 
-from lightning_utilities.core.rank_zero import WarningCache
 from torch import Tensor
 from torch.optim import LBFGS, Optimizer
 
@@ -24,6 +23,7 @@
 from pytorch_lightning.utilities import GradClipAlgorithmType
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.model_helpers import is_overridden
+from pytorch_lightning.utilities.rank_zero import WarningCache
 
 warning_cache = WarningCache()
 
diff --git a/src/pytorch_lightning/profilers/pytorch.py b/src/pytorch_lightning/profilers/pytorch.py
index 3f3f221af696c..0724e7ee77c0b 100644
--- a/src/pytorch_lightning/profilers/pytorch.py
+++ b/src/pytorch_lightning/profilers/pytorch.py
@@ -20,7 +20,6 @@
 from typing import Any, Callable, ContextManager, Dict, List, Optional, Type, TYPE_CHECKING, Union
 
 import torch
-from lightning_utilities.core.rank_zero import WarningCache
 from torch import nn, Tensor
 from torch.autograd.profiler import record_function
 
@@ -28,7 +27,7 @@
 from pytorch_lightning.profilers.profiler import Profiler
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _KINETO_AVAILABLE
-from pytorch_lightning.utilities.rank_zero import rank_zero_warn
+from pytorch_lightning.utilities.rank_zero import rank_zero_warn, WarningCache
 
 if TYPE_CHECKING:
     from torch.autograd.profiler import EventList
diff --git a/src/pytorch_lightning/strategies/colossalai.py b/src/pytorch_lightning/strategies/colossalai.py
index aeb8b9f723332..ffecc17106f13 100644
--- a/src/pytorch_lightning/strategies/colossalai.py
+++ b/src/pytorch_lightning/strategies/colossalai.py
@@ -16,7 +16,6 @@
 
 import torch
 from lightning_utilities.core.imports import RequirementCache
-from lightning_utilities.core.rank_zero import rank_zero_warn
 from torch import Tensor
 from torch.nn import Module
 from torch.optim.optimizer import Optimizer
@@ -35,6 +34,7 @@
 from pytorch_lightning.trainer.states import TrainerFn
 from pytorch_lightning.utilities.enums import PrecisionType
 from pytorch_lightning.utilities.model_helpers import is_overridden
+from pytorch_lightning.utilities.rank_zero import rank_zero_warn
 from pytorch_lightning.utilities.types import STEP_OUTPUT
 
 _COLOSSALAI_AVAILABLE = RequirementCache("colossalai")
diff --git a/src/pytorch_lightning/strategies/deepspeed.py b/src/pytorch_lightning/strategies/deepspeed.py
index c171071c7496e..47756231058ba 100644
--- a/src/pytorch_lightning/strategies/deepspeed.py
+++ b/src/pytorch_lightning/strategies/deepspeed.py
@@ -24,7 +24,6 @@
 import torch
 from lightning_utilities.core.apply_func import apply_to_collection
 from lightning_utilities.core.imports import RequirementCache
-from lightning_utilities.core.rank_zero import WarningCache
 from torch import Tensor
 from torch.nn import Module
 from torch.optim import Optimizer
@@ -45,7 +44,7 @@
 from pytorch_lightning.utilities import GradClipAlgorithmType
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.model_helpers import is_overridden
-from pytorch_lightning.utilities.rank_zero import rank_zero_info, rank_zero_only, rank_zero_warn
+from pytorch_lightning.utilities.rank_zero import rank_zero_info, rank_zero_only, rank_zero_warn, WarningCache
 from pytorch_lightning.utilities.types import LRSchedulerConfig, STEP_OUTPUT
 
 log = logging.getLogger(__name__)
diff --git a/src/pytorch_lightning/trainer/connectors/data_connector.py b/src/pytorch_lightning/trainer/connectors/data_connector.py
index 47aae74f15b73..aae3cc3f3b167 100644
--- a/src/pytorch_lightning/trainer/connectors/data_connector.py
+++ b/src/pytorch_lightning/trainer/connectors/data_connector.py
@@ -18,7 +18,6 @@
 from weakref import proxy
 
 from lightning_utilities.core.apply_func import apply_to_collection
-from lightning_utilities.core.rank_zero import WarningCache
 from torch.utils.data import BatchSampler, DataLoader, Sampler, SequentialSampler
 from torch.utils.data.distributed import DistributedSampler
 
@@ -35,7 +34,7 @@
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _fault_tolerant_training
 from pytorch_lightning.utilities.model_helpers import is_overridden
-from pytorch_lightning.utilities.rank_zero import rank_zero_warn
+from pytorch_lightning.utilities.rank_zero import rank_zero_warn, WarningCache
 from pytorch_lightning.utilities.types import EVAL_DATALOADERS, TRAIN_DATALOADERS
 from pytorch_lightning.utilities.warnings import PossibleUserWarning
 
diff --git a/src/pytorch_lightning/trainer/connectors/logger_connector/result.py b/src/pytorch_lightning/trainer/connectors/logger_connector/result.py
index d4c74f306b30f..309d49d71b945 100644
--- a/src/pytorch_lightning/trainer/connectors/logger_connector/result.py
+++ b/src/pytorch_lightning/trainer/connectors/logger_connector/result.py
@@ -17,7 +17,6 @@
 
 import torch
 from lightning_utilities.core.apply_func import apply_to_collection, apply_to_collections
-from lightning_utilities.core.rank_zero import WarningCache
 from torch import Tensor
 from torchmetrics import Metric
 from typing_extensions import TypedDict
@@ -30,7 +29,7 @@
 from pytorch_lightning.utilities.imports import _fault_tolerant_training
 from pytorch_lightning.utilities.memory import recursive_detach
 from pytorch_lightning.utilities.metrics import metrics_to_scalars
-from pytorch_lightning.utilities.rank_zero import rank_zero_warn
+from pytorch_lightning.utilities.rank_zero import rank_zero_warn, WarningCache
 from pytorch_lightning.utilities.warnings import PossibleUserWarning
 
 _IN_METRIC = Union[Metric, Tensor]  # Do not include scalars as they were converted to tensors
diff --git a/src/pytorch_lightning/trainer/states.py b/src/pytorch_lightning/trainer/states.py
index 0063ef3fabe96..c0b1c5edb32a4 100644
--- a/src/pytorch_lightning/trainer/states.py
+++ b/src/pytorch_lightning/trainer/states.py
@@ -15,10 +15,9 @@
 from enum import Enum, EnumMeta
 from typing import Any, List, Optional
 
-from lightning_utilities.core.rank_zero import rank_zero_deprecation
-
 from pytorch_lightning.utilities import LightningEnum
 from pytorch_lightning.utilities.enums import _FaultTolerantMode
+from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation
 
 
 class _DeprecationManagingEnumMeta(EnumMeta):
diff --git a/src/pytorch_lightning/utilities/data.py b/src/pytorch_lightning/utilities/data.py
index 91cfc0cefb392..3432c8c3b7e53 100644
--- a/src/pytorch_lightning/utilities/data.py
+++ b/src/pytorch_lightning/utilities/data.py
@@ -17,7 +17,6 @@
 
 import torch
 from lightning_utilities.core.apply_func import is_dataclass_instance
-from lightning_utilities.core.rank_zero import WarningCache
 from torch import Tensor
 from torch.utils.data import (
     BatchSampler,
@@ -39,7 +38,7 @@
 from pytorch_lightning.utilities.auto_restart import CaptureIterableDataset, CaptureMapDataset, FastForwardSampler
 from pytorch_lightning.utilities.enums import _FaultTolerantMode
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
-from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation, rank_zero_warn
+from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation, rank_zero_warn, WarningCache
 
 # might be supported in later releases, see https://github.com/python/mypy/pull/13297
 BType = Union[Tensor, str, Mapping[Any, "BType"], Iterable["BType"]]  # type: ignore[misc]
diff --git a/src/pytorch_lightning/utilities/migration/migration.py b/src/pytorch_lightning/utilities/migration/migration.py
index f1261870e5295..f937f86fcd8b3 100644
--- a/src/pytorch_lightning/utilities/migration/migration.py
+++ b/src/pytorch_lightning/utilities/migration/migration.py
@@ -31,11 +31,10 @@
 import re
 from typing import Any, Callable, Dict, List
 
-from lightning_utilities.core.rank_zero import rank_zero_warn
-
 from lightning_lite.utilities.warnings import PossibleUserWarning
 from pytorch_lightning.callbacks.early_stopping import EarlyStopping
 from pytorch_lightning.callbacks.model_checkpoint import ModelCheckpoint
+from pytorch_lightning.utilities.rank_zero import rank_zero_warn
 
 _CHECKPOINT = Dict[str, Any]
 
diff --git a/src/pytorch_lightning/utilities/migration/utils.py b/src/pytorch_lightning/utilities/migration/utils.py
index 154962cc14400..f231445fc8497 100644
--- a/src/pytorch_lightning/utilities/migration/utils.py
+++ b/src/pytorch_lightning/utilities/migration/utils.py
@@ -17,7 +17,6 @@
 from types import ModuleType, TracebackType
 from typing import Any, Dict, List, Optional, Tuple, Type
 
-from lightning_utilities.core.rank_zero import rank_zero_warn
 from packaging.version import Version
 
 import pytorch_lightning as pl
@@ -25,6 +24,7 @@
 from lightning_lite.utilities.types import _PATH
 from lightning_lite.utilities.warnings import PossibleUserWarning
 from pytorch_lightning.utilities.migration.migration import _migration_index
+from pytorch_lightning.utilities.rank_zero import rank_zero_warn
 
 _log = logging.getLogger(__name__)
 _CHECKPOINT = Dict[str, Any]
diff --git a/src/pytorch_lightning/utilities/model_summary/model_summary.py b/src/pytorch_lightning/utilities/model_summary/model_summary.py
index 9cc59aaab25f0..3fad851664d4f 100644
--- a/src/pytorch_lightning/utilities/model_summary/model_summary.py
+++ b/src/pytorch_lightning/utilities/model_summary/model_summary.py
@@ -20,11 +20,11 @@
 import numpy as np
 import torch
 import torch.nn as nn
-from lightning_utilities.core.rank_zero import WarningCache
 from torch import Tensor
 from torch.utils.hooks import RemovableHandle
 
 import pytorch_lightning as pl
+from pytorch_lightning.utilities.rank_zero import WarningCache
 
 log = logging.getLogger(__name__)
 warning_cache = WarningCache()
diff --git a/src/pytorch_lightning/utilities/rank_zero.py b/src/pytorch_lightning/utilities/rank_zero.py
index 8b716628ade40..c3a3af2347ac6 100644
--- a/src/pytorch_lightning/utilities/rank_zero.py
+++ b/src/pytorch_lightning/utilities/rank_zero.py
@@ -17,12 +17,14 @@
 # note: we want to keep these indirections so the `rank_zero_module.log` is set (on import) for PL users
 from lightning_lite.utilities.rank_zero import LightningDeprecationWarning  # noqa: F401
 from lightning_lite.utilities.rank_zero import (  # noqa: F401
+    rank_prefixed_message,
     rank_zero_debug,
     rank_zero_deprecation,
    rank_zero_info,
     rank_zero_module,
     rank_zero_only,
     rank_zero_warn,
+    WarningCache,
 )
 
 rank_zero_module.log = logging.getLogger(__name__)
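
Note (not part of the patch): every hunk above replaces a direct import from
`lightning_utilities.core.rank_zero` with one from the package-local wrapper
(`lightning_lite.utilities.rank_zero` or `pytorch_lightning.utilities.rank_zero`).
The wrappers matter because, as the in-code comments say, they set module-level
state on import: `rank_zero_only.rank` and `rank_zero_module.log`. Below is a
minimal, self-contained sketch of that pattern. It is illustrative only, written
from scratch rather than copied from `lightning_utilities`; the environment-variable
list in `_get_rank` is an assumption, and the class bodies are simplified.

# --- sketch below (not part of the patch) ---
import os
import warnings
from functools import wraps
from typing import Any, Callable, Optional


def _get_rank() -> Optional[int]:
    # Assumed set of env vars; launchers such as torchrun and SLURM export the rank.
    for key in ("RANK", "SLURM_PROCID", "LOCAL_RANK"):
        if key in os.environ:
            return int(os.environ[key])
    return None  # single-process / interactive: treated as rank 0


def rank_zero_only(fn: Callable) -> Callable:
    """Decorator: call ``fn`` only on rank 0, return ``None`` everywhere else."""

    @wraps(fn)
    def wrapped_fn(*args: Any, **kwargs: Any) -> Any:
        # Reads the attribute set below at import time.
        if getattr(rank_zero_only, "rank", 0) == 0:
            return fn(*args, **kwargs)
        return None

    return wrapped_fn


# This attribute is the import-time state the indirection preserves: downstream
# packages must import *this* decorator object so they all see the same rank,
# instead of re-creating their own copy with an unset rank.
rank_zero_only.rank = _get_rank() or 0


@rank_zero_only
def rank_zero_warn(message: str) -> None:
    warnings.warn(message)


class WarningCache(set):
    """De-duplicates warnings: each distinct message is emitted at most once."""

    def warn(self, message: str) -> None:
        if message not in self:
            self.add(message)
            rank_zero_warn(message)


warning_cache = WarningCache()
warning_cache.warn("`foo` is deprecated")  # emitted (on rank 0 only)
warning_cache.warn("`foo` is deprecated")  # suppressed by the cache
# --- end sketch ---

Routing all imports through one wrapper module, as this patch does, keeps the
rank assignment in a single place; importing the decorator from several modules
would risk warning on every rank.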