File tree (Expand / Collapse): 1 file changed, +10 −3 lines changed
src/pytorch_lightning/utilities: 1 file changed, +10 −3 lines changed. Original file line number / Diff line number / Diff line change
27
from torchmetrics import Metric
28
28
from typing_extensions import Protocol, runtime_checkable
29
29
30
+ try :
31
+ from torch.optim.lr_scheduler import LRScheduler as TorchLRScheduler
32
+ except ImportError :
33
+ # For torch <= 1.13.x
34
+ # TODO: Remove once minimum torch version is 1.14 (or 2.0)
35
+ from torch.optim.lr_scheduler import _LRScheduler as TorchLRScheduler
36
+
30
37
from lightning_lite.utilities.types import _LRScheduler, ProcessGroup, ReduceLROnPlateau
31
38
32
39
_NUMBER = Union[int, float]
@@ -111,9 +118,9 @@ def no_sync(self) -> Generator:
111
118
112
119
113
120
# todo: improve LRSchedulerType naming/typing
114
- LRSchedulerTypeTuple = (torch.optim.lr_scheduler._LRScheduler, torch.optim.lr_scheduler.ReduceLROnPlateau)
115
- LRSchedulerTypeUnion = Union[torch.optim.lr_scheduler._LRScheduler, torch.optim.lr_scheduler.ReduceLROnPlateau]
116
- LRSchedulerType = Union[Type[torch.optim.lr_scheduler._LRScheduler], Type[torch.optim.lr_scheduler.ReduceLROnPlateau]]
121
+ LRSchedulerTypeTuple = (TorchLRScheduler, torch.optim.lr_scheduler.ReduceLROnPlateau)
122
+ LRSchedulerTypeUnion = Union[TorchLRScheduler, torch.optim.lr_scheduler.ReduceLROnPlateau]
123
+ LRSchedulerType = Union[Type[TorchLRScheduler], Type[torch.optim.lr_scheduler.ReduceLROnPlateau]]
117
124
LRSchedulerPLType = Union[_LRScheduler, ReduceLROnPlateau]
118
125
119
126
You can’t perform that action at this time.
0 commit comments