Commit 60683d6

Fixes

1 parent: c22ff0a

File tree: 1 file changed, +2 −2 lines

tests/tests_pytorch/models/test_ddp_fork_amp.py

Lines changed: 2 additions & 2 deletions
@@ -15,7 +15,7 @@
 
 import torch
 
-from pytorch_lightning.plugins import NativeMixedPrecisionPlugin
+from pytorch_lightning.plugins import MixedPrecisionPlugin
 from tests_pytorch.helpers.runif import RunIf
 
 
@@ -24,7 +24,7 @@
 def test_amp_gpus_ddp_fork():
     """Ensure the use of native AMP with `ddp_fork` (or associated alias strategies) does not generate CUDA
     initialization errors."""
-    _ = NativeMixedPrecisionPlugin(precision=16, device="cuda")
+    _ = MixedPrecisionPlugin(precision=16, device="cuda")
     with multiprocessing.get_context("fork").Pool(1) as pool:
         in_bad_fork = pool.apply(torch.cuda._is_in_bad_fork)
     assert not in_bad_fork
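
For context, here is a minimal standalone sketch of the fork-safety check the test above performs. It assumes the post-rename import pytorch_lightning.plugins.MixedPrecisionPlugin shown in the diff; the helper name check_cuda_fork_safe is illustrative and not part of the commit.

# Minimal sketch (not part of the commit): verify that constructing the plugin
# in the parent process does not initialize CUDA, so a forked child stays usable.
import multiprocessing

import torch

from pytorch_lightning.plugins import MixedPrecisionPlugin  # import path from the diff above


def check_cuda_fork_safe() -> bool:  # illustrative helper, not from the repo
    # Constructing the plugin should not create a CUDA context in the parent.
    _ = MixedPrecisionPlugin(precision=16, device="cuda")
    with multiprocessing.get_context("fork").Pool(1) as pool:
        # True if CUDA was initialized before the fork, which would break the child process.
        in_bad_fork = pool.apply(torch.cuda._is_in_bad_fork)
    return not in_bad_fork


if __name__ == "__main__":
    print("fork-safe:", check_cuda_fork_safe())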
