tests/tests_pytorch/plugins/precision — 1 file changed: +5 −8 lines
@@ -14,15 +14,12 @@
 from unittest.mock import Mock
 
 import pytest
-from torch.optim import Optimizer
-
-from lightning.pytorch.plugins import MixedPrecision
-from lightning.pytorch.utilities import GradClipAlgorithmType
-
-from torch import nn
 import torch
+from torch import nn
+from torch.optim import Optimizer
 
 from lightning.pytorch.plugins.precision import MixedPrecision
+from lightning.pytorch.utilities import GradClipAlgorithmType
 
 
 def test_clip_gradients():
@@ -62,7 +59,7 @@ def test_optimizer_amp_scaling_support_in_step_method():
 def test_amp_with_no_grad(precision: str):
     layer = nn.Linear(2, 1)
     x = torch.randn(1, 2)
-    amp = MixedPrecision(precision=precision, device='cpu')
+    amp = MixedPrecision(precision=precision, device="cpu")
 
     with amp.autocast_context_manager():
         with torch.no_grad():
@@ -72,4 +69,4 @@ def test_amp_with_no_grad(precision: str):
 
     loss.backward()
 
-    assert loss.grad_fn is not None
+    assert loss.grad_fn is not None
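
For reference, the behavior that test_amp_with_no_grad pins down can be reproduced with a short standalone script. This is a minimal sketch, not part of the diff: the precision value "bf16-mixed" stands in for the parametrized `precision` argument, and the `loss = layer(x).sum()` line fills a part of the test body elided between the hunks above, so both are assumptions.

import torch
from torch import nn

from lightning.pytorch.plugins.precision import MixedPrecision

layer = nn.Linear(2, 1)
x = torch.randn(1, 2)
# "bf16-mixed" is an assumed value for the parametrized `precision` argument
amp = MixedPrecision(precision="bf16-mixed", device="cpu")

with amp.autocast_context_manager():
    with torch.no_grad():
        layer(x)  # forward pass without recording an autograd graph
    loss = layer(x).sum()  # assumed loss; grad tracking resumes once no_grad exits

loss.backward()
assert loss.grad_fn is not None  # the graph was built despite the earlier no_grad block

The point of the test is that a torch.no_grad() block nested inside the plugin's autocast context only suppresses graph construction while it is active; operations after it exits are tracked normally, so the loss still carries a grad_fn.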