
Commit f2a591a

EnayatUllah authored and facebook-github-bot committed
Fixed the clipping operation in fast gradient clipping with Privacy Engine (#664)
Summary: Pull Request resolved: #664

Earlier, the privacy engine erroneously wasn't using the max_grad_norm argument, which informs the clipping norm. It is fixed now.

Reviewed By: HuanyuZhang

Differential Revision: D60917356

fbshipit-source-id: aad3bcf08048027d93ae44400caf8d7601d90961
1 parent f1412fa · commit f2a591a
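For context, the path this commit fixes is the one a user hits when enabling fast gradient clipping (ghost clipping) through the Privacy Engine: `make_private` must forward `max_grad_norm` to the model wrapper. Below is a minimal, hypothetical usage sketch of that call, loosely following Opacus's documented ghost-clipping workflow; the toy model, optimizer, and data loader are placeholders, and exact signatures may differ by Opacus version.

```python
# Hypothetical usage sketch (not part of the commit): exercising the
# grad_sample_mode="ghost" path that this fix affects.
import torch
from torch import nn
from torch.utils.data import DataLoader, TensorDataset
from opacus import PrivacyEngine

model = nn.Linear(16, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
criterion = nn.CrossEntropyLoss()
data_loader = DataLoader(
    TensorDataset(torch.randn(64, 16), torch.randint(0, 2, (64,))),
    batch_size=8,
)

privacy_engine = PrivacyEngine()
# Before this commit, max_grad_norm was dropped on the "ghost" path and the
# wrapped model fell back to its default clipping norm; after the fix it
# reaches wrap_model via _prepare_model.
model, optimizer, criterion, data_loader = privacy_engine.make_private(
    module=model,
    optimizer=optimizer,
    criterion=criterion,
    data_loader=data_loader,
    noise_multiplier=1.0,
    max_grad_norm=1.0,
    grad_sample_mode="ghost",
)
```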


opacus/privacy_engine.py

Lines changed: 17 additions & 6 deletions
```diff
@@ -171,6 +171,7 @@ def _prepare_model(
         module: nn.Module,
         *,
         batch_first: bool = True,
+        max_grad_norm: Union[float, List[float]] = 1.0,
         loss_reduction: str = "mean",
         grad_sample_mode: str = "hooks",
     ) -> AbstractGradSampleModule:
@@ -194,12 +195,21 @@ def _prepare_model(

             return module
         else:
-            return wrap_model(
-                module,
-                grad_sample_mode=grad_sample_mode,
-                batch_first=batch_first,
-                loss_reduction=loss_reduction,
-            )
+            if grad_sample_mode == "ghost":
+                return wrap_model(
+                    module,
+                    grad_sample_mode=grad_sample_mode,
+                    batch_first=batch_first,
+                    loss_reduction=loss_reduction,
+                    max_grad_norm=max_grad_norm,
+                )
+            else:
+                return wrap_model(
+                    module,
+                    grad_sample_mode=grad_sample_mode,
+                    batch_first=batch_first,
+                    loss_reduction=loss_reduction,
+                )

     def is_compatible(
         self,
@@ -355,6 +365,7 @@ def make_private(
         module = self._prepare_model(
             module,
             batch_first=batch_first,
+            max_grad_norm=max_grad_norm,
             loss_reduction=loss_reduction,
             grad_sample_mode=grad_sample_mode,
         )
```
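Why the forwarded argument matters: in fast gradient clipping, max_grad_norm is the per-sample bound C, and each per-sample gradient g_i is scaled by min(1, C / ||g_i||). Below is a small illustrative sketch of that computation; it shows the general DP-SGD clipping rule, not code from this diff, and the 1e-6 term is a hypothetical guard against division by zero.

```python
import torch

def per_sample_clip_factor(
    per_sample_norms: torch.Tensor, max_grad_norm: float
) -> torch.Tensor:
    # min(1, C / ||g_i||): samples with norm <= C are left untouched,
    # larger ones are scaled down so their norm equals C.
    return (max_grad_norm / (per_sample_norms + 1e-6)).clamp(max=1.0)

norms = torch.tensor([0.5, 2.0, 10.0])
print(per_sample_clip_factor(norms, max_grad_norm=1.0))
# ~tensor([1.0000, 0.5000, 0.1000])
```

If max_grad_norm is silently dropped, as it was on the "ghost" path before this commit, the wrapper clips to its default norm instead of the value the user requested, which changes both the utility and the privacy accounting the user expects.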
