Commit bdaab58

Deepak Agrawal authored and facebook-github-bot committed
Throws an error when params in the optimizer are not the same as the module's in make_private (#439)
Summary: Pull Request resolved: #439

Compare nn.Module.parameters() with the list of parameters from all param_groups of the optimizer. If they are not all equal, raise the error "Module parameters are different than optimizer Parameters".

Differential Revision: D37163873

fbshipit-source-id: 8e25fa1738f08c5aa52f856023f72948164d6f0e
1 parent d079ffd commit bdaab58
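
For illustration, here is a minimal standalone sketch of the comparison the commit adds. The helper name `params_match` is hypothetical; the flattening of `optimizer.param_groups` mirrors the committed check.

```python
import torch
import torch.nn as nn

def params_match(module: nn.Module, optimizer: torch.optim.Optimizer) -> bool:
    # Hypothetical helper mirroring the committed check: flatten the params
    # of every param_group into one list, then compare element-wise against
    # the module's parameters.
    optimizer_params = sum([pg["params"] for pg in optimizer.param_groups], [])
    module_params = list(module.parameters())
    return all(torch.eq(p, q).all() for p, q in zip(module_params, optimizer_params))

model = nn.Linear(4, 2)
opt = torch.optim.SGD(model.parameters(), lr=0.1)
assert params_match(model, opt)  # optimizer was built from this module's params
```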

File tree

2 files changed: +37 -0 lines changed

opacus/privacy_engine.py

Lines changed: 4 additions & 0 deletions
```diff
@@ -360,6 +360,10 @@ def make_private(
         if noise_generator and self.secure_mode:
             raise ValueError("Passing seed is prohibited in secure mode")
 
+        # compare module parameters with optimizer parameters
+        if not all(torch.eq(i, j).all() for i, j in zip(list(module.parameters()), sum([param_group["params"] for param_group in optimizer.param_groups], []))):
+            raise ValueError("Module parameters are different than optimizer Parameters")
+
         distributed = isinstance(module, (DPDDP, DDP))
 
         module = self._prepare_model(
```
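
For context (not part of the diff): the main way this check fires in practice is when the optimizer is constructed before ModuleValidator.fix(), which replaces unsupported layers such as BatchNorm with GroupNorm and thereby allocates fresh Parameter objects that the optimizer never sees. A minimal sketch of that failure mode, assuming the default fix() behavior; the BatchNorm weights are perturbed here so that the value comparison actually differs:

```python
import torch
import torch.nn as nn
from opacus.validators import ModuleValidator

model = nn.Sequential(nn.Conv2d(3, 8, 3), nn.BatchNorm2d(8), nn.ReLU())
nn.init.normal_(model[1].weight)  # simulate trained (non-default) BatchNorm weights

optimizer = torch.optim.SGD(model.parameters(), lr=0.01)  # built too early

# fix() swaps BatchNorm2d for a DP-compatible GroupNorm with freshly
# initialized parameters; the optimizer still references the old tensors.
model = ModuleValidator.fix(model)

optimizer_params = sum([pg["params"] for pg in optimizer.param_groups], [])
matches = all(
    torch.eq(p, q).all()
    for p, q in zip(model.parameters(), optimizer_params)
)
print(matches)  # False -> make_private would now raise ValueError
```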

opacus/tests/privacy_engine_test.py

Lines changed: 33 additions & 0 deletions
```diff
@@ -27,6 +27,7 @@
 import torch.nn.functional as F
 from hypothesis import given, settings
 from opacus import PrivacyEngine
+from opacus.validators import ModuleValidator
 from opacus.layers.dp_multihead_attention import DPMultiheadAttention
 from opacus.optimizers.optimizer import _generate_noise
 from opacus.scheduler import StepNoise, _NoiseScheduler
@@ -464,6 +465,38 @@ def test_deterministic_run(self):
             "Model parameters after deterministic run must match",
         )
 
+    def test_param_equal_module_optimizer(self):
+        """Test that the privacy engine raises an error if nn.Module parameters are not equal to the optimizer parameters."""
+        model = models.densenet121(pretrained=True)
+        num_ftrs = model.classifier.in_features
+        model.classifier = nn.Sequential(nn.Linear(num_ftrs, 10), nn.Sigmoid())
+        optimizer = torch.optim.SGD(model.parameters(), lr=0.01, momentum=0, weight_decay=0)
+        dl = self._init_data()
+        model = ModuleValidator.fix(model)
+        privacy_engine = PrivacyEngine()
+        with self.assertRaisesRegex(ValueError, "Module parameters are different than optimizer Parameters"):
+            _, _, _ = privacy_engine.make_private(
+                module=model,
+                optimizer=optimizer,
+                data_loader=dl,
+                noise_multiplier=1.1,
+                max_grad_norm=1.0,
+            )
+
+        # if the optimizer is defined after ModuleValidator.fix(), no error is raised
+        optimizer = torch.optim.SGD(model.parameters(), lr=0.01, momentum=0, weight_decay=0)
+        _, _, _ = privacy_engine.make_private(
+            module=model,
+            optimizer=optimizer,
+            data_loader=dl,
+            noise_multiplier=1.1,
+            max_grad_norm=1.0,
+        )
+        self.assertTrue(1, 1)
+
     @given(noise_scheduler=st.sampled_from([None, StepNoise]))
     @settings(deadline=None)
     def test_checkpoints(self, noise_scheduler: Optional[_NoiseScheduler]):
```
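
The second make_private call in the test doubles as the intended usage pattern: call ModuleValidator.fix() first, then build the optimizer from the fixed model's parameters. A self-contained sketch of that ordering, assuming a toy model and a synthetic DataLoader (both placeholders, not from the commit):

```python
import torch
import torch.nn as nn
from torch.utils.data import DataLoader, TensorDataset
from opacus import PrivacyEngine
from opacus.validators import ModuleValidator

model = nn.Sequential(
    nn.Conv2d(3, 8, 3), nn.BatchNorm2d(8), nn.Flatten(), nn.Linear(8 * 30 * 30, 2)
)
model = ModuleValidator.fix(model)                         # fix first ...
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)   # ... then the optimizer

data = TensorDataset(torch.randn(8, 3, 32, 32), torch.randint(0, 2, (8,)))
model, optimizer, train_loader = PrivacyEngine().make_private(
    module=model,
    optimizer=optimizer,
    data_loader=DataLoader(data, batch_size=4),
    noise_multiplier=1.1,
    max_grad_norm=1.0,
)  # passes the new parameter check: both sides reference the same tensors
```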
