Throws an error when params in the optimizer are not the same as the module's in `make_private` (pytorch#439)

Summary:
Pull Request resolved: pytorch#439

Compare `nn.Module.parameters()` with the list of parameters gathered from all `param_groups` of the optimizer. If they are not all equal, raise the error "Module parameters are different than optimizer Parameters".
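For orientation, a minimal, self-contained sketch of this check; the `nn.Linear` module and SGD optimizer below are illustrative stand-ins, not part of the commit:

```python
import torch
import torch.nn as nn

# Illustrative stand-ins; any module/optimizer pair would do.
module = nn.Linear(4, 2)
optimizer = torch.optim.SGD(module.parameters(), lr=0.01)

# Flatten the parameters from every param_group into a single list.
optimizer_params = sum(
    [param_group["params"] for param_group in optimizer.param_groups], []
)

# Element-wise equality between module parameters and optimizer parameters.
params_match = all(
    torch.eq(p, q).all() for p, q in zip(module.parameters(), optimizer_params)
)

if not params_match:
    raise ValueError("Module parameters are different than optimizer Parameters")
```

A mismatch typically arises when the optimizer is built from the original module's parameters and the module is then modified (for example by `ModuleValidator.fix`), so the optimizer still holds the stale tensors.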

Differential Revision: D37163873

fbshipit-source-id: cf2d89ae50ed7387406a0cea362994fb975d2db1
Deepak Agrawal authored and facebook-github-bot committed Jun 17, 2022
1 parent d079ffd commit 7b1ad07
Showing 2 changed files with 53 additions and 1 deletion.
18 changes: 17 additions & 1 deletion opacus/privacy_engine.py
@@ -20,7 +20,8 @@
from opacus.accountants import create_accountant
from opacus.accountants.utils import get_noise_multiplier
from opacus.data_loader import DPDataLoader, switch_generator
from opacus.distributed import DifferentiallyPrivateDistributedDataParallel as DPDDP
from opacus.distributed import \
DifferentiallyPrivateDistributedDataParallel as DPDDP
from opacus.grad_sample.grad_sample_module import GradSampleModule
from opacus.optimizers import DPOptimizer, get_optimizer_class
from opacus.scheduler import _NoiseScheduler
@@ -360,6 +361,21 @@ def make_private(
if noise_generator and self.secure_mode:
raise ValueError("Passing seed is prohibited in secure mode")

# compare module parameters with optimizer parameters
if not all(
torch.eq(i, j).all()
for i, j in zip(
list(module.parameters()),
sum(
[param_group["params"] for param_group in optimizer.param_groups],
[],
),
)
):
raise ValueError(
"Module parameters are different than optimizer Parameters"
)

distributed = isinstance(module, (DPDDP, DDP))

module = self._prepare_model(
36 changes: 36 additions & 0 deletions opacus/tests/privacy_engine_test.py
@@ -32,6 +32,7 @@
from opacus.scheduler import StepNoise, _NoiseScheduler
from opacus.utils.module_utils import are_state_dict_equal
from opacus.validators.errors import UnsupportedModuleError
from opacus.validators.module_validator import ModuleValidator
from torch.utils.data import DataLoader, Dataset, TensorDataset
from torchvision import models, transforms
from torchvision.datasets import FakeData
@@ -464,6 +465,41 @@ def test_deterministic_run(self):
"Model parameters after deterministic run must match",
)

def test_param_equal_module_optimizer(self):
"""Test that the privacy engine raises error if nn.Module parameters are not equal to optimizer parameters"""
model = models.densenet121(pretrained=True)
num_ftrs = model.classifier.in_features
model.classifier = nn.Sequential(nn.Linear(num_ftrs, 10), nn.Sigmoid())
optimizer = torch.optim.SGD(
model.parameters(), lr=0.01, momentum=0, weight_decay=0
)
dl = self._init_data()
model = ModuleValidator.fix(model)
privacy_engine = PrivacyEngine()
with self.assertRaisesRegex(
ValueError, "Module parameters are different than optimizer Parameters"
):
_, _, _ = privacy_engine.make_private(
module=model,
optimizer=optimizer,
data_loader=dl,
noise_multiplier=1.1,
max_grad_norm=1.0,
)

# if the optimizer is defined after ModuleValidator.fix(), no error is raised
optimizer = torch.optim.SGD(
model.parameters(), lr=0.01, momentum=0, weight_decay=0
)
_, _, _ = privacy_engine.make_private(
module=model,
optimizer=optimizer,
data_loader=dl,
noise_multiplier=1.1,
max_grad_norm=1.0,
)
self.assertTrue(1, 1)

@given(noise_scheduler=st.sampled_from([None, StepNoise]))
@settings(deadline=None)
def test_checkpoints(self, noise_scheduler: Optional[_NoiseScheduler]):
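The happy path the second half of the test exercises can be summarized in a short sketch: fix the module first, then build the optimizer from the fixed module's parameters, so `make_private` sees matching parameter sets. The toy in-memory dataset below is an assumption standing in for the test's data loader:

```python
import torch
from opacus import PrivacyEngine
from opacus.validators.module_validator import ModuleValidator
from torch.utils.data import DataLoader, TensorDataset
from torchvision import models

# Fix the model before constructing the optimizer, so both refer to the
# same parameter tensors.
model = models.densenet121(pretrained=True)
model = ModuleValidator.fix(model)  # e.g. replaces BatchNorm layers

optimizer = torch.optim.SGD(
    model.parameters(), lr=0.01, momentum=0, weight_decay=0
)

# Placeholder dataset; any DataLoader works here.
dataset = TensorDataset(torch.randn(8, 3, 224, 224), torch.randint(0, 10, (8,)))
dl = DataLoader(dataset, batch_size=4)

privacy_engine = PrivacyEngine()
model, optimizer, dl = privacy_engine.make_private(
    module=model,
    optimizer=optimizer,
    data_loader=dl,
    noise_multiplier=1.1,
    max_grad_norm=1.0,
)
```

Creating the optimizer before `ModuleValidator.fix(model)` would instead trigger the new `ValueError`, as the first half of the test verifies.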
