Skip to content
Closed
Changes from 1 commit
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
188bb42
add state_dict to privacy accountant
karthikprasad May 17, 2022
fe891c9
minor fixes and docstrings in accountant
karthikprasad May 17, 2022
6083f99
some more minor fixes in accountant
karthikprasad May 17, 2022
26348bc
add state dict support for GradSampleModule and save/load checkpoint …
karthikprasad May 17, 2022
01c5c4c
import typevar
karthikprasad May 17, 2022
13c272e
accountant unit test
karthikprasad May 18, 2022
f253a5e
lint fix in test
karthikprasad May 18, 2022
6d11754
fix typo
karthikprasad May 18, 2022
29e2c28
fix var name in test
karthikprasad May 18, 2022
66e8094
fix num steps in test
karthikprasad May 18, 2022
5887f78
fix lint again
karthikprasad May 18, 2022
64a1632
add-ons to GradSampleModule state_dict
karthikprasad May 22, 2022
345d1d7
fixes to GS and test
karthikprasad May 22, 2022
ec2fd92
test privacy engine checkpointing
karthikprasad May 23, 2022
eb3224b
remove debug comments
karthikprasad May 23, 2022
b6d5a86
fix lint
karthikprasad May 23, 2022
29ff3a8
fix lint again
karthikprasad May 23, 2022
f69819b
Minor fixex in FAQ (#430)
Kevin-Abd May 20, 2022
86a8e0d
disable poisson sampling in checkpoints test
karthikprasad May 23, 2022
d3591b0
rebase
karthikprasad May 23, 2022
8ad0545
fix sort order
karthikprasad May 23, 2022
f14e810
fix black
karthikprasad May 23, 2022
2416bb1
some more lints
karthikprasad May 23, 2022
051bc0c
address comments
karthikprasad May 25, 2022
e655df4
address comments
karthikprasad May 31, 2022
ba29092
Merge branch 'main' into master
karthikprasad May 31, 2022
33ee300
fix flake lint
karthikprasad May 31, 2022
1f9297a
Merge branch 'master' of https://github.com/karthikprasad/opacus
karthikprasad May 31, 2022
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
remove debug comments
  • Loading branch information
karthikprasad committed May 23, 2022
commit eb3224b90f3adf441c7532d1e2d54ce6b5174bdf
62 changes: 31 additions & 31 deletions opacus/tests/privacy_engine_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -729,26 +729,26 @@ def batch_second_collate(batch):
return data, labels


class PrivacyEngineTextTest(BasePrivacyEngineTest, unittest.TestCase):
    """Runs the shared PrivacyEngine test suite against a text/attention model.

    Inherits all test cases from ``BasePrivacyEngineTest``; this subclass only
    swaps in sequence-style data and an attention model.
    """

    def setUp(self):
        super().setUp()
        # Attention model consumes (seq, batch, ...) tensors, so the base
        # suite must not assume the batch dimension comes first.
        self.BATCH_FIRST = False

    def _init_data(self):
        """Return a DataLoader of random token sequences with integer labels."""
        # NOTE(review): x is laid out sequence-first (12, DATA_SIZE) —
        # confirm against MockTextDataset's expected layout.
        x = torch.randint(0, 100, (12, self.DATA_SIZE))
        y = torch.randint(0, 12, (self.DATA_SIZE,))
        ds = MockTextDataset(x, y)
        return DataLoader(
            ds,
            batch_size=self.BATCH_SIZE,
            collate_fn=batch_second_collate,
            drop_last=True,
        )

    def _init_model(
        self, private=False, state_dict=None, model=None, **privacy_engine_kwargs
    ):
        """Return a fresh attention model; the keyword args are accepted only
        to satisfy the base-class hook signature and are unused here."""
        return SampleAttnNet()


class SampleTiedWeights(nn.Module):
Expand Down Expand Up @@ -777,15 +777,15 @@ def forward(self, x):
return x


class PrivacyEngineTiedWeightsTest(BasePrivacyEngineTest, unittest.TestCase):
    """Runs the shared PrivacyEngine test suite against a tied-weights model.

    Inherits all test cases from ``BasePrivacyEngineTest``; this subclass only
    swaps in integer-pair data and a ``SampleTiedWeights`` model.
    """

    def _init_data(self):
        """Return a DataLoader of random integer (input, label) pairs."""
        ds = TensorDataset(
            torch.randint(low=0, high=100, size=(self.DATA_SIZE,)),
            torch.randint(low=0, high=100, size=(self.DATA_SIZE,)),
        )
        return DataLoader(ds, batch_size=self.BATCH_SIZE, drop_last=True)

    def _init_model(
        self, private=False, state_dict=None, model=None, **privacy_engine_kwargs
    ):
        """Return the tied-weights model with tying disabled; the keyword args
        are accepted only to satisfy the base-class hook signature."""
        return SampleTiedWeights(tie=False)