Skip to content

Commit

Permalink
Enable improperly skipped attacks (#45)
Browse files Browse the repository at this point in the history
* Use full attack_list

* Update example attacks

* Fix evaluation
  • Loading branch information
nottombrown authored Sep 25, 2018
1 parent e9b3ba5 commit 53f8e85
Show file tree
Hide file tree
Showing 4 changed files with 19 additions and 31 deletions.
6 changes: 6 additions & 0 deletions examples/undefended_keras_resnet/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
## Undefended keras model
To run prediction and make a coverage-error plot:
```
cd examples/undefended_keras_resnet
CUDA_VISIBLE_DEVICES=1 python main.py
```
2 changes: 0 additions & 2 deletions examples/undefended_pytorch_resnet/README.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
# Baseline codes

## Undefended pytorch model

To train the network (and evaluate every epoch):
Expand Down
40 changes: 13 additions & 27 deletions examples/undefended_pytorch_resnet/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,9 @@ def main():
args.workers = int(4 * (args.batch_size / 256))

if args.data == '':
bird_or_bicycle.get_dataset('train')
bird_or_bicycle.get_dataset('test')
bird_or_bicycle.get_dataset('extras')
args.data = bird_or_bicycle.dataset.default_data_root()

if args.gpu is not None:
Expand Down Expand Up @@ -129,33 +132,31 @@ def main():

# Data loading code
traindirs = [os.path.join(args.data, partition)
for partition in ['train', 'extras']]
valdir = os.path.join(args.data, 'test')
for partition in ['extras']]
# Use train as validation because it is IID with the test set
valdir = os.path.join(args.data, 'train')

# this normalization is NOT used, as the attack API requires
# the images to be in [0, 1] range. So we prepend a BatchNorm
# layer to the model instead of normalizing the images in the
# data iter.
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
_unused_normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])

train_dataset = [datasets.ImageFolder(
traindir,
transforms.Compose([
transforms.RandomResizedCrop(224),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
# normalize,
# _unused_normalize,
]))
for traindir in traindirs]
if len(train_dataset) == 1:
train_dataset = train_dataset[0]
else:
train_dataset = torch.utils.data.ConcatDataset(train_dataset)

    # Duplicate the dataset to enlarge it, e.g.:
# train_dataset.samples = train_dataset.samples * 100

train_loader = torch.utils.data.DataLoader(
train_dataset, batch_size=args.batch_size, shuffle=True,
num_workers=args.workers, pin_memory=True)
Expand All @@ -165,7 +166,7 @@ def main():
transforms.Resize(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
# normalize,
# _unused_normalize,
])),
batch_size=args.batch_size, shuffle=False,
num_workers=args.workers, pin_memory=True)
Expand All @@ -174,7 +175,7 @@ def main():
if not args.resume:
print('WARNING: evaluating without loading a checkpoint, use --resume '
'to load a previously trained checkpoint if needed.')
evaluate(val_loader, model)
evaluate(model)
return

for epoch in range(args.start_epoch, args.epochs):
Expand Down Expand Up @@ -296,7 +297,7 @@ def validate_epoch(val_loader, model, criterion):
return top1.avg


def evaluate(val_loader, model):
def evaluate(model):
# ----------------------------------------
# Workaround: tensorflow claims all the visible
# GPU memory upon starting. We use hacky patch
Expand All @@ -312,22 +313,8 @@ def myinit(session_object, target='', graph=None, config=None):
oldinit(session_object, target, graph, config)

tf.Session.__init__ = myinit
# ----------------------------------------

if args.smoke_test:
max_num_batches = 1
else:
max_num_batches = -1 # unlimited

def dataiter_wrapper(pytorch_loader):
for i, (x_t, y_t) in enumerate(pytorch_loader):
# transpose from NCHW to NHWC format
x_np = x_t.cpu().numpy().transpose((0, 2, 3, 1))
y_np = y_t.cpu().numpy()
yield x_np, y_np

if max_num_batches > 0 and i + 1 >= max_num_batches:
break
# ----------------------------------------

def wrapped_model(x_np):
x_np = x_np.transpose((0, 3, 1, 2)) # from NHWC to NCHW
Expand All @@ -338,7 +325,6 @@ def wrapped_model(x_np):

eval_kit.evaluate_bird_or_bicycle_model(
wrapped_model,
dataset_iter=dataiter_wrapper(val_loader),
model_name='undefended_pytorch_resnet'
)

Expand Down
2 changes: 0 additions & 2 deletions unrestricted-advex/unrestricted_advex/eval_kit.py
Original file line number Diff line number Diff line change
Expand Up @@ -296,8 +296,6 @@ def evaluate_bird_or_bicycle_model(model_fn, dataset_iter=None, model_name=None)
boundary_attack._stop_after_n_datapoints = 100
attack_list.append(boundary_attack)

attack_list = [attacks.CleanData()]

return evaluate_two_class_unambiguous_model(
model_fn, dataset_iter,
model_name=model_name,
Expand Down

0 comments on commit 53f8e85

Please sign in to comment.