
Fix criteria entries for supervised compression in ilsvrc2012
yoshitomo-matsubara committed Sep 29, 2024
1 parent 658ceed commit df56b0a
Showing 109 changed files with 7,637 additions and 6,217 deletions.
@@ -100,7 +100,7 @@ train:
     cache_output:
   val_data_loader:
     dataset_id: *imagenet_val
-    sampler: &val_sampler
+    sampler:
       class_or_func: !import_get
         key: 'torch.utils.data.SequentialSampler'
       kwargs:
@@ -126,26 +126,39 @@ train:
       T_max: 300000
     scheduling_step: 1
   criterion:
-    key: 'GeneralizedCustomLoss'
-    org_term:
-      criterion:
-        key: 'CrossEntropyLoss'
-        kwargs:
-          reduction: 'mean'
-      weight: 1.0
-    sub_terms:
-      bpp:
-        criterion:
-          key: 'BppLoss'
-          kwargs:
-            entropy_module_path: 'bottleneck_layer.entropy_bottleneck'
-            reduction: 'batchmean'
-        weight: 1.024e-7
+    key: 'WeightedSumLoss'
+    kwargs:
+      sub_terms:
+        ce:
+          criterion:
+            key: 'CrossEntropyLoss'
+            kwargs:
+              reduction: 'mean'
+          criterion_wrapper:
+            key: 'SimpleLossWrapper'
+            kwargs:
+              input:
+                is_from_teacher: False
+                module_path: '.'
+                io: 'output'
+              target:
+                uses_label: True
+          weight: 1.0
+        bpp:
+          criterion:
+            key: 'BppLoss'
+            kwargs:
+              entropy_module_path: 'bottleneck_layer.entropy_bottleneck'
+              reduction: 'batchmean'
+          weight: 1.024e-7

 test:
   test_data_loader:
     dataset_id: *imagenet_val
-    sampler: *val_sampler
+    sampler:
+      class_or_func: !import_get
+        key: 'torch.utils.data.SequentialSampler'
+      kwargs:
     kwargs:
       batch_size: 1
       num_workers: 16

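The new criterion entry above swaps the old GeneralizedCustomLoss layout for WeightedSumLoss, where each sub-term (the wrapped cross-entropy 'ce' and the bits-per-pixel 'bpp' term from the entropy bottleneck) carries its own weight. The following is a minimal, self-contained sketch of the quantity this configuration asks the trainer to minimize; it is not torchdistill's implementation, and every name in it apart from the weights, reductions, and sub-term labels taken from the config above is an illustrative assumption.

import torch
from torch import nn


class WeightedSumLossSketch(nn.Module):
    # Mirrors the criterion -> kwargs -> sub_terms layout: each sub-term has its
    # own loss and weight, and the total is their weighted sum.
    def __init__(self, ce_weight=1.0, bpp_weight=1.024e-7):
        super().__init__()
        self.ce_weight = ce_weight
        self.bpp_weight = bpp_weight
        self.cross_entropy = nn.CrossEntropyLoss(reduction='mean')

    def forward(self, logits, targets, likelihoods, num_pixels):
        # 'ce' sub-term: cross-entropy between the student output and the labels.
        ce = self.cross_entropy(logits, targets)
        # 'bpp' sub-term: estimated bits per pixel from the entropy bottleneck's
        # likelihoods, averaged over the batch ('batchmean'-style reduction).
        bits = (-torch.log2(likelihoods)).sum()
        bpp = bits / (num_pixels * logits.size(0))
        return self.ce_weight * ce + self.bpp_weight * bpp


# Toy call with random tensors, just to show the shapes involved.
criterion = WeightedSumLossSketch()
logits = torch.randn(4, 1000)                              # batch of 4, 1000 ImageNet classes
targets = torch.randint(0, 1000, (4,))
likelihoods = torch.rand(4, 192, 14, 14).clamp(min=1e-9)   # stand-in bottleneck likelihoods
print(criterion(logits, targets, likelihoods, num_pixels=224 * 224))

In the actual pipeline, the SimpleLossWrapper entry routes the student output (module_path '.', io 'output') and the dataset labels (uses_label True) into CrossEntropyLoss, while BppLoss reads the likelihoods from 'bottleneck_layer.entropy_bottleneck'; the sketch above only approximates that wiring.
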
@@ -100,7 +100,7 @@ train:
     cache_output:
   val_data_loader:
     dataset_id: *imagenet_val
-    sampler: &val_sampler
+    sampler:
       class_or_func: !import_get
         key: 'torch.utils.data.SequentialSampler'
       kwargs:
@@ -126,26 +126,39 @@ train:
       T_max: 300000
     scheduling_step: 1
   criterion:
-    key: 'GeneralizedCustomLoss'
-    org_term:
-      criterion:
-        key: 'CrossEntropyLoss'
-        kwargs:
-          reduction: 'mean'
-      weight: 1.0
-    sub_terms:
-      bpp:
-        criterion:
-          key: 'BppLoss'
-          kwargs:
-            entropy_module_path: 'bottleneck_layer.entropy_bottleneck'
-            reduction: 'batchmean'
-        weight: 1.28e-8
+    key: 'WeightedSumLoss'
+    kwargs:
+      sub_terms:
+        ce:
+          criterion:
+            key: 'CrossEntropyLoss'
+            kwargs:
+              reduction: 'mean'
+          criterion_wrapper:
+            key: 'SimpleLossWrapper'
+            kwargs:
+              input:
+                is_from_teacher: False
+                module_path: '.'
+                io: 'output'
+              target:
+                uses_label: True
+          weight: 1.0
+        bpp:
+          criterion:
+            key: 'BppLoss'
+            kwargs:
+              entropy_module_path: 'bottleneck_layer.entropy_bottleneck'
+              reduction: 'batchmean'
+          weight: 1.28e-8

 test:
   test_data_loader:
     dataset_id: *imagenet_val
-    sampler: *val_sampler
+    sampler:
+      class_or_func: !import_get
+        key: 'torch.utils.data.SequentialSampler'
+      kwargs:
     kwargs:
       batch_size: 1
       num_workers: 16

@@ -100,7 +100,7 @@ train:
     cache_output:
   val_data_loader:
     dataset_id: *imagenet_val
-    sampler: &val_sampler
+    sampler:
       class_or_func: !import_get
         key: 'torch.utils.data.SequentialSampler'
       kwargs:
@@ -126,26 +126,39 @@ train:
       T_max: 300000
     scheduling_step: 1
   criterion:
-    key: 'GeneralizedCustomLoss'
-    org_term:
-      criterion:
-        key: 'CrossEntropyLoss'
-        kwargs:
-          reduction: 'mean'
-      weight: 1.0
-    sub_terms:
-      bpp:
-        criterion:
-          key: 'BppLoss'
-          kwargs:
-            entropy_module_path: 'bottleneck_layer.entropy_bottleneck'
-            reduction: 'batchmean'
-        weight: 2.048e-7
+    key: 'WeightedSumLoss'
+    kwargs:
+      sub_terms:
+        ce:
+          criterion:
+            key: 'CrossEntropyLoss'
+            kwargs:
+              reduction: 'mean'
+          criterion_wrapper:
+            key: 'SimpleLossWrapper'
+            kwargs:
+              input:
+                is_from_teacher: False
+                module_path: '.'
+                io: 'output'
+              target:
+                uses_label: True
+          weight: 1.0
+        bpp:
+          criterion:
+            key: 'BppLoss'
+            kwargs:
+              entropy_module_path: 'bottleneck_layer.entropy_bottleneck'
+              reduction: 'batchmean'
+          weight: 2.048e-7

 test:
   test_data_loader:
     dataset_id: *imagenet_val
-    sampler: *val_sampler
+    sampler:
+      class_or_func: !import_get
+        key: 'torch.utils.data.SequentialSampler'
+      kwargs:
     kwargs:
       batch_size: 1
       num_workers: 16

@@ -100,7 +100,7 @@ train:
     cache_output:
   val_data_loader:
    dataset_id: *imagenet_val
-    sampler: &val_sampler
+    sampler:
       class_or_func: !import_get
         key: 'torch.utils.data.SequentialSampler'
       kwargs:
@@ -126,26 +126,39 @@ train:
       T_max: 300000
     scheduling_step: 1
   criterion:
-    key: 'GeneralizedCustomLoss'
-    org_term:
-      criterion:
-        key: 'CrossEntropyLoss'
-        kwargs:
-          reduction: 'mean'
-      weight: 1.0
-    sub_terms:
-      bpp:
-        criterion:
-          key: 'BppLoss'
-          kwargs:
-            entropy_module_path: 'bottleneck_layer.entropy_bottleneck'
-            reduction: 'batchmean'
-        weight: 3.2768e-6
+    key: 'WeightedSumLoss'
+    kwargs:
+      sub_terms:
+        ce:
+          criterion:
+            key: 'CrossEntropyLoss'
+            kwargs:
+              reduction: 'mean'
+          criterion_wrapper:
+            key: 'SimpleLossWrapper'
+            kwargs:
+              input:
+                is_from_teacher: False
+                module_path: '.'
+                io: 'output'
+              target:
+                uses_label: True
+          weight: 1.0
+        bpp:
+          criterion:
+            key: 'BppLoss'
+            kwargs:
+              entropy_module_path: 'bottleneck_layer.entropy_bottleneck'
+              reduction: 'batchmean'
+          weight: 3.2768e-6

 test:
   test_data_loader:
     dataset_id: *imagenet_val
-    sampler: *val_sampler
+    sampler:
+      class_or_func: !import_get
+        key: 'torch.utils.data.SequentialSampler'
+      kwargs:
     kwargs:
       batch_size: 1
       num_workers: 16

@@ -100,7 +100,7 @@ train:
     cache_output:
   val_data_loader:
     dataset_id: *imagenet_val
-    sampler: &val_sampler
+    sampler:
       class_or_func: !import_get
         key: 'torch.utils.data.SequentialSampler'
       kwargs:
@@ -126,26 +126,39 @@ train:
       T_max: 300000
     scheduling_step: 1
   criterion:
-    key: 'GeneralizedCustomLoss'
-    org_term:
-      criterion:
-        key: 'CrossEntropyLoss'
-        kwargs:
-          reduction: 'mean'
-      weight: 1.0
-    sub_terms:
-      bpp:
-        criterion:
-          key: 'BppLoss'
-          kwargs:
-            entropy_module_path: 'bottleneck_layer.entropy_bottleneck'
-            reduction: 'batchmean'
-        weight: 8.192e-7
+    key: 'WeightedSumLoss'
+    kwargs:
+      sub_terms:
+        ce:
+          criterion:
+            key: 'CrossEntropyLoss'
+            kwargs:
+              reduction: 'mean'
+          criterion_wrapper:
+            key: 'SimpleLossWrapper'
+            kwargs:
+              input:
+                is_from_teacher: False
+                module_path: '.'
+                io: 'output'
+              target:
+                uses_label: True
+          weight: 1.0
+        bpp:
+          criterion:
+            key: 'BppLoss'
+            kwargs:
+              entropy_module_path: 'bottleneck_layer.entropy_bottleneck'
+              reduction: 'batchmean'
+          weight: 8.192e-7

 test:
   test_data_loader:
     dataset_id: *imagenet_val
-    sampler: *val_sampler
+    sampler:
+      class_or_func: !import_get
+        key: 'torch.utils.data.SequentialSampler'
+      kwargs:
     kwargs:
       batch_size: 1
       num_workers: 16

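The five criterion diffs shown above are otherwise identical; the only value that changes between files is the BppLoss weight (1.28e-8, 1.024e-7, 2.048e-7, 8.192e-7, and 3.2768e-6), i.e. the beta in total = 1.0 * cross_entropy + beta * bpp. A larger weight should push the entropy bottleneck toward lower bitrates at some cost in classification accuracy, though the exact trade-off depends on the rest of each training config.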