Merge pull request google#70 from jmduarte:master
PiperOrigin-RevId: 367112229
Change-Id: I585caf56e834bfa481d5165229bbb47fb3cd99a9
copybara-github committed Apr 6, 2021
2 parents 59a37ea + bc7b0ee commit 295cd9b
Showing 3 changed files with 10 additions and 9 deletions.
7 changes: 4 additions & 3 deletions qkeras/qconv2d_batchnorm.py
@@ -79,7 +79,6 @@ def __init__(
       trainable=True,
       virtual_batch_size=None,
       adjustment=None,
-      name=None,
 
       # other params
       ema_freeze_delay=None,
@@ -127,7 +126,7 @@ def __init__(
         bias_constraint=bias_constraint,
         kernel_quantizer=kernel_quantizer,
         bias_quantizer=bias_quantizer,
-        name=name)
+        **kwargs)
 
     # initialization of batchnorm part of the composite layer
     self.batchnorm = layers.BatchNormalization(
@@ -139,7 +138,9 @@
         beta_regularizer=beta_regularizer,
         gamma_regularizer=gamma_regularizer,
         beta_constraint=beta_constraint, gamma_constraint=gamma_constraint,
-        fused=False)
+        renorm=renorm, renorm_clipping=renorm_clipping,
+        renorm_momentum=renorm_momentum, fused=fused, trainable=trainable,
+        virtual_batch_size=virtual_batch_size, adjustment=adjustment)
 
     self.ema_freeze_delay = ema_freeze_delay
     assert folding_mode in ["ema_stats_folding", "batch_stats_folding"]
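With this change, QConv2DBatchnorm forwards the batch-renormalization arguments (renorm, renorm_clipping, renorm_momentum, fused, trainable, virtual_batch_size, adjustment) to the internal BatchNormalization layer instead of hard-coding fused=False, and routes remaining keyword arguments such as name to the parent quantized convolution through **kwargs. A minimal usage sketch, assuming qkeras is installed; the filter count, quantizer settings, and layer name below are illustrative, not taken from this commit:

    # Sketch only: exercises the newly forwarded batchnorm arguments.
    from qkeras.qconv2d_batchnorm import QConv2DBatchnorm
    from qkeras.quantizers import quantized_bits

    folded_conv = QConv2DBatchnorm(
        filters=16,
        kernel_size=(3, 3),
        kernel_quantizer=quantized_bits(4, 0, 1),
        bias_quantizer=quantized_bits(4, 0, 1),
        renorm=True,           # now forwarded to layers.BatchNormalization
        renorm_momentum=0.99,  # previously accepted but ignored (fused=False was hard-coded)
        name="folded_conv2d")  # now reaches the parent layer via **kwargs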
10 changes: 5 additions & 5 deletions qkeras/qdepthwiseconv2d_batchnorm.py
@@ -75,7 +75,6 @@ def __init__(
       trainable=True,
       virtual_batch_size=None,
       adjustment=None,
-      name=None,
 
       # other params
       ema_freeze_delay=None,
@@ -128,7 +127,7 @@ def __init__(
         bias_quantizer=bias_quantizer,
         depthwise_range=depthwise_range,
         bias_range=bias_range,
-        name=name)
+        **kwargs)
 
     # initialization of batchnorm part of the composite layer
     self.batchnorm = layers.BatchNormalization(
@@ -139,9 +138,10 @@
         moving_variance_initializer=moving_variance_initializer,
         beta_regularizer=beta_regularizer,
         gamma_regularizer=gamma_regularizer,
-        beta_constraint=beta_constraint,
-        gamma_constraint=gamma_constraint,
-        fused=False)
+        beta_constraint=beta_constraint, gamma_constraint=gamma_constraint,
+        renorm=renorm, renorm_clipping=renorm_clipping,
+        renorm_momentum=renorm_momentum, fused=fused, trainable=trainable,
+        virtual_batch_size=virtual_batch_size, adjustment=adjustment)
 
     self.ema_freeze_delay = ema_freeze_delay
     assert folding_mode in ["ema_stats_folding", "batch_stats_folding"]
2 changes: 1 addition & 1 deletion tests/bn_folding_test.py
@@ -269,7 +269,7 @@ def _get_sequantial_folded_model(x_shape):
         strides=(1, 1),
         use_bias=False,
         depthwise_quantizer=kernel_quantizer,
-        folded_mode=folding_mode,
+        folding_mode=folding_mode,
         ema_freeze_delay=ema_freeze_delay,
         name="folddepthwiseconv2d")(x)
     model = Model(inputs=[x_in], outputs=[x])
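In the test, the keyword folded_mode was a typo for folding_mode, the parameter name the composite layers actually accept (and assert against above). A corrected-call sketch, assuming qkeras and TensorFlow are installed; the input shape, quantizer, and freeze delay are illustrative:

    # Sketch only: mirrors the corrected test construction.
    from tensorflow.keras.layers import Input
    from tensorflow.keras.models import Model
    from qkeras.qdepthwiseconv2d_batchnorm import QDepthwiseConv2DBatchnorm
    from qkeras.quantizers import quantized_bits

    x_in = Input(shape=(4, 4, 1), name="input")
    x = QDepthwiseConv2DBatchnorm(
        kernel_size=(2, 2),
        strides=(1, 1),
        use_bias=False,
        depthwise_quantizer=quantized_bits(4, 0, 1),
        folding_mode="ema_stats_folding",  # was misspelled "folded_mode"
        ema_freeze_delay=10,
        name="folddepthwiseconv2d")(x_in)
    model = Model(inputs=[x_in], outputs=[x])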
