added missing hashes (#7988)
* added missing hashes

* reverted changes to the example scripts
bdwyer2 authored and fchollet committed Sep 26, 2017
1 parent 771be1a commit fb4a084
Showing 9 changed files with 32 additions and 16 deletions.
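Context for reviewers: `get_file` (from `keras.utils.data_utils`) caches downloads under `~/.keras` and, when `file_hash` is given, verifies the cached file and re-downloads it on a mismatch, so these hashes guard against stale or corrupted caches. A minimal sketch of the verification step, assuming hypothetical helper names and a chunked read (an illustration, not the code touched by this commit):

import hashlib

def hash_file(fpath, algorithm='md5', chunk_size=65535):
    # Stream the file in chunks so large weight files never
    # have to fit in memory at once.
    hasher = hashlib.new(algorithm)
    with open(fpath, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            hasher.update(chunk)
    return hasher.hexdigest()

def validate_file(fpath, file_hash, algorithm='md5'):
    # Every hash in this commit is 32 hex characters, i.e. MD5;
    # get_file's hash_algorithm='auto' infers the algorithm from that length.
    return hash_file(fpath, algorithm) == file_hash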
3 changes: 2 additions & 1 deletion keras/applications/imagenet_utils.py
@@ -83,7 +83,8 @@ def decode_predictions(preds, top=5):
     if CLASS_INDEX is None:
         fpath = get_file('imagenet_class_index.json',
                          CLASS_INDEX_PATH,
-                         cache_subdir='models')
+                         cache_subdir='models',
+                         file_hash='c2c37ea517e94d9795004a39431a14cb')
         CLASS_INDEX = json.load(open(fpath))
     results = []
     for pred in preds:
4 changes: 2 additions & 2 deletions keras/applications/inception_resnet_v2.py
@@ -362,13 +362,13 @@ def InceptionResNetV2(include_top=True,
             weights_path = get_file(weights_filename,
                                     BASE_WEIGHT_URL + weights_filename,
                                     cache_subdir='models',
-                                    md5_hash='e693bd0210a403b3192acc6073ad2e96')
+                                    file_hash='e693bd0210a403b3192acc6073ad2e96')
         else:
             weights_filename = 'inception_resnet_v2_weights_tf_dim_ordering_tf_kernels_notop.h5'
             weights_path = get_file(weights_filename,
                                     BASE_WEIGHT_URL + weights_filename,
                                     cache_subdir='models',
-                                    md5_hash='d19885ff4a710c122648d3b5c3b684e4')
+                                    file_hash='d19885ff4a710c122648d3b5c3b684e4')
         model.load_weights(weights_path)
 
     return model
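Note that the two Inception diffs (here and in inception_v3.py below) rename the deprecated `md5_hash` keyword to the algorithm-agnostic `file_hash`, while the remaining files gain a hash for the first time. A sketch of how such a deprecated alias is typically forwarded inside the helper (an assumption about `get_file`'s internals, not part of this diff):

def get_file(fname, origin, md5_hash=None, file_hash=None,
             hash_algorithm='auto', **kwargs):
    # Backward compatibility: honor the old keyword when the new one is absent.
    if md5_hash is not None and file_hash is None:
        file_hash = md5_hash
        hash_algorithm = 'md5'
    # ... download `origin` into the cache, then verify against file_hash ...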
4 changes: 2 additions & 2 deletions keras/applications/inception_v3.py
@@ -377,13 +377,13 @@ def InceptionV3(include_top=True,
                 'inception_v3_weights_tf_dim_ordering_tf_kernels.h5',
                 WEIGHTS_PATH,
                 cache_subdir='models',
-                md5_hash='9a0d58056eeedaa3f26cb7ebd46da564')
+                file_hash='9a0d58056eeedaa3f26cb7ebd46da564')
         else:
             weights_path = get_file(
                 'inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5',
                 WEIGHTS_PATH_NO_TOP,
                 cache_subdir='models',
-                md5_hash='bcbd6486424b2319ff4ef7d526e38f63')
+                file_hash='bcbd6486424b2319ff4ef7d526e38f63')
         model.load_weights(weights_path)
     return model
 
6 changes: 4 additions & 2 deletions keras/applications/vgg16.py
@@ -162,11 +162,13 @@ def VGG16(include_top=True, weights='imagenet',
         if include_top:
             weights_path = get_file('vgg16_weights_tf_dim_ordering_tf_kernels.h5',
                                     WEIGHTS_PATH,
-                                    cache_subdir='models')
+                                    cache_subdir='models',
+                                    file_hash='64373286793e3c8b2b4e3219cbf3544b')
         else:
             weights_path = get_file('vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5',
                                     WEIGHTS_PATH_NO_TOP,
-                                    cache_subdir='models')
+                                    cache_subdir='models',
+                                    file_hash='6d6bbae143d832006294945121d1f1fc')
         model.load_weights(weights_path)
         if K.backend() == 'theano':
             layer_utils.convert_all_kernels_in_model(model)
6 changes: 4 additions & 2 deletions keras/applications/vgg19.py
@@ -165,11 +165,13 @@ def VGG19(include_top=True, weights='imagenet',
         if include_top:
             weights_path = get_file('vgg19_weights_tf_dim_ordering_tf_kernels.h5',
                                     WEIGHTS_PATH,
-                                    cache_subdir='models')
+                                    cache_subdir='models',
+                                    file_hash='cbe5617147190e668d6c5d5026f83318')
         else:
             weights_path = get_file('vgg19_weights_tf_dim_ordering_tf_kernels_notop.h5',
                                     WEIGHTS_PATH_NO_TOP,
-                                    cache_subdir='models')
+                                    cache_subdir='models',
+                                    file_hash='253f8cb515780f3b799900260a226db6')
         model.load_weights(weights_path)
         if K.backend() == 'theano':
             layer_utils.convert_all_kernels_in_model(model)
6 changes: 4 additions & 2 deletions keras/applications/xception.py
@@ -249,11 +249,13 @@ def Xception(include_top=True, weights='imagenet',
         if include_top:
             weights_path = get_file('xception_weights_tf_dim_ordering_tf_kernels.h5',
                                     TF_WEIGHTS_PATH,
-                                    cache_subdir='models')
+                                    cache_subdir='models',
+                                    file_hash='0a58e3b7378bc2990ea3b43d5981f1f6')
         else:
             weights_path = get_file('xception_weights_tf_dim_ordering_tf_kernels_notop.h5',
                                     TF_WEIGHTS_PATH_NO_TOP,
-                                    cache_subdir='models')
+                                    cache_subdir='models',
+                                    file_hash='b0042744bf5b25fce3cb969f33bebb97')
         model.load_weights(weights_path)
 
     if old_data_format:
7 changes: 5 additions & 2 deletions keras/datasets/imdb.py
@@ -48,7 +48,9 @@ def load_data(path='imdb.npz', num_words=None, skip_top=0,
     if kwargs:
         raise TypeError('Unrecognized keyword arguments: ' + str(kwargs))
 
-    path = get_file(path, origin='https://s3.amazonaws.com/text-datasets/imdb.npz')
+    path = get_file(path,
+                    origin='https://s3.amazonaws.com/text-datasets/imdb.npz',
+                    file_hash='599dadb1135973df5b59232a0e9a887c')
     with np.load(path) as f:
         x_train, labels_train = f['x_train'], f['y_train']
         x_test, labels_test = f['x_test'], f['y_test']
@@ -105,7 +107,8 @@ def get_word_index(path='imdb_word_index.json'):
         The word index dictionary.
     """
     path = get_file(path,
-                    origin='https://s3.amazonaws.com/text-datasets/imdb_word_index.json')
+                    origin='https://s3.amazonaws.com/text-datasets/imdb_word_index.json',
+                    file_hash='bfafd718b763782e994055a2d397834f')
     f = open(path)
     data = json.load(f)
     f.close()
4 changes: 3 additions & 1 deletion keras/datasets/mnist.py
@@ -12,7 +12,9 @@ def load_data(path='mnist.npz'):
     # Returns
         Tuple of Numpy arrays: `(x_train, y_train), (x_test, y_test)`.
     """
-    path = get_file(path, origin='https://s3.amazonaws.com/img-datasets/mnist.npz')
+    path = get_file(path,
+                    origin='https://s3.amazonaws.com/img-datasets/mnist.npz',
+                    file_hash='8a61469f7ea1b51cbae51d4f78837e45')
     f = np.load(path)
     x_train, y_train = f['x_train'], f['y_train']
     x_test, y_test = f['x_test'], f['y_test']
8 changes: 6 additions & 2 deletions keras/datasets/reuters.py
@@ -46,7 +46,9 @@ def load_data(path='reuters.npz', num_words=None, skip_top=0,
     if kwargs:
         raise TypeError('Unrecognized keyword arguments: ' + str(kwargs))
 
-    path = get_file(path, origin='https://s3.amazonaws.com/text-datasets/reuters.npz')
+    path = get_file(path,
+                    origin='https://s3.amazonaws.com/text-datasets/reuters.npz',
+                    file_hash='87aedbeb0cb229e378797a632c1997b6')
     with np.load(path) as f:
         xs, labels = f['x'], f['y']
 
@@ -90,7 +92,9 @@ def get_word_index(path='reuters_word_index.json'):
     # Returns
         The word index dictionary.
     """
-    path = get_file(path, origin='https://s3.amazonaws.com/text-datasets/reuters_word_index.json')
+    path = get_file(path,
+                    origin='https://s3.amazonaws.com/text-datasets/reuters_word_index.json',
+                    file_hash='4d44cc38712099c9e383dc6e5f11a921')
     f = open(path)
     data = json.load(f)
     f.close()
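With these hashes in place, a truncated or tampered download is caught by `get_file` itself rather than surfacing later as an obscure np.load or HDF5 error. A usage sketch with values taken from the mnist.py diff above (get_file re-downloads when the cached file's hash no longer matches):

from keras.utils.data_utils import get_file

path = get_file('mnist.npz',
                origin='https://s3.amazonaws.com/img-datasets/mnist.npz',
                file_hash='8a61469f7ea1b51cbae51d4f78837e45')
# If a cached ~/.keras/datasets/mnist.npz exists but its MD5 differs,
# get_file discards it and downloads a fresh copy.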
