|
# https://deeplearningcourses.com/c/data-science-deep-learning-in-theano-tensorflow
# https://www.udemy.com/data-science-deep-learning-in-theano-tensorflow
from __future__ import print_function, division
from builtins import range
# Note: you may need to update your version of future
# sudo pip install -U future

from keras.models import Sequential
from keras.layers import Dense, Activation
from util import get_normalized_data, y2indicator

import matplotlib.pyplot as plt

# NOTE: do NOT name your file keras.py because it will conflict
# with importing keras

# installation is easy! just the usual "sudo pip(3) install keras"


# get the data, same as Theano + Tensorflow examples
# no need to split now, the fit() function will do it
X, Y = get_normalized_data()

# get shapes
N, D = X.shape
K = len(set(Y))

# by default Keras wants one-hot encoded labels
# there's another cost function we can use
# where we can just pass in the integer labels directly
# just like Tensorflow / Theano
Y = y2indicator(Y)


# the model will be a sequence of layers
model = Sequential()


# ANN with layers [784] -> [500] -> [300] -> [10]
model.add(Dense(units=500, input_dim=D))
model.add(Activation('relu'))
model.add(Dense(units=300))  # don't need to specify input_dim
model.add(Activation('relu'))
model.add(Dense(units=K))
model.add(Activation('softmax'))


# list of losses: https://keras.io/losses/
# list of optimizers: https://keras.io/optimizers/
# list of metrics: https://keras.io/metrics/
model.compile(
    loss='categorical_crossentropy',
    optimizer='adam',
    metrics=['accuracy']
)

# note: multiple ways to choose a backend
# either theano, tensorflow, or cntk
# https://keras.io/backend/


# gives us back a keras.callbacks.History object
r = model.fit(X, Y, validation_split=0.33, epochs=15, batch_size=32)
print("Returned:", r)

# print the available keys
# NOTE: the accuracy key names changed in Keras 2.3:
#   Keras <  2.3 -> 'acc' / 'val_acc'
#   Keras >= 2.3 -> 'accuracy' / 'val_accuracy'
print(r.history.keys())

# plot the loss curves
plt.plot(r.history['loss'], label='loss')
plt.plot(r.history['val_loss'], label='val_loss')
plt.legend()
plt.show()

# accuracies — pick whichever key name this Keras version recorded,
# so the plot works both before and after the 2.3 rename
acc_key = 'accuracy' if 'accuracy' in r.history else 'acc'
plt.plot(r.history[acc_key], label=acc_key)
plt.plot(r.history['val_' + acc_key], label='val_' + acc_key)
plt.legend()
plt.show()
| 81 | + |
| 82 | + |
0 commit comments