|
22 | 22 | from keras.preprocessing.image import ImageDataGenerator
|
23 | 23 | from keras.layers import Conv2D, MaxPooling2D, ZeroPadding2D
|
24 | 24 | from keras.preprocessing.image import array_to_img, img_to_array, load_img
|
| 25 | +from keras.callbacks import EarlyStopping |
| 26 | +from keras.optimizers import SGD |
25 | 27 | import time
|
26 | 28 |
|
27 | 29 | # Set parameters
|
|
86 | 88 | model.add(Dense(64)) #64
|
87 | 89 | model.add(Activation('relu'))
|
88 | 90 | model.add(Dropout(0.5))
|
89 |
| -model.add(Dense(int(classes_amount), activation='softmax')) #Output dimension |
| 91 | +model.add(Dense(int(classes_amount))) #Output dimension |
| 92 | +model.add(Activation('softmax')) |
90 | 93 | #model.add(Dense(1))
|
91 | 94 | #model.add(Activation('sigmoid')) #only for binary classes
|
92 | 95 |
|
| 96 | + |
| 97 | +#sgd = SGD(lr=0.1, decay=1e-6, momentum=0.9, nesterov=True) |
| 98 | + |
93 | 99 | # categorical_crossentropy for more than 2 classes. binary_crossentropy otherwise
|
94 | 100 | model.compile(loss='categorical_crossentropy',
|
95 | 101 | optimizer='rmsprop', #rmsprop
|
96 | 102 | metrics=['accuracy'])
|
97 | 103 |
|
| 104 | +early_stop = EarlyStopping(monitor='val_loss', patience=4) |
| 105 | + |
98 | 106 | batch_size = 16
|
99 | 107 | nb_epoch = 30
|
100 | 108 | nb_train_samples = 232*classes_amount # old_data == 283,data ==214
|
|
105 | 113 | steps_per_epoch=nb_train_samples / batch_size,
|
106 | 114 | epochs=nb_epoch,
|
107 | 115 | validation_data=validation_generator,
|
108 |
| - validation_steps=nb_validation_samples / batch_size) |
| 116 | +                      validation_steps=nb_validation_samples // batch_size, |
| 117 | + callbacks=[early_stop]) |
109 | 118 |
|
110 | 119 | # Save Model
|
111 | 120 | model_json = model.to_json()
|
|
0 commit comments