training.py
import os
import random
import time

import numpy as np
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Activation, Flatten
from tensorflow.keras.layers import Conv1D, MaxPooling1D, BatchNormalization

ACTIONS = ["left", "right", "none"]
reshape = (-1, 16, 60)  # (samples, channels, values per channel)
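

# create_data builds a balanced dataset from one subdirectory per action.
# Each .npy file under <starting_dir>/<action>/ is assumed to hold windows of
# 16-channel data with 60 values per channel, matching `reshape` above.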
def create_data(starting_dir="data"):
    training_data = {}
    for action in ACTIONS:
        if action not in training_data:
            training_data[action] = []

        data_dir = os.path.join(starting_dir, action)
        for item in os.listdir(data_dir):
            #print(action, item)
            data = np.load(os.path.join(data_dir, item))
            for sample in data:
                training_data[action].append(sample)

    lengths = [len(training_data[action]) for action in ACTIONS]
    print(lengths)

    # shuffle each class in place, then trim every class to the smallest
    # class size so the dataset stays balanced
    for action in ACTIONS:
        np.random.shuffle(training_data[action])
        training_data[action] = training_data[action][:min(lengths)]

    lengths = [len(training_data[action]) for action in ACTIONS]
    print(lengths)

    # creating X, y: pair each sample with its one-hot label
    combined_data = []
    for action in ACTIONS:
        for data in training_data[action]:
            if action == "left":
                combined_data.append([data, [1, 0, 0]])
            elif action == "right":
                combined_data.append([data, [0, 0, 1]])
            elif action == "none":
                combined_data.append([data, [0, 1, 0]])

    np.random.shuffle(combined_data)
    print("length:", len(combined_data))
    return combined_data
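

# Build training data from ./data and held-out validation data from
# ./validation_data, then reshape everything to (samples, 16, 60) for the
# 1D convolutional network below.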
print("creating training data")
traindata = create_data(starting_dir="data")
train_X = []
train_y = []
for X, y in traindata:
train_X.append(X)
train_y.append(y)
print("creating testing data")
testdata = create_data(starting_dir="validation_data")
test_X = []
test_y = []
for X, y in testdata:
test_X.append(X)
test_y.append(y)
print(len(train_X))
print(len(test_X))
print(np.array(train_X).shape)
train_X = np.array(train_X).reshape(reshape)
test_X = np.array(test_X).reshape(reshape)
train_y = np.array(train_y)
test_y = np.array(test_y)
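

# Model: three Conv1D blocks over the 16x60 windows, followed by a dense
# layer and a 3-way softmax over the classes (left / none / right).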
model = Sequential()
model.add(Conv1D(64, 3, input_shape=train_X.shape[1:]))
model.add(Activation('relu'))

model.add(Conv1D(64, 2))
model.add(Activation('relu'))
model.add(MaxPooling1D(pool_size=2))

model.add(Conv1D(64, 2))
model.add(Activation('relu'))
model.add(MaxPooling1D(pool_size=2))

model.add(Flatten())
model.add(Dense(512))

model.add(Dense(3))
model.add(Activation('softmax'))

model.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])
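

# Train one epoch at a time so that each epoch's weights are saved with its
# validation accuracy and loss encoded in the filename.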
epochs = 10
batch_size = 32

# create the output directory if it does not already exist
os.makedirs("new_models", exist_ok=True)

for epoch in range(epochs):
    model.fit(train_X, train_y, batch_size=batch_size, epochs=1, validation_data=(test_X, test_y))
    score = model.evaluate(test_X, test_y, batch_size=batch_size)
    #print(score)
    MODEL_NAME = f"new_models/{round(score[1]*100,2)}-acc-64x3-batch-norm-{epoch}epoch-{int(time.time())}-loss-{round(score[0],2)}.model"
    model.save(MODEL_NAME)
    print("saved:")
    print(MODEL_NAME)