# This script trains a neural network model using accelerometer data
import sys, os
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/../keras-examples/src')
import datetime
import keras
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Activation, Conv3D, LeakyReLU, Flatten, Dropout
from keras.layers import LSTM
from util.random_rotate import uniform_random_rotation_matrix_3D
from util.history import ExperimentHistory

TRAIN_EPOCH = 100
BIN_SIZE = 4
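# With BIN_SIZE = 4 each window of accelerometer samples is voxelized into a
# 16x16x16 (= (64/BIN_SIZE)^3) occupancy histogram; that cube is the input
# volume of the 3D CNN defined below.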


def make_model(num_classes):
    si = int(64/BIN_SIZE)  # Size of input
    model = Sequential()
    model.add(Conv3D(32, kernel_size=(5, 5, 5),
                     input_shape=(si, si, si, 1),
                     data_format='channels_last'))
    model.add(LeakyReLU(0.2))
    for _ in range(2):
        model.add(Conv3D(32, kernel_size=(5, 5, 5), data_format='channels_last'))
        model.add(LeakyReLU(0.2))
    model.add(Flatten())
    model.add(Dense(256))
    model.add(LeakyReLU(0.2))
    model.add(Dropout(0.5))
    model.add(Dense(num_classes))
    model.add(Activation('softmax'))
    model.summary()
    return model
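
# Note: with si = 16 the three unpadded ('valid') 5x5x5 convolutions shrink the
# cube 16 -> 12 -> 8 -> 4 per axis, so Flatten passes 4*4*4*32 = 2048 features
# into the Dense(256) layer.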


def read_accel(filename):
    # Each line of an accelerometer file is expected to hold one sample of
    # space-separated non-negative integer values.
    data = []
    with open(filename) as f:
        for line in f:
            values = line.rstrip().split(" ")
            for v in values:
                if not v.isdigit():
                    print(v, "is not digit")
            data.append([int(v) for v in values])
    return np.array(data)


# Data generator
def data_gen(filelist, batch_size, window_width, random_rotate=False, rotation_range=0):
    # Make a dict for converting a class ID to files
    num_class = 0
    with open(filelist) as f:
        id2f = dict()
        for line in f:
            id, filename = line.split(" ", maxsplit=1)
            id = int(id)
            id2f.setdefault(id, []).append(read_accel(filename.rstrip()))
            num_class = max(num_class, id)
    num_class += 1
    yield num_class  # Return the number of classes first

    # Generate data eternally
    shift = np.array([32, 32, 32])  # Specify the center of rotation
    while True:
        data = []
        labels = []
        for i in range(batch_size):
            while True:
                c = np.random.randint(0, num_class)  # Select a class
                if c in id2f:
                    break
            # Select a file in the class
            f = np.random.randint(0, len(id2f[c]))
            # Select a window position
            p = np.random.randint(0, max(0, len(id2f[c][f])-window_width)+1)
            # Make a one-hot label for the selected class
            labels.append(keras.utils.to_categorical(c, num_classes=num_class))
            # Add the selected window, optionally rotated around the cube center
            if random_rotate:
                while True:
                    m = uniform_random_rotation_matrix_3D()
                    if m[0,0] > rotation_range and m[1,1] > rotation_range and m[2,2] > rotation_range:
                        break
                data.append(np.dot(m, (id2f[c][f][p:p+window_width,:]-shift).T).T+shift)
            else:
                data.append(id2f[c][f][p:p+window_width,:])
            # Zero-pad windows taken from sequences shorter than window_width
            if len(data[-1]) < window_width:
                padding = window_width - len(data[-1])
                data[-1] = np.append(np.zeros((padding, data[-1].shape[1]), dtype=int),
                                     data[-1], axis=0)

        # Make a 3D histogram (x, y and z values are assumed to lie in 0-63)
        hist_data = []
        for i in range(batch_size):
            h = np.zeros(shape=(int(64/BIN_SIZE), int(64/BIN_SIZE), int(64/BIN_SIZE), 1))
            for t in range(data[i].shape[0]):
                if data[i][t,0] > 63 or data[i][t,0] < 0 or \
                   data[i][t,1] > 63 or data[i][t,1] < 0 or \
                   data[i][t,2] > 63 or data[i][t,2] < 0:
                    continue
                x = int(data[i][t,0]/BIN_SIZE)
                y = int(data[i][t,1]/BIN_SIZE)
                z = int(data[i][t,2]/BIN_SIZE)
                h[x, y, z, 0] += 1
            hist_data.append(h)
        yield np.array(hist_data), np.array(labels).reshape((batch_size, num_class))
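
# Every yield after the first is a (histograms, labels) pair with shapes
# (batch_size, 16, 16, 16, 1) and (batch_size, num_class) when BIN_SIZE = 4.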


def get_data(data_generator, steps):
    # Drain `steps` batches from the generator and stack them into fixed arrays
    # so they can be passed to model.evaluate()
    dgr = [next(data_generator) for _ in range(steps)]
    data = np.array([w for v in dgr for w in v[0]])
    labels = np.array([w for v in dgr for w in v[1]])
    return data, labels


def run(eh):
    dg = data_gen("train.list", eh.batch_size, eh.window_width, eh.random_rotate, eh.rotation_range)
    num_classes = next(dg)
    dgv = data_gen("val.list", eh.batch_size, eh.window_width)
    assert num_classes == next(dgv)

    # Make a model
    opt = keras.optimizers.Adamax(lr=0.002, beta_1=0.9, beta_2=0.999,
                                  epsilon=1e-08, decay=1e-4)
    model = make_model(num_classes)
    model.compile(loss='categorical_crossentropy', optimizer=opt, metrics=['accuracy'])

    # Train the model
    hist = model.fit_generator(dg, steps_per_epoch=100,
                               validation_data=dgv, validation_steps=100,
                               epochs=TRAIN_EPOCH).history

    # Evaluate the last model on training and validation data
    score_train = model.evaluate(*get_data(dg, 1000))
    score_val = model.evaluate(*get_data(dgv, 1000))
    comments = "#Final model loss_train={0:10.6f} acc_train={1:10.6f} loss_val={2:10.6f} acc_val={3:10.6f}"\
        .format(score_train[0], score_train[1], score_val[0], score_val[1])
    eh.write("history.log", model, opt, hist, comments)

    # Save the final model
    model.save("models/cnn_b{0}_ww{1}_rr{2}.hdf5"
               .format(BIN_SIZE, eh.window_width, str(eh.rotation_range) if eh.random_rotate else "no"))
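
# Note: run() reads "train.list" and "val.list" from the working directory; each
# line is expected to be "<class_id> <path_to_accelerometer_file>", the format
# parsed by data_gen above.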


if __name__ == '__main__':
    if not os.path.exists("models"):
        os.mkdir("models")
    eh = ExperimentHistory()
    eh.BIN_SIZE = BIN_SIZE
    eh.batch_size = 32
    # Sweep rotation settings and window widths; rotation_range=-2 effectively
    # allows unrestricted rotations, since rotation-matrix entries are >= -1
    for (rr, rrange) in [(False, 0), (True, 0.95), (True, 0.9), (True, 0.5), (True, 0), (True, -2)]:
        eh.random_rotate = rr
        eh.rotation_range = rrange
        for ww in [32, 64, 128, 256, 512]:
            eh.window_width = ww
            run(eh)
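
# A minimal sketch of reusing a saved model later (the path is just an example,
# corresponding to the random_rotate=False, window_width=64 run):
#   from keras.models import load_model
#   model = load_model("models/cnn_b4_ww64_rrno.hdf5")
#   dg = data_gen("val.list", 32, 64)
#   num_classes = next(dg)    # the first yield is the class count
#   hist, labels = next(dg)   # later yields are (histograms, one-hot labels)
#   print(model.predict(hist).argmax(axis=1))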