-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtrain.py
More file actions
77 lines (49 loc) · 2.47 KB
/
train.py
File metadata and controls
77 lines (49 loc) · 2.47 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
import numpy as np
import tensorflow as tf
import keras
import sound
import outils_entrainement as tt # stands for train tools
from keras.layers import Input, Dense, Add, Lambda, TimeDistributed
from keras.layers.recurrent import GRU
from keras.models import Model
import keras.backend as K
from keras.optimizers import SGD
from keras.models import load_model
def ctc_loss_lambda(args):
    """Lambda-layer wrapper around the Keras backend CTC batch cost.

    ``args`` is the 4-tuple packed by the Lambda layer:
    (y_pred, y_true, input_length, label_length). Returns the per-sample
    CTC loss tensor produced by ``K.ctc_batch_cost``.
    """
    predictions, targets, pred_lengths, target_lengths = args
    return K.ctc_batch_cost(targets, predictions, pred_lengths, target_lengths)
def ctc(y_true, y_pred):
    """Pass-through Keras loss for the 'main_output' head.

    The model's main output already *is* the CTC loss (computed in-graph
    by a Lambda layer), so this simply averages it over the batch and
    ignores ``y_true``.
    """
    loss_per_sample = y_pred
    return K.mean(loss_per_sample)
def clipped_relu(x):
    """ReLU activation capped at 20, as used in Deep Speech-style models."""
    ceiling = 20
    return keras.activations.relu(x, max_value=ceiling)
import os

# Number of frequency bins in each spectrogram frame fed to the network.
NB_FREQUENCIES = 161

# --- Model inputs ------------------------------------------------------------
# main_input: (batch, time, freq) spectrogram frames. The three auxiliary
# inputs carry the CTC bookkeeping: label sequences and the true lengths of
# each prediction/label sequence.
inputs = Input(shape=(None, NB_FREQUENCIES), name='main_input')
labels = Input(shape=(None,), name='labels')
input_length = Input(shape=(1,), name='input_length')
label_length = Input(shape=(1,), name='label_length')

# --- Acoustic model (Deep Speech-like) ---------------------------------------
# Three per-timestep dense layers with clipped ReLU.
h1 = TimeDistributed(Dense(128, activation=clipped_relu))(inputs)
h2 = TimeDistributed(Dense(128, activation=clipped_relu))(h1)
h3 = TimeDistributed(Dense(128, activation=clipped_relu))(h2)
# Hand-rolled bidirectional recurrence: one backward and one forward GRU.
# NOTE(review): GRU(go_backwards=True) returns its output sequence in
# reversed time order; summing it directly with the forward sequence may
# misalign timesteps. Confirm against training results before relying on it.
lb = GRU(128, go_backwards = True, return_sequences = True)(h3)
lf = GRU(128, return_sequences = True)(h3)
h4 = Add()([lb,lf]) # add the two layers
h5 = TimeDistributed(Dense(128, activation=clipped_relu))(h4)
# 29 output classes — presumably 26 letters + space + apostrophe + CTC blank;
# TODO confirm against the label encoding in outils_entrainement.
h6 = TimeDistributed(Dense(29, activation='softmax'), name='aux_output')(h5)
# The CTC loss is computed inside the graph by this Lambda layer; its output
# (one scalar per sample) is what the 'main_output' loss actually minimises.
loss_out = Lambda(ctc_loss_lambda, output_shape=(1, ), name='main_output')([h6, labels, input_length, label_length])

model = keras.models.Model(inputs=[inputs, labels, input_length, label_length], outputs=[loss_out, h6])
model.summary()

# Resume from previously-saved weights when they exist. The original called
# load_weights unconditionally, which crashes with FileNotFoundError on the
# very first run before any checkpoint has been written.
if os.path.isfile('my_model_weights.h5'):
    model.load_weights('my_model_weights.h5')

sgd = SGD(nesterov=True)
# main_output: mean of the in-graph CTC loss. aux_output: constant dummy loss
# so Keras accepts the second (softmax) output without training against it.
model.compile(loss={'main_output': ctc, 'aux_output': lambda x, y: K.constant([0])}, metrics=['accuracy'], optimizer=sgd)

# --- Data: 80/20 train/test split --------------------------------------------
batch, lab, input_len, lab_len = tt.get_batch()
size_training_set = int(.8 * len(batch))
print('The training set is of size {}\n'.format(size_training_set))
[x_train, x_test] = np.split(batch, [size_training_set])
[y_train, y_test] = np.split(lab, [size_training_set])
[input_len_train, input_len_test] = np.split(input_len, [size_training_set])
[lab_len_train, lab_len_test] = np.split(lab_len, [size_training_set])

# Targets: y_train feeds the main_output plumbing (the ctc loss ignores it);
# x_train is a placeholder target for aux_output, whose dummy loss ignores it.
model.fit([x_train, y_train, input_len_train, lab_len_train], [y_train, x_train], batch_size=100, epochs=1)
score = model.evaluate([x_test, y_test, input_len_test, lab_len_test], [y_test, x_test])
print('The final score is {}'.format(score))

# Persist both the full model and the bare weights (the latter is what the
# resume guard above looks for on the next run).
model.save('my_model.h5')
model.save_weights('my_model_weights.h5')