-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathrandom_search.py
More file actions
87 lines (73 loc) · 3.4 KB
/
random_search.py
File metadata and controls
87 lines (73 loc) · 3.4 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
###############################################################################
############# Loading packages and set parameters ##################
###############################################################################
###############################################################################
# Imports
import numpy as np
import pandas as pd
from train_model import train_model
# Constant (non-searched) training parameters, shared by every repetition.
learning_rate = 1e-3
training_epochs = 1
# Hyperparameter grids: each random-search repetition draws one value per grid.
embed_sz_range = [25, 50, 128, 256]       # embedding dimension
hidden_sz_range = [128, 256, 512, 1024, 2048]  # recurrent hidden size
fc_sz_range = [128, 256, 512, 1024, 2048]      # fully-connected layer size
dropout_rate_range = [0, 0.25, 0.5, 0.75]      # dropout probability
# Number of random-search repetitions (i.e. sampled configurations).
nb_repeat = 50
# Your path to the results folder here. Should contain a directory data.
path_results = "xx"
###############################################################################
###################### Random search ########################################
###############################################################################
# One accumulator per sampled hyperparameter, plus one for the resulting loss.
tested_embed_sz, tested_hidden_sz, tested_fc_sz, tested_dropout_rate, val_losses = [], [], [], [], []
for n in range(nb_repeat):
    print("Repetition ", n+1, "/", nb_repeat)
    # Draw one value uniformly at random from each grid and record it.
    # NOTE: the draw order (embed, hidden, fc, dropout) is fixed so the
    # sequence of RNG consumption stays deterministic for a given seed.
    drawn = []
    for store, grid in ((tested_embed_sz, embed_sz_range),
                        (tested_hidden_sz, hidden_sz_range),
                        (tested_fc_sz, fc_sz_range),
                        (tested_dropout_rate, dropout_rate_range)):
        value = np.random.choice(grid)
        store.append(value)
        drawn.append(value)
    embed_sz, hidden_sz, fc_sz, dropout_rate = drawn
    print(
        f"Embed_size : {embed_sz}; hidden size: {hidden_sz}; fc_sz : {fc_sz}; dropout rate : {dropout_rate}"
    )
    # Train once with this configuration and keep the validation loss.
    val_loss = train_model(path_results,
                           model="TLSTM",
                           hidden_sz=hidden_sz,
                           fc_sz=fc_sz,
                           dropout_rate=dropout_rate,
                           discount="log",
                           learning_rate=learning_rate,
                           training_epochs=training_epochs,
                           embed_sz=embed_sz,
                           survival=True,
                           bidirectional=True,
                           save_results=False,
                           return_val_loss=True)
    val_losses.append(val_loss)
###############################################################################
###################### Save results #########################################
###############################################################################
# Report the configuration that achieved the lowest validation loss.
i = np.argmin(val_losses)
print("Best loss val is :", val_losses[i], " for embed size ", tested_embed_sz[i],
      ", hidden size ", tested_hidden_sz[i],
      " ,fc size ", tested_fc_sz[i],
      " ,dropout rate ", tested_dropout_rate[i])
# Build the results table directly from the per-hyperparameter lists.
# (The previous np.concatenate-of-expand_dims approach coerced every column,
# including the integer sizes, to float64, so the CSV stored "25.0" etc.)
out_df = pd.DataFrame({
    "Embed_sz": tested_embed_sz,
    "Hidden_sz": tested_hidden_sz,
    "Fc_sz": tested_fc_sz,
    "dropout_rate": tested_dropout_rate,
    "val_losses": val_losses,
})
# index=False: the positional row index carries no information here.
out_df.to_csv(path_results + "/data/random_search.csv", index=False)