elasticnet_train.seq
import sys
import python

pydef get_all_training_files_in_folder(directory) -> list[tuple[str, str]]:
    import os
    files = []
    for item in os.listdir(directory):
        item_path = os.path.join(directory, item)
        if os.path.isfile(item_path):
            if item.endswith(".csv"):
                # Pair each .csv feature file with its .labels file of the same name
                files.append((item_path, item_path[:-4] + ".labels"))
    return files

pydef training(directory, train_files, learning_rate, l1, l2):
    import os
    import numpy as np
    y_list = []
    X_list = []
    for csv_f, lbl_f in train_files:
        with open(lbl_f) as lbl_file:
            for line in lbl_file:
                # Each label line has two comma-separated fields; the second is the target
                age, Fage = line.rstrip().split(',')
                y_list.append(Fage)
        with open(csv_f) as csv_file:
            X = np.genfromtxt(csv_file, dtype=float, delimiter=',')
            X_list.append(X)
    X_train = np.concatenate(X_list)
    y_train = np.array(y_list, dtype=float)
    # Dimensions of the training data
    N = X_train.shape[0]
    D = X_train.shape[1]
    # Initialize the coefficient vector for gradient descent
    beta = np.random.randn(D) / np.sqrt(D)
    # Standard scaling: zero mean, unit variance per feature
    X_train = (X_train - np.mean(X_train, axis=0)) / np.std(X_train, axis=0)
    for i in range(10000):
        Yhat = X_train.dot(beta)
        delta = Yhat - y_train
        # Elastic net update: squared-error gradient plus L1 (sign) and L2 (2*beta) penalty gradients
        beta = beta - learning_rate * (X_train.T.dot(delta) + l1 * np.sign(beta) + l2 * 2 * beta)
        mse = delta.dot(delta) / N
        print(mse)
    intercept = np.sum(y_train - np.dot(X_train, beta)) / N
    np.save(os.path.join(directory, "enet_betas.npy"), beta)
    np.save(os.path.join(directory, "enet_intercept.npy"), intercept)
    #print(str(sum(beta != 0)) + " Variables and Neglected " + str(sum(beta == 0)) + " Variables")

input_folder = sys.argv[1]
save_folder = sys.argv[2]
learning_rate = 0.0000001
#L1 regularization term alpha
l1 = 0.02255706
#L2 regularization term lambda
l2 = 0.5
train_files = get_all_training_files_in_folder(input_folder)
training(save_folder, train_files, learning_rate, l1, l2)
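
# Usage sketch (an addition, not part of the original pipeline): one way the saved
# model files could be applied to new data. "predict" and "X_new" are hypothetical
# names. Note that the script above saves only the coefficients and intercept, so
# new data would need to be standardized with the same per-feature mean and std
# used during training, which would have to be recomputed or stored separately.
pydef predict(directory, X_new):
    import os
    import numpy as np
    beta = np.load(os.path.join(directory, "enet_betas.npy"))
    intercept = np.load(os.path.join(directory, "enet_intercept.npy"))
    # Linear elastic net prediction; X_new is assumed to be already standardized
    return X_new.dot(beta) + intercept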