-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathcopiedLinreg.py
76 lines (63 loc) · 1.88 KB
/
copiedLinreg.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
# imports
import numpy as np
class LinearRegressionUsingGD:
    """Linear Regression trained with batch Gradient Descent.

    Parameters
    ----------
    eta : float
        Learning rate (step size for each gradient update).
    n_iterations : int
        Number of passes over the training set.

    Attributes
    ----------
    w_ : ndarray of shape (n_features, 1)
        Weights after fitting the model.
    cost_ : list of float
        Total error of the model after each iteration.
    """
    def __init__(self, eta=0.05, n_iterations=1000):
        self.eta = eta
        self.n_iterations = n_iterations

    def fit(self, x, y):
        """Fit the training data.

        Parameters
        ----------
        x : array-like, shape = [n_samples, n_features]
            Training samples.
        y : array-like, shape = [n_samples] or [n_samples, 1]
            Target values.

        Returns
        -------
        self : object
        """
        # BUGFIX: a 1-D y of shape (m,) used to broadcast against the
        # (m, 1) predictions into an (m, m) residual matrix, silently
        # producing garbage weights. Normalize y to a column vector;
        # (m, 1) inputs pass through unchanged.
        y = np.asarray(y, dtype=float).reshape(-1, 1)
        self.cost_ = []
        self.w_ = np.zeros((x.shape[1], 1))
        m = x.shape[0]
        for _ in range(self.n_iterations):
            y_pred = np.dot(x, self.w_)
            residuals = y_pred - y
            # Gradient of the MSE cost w.r.t. the weights (before the 1/m scale).
            gradient_vector = np.dot(x.T, residuals)
            self.w_ -= (self.eta / m) * gradient_vector
            # Half mean squared error; stored as a plain float per iteration.
            cost = float(np.sum(residuals ** 2) / (2 * m))
            self.cost_.append(cost)
        return self

    def predict(self, x):
        """Predict target values after the model has been trained.

        Parameters
        ----------
        x : array-like, shape = [n_samples, n_features]
            Test samples.

        Returns
        -------
        ndarray of shape (n_samples, 1)
            Predicted values.
        """
        return np.dot(x, self.w_)
# mean squared error
# mse = np.sum((y_pred - y_actual)**2)
# # root mean squared error
# # m is the number of training examples
# rmse = np.sqrt(mse/m)
# # sum of square of residuals
# ssr = np.sum((y_pred - y_actual)**2)
# # total sum of squares
# sst = np.sum((y_actual - np.mean(y_actual))**2)
# # R2 score
# r2_score = 1 - (ssr/sst)