Skip to content

Commit 5c8c0cc

Browse files
authored
Create gradient descent python
1 parent beb3cfd commit 5c8c0cc

1 file changed

Lines changed: 41 additions & 0 deletions

File tree

gradient descent python

Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
import numpy as np
2+
import matplotlib.pyplot as plt
3+
from sklearn.datasets import load_wine
4+
from sklearn.preprocessing import StandardScaler
5+
6+
7+
def gradient_descent(X_b, y, learning_rate=0.1, iterations=100, theta=None):
    """Fit linear-regression parameters to (X_b, y) by batch gradient descent.

    Parameters
    ----------
    X_b : ndarray of shape (n_samples, n_features + 1)
        Design matrix with a leading bias column of ones.
    y : ndarray of shape (n_samples,)
        Continuous targets.
    learning_rate : float, default 0.1
        Step size for each parameter update.
    iterations : int, default 100
        Number of full-batch updates to perform.
    theta : ndarray of shape (n_features + 1,), optional
        Initial parameters; drawn from a standard normal when omitted.

    Returns
    -------
    tuple
        (theta, loss_history): the final parameters and the MSE recorded at
        the start of each iteration (i.e. before that iteration's update).
    """
    m = len(y)
    if theta is None:
        # Size the parameter vector from the design matrix instead of
        # hard-coding 2, so any number of features works.
        theta = np.random.randn(X_b.shape[1])
    loss_history = []
    for _ in range(iterations):
        predictions = X_b.dot(theta)
        errors = predictions - y
        # Gradient of (1/m) * sum(errors**2) with respect to theta.
        gradient = (2 / m) * X_b.T.dot(errors)
        theta = theta - learning_rate * gradient
        loss_history.append((1 / m) * np.sum(errors ** 2))  # MSE (pre-update)
    return theta, loss_history


def main():
    """Load the wine data, run gradient descent, and plot the loss curve."""
    data = load_wine()
    X = data.data[:, :1]  # Use the first feature for simplicity
    y = data.target.astype(float)

    # Standardize the feature so a single learning rate behaves well.
    scaler = StandardScaler()
    X = scaler.fit_transform(X)

    # Prepend a bias column of ones: shape (n_samples, 2).
    X_b = np.c_[np.ones((X.shape[0], 1)), X]

    iterations = 100
    theta, loss_history = gradient_descent(
        X_b, y, learning_rate=0.1, iterations=iterations
    )

    # Print before plt.show(): the figure window blocks, so printing last
    # hides the result until the window is closed.
    print("Final parameters:", theta)

    plt.plot(range(iterations), loss_history, color='blue')
    plt.xlabel("Iterations")
    plt.ylabel("Mean Squared Error")
    plt.title("Gradient Descent on Wine Data")
    plt.show()


if __name__ == "__main__":
    main()

0 commit comments

Comments (0)