Commit 91cbc22

Refactor function parameters and improve logging format in gradient descent implementations
Merge commit 91cbc22 with 2 parents: d868aba + e879c47

2 files changed: 2 additions & 7 deletions

machine_learning/linear_regression_naive.py

Lines changed: 1 addition & 5 deletions
@@ -41,11 +41,7 @@ def collect_dataset() -> np.ndarray:
 
 
 def run_steep_gradient_descent(
-    data_x: np.ndarray,
-    data_y: np.ndarray,
-    len_data: int,
-    alpha: float,
-    theta: np.ndarray
+    data_x: np.ndarray, data_y: np.ndarray, len_data: int, alpha: float, theta: np.ndarray
 ) -> np.ndarray:
     """Run one step of steep gradient descent.
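For context, a minimal sketch of what a single steepest-descent step with this signature could look like. The body below is an assumption based only on the parameter names visible in the diff, not the file's actual implementation; data_x is assumed to have shape (m, n), data_y shape (m,), and theta shape (1, n).

import numpy as np


def run_steep_gradient_descent(
    data_x: np.ndarray, data_y: np.ndarray, len_data: int, alpha: float, theta: np.ndarray
) -> np.ndarray:
    """One steepest-descent update for linear regression (illustrative sketch)."""
    # Predictions for every sample: theta (1, n) times data_x.T (n, m) -> (1, m)
    prod = np.dot(theta, data_x.transpose())
    # Residuals between predictions and targets
    prod -= data_y.transpose()
    # Gradient of the squared-error cost with respect to theta
    sum_grad = np.dot(prod, data_x)
    # Move against the gradient, scaled by the learning rate and dataset size
    return theta - (alpha / len_data) * sum_grad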

machine_learning/linear_regression_vectorized.py

Lines changed: 1 addition & 2 deletions
@@ -61,7 +61,7 @@ def gradient_descent(
 
         if i % (iterations // 10) == 0:  # log occasionally
             cost = np.sum(errors**2) / (2 * m)
-            print(f"Iteration {i+1}: Error = {cost:.5f}")
+            print(f"Iteration {i + 1}: Error = {cost:.5f}")
 
     return theta

@@ -96,4 +96,3 @@ def main() -> None:
 
     doctest.testmod()
     main()
-
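The change in this file only reformats the logging line inside gradient_descent (and drops a trailing blank line). For context, a minimal self-contained sketch of the kind of vectorized loop such a logging block sits in; apart from the lines visible in the diff, the parameter names and loop body are assumptions, not the file's actual code.

import numpy as np


def gradient_descent(
    x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float, iterations: int
) -> np.ndarray:
    """Vectorized batch gradient descent (illustrative sketch)."""
    m = len(y)
    for i in range(iterations):
        predictions = x @ theta                        # hypotheses for all samples at once
        errors = predictions - y                       # residuals
        theta = theta - (alpha / m) * (x.T @ errors)   # vectorized parameter update

        if i % (iterations // 10) == 0:  # log occasionally (assumes iterations >= 10)
            cost = np.sum(errors**2) / (2 * m)
            print(f"Iteration {i + 1}: Error = {cost:.5f}")

    return theta

With x of shape (m, n), y of shape (m,), and theta of shape (n,), every operation in the loop broadcasts cleanly, and the cost logged each tenth of the run is the usual mean squared error divided by two.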
