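# Multivariate linear regression: fit y_actual as a linear function of the two
# features x1 and x2 using batch gradient descent.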
# Input data
x1 = [50, 60, 70, 80, 90]
x2 = [20, 21, 22, 23, 24]
y_actual = [30, 35, 40, 45, 50]
# Learning rate and maximum number of iterations
# (a very small learning rate is used because x1 and x2 are not feature-scaled;
#  larger steps make the parameter updates grow without bound and overflow)
alpha = 0.0001
max_iters = 1000
# Initial values for Theta0, Theta1 and Theta2
Theta0 = 0
Theta1 = 0
Theta2 = 0
# Start the iteration counter
iter_count = 0
# Loop until convergence or maximum number of iterations
while iter_count < max_iters:
    # Compute the predicted output
    y_pred = [Theta0 + Theta1 * x1[i] + Theta2 * x2[i] for i in range(len(x1))]
    # Compute the errors
    errors = [y_pred[i] - y_actual[i] for i in range(len(x1))]
    # Update Theta0, Theta1 and Theta2
    Theta0 -= alpha * sum(errors) / len(x1)
    Theta1 -= alpha * sum([errors[i] * x1[i] for i in range(len(x1))]) / len(x1)
    Theta2 -= alpha * sum([errors[i] * x2[i] for i in range(len(x2))]) / len(x2)
    # Compute the cost function
    cost = sum([(y_pred[i] - y_actual[i]) ** 2 for i in range(len(x1))]) / (2 * len(x1))
    # Print the cost function every 100 iterations
    if iter_count % 100 == 0:
        print("Iteration {}: Cost = {}, Theta0 = {}, Theta1 = {}, Theta2 = {}".format(iter_count, cost, Theta0, Theta1, Theta2))
    # Check for convergence (if the cost is decreasing by less than 0.0001)
    if iter_count > 0 and abs(cost - prev_cost) < 0.0001:
        print("Converged after {} iterations".format(iter_count))
        break
    # Update the iteration counter and previous cost
    iter_count += 1
    prev_cost = cost

# Print the final values of Theta0, Theta1 and Theta2
print("Final values: Theta0 = {}, Theta1 = {}, Theta2 = {}".format(Theta0, Theta1, Theta2))
print("Final Cost: Cost = {}".format(cost))
print("Final values: y_pred = {}, y_actual = {}".format(y_pred, y_actual))