durrani committed
Commit 0ef611f · 1 Parent(s): 8b0bd94
Files changed (1)
  1. app.py +30 -19
app.py CHANGED
@@ -1,16 +1,18 @@
+import torch
+
 # Input data
-x1 = [50, 60, 70, 80, 90]
-x2 = [20, 21, 22, 23, 24]
-y_actual = [30, 35, 40, 45, 50]
+x1 = torch.tensor([50, 60, 70, 80, 90])
+x2 = torch.tensor([20, 21, 22, 23, 24])
+y_actual = torch.tensor([30, 35, 40, 45, 50])
 
 # Learning rate and maximum number of iterations
 alpha = 0.01
 max_iters = 1000
 
 # Initial values for Theta0, Theta1, and Theta2
-Theta0 = 0
-Theta1 = 0
-Theta2 = 0
+Theta0 = torch.tensor(0.0, requires_grad=True)
+Theta1 = torch.tensor(0.0, requires_grad=True)
+Theta2 = torch.tensor(0.0, requires_grad=True)
 
 # Start the iteration counter
 iter_count = 0
@@ -18,34 +20,43 @@ iter_count = 0
 # Loop until convergence or maximum number of iterations
 while iter_count < max_iters:
     # Compute the predicted output
-    y_pred = [Theta0 + Theta1 * x1[i] + Theta2 * x2[i] for i in range(len(x1))]
+    y_pred = Theta0 + Theta1 * x1 + Theta2 * x2
 
     # Compute the errors
-    errors = [y_pred[i] - y_actual[i] for i in range(len(x1))]
-
-    # Update Theta0, Theta1, and Theta2
-    Theta0 -= alpha * sum(errors) / len(x1)
-    Theta1 -= alpha * sum([errors[i] * x1[i] for i in range(len(x1))]) / len(x1)
-    Theta2 -= alpha * sum([errors[i] * x2[i] for i in range(len(x2))]) / len(x2)
+    errors = y_pred - y_actual
 
     # Compute the cost function
-    cost = sum([(y_pred[i] - y_actual[i]) ** 2 for i in range(len(x1))]) / (2 * len(x1))
+    cost = torch.sum(errors ** 2) / (2 * len(x1))
 
     # Print the cost function every 100 iterations
     if iter_count % 100 == 0:
-        print("Iteration {}: Cost = {}, Theta0 = {}, Theta1 = {}, Theta2 = {}".format(iter_count, cost, Theta0, Theta1,
-                                                                                      Theta2))
+        print("Iteration {}: Cost = {}, Theta0 = {}, Theta1 = {}, Theta2 = {}".format(iter_count, cost, Theta0.item(), Theta1.item(),
+                                                                                      Theta2.item()))
 
     # Check for convergence (if the cost is decreasing by less than 0.0001)
-    if iter_count > 0 and abs(cost - prev_cost) < 0.0001:
+    if iter_count > 0 and torch.abs(cost - prev_cost) < 0.0001:
         print("Converged after {} iterations".format(iter_count))
         break
 
+    # Perform automatic differentiation to compute gradients
+    cost.backward()
+
+    # Update Theta0, Theta1, and Theta2 using gradient descent
+    with torch.no_grad():
+        Theta0 -= alpha * Theta0.grad
+        Theta1 -= alpha * Theta1.grad
+        Theta2 -= alpha * Theta2.grad
+
+        # Reset gradients for the next iteration
+        Theta0.grad.zero_()
+        Theta1.grad.zero_()
+        Theta2.grad.zero_()
+
     # Update the iteration counter and previous cost
     iter_count += 1
    prev_cost = cost
 
 # Print the final values of Theta0, Theta1, and Theta2
-print("Final values: Theta0 = {}, Theta1 = {}, Theta2 = {}".format(Theta0, Theta1, Theta2))
-print("Final Cost: Cost = {}".format(cost))
+print("Final values: Theta0 = {}, Theta1 = {}, Theta2 = {}".format(Theta0.item(), Theta1.item(), Theta2.item()))
+print("Final Cost: Cost = {}".format(cost.item()))
 print("Final values: y_pred = {}, y_actual = {}".format(y_pred, y_actual))