diff --git a/Module 2/Linear Regression.ipynb b/Module 2/Linear Regression.ipynb index 4652c5d..b64dec6 100644 --- a/Module 2/Linear Regression.ipynb +++ b/Module 2/Linear Regression.ipynb @@ -149,6 +149,15 @@ " cost = 0\n", " # ====================== YOUR CODE HERE ======================\n", " # Instructions: Compute the cost of a particular choice of theta\n", + ... + h = np.dot(X,theta) + for i in range(m): + k = (h[i]-y[i])**2 + cost = cost +k + cost = cost / (2*m) + ... + + " \n", " \n", " \n", @@ -183,14 +192,22 @@ "source": [ "def gradientDescent(X, y, theta, alpha, iterations):\n", " m = len(X) \n", + ... + y=y.reshape((100,1)) + ... " for i in range(len(X)):\n", " # ====================== YOUR CODE HERE ======================\n", " # Instructions: Perform a single gradient step on the parameter vector theta. \n", + ... + theta[0] = theta[0] - alpha * (1/m) * sum(h - y) + theta[1] = theta[1] -alpha * (1/m) *sum(np.multiply((h-y),X_init)) + ... " \n", " \n", " \n", " \n", " # ============================================================\n", + # NOTE: I changed the range of the loop; it now uses "iterations" instead of "m" " return theta\n", "\n", "theta = gradientDescent(X, y, theta, alpha, iterations)\n",