# HG changeset patch
# User Jordi Gutiérrez Hermoso
# Date 1321850567 18000
# Node ID 9a9f76850dc6e422b15d5bd2da8e6a41e504d09c
# Parent 0f14514e907fa2f244026789eb69b38e755dd8ab
Implement linearRegCostFunction

diff --git a/linearRegCostFunction.m b/linearRegCostFunction.m
--- a/linearRegCostFunction.m
+++ b/linearRegCostFunction.m
@@ -1,37 +1,14 @@
 function [J, grad] = linearRegCostFunction(X, y, theta, lambda)
-%LINEARREGCOSTFUNCTION Compute cost and gradient for regularized linear
-%regression with multiple variables
-%   [J, grad] = LINEARREGCOSTFUNCTION(X, y, theta, lambda) computes the
-%   cost of using theta as the parameter for linear regression to fit the
-%   data points in X and y. Returns the cost in J and the gradient in grad
-
-% Initialize some useful values
-m = length(y); % number of training examples
-
-% You need to return the following variables correctly
-J = 0;
-grad = zeros(size(theta));
+  ##LINEARREGCOSTFUNCTION Compute cost and gradient for regularized linear
+  ##regression with multiple variables
+  ##   [J, grad] = LINEARREGCOSTFUNCTION(X, y, theta, lambda) computes the
+  ##   cost of using theta as the parameter for linear regression to fit the
+  ##   data points in X and y. Returns the cost in J and the gradient in grad
 
-% ====================== YOUR CODE HERE ======================
-% Instructions: Compute the cost and gradient of regularized linear
-%               regression for a particular choice of theta.
-%
-%               You should set J to the cost and grad to the gradient.
-%
-
-
-
-
-
+  m = length (y);
+  ht = X*theta;
+  J = (sumsq (ht - y) + lambda*sumsq (theta(2:end)))/(2*m);
 
-
-
-
-
-
+  grad = (X'*(ht - y) + [0; lambda*theta(2:end)])/m;
 
-% =========================================================================
-
-grad = grad(:);
-
-end
+endfunction
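
The new body vectorizes the standard regularized linear regression cost and
gradient, with the bias parameter theta(1) excluded from the penalty:

    J(\theta) = \frac{1}{2m}\Bigl[\sum_{i=1}^{m}\bigl(\theta^{\top}x^{(i)} - y^{(i)}\bigr)^{2} + \lambda\sum_{j=1}^{n}\theta_{j}^{2}\Bigr]

    \nabla_{\theta} J = \frac{1}{m}\Bigl(X^{\top}(X\theta - y) + \lambda\,[\,0;\ \theta_{1};\ \dots;\ \theta_{n}\,]\Bigr)

In the hunk, ht = X*theta evaluates the hypothesis on all rows at once,
sumsq (an Octave built-in returning the sum of squared elements) replaces the
explicit sums, and the [0; lambda*theta(2:end)] term is what leaves theta(1)
unregularized.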
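A minimal usage sketch, assuming linearRegCostFunction.m is on the Octave load
path; the data below is made up for illustration and is not from the course
exercise (note also that sumsq is Octave-specific and not available in MATLAB):

    X = [ones(3,1), (1:3)'];   # design matrix: bias column plus one feature
    y = [2; 4; 6];             # targets
    theta = [1; 1];            # current parameters
    lambda = 1;                # regularization strength
    [J, grad] = linearRegCostFunction (X, y, theta, lambda);
    # J is the scalar regularized cost (1 for this data);
    # grad is a column vector with the same shape as theta.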