Date: 2014-05-16
function J = computeCost(X, y, theta)
% Compute the squared-error cost for linear regression.
m = length(y);                     % number of training examples
predictions = X * theta;           % hypothesis values h_theta(x) for all examples
J = 1/(2*m) * (predictions - y)' * (predictions - y); % vectorized sum of squared residuals
end
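For reference, the vectorized expression above is the standard squared-error cost for linear regression; the matrix product (predictions - y)'*(predictions - y) is simply the sum of squared residuals written without an explicit loop:

$$J(\theta) = \frac{1}{2m}\sum_{i=1}^{m}\bigl(h_\theta(x^{(i)}) - y^{(i)}\bigr)^2, \qquad h_\theta(x) = \theta^{T}x.$$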
function [theta, J_history] = gradientDescent(X, y, theta, alpha, num_iters)
% X is an m*(n+1) matrix, y is m*1, theta is (n+1)*1,
% alpha is the learning rate, num_iters is the number of iterations.
m = length(y);                     % number of training examples
J_history = zeros(num_iters, 1);   % records how the cost changes over the run
% the number of iterations is fixed in advance
for iter = 1:num_iters
    % compute both updates from the current theta, then assign them
    % together, so neither update sees a half-updated theta
    temp1 = theta(1) - (alpha / m) * sum((X * theta - y) .* X(:,1));
    temp2 = theta(2) - (alpha / m) * sum((X * theta - y) .* X(:,2));
    theta(1) = temp1;
    theta(2) = temp2;
    J_history(iter) = computeCost(X, y, theta);
end
end
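Because the loop body above writes out theta(1) and theta(2) by hand, it only works for a single feature plus the intercept. A minimal vectorized sketch of the same update that handles any number of features (same math in matrix form; the function name is mine, not from the post):

function [theta, J_history] = gradientDescentVec(X, y, theta, alpha, num_iters)
% Vectorized gradient descent: one matrix expression updates every
% component of theta at once, so it works for any (n+1)*1 theta.
m = length(y);
J_history = zeros(num_iters, 1);
for iter = 1:num_iters
    % X' * (X*theta - y) stacks the per-feature gradient sums in one step
    theta = theta - (alpha / m) * (X' * (X * theta - y));
    J_history(iter) = computeCost(X, y, theta);
end
end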
A sample of the training data (each line: x, y):

6.1101,17.592
5.5277,9.1302
8.5186,13.662
7.0032,11.854
5.8598,6.8233
8.3829,11.886
........
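To tie the pieces together, a small driver sketch. I assume the data above is saved as ex1data1.txt (the filename used in the Coursera exercise this appears to follow); the learning rate and iteration count are typical choices for this dataset, not values stated in the post:

% Load the x,y pairs shown above (assumed file name: ex1data1.txt)
data = load('ex1data1.txt');
X = data(:, 1);            % input feature
y = data(:, 2);            % target value
m = length(y);

X = [ones(m, 1), X];       % prepend the intercept column, giving an m*(n+1) matrix
theta = zeros(2, 1);       % initial parameters

alpha = 0.01;              % learning rate (assumed, typical for this exercise)
num_iters = 1500;          % iteration count (assumed, typical for this exercise)

[theta, J_history] = gradientDescent(X, y, theta, alpha, num_iters);
fprintf('Learned theta: %f %f\n', theta(1), theta(2));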