costFunctionReg.m
function [J, grad] = costFunctionReg(theta, X, y, lambda)
%COSTFUNCTIONREG Compute cost and gradient for logistic regression with regularization
% J = COSTFUNCTIONREG(theta, X, y, lambda) computes the cost of using
% theta as the parameter for regularized logistic regression and the
%   gradient of the cost w.r.t. the parameters.
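%
%   For reference (restating what the code below computes), with
%   h = sigmoid(X * theta) and m training examples:
%
%     J(theta) = (1/m) * sum( -y .* log(h) - (1 - y) .* log(1 - h) )
%                + (lambda / (2*m)) * sum( theta(2:end) .^ 2 )
%
%     grad(1) = (1/m) * sum( (h - y) .* X(:,1) )
%     grad(j) = (1/m) * sum( (h - y) .* X(:,j) ) + (lambda/m) * theta(j)   for j >= 2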
% Compute the regularized cost and gradient using the helper functions
% defined below; the bias parameter theta(1) is not regularized.
J = logisticRegressionRegularizedCost(theta, X, y, lambda);
grad = regularizedGradientVector(theta, X, y, lambda);

function J = logisticRegressionRegularizedCost(theta, X, y, lambda)
  % Vectorized regularized logistic regression cost.
  trainingExamples = length(y);
  estimatedResults = sigmoid(X * theta);
  J = (-1 / trainingExamples) * ...
      (y' * log(estimatedResults) + (1 - y)' * log(1 - estimatedResults)) ...
      + (lambda / (2 * trainingExamples)) * (sum(theta .^ 2) - theta(1) ^ 2);
endfunction

function gradient = gradientVector(theta, X, y)
  % Unregularized gradient of the logistic regression cost.
  trainingExamples = length(y);
  gradient = (1 / trainingExamples) * (X' * (sigmoid(X * theta) - y));
endfunction

function gradient = regularizedGradientVector(theta, X, y, lambda)
  % Add the regularization term to every component except the bias term.
  trainingExamples = length(y);
  gradient = gradientVector(theta, X, y);
  regularizationTerm = (lambda / trainingExamples) * theta;
  regularizationTerm(1) = 0;
  gradient += regularizationTerm;
endfunction
end
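
Usage sketch: the driver below is an assumption about the surrounding exercise code, not part of this file. It assumes sigmoid.m is on the path and that X already includes the leading column of ones, and shows how costFunctionReg might be handed to fminunc.

% Hypothetical driver script (not part of costFunctionReg.m).
initial_theta = zeros(size(X, 2), 1);   % one parameter per feature, incl. intercept
lambda = 1;                             % regularization strength (example value)

% Evaluate cost and gradient at the starting point.
[cost, grad] = costFunctionReg(initial_theta, X, y, lambda);

% Minimize the regularized cost, supplying the analytic gradient.
options = optimset('GradObj', 'on', 'MaxIter', 400);
[theta, cost] = fminunc(@(t) costFunctionReg(t, X, y, lambda), initial_theta, options);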