## Machine Learning Online Class - Exercise 1: Linear Regression
# R Instructions
# ------------
#
# This file contains code that helps you get started on the
# linear regression exercise. You will need to complete the following functions
# in this exercise:
#
#   warmUpExercise.R
#   plotData.R
#   gradientDescent.R
#   computeCost.R
#   gradientDescentMulti.R
#   computeCostMulti.R
#   featureNormalize.R
#   normalEqn.R
#
# For this exercise, you will not need to change any code in this file,
# or any files other than those mentioned above.
#
# x refers to the population size in 10,000s
# y refers to the profit in $10,000s
#
## Initialization
rm(list=ls()) # clear all variables and functions from memory
## ==================== Part 1: Basic Function ====================
# Complete warmUpExercise.R
cat('Running warmUpExercise ... \n5x5 Identity Matrix: \n')
source('warmUpExercise.R')
warmUpExercise()
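# Illustrative sketch only (an assumption, not the graded warmUpExercise.R):
# the expected result is the 5x5 identity matrix, which base R builds with
# diag(); `identity_demo` is a placeholder name used purely for illustration.
identity_demo <- diag(5)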
invisible(readline(prompt = 'Program paused. Press [enter] to continue.'))
## ======================= Part 2: Plotting =======================
cat('Plotting Data ...\n')
data <- read.csv('ex1data1.txt', header = FALSE) # comma-separated: population, profit
X <- data[, 1]; y <- data[, 2]
m <- length(y) # number of training examples

# Plot Data
# Note: You have to complete the code in plotData.R (a sketch of the idea follows)
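# Illustrative sketch only (an assumption, not the graded plotData.R):
# `plot_data_sketch` is a placeholder name; the axis labels follow the
# variable descriptions in the header comment above.
plot_data_sketch <- function(x, y) {
  plot(x, y, col = 'red', pch = 4,
       xlab = 'Population of City in 10,000s',
       ylab = 'Profit in $10,000s')
}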
source('plotData.R')
plotData(X, y)

invisible(readline(prompt = 'Program paused. Press [enter] to continue.'))
## =================== Part 3: Gradient descent ===================
cat('Running Gradient Descent ...\n')
X <- cbind(rep(1, m), data[, 1]) # Add a column of ones to x
theta <- rep(0, 2)               # initialize fitting parameters

# Some gradient descent settings
iterations <- 1500
alpha <- 0.01

# compute and display initial cost
source('computeCost.R')
print(computeCost(X, y, theta))
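# Illustrative sketch only (an assumption, not the graded computeCost.R):
# the cost being computed is J(theta) = 1/(2m) * sum((X %*% theta - y)^2);
# `demo_cost` is a placeholder name used purely for illustration.
demo_cost <- function(X, y, theta) {
  m <- length(y)
  sum((X %*% theta - y)^2) / (2 * m)
}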
# run gradient descent
source('gradientDescent.R')
theta <- gradientDescent(X, y, theta, alpha, iterations)

# print theta to screen
cat(sprintf('Theta found by gradient descent: %f %f \n', theta[1], theta[2]))
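# Illustrative sketch only (an assumption, not the graded gradientDescent.R):
# batch gradient descent repeatedly applies the update
#   theta <- theta - alpha/m * t(X) %*% (X %*% theta - y)
# `demo_gradient_descent` is a placeholder name used purely for illustration.
demo_gradient_descent <- function(X, y, theta, alpha, num_iters) {
  m <- length(y)
  for (i in seq_len(num_iters)) {
    theta <- theta - (alpha / m) * (t(X) %*% (X %*% theta - y))
  }
  theta
}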
# Plot the linear fit on top of the training data
# (lines() adds to the existing plot, like Octave's 'hold on';
#  the legend symbols assume plotData drew red 'x' markers)
lines(X[, 2], X %*% theta, col = 'blue')
legend('bottomright', legend = c('Training data', 'Linear regression'),
       pch = c(4, NA), lty = c(NA, 1), col = c('red', 'blue'))
# Predict values for population sizes of 35,000 and 70,000
predict1 <- c(1, 3.5) %*% theta
cat(sprintf('For population = 35,000, we predict a profit of %f\n',
            predict1 * 10000))
predict2 <- c(1, 7) %*% theta
cat(sprintf('For population = 70,000, we predict a profit of %f\n',
            predict2 * 10000))

invisible(readline(prompt = 'Program paused. Press [enter] to continue.'))
## ============= Part 4: Visualizing J(theta_0, theta_1) =============
cat('Visualizing J(theta_0, theta_1) ...\n')

# Grid over which we will calculate J
theta0_vals <- seq(-10, 10, length.out = 100)
theta1_vals <- seq(-1, 4, length.out = 100)

# initialize J_vals to a matrix of 0's
J_vals <- matrix(0, length(theta0_vals), length(theta1_vals))

# Fill out J_vals
for (i in seq_along(theta0_vals)) {
  for (j in seq_along(theta1_vals)) {
    t_ij <- c(theta0_vals[i], theta1_vals[j])
    J_vals[i, j] <- computeCost(X, y, t_ij)
  }
}
# J_vals[i, j] already corresponds to (theta0_vals[i], theta1_vals[j]), which is
# the orientation persp() and contour() expect, so no transpose is needed here
# (unlike the Octave version, which transposes J_vals before calling surf).
## Surface plot
dev.new() # open a new plotting window, like Octave's 'figure'
persp(theta0_vals, theta1_vals, J_vals,
      xlab = 'theta_0', ylab = 'theta_1', zlab = 'J',
      theta = 30, phi = 30)
## Contour plot
dev.new() # open a new plotting window, like Octave's 'figure'
# Plot J_vals as 20 contours spaced logarithmically between 0.01 and 1000
contour(theta0_vals, theta1_vals, J_vals,
        levels = 10^seq(-2, 3, length.out = 20),
        xlab = 'theta_0', ylab = 'theta_1')
# mark the theta found by gradient descent with a red 'x'
points(theta[1], theta[2], pch = 4, col = 'red', lwd = 2, cex = 2)
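# Illustrative cross-check only (an assumption, not the graded normalEqn.R
# listed in the header, which belongs to the multi-variable part of the
# exercise): the closed-form least-squares solution should land near the
# gradient-descent theta marked on the contour plot above.
theta_closed_form <- solve(t(X) %*% X, t(X) %*% y)
cat(sprintf('Theta from the normal equation: %f %f \n',
            theta_closed_form[1], theta_closed_form[2]))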