-
Notifications
You must be signed in to change notification settings - Fork 26
/
regression_test.go
134 lines (114 loc) · 3.4 KB
/
regression_test.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
package ml
import (
	"fmt"
	"math"
	"testing"
)
// TestLinearRegCostFunction uses a predefined dataset stored in
// test_data/test_linear.dat to calculate the cost and gradient for
// different combinations of lambda and theta, comparing the results
// against precomputed expected values.
func TestLinearRegCostFunction(t *testing.T) {
	fmt.Println("Testing Linear Regression cost function...")

	// Tolerance for float64 comparisons: the reference values were computed
	// once, and exact equality on floats is fragile across platforms.
	const epsilon = 1e-9

	// Data obtained from the "Andrew Ng" machine learning course from Coursera
	// https://www.coursera.org/course/ml
	data := LoadFile("test_data/test_linear.dat")
	data.LinearReg = true

	// Parallel slices: index i across theta/lambda/expectedJ/expectedGrad
	// forms one test case.
	theta := [][]float64{
		{1, 1},
		{13.08790, 0.36778},
	}
	lambda := []float64{
		1,
		0,
	}
	expectedJ := []float64{
		303.9937949908333,
		22.3738964254415,
	}
	expectedGrad := [][]float64{
		{-15.219716666666665, 598.251293035},
		{-3.2069500000601416e-05, 0.0009341162152152194},
	}

	for test := 0; test < len(expectedGrad); test++ {
		data.Theta = theta[test]
		j, grad, err := data.CostFunction(lambda[test], true)
		if err != nil {
			// Fatal, not Error: grad is not usable after a failure and
			// indexing it below would panic.
			t.Fatal("The LinearRegCostFunction returned an error:", err)
		}
		if math.Abs(j-expectedJ[test]) > epsilon {
			t.Error("The expected value for J is:", expectedJ[test], "but the returned value is:", j)
		}
		for i := 0; i < len(grad[0][0]); i++ {
			if math.Abs(grad[0][0][i]-expectedGrad[test][i]) > epsilon {
				t.Error("The expected gradient is:", expectedGrad[test][i], "but the returned value is:", grad[0][0][i])
			}
		}
	}
}
// TestCalculateOptimumTheta runs Fmincg on a small hard-coded linear
// regression dataset and verifies that the optimizer converges to the
// known optimum cost and theta values.
func TestCalculateOptimumTheta(t *testing.T) {
	fmt.Println("Testing Linear Regression Fmincg function...")

	// Tolerance for float64 comparisons: exact equality on optimizer
	// output is fragile across platforms.
	const epsilon = 1e-9

	// Data obtained from the "Andrew Ng" machine learning course from Coursera
	// https://www.coursera.org/course/ml
	data := &Regression{
		LinearReg: true,
		X: [][]float64{
			{1.0000, -15.9368},
			{1.0000, -29.1530},
			{1.0000, 36.1895},
			{1.0000, 37.4922},
			{1.0000, -48.0588},
			{1.0000, -8.9415},
			{1.0000, 15.3078},
			{1.0000, -34.7063},
			{1.0000, 1.3892},
			{1.0000, -44.3838},
			{1.0000, 7.0135},
			{1.0000, 22.7627},
		},
		Y: []float64{
			2.1343,
			1.1733,
			34.3591,
			36.8380,
			2.8090,
			2.1211,
			14.7103,
			2.6142,
			3.7402,
			3.7317,
			7.6277,
			22.7524,
		},
	}

	data.InitializeTheta()
	Fmincg(data, 0.0, 10, true)

	// Final unregularized cost and fitted theta must match the known optimum.
	j, _, _ := data.CostFunction(0.0, false)
	if math.Abs(j-22.373896424566116) > epsilon {
		t.Error("The expected cost value is 22.373896424566116, but:", j, "obtained")
	}
	if math.Abs(data.Theta[0]-13.087927305447673) > epsilon || math.Abs(data.Theta[1]-0.3677790632076952) > epsilon {
		t.Error("The expected theta values are 13.087927305447673 and 0.3677790632076952, but:", data.Theta, "obtained")
	}
}
// TestLogisticHipotesis evaluates the logistic hypothesis for a fixed
// theta on a single sample and compares it against a precomputed value.
func TestLogisticHipotesis(t *testing.T) {
	fmt.Println("Testing Logistic Regression Hipotesis...")

	// Tolerance for float64 comparison: exact equality on floats is fragile.
	const epsilon = 1e-9

	data := &Regression{
		Theta:     []float64{-25.161272, 0.206233, 0.201470},
		LinearReg: false,
	}

	h := data.LogisticHipotesis([]float64{1, 45, 85})
	if math.Abs(h-0.7762878133064746) > epsilon {
		t.Error("The expected value is 0.7762878133064746, but the returned value is:", h)
	}
}
// TestCalculateOptimumDataLogRegWithPrepare loads a polynomial dataset,
// optimizes a logistic regression with Fmincg, and verifies the final
// regularized cost stays below the expected bound.
func TestCalculateOptimumDataLogRegWithPrepare(t *testing.T) {
	fmt.Println("Testing Logistic Regression Fmincg function...")

	// Data obtained from the "Andrew Ng" machine learning course from Coursera
	// https://www.coursera.org/course/ml
	data := LoadFile("test_data/data_pol.txt")
	data.LinearReg = false
	data.InitializeTheta()
	Fmincg(data, 1.0, 200, true)

	// Only an upper bound on the final cost is asserted.
	// NOTE: the bound in the message now matches the condition (was 0.529003
	// in the message vs 0.529004 in the check).
	j, _, _ := data.CostFunction(1, false)
	if j > 0.529004 {
		t.Error("The expected cost is lower than: 0.529004 but the returned value is:", j)
	}
}