add more gradient tests
jrevels committed Feb 12, 2016
1 parent d14c236 commit c0038b9
Showing 4 changed files with 130 additions and 15 deletions.
6 changes: 3 additions & 3 deletions src/derivative.jl
@@ -25,11 +25,11 @@ end
# derivative!/derivative #
##########################

-function derivative!(f, out::Array, x::Real, ::Type{Val{ALL}})
+function derivative!{ALL}(f, out::Array, x::Real, ::Type{Val{ALL}})
    return handle_deriv_result!(out, f(DiffNumber(x, one(x))), Val{ALL})
end

-function derivative(f, x::Real, ::Type{Val{ALL}})
+function derivative{ALL}(f, x::Real, ::Type{Val{ALL}})
    return handle_deriv_result(f(DiffNumber(x, one(x))), Val{ALL})
end

@@ -57,7 +57,7 @@ function handle_deriv_result(result::DiffNumber, ::Type{Val{true}})
    return value(result), partials(result, 1)
end

-function handle_deriv_result{T}(result::Array{T}, ::Type{Val{ALL}})
+function handle_deriv_result{T,ALL}(result::Array{T}, ::Type{Val{ALL}})
    return handle_deriv_result!(similar(result, numtype(T)), result, Val{ALL})
end

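Note on the derivative.jl change: in Julia 0.4's parametric-method syntax, a parameter used in a signature (here ALL in Val{ALL}) must be declared in the method's static-parameter list, as in derivative!{ALL}(...). Without that declaration, ALL is evaluated as a global binding when the method is defined, instead of being bound by dispatch. A minimal sketch of the pitfall, using a hypothetical describe function that is not part of this commit:

# Hypothetical example (Julia 0.4 syntax), not from this commit.
# describe(::Type{Val{B}}) = B    # UndefVarError: B is looked up as a global
describe{B}(::Type{Val{B}}) = B   # B is a static parameter bound by dispatch
describe(Val{true})               # returns true
describe(Val{false})              # returns false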
107 changes: 96 additions & 11 deletions test/GradientTest.jl
@@ -1,5 +1,9 @@
module GradientTest

+include(joinpath(Pkg.dir("ForwardDiff"), "test", "TestFuncs.jl"))
+
+import Calculus
+
using Base.Test
using ForwardDiff
using ForwardDiff: default_value, KWARG_DEFAULTS
@@ -8,22 +12,103 @@ using ForwardDiff: default_value, KWARG_DEFAULTS
# @gradient/@gradient! #
########################

-const ALL_DEFAULT = :(Val{$(default_value(KWARG_DEFAULTS, :all))})
-const CHUNK_DEFAULT = :(Val{$(default_value(KWARG_DEFAULTS, :chunk))})
-const INPUT_LENGTH_DEFAULT = :(Val{$(default_value(KWARG_DEFAULTS, :input_length))})
-const MULTITHREAD_DEFAULT = :(Val{$(default_value(KWARG_DEFAULTS, :multithread))})
-const OUTPUT_MUTATES_DEFAULT = :(Val{$(default_value(KWARG_DEFAULTS, :output_mutates))})
+const ALL = :(Val{$(default_value(KWARG_DEFAULTS, :all))})
+const CHUNK = :(Val{$(default_value(KWARG_DEFAULTS, :chunk))})
+const LEN = :(Val{$(default_value(KWARG_DEFAULTS, :input_length))})
+const MULTITHREAD = :(Val{$(default_value(KWARG_DEFAULTS, :multithread))})
+const MUTATES = :(Val{$(default_value(KWARG_DEFAULTS, :output_mutates))})

-@test macroexpand(:(ForwardDiff.@gradient(sin))) == :(ForwardDiff.gradient(sin, $ALL_DEFAULT, $CHUNK_DEFAULT, $INPUT_LENGTH_DEFAULT, $MULTITHREAD_DEFAULT, $OUTPUT_MUTATES_DEFAULT))
+@test macroexpand(:(ForwardDiff.@gradient(sin))) == :(ForwardDiff.gradient(sin, $ALL, $CHUNK, $LEN, $MULTITHREAD, $MUTATES))
@test macroexpand(:(ForwardDiff.@gradient(sin; output_mutates=1, all=2, multithread=3, chunk=4, input_length=5))) == :(ForwardDiff.gradient(sin, Val{2}, Val{4}, Val{5}, Val{3}, Val{1}))
-@test macroexpand(:(ForwardDiff.@gradient(sin, chunk=1, output_mutates=2))) == :(ForwardDiff.gradient(sin, $ALL_DEFAULT, Val{1}, $INPUT_LENGTH_DEFAULT, $MULTITHREAD_DEFAULT, Val{2}))
+@test macroexpand(:(ForwardDiff.@gradient(sin, chunk=1, output_mutates=2))) == :(ForwardDiff.gradient(sin, $ALL, Val{1}, $LEN, $MULTITHREAD, Val{2}))

-@test macroexpand(:(ForwardDiff.@gradient(sin, x))) == :(ForwardDiff.gradient(sin, x, $ALL_DEFAULT, $CHUNK_DEFAULT, $INPUT_LENGTH_DEFAULT, $MULTITHREAD_DEFAULT))
+@test macroexpand(:(ForwardDiff.@gradient(sin, x))) == :(ForwardDiff.gradient(sin, x, $ALL, $CHUNK, $LEN, $MULTITHREAD))
@test macroexpand(:(ForwardDiff.@gradient(sin, x, input_length=1, all=2, multithread=3, chunk=4))) == :(ForwardDiff.gradient(sin, x, Val{2}, Val{4}, Val{1}, Val{3}))
-@test macroexpand(:(ForwardDiff.@gradient(sin, x; chunk=1, multithread=2))) == :(ForwardDiff.gradient(sin, x, $ALL_DEFAULT, Val{1}, $INPUT_LENGTH_DEFAULT, Val{2}))
+@test macroexpand(:(ForwardDiff.@gradient(sin, x; chunk=1, multithread=2))) == :(ForwardDiff.gradient(sin, x, $ALL, Val{1}, $LEN, Val{2}))

-@test macroexpand(:(ForwardDiff.@gradient!(sin, output, x))) == :(ForwardDiff.gradient!(sin, output, x, $ALL_DEFAULT, $CHUNK_DEFAULT, $INPUT_LENGTH_DEFAULT, $MULTITHREAD_DEFAULT))
+@test macroexpand(:(ForwardDiff.@gradient!(sin, output, x))) == :(ForwardDiff.gradient!(sin, output, x, $ALL, $CHUNK, $LEN, $MULTITHREAD))
@test macroexpand(:(ForwardDiff.@gradient!(sin, output, x, input_length=1, all=2, multithread=3, chunk=4))) == :(ForwardDiff.gradient!(sin, output, x, Val{2}, Val{4}, Val{1}, Val{3}))
-@test macroexpand(:(ForwardDiff.@gradient!(sin, output, x; chunk=1, multithread=2))) == :(ForwardDiff.gradient!(sin, output, x, $ALL_DEFAULT, Val{1}, $INPUT_LENGTH_DEFAULT, Val{2}))
+@test macroexpand(:(ForwardDiff.@gradient!(sin, output, x; chunk=1, multithread=2))) == :(ForwardDiff.gradient!(sin, output, x, $ALL, Val{1}, $LEN, Val{2}))
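
These macroexpand tests pin down the expansion rule: keyword arguments are lifted into positional Val{...} type arguments in a fixed order (all, chunk, input_length, multithread, output_mutates; the forms taking x drop output_mutates), and unspecified keywords are filled in from KWARG_DEFAULTS. A minimal sketch of the dispatch pattern this enables, using a hypothetical chunked_length function rather than ForwardDiff's API:

# Hypothetical example (Julia 0.4 syntax): configuration travels in the type
# domain via Val{...}, so methods can specialize on each chunk size.
chunked_length{C}(x, ::Type{Val{C}}) = div(length(x), C)
chunked_length(rand(10), Val{2})  # 5
chunked_length(rand(10), Val{5})  # 2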

+##################
+# Test Functions #
+##################
+
+const XLEN = 10
+const X = rand(XLEN)
+const GRADEPS = 1e-6
+
+# There's going to be some approximation error, since we're testing
+# against a result calculated via finite difference.
+test_approx_grad(a::Array, b::Array) = @test_approx_eq_eps a b GRADEPS
+test_approx_grad(a::Number, b::Number) = @test_approx_eq_eps a b GRADEPS
+
+function test_approx_grad(a::Tuple, b::Tuple)
+    test_approx_grad(a[1], b[1])
+    test_approx_grad(a[2], b[2])
+end
+
+output() = similar(X)
+
+for f in TestFuncs.VECTOR_TO_SCALAR_FUNCS
+    result = f(X)
+    gradresult = Calculus.gradient(f, X)
+    for c in (default_value(KWARG_DEFAULTS, :chunk), div(XLEN, 2), div(XLEN, 2) + 1, XLEN)
+        ###################
+        # single-threaded #
+        ###################
+        # @gradient(f)
+        g1 = ForwardDiff.@gradient(f; chunk = c, input_length = XLEN)
+        g1! = ForwardDiff.@gradient(f; chunk = c, output_mutates = true)
+        g2 = ForwardDiff.@gradient(f; chunk = c, all = true)
+        g2! = ForwardDiff.@gradient(f; chunk = c, input_length = XLEN, all = true, output_mutates = true)
+        out1 = output()
+        out2 = output()
+        test_approx_grad(gradresult, g1(X))
+        test_approx_grad(gradresult, g1!(out1, X))
+        test_approx_grad(gradresult, out1)
+        test_approx_grad((result, gradresult), g2(X))
+        test_approx_grad((result, gradresult), g2!(out2, X))
+        test_approx_grad(gradresult, out2)
+        # @gradient(f, x)
+        test_approx_grad(gradresult, ForwardDiff.@gradient(f, X; chunk = c, input_length = XLEN))
+        test_approx_grad((result, gradresult), ForwardDiff.@gradient(f, X; chunk = c, all = true))
+        # @gradient!(f, out, x)
+        out3 = output()
+        out4 = output()
+        test_approx_grad(gradresult, ForwardDiff.@gradient!(f, out3, X; chunk = c, input_length = XLEN))
+        test_approx_grad(gradresult, out3)
+        test_approx_grad((result, gradresult), ForwardDiff.@gradient!(f, out4, X; chunk = c, all = true))
+        test_approx_grad(gradresult, out4)
+        if ForwardDiff.IS_MULTITHREADED_JULIA
+            #################
+            # multithreaded #
+            #################
+            # @gradient(f)
+            g1 = ForwardDiff.@gradient(f; multithread = true, chunk = c, input_length = XLEN)
+            g1! = ForwardDiff.@gradient(f; multithread = true, chunk = c, output_mutates = true)
+            g2 = ForwardDiff.@gradient(f; multithread = true, chunk = c, all = true)
+            g2! = ForwardDiff.@gradient(f; multithread = true, chunk = c, input_length = XLEN, all = true, output_mutates = true)
+            out1 = output()
+            out2 = output()
+            test_approx_grad(gradresult, g1(X))
+            test_approx_grad(gradresult, g1!(out1, X))
+            test_approx_grad(gradresult, out1)
+            test_approx_grad((result, gradresult), g2(X))
+            test_approx_grad((result, gradresult), g2!(out2, X))
+            test_approx_grad(gradresult, out2)
+            # @gradient(f, x)
+            test_approx_grad(gradresult, ForwardDiff.@gradient(f, X; multithread = true, chunk = c, input_length = XLEN))
+            test_approx_grad((result, gradresult), ForwardDiff.@gradient(f, X; multithread = true, chunk = c, all = true))
+            # @gradient!(f, out, x)
+            out3 = output()
+            out4 = output()
+            test_approx_grad(gradresult, ForwardDiff.@gradient!(f, out3, X; multithread = true, chunk = c, input_length = XLEN))
+            test_approx_grad(gradresult, out3)
+            test_approx_grad((result, gradresult), ForwardDiff.@gradient!(f, out4, X; multithread = true, chunk = c, all = true))
+            test_approx_grad(gradresult, out4)
+        end
+    end
+end

end # module
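
A note on the tolerance: the test loop above checks ForwardDiff's dual-number gradients against Calculus.gradient's finite-difference results, and the reference itself carries truncation error, hence the loose GRADEPS = 1e-6 tolerance rather than exact equality. A rough sketch of what a central-difference reference computes (illustrative; not Calculus.jl's actual implementation):

# Illustrative central-difference gradient; the step size h is an assumption.
function fd_gradient(f, x::Vector{Float64}, h = 1e-8)
    g = similar(x)
    for i in eachindex(x)
        xp, xm = copy(x), copy(x)
        xp[i] += h
        xm[i] -= h
        g[i] = (f(xp) - f(xm)) / (2h)  # truncation error is O(h^2)
    end
    return g
end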
30 changes: 30 additions & 0 deletions test/TestFuncs.jl
@@ -0,0 +1,30 @@
+module TestFuncs
+
+@noinline function rosenbrock(x::AbstractVector)
+    a = one(eltype(x))
+    b = 100 * a
+    result = zero(eltype(x))
+    for i in 1:length(x)-1
+        result += (a - x[i])^2 + b*(x[i+1] - x[i]^2)^2
+    end
+    return result
+end
+
+@noinline function ackley(x::AbstractVector)
+    a, b, c = 20.0, -0.2, 2.0*π
+    len_recip = inv(length(x))
+    sum_sqrs = zero(eltype(x))
+    sum_cos = sum_sqrs
+    for i in x
+        sum_cos += cos(c*i)
+        sum_sqrs += i^2
+    end
+    return (-a * exp(b * sqrt(len_recip*sum_sqrs)) -
+            exp(len_recip*sum_cos) + a + e)
+end
+
+@noinline self_weighted_logit(x::AbstractVector) = inv(1.0 + exp(-dot(x, x)))
+
+const VECTOR_TO_SCALAR_FUNCS = (rosenbrock, ackley, self_weighted_logit)
+
+end # module
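
The new functions are ordinary vector-to-scalar Julia functions, so they can also be exercised directly against the macro API tested above (illustrative usage, not part of the commit):

# Illustrative usage; assumes ForwardDiff is loaded and TestFuncs is included.
include("TestFuncs.jl")
g = ForwardDiff.@gradient(TestFuncs.rosenbrock; chunk = 5, input_length = 10)
g(rand(10))                 # 10-element gradient vector
TestFuncs.ackley(zeros(5))  # ≈ 0.0 at the global minimum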
2 changes: 1 addition & 1 deletion test/runtests.jl
@@ -10,7 +10,7 @@ tic()
include("DiffNumberTest.jl")
println("done (took $(toq()) seconds).")

print("Testing Gradient-related functionality...")
print("Testing gradient-related functionality...")
tic()
include("GradientTest.jl")
println("done (took $(toq()) seconds).")
