From d6d530a21ec1aea4a26717d84d873f07b3bd5897 Mon Sep 17 00:00:00 2001
From: ArnoStrouwen
Date: Sun, 13 Nov 2022 21:12:41 +0100
Subject: [PATCH] remove scalar->vector gradients

---
 src/gradients.jl        | 60 -----------------------------------------
 test/finitedifftests.jl | 49 ---------------------------------
 2 files changed, 109 deletions(-)

diff --git a/src/gradients.jl b/src/gradients.jl
index 3ed61fb..69cf5de 100644
--- a/src/gradients.jl
+++ b/src/gradients.jl
@@ -352,63 +352,3 @@ function finite_difference_gradient!(
     end
     df
 end
-
-# vector of derivatives of a scalar->vector map
-# this is effectively a vector of partial derivatives, but we still call it a gradient
-function finite_difference_gradient!(
-    df,
-    f,
-    x::Number,
-    cache::GradientCache{T1,T2,T3,T4,fdtype,returntype,inplace};
-    relstep=default_relstep(fdtype, eltype(x)),
-    absstep=relstep,
-    dir=true) where {T1,T2,T3,T4,fdtype,returntype,inplace}
-
-    # NOTE: in this case epsilon is a scalar, we need two arrays for fx1 and fx2
-    # c1 denotes fx1, c2 is fx2, sizes guaranteed by the cache constructor
-    fx, c1, c2 = cache.fx, cache.c1, cache.c2
-
-    if inplace == Val(true)
-        _c1, _c2 = c1, c2
-    end
-
-    if fdtype == Val(:forward)
-        epsilon = compute_epsilon(Val(:forward), x, relstep, absstep, dir)
-        if inplace == Val(true)
-            f(c1, x + epsilon)
-        else
-            _c1 = f(x + epsilon)
-        end
-        if typeof(fx) != Nothing
-            @. df = (_c1 - fx) / epsilon
-        else
-            if inplace == Val(true)
-                f(c2, x)
-            else
-                _c2 = f(x)
-            end
-            @. df = (_c1 - _c2) / epsilon
-        end
-    elseif fdtype == Val(:central)
-        epsilon = compute_epsilon(Val(:central), x, relstep, absstep, dir)
-        if inplace == Val(true)
-            f(c1, x + epsilon)
-            f(c2, x - epsilon)
-        else
-            _c1 = f(x + epsilon)
-            _c2 = f(x - epsilon)
-        end
-        @. df = (_c1 - _c2) / (2 * epsilon)
-    elseif fdtype == Val(:complex) && returntype <: Real
-        epsilon_complex = eps(real(eltype(x)))
-        if inplace == Val(true)
-            f(c1, x + im * epsilon_complex)
-        else
-            _c1 = f(x + im * epsilon_complex)
-        end
-        @. df = imag(_c1) / epsilon_complex
-    else
-        fdtype_error(returntype)
-    end
-    df
-end
diff --git a/test/finitedifftests.jl b/test/finitedifftests.jl
index 29ae9de..89b1b60 100644
--- a/test/finitedifftests.jl
+++ b/test/finitedifftests.jl
@@ -261,55 +261,6 @@ central_cache = FiniteDiff.GradientCache(df, x, Val{:central}, eltype(df))
     @test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, central_cache), df_ref) < 1e-8
 end
 
-f(df, x) = (df[1] = sin(x); df[2] = cos(x); df)
-z = x = 2π * rand()
-fx = fill(0.0, 2)
-f(fx, x)
-ff(df, x) = !all(x .<= z) ? error() : f(df, x)
-df = fill(0.0, 2)
-df_ref = [cos(x), -sin(x)]
-forward_cache = FiniteDiff.GradientCache(df, x, Val{:forward})
-central_cache = FiniteDiff.GradientCache(df, x, Val{:central})
-complex_cache = FiniteDiff.GradientCache(df, x, Val{:complex})
-
-
-@time @testset "Gradient of f:scalar->vector real-valued tests" begin
-    @test_broken err_func(FiniteDiff.finite_difference_gradient(f, x, Val{:forward}), df_ref) < 1e-4
-    @test err_func(FiniteDiff.finite_difference_gradient(f, x, Val{:forward}, eltype(x), Val{true}, fx), df_ref) < 1e-4
-    @test err_func(FiniteDiff.finite_difference_gradient(ff, x, Val{:forward}, eltype(x), Val{true}, fx, dir=-1), df_ref) < 1e-4
-    @test_throws Any err_func(FiniteDiff.finite_difference_gradient(ff, x, Val{:forward}), df_ref) < 1e-4
-    @test err_func(FiniteDiff.finite_difference_gradient(f, x, Val{:central}, eltype(x), Val{true}, fx), df_ref) < 1e-8
-    @test err_func(FiniteDiff.finite_difference_gradient(f, x, Val{:complex}, eltype(x), Val{true}, fx), df_ref) < 1e-15
-
-    @test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, Val{:forward}), df_ref) < 1e-4
-    @test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, Val{:central}), df_ref) < 1e-8
-    @test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, Val{:complex}), df_ref) < 1e-15
-
-    @test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, forward_cache), df_ref) < 1e-4
-    @test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, central_cache), df_ref) < 1e-8
-    @test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, complex_cache), df_ref) < 1e-15
-end
-
-f(df, x) = (df[1] = sin(x); df[2] = cos(x); df)
-x = (2π * rand()) * (1 + im)
-fx = fill(zero(typeof(x)), 2)
-f(fx, x)
-df = zero(fx)
-df_ref = [cos(x), -sin(x)]
-forward_cache = FiniteDiff.GradientCache(df, x, Val{:forward})
-central_cache = FiniteDiff.GradientCache(df, x, Val{:central})
-
-@time @testset "Gradient of f:vector->scalar complex-valued tests" begin
-    @test err_func(FiniteDiff.finite_difference_gradient(f, x, Val{:forward}, eltype(x), Val{true}, fx), df_ref) < 1e-4
-    @test err_func(FiniteDiff.finite_difference_gradient(f, x, Val{:central}, eltype(x), Val{true}, fx), df_ref) < 3e-7
-
-    @test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, Val{:forward}), df_ref) < 1e-4
-    @test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, Val{:central}), df_ref) < 3e-7
-
-    @test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, forward_cache), df_ref) < 1e-4
-    @test err_func(FiniteDiff.finite_difference_gradient!(df, f, x, central_cache), df_ref) < 3e-7
-end
-
 function ret_allocs(res, _f, x, cache)
     allocs = @allocated FiniteDiff.finite_difference_gradient!(res, _f, x, cache)
     allocs
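
For reference, the three difference rules that the deleted method applied to a scalar->vector map can be reproduced in a few lines of plain Julia. This is a standalone sketch, not FiniteDiff's API: the step sizes below are simple stand-ins for what compute_epsilon and default_relstep produce, chosen only to illustrate the forward, central, and complex-step formulas that appeared in the removed code.

    # Scalar -> vector map from the deleted tests, and its exact derivative.
    f(x) = [sin(x), cos(x)]
    x = 0.7
    df_ref = [cos(x), -sin(x)]

    # Illustrative step sizes (stand-ins for compute_epsilon / default_relstep).
    eps_fwd = sqrt(eps(Float64)) * max(abs(x), one(x))
    eps_cen = cbrt(eps(Float64)) * max(abs(x), one(x))
    eps_cpx = eps(Float64)

    # Forward difference: (f(x + eps) - f(x)) / eps
    df_forward = (f(x + eps_fwd) - f(x)) ./ eps_fwd

    # Central difference: (f(x + eps) - f(x - eps)) / (2 * eps)
    df_central = (f(x + eps_cen) - f(x - eps_cen)) ./ (2 * eps_cen)

    # Complex step: imag(f(x + im*eps)) / eps, near machine precision for real-analytic f
    df_complex = imag.(f(x + im * eps_cpx)) ./ eps_cpx

    # Each error lands well inside the tolerance the corresponding removed test asserted.
    @show maximum(abs.(df_forward - df_ref))   # ~1e-8  (removed test asserted < 1e-4)
    @show maximum(abs.(df_central - df_ref))   # ~1e-11 (removed test asserted < 1e-8)
    @show maximum(abs.(df_complex - df_ref))   # ~1e-16 (removed test asserted < 1e-15)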