@@ -9,8 +9,8 @@ using FiniteDifferences: grad, jacobian, _jvp, jvp, j′vp, _j′vp, to_vec
         ẋ, ẏ = randn(rng, T, N), randn(rng, T, M)
         xy, ẋẏ = vcat(x, y), vcat(ẋ, ẏ)
         ż_manual = _jvp(fdm, (xy)->sum(sin, xy), xy, ẋẏ)[1]
-        ż_auto = jvp(fdm, x->sum(sin, x[1]) + sum(sin, x[2]), ((x, y), (ẋ, ẏ)))
-        ż_multi = jvp(fdm, (x, y)->sum(sin, x) + sum(sin, y), (x, ẋ), (y, ẏ))
+        ż_auto = @inferred(jvp(fdm, x->sum(sin, x[1]) + sum(sin, x[2]), ((x, y), (ẋ, ẏ))))
+        ż_multi = @inferred(jvp(fdm, (x, y)->sum(sin, x) + sum(sin, y), (x, ẋ), (y, ẏ)))
         @test ż_manual ≈ ż_auto
         @test ż_manual ≈ ż_multi
     end
@@ -52,7 +52,7 @@ using FiniteDifferences: grad, jacobian, _jvp, jvp, j′vp, _j′vp, to_vec
         @assert length(ȳ) == length(f(x))

         # Check that the jacobian is as expected.
-        J_fdm = jacobian(fdm, f, x)[1]
+        J_fdm = @inferred(jacobian(fdm, f, x))[1]
         @test size(J_fdm) == (length(ȳ), length(x))
         @test J_fdm ≈ J_exact
         @test J_fdm == jacobian(fdm, f, x)[1]
@@ -79,7 +79,8 @@ using FiniteDifferences: grad, jacobian, _jvp, jvp, j′vp, _j′vp, to_vec
         @test Ac == A

         # Prevent regression against https://github.com/JuliaDiff/FiniteDifferences.jl/issues/67
-        J = first(jacobian(fdm, identity, x))
+        # Type inference: https://github.com/JuliaDiff/FiniteDifferences.jl/issues/199
+        J = first(@inferred(jacobian(fdm, identity, x)))
         @test J ≈ one(Matrix{T}(undef, size(J)))
     end

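For context, the new wrappers use `Test.@inferred` from Julia's standard library: it evaluates a call and throws an error when the runtime return type differs from what type inference predicts, which is how the wrapped `jvp`/`jacobian` calls guard against the type-instability regression tracked in issue 199. Below is a minimal, self-contained sketch of that behaviour; the `stable`/`unstable` toy functions are purely illustrative and not part of FiniteDifferences.jl.

using Test

# Type-stable: the inferred return type matches the runtime type.
stable(x) = 2x

# Type-unstable: inference can only narrow the result to Union{Float64, Int64}.
unstable(x) = x > 0 ? 1 : 1.0

@inferred stable(3)                                 # passes and returns 6
@test_throws ErrorException @inferred(unstable(3))  # the type mismatch makes @inferred throw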