Skip to content

Commit 73b9084

Browse files
committed
Rewrite all benchmarks with PkgBenchmark.jl conventions
1 parent 3e19f80 commit 73b9084

11 files changed

Lines changed: 114 additions & 161 deletions

File tree

benchmark/Project.toml

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@ ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
44
Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
55
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
66
Lux = "b2108857-7c20-44ae-9111-449ecde12c47"
7-
Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
87
ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
98
TaylorDiff = "b36ab563-344f-407b-a36a-4f200bebf99c"
109
TaylorSeries = "6aa5eb33-94cf-58f4-a9d0-e4b2c4fc25ea"

benchmark/benchmark.jl

Lines changed: 0 additions & 81 deletions
This file was deleted.

benchmark/benchmarks.jl

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
using BenchmarkTools
using Random: seed!
using ForwardDiff, Zygote, Flux
using TaylorSeries: Taylor1
using TaylorDiff

# Seed the global RNG so benchmark inputs/weights are reproducible across runs.
seed!(19260817)

include("scalar.jl")
include("mlp.jl")
include("taylor_expansion.jl")
include("pinn.jl")

scalar = create_benchmark_scalar_function(sin, 0.1)
mlp = create_benchmark_mlp((2, 16), [2.0, 3.0], [1.0, 1.0])

# PkgBenchmark.jl convention: the top-level suite must be a const named `SUITE`.
const SUITE = BenchmarkGroup()
SUITE["scalar"] = scalar
SUITE["mlp"] = mlp
SUITE["taylor_expansion"] = taylor_expansion
SUITE["pinn"] = pinn

benchmark/case1.jl

Lines changed: 0 additions & 22 deletions
This file was deleted.

benchmark/linearmodel.jl

Lines changed: 0 additions & 19 deletions
This file was deleted.

benchmark/mlp.jl

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
"""
    create_benchmark_mlp(mlp_conf, x, l)

Build a `BenchmarkGroup` comparing ForwardDiff's nested-derivative approach
against TaylorDiff for directional derivatives (orders 1–7) of a random
single-hidden-layer MLP, taken at point `x` along direction `l`.
`mlp_conf` is `(input_size, hidden_size)`.
"""
function create_benchmark_mlp(mlp_conf::Tuple{Int, Int}, x::Vector{T},
                              l::Vector{T}) where {T <: Number}
    input, hidden = mlp_conf
    # Random weights/biases drawn from the globally seeded RNG.
    W₁, W₂, b₁, b₂ = rand(hidden, input), rand(1, hidden), rand(hidden), rand(1)
    σ = exp
    mlp(x) = first(W₂ * σ.(W₁ * x + b₁) + b₂)

    # chain[k](z) is the k-th derivative of t ↦ mlp(x + t*l) at z,
    # obtained by nesting ForwardDiff.derivative k times.
    chain = Function[z -> ForwardDiff.derivative(t -> mlp(x + t * l), z)]
    for _ in 2:7
        inner = last(chain)
        push!(chain, z -> ForwardDiff.derivative(inner, z))
    end

    forwarddiff, taylordiff = BenchmarkGroup(), BenchmarkGroup()
    for (order, dfunc) in enumerate(chain)
        forwarddiff[order] = @benchmarkable $dfunc(0)
    end
    # TaylorDiff: Val(k + 1) requests k + 1 Taylor coefficients, i.e. the
    # k-th directional derivative — indices line up with the ForwardDiff group.
    for order in 1:7
        taylordiff[order] = @benchmarkable derivative($mlp, $x, $l, $(Val(order + 1)))
    end
    return BenchmarkGroup("forwarddiff" => forwarddiff,
                          "taylordiff" => taylordiff)
end

benchmark/pinn.jl

Lines changed: 18 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -1,22 +1,13 @@
1-
using Flux
2-
using TaylorDiff
3-
using Zygote
4-
using Plots
5-
const input = 2
const hidden = 16

# 2 → 16 → 16 → 1 network; `first` unwraps the length-1 output vector.
model = Chain(Dense(input => hidden, exp),
              Dense(hidden => hidden, exp),
              Dense(hidden => 1),
              first)
# Trial solution that vanishes on the boundary of the unit square by
# construction, so the Dirichlet condition is satisfied exactly.
trial(model, x) = x[1] * (1 - x[1]) * x[2] * (1 - x[2]) * model(x)

# Single random collocation point used by the benchmarks below.
x = rand(Float32, input)
2011
function loss_by_finitediff(model, x)
2112
ε = cbrt(eps(Float32))
2213
ε₁ = [ε, 0]
@@ -28,24 +19,19 @@ function loss_by_finitediff(model, x)
2819
end
"""
    loss_by_taylordiff(model, x)

Squared PDE residual at collocation point `x` for the Poisson problem
`-Δu = sin(πx₁)sin(πx₂)` (exact solution `sin(πx)sin(πy)/(2π²)`), with the
Laplacian computed via TaylorDiff directional derivatives.
"""
function loss_by_taylordiff(model, x)
    f(x) = trial(model, x)
    # Val(3) requests 3 Taylor coefficients, i.e. the 2nd derivative along
    # each coordinate axis; their sum is Δu, plus the source term.
    # NOTE: the source was garbled in transit ("sin* x[1])"); restored to
    # sin(π * x[1]) * sin(π * x[2]) to match the exact solution above.
    error = derivative(f, x, Float32[1, 0], Val(3)) +
            derivative(f, x, Float32[0, 1], Val(3)) +
            sin(π * x[1]) * sin(π * x[2])
    abs2(error)
end

36-
opt = Flux.setup(Adam(), model)
37-
38-
allloss(model, loss) = sum([loss(model, x) for x in data])
39-
for epoch in 1:1000
40-
Flux.train!(loss_by_taylordiff, model, data, opt)
41-
end
42-
43-
grid = 0:0.01:1
44-
solution(x, y) = (sin(π * x) * sin(π * y)) / (2π^2)
45-
u = [trial(model, [x, y]) for x in grid, y in grid]
46-
utrue = [solution(x, y) for x in grid, y in grid]
47-
diff_u = abs.(u .- utrue)
48-
49-
surface(u)
50-
surface(utrue)
51-
surface(diff_u)
# Benchmark both loss formulations (primal evaluation) and their Zygote
# gradients with respect to the model parameters.
pinn = BenchmarkGroup()
pinn["primal"] = BenchmarkGroup(
    "finitediff" => (@benchmarkable loss_by_finitediff($model, $x)),
    "taylordiff" => (@benchmarkable loss_by_taylordiff($model, $x)))
pinn["gradient"] = BenchmarkGroup(
    "finitediff" => (@benchmarkable gradient($loss_by_finitediff, $model, $x)),
    "taylordiff" => (@benchmarkable gradient($loss_by_taylordiff, $model, $x)))

benchmark/scalar.jl

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
"""
    create_benchmark_scalar_function(f, x)

Build a `BenchmarkGroup` comparing nested ForwardDiff derivatives against
TaylorDiff for derivative orders 1–9 of the scalar function `f` at `x`.
"""
function create_benchmark_scalar_function(f::F, x::T) where {F, T <: Number}
    # chain[k](z) is the k-th derivative of f at z via k nested
    # ForwardDiff.derivative calls.
    chain = Function[z -> ForwardDiff.derivative(f, z)]
    for _ in 2:9
        inner = last(chain)
        push!(chain, z -> ForwardDiff.derivative(inner, z))
    end

    forwarddiff_group = BenchmarkGroup()
    for (order, dfunc) in enumerate(chain)
        # Ref(x) shields x from benchmark-time interpolation of a constant.
        forwarddiff_group[order] = @benchmarkable $dfunc($(Ref(x))[])
    end

    taylordiff_group = BenchmarkGroup()
    # Val(k + 1) ⇒ k + 1 Taylor coefficients ⇒ the k-th derivative.
    for order in 1:9
        taylordiff_group[order] = @benchmarkable derivative($f, $x, $(Val(order + 1)))
    end
    return BenchmarkGroup("forwarddiff" => forwarddiff_group,
                          "taylordiff" => taylordiff_group)
end

benchmark/taylor_expansion.jl

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
"""
    my_calculation(t, p, α, s)

Evaluate `√2/(1 - t) · Σᵢ pᵢ · x^αᵢ` where `x = 1/(1 - s(t + 1)/(t - 1))`.
Generic in `t`, so it works for plain numbers as well as Taylor types
(`Taylor1`, `TaylorScalar`).
"""
function my_calculation(t, p, α, s)
    # Rational change of variable mapping t into the evaluation point x.
    x = 1.0 / (1.0 - s * (t + 1) / (t - 1))
    # Accumulate the weighted power series; zero(x) keeps the loop type-stable
    # for Taylor-valued x.
    total = zero(x)
    for (idx, weight) in pairs(p)
        total += weight * x^α[idx]
    end
    return total * sqrt(2) / (1 - t)
end
# Random problem instance: N weights/exponents, truncation order m.
N, m = 100, 20
p, α, s = rand(N), rand(N), rand()
# Normalize the weights to sum to one.
p ./= sum(p)
# Equivalent Taylor arguments for the two libraries: m + 1 coefficients each.
t_ts = Taylor1(eltype(p), m)
t_td = TaylorScalar{eltype(p), m + 1}(0.0, 1.0)
taylor_expansion = BenchmarkGroup()
taylor_expansion["taylorseries"] = @benchmarkable my_calculation($t_ts, $p, $α, $s)
taylor_expansion["taylordiff"] = @benchmarkable my_calculation($t_td, $p, $α, $s)

src/chainrules.jl

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,9 @@ end
# Reverse rule for `value`: pull the cotangent of the coefficient tuple back
# to a `TaylorScalar` cotangent. Three pullback methods cover the tangent
# representations Zygote/ChainRules may produce.
function rrule(::typeof(value), t::TaylorScalar{T, N}) where {N, T}
    # Natural tangent: already an NTuple of coefficients — wrap directly.
    value_pullback(v̄::NTuple{N, T}) = NoTangent(), TaylorScalar(v̄)
    # for structural tangent, convert to tuple
    function value_pullback(v̄::Tangent{P, NTuple{N, T}}) where {P}
        NoTangent(), TaylorScalar{T, N}(backing(v̄))
    end
    # Fallback: coerce each component to T element-wise before wrapping.
    value_pullback(v̄) = NoTangent(), TaylorScalar{T, N}(map(x -> convert(T, x), Tuple(v̄)))
    return value(t), value_pullback
end

0 commit comments

Comments
 (0)