-
Notifications
You must be signed in to change notification settings - Fork 99
Expand file tree
/
Copy pathMathOptInterfaceBenchmarkToolsExt.jl
More file actions
79 lines (73 loc) · 2.42 KB
/
MathOptInterfaceBenchmarkToolsExt.jl
File metadata and controls
79 lines (73 loc) · 2.42 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
# Copyright (c) 2017: Miles Lubin and contributors
# Copyright (c) 2017: Google Inc.
#
# Use of this source code is governed by an MIT-style license that can be found
# in the LICENSE.md file or at https://opensource.org/licenses/MIT.
module MathOptInterfaceBenchmarkToolsExt
import BenchmarkTools
import MathOptInterface as MOI
# Build a BenchmarkTools.BenchmarkGroup containing every registered MOI
# benchmark, skipping any whose name matches a pattern in `exclude`.
# `new_model` is a zero-argument factory producing a fresh optimizer for
# each benchmark run.
function MOI.Benchmarks.suite(
    new_model::Function;
    exclude::Vector{Regex} = Regex[],
)
    suite = BenchmarkTools.BenchmarkGroup()
    for (benchmark_name, benchmark_fn) in MOI.Benchmarks.BENCHMARKS
        # Skip benchmarks whose name matches any exclusion pattern.
        if any(pattern -> occursin(pattern, benchmark_name), exclude)
            continue
        end
        suite[benchmark_name] =
            BenchmarkTools.@benchmarkable $benchmark_fn($new_model)
    end
    return suite
end
# Tune and run `suite`, saving the tuning parameters to
# `<directory>/<name>_params.json` and the results to
# `<directory>/<name>_baseline.json` for later comparison with
# `compare_against_baseline`. Extra keyword arguments are forwarded to
# `BenchmarkTools.run`.
function MOI.Benchmarks.create_baseline(
    suite::BenchmarkTools.BenchmarkGroup,
    name::String;
    directory::String = "",
    kwargs...,
)
    # Tune first, then persist the tuned parameters so future comparison
    # runs can reuse identical evals/samples settings.
    BenchmarkTools.tune!(suite)
    params_file = joinpath(directory, name * "_params.json")
    BenchmarkTools.save(params_file, BenchmarkTools.params(suite))
    # Execute the suite and persist the baseline results.
    baseline_file = joinpath(directory, name * "_baseline.json")
    baseline_results = BenchmarkTools.run(suite; kwargs...)
    BenchmarkTools.save(baseline_file, baseline_results)
    return
end
# Run `suite` and compare the results against the baseline previously
# saved by `create_baseline`, printing a judgement per benchmark to both
# `stdout` and `<directory>/<report_filename>`. Extra keyword arguments
# are forwarded to `BenchmarkTools.run`. Errors if the baseline files do
# not exist.
function MOI.Benchmarks.compare_against_baseline(
    suite::BenchmarkTools.BenchmarkGroup,
    name::String;
    directory::String = "",
    report_filename::String = "report.txt",
    kwargs...,
)
    params_filename = joinpath(directory, name * "_params.json")
    baseline_filename = joinpath(directory, name * "_baseline.json")
    if !isfile(params_filename) || !isfile(baseline_filename)
        # Fixed: original message read "You create a baseline ..."
        error("You must create a baseline with `create_baseline` first.")
    end
    # Reuse the tuned evals/samples from the baseline run so the new run
    # is measured under identical settings.
    BenchmarkTools.loadparams!(
        suite,
        BenchmarkTools.load(params_filename)[1],
        :evals,
        :samples,
    )
    new_results = BenchmarkTools.run(suite; kwargs...)
    old_results = BenchmarkTools.load(baseline_filename)[1]
    open(joinpath(directory, report_filename), "w") do io
        # Mirror every line of the report to both stdout and the file.
        for out in (stdout, io)
            println(out, "\n========== Results ==========")
        end
        for key in keys(new_results)
            judgement = BenchmarkTools.judge(
                BenchmarkTools.median(new_results[key]),
                BenchmarkTools.median(old_results[key]),
            )
            for out in (stdout, io)
                println(out, "\n", key)
                show(out, MIME"text/plain"(), judgement)
            end
        end
    end
    return
end
end # module