Merged
17 changes: 17 additions & 0 deletions benchmarks/benchmark_list.jl
@@ -0,0 +1,17 @@
ON_TRAVIS = get(ENV, "TRAVIS", "false") == "true"

# NOTE: both branches currently list the same files; the split presumably
# exists so the Travis set can be trimmed independently later.
if ON_TRAVIS
    BENCHMARK_FILES = [
        "dummy.run.jl",
        "gdemo.run.jl",
        "mvnormal.run.jl",
    ]
else
    BENCHMARK_FILES = [
        "dummy.run.jl",
        "gdemo.run.jl",
        "mvnormal.run.jl",
    ]
end

BENCHMARK_FILES = map(f -> joinpath(@__DIR__, f), BENCHMARK_FILES)
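For context, a minimal sketch of how a runner might consume this list. The include-based loop is an assumption about the downstream driver, not something TuringBenchmarks is confirmed to do:

# Hypothetical consumer of BENCHMARK_FILES (assumed driver, illustration only)
for file in BENCHMARK_FILES
    @info "Running benchmark file" file
    include(file)   # each *.run.jl script builds and prints its own LOG_DATA
end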
17 changes: 17 additions & 0 deletions benchmarks/dummy.run.jl
@@ -0,0 +1,17 @@
using Turing, TuringBenchmarks.TuringTools

data = [0, 1, 0, 1, 1, 1, 1, 1, 1, 1]

@model constrained_test(obs) = begin
    p ~ Beta(2, 2)
    for i in 1:length(obs)
        obs[i] ~ Bernoulli(p)
    end
    p
end

bench_res = @tbenchmark(HMC(1000, 1.5, 3), constrained_test, data)

# bench_res[4].names = ["phi[1]", "phi[2]", "phi[3]", "phi[4]"]
LOG_DATA = build_log_data("Dummy-Benchmark", bench_res...)
print_log(LOG_DATA)
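Because the Beta prior is conjugate to the Bernoulli likelihood, this model's exact posterior is available in closed form, which makes a convenient reference when eyeballing the benchmark's HMC estimates. The check below is an illustration added here, not part of the PR:

# Closed-form posterior for the Beta-Bernoulli model above (illustrative only)
using Distributions
successes = count(isequal(1), data)   # 8 of the 10 observations are 1
posterior = Beta(2 + successes, 2 + length(data) - successes)   # Beta(10, 4)
mean(posterior)   # ≈ 0.714; the sampled p should land close to this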
17 changes: 17 additions & 0 deletions benchmarks/gdemo.run.jl
@@ -0,0 +1,17 @@
using Turing, TuringBenchmarks.TuringTools

@model gdemo(x, y) = begin
    s ~ InverseGamma(2, 3)
    m ~ Normal(0, sqrt(s))
    x ~ Normal(m, sqrt(s))
    y ~ Normal(m, sqrt(s))
    return s, m
end

data = (1.5, 2.0)

# sample(gdemo(1.5, 2.0), Turing.NUTS(2000000, 0.65));
bench_res = @tbenchmark(Turing.NUTS(2000000, 0.65), gdemo, data...)

LOG_DATA = build_log_data("GDemo-Benchmark", bench_res...)
print_log(LOG_DATA)
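This is the standard conjugate normal demo, so for the observations (1.5, 2.0) the posterior means have closed forms; the values below are the reference numbers commonly used to validate gdemo chains (added here for illustration, not part of the PR):

# Analytic posterior means for gdemo with x = 1.5, y = 2.0 (illustrative only)
exact_s = 49 / 24   # E[s | x, y] ≈ 2.042
exact_m = 7 / 6     # E[m | x, y] ≈ 1.167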
20 changes: 20 additions & 0 deletions benchmarks/mvnormal.run.jl
@@ -0,0 +1,20 @@
using Turing, TuringBenchmarks.TuringTools

# Define the target distribution: a D-dimensional standard normal
const D = 10

@model target(dim) = begin
    θ = Vector{Real}(undef, dim)
    θ ~ MvNormal(zeros(dim), ones(dim))
end

# Sampling parameter settings
n_samples = 100_000
n_adapts = 2_000   # not used by the plain HMC call below

# Sampling
bench_res = @tbenchmark_expr("HMC(n_samples, 0.1, 5)",
                             sample(target(D), HMC(n_samples, 0.1, 5)))

LOG_DATA = build_log_data("MvNormal-Benchmark", bench_res...)
print_log(LOG_DATA)
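Since the target is a 10-dimensional standard normal, its moments give a cheap reference for sanity-checking the chain. The snippet below makes no assumptions about Turing's chain API; it just draws i.i.d. reference samples (illustrative, not part of the PR):

# Reference moments for the standard-normal target (illustrative only)
using Statistics
ref = randn(n_samples, D)   # i.i.d. draws from the same target density
mean(ref), std(ref)         # ≈ (0.0, 1.0); HMC draws should match up to MC error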
46 changes: 23 additions & 23 deletions benchmarks/runbenchmarks.jl
@@ -1,5 +1,17 @@
using Pkg
using Dates

+PROJECT_DIR = abspath(@__DIR__) |> dirname
+
+# prepare packages
+Pkg.build("Turing")
+
+BENCHMARK_REV = "master"
+Pkg.add(PackageSpec(url="https://github.com/TuringLang/TuringBenchmarks.git", rev=BENCHMARK_REV))
+Pkg.build("TuringBenchmarks")
+Pkg.resolve()
+
+# prepare benchmark information
+BASE_BRANCH = "master"
CURRENT_BRANCH = strip(read(`git rev-parse --abbrev-ref HEAD`, String))

@@ -18,28 +30,16 @@ COMMIT_SHA_7 = COMMIT_SHA[1:7]
TIME = Dates.format(now(), "YYYYmmddHHMM")
BM_JOB_NAME="BMCI-$(SANTI_BR_NAME)-$(COMMIT_SHA_7)-$(TIME)"

-run(`git config remote.origin.fetch '+refs/heads/*:refs/remotes/origin/*'`)
-run(`git fetch --all --unshallow`)
-
-run(`git clone https://github.com/TuringLang/TuringBenchmarks.git ../TuringBenchmarks`)
-
-delete!(ENV, "JULIA_PROJECT")
-
-code_pre = """using Pkg
-# Pkg.instantiate()
-try pkg"develop ." catch end
-try pkg"develop ." catch end
-try pkg"build Turing" catch end
-using Turing
-try pkg"develop ../TuringBenchmarks" catch end
-try pkg"develop ../TuringBenchmarks" catch end
-pkg"add SpecialFunctions"
-using TuringBenchmarks
-"""
+if get(ENV, "TRAVIS", "false") == "true"
+    run(`git config remote.origin.fetch '+refs/heads/*:refs/remotes/origin/*'`)
+    run(`git fetch --all --unshallow`)
+end

-code_run = """using TuringBenchmarks.Runner
-Runner.run_bm_on_travis("$BM_JOB_NAME", ("master", "$CURRENT_BRANCH"), "$COMMIT_SHA")
+# run
+code_run = """using TuringBenchmarks
+using TuringBenchmarks.Runner
+TuringBenchmarks.set_project_path("$PROJECT_DIR")
+TuringBenchmarks.set_benchmark_files(joinpath("$PROJECT_DIR", "benchmarks/benchmark_list.jl"))
+Runner.run_bm_on_travis("$BM_JOB_NAME", ("$BASE_BRANCH", "$CURRENT_BRANCH"), "$COMMIT_SHA")
"""

-run(`julia -e $code_pre`)
-run(`julia -e $code_run`)
+run(`julia --project=$PROJECT_DIR -e $code_run`)
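The script's core pattern is staging: it assembles the runner invocation as a string and executes it in a fresh subprocess so the benchmarks run inside the project environment it just prepared. A minimal self-contained sketch of that pattern, with illustrative names rather than the PR's exact strings:

# Run a snippet of Julia code in a fresh process under a chosen project
# environment (illustrative sketch of the pattern used above)
project_dir = abspath(joinpath(@__DIR__, ".."))
code = """
println("running in project: ", Base.active_project())
"""
run(`julia --project=$project_dir -e $code`)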