Skip to content

Commit 05502e8

Browse files
authored
Formatted all files following bluestyle (#92)
* Formatted all files following bluestyle * Fixed autotuning * Moar formatting * Moar formatting * Fixing issues * last test correction * Fix docs * Fixed autotuning once more
1 parent 217d967 commit 05502e8

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

74 files changed

+1956
-2408
lines changed

Diff for: Project.toml

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
name = "AugmentedGaussianProcesses"
22
uuid = "38eea1fd-7d7d-5162-9d08-f89d0f2e271e"
33
authors = ["Theo Galy-Fajou <[email protected]>"]
4-
version = "0.10.0"
4+
version = "0.10.1"
55

66
[deps]
77
AdvancedHMC = "0bf59076-c3b1-5ca4-86bd-e02cd72cde3d"

Diff for: benchmark/create_benchmark_file.jl

+6-5
Original file line numberDiff line numberDiff line change
@@ -7,11 +7,12 @@ N = 3000
77
D = 20
88
K = 4
99
k = RBFKernel(1.0)
10-
X = rand(N,D)
11-
y = rand(MvNormal(kernelmatrix(X,k)+1e-3I))
10+
X = rand(N, D)
11+
y = rand(MvNormal(kernelmatrix(X, k) + 1e-3I))
1212
df = DataFrame(X)
1313
df.y_reg = y
1414
df.y_class = sign.(y)
15-
width = maximum(y)-minimum(y);normy = (y.-minimum(y))/width*K
16-
df.y_multi = floor.(Int64,normy)
17-
CSV.write(joinpath(@__DIR__,"benchmarkdata.csv"),df)
15+
width = maximum(y) - minimum(y);
16+
normy = (y .- minimum(y)) / width * K;
17+
df.y_multi = floor.(Int64, normy)
18+
CSV.write(joinpath(@__DIR__, "benchmarkdata.csv"), df)

Diff for: benchmark/models.jl

+80-29
Original file line numberDiff line numberDiff line change
@@ -14,37 +14,58 @@ const AGP = AugmentedGaussianProcesses
1414

1515
## Benchmark
1616

17-
18-
1917
compat = Dict{String,Dict{String,Vector{String}}}()
20-
likelihoodnames = ["Gaussian","StudentT","Logistic","BayesianSVM","LogisticSoftMax"]
21-
inferencenames = ["AnalyticVI","AnalyticSVI"]
22-
modelnames = ["GP","VGP","SVGP"]
23-
funcs = ["init","elbo","computematrices","updatevariational","updatehyperparam","predic","predicproba"]
24-
compat["GP"] = Dict{String,Vector{String}}("Gaussian"=>["AnalyticVI"])
18+
likelihoodnames = ["Gaussian", "StudentT", "Logistic", "BayesianSVM", "LogisticSoftMax"]
19+
inferencenames = ["AnalyticVI", "AnalyticSVI"]
20+
modelnames = ["GP", "VGP", "SVGP"]
21+
funcs = [
22+
"init",
23+
"elbo",
24+
"computematrices",
25+
"updatevariational",
26+
"updatehyperparam",
27+
"predic",
28+
"predicproba",
29+
]
30+
compat["GP"] = Dict{String,Vector{String}}("Gaussian" => ["AnalyticVI"])
2531
compat["VGP"] = Dict{String,Vector{String}}()
2632
compat["VGP"]["StudentT"] = ["AnalyticVI"]
2733
compat["VGP"]["Logistic"] = ["AnalyticVI"]
2834
compat["VGP"]["BayesianSVM"] = ["AnalyticVI"]
2935
compat["VGP"]["LogisticSoftMax"] = ["AnalyticVI"]
3036
compat["SVGP"] = Dict{String,Vector{String}}()
31-
compat["SVGP"]["Gaussian"] = ["AnalyticVI","AnalyticSVI"]
32-
compat["SVGP"]["StudentT"] = ["AnalyticVI","AnalyticSVI"]
33-
compat["SVGP"]["Logistic"] = ["AnalyticVI","AnalyticSVI"]
34-
compat["SVGP"]["BayesianSVM"] = ["AnalyticVI","AnalyticSVI"]
35-
compat["SVGP"]["LogisticSoftMax"] = ["AnalyticVI","AnalyticSVI"]
37+
compat["SVGP"]["Gaussian"] = ["AnalyticVI", "AnalyticSVI"]
38+
compat["SVGP"]["StudentT"] = ["AnalyticVI", "AnalyticSVI"]
39+
compat["SVGP"]["Logistic"] = ["AnalyticVI", "AnalyticSVI"]
40+
compat["SVGP"]["BayesianSVM"] = ["AnalyticVI", "AnalyticSVI"]
41+
compat["SVGP"]["LogisticSoftMax"] = ["AnalyticVI", "AnalyticSVI"]
3642

3743
const SUITE = BenchmarkGroup(["Models"])
3844
Random.seed!(1234)
39-
D = 20; N = 3000
45+
D = 20;
46+
N = 3000;
4047
data = CSV.read("benchmarkdata.csv")
41-
X = Matrix(data[:,1:D])
42-
y_key = Dict("Gaussian"=>:y_reg,"StudentT"=>:y_reg,"BayesianSVM"=>:y_class,"Logistic"=>:y_class,"LogisticSoftMax"=>:y_multi)
43-
n_ind = 50; batchsize = 50; ν = 5.0
44-
convertl(lname::String) = lname*(lname != "BayesianSVM" ? "Likelihood" : "")*"("*(lname == "StudentT" ? "ν" : "")*")"
45-
converti(iname::String) = iname*"("*(iname == "AnalyticSVI" ? "batchsize" : "")*")"
48+
X = Matrix(data[:, 1:D])
49+
y_key = Dict(
50+
"Gaussian" => :y_reg,
51+
"StudentT" => :y_reg,
52+
"BayesianSVM" => :y_class,
53+
"Logistic" => :y_class,
54+
"LogisticSoftMax" => :y_multi,
55+
)
56+
n_ind = 50;
57+
batchsize = 50;
58+
ν = 5.0;
59+
function convertl(lname::String)
60+
return lname *
61+
(lname != "BayesianSVM" ? "Likelihood" : "") *
62+
"(" *
63+
(lname == "StudentT" ? "ν" : "") *
64+
")"
65+
end
66+
converti(iname::String) = iname * "(" * (iname == "AnalyticSVI" ? "batchsize" : "") * ")"
4667
add_ind(mname::String) = mname == "SVGP" ? ",n_ind" : ""
47-
kernel = RBFKernel([2.0],variance=1.0,dim=D)
68+
kernel = RBFKernel([2.0]; variance=1.0, dim=D)
4869
models = Dict{String,Dict{String,Dict{String,AbstractGP}}}()
4970
SUITE["Models"] = BenchmarkGroup(modelnames)
5071
for model in String.(keys(compat))
@@ -56,19 +77,49 @@ for model in String.(keys(compat))
5677
for i in compat[model][likelihood]
5778
SUITE["Models"][model][likelihood][i] = BenchmarkGroup(funcs)
5879
if model == "GP"
59-
models[model][likelihood][i] = eval(Meta.parse(model*"(X,Vector(data[:$((y_key[likelihood]))]),kernel,atfrequency=1)"))
60-
SUITE["Models"][model][likelihood][i]["init"] = eval(Meta.parse("@benchmarkable $model(\$X,y_train,\$kernel,atfrequency=1) setup=(y_train = Vector(\$D[:\$((y_key[likelihood]))])"))
80+
models[model][likelihood][i] = eval(
81+
Meta.parse(
82+
model *
83+
"(X,Vector(data[:$((y_key[likelihood]))]),kernel,atfrequency=1)",
84+
),
85+
)
86+
SUITE["Models"][model][likelihood][i]["init"] = eval(
87+
Meta.parse(
88+
"@benchmarkable $model(\$X,y_train,\$kernel,atfrequency=1) setup=(y_train = Vector(\$D[:\$((y_key[likelihood]))])",
89+
),
90+
)
6191
else
6292
# println(Meta.parse(model*"(X,y[\"$likelihood\"],kernel,$(convertl(likelihood)) ,$(converti(i))$(add_ind(model)),atfrequency=1)"))
63-
models[model][likelihood][i] = eval(Meta.parse(model*"(X,Vector(data[:$((y_key[likelihood]))]),kernel,$(convertl(likelihood)) ,$(converti(i))$(add_ind(model)),atfrequency=1)"))
64-
SUITE["Models"][model][likelihood][i]["init"] = eval(Meta.parse("@benchmarkable $model(\$X,y_train,\$kernel,$(convertl(likelihood)),$(converti(i)) $(add_ind(model)),atfrequency=1) setup=(y_train = Vector(\$data[:\$((y_key[likelihood]))]))"))
93+
models[model][likelihood][i] = eval(
94+
Meta.parse(
95+
model *
96+
"(X,Vector(data[:$((y_key[likelihood]))]),kernel,$(convertl(likelihood)) ,$(converti(i))$(add_ind(model)),atfrequency=1)",
97+
),
98+
)
99+
SUITE["Models"][model][likelihood][i]["init"] = eval(
100+
Meta.parse(
101+
"@benchmarkable $model(\$X,y_train,\$kernel,$(convertl(likelihood)),$(converti(i)) $(add_ind(model)),atfrequency=1) setup=(y_train = Vector(\$data[:\$((y_key[likelihood]))]))",
102+
),
103+
)
65104
end
66-
SUITE["Models"][model][likelihood][i]["elbo"] = @benchmarkable ELBO(gpmodel) setup=(gpmodel=deepcopy($(models[model][likelihood][i])))
67-
SUITE["Models"][model][likelihood][i]["computematrices"] = @benchmarkable AGP.computeMatrices!(gpmodel) setup=(gpmodel=deepcopy($(models[model][likelihood][i])))
68-
SUITE["Models"][model][likelihood][i]["updatevariational"] = @benchmarkable AGP.variational_updates!(gpmodel) setup=(gpmodel=deepcopy($(models[model][likelihood][i])))
69-
SUITE["Models"][model][likelihood][i]["updatehyperparam"] = @benchmarkable AGP.update_hyperparameters!(gpmodel) setup=(gpmodel=deepcopy($(models[model][likelihood][i])))
70-
SUITE["Models"][model][likelihood][i]["predic"] = @benchmarkable predict_y(gpmodel,$X) setup=(gpmodel=deepcopy($(models[model][likelihood][i])))
71-
SUITE["Models"][model][likelihood][i]["predicproba"] = @benchmarkable proba_y(gpmodel,$X) setup=(gpmodel=deepcopy($(models[model][likelihood][i])))
105+
SUITE["Models"][model][likelihood][i]["elbo"] = @benchmarkable ELBO(gpmodel) setup = (
106+
gpmodel = deepcopy($(models[model][likelihood][i]))
107+
)
108+
SUITE["Models"][model][likelihood][i]["computematrices"] = @benchmarkable AGP.computeMatrices!(
109+
gpmodel
110+
) setup = (gpmodel = deepcopy($(models[model][likelihood][i])))
111+
SUITE["Models"][model][likelihood][i]["updatevariational"] = @benchmarkable AGP.variational_updates!(
112+
gpmodel
113+
) setup = (gpmodel = deepcopy($(models[model][likelihood][i])))
114+
SUITE["Models"][model][likelihood][i]["updatehyperparam"] = @benchmarkable AGP.update_hyperparameters!(
115+
gpmodel
116+
) setup = (gpmodel = deepcopy($(models[model][likelihood][i])))
117+
SUITE["Models"][model][likelihood][i]["predic"] = @benchmarkable predict_y(
118+
gpmodel, $X
119+
) setup = (gpmodel = deepcopy($(models[model][likelihood][i])))
120+
SUITE["Models"][model][likelihood][i]["predicproba"] = @benchmarkable proba_y(
121+
gpmodel, $X
122+
) setup = (gpmodel = deepcopy($(models[model][likelihood][i])))
72123
end
73124
end
74125
end

Diff for: coverage/coverage.jl

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
# Only run coverage from linux nightly build on travis.
2-
get(ENV, "TRAVIS_OS_NAME", "") == "linux" || exit()
2+
get(ENV, "TRAVIS_OS_NAME", "") == "linux" || exit()
33
get(ENV, "TRAVIS_JULIA_VERSION", "") == "nightly" || exit()
44

55
using Coverage

Diff for: docs/examples/gpclassification.jl

+47-59
Original file line numberDiff line numberDiff line change
@@ -17,57 +17,50 @@ Y = data[:, end];
1717

1818
# ### We create a function to visualize the data
1919

20-
function plot_data(X, Y; size=(300,500))
21-
Plots.scatter(eachcol(X)...,
22-
group = Y,
23-
alpha=0.2,
24-
markerstrokewidth=0.0,
25-
lab="",
26-
size=size
27-
)
20+
function plot_data(X, Y; size=(300, 500))
21+
return Plots.scatter(
22+
eachcol(X)...; group=Y, alpha=0.2, markerstrokewidth=0.0, lab="", size=size
23+
)
2824
end
29-
plot_data(X, Y; size = (500, 500))
25+
plot_data(X, Y; size=(500, 500))
3026

3127
# ### Run sparse classification with increasing number of inducing points
3228
Ms = [4, 8, 16, 32, 64]
3329
models = Vector{AbstractGP}(undef, length(Ms) + 1)
3430
kernel = transform(SqExponentialKernel(), 1.0)
3531
for (i, num_inducing) in enumerate(Ms)
3632
@info "Training with $(num_inducing) points"
37-
m = SVGP(X, Y,
38-
kernel,
39-
LogisticLikelihood(),
40-
AnalyticVI(),
41-
num_inducing,
42-
optimiser = false,
43-
Zoptimiser = false
44-
)
33+
m = SVGP(
34+
X,
35+
Y,
36+
kernel,
37+
LogisticLikelihood(),
38+
AnalyticVI(),
39+
num_inducing;
40+
optimiser=false,
41+
Zoptimiser=false,
42+
)
4543
@time train!(m, 20)
4644
models[i] = m
4745
end
4846
# ### Running the full model
4947
@info "Running full model"
50-
mfull = VGP(X, Y,
51-
kernel,
52-
LogisticLikelihood(),
53-
AnalyticVI(),
54-
optimiser = false
55-
)
48+
mfull = VGP(X, Y, kernel, LogisticLikelihood(), AnalyticVI(); optimiser=false)
5649
@time train!(mfull, 5)
5750
models[end] = mfull
5851

5952
# ### We create a prediction and plot function on a grid
6053
function compute_grid(model, n_grid=50)
61-
mins = [-3.25,-2.85]
62-
maxs = [3.65,3.4]
63-
x_lin = range(mins[1], maxs[1], length=n_grid)
64-
y_lin = range(mins[2], maxs[2], length=n_grid)
54+
mins = [-3.25, -2.85]
55+
maxs = [3.65, 3.4]
56+
x_lin = range(mins[1], maxs[1]; length=n_grid)
57+
y_lin = range(mins[2], maxs[2]; length=n_grid)
6558
x_grid = Iterators.product(x_lin, y_lin)
66-
y_grid, _ = proba_y(model,vec(collect.(x_grid)))
59+
y_grid, _ = proba_y(model, vec(collect.(x_grid)))
6760
return y_grid, x_lin, y_lin
6861
end
6962

70-
function plot_model(model, X, Y, title = nothing; size = (300, 500))
63+
function plot_model(model, X, Y, title=nothing; size=(300, 500))
7164
n_grid = 50
7265
y_pred, x_lin, y_lin = compute_grid(model, n_grid)
7366
title = if isnothing(title)
@@ -76,45 +69,40 @@ function plot_model(model, X, Y, title = nothing; size = (300, 500))
7669
title
7770
end
7871
p = plot_data(X, Y; size=size)
79-
Plots.contour!(p,
80-
x_lin, y_lin,
81-
reshape(y_pred, n_grid, n_grid)',
82-
cbar=false, levels=[0.5],
83-
fill=false, color=:black,
84-
linewidth=2.0,
85-
title=title
86-
)
72+
Plots.contour!(
73+
p,
74+
x_lin,
75+
y_lin,
76+
reshape(y_pred, n_grid, n_grid)';
77+
cbar=false,
78+
levels=[0.5],
79+
fill=false,
80+
color=:black,
81+
linewidth=2.0,
82+
title=title,
83+
)
8784
if model isa SVGP
88-
Plots.scatter!(p,
89-
eachrow(hcat(AGP.Zview(model[1])...))...,
90-
msize=2.0, color="black",
91-
lab="")
85+
Plots.scatter!(
86+
p, eachrow(hcat(AGP.Zview(model[1])...))...; msize=2.0, color="black", lab=""
87+
)
9288
end
9389
return p
9490
end;
9591

9692
# ### Now run the prediction for every model and visualize the differences
97-
Plots.plot(plot_model.(models, Ref(X), Ref(Y))...,
98-
layout=(1, length(models)),
99-
size=(1000, 200)
100-
)
93+
Plots.plot(
94+
plot_model.(models, Ref(X), Ref(Y))...; layout=(1, length(models)), size=(1000, 200)
95+
)
10196

10297
# ## Bayesian SVM vs Logistic
10398
# ### We now create a model with the Bayesian SVM likelihood
10499

105-
mbsvm = VGP(X, Y,
106-
kernel,
107-
BayesianSVM(),
108-
AnalyticVI(),
109-
optimiser = false
110-
)
100+
mbsvm = VGP(X, Y, kernel, BayesianSVM(), AnalyticVI(); optimiser=false)
111101
@time train!(mbsvm, 5)
112102
# ### And compare it with the Logistic likelihood
113-
Plots.plot(plot_model.(
114-
[models[end], mbsvm],
115-
Ref(X),
116-
Ref(Y),
117-
["Logistic", "BSVM"];
118-
size = (500, 500)
119-
)...,
120-
layout=(1, 2))
103+
Plots.plot(
104+
plot_model.(
105+
[models[end], mbsvm], Ref(X), Ref(Y), ["Logistic", "BSVM"]; size=(500, 500)
106+
)...;
107+
layout=(1, 2),
108+
)

0 commit comments

Comments
 (0)