
Commit e40fb85

Merge pull request SciML#495 from LilithHafner/lh/format

Run JuliaFormatter.format()

2 parents (376241f + d724ea0), commit e40fb85


68 files changed: +731 additions, -637 deletions
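The diff below is the output of a single repository-wide formatting pass. As a minimal sketch, such a pass can be reproduced roughly as follows, assuming JuliaFormatter is installed in the active environment (the exact invocation used for this PR is not recorded in the commit):

    using JuliaFormatter

    # Format every Julia file under the repository root in place.
    # The style is normally read from the repository's .JuliaFormatter.toml;
    # the return value is `true` only if all files were already formatted.
    format(".")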

docs/make.jl

Lines changed: 15 additions & 15 deletions

@@ -51,34 +51,34 @@ function create_tutorials(dirname, targetdir, excludes = [])
 Literate.markdown(ipath, targetdir)
 Literate.markdown(ipath, targetdir, execute = false, postprocess = mdpost)
 push!(tutorials,
- relpath(joinpath(targetdir, fname * ".md"), joinpath(@__DIR__, "src")))
+ relpath(joinpath(targetdir, fname * ".md"), joinpath(@__DIR__, "src")))
 end
 end
 return tutorials
 end

 koopman_tutorial = create_tutorials(joinpath(@__DIR__, "src/libs/datadrivendmd/"),
- joinpath(@__DIR__, "src/libs/datadrivendmd/examples"))
+ joinpath(@__DIR__, "src/libs/datadrivendmd/examples"))
 sparse_tutorial = create_tutorials(joinpath(@__DIR__, "src/libs/datadrivensparse/"),
- joinpath(@__DIR__, "src/libs/datadrivensparse/examples"))
+ joinpath(@__DIR__, "src/libs/datadrivensparse/examples"))
 sr_tutorial = create_tutorials(joinpath(@__DIR__, "src/libs/datadrivensr/"),
- joinpath(@__DIR__, "src/libs/datadrivensr/examples"))
+ joinpath(@__DIR__, "src/libs/datadrivensr/examples"))

 # Must be after tutorials is created
 include("pages.jl")

 # Create the docs
 makedocs(sitename = "DataDrivenDiffEq.jl",
- authors = "Julius Martensen, Christopher Rackauckas, et al.",
- modules = [DataDrivenDiffEq, DataDrivenDMD, DataDrivenSparse, DataDrivenSR],
- clean = true, doctest = false, linkcheck = true,
- warnonly = [:missing_docs, :cross_references],
- linkcheck_ignore = ["http://cwrowley.princeton.edu/papers/Hemati-2017a.pdf",
- "https://royalsocietypublishing.org/doi/10.1098/rspa.2020.0279",
- "https://www.pnas.org/doi/10.1073/pnas.1517384113"],
- format = Documenter.HTML(assets = ["assets/favicon.ico"],
- canonical = "https://docs.sciml.ai/DataDrivenDiffEq/stable/"),
- pages = pages)
+ authors = "Julius Martensen, Christopher Rackauckas, et al.",
+ modules = [DataDrivenDiffEq, DataDrivenDMD, DataDrivenSparse, DataDrivenSR],
+ clean = true, doctest = false, linkcheck = true,
+ warnonly = [:missing_docs, :cross_references],
+ linkcheck_ignore = ["http://cwrowley.princeton.edu/papers/Hemati-2017a.pdf",
+ "https://royalsocietypublishing.org/doi/10.1098/rspa.2020.0279",
+ "https://www.pnas.org/doi/10.1073/pnas.1517384113"],
+ format = Documenter.HTML(assets = ["assets/favicon.ico"],
+ canonical = "https://docs.sciml.ai/DataDrivenDiffEq/stable/"),
+ pages = pages)

 deploydocs(repo = "github.com/SciML/DataDrivenDiffEq.jl.git";
- push_preview = true)
+ push_preview = true)
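The docs/make.jl hunk above only re-indents the existing calls; the Documenter build itself is unchanged. For reference, a hedged sketch of building the docs locally, assuming the usual Documenter layout with a docs/Project.toml environment (not shown in this diff):

    using Pkg
    Pkg.activate("docs")      # assumes docs/Project.toml exists (standard Documenter setup)
    Pkg.instantiate()
    include("docs/make.jl")   # runs create_tutorials(...) and the makedocs(...) call shown above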

docs/pages.jl

Lines changed: 4 additions & 4 deletions

@@ -6,13 +6,13 @@ pages = [
 "Solutions" => "solutions.md",
 "Utilities" => "utils.md",
 "DataDrivenDMD" => ["Background" => "libs/datadrivendmd/koopman.md",
- "Examples" => koopman_tutorial,
+ "Examples" => koopman_tutorial
 ],
 "DataDrivenSparse" => ["Background" => "libs/datadrivensparse/sparse_regression.md",
- "Examples" => sparse_tutorial,
+ "Examples" => sparse_tutorial
 ],
 "DataDrivenSR" => ["Background" => "libs/datadrivensr/symbolic_regression.md",
- "Examples" => sr_tutorial,
+ "Examples" => sr_tutorial
 ],
- "Citing" => "citations.md",
+ "Citing" => "citations.md"
 ]

docs/src/libs/datadrivensparse/example_02.jl

Lines changed: 3 additions & 3 deletions

@@ -39,8 +39,8 @@ ts = sol.t;
 # in as a function `(u,p,t)->control` or an array of measurements.

 prob = ContinuousDataDrivenProblem(X, ts, GaussianKernel(),
- U = (u, p, t) -> [exp(-((t - 5.0) / 5.0)^2)],
- p = ones(2))
+ U = (u, p, t) -> [exp(-((t - 5.0) / 5.0)^2)],
+ p = ones(2))

 #md plot(prob, size = (600,600))

@@ -66,7 +66,7 @@ sampler = DataProcessing(split = 0.8, shuffle = true, batchsize = 30, rng = rng)
 λs = exp10.(-10:0.1:0)
 opt = STLSQ(λs)
 res = solve(prob, basis, opt,
- options = DataDrivenCommonOptions(data_processing = sampler, digits = 1))
+ options = DataDrivenCommonOptions(data_processing = sampler, digits = 1))
 #src println(res) #hide

 # !!! info

docs/src/libs/datadrivensparse/example_03.jl

Lines changed: 3 additions & 3 deletions

@@ -24,9 +24,9 @@ ode_problem = ODEProblem(michaelis_menten, u0, (0.0, 4.0));
 # for the processing.

 prob = DataDrivenDataset(map(1:2) do i
- solve(remake(ode_problem, u0 = i * u0),
- Tsit5(), saveat = 0.1, tspan = (0.0, 4.0))
- end...)
+ solve(remake(ode_problem, u0 = i * u0),
+ Tsit5(), saveat = 0.1, tspan = (0.0, 4.0))
+ end...)

 #md plot(prob)

docs/src/libs/datadrivensparse/example_04.jl

Lines changed: 4 additions & 2 deletions

@@ -20,7 +20,9 @@ using Test #src
 δ = 0.5
 end

- @variables begin x[1:2](t) = [20.0; 12.0] end
+ @variables begin
+     x[1:2](t) = [20.0; 12.0]
+ end

 x = collect(x)
 D = Differential(t)

@@ -52,7 +54,7 @@ basis = Basis(eqs, x, independent_variable = t, implicits = D.(x))

 sampler = DataProcessing(split = 0.8, shuffle = true, batchsize = 30)
 res = solve(dd_prob, basis, ImplicitOptimizer(STLSQ(1e-2:1e-2:1.0)),
- options = DataDrivenCommonOptions(data_processing = sampler, digits = 2))
+ options = DataDrivenCommonOptions(data_processing = sampler, digits = 2))
 #md println(res) #hide

 # And have a look at the resulting plot

docs/src/libs/datadrivensparse/example_05.jl

Lines changed: 1 addition & 1 deletion

@@ -36,7 +36,7 @@ end
 t = solution.t

 ddprob = ContinuousDataDrivenProblem(X, t, DX = DX[3:4, :],
- U = (u, p, t) -> [-0.2 + 0.5 * sin(6 * t)])
+ U = (u, p, t) -> [-0.2 + 0.5 * sin(6 * t)])

 #md plot(ddprob)

docs/src/libs/datadrivensr/example_01.jl

Lines changed: 3 additions & 3 deletions

@@ -37,9 +37,9 @@ prob = ContinuousDataDrivenProblem(X, t, U = U)
 # We will stick to simple operations, use a `L1DistLoss`, and limit the verbosity of the algorithm.

 eqsearch_options = SymbolicRegression.Options(binary_operators = [+, *],
- loss = L1DistLoss(),
- verbosity = -1, progress = false, npop = 30,
- timeout_in_seconds = 60.0)
+ loss = L1DistLoss(),
+ verbosity = -1, progress = false, npop = 30,
+ timeout_in_seconds = 60.0)

 alg = EQSearch(eq_options = eqsearch_options)

docs/src/libs/datadrivensr/example_02.jl

Lines changed: 3 additions & 3 deletions

@@ -36,9 +36,9 @@ u = collect(u)
 basis = Basis([polynomial_basis(u, 2); sin.(u)], u)

 eqsearch_options = SymbolicRegression.Options(binary_operators = [+, *],
- loss = L1DistLoss(),
- verbosity = -1, progress = false, npop = 30,
- timeout_in_seconds = 60.0)
+ loss = L1DistLoss(),
+ verbosity = -1, progress = false, npop = 30,
+ timeout_in_seconds = 60.0)

 alg = EQSearch(eq_options = eqsearch_options)

lib/DataDrivenDMD/src/algorithms.jl

Lines changed: 8 additions & 6 deletions

@@ -20,14 +20,16 @@ function truncated_svd(A::AbstractMatrix{T}, truncation::Int) where {T <: Number
 end

 # General method with inputs
- function (x::AbstractKoopmanAlgorithm)(X::AbstractArray, Y::AbstractArray, U::AbstractArray,
- B::AbstractArray)
+ function (x::AbstractKoopmanAlgorithm)(
+ X::AbstractArray, Y::AbstractArray, U::AbstractArray,
+ B::AbstractArray)
 K, _ = x(X, Y - B * U)
 return (K, B)
 end

- function (x::AbstractKoopmanAlgorithm)(X::AbstractArray, Y::AbstractArray, U::AbstractArray,
- ::Nothing)
+ function (x::AbstractKoopmanAlgorithm)(
+ X::AbstractArray, Y::AbstractArray, U::AbstractArray,
+ ::Nothing)
 return x(X, Y, U)
 end

@@ -113,7 +115,7 @@

 # DMDc
 function (x::DMDSVD{T})(X::AbstractArray, Y::AbstractArray,
- U::AbstractArray) where {T <: Real}
+ U::AbstractArray) where {T <: Real}
 isempty(U) && return x(X, Y)
 nx, m = size(X)
 nu, m = size(U)

@@ -176,7 +178,7 @@ function (x::TOTALDMD)(X::AbstractArray, Y::AbstractArray, U::AbstractArray)
 end

 function (x::TOTALDMD)(X::AbstractArray, Y::AbstractArray, U::AbstractArray,
- B::AbstractArray)
+ B::AbstractArray)
 _, _, Q = truncated_svd([X; Y], x.truncation)
 K, _ = x.alg(X * Q, (Y - B * U) * Q)
 return (K, B)

lib/DataDrivenDMD/src/result.jl

Lines changed: 1 addition & 1 deletion

@@ -25,7 +25,7 @@ struct KoopmanResult{K, B, C, Q, P, T} <: AbstractDataDrivenResult
 retcode::DDReturnCode

 function KoopmanResult(k_::K, b::B, c::C, q::Q, p::P, X::AbstractMatrix{T},
- Y::AbstractMatrix{T}, U::AbstractMatrix) where {K, B, C, Q, P, T}
+ Y::AbstractMatrix{T}, U::AbstractMatrix) where {K, B, C, Q, P, T}
 k = Matrix(k_)
 rss = isempty(b) ? sum(abs2, Y .- c * k * X) : sum(abs2, Y .- c * (k * X .+ b * U))
 dof = sum(!iszero, k)

lib/DataDrivenDMD/src/solve.jl

Lines changed: 13 additions & 13 deletions

@@ -1,10 +1,10 @@
 # This will get called within init in DataDrivenDiffEq

 function DataDrivenDiffEq.get_fit_targets(::A, prob::ABSTRACT_CONT_PROB,
- basis::AbstractBasis) where {
- A <:
- AbstractKoopmanAlgorithm
- }
+ basis::AbstractBasis) where {
+ A <:
+ AbstractKoopmanAlgorithm
+ }
 @unpack DX, X, p, t, U = prob

 @assert size(DX, 1)==size(X, 1) "$(A) needs equal number of observed states and differentials for continuous problems!"

@@ -32,10 +32,10 @@ function DataDrivenDiffEq.get_fit_targets(::A, prob::ABSTRACT_CONT_PROB,
 end

 function DataDrivenDiffEq.get_fit_targets(::A, prob::ABSTRACT_DISCRETE_PROB,
- basis::AbstractBasis) where {
- A <:
- AbstractKoopmanAlgorithm
- }
+ basis::AbstractBasis) where {
+ A <:
+ AbstractKoopmanAlgorithm
+ }
 # TODO Maybe we could, but this would require X[:, i+2] -> split in three here
 @assert !is_implicit(basis) "$(A) does not support implicit arguments in the basis for discrete problems!"

@@ -48,7 +48,7 @@ function DataDrivenDiffEq.get_fit_targets(::A, prob::ABSTRACT_DISCRETE_PROB,
 if is_controlled(basis)
 foreach(1:m) do i
 Ỹ[:, i] .= basis(X[:, i + 1], p, t[i + 1],
- U[:, i + 1])
+ U[:, i + 1])
 end
 else
 foreach(1:m) do i

@@ -60,9 +60,9 @@ end

 ## Solve the Koopman
 function CommonSolve.solve!(prob::InternalDataDrivenProblem{A}) where {
- A <:
- AbstractKoopmanAlgorithm
- }
+ A <:
+ AbstractKoopmanAlgorithm
+ }
 @unpack alg, basis, testdata, traindata, control_idx, options, problem, kwargs = prob
 @unpack selector = options
 # Check for

@@ -94,7 +94,7 @@ function convert_to_basis(res::KoopmanResult, basis::Basis, prob, options, contr
 end

 function (algorithm::AbstractKoopmanAlgorithm)(prob::InternalDataDrivenProblem;
- control_input = nothing, kwargs...)
+ control_input = nothing, kwargs...)
 @unpack traindata, testdata, control_idx, options = prob
 @unpack abstol = options
 # Preprocess control idx, indicates if any control is active in a single basis atom

lib/DataDrivenDMD/src/type.jl

Lines changed: 4 additions & 4 deletions

@@ -148,10 +148,10 @@ the L2 error of the prediction exceeds the `threshold`.
 `p` and `t` are the parameters of the basis and the vector of timepoints, if necessary.
 """
 function update!(k::AbstractKoopman,
- X::AbstractArray, Y::AbstractArray;
- p::AbstractArray = [], t::AbstractVector = [],
- U::AbstractArray = [],
- threshold::T = eps()) where {T <: Real}
+ X::AbstractArray, Y::AbstractArray;
+ p::AbstractArray = [], t::AbstractVector = [],
+ U::AbstractArray = [],
+ threshold::T = eps()) where {T <: Real}
 @assert updatable(k) "Linear Koopman is not updatable."

 Ψ₀ = k(X, p, t, U)

lib/DataDrivenDMD/test/runtests.jl

Lines changed: 16 additions & 6 deletions

@@ -7,9 +7,19 @@ using Test

 const GROUP = get(ENV, "GROUP", "All")

- @time begin if GROUP == "All" || GROUP == "DataDrivenDMD"
- @safetestset "Linear autonomous" begin include("./linear_autonomous.jl") end
- @safetestset "Linear forced" begin include("./linear_forced.jl") end
- @safetestset "Nonlinear autonomous" begin include("./nonlinear_autonomous.jl") end
- @safetestset "Nonlinear forced" begin include("./nonlinear_forced.jl") end
- end end
+ @time begin
+     if GROUP == "All" || GROUP == "DataDrivenDMD"
+         @safetestset "Linear autonomous" begin
+             include("./linear_autonomous.jl")
+         end
+         @safetestset "Linear forced" begin
+             include("./linear_forced.jl")
+         end
+         @safetestset "Nonlinear autonomous" begin
+             include("./nonlinear_autonomous.jl")
+         end
+         @safetestset "Nonlinear forced" begin
+             include("./nonlinear_forced.jl")
+         end
+     end
+ end
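The reformatted runner keeps the same GROUP-based test selection, just expanded over multiple lines. A hedged usage sketch, run from the repository root with the test dependencies (SafeTestsets, Test) available in the active environment:

    # GROUP defaults to "All"; restrict the run to the DataDrivenDMD testsets.
    ENV["GROUP"] = "DataDrivenDMD"
    include("lib/DataDrivenDMD/test/runtests.jl")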

lib/DataDrivenLux/src/algorithms/crossentropy.jl

Lines changed: 3 additions & 3 deletions

@@ -63,15 +63,15 @@ function init_model(x::CrossEntropy, basis::Basis, dataset::Dataset, intervals)
 end

 return LayeredDAG(length(basis), size(dataset.y, 1), n_layers, arities, functions;
- skip = skip, input_functions = variable_mask, simplex = simplex)
+ skip = skip, input_functions = variable_mask, simplex = simplex)
 end

 function update_parameters!(cache::SearchCache{<:CrossEntropy})
 @unpack candidates, keeps, p, alg = cache
 @unpack alpha = alg
 = mean(map(candidates[keeps]) do candidate
- ComponentVector(get_configuration(candidate.model.model, p, candidate.st))
- end)
+ ComponentVector(get_configuration(candidate.model.model, p, candidate.st))
+ end)
 cache.p .= alpha * p + (one(alpha) - alpha) .*
 return
 end

lib/DataDrivenLux/src/algorithms/reinforce.jl

Lines changed: 2 additions & 2 deletions

@@ -55,8 +55,8 @@ function reinforce_loss(candidates, p, alg)
 rewards = reward(losses)
 # ∇U(θ) = E[∇log(p)*R(t)]
 mean(map(enumerate(candidates)) do (i, candidate)
- rewards[i] * -candidate(p)
- end)
+ rewards[i] * -candidate(p)
+ end)
 end

 function update_parameters!(cache::SearchCache{<:Reinforce})

lib/DataDrivenLux/src/caches/cache.jl

Lines changed: 9 additions & 9 deletions

@@ -30,11 +30,11 @@ function init_model(x::AbstractDAGSRAlgorithm, basis::Basis, dataset::Dataset, i
 end

 return LayeredDAG(length(basis), size(dataset.y, 1), n_layers, arities, functions;
- skip = skip, input_functions = variable_mask, simplex = simplex)
+ skip = skip, input_functions = variable_mask, simplex = simplex)
 end

 function init_cache(x::X where {X <: AbstractDAGSRAlgorithm}, basis::Basis,
- problem::DataDrivenProblem; kwargs...)
+ problem::DataDrivenProblem; kwargs...)
 @unpack rng, keep, observed, populationsize, optimizer, optim_options, optimiser, loss = x
 # Derive the model
 dataset = Dataset(problem)

@@ -56,9 +56,9 @@ function init_cache(x::X where {X <: AbstractDAGSRAlgorithm}, basis::Basis,
 # Derive the candidates
 candidates = map(1:populationsize) do i
 candidate = Candidate(rng_, model, basis, dataset; observed = observed,
- parameterdist = parameters, ptype = TData)
+ parameterdist = parameters, ptype = TData)
 optimize_candidate!(candidate, dataset; optimizer = optimizer,
- options = optim_options)
+ options = optim_options)
 candidate
 end

@@ -93,8 +93,8 @@ function init_cache(x::X where {X <: AbstractDAGSRAlgorithm}, basis::Basis,
 optimiser_state = nothing
 end
 return SearchCache{typeof(x), ptype, typeof(optimiser_state)}(x, candidates, ages,
- keeps, sorting, ps,
- dataset, optimiser_state)
+ keeps, sorting, ps,
+ dataset, optimiser_state)
 end

 function update_cache!(cache::SearchCache)

@@ -134,7 +134,7 @@ function optimize_cache!(cache::SearchCache{<:Any, __PROCESSUSE(1)}, p = cache.p
 return true
 else
 optimize_candidate!(candidate, cache.dataset, p; optimizer = optimizer,
- options = optim_options)
+ options = optim_options)
 cache.ages[i] = 0
 return true
 end

@@ -151,7 +151,7 @@ function optimize_cache!(cache::SearchCache{<:Any, __PROCESSUSE(2)}, p = cache.p
 cache.ages[i] += 1
 else
 optimize_candidate!(cache.candidates[i], cache.dataset, p;
- optimizer = optimizer, options = optim_options)
+ optimizer = optimizer, options = optim_options)
 cache.ages[i] = 0
 end
 end

@@ -169,7 +169,7 @@ function optimize_cache!(cache::SearchCache{<:Any, __PROCESSUSE(3)}, p = cache.p
 return true
 else
 optimize_candidate!(cache.candidates[i], cache.dataset, p;
- optimizer = optimizer, options = optim_options)
+ optimizer = optimizer, options = optim_options)
 cache.ages[i] = 0
 return true
 end
