Fix reshape #20

Merged: 9 commits, Feb 25, 2025
99 changes: 0 additions & 99 deletions docs/src/hamiltonian_neural_network.md

This file was deleted.

2 changes: 2 additions & 0 deletions src/SymbolicNeuralNetworks.jl
@@ -19,6 +19,8 @@ module SymbolicNeuralNetworks

 include("symbolic_neuralnet/symbolize.jl")

+include("utils/create_array.jl")
+
 export AbstractSymbolicNeuralNetwork
 export SymbolicNeuralNetwork

15 changes: 11 additions & 4 deletions src/build_function/build_function.jl
@@ -22,12 +22,19 @@ function build_nn_function(eq::EqT, nn::AbstractSymbolicNeuralNetwork)
     build_nn_function(eq, params(nn), nn.input)
 end

-function build_nn_function(eq::EqT, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr)
+function build_nn_function(eq::EqT, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr; reduce = hcat)
     gen_fun = _build_nn_function(eq, sparams, sinput)
-    gen_fun_returned(x, ps) = mapreduce(k -> gen_fun(x, ps, k), hcat, axes(x, 2))
-    gen_fun_returned(x::Union{AbstractVector, Symbolics.Arr}, ps) = gen_fun_returned(reshape(x, length(x), 1), ps)
+    gen_fun_returned(x, ps) = mapreduce(k -> gen_fun(x, ps, k), reduce, axes(x, 2))
+    function gen_fun_returned(x::Union{AbstractVector, Symbolics.Arr}, ps)
+        output_not_reshaped = gen_fun_returned(reshape(x, length(x), 1), ps)
+        # for vectors we do not reshape, as the output may be a matrix
+        output_not_reshaped
+    end
     # check this! (definitely not correct in all cases!)
-    gen_fun_returned(x::AbstractArray{<:Number, 3}, ps) = reshape(gen_fun_returned(reshape(x, size(x, 1), size(x, 2) * size(x, 3)), ps), size(x, 1), size(x, 2), size(x, 3))
+    function gen_fun_returned(x::AbstractArray{<:Number, 3}, ps)
+        output_not_reshaped = gen_fun_returned(reshape(x, size(x, 1), size(x, 2) * size(x, 3)), ps)
+        reshape(output_not_reshaped, size(output_not_reshaped, 1), size(x, 2), size(x, 3))
+    end
     gen_fun_returned
 end
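As an aside (not part of the PR), the effect of the new `reduce` keyword can be shown in isolation. Here `f` is a made-up stand-in for the generated per-column function: `hcat` concatenates the per-column results back into a matrix, while `+` sums them.

```julia
# Stand-in for the generated function that evaluates one column of a batch.
f(x, k) = 2 .* x[:, k]

x = [1.0 2.0 3.0;
     4.0 5.0 6.0]                            # 2×3 batch: three column samples

mapreduce(k -> f(x, k), hcat, axes(x, 2))    # 2×3 matrix, columns kept apart
mapreduce(k -> f(x, k), +, axes(x, 2))       # length-2 vector, columns summed
```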

16 changes: 8 additions & 8 deletions src/build_function/build_function_arrays.jl
@@ -26,8 +26,8 @@ funcs_evaluated = funcs(input, params(nn))
 (c = [0.9576465981186686],)
 ```
 """
-function build_nn_function(eqs::AbstractArray{<:Union{NamedTuple, NeuralNetworkParameters}}, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr...)
-    ps_semi = [function_valued_parameters(eq, sparams, sinput...) for eq in eqs]
+function build_nn_function(eqs::AbstractArray{<:Union{NamedTuple, NeuralNetworkParameters}}, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr...; reduce = hcat)
+    ps_semi = [function_valued_parameters(eq, sparams, sinput...; reduce = reduce) for eq in eqs]

     _pbs_executable(ps_functions, params, input...) = apply_element_wise(ps_functions, params, input...)
     __pbs_executable(input, params) = _pbs_executable(ps_semi, params, input)
@@ -65,8 +65,8 @@ funcs_evaluated = funcs(input, params(nn))

 Internally this is using [`function_valued_parameters`](@ref) and [`apply_element_wise`](@ref).
 """
-function build_nn_function(eqs::Union{NamedTuple, NeuralNetworkParameters}, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr...)
-    ps = function_valued_parameters(eqs, sparams, sinput...)
+function build_nn_function(eqs::Union{NamedTuple, NeuralNetworkParameters}, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr...; reduce = hcat)
+    ps = function_valued_parameters(eqs, sparams, sinput...; reduce = reduce)
     _pbs_executable(ps::Union{NamedTuple, NeuralNetworkParameters}, params::NeuralNetworkParameters, input::AbstractArray...) = apply_element_wise(ps, params, input...)
     __pbs_executable(input::AbstractArray, params::NeuralNetworkParameters) = _pbs_executable(ps, params, input)
     # return this one if sinput & soutput are supplied
@@ -104,13 +104,13 @@ b = c(input, ps).^2
 (true, true)
 ```
 """
-function function_valued_parameters(eqs::NeuralNetworkParameters, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr...)
-    vals = Tuple(build_nn_function(eqs[key], sparams, sinput...) for key in keys(eqs))
+function function_valued_parameters(eqs::NeuralNetworkParameters, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr...; reduce = hcat)
+    vals = Tuple(build_nn_function(eqs[key], sparams, sinput...; reduce = reduce) for key in keys(eqs))
     NeuralNetworkParameters{keys(eqs)}(vals)
 end

-function function_valued_parameters(eqs::NamedTuple, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr...)
-    vals = Tuple(build_nn_function(eqs[key], sparams, sinput...) for key in keys(eqs))
+function function_valued_parameters(eqs::NamedTuple, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr...; reduce = hcat)
+    vals = Tuple(build_nn_function(eqs[key], sparams, sinput...; reduce = reduce) for key in keys(eqs))
     NamedTuple{keys(eqs)}(vals)
 end
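A hedged usage sketch, mirroring the docstring examples in this file (the equation names are illustrative): the `reduce` keyword is simply forwarded to `build_nn_function` for every equation in the container.

```julia
using SymbolicNeuralNetworks, AbstractNeuralNetworks

c = Chain(Dense(2, 3))
nn = SymbolicNeuralNetwork(c)
eqs = (a = nn.model(nn.input, nn.params), b = nn.model(nn.input, nn.params) .^ 2)

# reduce is passed through to build_nn_function for each of :a and :b.
funcs = build_nn_function(eqs, nn.params, nn.input; reduce = hcat)

nn_cpu = NeuralNetwork(c)
funcs_evaluated = funcs(rand(2, 4), nn_cpu.params)   # NamedTuple with keys (:a, :b), each 3×4
```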

28 changes: 23 additions & 5 deletions src/build_function/build_function_double_input.jl
@@ -1,7 +1,7 @@
 """
     build_nn_function(eqs, nn, soutput)

-Build an executable function that can also depend on an output. It is then called with:
+Build an executable function that can also depend on an output. The resulting `built_function` is then called with:
 ```julia
 built_function(input, output, ps)
 ```
@@ -16,14 +16,32 @@
     build_nn_function(eqs, params(nn), nn.input, soutput)
 end

-function build_nn_function(eq::EqT, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr, soutput::Symbolics.Arr)
+function build_nn_function(eq::EqT, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr, soutput::Symbolics.Arr; reduce = hcat)
+    @assert ( (reduce == hcat) || (reduce == +) ) "Keyword reduce either has to be + or hcat!"
     gen_fun = _build_nn_function(eq, sparams, sinput, soutput)
-    gen_fun_returned(input, output, ps) = mapreduce(k -> gen_fun(input, output, ps, k), +, axes(input, 2))
-    gen_fun_returned(input::AT, output::AT, ps) where {AT <: Union{AbstractVector, Symbolics.Arr}} = gen_fun_returned(reshape(input, length(input), 1), reshape(output, length(output), 1), ps)
-    gen_fun_returned(input::AT, output::AT, ps) where {T, AT <: AbstractArray{T, 3}} = gen_fun_returned(reshape(input, size(input, 1), size(input, 2) * size(input, 3)), reshape(output, size(output, 1), size(output, 2) * size(output, 3)), ps)
+    gen_fun_returned(input, output, ps) = mapreduce(k -> gen_fun(input, output, ps, k), reduce, axes(input, 2))
+    function gen_fun_returned(x::AT, y::AT, ps) where {AT <: Union{AbstractVector, Symbolics.Arr}}
+        output_not_reshaped = gen_fun_returned(reshape(x, length(x), 1), reshape(y, length(y), 1), ps)
+        # for vectors we do not reshape, as the output may be a matrix
+        output_not_reshaped
+    end
+    # check this! (definitely not correct in all cases!)
+    function gen_fun_returned(x::AT, y::AT, ps) where {AT <: AbstractArray{<:Number, 3}}
+        output_not_reshaped = gen_fun_returned(reshape(x, size(x, 1), size(x, 2) * size(x, 3)), reshape(y, size(y, 1), size(y, 2) * size(y, 3)), ps)
+        # if arrays are added together then don't reshape!
+        optional_reshape(output_not_reshaped, reduce, x)
+    end
     gen_fun_returned
 end

+function optional_reshape(output_not_reshaped::AbstractVecOrMat, ::typeof(+), ::AbstractArray{<:Number, 3})
+    output_not_reshaped
+end
+
+function optional_reshape(output_not_reshaped::AbstractVecOrMat, ::typeof(hcat), input::AbstractArray{<:Number, 3})
+    reshape(output_not_reshaped, size(output_not_reshaped, 1), size(input, 2), size(input, 3))
+end
+
 """
     _build_nn_function(eq, params, sinput, soutput)
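A note on the `optional_reshape` pair above: with `reduce = hcat` the generated function returns one column per sample of the flattened batch, so the result can be folded back into the input's three axes; with `reduce = +` the per-sample results have already been summed into a single array, and reshaping that by the input's axes would be wrong. A shape-only sketch, independent of the package:

```julia
x = rand(3, 1, 2)                   # three-axis input: 1 × 2 = 2 samples of dimension 3
flat = reshape(x, size(x, 1), :)    # the 3×2 matrix the generated function maps over

stacked = rand(3, 2)                # stand-in for an hcat-reduced result: one column per sample
reshape(stacked, size(stacked, 1), size(x, 2), size(x, 3))   # restored to 3×1×2

summed = rand(3, 1)                 # stand-in for a +-reduced result
# left as is: its columns no longer index samples, so there is nothing to reshape
```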
4 changes: 2 additions & 2 deletions src/derivatives/pullback.jl
@@ -92,7 +92,7 @@ function SymbolicPullback(nn::SymbolicNeuralNetwork, loss::NetworkLoss)
     @variables soutput[1:output_dimension(nn.model)]
     symbolic_loss = loss(nn.model, params(nn), nn.input, soutput)
     symbolic_pullbacks = symbolic_pullback(symbolic_loss, nn)
-    pbs_executable = build_nn_function(symbolic_pullbacks, params(nn), nn.input, soutput)
+    pbs_executable = build_nn_function(symbolic_pullbacks, params(nn), nn.input, soutput; reduce = +)
     function pbs(input, output, params)
         pullback(::Union{Real, AbstractArray{<:Real}}) = _get_contents(_get_params(pbs_executable(input, output, params)))
         pullback
@@ -146,4 +146,4 @@ _get_contents(nt::Tuple{<:Union{NamedTuple, NeuralNetworkParameters}}) = nt[1]
 # (_pullback::SymbolicPullback)(ps, model, input_nt::QPTOAT)::Tuple = Zygote.pullback(ps -> _pullback.loss(model, ps, input_nt), ps)
 function (_pullback::SymbolicPullback)(ps, model, input_nt_output_nt::Tuple{<:QPTOAT, <:QPTOAT})::Tuple
     _pullback.loss(model, ps, input_nt_output_nt...), _pullback.fun(input_nt_output_nt..., ps)
-end
\ No newline at end of file
+end
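Why `reduce = +` here: for a batch loss L(ps) = Σₖ ℓ(xₖ, yₖ, ps), the parameter gradient is the sum of the per-sample gradients, so the per-column pullback results must be accumulated rather than concatenated. A toy sketch with made-up gradients:

```julia
# Made-up per-sample parameter gradients for a batch of two samples.
grads = [[1.0, 2.0], [3.0, 4.0]]

# hcat would stack them into a 2×2 matrix; for a summed loss the
# correct batch gradient is the elementwise sum:
mapreduce(identity, +, grads)   # [4.0, 6.0]
```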
4 changes: 4 additions & 0 deletions src/utils/create_array.jl
@@ -0,0 +1,4 @@
+# TODO: this shouldn't be there (type piracy); remove once https://github.com/JuliaSymbolics/SymbolicUtils.jl/pull/679 has been merged!
+function Symbolics.SymbolicUtils.Code.create_array(::Type{<:Base.ReshapedArray{T, N, P}}, S, nd::Val, d::Val, elems...) where {T, N, P}
+    Symbolics.SymbolicUtils.Code.create_array(P, S, nd, d, elems...)
+end
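A hedged aside on why this method is needed: reshaping a wrapped array such as a view yields a `Base.ReshapedArray`, for which `SymbolicUtils.Code.create_array` had no method, so the pirated method above forwards to the parent array type `P`. The construction in the new test file produces exactly such a wrapper:

```julia
A = rand(2, 5)
x = reshape((@view A[:, 1:2]), 2, 1, 2)   # same pattern as in test/reshape_test.jl
typeof(x) <: Base.ReshapedArray           # true: the wrapper the method above unwraps
```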
40 changes: 40 additions & 0 deletions test/reshape_test.jl
@@ -0,0 +1,40 @@
+using SymbolicNeuralNetworks
+using AbstractNeuralNetworks
+using Symbolics
+using Test
+
+function set_up_network()
+    c = Chain(Dense(2, 3))
+    nn = SymbolicNeuralNetwork(c)
+    soutput = nn.model(nn.input, nn.params)
+    nn_cpu = NeuralNetwork(c)
+    nn, soutput, nn_cpu
+end
+
+function test_for_input()
+    nn, soutput, nn_cpu = set_up_network()
+    input = rand(2, 5)
+    input2 = reshape((@view input[:, 1:2]), 2, 1, 2)
+    built_function = build_nn_function(soutput, nn.params, nn.input)
+    outputs = built_function(input2, nn_cpu.params)
+    for i in 1:2
+        @test nn.model(input[:, i], nn_cpu.params) ≈ outputs[:, 1, i]
+    end
+end
+
+function test_for_input_and_output()
+    nn, soutput2, nn_cpu = set_up_network()
+    input = rand(2, 5)
+    output = rand(3, 5)
+    input2 = reshape((@view input[:, 1:2]), 2, 1, 2)
+    output2 = reshape((@view output[:, 1:2]), 3, 1, 2)
+    @variables soutput[1:3]
+    built_function = build_nn_function((soutput - soutput2).^2, nn.params, nn.input, soutput)
+    outputs = built_function(input2, output2, nn_cpu.params)
+    for i in 1:2
+        @test (nn.model(input[:, i], nn_cpu.params) - output[:, i]).^2 ≈ outputs[:, 1, i]
+    end
+end
+
+test_for_input()
+test_for_input_and_output()
3 changes: 2 additions & 1 deletion test/runtests.jl
@@ -1,10 +1,11 @@
 using SymbolicNeuralNetworks
 using SafeTestsets

+@safetestset "Check if reshape works in the correct way with the generated functions. " begin include("reshape_test.jl") end
 @safetestset "Symbolic gradient " begin include("derivatives/symbolic_gradient.jl") end
 @safetestset "Symbolic Neural network " begin include("derivatives/jacobian.jl") end
 @safetestset "Symbolic Params " begin include("symbolic_neuralnet/symbolize.jl") end
 @safetestset "Tests associated with 'build_function.jl' " begin include("build_function/build_function.jl") end
 @safetestset "Tests associated with 'build_function_double_input.jl' " begin include("build_function/build_function_double_input.jl") end
 @safetestset "Tests associated with 'build_function_array.jl " begin include("build_function/build_function_arrays.jl") end
-@safetestset "Compare Zygote Pullback with Symbolic Pullback " begin include("derivatives/pullback.jl") end
\ No newline at end of file
+@safetestset "Compare Zygote Pullback with Symbolic Pullback " begin include("derivatives/pullback.jl") end