Skip to content

Commit

Permalink
Merge branch 'SciML:master' into master
Browse files Browse the repository at this point in the history
  • Loading branch information
ayushinav authored Feb 25, 2024
2 parents 0370d3b + d353034 commit 5d6d13e
Show file tree
Hide file tree
Showing 7 changed files with 29 additions and 36 deletions.
22 changes: 10 additions & 12 deletions Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "NeuralPDE"
uuid = "315f7962-48a3-4962-8226-d0f33b1235f0"
authors = ["Chris Rackauckas <[email protected]>"]
version = "5.11.0"
version = "5.12.0"

[deps]
Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
Expand Down Expand Up @@ -33,25 +33,24 @@ Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
RuntimeGeneratedFunctions = "7e49a35a-f44a-4d26-94aa-eba1b4ca6b47"
SciMLBase = "0bca4576-84f4-4d90-8ffe-ffa030f20462"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
StochasticDiffEq = "789caeaf-c7a9-5a7d-9973-96adeb23e2a0"
SymbolicUtils = "d1185830-fcd6-423d-90d6-eec64667417b"
Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7"
UnPack = "3a884ed6-31ef-47d7-9d2a-63182c4928ed"
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"

[compat]
Adapt = "3, 4"
Adapt = "4"
AdvancedHMC = "0.6"
ArrayInterface = "6, 7"
Aqua = "0.8"
CUDA = "4"
ArrayInterface = "7"
CUDA = "5.1"
ChainRulesCore = "1"
ComponentArrays = "0.13.2, 0.14, 0.15"
ComponentArrays = "0.15"
Cubature = "1.5"
DiffEqBase = "6"
DiffEqNoiseProcess = "5.1"
Distributions = "0.23, 0.24, 0.25"
DocStringExtensions = "0.8, 0.9"
Distributions = "0.25"
DocStringExtensions = "0.9"
DomainSets = "0.6, 0.7"
Flux = "0.14"
ForwardDiff = "0.10"
Expand All @@ -60,15 +59,15 @@ Integrals = "4"
LineSearches = "7.2"
LinearAlgebra = "1"
LogDensityProblems = "2"
Lux = "0.4, 0.5"
Lux = "0.5"
LuxCUDA = "0.3"
MCMCChains = "6"
ModelingToolkit = "8"
MonteCarloMeasurements = "1"
Optim = "1.7.8"
Optimization = "3"
OptimizationOptimJL = "0.1"
OptimizationOptimisers = "0.1"
OptimizationOptimJL = "0.2"
OptimizationOptimisers = "0.2"
OrdinaryDiffEq = "6"
Pkg = "1"
QuasiMonteCarlo = "0.3.2"
Expand All @@ -78,7 +77,6 @@ RuntimeGeneratedFunctions = "0.5"
SafeTestsets = "0.1"
SciMLBase = "2"
Statistics = "1"
StochasticDiffEq = "6.13"
SymbolicUtils = "1"
Symbolics = "5"
Test = "1"
Expand Down
12 changes: 6 additions & 6 deletions docs/Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -26,19 +26,19 @@ SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b"
AdvancedHMC = "0.6"
Cubature = "1.5"
DiffEqBase = "6.106"
Distributions = "0.23, 0.24, 0.25"
Distributions = "0.25"
Documenter = "1"
DomainSets = "0.6, 0.7"
Flux = "0.13, 0.14"
Flux = "0.14"
Integrals = "4"
Lux = "0.4, 0.5"
Lux = "0.5"
ModelingToolkit = "8.33"
MonteCarloMeasurements = "1"
NeuralPDE = "5.3"
Optimization = "3.9"
OptimizationOptimJL = "0.1"
OptimizationOptimisers = "0.1"
OptimizationPolyalgorithms = "0.1"
OptimizationOptimJL = "0.2"
OptimizationOptimisers = "0.2"
OptimizationPolyalgorithms = "0.2"
OrdinaryDiffEq = "6.31"
Plots = "1.36"
QuasiMonteCarlo = "0.3"
Expand Down
3 changes: 1 addition & 2 deletions lib/NeuralPDELogging/test/adaptive_loss_log_tests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ using Test, NeuralPDE
using Optimization, OptimizationOptimisers
import ModelingToolkit: Interval, infimum, supremum
using Random, Lux
#using Plots
@info "Starting Soon!"

nonadaptive_loss = NeuralPDE.NonAdaptiveLoss(pde_loss_weights = 1, bc_loss_weights = 1)
Expand Down Expand Up @@ -70,7 +69,7 @@ function test_2d_poisson_equation_adaptive_loss(adaptive_loss, run, outdir, hasl
if haslogger
log_value(logger, "outer_error/loss", l, step = iteration[1])
if iteration[1] % 30 == 0
u_predict = reshape([first(phi([x, y], p)) for x in xs for y in ys],
u_predict = reshape([first(phi([x, y], p.u)) for x in xs for y in ys],
(length(xs), length(ys)))
diff_u = abs.(u_predict .- u_real)
total_diff = sum(diff_u)
Expand Down
2 changes: 1 addition & 1 deletion src/NeuralPDE.jl
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ using Reexport, Statistics
@reexport using ModelingToolkit

using Zygote, ForwardDiff, Random, Distributions
using Adapt, DiffEqNoiseProcess, StochasticDiffEq
using Adapt, DiffEqNoiseProcess
using Optimization
using OptimizationOptimisers
using Integrals, Cubature
Expand Down
4 changes: 0 additions & 4 deletions src/pinn_types.jl
Original file line number Diff line number Diff line change
Expand Up @@ -507,10 +507,6 @@ function (f::Phi{<:Lux.AbstractExplicitLayer})(x::AbstractArray, θ)
y
end

function (f::Phi{<:Optimisers.Restructure})(x, θ)
f.f(θ)(adapt(parameterless_type(θ), x))
end

function get_u()
u = (cord, θ, phi) -> phi(cord, θ)
end
Expand Down
10 changes: 5 additions & 5 deletions test/NNPDE_tests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -173,8 +173,8 @@ function test_2d_poisson_equation(chain_, strategy_)
u(x, 0) ~ 0.0, u(x, 1) ~ -sin(pi * x) * sin(pi * 1)]
# Space and time domains
domains = [x ∈ Interval(0.0, 1.0), y ∈ Interval(0.0, 1.0)]

discretization = PhysicsInformedNN(chain_, strategy_)
ps = Lux.setup(Random.default_rng(), chain_)[1]
discretization = PhysicsInformedNN(chain_, strategy_; init_params = ps)
@named pde_system = PDESystem(eq, bcs, domains, [x, y], [u(x, y)])
prob = discretize(pde_system, discretization)
res = solve(prob, OptimizationOptimisers.Adam(0.1); maxiters = 500, cb = callback)
Expand Down Expand Up @@ -251,8 +251,8 @@ end

cb_ = function (p, l)
println("loss: ", l)
println("pde_losses: ", map(l_ -> l_(p), pde_inner_loss_functions))
println("bcs_losses: ", map(l_ -> l_(p), bcs_inner_loss_functions))
println("pde_losses: ", map(l_ -> l_(p.u), pde_inner_loss_functions))
println("bcs_losses: ", map(l_ -> l_(p.u), bcs_inner_loss_functions))
return false
end

Expand Down Expand Up @@ -352,7 +352,7 @@ end

cb_ = function (p, l)
println("loss: ", l)
println("losses: ", map(l -> l(p), loss_functions))
println("losses: ", map(l -> l(p.u), loss_functions))
return false
end

Expand Down
12 changes: 6 additions & 6 deletions test/additional_loss_tests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -57,9 +57,9 @@ using ComponentArrays
phi = discretization.phi
cb_ = function (p, l)
println("loss: ", l)
println("pde_losses: ", map(l_ -> l_(p), pde_inner_loss_functions))
println("bcs_losses: ", map(l_ -> l_(p), bcs_inner_loss_functions))
println("additional_loss: ", norm_loss_function(phi, p, nothing))
println("pde_losses: ", map(l_ -> l_(p.u), pde_inner_loss_functions))
println("bcs_losses: ", map(l_ -> l_(p.u), bcs_inner_loss_functions))
println("additional_loss: ", norm_loss_function(phi, p.u, nothing))
return false
end
res = solve(prob, OptimizationOptimJL.LBFGS(), maxiters = 400, callback = cb_)
Expand All @@ -82,9 +82,9 @@ using ComponentArrays
phi = discretization.phi
cb_ = function (p, l)
println("loss: ", l)
println("pde_losses: ", map(l_ -> l_(p), pde_inner_loss_functions))
println("bcs_losses: ", map(l_ -> l_(p), bcs_inner_loss_functions))
println("additional_loss: ", norm_loss_function(phi, p, nothing))
println("pde_losses: ", map(l_ -> l_(p.u), pde_inner_loss_functions))
println("bcs_losses: ", map(l_ -> l_(p.u), bcs_inner_loss_functions))
println("additional_loss: ", norm_loss_function(phi, p.u, nothing))
return false
end
res = solve(prob, OptimizationOptimJL.LBFGS(), maxiters = 400, callback = cb_)
Expand Down

0 comments on commit 5d6d13e

Please sign in to comment.