
Merge branch 'main' into HLL_WavespeedDefault_Davis
ranocha authored Feb 23, 2024
2 parents ab7f71a + 029ddea commit 30d03a5
Showing 6 changed files with 115 additions and 8 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/Downgrade.yml
@@ -72,7 +72,7 @@ jobs:
       - uses: julia-actions/cache@v1
       - uses: julia-actions/julia-downgrade-compat@v1
         with:
-          skip: LinearAlgebra,Printf,SparseArrays,DiffEqBase
+          skip: LinearAlgebra,Printf,SparseArrays,UUIDs,DiffEqBase
           projects: ., test
       - uses: julia-actions/julia-buildpkg@v1
         env:
4 changes: 4 additions & 0 deletions Project.toml
@@ -25,6 +25,7 @@ OffsetArrays = "6fe1bfb0-de20-5000-8ca7-80f57d26f881"
P4est = "7d669430-f675-4ae7-b43e-fab78ec5a902"
Polyester = "f517fe37-dbe3-4b94-8317-1923a5111588"
PrecompileTools = "aea7be01-6a6a-4083-8856-8a6e6704d82a"
Preferences = "21216c6a-2e73-6563-6e65-726566657250"
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
RecipesBase = "3cdcf5f2-1ef4-517c-9805-6587b60abb01"
Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
@@ -46,6 +47,7 @@ Triangulate = "f7e6ffb2-c36d-4f8f-a77e-16e897189344"
TriplotBase = "981d1d27-644d-49a2-9326-4793e63143c3"
TriplotRecipes = "808ab39a-a642-4abf-81ff-4cb34ebbffa3"
TrixiBase = "9a0f1c46-06d5-4909-a5a3-ce25d3fa3284"
UUIDs = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"

[weakdeps]
Makie = "ee78f7c6-11fb-53f2-987a-cfe4a2b5a57a"
@@ -76,6 +78,7 @@ OffsetArrays = "1.12"
P4est = "0.4.9"
Polyester = "0.7.5"
PrecompileTools = "1.1"
Preferences = "1.3"
Printf = "1"
RecipesBase = "1.1"
Reexport = "1.0"
@@ -97,6 +100,7 @@ Triangulate = "2.2"
TriplotBase = "0.1"
TriplotRecipes = "0.1"
TrixiBase = "0.1.1"
UUIDs = "1.6"
julia = "1.8"

[extras]
6 changes: 6 additions & 0 deletions src/Trixi.jl
@@ -76,6 +76,12 @@ using TrixiBase: TrixiBase
 using SimpleUnPack: @pack!
 using DataStructures: BinaryHeap, FasterForward, extract_all!

+using UUIDs: UUID
+using Preferences: @load_preference, set_preferences!
+
+const _PREFERENCE_SQRT = @load_preference("sqrt", "sqrt_Trixi_NaN")
+const _PREFERENCE_LOG = @load_preference("log", "log_Trixi_NaN")
+
 # finite difference SBP operators
 using SummationByPartsOperators: AbstractDerivativeOperator,
                                  AbstractNonperiodicDerivativeOperator, DerivativeOperator,
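A minimal sketch (illustrative only, not part of this commit; `my_sqrt` and `_PREFERENCE_SQRT_DEMO` are hypothetical names) of why the preference is read into a `const` at the top level: `@load_preference` is evaluated during precompilation, so `@static if` can select the method definitions once, and the chosen implementation carries no runtime branch.

    # Illustrative sketch, NOT Trixi.jl source. The string constant stands in
    # for `@load_preference("sqrt", "sqrt_Trixi_NaN")`, which is evaluated
    # during precompilation of the package.
    const _PREFERENCE_SQRT_DEMO = "sqrt_Trixi_NaN"

    # `@static if` evaluates its condition at compile time, so only one of the
    # two method definitions below is ever compiled; there is no runtime branch.
    @static if _PREFERENCE_SQRT_DEMO == "sqrt_Trixi_NaN"
        my_sqrt(x::Real) = x < zero(x) ? oftype(x, NaN) : Base.sqrt(x)
    else
        my_sqrt(x::Real) = Base.sqrt(x)  # throws a DomainError for negative input
    end

    my_sqrt(-4.0)  # yields NaN under the default preference instead of an error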
97 changes: 97 additions & 0 deletions src/auxiliary/math.jl
@@ -5,6 +5,103 @@
 @muladd begin
 #! format: noindent

+const TRIXI_UUID = UUID("a7f1ee26-1774-49b1-8366-f1abc58fbfcb")
+
+"""
+    Trixi.set_sqrt_type(type; force = true)
+
+Set the `type` of the square root function to be used in Trixi.jl.
+The default is `"sqrt_Trixi_NaN"`, which returns `NaN` for negative arguments
+instead of throwing an error.
+Alternatively, you can set `type` to `"sqrt_Base"` to use the Julia built-in
+`sqrt` function, which provides a stack trace of the error; this might come in
+handy when debugging code.
+"""
+function set_sqrt_type(type; force = true)
+    @assert type == "sqrt_Trixi_NaN" || type == "sqrt_Base" "Only allowed `sqrt` function types are `\"sqrt_Trixi_NaN\"` and `\"sqrt_Base\"`"
+    set_preferences!(TRIXI_UUID, "sqrt" => type, force = force)
+    @info "Please restart Julia and reload Trixi.jl for the `sqrt` computation change to take effect"
+end

+@static if _PREFERENCE_SQRT == "sqrt_Trixi_NaN"
+    """
+        Trixi.sqrt(x::Real)
+
+    Custom square root function which returns `NaN` for negative arguments
+    instead of throwing an error.
+    This is required to ensure [correct results for multithreaded computations](https://github.com/trixi-framework/Trixi.jl/issues/1766)
+    when using the [`Polyester` package](https://github.com/JuliaSIMD/Polyester.jl),
+    i.e., using the `@batch` macro instead of the Julia built-in `@threads` macro, see [`@threaded`](@ref).
+
+    We dispatch this function for `Float64`, `Float32`, and `Float16` to the LLVM intrinsics
+    `llvm.sqrt.f64`, `llvm.sqrt.f32`, and `llvm.sqrt.f16`, as for these the LLVM functions
+    can be used out of the box, i.e., they return `NaN` for negative arguments.
+    In principle, one could also use the `sqrt_llvm` call, but for transparency and consistency with [`log`](@ref) we
+    spell out the datatype-dependent functions here.
+    For other types, such as integers or dual numbers required for algorithmic differentiation, we
+    fall back to the Julia built-in `sqrt` function after a check for negative arguments.
+    Since these cases are not performance critical, the check for negativity does not hurt here
+    and can (as of now) even be optimized away by the compiler due to the implementation of `sqrt` in Julia.
+
+    When debugging code, it might be useful to change the implementation of this function to redirect to
+    the Julia built-in `sqrt` function, as this reports the exact place in code where the domain is violated
+    in the stacktrace.
+
+    See also [`Trixi.set_sqrt_type`](@ref).
+    """
+    @inline sqrt(x::Real) = x < zero(x) ? oftype(x, NaN) : Base.sqrt(x)
+
+    # For `sqrt` we could use the `sqrt_llvm` call, ...
+    # @inline sqrt(x::Union{Float64, Float32, Float16}) = Base.sqrt_llvm(x)
+
+    # ... but for transparency and consistency we use the direct LLVM calls here.
+    @inline sqrt(x::Float64) = ccall("llvm.sqrt.f64", llvmcall, Float64, (Float64,), x)
+    @inline sqrt(x::Float32) = ccall("llvm.sqrt.f32", llvmcall, Float32, (Float32,), x)
+    @inline sqrt(x::Float16) = ccall("llvm.sqrt.f16", llvmcall, Float16, (Float16,), x)
+end

"""
Trixi.set_log_type(type; force = true)
Set the `type` of the (natural) `log` function to be used in Trixi.jl.
The default is `"sqrt_Trixi_NaN"` which returns `NaN` for negative arguments
instead of throwing an error.
Alternatively, you can set `type` to `"sqrt_Base"` to use the Julia built-in `sqrt` function
which provides a stack-trace of the error which might come in handy when debugging code.
"""
function set_log_type(type; force = true)
@assert type == "log_Trixi_NaN"||type == "log_Base" "Only allowed log function types are `\"log_Trixi_NaN\"` and `\"log_Base\"`."
set_preferences!(TRIXI_UUID, "log" => type, force = force)
@info "Please restart Julia and reload Trixi.jl for the `log` computation change to take effect"
end

+@static if _PREFERENCE_LOG == "log_Trixi_NaN"
+    """
+        Trixi.log(x::Real)
+
+    Custom natural logarithm function which returns `NaN` for negative arguments
+    instead of throwing an error.
+    This is required to ensure [correct results for multithreaded computations](https://github.com/trixi-framework/Trixi.jl/issues/1766)
+    when using the [`Polyester` package](https://github.com/JuliaSIMD/Polyester.jl),
+    i.e., using the `@batch` macro instead of the Julia built-in `@threads` macro, see [`@threaded`](@ref).
+
+    We dispatch this function for `Float64`, `Float32`, and `Float16` to the respective LLVM intrinsics
+    `llvm.log.f64`, `llvm.log.f32`, and `llvm.log.f16`, as for these the LLVM functions
+    can be used out of the box, i.e., they return `NaN` for negative arguments.
+    For other types, such as integers or dual numbers required for algorithmic differentiation, we
+    fall back to the Julia built-in `log` function after a check for negative arguments.
+    Since these cases are not performance critical, the check for negativity does not hurt here.
+
+    When debugging code, it might be useful to change the implementation of this function to redirect to
+    the Julia built-in `log` function, as this reports the exact place in code where the domain is violated
+    in the stacktrace.
+
+    See also [`Trixi.set_log_type`](@ref).
+    """
+    @inline log(x::Real) = x < zero(x) ? oftype(x, NaN) : Base.log(x)
+
+    @inline log(x::Float64) = ccall("llvm.log.f64", llvmcall, Float64, (Float64,), x)
+    @inline log(x::Float32) = ccall("llvm.log.f32", llvmcall, Float32, (Float32,), x)
+    @inline log(x::Float16) = ccall("llvm.log.f16", llvmcall, Float16, (Float16,), x)
+end

"""
ln_mean(x, y)
Expand Down
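A hedged usage sketch of the API added above (assuming a Trixi.jl installation that includes this commit; the calls follow the docstrings and are not verified here):

    using Trixi

    # Under the default preferences, domain violations return NaN instead of throwing:
    Trixi.sqrt(-1.0)  # NaN with the default "sqrt_Trixi_NaN"
    Trixi.log(-1.0)   # NaN with the default "log_Trixi_NaN"

    # For debugging, switch to the Base implementations, which throw a DomainError
    # with a stacktrace at the offending call site. As the @info messages note,
    # Julia must be restarted and Trixi.jl reloaded for the change to take effect.
    Trixi.set_sqrt_type("sqrt_Base")
    Trixi.set_log_type("log_Base")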
12 changes: 6 additions & 6 deletions test/test_parabolic_1d.jl
@@ -195,14 +195,14 @@ end
                     Prandtl = prandtl_number(),
                     gradient_variables = GradientVariablesEntropy()),
            l2=[
-               2.459359632523962e-5,
-               2.3928390718460263e-5,
-               0.00011252414117082376,
+               2.4593501090944024e-5,
+               2.3928163240907908e-5,
+               0.00011252309905552921,
            ],
            linf=[
-               0.0001185052018830568,
-               0.00018987717854305393,
-               0.0009597503607920999,
+               0.0001185048754512863,
+               0.0001898766501935486,
+               0.0009597450028770993,
            ])
     # Ensure that we do not have excessive memory allocations
     # (e.g., from type instabilities)
2 changes: 1 addition & 1 deletion test/test_unstructured_2d.jl
@@ -701,7 +701,7 @@ end
             1.8290174930157832e-11,
             4.61017890529547e-11],
        tspan=(0.0, 0.1),
-       atol=1.0e-11)
+       atol=1.0e-10)
     # Ensure that we do not have excessive memory allocations
     # (e.g., from type instabilities)
     let
