diff --git a/NDTensors/src/lib/BlockSparseArrays/ext/BlockSparseArraysGradedAxesExt/test/runtests.jl b/NDTensors/src/lib/BlockSparseArrays/ext/BlockSparseArraysGradedAxesExt/test/runtests.jl
index e6b9955a77..67cf96de00 100644
--- a/NDTensors/src/lib/BlockSparseArrays/ext/BlockSparseArraysGradedAxesExt/test/runtests.jl
+++ b/NDTensors/src/lib/BlockSparseArrays/ext/BlockSparseArraysGradedAxesExt/test/runtests.jl
@@ -205,8 +205,8 @@ const elts = (Float32, Float64, Complex{Float32}, Complex{Float64})
   end

   # Test case when all axes are dual.
-  @testset "dual BlockedOneTo" begin
-    r = gradedrange([U1(0) => 2, U1(1) => 2])
+  @testset "dual GradedOneTo" begin
+    r = gradedrange([U1(-1) => 2, U1(1) => 2])
     a = BlockSparseArray{elt}(dual(r), dual(r))
     @views for i in [Block(1, 1), Block(2, 2)]
       a[i] = randn(elt, size(a[i]))
diff --git a/NDTensors/src/lib/GradedAxes/src/gradedunitrange.jl b/NDTensors/src/lib/GradedAxes/src/gradedunitrange.jl
index 7f951b54dd..036efd9c0f 100644
--- a/NDTensors/src/lib/GradedAxes/src/gradedunitrange.jl
+++ b/NDTensors/src/lib/GradedAxes/src/gradedunitrange.jl
@@ -53,7 +53,7 @@ struct GradedOneTo{T,BlockLasts<:Vector{T}} <: AbstractGradedUnitRange{T,BlockLasts}
   end
 end

-function Base.show(io::IO, mimetype::MIME"text/plain", g::AbstractGradedUnitRange)
+function Base.show(io::IO, ::MIME"text/plain", g::AbstractGradedUnitRange)
   v = map(b -> label(b) => unlabel(b), blocks(g))
   println(io, typeof(g))
   return print(io, join(repr.(v), '\n'))