From d979835d7206f65d6f7fa64c1bbd254210662be6 Mon Sep 17 00:00:00 2001 From: mtfishman Date: Thu, 20 Jun 2024 12:34:59 -0400 Subject: [PATCH] [NDTensorsMappedArraysExt] Support for using MappedArrays as data of ITensors --- NDTensors/Project.toml | 2 ++ .../NDTensorsMappedArraysExt.jl | 25 +++++++++++++++++++ .../src/base/abstractarray.jl | 8 ++++++ 3 files changed, 35 insertions(+) create mode 100644 NDTensors/ext/NDTensorsMappedArraysExt/NDTensorsMappedArraysExt.jl diff --git a/NDTensors/Project.toml b/NDTensors/Project.toml index a4e9bd391b..5fac791a5d 100644 --- a/NDTensors/Project.toml +++ b/NDTensors/Project.toml @@ -36,6 +36,7 @@ AMDGPU = "21141c5a-9bdb-4563-92ae-f87d6854732e" CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527" HDF5 = "f67ccb44-e63f-5c2f-98bd-6dc0ccc4ba2f" +MappedArrays = "dbb5928d-eab1-5f90-85c2-b9b0edb7c900" Metal = "dde4c033-4e86-420c-a63e-0dd931031962" Octavian = "6fd5a793-0b7e-452c-907f-f8bfe9c57db4" TBLIS = "48530278-0828-4a49-9772-0f3830dfa1e9" @@ -46,6 +47,7 @@ NDTensorsAMDGPUExt = ["AMDGPU", "GPUArraysCore"] NDTensorsCUDAExt = ["CUDA", "GPUArraysCore"] NDTensorsGPUArraysCoreExt = "GPUArraysCore" NDTensorsHDF5Ext = "HDF5" +NDTensorsMappedArraysExt = ["MappedArrays"] NDTensorsMetalExt = ["GPUArraysCore", "Metal"] NDTensorsOctavianExt = "Octavian" NDTensorsTBLISExt = "TBLIS" diff --git a/NDTensors/ext/NDTensorsMappedArraysExt/NDTensorsMappedArraysExt.jl b/NDTensors/ext/NDTensorsMappedArraysExt/NDTensorsMappedArraysExt.jl new file mode 100644 index 0000000000..74372f3f2c --- /dev/null +++ b/NDTensors/ext/NDTensorsMappedArraysExt/NDTensorsMappedArraysExt.jl @@ -0,0 +1,25 @@ +module NDTensorsMappedArraysExt +using MappedArrays: AbstractMappedArray +using NDTensors: NDTensors +function NDTensors.similar(arraytype::Type{<:AbstractMappedArray}, dims::Tuple{Vararg{Int}}) + return similar(Array{eltype(arraytype)}, dims) +end +function 
NDTensors.similartype(storagetype::Type{<:AbstractMappedArray}) + return Array{eltype(storagetype),ndims(storagetype)} +end +function NDTensors.similartype( + storagetype::Type{<:AbstractMappedArray}, dims::Tuple{Vararg{Int}} +) + return Array{eltype(storagetype),length(dims)} +end + +using MappedArrays: ReadonlyMappedArray +using NDTensors: AllowAlias +# It is a bit unfortunate that we have to define this; it fixes an ambiguity +# error with MappedArrays. +function (arraytype::Type{ReadonlyMappedArray{T,N,A,F}} where {T,N,A<:AbstractArray,F})( + ::AllowAlias, a::AbstractArray +) + return a +end +end diff --git a/NDTensors/src/lib/TypeParameterAccessors/src/base/abstractarray.jl b/NDTensors/src/lib/TypeParameterAccessors/src/base/abstractarray.jl index aa28c6149b..57657f70f4 100644 --- a/NDTensors/src/lib/TypeParameterAccessors/src/base/abstractarray.jl +++ b/NDTensors/src/lib/TypeParameterAccessors/src/base/abstractarray.jl @@ -68,6 +68,14 @@ end return set_type_parameter(type, eltype, param) end +# These are generic fallback definitions. By convention, +# these positions hold for most `AbstractArray` subtypes, +# though that is not guaranteed for every subtype; it is +# convenient to define them so that more operations +# "just work" on most AbstractArrays. +position(type::Type{<:AbstractArray}, ::typeof(eltype)) = Position(1) +position(type::Type{<:AbstractArray}, ::typeof(ndims)) = Position(2) + for wrapper in [:PermutedDimsArray, :(Base.ReshapedArray), :SubArray] @eval begin position(type::Type{<:$wrapper}, ::typeof(eltype)) = Position(1)