Skip to content

Commit

Permalink
[NDTensors] Get more Array storage functionality working (#1222)
Browse files Browse the repository at this point in the history
  • Loading branch information
mtfishman authored Oct 30, 2023
1 parent 74095ef commit 690b219
Show file tree
Hide file tree
Showing 26 changed files with 622 additions and 143 deletions.
29 changes: 25 additions & 4 deletions NDTensors/src/NDTensors.jl
Original file line number Diff line number Diff line change
Expand Up @@ -132,11 +132,32 @@ include("empty/adapt.jl")

#####################################
# Array Tensor (experimental)
# TODO: Move to `Experimental` module.
#
include("arraytensor/arraytensor.jl")
include("arraytensor/array.jl")
include("arraytensor/blocksparsearray.jl")
include("arraystorage/arraystorage/storage/arraystorage.jl")
include("arraystorage/arraystorage/storage/conj.jl")
include("arraystorage/arraystorage/storage/permutedims.jl")
include("arraystorage/arraystorage/storage/contract.jl")

include("arraystorage/arraystorage/tensor/arraystorage.jl")
include("arraystorage/arraystorage/tensor/zeros.jl")
include("arraystorage/arraystorage/tensor/indexing.jl")
include("arraystorage/arraystorage/tensor/permutedims.jl")
include("arraystorage/arraystorage/tensor/mul.jl")
include("arraystorage/arraystorage/tensor/contract.jl")
include("arraystorage/arraystorage/tensor/qr.jl")
include("arraystorage/arraystorage/tensor/eigen.jl")
include("arraystorage/arraystorage/tensor/svd.jl")

# DiagonalArray storage
include("arraystorage/diagonalarray/tensor/contract.jl")

# BlockSparseArray storage
include("arraystorage/blocksparsearray/storage/contract.jl")

# Combiner storage
include("arraystorage/combiner/storage/contract.jl")

include("arraystorage/combiner/tensor/contract.jl")

#####################################
# Deprecations
Expand Down
4 changes: 2 additions & 2 deletions NDTensors/src/abstractarray/fill.jl
Original file line number Diff line number Diff line change
Expand Up @@ -8,10 +8,10 @@ function generic_randn(
return randn!(rng, data)
end

function generic_zeros(arraytype::Type{<:AbstractArray}, dim::Integer=0)
function generic_zeros(arraytype::Type{<:AbstractArray}, dims...)
arraytype_specified = set_unspecified_parameters(
leaf_parenttype(arraytype), DefaultParameters()
)
ElT = eltype(arraytype_specified)
return fill!(similar(arraytype_specified, dim), zero(ElT))
return fill!(similar(arraytype_specified, dims...), zero(ElT))
end
5 changes: 5 additions & 0 deletions NDTensors/src/abstractarray/similar.jl
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,11 @@ function similar(array::AbstractArray, eltype::Type, dims::Tuple)
return NDTensors.similar(similartype(typeof(array), eltype), dims)
end

# NDTensors.similar
# Single-`Int` length overload: converts the array type to one with the
# requested element type via `similartype`, then allocates through the
# type-based `NDTensors.similar`. Mirrors the `dims::Tuple` method above.
function similar(array::AbstractArray, eltype::Type, dims::Int)
  return NDTensors.similar(similartype(typeof(array), eltype), dims)
end

# NDTensors.similar
similar(array::AbstractArray, dims::Tuple) = NDTensors.similar(typeof(array), dims)

Expand Down
30 changes: 30 additions & 0 deletions NDTensors/src/arraystorage/arraystorage/storage/arraystorage.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
# Used for dispatch to distinguish from Tensors wrapping TensorStorage.
# Remove once TensorStorage is removed.
#
# Union of array types accepted as N-dimensional "array storage" for
# tensors: plain `Array`s plus common non-copying wrappers of them.
const ArrayStorage{T,N} = Union{
  Array{T,N},
  ReshapedArray{T,N},
  SubArray{T,N},
  PermutedDimsArray{T,N},
  StridedView{T,N},
  BlockSparseArray{T,N},
}

# Matrix-shaped storage: 2-dimensional `ArrayStorage` plus the lazy and
# structured matrix wrappers from `LinearAlgebra` (these wrappers are
# matrix-only, hence the separate 2D union).
const MatrixStorage{T} = Union{
  ArrayStorage{T,2},
  Transpose{T},
  Adjoint{T},
  Symmetric{T},
  Hermitian{T},
  UpperTriangular{T},
  LowerTriangular{T},
  UnitUpperTriangular{T},
  UnitLowerTriangular{T},
  Diagonal{T},
}

# Any array-like storage: matrix-shaped or general N-dimensional.
const MatrixOrArrayStorage{T} = Union{MatrixStorage{T},ArrayStorage{T}}

# TODO: Delete once `Dense` is removed.
# Convert a legacy `DenseTensor` (flat `Dense` storage) into a tensor
# wrapping a plain `Array`, by reshaping the flat data vector to the
# tensor's dimensions. `reshape` shares the data (no copy).
function to_arraystorage(x::DenseTensor)
  return tensor(reshape(data(x), size(x)), inds(x))
end
2 changes: 2 additions & 0 deletions NDTensors/src/arraystorage/arraystorage/storage/conj.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# Alias-aware `conj`.
# Generic fallback: produce a conjugated result regardless of the
# requested alias style.
function conj(as::AliasStyle, A::AbstractArray)
  return conj(A)
end

# Fast path: conjugation of a real-element `Array` is the identity, so
# when aliasing is allowed the input array itself can be returned
# without copying.
function conj(as::AllowAlias, A::Array{<:Real})
  return A
end
Original file line number Diff line number Diff line change
@@ -1,6 +1,3 @@
# Combiner
promote_rule(::Type{<:Combiner}, arraytype::Type{<:MatrixOrArrayStorage}) = arraytype

# Generic AbstractArray code
function contract(
array1::MatrixOrArrayStorage,
Expand Down Expand Up @@ -43,26 +40,21 @@ function contraction_output(
end

# Required interface for specific AbstractArray types
# TODO: Define `default_α` and `default_β`.
# TODO: Define this as a `ttgt` or `matricize` backend.
function contract!(
arrayR::MatrixOrArrayStorage,
labelsR,
array_dest::MatrixOrArrayStorage,
labels_dest,
array1::MatrixOrArrayStorage,
labels1,
array2::MatrixOrArrayStorage,
labels2,
α=one(eltype(array_dest)),
β=zero(eltype(array_dest));
)
props = ContractionProperties(labels1, labels2, labelsR)
compute_contraction_properties!(props, array1, array2, arrayR)
props = ContractionProperties(labels1, labels2, labels_dest)
compute_contraction_properties!(props, array1, array2, array_dest)
# TODO: Change this to just `contract!`, or maybe `contract_ttgt!`?
_contract!(arrayR, array1, array2, props)
return arrayR
end

function permutedims!(
output_array::MatrixOrArrayStorage, array::MatrixOrArrayStorage, perm, f::Function
)
output_array = permutedims!!(
leaf_parenttype(output_array), output_array, leaf_parenttype(array), array, perm, f
)
return output_array
_contract!(array_dest, array1, array2, props, α, β)
return array_dest
end
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Permute `array` by `perm` into `output_array`, combining with the
# existing destination values via `f` (see `permutedims!!` for the exact
# combination semantics). Dispatches on the leaf parent types so that
# wrapped array types select the appropriate backend method.
function permutedims!(
  output_array::MatrixOrArrayStorage, array::MatrixOrArrayStorage, perm, f::Function
)
  result = permutedims!!(
    leaf_parenttype(output_array), output_array, leaf_parenttype(array), array, perm, f
  )
  return result
end
22 changes: 22 additions & 0 deletions NDTensors/src/arraystorage/arraystorage/tensor/arraystorage.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
# Tensor aliases for tensors whose storage is a plain array (as opposed
# to the legacy `TensorStorage` types).
const ArrayStorageTensor{T,N,S,I} = Tensor{T,N,S,I} where {S<:ArrayStorage{T,N}}
const MatrixStorageTensor{T,S,I} = Tensor{T,2,S,I} where {S<:MatrixStorage{T}}
const MatrixOrArrayStorageTensor{T,S,I} =
  Tensor{T,N,S,I} where {N,S<:MatrixOrArrayStorage{T}}

# Convenience constructor: wrap raw storage without aliasing by default.
# BUG FIX: the argument is the *storage* being wrapped, so it must be
# annotated `MatrixOrArrayStorage` (not `MatrixOrArrayStorageTensor`,
# which would require passing an already-constructed tensor and made
# this method unusable for its purpose; compare the `AliasStyle` method
# below, which it forwards to).
function Tensor(storage::MatrixOrArrayStorage, inds::Tuple)
  return Tensor(NeverAlias(), storage, inds)
end

# Alias-style constructor: computes the full `Tensor` type parameters
# from the storage and index types.
function Tensor(as::AliasStyle, storage::MatrixOrArrayStorage, inds::Tuple)
  return Tensor{eltype(storage),length(inds),typeof(storage),typeof(inds)}(
    as, storage, inds
  )
end

# The tensor's data is the wrapped array itself (no conversion).
array(tensor::MatrixOrArrayStorageTensor) = storage(tensor)

# Linear algebra (matrix algebra)
# TODO: Remove `Base.`? Is it imported?
# Adjoint of a matrix-storage tensor: adjoint the storage (lazy wrapper)
# and reverse the index order to match the transposed axes.
function Base.adjoint(tens::MatrixStorageTensor)
  return tensor(adjoint(storage(tens)), reverse(inds(tens)))
end
31 changes: 31 additions & 0 deletions NDTensors/src/arraystorage/arraystorage/tensor/contract.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
# TODO: Just call `contraction_output(storage(tensor1), storage(tensor2), indsR)`
# Allocate an output tensor with indices `indsR` for the contraction of
# `tensor1` with `tensor2`. The output tensor type is computed from the
# input tensor types via `contraction_output_type`.
function contraction_output(
  tensor1::MatrixOrArrayStorageTensor, tensor2::MatrixOrArrayStorageTensor, indsR
)
  tensortypeR = contraction_output_type(typeof(tensor1), typeof(tensor2), indsR)
  return NDTensors.similar(tensortypeR, indsR)
end

# TODO: Define `default_α` and `default_β`.
# Tensor-level contraction: unwraps the storage of all three tensors and
# forwards to the storage-level `contract!`. `α` and `β` are presumably
# BLAS-style scaling factors (result = α⋅contraction + β⋅dest) — confirm
# against the storage-level `_contract!` implementation. Defaults
# (α = 1, β = 0) overwrite the destination.
function contract!(
  tensor_dest::MatrixOrArrayStorageTensor,
  labels_dest,
  tensor1::MatrixOrArrayStorageTensor,
  labels1,
  tensor2::MatrixOrArrayStorageTensor,
  labels2,
  α=one(eltype(tensor_dest)),
  β=zero(eltype(tensor_dest));
)
  contract!(
    storage(tensor_dest),
    labels_dest,
    storage(tensor1),
    labels1,
    storage(tensor2),
    labels2,
    α,
    β,
  )
  return tensor_dest
end
74 changes: 74 additions & 0 deletions NDTensors/src/arraystorage/arraystorage/tensor/eigen.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
# TODO: Rewrite this function to be more modern:
# 1. List keyword arguments in function signature.
# 2. Output `Spectrum` as a keyword argument that gets overwritten.
# 3. Make this into two layers, one that handles indices and one that works with `AbstractMatrix`.
#
# Truncated eigendecomposition of a Hermitian array-storage tensor.
# Returns `(D, V, spec)`: `D` is a diagonal tensor of eigenvalues sorted
# by decreasing magnitude, `V` holds the corresponding eigenvectors, and
# `spec` is a `Spectrum` recording the kept eigenvalues and truncation
# error. Throws `ArgumentError` if the matrix contains NaNs or Infs.
function eigen(
  T::Hermitian{<:Any,<:ArrayStorageTensor};
  maxdim=nothing,
  mindim=1,
  cutoff=nothing,
  use_absolute_cutoff=false,
  use_relative_cutoff=true,
  # These are getting passed erroneously.
  # TODO: Make sure they don't get passed down
  # to here.
  which_decomp=nothing,
  tags=nothing,
  eigen_perturbation=nothing,
  normalize=nothing,
  ishermitian=nothing,
  ortho=nothing,
  svd_alg=nothing,
)
  # Truncate only when the caller requested a rank or accuracy limit.
  truncate = !isnothing(maxdim) || !isnothing(cutoff)
  # TODO: Define `default_maxdim(T)`.
  maxdim = isnothing(maxdim) ? minimum(dims(T)) : maxdim
  # TODO: Define `default_cutoff(T)`.
  cutoff = isnothing(cutoff) ? zero(eltype(T)) : cutoff

  matrixT = matrix(T)
  ## TODO Here I am calling parent to ensure that the correct `any` function
  ## is invoked for non-cpu matrices
  if any(!isfinite, parent(matrixT))
    throw(
      ArgumentError(
        "Trying to perform the eigendecomposition of a matrix containing NaNs or Infs"
      ),
    )
  end

  DM, VM = eigen(matrixT)

  # Sort by largest to smallest eigenvalues
  # TODO: Replace `cpu` with `leaf_parenttype` dispatch.
  p = sortperm(cpu(DM); rev=true, by=abs)
  DM = DM[p]
  VM = VM[:, p]

  if truncate
    # `truncate!!` drops eigenvalues per the cutoff/maxdim policy and
    # reports the resulting truncation error; eigenvector columns are
    # trimmed to match the kept eigenvalues.
    DM, truncerr, _ = truncate!!(
      DM; mindim, maxdim, cutoff, use_absolute_cutoff, use_relative_cutoff
    )
    dD = length(DM)
    if dD < size(VM, 2)
      VM = VM[:, 1:dD]
    end
  else
    dD = length(DM)
    truncerr = 0.0
  end
  spec = Spectrum(DM, truncerr)

  # Make the new indices to go onto V
  # TODO: Put in a separate function, such as
  # `rewrap_inds` or something like that.
  indstype = typeof(inds(T))
  l = eltype(indstype)(dD)
  r = eltype(indstype)(dD)
  Vinds = indstype((dag(ind(T, 2)), dag(r)))
  Dinds = indstype((l, dag(r)))
  V = tensor(VM, Vinds)
  # TODO: Replace with `DiagonalArray`.
  D = tensor(Diag(DM), Dinds)
  return D, V, spec
end
8 changes: 8 additions & 0 deletions NDTensors/src/arraystorage/arraystorage/tensor/indexing.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Scalar element access: delegate directly to the wrapped storage array.
function getindex(tensor::MatrixOrArrayStorageTensor, I::Integer...)
  return getindex(storage(tensor), I...)
end

# Scalar element assignment: write through to the wrapped storage array,
# then return the tensor itself (standard `setindex!` convention here).
function setindex!(tensor::MatrixOrArrayStorageTensor, v, I::Integer...)
  setindex!(storage(tensor), v, I...)
  return tensor
end
6 changes: 6 additions & 0 deletions NDTensors/src/arraystorage/arraystorage/tensor/mul.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# In-place matrix multiplication for matrix-storage tensors: computes
# the product of `A` and `B` into `C` by forwarding to `mul!` on the
# unwrapped storage, then returns `C`.
function LinearAlgebra.mul!(
  C::MatrixStorageTensor, A::MatrixStorageTensor, B::MatrixStorageTensor
)
  dest = storage(C)
  mul!(dest, storage(A), storage(B))
  return C
end
9 changes: 9 additions & 0 deletions NDTensors/src/arraystorage/arraystorage/tensor/permutedims.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Tensor-level permutation-with-combining-function: unwrap both tensors
# and apply the storage-level `permutedims!`, which writes the permuted
# (and `f`-combined) result into the destination's storage.
function permutedims!(
  dest::MatrixOrArrayStorageTensor, src::MatrixOrArrayStorageTensor, perm, f::Function
)
  permutedims!(storage(dest), storage(src), perm, f)
  return dest
end
10 changes: 10 additions & 0 deletions NDTensors/src/arraystorage/arraystorage/tensor/qr.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# QR factorization of an array-storage tensor. Returns `(Q, R)` as
# tensors sharing a new inner index.
function qr(A::ArrayStorageTensor)
  # Factorize the underlying storage matrix.
  Qm, Rm = qr(storage(A))
  # Materialize the compact Q factor as the same array type as R so it
  # can be wrapped in a tensor.
  Qm = convert(typeof(Rm), Qm)
  i, j = inds(A)
  # The inner index has the size of the smaller dimension (thin QR), so
  # clone whichever of the two indices is smaller via `sim`.
  shared = sim(size(A, 1) < size(A, 2) ? i : j)
  Qt = tensor(Qm, (i, shared))
  Rt = tensor(Rm, (dag(shared), j))
  return Qt, Rt
end
Loading

0 comments on commit 690b219

Please sign in to comment.