Got multidim polynomials working. Default constructed for bitmaps of arbitrary dimension. Refactored tests.
JoeyT1994 committed Apr 10, 2024
1 parent 2d0678c commit 37a7ca5
Showing 10 changed files with 193 additions and 123 deletions.
43 changes: 23 additions & 20 deletions examples/2d_laplace_solver.jl
@@ -1,21 +1,12 @@
using Test
using ITensorNumericalAnalysis

using Graphs: SimpleGraph, uniform_tree, binary_tree, random_regular_graph, is_tree
using NamedGraphs:
NamedGraph,
named_grid,
vertices,
named_comb_tree,
rename_vertices,
random_bfs_tree,
undirected_graph
using Graphs: SimpleGraph, uniform_tree
using NamedGraphs: NamedGraph
using ITensors: ITensors, Index, siteinds, dim, tags, replaceprime!, MPO, MPS, inner
using ITensorNetworks: ITensorNetwork, dmrg, ttn, maxlinkdim
using Dictionaries: Dictionary
using SplitApplyCombine: group
using Random: seed!
using Distributions: Uniform

using UnicodePlots

@@ -24,23 +15,34 @@ seed!(1234)
L = 14
g = NamedGraph(SimpleGraph(uniform_tree(L)))

vertex_to_dimension_map = Dictionary(vertices(g), [(v[1] % 2) + 1 for v in vertices(g)])
vertex_to_bit_map = Dictionary(vertices(g), [ceil(Int64, v[1] * 0.5) for v in vertices(g)])
bit_map = BitMap(vertex_to_bit_map, vertex_to_dimension_map)
bit_map = BitMap(g; map_dimension=2)
s = siteinds(g, bit_map)

ψ_fxy = 0.1 * rand_itn(s, bit_map; link_space=2)
∇ = laplacian_operator(s, bit_map; scale=false)
∇ = truncate(∇; cutoff=1e-12)
@show maxlinkdim(∇)
∇ = truncate(∇; cutoff=1e-8)
println("2D Laplacian constructed for this tree, bond dimension is $(maxlinkdim(∇))")

dmrg_kwargs = (nsweeps=25, normalize=true, maxdim=20, cutoff=1e-12, outputlevel=1, nsites=2)
init_energy =
inner(ttn(itensornetwork(ψ_fxy))', ∇, ttn(itensornetwork(ψ_fxy))) /
inner(ttn(itensornetwork(ψ_fxy)), ttn(itensornetwork(ψ_fxy)))
println(
"Starting DMRG to find eigensolution of 2D Laplace operator. Initial energy is $init_energy",
)

dmrg_kwargs = (nsweeps=10, normalize=true, maxdim=15, cutoff=1e-10, outputlevel=1, nsites=2)
ϕ_fxy = dmrg(∇, ttn(itensornetwork(ψ_fxy)); dmrg_kwargs...)
ϕ_fxy = ITensorNetworkFunction(ITensorNetwork(ϕ_fxy), bit_map)

ϕ_fxy = truncate(ϕ_fxy; cutoff=1e-8)

final_energy = inner(ttn(itensornetwork(ϕ_fxy))', ∇, ttn(itensornetwork(ϕ_fxy)))
#Smallest eigenvalue in this case should be -8
@show final_energy
println(
"Finished DMRG. Found solution of energy $final_energy with bond dimension $(maxlinkdim(ϕ_fxy))",
)
println(
"Note that in 2D, the discrete laplacian with a step size of 1 has a lowest eigenvalue of -8.",
)

n_grid = 100
x_vals, y_vals = grid_points(bit_map, n_grid, 1), grid_points(bit_map, n_grid, 2)
@@ -51,4 +53,5 @@ for (i, x) in enumerate(x_vals)
end
end

show(heatmap(vals))
println("Here is the heatmap of the 2D function")
show(heatmap(vals; xfact=0.01, yfact=0.01, xoffset=0, yoffset=0, colormap=:inferno))
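
One way to see the quoted −8 is from the one-dimensional stencil. A minimal numerical sketch of that bound, assuming the standard unit-step, periodic second-difference convention (the script above does not state which convention `laplacian_operator` uses):

using LinearAlgebra: Symmetric, diagm, eigvals
# 1D second-difference matrix with unit step and periodic wrap-around;
# its eigenvalues 2cos(k) - 2 lie in [-4, 0], so the 2D Laplacian
# (a sum of two such commuting terms) is bounded below by -8.
n = 64
D = diagm(0 => fill(-2.0, n), 1 => fill(1.0, n - 1), -1 => fill(1.0, n - 1))
D[1, n] = D[n, 1] = 1.0  # periodic boundary terms
@show minimum(eigvals(Symmetric(D)))  # ≈ -4.0; doubling for the second dimension gives -8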
32 changes: 32 additions & 0 deletions examples/construct_multi_dimensional_function.jl
@@ -0,0 +1,32 @@
using ITensorNumericalAnalysis

using Graphs: SimpleGraph, uniform_tree
using NamedGraphs: NamedGraph
using ITensors: siteinds
using Random: Random

L = 12
Random.seed!(1234)
g = NamedGraph(SimpleGraph(uniform_tree(L)))
bit_map = BitMap(g; map_dimension=3)
s = siteinds(g, bit_map)

println(
"Constructing the 3D function f(x,y,z) = x³(y + y²) + cosh(πz) as a tensor network on a randomly chosen tree with $L vertices",
)
ψ_fx = poly_itn(s, bit_map, [0.0, 0.0, 0.0, 1.0]; dimension=1)
ψ_fy = poly_itn(s, bit_map, [0.0, 1.0, 1.0, 0.0]; dimension=2)
ψ_fz = cosh_itn(s, bit_map; k=Float64(pi), dimension=3)
ψxyz = ψ_fx * ψ_fy + ψ_fz

ψxyz = truncate(ψxyz; cutoff=1e-12)
println("Maximum bond dimension of the network is $(maxlinkdim(ψxyz))")

x, y, z = 0.125, 0.625, 0.5
fxyz_xyz = calculate_fxyz(ψxyz, [x, y, z])
println(
"Tensor network evaluates the function as $fxyz_xyz at the co-ordinate: (x,y,z) = ($x, $y, $z)",
)
println(
"Actual value of the function is $(x^3 * (y + y^2) + cosh(pi * z)) at the co-ordinate: (x,y,z) = ($x, $y, $z)",
)
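
A small hypothetical extension of the script above, sampling the same `ψxyz` along the z axis with `grid_points` and `calculate_fxyz`; the fixed `x`, `y` values and grid size are illustrative assumptions:

# Hypothetical follow-up: sample f(x, y, z) along z at fixed (x, y),
# reusing `ψxyz` and `bit_map` from the script above.
x, y = 0.25, 0.75
z_vals = grid_points(bit_map, 10, 3)  # 10 representable grid points in dimension 3
for z in z_vals
  println("f($x, $y, $z) ≈ $(calculate_fxyz(ψxyz, [x, y, z]))")
end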
1 change: 1 addition & 0 deletions src/ITensorNumericalAnalysis.jl
@@ -2,6 +2,7 @@ module ITensorNumericalAnalysis

include("bitmap.jl")
include("utils.jl")
include("polynomialutils.jl")
include("itensornetworkfunction.jl")
include("elementary_functions.jl")
include("elementary_operators.jl")
18 changes: 13 additions & 5 deletions src/bitmap.jl
@@ -13,13 +13,21 @@ vertex_digit(bm::BitMap) = bm.vertex_digit
vertex_dimension(bm::BitMap) = bm.vertex_dimension
base(bm::BitMap) = bm.base

default_bit_map(vertices::Vector) = Dictionary(vertices, [i for i in 1:length(vertices)])
function default_dimension_map(vertices::Vector)
return Dictionary(vertices, [1 for i in 1:length(vertices)])
function default_digit_map(vertices::Vector; map_dimension::Int64=1)
return Dictionary(
vertices, [ceil(Int64, i / map_dimension) for (i, v) in enumerate(vertices)]
)
end
function default_dimension_map(vertices::Vector; map_dimension::Int64)
return Dictionary(vertices, [(i % map_dimension) + 1 for (i, v) in enumerate(vertices)])
end

function BitMap(g; base::Int64=default_base())
return BitMap(default_bit_map(vertices(g)), default_dimension_map(vertices(g)), base)
function BitMap(g; base::Int64=default_base(), map_dimension::Int64=1)
return BitMap(
default_digit_map(vertices(g); map_dimension),
default_dimension_map(vertices(g); map_dimension),
base,
)
end
function BitMap(vertex_digit, vertex_dimension; base::Int64=default_base())
return BitMap(vertex_digit, vertex_dimension, base)
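
The new keyword makes the defaults concrete: with `map_dimension = d`, vertex `i` (in enumeration order) is assigned dimension `(i % d) + 1` and digit `ceil(Int, i / d)`. A minimal sketch on a hypothetical four-vertex tree:

using ITensorNumericalAnalysis
using Graphs: SimpleGraph, uniform_tree
using NamedGraphs: NamedGraph

g = NamedGraph(SimpleGraph(uniform_tree(4)))
bit_map = BitMap(g; map_dimension=2)
# For i = 1, 2, 3, 4 the defaults give
#   dimension: (i % 2) + 1      -> 2, 1, 2, 1
#   digit:     ceil(Int, i / 2) -> 1, 1, 2, 2
# i.e. the binary digits of the two continuous variables are interleaved over the tree.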
88 changes: 20 additions & 68 deletions src/elementary_functions.jl
@@ -146,84 +146,26 @@ function sin_itensornetwork(
return ψ1 + ψ2
end

# #FUNCTIONS NEEDED TO IMPLEMENT POLYNOMIALS
# """Exponent on x_i for the tensor Q(x_i) on the tree"""
function f_alpha_beta(α::Vector{Int64}, beta::Int64)
return !isempty(α) ? max(0, beta - 1 - sum(α) + length(α)) : max(0, beta - 1)
end

# """Coefficient on x_i for the tensor Q(x_i) on the tree"""
function _coeff(N::Int64, α::Vector{Int64}, beta)
@assert length(α) == N - 1
return if N == 1
1
else
prod([binomial(f_alpha_beta(α[1:(N - 1 - i)], beta), α[N - i] - 1) for i in 1:(N - 1)])
end
end

"""Constructor for the tensor that sits on a vertex of degree N"""
function Q_N_tensor(
N::Int64, siteind::Index, αind::Vector{Index}, betaind::Index, xivals::Vector{Float64}
)
@assert length(αind) == N - 1
@assert length(xivals) == dim(siteind)
n = dim(betaind) - 1
@assert all(x -> x == n + 1, dim.(αind))

link_dims = [n + 1 for i in 1:N]
dims = vcat([dim(siteind)], link_dims)
Q_N_array = zeros(Tuple(dims))
for (i, xi) in enumerate(xivals)
for j in 0:((n + 1)^(N) - 1)
is = digits(j; base=n + 1, pad=N) + ones(Int64, (N))
f = f_alpha_beta(is[1:(N - 1)], last(is))
Q_N_array[(i, Tuple(is)...)...] = _coeff(N, is[1:(N - 1)], last(is)) * (xi^f)
end
end

return ITensor(Q_N_array, siteind, αind, betaind)
end

# """Given a tree find the edge coming from the vertex v which is directed towards `root_vertex`"""
function get_edge_toward_root(g::AbstractGraph, v, root_vertex)
@assert is_tree(g)
@assert v != root_vertex

for vn in neighbors(g, v)
if length(a_star(g, vn, root_vertex)) < length(a_star(g, v, root_vertex))
return NamedEdge(v => vn)
end
end
end

"""Build a representation of the function f(x) = sum_{i=0}^{n}coeffs[i+1]*(x)^{i} on the graph structure specified
by indsnetwork"""
function polynomial_itensornetwork(
s::IndsNetwork, bit_map, coeffs::Vector{Float64}; dimension::Int64=default_dimension()
)
n = length(coeffs) - 1

n = length(coeffs)
#First treeify the index network (ignore edges that form loops)
g = underlying_graph(s)
g_tree = undirected_graph(random_bfs_tree(g, first(vertices(g))))
s_tree = add_edges(rem_edges(s, edges(g)), edges(g_tree))

dimension_vertices = vertices(bit_map, dimension)
source_vertex = first(dimension_vertices)
ψ = const_itensornetwork(s_tree, bit_map; linkdim=n)

#Pick a root

#Need the root vertex to be in the dimension vertices at the moment, should be a way around
root_vertices_dim = filter(v -> v ∈ dimension_vertices, leaf_vertices(s_tree))
@assert !isempty(root_vertices_dim)
root_vertex = first(root_vertices_dim)
ψ = const_itensornetwork(s_tree, bit_map; linkdim=n + 1)
#Place the Q_n tensors, making sure we get the right index pointing towards the root
for v in dimension_vertices
siteindex = s_tree[v][]
if v != root_vertex
e = get_edge_toward_root(g_tree, v, root_vertex)
betaindex = first(commoninds(ψ, e))
if v != source_vertex
e = get_edge_toward_vertex(g_tree, v, source_vertex)
betaindex = only(commoninds(ψ, e))
alphas = setdiff(inds(ψ[v]), Index[siteindex, betaindex])
ψ[v] = Q_N_tensor(
length(neighbors(g_tree, v)),
@@ -232,20 +174,30 @@ function polynomial_itensornetwork(
betaindex,
[bit_value_to_scalar(bit_map, v, i) for i in 0:(dim(siteindex) - 1)],
)
elseif v == root_vertex
betaindex = Index(n + 1, "DummyInd")
elseif v == source_vertex
betaindex = Index(n, "DummyInd")
alphas = setdiff(inds(ψ[v]), Index[siteindex])
ψv = Q_N_tensor(
2,
length(neighbors(g_tree, v)) + 1,
siteindex,
alphas,
betaindex,
[bit_value_to_scalar(bit_map, v, i) for i in 0:(dim(siteindex) - 1)],
)
ψ[v] = ψv * ITensor(reverse(coeffs), betaindex)
ψ[v] = ψv * ITensor(coeffs, betaindex)
end
end

#Put the transfer tensors in, these are special tensors that
# go on the digits (sites) that don't correspond to the desired dimension
for v in setdiff(vertices(ψ), dimension_vertices)
siteindex = s_tree[v][]
e = get_edge_toward_vertex(g_tree, v, source_vertex)
betaindex = only(commoninds(ψ, e))
alphas = setdiff(inds(ψ[v]), Index[siteindex, betaindex])
ψ[v] = transfer_tensor(siteindex, betaindex, alphas)
end

return ψ
end

6 changes: 6 additions & 0 deletions src/itensornetworkfunction.jl
@@ -75,3 +75,9 @@ function calculate_fx(fitn::ITensorNetworkFunction, x::Float64)
@assert dimension(fitn) == 1
return calculate_fxyz(fitn, [x], [1])
end

function ITensorNetworks.truncate(fitn::ITensorNetworkFunction; kwargs...)
@assert is_tree(fitn)
ψ = truncate(ttn(itensornetwork(fitn)); kwargs...)
return ITensorNetworkFunction(ITensorNetwork(ψ), bit_map(fitn))
end
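
A minimal usage sketch of the new `truncate` overload, assuming `s` and `bit_map` are built as in the examples above and that `maxlinkdim` applies to these networks as the example scripts suggest:

# Hypothetical usage: truncate an ITensorNetworkFunction directly,
# without manually round-tripping through a TTN.
ψ = poly_itn(s, bit_map, [1.0, 2.0, 3.0]; dimension=1)  # f(x) = 1 + 2x + 3x^2
ψ = truncate(ψ; cutoff=1e-12)
@show maxlinkdim(ψ)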
78 changes: 78 additions & 0 deletions src/polynomialutils.jl
@@ -0,0 +1,78 @@
using ITensors: Index, ITensor, dim

"""Exponent on x_i for the tensor Q(x_i) on the tree"""
function f_alpha_beta(α::Vector{Int64}, beta::Int64)
return !isempty(α) ? max(0, beta - sum(α)) : max(0, beta)
end

"""Coefficient on x_i for the tensor Q(x_i) on the tree"""
function _coeff(N::Int64, α::Vector{Int64}, beta)
@assert length(α) == N - 1
return if N == 1
1
else
prod([binomial(f_alpha_beta(α[1:(N - 1 - i)], beta), α[N - i]) for i in 1:(N - 1)])
end
end

"""Constructor for the tensor that sits on a vertex of degree N"""
function Q_N_tensor(
N::Int64, siteind::Index, αind::Vector{Index}, betaind::Index, xivals::Vector{Float64}
)
@assert length(αind) == N - 1
@assert length(xivals) == dim(siteind)
n = dim(betaind) - 1
@assert all(x -> x == n + 1, dim.(αind))

link_dims = [n + 1 for i in 1:N]
dims = vcat([dim(siteind)], link_dims)
Q_N_array = zeros(Tuple(dims))
for (i, xi) in enumerate(xivals)
for j in 0:((n + 1)^(N) - 1)
is = digits(j; base=n + 1, pad=N)
f = f_alpha_beta(is[1:(N - 1)], last(is))
Q_N_array[(i, Tuple(is + ones(Int64, (N)))...)...] =
_coeff(N, is[1:(N - 1)], last(is)) * (xi^f)
end
end

return ITensor(Q_N_array, siteind, αind, betaind)
end

"""Tensor for transferring the alpha index (beta ind here) of a QN tensor (defined above) across multiple inds (alpha_inds)"""
#Needed for building multi-dimensional polynomials
function transfer_tensor(phys_ind::Index, beta_ind::Index, alpha_inds::Vector)
virt_inds = vcat(beta_ind, alpha_inds)
inds = vcat(phys_ind, virt_inds)
virt_dims = dim.(virt_inds)
@assert allequal(virt_dims)
dims = vcat(dim(phys_ind), virt_dims)
N = length(alpha_inds)
T_array = zeros(Tuple(dims))
for i in 0:(dim(phys_ind) - 1)
for j in 0:(dim(beta_ind) - 1)
if !isempty(alpha_inds)
for k in 0:((first(virt_dims))^(N) - 1)
is = digits(k; base=first(virt_dims), pad=N)
if sum(is) == j
T_array[(i + 1, j + 1, Tuple(is + ones(Int64, (N)))...)...] = 1.0
end
end
else
T_array[i + 1, j + 1] = j == 0 ? 1 : 0
end
end
end
return ITensor(T_array, phys_ind, beta_ind, alpha_inds)
end

"""Given a tree find the edge coming from the vertex v which is directed towards `source_vertex`"""
function get_edge_toward_vertex(g::AbstractGraph, v, source_vertex)
for vn in neighbors(g, v)
if length(a_star(g, vn, source_vertex)) < length(a_star(g, v, source_vertex))
return NamedEdge(v => vn)
end
end

return error("Couldn't find edge. Perhaps graph is not a tree or not connected.")
end
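
An end-to-end sanity check of the polynomial construction; a sketch using only calls that appear in this commit, with an arbitrary graph size and arbitrary coefficients:

using ITensorNumericalAnalysis
using Graphs: SimpleGraph, uniform_tree
using NamedGraphs: NamedGraph
using ITensors: siteinds

g = NamedGraph(SimpleGraph(uniform_tree(8)))
bit_map = BitMap(g)                    # default single-dimension bitmap
s = siteinds(g, bit_map)

coeffs = [1.0, 2.0, 3.0]               # f(x) = 1 + 2x + 3x^2
ψ = poly_itn(s, bit_map, coeffs; dimension=1)
@show calculate_fx(ψ, 0.5)             # should be ≈ 2.75 = 1 + 2(0.5) + 3(0.25)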
11 changes: 11 additions & 0 deletions test/test_examples.jl
@@ -0,0 +1,11 @@
@eval module $(gensym())
using ITensorNumericalAnalysis: ITensorNumericalAnalysis
using Test: @testset

@testset "Test examples" begin
example_files = ["2d_laplace_solver.jl", "construct_multi_dimensional_function.jl"]
@testset "Test $example_file" for example_file in example_files
include(joinpath(pkgdir(ITensorNumericalAnalysis), "examples", example_file))
end
end
end