From ae165fb65869e77985214eb04cf895235c8ad362 Mon Sep 17 00:00:00 2001
From: Joey
Date: Thu, 4 Apr 2024 16:44:45 -0400
Subject: [PATCH] Remove Weierstrass example for now. Add Laplace solver example

---
 examples/2d_laplace_solver.jl               |  58 ++++++++
 examples/weierstrass.jl                     | 150 --------------------
 src/TensorNetworkFunctionals.jl             |  14 +-
 src/bitmaps.jl                              |   8 ++
 src/itensornetworks_elementary_functions.jl |   5 +
 src/itensornetworks_elementary_operators.jl |  35 +++--
 src/itensornetworksutils.jl                 |  12 +-
 7 files changed, 112 insertions(+), 170 deletions(-)
 create mode 100644 examples/2d_laplace_solver.jl
 delete mode 100644 examples/weierstrass.jl

diff --git a/examples/2d_laplace_solver.jl b/examples/2d_laplace_solver.jl
new file mode 100644
index 0000000..46f4bf4
--- /dev/null
+++ b/examples/2d_laplace_solver.jl
@@ -0,0 +1,58 @@
+using Test
+using TensorNetworkFunctionals
+
+using Graphs: SimpleGraph, uniform_tree, binary_tree, random_regular_graph, is_tree
+using NamedGraphs:
+  NamedGraph,
+  named_grid,
+  vertices,
+  named_comb_tree,
+  rename_vertices,
+  random_bfs_tree,
+  undirected_graph
+using ITensors: ITensors, Index, siteinds, dim, tags, replaceprime!, MPO, MPS, inner
+using ITensorNetworks:
+  ITensorNetwork,
+  dmrg,
+  TTN,
+  maxlinkdim
+using Dictionaries: Dictionary
+using SplitApplyCombine: group
+using Random: seed!
+using Distributions: Uniform
+
+using UnicodePlots
+
+#Solve the 2D Laplace equation on a random tree
+seed!(1234)
+L = 14
+g = NamedGraph(SimpleGraph(uniform_tree(L)))
+s = siteinds("S=1/2", g)
+
+vertex_to_dimension_map = Dictionary(vertices(g), [(v[1] % 2) + 1 for v in vertices(g)])
+vertex_to_bit_map = Dictionary(vertices(g), [ceil(Int64, v[1] * 0.5) for v in vertices(g)])
+bit_map = BitMap(vertex_to_bit_map, vertex_to_dimension_map)
+
+ψ_fxy = 0.1 * rand_itn(s, bit_map; link_space=2)
+∇ = laplacian_operator(s, bit_map; scale=false)
+∇ = truncate(∇; cutoff=1e-12)
+@show maxlinkdim(∇)
+
+dmrg_kwargs = (nsweeps=25, normalize=true, maxdim=20, cutoff=1e-12, outputlevel=1, nsites=2)
+ϕ_fxy = dmrg(∇, TTN(itensornetwork(ψ_fxy)); dmrg_kwargs...)
+ϕ_fxy = ITensorNetworkFunction(ITensorNetwork(ϕ_fxy), bit_map)
+
+final_energy = inner(TTN(itensornetwork(ϕ_fxy))', ∇, TTN(itensornetwork(ϕ_fxy)))
+#Smallest eigenvalue in this case should be -8
+@show final_energy
+
+n_grid = 100
+x_vals, y_vals = grid_points(bit_map, n_grid, 1), grid_points(bit_map, n_grid, 2)
+vals = zeros((length(x_vals), length(y_vals)))
+for (i, x) in enumerate(x_vals)
+  for (j, y) in enumerate(y_vals)
+    vals[i, j] = real(calculate_fxyz(ϕ_fxy, [x, y]))
+  end
+end
+
+show(heatmap(vals))
diff --git a/examples/weierstrass.jl b/examples/weierstrass.jl
deleted file mode 100644
index cd6d113..0000000
--- a/examples/weierstrass.jl
+++ /dev/null
@@ -1,150 +0,0 @@
-using ITensors
-using ITensorNetworks
-using NamedGraphs
-using EllipsisNotation
-using Graphs
-using LaTeXStrings
-
-using ITensorNetworks: delta_network, contract_inner, distance_to_leaf
-using NamedGraphs: add_edges, random_bfs_tree, rename_vertices
-
-using Plots
-using Random
-
-using Statistics
-
-include("../src/QTT_utils.jl")
-
-plot_font = "Computer Modern"
-default(; fontfamily=plot_font, linewidth=1.5, framestyle=:box, label=nothing, grid=false)
-
-"""ITensorNetwork representation of the weierstrass function (first nterms) over [0,1]: see https://mathworld.wolfram.com/WeierstrassFunction.html"""
-function weierstrass_itn(
-  s::IndsNetwork, vertex_map::Dict, a::Int64, nterms::Int64; svd_kwargs...
-)
-  ψ = sin_itn(s, vertex_map; a=0.0, k=Float64(pi))
-  ψ[first(vertices(ψ))] *= (1.0 / pi)
-  for n in 2:nterms
-    omega = pi * (n^a)
-    ψt = sin_itn(s, vertex_map; a=0.0, k=omega)
-    ψt[first(vertices(ψt))] *= (1.0 / (omega))
-    ψ = ψ + ψt
-    if isa(ψ, TreeTensorNetwork)
-      ψ = truncate(ψ; svd_kwargs...)
-    end
-  end
-
-  return ψ
-end
-
-"""Evaluate Weierstrass(x) (first nterms) see: see https://mathworld.wolfram.com/WeierstrassFunction.html"""
-function weierstrass(a::Int64, nterms::Int64, x::Float64)
-  out = 0
-  for n in 1:nterms
-    omega = pi * (n^a)
-    out += sin(omega * x) / omega
-  end
-
-  return out
-end
-
-function main()
-  L = 31
-  g = named_grid((L, 1))
-  a = 2
-  nterms = 200
-  s = siteinds("S=1/2", g)
-
-  #Define a map which determines a canonical ordering of the vertices of the network
-  vertex_map = Dict(vertices(g) .=> [i for i in 1:length(vertices(g))])
-
-  no_xs = 1000
-  xs = [(1.0 / no_xs) * i for i in 1:no_xs]
-  xevals = [calculate_x(calculate_xis(x, vertex_map), vertex_map) for x in xs]
-  bond_dims = [1, 2, 3, 4, 5, 6]
-  f_actual = weierstrass.(a, nterms, xevals)
-
-  p1 = Plots.plot(xevals, f_actual; legend=false, titlefont=font(10, "Computer Modern"))
-  xlabel!("x")
-  title!(L"W(x) = \sum_{k}\sin(\pi k^{a} x) / (\pi k^{a})")
-
-  mld, vertex_sequence = optimal_vertex_ordering(a, nterms, 1000, 1e-5)
-  #vertex_sequence = [i for i in 1:length(vertices(g))]
-
-  ψ = weierstrass_itn(s, vertex_map, a, nterms; cutoff=1e-16)
-  p2 = Plots.plot(; titlefont=font(10, "Computer Modern"), yaxis=:log)
-  MPS_avg_error = zeros(length(bond_dims))
-  MPS_memory_costs = zeros(length(bond_dims))
-  for (χindex, χ) in enumerate(bond_dims)
-    println("Truncating Down to Bond Dimension $χ")
-    ψtrunc = truncate(ψ; maxdim=χ)
-    f_itn = zeros(no_xs)
-    for (index, x) in enumerate(xs)
-      xis = calculate_xis(x, vertex_map)
-      eval_point = calculate_x(xis, vertex_map)
-      ψproj = get_bitstring_network(ψtrunc, s, xis)
-      f_itn[index] = real(ITensors.contract(ψproj)[])
-    end
-    MPS_avg_error[χindex] = mean(abs.(f_itn - f_actual))
-    MPS_memory_costs[χindex] = Base.summarysize(ψtrunc)
-    Plots.plot!(xevals, abs.(f_itn - f_actual); label="χ = $χ")
-  end
-  xlabel!("x")
-  ylabel!("|W(x) - f(x, χ)|")
-  mld = maxlinkdim(ψ)
-  title!("MPS f(x, χ). χmax = $mld"; fontsize=10)
-
-  Random.seed!(999)
-  g = NamedGraph(Graphs.SimpleGraph(binary_tree(5)))
-  g = rename_vertices(g, Dict(zip(vertices(g), [(v,) for v in vertices(g)])))
-  s = siteinds("S=1/2", g)
-  vertex_map = Dict(vertices(g) .=> vertex_sequence)
-  ψ = weierstrass_itn(s, vertex_map, a, nterms; cutoff=1e-16)
-  p3 = Plots.plot(; titlefont=font(10, "Computer Modern"), yaxis=:log)
-  Tree_avg_error = zeros(length(bond_dims))
-  Tree_memory_costs = zeros(length(bond_dims))
-
-  @show vertex_map
-  d = Dict(vertices(g) => [distance_to_leaf(g, v) for v in vertices(g)])
-  @show d
-
-  for (χindex, χ) in enumerate(bond_dims)
-    println("Truncating Down to Bond Dimension $χ")
-    ψtrunc = truncate(ψ; maxdim=χ)
-    f_itn = zeros(no_xs)
-    for (index, x) in enumerate(xs)
-      xis = calculate_xis(x, vertex_map)
-      eval_point = calculate_x(xis, vertex_map)
-      ψproj = get_bitstring_network(ψtrunc, s, xis)
-      f_itn[index] = real(ITensors.contract(ψproj)[])
-    end
-
-    Tree_avg_error[χindex] = mean(abs.(f_itn - f_actual))
-    Tree_memory_costs[χindex] = Base.summarysize(ψtrunc)
-
-    Plots.plot!(xevals, abs.(f_itn - f_actual); label="χ = $χ")
-  end
-  xlabel!("x")
-  ylabel!("|W(x) - f(x, χ)|")
-  mld = maxlinkdim(ψ)
-  title!("Binary Tree f(x, χ). χmax = $mld")
χmax = $mld") - - p4 = Plots.plot( - MPS_memory_costs / (1e6), - MPS_avg_error; - labels="MPS", - yaxis=:log, - titlefont=font(10, "Computer Modern"), - xguidefontsize=10, - ) - Plots.plot!(Tree_memory_costs / (1e6), Tree_avg_error; labels="Tree", yaxis=:log) - - xlabel!("Memory Cost (Mb)") - title!("Mean(|W(x) - f(x, χ)|)") - - l = @layout [a d; b c] - plot(p1, p4, p2, p3; layout=l) - return Plots.savefig("examples/weierstrass_example_output.pdf") -end - -main() diff --git a/src/TensorNetworkFunctionals.jl b/src/TensorNetworkFunctionals.jl index 1c3b382..b21a0a1 100644 --- a/src/TensorNetworkFunctionals.jl +++ b/src/TensorNetworkFunctionals.jl @@ -6,7 +6,7 @@ include("itensornetworkfunction.jl") include("itensornetworks_elementary_functions.jl") include("itensornetworks_elementary_operators.jl") -export ITensorNetworkFunction +export ITensorNetworkFunction, itensornetwork export BitMap, default_dimension_map, vertex, @@ -14,7 +14,8 @@ export BitMap, calculate_x, calculate_bit_values, dimension, - base + base, + grid_points export const_itensornetwork, exp_itensornetwork, cosh_itensornetwork, @@ -24,10 +25,13 @@ export const_itensornetwork, sin_itensornetwork, get_edge_toward_root, polynomial_itensornetwork, + random_itensornetworkfunction, laplacian_operator, - derivative_operator -export const_itn, poly_itn, cosh_itn, sinh_itn, tanh_itn, exp_itn, sin_itn, cos_itn + derivative_operator, + identity_operator +export const_itn, + poly_itn, cosh_itn, sinh_itn, tanh_itn, exp_itn, sin_itn, cos_itn, rand_itn export calculate_fx, calculate_fxyz -export operate, apply_gx_operator, multiply +export operate, operator, multiply end diff --git a/src/bitmaps.jl b/src/bitmaps.jl index 8010581..cfedb67 100644 --- a/src/bitmaps.jl +++ b/src/bitmaps.jl @@ -125,3 +125,11 @@ end function calculate_bit_values(bm::BitMap, x::Float64; kwargs...) return calculate_bit_values(bm, [x], [1]; kwargs...) end + +function grid_points(bm::BitMap, N::Int64, dimension::Int64) + vals = Vector{Float64} + L = length(vertices(bm, dimension)) + a = round(base(bm)^L / N) + grid_points = [i * (a / base(bm)^L) for i in 0:(N + 1)] + return filter(x -> x <= 1, grid_points) +end diff --git a/src/itensornetworks_elementary_functions.jl b/src/itensornetworks_elementary_functions.jl index e1b2f44..fe7e24a 100644 --- a/src/itensornetworks_elementary_functions.jl +++ b/src/itensornetworks_elementary_functions.jl @@ -248,6 +248,10 @@ function polynomial_itensornetwork( return ITensorNetworkFunction(ψ, bit_map) end +function random_itensornetwork(s::IndsNetwork, bit_map; kwargs...) 
+  return ITensorNetworkFunction(randomITensorNetwork(s; kwargs...), bit_map)
+end
+
 const const_itn = const_itensornetwork
 const poly_itn = polynomial_itensornetwork
 const cosh_itn = cosh_itensornetwork
@@ -256,3 +260,4 @@ const tanh_itn = tanh_itensornetwork
 const exp_itn = exp_itensornetwork
 const sin_itn = sin_itensornetwork
 const cos_itn = cos_itensornetwork
+const rand_itn = random_itensornetwork
diff --git a/src/itensornetworks_elementary_operators.jl b/src/itensornetworks_elementary_operators.jl
index f898695..608da0b 100644
--- a/src/itensornetworks_elementary_operators.jl
+++ b/src/itensornetworks_elementary_operators.jl
@@ -14,7 +14,9 @@ using ITensors: contract
 using ITensorNetworks:
   IndsNetwork, ITensorNetwork, TTN, TreeTensorNetwork, combine_linkinds
 
-function plus_shift_ttn(s::IndsNetwork, bit_map; dimension=default_dimension())
+function plus_shift_ttn(
+  s::IndsNetwork, bit_map; dimension=default_dimension(), boundary_value=[0.0]
+)
   @assert is_tree(s)
   ttn_op = OpSum()
   dim_vertices = vertices(bit_map, dimension)
@@ -63,6 +65,7 @@ function stencil(
   shifts::Vector{Float64},
   delta_power::Int64;
   dimension=default_dimension(),
+  scale=true,
   truncate_kwargs...,
 )
   @assert length(shifts) == 3
@@ -73,24 +76,38 @@ function stencil(
   stencil_op = plus_shift + minus_shift + no_shift
   stencil_op = truncate(stencil_op; truncate_kwargs...)
 
-  for v in vertices(bit_map, dimension)
-    stencil_op[v] = (base(bit_map)^delta_power) * stencil_op[v]
+  if scale
+    for v in vertices(bit_map, dimension)
+      stencil_op[v] = (base(bit_map)^delta_power) * stencil_op[v]
+    end
   end
 
-  return truncate(stencil_op; truncate_kwargs...)
+  return stencil_op
 end
 
-function laplacian_operator(s::IndsNetwork, bit_map; kwargs...)
-  return stencil(s, bit_map, [1.0, -2.0, 1.0], 2; kwargs...)
+function laplacian_operator(
+  s::IndsNetwork, bit_map; dimensions=[i for i in 1:dimension(bit_map)], kwargs...
+)
+  remaining_dims = copy(dimensions)
+  ∇ = stencil(s, bit_map, [1.0, -2.0, 1.0], 2; dimension=first(remaining_dims), kwargs...)
+  popfirst!(remaining_dims)
+  for rd in remaining_dims
+    ∇ += stencil(s, bit_map, [1.0, -2.0, 1.0], 2; dimension=rd, kwargs...)
+  end
+  return ∇
 end
 
 function derivative_operator(s::IndsNetwork, bit_map; kwargs...)
   return 0.5 * stencil(s, bit_map, [1.0, 0.0, -1.0], 1; kwargs...)
 end
 
-function apply_gx_operator(gx::ITensorNetworkFunction)
-  gx = copy(gx)
-  operator = itensornetwork(gx)
+function identity_operator(s::IndsNetwork, bit_map; kwargs...)
+  return stencil(s, bit_map, [0.0, 1.0, 0.0], 0; kwargs...)
+end
+
+function operator(fx::ITensorNetworkFunction)
+  fx = copy(fx)
+  operator = itensornetwork(fx)
   s = siteinds(operator)
   for v in vertices(operator)
     sind = s[v]
diff --git a/src/itensornetworksutils.jl b/src/itensornetworksutils.jl
index d4fa1c0..2b70f77 100644
--- a/src/itensornetworksutils.jl
+++ b/src/itensornetworksutils.jl
@@ -2,13 +2,13 @@ using ITensors: Index, dim, inds
 using ITensorNetworks: randomITensorNetwork, IndsNetwork
 
 """Build the order L tensor corresponding to fx(x): x ∈ [0,1]."""
-function build_full_rank_tensor(L::Int64, fx::Function)
-  inds = [Index(2, "$i") for i in 1:L]
-  dims = Tuple([2 for i in 1:L])
+function build_full_rank_tensor(L::Int64, fx::Function; base::Int64=2)
+  inds = [Index(base, "$i") for i in 1:L]
+  dims = Tuple([base for i in 1:L])
   array = zeros(dims)
-  for i in 0:(2^(L) - 1)
-    xis = digits(i; base=2, pad=L)
-    x = sum([xis[i] / (2^i) for i in 1:L])
+  for i in 0:(base^(L) - 1)
+    xis = digits(i; base, pad=L)
+    x = sum([xis[i] / (base^i) for i in 1:L])
     array[Tuple(xis + ones(Int64, (L)))...] = fx(x)
   end
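
Note on the stencil scaling above: with scale=true, `stencil` multiplies the operator by base(bit_map)^delta_power on every vertex assigned to the chosen dimension, so for n bits the overall factor is base^(delta_power * n), which for delta_power = 2 is the usual 1/h^2 prefactor of a second-order finite-difference Laplacian on a grid of spacing h = 1/base^n. A minimal plain-Julia sketch of that dense-grid analogue, for reference only (the helper `dense_laplacian` is a made-up name for illustration and is not part of the package API):

# Dense-grid analogue of the scaled [1.0, -2.0, 1.0] stencil (sketch only;
# `dense_laplacian` is a hypothetical helper, not a package function).
function dense_laplacian(f::Vector{Float64}, h::Float64)
  out = zeros(length(f))
  for i in 2:(length(f) - 1)
    # Second-order central difference: (f[i + 1] - 2f[i] + f[i - 1]) / h^2.
    out[i] = (f[i + 1] - 2 * f[i] + f[i - 1]) / h^2
  end
  return out
end

# f(x) = x^2 sampled on the 2^nbits grid points of [0, 1); away from the
# boundaries the discrete Laplacian reproduces f''(x) = 2.
nbits = 6
h = 1 / 2^nbits
xs = collect(0:(2^nbits - 1)) .* h
f = xs .^ 2
@show dense_laplacian(f, h)[2:5]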