Skip to content

[binary_tree_partition] [2/2]: Add binary_tree_partition #64

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Feb 10, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c"
Combinatorics = "861a8166-3701-5b0c-9a16-15d98fcdc6aa"
Compat = "34da2185-b29b-5c13-b0c7-acf172513d20"
DataGraphs = "b5a273c3-7e6c-41f6-98bd-8d7f1525a36a"
DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
Dictionaries = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4"
DocStringExtensions = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae"
Graphs = "86223c79-3864-5bf0-83f7-82e725a168b6"
Expand All @@ -33,6 +34,7 @@ AbstractTrees = "0.4.4"
Combinatorics = "1"
Compat = "3, 4"
DataGraphs = "0.1.7"
DataStructures = "0.18"
Dictionaries = "0.3.15"
DocStringExtensions = "0.8, 0.9"
Graphs = "1.6"
Expand Down
2 changes: 2 additions & 0 deletions src/ITensorNetworks.jl
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ using AbstractTrees
using Combinatorics
using Compat
using DataGraphs
using DataStructures
using Dictionaries
using DocStringExtensions
using Graphs
Expand Down Expand Up @@ -80,6 +81,7 @@ include("models.jl")
include("tebd.jl")
include("itensornetwork.jl")
include("mincut.jl")
include("binary_tree_partition.jl")
include("utility.jl")
include("specialitensornetworks.jl")
include("renameitensornetwork.jl")
Expand Down
184 changes: 184 additions & 0 deletions src/binary_tree_partition.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,184 @@
"""
Rewrite of the function
`DataStructures.root_union!(s::IntDisjointSets{T}, x::T, y::T) where {T<:Integer}`.
Unlike `DataStructures.root_union!`, the root of the merged set is chosen by the
caller: `x` becomes the root when `left_root=true`, otherwise `y` does.
Note: union-by-rank is intentionally not performed, since the root must be the
caller-specified element; the original's unused rank reads have been removed.
"""
function _introot_union!(s::DataStructures.IntDisjointSets, x, y; left_root=true)
  parents = s.parents
  if !left_root
    x, y = y, x
  end
  # Attach y's tree under x; x is now the root of the merged set.
  @inbounds parents[y] = x
  s.ngroups -= 1
  return x
end

"""
Rewrite of the function `DataStructures.root_union!(s::DisjointSet{T}, x::T, y::T)`.
The difference is that in the output of `_root_union!`, x is guaranteed to be the root of y when
setting `left_root=true`, and y will be the root of x when setting `left_root=false`.
In `DataStructures.root_union!`, the root value cannot be specified.
A specified root is useful in functions such as `_remove_deltas`, where when we union two
indices into one disjointset, we want the index that is in the outinds of the given tensor network
to always be the root in the DisjointSets.
"""
function _root_union!(s::DisjointSets, x, y; left_root=true)
  # Forward the caller's `left_root`; the original hard-coded `left_root=true`,
  # silently ignoring a `left_root=false` argument.
  return s.revmap[_introot_union!(s.internal, s.intmap[x], s.intmap[y]; left_root=left_root)]
end

"""
Partition the input network containing both `tn` and `deltas` (a vector of delta tensors)
into two partitions, one adjacent to `source_inds` and the other adjacent to the other
external inds of the network.
Returns a 4-tuple `(source_tn, source_deltas, remain_tn, remain_deltas)`: the sub-network
and delta tensors on the `source_inds` side, then those on the remaining side.
"""
function _binary_partition(
  tn::ITensorNetwork, deltas::Vector{ITensor}, source_inds::Vector{<:Index}
)
  all_tensors = [Vector{ITensor}(tn)..., deltas...]
  external_inds = noncommoninds(all_tensors...)
  # add delta tensor to each external ind
  external_sim_ind = [sim(ind) for ind in external_inds]
  new_deltas = [
    delta(external_inds[i], external_sim_ind[i]) for i in 1:length(external_inds)
  ]
  # Replace each external index with its sim'd copy in both the existing deltas
  # and the network, so the new boundary deltas carry the original external inds.
  deltas = map(t -> replaceinds(t, external_inds => external_sim_ind), deltas)
  deltas = [deltas..., new_deltas...]
  tn = map_data(t -> replaceinds(t, external_inds => external_sim_ind), tn; edges=[])
  # Mincut the combined network between the source inds and all other external inds.
  p1, p2 = _mincut_partition_maxweightoutinds(
    disjoint_union(tn, ITensorNetwork(deltas)),
    source_inds,
    setdiff(external_inds, source_inds),
  )
  # In the disjoint union, vertices tagged 1 come from `tn` and vertices tagged 2
  # index into `deltas`; split each partition back into (sub-network, delta list).
  tn_vs = [v[1] for v in p1 if v[2] == 1]
  source_tn = subgraph(tn, tn_vs)
  delta_indices = [v[1] for v in p1 if v[2] == 2]
  source_deltas = Vector{ITensor}([deltas[i] for i in delta_indices])
  source_tn, source_deltas = _remove_deltas(source_tn, source_deltas)
  tn_vs = [v[1] for v in p2 if v[2] == 1]
  remain_tn = subgraph(tn, tn_vs)
  delta_indices = [v[1] for v in p2 if v[2] == 2]
  remain_deltas = Vector{ITensor}([deltas[i] for i in delta_indices])
  remain_tn, remain_deltas = _remove_deltas(remain_tn, remain_deltas)
  # Internal invariant: the partition must not change the external indices
  # of the overall network.
  @assert (
    length(noncommoninds(all_tensors...)) == length(
      noncommoninds(
        Vector{ITensor}(source_tn)...,
        source_deltas...,
        Vector{ITensor}(remain_tn)...,
        remain_deltas...,
      ),
    )
  )
  return source_tn, source_deltas, remain_tn, remain_deltas
end

"""
Given an input tensor network containing tensors in the input `tn` and
tensors in `deltas`, remove redundant delta tensors in `deltas` and change
inds accordingly to make the output `tn` and `out_deltas` represent the same
tensor network but with fewer delta tensors.
Note: inds of tensors in `tn` and `deltas` may be changed, and `out_deltas`
may still contain necessary delta tensors.

========
Example:
julia> is = [Index(2, "i") for i in 1:6]
julia> a = ITensor(is[1], is[2])
julia> b = ITensor(is[2], is[3])
julia> delta1 = delta(is[3], is[4])
julia> delta2 = delta(is[5], is[6])
julia> tn = ITensorNetwork([a,b])
julia> tn, out_deltas = ITensorNetworks._remove_deltas(tn, [delta1, delta2])
julia> noncommoninds(Vector{ITensor}(tn)...)
2-element Vector{Index{Int64}}:
(dim=2|id=339|"1")
(dim=2|id=489|"4")
julia> length(out_deltas)
1
"""
function _remove_deltas(tn::ITensorNetwork, deltas::Vector{ITensor})
  out_delta_inds = Vector{Pair}()
  network = [Vector{ITensor}(tn)..., deltas...]
  # External indices of the whole network; these must survive as roots.
  outinds = noncommoninds(network...)
  inds_list = map(t -> collect(inds(t)), deltas)
  deltainds = collect(Set(vcat(inds_list...)))
  # Union-find over all delta indices: each delta tensor identifies its two inds.
  ds = DisjointSets(deltainds)
  for t in deltas
    i1, i2 = inds(t)
    # If both roots are external, this delta cannot be removed — record it
    # so an equivalent delta is emitted at the end.
    if find_root!(ds, i1) in outinds && find_root!(ds, i2) in outinds
      push!(out_delta_inds, find_root!(ds, i1) => find_root!(ds, i2))
    end
    # Union the two sets, keeping an external index as the root when possible
    # (see `_root_union!`: the first argument becomes the root).
    if find_root!(ds, i1) in outinds
      _root_union!(ds, find_root!(ds, i1), find_root!(ds, i2))
    else
      _root_union!(ds, find_root!(ds, i2), find_root!(ds, i1))
    end
  end
  # Rewrite every delta index in `tn` to its set representative, absorbing
  # the removed deltas into the network's index structure.
  tn = map_data(
    t -> replaceinds(t, deltainds => [find_root!(ds, i) for i in deltainds]), tn; edges=[]
  )
  out_deltas = Vector{ITensor}([delta(i.first, i.second) for i in out_delta_inds])
  return tn, out_deltas
end

"""
Given an input tn and a rooted binary tree of indices, return a partition of tn with the
same binary tree structure as inds_btree.
Note: in the output partition, we add multiple delta tensors to the network so that
the output graph is guaranteed to be the same binary tree as inds_btree.
Note: in the output partition, tensor vertex names will be changed. For a given input
tensor with vertex name `v`, its name in the output partition will be `(v, 1)`, and any
delta tensor will have name `(v, 2)`.
Note: for a given binary tree with n indices, the output partition will contain 2n-1 vertices,
with each leaf vertex corresponding to a sub tn adjacent to one output index. Keeping these
leaf vertices in the partition makes later `approx_itensornetwork` algorithms more efficient.
"""
function binary_tree_partition(tn::ITensorNetwork, inds_btree::Vector)
  output_tns = Vector{ITensorNetwork}()
  output_deltas_vector = Vector{Vector{ITensor}}()
  # Mapping each vertex of the binary tree to a tn and a vector of deltas
  # representing the partition of the subtree containing this vertex and
  # its descendant vertices.
  v_to_subtree_tn_deltas = Dict{Union{Vector,Index},Tuple}()
  v_to_subtree_tn_deltas[inds_btree] = (tn, Vector{ITensor}())
  # Pre-order traversal guarantees a parent's entry is computed before its children's.
  for v in PreOrderDFS(inds_btree)
    @assert haskey(v_to_subtree_tn_deltas, v)
    input_tn, input_deltas = v_to_subtree_tn_deltas[v]
    # Leaf of the index tree: the remaining sub-network belongs to this leaf as-is.
    if v isa Index
      push!(output_tns, input_tn)
      push!(output_deltas_vector, input_deltas)
      continue
    end
    # Internal vertex: split off the part adjacent to the left child's leaf indices,
    # then from the remainder split off the part adjacent to the right child's.
    tn1, deltas1, input_tn, input_deltas = _binary_partition(
      input_tn, input_deltas, collect(Leaves(v[1]))
    )
    v_to_subtree_tn_deltas[v[1]] = (tn1, deltas1)
    tn1, deltas1, input_tn, input_deltas = _binary_partition(
      input_tn, input_deltas, collect(Leaves(v[2]))
    )
    v_to_subtree_tn_deltas[v[2]] = (tn1, deltas1)
    # What is left after both splits belongs to this internal vertex itself.
    push!(output_tns, input_tn)
    push!(output_deltas_vector, input_deltas)
  end
  # In subgraph_vertices, each element is a vector of vertices to be
  # grouped in one partition.
  subgraph_vs = Vector{Vector{Tuple}}()
  # Deltas are concatenated across all partitions below, so each partition's
  # delta vertices are numbered with a running global offset.
  delta_num = 0
  for (tn, deltas) in zip(output_tns, output_deltas_vector)
    vs = Vector{Tuple}([(v, 1) for v in vertices(tn)])
    vs = vcat(vs, [(i + delta_num, 2) for i in 1:length(deltas)])
    push!(subgraph_vs, vs)
    delta_num += length(deltas)
  end
  # Merge all sub-networks into one network (vertex names are preserved,
  # so no collisions are expected between the partitions).
  out_tn = ITensorNetwork()
  for tn in output_tns
    for v in vertices(tn)
      add_vertex!(out_tn, v)
      out_tn[v] = tn[v]
    end
  end
  tn_deltas = ITensorNetwork(vcat(output_deltas_vector...))
  return partition(ITensorNetwork{Any}(disjoint_union(out_tn, tn_deltas)), subgraph_vs)
end
3 changes: 3 additions & 0 deletions src/exports.jl
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,9 @@ export AbstractITensorNetwork,
tdvp,
to_vec

# ITensorNetworks: binary_tree_partition.jl
export binary_tree_partition

# ITensorNetworks: lattices.jl
# TODO: DELETE
export hypercubic_lattice_graph, square_lattice_graph, chain_lattice_graph
Expand Down
9 changes: 9 additions & 0 deletions src/mincut.jl
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,15 @@ function _mincut_partitions(
return p1, p2
end

"""
Compute a mincut partition of `tn` between `source_inds` and `terminal_inds`,
with the mincut evaluated on the network returned by `_maxweightoutinds_tn`
(external indices mapped to their max-weight counterparts before partitioning).
"""
function _mincut_partition_maxweightoutinds(
  tn::ITensorNetwork, source_inds::Vector{<:Index}, terminal_inds::Vector{<:Index}
)
  maxweight_tn, out_to_maxweight_ind = _maxweightoutinds_tn(
    tn, vcat(source_inds, terminal_inds)
  )
  mapped_sources = map(i -> out_to_maxweight_ind[i], source_inds)
  mapped_terminals = map(i -> out_to_maxweight_ind[i], terminal_inds)
  return _mincut_partitions(maxweight_tn, mapped_sources, mapped_terminals)
end

"""
Sum of shortest path distances among all outinds.
"""
Expand Down
21 changes: 20 additions & 1 deletion test/test_mincut.jl → test/test_binary_tree_partition.jl
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ using ITensorNetworks:
@test sort(p2) == [5, 6, 7, 8]
end

@testset "test inds_binary_tree of a 2D network" begin
@testset "test _binary_tree_partition_inds of a 2D network" begin
N = (3, 3, 3)
linkdim = 2
network = randomITensorNetwork(IndsNetwork(named_grid(N)); link_space=linkdim)
Expand All @@ -53,3 +53,22 @@ end
)
@test length(out) == 2
end

@testset "test binary_tree_partition" begin
  # Build a 5-index random tensor and decompose it into an MPS, giving a
  # small tensor network whose full contraction is known exactly.
  i = Index(2, "i")
  j = Index(2, "j")
  k = Index(2, "k")
  l = Index(2, "l")
  m = Index(2, "m")
  T = randomITensor(i, j, k, l, m)
  M = MPS(T, (i, j, k, l, m); cutoff=1e-5, maxdim=5)
  network = M[:]
  # Reference value: contract the original network directly.
  out1 = contract(network...)
  tn = ITensorNetwork(network)
  inds_btree = _binary_tree_partition_inds(tn, [i, j, k, l, m]; maximally_unbalanced=false)
  par = binary_tree_partition(tn, inds_btree)
  # Contracting all tensors gathered from every partition vertex must
  # reproduce the original network's contraction (partitioning preserves
  # the represented tensor).
  networks = [Vector{ITensor}(par[v]) for v in vertices(par)]
  network2 = vcat(networks...)
  out2 = contract(network2...)
  @test isapprox(out1, out2)
end