diff --git a/docs/src/experimental_methods.md b/docs/src/experimental_methods.md index 8f83db88..08045a23 100644 --- a/docs/src/experimental_methods.md +++ b/docs/src/experimental_methods.md @@ -37,9 +37,8 @@ Methods which still need to be discussed, modified, or deprecated. #### AbstractTreeTensorNetwork Type -* Required-to-implement abstract interface — `TreeTensorNetwork` provides all three (`treetensornetworks/abstracttreetensornetwork.jl`): +* Required-to-implement abstract interface — `TreeTensorNetwork` provides both (`treetensornetworks/abstracttreetensornetwork.jl`): ```julia - ITensorNetwork(tn::AbstractTTN) ortho_region(tn::AbstractTTN) set_ortho_region(tn::AbstractTTN, new_region) ``` @@ -134,11 +133,6 @@ Methods which still need to be discussed, modified, or deprecated. #### TreeTensorNetwork Type -* Get the underlying `ITensorNetwork` of a `TTN` (drops orthogonality metadata) (`treetensornetworks/treetensornetwork.jl`): - ```julia - ITensorNetwork(tn::TTN) - ``` - * Get the current orthogonality region — the set of vertices forming the gauge center (`treetensornetworks/treetensornetwork.jl`): ```julia ortho_region(tn::TTN) diff --git a/docs/src/interface_methods.md b/docs/src/interface_methods.md index 2fd054ee..3b4a2658 100644 --- a/docs/src/interface_methods.md +++ b/docs/src/interface_methods.md @@ -15,14 +15,6 @@ These ITensorNetwork constructor interfaces are foundational to other constructo ITensorNetwork{V}(tensors) ``` -* From a collection of `ITensor`s placed at the vertices of a given `NamedGraph`. No - edge inference; the graph's edges are used as-is. - ```julia - ITensorNetwork(tensors, graph::NamedGraph) - ITensorNetwork{V}(tensors, graph::NamedGraph) - ``` - - ## Analyzing ITensorNetworks @@ -65,12 +57,10 @@ These ITensorNetwork constructor interfaces are foundational to other constructo ## Global Operations on ITensorNetworks -* Scale tensors at chosen vertices by per-vertex weights, either out-of-place or in-place (`abstractitensornetwork.jl`). +* Map a function over the vertex tensors of an `ITensorNetwork`, returning a copy + (`abstractitensornetwork.jl`): ```julia - scale_tensors(tn::AbstractITensorNetwork, vertices_weights::Dictionary; kwargs...) - scale_tensors(weight_function::Function, tn; kwargs...) - scale_tensors!(tn::AbstractITensorNetwork, vertices_weights::Dictionary) - scale_tensors!(weight_function::Function, tn::AbstractITensorNetwork; kwargs...) + map(f, tn::AbstractITensorNetwork) ``` * Tensor product (disjoint union) of two ITensorNetworks (`abstractitensornetwork.jl`): diff --git a/docs/src/itensor_networks.md b/docs/src/itensor_networks.md index d14dc7fc..e2f4f86e 100644 --- a/docs/src/itensor_networks.md +++ b/docs/src/itensor_networks.md @@ -56,7 +56,7 @@ tensors = Dict(map(collect(vertices(g))) do v return v => random_itensor(site_v..., link_v...) end) -ψ = ITensorNetwork(tensors, g) +ψ = ITensorNetwork(tensors) ``` Higher-level construction routines (random networks, product states, OpSum-derived diff --git a/docs/src/solvers.md b/docs/src/solvers.md index d8bf4976..7769931f 100644 --- a/docs/src/solvers.md +++ b/docs/src/solvers.md @@ -30,7 +30,7 @@ function random_state(g, s; link_space) v => random_itensor(only(s[v]), (l[e] for e in incident_edges(g, v))...) 
for v in vertices(g) ) - return ITensorNetwork(ts, g) + return ITensorNetwork(ts) end # Build a Heisenberg Hamiltonian on a comb tree diff --git a/docs/src/tree_tensor_networks.md b/docs/src/tree_tensor_networks.md index 9f2411cf..67ee74de 100644 --- a/docs/src/tree_tensor_networks.md +++ b/docs/src/tree_tensor_networks.md @@ -53,7 +53,7 @@ tensors = Dict(map(collect(vertices(g))) do v link_v = [haskey(links, e) ? links[e] : links[reverse(e)] for e in incident_edges(g, v)] return v => random_itensor(site_v..., link_v...) end) -itn = ITensorNetwork(tensors, g) +itn = ITensorNetwork(tensors) psi = TreeTensorNetwork(itn) ``` @@ -65,7 +65,6 @@ itn_again = ITensorNetwork(psi) # TTN → ITensorNetwork ```@docs; canonical=false ITensorNetworks.TreeTensorNetwork -ITensorNetworks.ITensorNetwork(::ITensorNetworks.TreeTensorNetwork) ``` ## Orthogonal Gauge diff --git a/src/abstractitensornetwork.jl b/src/abstractitensornetwork.jl index 36971ac6..e6b48566 100644 --- a/src/abstractitensornetwork.jl +++ b/src/abstractitensornetwork.jl @@ -1,14 +1,13 @@ using Adapt: Adapt, adapt, adapt_structure -using DataGraphs: DataGraphs, edge_data, get_vertex_data, is_vertex_assigned, - set_vertex_data!, underlying_graph, underlying_graph_type, vertex_data -using Dictionaries: Dictionary +using DataGraphs: + DataGraphs, set_vertex_data!, underlying_graph, underlying_graph_type, vertex_data +using Dictionaries: Dictionaries, Dictionary using Graphs: Graphs, Graph, add_edge!, add_vertex!, bfs_tree, center, dst, edges, edgetype, has_edge, ne, neighbors, rem_edge!, src, vertices -using ITensors: ITensors, ITensor, addtags, commoninds, commontags, contract, dag, inds, - noprime, onehot, prime, replaceprime, replacetags, setprime, settags, sim, swaptags, - tags +using ITensors: ITensors, ITensor, Index, addtags, commoninds, commontags, contract, dag, + inds, noprime, onehot, prime, replaceprime, replacetags, setprime, settags, sim, + swaptags, tags using LinearAlgebra: LinearAlgebra, qr, qr! -using MacroTools: @capture using NDTensors: NDTensors, Algorithm, dim, scalartype using NamedGraphs.GraphsExtensions: add_edges, directed_graph, incident_edges, rename_vertices, vertextype, ⊔ @@ -17,9 +16,10 @@ using SplitApplyCombine: flatten abstract type AbstractITensorNetwork{V} <: AbstractDataGraph{V, ITensor, ITensor} end -# Field access -data_graph_type(::Type{<:AbstractITensorNetwork}) = not_implemented() -data_graph(graph::AbstractITensorNetwork) = not_implemented() +# Subtypes provide the storage: `underlying_graph(tn)` returns the named graph +# and `vertex_data(tn)` returns a `Dictionary{V, ITensor}`-like mapping. Edge +# data is unused — every `AbstractITensorNetwork` is treated as having no edge +# data. # TODO: Define a generic fallback for `AbstractDataGraph`? DataGraphs.edge_data_type(::Type{<:AbstractITensorNetwork}) = ITensor @@ -39,13 +39,16 @@ end # Copy Base.copy(tn::AbstractITensorNetwork) = not_implemented() -# Iteration -Base.iterate(tn::AbstractITensorNetwork, args...) = iterate(vertex_data(tn), args...) - -# Vertex-keyed access: `keys(tn)` returns the vertex set, `tn[v]` returns the -# tensor at vertex `v`. Together with `Base.iterate` above, this lets `tn` -# stand in as a `keys`/`values`-style collection of tensors keyed by vertex. +# Vertex-keyed access: `keys(tn)` returns the vertex set, `values(tn)` the +# tensors, and `tn[v]` the tensor at vertex `v`. 
Going through `values(tn)` +# (rather than `values(vertex_data(tn))`) lets callers stay agnostic about +# whether `vertex_data` is a `Dict`, `Dictionary`, or anything else with +# different default-iteration semantics. Base.keys(tn::AbstractITensorNetwork) = vertices(tn) +Base.keytype(::Type{<:AbstractITensorNetwork{V}}) where {V} = V +Base.keytype(tn::AbstractITensorNetwork) = keytype(typeof(tn)) +Base.values(tn::AbstractITensorNetwork) = (tn[v] for v in vertices(tn)) +Base.iterate(tn::AbstractITensorNetwork, args...) = iterate(values(tn), args...) # TODO: This contrasts with the `DataGraphs.AbstractDataGraph` definition, # where it is defined as the `vertextype`. Does that cause problems or should it be changed? @@ -53,101 +56,24 @@ Base.eltype(tn::AbstractITensorNetwork) = eltype(vertex_data(tn)) # Overload if needed Graphs.is_directed(::Type{<:AbstractITensorNetwork}) = false -GraphsExtensions.directed_graph(is::AbstractITensorNetwork) = directed_graph(data_graph(is)) - -# Derived interface, may need to be overloaded -function DataGraphs.underlying_graph_type(G::Type{<:AbstractITensorNetwork}) - return underlying_graph_type(data_graph_type(G)) +function GraphsExtensions.directed_graph(tn::AbstractITensorNetwork) + return directed_graph(underlying_graph(tn)) end function ITensors.datatype(tn::AbstractITensorNetwork) return mapreduce(v -> datatype(tn[v]), promote_type, vertices(tn)) end -function is_setindex!_expr(expr::Expr) - return is_assignment_expr(expr) && is_getindex_expr(first(expr.args)) -end -is_setindex!_expr(x) = false -is_getindex_expr(expr::Expr) = (expr.head === :ref) -is_getindex_expr(x) = false -is_assignment_expr(expr::Expr) = (expr.head === :(=)) -is_assignment_expr(expr) = false - -# TODO: Define this in terms of a function mapping -# preserve_graph_function(::typeof(setindex!)) = setindex!_preserve_graph -# preserve_graph_function(::typeof(map_vertex_data)) = map_vertex_data_preserve_graph -# Also allow annotating codeblocks like `@views`. -macro preserve_graph(expr) - if !is_setindex!_expr(expr) - error( - "preserve_graph must be used with setindex! syntax (as @preserve_graph a[i,j,...] = value)" - ) - end - @capture(expr, array_[indices__] = value_) - return :(setindex_preserve_graph!($(esc(array)), $(esc(value)), $(esc.(indices)...))) -end - -function setindex_preserve_graph!(tn::AbstractITensorNetwork, value, vertex) - data_graph(tn)[vertex] = value - return tn -end - -# AbstractDataGraphs overloads - -DataGraphs.underlying_graph(tn::AbstractITensorNetwork) = underlying_graph(data_graph(tn)) - -function DataGraphs.is_vertex_assigned(is::AbstractITensorNetwork, v) - return is_vertex_assigned(data_graph(is), v) -end - -function DataGraphs.is_edge_assigned(is::AbstractITensorNetwork, v) - return is_edge_assigned(data_graph(is), v) -end - -function DataGraphs.get_vertex_data(is::AbstractITensorNetwork, v) - return get_vertex_data(data_graph(is), v) -end +# AbstractDataGraphs overloads — defined directly in terms of the +# `underlying_graph` / `vertex_data` storage, with no edge data. 
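+#
+# A minimal sketch of what a concrete subtype supplies (the type and field
+# names here are illustrative, not part of the package):
+#
+#   struct MyNetwork{V} <: AbstractITensorNetwork{V}
+#       graph::NamedGraph{V}
+#       vertex_data::Dictionary{V, ITensor}
+#   end
+#   DataGraphs.underlying_graph(tn::MyNetwork) = tn.graph
+#   DataGraphs.vertex_data(tn::MyNetwork) = tn.vertex_data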
-function DataGraphs.set_vertex_data!(tn::AbstractITensorNetwork, value, v) - @preserve_graph tn[v] = value - fix_edges!(tn, v) - return tn +function DataGraphs.is_vertex_assigned(tn::AbstractITensorNetwork, v) + return haskey(vertex_data(tn), v) end -function DataGraphs.set_vertices_data!(tn::AbstractITensorNetwork, values, vertices) - for v in vertices - @preserve_graph tn[v] = values[v] - end - for v in vertices - fix_edges!(tn, v) - end - return tn -end +DataGraphs.is_edge_assigned(::AbstractITensorNetwork, _) = false -# Reconcile the graph edges incident to `v` with the shared Indices of -# `tn[v]`, after a non-`@preserve_graph` write that may have changed -# which neighbors share an Index with `v`. Removes any incident edges -# that no longer correspond to a shared Index, and adds edges to any -# vertex now sharing one. The long-term invariant is graph-edge ↔ -# shared-Index one-to-one; until that's structural (e.g. via a reverse -# Index map on the type), this auto-reconciliation runs after -# `set_vertex_data!` / `set_vertices_data!` so plain `tn[v] = ...` -# writes don't desync the two. Callers that already maintain the -# invariant explicitly should bypass this with `@preserve_graph`. -function fix_edges!(tn::AbstractITensorNetwork, v) - for edge in incident_edges(tn, v) - rem_edge!(tn, edge) - end - for vertex in vertices(tn) - if v ≠ vertex - edge = v => vertex - if !isempty(linkinds(tn, edge)) - add_edge!(data_graph(tn), edge) - end - end - end - return tn -end +DataGraphs.get_vertex_data(tn::AbstractITensorNetwork, v) = vertex_data(tn)[v] function NamedGraphs.vertex_positions(tn::AbstractITensorNetwork) return NamedGraphs.vertex_positions(underlying_graph(tn)) @@ -157,14 +83,7 @@ function NamedGraphs.ordered_vertices(tn::AbstractITensorNetwork) end function Adapt.adapt_structure(to, tn::AbstractITensorNetwork) - # TODO: Define and use: - # - # @preserve_graph map_vertex_data(adapt(to), tn) - # - # or just: - # - # @preserve_graph map(adapt(to), tn) - return map_vertex_data_preserve_graph(adapt(to), tn) + return map(adapt(to), tn) end # @@ -182,22 +101,13 @@ function Base.union(tn1::AbstractITensorNetwork, tn2::AbstractITensorNetwork; kw tensors = Dict{vertextype(g), ITensor}( v => (v in vertices(tn1) ? tn1[v] : tn2[v]) for v in vertices(g) ) - tn = ITensorNetwork(tensors, g) - # Add any new edges that are introduced during the union - for v1 in vertices(tn1) - for v2 in vertices(tn2) - if !isempty(linkinds(tn, v1 => v2)) - add_edge!(data_graph(tn), v1 => v2) - end - end - end - return tn + # `ITensorNetwork(tensors)` infers edges from shared `Index`es, so any + # cross-network bonds between `tn1` and `tn2` are picked up automatically. + return ITensorNetwork(tensors) end function NamedGraphs.rename_vertices(f::Function, tn::AbstractITensorNetwork) - new_g = NamedGraphs.rename_vertices(f, underlying_graph(tn)) - tensors = Dict{vertextype(new_g), ITensor}(f(v) => tn[v] for v in vertices(tn)) - return ITensorNetwork(tensors, new_g) + return ITensorNetwork(Dict(f(v) => tn[v] for v in vertices(tn))) end # @@ -214,11 +124,8 @@ end # TODO: Define `eltype(::AbstractITensorNetwork)` as `ITensor`? 
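+# Element-type conversion is just a `map` over the vertex tensors; a hedged
+# usage sketch (`tn` is any network):
+#
+#   tn32 = NDTensors.convert_scalartype(Float32, tn)
+#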
-# TODO: Implement using `adapt` function NDTensors.convert_scalartype(eltype::Type{<:Number}, tn::AbstractITensorNetwork) - tn = copy(tn) - vertex_data(tn) .= ITensors.adapt.(Ref(eltype), vertex_data(tn)) - return tn + return map(adapt(eltype), tn) end function Base.complex(tn::AbstractITensorNetwork) @@ -300,12 +207,12 @@ function ITensors.replaceinds( @assert underlying_graph(is) == underlying_graph(is′) for v in vertices(is) isassigned(is, v) || continue - @preserve_graph tn[v] = replaceinds(tn[v], is[v] => is′[v]) + tn[v] = replaceinds(tn[v], is[v] => is′[v]) end for e in edges(is) isassigned(is, e) || continue for v in (src(e), dst(e)) - @preserve_graph tn[v] = replaceinds(tn[v], is[e] => is′[e]) + tn[v] = replaceinds(tn[v], is[e] => is′[e]) end end return tn @@ -368,42 +275,25 @@ end LinearAlgebra.adjoint(tn::Union{IndsNetwork, AbstractITensorNetwork}) = prime(tn) -function map_vertex_data(f, tn::AbstractITensorNetwork) - tn = copy(tn) +# In-place / out-of-place `map` over the vertex tensors of `tn`. Reverse-map +# reconciliation makes the per-vertex write cheap, so these match +# `Base.map` / `Base.map!`'s usual element-wise semantics without needing +# a separate "preserve graph" entry point. +function Base.map!(f, tn::AbstractITensorNetwork) for v in vertices(tn) tn[v] = f(tn[v]) end return tn end -# TODO: Define @preserve_graph map_vertex_data(f, tn)` -function map_vertex_data_preserve_graph(f, tn::AbstractITensorNetwork) - tn = copy(tn) - for v in vertices(tn) - @preserve_graph tn[v] = f(tn[v]) - end - return tn -end - -function map_vertices_preserve_graph!( - f, - tn::AbstractITensorNetwork; - vertices = vertices(tn) - ) - for v in vertices - @preserve_graph tn[v] = f(v) - end - return tn -end +Base.map(f, tn::AbstractITensorNetwork) = map!(f, copy(tn)) function Base.conj(tn::AbstractITensorNetwork) - # TODO: Use `@preserve_graph map_vertex_data(f, tn)` - return map_vertex_data_preserve_graph(conj, tn) + return map(conj, tn) end function ITensors.dag(tn::AbstractITensorNetwork) - # TODO: Use `@preserve_graph map_vertex_data(f, tn)` - return map_vertex_data_preserve_graph(dag, tn) + return map(dag, tn) end # TODO: should this make sure that internal indices @@ -460,25 +350,13 @@ function NDTensors.contract( V = promote_type(vertextype(tn), typeof(merged_vertex)) # TODO: Check `ITensorNetwork{V}`, shouldn't need a copy here. tn = ITensorNetwork{V}(copy(tn)) - neighbors_src = setdiff(neighbors(tn, src(edge)), [dst(edge)]) - neighbors_dst = setdiff(neighbors(tn, dst(edge)), [src(edge)]) new_itensor = tn[src(edge)] * tn[dst(edge)] - # The following is equivalent to: - # - # tn[dst(edge)] = new_itensor - # - # but without having to search all vertices - # to update the edges. rem_vertex!(tn, src(edge)) rem_vertex!(tn, dst(edge)) - add_vertex!(tn, merged_vertex) - for n_src in neighbors_src - add_edge!(data_graph(tn), merged_vertex => n_src) - end - for n_dst in neighbors_dst - add_edge!(data_graph(tn), merged_vertex => n_dst) - end - @preserve_graph tn[merged_vertex] = new_itensor + # `setindex!` (via `set_vertex_data!`) adds `merged_vertex` to the + # graph and reverse-map reconciliation picks up the new bonds to the + # surviving neighbors of `src(edge)` and `dst(edge)`. 
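+    # (`rem_vertex!` above has already dropped the contracted tensors'
+    # `Index`es from the reverse map, so only bonds to surviving vertices
+    # remain to be rediscovered by this write.)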
+ tn[merged_vertex] = new_itensor return tn end @@ -494,8 +372,8 @@ end function LinearAlgebra.qr!(tn::AbstractITensorNetwork, edge::AbstractEdge) left_inds = setdiff(inds(tn[src(edge)]), inds(tn[dst(edge)])) Q, R = qr(tn[src(edge)], left_inds; tags = edge_tag(edge)) - @preserve_graph tn[src(edge)] = Q - @preserve_graph tn[dst(edge)] = R * tn[dst(edge)] + tn[src(edge)] = Q + tn[dst(edge)] = R * tn[dst(edge)] return tn end @@ -548,8 +426,8 @@ function _truncate_edge(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs.. left_inds = setdiff(inds(tn[src(edge)]), inds(tn[dst(edge)])) ltags = tags(tn, edge) U, S, V = svd(tn[src(edge)], left_inds; lefttags = ltags, kwargs...) - @preserve_graph tn[src(edge)] = U - @preserve_graph tn[dst(edge)] = tn[dst(edge)] * (S * V) + tn[src(edge)] = U + tn[dst(edge)] = tn[dst(edge)] * (S * V) return tn end @@ -677,17 +555,15 @@ function linkdims(tn::AbstractITensorNetwork{V}) where {V} return ld end -# Ensure `edge` is present in both the underlying graph and as a shared -# Index between the endpoint tensors. The long-term invariant is that the -# two are one-to-one; today they can drift apart, so this fills in -# whichever piece is missing. Returns the `Graphs.add_edge!` convention: -# `true` if a new graph edge was added, `false` if the graph edge was -# already present (regardless of whether link inds had to be threaded). +# Add a new edge between two vertices by threading a fresh link `Index` +# via QR; the second `setindex!` writes that `Index` into both endpoint +# tensors, and reverse-map reconciliation picks up the graph edge as a +# consequence. Returns the `Graphs.add_edge!` convention: `true` if a new +# graph edge was added, `false` if it was already there. function Graphs.add_edge!(tn::AbstractITensorNetwork, edge) - added = !has_edge(tn, edge) - added && add_edge!(data_graph(tn), edge) - isempty(linkinds(tn, edge)) && qr!(tn, edge) - return added + has_edge(tn, edge) && return false + qr!(tn, edge) + return true end # TODO: What to output? Could be an `IndsNetwork`. Or maybe @@ -780,30 +656,6 @@ function add(tn1::AbstractITensorNetwork, tn2::AbstractITensorNetwork) return tn12 end -# Scale each tensor of the network via a function vertex -> Number -function scale_tensors!( - weight_function::Function, - tn::AbstractITensorNetwork; - vertices = collect(Graphs.vertices(tn)) - ) - return map_vertices_preserve_graph!(v -> weight_function(v) * tn[v], tn; vertices) -end - -# Scale each tensor of the network by a scale factor for each vertex in the keys of the dictionary -function scale_tensors!(tn::AbstractITensorNetwork, vertices_weights::Dictionary) - return scale_tensors!(v -> vertices_weights[v], tn; vertices = keys(vertices_weights)) -end - -function scale_tensors(weight_function::Function, tn; kwargs...) - tn = copy(tn) - return scale_tensors!(weight_function, tn; kwargs...) -end - -function scale_tensors(tn::AbstractITensorNetwork, vertices_weights::Dictionary; kwargs...) - tn = copy(tn) - return scale_tensors!(tn, vertices_weights; kwargs...) 
-end - Base.:+(tn1::AbstractITensorNetwork, tn2::AbstractITensorNetwork) = add(tn1, tn2) ITensors.hasqns(tn::AbstractITensorNetwork) = any(v -> hasqns(tn[v]), vertices(tn)) @@ -812,10 +664,7 @@ function NamedGraphs.induced_subgraph_from_vertices( itn::AbstractITensorNetwork, subvertices ) - subgraph, vlist = induced_subgraph(underlying_graph(itn), subvertices) - subitn = similar_graph(itn, subgraph) - - subitn[Vertices(subvertices)] = vertex_data(itn) - - return subitn, vlist + _, vlist = induced_subgraph(underlying_graph(itn), subvertices) + sub_vs = collect(subvertices) + return ITensorNetwork(Dictionary(sub_vs, [itn[v] for v in sub_vs])), vlist end diff --git a/src/apply.jl b/src/apply.jl index 99fe9ecf..bfb25fcb 100644 --- a/src/apply.jl +++ b/src/apply.jl @@ -113,7 +113,7 @@ function ITensors.apply( if normalize oψᵥ ./= norm(oψᵥ) end - setindex_preserve_graph!(ψ, oψᵥ, v⃗[1]) + ψ[v⃗[1]] = oψᵥ elseif length(v⃗) == 2 envs = Vector{ITensor}(envs) if !iszero(ne(ITensorNetwork(envs))) @@ -135,8 +135,8 @@ function ITensors.apply( ψᵥ₁ ./= norm(ψᵥ₁) ψᵥ₂ ./= norm(ψᵥ₂) end - setindex_preserve_graph!(ψ, ψᵥ₁, v⃗[1]) - setindex_preserve_graph!(ψ, ψᵥ₂, v⃗[2]) + ψ[v⃗[1]] = ψᵥ₁ + ψ[v⃗[2]] = ψᵥ₂ elseif length(v⃗) < 1 error("Gate being applied does not share indices with tensor network.") elseif length(v⃗) > 2 diff --git a/src/caches/abstractbeliefpropagationcache.jl b/src/caches/abstractbeliefpropagationcache.jl index 79c6ef5e..d682b2a7 100644 --- a/src/caches/abstractbeliefpropagationcache.jl +++ b/src/caches/abstractbeliefpropagationcache.jl @@ -1,4 +1,5 @@ using Adapt: Adapt, adapt, adapt_structure +using DataGraphs: DataGraphs, underlying_graph, vertex_data using Graphs: Graphs, IsDirected, dst, src using ITensors: commoninds, delta, dir using LinearAlgebra: diag, dot @@ -18,10 +19,12 @@ function SimilarType.similar_type(bpc::AbstractBeliefPropagationCache) return typeof(tensornetwork(bpc)) end -function data_graph_type(bpc::AbstractBeliefPropagationCache) - return data_graph_type(tensornetwork(bpc)) +function DataGraphs.underlying_graph(bpc::AbstractBeliefPropagationCache) + return underlying_graph(tensornetwork(bpc)) +end +function DataGraphs.vertex_data(bpc::AbstractBeliefPropagationCache) + return vertex_data(tensornetwork(bpc)) end -data_graph(bpc::AbstractBeliefPropagationCache) = data_graph(tensornetwork(bpc)) #TODO: Take `dot` without precontracting the messages to allow scaling to more complex # messages @@ -45,9 +48,6 @@ default_messages(ptn::PartitionedGraph) = Dictionary() end default_partitioned_vertices(ψ::AbstractITensorNetwork) = group(v -> v, vertices(ψ)) -function Base.setindex!(bpc::AbstractBeliefPropagationCache, factor::ITensor, vertex) - return not_implemented() -end partitioned_tensornetwork(bpc::AbstractBeliefPropagationCache) = not_implemented() messages(bpc::AbstractBeliefPropagationCache) = not_implemented() function default_message( @@ -131,7 +131,7 @@ end function map_factors(f, bpc::AbstractBeliefPropagationCache, vs = vertices(bpc)) bpc = copy(bpc) for v in vs - @preserve_graph bpc[v] = f(bpc[v]) + bpc[v] = f(bpc[v]) end return bpc end @@ -176,14 +176,14 @@ function update_factors(bpc::AbstractBeliefPropagationCache, factors) bpc = copy(bpc) for vertex in eachindex(factors) # TODO: Add a check that this preserves the graph structure. 
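+        # (Writes now route through `set_vertex_data!`, whose edge
+        # reconciliation repairs any graph drift instead of silently
+        # keeping it.)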
- setindex_preserve_graph!(bpc, factors[vertex], vertex) + bpc[vertex] = factors[vertex] end return bpc end function update_factor(bpc, vertex, factor) bpc = copy(bpc) - setindex_preserve_graph!(bpc, factor, vertex) + bpc[vertex] = factor return bpc end @@ -371,8 +371,9 @@ function rescale_partitions( ) bpc = copy(bpc) tn = tensornetwork(bpc) - norms = map(v -> inv(norm(tn[v])), verts) - scale_tensors!(bpc, Dictionary(verts, norms)) + for v in verts + bpc[v] = inv(norm(tn[v])) * bpc[v] + end vertices_weights = Dictionary() for pv in partitions @@ -388,7 +389,9 @@ function rescale_partitions( end end - scale_tensors!(bpc, vertices_weights) + for (v, w) in pairs(vertices_weights) + bpc[v] = w * bpc[v] + end return bpc end diff --git a/src/caches/beliefpropagationcache.jl b/src/caches/beliefpropagationcache.jl index 0fcea4ef..0bb9ff93 100644 --- a/src/caches/beliefpropagationcache.jl +++ b/src/caches/beliefpropagationcache.jl @@ -1,3 +1,4 @@ +using DataGraphs: DataGraphs, set_vertex_data! using Graphs: IsDirected using ITensors: dir using LinearAlgebra: diag, dot @@ -22,15 +23,6 @@ struct BeliefPropagationCache{V, PV, PTN <: AbstractPartitionedGraph{V, PV}, MTS messages::MTS end -function NamedGraphs.similar_graph( - bpc::BeliefPropagationCache, - underlying_graph::AbstractGraph - ) - return BeliefPropagationCache( - similar_graph(bpc.underlying_graph, underlying_graph) - ) -end - #Constructors... function BeliefPropagationCache(ptn::PartitionedGraph; messages = default_messages(ptn)) return BeliefPropagationCache(ptn, messages) @@ -106,7 +98,12 @@ function default_bp_edge_sequence(bp_cache::BeliefPropagationCache) return default_edge_sequence(partitioned_tensornetwork(bp_cache)) end -Base.setindex!(bpc::BeliefPropagationCache, factor::ITensor, vertex) = not_implemented() +# Forward vertex writes to the underlying `ITensorNetwork` so its +# reverse-index map and edge reconciliation handle the update. +function DataGraphs.set_vertex_data!(bpc::BeliefPropagationCache, value, vertex) + set_vertex_data!(tensornetwork(bpc), value, vertex) + return bpc +end partitions(bpc::BeliefPropagationCache) = quotientvertices(partitioned_tensornetwork(bpc)) function PartitionedGraphs.quotientedges(bpc::BeliefPropagationCache) return quotientedges(partitioned_tensornetwork(bpc)) diff --git a/src/formnetworks/abstractformnetwork.jl b/src/formnetworks/abstractformnetwork.jl index 07ef6939..6a041dbc 100644 --- a/src/formnetworks/abstractformnetwork.jl +++ b/src/formnetworks/abstractformnetwork.jl @@ -1,3 +1,4 @@ +using DataGraphs: DataGraphs, underlying_graph, vertex_data using Graphs: induced_subgraph using NamedGraphs.SimilarType: SimilarType @@ -20,12 +21,9 @@ function SimilarType.similar_type(f::AbstractFormNetwork) return typeof(tensornetwork(f)) end -# TODO: Use `NamedGraphs.GraphsExtensions.parent_graph_type`. -function data_graph_type(f::AbstractFormNetwork) - return data_graph_type(tensornetwork(f)) -end -# TODO: Use `NamedGraphs.GraphsExtensions.parent_graph`. -data_graph(f::AbstractFormNetwork) = data_graph(tensornetwork(f)) +# TODO: Use `NamedGraphs.GraphsExtensions.parent_graph` / `parent_graph_type`. 
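+# Wrapper types satisfy the storage interface by delegating both hooks to
+# the wrapped network: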
+DataGraphs.underlying_graph(f::AbstractFormNetwork) = underlying_graph(tensornetwork(f)) +DataGraphs.vertex_data(f::AbstractFormNetwork) = vertex_data(tensornetwork(f)) function operator_vertices(f::AbstractFormNetwork) return filter(v -> last(v) == operator_vertex_suffix(f), vertices(f)) diff --git a/src/formnetworks/bilinearformnetwork.jl b/src/formnetworks/bilinearformnetwork.jl index 4c632c1a..513a3d97 100644 --- a/src/formnetworks/bilinearformnetwork.jl +++ b/src/formnetworks/bilinearformnetwork.jl @@ -1,4 +1,5 @@ using Adapt: adapt +using DataGraphs: DataGraphs, set_vertex_data! using ITensors.NDTensors: datatype, denseblocks using ITensors: ITensor, Op, delta, prime, sim using NamedGraphs.GraphsExtensions: disjoint_union @@ -40,24 +41,19 @@ function BilinearFormNetwork( ) end -function NamedGraphs.similar_graph( - bf::BilinearFormNetwork, - underlying_graph::AbstractGraph - ) - return BilinearFormNetwork( - similar_graph(bf.tensornetwork, underlying_graph), - operator_vertex_suffix(bf), - bra_vertex_suffix(bf), - ket_vertex_suffix(bf) - ) -end - operator_vertex_suffix(blf::BilinearFormNetwork) = blf.operator_vertex_suffix bra_vertex_suffix(blf::BilinearFormNetwork) = blf.bra_vertex_suffix ket_vertex_suffix(blf::BilinearFormNetwork) = blf.ket_vertex_suffix # TODO: Use `NamedGraphs.GraphsExtensions.parent_graph`. tensornetwork(blf::BilinearFormNetwork) = blf.tensornetwork +# Forward vertex writes to the wrapped network so reverse-index map and +# edge reconciliation run on the underlying `ITensorNetwork`. +function DataGraphs.set_vertex_data!(blf::BilinearFormNetwork, value, vertex) + set_vertex_data!(tensornetwork(blf), value, vertex) + return blf +end + function Base.copy(blf::BilinearFormNetwork) return BilinearFormNetwork( copy(tensornetwork(blf)), @@ -88,12 +84,11 @@ function BilinearFormNetwork( s_mapped = dual_site_index_map(s) operator_inds = union_all_inds(s, s_mapped) - g = NamedGraph(underlying_graph(operator_inds)) - ts = Dict{vertextype(g), ITensor}() + ts = Dict{vertextype(operator_inds), ITensor}() for v in vertices(operator_inds) ts[v] = itensor_identity_map(scalartype(ket), s[v] .=> s_mapped[v]) end - O = ITensorNetwork(ts, g) + O = ITensorNetwork(ts) O = adapt(promote_type(datatype(bra), datatype(ket)), O) return BilinearFormNetwork(O, bra, ket; dual_site_index_map, kwargs...) end @@ -106,12 +101,7 @@ function update( ket_state::ITensor ) blf = copy(blf) - # TODO: Maybe add a check that it really does preserve the graph. - setindex_preserve_graph!( - tensornetwork(blf), bra_state, bra_vertex(blf, original_bra_state_vertex) - ) - setindex_preserve_graph!( - tensornetwork(blf), ket_state, ket_vertex(blf, original_ket_state_vertex) - ) + tensornetwork(blf)[bra_vertex(blf, original_bra_state_vertex)] = bra_state + tensornetwork(blf)[ket_vertex(blf, original_ket_state_vertex)] = ket_state return blf end diff --git a/src/formnetworks/linearformnetwork.jl b/src/formnetworks/linearformnetwork.jl index 94c45f96..654d858f 100644 --- a/src/formnetworks/linearformnetwork.jl +++ b/src/formnetworks/linearformnetwork.jl @@ -1,7 +1,6 @@ -using Graphs: AbstractGraph +using DataGraphs: DataGraphs, set_vertex_data! using ITensors: ITensor, prime using NamedGraphs.GraphsExtensions: disjoint_union -using NamedGraphs: similar_graph default_dual_link_index_map = prime @@ -40,12 +39,11 @@ ket_vertex_suffix(lf::LinearFormNetwork) = lf.ket_vertex_suffix # TODO: Use `NamedGraphs.GraphsExtensions.parent_graph`. 
tensornetwork(lf::LinearFormNetwork) = lf.tensornetwork
 
-function NamedGraphs.similar_graph(
-        lf::LinearFormNetwork,
-        underlying_graph::AbstractGraph
-    )
-    tn = similar_graph(tensornetwork(lf), underlying_graph)
-    return LinearFormNetwork(tn, bra_vertex_suffix(lf), ket_vertex_suffix(lf))
+# Forward vertex writes to the wrapped network so reverse-index map and
+# edge reconciliation run on the underlying `ITensorNetwork`.
+function DataGraphs.set_vertex_data!(lf::LinearFormNetwork, value, vertex)
+    set_vertex_data!(tensornetwork(lf), value, vertex)
+    return lf
 end
 
 function Base.copy(lf::LinearFormNetwork)
@@ -56,9 +54,6 @@ end
 
 function update(lf::LinearFormNetwork, original_ket_state_vertex, ket_state::ITensor)
     lf = copy(lf)
-    # TODO: Maybe add a check that it really does preserve the graph.
-    setindex_preserve_graph!(
-        tensornetwork(lf), ket_state, ket_vertex(blf, original_ket_state_vertex)
-    )
+    tensornetwork(lf)[ket_vertex(lf, original_ket_state_vertex)] = ket_state
     return lf
 end
diff --git a/src/formnetworks/quadraticformnetwork.jl b/src/formnetworks/quadraticformnetwork.jl
index 9767c30c..eb4b2674 100644
--- a/src/formnetworks/quadraticformnetwork.jl
+++ b/src/formnetworks/quadraticformnetwork.jl
@@ -1,4 +1,4 @@
-using NamedGraphs: similar_graph
+using DataGraphs: DataGraphs, set_vertex_data!, underlying_graph, vertex_data
 
 default_index_map = prime
 default_inv_index_map = noprime
@@ -15,17 +15,6 @@ struct QuadraticFormNetwork{
     dual_inv_index_map::InvIndexMap
 end
 
-function NamedGraphs.similar_graph(
-        qf::QuadraticFormNetwork,
-        underlying_graph::AbstractGraph
-    )
-    return QuadraticFormNetwork(
-        similar_graph(bilinear_formnetwork(qf), underlying_graph),
-        dual_index_map(qf),
-        dual_inv_index_map(qf)
-    )
-end
-
 bilinear_formnetwork(qf::QuadraticFormNetwork) = qf.formnetwork
 
 #Needed for implementation, forward from bilinear form
@@ -34,8 +23,6 @@ for f in [
         :bra_vertex_suffix,
         :ket_vertex_suffix,
         :tensornetwork,
-        :data_graph,
-        :data_graph_type,
     ]
     @eval begin
         function $f(qf::QuadraticFormNetwork, args...; kwargs...)
@@ -44,8 +31,20 @@ end
     end
 end
 
+function DataGraphs.underlying_graph(qf::QuadraticFormNetwork)
+    return underlying_graph(bilinear_formnetwork(qf))
+end
+DataGraphs.vertex_data(qf::QuadraticFormNetwork) = vertex_data(bilinear_formnetwork(qf))
+
 dual_index_map(qf::QuadraticFormNetwork) = qf.dual_index_map
 dual_inv_index_map(qf::QuadraticFormNetwork) = qf.dual_inv_index_map
+
+# Forward vertex writes to the inner `BilinearFormNetwork`, which in turn
+# forwards to the underlying `ITensorNetwork`.
+function DataGraphs.set_vertex_data!(qf::QuadraticFormNetwork, value, vertex)
+    set_vertex_data!(bilinear_formnetwork(qf), value, vertex)
+    return qf
+end
 function Base.copy(qf::QuadraticFormNetwork)
     return QuadraticFormNetwork(
         copy(bilinear_formnetwork(qf)), dual_index_map(qf), dual_inv_index_map(qf)
diff --git a/src/itensornetwork.jl b/src/itensornetwork.jl
index defa489c..019f814c 100644
--- a/src/itensornetwork.jl
+++ b/src/itensornetwork.jl
@@ -1,31 +1,39 @@
-using DataGraphs: DataGraphs, DataGraph
-using ITensors: ITensors, ITensor
-using NamedGraphs: NamedGraphs, NamedEdge, NamedGraph, similar_graph, vertextype
+using DataGraphs: DataGraphs, set_vertex_data!, underlying_graph, vertex_data
+using Dictionaries: Dictionaries, Dictionary
+using Graphs:
+    Graphs, add_edge!, add_vertex!, has_edge, has_vertex, neighbors, rem_edge!, rem_vertex!
+using ITensors: ITensor, Index, inds +using NamedGraphs: NamedGraphs, NamedGraph, vertextype """ ITensorNetwork{V} -A tensor network where each vertex holds an `ITensor`. The network graph is a -`NamedGraph{V}` and edges represent shared indices between neighboring tensors. +A tensor network where each vertex holds an `ITensor`. Storage is split +across three fields: -# Constructors + - `graph::NamedGraph{V}` — the network's graph (`V` is the vertex type), + - `vertex_data::Dictionary{V, ITensor}` — the tensor at each vertex, + - `ind_to_vertices::Dict{Index, Set{V}}` — reverse map from each `Index` + to the vertices it appears in. + +The reverse map keeps vertex lookup by shared `Index` O(1) and enforces +the graph-edge ↔ shared-`Index` invariant: every `Index` appears at +either one vertex (an external / site index) or two (a bond), and every +graph edge corresponds to exactly the pair of vertices sharing at least +one `Index`. Hyperedges (an `Index` shared by three or more vertices) +are rejected. -**From a collection of `ITensor`s** (edges inferred from shared indices): +# Construction ```julia ITensorNetwork(tensors) +ITensorNetwork{V}(tensors) ``` `tensors` is any collection where `keys(tensors)` are vertex labels and `values(tensors)` are the `ITensor`s at those vertices (e.g. a `Dict`, a `Dictionary`, or a `Vector{ITensor}` with linear-index vertex labels). - -**From a collection of `ITensor`s placed at the vertices of a given graph** -(no edge inference; the caller is responsible for the edges): - -```julia -ITensorNetwork(tensors, graph::NamedGraph) -``` +Edges are inferred from shared `Index`es. # Example @@ -41,90 +49,144 @@ julia> tn = ITensorNetwork([ITensor(i, j), ITensor(j, k)]); See also: `IndsNetwork`, [`TreeTensorNetwork`](@ref ITensorNetworks.TreeTensorNetwork). """ struct ITensorNetwork{V} <: AbstractITensorNetwork{V} - data_graph::DataGraph{V, ITensor, ITensor, NamedGraph{V}, NamedEdge{V}} - - # Sole inner ctor: place `tensors` at the vertices of `graph`. No checks — - # `tensors` must be indexable at every vertex, the graph's edges are - # taken at face value. - function ITensorNetwork{V}(tensors, graph::NamedGraph) where {V} - g = NamedGraph{V}(graph) - dg = DataGraph(g; vertex_data_type = ITensor, edge_data_type = ITensor) - for v in vertices(g) - dg[v] = tensors[v] - end - return new{V}(dg) - end + graph::NamedGraph{V} + vertex_data::Dictionary{V, ITensor} + ind_to_vertices::Dict{Index, Set{V}} end # -# Data access +# AbstractITensorNetwork interface (field access) # -data_graph(tn::ITensorNetwork) = getfield(tn, :data_graph) -data_graph_type(TN::Type{<:ITensorNetwork}) = fieldtype(TN, :data_graph) +DataGraphs.underlying_graph(tn::ITensorNetwork) = tn.graph +DataGraphs.vertex_data(tn::ITensorNetwork) = tn.vertex_data -function DataGraphs.underlying_graph_type(TN::Type{<:ITensorNetwork}) - return fieldtype(data_graph_type(TN), :underlying_graph) +function DataGraphs.underlying_graph_type(::Type{<:ITensorNetwork{V}}) where {V} + return NamedGraph{V} end # -# Construction from collections of ITensors +# Constructors # -# Tensors only: derive the vertex list from `keys(tensors)`, write the -# tensors at each vertex, then infer edges from shared Indices in an -# O(n²) sweep. Without a reverse index map, that's the only available cost. +# Empty network with no vertices. Mirrors the `Vector()` / `Dictionary()` +# convention; vertex type defaults to `Any` when unspecified. 
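+#
+# For example, `ITensorNetwork{Int}()` starts with no vertices and is grown
+# one tensor at a time via `setindex!` (see `_set_vertex_data!` further down).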
+function ITensorNetwork{V}() where {V} + return ITensorNetwork{V}( + NamedGraph{V}(), Dictionary{V, ITensor}(), Dict{Index, Set{V}}() + ) +end +ITensorNetwork() = ITensorNetwork{Any}() + +# Construct by writing each tensor into a freshly empty network via +# `setindex!`. The `setindex!` code path centralizes reverse-map +# registration, edge inference, and the hypergraph check, and walking +# `keys(tensors)` in order makes the resulting `neighbors(g, v)` / +# `edges(g)` iteration order deterministic in the input order. function ITensorNetwork{V}(tensors) where {V} - # Build the vertex list with element type `V` so that an empty `tensors` - # input doesn't get the graph's vertex type inferred to whatever - # `keys(tensors)` happens to give (e.g. `Int` for an empty `Vector{ITensor}`). - g = NamedGraph(V[v for v in keys(tensors)]) - default = Dict{V, ITensor}(v => ITensor() for v in vertices(g)) - tn = ITensorNetwork(default, g) - for v in vertices(g) - @preserve_graph tn[v] = tensors[v] - end - vs = collect(vertices(tn)) - for i in eachindex(vs), j in (i + 1):lastindex(vs) - v1, v2 = vs[i], vs[j] - if !isempty(commoninds(tn[v1], tn[v2])) - add_edge!(data_graph(tn), v1 => v2) - end + tn = ITensorNetwork{V}() + for v in keys(tensors) + tn[v] = tensors[v] end return tn end -# Non-parametric delegates: extract `V` via `keytype` / `vertextype`. -function ITensorNetwork(tensors) - return ITensorNetwork{keytype(tensors)}(tensors) -end -function ITensorNetwork(tensors, graph::NamedGraph) - return ITensorNetwork{vertextype(graph)}(tensors, graph) -end +ITensorNetwork(tensors) = ITensorNetwork{keytype(tensors)}(tensors) # # Vertex-type conversion and copy # -function ITensorNetwork{V}(tn::ITensorNetwork) where {V} - g = NamedGraph{V}(underlying_graph(tn)) - tensors = Dict{V, ITensor}(v => tn[v] for v in vertices(tn)) - return ITensorNetwork(tensors, g) +NamedGraphs.convert_vertextype(::Type{V}, tn::ITensorNetwork{V}) where {V} = tn +function NamedGraphs.convert_vertextype(::Type{V}, tn::ITensorNetwork) where {V} + return ITensorNetwork{V}(tn) end -ITensorNetwork(tn::ITensorNetwork) = copy(tn) +Base.copy(tn::ITensorNetwork) = ITensorNetwork(map(copy, vertex_data(tn))) -NamedGraphs.convert_vertextype(::Type{V}, tn::ITensorNetwork{V}) where {V} = tn -NamedGraphs.convert_vertextype(V::Type, tn::ITensorNetwork) = ITensorNetwork{V}(tn) +# +# Mutation: keep `graph`, `vertex_data`, and `ind_to_vertices` in sync. +# + +# Drop the inds of `vertex_data[v]` from the reverse map, leaving +# `vertex_data` and `graph` themselves untouched. Used both as a +# prelude to overwriting `v` and as a step in `_rem_vertex!`. +function _unregister_inds!( + vertex_data::Dictionary{V, ITensor}, + ind_to_vertices::Dict{Index, Set{V}}, + v + ) where {V} + haskey(vertex_data, v) || return nothing + for i in inds(vertex_data[v]) + owners = ind_to_vertices[i] + delete!(owners, v) + isempty(owners) && delete!(ind_to_vertices, i) + end + return nothing +end + +# Write `value` to vertex `v`, updating the reverse map and reconciling +# edges so the graph-edge ↔ shared-`Index` invariant holds. If `v` is +# new it's added to the graph — so this is also the natural way to grow +# the network one tensor at a time without a separate `add_vertex!` +# step. Operates on raw storage so `ITensorNetwork` and +# `TreeTensorNetwork` can share it. 
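+#
+# A hedged sketch of the effect (`i`, `j`, `k` are hypothetical `Index`es):
+#
+#   tn = ITensorNetwork{Int}()
+#   tn[1] = ITensor(i, j)   # adds vertex 1; registers `i`, `j` => Set([1])
+#   tn[2] = ITensor(j, k)   # adds vertex 2; `j` => Set([1, 2]), so the
+#                           # edge 1 => 2 is added automatically
+#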
+function _set_vertex_data!( + graph::NamedGraph{V}, + vertex_data::Dictionary{V, ITensor}, + ind_to_vertices::Dict{Index, Set{V}}, + value, + v + ) where {V} + has_vertex(graph, v) || add_vertex!(graph, v) + _unregister_inds!(vertex_data, ind_to_vertices, v) + # `set!` updates in place when `v` is already present, preserving + # the insertion order of `vertex_data`. Plain `setindex!` would + # error on a missing key, and `insert!` would error on an existing + # one — `set!` handles both branches. + Dictionaries.set!(vertex_data, v, value) + for i in inds(value) + push!(get!(ind_to_vertices, i, Set{V}()), v) + length(ind_to_vertices[i]) <= 2 || error( + "Index $i now appears at $(length(ind_to_vertices[i])) vertices; " * + "`ITensorNetwork` forbids hyperedges (3+ vertices sharing an `Index`)." + ) + end + # Reconcile graph edges incident to `v` against the reverse map. + desired = Set{V}() + for i in inds(value) + for u in ind_to_vertices[i] + u == v || push!(desired, u) + end + end + for u in collect(neighbors(graph, v)) + u in desired || rem_edge!(graph, v => u) + end + for u in desired + has_edge(graph, v, u) || add_edge!(graph, v => u) + end + return nothing +end -function Base.copy(tn::ITensorNetwork{V}) where {V} - g = copy(underlying_graph(tn)) - tensors = Dict{V, ITensor}(v => copy(tn[v]) for v in vertices(g)) - return ITensorNetwork(tensors, g) +function DataGraphs.set_vertex_data!(tn::ITensorNetwork, value, v) + _set_vertex_data!(tn.graph, tn.vertex_data, tn.ind_to_vertices, value, v) + return tn +end + +# Drop `v` from the reverse map, vertex data, and graph in one shot. +function _rem_vertex!( + graph::NamedGraph{V}, + vertex_data::Dictionary{V, ITensor}, + ind_to_vertices::Dict{Index, Set{V}}, + v + ) where {V} + _unregister_inds!(vertex_data, ind_to_vertices, v) + haskey(vertex_data, v) && delete!(vertex_data, v) + rem_vertex!(graph, v) + return nothing end -function NamedGraphs.similar_graph(tn::ITensorNetwork, underlying_graph::AbstractGraph) - g = NamedGraph(underlying_graph) - default = Dict{vertextype(g), ITensor}(v => ITensor() for v in vertices(g)) - return ITensorNetwork(default, g) +function Graphs.rem_vertex!(tn::ITensorNetwork, v) + _rem_vertex!(tn.graph, tn.vertex_data, tn.ind_to_vertices, v) + return tn end diff --git a/src/normalize.jl b/src/normalize.jl index 44f6573d..83d77cec 100644 --- a/src/normalize.jl +++ b/src/normalize.jl @@ -6,10 +6,8 @@ end function rescale(alg::Algorithm"exact", tn::AbstractITensorNetwork; kwargs...) logn = logscalar(alg, tn; kwargs...) - vs = collect(vertices(tn)) - c = inv(exp(logn / length(vs))) - vertices_weights = Dictionary(vs, [c for v in vs]) - return scale_tensors(tn, vertices_weights) + c = inv(exp(logn / length(vertices(tn)))) + return map(t -> c * t, tn) end function rescale( @@ -58,10 +56,8 @@ function LinearAlgebra.normalize( alg::Algorithm"exact", tn::AbstractITensorNetwork; kwargs... ) logn = logscalar(alg, inner_network(tn, tn); kwargs...) - vs = collect(vertices(tn)) - c = inv(exp(logn / (2 * length(vs)))) - vertices_weights = Dictionary(vs, [c for v in vs]) - return scale_tensors(tn, vertices_weights) + c = inv(exp(logn / (2 * length(vertices(tn))))) + return map(t -> c * t, tn) end function LinearAlgebra.normalize( diff --git a/src/solvers/insert.jl b/src/solvers/insert.jl index 2331fc01..342d72e0 100644 --- a/src/solvers/insert.jl +++ b/src/solvers/insert.jl @@ -20,15 +20,17 @@ function insert!(region_iter, local_tensor; normalize = false, set_orthogonal_re local_tensor, indsTe; tags, trunc_kwargs... 
) - @preserve_graph psi[first(region)] = U + psi[first(region)] = U prob = set_truncation_info!(prob; spectrum) else error("Region of length $(length(region)) not currently supported") end v = last(region) - @preserve_graph psi[v] = C + psi[v] = C psi = set_orthogonal_region ? set_ortho_region(psi, [v]) : psi - normalize && @preserve_graph psi[v] = psi[v] / norm(psi[v]) + if normalize + psi[v] = psi[v] / norm(psi[v]) + end prob.state = psi diff --git a/src/treetensornetworks/abstracttreetensornetwork.jl b/src/treetensornetworks/abstracttreetensornetwork.jl index cf355663..f1fb6bd2 100644 --- a/src/treetensornetworks/abstracttreetensornetwork.jl +++ b/src/treetensornetworks/abstracttreetensornetwork.jl @@ -12,12 +12,10 @@ abstract type AbstractTreeTensorNetwork{V} <: AbstractITensorNetwork{V} end const AbstractTTN = AbstractTreeTensorNetwork -function DataGraphs.underlying_graph_type(G::Type{<:AbstractTTN}) - return underlying_graph_type(data_graph_type(G)) +function DataGraphs.underlying_graph_type(::Type{<:AbstractTTN{V}}) where {V} + return NamedGraph{V} end -ITensorNetwork(tn::AbstractTTN) = error("Not implemented") - # # Field access # @@ -91,7 +89,7 @@ end # For ambiguity error function Base.truncate(tn::AbstractTTN, edge::AbstractEdge; kwargs...) - return typeof(tn)(truncate(ITensorNetwork(tn), edge; kwargs...)) + return TreeTensorNetwork(truncate(ITensorNetwork(tn), edge; kwargs...)) end # @@ -267,17 +265,15 @@ function Base.:+( tns[j] = orthogonalize(tns[j], root_vertex) end - # Output state: empty TTN over the same graph as the inputs. - # Tensor data and link indices are filled in by the directsum loop below. - g_out = NamedGraph(underlying_graph(siteinds(tns[1]))) - tensors_out = Dict{vertextype(g_out), ITensor}( - v => ITensor() for v in vertices(g_out) - ) - tn = TreeTensorNetwork(ITensorNetwork(tensors_out, g_out)) - - vs = post_order_dfs_vertices(tn, root_vertex) - es = post_order_dfs_edges(tn, root_vertex) - link_space = Dict{edgetype(tn), Index}() + # Drive traversal off the first input TTN (it has the right graph + # structure). Tensor data is built into a `Dict` buffer and wrapped as + # a `TreeTensorNetwork` at the end — edges of the output network are + # then inferred from the assigned tensors' shared `Index`es. + tn0 = first(tns) + vs = post_order_dfs_vertices(tn0, root_vertex) + es = post_order_dfs_edges(tn0, root_vertex) + link_space = Dict{edgetype(tn0), Index}() + tensors_out = Dict{vertextype(tn0), ITensor}() for v in reverse(vs) edges = filter(e -> dst(e) == v || src(e) == v, es) @@ -293,9 +289,9 @@ function Base.:+( if !isnothing(dim_out) tnv = replaceind(tnv, lv[dim_out] => dag(link_space[edges[dim_out]])) end - tn[v] = tnv + tensors_out[v] = tnv end - return tn + return TreeTensorNetwork(tensors_out) end # TODO: switch default algorithm once more are implemented diff --git a/src/treetensornetworks/opsum_to_ttn/opsum_to_ttn.jl b/src/treetensornetworks/opsum_to_ttn/opsum_to_ttn.jl index 0b607b3a..8861d189 100644 --- a/src/treetensornetworks/opsum_to_ttn/opsum_to_ttn.jl +++ b/src/treetensornetworks/opsum_to_ttn/opsum_to_ttn.jl @@ -290,10 +290,12 @@ function compress_ttn( ) end - # initialize TTN without the dummy indices added; tensors are filled in below - g0 = NamedGraph(underlying_graph(sites0)) - tensors0 = Dict{vertextype(g0), ITensor}(v => ITensor() for v in vertices(g0)) - H = TreeTensorNetwork(ITensorNetwork(tensors0, g0)) + # Buffer the per-vertex tensors in a plain `Dict` first. 
Wrapping in + # `ITensorNetwork` + `TreeTensorNetwork` is deferred until the loop + # below has assigned real tensors at every vertex, so the tree + # structure (and `Index` space type) is locked in only once the data + # is consistent. + H = Dict{vertextype(sites0), ITensor}(v => ITensor() for v in vertices(sites0)) function qnblock(i::Index, q::QN) for b in 2:(nblocks(i) - 1) flux(i, Block(b)) == q && return b @@ -443,7 +445,7 @@ function compress_ttn( H[v] += T * ITensorNetworks.computeSiteProd(sites, Prod([(Op("Id", v))])) end end - return H + return TreeTensorNetwork(H) end # diff --git a/src/treetensornetworks/treetensornetwork.jl b/src/treetensornetworks/treetensornetwork.jl index 01588063..19644bb3 100644 --- a/src/treetensornetworks/treetensornetwork.jl +++ b/src/treetensornetworks/treetensornetwork.jl @@ -1,13 +1,16 @@ -using Dictionaries: Indices +using DataGraphs: DataGraphs, set_vertex_data!, underlying_graph, vertex_data +using Dictionaries: Dictionary, Indices +using Graphs: Graphs, is_tree, rem_vertex!, vertices +using ITensors: ITensor, Index using NamedGraphs.GraphsExtensions: vertextype -using NamedGraphs: similar_graph +using NamedGraphs: NamedGraph """ TreeTensorNetwork{V} <: AbstractTreeTensorNetwork{V} -A tensor network whose underlying graph is a tree. In addition to the tensor data, -it tracks an `ortho_region`: the set of vertices that currently form the orthogonality -center of the network. +A tensor network whose underlying graph is a tree. Storage mirrors +[`ITensorNetwork`](@ref) — `graph`, `vertex_data`, and `ind_to_vertices` +— plus an `ortho_region` tracking the orthogonality center. `TTN` is an alias for `TreeTensorNetwork`. @@ -17,82 +20,60 @@ Use the [`TreeTensorNetwork`](@ref) constructors to build instances, and See also: [`ITensorNetwork`](@ref). """ struct TreeTensorNetwork{V} <: AbstractTreeTensorNetwork{V} - tensornetwork::ITensorNetwork{V} + graph::NamedGraph{V} + vertex_data::Dictionary{V, ITensor} + ind_to_vertices::Dict{Index, Set{V}} ortho_region::Indices{V} - global function _TreeTensorNetwork(tensornetwork::ITensorNetwork, ortho_region::Indices) - @assert is_tree(tensornetwork) - return new{vertextype(tensornetwork)}(tensornetwork, ortho_region) - end end -function _TreeTensorNetwork(tensornetwork::ITensorNetwork, ortho_region) - return _TreeTensorNetwork(tensornetwork, Indices(ortho_region)) -end - -function _TreeTensorNetwork(tensornetwork::ITensorNetwork) - return _TreeTensorNetwork(tensornetwork, vertices(tensornetwork)) +# Empty TTN with no vertices. The is-a-tree invariant holds trivially. +function TreeTensorNetwork{V}() where {V} + return TreeTensorNetwork{V}( + NamedGraph{V}(), + Dictionary{V, ITensor}(), + Dict{Index, Set{V}}(), + Indices{V}() + ) end +TreeTensorNetwork() = TreeTensorNetwork{Any}() """ - TreeTensorNetwork(tn::ITensorNetwork; ortho_region=vertices(tn)) -> TreeTensorNetwork - -Construct a `TreeTensorNetwork` from an `ITensorNetwork` with tree graph structure. + TreeTensorNetwork(tensors) -> TreeTensorNetwork -The `ortho_region` keyword specifies which vertices currently form the orthogonality center. -By default all vertices are included, meaning no particular gauge is assumed. To enforce an -actual orthogonal gauge, call [`orthogonalize`](@ref) afterward. +Construct a `TreeTensorNetwork` from any collection of tensors accepted by +`ITensorNetwork` (e.g. a `Dict`, `Dictionary`, a `Vector{ITensor}`, or another +`AbstractITensorNetwork`). Edges are inferred from shared `Index`es; the +underlying graph must be a tree. 
-Throws an error if the underlying graph of `tn` is not a tree. +The result starts with `ortho_region == vertices(tn)` — i.e. no particular +gauge is assumed. Use [`orthogonalize`](@ref) to bring the network into a +canonical gauge centered at a chosen vertex or region. # Example ```jldoctest -julia> using NamedGraphs.NamedGraphGenerators: named_comb_tree - -julia> using NamedGraphs: NamedGraph - -julia> using Graphs: vertices - -julia> using ITensors: ITensor - -julia> g = named_comb_tree((2, 2)); - -julia> s = siteinds("S=1/2", g); - -julia> tensors = Dict(v => ITensor(s[v]...) for v in vertices(g)); +julia> using ITensors: Index, ITensor -julia> itn = ITensorNetwork(tensors, NamedGraph(g)); +julia> i, j, k = Index(2, "i"), Index(2, "j"), Index(2, "k"); -julia> ttn_state = TreeTensorNetwork(itn; ortho_region = [first(vertices(itn))]); +julia> ttn = TreeTensorNetwork([ITensor(i, j), ITensor(j, k)]); ``` See also: [`ITensorNetwork`](@ref), [`orthogonalize`](@ref). """ -function TreeTensorNetwork(tn::ITensorNetwork; ortho_region = vertices(tn)) - return _TreeTensorNetwork(tn, ortho_region) -end -function TreeTensorNetwork{V}(tn::ITensorNetwork) where {V} - return TreeTensorNetwork(ITensorNetwork{V}(tn)) +function TreeTensorNetwork(tensors) + itn = ITensorNetwork(tensors) + @assert is_tree(itn) + V = vertextype(itn) + return TreeTensorNetwork{V}( + itn.graph, itn.vertex_data, itn.ind_to_vertices, Indices{V}(vertices(itn)) + ) end const TTN = TreeTensorNetwork -function NamedGraphs.similar_graph(ttn::TTN, underlying_graph::AbstractGraph) - return TTN(similar_graph(ttn.tensornetwork, underlying_graph)) -end - # Field access -""" - ITensorNetwork(tn::TreeTensorNetwork) -> ITensorNetwork - -Convert a `TreeTensorNetwork` to a plain `ITensorNetwork`, discarding orthogonality -metadata. The returned network shares the same underlying tensor data. - -See also: [`TreeTensorNetwork`](@ref). -""" -ITensorNetwork(tn::TTN) = copy(tn.tensornetwork) - """ ortho_region(tn::TreeTensorNetwork) -> Indices @@ -102,23 +83,33 @@ See also: [`orthogonalize`](@ref). """ ortho_region(tn::TTN) = tn.ortho_region -# Required for `AbstractITensorNetwork` interface -data_graph(tn::TTN) = data_graph(tn.tensornetwork) +# `AbstractITensorNetwork` storage hooks. +DataGraphs.underlying_graph(tn::TTN) = tn.graph +DataGraphs.vertex_data(tn::TTN) = tn.vertex_data -function data_graph_type(G::Type{<:TTN}) - return data_graph_type(fieldtype(G, :tensornetwork)) +function DataGraphs.set_vertex_data!(tn::TTN, value, v) + _set_vertex_data!(tn.graph, tn.vertex_data, tn.ind_to_vertices, value, v) + return tn end -function Base.copy(tn::TTN) - return _TreeTensorNetwork(copy(tn.tensornetwork), copy(tn.ortho_region)) +function Graphs.rem_vertex!(tn::TTN, v) + _rem_vertex!(tn.graph, tn.vertex_data, tn.ind_to_vertices, v) + return tn end -# -# Constructor -# +function Base.copy(tn::TTN{V}) where {V} + return TreeTensorNetwork{V}( + copy(tn.graph), + map(copy, tn.vertex_data), + Dict{Index, Set{V}}(i => copy(vs) for (i, vs) in tn.ind_to_vertices), + copy(tn.ortho_region) + ) +end # set_ortho_region: low-level update of the ortho_region metadata only, # without any gauge transformations. To move the orthogonality center use orthogonalize. 
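+# For example, `set_ortho_region(tn, [v])` merely relabels the region and
+# returns a new `TTN` sharing the same tensor storage; `orthogonalize(tn, v)`
+# actually gauges the tensors toward `v`.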
-function set_ortho_region(tn::TTN, ortho_region) - return TreeTensorNetwork(tn.tensornetwork; ortho_region) +function set_ortho_region(tn::TTN{V}, ortho_region) where {V} + return TreeTensorNetwork{V}( + tn.graph, tn.vertex_data, tn.ind_to_vertices, Indices{V}(ortho_region) + ) end diff --git a/test/test_belief_propagation.jl b/test/test_belief_propagation.jl index 8776c455..29aafc68 100644 --- a/test/test_belief_propagation.jl +++ b/test/test_belief_propagation.jl @@ -1,6 +1,6 @@ using Compat: Compat using Graphs: vertices -using ITensorNetworks: ITensorNetworks, @preserve_graph, BeliefPropagationCache, contract, +using ITensorNetworks: ITensorNetworks, BeliefPropagationCache, contract, contraction_sequence, environment, inner_network, message, message_diff, partitioned_tensornetwork, scalar, siteinds, tensornetwork, update, update_factor, updated_message, ⊗ @@ -37,8 +37,8 @@ using Test: @test, @testset new_A_dag = ITensors.replaceind( dag(prime(new_A)), only(s[first(vket)])', only(s[first(vket)]) ) - @preserve_graph bpc[vket] = new_A - @preserve_graph bpc[vbra] = new_A_dag + bpc[vket] = new_A + bpc[vbra] = new_A_dag @test bpc[vket] == new_A @test bpc[vbra] == new_A_dag diff --git a/test/test_ttn_position.jl b/test/test_ttn_position.jl index f2e0e3ab..a5baf934 100644 --- a/test/test_ttn_position.jl +++ b/test/test_ttn_position.jl @@ -3,7 +3,7 @@ using Graphs: vertices using ITensorNetworks: ITensorNetwork, ProjTTN, TreeTensorNetwork, environments, position, siteinds using ITensors.NDTensors: with_auto_fermion -using ITensors: ITensor +using ITensors: ITensor, Index using NamedGraphs.NamedGraphGenerators: named_comb_tree, named_path_graph using NamedGraphs: NamedEdge, NamedGraph using Test: @test, @testset @@ -47,9 +47,11 @@ using .ModelHamiltonians: ModelHamiltonians end @testset "ProjTTN construction regression test" begin pos = Indices{Tuple{String, Int}}() - g = NamedGraph{Any}(named_path_graph(2)) - tensors = Dict{Any, ITensor}(v => ITensor() for v in vertices(g)) - operator = TreeTensorNetwork(ITensorNetwork{Any}(tensors, g)) + # Share a placeholder `Index` between the two tensors so the resulting + # `ITensorNetwork` has a single edge (and is therefore a valid tree). + link = Index(1, "Link") + tensors = Dict{Any, ITensor}(v => ITensor(link) for v in vertices(named_path_graph(2))) + operator = TreeTensorNetwork(tensors) environments = Dictionary{NamedEdge{Any}, ITensor}() @test ProjTTN(pos, operator, environments) isa ProjTTN{Any, Indices{Any}} end diff --git a/test/test_ttns.jl b/test/test_ttns.jl index 46278013..fe39b458 100644 --- a/test/test_ttns.jl +++ b/test/test_ttns.jl @@ -16,8 +16,8 @@ include("utils.jl") psi = TreeTensorNetwork(productstate(v -> "Up", sites)) # product state itn = ITensorNetwork(psi) # TTN → ITensorNetwork - @test vertex_data(itn) == vertex_data(psi.tensornetwork) - @test !(itn === psi.tensornetwork) + @test vertex_data(itn) == vertex_data(psi) + @test !(vertex_data(itn) === vertex_data(psi)) @test vertex_data(TreeTensorNetwork(itn)) == vertex_data(psi) end diff --git a/test/utils.jl b/test/utils.jl index b42f5ba6..e6492676 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -4,8 +4,9 @@ # inside its gensym module. 
using DataGraphs: underlying_graph, vertex_data -using Graphs: AbstractGraph, add_edge!, dst, edges, src, vertices -using ITensorNetworks: @preserve_graph, ITensorNetwork, IndsNetwork, data_graph +using Dictionaries: Indices +using Graphs: AbstractGraph, dst, edges, src, vertices +using ITensorNetworks: ITensorNetwork, IndsNetwork using ITensors.NDTensors: dim using ITensors: ITensors, ITensor, Index, QN, dag, hasqns, inds, itensor, onehot using NamedGraphs.GraphsExtensions: incident_edges @@ -35,14 +36,15 @@ function random_tensornetwork( g = NamedGraph(graph) links = Dict(e => Index(link_space, "Link") for e in edges(g)) links = merge(links, Dict(reverse(e) => links[e] for e in edges(g))) - tensors = Dict( - map(collect(vertices(g))) do v - link_v = [links[e] for e in incident_edges(g, v)] - inds_v = [siteinds[v]; link_v] - return v => itensor(randn(rng, eltype, dim.(inds_v)...), inds_v) - end - ) - return ITensorNetwork(tensors, g) + # `Indices`-keyed `map` returns a `Dictionary` (insertion-ordered), + # so the constructed `ITensorNetwork`'s vertex / edge order tracks + # `vertices(g)`. + ts = map(Indices(vertices(g))) do v + link_v = [links[e] for e in incident_edges(g, v)] + inds_v = [siteinds[v]; link_v] + return itensor(randn(rng, eltype, dim.(inds_v)...), inds_v) + end + return ITensorNetwork(ts) end # `IndsNetwork`: extract site inds (`Index[]` where unassigned). @@ -87,15 +89,13 @@ end # uses QR, which would push site-state QN flux into the link and leave BP's # `default_message` (single-index `delta`) with no compatible block. `onehot` # defaults to `Float64`, so we typecast it to `elt` to keep `productstate(elt, -# ...)` element-type-preserving. Uses `@preserve_graph` plus an explicit -# `add_edge!` on the underlying graph so the graph-edge ↔ shared-Index -# invariant is maintained without relying on auto-reconciliation. +# ...)` element-type-preserving. Reverse-map reconciliation on the second +# `setindex!` brings the graph edge along for free. function _add_edge!(elt::Type, tn, edge) iₑ = Index(_trivial_link_space(tn), "Link") X = ITensors.convert_eltype(elt, onehot(iₑ => 1)) - @preserve_graph tn[src(edge)] = tn[src(edge)] * X - @preserve_graph tn[dst(edge)] = tn[dst(edge)] * dag(X) - add_edge!(data_graph(tn), edge) + tn[src(edge)] = tn[src(edge)] * X + tn[dst(edge)] = tn[dst(edge)] * dag(X) return tn end @@ -108,15 +108,12 @@ function productstate(elt::Type, state::Function, s::IndsNetwork) return productstate(elt, Dict(v => state(v) for v in vertices(s)), s) end function productstate(elt::Type, state, s::IndsNetwork) - g = NamedGraph(collect(vertices(s))) - tensors = Dict( - map(collect(vertices(s))) do v - site_v = isassigned(vertex_data(s), v) ? s[v] : Index[] - t = ITensors.state(state[v], only(site_v)) - return v => ITensors.convert_eltype(elt, t) - end - ) - tn = ITensorNetwork(tensors, g) + ts = map(Indices(vertices(s))) do v + site_v = isassigned(vertex_data(s), v) ? s[v] : Index[] + t = ITensors.state(state[v], only(site_v)) + return ITensors.convert_eltype(elt, t) + end + tn = ITensorNetwork(ts) for e in edges(s) _add_edge!(elt, tn, e) end