From 1c13a09920f1d13034d8e3e7516f8fcf07c67878 Mon Sep 17 00:00:00 2001 From: Matthew Fishman Date: Mon, 11 May 2026 21:38:55 -0400 Subject: [PATCH 01/10] Add reverse Index map to ITensorNetwork; drop preserve_graph / fix_edges! MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds an `Index → Set{vertex}` reverse map as a field on `ITensorNetwork` and uses it to maintain the graph-edge ↔ shared-`Index` invariant on every vertex write in O(deg(v) + |inds(value)|), eliminating the need for the `@preserve_graph` bypass and the O(n) `fix_edges!` sweep. Removed: - `@preserve_graph` macro, `setindex_preserve_graph!`, `fix_edges!` - `map_vertex_data_preserve_graph`, `map_vertices_preserve_graph!` - `map_vertex_data` (replaced by `Base.map` / `Base.map!`) - `scale_tensors` / `scale_tensors!` (inlined at the three callsites in `normalize.jl` and `caches/abstractbeliefpropagationcache.jl`) - `Base.setindex!(::AbstractBeliefPropagationCache, ::ITensor, vertex) = not_implemented()` stubs (BPC writes now go through the underlying `ITensorNetwork`'s `set_vertex_data!`) Each wrapper type (`TreeTensorNetwork`, `BeliefPropagationCache`, `BilinearFormNetwork`, `LinearFormNetwork`, `QuadraticFormNetwork`) defines its own `set_vertex_data!` that forwards to its wrapped network; there is no abstract `tensornetwork(::AbstractITensorNetwork)` interface method. Simplified `Graphs.add_edge!(::AbstractITensorNetwork, edge)`: under the invariant the "fill missing half" branch collapses to a single `qr!`. --- docs/src/interface_methods.md | 8 +- src/abstractitensornetwork.jl | 169 +++---------------- src/apply.jl | 6 +- src/caches/abstractbeliefpropagationcache.jl | 18 +- src/caches/beliefpropagationcache.jl | 8 +- src/formnetworks/bilinearformnetwork.jl | 17 +- src/formnetworks/linearformnetwork.jl | 13 +- src/formnetworks/quadraticformnetwork.jl | 8 + src/itensornetwork.jl | 132 +++++++++++++-- src/normalize.jl | 12 +- src/solvers/insert.jl | 8 +- src/treetensornetworks/treetensornetwork.jl | 7 + test/test_belief_propagation.jl | 6 +- test/utils.jl | 14 +- 14 files changed, 216 insertions(+), 210 deletions(-) diff --git a/docs/src/interface_methods.md b/docs/src/interface_methods.md index 2fd054ee..88485d80 100644 --- a/docs/src/interface_methods.md +++ b/docs/src/interface_methods.md @@ -65,12 +65,10 @@ These ITensorNetwork constructor interfaces are foundational to other constructo ## Global Operations on ITensorNetworks -* Scale tensors at chosen vertices by per-vertex weights, either out-of-place or in-place (`abstractitensornetwork.jl`). +* Map a function over the vertex tensors of an `ITensorNetwork`, returning a copy + (`abstractitensornetwork.jl`): ```julia - scale_tensors(tn::AbstractITensorNetwork, vertices_weights::Dictionary; kwargs...) - scale_tensors(weight_function::Function, tn; kwargs...) - scale_tensors!(tn::AbstractITensorNetwork, vertices_weights::Dictionary) - scale_tensors!(weight_function::Function, tn::AbstractITensorNetwork; kwargs...) 
+ map(f, tn::AbstractITensorNetwork) ``` * Tensor product (disjoint union) of two ITensorNetworks (`abstractitensornetwork.jl`): diff --git a/src/abstractitensornetwork.jl b/src/abstractitensornetwork.jl index 36971ac6..5a046375 100644 --- a/src/abstractitensornetwork.jl +++ b/src/abstractitensornetwork.jl @@ -4,11 +4,10 @@ using DataGraphs: DataGraphs, edge_data, get_vertex_data, is_vertex_assigned, using Dictionaries: Dictionary using Graphs: Graphs, Graph, add_edge!, add_vertex!, bfs_tree, center, dst, edges, edgetype, has_edge, ne, neighbors, rem_edge!, src, vertices -using ITensors: ITensors, ITensor, addtags, commoninds, commontags, contract, dag, inds, - noprime, onehot, prime, replaceprime, replacetags, setprime, settags, sim, swaptags, - tags +using ITensors: ITensors, ITensor, Index, addtags, commoninds, commontags, contract, dag, + inds, noprime, onehot, prime, replaceprime, replacetags, setprime, settags, sim, + swaptags, tags using LinearAlgebra: LinearAlgebra, qr, qr! -using MacroTools: @capture using NDTensors: NDTensors, Algorithm, dim, scalartype using NamedGraphs.GraphsExtensions: add_edges, directed_graph, incident_edges, rename_vertices, vertextype, ⊔ @@ -64,34 +63,6 @@ function ITensors.datatype(tn::AbstractITensorNetwork) return mapreduce(v -> datatype(tn[v]), promote_type, vertices(tn)) end -function is_setindex!_expr(expr::Expr) - return is_assignment_expr(expr) && is_getindex_expr(first(expr.args)) -end -is_setindex!_expr(x) = false -is_getindex_expr(expr::Expr) = (expr.head === :ref) -is_getindex_expr(x) = false -is_assignment_expr(expr::Expr) = (expr.head === :(=)) -is_assignment_expr(expr) = false - -# TODO: Define this in terms of a function mapping -# preserve_graph_function(::typeof(setindex!)) = setindex!_preserve_graph -# preserve_graph_function(::typeof(map_vertex_data)) = map_vertex_data_preserve_graph -# Also allow annotating codeblocks like `@views`. -macro preserve_graph(expr) - if !is_setindex!_expr(expr) - error( - "preserve_graph must be used with setindex! syntax (as @preserve_graph a[i,j,...] = value)" - ) - end - @capture(expr, array_[indices__] = value_) - return :(setindex_preserve_graph!($(esc(array)), $(esc(value)), $(esc.(indices)...))) -end - -function setindex_preserve_graph!(tn::AbstractITensorNetwork, value, vertex) - data_graph(tn)[vertex] = value - return tn -end - # AbstractDataGraphs overloads DataGraphs.underlying_graph(tn::AbstractITensorNetwork) = underlying_graph(data_graph(tn)) @@ -108,43 +79,9 @@ function DataGraphs.get_vertex_data(is::AbstractITensorNetwork, v) return get_vertex_data(data_graph(is), v) end -function DataGraphs.set_vertex_data!(tn::AbstractITensorNetwork, value, v) - @preserve_graph tn[v] = value - fix_edges!(tn, v) - return tn -end - function DataGraphs.set_vertices_data!(tn::AbstractITensorNetwork, values, vertices) for v in vertices - @preserve_graph tn[v] = values[v] - end - for v in vertices - fix_edges!(tn, v) - end - return tn -end - -# Reconcile the graph edges incident to `v` with the shared Indices of -# `tn[v]`, after a non-`@preserve_graph` write that may have changed -# which neighbors share an Index with `v`. Removes any incident edges -# that no longer correspond to a shared Index, and adds edges to any -# vertex now sharing one. The long-term invariant is graph-edge ↔ -# shared-Index one-to-one; until that's structural (e.g. 
via a reverse -# Index map on the type), this auto-reconciliation runs after -# `set_vertex_data!` / `set_vertices_data!` so plain `tn[v] = ...` -# writes don't desync the two. Callers that already maintain the -# invariant explicitly should bypass this with `@preserve_graph`. -function fix_edges!(tn::AbstractITensorNetwork, v) - for edge in incident_edges(tn, v) - rem_edge!(tn, edge) - end - for vertex in vertices(tn) - if v ≠ vertex - edge = v => vertex - if !isempty(linkinds(tn, edge)) - add_edge!(data_graph(tn), edge) - end - end + set_vertex_data!(tn, values[v], v) end return tn end @@ -157,14 +94,7 @@ function NamedGraphs.ordered_vertices(tn::AbstractITensorNetwork) end function Adapt.adapt_structure(to, tn::AbstractITensorNetwork) - # TODO: Define and use: - # - # @preserve_graph map_vertex_data(adapt(to), tn) - # - # or just: - # - # @preserve_graph map(adapt(to), tn) - return map_vertex_data_preserve_graph(adapt(to), tn) + return map(adapt(to), tn) end # @@ -300,12 +230,12 @@ function ITensors.replaceinds( @assert underlying_graph(is) == underlying_graph(is′) for v in vertices(is) isassigned(is, v) || continue - @preserve_graph tn[v] = replaceinds(tn[v], is[v] => is′[v]) + tn[v] = replaceinds(tn[v], is[v] => is′[v]) end for e in edges(is) isassigned(is, e) || continue for v in (src(e), dst(e)) - @preserve_graph tn[v] = replaceinds(tn[v], is[e] => is′[e]) + tn[v] = replaceinds(tn[v], is[e] => is′[e]) end end return tn @@ -368,42 +298,25 @@ end LinearAlgebra.adjoint(tn::Union{IndsNetwork, AbstractITensorNetwork}) = prime(tn) -function map_vertex_data(f, tn::AbstractITensorNetwork) - tn = copy(tn) +# In-place / out-of-place `map` over the vertex tensors of `tn`. Reverse-map +# reconciliation makes the per-vertex write cheap, so these match +# `Base.map` / `Base.map!`'s usual element-wise semantics without needing +# a separate "preserve graph" entry point. 
+function Base.map!(f, tn::AbstractITensorNetwork)
     for v in vertices(tn)
         tn[v] = f(tn[v])
     end
     return tn
 end
 
-# TODO: Define @preserve_graph map_vertex_data(f, tn)`
-function map_vertex_data_preserve_graph(f, tn::AbstractITensorNetwork)
-    tn = copy(tn)
-    for v in vertices(tn)
-        @preserve_graph tn[v] = f(tn[v])
-    end
-    return tn
-end
-
-function map_vertices_preserve_graph!(
-        f,
-        tn::AbstractITensorNetwork;
-        vertices = vertices(tn)
-    )
-    for v in vertices
-        @preserve_graph tn[v] = f(v)
-    end
-    return tn
-end
+Base.map(f, tn::AbstractITensorNetwork) = map!(f, copy(tn))
 
 function Base.conj(tn::AbstractITensorNetwork)
-    # TODO: Use `@preserve_graph map_vertex_data(f, tn)`
-    return map_vertex_data_preserve_graph(conj, tn)
+    return map(conj, tn)
 end
 
 function ITensors.dag(tn::AbstractITensorNetwork)
-    # TODO: Use `@preserve_graph map_vertex_data(f, tn)`
-    return map_vertex_data_preserve_graph(dag, tn)
+    return map(dag, tn)
 end
 
 # TODO: should this make sure that internal indices
@@ -478,7 +391,7 @@ function NDTensors.contract(
     for n_dst in neighbors_dst
         add_edge!(data_graph(tn), merged_vertex => n_dst)
     end
-    @preserve_graph tn[merged_vertex] = new_itensor
+    tn[merged_vertex] = new_itensor
     return tn
 end
 
@@ -494,8 +407,8 @@ end
 function LinearAlgebra.qr!(tn::AbstractITensorNetwork, edge::AbstractEdge)
     left_inds = setdiff(inds(tn[src(edge)]), inds(tn[dst(edge)]))
     Q, R = qr(tn[src(edge)], left_inds; tags = edge_tag(edge))
-    @preserve_graph tn[src(edge)] = Q
-    @preserve_graph tn[dst(edge)] = R * tn[dst(edge)]
+    tn[src(edge)] = Q
+    tn[dst(edge)] = R * tn[dst(edge)]
     return tn
 end
 
@@ -548,8 +461,8 @@ function _truncate_edge(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs..
     left_inds = setdiff(inds(tn[src(edge)]), inds(tn[dst(edge)]))
     ltags = tags(tn, edge)
     U, S, V = svd(tn[src(edge)], left_inds; lefttags = ltags, kwargs...)
-    @preserve_graph tn[src(edge)] = U
-    @preserve_graph tn[dst(edge)] = tn[dst(edge)] * (S * V)
+    tn[src(edge)] = U
+    tn[dst(edge)] = tn[dst(edge)] * (S * V)
     return tn
 end
 
@@ -677,17 +590,15 @@ function linkdims(tn::AbstractITensorNetwork{V}) where {V}
     return ld
 end
 
-# Ensure `edge` is present in both the underlying graph and as a shared
-# Index between the endpoint tensors. The long-term invariant is that the
-# two are one-to-one; today they can drift apart, so this fills in
-# whichever piece is missing. Returns the `Graphs.add_edge!` convention:
-# `true` if a new graph edge was added, `false` if the graph edge was
-# already present (regardless of whether link inds had to be threaded).
+# Add a new edge between two vertices by threading a fresh link `Index`
+# via QR: `qr!` writes the new `Index` first into the `src` tensor, then
+# into the `dst` tensor, and reverse-map reconciliation on that second
+# write adds the graph edge. Returns the `Graphs.add_edge!` convention:
+# `true` if a new graph edge was added, `false` if it was already there.
 function Graphs.add_edge!(tn::AbstractITensorNetwork, edge)
-    added = !has_edge(tn, edge)
-    added && add_edge!(data_graph(tn), edge)
-    isempty(linkinds(tn, edge)) && qr!(tn, edge)
-    return added
+    has_edge(tn, edge) && return false
+    qr!(tn, edge)
+    return true
 end
 
 # TODO: What to output? Could be an `IndsNetwork`.
Or maybe @@ -780,30 +691,6 @@ function add(tn1::AbstractITensorNetwork, tn2::AbstractITensorNetwork) return tn12 end -# Scale each tensor of the network via a function vertex -> Number -function scale_tensors!( - weight_function::Function, - tn::AbstractITensorNetwork; - vertices = collect(Graphs.vertices(tn)) - ) - return map_vertices_preserve_graph!(v -> weight_function(v) * tn[v], tn; vertices) -end - -# Scale each tensor of the network by a scale factor for each vertex in the keys of the dictionary -function scale_tensors!(tn::AbstractITensorNetwork, vertices_weights::Dictionary) - return scale_tensors!(v -> vertices_weights[v], tn; vertices = keys(vertices_weights)) -end - -function scale_tensors(weight_function::Function, tn; kwargs...) - tn = copy(tn) - return scale_tensors!(weight_function, tn; kwargs...) -end - -function scale_tensors(tn::AbstractITensorNetwork, vertices_weights::Dictionary; kwargs...) - tn = copy(tn) - return scale_tensors!(tn, vertices_weights; kwargs...) -end - Base.:+(tn1::AbstractITensorNetwork, tn2::AbstractITensorNetwork) = add(tn1, tn2) ITensors.hasqns(tn::AbstractITensorNetwork) = any(v -> hasqns(tn[v]), vertices(tn)) diff --git a/src/apply.jl b/src/apply.jl index 99fe9ecf..bfb25fcb 100644 --- a/src/apply.jl +++ b/src/apply.jl @@ -113,7 +113,7 @@ function ITensors.apply( if normalize oψᵥ ./= norm(oψᵥ) end - setindex_preserve_graph!(ψ, oψᵥ, v⃗[1]) + ψ[v⃗[1]] = oψᵥ elseif length(v⃗) == 2 envs = Vector{ITensor}(envs) if !iszero(ne(ITensorNetwork(envs))) @@ -135,8 +135,8 @@ function ITensors.apply( ψᵥ₁ ./= norm(ψᵥ₁) ψᵥ₂ ./= norm(ψᵥ₂) end - setindex_preserve_graph!(ψ, ψᵥ₁, v⃗[1]) - setindex_preserve_graph!(ψ, ψᵥ₂, v⃗[2]) + ψ[v⃗[1]] = ψᵥ₁ + ψ[v⃗[2]] = ψᵥ₂ elseif length(v⃗) < 1 error("Gate being applied does not share indices with tensor network.") elseif length(v⃗) > 2 diff --git a/src/caches/abstractbeliefpropagationcache.jl b/src/caches/abstractbeliefpropagationcache.jl index 79c6ef5e..944700b1 100644 --- a/src/caches/abstractbeliefpropagationcache.jl +++ b/src/caches/abstractbeliefpropagationcache.jl @@ -45,9 +45,6 @@ default_messages(ptn::PartitionedGraph) = Dictionary() end default_partitioned_vertices(ψ::AbstractITensorNetwork) = group(v -> v, vertices(ψ)) -function Base.setindex!(bpc::AbstractBeliefPropagationCache, factor::ITensor, vertex) - return not_implemented() -end partitioned_tensornetwork(bpc::AbstractBeliefPropagationCache) = not_implemented() messages(bpc::AbstractBeliefPropagationCache) = not_implemented() function default_message( @@ -131,7 +128,7 @@ end function map_factors(f, bpc::AbstractBeliefPropagationCache, vs = vertices(bpc)) bpc = copy(bpc) for v in vs - @preserve_graph bpc[v] = f(bpc[v]) + bpc[v] = f(bpc[v]) end return bpc end @@ -176,14 +173,14 @@ function update_factors(bpc::AbstractBeliefPropagationCache, factors) bpc = copy(bpc) for vertex in eachindex(factors) # TODO: Add a check that this preserves the graph structure. 
- setindex_preserve_graph!(bpc, factors[vertex], vertex) + bpc[vertex] = factors[vertex] end return bpc end function update_factor(bpc, vertex, factor) bpc = copy(bpc) - setindex_preserve_graph!(bpc, factor, vertex) + bpc[vertex] = factor return bpc end @@ -371,8 +368,9 @@ function rescale_partitions( ) bpc = copy(bpc) tn = tensornetwork(bpc) - norms = map(v -> inv(norm(tn[v])), verts) - scale_tensors!(bpc, Dictionary(verts, norms)) + for v in verts + bpc[v] = inv(norm(tn[v])) * bpc[v] + end vertices_weights = Dictionary() for pv in partitions @@ -388,7 +386,9 @@ function rescale_partitions( end end - scale_tensors!(bpc, vertices_weights) + for (v, w) in pairs(vertices_weights) + bpc[v] = w * bpc[v] + end return bpc end diff --git a/src/caches/beliefpropagationcache.jl b/src/caches/beliefpropagationcache.jl index 0fcea4ef..9a8e623a 100644 --- a/src/caches/beliefpropagationcache.jl +++ b/src/caches/beliefpropagationcache.jl @@ -1,3 +1,4 @@ +using DataGraphs: DataGraphs, set_vertex_data! using Graphs: IsDirected using ITensors: dir using LinearAlgebra: diag, dot @@ -106,7 +107,12 @@ function default_bp_edge_sequence(bp_cache::BeliefPropagationCache) return default_edge_sequence(partitioned_tensornetwork(bp_cache)) end -Base.setindex!(bpc::BeliefPropagationCache, factor::ITensor, vertex) = not_implemented() +# Forward vertex writes to the underlying `ITensorNetwork` so its +# reverse-index map and edge reconciliation handle the update. +function DataGraphs.set_vertex_data!(bpc::BeliefPropagationCache, value, vertex) + set_vertex_data!(tensornetwork(bpc), value, vertex) + return bpc +end partitions(bpc::BeliefPropagationCache) = quotientvertices(partitioned_tensornetwork(bpc)) function PartitionedGraphs.quotientedges(bpc::BeliefPropagationCache) return quotientedges(partitioned_tensornetwork(bpc)) diff --git a/src/formnetworks/bilinearformnetwork.jl b/src/formnetworks/bilinearformnetwork.jl index 4c632c1a..bfa9cf03 100644 --- a/src/formnetworks/bilinearformnetwork.jl +++ b/src/formnetworks/bilinearformnetwork.jl @@ -1,4 +1,5 @@ using Adapt: adapt +using DataGraphs: DataGraphs, set_vertex_data! using ITensors.NDTensors: datatype, denseblocks using ITensors: ITensor, Op, delta, prime, sim using NamedGraphs.GraphsExtensions: disjoint_union @@ -58,6 +59,13 @@ ket_vertex_suffix(blf::BilinearFormNetwork) = blf.ket_vertex_suffix # TODO: Use `NamedGraphs.GraphsExtensions.parent_graph`. tensornetwork(blf::BilinearFormNetwork) = blf.tensornetwork +# Forward vertex writes to the wrapped network so reverse-index map and +# edge reconciliation run on the underlying `ITensorNetwork`. +function DataGraphs.set_vertex_data!(blf::BilinearFormNetwork, value, vertex) + set_vertex_data!(tensornetwork(blf), value, vertex) + return blf +end + function Base.copy(blf::BilinearFormNetwork) return BilinearFormNetwork( copy(tensornetwork(blf)), @@ -106,12 +114,7 @@ function update( ket_state::ITensor ) blf = copy(blf) - # TODO: Maybe add a check that it really does preserve the graph. 
-    setindex_preserve_graph!(
-        tensornetwork(blf), bra_state, bra_vertex(blf, original_bra_state_vertex)
-    )
-    setindex_preserve_graph!(
-        tensornetwork(blf), ket_state, ket_vertex(blf, original_ket_state_vertex)
-    )
+    tensornetwork(blf)[bra_vertex(blf, original_bra_state_vertex)] = bra_state
+    tensornetwork(blf)[ket_vertex(blf, original_ket_state_vertex)] = ket_state
     return blf
 end
diff --git a/src/formnetworks/linearformnetwork.jl b/src/formnetworks/linearformnetwork.jl
index 94c45f96..0bd33c0c 100644
--- a/src/formnetworks/linearformnetwork.jl
+++ b/src/formnetworks/linearformnetwork.jl
@@ -1,3 +1,4 @@
+using DataGraphs: DataGraphs, set_vertex_data!
 using Graphs: AbstractGraph
 using ITensors: ITensor, prime
 using NamedGraphs.GraphsExtensions: disjoint_union
@@ -40,6 +41,13 @@ ket_vertex_suffix(lf::LinearFormNetwork) = lf.ket_vertex_suffix
 # TODO: Use `NamedGraphs.GraphsExtensions.parent_graph`.
 tensornetwork(lf::LinearFormNetwork) = lf.tensornetwork
 
+# Forward vertex writes to the wrapped network so reverse-index map and
+# edge reconciliation run on the underlying `ITensorNetwork`.
+function DataGraphs.set_vertex_data!(lf::LinearFormNetwork, value, vertex)
+    set_vertex_data!(tensornetwork(lf), value, vertex)
+    return lf
+end
+
 function NamedGraphs.similar_graph(
         lf::LinearFormNetwork,
         underlying_graph::AbstractGraph
@@ -56,9 +64,6 @@ end
 
 function update(lf::LinearFormNetwork, original_ket_state_vertex, ket_state::ITensor)
     lf = copy(lf)
-    # TODO: Maybe add a check that it really does preserve the graph.
-    setindex_preserve_graph!(
-        tensornetwork(lf), ket_state, ket_vertex(blf, original_ket_state_vertex)
-    )
+    tensornetwork(lf)[ket_vertex(lf, original_ket_state_vertex)] = ket_state
     return lf
 end
diff --git a/src/formnetworks/quadraticformnetwork.jl b/src/formnetworks/quadraticformnetwork.jl
index 9767c30c..263063c3 100644
--- a/src/formnetworks/quadraticformnetwork.jl
+++ b/src/formnetworks/quadraticformnetwork.jl
@@ -1,3 +1,4 @@
+using DataGraphs: DataGraphs, set_vertex_data!
 using NamedGraphs: similar_graph
 
 default_index_map = prime
@@ -46,6 +47,13 @@ end
 dual_index_map(qf::QuadraticFormNetwork) = qf.dual_index_map
 dual_inv_index_map(qf::QuadraticFormNetwork) = qf.dual_inv_index_map
+
+# Forward vertex writes to the inner `BilinearFormNetwork`, which in turn
+# forwards to the underlying `ITensorNetwork`.
+function DataGraphs.set_vertex_data!(qf::QuadraticFormNetwork, value, vertex)
+    set_vertex_data!(bilinear_formnetwork(qf), value, vertex)
+    return qf
+end
 
 function Base.copy(qf::QuadraticFormNetwork)
     return QuadraticFormNetwork(
         copy(bilinear_formnetwork(qf)), dual_index_map(qf), dual_inv_index_map(qf)
diff --git a/src/itensornetwork.jl b/src/itensornetwork.jl
index defa489c..93c4e23c 100644
--- a/src/itensornetwork.jl
+++ b/src/itensornetwork.jl
@@ -1,5 +1,7 @@
-using DataGraphs: DataGraphs, DataGraph
-using ITensors: ITensors, ITensor
+using DataGraphs:
+    DataGraphs, DataGraph, is_vertex_assigned, set_vertex_data!, underlying_graph
+using Graphs: Graphs, add_edge!, edgetype, has_edge, neighbors, rem_edge!, rem_vertex!
+using ITensors: ITensors, ITensor, Index, inds
 using NamedGraphs: NamedGraphs, NamedEdge, NamedGraph, similar_graph, vertextype
 
 """
     ITensorNetwork{V}
 
 A tensor network where each vertex holds an `ITensor`. The network graph is a
 `NamedGraph{V}` and edges represent shared indices between neighboring tensors.
 
+The type maintains a reverse index map (`Index → vertices`) so that vertex +lookup by shared `Index` is O(1) and the graph-edge ↔ shared-`Index` +correspondence is reconciled in O(deg(v) + |inds(tn[v])|) on every tensor +write. + # Constructors **From a collection of `ITensor`s** (edges inferred from shared indices): @@ -42,17 +49,30 @@ See also: `IndsNetwork`, [`TreeTensorNetwork`](@ref ITensorNetworks.TreeTensorNe """ struct ITensorNetwork{V} <: AbstractITensorNetwork{V} data_graph::DataGraph{V, ITensor, ITensor, NamedGraph{V}, NamedEdge{V}} - - # Sole inner ctor: place `tensors` at the vertices of `graph`. No checks — - # `tensors` must be indexable at every vertex, the graph's edges are - # taken at face value. + # Reverse index map: for every `Index` appearing in any vertex tensor, + # the set of vertices holding that `Index`. Maintained by `setindex!` + # so that edge reconciliation after a write is O(deg(v) + |inds(tn[v])|) + # instead of an O(n) sweep over all vertices. + ind_to_vertices::Dict{Index, Set{V}} + + # Sole inner ctor: place `tensors` at the vertices of `graph` and build + # the reverse map from the resulting tensors. The graph's edges are taken + # at face value; callers are responsible for the graph-edge ↔ + # shared-`Index` invariant on construction (the public ctors below do + # this either by trusting the caller's graph or by re-inferring edges). function ITensorNetwork{V}(tensors, graph::NamedGraph) where {V} g = NamedGraph{V}(graph) dg = DataGraph(g; vertex_data_type = ITensor, edge_data_type = ITensor) for v in vertices(g) dg[v] = tensors[v] end - return new{V}(dg) + ind_to_vertices = Dict{Index, Set{V}}() + for v in vertices(dg) + for i in inds(dg[v]) + push!(get!(ind_to_vertices, i, Set{V}()), v) + end + end + return new{V}(dg, ind_to_vertices) end end @@ -67,13 +87,96 @@ function DataGraphs.underlying_graph_type(TN::Type{<:ITensorNetwork}) return fieldtype(data_graph_type(TN), :underlying_graph) end +# +# Reverse index map and edge reconciliation (implementation detail) +# + +# Internal accessor — keep `ind_to_vertices` package-private so that the +# `AbstractITensorNetwork` interface stays oblivious to the reverse map. +_ind_to_vertices(tn::ITensorNetwork) = getfield(tn, :ind_to_vertices) + +# Write `value` to `v` and re-establish the graph-edge ↔ shared-`Index` +# invariant: incident edges of `v` are exactly the vertices sharing an +# `Index` with `value`. The reverse map makes the reconciliation +# O(deg(v) + |inds(value)|), so `setindex!` does it unconditionally and +# the old `@preserve_graph` / `fix_edges!` bypass is no longer needed. +function DataGraphs.set_vertex_data!(tn::ITensorNetwork, value, v) + _unregister_inds!(tn, v) + data_graph(tn)[v] = value + _register_inds!(tn, v) + _reconcile_edges!(tn, v) + return tn +end + +# Drop `v` from the reverse map entry of each `Index` currently in `tn[v]`. +function _unregister_inds!(tn::ITensorNetwork, v) + is_vertex_assigned(tn, v) || return tn + map = _ind_to_vertices(tn) + for i in inds(tn[v]) + haskey(map, i) || continue + vs = map[i] + delete!(vs, v) + isempty(vs) && delete!(map, i) + end + return tn +end + +# Mirror vertex removal on the reverse map. `contract`, `induced_subgraph`, +# etc. structurally edit the graph and would otherwise leave stale entries +# behind, causing later edge reconciliation to point at vertices that no +# longer exist. 
Bypasses `DataGraph`'s `rem_vertex!` override
+# (which insists on deleting edge_data for every incident edge —
+# `ITensorNetwork` edges carry no edge_data) by removing the vertex
+# directly from the underlying `NamedGraph`.
+function Graphs.rem_vertex!(tn::ITensorNetwork, v)
+    _unregister_inds!(tn, v)
+    rem_vertex!(underlying_graph(data_graph(tn)), v)
+    return tn
+end
+
+# Register `v` against each `Index` in `tn[v]`.
+function _register_inds!(tn::ITensorNetwork{V}, v) where {V}
+    map = _ind_to_vertices(tn)
+    for i in inds(tn[v])
+        push!(get!(map, i, Set{V}()), v)
+    end
+    return tn
+end
+
+# Reconcile the graph edges incident to `v` so that they match exactly the
+# set of vertices sharing an `Index` with `tn[v]`. O(deg(v) + |inds(tn[v])|).
+function _reconcile_edges!(tn::ITensorNetwork{V}, v) where {V}
+    map = _ind_to_vertices(tn)
+    desired = Set{V}()
+    for i in inds(tn[v])
+        for u in map[i]
+            u == v || push!(desired, u)
+        end
+    end
+    # `DataGraphs.rem_edge!` requires edge_data to be assigned for the edge
+    # — but `ITensorNetwork` edges carry no edge_data, so bypass it and
+    # work directly on the underlying `NamedGraph`. Edge inserts can stay
+    # on the `DataGraph` since `add_edge!` doesn't touch edge_data.
+    dg = data_graph(tn)
+    ug = underlying_graph(dg)
+    E = edgetype(tn)
+    for u in collect(neighbors(tn, v))
+        u in desired || rem_edge!(ug, E(v, u))
+    end
+    for u in desired
+        has_edge(tn, E(v, u)) || add_edge!(dg, E(v, u))
+    end
+    return tn
+end
+
 #
 # Construction from collections of ITensors
 #
 
-# Tensors only: derive the vertex list from `keys(tensors)`, write the
-# tensors at each vertex, then infer edges from shared Indices in an
-# O(n²) sweep. Without a reverse index map, that's the only available cost.
+# Tensors only: derive the vertex list from `keys(tensors)`. Build an empty
+# network on that vertex set, then write each tensor via `setindex!`; the
+# reverse-index map drives edge reconciliation as each tensor lands, so edges
+# are inferred in O(sum_v |inds(tn[v])|) total rather than an O(n²) sweep.
 function ITensorNetwork{V}(tensors) where {V}
     # Build the vertex list with element type `V` so that an empty `tensors`
     # input doesn't get the graph's vertex type inferred to whatever
     # `keys(tensors)` happens to give (e.g. `Int` for an empty `Vector{ITensor}`).
     g = NamedGraph(V[v for v in keys(tensors)])
     default = Dict{V, ITensor}(v => ITensor() for v in vertices(g))
     tn = ITensorNetwork(default, g)
     for v in vertices(g)
-        @preserve_graph tn[v] = tensors[v]
-    end
-    vs = collect(vertices(tn))
-    for i in eachindex(vs), j in (i + 1):lastindex(vs)
-        v1, v2 = vs[i], vs[j]
-        if !isempty(commoninds(tn[v1], tn[v2]))
-            add_edge!(data_graph(tn), v1 => v2)
-        end
+        tn[v] = tensors[v]
     end
     return tn
 end
diff --git a/src/normalize.jl b/src/normalize.jl
index 44f6573d..83d77cec 100644
--- a/src/normalize.jl
+++ b/src/normalize.jl
@@ -6,10 +6,8 @@ end
 
 function rescale(alg::Algorithm"exact", tn::AbstractITensorNetwork; kwargs...)
     logn = logscalar(alg, tn; kwargs...)
-    vs = collect(vertices(tn))
-    c = inv(exp(logn / length(vs)))
-    vertices_weights = Dictionary(vs, [c for v in vs])
-    return scale_tensors(tn, vertices_weights)
+    c = inv(exp(logn / length(vertices(tn))))
+    return map(t -> c * t, tn)
 end
 
 function rescale(
@@ -58,10 +56,8 @@ function LinearAlgebra.normalize(
        alg::Algorithm"exact", tn::AbstractITensorNetwork; kwargs...
    )
    logn = logscalar(alg, inner_network(tn, tn); kwargs...)
- vs = collect(vertices(tn)) - c = inv(exp(logn / (2 * length(vs)))) - vertices_weights = Dictionary(vs, [c for v in vs]) - return scale_tensors(tn, vertices_weights) + c = inv(exp(logn / (2 * length(vertices(tn))))) + return map(t -> c * t, tn) end function LinearAlgebra.normalize( diff --git a/src/solvers/insert.jl b/src/solvers/insert.jl index 2331fc01..342d72e0 100644 --- a/src/solvers/insert.jl +++ b/src/solvers/insert.jl @@ -20,15 +20,17 @@ function insert!(region_iter, local_tensor; normalize = false, set_orthogonal_re local_tensor, indsTe; tags, trunc_kwargs... ) - @preserve_graph psi[first(region)] = U + psi[first(region)] = U prob = set_truncation_info!(prob; spectrum) else error("Region of length $(length(region)) not currently supported") end v = last(region) - @preserve_graph psi[v] = C + psi[v] = C psi = set_orthogonal_region ? set_ortho_region(psi, [v]) : psi - normalize && @preserve_graph psi[v] = psi[v] / norm(psi[v]) + if normalize + psi[v] = psi[v] / norm(psi[v]) + end prob.state = psi diff --git a/src/treetensornetworks/treetensornetwork.jl b/src/treetensornetworks/treetensornetwork.jl index 01588063..9e5f4f72 100644 --- a/src/treetensornetworks/treetensornetwork.jl +++ b/src/treetensornetworks/treetensornetwork.jl @@ -105,6 +105,13 @@ ortho_region(tn::TTN) = tn.ortho_region # Required for `AbstractITensorNetwork` interface data_graph(tn::TTN) = data_graph(tn.tensornetwork) +# Forward vertex writes to the wrapped `ITensorNetwork` so its +# reverse-index map and edge reconciliation run as usual. +function DataGraphs.set_vertex_data!(tn::TTN, value, v) + set_vertex_data!(tn.tensornetwork, value, v) + return tn +end + function data_graph_type(G::Type{<:TTN}) return data_graph_type(fieldtype(G, :tensornetwork)) end diff --git a/test/test_belief_propagation.jl b/test/test_belief_propagation.jl index 8776c455..29aafc68 100644 --- a/test/test_belief_propagation.jl +++ b/test/test_belief_propagation.jl @@ -1,6 +1,6 @@ using Compat: Compat using Graphs: vertices -using ITensorNetworks: ITensorNetworks, @preserve_graph, BeliefPropagationCache, contract, +using ITensorNetworks: ITensorNetworks, BeliefPropagationCache, contract, contraction_sequence, environment, inner_network, message, message_diff, partitioned_tensornetwork, scalar, siteinds, tensornetwork, update, update_factor, updated_message, ⊗ @@ -37,8 +37,8 @@ using Test: @test, @testset new_A_dag = ITensors.replaceind( dag(prime(new_A)), only(s[first(vket)])', only(s[first(vket)]) ) - @preserve_graph bpc[vket] = new_A - @preserve_graph bpc[vbra] = new_A_dag + bpc[vket] = new_A + bpc[vbra] = new_A_dag @test bpc[vket] == new_A @test bpc[vbra] == new_A_dag diff --git a/test/utils.jl b/test/utils.jl index b42f5ba6..1da81889 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -4,8 +4,8 @@ # inside its gensym module. using DataGraphs: underlying_graph, vertex_data -using Graphs: AbstractGraph, add_edge!, dst, edges, src, vertices -using ITensorNetworks: @preserve_graph, ITensorNetwork, IndsNetwork, data_graph +using Graphs: AbstractGraph, dst, edges, src, vertices +using ITensorNetworks: ITensorNetwork, IndsNetwork using ITensors.NDTensors: dim using ITensors: ITensors, ITensor, Index, QN, dag, hasqns, inds, itensor, onehot using NamedGraphs.GraphsExtensions: incident_edges @@ -87,15 +87,13 @@ end # uses QR, which would push site-state QN flux into the link and leave BP's # `default_message` (single-index `delta`) with no compatible block. 
`onehot` # defaults to `Float64`, so we typecast it to `elt` to keep `productstate(elt, -# ...)` element-type-preserving. Uses `@preserve_graph` plus an explicit -# `add_edge!` on the underlying graph so the graph-edge ↔ shared-Index -# invariant is maintained without relying on auto-reconciliation. +# ...)` element-type-preserving. Reverse-map reconciliation on the second +# `setindex!` brings the graph edge along for free. function _add_edge!(elt::Type, tn, edge) iₑ = Index(_trivial_link_space(tn), "Link") X = ITensors.convert_eltype(elt, onehot(iₑ => 1)) - @preserve_graph tn[src(edge)] = tn[src(edge)] * X - @preserve_graph tn[dst(edge)] = tn[dst(edge)] * dag(X) - add_edge!(data_graph(tn), edge) + tn[src(edge)] = tn[src(edge)] * X + tn[dst(edge)] = tn[dst(edge)] * dag(X) return tn end From 5c43b6029ae70003a6541d070d3ed806ff61e6b9 Mon Sep 17 00:00:00 2001 From: Matthew Fishman Date: Tue, 12 May 2026 11:49:36 -0400 Subject: [PATCH 02/10] Replace ITensorNetwork DataGraph storage with NamedGraph + Dict fields Storage is now `graph::NamedGraph{V}`, `vertex_data::Dict{V, ITensor}`, and `ind_to_vertices::Dict{Index{S}, Set{V}}`, with the constructor surface narrowed to `ITensorNetwork(tensors)` / `{V}` / `{V, S}` plus a graph-only empty-network form for `similar_graph`-style scaffolding. The 2-arg `(tensors, graph)` ctor is gone. Drop `data_graph` / `data_graph_type` from the `AbstractITensorNetwork` interface; subtypes (TreeTensorNetwork, AbstractBeliefPropagationCache, AbstractFormNetwork, QuadraticFormNetwork) now forward via `underlying_graph` + `vertex_data` directly. Add `Base.values(tn)` so callers don't depend on whether `vertex_data` is `Dict`- or `Dictionary`-shaped. Inline the old reverse-map / edge-reconciliation helpers into `set_vertex_data!` and `rem_vertex!`. Refactor `opsum_to_ttn` and the TTN `directsum` paths that previously relied on the 2-arg ctor to build an empty TTN-shaped scaffold: both now buffer into a plain `Dict{V, ITensor}` and wrap as `TreeTensorNetwork(ITensorNetwork(...))` once the tensor data is filled, so the tree invariant only has to hold on a fully-populated network. 
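For intuition, here is a minimal sketch (not part of this diff) of the
write-time invariant the three fields maintain. It reuses the two-tensor
example from the `ITensorNetwork` docstring; the `has_edge(tn, 1 => 2)`
pair form and the commented REPL values are assumptions about the
`NamedGraphs`-backed graph interface, not output from a test run:

```julia
using Graphs: has_edge
using ITensors: ITensor, Index
using ITensorNetworks: ITensorNetwork

i, j, k = Index(2), Index(2), Index(2)
tn = ITensorNetwork([ITensor(i, j), ITensor(j, k)])
has_edge(tn, 1 => 2)   # true: vertices 1 and 2 share `j`

tn[2] = ITensor(k)     # rewrite vertex 2 without `j`
has_edge(tn, 1 => 2)   # false: the bond is dropped on write

tn[2] = ITensor(j, k)  # write `j` back
has_edge(tn, 1 => 2)   # true: the bond is re-inferred
```

A third tensor carrying `j` would trip the hyperedge check in
`set_vertex_data!`; this same bookkeeping is what lets `union`,
`rename_vertices`, and `contract` drop their manual edge-threading loops.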
Co-Authored-By: Claude Opus 4.7 (1M context) --- src/abstractitensornetwork.jl | 88 ++--- src/caches/abstractbeliefpropagationcache.jl | 9 +- src/formnetworks/abstractformnetwork.jl | 10 +- src/formnetworks/bilinearformnetwork.jl | 5 +- src/formnetworks/quadraticformnetwork.jl | 9 +- src/itensornetwork.jl | 317 +++++++++--------- .../abstracttreetensornetwork.jl | 28 +- .../opsum_to_ttn/opsum_to_ttn.jl | 12 +- src/treetensornetworks/treetensornetwork.jl | 23 +- test/test_ttn_position.jl | 10 +- test/utils.jl | 34 +- 11 files changed, 264 insertions(+), 281 deletions(-) diff --git a/src/abstractitensornetwork.jl b/src/abstractitensornetwork.jl index 5a046375..f18b3804 100644 --- a/src/abstractitensornetwork.jl +++ b/src/abstractitensornetwork.jl @@ -1,7 +1,7 @@ using Adapt: Adapt, adapt, adapt_structure -using DataGraphs: DataGraphs, edge_data, get_vertex_data, is_vertex_assigned, - set_vertex_data!, underlying_graph, underlying_graph_type, vertex_data -using Dictionaries: Dictionary +using DataGraphs: + DataGraphs, set_vertex_data!, underlying_graph, underlying_graph_type, vertex_data +using Dictionaries: Dictionaries, Dictionary using Graphs: Graphs, Graph, add_edge!, add_vertex!, bfs_tree, center, dst, edges, edgetype, has_edge, ne, neighbors, rem_edge!, src, vertices using ITensors: ITensors, ITensor, Index, addtags, commoninds, commontags, contract, dag, @@ -16,9 +16,9 @@ using SplitApplyCombine: flatten abstract type AbstractITensorNetwork{V} <: AbstractDataGraph{V, ITensor, ITensor} end -# Field access -data_graph_type(::Type{<:AbstractITensorNetwork}) = not_implemented() -data_graph(graph::AbstractITensorNetwork) = not_implemented() +# Subtypes provide the storage: `underlying_graph(tn)` returns the named graph +# and `vertex_data(tn)` returns a `Dict{V, ITensor}`-like mapping. Edge data is +# unused — every `AbstractITensorNetwork` is treated as having no edge data. # TODO: Define a generic fallback for `AbstractDataGraph`? DataGraphs.edge_data_type(::Type{<:AbstractITensorNetwork}) = ITensor @@ -38,13 +38,14 @@ end # Copy Base.copy(tn::AbstractITensorNetwork) = not_implemented() -# Iteration -Base.iterate(tn::AbstractITensorNetwork, args...) = iterate(vertex_data(tn), args...) - -# Vertex-keyed access: `keys(tn)` returns the vertex set, `tn[v]` returns the -# tensor at vertex `v`. Together with `Base.iterate` above, this lets `tn` -# stand in as a `keys`/`values`-style collection of tensors keyed by vertex. +# Vertex-keyed access: `keys(tn)` returns the vertex set, `values(tn)` the +# tensors, and `tn[v]` the tensor at vertex `v`. Going through `values(tn)` +# (rather than `values(vertex_data(tn))`) lets callers stay agnostic about +# whether `vertex_data` is a `Dict`, `Dictionary`, or anything else with +# different default-iteration semantics. Base.keys(tn::AbstractITensorNetwork) = vertices(tn) +Base.values(tn::AbstractITensorNetwork) = (tn[v] for v in vertices(tn)) +Base.iterate(tn::AbstractITensorNetwork, args...) = iterate(values(tn), args...) # TODO: This contrasts with the `DataGraphs.AbstractDataGraph` definition, # where it is defined as the `vertextype`. Does that cause problems or should it be changed? 
@@ -52,32 +53,24 @@ Base.eltype(tn::AbstractITensorNetwork) = eltype(vertex_data(tn)) # Overload if needed Graphs.is_directed(::Type{<:AbstractITensorNetwork}) = false -GraphsExtensions.directed_graph(is::AbstractITensorNetwork) = directed_graph(data_graph(is)) - -# Derived interface, may need to be overloaded -function DataGraphs.underlying_graph_type(G::Type{<:AbstractITensorNetwork}) - return underlying_graph_type(data_graph_type(G)) +function GraphsExtensions.directed_graph(tn::AbstractITensorNetwork) + return directed_graph(underlying_graph(tn)) end function ITensors.datatype(tn::AbstractITensorNetwork) return mapreduce(v -> datatype(tn[v]), promote_type, vertices(tn)) end -# AbstractDataGraphs overloads - -DataGraphs.underlying_graph(tn::AbstractITensorNetwork) = underlying_graph(data_graph(tn)) +# AbstractDataGraphs overloads — defined directly in terms of the +# `underlying_graph` / `vertex_data` storage, with no edge data. -function DataGraphs.is_vertex_assigned(is::AbstractITensorNetwork, v) - return is_vertex_assigned(data_graph(is), v) +function DataGraphs.is_vertex_assigned(tn::AbstractITensorNetwork, v) + return haskey(vertex_data(tn), v) end -function DataGraphs.is_edge_assigned(is::AbstractITensorNetwork, v) - return is_edge_assigned(data_graph(is), v) -end +DataGraphs.is_edge_assigned(::AbstractITensorNetwork, _) = false -function DataGraphs.get_vertex_data(is::AbstractITensorNetwork, v) - return get_vertex_data(data_graph(is), v) -end +DataGraphs.get_vertex_data(tn::AbstractITensorNetwork, v) = vertex_data(tn)[v] function DataGraphs.set_vertices_data!(tn::AbstractITensorNetwork, values, vertices) for v in vertices @@ -112,22 +105,13 @@ function Base.union(tn1::AbstractITensorNetwork, tn2::AbstractITensorNetwork; kw tensors = Dict{vertextype(g), ITensor}( v => (v in vertices(tn1) ? tn1[v] : tn2[v]) for v in vertices(g) ) - tn = ITensorNetwork(tensors, g) - # Add any new edges that are introduced during the union - for v1 in vertices(tn1) - for v2 in vertices(tn2) - if !isempty(linkinds(tn, v1 => v2)) - add_edge!(data_graph(tn), v1 => v2) - end - end - end - return tn + # `ITensorNetwork(tensors)` infers edges from shared `Index`es, so any + # cross-network bonds between `tn1` and `tn2` are picked up automatically. + return ITensorNetwork(tensors) end function NamedGraphs.rename_vertices(f::Function, tn::AbstractITensorNetwork) - new_g = NamedGraphs.rename_vertices(f, underlying_graph(tn)) - tensors = Dict{vertextype(new_g), ITensor}(f(v) => tn[v] for v in vertices(tn)) - return ITensorNetwork(tensors, new_g) + return ITensorNetwork(Dict(f(v) => tn[v] for v in vertices(tn))) end # @@ -146,9 +130,7 @@ end # TODO: Implement using `adapt` function NDTensors.convert_scalartype(eltype::Type{<:Number}, tn::AbstractITensorNetwork) - tn = copy(tn) - vertex_data(tn) .= ITensors.adapt.(Ref(eltype), vertex_data(tn)) - return tn + return map(t -> ITensors.adapt(eltype, t), tn) end function Base.complex(tn::AbstractITensorNetwork) @@ -373,24 +355,12 @@ function NDTensors.contract( V = promote_type(vertextype(tn), typeof(merged_vertex)) # TODO: Check `ITensorNetwork{V}`, shouldn't need a copy here. tn = ITensorNetwork{V}(copy(tn)) - neighbors_src = setdiff(neighbors(tn, src(edge)), [dst(edge)]) - neighbors_dst = setdiff(neighbors(tn, dst(edge)), [src(edge)]) new_itensor = tn[src(edge)] * tn[dst(edge)] - # The following is equivalent to: - # - # tn[dst(edge)] = new_itensor - # - # but without having to search all vertices - # to update the edges. 
rem_vertex!(tn, src(edge)) rem_vertex!(tn, dst(edge)) add_vertex!(tn, merged_vertex) - for n_src in neighbors_src - add_edge!(data_graph(tn), merged_vertex => n_src) - end - for n_dst in neighbors_dst - add_edge!(data_graph(tn), merged_vertex => n_dst) - end + # Reverse-map reconciliation on assignment picks up the new bonds + # to the surviving neighbors of `src(edge)` and `dst(edge)`. tn[merged_vertex] = new_itensor return tn end @@ -535,7 +505,7 @@ function Base.show(io::IO, mime::MIME"text/plain", graph::AbstractITensorNetwork end println(io) println(io, "with vertex data:") - show(io, mime, inds.(vertex_data(graph))) + show(io, mime, Dict(v => inds(graph[v]) for v in vertices(graph))) return nothing end diff --git a/src/caches/abstractbeliefpropagationcache.jl b/src/caches/abstractbeliefpropagationcache.jl index 944700b1..d682b2a7 100644 --- a/src/caches/abstractbeliefpropagationcache.jl +++ b/src/caches/abstractbeliefpropagationcache.jl @@ -1,4 +1,5 @@ using Adapt: Adapt, adapt, adapt_structure +using DataGraphs: DataGraphs, underlying_graph, vertex_data using Graphs: Graphs, IsDirected, dst, src using ITensors: commoninds, delta, dir using LinearAlgebra: diag, dot @@ -18,10 +19,12 @@ function SimilarType.similar_type(bpc::AbstractBeliefPropagationCache) return typeof(tensornetwork(bpc)) end -function data_graph_type(bpc::AbstractBeliefPropagationCache) - return data_graph_type(tensornetwork(bpc)) +function DataGraphs.underlying_graph(bpc::AbstractBeliefPropagationCache) + return underlying_graph(tensornetwork(bpc)) +end +function DataGraphs.vertex_data(bpc::AbstractBeliefPropagationCache) + return vertex_data(tensornetwork(bpc)) end -data_graph(bpc::AbstractBeliefPropagationCache) = data_graph(tensornetwork(bpc)) #TODO: Take `dot` without precontracting the messages to allow scaling to more complex # messages diff --git a/src/formnetworks/abstractformnetwork.jl b/src/formnetworks/abstractformnetwork.jl index 07ef6939..6a041dbc 100644 --- a/src/formnetworks/abstractformnetwork.jl +++ b/src/formnetworks/abstractformnetwork.jl @@ -1,3 +1,4 @@ +using DataGraphs: DataGraphs, underlying_graph, vertex_data using Graphs: induced_subgraph using NamedGraphs.SimilarType: SimilarType @@ -20,12 +21,9 @@ function SimilarType.similar_type(f::AbstractFormNetwork) return typeof(tensornetwork(f)) end -# TODO: Use `NamedGraphs.GraphsExtensions.parent_graph_type`. -function data_graph_type(f::AbstractFormNetwork) - return data_graph_type(tensornetwork(f)) -end -# TODO: Use `NamedGraphs.GraphsExtensions.parent_graph`. -data_graph(f::AbstractFormNetwork) = data_graph(tensornetwork(f)) +# TODO: Use `NamedGraphs.GraphsExtensions.parent_graph` / `parent_graph_type`. 
+DataGraphs.underlying_graph(f::AbstractFormNetwork) = underlying_graph(tensornetwork(f)) +DataGraphs.vertex_data(f::AbstractFormNetwork) = vertex_data(tensornetwork(f)) function operator_vertices(f::AbstractFormNetwork) return filter(v -> last(v) == operator_vertex_suffix(f), vertices(f)) diff --git a/src/formnetworks/bilinearformnetwork.jl b/src/formnetworks/bilinearformnetwork.jl index bfa9cf03..c6f0386e 100644 --- a/src/formnetworks/bilinearformnetwork.jl +++ b/src/formnetworks/bilinearformnetwork.jl @@ -96,12 +96,11 @@ function BilinearFormNetwork( s_mapped = dual_site_index_map(s) operator_inds = union_all_inds(s, s_mapped) - g = NamedGraph(underlying_graph(operator_inds)) - ts = Dict{vertextype(g), ITensor}() + ts = Dict{vertextype(operator_inds), ITensor}() for v in vertices(operator_inds) ts[v] = itensor_identity_map(scalartype(ket), s[v] .=> s_mapped[v]) end - O = ITensorNetwork(ts, g) + O = ITensorNetwork(ts) O = adapt(promote_type(datatype(bra), datatype(ket)), O) return BilinearFormNetwork(O, bra, ket; dual_site_index_map, kwargs...) end diff --git a/src/formnetworks/quadraticformnetwork.jl b/src/formnetworks/quadraticformnetwork.jl index 263063c3..9db4c147 100644 --- a/src/formnetworks/quadraticformnetwork.jl +++ b/src/formnetworks/quadraticformnetwork.jl @@ -1,4 +1,4 @@ -using DataGraphs: DataGraphs, set_vertex_data! +using DataGraphs: DataGraphs, set_vertex_data!, underlying_graph, vertex_data using NamedGraphs: similar_graph default_index_map = prime @@ -35,8 +35,6 @@ for f in [ :bra_vertex_suffix, :ket_vertex_suffix, :tensornetwork, - :data_graph, - :data_graph_type, ] @eval begin function $f(qf::QuadraticFormNetwork, args...; kwargs...) @@ -45,6 +43,11 @@ for f in [ end end +function DataGraphs.underlying_graph(qf::QuadraticFormNetwork) + return underlying_graph(bilinear_formnetwork(qf)) +end +DataGraphs.vertex_data(qf::QuadraticFormNetwork) = vertex_data(bilinear_formnetwork(qf)) + dual_index_map(qf::QuadraticFormNetwork) = qf.dual_index_map dual_inv_index_map(qf::QuadraticFormNetwork) = qf.dual_inv_index_map diff --git a/src/itensornetwork.jl b/src/itensornetwork.jl index 93c4e23c..717af44f 100644 --- a/src/itensornetwork.jl +++ b/src/itensornetwork.jl @@ -1,38 +1,40 @@ -using DataGraphs: - DataGraphs, DataGraph, is_vertex_assigned, set_vertex_data!, underlying_graph -using Graphs: Graphs, add_edge!, edgetype, has_edge, neighbors, rem_edge!, rem_vertex! +using DataGraphs: DataGraphs, set_vertex_data!, underlying_graph, vertex_data +using Graphs: Graphs, AbstractGraph, add_edge!, add_vertex!, edges, has_edge, neighbors, + rem_edge!, rem_vertex!, vertices using ITensors: ITensors, ITensor, Index, inds -using NamedGraphs: NamedGraphs, NamedEdge, NamedGraph, similar_graph, vertextype +using NamedGraphs: NamedGraphs, NamedEdge, NamedGraph, vertextype """ - ITensorNetwork{V} + ITensorNetwork{V, S} -A tensor network where each vertex holds an `ITensor`. The network graph is a -`NamedGraph{V}` and edges represent shared indices between neighboring tensors. +A tensor network where each vertex holds an `ITensor`. Storage is split +across three fields: -The type maintains a reverse index map (`Index → vertices`) so that vertex -lookup by shared `Index` is O(1) and the graph-edge ↔ shared-`Index` -correspondence is reconciled in O(deg(v) + |inds(tn[v])|) on every tensor -write. 
+ - `graph::NamedGraph{V}` — the network's graph (`V` is the vertex type), + - `vertex_data::Dict{V, ITensor}` — the tensor at each vertex, + - `ind_to_vertices::Dict{Index{S}, Set{V}}` — reverse map from each + `Index` to the vertices it appears in (`S` is the `Index` space type, + e.g. `Int` for plain dims or `Vector{Pair{QN, Int}}` for QN-graded). -# Constructors +The reverse map keeps vertex lookup by shared `Index` O(1) and enforces +the graph-edge ↔ shared-`Index` invariant: every `Index` appears at +either one vertex (an external / site index) or two (a bond), and every +graph edge corresponds to exactly the pair of vertices sharing at least +one `Index`. Hyperedges (an `Index` shared by three or more vertices) +are rejected. -**From a collection of `ITensor`s** (edges inferred from shared indices): +# Construction ```julia ITensorNetwork(tensors) +ITensorNetwork{V}(tensors) +ITensorNetwork{V, S}(tensors) ``` `tensors` is any collection where `keys(tensors)` are vertex labels and `values(tensors)` are the `ITensor`s at those vertices (e.g. a `Dict`, a `Dictionary`, or a `Vector{ITensor}` with linear-index vertex labels). - -**From a collection of `ITensor`s placed at the vertices of a given graph** -(no edge inference; the caller is responsible for the edges): - -```julia -ITensorNetwork(tensors, graph::NamedGraph) -``` +Edges are inferred from shared `Index`es. # Example @@ -47,180 +49,177 @@ julia> tn = ITensorNetwork([ITensor(i, j), ITensor(j, k)]); See also: `IndsNetwork`, [`TreeTensorNetwork`](@ref ITensorNetworks.TreeTensorNetwork). """ -struct ITensorNetwork{V} <: AbstractITensorNetwork{V} - data_graph::DataGraph{V, ITensor, ITensor, NamedGraph{V}, NamedEdge{V}} - # Reverse index map: for every `Index` appearing in any vertex tensor, - # the set of vertices holding that `Index`. Maintained by `setindex!` - # so that edge reconciliation after a write is O(deg(v) + |inds(tn[v])|) - # instead of an O(n) sweep over all vertices. - ind_to_vertices::Dict{Index, Set{V}} - - # Sole inner ctor: place `tensors` at the vertices of `graph` and build - # the reverse map from the resulting tensors. The graph's edges are taken - # at face value; callers are responsible for the graph-edge ↔ - # shared-`Index` invariant on construction (the public ctors below do - # this either by trusting the caller's graph or by re-inferring edges). 
- function ITensorNetwork{V}(tensors, graph::NamedGraph) where {V} - g = NamedGraph{V}(graph) - dg = DataGraph(g; vertex_data_type = ITensor, edge_data_type = ITensor) - for v in vertices(g) - dg[v] = tensors[v] - end - ind_to_vertices = Dict{Index, Set{V}}() - for v in vertices(dg) - for i in inds(dg[v]) - push!(get!(ind_to_vertices, i, Set{V}()), v) - end - end - return new{V}(dg, ind_to_vertices) - end +struct ITensorNetwork{V, S} <: AbstractITensorNetwork{V} + graph::NamedGraph{V} + vertex_data::Dict{V, ITensor} + ind_to_vertices::Dict{Index{S}, Set{V}} end # -# Data access +# AbstractITensorNetwork interface (field access) # -data_graph(tn::ITensorNetwork) = getfield(tn, :data_graph) -data_graph_type(TN::Type{<:ITensorNetwork}) = fieldtype(TN, :data_graph) +DataGraphs.underlying_graph(tn::ITensorNetwork) = tn.graph +DataGraphs.vertex_data(tn::ITensorNetwork) = tn.vertex_data -function DataGraphs.underlying_graph_type(TN::Type{<:ITensorNetwork}) - return fieldtype(data_graph_type(TN), :underlying_graph) +function DataGraphs.underlying_graph_type(::Type{<:ITensorNetwork{V}}) where {V} + return NamedGraph{V} end # -# Reverse index map and edge reconciliation (implementation detail) +# Constructors # -# Internal accessor — keep `ind_to_vertices` package-private so that the -# `AbstractITensorNetwork` interface stays oblivious to the reverse map. -_ind_to_vertices(tn::ITensorNetwork) = getfield(tn, :ind_to_vertices) - -# Write `value` to `v` and re-establish the graph-edge ↔ shared-`Index` -# invariant: incident edges of `v` are exactly the vertices sharing an -# `Index` with `value`. The reverse map makes the reconciliation -# O(deg(v) + |inds(value)|), so `setindex!` does it unconditionally and -# the old `@preserve_graph` / `fix_edges!` bypass is no longer needed. -function DataGraphs.set_vertex_data!(tn::ITensorNetwork, value, v) - _unregister_inds!(tn, v) - data_graph(tn)[v] = value - _register_inds!(tn, v) - _reconcile_edges!(tn, v) - return tn +# Infer `S` from a tensor's indices; default to `Int` (plain non-QN +# dim-as-`Int` Indices) when the collection is empty or every tensor +# has no indices. +_index_space_type(::Index{S}) where {S} = S +function _index_space_type(tensors) + for t in values(tensors) + is = inds(t) + isempty(is) || return _index_space_type(first(is)) + end + return Int end -# Drop `v` from the reverse map entry of each `Index` currently in `tn[v]`. -function _unregister_inds!(tn::ITensorNetwork, v) - is_vertex_assigned(tn, v) || return tn - map = _ind_to_vertices(tn) - for i in inds(tn[v]) - haskey(map, i) || continue - vs = map[i] - delete!(vs, v) - isempty(vs) && delete!(map, i) +# Build the reverse index map from `tensors`, infer the graph edges from +# that map, and enforce the no-hyperedge invariant. +function ITensorNetwork{V, S}(tensors) where {V, S} + vs = V[v for v in keys(tensors)] + graph = NamedGraph(vs) + vertex_data = Dict{V, ITensor}(v => tensors[v] for v in vs) + ind_to_vertices = Dict{Index{S}, Set{V}}() + for v in vs, i in inds(vertex_data[v]) + push!(get!(ind_to_vertices, i, Set{V}()), v) end - return tn + for (i, owners) in ind_to_vertices + length(owners) <= 2 || error( + "Index $i appears at $(length(owners)) vertices; `ITensorNetwork` " * + "is not a hypergraph — every `Index` must appear at one (external) " * + "or two (bond) vertices." 
+ ) + end + # Walk `vs` in order so the edge add order — and therefore the + # `neighbors(g, v)` / `edges(g)` iteration order — is deterministic in + # the input vertex order, rather than the non-deterministic hash order + # of `values(ind_to_vertices)` and `Set` iteration. + for v in vs, i in inds(vertex_data[v]) + for u in ind_to_vertices[i] + u != v && !has_edge(graph, v, u) && add_edge!(graph, v => u) + end + end + return ITensorNetwork{V, S}(graph, vertex_data, ind_to_vertices) end -# Mirror vertex removal on the reverse map. `contract`, `induced_subgraph`, -# etc. structurally edit the graph and would otherwise leave stale entries -# behind, causing later edge reconciliation to point at vertices that no -# longer exist. Routes the underlying-graph update through the -# `AbstractDataGraph` fallback (which only touches the graph structure) -# instead of `DataGraph`'s override (which insists on deleting edge_data -# for every incident edge — `ITensorNetwork` edges carry no edge_data). -function Graphs.rem_vertex!(tn::ITensorNetwork, v) - _unregister_inds!(tn, v) - rem_vertex!(underlying_graph(data_graph(tn)), v) - return tn +function ITensorNetwork{V, S}(tn::AbstractITensorNetwork) where {V, S} + return ITensorNetwork{V, S}(Dict{V, ITensor}(v => tn[v] for v in vertices(tn))) end -# Register `v` against each `Index` in `tn[v]`. -function _register_inds!(tn::ITensorNetwork{V}, v) where {V} - map = _ind_to_vertices(tn) - for i in inds(tn[v]) - push!(get!(map, i, Set{V}()), v) - end - return tn +function ITensorNetwork{V}(tensors) where {V} + return ITensorNetwork{V, _index_space_type(tensors)}(tensors) end -# Reconcile the graph edges incident to `v` so that they match exactly the -# set of vertices sharing an `Index` with `tn[v]`. O(deg(v) + |inds(tn[v])|). -function _reconcile_edges!(tn::ITensorNetwork{V}, v) where {V} - map = _ind_to_vertices(tn) - desired = Set{V}() - for i in inds(tn[v]) - for u in map[i] - u == v || push!(desired, u) - end - end - # `DataGraphs.rem_edge!` requires edge_data to be assigned for the edge - # — but `ITensorNetwork` edges carry no edge_data, so bypass it and - # work directly on the underlying `NamedGraph`. Edge inserts can stay - # on the `DataGraph` since `add_edge!` doesn't touch edge_data. - dg = data_graph(tn) - ug = underlying_graph(dg) - E = edgetype(tn) - for u in collect(neighbors(tn, v)) - u in desired || rem_edge!(ug, E(v, u)) - end - for u in desired - has_edge(tn, E(v, u)) || add_edge!(dg, E(v, u)) - end - return tn +function ITensorNetwork{V}(tn::AbstractITensorNetwork) where {V} + return ITensorNetwork{V}(Dict{V, ITensor}(v => tn[v] for v in vertices(tn))) +end + +ITensorNetwork(tensors) = ITensorNetwork{keytype(tensors)}(tensors) + +# Empty network over `vertices(g)`: vertices are added to the graph but +# carry no tensor data yet, and the graph has no edges. Tensors are +# populated via `setindex!`, which infers bonds from shared `Index`es; +# any edges already in `g` are discarded since they'll be re-derived from +# the indices. Primarily used by `similar_graph` so that `induced_subgraph` +# and related operations can build their result incrementally. +function ITensorNetwork{V, S}(g::AbstractGraph) where {V, S} + return ITensorNetwork{V, S}( + NamedGraph(collect(V, vertices(g))), + Dict{V, ITensor}(), + Dict{Index{S}, Set{V}}() + ) end # -# Construction from collections of ITensors +# Vertex-type conversion and copy # -# Tensors only: derive the vertex list from `keys(tensors)`. 
Build an empty -# network on that vertex set, then write each tensor via `setindex!`; the -# reverse-index map drives edge reconciliation as each tensor lands, so edges -# are inferred in O(sum_v |inds(tn[v])|) total rather than an O(n²) sweep. -function ITensorNetwork{V}(tensors) where {V} - # Build the vertex list with element type `V` so that an empty `tensors` - # input doesn't get the graph's vertex type inferred to whatever - # `keys(tensors)` happens to give (e.g. `Int` for an empty `Vector{ITensor}`). - g = NamedGraph(V[v for v in keys(tensors)]) - default = Dict{V, ITensor}(v => ITensor() for v in vertices(g)) - tn = ITensorNetwork(default, g) - for v in vertices(g) - tn[v] = tensors[v] - end - return tn +NamedGraphs.convert_vertextype(::Type{V}, tn::ITensorNetwork{V}) where {V} = tn +function NamedGraphs.convert_vertextype(::Type{V}, tn::ITensorNetwork) where {V} + return ITensorNetwork{V}(Dict{V, ITensor}(v => tn[v] for v in vertices(tn))) end -# Non-parametric delegates: extract `V` via `keytype` / `vertextype`. -function ITensorNetwork(tensors) - return ITensorNetwork{keytype(tensors)}(tensors) -end -function ITensorNetwork(tensors, graph::NamedGraph) - return ITensorNetwork{vertextype(graph)}(tensors, graph) +function Base.copy(tn::ITensorNetwork{V, S}) where {V, S} + return ITensorNetwork{V, S}( + Dict{V, ITensor}(v => copy(tn[v]) for v in vertices(tn)) + ) end # -# Vertex-type conversion and copy +# Mutation: keep `graph`, `vertex_data`, and `ind_to_vertices` in sync. # -function ITensorNetwork{V}(tn::ITensorNetwork) where {V} - g = NamedGraph{V}(underlying_graph(tn)) - tensors = Dict{V, ITensor}(v => tn[v] for v in vertices(tn)) - return ITensorNetwork(tensors, g) +# Write `value` to vertex `v`, updating the reverse map and reconciling +# edges so the graph-edge ↔ shared-`Index` invariant holds. Cost is +# O(deg(v) + |inds(value)|). +function DataGraphs.set_vertex_data!(tn::ITensorNetwork{V, S}, value, v) where {V, S} + # Unregister old inds of `tn[v]` from the reverse map. + if haskey(tn.vertex_data, v) + for i in inds(tn.vertex_data[v]) + owners = tn.ind_to_vertices[i] + delete!(owners, v) + isempty(owners) && delete!(tn.ind_to_vertices, i) + end + end + # Write the new tensor. + tn.vertex_data[v] = value + # Register new inds. + for i in inds(value) + push!(get!(tn.ind_to_vertices, i, Set{V}()), v) + length(tn.ind_to_vertices[i]) <= 2 || error( + "Index $i now appears at $(length(tn.ind_to_vertices[i])) vertices; " * + "`ITensorNetwork` forbids hyperedges (3+ vertices sharing an `Index`)." + ) + end + # Reconcile graph edges incident to `v` against the reverse map. + desired = Set{V}() + for i in inds(value) + for u in tn.ind_to_vertices[i] + u == v || push!(desired, u) + end + end + for u in collect(neighbors(tn.graph, v)) + u in desired || rem_edge!(tn.graph, v => u) + end + for u in desired + has_edge(tn.graph, v, u) || add_edge!(tn.graph, v => u) + end + return tn end -ITensorNetwork(tn::ITensorNetwork) = copy(tn) - -NamedGraphs.convert_vertextype(::Type{V}, tn::ITensorNetwork{V}) where {V} = tn -NamedGraphs.convert_vertextype(V::Type, tn::ITensorNetwork) = ITensorNetwork{V}(tn) +# Drop `v` from the reverse map, vertex data, and graph in one shot. 
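+# (`rem_vertex!` on `tn.graph` below also removes the incident edges, so
+# no separate edge reconciliation is needed here.)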
+function Graphs.rem_vertex!(tn::ITensorNetwork, v) + if haskey(tn.vertex_data, v) + for i in inds(tn.vertex_data[v]) + owners = tn.ind_to_vertices[i] + delete!(owners, v) + isempty(owners) && delete!(tn.ind_to_vertices, i) + end + delete!(tn.vertex_data, v) + end + rem_vertex!(tn.graph, v) + return tn +end -function Base.copy(tn::ITensorNetwork{V}) where {V} - g = copy(underlying_graph(tn)) - tensors = Dict{V, ITensor}(v => copy(tn[v]) for v in vertices(g)) - return ITensorNetwork(tensors, g) +# Add `v` to the graph without any tensor data. A subsequent +# `tn[v] = tensor` writes the tensor and reconciles edges. +function Graphs.add_vertex!(tn::ITensorNetwork, v) + add_vertex!(tn.graph, v) + return tn end -function NamedGraphs.similar_graph(tn::ITensorNetwork, underlying_graph::AbstractGraph) - g = NamedGraph(underlying_graph) - default = Dict{vertextype(g), ITensor}(v => ITensor() for v in vertices(g)) - return ITensorNetwork(default, g) +# Fresh `ITensorNetwork` over `vertices(g)` with no tensors. Used by +# `induced_subgraph_from_vertices` to build a same-typed empty container +# that subsequent `setindex!` calls populate. +function NamedGraphs.similar_graph(tn::ITensorNetwork{V, S}, g::AbstractGraph) where {V, S} + return ITensorNetwork{V, S}(g) end diff --git a/src/treetensornetworks/abstracttreetensornetwork.jl b/src/treetensornetworks/abstracttreetensornetwork.jl index cf355663..20526921 100644 --- a/src/treetensornetworks/abstracttreetensornetwork.jl +++ b/src/treetensornetworks/abstracttreetensornetwork.jl @@ -12,8 +12,8 @@ abstract type AbstractTreeTensorNetwork{V} <: AbstractITensorNetwork{V} end const AbstractTTN = AbstractTreeTensorNetwork -function DataGraphs.underlying_graph_type(G::Type{<:AbstractTTN}) - return underlying_graph_type(data_graph_type(G)) +function DataGraphs.underlying_graph_type(::Type{<:AbstractTTN{V}}) where {V} + return NamedGraph{V} end ITensorNetwork(tn::AbstractTTN) = error("Not implemented") @@ -267,17 +267,15 @@ function Base.:+( tns[j] = orthogonalize(tns[j], root_vertex) end - # Output state: empty TTN over the same graph as the inputs. - # Tensor data and link indices are filled in by the directsum loop below. - g_out = NamedGraph(underlying_graph(siteinds(tns[1]))) - tensors_out = Dict{vertextype(g_out), ITensor}( - v => ITensor() for v in vertices(g_out) - ) - tn = TreeTensorNetwork(ITensorNetwork(tensors_out, g_out)) - - vs = post_order_dfs_vertices(tn, root_vertex) - es = post_order_dfs_edges(tn, root_vertex) - link_space = Dict{edgetype(tn), Index}() + # Drive traversal off the first input TTN (it has the right graph + # structure). Tensor data is built into a `Dict` buffer and wrapped as + # a `TreeTensorNetwork` at the end — edges of the output network are + # then inferred from the assigned tensors' shared `Index`es. 
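+    # Buffering in a plain `Dict` also defers the `is_tree` assertion to
+    # the final `TreeTensorNetwork` wrap, once every vertex holds its
+    # direct-summed tensor.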
+ tn0 = first(tns) + vs = post_order_dfs_vertices(tn0, root_vertex) + es = post_order_dfs_edges(tn0, root_vertex) + link_space = Dict{edgetype(tn0), Index}() + tensors_out = Dict{vertextype(tn0), ITensor}() for v in reverse(vs) edges = filter(e -> dst(e) == v || src(e) == v, es) @@ -293,9 +291,9 @@ function Base.:+( if !isnothing(dim_out) tnv = replaceind(tnv, lv[dim_out] => dag(link_space[edges[dim_out]])) end - tn[v] = tnv + tensors_out[v] = tnv end - return tn + return TreeTensorNetwork(ITensorNetwork(tensors_out)) end # TODO: switch default algorithm once more are implemented diff --git a/src/treetensornetworks/opsum_to_ttn/opsum_to_ttn.jl b/src/treetensornetworks/opsum_to_ttn/opsum_to_ttn.jl index 0b607b3a..008cb000 100644 --- a/src/treetensornetworks/opsum_to_ttn/opsum_to_ttn.jl +++ b/src/treetensornetworks/opsum_to_ttn/opsum_to_ttn.jl @@ -290,10 +290,12 @@ function compress_ttn( ) end - # initialize TTN without the dummy indices added; tensors are filled in below - g0 = NamedGraph(underlying_graph(sites0)) - tensors0 = Dict{vertextype(g0), ITensor}(v => ITensor() for v in vertices(g0)) - H = TreeTensorNetwork(ITensorNetwork(tensors0, g0)) + # Buffer the per-vertex tensors in a plain `Dict` first. Wrapping in + # `ITensorNetwork` + `TreeTensorNetwork` is deferred until the loop + # below has assigned real tensors at every vertex, so the tree + # structure (and `Index` space type) is locked in only once the data + # is consistent. + H = Dict{vertextype(sites0), ITensor}(v => ITensor() for v in vertices(sites0)) function qnblock(i::Index, q::QN) for b in 2:(nblocks(i) - 1) flux(i, Block(b)) == q && return b @@ -443,7 +445,7 @@ function compress_ttn( H[v] += T * ITensorNetworks.computeSiteProd(sites, Prod([(Op("Id", v))])) end end - return H + return TreeTensorNetwork(ITensorNetwork(H)) end # diff --git a/src/treetensornetworks/treetensornetwork.jl b/src/treetensornetworks/treetensornetwork.jl index 9e5f4f72..8152f807 100644 --- a/src/treetensornetworks/treetensornetwork.jl +++ b/src/treetensornetworks/treetensornetwork.jl @@ -1,4 +1,6 @@ +using DataGraphs: DataGraphs, underlying_graph, vertex_data using Dictionaries: Indices +using Graphs: Graphs, add_vertex!, rem_vertex! using NamedGraphs.GraphsExtensions: vertextype using NamedGraphs: similar_graph @@ -61,7 +63,7 @@ julia> s = siteinds("S=1/2", g); julia> tensors = Dict(v => ITensor(s[v]...) for v in vertices(g)); -julia> itn = ITensorNetwork(tensors, NamedGraph(g)); +julia> itn = ITensorNetwork(tensors); julia> ttn_state = TreeTensorNetwork(itn; ortho_region = [first(vertices(itn))]); @@ -102,18 +104,25 @@ See also: [`orthogonalize`](@ref). """ ortho_region(tn::TTN) = tn.ortho_region -# Required for `AbstractITensorNetwork` interface -data_graph(tn::TTN) = data_graph(tn.tensornetwork) +# `AbstractITensorNetwork` storage forwarding — delegate to the inner +# `ITensorNetwork` so its reverse-index map and edge reconciliation +# run on writes. +DataGraphs.underlying_graph(tn::TTN) = underlying_graph(tn.tensornetwork) +DataGraphs.vertex_data(tn::TTN) = vertex_data(tn.tensornetwork) -# Forward vertex writes to the wrapped `ITensorNetwork` so its -# reverse-index map and edge reconciliation run as usual. 
function DataGraphs.set_vertex_data!(tn::TTN, value, v) set_vertex_data!(tn.tensornetwork, value, v) return tn end -function data_graph_type(G::Type{<:TTN}) - return data_graph_type(fieldtype(G, :tensornetwork)) +function Graphs.rem_vertex!(tn::TTN, v) + rem_vertex!(tn.tensornetwork, v) + return tn +end + +function Graphs.add_vertex!(tn::TTN, v) + add_vertex!(tn.tensornetwork, v) + return tn end function Base.copy(tn::TTN) diff --git a/test/test_ttn_position.jl b/test/test_ttn_position.jl index f2e0e3ab..10a7ee45 100644 --- a/test/test_ttn_position.jl +++ b/test/test_ttn_position.jl @@ -3,7 +3,7 @@ using Graphs: vertices using ITensorNetworks: ITensorNetwork, ProjTTN, TreeTensorNetwork, environments, position, siteinds using ITensors.NDTensors: with_auto_fermion -using ITensors: ITensor +using ITensors: ITensor, Index using NamedGraphs.NamedGraphGenerators: named_comb_tree, named_path_graph using NamedGraphs: NamedEdge, NamedGraph using Test: @test, @testset @@ -47,9 +47,11 @@ using .ModelHamiltonians: ModelHamiltonians end @testset "ProjTTN construction regression test" begin pos = Indices{Tuple{String, Int}}() - g = NamedGraph{Any}(named_path_graph(2)) - tensors = Dict{Any, ITensor}(v => ITensor() for v in vertices(g)) - operator = TreeTensorNetwork(ITensorNetwork{Any}(tensors, g)) + # Share a placeholder `Index` between the two tensors so the resulting + # `ITensorNetwork` has a single edge (and is therefore a valid tree). + link = Index(1, "Link") + tensors = Dict{Any, ITensor}(v => ITensor(link) for v in vertices(named_path_graph(2))) + operator = TreeTensorNetwork(ITensorNetwork{Any}(tensors)) environments = Dictionary{NamedEdge{Any}, ITensor}() @test ProjTTN(pos, operator, environments) isa ProjTTN{Any, Indices{Any}} end diff --git a/test/utils.jl b/test/utils.jl index 1da81889..8bccb135 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -4,6 +4,7 @@ # inside its gensym module. using DataGraphs: underlying_graph, vertex_data +using Dictionaries: Dictionary using Graphs: AbstractGraph, dst, edges, src, vertices using ITensorNetworks: ITensorNetwork, IndsNetwork using ITensors.NDTensors: dim @@ -35,14 +36,15 @@ function random_tensornetwork( g = NamedGraph(graph) links = Dict(e => Index(link_space, "Link") for e in edges(g)) links = merge(links, Dict(reverse(e) => links[e] for e in edges(g))) - tensors = Dict( - map(collect(vertices(g))) do v - link_v = [links[e] for e in incident_edges(g, v)] - inds_v = [siteinds[v]; link_v] - return v => itensor(randn(rng, eltype, dim.(inds_v)...), inds_v) - end - ) - return ITensorNetwork(tensors, g) + # Use a `Dictionary` (insertion-ordered) so the constructed + # `ITensorNetwork`'s vertex / edge order tracks `vertices(g)`. + vs = collect(vertices(g)) + ts = map(vs) do v + link_v = [links[e] for e in incident_edges(g, v)] + inds_v = [siteinds[v]; link_v] + return itensor(randn(rng, eltype, dim.(inds_v)...), inds_v) + end + return ITensorNetwork(Dictionary(vs, ts)) end # `IndsNetwork`: extract site inds (`Index[]` where unassigned). @@ -106,15 +108,13 @@ function productstate(elt::Type, state::Function, s::IndsNetwork) return productstate(elt, Dict(v => state(v) for v in vertices(s)), s) end function productstate(elt::Type, state, s::IndsNetwork) - g = NamedGraph(collect(vertices(s))) - tensors = Dict( - map(collect(vertices(s))) do v - site_v = isassigned(vertex_data(s), v) ? 
s[v] : Index[] - t = ITensors.state(state[v], only(site_v)) - return v => ITensors.convert_eltype(elt, t) - end - ) - tn = ITensorNetwork(tensors, g) + vs = collect(vertices(s)) + ts = map(vs) do v + site_v = isassigned(vertex_data(s), v) ? s[v] : Index[] + t = ITensors.state(state[v], only(site_v)) + return ITensors.convert_eltype(elt, t) + end + tn = ITensorNetwork(Dictionary(vs, ts)) for e in edges(s) _add_edge!(elt, tn, e) end From 62b91ad37eade42dc90f62f7cadfe7db4bfad99e Mon Sep 17 00:00:00 2001 From: Matthew Fishman Date: Tue, 12 May 2026 12:37:36 -0400 Subject: [PATCH 03/10] Inline TreeTensorNetwork storage; drop similar_graph / empty-state paths - TreeTensorNetwork now holds graph, vertex_data, ind_to_vertices, and ortho_region directly, matching the ITensorNetwork field layout instead of wrapping an inner ITensorNetwork. - Extract _set_vertex_data! and _rem_vertex! helpers shared by both ITensorNetwork and TreeTensorNetwork. - Drop similar_graph and the empty-then-fill construction paths from the form networks and belief-propagation cache; networks are now always constructed with their full vertex data. - Rewrite induced_subgraph_from_vertices to feed tensors through the standard ITensorNetwork(tensors) constructor. Co-Authored-By: Claude Opus 4.7 (1M context) --- src/abstractitensornetwork.jl | 25 ++- src/caches/beliefpropagationcache.jl | 9 - src/formnetworks/bilinearformnetwork.jl | 12 -- src/formnetworks/linearformnetwork.jl | 10 - src/formnetworks/quadraticformnetwork.jl | 12 -- src/itensornetwork.jl | 185 +++++++----------- .../abstracttreetensornetwork.jl | 2 +- .../opsum_to_ttn/opsum_to_ttn.jl | 2 +- src/treetensornetworks/treetensornetwork.jl | 83 ++++---- test/test_ttns.jl | 4 +- 10 files changed, 134 insertions(+), 210 deletions(-) diff --git a/src/abstractitensornetwork.jl b/src/abstractitensornetwork.jl index f18b3804..5534fc7d 100644 --- a/src/abstractitensornetwork.jl +++ b/src/abstractitensornetwork.jl @@ -17,8 +17,9 @@ using SplitApplyCombine: flatten abstract type AbstractITensorNetwork{V} <: AbstractDataGraph{V, ITensor, ITensor} end # Subtypes provide the storage: `underlying_graph(tn)` returns the named graph -# and `vertex_data(tn)` returns a `Dict{V, ITensor}`-like mapping. Edge data is -# unused — every `AbstractITensorNetwork` is treated as having no edge data. +# and `vertex_data(tn)` returns a `Dictionary{V, ITensor}`-like mapping. Edge +# data is unused — every `AbstractITensorNetwork` is treated as having no edge +# data. # TODO: Define a generic fallback for `AbstractDataGraph`? DataGraphs.edge_data_type(::Type{<:AbstractITensorNetwork}) = ITensor @@ -128,9 +129,8 @@ end # TODO: Define `eltype(::AbstractITensorNetwork)` as `ITensor`? -# TODO: Implement using `adapt` function NDTensors.convert_scalartype(eltype::Type{<:Number}, tn::AbstractITensorNetwork) - return map(t -> ITensors.adapt(eltype, t), tn) + return map(adapt(eltype), tn) end function Base.complex(tn::AbstractITensorNetwork) @@ -358,9 +358,9 @@ function NDTensors.contract( new_itensor = tn[src(edge)] * tn[dst(edge)] rem_vertex!(tn, src(edge)) rem_vertex!(tn, dst(edge)) - add_vertex!(tn, merged_vertex) - # Reverse-map reconciliation on assignment picks up the new bonds - # to the surviving neighbors of `src(edge)` and `dst(edge)`. + # `setindex!` (via `set_vertex_data!`) adds `merged_vertex` to the + # graph and reverse-map reconciliation picks up the new bonds to the + # surviving neighbors of `src(edge)` and `dst(edge)`. 
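+    # Removing both endpoints first also clears their inds from the
+    # reverse map, so re-registering those inds under `merged_vertex`
+    # stays within the two-owner (no-hyperedge) limit.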
tn[merged_vertex] = new_itensor return tn end @@ -505,7 +505,7 @@ function Base.show(io::IO, mime::MIME"text/plain", graph::AbstractITensorNetwork end println(io) println(io, "with vertex data:") - show(io, mime, Dict(v => inds(graph[v]) for v in vertices(graph))) + show(io, mime, inds.(vertex_data(graph))) return nothing end @@ -669,10 +669,7 @@ function NamedGraphs.induced_subgraph_from_vertices( itn::AbstractITensorNetwork, subvertices ) - subgraph, vlist = induced_subgraph(underlying_graph(itn), subvertices) - subitn = similar_graph(itn, subgraph) - - subitn[Vertices(subvertices)] = vertex_data(itn) - - return subitn, vlist + _, vlist = induced_subgraph(underlying_graph(itn), subvertices) + sub_vs = collect(subvertices) + return ITensorNetwork(Dictionary(sub_vs, [itn[v] for v in sub_vs])), vlist end diff --git a/src/caches/beliefpropagationcache.jl b/src/caches/beliefpropagationcache.jl index 9a8e623a..0bb9ff93 100644 --- a/src/caches/beliefpropagationcache.jl +++ b/src/caches/beliefpropagationcache.jl @@ -23,15 +23,6 @@ struct BeliefPropagationCache{V, PV, PTN <: AbstractPartitionedGraph{V, PV}, MTS messages::MTS end -function NamedGraphs.similar_graph( - bpc::BeliefPropagationCache, - underlying_graph::AbstractGraph - ) - return BeliefPropagationCache( - similar_graph(bpc.underlying_graph, underlying_graph) - ) -end - #Constructors... function BeliefPropagationCache(ptn::PartitionedGraph; messages = default_messages(ptn)) return BeliefPropagationCache(ptn, messages) diff --git a/src/formnetworks/bilinearformnetwork.jl b/src/formnetworks/bilinearformnetwork.jl index c6f0386e..513a3d97 100644 --- a/src/formnetworks/bilinearformnetwork.jl +++ b/src/formnetworks/bilinearformnetwork.jl @@ -41,18 +41,6 @@ function BilinearFormNetwork( ) end -function NamedGraphs.similar_graph( - bf::BilinearFormNetwork, - underlying_graph::AbstractGraph - ) - return BilinearFormNetwork( - similar_graph(bf.tensornetwork, underlying_graph), - operator_vertex_suffix(bf), - bra_vertex_suffix(bf), - ket_vertex_suffix(bf) - ) -end - operator_vertex_suffix(blf::BilinearFormNetwork) = blf.operator_vertex_suffix bra_vertex_suffix(blf::BilinearFormNetwork) = blf.bra_vertex_suffix ket_vertex_suffix(blf::BilinearFormNetwork) = blf.ket_vertex_suffix diff --git a/src/formnetworks/linearformnetwork.jl b/src/formnetworks/linearformnetwork.jl index 0bd33c0c..654d858f 100644 --- a/src/formnetworks/linearformnetwork.jl +++ b/src/formnetworks/linearformnetwork.jl @@ -1,8 +1,6 @@ using DataGraphs: DataGraphs, set_vertex_data! 
-using Graphs: AbstractGraph using ITensors: ITensor, prime using NamedGraphs.GraphsExtensions: disjoint_union -using NamedGraphs: similar_graph default_dual_link_index_map = prime @@ -48,14 +46,6 @@ function DataGraphs.set_vertex_data!(lf::LinearFormNetwork, value, vertex) return lf end -function NamedGraphs.similar_graph( - lf::LinearFormNetwork, - underlying_graph::AbstractGraph - ) - tn = similar_graph(tensornetwork(lf), underlying_graph) - return LinearFormNetwork(tn, bra_vertex_suffix(lf), ket_vertex_suffix(lf)) -end - function Base.copy(lf::LinearFormNetwork) return LinearFormNetwork( copy(tensornetwork(lf)), bra_vertex_suffix(lf), ket_vertex_suffix(lf) diff --git a/src/formnetworks/quadraticformnetwork.jl b/src/formnetworks/quadraticformnetwork.jl index 9db4c147..eb4b2674 100644 --- a/src/formnetworks/quadraticformnetwork.jl +++ b/src/formnetworks/quadraticformnetwork.jl @@ -1,5 +1,4 @@ using DataGraphs: DataGraphs, set_vertex_data!, underlying_graph, vertex_data -using NamedGraphs: similar_graph default_index_map = prime default_inv_index_map = noprime @@ -16,17 +15,6 @@ struct QuadraticFormNetwork{ dual_inv_index_map::InvIndexMap end -function NamedGraphs.similar_graph( - qf::QuadraticFormNetwork, - underlying_graph::AbstractGraph - ) - return QuadraticFormNetwork( - similar_graph(bilinear_formnetwork(qf), underlying_graph), - dual_index_map(qf), - dual_inv_index_map(qf) - ) -end - bilinear_formnetwork(qf::QuadraticFormNetwork) = qf.formnetwork #Needed for implementation, forward from bilinear form diff --git a/src/itensornetwork.jl b/src/itensornetwork.jl index 717af44f..56a39818 100644 --- a/src/itensornetwork.jl +++ b/src/itensornetwork.jl @@ -1,20 +1,20 @@ using DataGraphs: DataGraphs, set_vertex_data!, underlying_graph, vertex_data -using Graphs: Graphs, AbstractGraph, add_edge!, add_vertex!, edges, has_edge, neighbors, +using Dictionaries: Dictionaries, Dictionary +using Graphs: Graphs, add_edge!, add_vertex!, edges, has_edge, has_vertex, neighbors, rem_edge!, rem_vertex!, vertices using ITensors: ITensors, ITensor, Index, inds using NamedGraphs: NamedGraphs, NamedEdge, NamedGraph, vertextype """ - ITensorNetwork{V, S} + ITensorNetwork{V} A tensor network where each vertex holds an `ITensor`. Storage is split across three fields: - `graph::NamedGraph{V}` — the network's graph (`V` is the vertex type), - - `vertex_data::Dict{V, ITensor}` — the tensor at each vertex, - - `ind_to_vertices::Dict{Index{S}, Set{V}}` — reverse map from each - `Index` to the vertices it appears in (`S` is the `Index` space type, - e.g. `Int` for plain dims or `Vector{Pair{QN, Int}}` for QN-graded). + - `vertex_data::Dictionary{V, ITensor}` — the tensor at each vertex, + - `ind_to_vertices::Dict{Index, Set{V}}` — reverse map from each `Index` + to the vertices it appears in. The reverse map keeps vertex lookup by shared `Index` O(1) and enforces the graph-edge ↔ shared-`Index` invariant: every `Index` appears at @@ -28,7 +28,6 @@ are rejected. ```julia ITensorNetwork(tensors) ITensorNetwork{V}(tensors) -ITensorNetwork{V, S}(tensors) ``` `tensors` is any collection where `keys(tensors)` are vertex labels and @@ -49,10 +48,10 @@ julia> tn = ITensorNetwork([ITensor(i, j), ITensor(j, k)]); See also: `IndsNetwork`, [`TreeTensorNetwork`](@ref ITensorNetworks.TreeTensorNetwork). 
""" -struct ITensorNetwork{V, S} <: AbstractITensorNetwork{V} +struct ITensorNetwork{V} <: AbstractITensorNetwork{V} graph::NamedGraph{V} - vertex_data::Dict{V, ITensor} - ind_to_vertices::Dict{Index{S}, Set{V}} + vertex_data::Dictionary{V, ITensor} + ind_to_vertices::Dict{Index, Set{V}} end # @@ -70,89 +69,32 @@ end # Constructors # -# Infer `S` from a tensor's indices; default to `Int` (plain non-QN -# dim-as-`Int` Indices) when the collection is empty or every tensor -# has no indices. -_index_space_type(::Index{S}) where {S} = S -function _index_space_type(tensors) - for t in values(tensors) - is = inds(t) - isempty(is) || return _index_space_type(first(is)) - end - return Int -end - -# Build the reverse index map from `tensors`, infer the graph edges from -# that map, and enforce the no-hyperedge invariant. -function ITensorNetwork{V, S}(tensors) where {V, S} - vs = V[v for v in keys(tensors)] - graph = NamedGraph(vs) - vertex_data = Dict{V, ITensor}(v => tensors[v] for v in vs) - ind_to_vertices = Dict{Index{S}, Set{V}}() - for v in vs, i in inds(vertex_data[v]) - push!(get!(ind_to_vertices, i, Set{V}()), v) - end - for (i, owners) in ind_to_vertices - length(owners) <= 2 || error( - "Index $i appears at $(length(owners)) vertices; `ITensorNetwork` " * - "is not a hypergraph — every `Index` must appear at one (external) " * - "or two (bond) vertices." - ) - end - # Walk `vs` in order so the edge add order — and therefore the - # `neighbors(g, v)` / `edges(g)` iteration order — is deterministic in - # the input vertex order, rather than the non-deterministic hash order - # of `values(ind_to_vertices)` and `Set` iteration. - for v in vs, i in inds(vertex_data[v]) - for u in ind_to_vertices[i] - u != v && !has_edge(graph, v, u) && add_edge!(graph, v => u) - end - end - return ITensorNetwork{V, S}(graph, vertex_data, ind_to_vertices) -end - -function ITensorNetwork{V, S}(tn::AbstractITensorNetwork) where {V, S} - return ITensorNetwork{V, S}(Dict{V, ITensor}(v => tn[v] for v in vertices(tn))) -end - +# Construct by feeding `tensors` through `set_vertex_data!` one vertex +# at a time — this centralizes the reverse-map registration, edge +# inference, and hypergraph check in a single place (the `setindex!` +# code path). Walking `keys(tensors)` in order makes the resulting +# `neighbors(g, v)` / `edges(g)` iteration order deterministic in the +# input order. function ITensorNetwork{V}(tensors) where {V} - return ITensorNetwork{V, _index_space_type(tensors)}(tensors) -end - -function ITensorNetwork{V}(tn::AbstractITensorNetwork) where {V} - return ITensorNetwork{V}(Dict{V, ITensor}(v => tn[v] for v in vertices(tn))) + tn = ITensorNetwork{V}(NamedGraph{V}(), Dictionary{V, ITensor}(), Dict{Index, Set{V}}()) + for v in keys(tensors) + set_vertex_data!(tn, tensors[v], v) + end + return tn end ITensorNetwork(tensors) = ITensorNetwork{keytype(tensors)}(tensors) -# Empty network over `vertices(g)`: vertices are added to the graph but -# carry no tensor data yet, and the graph has no edges. Tensors are -# populated via `setindex!`, which infers bonds from shared `Index`es; -# any edges already in `g` are discarded since they'll be re-derived from -# the indices. Primarily used by `similar_graph` so that `induced_subgraph` -# and related operations can build their result incrementally. 
-function ITensorNetwork{V, S}(g::AbstractGraph) where {V, S} - return ITensorNetwork{V, S}( - NamedGraph(collect(V, vertices(g))), - Dict{V, ITensor}(), - Dict{Index{S}, Set{V}}() - ) -end - # # Vertex-type conversion and copy # NamedGraphs.convert_vertextype(::Type{V}, tn::ITensorNetwork{V}) where {V} = tn function NamedGraphs.convert_vertextype(::Type{V}, tn::ITensorNetwork) where {V} - return ITensorNetwork{V}(Dict{V, ITensor}(v => tn[v] for v in vertices(tn))) + return ITensorNetwork{V}(tn) end -function Base.copy(tn::ITensorNetwork{V, S}) where {V, S} - return ITensorNetwork{V, S}( - Dict{V, ITensor}(v => copy(tn[v]) for v in vertices(tn)) - ) -end +Base.copy(tn::ITensorNetwork) = ITensorNetwork(map(copy, vertex_data(tn))) # # Mutation: keep `graph`, `vertex_data`, and `ind_to_vertices` in sync. @@ -160,66 +102,79 @@ end # Write `value` to vertex `v`, updating the reverse map and reconciling # edges so the graph-edge ↔ shared-`Index` invariant holds. Cost is -# O(deg(v) + |inds(value)|). -function DataGraphs.set_vertex_data!(tn::ITensorNetwork{V, S}, value, v) where {V, S} - # Unregister old inds of `tn[v]` from the reverse map. - if haskey(tn.vertex_data, v) - for i in inds(tn.vertex_data[v]) - owners = tn.ind_to_vertices[i] +# O(deg(v) + |inds(value)|). If `v` isn't already in the network, it's +# added — so this is also the natural way to grow the network one tensor +# at a time without a separate `add_vertex!` step. Operates on raw +# storage so `ITensorNetwork` and `TreeTensorNetwork` can share it. +function _set_vertex_data!( + graph::NamedGraph{V}, + vertex_data::Dictionary{V, ITensor}, + ind_to_vertices::Dict{Index, Set{V}}, + value, + v + ) where {V} + # Add the vertex to the graph if it's new. + has_vertex(graph, v) || add_vertex!(graph, v) + # Unregister old inds of `vertex_data[v]` from the reverse map. + if haskey(vertex_data, v) + for i in inds(vertex_data[v]) + owners = ind_to_vertices[i] delete!(owners, v) - isempty(owners) && delete!(tn.ind_to_vertices, i) + isempty(owners) && delete!(ind_to_vertices, i) end end - # Write the new tensor. - tn.vertex_data[v] = value + # Write the new tensor. `Dictionaries.set!` inserts or updates; + # plain `setindex!` would error on a vertex not already in the dict. + Dictionaries.set!(vertex_data, v, value) # Register new inds. for i in inds(value) - push!(get!(tn.ind_to_vertices, i, Set{V}()), v) - length(tn.ind_to_vertices[i]) <= 2 || error( - "Index $i now appears at $(length(tn.ind_to_vertices[i])) vertices; " * + push!(get!(ind_to_vertices, i, Set{V}()), v) + length(ind_to_vertices[i]) <= 2 || error( + "Index $i now appears at $(length(ind_to_vertices[i])) vertices; " * "`ITensorNetwork` forbids hyperedges (3+ vertices sharing an `Index`)." ) end # Reconcile graph edges incident to `v` against the reverse map. desired = Set{V}() for i in inds(value) - for u in tn.ind_to_vertices[i] + for u in ind_to_vertices[i] u == v || push!(desired, u) end end - for u in collect(neighbors(tn.graph, v)) - u in desired || rem_edge!(tn.graph, v => u) + for u in collect(neighbors(graph, v)) + u in desired || rem_edge!(graph, v => u) end for u in desired - has_edge(tn.graph, v, u) || add_edge!(tn.graph, v => u) + has_edge(graph, v, u) || add_edge!(graph, v => u) end + return nothing +end + +function DataGraphs.set_vertex_data!(tn::ITensorNetwork, value, v) + _set_vertex_data!(tn.graph, tn.vertex_data, tn.ind_to_vertices, value, v) return tn end # Drop `v` from the reverse map, vertex data, and graph in one shot. 
-function Graphs.rem_vertex!(tn::ITensorNetwork, v) - if haskey(tn.vertex_data, v) - for i in inds(tn.vertex_data[v]) - owners = tn.ind_to_vertices[i] +function _rem_vertex!( + graph::NamedGraph{V}, + vertex_data::Dictionary{V, ITensor}, + ind_to_vertices::Dict{Index, Set{V}}, + v + ) where {V} + if haskey(vertex_data, v) + for i in inds(vertex_data[v]) + owners = ind_to_vertices[i] delete!(owners, v) - isempty(owners) && delete!(tn.ind_to_vertices, i) + isempty(owners) && delete!(ind_to_vertices, i) end - delete!(tn.vertex_data, v) + delete!(vertex_data, v) end - rem_vertex!(tn.graph, v) - return tn + rem_vertex!(graph, v) + return nothing end -# Add `v` to the graph without any tensor data. A subsequent -# `tn[v] = tensor` writes the tensor and reconciles edges. -function Graphs.add_vertex!(tn::ITensorNetwork, v) - add_vertex!(tn.graph, v) +function Graphs.rem_vertex!(tn::ITensorNetwork, v) + _rem_vertex!(tn.graph, tn.vertex_data, tn.ind_to_vertices, v) return tn end - -# Fresh `ITensorNetwork` over `vertices(g)` with no tensors. Used by -# `induced_subgraph_from_vertices` to build a same-typed empty container -# that subsequent `setindex!` calls populate. -function NamedGraphs.similar_graph(tn::ITensorNetwork{V, S}, g::AbstractGraph) where {V, S} - return ITensorNetwork{V, S}(g) -end diff --git a/src/treetensornetworks/abstracttreetensornetwork.jl b/src/treetensornetworks/abstracttreetensornetwork.jl index 20526921..af646925 100644 --- a/src/treetensornetworks/abstracttreetensornetwork.jl +++ b/src/treetensornetworks/abstracttreetensornetwork.jl @@ -293,7 +293,7 @@ function Base.:+( end tensors_out[v] = tnv end - return TreeTensorNetwork(ITensorNetwork(tensors_out)) + return TreeTensorNetwork(tensors_out) end # TODO: switch default algorithm once more are implemented diff --git a/src/treetensornetworks/opsum_to_ttn/opsum_to_ttn.jl b/src/treetensornetworks/opsum_to_ttn/opsum_to_ttn.jl index 008cb000..8861d189 100644 --- a/src/treetensornetworks/opsum_to_ttn/opsum_to_ttn.jl +++ b/src/treetensornetworks/opsum_to_ttn/opsum_to_ttn.jl @@ -445,7 +445,7 @@ function compress_ttn( H[v] += T * ITensorNetworks.computeSiteProd(sites, Prod([(Op("Id", v))])) end end - return TreeTensorNetwork(ITensorNetwork(H)) + return TreeTensorNetwork(H) end # diff --git a/src/treetensornetworks/treetensornetwork.jl b/src/treetensornetworks/treetensornetwork.jl index 8152f807..7a7268c0 100644 --- a/src/treetensornetworks/treetensornetwork.jl +++ b/src/treetensornetworks/treetensornetwork.jl @@ -1,15 +1,16 @@ -using DataGraphs: DataGraphs, underlying_graph, vertex_data -using Dictionaries: Indices -using Graphs: Graphs, add_vertex!, rem_vertex! +using DataGraphs: DataGraphs, set_vertex_data!, underlying_graph, vertex_data +using Dictionaries: Dictionaries, Dictionary, Indices +using Graphs: Graphs, add_vertex!, has_vertex, is_tree, rem_vertex!, vertices +using ITensors: ITensor, Index using NamedGraphs.GraphsExtensions: vertextype -using NamedGraphs: similar_graph +using NamedGraphs: NamedGraph """ TreeTensorNetwork{V} <: AbstractTreeTensorNetwork{V} -A tensor network whose underlying graph is a tree. In addition to the tensor data, -it tracks an `ortho_region`: the set of vertices that currently form the orthogonality -center of the network. +A tensor network whose underlying graph is a tree. Storage mirrors +[`ITensorNetwork`](@ref) — `graph`, `vertex_data`, and `ind_to_vertices` +— plus an `ortho_region` tracking the orthogonality center. `TTN` is an alias for `TreeTensorNetwork`. 
@@ -19,20 +20,31 @@ Use the [`TreeTensorNetwork`](@ref) constructors to build instances, and See also: [`ITensorNetwork`](@ref). """ struct TreeTensorNetwork{V} <: AbstractTreeTensorNetwork{V} - tensornetwork::ITensorNetwork{V} + graph::NamedGraph{V} + vertex_data::Dictionary{V, ITensor} + ind_to_vertices::Dict{Index, Set{V}} ortho_region::Indices{V} - global function _TreeTensorNetwork(tensornetwork::ITensorNetwork, ortho_region::Indices) - @assert is_tree(tensornetwork) - return new{vertextype(tensornetwork)}(tensornetwork, ortho_region) + global function _TreeTensorNetwork( + graph::NamedGraph{V}, + vertex_data::Dictionary{V, ITensor}, + ind_to_vertices::Dict{Index, Set{V}}, + ortho_region::Indices{V} + ) where {V} + @assert is_tree(graph) + return new{V}(graph, vertex_data, ind_to_vertices, ortho_region) end end -function _TreeTensorNetwork(tensornetwork::ITensorNetwork, ortho_region) - return _TreeTensorNetwork(tensornetwork, Indices(ortho_region)) +function _TreeTensorNetwork(tn::ITensorNetwork{V}, ortho_region::Indices{V}) where {V} + return _TreeTensorNetwork(tn.graph, tn.vertex_data, tn.ind_to_vertices, ortho_region) end -function _TreeTensorNetwork(tensornetwork::ITensorNetwork) - return _TreeTensorNetwork(tensornetwork, vertices(tensornetwork)) +function _TreeTensorNetwork(tn::ITensorNetwork{V}, ortho_region) where {V} + return _TreeTensorNetwork(tn, Indices{V}(ortho_region)) +end + +function _TreeTensorNetwork(tn::ITensorNetwork) + return _TreeTensorNetwork(tn, vertices(tn)) end """ @@ -78,12 +90,14 @@ function TreeTensorNetwork{V}(tn::ITensorNetwork) where {V} return TreeTensorNetwork(ITensorNetwork{V}(tn)) end -const TTN = TreeTensorNetwork - -function NamedGraphs.similar_graph(ttn::TTN, underlying_graph::AbstractGraph) - return TTN(similar_graph(ttn.tensornetwork, underlying_graph)) +# Build a `TreeTensorNetwork` directly from a tensor collection (anything +# accepted by `ITensorNetwork`), saving the caller a wrapping step. +function TreeTensorNetwork(tensors; kwargs...) + return TreeTensorNetwork(ITensorNetwork(tensors); kwargs...) end +const TTN = TreeTensorNetwork + # Field access """ ITensorNetwork(tn::TreeTensorNetwork) -> ITensorNetwork @@ -93,7 +107,7 @@ metadata. The returned network shares the same underlying tensor data. See also: [`TreeTensorNetwork`](@ref). """ -ITensorNetwork(tn::TTN) = copy(tn.tensornetwork) +ITensorNetwork(tn::TTN) = ITensorNetwork(map(copy, vertex_data(tn))) """ ortho_region(tn::TreeTensorNetwork) -> Indices @@ -104,29 +118,28 @@ See also: [`orthogonalize`](@ref). """ ortho_region(tn::TTN) = tn.ortho_region -# `AbstractITensorNetwork` storage forwarding — delegate to the inner -# `ITensorNetwork` so its reverse-index map and edge reconciliation -# run on writes. -DataGraphs.underlying_graph(tn::TTN) = underlying_graph(tn.tensornetwork) -DataGraphs.vertex_data(tn::TTN) = vertex_data(tn.tensornetwork) +# `AbstractITensorNetwork` storage hooks. 
+DataGraphs.underlying_graph(tn::TTN) = tn.graph +DataGraphs.vertex_data(tn::TTN) = tn.vertex_data function DataGraphs.set_vertex_data!(tn::TTN, value, v) - set_vertex_data!(tn.tensornetwork, value, v) + _set_vertex_data!(tn.graph, tn.vertex_data, tn.ind_to_vertices, value, v) return tn end function Graphs.rem_vertex!(tn::TTN, v) - rem_vertex!(tn.tensornetwork, v) - return tn -end - -function Graphs.add_vertex!(tn::TTN, v) - add_vertex!(tn.tensornetwork, v) + _rem_vertex!(tn.graph, tn.vertex_data, tn.ind_to_vertices, v) return tn end function Base.copy(tn::TTN) - return _TreeTensorNetwork(copy(tn.tensornetwork), copy(tn.ortho_region)) + V = vertextype(tn) + return _TreeTensorNetwork( + copy(tn.graph), + map(copy, tn.vertex_data), + Dict{Index, Set{V}}(i => copy(vs) for (i, vs) in tn.ind_to_vertices), + copy(tn.ortho_region) + ) end # @@ -136,5 +149,7 @@ end # set_ortho_region: low-level update of the ortho_region metadata only, # without any gauge transformations. To move the orthogonality center use orthogonalize. function set_ortho_region(tn::TTN, ortho_region) - return TreeTensorNetwork(tn.tensornetwork; ortho_region) + return _TreeTensorNetwork( + tn.graph, tn.vertex_data, tn.ind_to_vertices, Indices{vertextype(tn)}(ortho_region) + ) end diff --git a/test/test_ttns.jl b/test/test_ttns.jl index 46278013..fe39b458 100644 --- a/test/test_ttns.jl +++ b/test/test_ttns.jl @@ -16,8 +16,8 @@ include("utils.jl") psi = TreeTensorNetwork(productstate(v -> "Up", sites)) # product state itn = ITensorNetwork(psi) # TTN → ITensorNetwork - @test vertex_data(itn) == vertex_data(psi.tensornetwork) - @test !(itn === psi.tensornetwork) + @test vertex_data(itn) == vertex_data(psi) + @test !(vertex_data(itn) === vertex_data(psi)) @test vertex_data(TreeTensorNetwork(itn)) == vertex_data(psi) end From dc22cf8b301a5c6d663e9efccfaebc94f20a0cd1 Mon Sep 17 00:00:00 2001 From: Matthew Fishman Date: Tue, 12 May 2026 13:53:36 -0400 Subject: [PATCH 04/10] Drop _TreeTensorNetwork inner ctor; share unregister-inds helper - TreeTensorNetwork now uses the auto-generated all-fields constructor plus a single outer ctor that performs the is_tree check, matching the ITensorNetwork constructor design. - Add ITensorNetwork{V}() empty ctor used as the seed for the tensor-collection constructor, replacing the explicit three-field call. - Extract _unregister_inds! so _set_vertex_data! and _rem_vertex! share their reverse-map cleanup, without altering the in-place vertex_data update path. - Build dictionaries via map(Indices(...)) in test/utils.jl. - Fix the TreeTensorNetwork jldoctest to use a properly connected two-vertex example. Co-Authored-By: Claude Opus 4.7 (1M context) --- src/itensornetwork.jl | 72 ++++++++++++--------- src/treetensornetworks/treetensornetwork.jl | 64 +++++------------- test/utils.jl | 17 +++-- 3 files changed, 67 insertions(+), 86 deletions(-) diff --git a/src/itensornetwork.jl b/src/itensornetwork.jl index 56a39818..968dc911 100644 --- a/src/itensornetwork.jl +++ b/src/itensornetwork.jl @@ -1,9 +1,9 @@ using DataGraphs: DataGraphs, set_vertex_data!, underlying_graph, vertex_data using Dictionaries: Dictionaries, Dictionary -using Graphs: Graphs, add_edge!, add_vertex!, edges, has_edge, has_vertex, neighbors, - rem_edge!, rem_vertex!, vertices -using ITensors: ITensors, ITensor, Index, inds -using NamedGraphs: NamedGraphs, NamedEdge, NamedGraph, vertextype +using Graphs: + Graphs, add_edge!, add_vertex!, has_edge, has_vertex, neighbors, rem_edge!, rem_vertex! 
+using ITensors: ITensor, Index, inds +using NamedGraphs: NamedGraphs, NamedGraph, vertextype """ ITensorNetwork{V} @@ -69,6 +69,14 @@ end # Constructors # +# Empty network with no vertices, used as the starting point for the +# tensor-collection constructor below. +function ITensorNetwork{V}() where {V} + return ITensorNetwork{V}( + NamedGraph{V}(), Dictionary{V, ITensor}(), Dict{Index, Set{V}}() + ) +end + # Construct by feeding `tensors` through `set_vertex_data!` one vertex # at a time — this centralizes the reverse-map registration, edge # inference, and hypergraph check in a single place (the `setindex!` @@ -76,7 +84,7 @@ end # `neighbors(g, v)` / `edges(g)` iteration order deterministic in the # input order. function ITensorNetwork{V}(tensors) where {V} - tn = ITensorNetwork{V}(NamedGraph{V}(), Dictionary{V, ITensor}(), Dict{Index, Set{V}}()) + tn = ITensorNetwork{V}() for v in keys(tensors) set_vertex_data!(tn, tensors[v], v) end @@ -100,12 +108,29 @@ Base.copy(tn::ITensorNetwork) = ITensorNetwork(map(copy, vertex_data(tn))) # Mutation: keep `graph`, `vertex_data`, and `ind_to_vertices` in sync. # +# Drop the inds of `vertex_data[v]` from the reverse map, leaving +# `vertex_data` and `graph` themselves untouched. Used both as a +# prelude to overwriting `v` and as a step in `_rem_vertex!`. +function _unregister_inds!( + vertex_data::Dictionary{V, ITensor}, + ind_to_vertices::Dict{Index, Set{V}}, + v + ) where {V} + haskey(vertex_data, v) || return nothing + for i in inds(vertex_data[v]) + owners = ind_to_vertices[i] + delete!(owners, v) + isempty(owners) && delete!(ind_to_vertices, i) + end + return nothing +end + # Write `value` to vertex `v`, updating the reverse map and reconciling -# edges so the graph-edge ↔ shared-`Index` invariant holds. Cost is -# O(deg(v) + |inds(value)|). If `v` isn't already in the network, it's -# added — so this is also the natural way to grow the network one tensor -# at a time without a separate `add_vertex!` step. Operates on raw -# storage so `ITensorNetwork` and `TreeTensorNetwork` can share it. +# edges so the graph-edge ↔ shared-`Index` invariant holds. If `v` is +# new it's added to the graph — so this is also the natural way to grow +# the network one tensor at a time without a separate `add_vertex!` +# step. Operates on raw storage so `ITensorNetwork` and +# `TreeTensorNetwork` can share it. function _set_vertex_data!( graph::NamedGraph{V}, vertex_data::Dictionary{V, ITensor}, @@ -113,20 +138,13 @@ function _set_vertex_data!( value, v ) where {V} - # Add the vertex to the graph if it's new. has_vertex(graph, v) || add_vertex!(graph, v) - # Unregister old inds of `vertex_data[v]` from the reverse map. - if haskey(vertex_data, v) - for i in inds(vertex_data[v]) - owners = ind_to_vertices[i] - delete!(owners, v) - isempty(owners) && delete!(ind_to_vertices, i) - end - end - # Write the new tensor. `Dictionaries.set!` inserts or updates; - # plain `setindex!` would error on a vertex not already in the dict. + _unregister_inds!(vertex_data, ind_to_vertices, v) + # `set!` updates in place when `v` is already present, preserving + # the insertion order of `vertex_data`. Plain `setindex!` would + # error on a missing key, and `insert!` would error on an existing + # one — `set!` handles both branches. Dictionaries.set!(vertex_data, v, value) - # Register new inds. 
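+    # Register the new inds, enforcing the two-owner (no-hyperedge) limit.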
for i in inds(value) push!(get!(ind_to_vertices, i, Set{V}()), v) length(ind_to_vertices[i]) <= 2 || error( @@ -162,14 +180,8 @@ function _rem_vertex!( ind_to_vertices::Dict{Index, Set{V}}, v ) where {V} - if haskey(vertex_data, v) - for i in inds(vertex_data[v]) - owners = ind_to_vertices[i] - delete!(owners, v) - isempty(owners) && delete!(ind_to_vertices, i) - end - delete!(vertex_data, v) - end + _unregister_inds!(vertex_data, ind_to_vertices, v) + haskey(vertex_data, v) && delete!(vertex_data, v) rem_vertex!(graph, v) return nothing end diff --git a/src/treetensornetworks/treetensornetwork.jl b/src/treetensornetworks/treetensornetwork.jl index 7a7268c0..ae600936 100644 --- a/src/treetensornetworks/treetensornetwork.jl +++ b/src/treetensornetworks/treetensornetwork.jl @@ -1,6 +1,6 @@ using DataGraphs: DataGraphs, set_vertex_data!, underlying_graph, vertex_data -using Dictionaries: Dictionaries, Dictionary, Indices -using Graphs: Graphs, add_vertex!, has_vertex, is_tree, rem_vertex!, vertices +using Dictionaries: Dictionary, Indices +using Graphs: Graphs, is_tree, rem_vertex!, vertices using ITensors: ITensor, Index using NamedGraphs.GraphsExtensions: vertextype using NamedGraphs: NamedGraph @@ -24,27 +24,6 @@ struct TreeTensorNetwork{V} <: AbstractTreeTensorNetwork{V} vertex_data::Dictionary{V, ITensor} ind_to_vertices::Dict{Index, Set{V}} ortho_region::Indices{V} - global function _TreeTensorNetwork( - graph::NamedGraph{V}, - vertex_data::Dictionary{V, ITensor}, - ind_to_vertices::Dict{Index, Set{V}}, - ortho_region::Indices{V} - ) where {V} - @assert is_tree(graph) - return new{V}(graph, vertex_data, ind_to_vertices, ortho_region) - end -end - -function _TreeTensorNetwork(tn::ITensorNetwork{V}, ortho_region::Indices{V}) where {V} - return _TreeTensorNetwork(tn.graph, tn.vertex_data, tn.ind_to_vertices, ortho_region) -end - -function _TreeTensorNetwork(tn::ITensorNetwork{V}, ortho_region) where {V} - return _TreeTensorNetwork(tn, Indices{V}(ortho_region)) -end - -function _TreeTensorNetwork(tn::ITensorNetwork) - return _TreeTensorNetwork(tn, vertices(tn)) end """ @@ -61,31 +40,27 @@ Throws an error if the underlying graph of `tn` is not a tree. # Example ```jldoctest -julia> using NamedGraphs.NamedGraphGenerators: named_comb_tree - -julia> using NamedGraphs: NamedGraph +julia> using ITensors: Index, ITensor julia> using Graphs: vertices -julia> using ITensors: ITensor +julia> i, j, k = Index(2, "i"), Index(2, "j"), Index(2, "k"); -julia> g = named_comb_tree((2, 2)); +julia> itn = ITensorNetwork([ITensor(i, j), ITensor(j, k)]); -julia> s = siteinds("S=1/2", g); - -julia> tensors = Dict(v => ITensor(s[v]...) for v in vertices(g)); - -julia> itn = ITensorNetwork(tensors); - -julia> ttn_state = TreeTensorNetwork(itn; ortho_region = [first(vertices(itn))]); +julia> ttn = TreeTensorNetwork(itn; ortho_region = [first(vertices(itn))]); ``` See also: [`ITensorNetwork`](@ref), [`orthogonalize`](@ref). 
""" -function TreeTensorNetwork(tn::ITensorNetwork; ortho_region = vertices(tn)) - return _TreeTensorNetwork(tn, ortho_region) +function TreeTensorNetwork(tn::ITensorNetwork{V}; ortho_region = vertices(tn)) where {V} + @assert is_tree(tn) + return TreeTensorNetwork{V}( + tn.graph, tn.vertex_data, tn.ind_to_vertices, Indices{V}(ortho_region) + ) end + function TreeTensorNetwork{V}(tn::ITensorNetwork) where {V} return TreeTensorNetwork(ITensorNetwork{V}(tn)) end @@ -132,9 +107,8 @@ function Graphs.rem_vertex!(tn::TTN, v) return tn end -function Base.copy(tn::TTN) - V = vertextype(tn) - return _TreeTensorNetwork( +function Base.copy(tn::TTN{V}) where {V} + return TreeTensorNetwork{V}( copy(tn.graph), map(copy, tn.vertex_data), Dict{Index, Set{V}}(i => copy(vs) for (i, vs) in tn.ind_to_vertices), @@ -142,14 +116,10 @@ function Base.copy(tn::TTN) ) end -# -# Constructor -# - # set_ortho_region: low-level update of the ortho_region metadata only, # without any gauge transformations. To move the orthogonality center use orthogonalize. -function set_ortho_region(tn::TTN, ortho_region) - return _TreeTensorNetwork( - tn.graph, tn.vertex_data, tn.ind_to_vertices, Indices{vertextype(tn)}(ortho_region) +function set_ortho_region(tn::TTN{V}, ortho_region) where {V} + return TreeTensorNetwork{V}( + tn.graph, tn.vertex_data, tn.ind_to_vertices, Indices{V}(ortho_region) ) end diff --git a/test/utils.jl b/test/utils.jl index 8bccb135..e6492676 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -4,7 +4,7 @@ # inside its gensym module. using DataGraphs: underlying_graph, vertex_data -using Dictionaries: Dictionary +using Dictionaries: Indices using Graphs: AbstractGraph, dst, edges, src, vertices using ITensorNetworks: ITensorNetwork, IndsNetwork using ITensors.NDTensors: dim @@ -36,15 +36,15 @@ function random_tensornetwork( g = NamedGraph(graph) links = Dict(e => Index(link_space, "Link") for e in edges(g)) links = merge(links, Dict(reverse(e) => links[e] for e in edges(g))) - # Use a `Dictionary` (insertion-ordered) so the constructed - # `ITensorNetwork`'s vertex / edge order tracks `vertices(g)`. - vs = collect(vertices(g)) - ts = map(vs) do v + # `Indices`-keyed `map` returns a `Dictionary` (insertion-ordered), + # so the constructed `ITensorNetwork`'s vertex / edge order tracks + # `vertices(g)`. + ts = map(Indices(vertices(g))) do v link_v = [links[e] for e in incident_edges(g, v)] inds_v = [siteinds[v]; link_v] return itensor(randn(rng, eltype, dim.(inds_v)...), inds_v) end - return ITensorNetwork(Dictionary(vs, ts)) + return ITensorNetwork(ts) end # `IndsNetwork`: extract site inds (`Index[]` where unassigned). @@ -108,13 +108,12 @@ function productstate(elt::Type, state::Function, s::IndsNetwork) return productstate(elt, Dict(v => state(v) for v in vertices(s)), s) end function productstate(elt::Type, state, s::IndsNetwork) - vs = collect(vertices(s)) - ts = map(vs) do v + ts = map(Indices(vertices(s))) do v site_v = isassigned(vertex_data(s), v) ? s[v] : Index[] t = ITensors.state(state[v], only(site_v)) return ITensors.convert_eltype(elt, t) end - tn = ITensorNetwork(Dictionary(vs, ts)) + tn = ITensorNetwork(ts) for e in edges(s) _add_edge!(elt, tn, e) end From 7c3eb046a94a590b16a4edb2f647101b5a285669 Mon Sep 17 00:00:00 2001 From: Matthew Fishman Date: Tue, 12 May 2026 14:11:05 -0400 Subject: [PATCH 05/10] Cut down ITN and TTN constructor surface - Drop ITensorNetwork{V}() empty-arg ctor; inline the three-field call at the single seed site inside ITensorNetwork{V}(tensors). 
External callers that want an empty network can pass an empty tensor collection (Dict{V, ITensor}() etc.). - Drop the TreeTensorNetwork(::ITensorNetwork; ortho_region=...) and TreeTensorNetwork{V}(::ITensorNetwork) overloads. All construction now routes through TreeTensorNetwork(tensors; ortho_region=nothing), which builds an ITensorNetwork first and then performs the is_tree check. - Define Base.keytype on AbstractITensorNetwork so an AbstractITN can be passed as a tensor collection to ITensorNetwork(tensors). Co-Authored-By: Claude Opus 4.7 (1M context) --- src/abstractitensornetwork.jl | 2 ++ src/itensornetwork.jl | 13 ++------ src/treetensornetworks/treetensornetwork.jl | 35 +++++++++------------ 3 files changed, 20 insertions(+), 30 deletions(-) diff --git a/src/abstractitensornetwork.jl b/src/abstractitensornetwork.jl index 5534fc7d..d478b29c 100644 --- a/src/abstractitensornetwork.jl +++ b/src/abstractitensornetwork.jl @@ -45,6 +45,8 @@ Base.copy(tn::AbstractITensorNetwork) = not_implemented() # whether `vertex_data` is a `Dict`, `Dictionary`, or anything else with # different default-iteration semantics. Base.keys(tn::AbstractITensorNetwork) = vertices(tn) +Base.keytype(::Type{<:AbstractITensorNetwork{V}}) where {V} = V +Base.keytype(tn::AbstractITensorNetwork) = keytype(typeof(tn)) Base.values(tn::AbstractITensorNetwork) = (tn[v] for v in vertices(tn)) Base.iterate(tn::AbstractITensorNetwork, args...) = iterate(values(tn), args...) diff --git a/src/itensornetwork.jl b/src/itensornetwork.jl index 968dc911..31d44859 100644 --- a/src/itensornetwork.jl +++ b/src/itensornetwork.jl @@ -69,22 +69,15 @@ end # Constructors # -# Empty network with no vertices, used as the starting point for the -# tensor-collection constructor below. -function ITensorNetwork{V}() where {V} - return ITensorNetwork{V}( - NamedGraph{V}(), Dictionary{V, ITensor}(), Dict{Index, Set{V}}() - ) -end - # Construct by feeding `tensors` through `set_vertex_data!` one vertex # at a time — this centralizes the reverse-map registration, edge # inference, and hypergraph check in a single place (the `setindex!` # code path). Walking `keys(tensors)` in order makes the resulting # `neighbors(g, v)` / `edges(g)` iteration order deterministic in the -# input order. +# input order. An empty `tensors` (`Dict{V, ITensor}()`, etc.) yields +# an empty network — there is no separate empty-arg constructor. function ITensorNetwork{V}(tensors) where {V} - tn = ITensorNetwork{V}() + tn = ITensorNetwork{V}(NamedGraph{V}(), Dictionary{V, ITensor}(), Dict{Index, Set{V}}()) for v in keys(tensors) set_vertex_data!(tn, tensors[v], v) end diff --git a/src/treetensornetworks/treetensornetwork.jl b/src/treetensornetworks/treetensornetwork.jl index ae600936..7d6db422 100644 --- a/src/treetensornetworks/treetensornetwork.jl +++ b/src/treetensornetworks/treetensornetwork.jl @@ -27,15 +27,17 @@ struct TreeTensorNetwork{V} <: AbstractTreeTensorNetwork{V} end """ - TreeTensorNetwork(tn::ITensorNetwork; ortho_region=vertices(tn)) -> TreeTensorNetwork + TreeTensorNetwork(tensors; ortho_region=nothing) -> TreeTensorNetwork -Construct a `TreeTensorNetwork` from an `ITensorNetwork` with tree graph structure. +Construct a `TreeTensorNetwork` from any collection of tensors accepted by +`ITensorNetwork` (e.g. a `Dict`, `Dictionary`, a `Vector{ITensor}`, or another +`AbstractITensorNetwork`). Edges are inferred from shared `Index`es; the +underlying graph must be a tree. 
-The `ortho_region` keyword specifies which vertices currently form the orthogonality center. -By default all vertices are included, meaning no particular gauge is assumed. To enforce an -actual orthogonal gauge, call [`orthogonalize`](@ref) afterward. - -Throws an error if the underlying graph of `tn` is not a tree. +`ortho_region` specifies which vertices currently form the orthogonality +center. The default `nothing` includes all vertices, meaning no particular +gauge is assumed. To enforce an actual orthogonal gauge, call +[`orthogonalize`](@ref) afterward. # Example @@ -54,23 +56,16 @@ julia> ttn = TreeTensorNetwork(itn; ortho_region = [first(vertices(itn))]); See also: [`ITensorNetwork`](@ref), [`orthogonalize`](@ref). """ -function TreeTensorNetwork(tn::ITensorNetwork{V}; ortho_region = vertices(tn)) where {V} - @assert is_tree(tn) +function TreeTensorNetwork(tensors; ortho_region = nothing) + itn = ITensorNetwork(tensors) + @assert is_tree(itn) + V = vertextype(itn) + region = isnothing(ortho_region) ? vertices(itn) : ortho_region return TreeTensorNetwork{V}( - tn.graph, tn.vertex_data, tn.ind_to_vertices, Indices{V}(ortho_region) + itn.graph, itn.vertex_data, itn.ind_to_vertices, Indices{V}(region) ) end -function TreeTensorNetwork{V}(tn::ITensorNetwork) where {V} - return TreeTensorNetwork(ITensorNetwork{V}(tn)) -end - -# Build a `TreeTensorNetwork` directly from a tensor collection (anything -# accepted by `ITensorNetwork`), saving the caller a wrapping step. -function TreeTensorNetwork(tensors; kwargs...) - return TreeTensorNetwork(ITensorNetwork(tensors); kwargs...) -end - const TTN = TreeTensorNetwork # Field access From c634c015fd6c92cde5c3114dc67bad379958913b Mon Sep 17 00:00:00 2001 From: Matthew Fishman Date: Tue, 12 May 2026 14:27:45 -0400 Subject: [PATCH 06/10] Add empty ITensorNetwork / TreeTensorNetwork constructors MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Mirror the `Vector()` / `Dictionary()` convention by providing parameterless and `{V}`-only constructors that yield an empty network: - `ITensorNetwork()` / `ITensorNetwork{V}()` - `TreeTensorNetwork()` / `TreeTensorNetwork{V}()` The default vertex type is `Any`. The body of `ITensorNetwork{V}(tensors)` now seeds from `ITensorNetwork{V}()` and writes each tensor through `setindex!`, removing the explicit field call and the prior `set_vertex_data!` call at this layer. Also drop the redundant `ITensorNetwork(::TreeTensorNetwork)` and the `ITensorNetwork(::AbstractTTN)` "not implemented" placeholder — the generic `ITensorNetwork(tensors)` now handles a `TreeTensorNetwork` via `keytype` and `setindex!`. Update the docs to drop the dangling references. Co-Authored-By: Claude Opus 4.7 (1M context) --- docs/src/experimental_methods.md | 8 +----- docs/src/tree_tensor_networks.md | 1 - src/itensornetwork.jl | 25 ++++++++++++------- .../abstracttreetensornetwork.jl | 2 -- src/treetensornetworks/treetensornetwork.jl | 19 +++++++------- 5 files changed, 26 insertions(+), 29 deletions(-) diff --git a/docs/src/experimental_methods.md b/docs/src/experimental_methods.md index 8f83db88..08045a23 100644 --- a/docs/src/experimental_methods.md +++ b/docs/src/experimental_methods.md @@ -37,9 +37,8 @@ Methods which still need to be discussed, modified, or deprecated. 
#### AbstractTreeTensorNetwork Type -* Required-to-implement abstract interface — `TreeTensorNetwork` provides all three (`treetensornetworks/abstracttreetensornetwork.jl`): +* Required-to-implement abstract interface — `TreeTensorNetwork` provides both (`treetensornetworks/abstracttreetensornetwork.jl`): ```julia - ITensorNetwork(tn::AbstractTTN) ortho_region(tn::AbstractTTN) set_ortho_region(tn::AbstractTTN, new_region) ``` @@ -134,11 +133,6 @@ Methods which still need to be discussed, modified, or deprecated. #### TreeTensorNetwork Type -* Get the underlying `ITensorNetwork` of a `TTN` (drops orthogonality metadata) (`treetensornetworks/treetensornetwork.jl`): - ```julia - ITensorNetwork(tn::TTN) - ``` - * Get the current orthogonality region — the set of vertices forming the gauge center (`treetensornetworks/treetensornetwork.jl`): ```julia ortho_region(tn::TTN) diff --git a/docs/src/tree_tensor_networks.md b/docs/src/tree_tensor_networks.md index 9f2411cf..eec2dcb3 100644 --- a/docs/src/tree_tensor_networks.md +++ b/docs/src/tree_tensor_networks.md @@ -65,7 +65,6 @@ itn_again = ITensorNetwork(psi) # TTN → ITensorNetwork ```@docs; canonical=false ITensorNetworks.TreeTensorNetwork -ITensorNetworks.ITensorNetwork(::ITensorNetworks.TreeTensorNetwork) ``` ## Orthogonal Gauge diff --git a/src/itensornetwork.jl b/src/itensornetwork.jl index 31d44859..019f814c 100644 --- a/src/itensornetwork.jl +++ b/src/itensornetwork.jl @@ -69,17 +69,24 @@ end # Constructors # -# Construct by feeding `tensors` through `set_vertex_data!` one vertex -# at a time — this centralizes the reverse-map registration, edge -# inference, and hypergraph check in a single place (the `setindex!` -# code path). Walking `keys(tensors)` in order makes the resulting -# `neighbors(g, v)` / `edges(g)` iteration order deterministic in the -# input order. An empty `tensors` (`Dict{V, ITensor}()`, etc.) yields -# an empty network — there is no separate empty-arg constructor. +# Empty network with no vertices. Mirrors the `Vector()` / `Dictionary()` +# convention; vertex type defaults to `Any` when unspecified. +function ITensorNetwork{V}() where {V} + return ITensorNetwork{V}( + NamedGraph{V}(), Dictionary{V, ITensor}(), Dict{Index, Set{V}}() + ) +end +ITensorNetwork() = ITensorNetwork{Any}() + +# Construct by writing each tensor into a freshly empty network via +# `setindex!`. The `setindex!` code path centralizes reverse-map +# registration, edge inference, and the hypergraph check, and walking +# `keys(tensors)` in order makes the resulting `neighbors(g, v)` / +# `edges(g)` iteration order deterministic in the input order. 
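+# Seeding from `ITensorNetwork{V}()` means an empty `tensors` collection
+# falls straight through the loop and returns the empty network.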
function ITensorNetwork{V}(tensors) where {V} - tn = ITensorNetwork{V}(NamedGraph{V}(), Dictionary{V, ITensor}(), Dict{Index, Set{V}}()) + tn = ITensorNetwork{V}() for v in keys(tensors) - set_vertex_data!(tn, tensors[v], v) + tn[v] = tensors[v] end return tn end diff --git a/src/treetensornetworks/abstracttreetensornetwork.jl b/src/treetensornetworks/abstracttreetensornetwork.jl index af646925..755724e8 100644 --- a/src/treetensornetworks/abstracttreetensornetwork.jl +++ b/src/treetensornetworks/abstracttreetensornetwork.jl @@ -16,8 +16,6 @@ function DataGraphs.underlying_graph_type(::Type{<:AbstractTTN{V}}) where {V} return NamedGraph{V} end -ITensorNetwork(tn::AbstractTTN) = error("Not implemented") - # # Field access # diff --git a/src/treetensornetworks/treetensornetwork.jl b/src/treetensornetworks/treetensornetwork.jl index 7d6db422..6ff5e058 100644 --- a/src/treetensornetworks/treetensornetwork.jl +++ b/src/treetensornetworks/treetensornetwork.jl @@ -26,6 +26,15 @@ struct TreeTensorNetwork{V} <: AbstractTreeTensorNetwork{V} ortho_region::Indices{V} end +# Empty TTN with no vertices. The is-a-tree invariant holds trivially. +function TreeTensorNetwork{V}() where {V} + itn = ITensorNetwork{V}() + return TreeTensorNetwork{V}( + itn.graph, itn.vertex_data, itn.ind_to_vertices, Indices{V}() + ) +end +TreeTensorNetwork() = TreeTensorNetwork{Any}() + """ TreeTensorNetwork(tensors; ortho_region=nothing) -> TreeTensorNetwork @@ -69,16 +78,6 @@ end const TTN = TreeTensorNetwork # Field access -""" - ITensorNetwork(tn::TreeTensorNetwork) -> ITensorNetwork - -Convert a `TreeTensorNetwork` to a plain `ITensorNetwork`, discarding orthogonality -metadata. The returned network shares the same underlying tensor data. - -See also: [`TreeTensorNetwork`](@ref). -""" -ITensorNetwork(tn::TTN) = ITensorNetwork(map(copy, vertex_data(tn))) - """ ortho_region(tn::TreeTensorNetwork) -> Indices From f29d8cdf784d4ff518912f2ddce6160927ee7e6e Mon Sep 17 00:00:00 2001 From: Matthew Fishman Date: Tue, 12 May 2026 14:40:15 -0400 Subject: [PATCH 07/10] Simplify TreeTensorNetwork(ITensorNetwork{Any}(tensors)) test pattern MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit `Dict{Any, ITensor}` has `keytype = Any`, so `TreeTensorNetwork(tensors)` already produces a TTN with `V = Any` — the explicit `ITensorNetwork{Any}(...)` wrap is redundant. Co-Authored-By: Claude Opus 4.7 (1M context) --- test/test_ttn_position.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_ttn_position.jl b/test/test_ttn_position.jl index 10a7ee45..a5baf934 100644 --- a/test/test_ttn_position.jl +++ b/test/test_ttn_position.jl @@ -51,7 +51,7 @@ end # `ITensorNetwork` has a single edge (and is therefore a valid tree). link = Index(1, "Link") tensors = Dict{Any, ITensor}(v => ITensor(link) for v in vertices(named_path_graph(2))) - operator = TreeTensorNetwork(ITensorNetwork{Any}(tensors)) + operator = TreeTensorNetwork(tensors) environments = Dictionary{NamedEdge{Any}, ITensor}() @test ProjTTN(pos, operator, environments) isa ProjTTN{Any, Indices{Any}} end From 31498fe413a7825a118379d21679b97da231581f Mon Sep 17 00:00:00 2001 From: Matthew Fishman Date: Tue, 12 May 2026 14:44:11 -0400 Subject: [PATCH 08/10] Inline tensor collection in TreeTensorNetwork docstring example MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The intermediate `ITensorNetwork` wrap was redundant — `TreeTensorNetwork` accepts a tensor collection directly. 
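
For illustration, a minimal sketch (not part of the diff below; a `Vector`
input gets integer vertex labels via `keys`):

    using ITensorNetworks: TreeTensorNetwork
    using ITensors: ITensor, Index
    i, j = Index(2, "i"), Index(2, "j")
    # The shared `j` becomes the single edge, so the two-vertex graph is a tree.
    ttn = TreeTensorNetwork([ITensor(i, j), ITensor(j)])
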
Use a literal vertex label (`[1]`) in `ortho_region` instead of looking it
up via `vertices(itn)`.

Co-Authored-By: Claude Opus 4.7 (1M context)
---
 src/treetensornetworks/treetensornetwork.jl | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/src/treetensornetworks/treetensornetwork.jl b/src/treetensornetworks/treetensornetwork.jl
index 6ff5e058..d4234cd0 100644
--- a/src/treetensornetworks/treetensornetwork.jl
+++ b/src/treetensornetworks/treetensornetwork.jl
@@ -53,13 +53,9 @@ gauge is assumed. To enforce an actual orthogonal gauge, call
 ```jldoctest
 julia> using ITensors: Index, ITensor
 
-julia> using Graphs: vertices
-
 julia> i, j, k = Index(2, "i"), Index(2, "j"), Index(2, "k");
 
-julia> itn = ITensorNetwork([ITensor(i, j), ITensor(j, k)]);
-
-julia> ttn = TreeTensorNetwork(itn; ortho_region = [first(vertices(itn))]);
+julia> ttn = TreeTensorNetwork([ITensor(i, j), ITensor(j, k)]; ortho_region = [1]);
 ```

From 0a47475daf579cbfa5e36e82789f5c018a83677d Mon Sep 17 00:00:00 2001
From: Matthew Fishman
Date: Tue, 12 May 2026 14:48:37 -0400
Subject: [PATCH 09/10] Drop TreeTensorNetwork ortho_region kwarg and redundant
 set_vertices_data!
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

The `ortho_region` keyword on `TreeTensorNetwork(tensors)` was not used by
any caller and let users assert a gauge without performing the associated
transformation. Removing it nudges callers toward `orthogonalize` (which
does the QR sweep) and `set_ortho_region` (the low-level metadata update
used internally).

Also drop our `set_vertices_data!` override on `AbstractITensorNetwork` —
`DataGraphs` already provides exactly the same loop as a default.

Co-Authored-By: Claude Opus 4.7 (1M context)
---
 src/abstractitensornetwork.jl               |  7 -------
 src/treetensornetworks/treetensornetwork.jl | 16 +++++++---------
 2 files changed, 7 insertions(+), 16 deletions(-)

diff --git a/src/abstractitensornetwork.jl b/src/abstractitensornetwork.jl
index d478b29c..e6b48566 100644
--- a/src/abstractitensornetwork.jl
+++ b/src/abstractitensornetwork.jl
@@ -75,13 +75,6 @@ DataGraphs.is_edge_assigned(::AbstractITensorNetwork, _) = false
 
 DataGraphs.get_vertex_data(tn::AbstractITensorNetwork, v) = vertex_data(tn)[v]
 
-function DataGraphs.set_vertices_data!(tn::AbstractITensorNetwork, values, vertices)
-    for v in vertices
-        set_vertex_data!(tn, values[v], v)
-    end
-    return tn
-end
-
 function NamedGraphs.vertex_positions(tn::AbstractITensorNetwork)
     return NamedGraphs.vertex_positions(underlying_graph(tn))
 end
diff --git a/src/treetensornetworks/treetensornetwork.jl b/src/treetensornetworks/treetensornetwork.jl
index d4234cd0..3eccf707 100644
--- a/src/treetensornetworks/treetensornetwork.jl
+++ b/src/treetensornetworks/treetensornetwork.jl
@@ -36,17 +36,16 @@ TreeTensorNetwork() = TreeTensorNetwork{Any}()
 
 """
-    TreeTensorNetwork(tensors; ortho_region=nothing) -> TreeTensorNetwork
+    TreeTensorNetwork(tensors) -> TreeTensorNetwork
 
 Construct a `TreeTensorNetwork` from any collection of tensors accepted by
 `ITensorNetwork` (e.g. a `Dict`, `Dictionary`, a `Vector{ITensor}`, or another
 `AbstractITensorNetwork`). Edges are inferred from shared `Index`es; the
 underlying graph must be a tree.
 
-`ortho_region` specifies which vertices currently form the orthogonality
-center. The default `nothing` includes all vertices, meaning no particular
-gauge is assumed. To enforce an actual orthogonal gauge, call
-[`orthogonalize`](@ref) afterward.
+The result starts with `ortho_region == vertices(tn)` — i.e. no particular
+gauge is assumed. Use [`orthogonalize`](@ref) to bring the network into a
+canonical gauge centered at a chosen vertex or region.
 
 # Example
@@ -55,19 +54,18 @@ julia> using ITensors: Index, ITensor
 
 julia> i, j, k = Index(2, "i"), Index(2, "j"), Index(2, "k");
 
-julia> ttn = TreeTensorNetwork([ITensor(i, j), ITensor(j, k)]; ortho_region = [1]);
+julia> ttn = TreeTensorNetwork([ITensor(i, j), ITensor(j, k)]);
 ```
 
 See also: [`ITensorNetwork`](@ref), [`orthogonalize`](@ref).
 """
-function TreeTensorNetwork(tensors; ortho_region = nothing)
+function TreeTensorNetwork(tensors)
     itn = ITensorNetwork(tensors)
     @assert is_tree(itn)
     V = vertextype(itn)
-    region = isnothing(ortho_region) ? vertices(itn) : ortho_region
     return TreeTensorNetwork{V}(
-        itn.graph, itn.vertex_data, itn.ind_to_vertices, Indices{V}(region)
+        itn.graph, itn.vertex_data, itn.ind_to_vertices, Indices{V}(vertices(itn))
     )
 end
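A sketch of the intended flow after this change (assuming, as the docstring's
cross-reference suggests, that `orthogonalize(ttn, v)` gauges the tree toward
vertex `v`; the vertex labels are illustrative):

```julia
using ITensors: Index, random_itensor
using ITensorNetworks: TreeTensorNetwork, orthogonalize, ortho_region

i, j, k = Index(2, "i"), Index(2, "j"), Index(2, "k")
ttn = TreeTensorNetwork([random_itensor(i, j), random_itensor(j, k)])

ortho_region(ttn)            # every vertex: no gauge is assumed at construction
ttn = orthogonalize(ttn, 1)  # the QR sweep that actually enforces the gauge
ortho_region(ttn)            # now just the chosen center, vertex 1
```
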
From 27fa3b24e5656d482a72e3bb9fa24e026a00b61a Mon Sep 17 00:00:00 2001
From: Matthew Fishman
Date: Tue, 12 May 2026 15:09:25 -0400
Subject: [PATCH 10/10] Fix docs build: drop ITensorNetwork(tensors, graph) calls
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

The two-argument `ITensorNetwork(tensors, graph)` ctor was removed earlier
in this PR but lingered in three docs/src/ examples and one prose reference.
Migrate them to the single-arg form, which infers edges from shared
`Index`es.

Also fix `truncate(tn::AbstractTTN, edge)` to call the generic
`TreeTensorNetwork(...)` ctor instead of the now-removed
`TreeTensorNetwork{V}(::ITensorNetwork)` specialization, and simplify the
empty `TreeTensorNetwork{V}()` to build the all-fields struct directly
instead of indirecting through `ITensorNetwork{V}()`.

Co-Authored-By: Claude Opus 4.7 (1M context)
---
 docs/src/interface_methods.md                       | 8 --------
 docs/src/itensor_networks.md                        | 2 +-
 docs/src/solvers.md                                 | 2 +-
 docs/src/tree_tensor_networks.md                    | 2 +-
 src/treetensornetworks/abstracttreetensornetwork.jl | 2 +-
 src/treetensornetworks/treetensornetwork.jl         | 6 ++++--
 6 files changed, 8 insertions(+), 14 deletions(-)

diff --git a/docs/src/interface_methods.md b/docs/src/interface_methods.md
index 88485d80..3b4a2658 100644
--- a/docs/src/interface_methods.md
+++ b/docs/src/interface_methods.md
@@ -15,14 +15,6 @@ These ITensorNetwork constructor interfaces are foundational to other constructo
   ITensorNetwork{V}(tensors)
   ```
 
-* From a collection of `ITensor`s placed at the vertices of a given `NamedGraph`. No
-  edge inference; the graph's edges are used as-is.
-  ```julia
-  ITensorNetwork(tensors, graph::NamedGraph)
-  ITensorNetwork{V}(tensors, graph::NamedGraph)
-  ```
-
-
 ## Analyzing ITensorNetworks
 
diff --git a/docs/src/itensor_networks.md b/docs/src/itensor_networks.md
index d14dc7fc..e2f4f86e 100644
--- a/docs/src/itensor_networks.md
+++ b/docs/src/itensor_networks.md
@@ -56,7 +56,7 @@ tensors = Dict(map(collect(vertices(g))) do v
   return v => random_itensor(site_v..., link_v...)
 end)
 
-ψ = ITensorNetwork(tensors, g)
+ψ = ITensorNetwork(tensors)
 ```
 
 Higher-level construction routines (random networks, product states, OpSum-derived
diff --git a/docs/src/solvers.md b/docs/src/solvers.md
index d8bf4976..7769931f 100644
--- a/docs/src/solvers.md
+++ b/docs/src/solvers.md
@@ -30,7 +30,7 @@ function random_state(g, s; link_space)
         v => random_itensor(only(s[v]), (l[e] for e in incident_edges(g, v))...)
         for v in vertices(g)
     )
-    return ITensorNetwork(ts, g)
+    return ITensorNetwork(ts)
 end
 
 # Build a Heisenberg Hamiltonian on a comb tree
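The same migration in isolation, sketched under the docs pages' setup
(`tensors` and `g` as built in the surrounding examples; the `has_edge` check
is an illustrative assumption, not part of the diff):

```julia
using Graphs: edges, has_edge

# Before (two-argument ctor, removed in this PR):
# ψ = ITensorNetwork(tensors, g)
ψ = ITensorNetwork(tensors)

# Edge inference should reproduce `g`, since each link index was created for
# one edge of `g` and is shared by exactly the two tensors at its endpoints.
@assert all(e -> has_edge(ψ, e), edges(g))
```
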
diff --git a/docs/src/tree_tensor_networks.md b/docs/src/tree_tensor_networks.md
index eec2dcb3..67ee74de 100644
--- a/docs/src/tree_tensor_networks.md
+++ b/docs/src/tree_tensor_networks.md
@@ -53,7 +53,7 @@ tensors = Dict(map(collect(vertices(g))) do v
   link_v = [haskey(links, e) ? links[e] : links[reverse(e)] for e in incident_edges(g, v)]
   return v => random_itensor(site_v..., link_v...)
 end)
-itn = ITensorNetwork(tensors, g)
+itn = ITensorNetwork(tensors)
 psi = TreeTensorNetwork(itn)
 ```
diff --git a/src/treetensornetworks/abstracttreetensornetwork.jl b/src/treetensornetworks/abstracttreetensornetwork.jl
index 755724e8..f1fb6bd2 100644
--- a/src/treetensornetworks/abstracttreetensornetwork.jl
+++ b/src/treetensornetworks/abstracttreetensornetwork.jl
@@ -89,7 +89,7 @@ end
 
 # For ambiguity error
 function Base.truncate(tn::AbstractTTN, edge::AbstractEdge; kwargs...)
-    return typeof(tn)(truncate(ITensorNetwork(tn), edge; kwargs...))
+    return TreeTensorNetwork(truncate(ITensorNetwork(tn), edge; kwargs...))
 end
diff --git a/src/treetensornetworks/treetensornetwork.jl b/src/treetensornetworks/treetensornetwork.jl
index 3eccf707..19644bb3 100644
--- a/src/treetensornetworks/treetensornetwork.jl
+++ b/src/treetensornetworks/treetensornetwork.jl
@@ -28,9 +28,11 @@ end
 
 # Empty TTN with no vertices. The is-a-tree invariant holds trivially.
 function TreeTensorNetwork{V}() where {V}
-    itn = ITensorNetwork{V}()
     return TreeTensorNetwork{V}(
-        itn.graph, itn.vertex_data, itn.ind_to_vertices, Indices{V}()
+        NamedGraph{V}(),
+        Dictionary{V, ITensor}(),
+        Dict{Index, Set{V}}(),
+        Indices{V}()
     )
 end
 TreeTensorNetwork() = TreeTensorNetwork{Any}()
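For completeness, a hypothetical REPL check of the empty constructors as they
stand after this patch (not part of the diff):

```julia
using Graphs: ne, nv
using ITensorNetworks: ITensorNetwork, TreeTensorNetwork

tn = ITensorNetwork{String}()  # empty network with `String` vertex labels
(nv(tn), ne(tn)) == (0, 0)     # no vertices, no edges, empty reverse map

ttn = TreeTensorNetwork()      # vertex type defaults to `Any`
nv(ttn) == 0                   # the is-a-tree invariant holds trivially
```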