From 7e78f7dd7df43fdad010c69f0843db96ea36e8ba Mon Sep 17 00:00:00 2001 From: mtfishman <7855256+mtfishman@users.noreply.github.com> Date: Wed, 1 Oct 2025 00:49:56 +0000 Subject: [PATCH 1/3] Format .jl files (Runic) --- Project.toml | 2 +- docs/make.jl | 22 +-- docs/make_index.jl | 16 +-- docs/make_readme.jl | 16 +-- examples/README.jl | 2 +- src/abstracttensornetwork.jl | 258 +++++++++++++++++------------------ src/tensornetwork.jl | 70 +++++----- test/runtests.jl | 80 +++++------ test/test_aqua.jl | 2 +- test/test_basics.jl | 102 +++++++------- 10 files changed, 286 insertions(+), 284 deletions(-) diff --git a/Project.toml b/Project.toml index ce4f361..e06d0fd 100644 --- a/Project.toml +++ b/Project.toml @@ -1,7 +1,7 @@ name = "ITensorNetworksNext" uuid = "302f2e75-49f0-4526-aef7-d8ba550cb06c" authors = ["ITensor developers and contributors"] -version = "0.1.2" +version = "0.1.3" [deps] Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e" diff --git a/docs/make.jl b/docs/make.jl index 5a50658..1b29518 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -2,23 +2,23 @@ using ITensorNetworksNext: ITensorNetworksNext using Documenter: Documenter, DocMeta, deploydocs, makedocs DocMeta.setdocmeta!( - ITensorNetworksNext, :DocTestSetup, :(using ITensorNetworksNext); recursive=true + ITensorNetworksNext, :DocTestSetup, :(using ITensorNetworksNext); recursive = true ) include("make_index.jl") makedocs(; - modules=[ITensorNetworksNext], - authors="ITensor developers and contributors", - sitename="ITensorNetworksNext.jl", - format=Documenter.HTML(; - canonical="https://itensor.github.io/ITensorNetworksNext.jl", - edit_link="main", - assets=["assets/favicon.ico", "assets/extras.css"], - ), - pages=["Home" => "index.md", "Reference" => "reference.md"], + modules = [ITensorNetworksNext], + authors = "ITensor developers and contributors", + sitename = "ITensorNetworksNext.jl", + format = Documenter.HTML(; + canonical = "https://itensor.github.io/ITensorNetworksNext.jl", + edit_link = "main", + assets = ["assets/favicon.ico", "assets/extras.css"], + ), + pages = ["Home" => "index.md", "Reference" => "reference.md"], ) deploydocs(; - repo="github.com/ITensor/ITensorNetworksNext.jl", devbranch="main", push_preview=true + repo = "github.com/ITensor/ITensorNetworksNext.jl", devbranch = "main", push_preview = true ) diff --git a/docs/make_index.jl b/docs/make_index.jl index 44fa493..038bc87 100644 --- a/docs/make_index.jl +++ b/docs/make_index.jl @@ -2,20 +2,20 @@ using Literate: Literate using ITensorNetworksNext: ITensorNetworksNext function ccq_logo(content) - include_ccq_logo = """ + include_ccq_logo = """ ```@raw html Flatiron Center for Computational Quantum Physics logo. Flatiron Center for Computational Quantum Physics logo. 
``` """ - content = replace(content, "{CCQ_LOGO}" => include_ccq_logo) - return content + content = replace(content, "{CCQ_LOGO}" => include_ccq_logo) + return content end Literate.markdown( - joinpath(pkgdir(ITensorNetworksNext), "examples", "README.jl"), - joinpath(pkgdir(ITensorNetworksNext), "docs", "src"); - flavor=Literate.DocumenterFlavor(), - name="index", - postprocess=ccq_logo, + joinpath(pkgdir(ITensorNetworksNext), "examples", "README.jl"), + joinpath(pkgdir(ITensorNetworksNext), "docs", "src"); + flavor = Literate.DocumenterFlavor(), + name = "index", + postprocess = ccq_logo, ) diff --git a/docs/make_readme.jl b/docs/make_readme.jl index 960d376..088dc58 100644 --- a/docs/make_readme.jl +++ b/docs/make_readme.jl @@ -2,20 +2,20 @@ using Literate: Literate using ITensorNetworksNext: ITensorNetworksNext function ccq_logo(content) - include_ccq_logo = """ + include_ccq_logo = """ Flatiron Center for Computational Quantum Physics logo. """ - content = replace(content, "{CCQ_LOGO}" => include_ccq_logo) - return content + content = replace(content, "{CCQ_LOGO}" => include_ccq_logo) + return content end Literate.markdown( - joinpath(pkgdir(ITensorNetworksNext), "examples", "README.jl"), - joinpath(pkgdir(ITensorNetworksNext)); - flavor=Literate.CommonMarkFlavor(), - name="README", - postprocess=ccq_logo, + joinpath(pkgdir(ITensorNetworksNext), "examples", "README.jl"), + joinpath(pkgdir(ITensorNetworksNext)); + flavor = Literate.CommonMarkFlavor(), + name = "README", + postprocess = ccq_logo, ) diff --git a/examples/README.jl b/examples/README.jl index 4aaa79b..e3ee854 100644 --- a/examples/README.jl +++ b/examples/README.jl @@ -1,5 +1,5 @@ # # ITensorNetworksNext.jl -# +# # [![Stable](https://img.shields.io/badge/docs-stable-blue.svg)](https://itensor.github.io/ITensorNetworksNext.jl/stable/) # [![Dev](https://img.shields.io/badge/docs-dev-blue.svg)](https://itensor.github.io/ITensorNetworksNext.jl/dev/) # [![Build Status](https://github.com/ITensor/ITensorNetworksNext.jl/actions/workflows/Tests.yml/badge.svg?branch=main)](https://github.com/ITensor/ITensorNetworksNext.jl/actions/workflows/Tests.yml?query=branch%3Amain) diff --git a/src/abstracttensornetwork.jl b/src/abstracttensornetwork.jl index e666e93..73bf9d6 100644 --- a/src/abstracttensornetwork.jl +++ b/src/abstracttensornetwork.jl @@ -1,44 +1,44 @@ using Adapt: Adapt, adapt, adapt_structure using BackendSelection: @Algorithm_str, Algorithm using DataGraphs: - DataGraphs, - AbstractDataGraph, - edge_data, - underlying_graph, - underlying_graph_type, - vertex_data + DataGraphs, + AbstractDataGraph, + edge_data, + underlying_graph, + underlying_graph_type, + vertex_data using Dictionaries: Dictionary using Graphs: - Graphs, - AbstractEdge, - AbstractGraph, - Graph, - add_edge!, - add_vertex!, - bfs_tree, - center, - dst, - edges, - edgetype, - ne, - neighbors, - nv, - rem_edge!, - src, - vertices + Graphs, + AbstractEdge, + AbstractGraph, + Graph, + add_edge!, + add_vertex!, + bfs_tree, + center, + dst, + edges, + edgetype, + ne, + neighbors, + nv, + rem_edge!, + src, + vertices using LinearAlgebra: LinearAlgebra, factorize using MacroTools: @capture using NamedDimsArrays: dimnames using NamedGraphs: NamedGraphs, NamedGraph, not_implemented, steiner_tree using NamedGraphs.GraphsExtensions: - ⊔, directed_graph, incident_edges, rem_edges!, rename_vertices, vertextype + ⊔, directed_graph, incident_edges, rem_edges!, rename_vertices, vertextype using SplitApplyCombine: flatten -abstract type AbstractTensorNetwork{V,VD} <: 
AbstractDataGraph{V,VD,Nothing} end +abstract type AbstractTensorNetwork{V, VD} <: AbstractDataGraph{V, VD, Nothing} end function Graphs.rem_edge!(tn::AbstractTensorNetwork, e) - rem_edge!(underlying_graph(tn), e) - return tn + rem_edge!(underlying_graph(tn), e) + return tn end # TODO: Define a generic fallback for `AbstractDataGraph`? @@ -46,14 +46,14 @@ DataGraphs.edge_data_eltype(::Type{<:AbstractTensorNetwork}) = error("No edge da # Graphs.jl overloads function Graphs.weights(graph::AbstractTensorNetwork) - V = vertextype(graph) - es = Tuple.(edges(graph)) - ws = Dictionary{Tuple{V,V},Float64}(es, undef) - for e in edges(graph) - w = log2(dim(commoninds(graph, e))) - ws[(src(e), dst(e))] = w - end - return ws + V = vertextype(graph) + es = Tuple.(edges(graph)) + ws = Dictionary{Tuple{V, V}, Float64}(es, undef) + for e in edges(graph) + w = log2(dim(commoninds(graph, e))) + ws[(src(e), dst(e))] = w + end + return ws end # Copy @@ -71,85 +71,85 @@ Graphs.is_directed(::Type{<:AbstractTensorNetwork}) = false # Derived interface, may need to be overloaded function DataGraphs.underlying_graph_type(G::Type{<:AbstractTensorNetwork}) - return underlying_graph_type(data_graph_type(G)) + return underlying_graph_type(data_graph_type(G)) end # AbstractDataGraphs overloads function DataGraphs.vertex_data(graph::AbstractTensorNetwork, args...) - return error("Not implemented") + return error("Not implemented") end function DataGraphs.edge_data(graph::AbstractTensorNetwork, args...) - return error("Not implemented") + return error("Not implemented") end DataGraphs.underlying_graph(tn::AbstractTensorNetwork) = error("Not implemented") function NamedGraphs.vertex_positions(tn::AbstractTensorNetwork) - return NamedGraphs.vertex_positions(underlying_graph(tn)) + return NamedGraphs.vertex_positions(underlying_graph(tn)) end function NamedGraphs.ordered_vertices(tn::AbstractTensorNetwork) - return NamedGraphs.ordered_vertices(underlying_graph(tn)) + return NamedGraphs.ordered_vertices(underlying_graph(tn)) end function Adapt.adapt_structure(to, tn::AbstractTensorNetwork) - # TODO: Define and use: - # - # @preserve_graph map_vertex_data(adapt(to), tn) - # - # or just: - # - # @preserve_graph map(adapt(to), tn) - return map_vertex_data_preserve_graph(adapt(to), tn) + # TODO: Define and use: + # + # @preserve_graph map_vertex_data(adapt(to), tn) + # + # or just: + # + # @preserve_graph map(adapt(to), tn) + return map_vertex_data_preserve_graph(adapt(to), tn) end function linkinds(tn::AbstractTensorNetwork, edge::Pair) - return linkinds(tn, edgetype(tn)(edge)) + return linkinds(tn, edgetype(tn)(edge)) end function linkinds(tn::AbstractTensorNetwork, edge::AbstractEdge) - return nameddimsindices(tn[src(edge)]) ∩ nameddimsindices(tn[dst(edge)]) + return nameddimsindices(tn[src(edge)]) ∩ nameddimsindices(tn[dst(edge)]) end function linkaxes(tn::AbstractTensorNetwork, edge::Pair) - return linkaxes(tn, edgetype(tn)(edge)) + return linkaxes(tn, edgetype(tn)(edge)) end function linkaxes(tn::AbstractTensorNetwork, edge::AbstractEdge) - return axes(tn[src(edge)]) ∩ axes(tn[dst(edge)]) + return axes(tn[src(edge)]) ∩ axes(tn[dst(edge)]) end function linknames(tn::AbstractTensorNetwork, edge::Pair) - return linknames(tn, edgetype(tn)(edge)) + return linknames(tn, edgetype(tn)(edge)) end function linknames(tn::AbstractTensorNetwork, edge::AbstractEdge) - return dimnames(tn[src(edge)]) ∩ dimnames(tn[dst(edge)]) + return dimnames(tn[src(edge)]) ∩ dimnames(tn[dst(edge)]) end function siteinds(tn::AbstractTensorNetwork, v) - s 
= nameddimsindices(tn[v]) - for v′ in neighbors(tn, v) - s = setdiff(s, nameddimsindices(tn[v′])) - end - return s + s = nameddimsindices(tn[v]) + for v′ in neighbors(tn, v) + s = setdiff(s, nameddimsindices(tn[v′])) + end + return s end function siteaxes(tn::AbstractTensorNetwork, edge::AbstractEdge) - s = axes(tn[src(edge)]) ∩ axes(tn[dst(edge)]) - for v′ in neighbors(tn, v) - s = setdiff(s, axes(tn[v′])) - end - return s + s = axes(tn[src(edge)]) ∩ axes(tn[dst(edge)]) + for v′ in neighbors(tn, v) + s = setdiff(s, axes(tn[v′])) + end + return s end function sitenames(tn::AbstractTensorNetwork, edge::AbstractEdge) - s = dimnames(tn[src(edge)]) ∩ dimnames(tn[dst(edge)]) - for v′ in neighbors(tn, v) - s = setdiff(s, dimnames(tn[v′])) - end - return s + s = dimnames(tn[src(edge)]) ∩ dimnames(tn[dst(edge)]) + for v′ in neighbors(tn, v) + s = setdiff(s, dimnames(tn[v′])) + end + return s end function setindex_preserve_graph!(tn::AbstractTensorNetwork, value, vertex) - vertex_data(tn)[vertex] = value - return tn + vertex_data(tn)[vertex] = value + return tn end # TODO: Move to `BaseExtensions` module. function is_setindex!_expr(expr::Expr) - return is_assignment_expr(expr) && is_getindex_expr(first(expr.args)) + return is_assignment_expr(expr) && is_getindex_expr(first(expr.args)) end is_setindex!_expr(x) = false is_getindex_expr(expr::Expr) = (expr.head === :ref) @@ -162,118 +162,118 @@ is_assignment_expr(expr) = false # preserve_graph_function(::typeof(map_vertex_data)) = map_vertex_data_preserve_graph # Also allow annotating codeblocks like `@views`. macro preserve_graph(expr) - if !is_setindex!_expr(expr) - error( - "preserve_graph must be used with setindex! syntax (as @preserve_graph a[i,j,...] = value)", - ) - end - @capture(expr, array_[indices__] = value_) - return :(setindex_preserve_graph!($(esc(array)), $(esc(value)), $(esc.(indices)...))) + if !is_setindex!_expr(expr) + error( + "preserve_graph must be used with setindex! syntax (as @preserve_graph a[i,j,...] = value)", + ) + end + @capture(expr, array_[indices__] = value_) + return :(setindex_preserve_graph!($(esc(array)), $(esc(value)), $(esc.(indices)...))) end # Update the graph of the TensorNetwork `tn` to include # edges that should exist based on the tensor connectivity. function add_missing_edges!(tn::AbstractTensorNetwork) - foreach(v -> add_missing_edges!(tn, v), vertices(tn)) - return tn + foreach(v -> add_missing_edges!(tn, v), vertices(tn)) + return tn end # Update the graph of the TensorNetwork `tn` to include # edges that should be incident to the vertex `v` # based on the tensor connectivity. function add_missing_edges!(tn::AbstractTensorNetwork, v) - for v′ in vertices(tn) - if v ≠ v′ - e = v => v′ - if !isempty(linkinds(tn, e)) - add_edge!(tn, e) - end + for v′ in vertices(tn) + if v ≠ v′ + e = v => v′ + if !isempty(linkinds(tn, e)) + add_edge!(tn, e) + end + end end - end - return tn + return tn end # Fix the edges of the TensorNetwork `tn` to match # the tensor connectivity. function fix_edges!(tn::AbstractTensorNetwork) - foreach(v -> fix_edges!(tn, v), vertices(tn)) - return tn + foreach(v -> fix_edges!(tn, v), vertices(tn)) + return tn end # Fix the edges of the TensorNetwork `tn` to match # the tensor connectivity at vertex `v`. 
function fix_edges!(tn::AbstractTensorNetwork, v) - rem_incident_edges!(tn, v) - rem_edges!(tn, incident_edges(tn, v)) - add_missing_edges!(tn, v) - return tn + rem_incident_edges!(tn, v) + rem_edges!(tn, incident_edges(tn, v)) + add_missing_edges!(tn, v) + return tn end # Customization point. using NamedDimsArrays: AbstractNamedUnitRange, namedunitrange, nametype, randname function trivial_unitrange(type::Type{<:AbstractUnitRange}) - return Base.oneto(one(eltype(type))) + return Base.oneto(one(eltype(type))) end function rand_trivial_namedunitrange( - ::Type{<:AbstractNamedUnitRange{<:Any,R,N}} -) where {R,N} - return namedunitrange(trivial_unitrange(R), randname(N)) + ::Type{<:AbstractNamedUnitRange{<:Any, R, N}} + ) where {R, N} + return namedunitrange(trivial_unitrange(R), randname(N)) end dag(x) = x using NamedDimsArrays: nameddimsindices function insert_trivial_link!(tn, e) - add_edge!(tn, e) - l = rand_trivial_namedunitrange(eltype(nameddimsindices(tn[src(e)]))) - x = similar(tn[src(e)], (l,)) - x[1] = 1 - @preserve_graph tn[src(e)] = tn[src(e)] * x - @preserve_graph tn[dst(e)] = tn[dst(e)] * dag(x) - return tn + add_edge!(tn, e) + l = rand_trivial_namedunitrange(eltype(nameddimsindices(tn[src(e)]))) + x = similar(tn[src(e)], (l,)) + x[1] = 1 + @preserve_graph tn[src(e)] = tn[src(e)] * x + @preserve_graph tn[dst(e)] = tn[dst(e)] * dag(x) + return tn end function Base.setindex!(tn::AbstractTensorNetwork, value, v) - @preserve_graph tn[v] = value - fix_edges!(tn, v) - return tn + @preserve_graph tn[v] = value + fix_edges!(tn, v) + return tn end using NamedGraphs.OrdinalIndexing: OrdinalSuffixedInteger # Fix ambiguity error. function Base.setindex!(graph::AbstractTensorNetwork, value, vertex::OrdinalSuffixedInteger) - graph[vertices(graph)[vertex]] = value - return graph + graph[vertices(graph)[vertex]] = value + return graph end # Fix ambiguity error. function Base.setindex!(tn::AbstractTensorNetwork, value, edge::AbstractEdge) - return error("No edge data.") + return error("No edge data.") end # Fix ambiguity error. function Base.setindex!(tn::AbstractTensorNetwork, value, edge::Pair) - return error("No edge data.") + return error("No edge data.") end using NamedGraphs.OrdinalIndexing: OrdinalSuffixedInteger # Fix ambiguity error. 
function Base.setindex!( - tn::AbstractTensorNetwork, - value, - edge::Pair{<:OrdinalSuffixedInteger,<:OrdinalSuffixedInteger}, -) - return error("No edge data.") + tn::AbstractTensorNetwork, + value, + edge::Pair{<:OrdinalSuffixedInteger, <:OrdinalSuffixedInteger}, + ) + return error("No edge data.") end function Base.show(io::IO, mime::MIME"text/plain", graph::AbstractTensorNetwork) - println(io, "$(typeof(graph)) with $(nv(graph)) vertices:") - show(io, mime, vertices(graph)) - println(io, "\n") - println(io, "and $(ne(graph)) edge(s):") - for e in edges(graph) - show(io, mime, e) + println(io, "$(typeof(graph)) with $(nv(graph)) vertices:") + show(io, mime, vertices(graph)) + println(io, "\n") + println(io, "and $(ne(graph)) edge(s):") + for e in edges(graph) + show(io, mime, e) + println(io) + end println(io) - end - println(io) - println(io, "with vertex data:") - show(io, mime, axes.(vertex_data(graph))) - return nothing + println(io, "with vertex data:") + show(io, mime, axes.(vertex_data(graph))) + return nothing end Base.show(io::IO, graph::AbstractTensorNetwork) = show(io, MIME"text/plain"(), graph) diff --git a/src/tensornetwork.jl b/src/tensornetwork.jl index 3fd794b..7423669 100644 --- a/src/tensornetwork.jl +++ b/src/tensornetwork.jl @@ -7,68 +7,68 @@ using NamedGraphs.GraphsExtensions: arranged_edges, vertextype function _TensorNetwork end -struct TensorNetwork{V,VD,UG<:AbstractGraph{V},Tensors<:AbstractDictionary{V,VD}} <: - AbstractTensorNetwork{V,VD} - underlying_graph::UG - tensors::Tensors - global @inline function _TensorNetwork( - underlying_graph::UG, tensors::Tensors - ) where {V,VD,UG<:AbstractGraph{V},Tensors<:AbstractDictionary{V,VD}} - # This assumes the tensor connectivity matches the graph structure. - return new{V,VD,UG,Tensors}(underlying_graph, tensors) - end +struct TensorNetwork{V, VD, UG <: AbstractGraph{V}, Tensors <: AbstractDictionary{V, VD}} <: + AbstractTensorNetwork{V, VD} + underlying_graph::UG + tensors::Tensors + global @inline function _TensorNetwork( + underlying_graph::UG, tensors::Tensors + ) where {V, VD, UG <: AbstractGraph{V}, Tensors <: AbstractDictionary{V, VD}} + # This assumes the tensor connectivity matches the graph structure. + return new{V, VD, UG, Tensors}(underlying_graph, tensors) + end end DataGraphs.underlying_graph(tn::TensorNetwork) = getfield(tn, :underlying_graph) DataGraphs.vertex_data(tn::TensorNetwork) = getfield(tn, :tensors) function DataGraphs.underlying_graph_type(type::Type{<:TensorNetwork}) - return fieldtype(type, :underlying_graph) + return fieldtype(type, :underlying_graph) end # Determine the graph structure from the tensors. 
function TensorNetwork(t::AbstractDictionary) - g = NamedGraph(eachindex(t)) - for v1 in vertices(g) - for v2 in vertices(g) - if v1 ≠ v2 - if !isdisjoint(dimnames(t[v1]), dimnames(t[v2])) - add_edge!(g, v1 => v2) + g = NamedGraph(eachindex(t)) + for v1 in vertices(g) + for v2 in vertices(g) + if v1 ≠ v2 + if !isdisjoint(dimnames(t[v1]), dimnames(t[v2])) + add_edge!(g, v1 => v2) + end + end end - end end - end - return _TensorNetwork(g, t) + return _TensorNetwork(g, t) end function TensorNetwork(tensors::AbstractDict) - return TensorNetwork(Dictionary(tensors)) + return TensorNetwork(Dictionary(tensors)) end function TensorNetwork(graph::AbstractGraph, tensors::AbstractDictionary) - tn = TensorNetwork(tensors) - arranged_edges(tn) ⊆ arranged_edges(graph) || - error("The edges in the tensors do not match the graph structure.") - for e in setdiff(arranged_edges(graph), arranged_edges(tn)) - insert_trivial_link!(tn, e) - end - return tn + tn = TensorNetwork(tensors) + arranged_edges(tn) ⊆ arranged_edges(graph) || + error("The edges in the tensors do not match the graph structure.") + for e in setdiff(arranged_edges(graph), arranged_edges(tn)) + insert_trivial_link!(tn, e) + end + return tn end function TensorNetwork(graph::AbstractGraph, tensors::AbstractDict) - return TensorNetwork(graph, Dictionary(tensors)) + return TensorNetwork(graph, Dictionary(tensors)) end function TensorNetwork(f, graph::AbstractGraph) - return TensorNetwork(graph, Dict(v => f(v) for v in vertices(graph))) + return TensorNetwork(graph, Dict(v => f(v) for v in vertices(graph))) end function Base.copy(tn::TensorNetwork) - TensorNetwork(copy(underlying_graph(tn)), copy(vertex_data(tn))) + return TensorNetwork(copy(underlying_graph(tn)), copy(vertex_data(tn))) end TensorNetwork(tn::TensorNetwork) = copy(tn) TensorNetwork{V}(tn::TensorNetwork{V}) where {V} = copy(tn) function TensorNetwork{V}(tn::TensorNetwork) where {V} - g′ = convert_vertextype(V, underlying_graph(tn)) - d = vertex_data(tn) - d′ = dictionary(V(k) => d[k] for k in eachindex(d)) - return TensorNetwork(g′, d′) + g′ = convert_vertextype(V, underlying_graph(tn)) + d = vertex_data(tn) + d′ = dictionary(V(k) => d[k] for k in eachindex(d)) + return TensorNetwork(g′, d′) end NamedGraphs.convert_vertextype(::Type{V}, tn::TensorNetwork{V}) where {V} = tn diff --git a/test/runtests.jl b/test/runtests.jl index 98b2d2b..0008050 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -6,60 +6,62 @@ using Suppressor: Suppressor const pat = r"(?:--group=)(\w+)" arg_id = findfirst(contains(pat), ARGS) const GROUP = uppercase( - if isnothing(arg_id) - get(ENV, "GROUP", "ALL") - else - only(match(pat, ARGS[arg_id]).captures) - end, + if isnothing(arg_id) + get(ENV, "GROUP", "ALL") + else + only(match(pat, ARGS[arg_id]).captures) + end, ) "match files of the form `test_*.jl`, but exclude `*setup*.jl`" function istestfile(fn) - return endswith(fn, ".jl") && startswith(basename(fn), "test_") && !contains(fn, "setup") + return endswith(fn, ".jl") && startswith(basename(fn), "test_") && !contains(fn, "setup") end "match files of the form `*.jl`, but exclude `*_notest.jl` and `*setup*.jl`" function isexamplefile(fn) - return endswith(fn, ".jl") && !endswith(fn, "_notest.jl") && !contains(fn, "setup") + return endswith(fn, ".jl") && !endswith(fn, "_notest.jl") && !contains(fn, "setup") end @time begin - # tests in groups based on folder structure - for testgroup in filter(isdir, readdir(@__DIR__)) - if GROUP == "ALL" || GROUP == uppercase(testgroup) - groupdir = 
joinpath(@__DIR__, testgroup) - for file in filter(istestfile, readdir(groupdir)) - filename = joinpath(groupdir, file) - @eval @safetestset $file begin - include($filename) + # tests in groups based on folder structure + for testgroup in filter(isdir, readdir(@__DIR__)) + if GROUP == "ALL" || GROUP == uppercase(testgroup) + groupdir = joinpath(@__DIR__, testgroup) + for file in filter(istestfile, readdir(groupdir)) + filename = joinpath(groupdir, file) + @eval @safetestset $file begin + include($filename) + end + end end - end end - end - # single files in top folder - for file in filter(istestfile, readdir(@__DIR__)) - (file == basename(@__FILE__)) && continue # exclude this file to avoid infinite recursion - @eval @safetestset $file begin - include($file) + # single files in top folder + for file in filter(istestfile, readdir(@__DIR__)) + (file == basename(@__FILE__)) && continue # exclude this file to avoid infinite recursion + @eval @safetestset $file begin + include($file) + end end - end - # test examples - examplepath = joinpath(@__DIR__, "..", "examples") - for (root, _, files) in walkdir(examplepath) - contains(chopprefix(root, @__DIR__), "setup") && continue - for file in filter(isexamplefile, files) - filename = joinpath(root, file) - @eval begin - @safetestset $file begin - $(Expr( - :macrocall, - GlobalRef(Suppressor, Symbol("@suppress")), - LineNumberNode(@__LINE__, @__FILE__), - :(include($filename)), - )) + # test examples + examplepath = joinpath(@__DIR__, "..", "examples") + for (root, _, files) in walkdir(examplepath) + contains(chopprefix(root, @__DIR__), "setup") && continue + for file in filter(isexamplefile, files) + filename = joinpath(root, file) + @eval begin + @safetestset $file begin + $( + Expr( + :macrocall, + GlobalRef(Suppressor, Symbol("@suppress")), + LineNumberNode(@__LINE__, @__FILE__), + :(include($filename)), + ) + ) + end + end end - end end - end end diff --git a/test/test_aqua.jl b/test/test_aqua.jl index 34bfff1..0afead5 100644 --- a/test/test_aqua.jl +++ b/test/test_aqua.jl @@ -3,5 +3,5 @@ using Aqua: Aqua using Test: @testset @testset "Code quality (Aqua.jl)" begin - Aqua.test_all(ITensorNetworksNext) + Aqua.test_all(ITensorNetworksNext) end diff --git a/test/test_basics.jl b/test/test_basics.jl index 59e5e35..0c9d803 100644 --- a/test/test_basics.jl +++ b/test/test_basics.jl @@ -8,56 +8,56 @@ using NamedGraphs.NamedGraphGenerators: named_grid using Test: @test, @testset @testset "ITensorNetworksNext" begin - @testset "Construct TensorNetwork product state" begin - dims = (3, 3) - g = named_grid(dims) - s = Dict(v => Index(2) for v in vertices(g)) - tn = TensorNetwork(g) do v - return randn(s[v]) + @testset "Construct TensorNetwork product state" begin + dims = (3, 3) + g = named_grid(dims) + s = Dict(v => Index(2) for v in vertices(g)) + tn = TensorNetwork(g) do v + return randn(s[v]) + end + @test nv(tn) == 9 + @test ne(tn) == ne(g) + @test issetequal(vertices(tn), vertices(g)) + @test issetequal(arranged_edges(tn), arranged_edges(g)) + for v in vertices(tn) + @test siteinds(tn, v) == [s[v]] + end + for v1 in vertices(tn) + for v2 in vertices(tn) + v1 == v2 && continue + haslink = !isempty(linkinds(tn, v1 => v2)) + @test haslink == has_edge(tn, v1 => v2) + end + end + for e in edges(tn) + @test isone(length(only(linkinds(tn, e)))) + end + end + @testset "Construct TensorNetwork partition function" begin + dims = (3, 3) + g = named_grid(dims) + l = Dict(e => Index(2) for e in edges(g)) + l = merge(l, Dict(reverse(e) => l[e] for e in edges(g))) 
+ tn = TensorNetwork(g) do v + is = map(e -> l[e], incident_edges(g, v)) + return randn(Tuple(is)) + end + @test nv(tn) == 9 + @test ne(tn) == ne(g) + @test issetequal(vertices(tn), vertices(g)) + @test issetequal(arranged_edges(tn), arranged_edges(g)) + for v in vertices(tn) + @test isempty(siteinds(tn, v)) + end + for v1 in vertices(tn) + for v2 in vertices(tn) + v1 == v2 && continue + haslink = !isempty(linkinds(tn, v1 => v2)) + @test haslink == has_edge(tn, v1 => v2) + end + end + for e in edges(tn) + @test only(linkinds(tn, e)) == l[e] + end end - @test nv(tn) == 9 - @test ne(tn) == ne(g) - @test issetequal(vertices(tn), vertices(g)) - @test issetequal(arranged_edges(tn), arranged_edges(g)) - for v in vertices(tn) - @test siteinds(tn, v) == [s[v]] - end - for v1 in vertices(tn) - for v2 in vertices(tn) - v1 == v2 && continue - haslink = !isempty(linkinds(tn, v1 => v2)) - @test haslink == has_edge(tn, v1 => v2) - end - end - for e in edges(tn) - @test isone(length(only(linkinds(tn, e)))) - end - end - @testset "Construct TensorNetwork partition function" begin - dims = (3, 3) - g = named_grid(dims) - l = Dict(e => Index(2) for e in edges(g)) - l = merge(l, Dict(reverse(e) => l[e] for e in edges(g))) - tn = TensorNetwork(g) do v - is = map(e -> l[e], incident_edges(g, v)) - return randn(Tuple(is)) - end - @test nv(tn) == 9 - @test ne(tn) == ne(g) - @test issetequal(vertices(tn), vertices(g)) - @test issetequal(arranged_edges(tn), arranged_edges(g)) - for v in vertices(tn) - @test isempty(siteinds(tn, v)) - end - for v1 in vertices(tn) - for v2 in vertices(tn) - v1 == v2 && continue - haslink = !isempty(linkinds(tn, v1 => v2)) - @test haslink == has_edge(tn, v1 => v2) - end - end - for e in edges(tn) - @test only(linkinds(tn, e)) == l[e] - end - end end From 5dd5ca18f668763dc564245c9020244eb593276e Mon Sep 17 00:00:00 2001 From: mtfishman Date: Fri, 3 Oct 2025 14:20:41 -0400 Subject: [PATCH 2/3] Update template --- .JuliaFormatter.toml | 3 --- .github/workflows/FormatCheck.yml | 13 ++++++++----- .pre-commit-config.yaml | 8 ++++---- 3 files changed, 12 insertions(+), 12 deletions(-) delete mode 100644 .JuliaFormatter.toml diff --git a/.JuliaFormatter.toml b/.JuliaFormatter.toml deleted file mode 100644 index 4c49a86..0000000 --- a/.JuliaFormatter.toml +++ /dev/null @@ -1,3 +0,0 @@ -# See https://domluna.github.io/JuliaFormatter.jl/stable/ for a list of options -style = "blue" -indent = 2 diff --git a/.github/workflows/FormatCheck.yml b/.github/workflows/FormatCheck.yml index 3f78afc..1525861 100644 --- a/.github/workflows/FormatCheck.yml +++ b/.github/workflows/FormatCheck.yml @@ -1,11 +1,14 @@ name: "Format Check" on: - push: - branches: - - 'main' - tags: '*' - pull_request: + pull_request_target: + paths: ['**/*.jl'] + types: [opened, synchronize, reopened, ready_for_review] + +permissions: + contents: read + actions: write + pull-requests: write jobs: format-check: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 88bc8b4..3fc4743 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,5 +1,5 @@ ci: - skip: [julia-formatter] + skip: [runic] repos: - repo: https://github.com/pre-commit/pre-commit-hooks @@ -11,7 +11,7 @@ repos: - id: end-of-file-fixer exclude_types: [markdown] # incompatible with Literate.jl -- repo: "https://github.com/domluna/JuliaFormatter.jl" - rev: v2.1.6 +- repo: https://github.com/fredrikekre/runic-pre-commit + rev: v2.0.1 hooks: - - id: "julia-formatter" + - id: runic From 65b72e7b33850dc8dd602279e83cd9e1196deab4 Mon Sep 17 
00:00:00 2001
From: mtfishman
Date: Fri, 3 Oct 2025 14:43:01 -0400
Subject: [PATCH 3/3] Bump version

---
 Project.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Project.toml b/Project.toml
index e06d0fd..f8db590 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "ITensorNetworksNext"
 uuid = "302f2e75-49f0-4526-aef7-d8ba550cb06c"
 authors = ["ITensor developers and contributors"]
-version = "0.1.3"
+version = "0.1.4"
 
 [deps]
 Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
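
Note on the reformatted tests: the first testset in test/test_basics.jl (patch 1) exercises the product-state constructor. The condensed standalone sketch below mirrors that pattern as a reading aid. The `using` lines are an assumption, since the test file's preamble lies outside the shown hunks (in particular, `Index` is assumed to come from ITensorBase); `named_grid`, `TensorNetwork`, `siteinds`, and `linkinds` are taken directly from the diffs above.

    # Build one tensor per vertex of a 3x3 grid, each carrying a single
    # site index; the constructor then inserts trivial (dimension-1)
    # links so the network has the same edges as the graph.
    using Graphs: edges, ne, nv, vertices
    using ITensorBase: Index                      # assumed import
    using ITensorNetworksNext: TensorNetwork, linkinds, siteinds
    using NamedGraphs.NamedGraphGenerators: named_grid

    g = named_grid((3, 3))
    s = Dict(v => Index(2) for v in vertices(g))
    tn = TensorNetwork(g) do v
        return randn(s[v])
    end

    @assert nv(tn) == 9 && ne(tn) == ne(g)
    # Each vertex keeps its uncontracted site index ...
    @assert all(v -> siteinds(tn, v) == [s[v]], vertices(tn))
    # ... and every graph edge carries an inserted dimension-1 link.
    @assert all(e -> isone(length(only(linkinds(tn, e)))), edges(tn))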
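The second testset covers the complementary case: a partition-function-style network whose links come from indices shared along edges rather than from inserted trivial links. A condensed sketch, under the same assumptions about the import lines as above:

    # Assign one Index per graph edge (registered under both edge
    # orientations), build each vertex tensor over its incident edges,
    # and check that the resulting links are exactly the edge indices.
    using Graphs: edges, ne, nv, vertices
    using ITensorBase: Index                      # assumed import
    using ITensorNetworksNext: TensorNetwork, linkinds, siteinds
    using NamedGraphs.GraphsExtensions: incident_edges
    using NamedGraphs.NamedGraphGenerators: named_grid

    g = named_grid((3, 3))
    l = Dict(e => Index(2) for e in edges(g))
    l = merge(l, Dict(reverse(e) => l[e] for e in edges(g)))
    tn = TensorNetwork(g) do v
        is = map(e -> l[e], incident_edges(g, v))
        return randn(Tuple(is))
    end

    @assert ne(tn) == ne(g)
    # No dangling site indices: every index is shared along an edge ...
    @assert all(v -> isempty(siteinds(tn, v)), vertices(tn))
    # ... and each link is the Index that was assigned to that edge.
    @assert all(e -> only(linkinds(tn, e)) == l[e], edges(tn))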