3 changes: 0 additions & 3 deletions .JuliaFormatter.toml

This file was deleted.

2 changes: 1 addition & 1 deletion .github/workflows/CompatHelper.yml
@@ -2,7 +2,7 @@ name: "CompatHelper"

 on:
   schedule:
-    - cron: 0 0 * * *
+    - cron: '0 0 * * *'
   workflow_dispatch:
 permissions:
   contents: write
13 changes: 8 additions & 5 deletions .github/workflows/FormatCheck.yml
@@ -1,11 +1,14 @@
 name: "Format Check"

 on:
-  push:
-    branches:
-      - 'main'
-    tags: '*'
-  pull_request:
+  pull_request_target:
+    paths: ['**/*.jl']
+    types: [opened, synchronize, reopened, ready_for_review]
+
+permissions:
+  contents: read
+  actions: write
+  pull-requests: write

 jobs:
   format-check:
4 changes: 4 additions & 0 deletions .gitignore
@@ -9,6 +9,10 @@
.vscode/
Manifest.toml
benchmark/*.json
dev/
docs/LocalPreferences.toml
docs/Manifest.toml
docs/build/
docs/src/index.md
examples/LocalPreferences.toml
test/LocalPreferences.toml
8 changes: 4 additions & 4 deletions .pre-commit-config.yaml
@@ -1,5 +1,5 @@
 ci:
-  skip: [julia-formatter]
+  skip: [runic]

 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
@@ -11,7 +11,7 @@ repos:
       - id: end-of-file-fixer
         exclude_types: [markdown] # incompatible with Literate.jl

-  - repo: "https://github.com/domluna/JuliaFormatter.jl"
-    rev: v2.1.6
+  - repo: https://github.com/fredrikekre/runic-pre-commit
+    rev: v2.0.1
     hooks:
-      - id: "julia-formatter"
+      - id: runic
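
The hook id changes from julia-formatter to runic, so existing local checkouts need their hooks refreshed. A minimal sketch of doing that from the Julia REPL (not part of this diff; it assumes the pre-commit CLI is installed and on PATH):

# Hedged sketch: refresh and run the updated hooks locally.
# Assumes the `pre-commit` executable is installed and available on PATH.
run(`pre-commit install`)            # re-register the git hook against the new config
run(`pre-commit run --all-files`)    # run every hook, including the new runic hook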
24 changes: 12 additions & 12 deletions docs/make.jl
@@ -2,24 +2,24 @@ using ITensorNetworks: ITensorNetworks
 using Documenter: Documenter, DocMeta, deploydocs, makedocs

 DocMeta.setdocmeta!(
-  ITensorNetworks, :DocTestSetup, :(using ITensorNetworks); recursive=true
+    ITensorNetworks, :DocTestSetup, :(using ITensorNetworks); recursive = true
 )

 include("make_index.jl")

 makedocs(;
-  modules=[ITensorNetworks],
-  authors="ITensor developers <support@itensor.org> and contributors",
-  sitename="ITensorNetworks.jl",
-  format=Documenter.HTML(;
-    canonical="https://itensor.github.io/ITensorNetworks.jl",
-    edit_link="main",
-    assets=["assets/favicon.ico", "assets/extras.css"],
-  ),
-  pages=["Home" => "index.md", "Reference" => "reference.md"],
-  warnonly=true,
+    modules = [ITensorNetworks],
+    authors = "ITensor developers <support@itensor.org> and contributors",
+    sitename = "ITensorNetworks.jl",
+    format = Documenter.HTML(;
+        canonical = "https://itensor.github.io/ITensorNetworks.jl",
+        edit_link = "main",
+        assets = ["assets/favicon.ico", "assets/extras.css"],
+    ),
+    pages = ["Home" => "index.md", "Reference" => "reference.md"],
+    warnonly = true,
 )

 deploydocs(;
-  repo="github.com/ITensor/ITensorNetworks.jl", devbranch="main", push_preview=true
+    repo = "github.com/ITensor/ITensorNetworks.jl", devbranch = "main", push_preview = true
 )
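
The makedocs and deploydocs settings are unchanged apart from Runic's keyword spacing and indentation. To confirm the docs still build after the reformat, a hedged local-build sketch (assumed workflow, not part of this diff; run from the repository root, and note deploydocs skips deployment outside CI):

# Hedged sketch: build the documentation against the local checkout.
using Pkg
Pkg.activate("docs")
Pkg.develop(path = ".")    # use the local ITensorNetworks source
Pkg.instantiate()
include(joinpath("docs", "make.jl"))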
16 changes: 8 additions & 8 deletions docs/make_index.jl
@@ -2,20 +2,20 @@ using Literate: Literate
 using ITensorNetworks: ITensorNetworks

 function ccq_logo(content)
-  include_ccq_logo = """
+    include_ccq_logo = """
 ```@raw html
 <img class="display-light-only" src="assets/CCQ.png" width="20%" alt="Flatiron Center for Computational Quantum Physics logo."/>
 <img class="display-dark-only" src="assets/CCQ-dark.png" width="20%" alt="Flatiron Center for Computational Quantum Physics logo."/>
 ```
 """
-  content = replace(content, "{CCQ_LOGO}" => include_ccq_logo)
-  return content
+    content = replace(content, "{CCQ_LOGO}" => include_ccq_logo)
+    return content
 end

 Literate.markdown(
-  joinpath(pkgdir(ITensorNetworks), "examples", "README.jl"),
-  joinpath(pkgdir(ITensorNetworks), "docs", "src");
-  flavor=Literate.DocumenterFlavor(),
-  name="index",
-  postprocess=ccq_logo,
+    joinpath(pkgdir(ITensorNetworks), "examples", "README.jl"),
+    joinpath(pkgdir(ITensorNetworks), "docs", "src");
+    flavor = Literate.DocumenterFlavor(),
+    name = "index",
+    postprocess = ccq_logo,
 )
16 changes: 8 additions & 8 deletions docs/make_readme.jl
@@ -2,20 +2,20 @@ using Literate: Literate
 using ITensorNetworks: ITensorNetworks

 function ccq_logo(content)
-  include_ccq_logo = """
+    include_ccq_logo = """
 <picture>
 <source media="(prefers-color-scheme: dark)" width="20%" srcset="docs/src/assets/CCQ-dark.png">
 <img alt="Flatiron Center for Computational Quantum Physics logo." width="20%" src="docs/src/assets/CCQ.png">
 </picture>
 """
-  content = replace(content, "{CCQ_LOGO}" => include_ccq_logo)
-  return content
+    content = replace(content, "{CCQ_LOGO}" => include_ccq_logo)
+    return content
 end

 Literate.markdown(
-  joinpath(pkgdir(ITensorNetworks), "examples", "README.jl"),
-  joinpath(pkgdir(ITensorNetworks));
-  flavor=Literate.CommonMarkFlavor(),
-  name="README",
-  postprocess=ccq_logo,
+    joinpath(pkgdir(ITensorNetworks), "examples", "README.jl"),
+    joinpath(pkgdir(ITensorNetworks));
+    flavor = Literate.CommonMarkFlavor(),
+    name = "README",
+    postprocess = ccq_logo,
 )
2 changes: 1 addition & 1 deletion examples/README.jl
@@ -11,7 +11,7 @@
 # > or for us to clearly announce parts of the code we are changing.

 # # ITensorNetworks.jl
-# 
+#
 # [![Stable](https://img.shields.io/badge/docs-stable-blue.svg)](https://itensor.github.io/ITensorNetworks.jl/stable/)
 # [![Dev](https://img.shields.io/badge/docs-dev-blue.svg)](https://itensor.github.io/ITensorNetworks.jl/dev/)
 # [![Build Status](https://github.com/ITensor/ITensorNetworks.jl/actions/workflows/Tests.yml/badge.svg?branch=main)](https://github.com/ITensor/ITensorNetworks.jl/actions/workflows/Tests.yml?query=branch%3Amain)
66 changes: 33 additions & 33 deletions ext/ITensorNetworksEinExprsExt/ITensorNetworksEinExprsExt.jl
@@ -2,57 +2,57 @@ module ITensorNetworksEinExprsExt

 using ITensors: Index, ITensor, @Algorithm_str, inds, noncommoninds
 using ITensorNetworks:
-  ITensorNetworks,
-  ITensorList,
-  ITensorNetwork,
-  vertextype,
-  vertex_data,
-  contraction_sequence
+    ITensorNetworks,
+    ITensorList,
+    ITensorNetwork,
+    vertextype,
+    vertex_data,
+    contraction_sequence
 using EinExprs: EinExprs, EinExpr, einexpr, SizedEinExpr

 function to_einexpr(ts::ITensorList)
-  IndexType = Any
+    IndexType = Any

-  tensor_exprs = EinExpr{IndexType}[]
-  inds_dims = Dict{IndexType,Int}()
+    tensor_exprs = EinExpr{IndexType}[]
+    inds_dims = Dict{IndexType, Int}()

-  for tensor_v in ts
-    inds_v = collect(inds(tensor_v))
-    push!(tensor_exprs, EinExpr{IndexType}(; head=inds_v))
-    merge!(inds_dims, Dict(inds_v .=> size(tensor_v)))
-  end
+    for tensor_v in ts
+        inds_v = collect(inds(tensor_v))
+        push!(tensor_exprs, EinExpr{IndexType}(; head = inds_v))
+        merge!(inds_dims, Dict(inds_v .=> size(tensor_v)))
+    end

-  externalinds_tn = reduce(noncommoninds, ts)
-  return SizedEinExpr(sum(tensor_exprs; skip=externalinds_tn), inds_dims)
+    externalinds_tn = reduce(noncommoninds, ts)
+    return SizedEinExpr(sum(tensor_exprs; skip = externalinds_tn), inds_dims)
 end

 function tensor_inds_to_vertex(ts::ITensorList)
-  IndexType = Any
-  VertexType = Int
+    IndexType = Any
+    VertexType = Int

-  mapping = Dict{Set{IndexType},VertexType}()
+    mapping = Dict{Set{IndexType}, VertexType}()

-  for (v, tensor_v) in enumerate(ts)
-    inds_v = collect(inds(tensor_v))
-    mapping[Set(inds_v)] = v
-  end
+    for (v, tensor_v) in enumerate(ts)
+        inds_v = collect(inds(tensor_v))
+        mapping[Set(inds_v)] = v
+    end

-  return mapping
+    return mapping
 end

 function ITensorNetworks.contraction_sequence(
-  ::Algorithm"einexpr", tn::ITensorList; optimizer=EinExprs.Exhaustive()
-)
-  expr = to_einexpr(tn)
-  path = einexpr(optimizer, expr)
-  return to_contraction_sequence(path, tensor_inds_to_vertex(tn))
+        ::Algorithm"einexpr", tn::ITensorList; optimizer = EinExprs.Exhaustive()
+    )
+    expr = to_einexpr(tn)
+    path = einexpr(optimizer, expr)
+    return to_contraction_sequence(path, tensor_inds_to_vertex(tn))
 end

 function to_contraction_sequence(expr, tensor_inds_to_vertex)
-  EinExprs.nargs(expr) == 0 && return tensor_inds_to_vertex[Set(expr.head)]
-  return map(
-    expr -> to_contraction_sequence(expr, tensor_inds_to_vertex), EinExprs.args(expr)
-  )
+    EinExprs.nargs(expr) == 0 && return tensor_inds_to_vertex[Set(expr.head)]
+    return map(
+        expr -> to_contraction_sequence(expr, tensor_inds_to_vertex), EinExprs.args(expr)
+    )
 end

 end
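
The changes above are formatting only; for readers unfamiliar with this extension, a hedged usage sketch follows (not part of this diff; the tensors and index sizes are illustrative assumptions):

# Hedged sketch: ask the EinExprs backend defined above for a contraction
# sequence of three small tensors. Loading EinExprs activates the extension.
using ITensors: Index, ITensor, @Algorithm_str
using EinExprs: EinExprs
using ITensorNetworks: ITensorNetworks

i, j, k = Index(2, "i"), Index(3, "j"), Index(4, "k")
A = ITensor(randn(2, 3), i, j)
B = ITensor(randn(3, 4), j, k)
C = ITensor(randn(4, 2), k, i)

# Dispatches to the method defined in this extension; `optimizer` defaults to
# EinExprs.Exhaustive() as in the signature above.
sequence = ITensorNetworks.contraction_sequence(Algorithm"einexpr"(), [A, B, C])
@show sequence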
@@ -5,15 +5,15 @@ using ITensorNetworks: ITensorNetworks
 using NDTensors.AlgorithmSelection: @Algorithm_str

 function ITensorNetworks.mincut(
-  ::Algorithm"GraphsFlows",
-  graph::AbstractGraph,
-  source_vertex,
-  target_vertex;
-  capacity_matrix,
-  alg=GraphsFlows.PushRelabelAlgorithm(),
-)
-  # TODO: Replace with `Backend(backend)`.
-  return GraphsFlows.mincut(graph, source_vertex, target_vertex, capacity_matrix, alg)
+        ::Algorithm"GraphsFlows",
+        graph::AbstractGraph,
+        source_vertex,
+        target_vertex;
+        capacity_matrix,
+        alg = GraphsFlows.PushRelabelAlgorithm(),
+    )
+    # TODO: Replace with `Backend(backend)`.
+    return GraphsFlows.mincut(graph, source_vertex, target_vertex, capacity_matrix, alg)
 end

 end
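
Again a formatting-only change; a hedged sketch of exercising the GraphsFlows-backed mincut defined above on a tiny directed graph (the graph, capacities, and vertex choices are illustrative assumptions, not part of this diff):

# Hedged sketch: minimum s-t cut through the GraphsFlows backend.
# Loading GraphsFlows activates the extension; @Algorithm_str is imported
# from ITensors as in the EinExprs extension above.
using Graphs: SimpleDiGraph, add_edge!
using GraphsFlows: GraphsFlows
using ITensors: @Algorithm_str
using ITensorNetworks: ITensorNetworks

g = SimpleDiGraph(4)
add_edge!(g, 1, 2); add_edge!(g, 2, 3); add_edge!(g, 3, 4)

capacity = zeros(4, 4)
capacity[1, 2] = 1.0; capacity[2, 3] = 2.0; capacity[3, 4] = 1.0

# GraphsFlows.mincut returns the two sides of the cut and the cut value;
# the wrapper above forwards that result unchanged.
result = ITensorNetworks.mincut(
    Algorithm"GraphsFlows"(), g, 1, 4; capacity_matrix = capacity
)
@show result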