Skip to content

Commit

Permalink
Machete kills network graph
Browse files Browse the repository at this point in the history
  • Loading branch information
dexter2206 committed Mar 9, 2021
1 parent 2b45fd7 commit 4a889a8
Show file tree
Hide file tree
Showing 6 changed files with 83 additions and 136 deletions.
146 changes: 78 additions & 68 deletions src/PEPS.jl
Expand Up @@ -8,7 +8,7 @@ const DEFAULT_CONTROL_PARAMS = Dict(
"β" => 1.
)

mutable struct PEPSNetwork
struct PEPSNetwork
size::NTuple{2, Int}
map::Dict
fg::MetaDiGraph
Expand All @@ -26,54 +26,71 @@ mutable struct PEPSNetwork
β::Number,
origin::Symbol=:NW,
args_override::Dict{String, Number}=Dict{String, Number}()
)
map, i_max, j_max = peps_indices(m, n, origin)

# v => (l, u, r, d)
nbrs = Dict(
map[i, j] => (map[i, j-1], map[i-1, j], map[i, j+1], map[i+1, j])
for i 1:i_max, j 1:j_max
)

pn = new((m, n))
pn.map, pn.i_max, pn.j_max = peps_indices(m, n, origin)
args = merge(DEFAULT_CONTROL_PARAMS, args_override)
pn = new((m, n), map, fg, nbrs, origin, i_max, j_max, β, args)
end
end

nbrs = Dict()
for i 1:pn.i_max, j 1:pn.j_max
# v => (l, u, r, d)
push!(nbrs,
pn.map[i, j] => (pn.map[i, j-1], pn.map[i-1, j],
pn.map[i, j+1], pn.map[i+1, j]))
end
pn.fg = fg
pn.nbrs = nbrs
pn.β = β
pn.args = merge(DEFAULT_CONTROL_PARAMS, args_override)
pn
# Fetch the projector coupling vertex `v` to neighbour `w` in the factor
# graph `fg`. Edge orientation decides which stored projector applies:
# a reversed edge (w → v) yields the adjoint of :pr, a forward edge
# (v → w) yields :pl, and an absent edge falls back to a trivial
# all-ones column projector sized by v's local energy vector.
function _get_projector(fg::MetaDiGraph, v::Int, w::Int)
    has_edge(fg, w, v) && return get_prop(fg, w, v, :pr)'
    has_edge(fg, v, w) && return get_prop(fg, v, w, :pl)
    return ones(length(get_prop(fg, v, :loc_en)), 1)
end

# Grid-coordinate overload: translate the (row, col) tuple `m` into a
# linear vertex index via the network's map, then delegate.
function generate_tensor(pn::PEPSNetwork, m::NTuple{2, Int})
    return generate_tensor(pn, pn.map[m])
end
@memoize function generate_tensor(network::PEPSNetwork, v::Int)
# TODO: does this require full network, or can we pass only fg?
loc_exp = exp.(-network.β .* get_prop(network.fg, v, :loc_en))

# Grid-coordinate overload for edge tensors: translate both (row, col)
# tuples into linear vertex indices, then delegate.
function generate_tensor(pn::PEPSNetwork, m::NTuple{2, Int}, n::NTuple{2, Int})
    return generate_tensor(pn, pn.map[m], pn.map[n])
end
dim = zeros(Int, length(network.nbrs[v]))
@cast A[_, i] := loc_exp[i]

# Grid-coordinate convenience wrapper: translate the (row, col) tuples
# `m` and `n` to linear vertex indices before delegating.
# NOTE(review): this delegates to a 4-arg generate_boundary(::PEPSNetwork,
# ::Int, ::Int, ::Int) method that is not visible in this view — confirm
# such a method still exists after the network-graph removal.
generate_boundary(pn::PEPSNetwork,
m::NTuple{2, Int},
n::NTuple{2, Int},
σ::Int,
) = generate_boundary(pn, pn.map[m], pn.map[n], σ)
for (j, w) enumerate(network.nbrs[v])
pv = _get_projector(network.fg, v, w)
@cast A[(c, γ), σ] |= A[c, σ] * pv[σ, γ]
dim[j] = size(pv, 2)
end
reshape(A, dim..., :)
end

# Boltzmann-weight tensor for the bond between vertices `v` and `w`:
# exp(-β * E_bond). The stored edge energy is read in whichever
# orientation the edge exists (adjoint when reversed); an unconnected
# pair gets a 1x1 zero energy, i.e. a trivial weight of 1.
@memoize function generate_tensor(network::PEPSNetwork, v::Int, w::Int)
    fg = network.fg
    en = if has_edge(fg, w, v)
        get_prop(fg, w, v, :en)'
    elseif has_edge(fg, v, w)
        get_prop(fg, v, w, :en)
    else
        zeros(1, 1)
    end
    return exp.(-network.β .* en)
end

function PEPSRow(::Type{T}, peps::PEPSNetwork, i::Int) where {T <: Number}
ψ = PEPSRow(T, peps.j_max)

# generate tensors from projectors
for j 1:length(ψ)
ψ[j] = generate_tensor(peps, (i, j))
ψ[j] = generate_tensor(peps, peps.map[i, j])
end

# include energy
for j 1:peps.j_max
A = ψ[j]
h = generate_tensor(peps, (i, j-1), (i, j))
v = generate_tensor(peps, (i-1, j), (i, j))
h = generate_tensor(peps, peps.map[i, j-1], peps.map[i, j])
v = generate_tensor(peps, peps.map[i-1, j], peps.map[i, j])
@tensor B[l, u, r, d, σ] := h[l, l̃] * v[u, ũ] * A[l̃, ũ, r, d, σ]
ψ[j] = B
end
Expand Down Expand Up @@ -101,6 +118,7 @@ function MPO(::Type{T},
end
W
end

MPO(peps::PEPSNetwork,
i::Int,
config::Dict{Int, Int} = Dict{Int, Int}()
Expand Down Expand Up @@ -138,68 +156,56 @@ end
ceil(k / peps.j_max), (k - 1) % peps.j_max + 1
end

@inline function _get_local_state(
peps::PEPSNetwork,
v::Vector{Int},
i::Int,
j::Int,
)
k = j + peps.j_max * (i - 1)
if k > length(v) || k <= 0 return 1 end
v[k]
# Boundary index contributed by vertex `v` (in local state `state`) on
# the edge towards `w`. A vertex absent from the factor graph contributes
# the trivial index 1.
function generate_boundary(fg::MetaDiGraph, v::Int, w::Int, state::Int)
    if v ∉ vertices(fg) return 1 end
    pv = _get_projector(fg, v, w)
    # The projector row selected by `state` is one-hot: its (first)
    # positive column is the boundary label on this edge.
    # (Removed: unused local `loc_dim` computed from :loc_en.)
    findfirst(x -> x > 0, pv[state, :])
end

function generate_boundary(
peps::PEPSNetwork,
v::Vector{Int},
i::Int,
j::Int,
)
# Assemble the boundary-index vector ∂v seen by site (i, j) under the
# partial configuration `v`: columns left of j in row i contribute via
# their downward edges, site (i, j-1) via its rightward edge, and columns
# j..j_max of row i-1 via their downward edges.
# NOTE(review): reconstructed from a garbled diff hunk — the old
# `peps.network_graph` / tuple-coordinate arguments were interleaved with
# the new `peps.fg` / `peps.map[...]` ones; the new form is kept.
function generate_boundary(peps::PEPSNetwork, v::Vector{Int}, i::Int, j::Int)
    ∂v = zeros(Int, peps.j_max + 1)

    # on the left below
    for k ∈ 1:j-1
        ∂v[k] = generate_boundary(
            peps.fg,
            peps.map[i, k],
            peps.map[i+1, k],
            _get_local_state(peps, v, i, k))
    end

    # on the left at the current row
    ∂v[j] = generate_boundary(
        peps.fg,
        peps.map[i, j-1],
        peps.map[i, j],
        _get_local_state(peps, v, i, j-1))

    # on the right above
    for k ∈ j:peps.j_max
        ∂v[k+1] = generate_boundary(
            peps.fg,
            peps.map[i-1, k],
            peps.map[i, k],
            _get_local_state(peps, v, i-1, k))
    end
    ∂v
end

function generate_boundary(
peps::PEPSNetwork,
v::Vector{Int},
)
i, j = _get_coordinates(peps, length(v)+1)
generate_boundary(peps, v, i, j)
# Local state of grid site (i, j) under the partial configuration `v`.
# Sites whose linear index falls outside `v` (not yet assigned) default
# to state 1.
function _get_local_state(peps::PEPSNetwork, v::Vector{Int}, i::Int, j::Int)
    k = j + peps.j_max * (i - 1)
    # Fix: the original called `lenght(v)` (typo), raising UndefVarError
    # for every in-range k instead of returning v[k].
    return 0 < k <= length(v) ? v[k] : 1
end

@inline function _contract(
A::Array{T, 5},
M::Array{T, 3},
L::Vector{T},
R::Matrix{T},
∂v::Vector{Int},
) where {T <: Number}
# function generate_boundary(peps::PEPSNetwork, v::Vector{Int})
# i, j = _get_coordinates(peps, length(v)+1)
# generate_boundary(peps, v, i, j)
# end

function _contract(
A::Array{T, 5}, M::Array{T, 3}, L::Vector{T}, R::Matrix{T}, ∂v::Vector{Int}
) where {T <: Number}
l, u = ∂v
@cast Ã[r, d, σ] := A[$l, $u, r, d, σ]
@tensor prob[σ] := L[x] * M[x, d, y] *
Expand Down Expand Up @@ -227,7 +233,11 @@ function conditional_probability(
R = right_env(ψ, W, ∂v[j+2:peps.j_max+1])
A = generate_tensor(peps, i, j)

prob = _contract(A, ψ[j], L, R, ∂v[j:j+1])
l, u = ∂v[j:j+1]
M = ψ[j]
Ã[:, :, :] = A[l, u, :, :, :]
@tensor prob[σ] := L[x] * M[x, d, y] *
Ã[r, d, σ] * R[y, r] order = (x, d, r, y)
_normalize_probability(prob)
end

Expand Down
3 changes: 0 additions & 3 deletions src/SpinGlassPEPS.jl
Expand Up @@ -23,10 +23,7 @@ module SpinGlassPEPS
include("exact.jl")
include("factor.jl")
include("PEPS.jl")
include("network.jl")
include("MPS_search.jl")
include("notation.jl")
include("peps_no_types.jl")

function __init__()
@require CUDA="052768ef-5323-5732-b1bb-66c8b64840ba" begin
Expand Down
2 changes: 1 addition & 1 deletion src/factor.jl
Expand Up @@ -32,7 +32,7 @@ function factor_graph(
energy::Function=energy,
spectrum::Function=full_spectrum,
cluster_assignment_rule::Dict{Int, Int} # e.g. square lattice
)
)
ns = Dict(i => num_states_cl for i Set(values(cluster_assignment_rule)))
factor_graph(
ig,
Expand Down
8 changes: 1 addition & 7 deletions src/ising.jl
Expand Up @@ -3,8 +3,6 @@ export energy, rank_vec
export Spectrum, cluster, rank, nodes, basis_size

const Instance = Union{String, Dict}
const SimpleEdge = LightGraphs.SimpleGraphs.SimpleEdge
const EdgeIter = Union{LightGraphs.SimpleGraphs.SimpleEdgeIter, Base.Iterators.Filter, Array}

struct Spectrum
energies::Array{<:Number}
Expand Down Expand Up @@ -85,11 +83,7 @@ function cluster(ig::MetaGraph, verts)
rank = getindex.(Ref(get_prop(ig, :rank)), vmap)
J = get_prop(ig, :J)[vmap, vmap]

set_prop!(sub_ig, :rank, rank)
set_prop!(sub_ig, :J, J)
set_prop!(sub_ig, :h, h)
set_prop!(sub_ig, :vmap, vmap)

set_props!(sub_ig, Dict(:rank => rank, :J => J, :h => h, :vmap => vmap))
sub_ig
end

Expand Down
55 changes: 0 additions & 55 deletions src/network.jl

This file was deleted.

5 changes: 3 additions & 2 deletions test/factor.jl
Expand Up @@ -166,8 +166,9 @@ end
@testset "each edge comprises expected bunch of edges from source Ising graph" begin
for e edges(fg)
outer_edges = get_prop(fg, e, :outer_edges)
println(collect(outer_edges))
println(cedges[Tuple(e)])
# println(collect(outer_edges))
# println(cedges[Tuple(e)])
# Note: this test is ok if we translate edges correctly.
# TODO: fix this by translating from nodes to graph coordinates
# @test issetequal(cedges[Tuple(e)], collect(outer_edges))
end
Expand Down

0 comments on commit 4a889a8

Please sign in to comment.