Add initial h_gain_schedule implementation
zmorrell committed Oct 12, 2022
1 parent 4574c4a commit b91e40a
Showing 3 changed files with 57 additions and 16 deletions.
9 changes: 8 additions & 1 deletion src/base.jl
@@ -11,13 +11,20 @@ struct AnnealingSchedule
     A::Function
     B::Function
     init_default::Function
+    h_gain_schedule::Function
 end

 """
 A shorthand AnnealingSchedule constructor that uses the initial_state_default,
 which is the most common case for the conventions of this implementation.
 """
-AnnealingSchedule(A,B) = AnnealingSchedule(A, B, initial_state_default)
+AnnealingSchedule(A,B) = AnnealingSchedule(A, B, initial_state_default, one)
+
+"""
+A shorthand AnnealingSchedule constructor that uses the Base.one function for the
+h gain schedule, but allows for a different initial state.
+"""
+AnnealingSchedule(A,B,init_default) = AnnealingSchedule(A, B, init_default, one)

 #predefining Pauli Matrices
 const _IMAT = SparseArrays.sparse([1,2], [1,2], [1.0+0im;1.0+0im])
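For illustration, a minimal usage sketch of the extended constructor (the linear A/B ramps and the gain function are placeholders; initial_state_default is the existing default referenced above):

    # Custom h gain that ramps the local-field strength from 0 to 1 over the anneal.
    hgs(s) = s
    schedule = AnnealingSchedule(s -> 1 - s, s -> s, initial_state_default, hgs)

    schedule.h_gain_schedule(0.25)   # 0.25

    # The two-argument shorthand keeps the default gain: one(s) == 1.0 for every s.
    AnnealingSchedule(s -> 1 - s, s -> s).h_gain_schedule(0.5)   # 1.0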
25 changes: 21 additions & 4 deletions src/dwave.jl
@@ -187,16 +187,18 @@

 """
 Function to modify an existing annealing schedule to use a customized
-annealing schedule (asch). These parameters are the same as those
-used in a dwisc call or a dwave schedule.
+annealing schedule (asch) or h gain schedule (hgs). These parameters
+are the same as those used in a dwisc call or a dwave schedule.
 Inputs:
 annealing_schedule - the base AnnealingSchedule to modify
 Parameters:
 asch - This is the annealing-schedule parameter. This is a list of tuples of the form
        [(s₀,s_effective₀), (s₁,s_effective₁), ..., (sₙ,s_effectiveₙ)].
+hgs - This is the h_gain_schedule parameter. This is a list of tuples of the form
+      [(s₀,hgs(s₀)), (s₁,hgs(s₁)), ..., (sₙ,hgs(sₙ))].
 """
-function annealing_protocol_dwave(annealing_schedule::AnnealingSchedule; asch=[(0,0) (1,1)])
+function annealing_protocol_dwave(annealing_schedule::AnnealingSchedule; asch=[(0,0) (1,1)], hgs=[(0,1), (1,1)])
     asch_slopes = zeros(length(asch)-1)
     for i in 1:(length(asch)-1)
         s0,s_eff_0 = asch[i]
@@ -209,9 +211,24 @@ function annealing_protocol_dwave(annealing_schedule::AnnealingSchedule; asch=[(
         return sum([(asch_slopes[i]*(s-asch[i][1]) + asch[i][2]) * (asch[i][1] <= s < asch[i+1][1]) for i = 1:(length(asch)-1)]) + ((s == asch[end][1])*asch[end][2])
     end

+    hgs_slopes = zeros(length(hgs)-1)
+    for i in 1:(length(hgs)-1)
+        s0,s_eff_0 = hgs[i]
+        s1,s_eff_1 = hgs[i+1]
+        hgs_slopes[i] = (s_eff_1 - s_eff_0)/(s1 - s0)
+    end
+
+    #branchless piecewise function using linear interpolation from y = m*(x-x0) + y0
+    function hgs_func(s)
+        return sum([(hgs_slopes[i]*(s-hgs[i][1]) + hgs[i][2]) * (hgs[i][1] <= s < hgs[i+1][1]) for i = 1:(length(hgs)-1)]) + ((s == hgs[end][1])*hgs[end][2])
+    end
+
     new_annealing_schedule = AnnealingSchedule(
         s -> annealing_schedule.A(asch_func(s)),
-        s -> annealing_schedule.B(asch_func(s))
+        s -> annealing_schedule.B(asch_func(s)),
+        annealing_schedule.init_default,
+        hgs_func
     )
     return new_annealing_schedule
 end
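A short usage sketch of the new keyword (the base schedule and the hgs points are illustrative, not from the repository):

    # Toy base schedule, then ramp the h gain linearly from 0.5 at s = 0 to 2.0
    # at s = 1, leaving asch at its identity default.
    base = AnnealingSchedule(s -> 1 - s, s -> s)
    modified = annealing_protocol_dwave(base; hgs=[(0, 0.5), (1, 2.0)])

    # The returned schedule carries the piecewise-linear interpolation of hgs:
    modified.h_gain_schedule(0.0)   # 0.5
    modified.h_gain_schedule(0.5)   # 1.25
    modified.h_gain_schedule(1.0)   # 2.0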
39 changes: 28 additions & 11 deletions src/simulate.jl
@@ -101,7 +101,11 @@ function hamiltonian_transverse_ising(ising_model::Dict, annealing_schedule::Ann
     x_component = _sum_X(n)
     z_component = SparseArrays.spzeros(2^n, 2^n)
     for (tup,w) in ising_model
-        z_component += _kron_Z(n, tup, w)
+        if length(tup) == 1
+            z_component += annealing_schedule.h_gain_schedule(s) * _kron_Z(n, tup, w)
+        else
+            z_component += _kron_Z(n, tup, w)
+        end
     end

     return annealing_schedule.A(s) * x_component + annealing_schedule.B(s) * z_component
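The branch above scales only the single-spin (h) terms by the h gain; couplings are left untouched. A standalone sketch of that weighting on a toy two-spin model (names and values are illustrative):

    # h_gain stands in for annealing_schedule.h_gain_schedule(s) at the current s.
    ising_model = Dict((1,) => 0.5, (2,) => -0.3, (1, 2) => 1.0)
    h_gain = 2.0

    effective_weights = Dict(
        tup => (length(tup) == 1 ? h_gain * w : w) for (tup, w) in ising_model
    )
    # Dict((1,) => 1.0, (2,) => -0.6, (1, 2) => 1.0)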
@@ -134,12 +138,6 @@ function simulate_magnus_optimized(ising_model::Dict, annealing_time::Real, anne
     R0 = initial_state * initial_state'

     x_component = _sum_X(n)
-    z_component = SparseArrays.spzeros(2^n, 2^n)
-    for (tup,w) in ising_model
-        z_component = z_component + _kron_Z(n, tup, w)
-    end
-
-    H_parts = _H_parts(x_component, z_component, order)

     s_steps = range(0, 1, length=steps)
     R_current = R0
@@ -153,6 +151,19 @@
         s0 = s_steps[i]
         s1 = s_steps[i+1]

+        #modified to allow for h_gain_schedule. using mean h_gain schedule of step
+        #as an approximation to avoid having to reformulate magnus expansion.
+        z_component = SparseArrays.spzeros(2^n, 2^n)
+        mean_hgs = (annealing_schedule.h_gain_schedule(s0) + annealing_schedule.h_gain_schedule(s1))/2
+        for (tup,w) in ising_model
+            if length(tup) == 1
+                z_component = z_component + mean_hgs * _kron_Z(n, tup, w)
+            else
+                z_component = z_component + _kron_Z(n, tup, w)
+            end
+        end
+
+        H_parts = _H_parts(x_component, z_component, order)
         Ω_list = _Ω_list_optimized(annealing_time, s0, s1, annealing_schedule, H_parts, order)

         #for (i,Ωi) in enumerate(Ω_list)
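The added block treats the h gain as constant over each integration step, using the mean of its endpoint values, so the per-step Hamiltonian keeps the form the existing Magnus machinery expects. Isolated as a sketch (helper and names are illustrative):

    # Mean-of-endpoints gain for one integration step [s0, s1].
    step_gain(hgs, s0, s1) = (hgs(s0) + hgs(s1)) / 2

    # Example: a linear gain ramp from 1 to 2 over the anneal.
    hgs(s) = 1 + s
    step_gain(hgs, 0.4, 0.6)   # 1.5, applied to every single-spin term in this step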
@@ -431,10 +442,6 @@ function simulate_magnus_generic(ising_model::Dict, annealing_time::Real, anneal
     R0 = initial_state * initial_state'

     x_component = _sum_X(n)
-    z_component = SparseArrays.spzeros(2^n, 2^n)
-    for (tup,w) in ising_model
-        z_component = z_component + _kron_Z(n, tup, w)
-    end

     s_steps = range(0, 1, length=steps)
     R_current = R0
@@ -449,6 +456,16 @@
         s0 = s_steps[i]
         s1 = s_steps[i+1]

+        mean_hgs = (annealing_schedule.h_gain_schedule(s0) + annealing_schedule.h_gain_schedule(s1))/2
+        z_component = SparseArrays.spzeros(2^n, 2^n)
+        for (tup,w) in ising_model
+            if length(tup) == 1
+                z_component = z_component + mean_hgs * _kron_Z(n, tup, w)
+            else
+                z_component = z_component + _kron_Z(n, tup, w)
+            end
+        end
+
         aqc = _get_quadratic_coefficients(annealing_schedule.A, s0, s1)
         bqc = _get_quadratic_coefficients(annealing_schedule.B, s0, s1)

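For context on the aqc/bqc lines kept above: the helper name _get_quadratic_coefficients suggests a quadratic fit of each schedule function over [s0, s1]. A minimal three-point interpolation sketch under that assumption (not the package's actual implementation):

    using LinearAlgebra

    # Fit c0 + c1*s + c2*s^2 through f at s0, the midpoint, and s1.
    function quadratic_coefficients(f, s0, s1)
        sm = (s0 + s1) / 2
        V = [1 s0 s0^2; 1 sm sm^2; 1 s1 s1^2]
        return V \ [f(s0), f(sm), f(s1)]
    end

    quadratic_coefficients(s -> s^2, 0.0, 1.0)   # ≈ [0.0, 0.0, 1.0]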
