From 07345545e71dab88c4eeb937c0abeb1984937f91 Mon Sep 17 00:00:00 2001
From: GiggleLiu
Date: Mon, 28 Oct 2019 15:03:51 +0800
Subject: [PATCH 1/2] new, port zygote

---
 examples/PortZygote/gate_learning.jl  | 31 ++++++++++++++++++++
 examples/PortZygote/simple_example.jl | 17 +++++++++++
 examples/PortZygote/zygote_patch.jl   | 42 +++++++++++++++++++++++++++
 3 files changed, 90 insertions(+)
 create mode 100644 examples/PortZygote/gate_learning.jl
 create mode 100644 examples/PortZygote/simple_example.jl
 create mode 100644 examples/PortZygote/zygote_patch.jl

diff --git a/examples/PortZygote/gate_learning.jl b/examples/PortZygote/gate_learning.jl
new file mode 100644
index 0000000..5a48d2d
--- /dev/null
+++ b/examples/PortZygote/gate_learning.jl
@@ -0,0 +1,31 @@
+using YaoExtensions, Yao
+using Test, Random
+using QuAlgorithmZoo: Adam, update!
+
+include("zygote_patch.jl")
+
+function loss(u, ansatz)
+    m = Matrix(ansatz)
+    sum(abs.(u .- m))
+end
+
+function learn_su4(u::AbstractMatrix; optimizer=Adam(lr=0.1), niter=100)
+    ansatz = general_U4() * put(2, 1=>phase(0.0)) # initial parameters are zero; attach a global phase so a generic unitary can be matched
+    params = parameters(ansatz)
+    for i = 1:niter
+        println("Step = $i, loss = $(loss(u, ansatz))")
+        grad = gradient(ansatz->loss(u, ansatz), ansatz)[1]
+        update!(params, grad, optimizer)
+        dispatch!(ansatz, params)
+    end
+    return ansatz
+end
+
+using Random
+Random.seed!(2)
+u = rand_unitary(4)
+using LinearAlgebra
+#u[:,1] .*= -conj(det(u))
+#@show det(u)
+c = learn_su4(u; optimizer=Adam(lr=0.005))
+det(mat(c))
diff --git a/examples/PortZygote/simple_example.jl b/examples/PortZygote/simple_example.jl
new file mode 100644
index 0000000..ae10024
--- /dev/null
+++ b/examples/PortZygote/simple_example.jl
@@ -0,0 +1,17 @@
+include("zygote_patch.jl") # also loads Zygote, which provides `gradient`
+
+import YaoExtensions, Random
+
+c = YaoExtensions.variational_circuit(5)
+dispatch!(c, :random)
+
+function loss(reg::AbstractRegister, circuit::AbstractBlock{N}) where N
+    # equivalently: reg = copy(reg) |> circuit
+    reg = apply!(copy(reg), circuit)
+    st = state(reg)
+    sum(real(st .* st)) # a toy differentiable scalar; use abs2.(st) for probabilities
+end
+
+reg0 = zero_state(5)
+paramsδ = gradient(c->loss(reg0, c), c)[1]  # gradient w.r.t. circuit parameters
+regδ = gradient(reg->loss(reg, c), reg0)[1] # gradient w.r.t. the input register
diff --git a/examples/PortZygote/zygote_patch.jl b/examples/PortZygote/zygote_patch.jl
new file mode 100644
index 0000000..f577146
--- /dev/null
+++ b/examples/PortZygote/zygote_patch.jl
@@ -0,0 +1,42 @@
+using Zygote
+using Zygote: @adjoint
+using Yao, Yao.AD
+
+@adjoint function apply!(reg::ArrayReg, block::AbstractBlock) # backprop through circuit application via Yao's apply_back
+    out = apply!(reg, block)
+    out, function (outδ)
+        (_, inδ), paramsδ = apply_back((out, outδ), block) # input adjoint and parameter adjoints
+        return (inδ, paramsδ)
+    end
+end
+
+@adjoint function Matrix(block::AbstractBlock) # backprop from a block's matrix to its parameters via mat_back
+    out = Matrix(block)
+    out, function (outδ)
+        paramsδ = mat_back(block, outδ)
+        return (paramsδ,)
+    end
+end
+
+@adjoint function ArrayReg{B}(raw::AbstractArray) where B
+    ArrayReg{B}(raw), adjy->(reshape(adjy.state, size(raw)),)
+end
+
+@adjoint function ArrayReg{B}(raw::ArrayReg) where B
+    ArrayReg{B}(raw), adjy->(adjy,)
+end
+
+@adjoint function ArrayReg(raw::AbstractArray)
+    ArrayReg(raw), adjy->(reshape(adjy.state, size(raw)),)
+end
+
+@adjoint function copy(reg::ArrayReg)
+    copy(reg), adjy->(adjy,)
+end
+
+@adjoint state(reg::ArrayReg) = state(reg), adjy->(ArrayReg(adjy),)
+@adjoint statevec(reg::ArrayReg) = statevec(reg), adjy->(ArrayReg(adjy),)
+@adjoint state(reg::AdjointArrayReg) = state(reg), adjy->(ArrayReg(adjy')',)
+@adjoint statevec(reg::AdjointArrayReg) = statevec(reg), adjy->(ArrayReg(adjy')',)
+@adjoint parent(reg::AdjointArrayReg) = parent(reg), adjy->(adjy',)
+@adjoint Base.adjoint(reg::ArrayReg) = Base.adjoint(reg), adjy->(parent(adjy),)
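
A quick way to exercise the adjoints from zygote_patch.jl above is to compare the Zygote gradient of the `Matrix`-based loss against a central finite difference. The sketch below is illustrative only, assuming the packages used in the patch are installed; the `fd` helper, the parameter index `i`, and the tolerance are made up for this check and are not part of the patch.

    # Sanity check: Zygote gradient (via the Matrix adjoint) vs. finite differences.
    # Assumes zygote_patch.jl from PATCH 1/2 sits in the working directory.
    using Yao, YaoExtensions, Zygote
    include("zygote_patch.jl")

    target = rand_unitary(4)
    ansatz = general_U4()
    l(c) = sum(abs.(target .- Matrix(c)))

    g = gradient(l, ansatz)[1]        # gradient w.r.t. all circuit parameters

    # fd: central finite difference w.r.t. the i-th parameter (illustrative helper).
    function fd(l, c; i=1, ϵ=1e-5)
        p = parameters(c)
        p[i] += ϵ;  fp = l(dispatch!(c, p))
        p[i] -= 2ϵ; fm = l(dispatch!(c, p))
        p[i] += ϵ;  dispatch!(c, p)   # restore the original parameters
        (fp - fm) / 2ϵ
    end

    @assert isapprox(g[1], fd(l, ansatz); atol=1e-4)
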
From 07ff8be02f4b3168608dfe4fa2f3b58820cc3092 Mon Sep 17 00:00:00 2001
From: GiggleLiu
Date: Mon, 28 Oct 2019 15:43:55 +0800
Subject: [PATCH 2/2] switch to LBFGS

---
 examples/PortZygote/gate_learning.jl | 26 +++++++++++++-------------
 1 file changed, 13 insertions(+), 13 deletions(-)

diff --git a/examples/PortZygote/gate_learning.jl b/examples/PortZygote/gate_learning.jl
index 5a48d2d..ec4e9d5 100644
--- a/examples/PortZygote/gate_learning.jl
+++ b/examples/PortZygote/gate_learning.jl
@@ -1,7 +1,8 @@
 using YaoExtensions, Yao
 using Test, Random
-using QuAlgorithmZoo: Adam, update!
+using Optim: LBFGS, optimize, Options
 
+# port Yao's builtin AD (including the `Matrix` adjoint) to Zygote.
 include("zygote_patch.jl")
 
 function loss(u, ansatz)
@@ -8,24 +9,23 @@ function loss(u, ansatz)
     m = Matrix(ansatz)
     sum(abs.(u .- m))
 end
 
-function learn_su4(u::AbstractMatrix; optimizer=Adam(lr=0.1), niter=100)
+"""
+    learn_u4(u::AbstractMatrix; niter=100)
+
+Learn a general U4 gate. The optimizer is LBFGS.
+"""
+function learn_u4(u::AbstractMatrix; niter=100)
     ansatz = general_U4() * put(2, 1=>phase(0.0)) # initial parameters are zero; attach a global phase so a generic unitary can be matched
     params = parameters(ansatz)
-    for i = 1:niter
-        println("Step = $i, loss = $(loss(u, ansatz))")
-        grad = gradient(ansatz->loss(u, ansatz), ansatz)[1]
-        update!(params, grad, optimizer)
-        dispatch!(ansatz, params)
-    end
+    g!(G, x) = (dispatch!(ansatz, x); G .= gradient(ansatz->loss(u, ansatz), ansatz)[1])
+    res = optimize(x->(dispatch!(ansatz, x); loss(u, ansatz)), g!, parameters(ansatz),
+                   LBFGS(), Options(iterations=niter))
+    println("final loss = $(loss(u, dispatch!(ansatz, res.minimizer)))")
     return ansatz
 end
 
 using Random
 Random.seed!(2)
 u = rand_unitary(4)
-using LinearAlgebra
-#u[:,1] .*= -conj(det(u))
-#@show det(u)
-c = learn_su4(u; optimizer=Adam(lr=0.005))
-det(mat(c))
+c = learn_u4(u)
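
For reference, the `optimize(f, g!, x0, LBFGS(), Options(...))` call introduced in PATCH 2/2 follows Optim.jl's in-place gradient convention: `g!` must write the gradient into its first argument, which is why both closures first `dispatch!` the candidate parameters into the ansatz before evaluating. A minimal standalone illustration of the same calling convention, with a made-up quadratic objective:

    # Optim.jl's in-place gradient convention, as used in gate_learning.jl.
    # The quadratic objective below is a made-up example for illustration only.
    using Optim

    f(x)     = sum(abs2, x .- 1)       # minimum at x = [1, 1, 1]
    g!(G, x) = (G .= 2 .* (x .- 1))    # write the gradient into G in place

    res = optimize(f, g!, zeros(3), LBFGS(), Optim.Options(iterations=100))
    @show Optim.minimizer(res)         # ≈ [1.0, 1.0, 1.0]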