Showing 150 changed files with 19,414 additions and 266 deletions.
@@ -25,3 +25,5 @@ Manifest.toml

# Vi/Vim backup files
.*.swp

.DS_Store
@@ -0,0 +1,52 @@
using ITensors, ITensorMPS
using Zygote

s = siteind("Qubit")

f(x) = op("Ry", s; θ=x)[1, 1]

x = 0.2
@show f(x), cos(x / 2)
@show f'(x), -sin(x / 2) / 2

# Simple gate optimization
ψ0 = state(s, "0")
ψp = state(s, "+")

function loss(x)
  U = op("Ry", s; θ=x)
  Uψ0 = replaceprime(U * ψ0, 1 => 0)
  return -(dag(ψp) * Uψ0)[]
end

# Extremely simple gradient descent implementation,
# where gradients are computed with automatic differentiation
# using Zygote.
function gradient_descent(f, x0; γ, nsteps, grad_tol)
  @show γ, nsteps
  x = x0
  f_x = f(x)
  ∇f_x = f'(x)
  step = 0
  @show step, x, f_x, ∇f_x
  for step in 1:nsteps
    x -= γ * ∇f_x
    f_x = f(x)
    ∇f_x = f'(x)
    @show step, x, f_x, ∇f_x
    if norm(∇f_x) ≤ grad_tol
      break
    end
  end
  return x, f_x, ∇f_x
end

x0 = 0
γ = 2.0 # Learning rate
nsteps = 30 # Number of steps of gradient descent
grad_tol = 1e-4 # Stop if gradient falls below this value
x, loss_x, ∇loss_x = gradient_descent(loss, x0; γ=γ, nsteps=nsteps, grad_tol=grad_tol)

@show x0, loss(x0)
@show x, loss(x)
@show π / 2, loss(π / 2)
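A quick way to sanity-check the Zygote gradients driving the gradient descent above is to compare `loss'(x)` against a central finite difference. The sketch below is not part of the original example; the `finite_difference` helper, the test point `xtest`, and the step size `ϵ` are introduced here for illustration.

# Hypothetical cross-check (not in the original file): compare the Zygote
# gradient of `loss` against a central finite difference.
finite_difference(f, x; ϵ=1e-5) = (f(x + ϵ) - f(x - ϵ)) / (2ϵ)

xtest = 0.7
@show loss'(xtest), finite_difference(loss, xtest)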
65 changes: 65 additions & 0 deletions
65
examples/autodiff/circuit_optimization/state_preparation.jl
@@ -0,0 +1,65 @@
using ITensors, ITensorMPS
using OptimKit
using Random
using Zygote

nsites = 20 # Number of sites
nlayers = 3 # Layers of gates in the ansatz
gradtol = 1e-4 # Tolerance for stopping gradient descent

# A layer of the circuit we want to optimize
function layer(nsites, θ⃗)
  RY_layer = [("Ry", (n,), (θ=θ⃗[n],)) for n in 1:nsites]
  CX_layer = [("CX", (n, n + 1)) for n in 1:2:(nsites - 1)]
  return [RY_layer; CX_layer]
end

# The variational circuit we want to optimize
function variational_circuit(nsites, nlayers, θ⃗)
  range = 1:nsites
  circuit = layer(nsites, θ⃗[range])
  for n in 1:(nlayers - 1)
    circuit = [circuit; layer(nsites, θ⃗[range .+ n * nsites])]
  end
  return circuit
end

Random.seed!(1234)

θ⃗ᵗᵃʳᵍᵉᵗ = 2π * rand(nsites * nlayers)
𝒰ᵗᵃʳᵍᵉᵗ = variational_circuit(nsites, nlayers, θ⃗ᵗᵃʳᵍᵉᵗ)

s = siteinds("Qubit", nsites)
Uᵗᵃʳᵍᵉᵗ = ops(𝒰ᵗᵃʳᵍᵉᵗ, s)

ψ0 = MPS(s, "0")

# Create the random target state
ψᵗᵃʳᵍᵉᵗ = apply(Uᵗᵃʳᵍᵉᵗ, ψ0; cutoff=1e-8)

#
# The loss function, a function of the gate parameters
# and implicitly depending on the target state:
#
# loss(θ⃗) = -|⟨θ⃗ᵗᵃʳᵍᵉᵗ|U(θ⃗)|0⟩|² = -|⟨θ⃗ᵗᵃʳᵍᵉᵗ|θ⃗⟩|²
#
function loss(θ⃗)
  nsites = length(ψ0)
  s = siteinds(ψ0)
  𝒰θ⃗ = variational_circuit(nsites, nlayers, θ⃗)
  Uθ⃗ = ops(𝒰θ⃗, s)
  ψθ⃗ = apply(Uθ⃗, ψ0)
  return -abs(inner(ψᵗᵃʳᵍᵉᵗ, ψθ⃗))^2
end

θ⃗₀ = randn!(copy(θ⃗ᵗᵃʳᵍᵉᵗ))

@show loss(θ⃗₀), loss(θ⃗ᵗᵃʳᵍᵉᵗ)

loss_∇loss(x) = (loss(x), convert(Vector, loss'(x)))
algorithm = LBFGS(; gradtol=gradtol, verbosity=2)
θ⃗ₒₚₜ, lossₒₚₜ, ∇lossₒₚₜ, numfg, normgradhistory = optimize(loss_∇loss, θ⃗₀, algorithm)

@show loss(θ⃗ₒₚₜ), loss(θ⃗ᵗᵃʳᵍᵉᵗ)

nothing
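After the optimization it can be instructive to report the overlap with the target state directly rather than through the (negated) loss. The sketch below reuses the variables defined in the script above (`variational_circuit`, `nsites`, `nlayers`, `s`, `ψ0`, `ψᵗᵃʳᵍᵉᵗ`, `θ⃗₀`, `θ⃗ₒₚₜ`); the `fidelity` helper itself is introduced here for illustration and is not part of the original example.

# Hypothetical follow-up (not in the original file): report the state fidelity
# |⟨ψᵗᵃʳᵍᵉᵗ|U(θ⃗)|0⟩|² reached by the optimized parameters.
function fidelity(θ⃗)
  Uθ⃗ = ops(variational_circuit(nsites, nlayers, θ⃗), s)
  ψθ⃗ = apply(Uθ⃗, ψ0; cutoff=1e-8)
  return abs(inner(ψᵗᵃʳᵍᵉᵗ, ψθ⃗))^2
end

@show fidelity(θ⃗₀), fidelity(θ⃗ₒₚₜ)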
@@ -0,0 +1,81 @@
using ITensors, ITensorMPS
using OptimKit
using Random
using Zygote

nsites = 4 # Number of sites
nlayers = 2 # Layers of gates in the ansatz
gradtol = 1e-4 # Tolerance for stopping gradient descent

# The Hamiltonian we are minimizing
function ising_hamiltonian(nsites; h)
  ℋ = OpSum()
  for j in 1:(nsites - 1)
    ℋ -= 1, "Z", j, "Z", j + 1
  end
  for j in 1:nsites
    ℋ += h, "X", j
  end
  return ℋ
end

# A layer of the circuit we want to optimize
function layer(nsites, θ⃗)
  RY_layer = [("Ry", (n,), (θ=θ⃗[n],)) for n in 1:nsites]
  CX_layer = [("CX", (n, n + 1)) for n in 1:2:(nsites - 1)]
  return [RY_layer; CX_layer]
end

# The variational circuit we want to optimize
function variational_circuit(nsites, nlayers, θ⃗)
  range = 1:nsites
  circuit = layer(nsites, θ⃗[range])
  for n in 1:(nlayers - 1)
    circuit = [circuit; layer(nsites, θ⃗[range .+ n * nsites])]
  end
  return circuit
end

s = siteinds("Qubit", nsites)

h = 1.3
ℋ = ising_hamiltonian(nsites; h=h)
H = MPO(ℋ, s)
ψ0 = MPS(s, "0")

#
# The loss function, a function of the gate parameters
# and implicitly depending on the Hamiltonian and state:
#
# loss(θ⃗) = ⟨0|U(θ⃗)† H U(θ⃗)|0⟩ = ⟨θ⃗|H|θ⃗⟩
#
function loss(θ⃗)
  nsites = length(ψ0)
  s = siteinds(ψ0)
  𝒰θ⃗ = variational_circuit(nsites, nlayers, θ⃗)
  Uθ⃗ = ops(𝒰θ⃗, s)
  ψθ⃗ = apply(Uθ⃗, ψ0; cutoff=1e-8)
  return inner(ψθ⃗, H, ψθ⃗; cutoff=1e-8)
end

Random.seed!(1234)
θ⃗₀ = 2π * rand(nsites * nlayers)

@show loss(θ⃗₀)

println("\nOptimize circuit with gradient optimization")

loss_∇loss(x) = (loss(x), convert(Vector, loss'(x)))
algorithm = LBFGS(; gradtol=1e-3, verbosity=2)
θ⃗ₒₚₜ, lossₒₚₜ, ∇lossₒₚₜ, numfg, normgradhistory = optimize(loss_∇loss, θ⃗₀, algorithm)

@show loss(θ⃗ₒₚₜ)

println("\nRun DMRG as a comparison")

e_dmrg, ψ_dmrg = dmrg(H, ψ0; nsweeps=5, maxdim=10)

println("\nCompare variational circuit energy to DMRG energy")
@show loss(θ⃗ₒₚₜ), e_dmrg

nothing
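Because this example uses only 4 sites, the variational and DMRG energies can also be compared against exact diagonalization of the same transverse-field Ising Hamiltonian. The sketch below builds the dense 2^nsites matrix with plain Kronecker products instead of reusing the `OpSum` above, so the `ising_matrix` and `site_op` helpers are illustrative assumptions, not part of the original example.

using LinearAlgebra

# Hypothetical cross-check (not in the original file): exact ground-state
# energy of H = -Σⱼ ZⱼZⱼ₊₁ + h Σⱼ Xⱼ by dense diagonalization.
function ising_matrix(nsites; h)
  Z = [1.0 0.0; 0.0 -1.0]
  X = [0.0 1.0; 1.0 0.0]
  Id = Matrix(1.0I, 2, 2)
  # Embed a single-site operator at site i as a 2^nsites × 2^nsites matrix.
  site_op(op, i) = reduce(kron, [k == i ? op : Id for k in 1:nsites])
  Hmat = zeros(2^nsites, 2^nsites)
  for j in 1:(nsites - 1)
    Hmat -= site_op(Z, j) * site_op(Z, j + 1)
  end
  for j in 1:nsites
    Hmat += h * site_op(X, j)
  end
  return Hmat
end

e_exact = minimum(eigvals(Symmetric(ising_matrix(nsites; h=h))))
@show e_exact, e_dmrg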
@@ -0,0 +1,49 @@
using ITensors, ITensorMPS
using OptimKit
using Zygote

function ising(n; J, h)
  os = OpSum()
  for j in 1:(n - 1)
    os -= J, "Z", j, "Z", j + 1
  end
  for j in 1:n
    os -= h, "X", j
  end
  return os
end

function loss(H, ψ)
  n = length(ψ)
  ψHψ = ITensor(1.0)
  ψψ = ITensor(1.0)
  for j in 1:n
    ψHψ = ψHψ * dag(ψ[j]') * H[j] * ψ[j]
    ψψ = ψψ * replaceinds(dag(ψ[j]'), s[j]' => s[j]) * ψ[j]
  end
  return ψHψ[] / ψψ[]
end

n = 10
s = siteinds("S=1/2", n)
J = 1.0
h = 0.5

# Loss function only works with `Vector{ITensor}`,
# extract with `ITensors.data`.
ψ0 = ITensors.data(random_mps(s; linkdims=10))
H = ITensors.data(MPO(ising(n; J, h), s))

loss(ψ) = loss(H, ψ)

optimizer = LBFGS(; maxiter=25, verbosity=2)
function loss_and_grad(x)
  y, (∇,) = withgradient(loss, x)
  return y, ∇
end
ψ, fs, gs, niter, normgradhistory = optimize(loss_and_grad, ψ0, optimizer)
Edmrg, ψdmrg = dmrg(MPO(H), MPS(ψ0); nsweeps=10, cutoff=1e-8)

@show loss(ψ0), norm(loss'(ψ0))
@show loss(ψ), norm(loss'(ψ))
@show loss(ITensors.data(ψdmrg)), norm(loss'(ITensors.data(ψdmrg)))
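The optimizer in this example works on a plain `Vector{ITensor}`, so to compare the optimized state directly with the DMRG ground state it can be wrapped back into an `MPS` (the script already uses the `MPS(::Vector{ITensor})` constructor for `ψ0`). The lines below are a minimal follow-up sketch, not part of the original example; the normalization simply accounts for the variational state not being normalized.

# Hypothetical follow-up (not in the original file): rebuild an MPS from the
# optimized tensors and compare its overlap with the DMRG ground state.
ψ_mps = MPS(ψ)
overlap = abs(inner(ψ_mps, ψdmrg)) / (norm(ψ_mps) * norm(ψdmrg))
@show overlap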