Merged

Commits (62)
7d3605f
Backend timing helper mixin for experiments
wshanks Jul 27, 2022
f527788
Docstrings
wshanks Jul 27, 2022
c34b855
Update qiskit_experiments/framework/backend_timing_mixin.py
wshanks Jul 27, 2022
da530e8
Remove unnecessary check
wshanks Jul 29, 2022
9d736ec
Always use lcm for delay duration calculation
wshanks Aug 17, 2022
247f93e
Add some safety checks on pulse duration
wshanks Aug 17, 2022
e36625a
Merge remote-tracking branch 'upstream/main' into backend-timing
wshanks Aug 17, 2022
56898b5
Switch from Backend to BackendData
wshanks Aug 19, 2022
617a92a
Cache backend_data
wshanks Aug 19, 2022
6781df2
Update docstring
wshanks Aug 19, 2022
134e707
Refactor lcm
wshanks Aug 19, 2022
cf93cef
Change method names for clarity
wshanks Aug 21, 2022
7111e7d
wip
wshanks Aug 31, 2022
68d1d2d
BackendTiming examples
wshanks Sep 15, 2022
0191ee5
Rename backend_timing_mixin.py
wshanks Sep 15, 2022
9bb8dd8
Put BackendTiming in framework namespace and docs
wshanks Sep 15, 2022
a226cd5
Merge remote-tracking branch 'upstream/main' into backend-timing
wshanks Sep 15, 2022
d483881
Work with backend instead of experiment in BackendTiming
wshanks Sep 15, 2022
5909e43
BackendTiming doc strings
wshanks Sep 15, 2022
43cfaba
Update qiskit_experiments/framework/backend_timing.py
wshanks Sep 16, 2022
7c7a671
Update qiskit_experiments/framework/backend_timing.py
wshanks Sep 16, 2022
3c11b2c
Add round_pulse_samples method
wshanks Sep 16, 2022
847397a
Expose dt
wshanks Sep 16, 2022
ed7cb85
Reword docs about samples and seconds as input
wshanks Sep 16, 2022
d8a33d5
Add backend_timing tests
wshanks Sep 19, 2022
64ed0e9
Clean up documentation
wshanks Sep 19, 2022
28db76d
ZZRamsey experiment
wshanks Sep 19, 2022
d793ab0
removed timing class then imported module BackendTiming before replac…
thaddeus-pellegrini Sep 19, 2022
68b7fb2
Remove leftover code and imports
wshanks Sep 19, 2022
e1f9cfd
Handle backends without dt
wshanks Sep 19, 2022
910af47
Add ZZRamsey to standard import / docs paths
wshanks Sep 19, 2022
414bcb8
Simple ZZ tests
wshanks Sep 19, 2022
12fea61
Merge remote-tracking branch 'upstream/main' into zz_phase
wshanks Sep 20, 2022
d531cc1
Refactor delay and samples methods
wshanks Sep 20, 2022
81a5f06
black
wshanks Sep 20, 2022
abeef11
Merge branch 'main' into backend-timing
wshanks Sep 20, 2022
51f3d0f
Refactor ZZ tests
wshanks Sep 20, 2022
f7ff5f2
Merge remote-tracking branch 'origin/backend-timing' into zz_phase
wshanks Sep 20, 2022
877c00f
Improve tests
wshanks Sep 20, 2022
b6a2523
lint
wshanks Sep 21, 2022
1143824
Fix typo in docstring
wshanks Sep 21, 2022
b5c9438
ZZ documentation updates
wshanks Sep 21, 2022
0696bdf
Update wording about fit parameters
wshanks Oct 11, 2022
d996be8
Add release note
wshanks Oct 11, 2022
76d0bf5
Merge remote-tracking branch 'upstream/main' into zz_phase
wshanks Oct 11, 2022
6ebce2d
Update for changes to plotter code
wshanks Oct 11, 2022
5e29f92
Align ZZ model and documentation
wshanks Oct 12, 2022
da8aeb5
Use helper function to avoid edge cases
wshanks Oct 12, 2022
5d5835e
Change property to method
wshanks Oct 12, 2022
c787899
Reduce circuit metadata
wshanks Oct 12, 2022
a223230
Rework parametrized circuit method
wshanks Oct 13, 2022
896e548
Remove the word "fake"
wshanks Oct 13, 2022
e338717
Rename zz_rotations to num_rotations
wshanks Oct 13, 2022
c10c528
Merge remote-tracking branch 'upstream/main' into zz_phase
wshanks Oct 13, 2022
18cc892
Reword virtual frequency descriptions
wshanks Oct 13, 2022
1de188b
black
wshanks Oct 13, 2022
ab773ee
Merge remote-tracking branch 'upstream/main' into zz_phase
wshanks Oct 13, 2022
e31347c
Fix docstring formatting
wshanks Oct 13, 2022
63f8ed6
fix
wshanks Oct 14, 2022
e472fb2
Correct description of freq fit parameter
wshanks Oct 17, 2022
48dcb8d
Merge remote-tracking branch 'upstream/main' into zz_phase
wshanks Oct 17, 2022
8cf764d
Merge branch 'main' into zz_phase
wshanks Oct 18, 2022
2 changes: 2 additions & 0 deletions qiskit_experiments/library/__init__.py
@@ -76,6 +76,7 @@
~characterization.LocalReadoutError
~characterization.CorrelatedReadoutError
~characterization.ResonatorSpectroscopy
~characterization.ZZRamsey


.. _calibration:
@@ -148,6 +149,7 @@ class instance to manage parameters and pulse schedules.
ResonatorSpectroscopy,
LocalReadoutError,
CorrelatedReadoutError,
ZZRamsey,
)
from .randomized_benchmarking import StandardRB, InterleavedRB
from .tomography import StateTomography, ProcessTomography
4 changes: 4 additions & 0 deletions qiskit_experiments/library/characterization/__init__.py
@@ -47,6 +47,7 @@
LocalReadoutError
CorrelatedReadoutError
ResonatorSpectroscopy
ZZRamsey


Analysis
@@ -71,6 +72,7 @@
ResonatorSpectroscopyAnalysis
LocalReadoutErrorAnalysis
CorrelatedReadoutErrorAnalysis
ZZRamseyAnalysis

"""

@@ -90,6 +92,7 @@
ResonatorSpectroscopyAnalysis,
LocalReadoutErrorAnalysis,
CorrelatedReadoutErrorAnalysis,
ZZRamseyAnalysis,
)

from .t1 import T1
@@ -110,3 +113,4 @@
from .local_readout_error import LocalReadoutError
from .correlated_readout_error import CorrelatedReadoutError
from .resonator_spectroscopy import ResonatorSpectroscopy
from .zz_ramsey import ZZRamsey
1 change: 1 addition & 0 deletions qiskit_experiments/library/characterization/analysis/__init__.py
Expand Up @@ -28,3 +28,4 @@
from .local_readout_error_analysis import LocalReadoutErrorAnalysis
from .correlated_readout_error_analysis import CorrelatedReadoutErrorAnalysis
from .resonator_spectroscopy_analysis import ResonatorSpectroscopyAnalysis
from .zz_ramsey_analysis import ZZRamseyAnalysis
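Taken together, the three __init__.py changes above register the new classes on the standard import paths. A minimal sanity check, assuming an environment with this branch installed:

# Both of the registered import paths should now resolve
from qiskit_experiments.library import ZZRamsey
from qiskit_experiments.library.characterization import ZZRamseyAnalysis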
248 changes: 248 additions & 0 deletions qiskit_experiments/library/characterization/analysis/zz_ramsey_analysis.py
@@ -0,0 +1,248 @@
# This code is part of Qiskit.
#
# (C) Copyright IBM 2022.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
Analysis class for ZZ Ramsey experiment
"""

from typing import List, Tuple, Union

import lmfit
import numpy as np

from qiskit.providers.options import Options

import qiskit_experiments.curve_analysis as curve
from qiskit_experiments.curve_analysis import CurveAnalysis, CurveData, CurveFitResult, FitOptions
from qiskit_experiments.curve_analysis.utils import is_error_not_significant


class ZZRamseyAnalysis(CurveAnalysis):
# Disable long line check because we can't break the long math lines
# pylint: disable=line-too-long
r"""A class to analyze a :math:`ZZ` Ramsey experiment.

# section: fit_model

Analyze a :math:`ZZ` Ramsey experiment by fitting the :code:`'0'` and
:code:`'1'` series to sinusoidal functions as defined in the
:class:`ZZRamsey` experiment. The two functions share the frequency,
amplitude, decay constant, baseline, and phase parameters.

.. math::

y_0 = - {\rm amp} \cdot e^{-x/\tau} \cos\left(2 \pi \cdot \left({\rm freq} - {\rm zz}/2\right) \cdot x + {\rm phase}\right) + {\rm base} \\

y_1 = - {\rm amp} \cdot e^{-x/\tau} \cos\left(2 \pi \cdot \left({\rm freq} + {\rm zz}/2\right) \cdot x + {\rm phase}\right) + {\rm base}

:math:`freq` is the same as the virtual frequency :math:`f` mentioned
in :class:`ZZRamsey`.

# section: fit_parameters

defpar \rm amp:
desc: Amplitude of the sinusoidal curves.
init_guess: Half of the peak to peak range of the y values.
bounds: [0, the peak to peak range of the data]
defpar \tau:
desc: The exponential decay of the curve amplitudes.
init_guess: Inferred by comparing the peak to peak amplitude for
longer delay values with that of shorter delay values and
assuming an exponential decay in amplitude.
bounds: [1/4 of the typical time spacing,
10 times the maximum delay time].
defpar \rm base:
desc: Baseline of both series.
init_guess: The average of the data, excluding outliers.
bounds: [the minimum y value less the peak to peak of the data,
the maximum y value plus the peak to peak of the data]
defpar \rm freq:
desc: Average frequency of both series.
init_guess: The average of the frequencies with the highest power
spectral density for each series.
bounds: [0, the Nyquist frequency of the data].
defpar \rm zz:
desc: The :math:`ZZ` value for the qubit pair. In terms of the fit,
this is the frequency difference between series 1 and series 0.
init_guess: The difference between the frequencies with the highest
power spectral density for each series.
bounds: [-inf, inf].
defpar \rm phase:
desc: Common phase offset.
init_guess: Zero.
bounds: [-pi, pi].
"""
# pylint: enable=line-too-long

def __init__(self):
super().__init__(
models=[
lmfit.models.ExpressionModel(
expr="-amp * exp(-x / tau) * cos(2 * pi * (freq - zz / 2) * x + phase) + base",
name="0",
data_sort_key={"series": "0"},
),
lmfit.models.ExpressionModel(
expr="-amp * exp(-x / tau) * cos(2 * pi * (freq + zz / 2) * x + phase) + base",
name="1",
data_sort_key={"series": "1"},
),
]
)

@classmethod
def _default_options(cls) -> Options:
"""Return the default analysis options.

See
:meth:`~qiskit_experiments.curve_analysis.CurveAnalysis._default_options`
for descriptions of analysis options.
"""
default_options = super()._default_options()
default_options.result_parameters = ["zz"]
default_options.plotter.set_figure_options(
xlabel="Delay",
xval_unit="s",
ylabel="P(1)",
)

return default_options

def _generate_fit_guesses(
self,
user_opt: FitOptions,
curve_data: CurveData,
) -> Union[FitOptions, List[FitOptions]]:
"""Compute the initial guesses.

Args:
user_opt: Fit options filled with user provided guess and bounds.
curve_data: Preprocessed data to be fit.

Returns:
List of fit options that are passed to the fitter function.
"""
y_max = np.max(curve_data.y)
y_min = np.min(curve_data.y)
y_ptp = y_max - y_min
x_max = np.max(curve_data.x)

data_0 = curve_data.get_subset_of("0")
data_1 = curve_data.get_subset_of("1")

def typical_step(arr):
"""Find the typical step size of an array"""
steps = np.diff(np.sort(arr))
# If points are not unique, there will be 0's that don't count as
# steps
steps = steps[steps != 0]
return np.median(steps)

x_step = max(typical_step(data_0.x), typical_step(data_1.x))

user_opt.bounds.set_if_empty(
amp=(0, y_max - y_min),
tau=(x_step / 4, 10 * x_max),
base=(y_min - y_ptp, y_max + y_ptp),
phase=(-np.pi, np.pi),
freq=(0, 1 / 2 / x_step),
)

freq_guesses = [
curve.guess.frequency(data_0.x, data_0.y),
curve.guess.frequency(data_1.x, data_1.y),
]
base_guesses = [
curve.guess.constant_sinusoidal_offset(data_0.y),
curve.guess.constant_sinusoidal_offset(data_1.y),
]

def rough_sinusoidal_decay_constant(
x_data: np.ndarray, y_data: np.ndarray, bounds: Tuple[float, float]
) -> float:
"""Estimate the decay constant of y_data vs x_data

This function assumes the data is roughly evenly spaced and that
the y_data goes through a few periods so that the peak to peak
value early in the data can be compared to the peak to peak later
in the data to estimate the decay constant.

Args:
x_data: x-axis data
y_data: y-axis data
bounds: minimum and maximum allowed decay constant

Returns:
The bounded guess of the decay constant
"""
x_median = np.median(x_data)
i_left = x_data < x_median
i_right = x_data > x_median

y_left = np.ptp(y_data[i_left])
y_right = np.ptp(y_data[i_right])
x_left = np.average(x_data[i_left])
x_right = np.average(x_data[i_right])

# Treat the peak to peak values as proportional to exp(-x / tau) and
# solve y_right / y_left = exp(-(x_right - x_left) / tau) for tau
denom = np.log(y_right / y_left)
if denom < 0:
tau = (x_left - x_right) / denom
else:
# If amplitude is constant or growing from left to right, bound
# to the maximum allowed tau
tau = bounds[1]

return max(min(tau, bounds[1]), bounds[0])

user_opt.p0.set_if_empty(
tau=rough_sinusoidal_decay_constant(curve_data.x, curve_data.y, user_opt.bounds["tau"]),
amp=y_ptp / 2,
phase=0.0,
freq=float(np.average(freq_guesses)),
base=np.average(base_guesses),
zz=freq_guesses[1] - freq_guesses[0],
)

return user_opt

def _evaluate_quality(self, fit_data: CurveFitResult) -> Union[str, None]:
"""Algorithmic criteria for whether the fit is good or bad.

A good fit has:
- an error on the amplitude smaller than 20% of the amplitude
- an error on the baseline smaller than 20% of the amplitude
- errors on the frequency and on zz smaller than 20% of the inverse
of the scanned delay range

Args:
fit_data: The fit result of the analysis

Returns:
The automated fit quality assessment as a string
"""
freq = fit_data.ufloat_params["freq"]
zz = fit_data.ufloat_params["zz"]
amp = fit_data.ufloat_params["amp"]
base = fit_data.ufloat_params["base"]

rough_freq_magnitude = 1 / (fit_data.x_range[1] - fit_data.x_range[0])

criteria = [
is_error_not_significant(amp, fraction=0.2),
is_error_not_significant(base, absolute=0.2 * amp.nominal_value),
is_error_not_significant(freq, absolute=0.2 * rough_freq_magnitude),
is_error_not_significant(zz, absolute=0.2 * rough_freq_magnitude),
]

if all(criteria):
return "good"

return "bad"
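As a quick orientation for reviewers, here is a minimal usage sketch of the new experiment. The backend object and the exact constructor arguments are assumptions for illustration (following the usual qiskit-experiments conventions) and are not part of this diff:

from qiskit_experiments.library import ZZRamsey

# "backend" is assumed to be an available backend with timing (dt) information
exp = ZZRamsey((0, 1), backend=backend)
exp_data = exp.run().block_for_results()

# ZZRamseyAnalysis reports the fitted frequency difference as the "zz" result
print(exp_data.analysis_results("zz"))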