
Commit

Directly call protected methods of the analysis instance instead of _run_analysis

Avoiding a direct call to _run_analysis solves the drawing issue, i.e. it returns control of the drawer to the composite instance. The implementation is somewhat redundant since many methods return NotImplemented, but this will be resolved by promoting CurveData to a dataframe, where data belonging to different groups can be managed in the same object. A new standard analysis, BlochTrajectoryAnalysis, is added as the basis of the CR Hamiltonian analysis, and it can also be used outside the context of the CR Hamiltonian.
nkanazawa1989 committed Jun 27, 2022
1 parent de42cd1 commit 27f24ef
Showing 17 changed files with 807 additions and 571 deletions.
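In short, rather than delegating to each component's _run_analysis (which would also create a per-component figure), the composite analysis calls the component's protected analysis steps itself and keeps ownership of a single drawer. A rough sketch of the pattern is below; it is not code from this commit. The protected method names (_initialize, _run_data_processing, _run_curve_fit, _create_analysis_results) are the ones appearing in the diff, while the class body, self._analyses, and the quality value are illustrative assumptions.

from qiskit_experiments.framework import BaseAnalysis

class CompositeCurveAnalysisSketch(BaseAnalysis):
    """Illustrative sketch of the composite pattern; not the actual implementation."""

    def _run_analysis(self, experiment_data):
        analysis_results = []
        if self.options.plot:
            # The composite owns the canvas; components no longer initialize it themselves.
            self.drawer.initialize_canvas()
        for analysis in self._analyses:
            # Drive each component through its protected steps instead of calling
            # analysis._run_analysis(), which would produce its own figure.
            analysis._initialize(experiment_data)
            curve_data = analysis._run_data_processing(
                raw_data=experiment_data.data(), models=analysis.models
            )
            # (data formatting and quality evaluation steps omitted for brevity)
            fit_data = analysis._run_curve_fit(curve_data=curve_data, models=analysis.models)
            analysis_results.extend(
                analysis._create_analysis_results(fit_data=fit_data, quality="good")
            )
        figures = [self.drawer.figure] if self.options.plot else []
        return analysis_results, figures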
6 changes: 4 additions & 2 deletions qiskit_experiments/curve_analysis/__init__.py
@@ -467,7 +467,7 @@ def _create_analysis_results(self, fit_data, quality, **metadata):
BaseCurveAnalysis
CurveAnalysis
MultiGroupCurveAnalysis
CompositeCurveAnalysis
Data Classes
============
@@ -497,6 +497,7 @@ def _create_analysis_results(self, fit_data, quality, **metadata):
:toctree: ../stubs/
:template: autosummary/analysis.rst
BlochTrajectoryAnalysis
DecayAnalysis
DumpedOscillationAnalysis
OscillationAnalysis
@@ -549,7 +550,7 @@ def _create_analysis_results(self, fit_data, quality, **metadata):
"""
from .base_curve_analysis import BaseCurveAnalysis
from .curve_analysis import CurveAnalysis
from .grouped_curve_analysis import MultiGroupCurveAnalysis
from .composite_curve_analysis import CompositeCurveAnalysis
from .curve_data import (
CurveData,
CurveFitResult,
@@ -577,6 +578,7 @@ def _create_analysis_results(self, fit_data, quality, **metadata):
ResonanceAnalysis,
GaussianAnalysis,
ErrorAmplificationAnalysis,
BlochTrajectoryAnalysis,
)

# deprecated
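With this reorganization, the renamed and newly added analyses are importable from the package top level. A minimal import sketch follows; only the names are taken from the diff above.

from qiskit_experiments.curve_analysis import (
    CompositeCurveAnalysis,   # replaces MultiGroupCurveAnalysis
    BlochTrajectoryAnalysis,  # newly added standard analysis
)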
55 changes: 34 additions & 21 deletions qiskit_experiments/curve_analysis/base_curve_analysis.py
@@ -106,6 +106,16 @@ class BaseCurveAnalysis(BaseAnalysis, ABC):
def parameters(self) -> List[str]:
"""Return parameters estimated by this analysis."""

@property
@abstractmethod
def name(self) -> str:
"""Return name of this analysis."""

@property
@abstractmethod
def models(self) -> List[lmfit.Model]:
"""Return fit models."""

@property
def drawer(self) -> BaseCurveDrawer:
"""A short-cut for curve drawer instance."""
@@ -155,6 +165,10 @@ def _default_options(cls) -> Options:
fixed_parameters (Dict[str, Any]): Fitting model parameters that are fixed
during the curve fitting. This should be provided with default value
keyed on one of the parameter names in the series definition.
filter_data (Dict[str, Any]): Dictionary of experiment data metadata to filter.
Experiment outcomes with metadata that matches with this dictionary
are used in the analysis. If not specified, all experiment data are
input to the curve fitter. By default no filtering condition is set.
"""
options = super()._default_options()

@@ -173,6 +187,7 @@ def _default_options(cls) -> Options:
options.p0 = {}
options.bounds = {}
options.fixed_parameters = {}
options.filter_data = {}

# Set automatic validator for particular option values
options.set_validator(field="data_processor", validator_value=DataProcessor)
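The new filter_data option is set like any other analysis option. A usage sketch follows; the metadata key and value are illustrative assumptions, not taken from this diff.

from qiskit_experiments.curve_analysis import OscillationAnalysis

analysis = OscillationAnalysis()
# Keep only outcomes whose circuit metadata contains control_state == 0;
# everything else is dropped before data processing and fitting.
analysis.set_options(filter_data={"control_state": 0})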
@@ -339,23 +354,31 @@ def _run_data_processing(
data sorting option is not provided.
DataProcessorError: When key for x values is not found in the metadata.
"""

def _matched(metadata, **filters):
try:
return all(metadata[key] == val for key, val in filters.items())
except KeyError:
return False

if not self.options.filter_data:
analyzed_data = raw_data
else:
analyzed_data = [
d for d in raw_data if _matched(d["metadata"], **self.options.filter_data)
]

x_key = self.options.x_key

try:
xdata = np.asarray([datum["metadata"][x_key] for datum in raw_data], dtype=float)
xdata = np.asarray([datum["metadata"][x_key] for datum in analyzed_data], dtype=float)
except KeyError as ex:
raise DataProcessorError(
f"X value key {x_key} is not defined in circuit metadata."
) from ex

ydata = self.options.data_processor(raw_data)
shots = np.asarray([datum.get("shots", np.nan) for datum in raw_data])

def _matched(metadata, **filters):
try:
return all(metadata[key] == val for key, val in filters.items())
except KeyError:
return False
ydata = self.options.data_processor(analyzed_data)
shots = np.asarray([datum.get("shots", np.nan) for datum in analyzed_data])

if len(models) == 1:
# all data belongs to the single model
@@ -372,7 +395,7 @@ def _matched(metadata, **filters):
if tags is None:
continue
matched_inds = np.asarray(
[_matched(d["metadata"], **tags) for d in raw_data], dtype=bool
[_matched(d["metadata"], **tags) for d in analyzed_data], dtype=bool
)
data_allocation[matched_inds] = idx

@@ -389,16 +412,13 @@ def _run_curve_fit(
self,
curve_data: CurveData,
models: List[lmfit.Model],
init_guesses: Dict[str, float],
) -> CurveFitResult:
"""Perform curve fitting on given data collection and fit models.
Args:
curve_data: Formatted data to fit.
models: A list of LMFIT models that are used to build a cost function
for the LMFIT minimizer.
init_guesses: Dictionary of fit parameter initial guesses keyed on the
fit parameter name.
Returns:
The best fitting outcome with minimum reduced chi-squared value.
@@ -420,7 +440,7 @@ def _matched(metadata, **filters):

default_fit_opt = FitOptions(
parameters=unite_parameter_names,
default_p0=init_guesses,
default_p0=self.options.p0,
default_bounds=self.options.bounds,
**self.options.lmfit_options,
)
@@ -563,9 +583,6 @@ def _create_curve_data(
"""
samples = []

if not self.options.return_data_points:
return samples

for model in models:
sub_data = curve_data.get_subset_of(model._name)
raw_datum = AnalysisResultData(
@@ -595,10 +612,6 @@ def _initialize(
Args:
experiment_data: Experiment data to analyze.
"""
# Initialize canvas
if self.options.plot:
self.drawer.initialize_canvas()

# Initialize data processor
# TODO move this to base analysis in follow-up
data_processor = self.options.data_processor or get_processor(experiment_data, self.options)
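Since _run_curve_fit now reads initial guesses from the analysis options rather than a dedicated init_guesses argument, callers configure them through set_options together with the bounds. A small sketch, with illustrative values:

from qiskit_experiments.curve_analysis import DecayAnalysis

analysis = DecayAnalysis()
# Initial guesses and bounds flow through the standard options (p0, bounds)
# instead of an init_guesses argument to _run_curve_fit. Values are examples.
analysis.set_options(
    p0={"amp": 0.5, "tau": 1.0e-6, "base": 0.5},
    bounds={"amp": (0.0, 1.0), "base": (0.0, 1.0)},
)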

