Skip to content

Commit

Permalink
Merge branch 'master' into Tektronix_5014_performance
Browse files Browse the repository at this point in the history
  • Loading branch information
jenshnielsen committed Apr 21, 2021
2 parents 887e25d + 5a2ea57 commit 8c16444
Show file tree
Hide file tree
Showing 16 changed files with 119 additions and 62 deletions.
1 change: 1 addition & 0 deletions .github/static/environment_forge_full.yml
Expand Up @@ -27,6 +27,7 @@ dependencies:
- packaging>=20.0
- ipywidgets>=7.5.0
- broadbean>=0.9.1
- uncertainties>=3.0.2
# test requirements from setup.cfg
- deepdiff>=5.0.2
- hypothesis>=5.49.0
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/docs.yaml
Expand Up @@ -42,7 +42,7 @@ jobs:
run: sudo apt install pandoc
if: runner.os == 'Linux'
- name: Install pandoc on windows
uses: crazy-max/ghaction-chocolatey@v1.4.1
uses: crazy-max/ghaction-chocolatey@v1.4.2
with:
args: install pandoc
if: runner.os == 'Windows'
Expand Down
2 changes: 2 additions & 0 deletions docs/changes/0.25.0.rst
Expand Up @@ -10,6 +10,8 @@ Breaking Changes:
- `get_data_by_id` no longer returns data for standalone parameters
if there are no dependent parameters in the dataset. For the same reason
`plot_by_id` and `plot_dataset` will no longer plot these standalone parameters.
- `xarray` exported data no longer exports extra metadata columns under the `extra_metadata` tag
since the extra dictionary level prevents exporting to `netcdf`


------------
Expand Down
2 changes: 1 addition & 1 deletion docs_requirements.txt
Expand Up @@ -15,7 +15,7 @@ docutils==0.16
entrypoints==0.3
idna==2.10
imagesize==1.2.0
importlib-metadata==3.10.0;python_version<'3.8'
importlib-metadata==4.0.0;python_version<'3.8'
ipykernel==5.5.3
ipython==7.22.0
ipython-genutils==0.2.0
Expand Down
8 changes: 3 additions & 5 deletions qcodes/dataset/exporters/export_to_xarray.py
Expand Up @@ -73,11 +73,9 @@ def _add_metadata_to_xarray(
if dataset.completed_timestamp_raw is not None:
xrdataset.attrs[
"completed_timestamp_raw"] = dataset.completed_timestamp_raw
if len(dataset._metadata) > 0:
xrdataset.attrs['extra_metadata'] = {}

for metadata_tag, metadata in dataset._metadata.items():
xrdataset.attrs['extra_metadata'][metadata_tag] = metadata
if len(dataset.metadata) > 0:
for metadata_tag, metadata in dataset.metadata.items():
xrdataset.attrs[metadata_tag] = metadata


def load_to_xarray_dataset(dataset: DataSet, data: ParameterData) -> xr.Dataset:
Expand Down
11 changes: 11 additions & 0 deletions qcodes/instrument_drivers/Keysight/Keysight_34411A_submodules.py
@@ -0,0 +1,11 @@
from typing import Any
from .private.Keysight_344xxA_submodules import _Keysight_344xxA


class Keysight_34411A(_Keysight_344xxA):
    """
    QCoDeS driver for the Keysight 34411A digital multimeter.

    All functionality lives in the shared ``_Keysight_344xxA`` base class;
    this subclass only selects the 34411A model behaviour.
    """
    def __init__(self, name: str, address: str, silent: bool = False,
                 **kwargs: Any):
        # Forward everything unchanged to the common 344xxA implementation.
        super().__init__(name, address, silent, **kwargs)
Expand Up @@ -86,7 +86,7 @@ def __init__(self, parent: '_Keysight_344xxA', name: str, **kwargs: Any):
get_cmd='TRIGger:SLOPe?',
vals=vals.Enum('POS', 'NEG'))

if self.parent.is_34465A_34470A and self.parent.has_DIG:
if self.parent.has_DIG or (self.parent.model == '34411A'):
self.add_parameter('level',
label='Trigger Level',
unit='V',
Expand Down Expand Up @@ -122,7 +122,7 @@ def __init__(self, parent: '_Keysight_344xxA', name: str, **kwargs: Any):
it buffers one trigger.""")
_trigger_source_vals = vals.Enum('IMM', 'EXT', 'BUS')

if self.parent.has_DIG:
if self.parent.has_DIG or (self.parent.model == '34411A'):
_trigger_source_vals = vals.Enum('IMM', 'EXT', 'BUS', 'INT')
# extra empty lines are needed for readability of the docstring
_trigger_source_docstring += textwrap.dedent("""\
Expand Down Expand Up @@ -170,12 +170,18 @@ def __init__(self, parent: '_Keysight_344xxA', name: str, **kwargs: Any):
Specifies the number of measurements (samples) the instrument
takes per trigger.
MAX selects 1 billion readings. However, when pretrigger is
selected, the maximum is 50,000 readings (without the MEM
option) or 2,000,000 readings (with the MEM option)"""))

if self.parent.has_DIG:
if self.parent.has_MEM:
For the models 34460A and above, MAX selects 1 billion readings.
However, when pretrigger is selected, the maximum is 50,000
readings (without the MEM option) or 2,000,000 readings (with the
MEM option).
For the model 34410A the maximum is 50,000 readings, and for the
model 34411A the maximum is 1,000,000 readings. The latter does
not depend on the pretrigger count."""))

if self.parent.has_DIG or (self.parent.model == '34411A'):
if self.parent.model == '34411A':
_max_pretrig_count = int(1e6) - 1
elif self.parent.has_MEM:
_max_pretrig_count = int(2e6) - 1
else:
_max_pretrig_count = int(5e4) - 1
Expand All @@ -199,7 +205,7 @@ def __init__(self, parent: '_Keysight_344xxA', name: str, **kwargs: Any):
``sample.count`` parameter for information on the maximum
number of sample counts."""))

if self.parent.is_34465A_34470A:
if self.parent.is_34465A_34470A or self.parent.is_34410A_34411A:
self.add_parameter('source',
label='Sample Timing Source',
set_cmd='SAMPle:SOURce {}',
Expand Down Expand Up @@ -418,8 +424,8 @@ def get_raw(self) -> np.ndarray: # pylint: disable=method-hidden

class _Keysight_344xxA(KeysightErrorQueueMixin, VisaInstrument):
"""
Instrument class for Keysight 34460A, 34461A, 34465A and 34470A
multimeters.
Instrument class for Keysight 34410A, 34411A, 34460A, 34461A, 34465A and
34470A multimeters.
The driver currently only supports using the instrument as a voltmeter
for DC measurements.
Expand Down Expand Up @@ -451,6 +457,7 @@ def __init__(self, name: str, address: str, silent: bool = False,
self.model = idn['model']

self.is_34465A_34470A = self.model in ['34465A', '34470A']
self.is_34410A_34411A = self.model in ['34410A', '34411A']

####################################
# Instrument specifications
Expand All @@ -466,6 +473,7 @@ def __init__(self, name: str, address: str, silent: bool = False,
self.has_MEM = self.is_34465A_34470A and 'MEM' in options

PLCs = {'34410A': [0.006, 0.02, 0.06, 0.2, 1, 2, 10, 100],
'34411A': [0.001, 0.002, 0.006, 0.02, 0.06, 0.2, 1, 2, 10, 100],
'34460A': [0.02, 0.2, 1, 10, 100],
'34461A': [0.02, 0.2, 1, 10, 100],
'34465A': [0.02, 0.06, 0.2, 1, 10, 100],
Expand All @@ -475,15 +483,10 @@ def __init__(self, name: str, address: str, silent: bool = False,
PLCs['34465A'] = [0.001, 0.002, 0.006] + PLCs['34465A']
PLCs['34470A'] = [0.001, 0.002, 0.006] + PLCs['34470A']

ranges = {'34410A': [10**n for n in range(3, 10)], # 100 to 1 G
'34460A': [10**n for n in range(-3, 9)], # 1 m to 100 M
'34461A': [10**n for n in range(-3, 9)], # 1 m to 100 M
'34465A': [10**n for n in range(-3, 10)], # 1 m to 1 G
'34470A': [10**n for n in range(-3, 10)], # 1 m to 1 G
}

# The resolution factor order matches the order of PLCs
res_factors = {'34410A': [30e-6, 15e-5, 6e-6, 3e-6, 1.5e-6, 0.7e-6,
res_factors = {'34410A': [6e-6, 3e-6, 1.5e-6, 0.7e-6,
0.3e-6, 0.2e-6, 0.1e-6, 0.03e-6],
'34411A': [30e-6, 15e-5, 6e-6, 3e-6, 1.5e-6, 0.7e-6,
0.3e-6, 0.2e-6, 0.1e-6, 0.03e-6],
'34460A': [300e-6, 100e-6, 30e-6, 10e-6, 3e-6],
'34461A': [100e-6, 10e-6, 3e-6, 1e-6, 0.3e-6],
Expand All @@ -497,7 +500,7 @@ def __init__(self, name: str, address: str, silent: bool = False,
res_factors['34470A'] = [30e-6, 10e-6, 3e-6] + res_factors['34470A']

self._resolution_factors = res_factors[self.model]
self.ranges = ranges[self.model]
self.ranges = [10**n for n in range(-1, 4)] # 100 m to 1 k
self.NPLC_list = PLCs[self.model]

####################################
Expand Down Expand Up @@ -616,14 +619,21 @@ def __init__(self, name: str, address: str, silent: bool = False,
####################################
# Aperture parameters

if self.is_34465A_34470A:
# Define the extreme aperture time values for the 34465A and 34470A
if self.is_34465A_34470A or self.is_34410A_34411A:
# Define the extreme aperture time values for the 34410A, 34411A,
# 34465A and 34470A. The upper limits for 34410A and 34411A in the
# case of a 60Hz line frequency are just calculated by multiplying
# the respective limit with 50/60.
utility_freq = self.line_frequency()
if utility_freq == 50:
apt_times = {'34465A': [0.3e-3, 2],
apt_times = {'34410A': [100e-6, 1],
'34411A': [20e-6, 1],
'34465A': [0.3e-3, 2],
'34470A': [0.3e-3, 2]}
elif utility_freq == 60:
apt_times = {'34465A': [0.3e-3, 1.67],
apt_times = {'34410A': [100e-6, 0.83],
'34411A': [20e-6, 0.83],
'34465A': [0.3e-3, 1.67],
'34470A': [0.3e-3, 1.67]}
if self.has_DIG:
apt_times['34465A'][0] = 20e-6
Expand Down Expand Up @@ -781,10 +791,11 @@ def abort_measurement(self) -> None:

def _licenses(self) -> Sequence[str]:
"""
Return extra licenses purchased with the DMM. The 34410A does not have
optional modules, hence always returns an empty tuple.
Return extra licenses purchased with the DMM. The 34410A and 34411A
models do not have optional modules, hence always returns an empty
tuple.
"""
if self.model != '34410A':
if not self.is_34410A_34411A:
licenses_raw = self.ask('SYST:LIC:CAT?')
licenses_list = [x.strip('"') for x in licenses_raw.split(',')]
return licenses_list
Expand All @@ -793,14 +804,14 @@ def _licenses(self) -> Sequence[str]:
def _options(self) -> Tuple[str, ...]:
"""
Return enabled options of the DMM returned by ``*OPT?`` command.
The 34410A model does not have options, hence always returns
The 34410A and 34411A models do not have options, hence always returns
an empty tuple.
Note that for firmware version 3.0, output of ```*OPT?`` will contain
the ``DIG`` option only if it has been purchased before, although
the option itself is enabled by default in the firmware version 3.0.
"""
if self.model != '34410A':
if not self.is_34410A_34411A:
options_raw = self.ask('*OPT?')
options_list = [opt for opt in options_raw.split(',') if opt != '0']
return tuple(options_list)
Expand Down
29 changes: 29 additions & 0 deletions qcodes/tests/dataset/test_dataset_export.py
Expand Up @@ -4,6 +4,7 @@
import pytest
import xarray as xr

import qcodes
from qcodes import new_data_set
from qcodes.dataset.descriptions.dependencies import InterDependencies_
from qcodes.dataset.descriptions.param_spec import ParamSpecBase
Expand Down Expand Up @@ -230,6 +231,34 @@ def test_export_to_xarray_ds_dict_extra_metadata(mock_dataset):
_assert_xarray_metadata_is_as_expected(datarray, mock_dataset)


def test_export_to_xarray_extra_metadate_can_be_stored(mock_dataset, tmp_path):
    # NOTE(review): "metadate" in the test name looks like a typo for
    # "metadata"; kept as-is since renaming would change the collected test id.
    extra_metadata = {
        "foo": {
            "bar": {"baz": "test"},
            "spam": [1, 2, 3],
        }
    }
    mock_dataset.add_metadata("foo_metadata", json.dumps(extra_metadata))
    mock_dataset.export(export_type="netcdf", path=str(tmp_path))
    data_as_xarray = mock_dataset.to_xarray_dataset()

    export_file = (
        tmp_path / f"{qcodes.config.dataset.export_prefix}{mock_dataset.run_id}.nc"
    )
    loaded_data = xr.load_dataset(export_file)

    # Every metadata entry on the qcodes dataset must survive the netcdf
    # round trip as an attribute of the loaded xarray dataset.
    for key, value in mock_dataset.metadata.items():
        assert loaded_data.attrs[key] == value
    # The entry added above round-trips as its JSON-encoded string.
    assert loaded_data.attrs["foo_metadata"] == json.dumps(extra_metadata)
    # The loaded attrs must match the in-memory xarray export exactly.
    assert loaded_data.attrs == data_as_xarray.attrs


def _assert_xarray_metadata_is_as_expected(xarray_ds, qc_dataset):

assert xarray_ds.ds_name == qc_dataset.name
Expand Down
10 changes: 5 additions & 5 deletions qcodes/tests/drivers/test_keysight_34465a.py
Expand Up @@ -108,15 +108,15 @@ def test_set_get_autorange(driver):


def test_increase_decrease_range(driver):
driver_range_user = driver.ranges[4]
driver_range_user = driver.ranges[2]
driver.increase_range(driver_range_user)
assert driver.range.get() == driver.ranges[5]
assert driver.range() == driver.ranges[3]
driver.increase_range(driver_range_user, 2)
assert driver.range() == driver.ranges[6]
assert driver.range() == driver.ranges[4]
driver.decrease_range(driver_range_user)
assert driver.range() == driver.ranges[3]
assert driver.range() == driver.ranges[1]
driver.decrease_range(driver_range_user, -2)
assert driver.range() == driver.ranges[2]
assert driver.range() == driver.ranges[0]
driver_range_user = driver.ranges[3]
driver.decrease_range(driver_range_user, -2)
assert driver.range() == driver.ranges[1]
Expand Down
5 changes: 5 additions & 0 deletions qcodes/tests/helpers/test_json_encoder.py
Expand Up @@ -2,6 +2,7 @@
import json

import numpy as np
import uncertainties
import pytest
from qcodes.utils.helpers import NumpyJSONEncoder
from qcodes.utils.types import numpy_ints, numpy_floats, numpy_complex
Expand Down Expand Up @@ -35,6 +36,10 @@ def test_complex_types():
assert e.encode(complex_type(complex(1, 2))) == \
'{"__dtype__": "complex", "re": 1.0, "im": 2.0}'

def test_UFloat_type():
    # An uncertainties UFloat must serialize to a tagged dict with its
    # nominal value and standard deviation.
    encoder = NumpyJSONEncoder()
    encoded = encoder.encode(uncertainties.ufloat(1.0, 2.0))
    expected = '{"__dtype__": "UFloat", "nominal_value": 1.0, "std_dev": 2.0}'
    assert encoded == expected

def test_numpy_int_types():
e = NumpyJSONEncoder()
Expand Down
13 changes: 0 additions & 13 deletions qcodes/tests/helpers/test_waitsecs.py
Expand Up @@ -13,19 +13,6 @@ def test_bad_calls():
wait_secs(arg)


def test_good_calls():
for secs in [0.001, 0.003, 0.01, 0.03, 0.1, 0.3, 1]:
finish_clock = time.perf_counter() + secs
secs_out = wait_secs(finish_clock)
assert secs_out > secs - 3e-4
# add a tiny offset as this test may fail if
# otherwise if the two calls to perf_counter are close
# enough to return the same result as a + b - a cannot
# in general be assumed to be <= b in floating point
# math (here a is perf_counter() and b is the wait time
assert secs_out <= secs + 1e-14


def test_warning():
with LogCapture() as logs:
secs_out = wait_secs(time.perf_counter() - 1)
Expand Down
11 changes: 11 additions & 0 deletions qcodes/utils/helpers.py
Expand Up @@ -4,6 +4,7 @@
import logging
import math
import numbers
import uncertainties
import os
import time
from asyncio import iscoroutinefunction
Expand Down Expand Up @@ -51,6 +52,10 @@ def default(self, obj: Any) -> Any:
converted to a dictionary with fields ``re`` and ``im`` containing floating
numbers for the real and imaginary parts respectively, and a field
``__dtype__`` containing value ``complex``.
* Numbers with uncertainties (numbers that conform to ``uncertainties.UFloat``) get
converted to a dictionary with fields ``nominal_value`` and ``std_dev`` containing
floating-point numbers for the nominal value and uncertainty respectively, and a
field ``__dtype__`` containing the value ``UFloat``.
* Object with a ``_JSONEncoder`` method get converted the return value of
that method.
* Objects which support the pickle protocol get converted using the
Expand All @@ -72,6 +77,12 @@ def default(self, obj: Any) -> Any:
're': float(obj.real),
'im': float(obj.imag)
}
elif isinstance(obj, uncertainties.UFloat):
return {
'__dtype__': 'UFloat',
'nominal_value': float(obj.nominal_value),
'std_dev': float(obj.std_dev)
}
elif hasattr(obj, '_JSONEncoder'):
# Use object's custom JSON encoder
jsosencode = getattr(obj, "_JSONEncoder")
Expand Down
7 changes: 4 additions & 3 deletions requirements.txt
Expand Up @@ -15,11 +15,11 @@ decorator~=4.4.2
defusedxml~=0.7.1
entrypoints~=0.3
google-api-core~=1.26.3
google-auth~=1.28.1
google-auth~=1.29.0
googleapis-common-protos~=1.53.0
h5py~=3.2.1
idna~=2.10
importlib-metadata==3.10.0;python_version<'3.8'
importlib-metadata==4.0.0;python_version<'3.8'
ipykernel~=5.5.3
ipython~=7.22.0
ipython-genutils~=0.2.0
Expand Down Expand Up @@ -74,14 +74,15 @@ ruamel.yaml.clib~=0.2.2
schema~=0.7.4
Send2Trash~=1.5.0
six~=1.15.0
slack-sdk~=3.4.2
slack-sdk~=3.5.0
tabulate~=0.8.9
terminado~=0.9.4
testpath~=0.4.4
tornado~=6.1
tqdm~=4.60.0
traitlets~=5.0.5
typing-extensions~=3.7.4.3
uncertainties>=3.0.2
urllib3~=1.26.4
wcwidth~=0.2.5
webencodings~=0.5.1
Expand Down
2 changes: 1 addition & 1 deletion science_requirements.txt
@@ -1 +1 @@
spyder==5.0.0
spyder==5.0.1

0 comments on commit 8c16444

Please sign in to comment.