diff --git a/doc/releases/changelog-dev.md b/doc/releases/changelog-dev.md
index b4918253e1d..91c06f0653c 100644
--- a/doc/releases/changelog-dev.md
+++ b/doc/releases/changelog-dev.md
@@ -621,6 +621,9 @@ Deprecations cycles are tracked at [doc/development/deprecations.rst](https://d
run on a device defined with a shot vector.
[(#3422)](https://github.com/PennyLaneAI/pennylane/pull/3422)
+* The `qml.data` module now works as expected on Windows.
+ [(#3504)](https://github.com/PennyLaneAI/pennylane/pull/3504)
+
Contributors
This release contains contributions from (in alphabetical order):
diff --git a/pennylane/data/data_manager.py b/pennylane/data/data_manager.py
index 5a5cb9d3414..b0aeadba076 100644
--- a/pennylane/data/data_manager.py
+++ b/pennylane/data/data_manager.py
@@ -18,6 +18,7 @@
from collections.abc import Iterable
from concurrent.futures import ThreadPoolExecutor, wait, FIRST_EXCEPTION
import os
+from os.path import sep as pathsep
from time import sleep
from urllib.parse import quote
@@ -25,8 +26,8 @@
from pennylane.data.dataset import Dataset
S3_URL = "https://xanadu-quantum-datasets.s3.amazonaws.com"
-FOLDERMAP_URL = os.path.join(S3_URL, "foldermap.json")
-DATA_STRUCT_URL = os.path.join(S3_URL, "data_struct.json")
+FOLDERMAP_URL = f"{S3_URL}/foldermap.json"
+DATA_STRUCT_URL = f"{S3_URL}/data_struct.json"
_foldermap = {}
_data_struct = {}
@@ -137,7 +138,8 @@ def _refresh_data_struct():
def _fetch_and_save(filename, dest_folder):
"""Download a single file from S3 and save it locally."""
- response = requests.get(os.path.join(S3_URL, quote(filename)), timeout=5.0)
+ webfile = filename if pathsep == "/" else filename.replace(pathsep, "/")
+ response = requests.get(f"{S3_URL}/{quote(webfile)}", timeout=5.0)
response.raise_for_status()
with open(os.path.join(dest_folder, filename), "wb") as f:
f.write(response.content)
@@ -161,16 +163,16 @@ def _s3_download(data_name, folders, attributes, dest_folder, force, num_threads
if not os.path.exists(local_folder):
os.makedirs(local_folder)
- prefix = os.path.join(data_name, folder, f"{folder.replace('/', '_')}_")
+ prefix = os.path.join(data_name, folder, f"{folder.replace(pathsep, '_')}_")
# TODO: consider combining files within a folder (switch to append)
files.extend([f"{prefix}{attr}.dat" for attr in attributes])
if not force:
- start = len(dest_folder.rstrip("/")) + 1
+ start = len(dest_folder.rstrip(pathsep)) + 1
existing_files = {
os.path.join(path, name)[start:]
- for path, _, files in os.walk(dest_folder)
- for name in files
+ for path, _, local_files in os.walk(dest_folder)
+ for name in local_files
}
files = list(set(files) - existing_files)
@@ -196,13 +198,15 @@ def _generate_folders(node, folders):
next_folders = folders[1:]
folders = set(node) if folders[0] == ["full"] else set(folders[0]).intersection(set(node))
- if not next_folders:
- return folders
- return [
- os.path.join(folder, child)
- for folder in folders
- for child in _generate_folders(node[folder], next_folders)
- ]
+ return (
+ [
+ os.path.join(folder, child)
+ for folder in folders
+ for child in _generate_folders(node[folder], next_folders)
+ ]
+ if next_folders
+ else folders
+ )
def load(
@@ -251,7 +255,7 @@ def load(
for folder in all_folders:
real_folder = os.path.join(directory_path, data_name, folder)
data_files.append(
- Dataset(data_name, real_folder, folder.replace("/", "_"), docstring, standard=True)
+ Dataset(data_name, real_folder, folder.replace(pathsep, "_"), docstring, standard=True)
)
return data_files
@@ -263,10 +267,7 @@ def _direc_to_dict(path):
if not dirs:
return None
tree = {x: _direc_to_dict(os.path.join(root, x)) for x in dirs}
- vals = [x is None for x in tree.values()]
- if all(vals):
- return list(dirs)
- return tree
+ return list(dirs) if all(x is None for x in tree.values()) else tree
def list_datasets(path=None):
@@ -417,7 +418,7 @@ def load_interactive():
)
print("\nPlease confirm your choices:")
- print("dataset:", os.path.join(data_name, *[description[param] for param in params]))
+ print("dataset:", "/".join([data_name] + [description[param] for param in params]))
print("attributes:", attributes)
print("force:", force)
print("dest folder:", os.path.join(dest_folder, "datasets"))
diff --git a/pennylane/data/dataset.py b/pennylane/data/dataset.py
index 5b2777960c2..d49f4317593 100644
--- a/pennylane/data/dataset.py
+++ b/pennylane/data/dataset.py
@@ -106,7 +106,7 @@ class Dataset(ABC):
def __std_init__(self, data_name, folder, attr_prefix, docstring):
"""Constructor for standardized datasets."""
self._dtype = data_name
- self._folder = folder.rstrip("/")
+ self._folder = folder.rstrip(os.path.sep)
self._prefix = os.path.join(self._folder, attr_prefix) + "_{}.dat"
self._prefix_len = len(attr_prefix) + 1
self._description = os.path.join(data_name, self._folder.split(data_name)[-1][1:])
@@ -116,7 +116,7 @@ def __std_init__(self, data_name, folder, attr_prefix, docstring):
if not os.path.exists(self._fullfile):
self._fullfile = None
- for f in glob(self._folder + "/*.dat"):
+ for f in glob(f"{self._folder}{os.path.sep}*.dat"):
self.read(f, lazy=True)
def __base_init__(self, **kwargs):
@@ -144,8 +144,9 @@ def __repr__(self):
attr_str = (
str(list(self.attrs))
if len(self.attrs) < 3
- else str(list(self.attrs)[:2])[:-1] + ", ...]"
+ else f"{str(list(self.attrs)[:2])[:-1]}, ...]"
)
+
std_str = f"description: {self._description}, " if self._is_standard else ""
return f""
@@ -241,7 +242,7 @@ def __copy__(self):
# The methods below are intended for use only by standard Dataset objects
def __get_filename_for_attribute(self, attribute):
- return self._fullfile if self._fullfile else self._prefix.format(attribute)
+ return self._fullfile or self._prefix.format(attribute)
def __get_attribute_from_filename(self, filename):
return os.path.basename(filename)[self._prefix_len : -4]