Skip to content

Commit

Permalink
Move datastore code from using "Dataset" to "dataset"
Browse files Browse the repository at this point in the history
This makes it consistent with current style.
  • Loading branch information
timj committed Apr 17, 2020
1 parent abf2313 commit 312906b
Show file tree
Hide file tree
Showing 6 changed files with 38 additions and 38 deletions.
20 changes: 10 additions & 10 deletions python/lsst/daf/butler/datastores/chainedDatastore.py
Original file line number Diff line number Diff line change
Expand Up @@ -242,12 +242,12 @@ def get(self, ref, parameters=None):
Reference to the required Dataset.
parameters : `dict`
`StorageClass`-specific parameters that specify, for example,
a slice of the Dataset to be loaded.
a slice of the dataset to be loaded.
Returns
-------
inMemoryDataset : `object`
Requested Dataset or slice thereof as an InMemoryDataset.
Requested dataset or slice thereof as an InMemoryDataset.
Raises
------
Expand All @@ -262,7 +262,7 @@ def get(self, ref, parameters=None):
for datastore in self.datastores:
try:
inMemoryObject = datastore.get(ref, parameters)
log.debug("Found Dataset %s in datastore %s", ref, datastore.name)
log.debug("Found dataset %s in datastore %s", ref, datastore.name)
return inMemoryObject
except FileNotFoundError:
pass
Expand All @@ -281,7 +281,7 @@ def put(self, inMemoryDataset, ref):
Parameters
----------
inMemoryDataset : `object`
The Dataset to store.
The dataset to store.
ref : `DatasetRef`
Reference to the associated Dataset.
Expand Down Expand Up @@ -420,8 +420,8 @@ def getUri(self, ref, predict=False):
Returns
-------
uri : `str`
URI string pointing to the Dataset within the datastore. If the
Dataset does not exist in the datastore, and if ``predict`` is
URI string pointing to the dataset within the datastore. If the
dataset does not exist in the datastore, and if ``predict`` is
`True`, the URI will be a prediction and will include a URI
fragment "#predicted".
Expand Down Expand Up @@ -470,15 +470,15 @@ def getUri(self, ref, predict=False):
raise FileNotFoundError("Dataset {} not in any datastore".format(ref))

def remove(self, ref):
"""Indicate to the Datastore that a Dataset can be removed.
"""Indicate to the datastore that a dataset can be removed.
The dataset will be removed from each datastore. The dataset is
not required to exist in every child datastore.
Parameters
----------
ref : `DatasetRef`
Reference to the required Dataset.
Reference to the required dataset.
Raises
------
Expand Down Expand Up @@ -509,15 +509,15 @@ def emptyTrash(self):
datastore.emptyTrash()

def transfer(self, inputDatastore, ref):
"""Retrieve a Dataset from an input `Datastore`,
"""Retrieve a dataset from an input `Datastore`,
and store the result in this `Datastore`.
Parameters
----------
inputDatastore : `Datastore`
The external `Datastore` from which to retrieve the Dataset.
ref : `DatasetRef`
Reference to the required Dataset in the input data store.
Reference to the required dataset in the input data store.
Returns
-------
Expand Down
14 changes: 7 additions & 7 deletions python/lsst/daf/butler/datastores/fileLikeDatastore.py
Original file line number Diff line number Diff line change
Expand Up @@ -274,7 +274,7 @@ def getStoredItemInfo(self, ref):
# Docstring inherited from GenericBaseDatastore
records = list(self.registry.fetchOpaqueData(self._tableName, dataset_id=ref.id))
if len(records) == 0:
raise KeyError(f"Unable to retrieve location associated with Dataset {ref}.")
raise KeyError(f"Unable to retrieve location associated with dataset {ref}.")
assert len(records) == 1, "Primary key constraint should make more than one result impossible."
record = records[0]
# Convert name of StorageClass to instance
Expand Down Expand Up @@ -376,7 +376,7 @@ def _prepare_for_get(self, ref, parameters=None):
Reference to the required Dataset.
parameters : `dict`
`StorageClass`-specific parameters that specify, for example,
a slice of the Dataset to be loaded.
a slice of the dataset to be loaded.
Returns
-------
Expand All @@ -388,7 +388,7 @@ def _prepare_for_get(self, ref, parameters=None):
# Get file metadata and internal metadata
location, storedFileInfo = self._get_dataset_location_info(ref)
if location is None:
raise FileNotFoundError(f"Could not retrieve Dataset {ref}.")
raise FileNotFoundError(f"Could not retrieve dataset {ref}.")

# We have a write storage class and a read storage class and they
# can be different for concrete composites.
Expand Down Expand Up @@ -417,7 +417,7 @@ def _prepare_for_put(self, inMemoryDataset, ref):
Parameters
----------
inMemoryDataset : `object`
The Dataset to store.
The dataset to store.
ref : `DatasetRef`
Reference to the associated Dataset.
Expand Down Expand Up @@ -589,8 +589,8 @@ def getUri(self, ref, predict=False):
Returns
-------
uri : `str`
URI string pointing to the Dataset within the datastore. If the
Dataset does not exist in the datastore, and if ``predict`` is
URI string pointing to the dataset within the datastore. If the
dataset does not exist in the datastore, and if ``predict`` is
`True`, the URI will be a prediction and will include a URI
fragment "#predicted".
If the datastore does not have entities that relate well
Expand Down Expand Up @@ -640,7 +640,7 @@ def getUri(self, ref, predict=False):

@transactional
def trash(self, ref):
"""Indicate to the Datastore that a Dataset can be removed.
"""Indicate to the datastore that a dataset can be removed.
Parameters
----------
Expand Down
12 changes: 6 additions & 6 deletions python/lsst/daf/butler/datastores/genericDatastore.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def getStoredItemInfo(self, ref):
Parameters
----------
ref : `DatasetRef`
The Dataset that is to be queried.
The dataset that is to be queried.
Returns
-------
Expand All @@ -80,7 +80,7 @@ def removeStoredItemInfo(self, ref):
Parameters
----------
ref : `DatasetRef`
The Dataset that has been removed.
The dataset that has been removed.
"""
raise NotImplementedError()

Expand Down Expand Up @@ -171,7 +171,7 @@ def _validate_put_parameters(self, inMemoryDataset, ref):
Parameters
----------
inMemoryDataset : `object`
The Dataset to store.
The dataset to store.
ref : `DatasetRef`
Reference to the associated Dataset.
"""
Expand All @@ -192,7 +192,7 @@ def _validate_put_parameters(self, inMemoryDataset, ref):
return

def remove(self, ref):
"""Indicate to the Datastore that a Dataset can be removed.
"""Indicate to the Datastore that a dataset can be removed.
.. warning::
Expand Down Expand Up @@ -220,15 +220,15 @@ def remove(self, ref):
self.emptyTrash()

def transfer(self, inputDatastore, ref):
"""Retrieve a Dataset from an input `Datastore`,
"""Retrieve a dataset from an input `Datastore`,
and store the result in this `Datastore`.
Parameters
----------
inputDatastore : `Datastore`
The external `Datastore` from which to retrieve the Dataset.
ref : `DatasetRef`
Reference to the required Dataset in the input data store.
Reference to the required dataset in the input data store.
"""
assert inputDatastore is not self # unless we want it for renames?
Expand Down
12 changes: 6 additions & 6 deletions python/lsst/daf/butler/datastores/inMemoryDatastore.py
Original file line number Diff line number Diff line change
Expand Up @@ -227,12 +227,12 @@ def get(self, ref, parameters=None):
Reference to the required Dataset.
parameters : `dict`
`StorageClass`-specific parameters that specify, for example,
a slice of the Dataset to be loaded.
a slice of the dataset to be loaded.
Returns
-------
inMemoryDataset : `object`
Requested Dataset or slice thereof as an InMemoryDataset.
Requested dataset or slice thereof as an InMemoryDataset.
Raises
------
Expand Down Expand Up @@ -281,7 +281,7 @@ def put(self, inMemoryDataset, ref):
Parameters
----------
inMemoryDataset : `object`
The Dataset to store.
The dataset to store.
ref : `DatasetRef`
Reference to the associated Dataset.
Expand Down Expand Up @@ -336,8 +336,8 @@ def getUri(self, ref, predict=False):
Returns
-------
uri : `str`
URI string pointing to the Dataset within the datastore. If the
Dataset does not exist in the datastore, and if ``predict`` is
URI string pointing to the dataset within the datastore. If the
dataset does not exist in the datastore, and if ``predict`` is
`True`, the URI will be a prediction and will include a URI
fragment "#predicted".
If the datastore does not have entities that relate well
Expand All @@ -363,7 +363,7 @@ def getUri(self, ref, predict=False):
return "mem://{}".format(name)

def trash(self, ref):
"""Indicate to the Datastore that a Dataset can be removed.
"""Indicate to the Datastore that a dataset can be removed.
Parameters
----------
Expand Down
10 changes: 5 additions & 5 deletions python/lsst/daf/butler/datastores/posixDatastore.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,12 +121,12 @@ def get(self, ref, parameters=None):
Reference to the required Dataset.
parameters : `dict`
`StorageClass`-specific parameters that specify, for example,
a slice of the Dataset to be loaded.
a slice of the dataset to be loaded.
Returns
-------
inMemoryDataset : `object`
Requested Dataset or slice thereof as an InMemoryDataset.
Requested dataset or slice thereof as an InMemoryDataset.
Raises
------
Expand Down Expand Up @@ -157,7 +157,7 @@ def get(self, ref, parameters=None):
try:
result = formatter.read(component=getInfo.component)
except Exception as e:
raise ValueError(f"Failure from formatter '{formatter.name()}' for Dataset {ref.id}") from e
raise ValueError(f"Failure from formatter '{formatter.name()}' for dataset {ref.id}") from e

return self._post_process_get(result, getInfo.readStorageClass, getInfo.assemblerParams)

Expand All @@ -168,7 +168,7 @@ def put(self, inMemoryDataset, ref):
Parameters
----------
inMemoryDataset : `object`
The Dataset to store.
The dataset to store.
ref : `DatasetRef`
Reference to the associated Dataset.
Expand Down Expand Up @@ -383,7 +383,7 @@ def export(self, refs: Iterable[DatasetRef], *,
for ref in refs:
location, storedFileInfo = self._get_dataset_location_info(ref)
if location is None:
raise FileNotFoundError(f"Could not retrieve Dataset {ref}.")
raise FileNotFoundError(f"Could not retrieve dataset {ref}.")
if transfer is None:
# TODO: do we also need to return the readStorageClass somehow?
yield FileDataset(refs=[ref], path=location.pathInStore, formatter=storedFileInfo.formatter)
Expand Down
8 changes: 4 additions & 4 deletions python/lsst/daf/butler/datastores/s3Datastore.py
Original file line number Diff line number Diff line change
Expand Up @@ -122,12 +122,12 @@ def get(self, ref, parameters=None):
Reference to the required Dataset.
parameters : `dict`
`StorageClass`-specific parameters that specify, for example,
a slice of the Dataset to be loaded.
a slice of the dataset to be loaded.
Returns
-------
inMemoryDataset : `object`
Requested Dataset or slice thereof as an InMemoryDataset.
Requested dataset or slice thereof as an InMemoryDataset.
Raises
------
Expand Down Expand Up @@ -193,7 +193,7 @@ def get(self, ref, parameters=None):
formatter._fileDescriptor.location = Location(*os.path.split(tmpFile.name))
result = formatter.read(component=getInfo.component)
except Exception as e:
raise ValueError(f"Failure from formatter for Dataset {ref.id}: {e}") from e
raise ValueError(f"Failure from formatter for dataset {ref.id}: {e}") from e

return self._post_process_get(result, getInfo.readStorageClass, getInfo.assemblerParams)

Expand All @@ -204,7 +204,7 @@ def put(self, inMemoryDataset, ref):
Parameters
----------
inMemoryDataset : `object`
The Dataset to store.
The dataset to store.
ref : `DatasetRef`
Reference to the associated Dataset.
Expand Down

0 comments on commit 312906b

Please sign in to comment.