Skip to content

Commit

Permalink
deduplicate mock code
Browse files Browse the repository at this point in the history
removes use of @patch, and just replaces mocked functions.
  • Loading branch information
n8pease committed Nov 16, 2020
1 parent 13475bb commit 8d2c58b
Show file tree
Hide file tree
Showing 3 changed files with 69 additions and 88 deletions.
58 changes: 56 additions & 2 deletions python/lsst/daf/butler/tests/_testRepo.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,25 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.


__all__ = ["makeTestRepo", "makeTestCollection", "addDatasetType", "expandUniqueId"]
__all__ = ["makeTestRepo", "makeTestCollection", "addDatasetType", "expandUniqueId", "DatastoreMock"]

import random
from lsst.daf.butler import Butler, Config, DatasetType
from typing import (
Any,
Iterable,
Mapping,
Optional,
Tuple,
)
from unittest.mock import MagicMock

from lsst.daf.butler import (
Butler,
Config,
DatasetRef,
DatasetType,
FileDataset,
)


def makeTestRepo(root, dataIds, *, config=None, **kwargs):
Expand Down Expand Up @@ -255,3 +270,42 @@ def addDatasetType(butler, name, dimensions, storageClass):
return datasetType
except KeyError as e:
raise ValueError from e


class DatastoreMock:
    """Helper that swaps a butler datastore's I/O methods for fakes.

    This class is not meant to be instantiated or inherited from; call the
    static `apply` method to install the mocks on an existing `Butler`.
    """

    @staticmethod
    def apply(butler):
        """Replace ``butler.datastore``'s export/get/ingest with mocks."""
        datastore = butler.datastore
        datastore.export = DatastoreMock._mock_export
        datastore.get = DatastoreMock._mock_get
        # ingest is only recorded, never inspected, so a plain MagicMock does.
        datastore.ingest = MagicMock()

    @staticmethod
    def _mock_export(refs: Iterable[DatasetRef], *,
                     directory: Optional[str] = None,
                     transfer: Optional[str] = None) -> Iterable[FileDataset]:
        """Stand-in for `Datastore.export` that satisfies the contract that
        every ref passed in appears in the returned `FileDataset` objects.

        Suitable for repository-export tests, e.g.::

            datastore = unittest.mock.Mock(spec=Datastore)
            datastore.export = DatastoreMock._mock_export
        """
        # Lazily emit one FileDataset per ref; path and formatter are dummies.
        for single_ref in refs:
            yield FileDataset(
                refs=[single_ref],
                path="mock/path",
                formatter="lsst.daf.butler.formatters.json.JsonFormatter",
            )

    @staticmethod
    def _mock_get(ref: DatasetRef, parameters: Optional[Mapping[str, Any]] = None
                  ) -> Tuple[int, Optional[Mapping[str, Any]]]:
        """Stand-in for `Datastore.get` that returns the dataset's integer
        ID and the parameters it was given, instead of real data.
        """
        return ref.id, parameters
52 changes: 8 additions & 44 deletions tests/test_cliCmdQueryCollections.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,27 +25,17 @@
from astropy.table import Table
from numpy import array
import os
from typing import (
Any,
Iterable,
Mapping,
Optional,
Tuple,
)
import unittest

from lsst.daf.butler import (
Butler,
CollectionType,
DatasetRef,
Datastore,
FileDataset,
)
from lsst.daf.butler.cli.butler import cli
from lsst.daf.butler.cli.cmd import query_collections
from lsst.daf.butler.cli.utils import clickResultMsg, LogCliRunner
from lsst.daf.butler.script import queryCollections
from lsst.daf.butler.tests import CliCmdTestBase
from lsst.daf.butler.tests import CliCmdTestBase, DatastoreMock
from lsst.daf.butler.tests.utils import ButlerTestHelper, readTable


Expand Down Expand Up @@ -122,46 +112,20 @@ def testGetCollections(self):

class ChainedCollectionsTest(ButlerTestHelper, unittest.TestCase):

@staticmethod
def _mock_export(refs: Iterable[DatasetRef], *,
                 directory: Optional[str] = None,
                 transfer: Optional[str] = None) -> Iterable[FileDataset]:
    """Fake `Datastore.export` that wraps every incoming ref in a
    `FileDataset`, so each ref passed in shows up in the output.

    Usable when building a `Datastore` mock for repository export::

        datastore = unittest.mock.Mock(spec=Datastore)
        datastore.export = _mock_export
    """
    # Yield one dummy dataset per ref; directory/transfer are ignored.
    for exported_ref in refs:
        dummy = FileDataset(refs=[exported_ref],
                            path="mock/path",
                            formatter="lsst.daf.butler.formatters.json.JsonFormatter")
        yield dummy

@staticmethod
def _mock_get(ref: DatasetRef, parameters: Optional[Mapping[str, Any]] = None
              ) -> Tuple[int, Optional[Mapping[str, Any]]]:
    """Fake `Datastore.get` that echoes back the dataset's integer ID
    together with the parameters supplied, instead of reading real data.
    """
    result = (ref.id, parameters)
    return result

def setUp(self):
    """Create a fresh log-capturing CLI runner for each test."""
    self.runner = LogCliRunner()

def testChained(self):
with self.runner.isolated_filesystem():

# Create a butler and add some chained collections
# Create a butler and add some chained collections:
butlerCfg = Butler.makeRepo("here")
with unittest.mock.patch.object(Datastore, "fromConfig", spec=Datastore.fromConfig):
butler1 = Butler(butlerCfg, writeable=True)
butler1.datastore.export = self._mock_export
butler1.datastore.get = self._mock_get

butler1 = Butler(butlerCfg, writeable=True)

# Replace datastore functions with mocks:
DatastoreMock.apply(butler1)

butler1.import_(filename=os.path.join(TESTDIR, "data", "registry", "base.yaml"))
butler1.import_(filename=os.path.join(TESTDIR, "data", "registry", "datasets.yaml"))
registry1 = butler1.registry
Expand Down
47 changes: 5 additions & 42 deletions tests/test_simpleButler.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,61 +24,25 @@
import os
import shutil
import tempfile
from typing import (
Any,
Iterable,
Mapping,
Optional,
Tuple,
)
from typing import Any
import unittest
import unittest.mock

import astropy.time

from lsst.daf.butler import (
Butler,
ButlerConfig,
CollectionType,
DatasetRef,
Datastore,
FileDataset,
Registry,
Timespan,
)
from lsst.daf.butler.registry import RegistryConfig
from lsst.daf.butler.tests import DatastoreMock


TESTDIR = os.path.abspath(os.path.dirname(__file__))


def _mock_export(refs: Iterable[DatasetRef], *,
                 directory: Optional[str] = None,
                 transfer: Optional[str] = None) -> Iterable[FileDataset]:
    """Mock replacement for `Datastore.export` that satisfies the
    requirement that every ref passed in is included in the returned
    `FileDataset` objects.

    Can be used to construct a `Datastore` mock for repository export via::

        datastore = unittest.mock.Mock(spec=Datastore)
        datastore.export = _mock_export
    """
    # Build the dummy datasets lazily, one per input ref.
    return (
        FileDataset(refs=[incoming_ref],
                    path="mock/path",
                    formatter="lsst.daf.butler.formatters.json.JsonFormatter")
        for incoming_ref in refs
    )


def _mock_get(ref: DatasetRef, parameters: Optional[Mapping[str, Any]] = None
              ) -> Tuple[int, Optional[Mapping[str, Any]]]:
    """Mock replacement for `Datastore.get` returning the dataset's integer
    ID and the parameters it was given, rather than actual dataset data.
    """
    mocked_result = (ref.id, parameters)
    return mocked_result


class SimpleButlerTestCase(unittest.TestCase):
"""Tests for butler (including import/export functionality) that should not
depend on the Registry Database backend or Datastore implementation, and
Expand All @@ -100,15 +64,14 @@ def makeButler(self, **kwargs: Any) -> Butler:
# make separate temporary directory for registry of this instance
tmpdir = tempfile.mkdtemp(dir=self.root)
config["registry", "db"] = f"sqlite:///{tmpdir}/gen3.sqlite3"
config["root"] = self.root

# have to make a registry first
registryConfig = RegistryConfig(config.get("registry"))
Registry.createFromConfig(registryConfig)

with unittest.mock.patch.object(Datastore, "fromConfig", spec=Datastore.fromConfig):
butler = Butler(config, **kwargs)
butler.datastore.export = _mock_export
butler.datastore.get = _mock_get
butler = Butler(config, **kwargs)
DatastoreMock.apply(butler)
return butler

def testReadBackwardsCompatibility(self):
Expand Down

0 comments on commit 8d2c58b

Please sign in to comment.