diff --git a/satpy/node.py b/satpy/node.py
index 7fc87fa553..79827c871d 100644
--- a/satpy/node.py
+++ b/satpy/node.py
@@ -21,11 +21,11 @@
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 """Nodes to build trees."""
 
-import logging
-
 from satpy import DatasetDict, DatasetID, DATASET_KEYS
+from satpy.readers import TooManyResults
+from satpy.utils import get_logger
 
-LOG = logging.getLogger(__name__)
+LOG = get_logger(__name__)
 
 
 class Node(object):
@@ -291,22 +291,26 @@ def _find_reader_dataset(self, dataset_key, **dfilter):
         that method for more information.
 
         """
+        too_many = False
         for reader_name, reader_instance in self.readers.items():
             try:
                 ds_id = reader_instance.get_dataset_key(dataset_key, **dfilter)
+            except TooManyResults:
+                LOG.trace("Too many datasets matching key {} in reader {}".format(dataset_key, reader_name))
+                too_many = True
+                continue
             except KeyError:
-                # LOG.debug("Can't find dataset %s in reader %s",
-                #           str(dataset_key), reader_name)
+                LOG.trace("Can't find dataset %s in reader %s", str(dataset_key), reader_name)
                 continue
-            # LOG.debug("Found {} in reader {}".format(str(ds_id),
-            #           reader_name))
+            LOG.trace("Found {} in reader {} when asking for {}".format(str(ds_id), reader_name, repr(dataset_key)))
             try:
-                # now that we know we have the exact DatasetID see if we have
-                # already created a Node for it
+                # now that we know we have the exact DatasetID see if we have already created a Node for it
                 return self.getitem(ds_id)
             except KeyError:
                 # we haven't created a node yet, create it now
                 return Node(ds_id, {'reader_name': reader_name})
+        if too_many:
+            raise TooManyResults("Too many keys matching: {}".format(dataset_key))
 
     def _get_compositor_prereqs(self, parent, prereq_names, skip=False,
                                 **dfilter):
@@ -380,10 +384,7 @@ def _find_compositor(self, dataset_key, **dfilter):
         try:
             compositor = self.get_compositor(dataset_key)
         except KeyError:
-            raise KeyError("Can't find anything called {}".format(
-                str(dataset_key)))
-        # FIXME: What if resolution/calibration/polarization are lists?
-        # compositor.attrs.update(dfilter)
+            raise KeyError("Can't find anything called {}".format(str(dataset_key)))
         dataset_key = compositor.id
         root = Node(dataset_key, data=(compositor, [], []))
         if src_node is not None:
@@ -391,17 +392,17 @@ def _find_compositor(self, dataset_key, **dfilter):
             root.data[1].append(src_node)
 
         # 2.1 get the prerequisites
-        prereqs, unknowns = self._get_compositor_prereqs(
-            root, compositor.attrs['prerequisites'], **dfilter)
+        LOG.trace("Looking for composite prerequisites for: {}".format(dataset_key))
+        prereqs, unknowns = self._get_compositor_prereqs(root, compositor.attrs['prerequisites'], **dfilter)
         if unknowns:
             # Should we remove all of the unknown nodes that were found
             # if there is an unknown prerequisite are we in trouble?
             return None, unknowns
         root.data[1].extend(prereqs)
 
+        LOG.trace("Looking for optional prerequisites for: {}".format(dataset_key))
         optional_prereqs, _ = self._get_compositor_prereqs(
-            root, compositor.attrs['optional_prerequisites'], skip=True,
-            **dfilter)
+            root, compositor.attrs['optional_prerequisites'], skip=True, **dfilter)
         root.data[2].extend(optional_prereqs)
 
         return root, set()
@@ -419,32 +420,44 @@ def _find_dependencies(self, dataset_key, **dfilter):
 
         """
         # 0 check if the *exact* dataset is already loaded
        try:
-            return self.getitem(dataset_key), set()
+            node = self.getitem(dataset_key)
+            LOG.trace("Found exact dataset already loaded: {}".format(node.name))
+            return node, set()
         except KeyError:
             # exact dataset isn't loaded, let's load it below
-            pass
+            LOG.trace("Exact dataset {} isn't loaded, will try reader...".format(dataset_key))
 
         # 1 try to get *best* dataset from reader
-        node = self._find_reader_dataset(dataset_key, **dfilter)
+        try:
+            node = self._find_reader_dataset(dataset_key, **dfilter)
+        except TooManyResults:
+            LOG.warning("Too many possible datasets to load for {}".format(dataset_key))
+            return None, set([dataset_key])
         if node is not None:
+            LOG.trace("Found reader provided dataset:\n\tRequested: {}\n\tFound: {}".format(dataset_key, node.name))
             return node, set()
+        LOG.trace("Could not find dataset in reader: {}".format(dataset_key))
 
         # 2 try to find a composite by name (any version of it is good enough)
         try:
             # assume that there is no such thing as a "better" composite
             # version so if we find any DatasetIDs already loaded then
             # we want to use them
-            return self[dataset_key], set()
+            node = self[dataset_key]
+            LOG.trace("Composite already loaded:\n\tRequested: {}\n\tFound: {}".format(dataset_key, node.name))
+            return node, set()
         except KeyError:
             # composite hasn't been loaded yet, let's load it below
-            pass
+            LOG.trace("Composite hasn't been loaded yet, will load: {}".format(dataset_key))
 
         # 3 try to find a composite that matches
         try:
             node, unknowns = self._find_compositor(dataset_key, **dfilter)
+            LOG.trace("Found composite:\n\tRequested: {}\n\tFound: {}".format(dataset_key, node and node.name))
         except KeyError:
             node = None
             unknowns = set([dataset_key])
+            LOG.trace("Composite not found: {}".format(dataset_key))
 
         return node, unknowns
diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py
index ddd7da3431..b841a415ca 100644
--- a/satpy/readers/__init__.py
+++ b/satpy/readers/__init__.py
@@ -42,6 +42,10 @@
 LOG = logging.getLogger(__name__)
 
 
+class TooManyResults(KeyError):
+    pass
+
+
 def _wl_dist(wl_a, wl_b):
     """Return the distance between two requested wavelengths."""
     if isinstance(wl_a, tuple):
@@ -238,7 +242,7 @@ def get_key(key, key_container, num_results=1, best=True,
     if num_results == 1 and not res:
         raise KeyError("No dataset matching '{}' found".format(str(key)))
     elif num_results == 1 and len(res) != 1:
-        raise KeyError("No unique dataset matching {}".format(str(key)))
+        raise TooManyResults("No unique dataset matching {}".format(str(key)))
     elif num_results == 1:
         return res[0]
     elif num_results == 0:
diff --git a/satpy/tests/test_scene.py b/satpy/tests/test_scene.py
index 1d517ccd4d..8431780e67 100644
--- a/satpy/tests/test_scene.py
+++ b/satpy/tests/test_scene.py
@@ -1463,6 +1463,26 @@ def _test(self, sensor_names):
         self.assertIn('comp11', scene.datasets)
         self.assertIn('comp23', scene.datasets)
 
+    @mock.patch('satpy.composites.CompositorLoader.load_compositors', autospec=True)
+    @mock.patch('satpy.scene.Scene.create_reader_instances')
+    def test_load_too_many(self, cri, cl):
+        """Test dependency tree if too many reader keys match."""
+        import satpy.scene
+        from satpy.tests.utils import create_fake_reader, test_composites
+        from satpy import DatasetID
+        datasets = [DatasetID(name='duplicate1', wavelength=(0.1, 0.2, 0.3)),
+                    DatasetID(name='duplicate2', wavelength=(0.1, 0.2, 0.3))]
+        reader = create_fake_reader('fake_reader', 'fake_sensor', datasets=datasets)
+        reader.datasets = reader.available_dataset_ids = reader.all_dataset_ids = datasets
+        cri.return_value = {'fake_reader': reader}
+        comps, mods = test_composites('fake_sensor')
+        cl.return_value = (comps, mods)
+        scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader')
+        # mock the available comps/mods in the compositor loader
+        avail_comps = scene.available_composite_ids()
+        self.assertEqual(len(avail_comps), 0)
+        self.assertRaises(KeyError, scene.load, [0.21])
+
 
 class TestSceneResampling(unittest.TestCase):
diff --git a/satpy/utils.py b/satpy/utils.py
index c6d1c7384c..8cf063a113 100644
--- a/satpy/utils.py
+++ b/satpy/utils.py
@@ -39,6 +39,7 @@
 from six.moves import configparser
 
 _is_logging_on = False
+TRACE_LEVEL = 5
 
 
 class OrderedConfigParser(object):
@@ -80,19 +81,22 @@ def sections(self):
 
 
 def ensure_dir(filename):
-    """Checks if the dir of f exists, otherwise create it.
-    """
+    """Checks if the dir of f exists, otherwise create it."""
     directory = os.path.dirname(filename)
     if directory and not os.path.isdir(directory):
         os.makedirs(directory)
 
 
 def debug_on():
-    """Turn debugging logging on.
-    """
+    """Turn debugging logging on."""
    logging_on(logging.DEBUG)
 
 
+def trace_on():
+    """Turn trace logging on."""
+    logging_on(TRACE_LEVEL)
+
+
 def logging_on(level=logging.WARNING):
     """Turn logging on.
     """
@@ -121,6 +125,16 @@ def logging_off():
 
 def get_logger(name):
     """Return logger with null handler added if needed."""
+    if not hasattr(logging.Logger, 'trace'):
+        logging.addLevelName(TRACE_LEVEL, 'TRACE')
+
+        def trace(self, message, *args, **kwargs):
+            if self.isEnabledFor(TRACE_LEVEL):
+                # Yes, logger takes its '*args' as 'args'.
+                self._log(TRACE_LEVEL, message, args, **kwargs)
+
+        logging.Logger.trace = trace
+
     log = logging.getLogger(name)
     if not log.handlers:
         log.addHandler(logging.NullHandler())
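
A minimal usage sketch of the new TRACE level (not part of the patch; satpy.utils.get_logger and trace_on come from the diff above, the logger name and messages are illustrative):

# Sketch: enable and emit TRACE-level (5) messages with the patched satpy.utils.
from satpy.utils import get_logger, trace_on

# get_logger() installs Logger.trace() on first use, so fetch a logger before
# calling .trace(); "satpy.demo" is an arbitrary example name.
log = get_logger("satpy.demo")
trace_on()  # like debug_on(), but routes level-5 records to the console
log.trace("finest-grained message, hidden when only debug_on() is active")
log.debug("regular debug messages still work as before")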
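
Note the design choice: TooManyResults subclasses KeyError, so existing "except KeyError" handlers (and the assertRaises(KeyError, ...) in the new test) keep working, while new code can catch the ambiguous-match case specifically. A standalone sketch of that behavior (not satpy code):

# A KeyError subclass is caught by both the specific and the generic handler,
# so legacy 'except KeyError' paths survive the new exception type.
class TooManyResults(KeyError):
    pass

def lookup(key):
    raise TooManyResults("No unique dataset matching {}".format(key))

try:
    lookup(0.21)
except TooManyResults:
    print("handled ambiguous match specifically")

try:
    lookup(0.21)
except KeyError:
    print("legacy KeyError handler still catches it")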