diff --git a/setup.cfg b/setup.cfg index 4efa2075..1cc2d11d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -4,7 +4,7 @@ ignore = bootstrap.py [flake8] -ignore = C901,N801,N802,N803,N805,N806,N812,E301 +ignore = E301 exclude = bootstrap.py [bdist_wheel] diff --git a/src/Products/PluginIndexes/BooleanIndex/BooleanIndex.py b/src/Products/PluginIndexes/BooleanIndex/BooleanIndex.py index 2b917ec3..f665f316 100644 --- a/src/Products/PluginIndexes/BooleanIndex/BooleanIndex.py +++ b/src/Products/PluginIndexes/BooleanIndex/BooleanIndex.py @@ -46,7 +46,7 @@ class BooleanIndex(UnIndex): has a roughly equal 50/50 split. """ - meta_type = "BooleanIndex" + meta_type = 'BooleanIndex' manage_options = ( {'label': 'Settings', @@ -55,7 +55,7 @@ class BooleanIndex(UnIndex): 'action': 'manage_browse'}, ) - query_options = ["query"] + query_options = ['query'] manage = manage_main = DTMLFile('dtml/manageBooleanIndex', globals()) manage_main._setName('manage_main') @@ -136,12 +136,13 @@ def removeForwardIndexEntry(self, entry, documentId, check=True): except ConflictError: raise except Exception: - LOG.exception( - '%s: unindex_object could not remove documentId %s ' - 'from index %s. This should not happen.' % ( - self.__class__.__name__, - str(documentId), - str(self.id))) + LOG.exception('%(context)s: unindex_object could not ' + 'remove documentId %(doc_id)s from ' + 'index %(index)r. This should not ' + 'happen.', dict( + context=self.__class__.__name__, + doc_id=documentId, + index=self.id)) elif check: # is the index (after removing the current entry) larger than # 60% of the total length? 
than switch the indexed value @@ -174,7 +175,7 @@ def _index_object(self, documentId, obj, threshold=None, attr=''): raise except Exception: LOG.error('Should not happen: oldDatum was there, now ' - 'its not, for document with id %s' % + 'its not, for document with id %s', documentId) if datum is not _marker: @@ -203,7 +204,7 @@ def unindex_object(self, documentId): raise except Exception: LOG.debug('Attempt to unindex nonexistent document' - ' with id %s' % documentId, exc_info=True) + ' with id %s', documentId, exc_info=True) def query_index(self, record, resultset=None): index = self._index diff --git a/src/Products/PluginIndexes/CompositeIndex/CompositeIndex.py b/src/Products/PluginIndexes/CompositeIndex/CompositeIndex.py index d9ec0f1c..c39ab358 100644 --- a/src/Products/PluginIndexes/CompositeIndex/CompositeIndex.py +++ b/src/Products/PluginIndexes/CompositeIndex/CompositeIndex.py @@ -14,6 +14,7 @@ import logging from itertools import product from itertools import combinations +from six.moves import urllib import time import transaction @@ -142,8 +143,8 @@ def rawAttributes(self): return self._attributes def __repr__(self): - return "" % \ - (self.id, self.meta_type, self.attributes) + return ('').format(self) @implementer(ITransposeQuery) @@ -153,7 +154,7 @@ class CompositeIndex(KeywordIndex): or sequences of items """ - meta_type = "CompositeIndex" + meta_type = 'CompositeIndex' manage_options = ( {'label': 'Settings', @@ -162,7 +163,7 @@ class CompositeIndex(KeywordIndex): 'action': 'manage_browse'}, ) - query_options = ("query", "operator") + query_options = ('query', 'operator') def __init__(self, id, ignore_ex=None, call_methods=None, extra=None, caller=None): @@ -304,8 +305,10 @@ def make_query(self, query): zc = aq_parent(aq_parent(self)) skip = zc.getProperty('skip_compositeindex', False) if skip: - LOG.debug('%s: skip composite query build %r' % - (self.__class__.__name__, zc)) + LOG.debug('%(context)s: skip composite query build ' + 'for 
%(zcatalog)r', dict( + context=self.__class__.__name__, + zcatalog=zc)) return query except AttributeError: pass @@ -377,7 +380,7 @@ def addComponent(self, c_id, c_meta_type, c_attributes): # Add a component object by 'c_id'. if c_id in self._components: raise KeyError('A component with this ' - 'name already exists: %s' % c_id) + 'name already exists: {0}'.format(c_id)) self._components[c_id] = Component(c_id, c_meta_type, @@ -387,7 +390,7 @@ def addComponent(self, c_id, c_meta_type, c_attributes): def delComponent(self, c_id): # Delete the component object specified by 'c_id'. if c_id not in self._components: - raise KeyError('no such Component: %s' % c_id) + raise KeyError('no such Component: {0}'.format(c_id)) del self._components[c_id] @@ -486,10 +489,11 @@ def manage_fastBuild(self, threshold=None, URL1=None, ct = time.clock() - ct if RESPONSE: - RESPONSE.redirect(URL1 + '/manage_main?' - 'manage_tabs_message=ComponentIndex%%20fast%%20' - 'reindexed%%20in%%20%.3f%%20' - 'seconds%%20(%.3f%%20cpu)' % (tt, ct)) + msg = ('ComponentIndex fast reindexed ' + 'in {0:.3f}s ({1:.3f}s cpu time)').format(tt, ct) + param = urllib.parse.urlencode({'manage_tabs_message': msg}) + + RESPONSE.redirect(URL1 + '/manage_main?' 
+ param) manage = manage_main = DTMLFile('dtml/manageCompositeIndex', globals()) manage_main._setName('manage_main') diff --git a/src/Products/PluginIndexes/CompositeIndex/tests/testCompositeIndex.py b/src/Products/PluginIndexes/CompositeIndex/tests/testCompositeIndex.py index ef5e9a92..d3393ccb 100644 --- a/src/Products/PluginIndexes/CompositeIndex/tests/testCompositeIndex.py +++ b/src/Products/PluginIndexes/CompositeIndex/tests/testCompositeIndex.py @@ -132,8 +132,8 @@ def defaultSearch(self, req, expectedValues=None, verbose=False): break if verbose and (index.id in req): - logger.info("index %s: %s hits in %3.2fms" % - (index.id, r and len(r) or 0, duration)) + logger.info('index %s: %s hits in %3.2fms', + index.id, r and len(r) or 0, duration) if not rs: return set() @@ -148,7 +148,7 @@ def compositeSearch(self, req, expectedValues=None, verbose=False): query = comp_index.make_query(req) # catch successful? - self.assertTrue('comp01' in query) + self.assertIn('comp01', query) return self.defaultSearch(query, expectedValues=expectedValues, @@ -168,8 +168,8 @@ def info(index): n_obj = index.numObjects() ratio = float(size) / float(n_obj) logger.info('' % - (index.id, size, n_obj, ratio * 1000)) + '%3s length: %5s ratio: %6.3f pm>', + index.id, size, n_obj, ratio * 1000) return ratio for index in self._indexes: @@ -256,26 +256,27 @@ def profileSearch(query, warmup=False, verbose=False): duration1 = (time() - st) * 1000 if verbose: - logger.info("atomic: %s hits in %3.2fms" % - (len(res1), duration1)) + logger.info('atomic: %s hits in %3.2fms', + len(res1), duration1) st = time() res2 = self.compositeSearch(query, verbose=False) duration2 = (time() - st) * 1000 if verbose: - logger.info("composite: %s hits in %3.2fms" % - (len(res2), duration2)) + logger.info('composite: %s hits in %3.2fms', + len(res2), duration2) if verbose: - logger.info('[composite/atomic] factor %3.2f' % - (duration1 / duration2,)) + logger.info('[composite/atomic] factor %3.2f', + duration1 
/ duration2,) if not warmup: # if length of result is greater than zero composite # search must be roughly faster than default search if res1 and res2: - assert 0.5 * duration2 < duration1, (duration2, duration1) + self.assertLess( + 0.5 * duration2, duration1, (duration2, duration1)) # is result identical? self.assertEqual(len(res1), len(res2), '%s != %s for %s' % @@ -285,7 +286,7 @@ def profileSearch(query, warmup=False, verbose=False): for l in lengths: self.clearIndexes() logger.info('************************************\n' - 'indexing %s objects' % l) + 'indexing %s objects', l) for i in range(l): name = '%s' % i @@ -299,14 +300,14 @@ def profileSearch(query, warmup=False, verbose=False): logger.info('\nstart queries') # warming up indexes - logger.info("warming up indexes") + logger.info('warming up indexes') for name, query in queries: profileSearch(query, warmup=True) # in memory measure - logger.info("in memory measure") + logger.info('in memory measure') for name, query in queries: - logger.info("\nquery: %s" % name) + logger.info('\nquery: %s', name) profileSearch(query, verbose=True) logger.info('\nqueries finished') diff --git a/src/Products/PluginIndexes/DateIndex/DateIndex.py b/src/Products/PluginIndexes/DateIndex/DateIndex.py index 29682597..39bfb8b6 100644 --- a/src/Products/PluginIndexes/DateIndex/DateIndex.py +++ b/src/Products/PluginIndexes/DateIndex/DateIndex.py @@ -142,9 +142,9 @@ def index_object(self, documentId, obj, threshold=None): except ConflictError: raise except Exception: - LOG.error("Should not happen: ConvertedDate was there," - " now it's not, for document with id %s" % - documentId) + LOG.error('Should not happen: ConvertedDate was there,' + ' now it\'s not, for document' + ' with id %s', documentId) if ConvertedDate is not _marker: self.insertForwardIndexEntry(ConvertedDate, documentId) @@ -193,8 +193,9 @@ def _convert(self, value, default=None): if t_val > MAX32: # t_val must be integer fitting in the 32bit range raise 
OverflowError( - "%s is not within the range of indexable dates (index: %s)" - % (value, self.id)) + ('{0} is not within the range of' + ' indexable dates (index: {1})'.format( + value, self.id))) return t_val diff --git a/src/Products/PluginIndexes/DateRangeIndex/DateRangeIndex.py b/src/Products/PluginIndexes/DateRangeIndex/DateRangeIndex.py index 53b1a33f..78724e9e 100644 --- a/src/Products/PluginIndexes/DateRangeIndex/DateRangeIndex.py +++ b/src/Products/PluginIndexes/DateRangeIndex/DateRangeIndex.py @@ -61,7 +61,7 @@ class DateRangeIndex(UnIndex): security = ClassSecurityInfo() - meta_type = "DateRangeIndex" + meta_type = 'DateRangeIndex' query_options = ('query', ) manage_options = ({'label': 'Properties', @@ -93,46 +93,46 @@ def __init__(self, id, since_field=None, until_field=None, ceiling_value, precision_value) self.clear() - security.declareProtected(view, 'getSinceField') + @security.protected(view) def getSinceField(self): """Get the name of the attribute indexed as start date. """ return self._since_field - security.declareProtected(view, 'getUntilField') + @security.protected(view) def getUntilField(self): """Get the name of the attribute indexed as end date. 
""" return self._until_field - security.declareProtected(view, 'getFloorValue') + @security.protected(view) def getFloorValue(self): """ """ return self.floor_value - security.declareProtected(view, 'getCeilingValue') + @security.protected(view) def getCeilingValue(self): """ """ return self.ceiling_value - security.declareProtected(view, 'getPrecisionValue') + @security.protected(view) def getPrecisionValue(self): """ """ return self.precision_value manage_indexProperties = DTMLFile('manageDateRangeIndex', _dtmldir) - security.declareProtected(manage_zcatalog_indexes, 'manage_edit') + @security.protected(manage_zcatalog_indexes) def manage_edit(self, since_field, until_field, floor_value, ceiling_value, precision_value, REQUEST): """ """ self._edit(since_field, until_field, floor_value, ceiling_value, precision_value) - REQUEST['RESPONSE'].redirect('%s/manage_main' - '?manage_tabs_message=Updated' - % REQUEST.get('URL2')) + REQUEST['RESPONSE'].redirect('{0}/manage_main' + '?manage_tabs_message=Updated'.format( + REQUEST.get('URL2'))) - security.declarePrivate('_edit') + @security.private def _edit(self, since_field, until_field, floor_value=None, ceiling_value=None, precision_value=None): """Update the fields used to compute the range. @@ -146,7 +146,7 @@ def _edit(self, since_field, until_field, floor_value=None, if precision_value not in (None, ''): self.precision_value = int(precision_value) - security.declareProtected(manage_zcatalog_indexes, 'clear') + @security.protected(manage_zcatalog_indexes) def clear(self): """Start over fresh.""" self._always = IITreeSet() @@ -224,7 +224,7 @@ def uniqueValues(self, name=None, withLengths=0): the form '(value, length)'. 
""" if name not in (self._since_field, self._until_field): - raise StopIteration + return if name == self._since_field: sets = (self._since, self._since_only) @@ -248,13 +248,13 @@ def getRequestCacheKey(self, record, resultset=None): tid = str(term) # unique index identifier - iid = '_%s_%s_%s' % (self.__class__.__name__, - self.id, self.getCounter()) + iid = '_{0}_{1}_{2}'.format(self.__class__.__name__, + self.id, self.getCounter()) # record identifier if resultset is None: - rid = '_%s' % (tid, ) + rid = '_{0}'.format(tid) else: - rid = '_inverse_%s' % (tid, ) + rid = '_inverse_{0}'.format(tid) return (iid, rid) diff --git a/src/Products/PluginIndexes/DateRangeIndex/tests.py b/src/Products/PluginIndexes/DateRangeIndex/tests.py index d9d94be2..7598986a 100644 --- a/src/Products/PluginIndexes/DateRangeIndex/tests.py +++ b/src/Products/PluginIndexes/DateRangeIndex/tests.py @@ -97,11 +97,14 @@ def _checkApply(self, index, req, expectedValues, resultset=None): def checkApply(): result, used = index._apply_index(req, resultset=resultset) - if hasattr(result, 'keys'): + try: result = result.keys() + except AttributeError: + pass + assert used == (index._since_field, index._until_field) assert len(result) == len(expectedValues), \ - '%s: %s | %s' % (req, list(result), expectedValues) + '{0}: {1} | {2}'.format(req, list(result), expectedValues) for k, v in expectedValues: assert k in result return (result, used) @@ -148,7 +151,12 @@ def test_retrieval(self): index = self._makeOne('work', 'start', 'stop') for i, dummy in dummies: - index.index_object(i, dummy) + result = index.index_object(i, dummy) + self.assertEqual(result, 1) + + # don't index datum twice + result = index.index_object(i, dummy) + self.assertEqual(result, 0) for i, dummy in dummies: self.assertEqual(index.getEntryForObject(i), dummy.datum()) @@ -162,6 +170,13 @@ def test_retrieval(self): self.assertEqual(index.getEntryForObject(result), match[1].datum()) + # check update + i, dummy = dummies[0] + start 
= dummy._start + dummy._start = start and start + 1 or 1 + index.index_object(i, dummy) + self.assertEqual(index.getEntryForObject(0), dummy.datum()) + def test_longdates(self): too_large = 2 ** 31 too_small = -2 ** 31 diff --git a/src/Products/PluginIndexes/FieldIndex/FieldIndex.py b/src/Products/PluginIndexes/FieldIndex/FieldIndex.py index d4e9e7fd..01bc835e 100644 --- a/src/Products/PluginIndexes/FieldIndex/FieldIndex.py +++ b/src/Products/PluginIndexes/FieldIndex/FieldIndex.py @@ -19,7 +19,7 @@ class FieldIndex(UnIndex): """Index for simple fields. """ - meta_type = "FieldIndex" + meta_type = 'FieldIndex' query_options = ('query', 'range', 'not') manage_options = ( diff --git a/src/Products/PluginIndexes/KeywordIndex/KeywordIndex.py b/src/Products/PluginIndexes/KeywordIndex/KeywordIndex.py index 36732510..cf3c1bec 100644 --- a/src/Products/PluginIndexes/KeywordIndex/KeywordIndex.py +++ b/src/Products/PluginIndexes/KeywordIndex/KeywordIndex.py @@ -11,7 +11,6 @@ # ############################################################################## -import sys from logging import getLogger from BTrees.OOBTree import difference @@ -37,7 +36,7 @@ class KeywordIndex(UnIndex): This should have an _apply_index that returns a relevance score """ - meta_type = "KeywordIndex" + meta_type = 'KeywordIndex' query_options = ('query', 'range', 'not', 'operator') manage_options = ( @@ -135,9 +134,10 @@ def unindex_object(self, documentId): try: del self._unindex[documentId] except KeyError: - LOG.debug('%s: Attempt to unindex nonexistent ' - 'document with id %s' % - (self.__class__.__name__, documentId), + LOG.debug('%(context)s: Attempt to unindex nonexistent ' + 'document with id %(doc_id)s', dict( + context=self.__class__.__name__, + doc_id=documentId), exc_info=True) manage = manage_main = DTMLFile('dtml/manageKeywordIndex', globals()) diff --git a/src/Products/PluginIndexes/PathIndex/PathIndex.py b/src/Products/PluginIndexes/PathIndex/PathIndex.py index fbb538b6..5a61ccc4 
100644 --- a/src/Products/PluginIndexes/PathIndex/PathIndex.py +++ b/src/Products/PluginIndexes/PathIndex/PathIndex.py @@ -55,7 +55,7 @@ class PathIndex(Persistent, SimpleItem): 'all docids with this path component on this level' """ - meta_type = "PathIndex" + meta_type = 'PathIndex' operators = ('or', 'and') useOperator = 'or' @@ -134,8 +134,9 @@ def unindex_object(self, docid): """ See IPluggableIndex. """ if docid not in self._unindex: - LOG.debug('Attempt to unindex nonexistent document with id %s' - % docid) + LOG.debug('Attempt to unindex nonexistent ' + 'document with id %s', docid) + return comps = self._unindex[docid].split('/') @@ -150,8 +151,8 @@ def unindex_object(self, docid): if not self._index[comp]: del self._index[comp] except KeyError: - LOG.debug('Attempt to unindex document with id %s failed' - % docid) + LOG.debug('Attempt to unindex document ' + 'with id %s failed', docid) self._length.change(-1) del self._unindex[docid] @@ -277,8 +278,8 @@ def _search(self, path, default_level=0): if level < 0: # Search at every level, return the union of all results return multiunion( - [self._search(path, level) - for level in range(self._depth + 1)]) + [self._search(path, lvl) + for lvl in range(self._depth + 1)]) comps = list(filter(None, path.split('/'))) diff --git a/src/Products/PluginIndexes/TopicIndex/FilteredSet.py b/src/Products/PluginIndexes/TopicIndex/FilteredSet.py index 5b7cac28..6f1bcffb 100644 --- a/src/Products/PluginIndexes/TopicIndex/FilteredSet.py +++ b/src/Products/PluginIndexes/TopicIndex/FilteredSet.py @@ -65,7 +65,10 @@ def setExpression(self, expr): self.expr = expr def __repr__(self): - return '%s: (%s) %s' % (self.id, self.expr, list(map(None, self.ids))) + return '{0}: ({1}) {2}'.format( + self.id, self.expr, + list(map(None, self.ids)) + ) __str__ = __repr__ @@ -86,8 +89,8 @@ def index_object(self, documentId, o): except ConflictError: raise except Exception: - LOG.warn('eval() failed Object: %s, expr: %s' % - (o.getId(), 
self.expr), exc_info=sys.exc_info()) + LOG.warn('eval() failed Object: %s, expr: %s', + o.getId(), self.expr, exc_info=sys.exc_info()) def factory(f_id, f_type, expr): @@ -95,4 +98,4 @@ def factory(f_id, f_type, expr): if f_type == 'PythonFilteredSet': return PythonFilteredSet(f_id, expr) else: - raise TypeError('unknown type for FilteredSets: %s' % f_type) + raise TypeError('unknown type for FilteredSets: {0}'.format(f_type)) diff --git a/src/Products/PluginIndexes/TopicIndex/TopicIndex.py b/src/Products/PluginIndexes/TopicIndex/TopicIndex.py index 39b0090b..1c18b0d4 100644 --- a/src/Products/PluginIndexes/TopicIndex/TopicIndex.py +++ b/src/Products/PluginIndexes/TopicIndex/TopicIndex.py @@ -41,7 +41,7 @@ class TopicIndex(Persistent, SimpleItem): Ids of indexed objects that eval with this expression to 1. """ - meta_type = "TopicIndex" + meta_type = 'TopicIndex' query_options = ('query', 'operator') manage_options = ( @@ -74,16 +74,16 @@ def unindex_object(self, docid): fs.unindex_object(docid) except KeyError: LOG.debug('Attempt to unindex document' - ' with id %s failed' % docid) + ' with id %s failed', docid) return 1 def numObjects(self): """Return the number of indexed objects.""" - return "n/a" + return 'n/a' def indexSize(self): """Return the size of the index in terms of distinct values.""" - return "n/a" + return 'n/a' def search(self, filter_id): f = self.filteredSets.get(filter_id, None) @@ -128,8 +128,8 @@ def getEntryForObject(self, docid, default=_marker): def addFilteredSet(self, filter_id, typeFilteredSet, expr): # Add a FilteredSet object. if filter_id in self.filteredSets: - raise KeyError( - 'A FilteredSet with this name already exists: %s' % filter_id) + raise KeyError(('A FilteredSet with this name already ' + 'exists: {0}'.format(filter_id))) self.filteredSets[filter_id] = factory( filter_id, typeFilteredSet, expr) @@ -137,14 +137,14 @@ def delFilteredSet(self, filter_id): # Delete the FilteredSet object specified by 'filter_id'. 
if filter_id not in self.filteredSets: raise KeyError( - 'no such FilteredSet: %s' % filter_id) + 'no such FilteredSet: {0}'.format(filter_id)) del self.filteredSets[filter_id] def clearFilteredSet(self, filter_id): # Clear the FilteredSet object specified by 'filter_id'. f = self.filteredSets.get(filter_id, None) if f is None: - raise KeyError('no such FilteredSet: %s' % filter_id) + raise KeyError('no such FilteredSet: {0}'.format(filter_id)) f.clear() def manage_addFilteredSet(self, filter_id, typeFilteredSet, expr, URL1, diff --git a/src/Products/PluginIndexes/UUIDIndex/UUIDIndex.py b/src/Products/PluginIndexes/UUIDIndex/UUIDIndex.py index 87979dd4..b9e274f6 100644 --- a/src/Products/PluginIndexes/UUIDIndex/UUIDIndex.py +++ b/src/Products/PluginIndexes/UUIDIndex/UUIDIndex.py @@ -36,14 +36,14 @@ class UUIDIndex(UnIndex): For each datum only one documentId can exist. """ - meta_type = "UUIDIndex" + meta_type = 'UUIDIndex' manage_options = ( {'label': 'Settings', 'action': 'manage_main'}, {'label': 'Browse', 'action': 'manage_browse'}, ) - query_options = ["query", "range"] + query_options = ['query', 'range'] manage = manage_main = DTMLFile('dtml/manageUUIDIndex', globals()) manage_main._setName('manage_main') @@ -73,7 +73,7 @@ def uniqueValues(self, name=None, withLengths=0): if name is None: name = self.id elif name != self.id: - raise StopIteration + return if not withLengths: for key in self._index.keys(): @@ -96,7 +96,7 @@ def insertForwardIndexEntry(self, entry, documentId): self._length.change(1) elif old_docid != documentId: logger.error("A different document with value '%s' already " - "exists in the index.'" % entry) + 'exists in the index.', entry) def removeForwardIndexEntry(self, entry, documentId): """Take the entry provided and remove any reference to documentId diff --git a/src/Products/PluginIndexes/cache.py b/src/Products/PluginIndexes/cache.py index 7536bda3..76a4cb6e 100644 --- a/src/Products/PluginIndexes/cache.py +++ 
b/src/Products/PluginIndexes/cache.py
@@ -11,8 +11,6 @@
 #
 ##############################################################################
 
-_marker = []
-
 
 class RequestCache(dict):
 
@@ -22,13 +20,11 @@ class RequestCache(dict):
     _sets = 0
 
     def get(self, key, default=None):
-        value = super(RequestCache, self).get(key, _marker)
-
-        if value is _marker:
-            self._misses += 1
+        try:
+            value = self[key]
+        except KeyError:
             return default
 
-        self._hits += 1
         return value
 
     def __getitem__(self, key):
@@ -58,5 +54,6 @@ def stats(self):
         return stats
 
     def __str__(self):
-        return "<RequestCache: size=%s, hits=%s, misses=%s, sets=%s>" % \
-            (len(self), self._hits, self._misses, self._sets)
+        return ('<RequestCache: size={0}, hits={1}, misses={2}, '
+                'sets={3}>').format(len(self), self._hits,
+                                    self._misses, self._sets)
diff --git a/src/Products/PluginIndexes/tests/test_unindex.py b/src/Products/PluginIndexes/tests/test_unindex.py
index 8579c528..383306e3 100644
--- a/src/Products/PluginIndexes/tests/test_unindex.py
+++ b/src/Products/PluginIndexes/tests/test_unindex.py
@@ -1,4 +1,4 @@
-##############################################################################
+#############################################################################
 #
 # Copyright (c) 2002 Zope Foundation and Contributors.
# @@ -67,17 +67,17 @@ def test_removeForwardIndexEntry_with_ConflictError(self): 'conflicts', 42) def test_get_object_datum(self): - from Products.PluginIndexes.common.UnIndex import _marker + from Products.PluginIndexes.unindex import _marker idx = self._makeOne('interesting') dummy = object() - self.assertEquals(idx._get_object_datum(dummy, 'interesting'), _marker) + self.assertEqual(idx._get_object_datum(dummy, 'interesting'), _marker) class DummyContent2(object): interesting = 'GOT IT' dummy = DummyContent2() - self.assertEquals(idx._get_object_datum(dummy, 'interesting'), - 'GOT IT') + self.assertEqual(idx._get_object_datum(dummy, 'interesting'), + 'GOT IT') class DummyContent3(object): exc = None @@ -87,14 +87,14 @@ def interesting(self): raise self.exc return 'GOT IT' dummy = DummyContent3() - self.assertEquals(idx._get_object_datum(dummy, 'interesting'), - 'GOT IT') + self.assertEqual(idx._get_object_datum(dummy, 'interesting'), + 'GOT IT') dummy.exc = AttributeError - self.assertEquals(idx._get_object_datum(dummy, 'interesting'), _marker) + self.assertEqual(idx._get_object_datum(dummy, 'interesting'), _marker) dummy.exc = TypeError - self.assertEquals(idx._get_object_datum(dummy, 'interesting'), _marker) + self.assertEqual(idx._get_object_datum(dummy, 'interesting'), _marker) def test_cache(self): idx = self._makeOne(id='foo') @@ -124,7 +124,7 @@ def testQuery(record, expect=1): self.assertEqual(len(result), 0) # Dummy tests, result is always empty. - # TODO: Sophisticated tests have to be placed on tests + # NOTE: Sophisticated tests have to be placed on tests # of inherited classes (FieldIndex, KeywordIndex etc.) 
# # 'or' operator diff --git a/src/Products/PluginIndexes/unindex.py b/src/Products/PluginIndexes/unindex.py index 7f0547ec..79aea817 100644 --- a/src/Products/PluginIndexes/unindex.py +++ b/src/Products/PluginIndexes/unindex.py @@ -183,7 +183,7 @@ def removeForwardIndexEntry(self, entry, documentId): try: del self._index[entry] except KeyError: - # XXX swallow KeyError because it was probably + # swallow KeyError because it was probably # removed and then _length AttributeError raised pass if isinstance(self.__len__, Length): @@ -191,18 +191,20 @@ def removeForwardIndexEntry(self, entry, documentId): del self.__len__ self._length.change(-1) except Exception: - LOG.error('%s: unindex_object could not remove ' - 'documentId %s from index %s. This ' - 'should not happen.' % - (self.__class__.__name__, - str(documentId), str(self.id)), + LOG.error('%(context)s: unindex_object could not remove ' + 'documentId %(doc_id)s from index %(index)r. This ' + 'should not happen.', dict( + context=self.__class__.__name__, + doc_id=documentId, + index=self.id), exc_info=sys.exc_info()) else: - LOG.error('%s: unindex_object tried to retrieve set %s ' - 'from index %s but couldn\'t. This ' - 'should not happen.' % - (self.__class__.__name__, - repr(entry), str(self.id))) + LOG.error('%(context)s: unindex_object tried to ' + 'retrieve set %(entry)r from index %(index)r ' + 'but couldn\'t. 
This should not happen.', dict( + context=self.__class__.__name__, + entry=entry, + index=self.id)) def insertForwardIndexEntry(self, entry, documentId): """Take the entry provided and put it in the correct place @@ -267,7 +269,7 @@ def _index_object(self, documentId, obj, threshold=None, attr=''): raise except Exception: LOG.error('Should not happen: oldDatum was there, ' - 'now its not, for document: %s' % documentId) + 'now its not, for document: %s', documentId) if datum is not _marker: self.insertForwardIndexEntry(datum, documentId) @@ -323,7 +325,7 @@ def unindex_object(self, documentId): raise except Exception: LOG.debug('Attempt to unindex nonexistent document' - ' with id %s' % documentId, exc_info=True) + ' with id %s', documentId, exc_info=True) def _apply_not(self, not_parm, resultset=None): index = self._index @@ -351,7 +353,8 @@ def getRequestCache(self): catalog = aq_parent(aq_parent(aq_inner(self))) if catalog is not None: # unique catalog identifier - key = '_catalogcache_%s_%s' % (catalog.getId(), id(catalog)) + key = '_catalogcache_{0}_{1}'.format( + catalog.getId(), id(catalog)) cache = REQUEST.get(key, None) if cache is None: cache = REQUEST[key] = RequestCache() @@ -385,8 +388,8 @@ def getRequestCacheKey(self, record, resultset=None): rid = frozenset(params) # unique index identifier - iid = '_%s_%s_%s' % (self.__class__.__name__, - self.id, self.getCounter()) + iid = '_{0}_{1}_{2}'.format(self.__class__.__name__, + self.id, self.getCounter()) return (iid, rid) def _apply_index(self, request, resultset=None): @@ -471,19 +474,19 @@ def query_index(self, record, resultset=None): # Range parameter range_parm = record.get('range', None) if range_parm: - opr = "range" + opr = 'range' opr_args = [] - if range_parm.find("min") > -1: - opr_args.append("min") - if range_parm.find("max") > -1: - opr_args.append("max") + if range_parm.find('min') > -1: + opr_args.append('min') + if range_parm.find('max') > -1: + opr_args.append('max') if 
record.get('usage', None): # see if any usage params are sent to field opr = record.usage.lower().split(':') opr, opr_args = opr[0], opr[1:] - if opr == "range": # range search + if opr == 'range': # range search if 'min' in opr_args: lo = min(record.keys) else: @@ -564,9 +567,9 @@ def query_index(self, record, resultset=None): except TypeError: # key is not valid for this Btree so the value is None LOG.error( - '{context!s}: query_index tried ' - 'to look up key {key!r} from index {index!r} ' - 'but key was of the wrong type.'.format( + '%(context)s: query_index tried ' + 'to look up key %(key)r from index %(index)r ' + 'but key was of the wrong type.', dict( context=self.__class__.__name__, key=k, index=self.id, @@ -675,7 +678,7 @@ def uniqueValues(self, name=None, withLengths=0): if name is None: name = self.id elif name != self.id: - raise StopIteration + return if not withLengths: for key in self._index.keys(): diff --git a/src/Products/PluginIndexes/util.py b/src/Products/PluginIndexes/util.py index 487b374f..f7046f86 100644 --- a/src/Products/PluginIndexes/util.py +++ b/src/Products/PluginIndexes/util.py @@ -21,9 +21,15 @@ def safe_callable(ob): # Works with ExtensionClasses and Acquisition. - if hasattr(ob, '__class__'): - return hasattr(ob, '__call__') or isinstance(ob, six.class_types) - else: + try: + ob.__class__ + + try: + return bool(ob.__call__) + except AttributeError: + return isinstance(ob, six.class_types) + + except AttributeError: return callable(ob) @@ -47,6 +53,6 @@ def datetime_to_minutes(value, precision=1, if value > max_value or value < min_value: # value must be integer fitting in the range (default 32bit) raise OverflowError( - '%s is not within the range of dates allowed.' 
% value) + '{0} is not within the range of dates allowed.'.format(value)) return value diff --git a/src/Products/ZCTextIndex/RiceCode.py b/src/Products/ZCTextIndex/RiceCode.py index 71de1996..39073bbf 100644 --- a/src/Products/ZCTextIndex/RiceCode.py +++ b/src/Products/ZCTextIndex/RiceCode.py @@ -122,7 +122,7 @@ def __len__(self): def tolist(self): """Return the items as a list.""" - l = [] + result = [] i = 0 # bit offset binary_range = range(self.m) for j in range(self.len): @@ -136,8 +136,8 @@ def tolist(self): for k in binary_range: binary = (binary << 1) | self.bits[i] i += 1 - l.append((unary << self.m) + (binary + 1)) - return l + result.append((unary << self.m) + (binary + 1)) + return result def tostring(self): """Return a binary string containing the encoded data. @@ -175,8 +175,8 @@ def encode_deltas(l): def decode_deltas(start, enc_deltas): deltas = enc_deltas.tolist() - l = [start] + result = [start] for i in range(1, len(deltas)): - l.append(l[i - 1] + deltas[i]) - l.append(l[-1] + deltas[-1]) - return l + result.append(result[i - 1] + deltas[i]) + result.append(result[-1] + deltas[-1]) + return result diff --git a/src/Products/ZCTextIndex/SetOps.py b/src/Products/ZCTextIndex/SetOps.py index f3fe8eef..858dd031 100644 --- a/src/Products/ZCTextIndex/SetOps.py +++ b/src/Products/ZCTextIndex/SetOps.py @@ -21,11 +21,11 @@ from Products.ZCTextIndex.NBest import NBest -def mass_weightedIntersection(l): +def mass_weightedIntersection(l_): "A list of (mapping, weight) pairs -> their weightedIntersection IIBucket." - l = [(x, wx) for (x, wx) in l if x is not None] - if len(l) < 2: - return _trivial(l) + l_ = [(x, wx) for (x, wx) in l_ if x is not None] + if len(l_) < 2: + return _trivial(l_) # Intersect with smallest first. We expect the input maps to be # IIBuckets, so it doesn't hurt to get their lengths repeatedly # (len(Bucket) is fast; len(BTree) is slow). 
@@ -33,21 +33,21 @@ def mass_weightedIntersection(l): def _key(value): return len(value) - l.sort(key=_key) - (x, wx), (y, wy) = l[:2] + l_.sort(key=_key) + (x, wx), (y, wy) = l_[:2] dummy, result = weightedIntersection(x, y, wx, wy) - for x, wx in l[2:]: + for x, wx in l_[2:]: dummy, result = weightedIntersection(result, x, 1, wx) return result -def mass_weightedUnion(l): +def mass_weightedUnion(l_): "A list of (mapping, weight) pairs -> their weightedUnion IIBucket." - if len(l) < 2: - return _trivial(l) + if len(l_) < 2: + return _trivial(l_) # Balance unions as closely as possible, smallest to largest. - merge = NBest(len(l)) - for x, weight in l: + merge = NBest(len(l_)) + for x, weight in l_: merge.add((x, weight), len(x)) while len(merge) > 1: # Merge the two smallest so far, and add back to the queue. @@ -59,13 +59,13 @@ def mass_weightedUnion(l): return result -def _trivial(l): +def _trivial(l_): # l is empty or has only one (mapping, weight) pair. If there is a # pair, we may still need to multiply the mapping by its weight. 
- assert len(l) <= 1 - if len(l) == 0: + assert len(l_) <= 1 + if len(l_) == 0: return IIBucket() - [(result, weight)] = l + [(result, weight)] = l_ if weight != 1: dummy, result = weightedUnion(IIBucket(), result, 0, weight) return result diff --git a/src/Products/ZCTextIndex/ZCTextIndex.py b/src/Products/ZCTextIndex/ZCTextIndex.py index bd8cd69f..b31d0fe4 100644 --- a/src/Products/ZCTextIndex/ZCTextIndex.py +++ b/src/Products/ZCTextIndex/ZCTextIndex.py @@ -247,7 +247,7 @@ def getIndexSourceNames(self): """Return sequence of names of indexed attributes""" try: return self._indexed_attrs - except: + except Exception: return [self._fieldname] def getIndexQueryNames(self): diff --git a/src/Products/ZCTextIndex/tests/testIndex.py b/src/Products/ZCTextIndex/tests/testIndex.py index b77fba6c..0528372c 100644 --- a/src/Products/ZCTextIndex/tests/testIndex.py +++ b/src/Products/ZCTextIndex/tests/testIndex.py @@ -35,11 +35,11 @@ def setUp(self): self.index = self.IndexFactory(self.lexicon) def test_index_document(self, docid=1): - doc = "simple document contains five words" - self.assert_(not self.index.has_doc(docid)) + doc = 'simple document contains five words' + self.assertFalse(self.index.has_doc(docid)) self.index.index_doc(docid, doc) - self.assert_(self.index.has_doc(docid)) - self.assert_(self.index._docweight[docid]) + self.assertTrue(self.index.has_doc(docid)) + self.assertTrue(self.index._docweight[docid]) self.assertEqual(len(self.index._docweight), 1) self.assertEqual( len(self.index._docweight), self.index.document_count()) @@ -50,7 +50,7 @@ def test_index_document(self, docid=1): self.index.length()) for map in self.index._wordinfo.values(): self.assertEqual(len(map), 1) - self.assert_(docid in map) + self.assertIn(docid, map) def test_unindex_document(self): docid = 1 @@ -66,10 +66,10 @@ def test_unindex_document(self): def test_index_two_documents(self): self.test_index_document() - doc = "another document just four" + doc = 'another document just four' 
docid = 2 self.index.index_doc(docid, doc) - self.assert_(self.index._docweight[docid]) + self.assertTrue(self.index._docweight[docid]) self.assertEqual(len(self.index._docweight), 2) self.assertEqual( len(self.index._docweight), self.index.document_count()) @@ -78,14 +78,14 @@ def test_index_two_documents(self): self.assertEqual(len(self.index.get_words(docid)), 4) self.assertEqual(len(self.index._wordinfo), self.index.length()) - wids = self.lexicon.termToWordIds("document") + wids = self.lexicon.termToWordIds('document') self.assertEqual(len(wids), 1) document_wid = wids[0] for wid, map in self.index._wordinfo.items(): if wid == document_wid: self.assertEqual(len(map), 2) - self.assert_(1 in map) - self.assert_(docid in map) + self.assertIn(1, map) + self.assertIn(docid, map) else: self.assertEqual(len(map), 1) @@ -97,7 +97,7 @@ def test_index_two_unindex_one(self): self.assertEqual(len(self.index._docweight), 1) self.assertEqual( len(self.index._docweight), self.index.document_count()) - self.assert_(self.index._docweight[docid]) + self.assertTrue(self.index._docweight[docid]) self.assertEqual(len(self.index._wordinfo), 4) self.assertEqual(len(self.index._docwords), 1) self.assertEqual(len(self.index.get_words(docid)), 4) @@ -105,12 +105,12 @@ def test_index_two_unindex_one(self): self.index.length()) for map in self.index._wordinfo.values(): self.assertEqual(len(map), 1) - self.assert_(docid in map) + self.assertIn(docid, map) def test_index_duplicated_words(self, docid=1): - doc = "very simple repeat repeat repeat document test" + doc = 'very simple repeat repeat repeat document test' self.index.index_doc(docid, doc) - self.assert_(self.index._docweight[docid]) + self.assertTrue(self.index._docweight[docid]) self.assertEqual(len(self.index._wordinfo), 5) self.assertEqual(len(self.index._docwords), 1) self.assertEqual(len(self.index.get_words(docid)), 7) @@ -118,41 +118,41 @@ def test_index_duplicated_words(self, docid=1): self.index.length()) 
self.assertEqual( len(self.index._docweight), self.index.document_count()) - wids = self.lexicon.termToWordIds("repeat") + wids = self.lexicon.termToWordIds('repeat') self.assertEqual(len(wids), 1) for wid, map in self.index._wordinfo.items(): self.assertEqual(len(map), 1) - self.assert_(docid in map) + self.assertIn(docid, map) def test_simple_query_oneresult(self): self.index.index_doc(1, 'not the same document') - results = self.index.search("document") + results = self.index.search('document') self.assertEqual(list(results.keys()), [1]) def test_simple_query_noresults(self): self.index.index_doc(1, 'not the same document') - results = self.index.search("frobnicate") + results = self.index.search('frobnicate') self.assertEqual(list(results.keys()), []) def test_query_oneresult(self): self.index.index_doc(1, 'not the same document') self.index.index_doc(2, 'something about something else') - results = self.index.search("document") + results = self.index.search('document') self.assertEqual(list(results.keys()), [1]) def test_search_phrase(self): - self.index.index_doc(1, "the quick brown fox jumps over the lazy dog") - self.index.index_doc(2, "the quick fox jumps lazy over the brown dog") - results = self.index.search_phrase("quick brown fox") + self.index.index_doc(1, 'the quick brown fox jumps over the lazy dog') + self.index.index_doc(2, 'the quick fox jumps lazy over the brown dog') + results = self.index.search_phrase('quick brown fox') self.assertEqual(list(results.keys()), [1]) def test_search_glob(self): - self.index.index_doc(1, "how now brown cow") - self.index.index_doc(2, "hough nough browne cough") - self.index.index_doc(3, "bar brawl") - results = self.index.search_glob("bro*") + self.index.index_doc(1, 'how now brown cow') + self.index.index_doc(2, 'hough nough browne cough') + self.index.index_doc(3, 'bar brawl') + results = self.index.search_glob('bro*') self.assertEqual(list(results.keys()), [1, 2]) - results = self.index.search_glob("b*") + 
results = self.index.search_glob('b*') self.assertEqual(list(results.keys()), [1, 2, 3]) @@ -174,7 +174,7 @@ def tearDown(self): self.storage.cleanup() def openDB(self): - n = 'fs_tmp__%s' % os.getpid() + n = 'fs_tmp__{0}'.format(os.getpid()) self.storage = FileStorage(n) self.db = DB(self.storage) @@ -271,12 +271,12 @@ def test_upgrade_document_count(self): del self.index1.document_count self.index1.index_doc(1, 'gazes upon my shadow') self.index2.index_doc(1, 'gazes upon my shadow') - self.assert_(self.index1.document_count.__class__ is Length) + self.assertIs(self.index1.document_count.__class__, Length) self.assertEqual( self.index1.document_count(), self.index2.document_count()) del self.index1.document_count self.index1.unindex_doc(0) self.index2.unindex_doc(0) - self.assert_(self.index1.document_count.__class__ is Length) + self.assertIs(self.index1.document_count.__class__, Length) self.assertEqual( self.index1.document_count(), self.index2.document_count()) diff --git a/src/Products/ZCTextIndex/tests/testLexicon.py b/src/Products/ZCTextIndex/tests/testLexicon.py index 751bc1d7..4974e697 100644 --- a/src/Products/ZCTextIndex/tests/testLexicon.py +++ b/src/Products/ZCTextIndex/tests/testLexicon.py @@ -110,7 +110,7 @@ def testTermToWordIds(self): wids = lexicon.sourceToWordIds('cats and dogs') wids = lexicon.termToWordIds('dogs') self.assertEqual(len(wids), 1) - self.assert_(wids[0] > 0) + self.assertGreater(wids[0], 0) def testMissingTermToWordIds(self): from Products.ZCTextIndex.Lexicon import Splitter @@ -132,7 +132,7 @@ def process_post_glob(self, lst): wids = lexicon.sourceToWordIds('cats and dogs') wids = lexicon.termToWordIds('dogs') self.assertEqual(len(wids), 1) - self.assert_(wids[0] > 0) + self.assertTrue(wids[0] > 0) def testMissingTermToWordIdsWithProcess_post_glob(self): """This test is for added process_post_glob""" @@ -155,7 +155,7 @@ def testOnePipelineElement(self): wids = lexicon.sourceToWordIds('cats and dogs') wids = 
lexicon.termToWordIds('fish') self.assertEqual(len(wids), 1) - self.assert_(wids[0] > 0) + self.assertTrue(wids[0] > 0) def testSplitterAdaptorFold(self): from Products.ZCTextIndex.Lexicon import CaseNormalizer @@ -188,7 +188,7 @@ def testTwoElementPipeline(self): wids = lexicon.sourceToWordIds('cats and dogs') wids = lexicon.termToWordIds('hsif') self.assertEqual(len(wids), 1) - self.assert_(wids[0] > 0) + self.assertTrue(wids[0] > 0) def testThreeElementPipeline(self): from Products.ZCTextIndex.Lexicon import Splitter @@ -201,7 +201,7 @@ def testThreeElementPipeline(self): wids = lexicon.sourceToWordIds('cats and dogs') wids = lexicon.termToWordIds('hsif') self.assertEqual(len(wids), 1) - self.assert_(wids[0] > 0) + self.assertTrue(wids[0] > 0) def testSplitterLocaleAwareness(self): import locale @@ -219,7 +219,7 @@ def testSplitterLocaleAwareness(self): return # This test doesn't work here :-( expected = ['m\xfclltonne', 'waschb\xe4r', 'beh\xf6rde', '\xfcberflieger'] - words = [" ".join(expected)] + words = [' '.join(expected)] words = Splitter().process(words) self.assertEqual(words, expected) words = HTMLWordSplitter().process(words) @@ -247,29 +247,29 @@ def openDB(self): from ZODB.DB import DB from ZODB.FileStorage import FileStorage - n = 'fs_tmp__%s' % os.getpid() + n = 'fs_tmp__{0}'.format(os.getpid()) self.storage = FileStorage(n) self.db = DB(self.storage) def testAddWordConflict(self): from Products.ZCTextIndex.Lexicon import Splitter - self.l = self._makeOne(Splitter()) + self.lex = self._makeOne(Splitter()) self.openDB() r1 = self.db.open().root() - r1['l'] = self.l + r1['lex'] = self.lex transaction.commit() r2 = self.db.open().root() - copy = r2['l'] + copy = r2['lex'] # Make sure the data is loaded list(copy._wids.items()) list(copy._words.items()) copy.length() - self.assertEqual(self.l._p_serial, copy._p_serial) + self.assertEqual(self.lex._p_serial, copy._p_serial) - self.l.sourceToWordIds('mary had a little lamb') + 
self.lex.sourceToWordIds('mary had a little lamb') transaction.commit() copy.sourceToWordIds('whose fleece was') diff --git a/src/Products/ZCTextIndex/tests/testZCTextIndex.py b/src/Products/ZCTextIndex/tests/testZCTextIndex.py index 4aa60ccc..0783b275 100644 --- a/src/Products/ZCTextIndex/tests/testZCTextIndex.py +++ b/src/Products/ZCTextIndex/tests/testZCTextIndex.py @@ -70,7 +70,7 @@ def dummyUnrestrictedTraverse(self, path): def eq(scaled1, scaled2, epsilon=scaled_int(0.01)): if abs(scaled1 - scaled2) > epsilon: - raise AssertionError("%s != %s" % (scaled1, scaled2)) + raise AssertionError('{0} != {1}'.format(scaled1, scaled2)) # A series of text chunks to use for the re-index tests (testDocUpdate). @@ -203,12 +203,12 @@ def testReindex(self): def testStopWords(self): # the only non-stopword is question - text = ("to be or not to be " - "that is the question") + text = ('to be or not to be ' + 'that is the question') doc = Indexable(text) self.zc_index.index_object(1, doc) for word in text.split(): - if word != "question": + if word != 'question': wids = self.lexicon.termToWordIds(word) self.assertEqual(wids, []) self.assertEqual(len(self.index.get_words(1)), 1) @@ -239,7 +239,7 @@ def testDocUpdate(self): d = {} # word -> list of version numbers containing that word for version, i in zip(text, range(N)): # use a simple splitter rather than an official one - words = [w for w in re.split("\W+", version.lower()) + words = [w for w in re.split('\W+', version.lower()) if len(w) > 1 and w not in stop] word_seen = {} for w in words: @@ -254,21 +254,24 @@ def testDocUpdate(self): unique.setdefault(versionlist[0], []).append(w) elif len(versionlist) == N: common.append(w) - self.assert_(len(common) > 0) - self.assert_(len(unique) > 0) + self.assertGreater(len(common), 0) + self.assertGreater(len(unique), 0) for version, i in zip(text, range(N)): doc = Indexable(version) self.zc_index.index_object(docid, doc) for w in common: nbest, total = self.zc_index.query(w) - 
self.assertEqual(total, 1, "did not find %s" % w) + self.assertEqual(total, 1, 'did not find {0}'.format(w)) for k, v in unique.items(): if k == i: continue for w in v: nbest, total = self.zc_index.query(w) - self.assertEqual(total, 0, "did not expect to find %s" % w) + self.assertEqual( + total, 0, + 'did not expect to find {0}'.format(w) + ) class CosineIndexTests(ZCIndexTestsBase, testIndex.CosineIndexTest): @@ -287,14 +290,14 @@ def test_z3interfaces(self): verifyClass(IZCTextIndex, ZCTextIndex) def testRanking(self): - self.words = ["cold", "days", "eat", "hot", "lot", "nine", "old", - "pease", "porridge", "pot"] - self.docs = ["Pease porridge hot, pease porridge cold,", - "Pease porridge in the pot,", - "Nine days old.", - "In the pot cold, in the pot hot,", - "Pease porridge, pease porridge,", - "Eat the lot."] + self.words = ['cold', 'days', 'eat', 'hot', 'lot', 'nine', 'old', + 'pease', 'porridge', 'pot'] + self.docs = ['Pease porridge hot, pease porridge cold,', + 'Pease porridge in the pot,', + 'Nine days old.', + 'In the pot cold, in the pot hot,', + 'Pease porridge, pease porridge,', + 'Eat the lot.'] self._ranking_index() self._ranking_tf() self._ranking_idf() @@ -302,28 +305,28 @@ def testRanking(self): # A digression to exercise re-indexing. 
docs = self.docs - for variant in "hot cold porridge python", "pease hot pithy": + for variant in ('hot cold porridge python', 'pease hot pithy'): self.zc_index.index_object(len(docs), Indexable(variant)) try: self._ranking_tf() except (AssertionError, KeyError): pass else: - self.fail("expected _ranking_tf() to fail -- reindex") + self.fail('expected _ranking_tf() to fail -- reindex') try: self._ranking_idf() except (AssertionError, KeyError): pass else: - self.fail("expected _ranking_idf() to fail -- reindex") + self.fail('expected _ranking_idf() to fail -- reindex') try: self._ranking_queries() except AssertionError: pass else: - self.fail("expected _ranking_queries() to fail -- reindex") + self.fail('expected _ranking_queries() to fail -- reindex') # This should leave things exactly as they were. self.zc_index.index_object(len(docs), Indexable(docs[-1])) @@ -365,8 +368,8 @@ def _ranking_idf(self): eq(scaled_int(idfs[i]), self.index._get_wt(word)) def _ranking_queries(self): - queries = ["eat", "porridge", "hot OR porridge", - "eat OR nine OR day OR old OR porridge"] + queries = ['eat', 'porridge', 'hot OR porridge', + 'eat OR nine OR day OR old OR porridge'] wqs = [1.95, 1.10, 1.77, 3.55] results = [[(6, 0.71)], [(1, 0.61), (2, 0.58), (5, 0.71)], @@ -385,7 +388,7 @@ def _ranking_queries(self): d[doc] = scaled_int(score) for doc, score in r: score = scaled_int(float(score / SCALE_FACTOR) / wq) - self.assert_(0 <= score <= SCALE_FACTOR) + self.assertTrue(0 <= score <= SCALE_FACTOR) eq(d[doc], score) @@ -393,9 +396,9 @@ class OkapiIndexTests(ZCIndexTestsBase, testIndex.OkapiIndexTest): # A white-box test. def testAbsoluteScores(self): - docs = ["one", - "one two", - "one two three"] + docs = ['one', + 'one two', + 'one two three'] for i in range(len(docs)): self.zc_index.index_object(i + 1, Indexable(docs[i])) @@ -403,14 +406,14 @@ def testAbsoluteScores(self): self._checkAbsoluteScores() # Exercise re-indexing. 
- for variant in "one xyz", "xyz two three", "abc def": + for variant in ('one xyz', 'xyz two three', 'abc def'): self.zc_index.index_object(len(docs), Indexable(variant)) try: self._checkAbsoluteScores() except AssertionError: pass else: - self.fail("expected _checkAbsoluteScores() to fail -- reindex") + self.fail('expected _checkAbsoluteScores() to fail -- reindex') # This should leave things exactly as they were. self.zc_index.index_object(len(docs), Indexable(docs[-1])) self._checkAbsoluteScores() @@ -419,31 +422,31 @@ def _checkAbsoluteScores(self): self.assertEqual(self.index._totaldoclen(), 6) # So the mean doc length is 2. We use that later. - r, num = self.zc_index.query("one") + r, num = self.zc_index.query('one') self.assertEqual(num, 3) self.assertEqual(len(r), 3) - # Because our Okapi's B parameter is > 0, and "one" only appears + # Because our Okapi's B parameter is > 0, and 'one' only appears # once in each doc, the verbosity hypothesis favors shorter docs. self.assertEqual([doc for doc, score in r], [1, 2, 3]) # The way the Okapi math works, a word that appears exactly once in # an average (length) doc gets tf score 1. Our second doc has # an average length, so its score should by 1 (tf) times the - # inverse doc frequency of "one". But "one" appears in every + # inverse doc frequency of 'one'. But 'one' appears in every # doc, so its IDF is log(1 + 3/3) = log(2). self.assertEqual(r[1][1], scaled_int(inverse_doc_frequency(3, 3))) - # Similarly for "two". - r, num = self.zc_index.query("two") + # Similarly for 'two'. + r, num = self.zc_index.query('two') self.assertEqual(num, 2) self.assertEqual(len(r), 2) self.assertEqual([doc for doc, score in r], [2, 3]) self.assertEqual(r[0][1], scaled_int(inverse_doc_frequency(2, 3))) - # And "three", except that doesn't appear in an average-size doc, so + # And 'three', except that doesn't appear in an average-size doc, so # the math is much more involved. 
- r, num = self.zc_index.query("three") + r, num = self.zc_index.query('three') self.assertEqual(num, 1) self.assertEqual(len(r), 1) self.assertEqual([doc for doc, score in r], [3]) @@ -457,48 +460,48 @@ def _checkAbsoluteScores(self): # to think. def testRelativeScores(self): # Create 9 10-word docs. - # All contain one instance of "one". - # Doc #i contains i instances of "two" and 9-i of "xyz". + # All contain one instance of 'one'. + # Doc #i contains i instances of 'two' and 9-i of 'xyz'. for i in range(1, 10): - doc = "one " + "two " * i + "xyz " * (9 - i) + doc = 'one ' + 'two ' * i + 'xyz ' * (9 - i) self.zc_index.index_object(i, Indexable(doc)) self._checkRelativeScores() # Exercise re-indexing. - self.zc_index.index_object(9, Indexable("two xyz")) + self.zc_index.index_object(9, Indexable('two xyz')) try: self._checkRelativeScores() except AssertionError: pass else: - self.fail("expected _checkRelativeScores() to fail after reindex") + self.fail('expected _checkRelativeScores() to fail after reindex') # This should leave things exactly as they were. self.zc_index.index_object(9, Indexable(doc)) self._checkRelativeScores() def _checkRelativeScores(self): - r, num = self.zc_index.query("one two") + r, num = self.zc_index.query('one two') self.assertEqual(num, 9) self.assertEqual(len(r), 9) # The more twos in a doc, the better the score should be. self.assertEqual([doc for doc, score in r], list(range(9, 0, -1))) - # Search for "two" alone shouldn't make any difference to relative + # Search for 'two' alone shouldn't make any difference to relative # results. - r, num = self.zc_index.query("two") + r, num = self.zc_index.query('two') self.assertEqual(num, 9) self.assertEqual(len(r), 9) self.assertEqual([doc for doc, score in r], list(range(9, 0, -1))) # Searching for xyz should skip doc 9, and favor the lower-numbered # docs (they have more instances of xyz). 
- r, num = self.zc_index.query("xyz") + r, num = self.zc_index.query('xyz') self.assertEqual(num, 8) self.assertEqual(len(r), 8) self.assertEqual([doc for doc, score in r], list(range(1, 9))) - # And relative results shouldn't change if we add "one". - r, num = self.zc_index.query("xyz one") + # And relative results shouldn't change if we add 'one'. + r, num = self.zc_index.query('xyz one') self.assertEqual(num, 8) self.assertEqual(len(r), 8) self.assertEqual([doc for doc, score in r], list(range(1, 9))) @@ -514,7 +517,7 @@ def _checkRelativeScores(self): # The loser will be the most unbalanced, but is that doc 1 (1 two 8 # xyz) or doc 8 (8 two 1 xyz)? Again xyz has a higher idf, so doc 1 # is more valuable, and doc 8 is the loser. - r, num = self.zc_index.query("xyz one two") + r, num = self.zc_index.query('xyz one two') self.assertEqual(num, 8) self.assertEqual(len(r), 8) self.assertEqual(r[0][0], 4) # winner @@ -523,9 +526,9 @@ def _checkRelativeScores(self): self.assertEqual(r[-2][0], 1) # penultimate loser # And nothing about the relative results in the last test should - # change if we leave "one" out of the search (it appears in all + # change if we leave 'one' out of the search (it appears in all # docs, so it's a wash). 
- r, num = self.zc_index.query("two xyz") + r, num = self.zc_index.query('two xyz') self.assertEqual(num, 8) self.assertEqual(len(r), 8) self.assertEqual(r[0][0], 4) # winner @@ -546,7 +549,7 @@ class QueryTestsBase(object): # docid 3: foo, ham # docid 4: ham - docs = ["foo bar ham", "bar ham", "foo ham", "ham"] + docs = ['foo bar ham', 'bar ham', 'foo ham', 'ham'] def setUp(self): self.lexicon = PLexicon('lexicon', '', diff --git a/src/Products/ZCatalog/Catalog.py b/src/Products/ZCatalog/Catalog.py index 68e6774f..d68bd571 100644 --- a/src/Products/ZCatalog/Catalog.py +++ b/src/Products/ZCatalog/Catalog.py @@ -164,8 +164,8 @@ def addColumn(self, name, default_value=None, threshold=10000): if name != name.strip(): # Someone could have mistakenly added a space at the end # of the input field. - LOG.warn("stripped space from new column %r -> %r", name, - name.strip()) + LOG.warning('stripped space from new column %r -> %r', name, + name.strip()) name = name.strip() if name in schema: @@ -206,7 +206,7 @@ def delColumn(self, name, threshold=10000): if name not in self.schema: LOG.error('delColumn attempted to delete nonexistent ' - 'column %s.' % str(name)) + 'column %s.', str(name)) return del names[_index] @@ -254,8 +254,8 @@ def addIndex(self, name, index_type): if name != name.strip(): # Someone could have mistakenly added a space at the end # of the input field. - LOG.warn("stripped space from new index %r -> %r", name, - name.strip()) + LOG.warning('stripped space from new index %r -> %r', name, + name.strip()) name = name.strip() indexes = self.indexes @@ -370,7 +370,7 @@ def catalogObject(self, object, uid, threshold=None, idxs=None, total = total + blah else: LOG.error('catalogObject was passed bad index ' - 'object %s.' % str(x)) + 'object %s.', str(x)) return total @@ -404,7 +404,7 @@ def uncatalogObject(self, uid): else: LOG.error('uncatalogObject unsuccessfully ' 'attempted to uncatalog an object ' - 'with a uid of %s. ' % str(uid)) + 'with a uid of %s. 
', str(uid)) def uniqueValuesFor(self, name): """ return unique values for FieldIndex name """ diff --git a/src/Products/ZCatalog/ZCatalog.py b/src/Products/ZCatalog/ZCatalog.py index 2df7078c..03accd98 100644 --- a/src/Products/ZCatalog/ZCatalog.py +++ b/src/Products/ZCatalog/ZCatalog.py @@ -280,7 +280,7 @@ def refreshCatalog(self, clear=0, pghandler=None): except ConflictError: raise except Exception: - LOG.error('Recataloging object at %s failed' % p, + LOG.error('Recataloging object at %s failed', p, exc_info=sys.exc_info()) if pghandler: @@ -429,7 +429,7 @@ def reindexIndex(self, name, REQUEST, pghandler=None): obj = self.resolve_url(p, REQUEST) if obj is None: LOG.error('reindexIndex could not resolve ' - 'an object from the uid %r.' % p) + 'an object from the uid %r.', p) else: # don't update metadata when only reindexing a single # index via the UI diff --git a/src/Products/ZCatalog/tests/test_brains.py b/src/Products/ZCatalog/tests/test_brains.py index f3a563eb..d96e0cc2 100644 --- a/src/Products/ZCatalog/tests/test_brains.py +++ b/src/Products/ZCatalog/tests/test_brains.py @@ -120,9 +120,9 @@ class Brain(AbstractCatalogBrain): def testHasKey(self): b = self._makeBrain(1) - self.assertTrue('test_field' in b) - self.assertTrue('data_record_id_' in b) - self.assertFalse('godel' in b) + self.assertIn('test_field', b) + self.assertIn('data_record_id_', b) + self.assertNotIn('godel', b) def testGetPath(self): b = [self._makeBrain(rid) for rid in range(3)] diff --git a/src/Products/ZCatalog/tests/test_catalog.py b/src/Products/ZCatalog/tests/test_catalog.py index e0d4af26..8cfe8c13 100644 --- a/src/Products/ZCatalog/tests/test_catalog.py +++ b/src/Products/ZCatalog/tests/test_catalog.py @@ -30,7 +30,7 @@ def __init__(self, num): self.num = num def title(self): - return '%d' % self.num + return '{0:d}'.format(self.num) class Dummy(ExtensionClass.Base): @@ -76,7 +76,7 @@ def _make_one(self): def test_add(self): catalog = self._make_one() catalog.addColumn('id') - 
self.assertEqual('id' in catalog.schema, True, 'add column failed') + self.assertIn('id', catalog.schema, 'add column failed') def test_add_bad(self): from Products.ZCatalog.Catalog import CatalogError @@ -86,10 +86,10 @@ def test_add_bad(self): def test_add_with_space(self): catalog = self._make_one() catalog.addColumn(' space ') - self.assertEqual(' space ' not in catalog.schema, True, + self.assertNotIn(' space ', catalog.schema, 'space not stripped in add column') - self.assertEqual('space' in catalog.schema, True, - 'stripping space in add column failed') + self.assertIn('space', catalog.schema, + 'stripping space in add column failed') def test_add_brains(self): catalog = self._make_one() @@ -97,10 +97,10 @@ def test_add_brains(self): catalog.addColumn('col3') for i in range(3): catalog.catalogObject(Dummy(3), repr(i)) - self.assertTrue('col2' not in catalog.data.values()[0]) + self.assertNotIn('col2', catalog.data.values()[0]) catalog.addColumn('col2', default_value='new') - self.assert_('col2' in catalog.schema, 'add column failed') - self.assertTrue('new' in catalog.data.values()[0]) + self.assertIn('col2', catalog.schema, 'add column failed') + self.assertIn('new', catalog.data.values()[0]) def test_add_threshold(self): catalog = self._make_one() @@ -113,7 +113,7 @@ def test_del(self): catalog = self._make_one() catalog.addColumn('id') catalog.delColumn('id') - self.assert_('id' not in catalog.schema, 'del column failed') + self.assertNotIn('id', catalog.schema, 'del column failed') def test_del_brains(self): catalog = self._make_one() @@ -122,10 +122,10 @@ def test_del_brains(self): catalog.addColumn('col3') for i in range(3): catalog.catalogObject(Dummy(3), repr(i)) - self.assertTrue('col2' in catalog.data.values()[0]) + self.assertIn('col2', catalog.data.values()[0]) catalog.delColumn('col2') - self.assert_('col2' not in catalog.schema, 'del column failed') - self.assertTrue('col2' not in catalog.data.values()[0]) + self.assertNotIn('col2', 
catalog.schema, 'del column failed') + self.assertNotIn('col2', catalog.data.values()[0]) def test_del_threshold(self): catalog = self._make_one() @@ -145,7 +145,7 @@ def test_add_field_index(self): catalog = self._make_one() idx = FieldIndex('id') catalog.addIndex('id', idx) - self.assert_(isinstance(catalog.indexes['id'], FieldIndex)) + self.assertIsInstance(catalog.indexes['id'], FieldIndex) def test_add_text_index(self): catalog = self._make_one() @@ -154,33 +154,33 @@ def test_add_text_index(self): index_factory=OkapiIndex, lexicon_id='lexicon') catalog.addIndex('id', idx) i = catalog.indexes['id'] - self.assert_(isinstance(i, ZCTextIndex)) + self.assertIsInstance(i, ZCTextIndex) def test_add_keyword_index(self): catalog = self._make_one() idx = KeywordIndex('id') catalog.addIndex('id', idx) i = catalog.indexes['id'] - self.assert_(isinstance(i, KeywordIndex)) + self.assertIsInstance(i, KeywordIndex) def test_add_with_space(self): catalog = self._make_one() idx = KeywordIndex(' space ') catalog.addIndex(' space ', idx) - self.assertEqual(' space ' not in catalog.indexes, True, + self.assertNotIn(' space ', catalog.indexes, 'space not stripped in add index') - self.assertEqual('space' in catalog.indexes, True, - 'stripping space in add index failed') + self.assertIn('space', catalog.indexes, + 'stripping space in add index failed') i = catalog.indexes['space'] # Note: i.id still has spaces in it. 
- self.assert_(isinstance(i, KeywordIndex)) + self.assertIsInstance(i, KeywordIndex) def test_del_field_index(self): catalog = self._make_one() idx = FieldIndex('id') catalog.addIndex('id', idx) catalog.delIndex('id') - self.assert_('id' not in catalog.indexes) + self.assertNotIn('id', catalog.indexes) def test_del_text_index(self): catalog = self._make_one() @@ -189,14 +189,14 @@ def test_del_text_index(self): index_factory=OkapiIndex, lexicon_id='lexicon') catalog.addIndex('id', idx) catalog.delIndex('id') - self.assert_('id' not in catalog.indexes) + self.assertNotIn('id', catalog.indexes) def test_del_keyword_index(self): catalog = self._make_one() idx = KeywordIndex('id') catalog.addIndex('id', idx) catalog.delIndex('id') - self.assert_('id' not in catalog.indexes) + self.assertNotIn('id', catalog.indexes) class TestCatalog(unittest.TestCase): @@ -292,12 +292,15 @@ def extra(catalog): def testUniqueValuesForLength(self): catalog = self._make_one() a = catalog.uniqueValuesFor('att1') - self.assertEqual(len(a), 1, 'bad number of unique values %s' % a) + self.assertEqual( + len(a), 1, + 'bad number of unique values {0}'.format(a) + ) def testUniqueValuesForContent(self): catalog = self._make_one() a = catalog.uniqueValuesFor('att1') - self.assertEqual(a[0], 'att1', 'bad content %s' % a[0]) + self.assertEqual(a[0], 'att1', 'bad content {0}'.format(a[0])) # hasuid # recordify @@ -325,7 +328,7 @@ def testKeywordIndexWithMinMaxRangeWrongSyntax(self): # checkKeywordIndex with min/max range wrong syntax. 
catalog = self._make_one() a = catalog(att3={'query': ['att'], 'range': 'min:max'}) - self.assert_(len(a) != self.upper) + self.assertNotEqual(len(a), self.upper) def testCombinedTextandKeywordQuery(self): catalog = self._make_one() @@ -337,8 +340,10 @@ def testCombinedTextandKeywordQuery(self): def testResultLength(self): catalog = self._make_one() a = catalog(att1='att1') - self.assertEqual(len(a), self.upper, - 'length should be %s, its %s' % (self.upper, len(a))) + self.assertEqual( + len(a), self.upper, + 'length should be {0}, its {1}'.format(self.upper, len(a)) + ) def test_query_empty(self): # Queries with empty mappings used to return all. @@ -357,7 +362,10 @@ def extra(catalog): catalog.addIndex('col1', col1) catalog = self._make_one(extra=extra) a = catalog({'col1': ''}) - self.assertEqual(len(a), 0, 'length should be 0, its %s' % len(a)) + self.assertEqual( + len(a), 0, + 'length should be 0, its {0}'.format(len(a)) + ) def test_field_index_length(self): catalog = self._make_one() @@ -419,37 +427,37 @@ def _make_one(self, extra=None): def test_sorted_search_indexes_empty(self): catalog = self._make_one() result = catalog._sorted_search_indexes({}) - self.assertEquals(len(result), 0) + self.assertEqual(len(result), 0) def test_sorted_search_indexes_one(self): catalog = self._make_one() result = catalog._sorted_search_indexes({'att1': 'a'}) - self.assertEquals(result, ['att1']) + self.assertEqual(result, ['att1']) def test_sorted_search_indexes_many(self): catalog = self._make_one() query = {'att1': 'a', 'att2': 'b', 'num': 1} result = catalog._sorted_search_indexes(query) - self.assertEquals(set(result), set(['att1', 'att2', 'num'])) + self.assertEqual(set(result), set(['att1', 'att2', 'num'])) def test_sorted_search_indexes_priority(self): # att2 and col2 don't support ILimitedResultIndex, att1 does catalog = self._make_one() query = {'att1': 'a', 'att2': 'b', 'col2': 'c'} result = catalog._sorted_search_indexes(query) - 
self.assertEquals(result.index('att2'), 0) - self.assertEquals(result.index('att1'), 1) + self.assertEqual(result.index('att2'), 0) + self.assertEqual(result.index('att1'), 1) def test_sorted_search_indexes_match_alternate_attr(self): catalog = self._make_one() query = {'bar': 'b'} result = catalog._sorted_search_indexes(query) - self.assertEquals(result, ['foo']) + self.assertEqual(result, ['foo']) def test_sorted_search_indexes_no_match(self): catalog = self._make_one() result = catalog._sorted_search_indexes({'baz': 'a'}) - self.assertEquals(result, []) + self.assertEqual(result, []) def test_sortResults(self): catalog = self._make_one() @@ -890,19 +898,19 @@ def test_uncatalog_field_index(self): catalog = self._make_one() self._uncatalog(catalog) a = catalog(att1='att1') - self.assertEqual(len(a), 0, 'len: %s' % len(a)) + self.assertEqual(len(a), 0, 'len: {0}'.format(len(a))) def test_uncatalog_text_index(self): catalog = self._make_one() self._uncatalog(catalog) a = catalog(att2='att2') - self.assertEqual(len(a), 0, 'len: %s' % len(a)) + self.assertEqual(len(a), 0, 'len: {0}'.format(len(a))) def test_uncatalog_keyword_index(self): catalog = self._make_one() self._uncatalog(catalog) a = catalog(att3='att3') - self.assertEqual(len(a), 0, 'len: %s' % len(a)) + self.assertEqual(len(a), 0, 'len: {0}'.format(len(a))) def test_bad_uncatalog(self): catalog = self._make_one() @@ -946,8 +954,10 @@ def test_range_search(self): n = m + 10 for r in catalog(number={'query': (m, n), 'range': 'min:max'}): size = r.number - self.assert_(m <= size and size <= n, - "%d vs [%d,%d]" % (r.number, m, n)) + self.assertTrue( + m <= size and size <= n, + '{0:d} vs [{1:d},{2:d}]'.format(r.number, m, n) + ) class TestMergeResults(unittest.TestCase): diff --git a/src/Products/ZCatalog/tests/test_plan.py b/src/Products/ZCatalog/tests/test_plan.py index dc0ddba0..6c72b0a6 100644 --- a/src/Products/ZCatalog/tests/test_plan.py +++ b/src/Products/ZCatalog/tests/test_plan.py @@ -44,13 +44,13 @@ 
def numbers(self): return (self.num, self.num + 1) def getPhysicalPath(self): - return '/%s' % self.num + return '/{0}'.format(self.num) def start(self): - return '2013-07-%.2d' % (self.num + 1) + return '2013-07-{0:02d}'.format(self.num + 1) def end(self): - return '2013-07-%.2d' % (self.num + 2) + return '2013-07-{0:02d}'.format(self.num + 2) class TestNestedDict(unittest.TestCase): @@ -63,10 +63,10 @@ def _makeOne(self): return NestedDict def test_novalue(self): - self.assertEquals(getattr(self.nest, 'value', None), None) + self.assertEqual(getattr(self.nest, 'value', None), None) def test_nolock(self): - self.assertEquals(getattr(self.nest, 'lock', None), None) + self.assertEqual(getattr(self.nest, 'lock', None), None) class TestPriorityMap(unittest.TestCase): @@ -82,31 +82,31 @@ def _makeOne(self): return PriorityMap def test_get_value(self): - self.assertEquals(self.pmap.get_value(), {}) + self.assertEqual(self.pmap.get_value(), {}) def test_get(self): - self.assertEquals(self.pmap.get('foo'), {}) + self.assertEqual(self.pmap.get('foo'), {}) def test_set(self): self.pmap.set('foo', {'bar': 1}) - self.assertEquals(self.pmap.get('foo'), {'bar': 1}) + self.assertEqual(self.pmap.get('foo'), {'bar': 1}) def test_clear(self): self.pmap.set('foo', {'bar': 1}) self.pmap.clear() - self.assertEquals(self.pmap.value, {}) + self.assertEqual(self.pmap.value, {}) def test_get_entry(self): - self.assertEquals(self.pmap.get_entry('foo', 'bar'), {}) + self.assertEqual(self.pmap.get_entry('foo', 'bar'), {}) def test_set_entry(self): self.pmap.set_entry('foo', 'bar', {'baz': 1}) - self.assertEquals(self.pmap.get_entry('foo', 'bar'), {'baz': 1}) + self.assertEqual(self.pmap.get_entry('foo', 'bar'), {'baz': 1}) def test_clear_entry(self): self.pmap.set('foo', {'bar': 1}) self.pmap.clear_entry('foo') - self.assertEquals(self.pmap.get('foo'), {}) + self.assertEqual(self.pmap.get('foo'), {}) class TestPriorityMapDefault(unittest.TestCase): @@ -123,13 +123,13 @@ def _makeOne(self): 
def test_empty(self): self.pmap.load_default() - self.assertEquals(self.pmap.get_value(), {}) + self.assertEqual(self.pmap.get_value(), {}) def test_load_failure(self): try: os.environ['ZCATALOGQUERYPLAN'] = 'Products.ZCatalog.invalid' self.pmap.load_default() - self.assertEquals(self.pmap.get_value(), {}) + self.assertEqual(self.pmap.get_value(), {}) finally: del os.environ['ZCATALOGQUERYPLAN'] @@ -145,7 +145,7 @@ def test_load(self): 'index1': Benchmark(duration=2.0, hits=3, limit=True), 'index2': Benchmark(duration=1.5, hits=2, limit=False), }}} - self.assertEquals(self.pmap.get_value(), expected) + self.assertEqual(self.pmap.get_value(), expected) finally: del os.environ['ZCATALOGQUERYPLAN'] @@ -159,7 +159,7 @@ def test_load_from_path(self): 'index1': Benchmark(duration=2.0, hits=3, limit=True), 'index2': Benchmark(duration=1.5, hits=2, limit=False), }}} - self.assertEquals(self.pmap.get_value(), expected) + self.assertEqual(self.pmap.get_value(), expected) class TestReports(unittest.TestCase): @@ -175,10 +175,10 @@ def _makeOne(self): return Reports def test_value(self): - self.assertEquals(self.reports.value, {}) + self.assertEqual(self.reports.value, {}) def test_lock(self): - self.assertEquals(type(self.reports.lock), LockType) + self.assertEqual(type(self.reports.lock), LockType) class TestCatalogPlan(cleanup.CleanUp, unittest.TestCase): @@ -195,18 +195,18 @@ def _makeOne(self, catalog=None, query=None): def test_get_id(self): plan = self._makeOne() - self.assertEquals(plan.get_id(), ('', 'NonPersistentCatalog')) + self.assertEqual(plan.get_id(), ('', 'NonPersistentCatalog')) def test_get_id_persistent(self): zcat = ZCatalog('catalog') plan = self._makeOne(zcat._catalog) - self.assertEquals(plan.get_id(), ('catalog', )) + self.assertEqual(plan.get_id(), ('catalog', )) def test_getCatalogPlan_empty(self): zcat = ZCatalog('catalog') self._makeOne(zcat._catalog) plan_str = zcat.getCatalogPlan() - self.assertTrue('queryplan = {' in plan_str) + 
self.assertIn('queryplan = {', plan_str) def test_getCatalogPlan_full(self): zcat = ZCatalog('catalog') @@ -220,40 +220,40 @@ def test_getCatalogPlan_full(self): plan.stop_split('index2') plan.stop() plan_str = zcat.getCatalogPlan() - self.assertTrue('queryplan = {' in plan_str) - self.assertTrue('index1' in plan_str) + self.assertIn('queryplan = {', plan_str) + self.assertIn('index1', plan_str) def test_plan_empty(self): plan = self._makeOne() - self.assertEquals(plan.plan(), None) + self.assertEqual(plan.plan(), None) def test_start(self): plan = self._makeOne() plan.start() - self.assert_(plan.start_time <= time.time()) + self.assertLessEqual(plan.start_time, time.time()) def test_start_split(self): plan = self._makeOne() plan.start_split('index1') - self.assert_('index1' in plan.interim) + self.assertIn('index1', plan.interim) def test_stop_split(self): plan = self._makeOne() plan.start_split('index1') plan.stop_split('index1') - self.assert_('index1' in plan.interim) + self.assertIn('index1', plan.interim) i1 = plan.interim['index1'] - self.assert_(i1.start <= i1.end) - self.assert_('index1' in plan.benchmark) + self.assertLessEqual(i1.start, i1.end) + self.assertIn('index1', plan.benchmark) def test_stop_split_sort_on(self): plan = self._makeOne() plan.start_split('sort_on') plan.stop_split('sort_on') - self.assert_('sort_on' in plan.interim) + self.assertIn('sort_on', plan.interim) so = plan.interim['sort_on'] - self.assert_(so.start <= so.end) - self.assert_('sort_on' not in plan.benchmark) + self.assertLessEqual(so.start, so.end) + self.assertNotIn('sort_on', plan.benchmark) def test_stop(self): plan = self._makeOne(query={'index1': 1, 'index2': 2}) @@ -267,12 +267,12 @@ def test_stop(self): time.sleep(0.02) # wait at least one Windows clock tick plan.stop() - self.assert_(plan.duration > 0) - self.assert_('index1' in plan.benchmark) - self.assertEquals(plan.benchmark['index1'].hits, 2) - self.assert_('index2' in plan.benchmark) - 
self.assertEquals(plan.benchmark['index2'].hits, 0) - self.assertEquals(set(plan.plan()), set(('index1', 'index2'))) + self.assertGreater(plan.duration, 0) + self.assertIn('index1', plan.benchmark) + self.assertEqual(plan.benchmark['index1'].hits, 2) + self.assertIn('index2', plan.benchmark) + self.assertEqual(plan.benchmark['index2'].hits, 0) + self.assertEqual(set(plan.plan()), set(('index1', 'index2'))) def test_log(self): plan = self._makeOne(query={'index1': 1}) @@ -283,14 +283,14 @@ def test_log(self): plan.stop() plan.log() report = plan.report() - self.assertEquals(len(report), 1) - self.assertEquals(report[0]['counter'], 2) + self.assertEqual(len(report), 1) + self.assertEqual(report[0]['counter'], 2) plan.reset() - self.assertEquals(len(plan.report()), 0) + self.assertEqual(len(plan.report()), 0) def test_valueindexes_get(self): plan = self._makeOne() - self.assertEquals(plan.valueindexes(), frozenset()) + self.assertEqual(plan.valueindexes(), frozenset()) def test_valueindexes_set(self): from ..plan import PriorityMap @@ -298,7 +298,7 @@ def test_valueindexes_set(self): plan = self._makeOne() indexes = frozenset(['index1', 'index2']) PriorityMap.set_entry(plan.cid, VALUE_INDEX_KEY, indexes) - self.assertEquals(plan.valueindexes(), frozenset(indexes)) + self.assertEqual(plan.valueindexes(), frozenset(indexes)) # Test the actual logic for determining value indexes # Test make_key diff --git a/src/Products/ZCatalog/tests/test_zcatalog.py b/src/Products/ZCatalog/tests/test_zcatalog.py index 20d8be9e..04a50672 100644 --- a/src/Products/ZCatalog/tests/test_zcatalog.py +++ b/src/Products/ZCatalog/tests/test_zcatalog.py @@ -34,7 +34,7 @@ def __init__(self, num): self.num = num def title(self): - return '%d' % self.num + return '{0:d}'.format(self.num) class ZDummyFalse(ZDummy): @@ -50,7 +50,7 @@ def __init__(self, num, fail): self.fail = fail def __len__(self): - self.fail("__len__() was called") + self.fail('__len__() was called') class 
DummyNonzeroFail(ZDummy): @@ -60,7 +60,7 @@ def __init__(self, num, fail): self.fail = fail def __nonzero__(self): - self.fail("__nonzero__() was called") + self.fail('__nonzero__() was called') class FakeTraversalError(KeyError): @@ -146,7 +146,7 @@ def test_interfaces(self): verifyClass(IZCatalog, ZCatalog) def test_len(self): - self.assertEquals(len(self._catalog), self.upper) + self.assertEqual(len(self._catalog), self.upper) # manage_edit # manage_subbingToggle @@ -206,7 +206,7 @@ def testReindexIndexesFalse(self): ob.num = 9999 self._catalog.reindexIndex('title', {}) result = self._catalog(title='9999') - self.assertEquals(1, len(result)) + self.assertEqual(1, len(result)) # manage_reindexIndex # catalog_object @@ -223,7 +223,7 @@ def test_getobject_traversal(self): def resolve_url(path, REQUEST): # make resolve_url fail if ZCatalog falls back on it - self.fail(".resolve_url() should not be called by .getobject()") + self.fail('.resolve_url() should not be called by .getobject()') catalog.resolve_url = resolve_url @@ -236,7 +236,7 @@ def resolve_url(path, REQUEST): # and if there is a None at the traversal point, that's where it # should return self.d['0'] = None - self.assertEquals(catalog.getobject(rid0), None) + self.assertEqual(catalog.getobject(rid0), None) def testGetMetadataForUID(self): testNum = str(self.upper - 3) # as good as any.. 
@@ -312,13 +312,13 @@ class TestAddDelColumnIndex(ZCatalogBase, unittest.TestCase): def testAddIndex(self): self._catalog.addIndex('id', self._makeOneIndex('id')) - self.assert_('id' in self._catalog.indexes()) + self.assertIn('id', self._catalog.indexes()) def testDelIndex(self): self._catalog.addIndex('title', self._makeOneIndex('title')) - self.assert_('title' in self._catalog.indexes()) + self.assertTrue('title', self._catalog.indexes()) self._catalog.delIndex('title') - self.assert_('title' not in self._catalog.indexes()) + self.assertNotIn('title', self._catalog.indexes()) def testClearIndex(self): self._catalog.addIndex('title', self._makeOneIndex('title')) @@ -326,18 +326,18 @@ def testClearIndex(self): for x in range(10): ob = ZDummy(x) self._catalog.catalog_object(ob, str(x)) - self.assertEquals(len(idx), 10) + self.assertEqual(len(idx), 10) self._catalog.clearIndex('title') - self.assertEquals(len(idx), 0) + self.assertEqual(len(idx), 0) def testAddColumn(self): self._catalog.addColumn('num', default_value=0) - self.assert_('num' in self._catalog.schema()) + self.assertIn('num', self._catalog.schema()) def testDelColumn(self): self._catalog.addColumn('title') self._catalog.delColumn('title') - self.assert_('title' not in self._catalog.schema()) + self.assertNotIn('title', self._catalog.schema()) class TestZCatalogGetObject(ZCatalogBase, unittest.TestCase): diff --git a/tox.ini b/tox.ini index a7a3a099..4fac82bf 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,6 @@ [tox] envlist = + flake8, py27, py34, py35, @@ -30,4 +31,12 @@ commands = coverage combine coverage html -i coverage xml -i - coverage report -i + coverage report -i --skip-covered --fail-under=86 + +[testenv:flake8] +basepython = python3.6 +deps = + flake8 + flake8-debugger +commands = + flake8 --doctests src setup.py {posargs}