diff --git a/cherrymusicserver/sqlitecache.py b/cherrymusicserver/sqlitecache.py
index 7f96ab4c..0a826923 100644
--- a/cherrymusicserver/sqlitecache.py
+++ b/cherrymusicserver/sqlitecache.py
@@ -32,13 +32,15 @@
 #python 2.6+ backward compability
 from __future__ import unicode_literals
 
+from test.test_descrtut import defaultdict
+
 import os
 import re
 import sqlite3
 import sys
 import traceback
 
-from collections import deque
+from collections import deque, Counter
 from operator import itemgetter
 
 import cherrymusicserver as cherry
@@ -64,7 +66,7 @@
 
 scanreportinterval = 1
 AUTOSAVEINTERVAL = 100
-debug = False
+debug = True
 keepInRam = False
 
 #if debug:
@@ -107,8 +109,11 @@ def load_db_to_memory(self):
 
     @classmethod
     def searchterms(cls, searchterm):
-        words = re.findall('(\w+|[^\s\w]+)',searchterm.replace('_', ' ').replace('%',' '),re.UNICODE)
-        words = [word.lower() for word in words]
+        searchterm = searchterm.replace('_', ' ').replace('%',' ')
+        words = [
+            word.lower() for word in
+            re.findall('(\w+|[^\s\w]+)', searchterm, re.UNICODE)
+        ]
         if UNIDECODE_AVAILABLE:
             unidecoded = [unidecode.unidecode(word) for word in words]
             words += unidecoded
@@ -145,7 +150,7 @@ def fetchFileIds(self, terms, maxFileIdsPerTerm, mode):
             log.d('Query used: %r, %r', sql, params)
             #print(self.conn.execute('EXPLAIN QUERY PLAN ' + sql, params).fetchall())
             self.db.execute(sql, params)
-            resultlist += self.db.fetchall()
+            resultlist += [t[0] for t in self.db.fetchall()]
         return resultlist
 
     def searchfor(self, value, maxresults=10):
@@ -175,21 +180,15 @@ def searchfor(self, value, maxresults=10):
             maxFileIdsPerTerm = file_search_limit
 
         with Performance(_('file id fetching')):
-            #unpack tuples
-            fileids = [t[0] for t in self.fetchFileIds(terms, maxFileIdsPerTerm, mode)]
+            fileids = self.fetchFileIds(terms, maxFileIdsPerTerm, mode)
 
         if len(fileids) > file_search_limit:
             with Performance(_('sorting results by fileid occurrences')):
-                resultfileids = {}
-                for fileid in fileids:
-                    if fileid in resultfileids:
-                        resultfileids[fileid] += 1
-                    else:
-                        resultfileids[fileid] = 1
                 # sort items by occurrences and only return maxresults
-                fileids = sorted(resultfileids.items(), key=itemgetter(1), reverse=True)
-                fileids = [t[0] for t in fileids]
-                fileids = fileids[:min(len(fileids), file_search_limit)]
+                fileids = [
+                    fid[0] for fid in
+                    Counter(fileids).most_common(file_search_limit)
+                ]
 
         if mode == 'normal':
             with Performance(_('querying fullpaths for %s fileIds') % len(fileids)):
@@ -210,7 +209,7 @@ def listdir(self, path):
         if targetdir is None:
             log.e(_('media cache cannot listdir %r: path not in database'), path)
             return []
-        return list(map(lambda f: f.basename, self.fetch_child_files(targetdir)))
+        return [f.basename for f in self.fetch_child_files(targetdir)]
 
     def randomFileEntries(self, count):
         ''' Return a number of random entries from the file cache.
diff --git a/cherrymusicserver/util.py b/cherrymusicserver/util.py
index e2b97ea4..dd891401 100644
--- a/cherrymusicserver/util.py
+++ b/cherrymusicserver/util.py
@@ -184,16 +184,16 @@ def __enter__(self):
         if PERFORMANCE_TEST:
             self.time = time()
             Performance.indentation += 1
-            log.w('| ' * (Performance.indentation - 1)
-                  + '/ˉˉ' + self.text)
+            log.w('│ ' * (Performance.indentation - 1)
+                  + '╭──' + self.text)
         return self
 
     def __exit__(self, type, value, traceback):
         global PERFORMANCE_TEST
         if PERFORMANCE_TEST:
             duration = (time() - self.time) * 1000
-            log.w('| ' * (Performance.indentation-1)
-                  + '\__ %g ms' % (duration,))
+            log.w('│ ' * (Performance.indentation-1)
+                  + '╰──%g ms' % (duration,))
             Performance.indentation -= 1
 
     def log(self, text):