Attempting to fix #2496.
felixfontein committed Mar 12, 2017
1 parent a40fdd9 commit c6b42be
Showing 3 changed files with 87 additions and 48 deletions.
nikola/nikola.py (127 changes: 79 additions & 48 deletions)

@@ -959,31 +959,9 @@ def __init__(self, **config):
         for k, v in self.config['COMPILERS'].items():
             self.config['_COMPILERS_RAW'][k] = list(v)
 
-        compilers = defaultdict(set)
-        # Also add aliases for combinations with TRANSLATIONS_PATTERN
-        for compiler, exts in self.config['COMPILERS'].items():
-            for ext in exts:
-                compilers[compiler].add(ext)
-                for lang in self.config['TRANSLATIONS'].keys():
-                    candidate = utils.get_translation_candidate(self.config, "f" + ext, lang)
-                    compilers[compiler].add(candidate)
-
         # Get search path for themes
         self.themes_dirs = ['themes'] + self.config['EXTRA_THEMES_DIRS']
 
-        # Avoid redundant compilers
-        # Remove compilers that match nothing in POSTS/PAGES
-        # And put them in "bad compilers"
-        pp_exts = set([os.path.splitext(x[0])[1] for x in self.config['post_pages']])
-        self.config['COMPILERS'] = {}
-        self.disabled_compilers = {}
-        self.bad_compilers = set([])
-        for k, v in compilers.items():
-            if pp_exts.intersection(v):
-                self.config['COMPILERS'][k] = sorted(list(v))
-            else:
-                self.bad_compilers.add(k)
-
         self._set_global_context_from_config()
         self._set_global_context_from_data()

@@ -998,6 +976,28 @@ def __init__(self, **config):
         self.state._set_site(self)
         self.cache._set_site(self)
 
+    def _filter_duplicate_plugins(self, plugin_list):
+        """Find repeated plugins and discard the less local copy."""
+        def plugin_position_in_places(plugin):
+            # plugin here is a tuple:
+            # (path to the .plugin file, path to plugin module w/o .py, plugin metadata)
+            for i, place in enumerate(self._plugin_places):
+                if plugin[0].startswith(place):
+                    return i
+
+        plugin_dict = defaultdict(list)
+        for data in plugin_list:
+            plugin_dict[data[2].name].append(data)
+        result = []
+        for name, plugins in plugin_dict.items():
+            if len(plugins) > 1:
+                # Sort by locality
+                plugins.sort(key=plugin_position_in_places)
+                utils.LOGGER.debug("Plugin {} exists in multiple places, using {}".format(
+                    plugins[-1][2].name, plugins[-1][0]))
+            result.append(plugins[-1])
+        return result
+
     def init_plugins(self, commands_only=False, load_all=False):
         """Load plugins as needed."""
         self.plugin_manager = PluginManager(categories_filter={

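Note (not part of the commit): the new _filter_duplicate_plugins helper keeps, for each plugin name, the candidate found in the most local search place. The following is a minimal, self-contained sketch of that locality rule with made-up paths and a stand-in metadata object; it is an illustration only, not Nikola code.

    from collections import defaultdict, namedtuple

    # Stand-in for yapsy's candidate tuples: (path to .plugin file, module path, metadata).
    Meta = namedtuple('Meta', ['name'])
    places = ['/usr/lib/nikola/plugins', '/home/user/site/plugins']  # later entries are "more local"
    candidates = [
        ('/usr/lib/nikola/plugins/rest/rest.plugin', '/usr/lib/nikola/plugins/rest/rest', Meta('rest')),
        ('/home/user/site/plugins/rest/rest.plugin', '/home/user/site/plugins/rest/rest', Meta('rest')),
        ('/usr/lib/nikola/plugins/markdown/markdown.plugin', '/usr/lib/nikola/plugins/markdown/markdown', Meta('markdown')),
    ]

    def position(candidate):
        # Index of the search place a candidate was found in; a higher index means "more local".
        for i, place in enumerate(places):
            if candidate[0].startswith(place):
                return i

    by_name = defaultdict(list)
    for c in candidates:
        by_name[c[2].name].append(c)

    kept = []
    for name, copies in by_name.items():
        if len(copies) > 1:
            copies.sort(key=position)  # the most local copy ends up last
        kept.append(copies[-1])
    # 'rest' resolves to the copy under /home/user/site/plugins; 'markdown' is kept unchanged.
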
@@ -1031,9 +1031,30 @@ def init_plugins(self, commands_only=False, load_all=False):
             os.path.expanduser('~/.nikola/plugins'),
         ] + [utils.sys_encode(path) for path in extra_plugins_dirs if path]
 
+        compilers = defaultdict(set)
+        # Also add aliases for combinations with TRANSLATIONS_PATTERN
+        for compiler, exts in self.config['COMPILERS'].items():
+            for ext in exts:
+                compilers[compiler].add(ext)
+                for lang in self.config['TRANSLATIONS'].keys():
+                    candidate = utils.get_translation_candidate(self.config, "f" + ext, lang)
+                    compilers[compiler].add(candidate)
+
+        # Avoid redundant compilers
+        # Remove compilers that match nothing in POSTS/PAGES
+        # And put them in "bad compilers"
+        self.config['COMPILERS'] = {}
+        self.disabled_compilers = {}
+        self.disabled_compiler_extensions = defaultdict(list)
+        self.bad_compilers = set([])
+        for k, v in compilers.items():
+            # self.config['COMPILERS'][k] = sorted(list(v))
+            self.bad_compilers.add(k)
+
         self.plugin_manager.getPluginLocator().setPluginPlaces(self._plugin_places)
         self.plugin_manager.locatePlugins()
         bad_candidates = set([])
+        bad_compilers_data = dict()
         if not load_all:
             for p in self.plugin_manager._candidates:
                 if commands_only:

@@ -1048,8 +1069,7 @@ def init_plugins(self, commands_only=False, load_all=False):
                     if p[-1].name in self.bad_compilers:
                         bad_candidates.add(p)
                         self.disabled_compilers[p[-1].name] = p
-                        utils.LOGGER.debug('Not loading unneeded compiler {}', p[-1].name)
-                    if p[-1].name not in self.config['COMPILERS'] and \
+                    if p[-1].name not in compilers and \
                             p[-1].details.has_option('Nikola', 'PluginCategory') and p[-1].details.get('Nikola', 'PluginCategory') in ('Compiler', 'PageCompiler'):
                         bad_candidates.add(p)
                         self.disabled_compilers[p[-1].name] = p

@@ -1061,31 +1081,43 @@ def init_plugins(self, commands_only=False, load_all=False):
                     # Remove compiler extensions we don't need
                     if p[-1].details.has_option('Nikola', 'compiler') and p[-1].details.get('Nikola', 'compiler') in self.disabled_compilers:
                         bad_candidates.add(p)
-                        utils.LOGGER.debug('Not loading compiler extension {}', p[-1].name)
+                        self.disabled_compiler_extensions[p[-1].details.get('Nikola', 'compiler')].append(p)
             self.plugin_manager._candidates = list(set(self.plugin_manager._candidates) - bad_candidates)
 
-        # Find repeated plugins and discard the less local copy
-        def plugin_position_in_places(plugin):
-            # plugin here is a tuple:
-            # (path to the .plugin file, path to plugin module w/o .py, plugin metadata)
-            for i, place in enumerate(self._plugin_places):
-                if plugin[0].startswith(place):
-                    return i
-
-        plugin_dict = defaultdict(list)
-        for data in self.plugin_manager._candidates:
-            plugin_dict[data[2].name].append(data)
-        self.plugin_manager._candidates = []
-        for name, plugins in plugin_dict.items():
-            if len(plugins) > 1:
-                # Sort by locality
-                plugins.sort(key=plugin_position_in_places)
-                utils.LOGGER.debug("Plugin {} exists in multiple places, using {}".format(
-                    plugins[-1][2].name, plugins[-1][0]))
-            self.plugin_manager._candidates.append(plugins[-1])
-
+        self.plugin_manager._candidates = self._filter_duplicate_plugins(self.plugin_manager._candidates)
         self.plugin_manager.loadPlugins()
 
+        # Search for compiler plugins which we disabled but shouldn't have
+        self._activate_plugins_of_category("PostScanner")
+        if not load_all:
+            file_extensions = set()
+            for post_scanner in [p.plugin_object for p in self.plugin_manager.getPluginsOfCategory('PostScanner')]:
+                exts = post_scanner.supported_extensions()
+                if exts is not None:
+                    file_extensions.update(exts)
+                else:
+                    # Stop scanning for more: once we get None, we have to load all compilers anyway
+                    file_extensions = None
+                    break
+            to_add = []
+            for k, v in compilers.items():
+                if file_extensions is None or file_extensions.intersection(v):
+                    self.config['COMPILERS'][k] = sorted(list(v))
+                    self.bad_compilers.remove(k)
+                    p = self.disabled_compilers.pop(k, None)
+                    if p:
+                        to_add.append(p)
+                    for p in self.disabled_compiler_extensions.pop(k, []):
+                        to_add.append(p)
+            for name, p in self.disabled_compilers.items():
+                utils.LOGGER.debug('Not loading unneeded compiler {}', p[-1].name)
+            for name, plugins in self.disabled_compiler_extensions.items():
+                for p in plugins:
+                    utils.LOGGER.debug('Not loading compiler extension {}', p[-1].name)
+            if to_add:
+                self.plugin_manager._candidates = self._filter_duplicate_plugins(to_add)
+                self.plugin_manager.loadPlugins()
+
         self._activate_plugins_of_category("Taxonomy")
         self.taxonomy_plugins = {}
         for taxonomy in [p.plugin_object for p in self.plugin_manager.getPluginsOfCategory('Taxonomy')]:

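Note (not part of the commit): the block above re-enables a compiler only if the file extensions reported by the PostScanner plugins intersect the extensions that compiler handles, or if some scanner returned None (set unknown, so everything must be loaded). A rough sketch of that decision with made-up sample data:

    # Extensions handled by each compiler (simplified, made-up sample data).
    compilers = {
        'rest': {'.rst', '.txt'},
        'markdown': {'.md'},
        'html': {'.html'},
    }
    # Union of what all PostScanner plugins reported via supported_extensions();
    # None would mean "unknown, so every compiler has to be loaded".
    file_extensions = {'.rst', '.md'}

    enabled = {
        name: sorted(exts)
        for name, exts in compilers.items()
        if file_extensions is None or file_extensions.intersection(exts)
    }
    # enabled == {'rest': ['.rst', '.txt'], 'markdown': ['.md']}; 'html' stays disabled.
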
@@ -1108,7 +1140,6 @@ def plugin_position_in_places(plugin):
            plugin_info.plugin_object.short_help = plugin_info.description
            self._commands[plugin_info.name] = plugin_info.plugin_object
 
-        self._activate_plugins_of_category("PostScanner")
         self._activate_plugins_of_category("Task")
         self._activate_plugins_of_category("LateTask")
         self._activate_plugins_of_category("TaskMultiplier")

@@ -1728,7 +1759,7 @@ def generic_rss_renderer(self, lang, title, link, description, timeline, output_
                             post.date.astimezone(dateutil.tz.tzutc())),
                 'categories': post._tags.get(lang, []),
                 'creator': post.author(lang),
-                'guid': post.guid(lang),
+                'guid': post.permalink(lang, absolute=True),
             }
 
             if post.author(lang):

@@ -2397,7 +2428,7 @@ def atom_post_text(post, text):
            entry_title = lxml.etree.SubElement(entry_root, "title")
            entry_title.text = post.title(lang)
            entry_id = lxml.etree.SubElement(entry_root, "id")
-           entry_id.text = post.guid(lang)
+           entry_id.text = post.permalink(lang, absolute=True)
            entry_updated = lxml.etree.SubElement(entry_root, "updated")
            entry_updated.text = post.formatted_updated('webiso')
            entry_published = lxml.etree.SubElement(entry_root, "published")

nikola/plugin_categories.py (4 changes: 4 additions & 0 deletions)

@@ -95,6 +95,10 @@ def scan(self):
"""Create a list of posts from some source. Returns a list of Post objects.""" """Create a list of posts from some source. Returns a list of Post objects."""
raise NotImplementedError() raise NotImplementedError()


def supported_extensions(self):
"""Return a list of supported file extensions, or None if such a list isn't known beforehand."""
return None



class Command(BasePlugin, DoitCommand): class Command(BasePlugin, DoitCommand):
"""Doit command implementation.""" """Doit command implementation."""
Expand Down
nikola/plugins/misc/scan_posts.py (4 changes: 4 additions & 0 deletions)

@@ -106,3 +106,7 @@ def scan(self):
                     raise err
 
         return timeline
+
+    def supported_extensions(self):
+        """Return a list of supported file extensions, or None if such a list isn't known beforehand."""
+        return list(set([os.path.splitext(x[0])[1] for x in self.site.config['post_pages']]))
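
Note (not part of the commit): a PostScanner plugin outside of Nikola core could opt into the same mechanism by overriding supported_extensions(). The class name, plugin name, and the .ipynb extension below are hypothetical example values, not something this commit adds.

    from nikola.plugin_categories import PostScanner


    class ExampleScanner(PostScanner):
        """Hypothetical scanner that only ever produces .ipynb posts."""

        name = "example_scanner"

        def scan(self):
            # A real implementation would build and return a list of Post objects.
            return []

        def supported_extensions(self):
            # Returning a concrete list lets init_plugins() keep only the compilers
            # matching these extensions; the default (None) forces all compilers to load.
            return ['.ipynb']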
