
Merge branch 'develop'

Conflicts:
	README.mkd
2 parents 6890732 + 847a94e, commit 142c8d635238916fb3782e10b788a1ae93ebc431; mythmon committed Jul 21, 2011
Showing with 6,961 additions and 1 deletion.
  1. +3 −0 .gitignore
  2. +24 −1 README.mkd
  3. 0 bravo/__init__.py
  4. +55 −0 bravo/errors.py
  5. +456 −0 bravo/ibravo.py
  6. +292 −0 bravo/plugin.py
  7. +12 −0 exocet/__init__.py
  8. +121 −0 exocet/_components.py
  9. +807 −0 exocet/_exocet.py
  10. +927 −0 exocet/_filepath.py
  11. +959 −0 exocet/_modules.py
  12. +129 −0 exocet/_reflect.py
  13. +114 −0 exocet/_runtime.py
  14. +168 −0 exocet/_win32.py
  15. +216 −0 exocet/_zippath.py
  16. 0 exocet/test/__init__.py
  17. +1 −0 exocet/test/_ospathExample.py
  18. +239 −0 exocet/test/test_exocet.py
  19. +782 −0 exocet/test/test_modules.py
  20. +1,255 −0 exocet/test/test_paths.py
  21. +3 −0 exocet/test/testpackage/__init__.py
  22. +1 −0 exocet/test/testpackage/baz.py
  23. +4 −0 exocet/test/testpackage/foo.py
  24. +3 −0 exocet/test/testpackage/topmodule.py
  25. +2 −0 exocet/test/testpackage/util.py
  26. 0 exocet/test/testpackage_circular/__init__.py
  27. +1 −0 exocet/test/testpackage_circular/baz.py
  28. +2 −0 exocet/test/testpackage_circular/foo.py
  29. +1 −0 exocet/test/testpackage_circular/topmodule.py
  30. +6 −0 exocet/test/testpackage_circular/util.py
  31. +3 −0 exocet/test/testpackage_localimports/__init__.py
  32. +1 −0 exocet/test/testpackage_localimports/baz.py
  33. +6 −0 exocet/test/testpackage_localimports/foo.py
  34. +3 −0 exocet/test/testpackage_localimports/topmodule.py
  35. +1 −0 exocet/test/testpackage_localimports/util.py
  36. +4 −0 hamper.conf.dist
  37. +20 −0 hamper/IHamper.py
  38. +1 −0 hamper/__init__.py
  39. +134 −0 hamper/commander.py
  40. 0 hamper/plugins/__init__.py
  41. +114 −0 hamper/plugins/commands.py
  42. +52 −0 hamper/plugins/plugin_utils.py
  43. +23 −0 scripts/hamper
  44. +16 −0 setup.py
3 .gitignore
@@ -0,0 +1,3 @@
+*.pyc
+.*.swp
+.*.swo
25 README.mkd
@@ -6,7 +6,30 @@ many commits on the master branch.
[gitflow]: http://nvie.com/posts/a-successful-git-branching-model/
-###Authors
+Installation
+============
+As of right now, you can't install hamper unless you are clever.
+
+Configuration
+=============
+Make a file named `hamper.conf`. This should be a YAML file containing these,
+hopefully self-explanatory, fields:
+
+- `nickname`
+- `channel`
+- `server`
+- `port`
+
+For an example check out `hamper.conf.dist`.
+
+Usage
+=====
+With `hamper.conf` in your current directory and hamper on your Python
+path, run `main.py`. I like to use this command:
+
+ PYTHONPATH="~/git/hamper" python2 ~/git/hamper/hamper/main.py
+
+###Credits
- Daniel Thornton <merthel@gmail.com>
- Jordan Evans <evans.jordan.m@gmail.com>
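For reference, `hamper.conf.dist` is only four lines. A filled-in `hamper.conf` along the lines described above might look like this; the values are placeholders, not a recommended network or channel:

    nickname: hamper
    channel: "#hamper"
    server: irc.example.net
    port: 6667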
0 bravo/__init__.py
No changes.
55 bravo/errors.py
@@ -0,0 +1,55 @@
+"""
+Module for specifying types of errors which might occur internally.
+"""
+
+# Errors which can be raised by serializers in the course of doing things
+# which serializers might normally do.
+
+class SerializerException(Exception):
+ """
+ Something bad happened in a serializer.
+ """
+
+class SerializerReadException(SerializerException):
+ """
+ A serializer had issues reading data.
+ """
+
+class SerializerWriteException(SerializerException):
+ """
+ A serializer had issues writing data.
+ """
+
+# Errors from plugin loading.
+
+class InvariantException(Exception):
+ """
+ Exception raised by failed invariant conditions.
+ """
+
+class PluginException(Exception):
+ """
+ Signal an error encountered during plugin handling.
+ """
+
+# Errors from NBT handling.
+
+class MalformedFileError(Exception):
+ """
+ Exception raised on parse error.
+ """
+
+# Errors from bravo clients.
+
+class BuildError(Exception):
+ """
+ Something went wrong with the build.
+ """
+
+# Errors from the world.
+
+class ChunkNotLoaded(Exception):
+ """
+ The requested chunk is not currently loaded. If you need it, you will need
+ to request it yourself.
+ """
456 bravo/ibravo.py
@@ -0,0 +1,456 @@
+from twisted.python.components import registerAdapter
+from twisted.web.resource import IResource
+from zope.interface import implements, invariant, Attribute, Interface
+
+from bravo.errors import InvariantException
+
+class IBravoPlugin(Interface):
+ """
+ Interface for plugins.
+
+ This interface stores common metadata used during plugin discovery.
+ """
+
+ name = Attribute("""
+ The name of the plugin.
+
+ This name is used to reference the plugin in configurations, and also
+ to uniquely index the plugin.
+ """)
+
+def sorted_invariant(s):
+ intersection = set(s.before) & set(s.after)
+ if intersection:
+ raise InvariantException("Plugin wants to come before and after %r" %
+ intersection)
+
+class ISortedPlugin(IBravoPlugin):
+ """
+ Parent interface for sorted plugins.
+
+ Sorted plugins have an innate and automatic ordering inside lists thanks
+ to the ability to advertise their dependencies.
+ """
+
+ invariant(sorted_invariant)
+
+ before = Attribute("""
+ Plugins which must come before this plugin in the pipeline.
+
+ Should be a tuple, list, or some other iterable.
+ """)
+
+ after = Attribute("""
+ Plugins which must come after this plugin in the pipeline.
+
+ Should be a tuple, list, or some other iterable.
+ """)
+
+class IAuthenticator(IBravoPlugin):
+ """
+ Interface for authenticators.
+
+ Authenticators participate in a two-step login process with a handshake.
+ """
+
+ def handshake(protocol, container):
+ """
+ Handle a handshake.
+
+ This function should always return True or False, depending on whether
+ the handshake was successful.
+ """
+
+ def login(protocol, container):
+ """
+ Handle a login.
+
+ This function should return a ``Deferred`` which will fire depending
+ on whether the login was successful.
+ """
+
+class ITerrainGenerator(ISortedPlugin):
+ """
+ Interface for terrain generators.
+ """
+
+ def populate(chunk, seed):
+ """
+ Given a chunk and a seed value, populate the chunk with terrain.
+
+ This function should assume that it runs as part of a pipeline, and
+ that the chunk may already be partially or totally populated.
+ """
+
+def command_invariant(c):
+ if c.__doc__ is None:
+ raise InvariantException("Command has no documentation")
+
+class ICommand(IBravoPlugin):
+ """
+ A command.
+
+ Commands must be documented, as an invariant. The documentation for a
+ command will be displayed for clients upon request, via internal help
+ commands.
+ """
+
+ invariant(command_invariant)
+
+ aliases = Attribute("""
+ Additional keywords which may be used to alias this command.
+ """)
+
+ usage = Attribute("""
+ String explaining how to use this command.
+ """)
+
+class IChatCommand(ICommand):
+ """
+ Interface for chat commands.
+
+ Chat commands are invoked from the chat inside clients, so they are always
+ called by a specific client.
+
+ This interface is specifically designed to exist comfortably side-by-side
+ with `IConsoleCommand`.
+ """
+
+ def chat_command(username, parameters):
+ """
+ Handle a command from the chat interface.
+
+ :param str username: username of player
+ :param list parameters: additional parameters passed to the command
+
+ :returns: a generator object or other iterable yielding lines
+ """
+
+class IConsoleCommand(ICommand):
+ """
+ Interface for console commands.
+
+ Console commands are invoked from a console or some other location with
+ two defining attributes: Access restricted to superusers, and no user
+ issuing the command. As such, no access control list applies to them, but
+ they must be given usernames to operate on explicitly.
+ """
+
+ def console_command(parameters):
+ """
+ Handle a command.
+
+ :param list parameters: additional parameters passed to the command
+
+ :returns: a generator object or other iterable yielding lines
+ """
+
+class ChatToConsole(object):
+ """
+ Adapt a chat command to be used on the console.
+
+ This largely consists of passing the username correctly.
+ """
+
+ implements(IConsoleCommand)
+
+ def __init__(self, chatcommand):
+ self.chatcommand = chatcommand
+
+ self.aliases = self.chatcommand.aliases
+ self.info = self.chatcommand.info
+ self.name = self.chatcommand.name
+ self.usage = "<username> %s" % self.chatcommand.usage
+
+ def console_command(self, parameters):
+ if IConsoleCommand.providedBy(self.chatcommand):
+ return self.chatcommand.console_command(parameters)
+ else:
+ username = parameters.pop(0) if parameters else ""
+ return self.chatcommand.chat_command(username, parameters)
+
+registerAdapter(ChatToConsole, IChatCommand, IConsoleCommand)
+
+def recipe_invariant(r):
+ # Size invariant.
+ if len(r.recipe) != r.dimensions[0] * r.dimensions[1]:
+ raise InvariantException("Recipe size is invalid")
+
+class IRecipe(IBravoPlugin):
+ """
+ Recipe for crafting materials from other materials.
+ """
+
+ invariant(recipe_invariant)
+
+ dimensions = Attribute("""
+ Tuple representing the size of the recipe.
+ """)
+
+ recipe = Attribute("""
+ Tuple representing the items of the recipe.
+
+ Recipes need to be filled out left-to-right, top-to-bottom, with one
+ of two things:
+
+ * A tuple (slot, count) for the item/block that needs to be present;
+ * None, if the slot needs to be empty.
+ """)
+
+ provides = Attribute("""
+ Tuple representing the yield of this recipe.
+
+ This tuple must be of the format (slot, count).
+ """)
+
+class ISeason(IBravoPlugin):
+ """
+ Seasons are transformational stages run during certain days to emulate an
+ environment.
+ """
+
+ def transform(chunk):
+ """
+ Apply the season to the given chunk.
+ """
+
+ day = Attribute("""
+ Day of the year on which to switch to this season.
+ """)
+
+class ISerializer(IBravoPlugin):
+ """
+ Class that understands how to serialize several different kinds of objects
+ to and from disk-friendly formats.
+
+ Implementors of this interface are expected to provide a uniform
+ implementation of their serialization technique.
+ """
+
+ def save_chunk(chunk):
+ """
+ Save a chunk.
+
+ May return a ``Deferred`` that will fire on completion.
+ """
+
+ def load_chunk(chunk):
+ """
+ Load a chunk.
+
+ May return a ``Deferred`` that will fire on completion.
+ """
+
+ def save_level(level):
+ """
+ Save a level.
+
+ May return a ``Deferred`` that will fire on completion.
+ """
+
+ def load_level(level):
+ """
+ Load a level.
+
+ May return a ``Deferred`` that will fire on completion.
+ """
+
+ def save_player(player):
+ """
+ Save a player.
+
+ May return a ``Deferred`` that will fire on completion.
+ """
+
+ def load_player(player):
+ """
+ Load a player.
+
+ May return a ``Deferred`` that will fire on completion.
+ """
+
+ def load_plugin_data(name):
+ """
+ Load plugin-specific data.
+
+ May return a ``Deferred`` that will fire on completion.
+ """
+
+ def save_plugin_data(name, value):
+ """
+ Save plugin-specific data.
+
+ May return a ``Deferred`` that will fire on completion.
+ """
+
+class ISerializerFactory(IBravoPlugin):
+ """
+ Factory for ``ISerializer`` instances.
+
+ I am so sorry for this.
+ """
+
+# Hooks
+
+class IPreBuildHook(ISortedPlugin):
+ """
+ Hook for actions to be taken before a block is placed.
+ """
+
+ def pre_build_hook(player, builddata):
+ """
+ Do things.
+
+ The ``player`` is a ``Player`` entity and can be modified as needed.
+
+ The ``builddata`` tuple has all of the useful things. It stores a
+ ``Block`` that will be placed, as well as the block coordinates and
+ face of the place where the block will be built.
+
+ ``builddata`` needs to be passed to the next hook in sequence, but it
+ can be modified in passing in order to modify the way blocks are
+ placed.
+
+ Any access to chunks must be done through the factory. To get the
+ current factory, import it from ``bravo.parameters``:
+
+ >>> from bravo.parameters import factory
+
+ The second variable in the return value indicates whether processing
+ of building should continue after this hook runs. Use it to halt build
+ hook processing, if needed.
+
+ For sanity purposes, build hooks may return a ``Deferred`` which will
+ fire with their return values, but are not obligated to do so.
+
+ A trivial do-nothing build hook looks like the following:
+
+ >>> def pre_build_hook(self, player, builddata):
+ ... return True, builddata
+
+ To make life more pleasant when returning deferred values, use
+ ``inlineCallbacks``, which many of the standard build hooks use:
+
+ >>> @inlineCallbacks
+ ... def pre_build_hook(self, player, builddata):
+ ... returnValue((True, builddata))
+
+ This form makes it much easier to deal with asynchronous operations on
+ the factory and world.
+
+ :param ``Player`` player: player entity doing the building
+ :param namedtuple builddata: permanent building location and data
+
+ :returns: ``Deferred`` with tuple of build data and whether subsequent
+ hooks will run
+ """
+
+class IPostBuildHook(ISortedPlugin):
+ """
+ Hook for actions to be taken after a block is placed.
+ """
+
+ def post_build_hook(player, coords, block):
+ """
+ Do things.
+ """
+
+class IDigHook(ISortedPlugin):
+ """
+ Hook for actions to be taken after a block is dug up.
+ """
+
+ def dig_hook(chunk, x, y, z, block):
+ """
+ Do things.
+
+ :param `Chunk` chunk: digging location
+ :param int x: X coordinate
+ :param int y: Y coordinate
+ :param int z: Z coordinate
+ :param `Block` block: dug block
+ """
+
+class ISignHook(ISortedPlugin):
+ """
+ Hook for actions to be taken after a sign is updated.
+
+ This hook fires both on sign creation and sign editing.
+ """
+
+ def sign_hook(chunk, x, y, z, text, new):
+ """
+ Do things.
+
+ :param `Chunk` chunk: digging location
+ :param int x: X coordinate
+ :param int y: Y coordinate
+ :param int z: Z coordinate
+ :param list text: list of lines of text
+ :param bool new: whether this sign is newly placed
+ """
+
+class IUseHook(ISortedPlugin):
+ """
+ Hook for actions to be taken when a player interacts with an entity.
+
+ Each plugin needs to specify a list of entity types it is interested in
+ in advance, and it will only be called for those.
+ """
+
+ def use_hook(player, target, alternate):
+ """
+ Do things.
+
+ :param `Player` player: player
+ :param `Entity` target: target of the interaction
+ :param bool alternate: whether the player right-clicked the target
+ """
+
+ targets = Attribute("""
+ List of entity names this plugin wants to be called for.
+ """)
+
+class IAutomaton(IBravoPlugin):
+ """
+ An automaton.
+
+ Automatons are given blocks from chunks which interest them, and may do
+ processing on those blocks.
+ """
+
+ blocks = Attribute("""
+ List of blocks which this automaton is interested in.
+ """)
+
+ def feed(coordinates):
+ """
+ Provide this automaton with block coordinates to handle later.
+ """
+
+ def scan(chunk):
+ """
+ Provide this automaton with an entire chunk which this automaton may
+ handle as it pleases.
+
+ A utility scanner which will simply `feed()` this automaton is in
+ bravo.utilities.automatic.
+ """
+
+ def start():
+ """
+ Run the automaton.
+ """
+
+ def stop():
+ """
+ Stop the automaton.
+
+ After this method is called, the automaton should not continue
+ processing data; it needs to stop immediately.
+ """
+
+class IWorldResource(IBravoPlugin, IResource):
+ """
+ Interface for a world specific web resource.
+ """
292 bravo/plugin.py
@@ -0,0 +1,292 @@
+"""
+The ``plugin`` module implements a sophisticated, featureful plugin loader
+based on Exocet, with interface-based discovery.
+"""
+
+from types import ModuleType
+from xml.sax import saxutils
+
+from exocet import ExclusiveMapper, getModule, load, pep302Mapper
+
+from twisted.internet import reactor
+from twisted.python import log
+
+from zope.interface.exceptions import BrokenImplementation
+from zope.interface.exceptions import BrokenMethodImplementation
+from zope.interface.verify import verifyObject
+
+from bravo.errors import PluginException
+from bravo.ibravo import InvariantException, ISortedPlugin
+
+blacklisted = set([
+ "asyncore", # Use Twisted's event loop.
+ "ctypes", # Segfault protection.
+ "gc", # Haha, no.
+ "imp", # Haha, no.
+ "inspect", # Haha, no.
+ "multiprocessing", # Use Twisted's process interface.
+ "socket", # Use Twisted's socket interface.
+ "subprocess", # Use Twisted's process interface.
+ "thread", # Use Twisted's thread interface.
+ "threading", # Use Twisted's thread interface.
+])
+overrides = {
+ "twisted.internet.reactor": reactor,
+ "saxutils": saxutils,
+}
+bravoMapper = ExclusiveMapper(pep302Mapper,
+ blacklisted).withOverrides(overrides)
+
+def sort_plugins(plugins):
+ """
+ Make a sorted list of plugins by dependency.
+
+ If the list cannot be arranged into a DAG, an error will be raised. This
+ usually means that a cyclic dependency was found.
+
+ :raises PluginException: cyclic dependency detected
+ """
+
+ l = []
+ d = dict((plugin.name, plugin) for plugin in plugins)
+
+ def visit(plugin):
+ if plugin not in l:
+ for name in plugin.before:
+ if name in d:
+ visit(d[name])
+ l.append(plugin)
+
+ for plugin in plugins:
+ if not any(name in d for name in plugin.after):
+ visit(plugin)
+
+ return l
+
+def add_plugin_edges(d):
+ """
+ Mirror edges to all plugins in a dictionary.
+ """
+
+ for plugin in d.itervalues():
+ plugin.after = set(plugin.after)
+ plugin.before = set(plugin.before)
+
+ for name, plugin in d.iteritems():
+ for edge in list(plugin.before):
+ if edge in d:
+ d[edge].after.add(name)
+ else:
+ plugin.before.discard(edge)
+ for edge in list(plugin.after):
+ if edge in d:
+ d[edge].before.add(name)
+ else:
+ plugin.after.discard(edge)
+
+ return d
+
+def expand_names(plugins, names):
+ """
+ Given a list of names, expand wildcards and discard disabled names.
+
+ Used to implement * and - options in plugin lists.
+
+ :param dict plugins: plugins to use for expansion
+ :param list names: names to examine
+
+ :returns: a list of filtered plugin names
+ """
+
+ # Wildcard.
+ if "*" in names:
+ # Get the exceptions.
+ exceptions = set(name[1:] for name in names if name.startswith("-"))
+
+ # And now the names. Everything that isn't excepted.
+ names = [name for name in plugins.iterkeys()
+ if name not in exceptions]
+
+ return names
+
+def verify_plugin(interface, plugin):
+ """
+ Plugin interface verification.
+
+ This function will call ``verifyObject()`` and ``validateInvariants()`` on
+ the plugins passed to it.
+
+ The primary purpose of this wrapper is to do logging, but it also permits
+ code to be slightly cleaner, easier to test, and callable from other
+ modules.
+ """
+
+ try:
+ verifyObject(interface, plugin)
+ interface.validateInvariants(plugin)
+ log.msg(" ( ^^) Plugin: %s" % plugin.name)
+ except BrokenImplementation, bi:
+ if hasattr(plugin, "name"):
+ log.msg(" ( ~~) Plugin %s is missing attribute %r!" %
+ (plugin.name, bi.name))
+ else:
+ log.msg(" ( >&) Plugin %s is unnamed and useless!" % plugin)
+ except BrokenMethodImplementation, bmi:
+ log.msg(" ( Oo) Plugin %s has a broken %s()!" % (plugin.name,
+ bmi.method))
+ log.msg(bmi)
+ except InvariantException, ie:
+ log.msg(" ( >&) Plugin %s failed validation!" % plugin.name)
+ log.msg(ie)
+ else:
+ return plugin
+
+ raise PluginException("Plugin failed verification")
+
+def synthesize_parameters(parameters):
+ """
+ Create a faked module which has the given parameters in it.
+
+ This should work everywhere. If it doesn't, let me know.
+ """
+
+ module = ModuleType("parameters")
+ module.__dict__.update(parameters)
+ return module
+
+__cache = {}
+
+def get_plugins(interface, package, parameters=None):
+ """
+ Lazily find objects in a package which implement a given interface.
+
+ If the optional dictionary of parameters is provided, it will be passed
+ into each plugin module as the "bravo.parameters" module. An example
+ access from inside the plugin:
+
+ >>> from bravo.parameters import foo, bar
+
+ Since the parameters are available as a real module, the parameters may be
+ imported and used like any other module:
+
+ >>> from bravo import parameters as params
+
+ This is a rewrite of Twisted's ``twisted.plugin.getPlugins`` which uses
+ Exocet instead of Twisted to find the plugins.
+
+ :param interface interface: the interface to match against
+ :param str package: the name of the package to search
+ :param dict parameters: parameters to pass into the plugins
+ """
+
+ mapper = bravoMapper
+
+ # If parameters are provided, add them to the mapper in a synthetic
+ # module.
+ if parameters:
+ mapper = mapper.withOverrides(
+ {"bravo.parameters": synthesize_parameters(parameters)})
+
+ # This stack will let us iteratively recurse into packages during the
+ # module search.
+ stack = [getModule(package)]
+
+ # While there are packages left to search...
+ while stack:
+ # For each package/module in the package...
+ for pm in stack.pop().iterModules():
+ # If it's a package, append it to the list of packages to search.
+ if pm.isPackage():
+ stack.append(pm)
+
+ try:
+ # Load the module.
+ m = load(pm, mapper)
+
+ # Make a good attempt to iterate through the module's
+ # contents, and see what matches our interface.
+ for obj in vars(m).itervalues():
+ try:
+ adapted = interface(obj, None)
+ except:
+ log.err()
+ else:
+ if adapted is not None:
+ yield adapted
+ except ImportError, ie:
+ log.msg(ie)
+
+def retrieve_plugins(interface, search, parameters=None):
+ """
+ Look up all plugins for a certain interface.
+
+ If the plugin cache is enabled, this function will not attempt to reload
+ plugins from disk or discover new plugins.
+
+ :param interface interface: the interface to use
+ :param dict parameters: parameters to pass into the plugins
+
+ :returns: a dict of plugins, keyed by name
+ :raises PluginException: no plugins could be found for the given interface
+ """
+
+ if not parameters and interface in __cache:
+ return __cache[interface]
+
+ log.msg("Discovering %s..." % interface)
+ d = {}
+ for p in get_plugins(interface, search, parameters):
+ try:
+ verify_plugin(interface, p)
+ d[p.name] = p
+ except PluginException:
+ pass
+
+ if issubclass(interface, ISortedPlugin):
+ # Sortable plugins need their edges mirrored.
+ d = add_plugin_edges(d)
+
+ # Cache non-parameterized plugins.
+ if not parameters:
+ __cache[interface] = d
+
+ return d
+
+def retrieve_named_plugins(interface, names, search, parameters=None):
+ """
+ Look up a list of plugins by name.
+
+ Plugins are returned in the same order as their names.
+
+ :param interface interface: the interface to use
+ :param list names: plugins to find
+ :param dict parameters: parameters to pass into the plugins
+
+ :returns: a list of plugins
+ :raises PluginException: no plugins could be found for the given interface
+ """
+
+ d = retrieve_plugins(interface, search, parameters)
+
+ # Handle wildcards and options.
+ names = expand_names(d, names)
+
+ try:
+ return [d[name] for name in names]
+ except KeyError, e:
+ raise PluginException("Couldn't find plugin %s for interface %s!" %
+ (e.args[0], interface.__name__))
+
+def retrieve_sorted_plugins(interface, names, parameters=None):
+ """
+ Look up a list of plugins, sorted by interdependencies.
+
+ :param dict parameters: parameters to pass into the plugins
+ """
+
+ l = retrieve_named_plugins(interface, names, parameters)
+ try:
+ return sort_plugins(l)
+ except KeyError, e:
+ raise PluginException("Couldn't find plugin %s for interface %s!" %
+ (e.args[0], interface))
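A rough usage sketch for the loader itself. The package name, plugin names, and the parameters dict below are placeholders rather than anything this commit wires up.

    from bravo.ibravo import IChatCommand
    from bravo.plugin import retrieve_plugins, retrieve_named_plugins

    # Discover every IChatCommand plugin under a package. The parameters
    # dict becomes importable inside each plugin as ``bravo.parameters``;
    # a real factory object would normally be passed here.
    plugins = retrieve_plugins(IChatCommand, "hamper.plugins",
                               parameters={"factory": None})

    # Look plugins up by name. "*" expands to every discovered plugin and
    # "-name" excludes one, as handled by expand_names().
    wanted = retrieve_named_plugins(IChatCommand, ["*", "-hello"],
                                    "hamper.plugins")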
12 exocet/__init__.py
@@ -0,0 +1,12 @@
+# Copyright (c) 2010-2011 Allen Short. See LICENSE file for details.
+from exocet._exocet import (load, loadNamed, loadPackage, loadPackageNamed,
+ proxyModule, emptyMapper, pep302Mapper, IMapper,
+ DictMapper, ExclusiveMapper, CallableMapper,
+ getModule)
+
+__all__= ['load', 'loadNamed', 'loadPackage', 'loadPackageNamed',
+ 'getModule', 'proxyModule', 'emptyMapper', 'pep302Mapper',
+ 'IMapper', 'DictMapper', 'CallableMapper']
+
+__version__ = '0.5'
+
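A quick sketch of how these exported names fit together; the stdlib module being loaded and the override are arbitrary examples, not something this commit does.

    import re

    from exocet import loadNamed, pep302Mapper

    # Load the stdlib "string" module through Exocet rather than through
    # the normal import machinery.
    plain = loadNamed("string", pep302Mapper)
    print plain.digits

    # Mappers decide what a loaded module sees when it imports things; a
    # replacement module (here simply the real "re" again) can be swapped
    # in with withOverrides().
    mapper = pep302Mapper.withOverrides({"re": re})
    isolated = loadNamed("string", mapper)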
121 exocet/_components.py
@@ -0,0 +1,121 @@
+# -*- test-case-name: twisted.python.test.test_components -*-
+# Copyright (c) 2001-2009 Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Component architecture for Twisted, based on Zope3 components.
+
+Using the Zope3 API directly is strongly recommended. Everything
+you need is in the top-level of the zope.interface package, e.g.::
+
+ from zope.interface import Interface, implements
+
+ class IFoo(Interface):
+ pass
+
+ class Foo:
+ implements(IFoo)
+
+ print IFoo.implementedBy(Foo) # True
+ print IFoo.providedBy(Foo()) # True
+
+L{twisted.python.components.registerAdapter} from this module may be used to
+add to Twisted's global adapter registry.
+
+L{twisted.python.components.proxyForInterface} is a factory for classes
+which allow access to only the parts of another class defined by a specified
+interface.
+"""
+
+# zope3 imports
+from zope.interface import interface, declarations
+from zope.interface.adapter import AdapterRegistry
+
+
+# The global adapter registry of the modules package
+globalRegistry = AdapterRegistry()
+
+# Attribute that registerAdapter looks at. Is this supposed to be public?
+ALLOW_DUPLICATES = 0
+
+# Define a function to find the registered adapter factory, using either a
+# version of Zope Interface which has the `registered' method or an older
+# version which does not.
+if getattr(AdapterRegistry, 'registered', None) is None:
+ def _registered(registry, required, provided):
+ """
+ Return the adapter factory for the given parameters in the given
+ registry, or None if there is not one.
+ """
+ return registry.get(required).selfImplied.get(provided, {}).get('')
+else:
+ def _registered(registry, required, provided):
+ """
+ Return the adapter factory for the given parameters in the given
+ registry, or None if there is not one.
+ """
+ return registry.registered([required], provided)
+
+
+def registerAdapter(adapterFactory, origInterface, *interfaceClasses):
+ """Register an adapter class.
+
+ An adapter class is expected to implement the given interface, by
+ adapting instances implementing 'origInterface'. An adapter class's
+ __init__ method should accept one parameter, an instance implementing
+ 'origInterface'.
+ """
+ self = globalRegistry
+ assert interfaceClasses, "You need to pass an Interface"
+ global ALLOW_DUPLICATES
+
+ # deal with class->interface adapters:
+ if not isinstance(origInterface, interface.InterfaceClass):
+ origInterface = declarations.implementedBy(origInterface)
+
+ for interfaceClass in interfaceClasses:
+ factory = _registered(self, origInterface, interfaceClass)
+ if factory is not None and not ALLOW_DUPLICATES:
+ raise ValueError("an adapter (%s) was already registered." % (factory, ))
+ for interfaceClass in interfaceClasses:
+ self.register([origInterface], interfaceClass, '', adapterFactory)
+
+
+def getAdapterFactory(fromInterface, toInterface, default):
+ """Return registered adapter for a given class and interface.
+
+ Note that this is tied to the *Twisted* global registry, and will
+ thus not find adapters registered elsewhere.
+ """
+ self = globalRegistry
+ if not isinstance(fromInterface, interface.InterfaceClass):
+ fromInterface = declarations.implementedBy(fromInterface)
+ factory = self.lookup1(fromInterface, toInterface)
+ if factory is None:
+ factory = default
+ return factory
+
+
+# add global adapter lookup hook for our newly created registry
+def _hook(iface, ob, lookup=globalRegistry.lookup1):
+ factory = lookup(declarations.providedBy(ob), iface)
+ if factory is None:
+ return None
+ else:
+ return factory(ob)
+interface.adapter_hooks.append(_hook)
+
+
+def getRegistry():
+ """Returns the Twisted global
+ C{zope.interface.adapter.AdapterRegistry} instance.
+ """
+ return globalRegistry
+
+
+__all__ = [
+ # Sticking around:
+ "registerAdapter", "getAdapterFactory",
+ "getRegistry",
+]
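Since this is a vendored copy of Twisted's component machinery, a small sketch of the register-and-adapt round trip it provides may help; every name below is invented for illustration.

    from zope.interface import Interface, implements

    from exocet._components import registerAdapter

    class IText(Interface):
        def render():
            """Return a text rendering."""

    class Number(object):
        def __init__(self, value):
            self.value = value

    class NumberToText(object):
        implements(IText)

        def __init__(self, original):
            self.original = original

        def render(self):
            return str(self.original.value)

    # Register the adapter; the adapter hook installed above then lets a
    # plain interface call perform the adaptation.
    registerAdapter(NumberToText, Number, IText)
    print IText(Number(42)).render()   # prints "42"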
807 exocet/_exocet.py
@@ -0,0 +1,807 @@
+# -*- test-case-name: exocet.test.test_exocet -*-
+# Copyright (c) 2010-2011 Allen Short. See LICENSE file for details.
+
+
+import sys, __builtin__, itertools, traceback
+from exocet._modules import getModule
+from types import ModuleType
+from zope.interface import Interface, implements
+
+DEBUG = False
+_sysModulesSpecialCases = {
+ "os": ['path'],
+ "twisted.internet": ["reactor"],
+}
+
+def trace(*args):
+ if DEBUG:
+ print ' '.join(str(x) for x in args)
+
+
+class IMapper(Interface):
+ """
+ An object that maps names used in C{import} statements to objects (such as
+ modules).
+ """
+
+ def lookup(name):
+ """
+ Look up a name, returning the object (usually a module) it maps
+ to, or raise C{ImportError} if this mapper cannot resolve it.
+ """
+
+
+ def contains(name):
+ """
+ Return a boolean indicating whether this name can be resolved
+ successfully by this mapper.
+ """
+
+
+ def withOverrides(overrides):
+ """
+ Create a new mapper based on this one, with the mappings provided
+ overriding existing names.
+ """
+
+
+class CallableMapper(object):
+ """
+ A mapper based on a callable that returns a module or raises C{ImportError}.
+ """
+ implements(IMapper)
+ def __init__(self, baseLookup):
+ self._baseLookup = baseLookup
+
+
+ def lookup(self, name):
+ """
+ Call our callable to do lookup.
+ @see L{IMapper.lookup}
+ """
+ try:
+ return self._baseLookup(name)
+ except ImportError:
+ raise ImportError("No module named %r in mapper %r" % (name, self))
+
+
+ def contains(self, name):
+ """
+ @see L{IMapper.contains}
+ """
+ try:
+ self.lookup(name)
+ return True
+ except ImportError:
+ return False
+
+
+ def withOverrides(self, overrides):
+ """
+ @see L{IMapper.withOverrides}
+ """
+ return _StackedMapper([DictMapper(overrides), self])
+
+
+class DictMapper(object):
+ """
+ A mapper that looks up names in a dictionary or other mapping.
+ """
+ implements(IMapper)
+ def __init__(self, _dict):
+ self._dict = _dict
+
+
+ def lookup(self, name):
+ """
+ @see L{IMapper.lookup}
+ """
+ if name in self._dict:
+ return self._dict[name]
+ else:
+ raise ImportError("No module named %r in mapper %r" % (name, self))
+
+
+ def contains(self, name):
+ """
+ @see L{IMapper.contains}
+ """
+ return name in self._dict
+
+
+ def withOverrides(self, overrides):
+ """
+ @see L{IMapper.withOverrides}
+ """
+ return _StackedMapper([DictMapper(overrides), self])
+
+
+
+class _StackedMapper(object):
+ """
+ A mapper that consults multiple other mappers, in turn.
+ """
+
+ def __init__(self, submappers):
+ self._submappers = submappers
+
+
+ def lookup(self, name):
+ """
+ @see L{IMapper.lookup}
+ """
+ for m in self._submappers:
+ try:
+ val = m.lookup(name)
+ break
+ except ImportError, e:
+ continue
+ else:
+ raise e
+ return val
+
+ def contains(self, name):
+ """
+ @see L{IMapper.contains}
+ """
+ try:
+ self.lookup(name)
+ return True
+ except ImportError:
+ return False
+
+
+ def withOverrides(self, overrides):
+ """
+ @see L{IMapper.withOverrides}
+ """
+ return _StackedMapper([DictMapper(overrides), self])
+
+
+
+class ExclusiveMapper(object):
+ """
+ A mapper that wraps another mapper, but excludes certain names.
+
+ This mapper can be used to implement a blacklist.
+ """
+
+ implements(IMapper)
+
+ def __init__(self, submapper, excluded):
+ self._submapper = submapper
+ self._excluded = excluded
+
+
+ def lookup(self, name):
+ """
+ @see L{IMapper.lookup}
+ """
+
+ if name in self._excluded:
+ raise ImportError("Module %s blacklisted in mapper %s"
+ % (name, self))
+ return self._submapper.lookup(name)
+
+
+ def contains(self, name):
+ """
+ @see L{IMapper.contains}
+ """
+
+ if name in self._excluded:
+ return False
+ return self._submapper.contains(name)
+
+
+ def withOverrides(self, overrides):
+ """
+ @see L{IMapper.withOverrides}
+ """
+ return _StackedMapper([DictMapper(overrides), self])
+
+
+
+class _PackageMapper(CallableMapper):
+ """
+ A mapper that allows direct mutation as a result of intrapackage
+ imports. For internal use only. Void where prohibited. If symptoms
+ persist, see a professional.
+ """
+ implements(IMapper)
+ def __init__(self, name, baseMapper):
+ self._name = name
+ self._baseMapper = baseMapper
+ self._intrapackageImports = {}
+
+
+ def _baseLookup(self, name):
+ if name in self._intrapackageImports:
+ return self._intrapackageImports[name]
+ else:
+ return self._baseMapper.lookup(name)
+
+
+ def add(self, name, module):
+ """
+ Add an intrapackage import.
+ """
+ #if not name.startswith(self._name):
+ # raise ValueError("%r is not a member of the %r package" %
+ # (name, self._name))
+ self._intrapackageImports[name] = module
+
+
+
+def _noLookup(name):
+ raise ImportError(name)
+
+class _PEP302Mapper(CallableMapper):
+ """
+ Mapper that uses Python's default import mechanism to load modules.
+
+ @cvar _oldSysModules: Set by L{_isolateImports} when clearing
+ L{sys.modules} to its former contents.
+ """
+
+ _oldSysModules = {}
+
+ def __init__(self):
+ self._metaPath = list(sys.meta_path)
+
+
+ def _baseLookup(self, name):
+ try:
+ prevImport = __import__
+ prevMetaPath = list(sys.meta_path)
+ prevSysModules = sys.modules.copy()
+ __builtin__.__import__ = prevImport
+ sys.meta_path[:] = self._metaPath
+ sys.modules.clear()
+ sys.modules.update(self._oldSysModules)
+ topLevel = _originalImport(name)
+ trace("pep302Mapper imported %r as %r@%d" % (name, topLevel, id(topLevel)))
+ packages = name.split(".")[1:]
+ m = topLevel
+ trace("subelements:", packages)
+ for p in packages:
+ trace("getattr", m, p)
+ m = getattr(m, p)
+ trace("done:", m, id(m))
+ return m
+ finally:
+ self._oldSysModules.update(sys.modules)
+ sys.meta_path[:] = prevMetaPath
+ sys.modules.clear()
+ sys.modules.update(prevSysModules)
+ __builtin__.__import__ = prevImport
+
+
+emptyMapper = CallableMapper(_noLookup)
+pep302Mapper = _PEP302Mapper()
+
+def lookupWithMapper(mapper, fqn):
+ """
+ Look up a FQN in a mapper, logging all non-ImportError exceptions and
+ converting them to ImportErrors.
+ """
+ try:
+ return mapper.lookup(fqn)
+ except ImportError, e:
+ raise e
+ except:
+ print "Error raised by Exocet mapper while loading %r" % (fqn)
+ traceback.print_exc()
+ raise ImportError(fqn)
+
+class MakerFinder(object):
+ """
+ The object used as Exocet's PEP 302 meta-import hook. 'import' statements
+ result in calls to find_module/load_module. A replacement for the
+ C{__import__} function is provided as a method, as well.
+
+ @ivar mapper: A L{Mapper}.
+
+ @ivar oldImport: the implementation of C{__import__} being wrapped by this
+ object's C{xocImport} method.
+ """
+ def __init__(self, oldImport, mapper):
+ self.mapper = mapper
+ self.oldImport = oldImport
+
+
+ def find_module(self, fullname, path=None):
+ """
+ Module finder method required by PEP 302 for meta-import hooks.
+
+ @param fullname: The name of the module/package being imported.
+ @param path: The __path__ attribute of the package, if applicable.
+ """
+ trace("find_module", fullname, path)
+ return self
+
+
+ def load_module(self, fqn):
+ """
+ Module loader method required by PEP 302 for meta-import hooks.
+
+ @param fqn: The fully-qualified name of the module requested.
+ """
+ trace("load_module", fqn)
+ trace("sys.modules", sys.modules)
+ p = lookupWithMapper(self.mapper, fqn)
+ trace("load_module", fqn , "done", id(p))
+
+ if fqn in _sysModulesSpecialCases:
+ # This module didn't have access to our isolated sys.modules when it
+ # did its sys.modules modification. Replicate it here.
+ for submoduleName in _sysModulesSpecialCases[fqn]:
+ subfqn = '.'.join([fqn, submoduleName])
+ sys.modules[subfqn] = getattr(p, submoduleName, None)
+ return p
+
+
+ def xocImport(self, name, *args, **kwargs):
+ """
+ Wrapper around C{__import__}. Needed to ensure builtin modules aren't
+ loaded from the global context.
+ """
+ trace("Import invoked:", name, kwargs.keys())
+ if name in sys.builtin_module_names:
+ trace("Loading builtin module", name)
+ return self.load_module(name)
+ else:
+ return self.oldImport(name, *args, **kwargs)
+
+
+
+
+def loadNamed(fqn, mapper, m=None):
+ """
+ Load a Python module, eliminating as much of its access to global state as
+ possible. If a package name is given, its __init__.py is loaded.
+
+ @param fqn: The fully qualified name of a Python module, e.g
+ C{twisted.python.filepath}.
+
+ @param mapper: A L{Mapper}.
+
+ @param m: An optional empty module object to load code into. (For
+ resolving circular module imports.)
+
+ @returns: An instance of the module name requested.
+ """
+
+ maker = getModule(fqn)
+ return load(maker, mapper, m=m)
+
+
+def load(maker, mapper, m=None):
+ """
+ Load a Python module, eliminating as much of its access to global state as
+ possible. If a package name is given, its __init__.py is loaded.
+
+ @param maker: A module maker object (i.e., a L{modules.PythonModule} instance)
+
+ @param mapper: A L{Mapper}.
+
+ @param m: An optional empty module object to load code into. (For
+ resolving circular module imports.)
+
+ @returns: An instance of the module name requested.
+ """
+
+ if maker.isPackage() and m is None:
+ pkgContext = PackageContext(maker, mapper, getModule)
+ return pkgContext.loadNamed(maker.name)
+ else:
+ mf = MakerFinder(__builtin__.__import__, mapper)
+ if maker.filePath.splitext()[1] in [".so", ".pyd"]:
+ #it's native code, gotta suck it up and load it globally (really at a
+ ## loss on how to unit test this without significant inconvenience)
+ return maker.load()
+ return _isolateImports(mf, _loadSingle, maker, mf, m)
+
+def _loadSingle(mk, mf, m=None):
+ trace("execfile", mk.name, m)
+ if m is None:
+ m = ModuleType(mk.name)
+ contents = {}
+ code = execfile(mk.filePath.path, contents)
+ contents['__exocet_context__'] = mf
+ m.__dict__.update(contents)
+ m.__file__ = mk.filePath.path
+ return m
+
+def _isolateImports(mf, f, *a, **kw):
+ """
+ Internal guts for actual code loading. Displaces the global environment
+ and executes the code, then restores the previous global settings.
+
+ @param mf: A L{MakerFinder} instance.
+ @param f: The callable to invoke while imports are isolated.
+ @param a: Positional arguments passed to C{f}.
+ @param kw: Keyword arguments passed to C{f}.
+ """
+
+
+ oldMetaPath = sys.meta_path
+ oldPathHooks = sys.path_hooks
+ _PEP302Mapper._oldSysModules = sys.modules.copy()
+ oldImport = __builtin__.__import__
+ #where is your god now?
+ sys.path_hooks = []
+ sys.modules.clear()
+ sys.meta_path = [mf]
+ __builtin__.__import__ = mf.xocImport
+
+
+
+ #stupid special case for the stdlib
+ if mf.mapper.contains('warnings'):
+ sys.modules['warnings'] = mf.mapper.lookup('warnings')
+
+ try:
+ return f(*a, **kw)
+ finally:
+ sys.meta_path = oldMetaPath
+ sys.path_hooks = oldPathHooks
+ sys.modules.clear()
+ sys.modules.update(_PEP302Mapper._oldSysModules)
+ __builtin__.__import__ = oldImport
+
+def loadPackageNamed(fqn, mapper):
+ """
+ Analyze a Python package to determine its internal and external
+ dependencies and produce an object capable of creating modules
+ that refer to each other.
+
+ @param fqn: The fully qualified name of a Python module, e.g
+ C{twisted.python.filepath}.
+
+ @param mapper: A L{Mapper}.
+
+ @returns: A package loader object.
+ """
+ pkg = getModule(fqn)
+ return loadPackage(pkg, mapper)
+
+
+def loadPackage(pkg, mapper):
+ """
+ Analyze a Python package to determine its internal and external
+ dependencies and produce an object capable of creating modules
+ that refer to each other.
+
+ @param pkg: a module maker object (i.e., a L{modules.PythonModule} instance)
+
+ @param mapper: A L{Mapper}.
+
+ @returns: A package loader object.
+ """
+ if not pkg.isPackage():
+ raise ValueError("%r is not a package" % (pkg.name,))
+ return PackageContext(pkg, mapper, getModule)
+
+
+def _buildAndStoreEmptyModule(maker, mapper):
+ m = ModuleType(maker.name)
+ m.__path__ = maker.filePath.path
+ mapper.add(maker.name, m)
+ return m
+
+
+class PackageContext(object):
+ """
+ A context for loading interdependent modules from a package. This object
+ allows modules in a package to refer to one another consistently without
+ requiring them to be in sys.modules. Calling this object's C{loadNamed}
+ method will retrieve an already loaded instance if one exists, or else
+ loads it (and all its previously unloaded dependencies within the package)
+ and remembers them for later use.
+ """
+ def __init__(self, packageMaker, mapper, getModule):
+ """
+ @param packageMaker: A L{modules._modules.PythonModule} object
+ representing a package.
+ @param mapper: A L{Mapper}.
+ @param getModule: A function to fetch module maker objects, given an
+ FQN. (For example, {twisted.python.}modules.getModule.)
+ """
+ self.packageMaker = packageMaker
+ self.mapper = _PackageMapper(packageMaker.name, mapper)
+ initModule = _buildAndStoreEmptyModule(packageMaker, self.mapper)
+ if '.' in packageMaker.name:
+ modulePath = packageMaker.name.split(".")
+ if not self.mapper.contains(modulePath[0]):
+ parentMaker = getModule(modulePath[0])
+ parent = _buildAndStoreEmptyModule(parentMaker, self.mapper)
+ self.mapper.add(modulePath[0], parent)
+ for seg in modulePath[1:-1]:
+ new = _buildAndStoreEmptyModule(parentMaker[seg],
+ self.mapper)
+ setattr(parent, seg, new)
+ parent = new
+ setattr(parent, modulePath[-1], initModule)
+ self.allModules = analyzePackage(self.packageMaker)
+ sortedModules = robust_topological_sort(self.allModules)
+ self.loadOrder = list(itertools.chain(*sortedModules))
+ self.loadOrder.reverse()
+ trace("load __init__", vars(initModule), id(initModule))
+ self._reallyLoadModule(packageMaker.name, initModule)
+
+ def _searchDeps(self, fqn, depsFoundSoFar=None):
+ """
+ Searches recursively for modules not yet loaded in this package.
+
+ @param fqn: A module name.
+ @param depsFoundSoFar: A set of dependencies found in previous
+ invocations of this function, or None.
+ """
+ trace("searchDeps", fqn, depsFoundSoFar)
+ if depsFoundSoFar is not None:
+ deps = depsFoundSoFar
+ else:
+ deps = set()
+ deps.add(fqn)
+ maker, internalDeps, externalDeps = self.allModules[fqn]
+ for dep in internalDeps:
+ if dep not in deps and not self.mapper.contains(dep):
+ self._searchDeps(dep, deps)
+ return deps
+
+ def _unloadedDeps(self, fqn):
+ """
+ Returns an iterable of names of unloaded modules in this package that
+ must be loaded to satisfy the dependencies of the named package, in
+ the order they should be loaded.
+
+ @param fqn: A module name in this package.
+ """
+ unloadedDeps = self._searchDeps(fqn)
+ trace("raw unloaded deps", unloadedDeps)
+ trace("load order", self.loadOrder)
+ for dep in self.loadOrder:
+ if dep in unloadedDeps:
+ yield dep
+
+ def loadNamed(self, fqn):
+ """
+ Load the named module in this package, and any dependencies it may
+ have, if not already loaded. Returns the named module.
+
+ @param fqn: The fully-qualified name of a module in this package.
+ """
+ trace("package loadModule", fqn)
+ if self.mapper.contains(fqn):
+ trace("package module load satisfied from cache")
+ return lookupWithMapper(self.mapper, fqn)
+ m = self._reallyLoadModule(fqn)
+ trace("package module load triggered code loading", m)
+ return m
+
+ def _reallyLoadModule(self, fqn, m=None):
+ """
+ Do the actual work of loading modules (and their dependencies) into
+ this package context.
+
+ @param fqn: The fully-qualified name of a module in this package.
+ @param m: An optional empty module object to load code into. (For
+ resolving circular module imports.)
+ """
+ unloadedDeps = list(self._unloadedDeps(fqn))
+ if self.packageMaker.name in unloadedDeps:
+ unloadedDeps.remove(self.packageMaker.name)
+ trace("unloaded deps", unloadedDeps)
+ for name in unloadedDeps:
+ self.mapper.add(name, ModuleType(name))
+ for name in unloadedDeps:
+ submod = lookupWithMapper(self.mapper, name)
+ loadNamed(name, self.mapper, submod)
+ trace("loaded", name, submod)
+ setattr(lookupWithMapper(self.mapper, self.packageMaker.name),
+ name, submod)
+ self.mapper.add(fqn, loadNamed(fqn, self.mapper, m))
+ return lookupWithMapper(self.mapper, fqn)
+
+
+def analyzePackage(pkg):
+ """
+ Build a graph of internal and external dependencies of all modules in a
+ package.
+
+ @param pkg: A L{modules._modules.PythonModule} object representing a
+ package.
+
+ @return: A mapping of fully qualified module names in a package to (module
+ object, internal dependencies, external dependencies) triples.
+ """
+ allModules = {}
+ def collect(module):
+ if module.isPackage():
+ for mod in module.walkModules():
+ if mod is not module:
+ collect(mod)
+
+ allModules[module.name] = (module, set(), set())
+
+
+ collect(pkg)
+ for name, (m, imports, externalDeps) in allModules.items():
+ if m.filePath.splitext()[1] in [".so", ".pyd"]:
+ continue
+ for im in m.iterImportNames():
+ prefix, dot, item = im.rpartition(".")
+ if im in allModules:
+ imports.add(im)
+ elif prefix in allModules:
+ imports.add(prefix)
+ else:
+ externalDeps.add(im)
+
+ return allModules
+
+
+
+## Topological sort based on Paul Harrison's public domain toposort.py
+
+def strongly_connected_components(graph):
+ """
+ Find the strongly connected components in a graph using Tarjan's
+ algorithm.
+
+ @param graph: a mapping of node names to lists of successor nodes.
+ """
+
+ result = []
+ stack = []
+ low = {}
+
+ def visit(node):
+ if node in low: return
+
+ num = len(low)
+ low[node] = num
+ stack_pos = len(stack)
+ stack.append(node)
+
+ for successor in graph[node][1]:
+ visit(successor)
+ low[node] = min(low[node], low[successor])
+
+ if num == low[node]:
+ component = tuple(stack[stack_pos:])
+ del stack[stack_pos:]
+ result.append(component)
+ for item in component:
+ low[item] = len(graph)
+
+ for node in graph:
+ visit(node)
+
+ return result
+
+
+def topological_sort(graph):
+ """
+ Topological sort of a directed graph.
+
+ @param graph: Mapping of nodes to lists of other nodes.
+
+ @return: List of nodes, topologically sorted.
+ """
+ count = {}
+ for node in graph:
+ count[node] = 0
+ for node in graph:
+ for successor in graph[node]:
+ count[successor] += 1
+
+ ready = [node for node in graph if count[node] == 0]
+
+ result = []
+ while ready:
+ node = ready.pop(-1)
+ result.append(node)
+
+ for successor in graph[node]:
+ count[successor] -= 1
+ if count[successor] == 0:
+ ready.append(successor)
+
+ return result
+
+
+def robust_topological_sort(graph):
+ """
+ First identify strongly connected components,
+ then perform a topological sort on these components.
+
+ @param graph: Mapping of nodes to lists of other nodes.
+
+ @return: List of tuples of strongly connected groups of nodes,
+ topologically sorted.
+ """
+ components = strongly_connected_components(graph)
+
+ node_component = {}
+ for component in components:
+ for node in component:
+ node_component[node] = component
+
+ component_graph = {}
+ for component in components:
+ component_graph[component] = []
+
+ for node in graph:
+ node_c = node_component[node]
+ for successor in graph[node][1]:
+ successor_c = node_component[successor]
+ if node_c != successor_c:
+ component_graph[node_c].append(successor_c)
+
+ return topological_sort(component_graph)
+
+
+
+def proxyModule(original, **replacements):
+ """
+ Create a proxy for a module object, overriding some of its attributes with
+ replacement objects.
+
+ @param original: A module.
+ @param replacements: Attribute names and objects to associate with them.
+
+ @returns: A module proxy with attributes containing the replacement
+ objects; other attribute accesses are delegated to the original module.
+ """
+ class _ModuleProxy(object):
+ def __getattribute__(self, name):
+ if name in replacements:
+ return replacements[name]
+ else:
+ return getattr(original, name)
+
+ def __repr__(self):
+ return "<Proxy for %r: %s replaced>" % (
+ original, ', '.join(replacements.keys()))
+ return _ModuleProxy()
+
+
+def redirectLocalImports(name, globals=None, *a, **kw):
+ """
+ Catch function-level imports in modules loaded via Exocet. This ensures
+ that any imports done after module load time look up imported names in the
+ same context the module was originally loaded in.
+ """
+
+ if globals is not None:
+ mf = globals.get('__exocet_context__', None)
+ if mf is not None:
+ trace("isolated __import__ of", name, "called in exocet module", mf, mf.mapper)
+ return _isolateImports(mf, _originalImport, name, globals, *a, **kw)
+ else:
+ return _originalImport(name, globals, *a, **kw)
+ else:
+ return _originalImport(name, globals, *a, **kw)
+
+
+_originalImport = None
+
+def installExocetGlobalHook():
+ """
+ Install the global Exocet import hook.
+ """
+ global _originalImport
+ _originalImport = __builtin__.__import__
+ __builtin__.__import__ = redirectLocalImports
+
+
+
+def uninstallExocetGlobalHook():
+ __builtin__.__import__ = _originalImport
+
+
+installExocetGlobalHook()
+
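Of the helpers above, `proxyModule` is the easiest to demonstrate in isolation. A small sketch, with an arbitrary replacement value:

    import string

    from exocet import proxyModule

    # The proxy answers like the real module except for the attributes
    # that were replaced.
    fake = proxyModule(string, digits="0123456789abcdef")

    print fake.digits            # the replacement value
    print fake.ascii_lowercase   # delegated to the real string module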
927 exocet/_filepath.py
@@ -0,0 +1,927 @@
+# -*- test-case-name: filepath.test.test_paths -*-
+# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Object-oriented filesystem path representation.
+"""
+
+import os
+import errno
+import random
+import base64
+from hashlib import sha1
+
+from os.path import isabs, exists, normpath, abspath, splitext
+from os.path import basename, dirname
+from os.path import join as joinpath
+from os import sep as slash
+from os import listdir, utime, stat
+
+from stat import S_ISREG, S_ISDIR
+
+# Please keep this as light as possible on other Twisted imports; many, many
+# things import this module, and it would be good if it could easily be
+# modified for inclusion in the standard library. --glyph
+
+from exocet._runtime import platform
+
+from exocet._win32 import ERROR_FILE_NOT_FOUND, ERROR_PATH_NOT_FOUND
+from exocet._win32 import ERROR_INVALID_NAME, ERROR_DIRECTORY, O_BINARY
+from exocet._win32 import WindowsError
+
+_CREATE_FLAGS = (os.O_EXCL |
+ os.O_CREAT |
+ os.O_RDWR |
+ O_BINARY)
+
+
+def _stub_islink(path):
+ """
+ Always return 'false' if the operating system does not support symlinks.
+
+ @param path: a path string.
+ @type path: L{str}
+ @return: false
+ """
+ return False
+
+
+def _stub_urandom(n):
+ """
+ Provide random data in versions of Python prior to 2.4. This is an
+ effectively compatible replacement for 'os.urandom'.
+
+ @type n: L{int}
+ @param n: the number of bytes of data to return
+ @return: C{n} bytes of random data.
+ @rtype: str
+ """
+ randomData = [random.randrange(256) for n in xrange(n)]
+ return ''.join(map(chr, randomData))
+
+
+def _stub_armor(s):
+ """
+ ASCII-armor for random data. This uses a hex encoding, although we will
+ prefer url-safe base64 encoding for features in this module if it is
+ available.
+ """
+ return s.encode('hex')
+
+islink = getattr(os.path, 'islink', _stub_islink)
+randomBytes = getattr(os, 'urandom', _stub_urandom)
+armor = getattr(base64, 'urlsafe_b64encode', _stub_armor)
+
+class InsecurePath(Exception):
+ """
+ Error that is raised when the path provided to FilePath is invalid.
+ """
+
+
+
+class LinkError(Exception):
+ """
+ An error with symlinks - either that there are cyclical symlinks or that
+ symlink are not supported on this platform.
+ """
+
+
+
+class UnlistableError(OSError):
+ """
+ An exception which is used to distinguish between errors which mean 'this
+ is not a directory you can list' and other, more catastrophic errors.
+
+ This error will try to look as much like the original error as possible,
+ while still being catchable as an independent type.
+
+ @ivar originalException: the actual original exception instance, either an
+ L{OSError} or a L{WindowsError}.
+ """
+ def __init__(self, originalException):
+ """
+ Create an UnlistableError exception.
+
+ @param originalException: an instance of OSError.
+ """
+ self.__dict__.update(originalException.__dict__)
+ self.originalException = originalException
+
+
+
+class _WindowsUnlistableError(UnlistableError, WindowsError):
+ """
+ This exception is raised on Windows, for compatibility with previous
+ releases of FilePath where unportable programs may have done "except
+ WindowsError:" around a call to children().
+
+ It is private because all application code may portably catch
+ L{UnlistableError} instead.
+ """
+
+
+
+def _secureEnoughString():
+ """
+ Create a pseudorandom, 16-character string for use in secure filenames.
+ """
+ return armor(sha1(randomBytes(64)).digest())[:16]
+
+
+
+class _PathHelper:
+ """
+ Abstract helper class also used by ZipPath; implements certain utility
+ methods.
+ """
+
+ def getContent(self):
+ fp = self.open()
+ try:
+ return fp.read()
+ finally:
+ fp.close()
+
+
+ def parents(self):
+ """
+ @return: an iterator of all the ancestors of this path, from the most
+ recent (its immediate parent) to the root of its filesystem.
+ """
+ path = self
+ parent = path.parent()
+ # root.parent() == root, so this means "are we the root"
+ while path != parent:
+ yield parent
+ path = parent
+ parent = parent.parent()
+
+
+ def children(self):
+ """
+ List the children of this path object.
+
+ @raise OSError: If an error occurs while listing the directory. If the
+ error is 'serious', meaning that the operation failed due to an access
+ violation, exhaustion of some kind of resource (file descriptors or
+ memory), OSError or a platform-specific variant will be raised.
+
+ @raise UnlistableError: If the inability to list the directory is due
+ to this path not existing or not being a directory, the more specific
+ OSError subclass L{UnlistableError} is raised instead.
+
+ @return: an iterable of all currently-existing children of this object
+ accessible with L{_PathHelper.child}.
+ """
+ try:
+ subnames = self.listdir()
+ except WindowsError, winErrObj:
+ # WindowsError is an OSError subclass, so if not for this clause
+ # the OSError clause below would be handling these. Windows error
+ # codes aren't the same as POSIX error codes, so we need to handle
+ # them differently.
+
+ # Under Python 2.5 on Windows, WindowsError has a winerror
+ # attribute and an errno attribute. The winerror attribute is
+ # bound to the Windows error code while the errno attribute is
+ # bound to a translation of that code to a perhaps equivalent POSIX
+ # error number.
+
+ # Under Python 2.4 on Windows, WindowsError only has an errno
+ # attribute. It is bound to the Windows error code.
+
+ # For simplicity of code and to keep the number of paths through
+ # this suite minimal, we grab the Windows error code under either
+ # version.
+
+ # Furthermore, attempting to use os.listdir on a non-existent path
+ # in Python 2.4 will result in a Windows error code of
+ # ERROR_PATH_NOT_FOUND. However, in Python 2.5,
+ # ERROR_FILE_NOT_FOUND results instead. -exarkun
+ winerror = getattr(winErrObj, 'winerror', winErrObj.errno)
+ if winerror not in (ERROR_PATH_NOT_FOUND,
+ ERROR_FILE_NOT_FOUND,
+ ERROR_INVALID_NAME,
+ ERROR_DIRECTORY):
+ raise
+ raise _WindowsUnlistableError(winErrObj)
+ except OSError, ose:
+ if ose.errno not in (errno.ENOENT, errno.ENOTDIR):
+ # Other possible errors here, according to linux manpages:
+ # EACCES, EMIFLE, ENFILE, ENOMEM. None of these seem like the
+ # sort of thing which should be handled normally. -glyph
+ raise
+ raise UnlistableError(ose)
+ return map(self.child, subnames)
+
+ def walk(self, descend=None):
+ """
+ Yield myself, then each of my children, and each of those children's
+ children in turn. The optional argument C{descend} is a predicate that
+ takes a FilePath, and determines whether or not that FilePath is
+ traversed/descended into. It will be called with each path for which
+ C{isdir} returns C{True}. If C{descend} is not specified, all
+ directories will be traversed (including symbolic links which refer to
+ directories).
+
+ @param descend: A one-argument callable that will return True for
+ FilePaths that should be traversed, False otherwise.
+
+ @return: a generator yielding FilePath-like objects.
+ """
+ yield self
+ if self.isdir():
+ for c in self.children():
+ # we should first see if it's what we want, then we
+ # can walk through the directory
+ if (descend is None or descend(c)):
+ for subc in c.walk(descend):
+ if os.path.realpath(self.path).startswith(
+ os.path.realpath(subc.path)):
+ raise LinkError("Cycle in file graph.")
+ yield subc
+ else:
+ yield c
+
+
+ def sibling(self, path):
+ return self.parent().child(path)
+
+ def segmentsFrom(self, ancestor):
+ """
+ Return a list of segments between a child and its ancestor.
+
+ For example, in the case of a path X representing /a/b/c/d and a path Y
+ representing /a/b, C{X.segmentsFrom(Y)} will return C{['c',
+ 'd']}.
+
+ @param ancestor: an instance of the same class as self, ostensibly an
+ ancestor of self.
+
+ @raise: ValueError if the 'ancestor' parameter is not actually an
+ ancestor, i.e. a path for /x/y/z is passed as an ancestor for /a/b/c/d.
+
+ @return: a list of strs
+ """
+ # this might be an unnecessarily inefficient implementation but it will
+ # work on win32 and for zipfiles; later I will determine if the
+ # obvious fast implementation does the right thing too
+ f = self
+ p = f.parent()
+ segments = []
+ while f != ancestor and p != f:
+ segments[0:0] = [f.basename()]
+ f = p
+ p = p.parent()
+ if f == ancestor and segments:
+ return segments
+ raise ValueError("%r not parent of %r" % (ancestor, self))
+
+
+ # new in 8.0
+ def __hash__(self):
+ """
+ Hash the same as another FilePath with the same path as mine.
+ """
+ return hash((self.__class__, self.path))
+
+
+ # pending deprecation in 8.0
+ def getmtime(self):
+ """
+ Deprecated. Use getModificationTime instead.
+ """
+ return int(self.getModificationTime())
+
+
+ def getatime(self):
+ """
+ Deprecated. Use getAccessTime instead.
+ """
+ return int(self.getAccessTime())
+
+
+ def getctime(self):
+ """
+ Deprecated. Use getStatusChangeTime instead.
+ """
+ return int(self.getStatusChangeTime())
+
+
+
+class FilePath(_PathHelper):
+ """
+ I am a path on the filesystem that only permits 'downwards' access.
+
+ Instantiate me with a pathname (for example,
+ FilePath('/home/myuser/public_html')) and I will attempt to only provide
+ access to files which reside inside that path. I may be a path to a file,
+ a directory, or a file which does not exist.
+
+ The correct way to use me is to instantiate me, and then do ALL filesystem
+ access through me. In other words, do not import the 'os' module; if you
+ need to open a file, call my 'open' method. If you need to list a
+ directory, call my 'children' method.
+
+ Even if you pass me a relative path, I will convert that to an absolute
+ path internally.
+
+ Note: although time-related methods do return floating-point results, they
+ may still be only second resolution depending on the platform and the last
+ value passed to L{os.stat_float_times}. If you want greater-than-second
+ precision, call C{os.stat_float_times(True)}, or use Python 2.5.
+ Greater-than-second precision is only available on Windows with Python 2.5
+ and later.
+
+ @type alwaysCreate: C{bool}
+ @ivar alwaysCreate: When opening this file, only succeed if the file does
+ not already exist.
+
+ @type path: C{str}
+ @ivar path: The path from which 'downward' traversal is permitted.
+
+ @ivar statinfo: The currently cached status information about the file on
+ the filesystem that this L{FilePath} points to. This attribute is
+ C{None} if the file is in an indeterminate state (either this
+ L{FilePath} has not yet had cause to call C{stat()} yet or
+ L{FilePath.changed} indicated that new information is required), 0 if
+ C{stat()} was called and returned an error (i.e. the path did not exist
+ when C{stat()} was called), or a C{stat_result} object that describes
+ the last known status of the underlying file (or directory, as the case
+ may be). Trust me when I tell you that you do not want to use this
+ attribute. Instead, use the methods on L{FilePath} which give you
+ information about it, like C{getsize()}, C{isdir()},
+ C{getModificationTime()}, and so on.
+ @type statinfo: C{int} or L{types.NoneType} or L{os.stat_result}
+ """
+
+ statinfo = None
+ path = None
+
+ def __init__(self, path, alwaysCreate=False):
+ self.path = abspath(path)
+ self.alwaysCreate = alwaysCreate
+
+ def __getstate__(self):
+ d = self.__dict__.copy()
+ if 'statinfo' in d:
+ del d['statinfo']
+ return d
+
+ def child(self, path):
+ if platform.isWindows() and path.count(":"):
+ # Catch paths like C:blah that don't have a slash
+ raise InsecurePath("%r contains a colon." % (path,))
+ norm = normpath(path)
+ if slash in norm:
+ raise InsecurePath("%r contains one or more directory separators" % (path,))
+ newpath = abspath(joinpath(self.path, norm))
+ if not newpath.startswith(self.path):
+ raise InsecurePath("%r is not a child of %s" % (newpath, self.path))
+ return self.clonePath(newpath)
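+
+ # Illustrative sketch, not part of the original module: child() refuses any
+ # segment that could escape the base directory by raising InsecurePath. The
+ # paths below are hypothetical.
+ #
+ #     base = FilePath('/home/myuser/public_html')
+ #     base.child('index.html')   # fine
+ #     base.child('a/b')          # raises InsecurePath (contains a separator)
+ #     base.child('..')           # raises InsecurePath (not a child)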
+
+ def preauthChild(self, path):
+ """
+ Use me if `path' might have slashes in it, but you know they're safe.
+
+ (NOT slashes at the beginning. It still needs to be a _child_).
+ """
+ newpath = abspath(joinpath(self.path, normpath(path)))
+ if not newpath.startswith(self.path):
+ raise InsecurePath("%s is not a child of %s" % (newpath, self.path))
+ return self.clonePath(newpath)
+
+ def childSearchPreauth(self, *paths):
+ """Return my first existing child with a name in 'paths'.
+
+ paths is expected to be a list of *pre-secured* path fragments; in most
+ cases this will be specified by a system administrator and not an
+ arbitrary user.
+
+ If no appropriately-named children exist, this will return None.
+ """
+ p = self.path
+ for child in paths:
+ jp = joinpath(p, child)
+ if exists(jp):
+ return self.clonePath(jp)
+
+ def siblingExtensionSearch(self, *exts):
+ """Attempt to return a path with my name, given multiple possible
+ extensions.
+
+ Each extension in exts will be tested and the first path which exists
+ will be returned. If no path exists, None will be returned. If '' is
+ in exts, then if the file referred to by this path exists, 'self' will
+ be returned.
+
+ The extension '*' has a magic meaning, which means "any path that
+ begins with self.path+'.' is acceptable".
+ """
+ p = self.path
+ for ext in exts:
+ if not ext and self.exists():
+ return self
+ if ext == '*':
+ basedot = basename(p)+'.'
+ for fn in listdir(dirname(p)):
+ if fn.startswith(basedot):
+ return self.clonePath(joinpath(dirname(p), fn))
+ p2 = p + ext
+ if exists(p2):
+ return self.clonePath(p2)
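+
+ # Illustrative sketch, not part of the original module: probing for a config
+ # file under several possible extensions; None is returned when nothing
+ # matches. The path and extensions are hypothetical.
+ #
+ #     conf = FilePath('/etc/hamper').siblingExtensionSearch('.conf', '.yaml', '')
+ #     if conf is not None:
+ #         print conf.path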
+
+
+ def realpath(self):
+ """
+ Returns the absolute target as a FilePath if self is a link, self
+ otherwise. The absolute target is the ultimate file or directory the
+ link refers to (for instance, if the link refers to another link, and
+ another...). If the filesystem does not support symlinks, or
+ if the link is cyclical, raises a LinkError.
+
+ Behaves like L{os.path.realpath} in that it does not resolve link
+ names in the middle (ex. /x/y/z, y is a link to w - realpath on z
+ will return /x/y/z, not /x/w/z).
+
+ @return: FilePath of the target path
+ @raises LinkError: if links are not supported or links are cyclical.
+ """
+ if self.islink():
+ result = os.path.realpath(self.path)
+ if result == self.path:
+ raise LinkError("Cyclical link - will loop forever")
+ return self.clonePath(result)
+ return self
+
+
+ def siblingExtension(self, ext):
+ return self.clonePath(self.path+ext)
+
+
+ def linkTo(self, linkFilePath):
+ """
+ Creates a symlink to self at the path given by the L{FilePath}
+ C{linkFilePath}. Only works on POSIX systems due to its dependence on
+ C{os.symlink}. Propagates C{OSError}s up from C{os.symlink} if
+ C{linkFilePath.parent()} does not exist, or C{linkFilePath} already
+ exists.
+
+ @param linkFilePath: a FilePath representing the link to be created
+ @type linkFilePath: L{FilePath}
+ """
+ os.symlink(self.path, linkFilePath.path)
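+
+ # Illustrative sketch, not part of the original module: creating a symlink
+ # and resolving it back with realpath(). POSIX only; the paths are
+ # hypothetical.
+ #
+ #     target = FilePath('/srv/app/releases/1.2')
+ #     link = FilePath('/srv/app/current')
+ #     target.linkTo(link)
+ #     print link.realpath().path   # '/srv/app/releases/1.2', assuming no
+ #                                  # other links along the way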
+
+
+ def open(self, mode='r'):
+ """
+ Open this file using C{mode} or for writing if C{alwaysCreate} is
+ C{True}.
+
+ In all cases the file is opened in binary mode, so it is not necessary
+ to include C{b} in C{mode}.
+
+ @param mode: The mode to open the file in. Default is C{r}.
+ @type mode: C{str}
+ @raises AssertionError: If C{a} is included in the mode and
+ C{alwaysCreate} is C{True}.
+ @rtype: C{file}
+ @return: An open C{file} object.
+ """
+ if self.alwaysCreate:
+ assert 'a' not in mode, ("Appending not supported when "
+ "alwaysCreate == True")
+ return self.create()
+ # This hack is necessary because of a bug in Python 2.7 on Windows:
+ # http://bugs.python.org/issue7686
+ mode = mode.replace('b', '')
+ return open(self.path, mode + 'b')
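+
+ # Illustrative sketch, not part of the original module: files are always
+ # opened in binary mode, so 'b' need not be included in the mode; it is
+ # stripped and re-added to work around the Python 2.7 Windows bug above.
+ # The path is hypothetical.
+ #
+ #     f = FilePath('/tmp/hamper.log').open('a')
+ #     f.write('started\n')
+ #     f.close()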
+
+ # stat methods below
+
+ def restat(self, reraise=True):
+ """
+ Re-calculate the cached results of C{stat}. Call this method to refresh
+ information about this path after you know the filesystem may have changed.
+
+ @param reraise: a boolean. If true, re-raise exceptions from
+ L{os.stat}; otherwise, mark this path as not existing, and remove any
+ cached stat information.
+ """
+ try:
+ self.statinfo = stat(self.path)
+ except OSError:
+ self.statinfo = 0
+ if reraise:
+ raise
+
+
+ def changed(self):
+ """
+ Clear any cached information about the state of this path on disk.
+
+ @since: 10.1.0
+ """
+ self.statinfo = None
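+
+ # Illustrative sketch, not part of the original module: stat results are
+ # cached, so after something else touches the file call changed() (or
+ # restat()) before querying again. The path is hypothetical and assumed to
+ # exist.
+ #
+ #     p = FilePath('/tmp/data.bin')
+ #     size = p.getsize()      # stats the file and caches the result
+ #     # ... some other process rewrites /tmp/data.bin ...
+ #     p.changed()             # drop the cached stat information
+ #     newSize = p.getsize()   # stats the file again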
+
+
+ def chmod(self, mode):
+ """
+ Changes the permissions on self, if possible. Propagates errors from
+ C{os.chmod} up.
+
+ @param mode: integer representing the new permissions desired (same as
+ the command line chmod)
+ @type mode: C{int}
+ """
+ os.chmod(self.path, mode)
+
+
+ def getsize(self):
+ st = self.statinfo
+ if not st:
+ self.restat()
+ st = self.statinfo
+ return st.st_size
+
+
+ def getModificationTime(self):
+ """
+ Retrieve the time of the last modification to this file.
+
+ @return: a number of seconds from the epoch.
+ @rtype: float
+ """
+ st = self.statinfo
+ if not st:
+ self.restat()
+ st = self.statinfo
+ return float(st.st_mtime)
+
+
+ def getStatusChangeTime(self):
+ """
+ Retrieve the time of the last status change for this file.
+
+ @return: a number of seconds from the epoch.
+ @rtype: float
+ """
+ st = self.statinfo
+ if not st:
+ self.restat()
+ st = self.statinfo
+ return float(st.st_ctime)
+
+
+ def getAccessTime(self):
+ """
+ Retrieve the time that this file was last accessed.
+
+ @return: a number of seconds from the epoch.
+ @rtype: float
+ """
+ st = self.statinfo
+ if not st:
+ self.restat()
+ st = self.statinfo
+ return float(st.st_atime)
+
+
+ def exists(self):
+ """
+ Check if the C{path} exists.
+
+ @return: C{True} if the stats of C{path} can be retrieved successfully,
+ C{False} otherwise.
+ @rtype: C{bool}
+ """
+ if self.statinfo:
+ return True
+ else:
+ self.restat(False)
+ if self.statinfo:
+ return True
+ else:
+ return False
+
+
+ def isdir(self):
+ st = self.statinfo
+ if not st:
+ self.restat(False)
+ st = self.statinfo
+ if not st:
+ return False
+ return S_ISDIR(st.st_mode)
+
+ def isfile(self):
+ st = self.statinfo
+ if not st:
+ self.restat(False)
+ st = self.statinfo
+ if not st:
+ return False
+ return S_ISREG(st.st_mode)
+
+ def islink(self):
+ # We can't use cached stat results here, because that is the stat of
+ # the destination - (see #1773) which in *every case* but this one is