Browse files

update code for hackathon

  • Loading branch information...
1 parent 4d699bb commit 0533bf5cc52bf19914736f31392fcaee19df7323 @gleitz gleitz committed Mar 26, 2012
Showing with 2,130 additions and 1,955 deletions.
  1. +1 −1 README.md
  2. +5 −5 app/templates/base.html
  3. 0 djangoappengine/LICENSE
  4. 0 djangoappengine/__init__.py
  5. +188 −178 djangoappengine/boot.py
  6. 0 djangoappengine/db/__init__.py
  7. +158 −231 djangoappengine/db/base.py
  8. +561 −502 djangoappengine/db/compiler.py
  9. +14 −13 djangoappengine/db/creation.py
  10. +12 −4 djangoappengine/db/db_settings.py
  11. +50 −20 djangoappengine/db/utils.py
  12. 0 djangoappengine/deferred/__init__.py
  13. +13 −7 djangoappengine/deferred/handler.py
  14. 0 djangoappengine/lib/__init__.py
  15. 0 djangoappengine/lib/memcache.py
  16. +28 −7 djangoappengine/mail.py
  17. +78 −0 djangoappengine/main/__init__.py
  18. +74 −94 djangoappengine/main/main.py
  19. 0 djangoappengine/management/__init__.py
  20. 0 djangoappengine/management/commands/__init__.py
  21. +67 −73 djangoappengine/management/commands/deploy.py
  22. +5 −2 djangoappengine/management/commands/remote.py
  23. +154 −93 djangoappengine/management/commands/runserver.py
  24. +0 −73 djangoappengine/management/commands/testserver.py
  25. 0 djangoappengine/models.py
  26. +69 −39 djangoappengine/settings_base.py
  27. 0 djangoappengine/storage.py
  28. +1 −0 djangoappengine/tests/__init__.py
  29. +1 −0 djangoappengine/tests/backend.py
  30. +13 −3 djangoappengine/tests/decimals.py
  31. +1 −1 djangoappengine/tests/field_db_conversion.py
  32. +87 −80 djangoappengine/tests/field_options.py
  33. +441 −426 djangoappengine/tests/filter.py
  34. 0 djangoappengine/tests/not_return_sets.py
  35. +0 −1 djangoappengine/tests/order.py
  36. +93 −89 djangoappengine/tests/testmodels.py
  37. +5 −3 djangoappengine/utils.py
  38. +1 −1 media/css/screen.css
  39. +10 −9 media/js/app.js
View
2 README.md
@@ -23,7 +23,7 @@ Getting started
1. Download the [Google App Engine Python SDK](http://code.google.com/appengine/downloads.html)
2. Test your app loads the test page
- * run `python dev_server.sh` (this defaults to running on port 80 which may require root permissions)
+ * run `sh dev_server.sh` (this defaults to running on port 80 which may require root permissions)
* open a web browser and navigate to [http://localhost](http://localhost)
* ensure that you see the success page
* now navigate to [http://localhost/gifts/](http://localhost/gifts/)
View
10 app/templates/base.html
@@ -10,11 +10,11 @@
</head>
<body class="{% block body_class %}{% endblock %}">
<div id="container">
- {% block content %}{% endblock %}
- </div>
+ {% block content %}{% endblock %}
+ </div>
<script src="/media/js/jquery-1.4.2.min.js"></script>
- <script src="/media/js/mustache.js"></script>
- <script src="/media/js/jquery.placeholder.min.js"></script>
- {% block scripts %}{% endblock %}
+ <script src="/media/js/mustache.js"></script>
+ <script src="/media/js/jquery.placeholder.min.js"></script>
+ {% block scripts %}{% endblock %}
</body>
</html>
View
0 djangoappengine/LICENSE 100755 → 100644
File mode changed.
View
0 djangoappengine/__init__.py 100755 → 100644
File mode changed.
View
366 djangoappengine/boot.py 100755 → 100644
@@ -1,178 +1,188 @@
-import logging
-import os
-import sys
-
-PROJECT_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
-
-# Overrides for os.environ
-env_ext = {'DJANGO_SETTINGS_MODULE': 'settings'}
-
-def setup_env():
- """Configures app engine environment for command-line apps."""
- # Try to import the appengine code from the system path.
- try:
- from google.appengine.api import apiproxy_stub_map
- except ImportError:
- for k in [k for k in sys.modules if k.startswith('google')]:
- del sys.modules[k]
-
- # Not on the system path. Build a list of alternative paths where it
- # may be. First look within the project for a local copy, then look for
- # where the Mac OS SDK installs it.
- paths = [os.path.join(PROJECT_DIR, '.google_appengine'),
- os.environ.get('APP_ENGINE_SDK'),
- '/usr/local/google_appengine',
- '/Applications/GoogleAppEngineLauncher.app/Contents/Resources/GoogleAppEngine-default.bundle/Contents/Resources/google_appengine']
- for path in os.environ.get('PATH', '').split(os.pathsep):
- path = path.rstrip(os.sep)
- if path.endswith('google_appengine'):
- paths.append(path)
- if os.name in ('nt', 'dos'):
- path = r'%(PROGRAMFILES)s\Google\google_appengine' % os.environ
- paths.append(path)
- # Loop through all possible paths and look for the SDK dir.
- sdk_path = None
- for path in paths:
- if not path:
- continue
- path = os.path.expanduser(path)
- path = os.path.realpath(path)
- if os.path.exists(path):
- sdk_path = path
- break
- if sdk_path is None:
- # The SDK could not be found in any known location.
- sys.stderr.write('The Google App Engine SDK could not be found!\n'
- "Make sure it's accessible via your PATH "
- "environment and called google_appengine.\n")
- sys.exit(1)
- # Add the SDK and the libraries within it to the system path.
- extra_paths = [sdk_path]
- lib = os.path.join(sdk_path, 'lib')
- # Automatically add all packages in the SDK's lib folder:
- for dir in os.listdir(lib):
- path = os.path.join(lib, dir)
- # Package can be under 'lib/<pkg>/<pkg>/' or 'lib/<pkg>/lib/<pkg>/'
- detect = (os.path.join(path, dir), os.path.join(path, 'lib', dir))
- for path in detect:
- if os.path.isdir(path) and not dir == 'django':
- extra_paths.append(os.path.dirname(path))
- break
- sys.path = extra_paths + sys.path
- from google.appengine.api import apiproxy_stub_map
-
- setup_project()
- from .utils import have_appserver
- if have_appserver:
- # App Engine's threading.local is broken
- setup_threading()
- setup_logging()
-
- if not have_appserver:
- # Patch Django to support loading management commands from zip files
- from django.core import management
- management.find_commands = find_commands
-
-def find_commands(management_dir):
- """
- Given a path to a management directory, returns a list of all the command
- names that are available.
- This version works for django deployments which are file based or
- contained in a ZIP (in sys.path).
-
- Returns an empty list if no commands are defined.
- """
- import pkgutil
- return [modname for importer, modname, ispkg in pkgutil.iter_modules(
- [os.path.join(management_dir, 'commands')]) if not ispkg]
-
-def setup_threading():
- # XXX: GAE's threading.local doesn't work correctly with subclassing
- try:
- from django.utils._threading_local import local
- import threading
- threading.local = local
- except ImportError:
- pass
-
-def setup_logging():
- # Fix Python 2.6 logging module
- logging.logMultiprocessing = 0
-
- # Enable logging
- level = logging.DEBUG
- from .utils import have_appserver
- if have_appserver:
- # We can't import settings at this point when running a normal
- # manage.py command because this module gets imported from settings.py
- from django.conf import settings
- if not settings.DEBUG:
- level = logging.INFO
- logging.getLogger().setLevel(level)
-
-def setup_project():
- from .utils import have_appserver, on_production_server
- if have_appserver:
- # This fixes a pwd import bug for os.path.expanduser()
- env_ext['HOME'] = PROJECT_DIR
-
- # The dev_appserver creates a sandbox which restricts access to certain
- # modules and builtins in order to emulate the production environment.
- # Here we get the subprocess module back into the dev_appserver sandbox.
- # This module is just too important for development.
- # Also we add the compiler/parser module back and enable https connections
- # (seem to be broken on Windows because the _ssl module is disallowed).
- if not have_appserver:
- from google.appengine.tools import dev_appserver
- try:
- # Backup os.environ. It gets overwritten by the dev_appserver,
- # but it's needed by the subprocess module.
- env = dev_appserver.DEFAULT_ENV
- dev_appserver.DEFAULT_ENV = os.environ.copy()
- dev_appserver.DEFAULT_ENV.update(env)
-            # Backup the buffer() builtin. The subprocess module in Python 2.5
-            # on Linux and OS X needs it, but the dev_appserver removes it.
- dev_appserver.buffer = buffer
- except AttributeError:
- logging.warn('Could not patch the default environment. '
- 'The subprocess module will not work correctly.')
-
- try:
- # Allow importing compiler/parser and _ssl modules (for https)
- dev_appserver.HardenedModulesHook._WHITE_LIST_C_MODULES.extend(
- ('parser', '_ssl'))
- except AttributeError:
- logging.warn('Could not patch modules whitelist. '
- 'The compiler and parser modules will not work and '
- 'SSL support is disabled.')
- elif not on_production_server:
- try:
- # Restore the real subprocess module
- from google.appengine.api.mail_stub import subprocess
- sys.modules['subprocess'] = subprocess
- # Re-inject the buffer() builtin into the subprocess module
- from google.appengine.tools import dev_appserver
- subprocess.buffer = dev_appserver.buffer
- except Exception, e:
- logging.warn('Could not add the subprocess module to the sandbox: %s' % e)
-
- os.environ.update(env_ext)
-
- extra_paths = [PROJECT_DIR, os.path.join(os.path.dirname(__file__), 'lib')]
- zip_packages_dir = os.path.join(PROJECT_DIR, 'zip-packages')
-
- # We support zipped packages in the common and project folders.
- if os.path.isdir(zip_packages_dir):
- for zip_package in os.listdir(zip_packages_dir):
- extra_paths.append(os.path.join(zip_packages_dir, zip_package))
-
- # App Engine causes main.py to be reloaded if an exception gets raised
- # on the first request of a main.py instance, so don't call setup_project()
- # multiple times. We ensure this indirectly by checking if we've already
-    # modified sys.path.
- if len(sys.path) < len(extra_paths) or \
- sys.path[:len(extra_paths)] != extra_paths:
- for path in extra_paths:
- while path in sys.path:
- sys.path.remove(path)
- sys.path = extra_paths + sys.path
+import logging
+import os
+import sys
+
+PROJECT_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
+DATA_ROOT = os.path.join(PROJECT_DIR, '.gaedata')
+
+# Overrides for os.environ
+env_ext = {'DJANGO_SETTINGS_MODULE': 'settings'}
+
+def setup_env():
+ """Configures app engine environment for command-line apps."""
+ # Try to import the appengine code from the system path.
+ try:
+ from google.appengine.api import apiproxy_stub_map
+ except ImportError:
+ for k in [k for k in sys.modules if k.startswith('google')]:
+ del sys.modules[k]
+
+ # Not on the system path. Build a list of alternative paths where it
+ # may be. First look within the project for a local copy, then look for
+ # where the Mac OS SDK installs it.
+ paths = [os.path.join(PROJECT_DIR, '.google_appengine'),
+ os.environ.get('APP_ENGINE_SDK'),
+ '/usr/local/google_appengine',
+ '/Applications/GoogleAppEngineLauncher.app/Contents/Resources/GoogleAppEngine-default.bundle/Contents/Resources/google_appengine']
+ for path in os.environ.get('PATH', '').split(os.pathsep):
+ path = path.rstrip(os.sep)
+ if path.endswith('google_appengine'):
+ paths.append(path)
+ if os.name in ('nt', 'dos'):
+ path = r'%(PROGRAMFILES)s\Google\google_appengine' % os.environ
+ paths.append(path)
+ # Loop through all possible paths and look for the SDK dir.
+ sdk_path = None
+ for path in paths:
+ if not path:
+ continue
+ path = os.path.expanduser(path)
+ path = os.path.realpath(path)
+ if os.path.exists(path):
+ sdk_path = path
+ break
+ if sdk_path is None:
+ # The SDK could not be found in any known location.
+ sys.stderr.write('The Google App Engine SDK could not be found!\n'
+ "Make sure it's accessible via your PATH "
+ "environment and called google_appengine.\n")
+ sys.exit(1)
+ # Add the SDK and the libraries within it to the system path.
+ extra_paths = [sdk_path]
+ lib = os.path.join(sdk_path, 'lib')
+ # Automatically add all packages in the SDK's lib folder:
+ for name in os.listdir(lib):
+ root = os.path.join(lib, name)
+ subdir = name
+ # Package can be under 'lib/<pkg>/<pkg>/' or 'lib/<pkg>/lib/<pkg>/'
+ detect = (os.path.join(root, subdir), os.path.join(root, 'lib', subdir))
+ for path in detect:
+ if os.path.isdir(path):
+ extra_paths.append(os.path.dirname(path))
+ break
+ else:
+ if name == 'webapp2':
+ extra_paths.append(root)
+ sys.path = extra_paths + sys.path
+ from google.appengine.api import apiproxy_stub_map
+
+ setup_project()
+ from .utils import have_appserver
+ if have_appserver:
+ # App Engine's threading.local is broken
+ setup_threading()
+ elif not os.path.exists(DATA_ROOT):
+ os.mkdir(DATA_ROOT)
+ setup_logging()
+
+ if not have_appserver:
+ # Patch Django to support loading management commands from zip files
+ from django.core import management
+ management.find_commands = find_commands
+
+def find_commands(management_dir):
+ """
+ Given a path to a management directory, returns a list of all the command
+ names that are available.
+ This version works for django deployments which are file based or
+ contained in a ZIP (in sys.path).
+
+ Returns an empty list if no commands are defined.
+ """
+ import pkgutil
+ return [modname for importer, modname, ispkg in pkgutil.iter_modules(
+ [os.path.join(management_dir, 'commands')]) if not ispkg]
+
+def setup_threading():
+ if sys.version_info >= (2, 7):
+ return
+ # XXX: On Python 2.5 GAE's threading.local doesn't work correctly with subclassing
+ try:
+ from django.utils._threading_local import local
+ import threading
+ threading.local = local
+ except ImportError:
+ pass
+
+def setup_logging():
+ # Fix Python 2.6 logging module
+ logging.logMultiprocessing = 0
+
+ # Enable logging
+ level = logging.DEBUG
+ from .utils import have_appserver
+ if have_appserver:
+ # We can't import settings at this point when running a normal
+ # manage.py command because this module gets imported from settings.py
+ from django.conf import settings
+ if not settings.DEBUG:
+ level = logging.INFO
+ logging.getLogger().setLevel(level)
+
+def setup_project():
+ from .utils import have_appserver, on_production_server
+ if have_appserver:
+ # This fixes a pwd import bug for os.path.expanduser()
+ env_ext['HOME'] = PROJECT_DIR
+
+ # The dev_appserver creates a sandbox which restricts access to certain
+ # modules and builtins in order to emulate the production environment.
+ # Here we get the subprocess module back into the dev_appserver sandbox.
+ # This module is just too important for development.
+ # Also we add the compiler/parser module back and enable https connections
+ # (seem to be broken on Windows because the _ssl module is disallowed).
+ if not have_appserver:
+ from google.appengine.tools import dev_appserver
+ try:
+ # Backup os.environ. It gets overwritten by the dev_appserver,
+ # but it's needed by the subprocess module.
+ env = dev_appserver.DEFAULT_ENV
+ dev_appserver.DEFAULT_ENV = os.environ.copy()
+ dev_appserver.DEFAULT_ENV.update(env)
+            # Backup the buffer() builtin. The subprocess module in Python 2.5
+            # on Linux and OS X needs it, but the dev_appserver removes it.
+ dev_appserver.buffer = buffer
+ except AttributeError:
+ logging.warn('Could not patch the default environment. '
+ 'The subprocess module will not work correctly.')
+
+ try:
+ # Allow importing compiler/parser, _ssl (for https),
+ # _io for Python 2.7 io support on OS X
+ dev_appserver.HardenedModulesHook._WHITE_LIST_C_MODULES.extend(
+ ('parser', '_ssl', '_io'))
+ except AttributeError:
+ logging.warn('Could not patch modules whitelist. '
+ 'The compiler and parser modules will not work and '
+ 'SSL support is disabled.')
+ elif not on_production_server:
+ try:
+ # Restore the real subprocess module
+ from google.appengine.api.mail_stub import subprocess
+ sys.modules['subprocess'] = subprocess
+ # Re-inject the buffer() builtin into the subprocess module
+ from google.appengine.tools import dev_appserver
+ subprocess.buffer = dev_appserver.buffer
+ except Exception, e:
+ logging.warn('Could not add the subprocess module to the sandbox: %s' % e)
+
+ os.environ.update(env_ext)
+
+ extra_paths = [PROJECT_DIR, os.path.join(os.path.dirname(__file__), 'lib')]
+ zip_packages_dir = os.path.join(PROJECT_DIR, 'zip-packages')
+
+ # We support zipped packages in the common and project folders.
+ if os.path.isdir(zip_packages_dir):
+ for zip_package in os.listdir(zip_packages_dir):
+ extra_paths.append(os.path.join(zip_packages_dir, zip_package))
+
+ # App Engine causes main.py to be reloaded if an exception gets raised
+ # on the first request of a main.py instance, so don't call setup_project()
+ # multiple times. We ensure this indirectly by checking if we've already
+    # modified sys.path.
+ if len(sys.path) < len(extra_paths) or \
+ sys.path[:len(extra_paths)] != extra_paths:
+ for path in extra_paths:
+ while path in sys.path:
+ sys.path.remove(path)
+ sys.path = extra_paths + sys.path
View
0 djangoappengine/db/__init__.py 100755 → 100644
File mode changed.
View
389 djangoappengine/db/base.py 100755 → 100644
@@ -1,231 +1,158 @@
-from ..utils import appid, have_appserver, on_production_server
-from .creation import DatabaseCreation
-from django.db.backends.util import format_number
-from djangotoolbox.db.base import NonrelDatabaseFeatures, \
- NonrelDatabaseOperations, NonrelDatabaseWrapper, NonrelDatabaseClient, \
- NonrelDatabaseValidation, NonrelDatabaseIntrospection
-from urllib2 import HTTPError, URLError
-import logging
-import os
-import time
-
-REMOTE_API_SCRIPT = '$PYTHON_LIB/google/appengine/ext/remote_api/handler.py'
-
-def auth_func():
- import getpass
- return raw_input('Login via Google Account: '), getpass.getpass('Password: ')
-
-def rpc_server_factory(*args, ** kwargs):
- from google.appengine.tools import appengine_rpc
- kwargs['save_cookies'] = True
- return appengine_rpc.HttpRpcServer(*args, ** kwargs)
-
-def get_datastore_paths(options):
- """Returns a tuple with the path to the datastore and history file.
-
- The datastore is stored in the same location as dev_appserver uses by
- default, but the name is altered to be unique to this project so multiple
- Django projects can be developed on the same machine in parallel.
-
- Returns:
- (datastore_path, history_path)
- """
- from google.appengine.tools import dev_appserver_main
- datastore_path = options.get('datastore_path',
- dev_appserver_main.DEFAULT_ARGS['datastore_path'].replace(
- 'dev_appserver', 'django_%s' % appid))
- blobstore_path = options.get('blobstore_path',
- dev_appserver_main.DEFAULT_ARGS['blobstore_path'].replace(
- 'dev_appserver', 'django_%s' % appid))
- history_path = options.get('history_path',
- dev_appserver_main.DEFAULT_ARGS['history_path'].replace(
- 'dev_appserver', 'django_%s' % appid))
- return datastore_path, blobstore_path, history_path
-
-def get_test_datastore_paths(inmemory=True):
- """Returns a tuple with the path to the test datastore and history file.
-
- If inmemory is true, (None, None) is returned to request an in-memory
- datastore. If inmemory is false the path returned will be similar to the path
- returned by get_datastore_paths but with a different name.
-
- Returns:
- (datastore_path, history_path)
- """
- if inmemory:
- return None, None, None
- datastore_path, blobstore_path, history_path = get_datastore_paths()
- datastore_path = datastore_path.replace('.datastore', '.testdatastore')
- blobstore_path = blobstore_path.replace('.blobstore', '.testblobstore')
- history_path = history_path.replace('.datastore', '.testdatastore')
- return datastore_path, blobstore_path, history_path
-
-def destroy_datastore(*args):
- """Destroys the appengine datastore at the specified paths."""
- for path in args:
- if not path:
- continue
- try:
- os.remove(path)
- except OSError, error:
- if error.errno != 2:
- logging.error("Failed to clear datastore: %s" % error)
-
-class DatabaseFeatures(NonrelDatabaseFeatures):
- allows_primary_key_0 = True
- supports_dicts = True
-
-class DatabaseOperations(NonrelDatabaseOperations):
- compiler_module = __name__.rsplit('.', 1)[0] + '.compiler'
-
- DEFAULT_MAX_DIGITS = 16
- def value_to_db_decimal(self, value, max_digits, decimal_places):
- if value is None:
- return None
- sign = value < 0 and u'-' or u''
- if sign:
- value = abs(value)
- if max_digits is None:
- max_digits = self.DEFAULT_MAX_DIGITS
-
- if decimal_places is None:
- value = unicode(value)
- else:
- value = format_number(value, max_digits, decimal_places)
- decimal_places = decimal_places or 0
- n = value.find('.')
-
- if n < 0:
- n = len(value)
- if n < max_digits - decimal_places:
- value = u"0" * (max_digits - decimal_places - n) + value
- return sign + value
-
- def sql_flush(self, style, tables, sequences):
- self.connection.flush()
- return []
-
-class DatabaseClient(NonrelDatabaseClient):
- pass
-
-class DatabaseValidation(NonrelDatabaseValidation):
- pass
-
-class DatabaseIntrospection(NonrelDatabaseIntrospection):
- pass
-
-class DatabaseWrapper(NonrelDatabaseWrapper):
- def __init__(self, *args, **kwds):
- super(DatabaseWrapper, self).__init__(*args, **kwds)
- self.features = DatabaseFeatures(self)
- self.ops = DatabaseOperations(self)
- self.client = DatabaseClient(self)
- self.creation = DatabaseCreation(self)
- self.validation = DatabaseValidation(self)
- self.introspection = DatabaseIntrospection(self)
- options = self.settings_dict
- self.use_test_datastore = False
- self.test_datastore_inmemory = True
- self.remote = options.get('REMOTE', False)
- if on_production_server:
- self.remote = False
- self.remote_app_id = options.get('REMOTE_APP_ID', appid)
- self.remote_api_path = options.get('REMOTE_API_PATH', None)
- self.secure_remote_api = options.get('SECURE_REMOTE_API', True)
- self._setup_stubs()
-
- def _get_paths(self):
- if self.use_test_datastore:
- return get_test_datastore_paths(self.test_datastore_inmemory)
- else:
- return get_datastore_paths(self.settings_dict)
-
- def _setup_stubs(self):
- # If this code is being run without an appserver (eg. via a django
- # commandline flag) then setup a default stub environment.
- if not have_appserver:
- from google.appengine.tools import dev_appserver_main
- args = dev_appserver_main.DEFAULT_ARGS.copy()
- args['datastore_path'], args['blobstore_path'], args['history_path'] = self._get_paths()
- from google.appengine.tools import dev_appserver
- dev_appserver.SetupStubs(appid, **args)
- # If we're supposed to set up the remote_api, do that now.
- if self.remote:
- self.setup_remote()
-
- def setup_remote(self):
- if not self.remote_api_path:
- from ..utils import appconfig
- for handler in appconfig.handlers:
- if handler.script == REMOTE_API_SCRIPT:
- self.remote_api_path = handler.url
- break
- self.remote = True
- remote_url = 'https://%s.appspot.com%s' % (self.remote_app_id,
- self.remote_api_path)
- logging.info('Setting up remote_api for "%s" at %s' %
- (self.remote_app_id, remote_url))
- if not have_appserver:
- print 'Connecting to remote_api handler'
- from google.appengine.ext.remote_api import remote_api_stub
- remote_api_stub.ConfigureRemoteApi(self.remote_app_id,
- self.remote_api_path, auth_func, secure=self.secure_remote_api,
- rpc_server_factory=rpc_server_factory)
- retry_delay = 1
- while retry_delay <= 16:
- try:
- remote_api_stub.MaybeInvokeAuthentication()
- except HTTPError, e:
- if not have_appserver:
- print 'Retrying in %d seconds...' % retry_delay
- time.sleep(retry_delay)
- retry_delay *= 2
- else:
- break
- else:
- try:
- remote_api_stub.MaybeInvokeAuthentication()
- except HTTPError, e:
- raise URLError("%s\n"
- "Couldn't reach remote_api handler at %s.\n"
- "Make sure you've deployed your project and "
- "installed a remote_api handler in app.yaml."
- % (e, remote_url))
- logging.info('Now using the remote datastore for "%s" at %s' %
- (self.remote_app_id, remote_url))
-
- def flush(self):
- """Helper function to remove the current datastore and re-open the stubs"""
- if self.remote:
- import random, string
- code = ''.join([random.choice(string.ascii_letters) for x in range(4)])
- print '\n\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
- print '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
- print "Warning! You're about to delete the *production* datastore!"
- print 'Only models defined in your INSTALLED_APPS can be removed!'
- print 'If you want to clear the whole datastore you have to use the ' \
- 'datastore viewer in the dashboard. Also, in order to delete all '\
- 'unneeded indexes you have to run appcfg.py vacuum_indexes.'
- print 'In order to proceed you have to enter the following code:'
- print code
- response = raw_input('Repeat: ')
- if code == response:
- print 'Deleting...'
- from django.db import models
- from google.appengine.api import datastore as ds
- for model in models.get_models():
- print 'Deleting %s...' % model._meta.db_table
- while True:
- data = ds.Query(model._meta.db_table, keys_only=True).Get(200)
- if not data:
- break
- ds.Delete(data)
- print "Datastore flushed! Please check your dashboard's " \
- 'datastore viewer for any remaining entities and remove ' \
- 'all unneeded indexes with manage.py vacuum_indexes.'
- else:
- print 'Aborting'
- exit()
- else:
- destroy_datastore(*self._get_paths())
- self._setup_stubs()
+from ..boot import DATA_ROOT
+from ..utils import appid, on_production_server
+from .creation import DatabaseCreation
+from .stubs import stub_manager
+from django.db.backends.util import format_number
+from djangotoolbox.db.base import NonrelDatabaseFeatures, \
+ NonrelDatabaseOperations, NonrelDatabaseWrapper, NonrelDatabaseClient, \
+ NonrelDatabaseValidation, NonrelDatabaseIntrospection
+from google.appengine.ext.db.metadata import get_kinds, get_namespaces
+from google.appengine.api.datastore import Query, Delete
+from google.appengine.api.namespace_manager import set_namespace
+import logging
+import os
+import shutil
+
+DATASTORE_PATHS = {
+ 'datastore_path': os.path.join(DATA_ROOT, 'datastore'),
+ 'blobstore_path': os.path.join(DATA_ROOT, 'blobstore'),
+ #'rdbms_sqlite_path': os.path.join(DATA_ROOT, 'rdbms'),
+ 'prospective_search_path': os.path.join(DATA_ROOT, 'prospective-search'),
+}
+
+def get_datastore_paths(options):
+ paths = {}
+ for key, path in DATASTORE_PATHS.items():
+ paths[key] = options.get(key, path)
+ return paths
+
+def destroy_datastore(paths):
+ """Destroys the appengine datastore at the specified paths."""
+ for path in paths.values():
+ if not path:
+ continue
+ try:
+ if os.path.isdir(path):
+ shutil.rmtree(path)
+ else:
+ os.remove(path)
+ except OSError, error:
+ if error.errno != 2:
+ logging.error("Failed to clear datastore: %s" % error)
+
+class DatabaseFeatures(NonrelDatabaseFeatures):
+ allows_primary_key_0 = True
+ supports_dicts = True
+
+class DatabaseOperations(NonrelDatabaseOperations):
+ compiler_module = __name__.rsplit('.', 1)[0] + '.compiler'
+
+ DEFAULT_MAX_DIGITS = 16
+
+ def value_to_db_decimal(self, value, max_digits, decimal_places):
+ if value is None:
+ return None
+
+ if value.is_signed():
+ sign = u'-'
+ value = abs(value)
+ else:
+ sign = u''
+
+ if max_digits is None:
+ max_digits = self.DEFAULT_MAX_DIGITS
+
+ if decimal_places is None:
+ value = unicode(value)
+ else:
+ value = format_number(value, max_digits, decimal_places)
+ decimal_places = decimal_places or 0
+ n = value.find('.')
+
+ if n < 0:
+ n = len(value)
+ if n < max_digits - decimal_places:
+ value = u"0" * (max_digits - decimal_places - n) + value
+ return sign + value
+
+ def sql_flush(self, style, tables, sequences):
+ self.connection.flush()
+ return []
+
+class DatabaseClient(NonrelDatabaseClient):
+ pass
+
+class DatabaseValidation(NonrelDatabaseValidation):
+ pass
+
+class DatabaseIntrospection(NonrelDatabaseIntrospection):
+ def table_names(self):
+ """Returns a list of names of all tables that exist in the database."""
+ return [kind.key().name() for kind in Query(kind='__kind__').Run()]
+
+class DatabaseWrapper(NonrelDatabaseWrapper):
+ def __init__(self, *args, **kwds):
+ super(DatabaseWrapper, self).__init__(*args, **kwds)
+ self.features = DatabaseFeatures(self)
+ self.ops = DatabaseOperations(self)
+ self.client = DatabaseClient(self)
+ self.creation = DatabaseCreation(self)
+ self.validation = DatabaseValidation(self)
+ self.introspection = DatabaseIntrospection(self)
+ options = self.settings_dict
+ self.remote_app_id = options.get('REMOTE_APP_ID', appid)
+ self.domain = options.get('DOMAIN', 'appspot.com')
+ self.remote_api_path = options.get('REMOTE_API_PATH', None)
+ self.secure_remote_api = options.get('SECURE_REMOTE_API', True)
+
+ remote = options.get('REMOTE', False)
+ if on_production_server:
+ remote = False
+ if remote:
+ stub_manager.setup_remote_stubs(self)
+ else:
+ stub_manager.setup_stubs(self)
+
+ def flush(self):
+ """Helper function to remove the current datastore and re-open the stubs"""
+ if stub_manager.active_stubs == 'remote':
+ import random
+ import string
+ code = ''.join([random.choice(string.ascii_letters) for x in range(4)])
+ print '\n\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
+ print '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
+ print "Warning! You're about to delete the *production* datastore!"
+ print 'Only models defined in your INSTALLED_APPS can be removed!'
+ print 'If you want to clear the whole datastore you have to use the ' \
+ 'datastore viewer in the dashboard. Also, in order to delete all '\
+ 'unneeded indexes you have to run appcfg.py vacuum_indexes.'
+ print 'In order to proceed you have to enter the following code:'
+ print code
+ response = raw_input('Repeat: ')
+ if code == response:
+ print 'Deleting...'
+ delete_all_entities()
+ print "Datastore flushed! Please check your dashboard's " \
+ 'datastore viewer for any remaining entities and remove ' \
+ 'all unneeded indexes with appcfg.py vacuum_indexes.'
+ else:
+ print 'Aborting'
+ exit()
+ elif stub_manager.active_stubs == 'test':
+ stub_manager.deactivate_test_stubs()
+ stub_manager.activate_test_stubs()
+ else:
+ destroy_datastore(get_datastore_paths(self.settings_dict))
+ stub_manager.setup_local_stubs(self)
+
+def delete_all_entities():
+ for namespace in get_namespaces():
+ set_namespace(namespace)
+ for kind in get_kinds():
+ if kind.startswith('__'):
+ continue
+ while True:
+ data = Query(kind=kind, keys_only=True).Get(200)
+ if not data:
+ break
+ Delete(data)
View
1,063 djangoappengine/db/compiler.py 100755 → 100644
@@ -1,502 +1,561 @@
-from .db_settings import get_indexes
-
-import datetime
-import sys
-
-from django.db.models.sql import aggregates as sqlaggregates
-from django.db.models.sql.constants import LOOKUP_SEP, MULTI, SINGLE
-from django.db.models.sql.where import AND, OR
-from django.db.utils import DatabaseError, IntegrityError
-from django.utils.tree import Node
-
-from functools import wraps
-
-from google.appengine.api.datastore import Entity, Query, MultiQuery, \
- Put, Get, Delete, Key
-from google.appengine.api.datastore_errors import Error as GAEError
-from google.appengine.api.datastore_types import Text, Category, Email, Link, \
- PhoneNumber, PostalAddress, Text, Blob, ByteString, GeoPt, IM, Key, \
- Rating, BlobKey
-
-from djangotoolbox.db.basecompiler import NonrelQuery, NonrelCompiler, \
- NonrelInsertCompiler, NonrelUpdateCompiler, NonrelDeleteCompiler
-
-import cPickle as pickle
-
-import decimal
-
-# Valid query types (a dictionary is used for speedy lookups).
-OPERATORS_MAP = {
- 'exact': '=',
- 'gt': '>',
- 'gte': '>=',
- 'lt': '<',
- 'lte': '<=',
-
- # The following operators are supported with special code below:
- 'isnull': None,
- 'in': None,
- 'startswith': None,
- 'range': None,
- 'year': None,
-}
-
-NEGATION_MAP = {
- 'gt': '<=',
- 'gte': '<',
- 'lt': '>=',
- 'lte': '>',
- # TODO: support these filters
- #'exact': '!=', # this might actually become individual '<' and '>' queries
-}
-
-def safe_call(func):
- @wraps(func)
- def _func(*args, **kwargs):
- try:
- return func(*args, **kwargs)
- except GAEError, e:
- raise DatabaseError, DatabaseError(str(e)), sys.exc_info()[2]
- return _func
-
-class GAEQuery(NonrelQuery):
- # ----------------------------------------------
- # Public API
- # ----------------------------------------------
- def __init__(self, compiler, fields):
- super(GAEQuery, self).__init__(compiler, fields)
- self.inequality_field = None
- self.pk_filters = None
- self.excluded_pks = ()
- self.has_negated_exact_filter = False
- self.ordering = ()
- self.gae_ordering = []
- pks_only = False
- if len(fields) == 1 and fields[0].primary_key:
- pks_only = True
- self.db_table = self.query.get_meta().db_table
- self.pks_only = pks_only
- start_cursor = getattr(self.query, '_gae_start_cursor', None)
- end_cursor = getattr(self.query, '_gae_end_cursor', None)
- self.gae_query = [Query(self.db_table, keys_only=self.pks_only,
- cursor=start_cursor, end_cursor=end_cursor)]
-
- # This is needed for debugging
- def __repr__(self):
- return '<GAEQuery: %r ORDER %r>' % (self.gae_query, self.ordering)
-
- @safe_call
- def fetch(self, low_mark, high_mark):
- query = self._build_query()
- executed = False
- if self.excluded_pks and high_mark is not None:
- high_mark += len(self.excluded_pks)
- if self.pk_filters is not None:
- results = self.get_matching_pk(low_mark, high_mark)
- else:
- if high_mark is None:
- kw = {}
- if low_mark:
- kw['offset'] = low_mark
- results = query.Run(**kw)
- executed = True
- elif high_mark > low_mark:
- results = query.Get(high_mark - low_mark, low_mark)
- executed = True
- else:
- results = ()
-
- for entity in results:
- if isinstance(entity, Key):
- key = entity
- else:
- key = entity.key()
- if key in self.excluded_pks:
- continue
- yield self._make_entity(entity)
-
- if executed and not isinstance(query, MultiQuery):
- self.query._gae_cursor = query.GetCompiledCursor()
-
- @safe_call
- def count(self, limit=None):
- if self.pk_filters is not None:
- return len(self.get_matching_pk(0, limit))
- if self.excluded_pks:
- return len(list(self.fetch(0, 2000)))
- kw = {}
- if limit is not None:
- kw['limit'] = limit
- return self._build_query().Count(**kw)
-
- @safe_call
- def delete(self):
- if self.pk_filters is not None:
- keys = [key for key in self.pk_filters if key is not None]
- else:
- keys = self.fetch()
- if keys:
- Delete(keys)
-
- @safe_call
- def order_by(self, ordering):
- self.ordering = ordering
- for order in self.ordering:
- if order.startswith('-'):
- order, direction = order[1:], Query.DESCENDING
- else:
- direction = Query.ASCENDING
- if order == self.query.get_meta().pk.column:
- order = '__key__'
- self.gae_ordering.append((order, direction))
-
- # This function is used by the default add_filters() implementation
- @safe_call
- def add_filter(self, column, lookup_type, negated, db_type, value):
- if value in ([], ()):
- self.pk_filters = []
- return
-
- # Emulated/converted lookups
- if column == self.query.get_meta().pk.column:
- column = '__key__'
- db_table = self.query.get_meta().db_table
- if lookup_type in ('exact', 'in'):
- # Optimization: batch-get by key
- if self.pk_filters is not None:
- raise DatabaseError("You can't apply multiple AND filters "
- "on the primary key. "
- "Did you mean __in=[...]?")
- if not isinstance(value, (tuple, list)):
- value = [value]
- pks = [create_key(db_table, pk) for pk in value if pk]
- if negated:
- self.excluded_pks = pks
- else:
- self.pk_filters = pks
- return
- else:
- # XXX: set db_type to 'gae_key' in order to allow
- # convert_value_for_db to recognize the value to be a Key and
- # not a str. Otherwise the key would be converted back to a
- # unicode (see convert_value_for_db)
- db_type = 'gae_key'
- key_type_error = 'Lookup values on primary keys have to be' \
- 'a string or an integer.'
- if lookup_type == 'range':
- if isinstance(value,(list, tuple)) and not(isinstance(
- value[0], (basestring, int, long)) and \
- isinstance(value[1], (basestring, int, long))):
- raise DatabaseError(key_type_error)
- elif not isinstance(value,(basestring, int, long)):
- raise DatabaseError(key_type_error)
- # for lookup type range we have to deal with a list
- if lookup_type == 'range':
- value[0] = create_key(db_table, value[0])
- value[1] = create_key(db_table, value[1])
- else:
- value = create_key(db_table, value)
- if lookup_type not in OPERATORS_MAP:
- raise DatabaseError("Lookup type %r isn't supported" % lookup_type)
-
- # We check for negation after lookup_type isnull because it
- # simplifies the code. All following lookup_type checks assume
- # that they're not negated.
- if lookup_type == 'isnull':
- if (negated and value) or not value:
- # TODO/XXX: is everything greater than None?
- op = '>'
- else:
- op = '='
- value = None
- elif negated and lookup_type == 'exact':
- if self.has_negated_exact_filter:
- raise DatabaseError("You can't exclude more than one __exact "
- "filter")
- self.has_negated_exact_filter = True
- self._combine_filters(column, db_type,
- (('<', value), ('>', value)))
- return
- elif negated:
- try:
- op = NEGATION_MAP[lookup_type]
- except KeyError:
- raise DatabaseError("Lookup type %r can't be negated" % lookup_type)
- if self.inequality_field and column != self.inequality_field:
- raise DatabaseError("Can't have inequality filters on multiple "
- "columns (here: %r and %r)" % (self.inequality_field, column))
- self.inequality_field = column
- elif lookup_type == 'in':
- # Create sub-query combinations, one for each value
- if len(self.gae_query) * len(value) > 30:
- raise DatabaseError("You can't query against more than "
- "30 __in filter value combinations")
- op_values = [('=', v) for v in value]
- self._combine_filters(column, db_type, op_values)
- return
- elif lookup_type == 'startswith':
- self._add_filter(column, '>=', db_type, value)
- if isinstance(value, str):
- value = value.decode('utf8')
- if isinstance(value, Key):
- value = list(value.to_path())
- if isinstance(value[-1], str):
- value[-1] = value[-1].decode('utf8')
- value[-1] += u'\ufffd'
- value = Key.from_path(*value)
- else:
- value += u'\ufffd'
- self._add_filter(column, '<=', db_type, value)
- return
- elif lookup_type in ('range', 'year'):
- self._add_filter(column, '>=', db_type, value[0])
- op = '<=' if lookup_type == 'range' else '<'
- self._add_filter(column, op, db_type, value[1])
- return
- else:
- op = OPERATORS_MAP[lookup_type]
-
- self._add_filter(column, op, db_type, value)
-
- # ----------------------------------------------
- # Internal API
- # ----------------------------------------------
- def _add_filter(self, column, op, db_type, value):
- for query in self.gae_query:
- key = '%s %s' % (column, op)
- value = self.convert_value_for_db(db_type, value)
- if isinstance(value, Text):
- raise DatabaseError('TextField is not indexed, by default, '
- "so you can't filter on it. "
- 'Please add an index definition for the '
- 'column "%s" as described here:\n'
- 'http://www.allbuttonspressed.com/blog/django/2010/07/Managing-per-field-indexes-on-App-Engine'
- % column)
- if key in query:
- existing_value = query[key]
- if isinstance(existing_value, list):
- existing_value.append(value)
- else:
- query[key] = [existing_value, value]
- else:
- query[key] = value
-
- def _combine_filters(self, column, db_type, op_values):
- gae_query = self.gae_query
- combined = []
- for query in gae_query:
- for op, value in op_values:
- self.gae_query = [Query(self.db_table,
- keys_only=self.pks_only)]
- self.gae_query[0].update(query)
- self._add_filter(column, op, db_type, value)
- combined.append(self.gae_query[0])
- self.gae_query = combined
-
- def _make_entity(self, entity):
- if isinstance(entity, Key):
- key = entity
- entity = {}
- else:
- key = entity.key()
-
- entity[self.query.get_meta().pk.column] = key
- return entity
-
- @safe_call
- def _build_query(self):
- if len(self.gae_query) > 1:
- return MultiQuery(self.gae_query, self.gae_ordering)
- query = self.gae_query[0]
- query.Order(*self.gae_ordering)
- return query
-
- def get_matching_pk(self, low_mark=0, high_mark=None):
- if not self.pk_filters:
- return []
-
- results = [result for result in Get(self.pk_filters)
- if result is not None and
- self.matches_filters(result)]
- if self.ordering:
- results.sort(cmp=self.order_pk_filtered)
- if high_mark is not None and high_mark < len(results) - 1:
- results = results[:high_mark]
- if low_mark:
- results = results[low_mark:]
- return results
-
- def order_pk_filtered(self, lhs, rhs):
- left = dict(lhs)
- left[self.query.get_meta().pk.column] = lhs.key().to_path()
- right = dict(rhs)
- right[self.query.get_meta().pk.column] = rhs.key().to_path()
- return self._order_in_memory(left, right)
-
- def matches_filters(self, entity):
- item = dict(entity)
- pk = self.query.get_meta().pk
- value = self.convert_value_from_db(pk.db_type(connection=self.connection),
- entity.key())
- item[pk.column] = value
- result = self._matches_filters(item, self.query.where)
- return result
-
-class SQLCompiler(NonrelCompiler):
- """
- A simple App Engine query: no joins, no distinct, etc.
- """
- query_class = GAEQuery
-
- def convert_value_from_db(self, db_type, value):
- if isinstance(value, (list, tuple, set)) and \
- db_type.startswith(('ListField:', 'SetField:')):
- db_sub_type = db_type.split(':', 1)[1]
- value = [self.convert_value_from_db(db_sub_type, subvalue)
- for subvalue in value]
-
- if db_type.startswith('SetField:') and value is not None:
- value = set(value)
-
- if db_type.startswith('DictField:') and value is not None:
- value = pickle.loads(value)
- if ':' in db_type:
- db_sub_type = db_type.split(':', 1)[1]
- value = dict((key, self.convert_value_from_db(db_sub_type, value[key]))
- for key in value)
-
- # the following GAE database types are all unicode subclasses, cast them
- # to unicode so they appear like pure unicode instances for django
- if isinstance(value, basestring) and value and db_type.startswith('decimal'):
- value = decimal.Decimal(value)
- elif isinstance(value, (Category, Email, Link, PhoneNumber, PostalAddress,
- Text, unicode)):
- value = unicode(value)
- elif isinstance(value, Blob):
- value = str(value)
- elif isinstance(value, str):
- # always retrieve strings as unicode (it is possible that old datasets
- # contain non unicode strings, nevertheless work with unicode ones)
- value = value.decode('utf-8')
- elif isinstance(value, Key):
- # for now we do not support KeyFields thus a Key has to be the own
- # primary key
- # TODO: GAE: support parents via GAEKeyField
- assert value.parent() is None, "Parents are not yet supported!"
- if db_type == 'integer':
- if value.id() is None:
- raise DatabaseError('Wrong type for Key. Expected integer, found'
- 'None')
- else:
- value = value.id()
- elif db_type == 'text':
- if value.name() is None:
- raise DatabaseError('Wrong type for Key. Expected string, found'
- 'None')
- else:
- value = value.name()
- else:
- raise DatabaseError("%s fields cannot be keys on GAE" % db_type)
- elif db_type == 'date' and isinstance(value, datetime.datetime):
- value = value.date()
- elif db_type == 'time' and isinstance(value, datetime.datetime):
- value = value.time()
- return value
-
- def convert_value_for_db(self, db_type, value):
- if isinstance(value, unicode):
- value = unicode(value)
- elif isinstance(value, str):
- value = str(value)
- elif isinstance(value, (list, tuple, set)) and \
- db_type.startswith(('ListField:', 'SetField:')):
- db_sub_type = db_type.split(':', 1)[1]
- value = [self.convert_value_for_db(db_sub_type, subvalue)
- for subvalue in value]
- elif isinstance(value, decimal.Decimal) and db_type.startswith("decimal:"):
- value = self.connection.ops.value_to_db_decimal(value, *eval(db_type[8:]))
- elif isinstance(value, dict) and db_type.startswith('DictField:'):
- if ':' in db_type:
- db_sub_type = db_type.split(':', 1)[1]
- value = dict([(key, self.convert_value_for_db(db_sub_type, value[key]))
- for key in value])
- value = Blob(pickle.dumps(value))
-
- if db_type == 'gae_key':
- return value
- elif db_type == 'longtext':
- # long text fields cannot be indexed on GAE so use GAE's database
- # type Text
- value = Text((isinstance(value, str) and value.decode('utf-8')) or value)
- elif db_type == 'text':
- value = (isinstance(value, str) and value.decode('utf-8')) or value
- elif db_type == 'blob':
- value = Blob(value)
- elif type(value) is str:
- # always store unicode strings
- value = value.decode('utf-8')
- elif db_type == 'date' or db_type == 'time' or db_type == 'datetime':
- # here we have to check the db_type because GAE always stores datetimes
- value = to_datetime(value)
- return value
-
-class SQLInsertCompiler(NonrelInsertCompiler, SQLCompiler):
- @safe_call
- def insert(self, data, return_id=False):
- gae_data = {}
- opts = self.query.get_meta()
- indexes = get_indexes().get(self.query.model, {})
- unindexed_fields = indexes.get('unindexed', ())
- unindexed_cols = [opts.get_field(name).column
- for name in unindexed_fields]
- kwds = {'unindexed_properties': unindexed_cols}
- for column, value in data.items():
- if column == opts.pk.column:
- if isinstance(value, basestring):
- kwds['name'] = value
- else:
- kwds['id'] = value
- elif isinstance(value, (tuple, list)) and not len(value):
- # gae does not store emty lists (and even does not allow passing empty
- # lists to Entity.update) so skip them
- continue
- else:
- gae_data[column] = value
-
- entity = Entity(self.query.get_meta().db_table, **kwds)
- entity.update(gae_data)
- key = Put(entity)
- return key.id_or_name()
-
-class SQLUpdateCompiler(NonrelUpdateCompiler, SQLCompiler):
- pass
-
-class SQLDeleteCompiler(NonrelDeleteCompiler, SQLCompiler):
- pass
-
-def to_datetime(value):
- """Convert a time or date to a datetime for datastore storage.
-
- Args:
- value: A datetime.time, datetime.date or string object.
-
- Returns:
- A datetime object with date set to 1970-01-01 if value is a datetime.time
- A datetime object with date set to value.year - value.month - value.day and
- time set to 0:00 if value is a datetime.date
- """
-
- if value is None:
- return value
- elif isinstance(value, datetime.datetime):
- return value
- elif isinstance(value, datetime.date):
- return datetime.datetime(value.year, value.month, value.day)
- elif isinstance(value, datetime.time):
- return datetime.datetime(1970, 1, 1, value.hour, value.minute,
- value.second, value.microsecond)
-
-def create_key(db_table, value):
- if isinstance(value, (int, long)) and value < 1:
- return None
- return Key.from_path(db_table, value)
+from .db_settings import get_model_indexes
+from .utils import commit_locked
+from .expressions import ExpressionEvaluator
+
+import datetime
+import sys
+
+from django.db.models.sql import aggregates as sqlaggregates
+from django.db.models.sql.constants import LOOKUP_SEP, MULTI, SINGLE
+from django.db.models.sql.where import AND, OR
+from django.db.utils import DatabaseError, IntegrityError
+from django.utils.tree import Node
+
+from functools import wraps
+
+from google.appengine.api.datastore import Entity, Query, MultiQuery, \
+ Put, Get, Delete, Key
+from google.appengine.api.datastore_errors import Error as GAEError
+from google.appengine.api.datastore_types import Text, Category, Email, Link, \
+ PhoneNumber, PostalAddress, Text, Blob, ByteString, GeoPt, IM, Key, \
+ Rating, BlobKey
+
+from djangotoolbox.db.basecompiler import NonrelQuery, NonrelCompiler, \
+ NonrelInsertCompiler, NonrelUpdateCompiler, NonrelDeleteCompiler
+
+import cPickle as pickle
+
+import decimal
+
+# Valid query types (a dictionary is used for speedy lookups).
+OPERATORS_MAP = {
+ 'exact': '=',
+ 'gt': '>',
+ 'gte': '>=',
+ 'lt': '<',
+ 'lte': '<=',
+
+ # The following operators are supported with special code below:
+ 'isnull': None,
+ 'in': None,
+ 'startswith': None,
+ 'range': None,
+ 'year': None,
+}
+
+NEGATION_MAP = {
+ 'gt': '<=',
+ 'gte': '<',
+ 'lt': '>=',
+ 'lte': '>',
+ # TODO: support these filters
+ #'exact': '!=', # this might actually become individual '<' and '>' queries
+}
+
+NOT_PROVIDED = object()
+
+def safe_call(func):
+ @wraps(func)
+ def _func(*args, **kwargs):
+ try:
+ return func(*args, **kwargs)
+ except GAEError, e:
+ raise DatabaseError, DatabaseError(str(e)), sys.exc_info()[2]
+ return _func
+
+class GAEQuery(NonrelQuery):
+ # ----------------------------------------------
+ # Public API
+ # ----------------------------------------------
+ def __init__(self, compiler, fields):
+ super(GAEQuery, self).__init__(compiler, fields)
+ self.inequality_field = None
+ self.pk_filters = None
+ self.excluded_pks = ()
+ self.has_negated_exact_filter = False
+ self.ordering = ()
+ self.gae_ordering = []
+ pks_only = False
+ if len(fields) == 1 and fields[0].primary_key:
+ pks_only = True
+ self.db_table = self.query.get_meta().db_table
+ self.pks_only = pks_only
+ start_cursor = getattr(self.query, '_gae_start_cursor', None)
+ end_cursor = getattr(self.query, '_gae_end_cursor', None)
+ self.gae_query = [Query(self.db_table, keys_only=self.pks_only,
+ cursor=start_cursor, end_cursor=end_cursor)]
+
+ # This is needed for debugging
+ def __repr__(self):
+ return '<GAEQuery: %r ORDER %r>' % (self.gae_query, self.ordering)
+
+ @safe_call
+ def fetch(self, low_mark, high_mark):
+ query = self._build_query()
+ executed = False
+ if self.excluded_pks and high_mark is not None:
+ high_mark += len(self.excluded_pks)
+ if self.pk_filters is not None:
+ results = self.get_matching_pk(low_mark, high_mark)
+ else:
+ if high_mark is None:
+ kw = {}
+ if low_mark:
+ kw['offset'] = low_mark
+ results = query.Run(**kw)
+ executed = True
+ elif high_mark > low_mark:
+ results = query.Get(high_mark - low_mark, low_mark)
+ executed = True
+ else:
+ results = ()
+
+ for entity in results:
+ if isinstance(entity, Key):
+ key = entity
+ else:
+ key = entity.key()
+ if key in self.excluded_pks:
+ continue
+ yield self._make_entity(entity)
+
+ if executed and not isinstance(query, MultiQuery):
+ try:
+ self.query._gae_cursor = query.GetCompiledCursor()
+ except:
+ pass
+
+ @safe_call
+ def count(self, limit=NOT_PROVIDED):
+ if self.pk_filters is not None:
+ return len(self.get_matching_pk(0, limit))
+ if self.excluded_pks:
+ return len(list(self.fetch(0, 2000)))
+ # The datastore's Count() method has a 'limit' kwarg, which has
+ # a default value (obviously). This value can be overridden to anything
+ # you like, and importantly can be overridden to unlimited by passing
+ # a value of None. Hence *this* method has a default value of
+ # NOT_PROVIDED, rather than a default value of None
+ kw = {}
+ if limit is not NOT_PROVIDED:
+ kw['limit'] = limit
+ return self._build_query().Count(**kw)
+
+ @safe_call
+ def delete(self):
+ if self.pk_filters is not None:
+ keys = [key for key in self.pk_filters if key is not None]
+ else:
+ keys = self.fetch()
+ if keys:
+ Delete(keys)
+
+ @safe_call
+ def order_by(self, ordering):
+ self.ordering = ordering
+ for order in self.ordering:
+ if order.startswith('-'):
+ order, direction = order[1:], Query.DESCENDING
+ else:
+ direction = Query.ASCENDING
+ if order == self.query.get_meta().pk.column:
+ order = '__key__'
+ self.gae_ordering.append((order, direction))
+
+ # This function is used by the default add_filters() implementation
+ @safe_call
+ def add_filter(self, column, lookup_type, negated, db_type, value):
+ if value in ([], ()):
+ self.pk_filters = []
+ return
+
+ # Emulated/converted lookups
+ if column == self.query.get_meta().pk.column:
+ column = '__key__'
+ db_table = self.query.get_meta().db_table
+ if lookup_type in ('exact', 'in'):
+ # Optimization: batch-get by key
+ if self.pk_filters is not None:
+ raise DatabaseError("You can't apply multiple AND filters "
+ "on the primary key. "
+ "Did you mean __in=[...]?")
+ if not isinstance(value, (tuple, list)):
+ value = [value]
+ pks = [create_key(db_table, pk) for pk in value if pk]
+ if negated:
+ self.excluded_pks = pks
+ else:
+ self.pk_filters = pks
+ return
+ else:
+ # XXX: set db_type to 'gae_key' in order to allow
+ # convert_value_for_db to recognize the value to be a Key and
+ # not a str. Otherwise the key would be converted back to a
+ # unicode (see convert_value_for_db)
+ db_type = 'gae_key'
+ key_type_error = 'Lookup values on primary keys have to be' \
+ 'a string or an integer.'
+ if lookup_type == 'range':
+ if isinstance(value, (list, tuple)) and not (
+ isinstance(value[0], (basestring, int, long)) and
+ isinstance(value[1], (basestring, int, long))):
+ raise DatabaseError(key_type_error)
+ elif not isinstance(value, (basestring, int, long)):
+ raise DatabaseError(key_type_error)
+ # for lookup type range we have to deal with a list
+ if lookup_type == 'range':
+ value[0] = create_key(db_table, value[0])
+ value[1] = create_key(db_table, value[1])
+ else:
+ value = create_key(db_table, value)
+ if lookup_type not in OPERATORS_MAP:
+ raise DatabaseError("Lookup type %r isn't supported" % lookup_type)
+
+ # We check for negation after lookup_type isnull because it
+ # simplifies the code. All following lookup_type checks assume
+ # that they're not negated.
+ if lookup_type == 'isnull':
+ if (negated and value) or not value:
+ # TODO/XXX: is everything greater than None?
+ op = '>'
+ else:
+ op = '='
+ value = None
+ elif negated and lookup_type == 'exact':
+ if self.has_negated_exact_filter:
+ raise DatabaseError("You can't exclude more than one __exact "
+ "filter")
+ self.has_negated_exact_filter = True
+ self._combine_filters(column, db_type,
+ (('<', value), ('>', value)))
+ return
+ elif negated:
+ try:
+ op = NEGATION_MAP[lookup_type]
+ except KeyError:
+ raise DatabaseError("Lookup type %r can't be negated" % lookup_type)
+ if self.inequality_field and column != self.inequality_field:
+ raise DatabaseError("Can't have inequality filters on multiple "
+ "columns (here: %r and %r)" % (self.inequality_field, column))
+ self.inequality_field = column
+ elif lookup_type == 'in':
+ # Create sub-query combinations, one for each value
+ if len(self.gae_query) * len(value) > 30:
+ raise DatabaseError("You can't query against more than "
+ "30 __in filter value combinations")
+ op_values = [('=', v) for v in value]
+ self._combine_filters(column, db_type, op_values)
+ return
+ elif lookup_type == 'startswith':
+ self._add_filter(column, '>=', db_type, value)
+ if isinstance(value, str):
+ value = value.decode('utf8')
+ if isinstance(value, Key):
+ value = list(value.to_path())
+ if isinstance(value[-1], str):
+ value[-1] = value[-1].decode('utf8')
+ value[-1] += u'\ufffd'
+ value = Key.from_path(*value)
+ else:
+ value += u'\ufffd'
+ self._add_filter(column, '<=', db_type, value)
+ return
+ elif lookup_type in ('range', 'year'):
+ self._add_filter(column, '>=', db_type, value[0])
+ op = '<=' if lookup_type == 'range' else '<'
+ self._add_filter(column, op, db_type, value[1])
+ return
+ else:
+ op = OPERATORS_MAP[lookup_type]
+
+ self._add_filter(column, op, db_type, value)
+
+ # ----------------------------------------------
+ # Internal API
+ # ----------------------------------------------
+ def _add_filter(self, column, op, db_type, value):
+ for query in self.gae_query:
+ key = '%s %s' % (column, op)
+ value = self.convert_value_for_db(db_type, value)
+ if isinstance(value, Text):
+ raise DatabaseError('TextField is not indexed, by default, '
+ "so you can't filter on it. Please add "
+ 'an index definition for the column %s '
+ 'on the model %s.%s as described here:\n'
+ 'http://www.allbuttonspressed.com/blog/django/2010/07/Managing-per-field-indexes-on-App-Engine'
+ % (column, self.query.model.__module__, self.query.model.__name__))
+ if key in query:
+ existing_value = query[key]
+ if isinstance(existing_value, list):
+ existing_value.append(value)
+ else:
+ query[key] = [existing_value, value]
+ else:
+ query[key] = value
+
+ def _combine_filters(self, column, db_type, op_values):
+ gae_query = self.gae_query
+ combined = []
+ for query in gae_query:
+ for op, value in op_values:
+ self.gae_query = [Query(self.db_table,
+ keys_only=self.pks_only)]
+ self.gae_query[0].update(query)
+ self._add_filter(column, op, db_type, value)
+ combined.append(self.gae_query[0])
+ self.gae_query = combined
+
+ def _make_entity(self, entity):
+ if isinstance(entity, Key):
+ key = entity
+ entity = {}
+ else:
+ key = entity.key()
+
+ entity[self.query.get_meta().pk.column] = key
+ return entity
+
+ @safe_call
+ def _build_query(self):
+ for query in self.gae_query:
+ query.Order(*self.gae_ordering)
+ if len(self.gae_query) > 1:
+ return MultiQuery(self.gae_query, self.gae_ordering)
+ return self.gae_query[0]
+
+ def get_matching_pk(self, low_mark=0, high_mark=None):
+ if not self.pk_filters:
+ return []
+
+ results = [result for result in Get(self.pk_filters)
+ if result is not None and
+ self.matches_filters(result)]
+ if self.ordering:
+ results.sort(cmp=self.order_pk_filtered)
+ if high_mark is not None and high_mark < len(results) - 1:
+ results = results[:high_mark]
+ if low_mark:
+ results = results[low_mark:]
+ return results
+
+ def order_pk_filtered(self, lhs, rhs):
+ left = dict(lhs)
+ left[self.query.get_meta().pk.column] = lhs.key().to_path()
+ right = dict(rhs)
+ right[self.query.get_meta().pk.column] = rhs.key().to_path()
+ return self._order_in_memory(left, right)
+
+ def matches_filters(self, entity):
+ item = dict(entity)
+ pk = self.query.get_meta().pk
+ value = self.convert_value_from_db(pk.db_type(connection=self.connection),
+ entity.key())
+ item[pk.column] = value
+ result = self._matches_filters(item, self.query.where)
+ return result
+
+class SQLCompiler(NonrelCompiler):
+ """
+ A simple App Engine query: no joins, no distinct, etc.
+ """
+ query_class = GAEQuery
+
+ def convert_value_from_db(self, db_type, value):
+ if isinstance(value, (list, tuple, set)) and \
+ db_type.startswith(('ListField:', 'SetField:')):
+ db_sub_type = db_type.split(':', 1)[1]
+ value = [self.convert_value_from_db(db_sub_type, subvalue)
+ for subvalue in value]
+
+ if db_type.startswith('SetField:') and value is not None:
+ value = set(value)
+
+ if db_type.startswith('DictField:') and value is not None:
+ value = pickle.loads(value)
+ if ':' in db_type:
+ db_sub_type = db_type.split(':', 1)[1]
+ value = dict((key, self.convert_value_from_db(db_sub_type, value[key]))
+ for key in value)
+
+ # the following GAE database types are all unicode subclasses, cast them
+ # to unicode so they appear like pure unicode instances for django
+ if isinstance(value, basestring) and value and db_type.startswith('decimal'):
+ value = decimal.Decimal(value)
+ elif isinstance(value, (Category, Email, Link, PhoneNumber, PostalAddress,
+ Text, unicode)):
+ value = unicode(value)
+ elif isinstance(value, Blob):
+ value = str(value)
+ elif isinstance(value, str):
+ # always retrieve strings as unicode (it is possible that old datasets
+ # contain non unicode strings, nevertheless work with unicode ones)
+ value = value.decode('utf-8')
+ elif isinstance(value, Key):
+ # for now we do not support KeyFields thus a Key has to be the own
+ # primary key
+ # TODO: GAE: support parents via GAEKeyField
+ assert value.parent() is None, "Parents are not yet supported!"
+ if db_type == 'integer':
+ if value.id() is None:
+ raise DatabaseError('Wrong type for Key. Expected integer, found'
+ 'None')
+ else:
+ value = value.id()
+ elif db_type == 'text':
+ if value.name() is None:
+ raise DatabaseError('Wrong type for Key. Expected string, found'
+ 'None')
+ else:
+ value = value.name()
+ else:
+ raise DatabaseError("%s fields cannot be keys on GAE" % db_type)
+ elif db_type == 'date' and isinstance(value, datetime.datetime):
+ value = value.date()
+ elif db_type == 'time' and isinstance(value, datetime.datetime):
+ value = value.time()
+ return value
+
+ def convert_value_for_db(self, db_type, value):
+ if isinstance(value, unicode):
+ value = unicode(value)
+ elif isinstance(value, str):
+ value = str(value)
+ elif isinstance(value, (list, tuple, set)) and \
+ db_type.startswith(('ListField:', 'SetField:')):
+ db_sub_type = db_type.split(':', 1)[1]
+ value = [self.convert_value_for_db(db_sub_type, subvalue)
+ for subvalue in value]
+ elif isinstance(value, decimal.Decimal) and db_type.startswith("decimal:"):
+ value = self.connection.ops.value_to_db_decimal(value, *eval(db_type[8:]))
+ elif isinstance(value, dict) and db_type.startswith('DictField:'):
+ if ':' in db_type:
+ db_sub_type = db_type.split(':', 1)[1]
+ value = dict([(key, self.convert_value_for_db(db_sub_type, value[key]))
+ for key in value])
+ value = Blob(pickle.dumps(value))
+
+ if db_type == 'gae_key':
+ return value
+ elif db_type == 'longtext':
+ # long text fields cannot be indexed on GAE so use GAE's database
+ # type Text
+ if value is not None:
+ value = Text(value.decode('utf-8') if isinstance(value, str) else value)
+ elif db_type == 'text':
+ value = value.decode('utf-8') if isinstance(value, str) else value
+ elif db_type == 'blob':
+ if value is not None:
+ value = Blob(value)
+ elif type(value) is str:
+ # always store unicode strings
+ value = value.decode('utf-8')
+ elif db_type == 'date' or db_type == 'time' or db_type == 'datetime':
+ # here we have to check the db_type because GAE always stores datetimes
+ value = to_datetime(value)
+ return value
+
+class SQLInsertCompiler(NonrelInsertCompiler, SQLCompiler):
+ @safe_call
+ def insert(self, data, return_id=False):
+ gae_data = {}
+ opts = self.query.get_meta()
+ unindexed_fields = get_model_indexes(self.query.model)['unindexed']
+ unindexed_cols = [opts.get_field(name).column
+ for name in unindexed_fields]
+ kwds = {'unindexed_properties': unindexed_cols}
+ for column, value in data.items():
+ if column == opts.pk.column:
+ if isinstance(value, basestring):
+ kwds['name'] = value
+ else:
+ kwds['id'] = value
+ elif isinstance(value, (tuple, list)) and not len(value):
+                # gae does not store empty lists (and even does not allow passing empty
+ # lists to Entity.update) so skip them
+ continue
+ else:
+ gae_data[column] = value
+
+ entity = Entity(self.query.get_meta().db_table, **kwds)
+ entity.update(gae_data)
+ key = Put(entity)
+ return key.id_or_name()
+
+class SQLUpdateCompiler(NonrelUpdateCompiler, SQLCompiler):
+ def execute_sql(self, result_type=MULTI):
+ # modify query to fetch pks only and then execute the query
+ # to get all pks
+ pk = self.query.model._meta.pk.name
+ self.query.add_immediate_loading([pk])
+ pks = [row for row in self.results_iter()]
+ self.update_entities(pks)
+ return len(pks)
+
+ def update_entities(self, pks):
+ for pk in pks:
+ self.update_entity(pk[0])
+
+ @commit_locked
+ def update_entity(self, pk):
+ gae_query = self.build_query()
+ key = create_key(self.query.get_meta().db_table, pk)
+ entity = Get(key)
+ if not gae_query.matches_filters(entity):
+ return
+
+ qn = self.quote_name_unless_alias
+ update_dict = {}
+ for field, o, value in self.query.values:
+ if hasattr(value, 'prepare_database_save'):
+ value = value.prepare_database_save(field)
+ else:
+ value = field.get_db_prep_save(value, connection=self.connection)
+
+ if hasattr(value, "evaluate"):
+ assert not value.negated
+ assert not value.subtree_parents
+ value = ExpressionEvaluator(value, self.query, entity,
+ allow_joins=False)
+
+ if hasattr(value, 'as_sql'):
+ # evaluate expression and return the new value
+ val = value.as_sql(qn, self.connection)
+ update_dict[field] = val
+ else:
+ update_dict[field] = value
+
+ for field, value in update_dict.iteritems():
+ db_type = field.db_type(connection=self.connection)
+ entity[qn(field.column)] = self.convert_value_for_db(db_type, value)
+
+ key = Put(entity)
+
+class SQLDeleteCompiler(NonrelDeleteCompiler, SQLCompiler):
+ pass
+
+def to_datetime(value):
+ """Convert a time or date to a datetime for datastore storage.
+
+ Args:
+ value: A datetime.time, datetime.date or string object.
+
+ Returns:
+ A datetime object with date set to 1970-01-01 if value is a datetime.time
+    A datetime object with date set to (value.year, value.month, value.day) and
+ time set to 0:00 if value is a datetime.date
+ """
+
+ if value is None:
+ return value
+ elif isinstance(value, datetime.datetime):
+ return value
+ elif isinstance(value, datetime.date):
+ return datetime.datetime(value.year, value.month, value.day)
+ elif isinstance(value, datetime.time):
+ return datetime.datetime(1970, 1, 1, value.hour, value.minute,
+ value.second, value.microsecond)
+
+def create_key(db_table, value):
+ if isinstance(value, (int, long)) and value < 1:
+ return None
+ return Key.from_path(db_table, value)
View
27 djangoappengine/db/creation.py 100755 → 100644
@@ -1,15 +1,16 @@
-from .db_settings import get_indexes
+from .db_settings import get_model_indexes
+from .stubs import stub_manager
from djangotoolbox.db.creation import NonrelDatabaseCreation
class StringType(object):
def __init__(self, internal_type):
self.internal_type = internal_type
def __mod__(self, field):
- indexes = get_indexes().get(field['model'], {})
- if field['name'] in indexes.get('indexed', ()):
+ indexes = get_model_indexes(field['model'])
+ if field['name'] in indexes['indexed']:
return 'text'
- elif field['name'] in indexes.get('unindexed', ()):
+ elif field['name'] in indexes['unindexed']:
return 'longtext'
return self.internal_type
@@ -30,13 +31,13 @@ class DatabaseCreation(NonrelDatabaseCreation):
data_types = get_data_types()
- def create_test_db(self, *args, **kw):
- """Destroys the test datastore. A new store will be recreated on demand"""
- self.destroy_test_db()
- self.connection.use_test_datastore = True
- self.connection.flush()
+ def _create_test_db(self, *args, **kw):
+ self._had_test_stubs = stub_manager.active_stubs != 'test'
+ if self._had_test_stubs:
+ stub_manager.activate_test_stubs()
- def destroy_test_db(self, *args, **kw):
- """Destroys the test datastore files."""
- from .base import destroy_datastore, get_test_datastore_paths
- destroy_datastore(*get_test_datastore_paths())
+ def _destroy_test_db(self, *args, **kw):
+ if self._had_test_stubs:
+ stub_manager.deactivate_test_stubs()
+ stub_manager.setup_stubs(self.connection)
+ del self._had_test_stubs
View
16 djangoappengine/db/db_settings.py 100755 → 100644
@@ -1,20 +1,28 @@
from django.conf import settings
from django.utils.importlib import import_module
+# TODO: add autodiscover() and make API more like dbindexer's register_index
+
_MODULE_NAMES = getattr(settings, 'GAE_SETTINGS_MODULES', ())
FIELD_INDEXES = None
# TODO: add support for eventual consistency setting on specific models
+def get_model_indexes(model):
+ indexes = get_indexes()
+ model_index = {'indexed': [], 'unindexed': []}
+ for item in reversed(model.mro()):
+ config = indexes.get(item, {})
+ model_index['indexed'].extend(config.get('indexed', ()))
+ model_index['unindexed'].extend(config.get('unindexed', ()))
+ return model_index
+
def get_indexes():
global FIELD_INDEXES
if FIELD_INDEXES is None:
field_indexes = {}
for name in _MODULE_NAMES:
- try:
- field_indexes.update(import_module(name).FIELD_INDEXES)
- except (ImportError, AttributeError):
- pass
+ field_indexes.update(import_module(name).FIELD_INDEXES)
FIELD_INDEXES = field_indexes
return FIELD_INDEXES
View
70 djangoappengine/db/utils.py 100755 → 100644
@@ -1,20 +1,50 @@
-from google.appengine.datastore.datastore_pb import CompiledCursor
-import base64
-
-def get_cursor(queryset):
- # Evaluate QuerySet
- len(queryset)
- cursor = getattr(queryset.query, '_gae_cursor', None)
- return base64.urlsafe_b64encode(cursor.Encode())
-
-def set_cursor(queryset, start=None, end=None):
- if start is not None:
- start = base64.urlsafe_b64decode(str(start))
- start = CompiledCursor(start)
- queryset.query._gae_start_cursor = start
- if end is not None:
- end = base64.urlsafe_b64decode(str(end))
- end = CompiledCursor(end)
- queryset.query._gae_end_cursor = end
- # Evaluate QuerySet
- len(queryset)
+from google.appengine.datastore.datastore_query import Cursor
+from django.db import models, DEFAULT_DB_ALIAS
+try:
+ from functools import wraps
+except ImportError:
+ from django.utils.functional import wraps # Python 2.3, 2.4 fallback.
+
class CursorQueryMixin(object):
    """Query mixin that carries GAE cursor state across query clones."""

    def clone(self, *args, **kwargs):
        # Propagate all three cursor attributes into the clone, defaulting
        # to None when the attribute was never set on this query.
        for attr in ('_gae_cursor', '_gae_start_cursor', '_gae_end_cursor'):
            kwargs[attr] = getattr(self, attr, None)
        return super(CursorQueryMixin, self).clone(*args, **kwargs)
+
def get_cursor(queryset):
    """Return the queryset's datastore cursor as a websafe string.

    The queryset is evaluated first (via len()) so the backend records
    the cursor on queryset.query.
    """
    len(queryset)  # force evaluation; the backend stores _gae_cursor
    return Cursor.to_websafe_string(
        getattr(queryset.query, '_gae_cursor', None))
+
def set_cursor(queryset, start=None, end=None):
    """Return a copy of `queryset` bounded by websafe cursor strings.

    `start` and/or `end` are websafe cursor strings as produced by
    get_cursor(); either may be omitted.
    """
    queryset = queryset.all()

    # Swap in a query class that preserves cursor attributes on clone().
    class CursorQuery(CursorQueryMixin, queryset.query.__class__):
        pass

    queryset.query = queryset.query.clone(klass=CursorQuery)
    for attr, token in (('_gae_start_cursor', start),
                        ('_gae_end_cursor', end)):
        if token is not None:
            setattr(queryset.query, attr, Cursor.from_websafe_string(token))
    return queryset
+
def commit_locked(func_or_using=None):
    """
    Decorator that locks rows on DB reads.
    """
    # Usable both bare (@commit_locked) and parameterized
    # (@commit_locked('alias')); the alias is currently unused by the
    # transaction runner but kept for API symmetry with Django.
    def make_wrapper(func, using=None):
        def run_in_transaction(*args, **kw):
            from google.appengine.api.datastore import RunInTransaction
            return RunInTransaction(func, *args, **kw)
        return wraps(func)(run_in_transaction)

    if func_or_using is None:
        func_or_using = DEFAULT_DB_ALIAS
    if callable(func_or_using):
        return make_wrapper(func_or_using, DEFAULT_DB_ALIAS)
    return lambda func: make_wrapper(func, func_or_using)
+
View
0 djangoappengine/deferred/__init__.py 100755 → 100644
File mode changed.
View
20 djangoappengine/deferred/handler.py 100755 → 100644
@@ -1,13 +1,19 @@
-import os, sys
+# Initialize Django
+from djangoappengine import main
-parent_dir = os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
-if parent_dir not in sys.path:
- sys.path.insert(0, parent_dir)
+from django.utils.importlib import import_module
+from django.conf import settings
-# Initialize Django
-from djangoappengine.main import main as gaemain
+# load all models.py to ensure signal handling installation or index loading
+# of some apps
+for app in settings.INSTALLED_APPS:
+ try:
+ import_module('%s.models' % (app))
+ except ImportError:
+ pass
-# Import and run the actual handler
from google.appengine.ext.deferred.handler import main
+from google.appengine.ext.deferred.deferred import application
+
if __name__ == '__main__':
main()
View
0 djangoappengine/lib/__init__.py 100755 → 100644
File mode changed.
View
0 djangoappengine/lib/memcache.py 100755 → 100644
File mode changed.
View
35 djangoappengine/mail.py 100755 → 100644
@@ -1,11 +1,16 @@
+from email.MIMEBase import MIMEBase
from django.core.mail.backends.base import BaseEmailBackend
from django.core.mail import EmailMultiAlternatives
from django.core.exceptions import ImproperlyConfigured
-
from google.appengine.api import mail as aeemail
+from google.appengine.runtime import apiproxy_errors
-def _send_deferred(message):
- message.send()
+def _send_deferred(message, fail_silently=False):
+ try:
+ message.send()
+ except (aeemail.Error, apiproxy_errors.Error):
+ if not fail_silently:
+ raise
class EmailBackend(BaseEmailBackend):
can_defer = False
@@ -25,11 +30,22 @@ def _copy_message(self, message):
body=message.body)
if message.extra_headers.get('Reply-To', None):
gmsg.reply_to = message.extra_headers['Reply-To']
+ if message.cc:
+ gmsg.cc = list(message.cc)
if message.bcc:
gmsg.bcc = list(message.bcc)
if message.attachments:
- gmsg.attachments = [(a[0], a[1]) for a in message.attachments]
- if isinstance(message, EmailMultiAlternatives): # look for HTML
+ # Must be populated with (filename, filecontents) tuples
+ attachments = []
+ for attachment in message.attachments:
+ if isinstance(attachment, MIMEBase):
+ attachments.append((attachment.get_filename(),
+ attachment.get_payload(decode=True)))
+ else:
+ attachments.append((attachment[0], attachment[1]))
+ gmsg.attachments = attachments
+ # Look for HTML alternative content
+ if isinstance(message, EmailMultiAlternatives):
for content, mimetype in message.alternatives:
if mimetype == 'text/html':
gmsg.html = content
@@ -50,15 +66,20 @@ def _send(self, message):
return True
try:
message.send()
- except aeemail.Error:
+ except (aeemail.Error, apiproxy_errors.Error):
if not self.fail_silently:
raise
return False
return True
def _defer_message(self, message):
from google.appengine.ext import deferred
- deferred.defer(_send_deferred, message)
+ from django.conf import settings
+ queue_name = getattr(settings, 'EMAIL_QUEUE_NAME', 'default')
+ deferred.defer(_send_deferred,
+ message,
+ fail_silently=self.fail_silently,
+ _queue=queue_name)
class AsyncEmailBackend(EmailBackend):
can_defer = True
View
78 djangoappengine/main/__init__.py 100755 → 100644
@@ -0,0 +1,78 @@
import os
import sys

# Add parent folder to sys.path, so we can import boot.
# App Engine causes main.py to be reloaded if an exception gets raised
# on the first request of a main.py instance, so don't add project_dir multiple
# times.
project_dir = os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
if project_dir not in sys.path or sys.path.index(project_dir) > 0:
    while project_dir in sys.path:
        sys.path.remove(project_dir)
    sys.path.insert(0, project_dir)

# Drop the first sys.path entry (other than the project itself) that bundles
# its own 'django' package, so the project's copy wins the import search.
for path in sys.path[:]:
    if path != project_dir and os.path.isdir(os.path.join(path, 'django')):
        sys.path.remove(path)
        break

# Remove the standard version of Django.  Only purges an already-imported
# django older than 1.2 so the bundled copy can be re-imported cleanly.
if 'django' in sys.modules and sys.modules['django'].VERSION < (1, 2):
    for k in [k for k in sys.modules
              if k.startswith('django.') or k == 'django']:
        del sys.modules[k]

# Bootstrap the App Engine/Django environment before anything else imports
# django settings.
from djangoappengine.boot import setup_env
setup_env()
+
def validate_models():
    """Since BaseRunserverCommand is only run once, we need to call
    model valdidation here to ensure it is run every time the code
    changes.

    """
    import logging
    from django.core.management.validation import get_validation_errors
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO

    logging.info("Validating models...")

    out = StringIO()
    if get_validation_errors(out, None):
        # Rewind the buffer so the collected error report can be logged.
        out.seek(0)
        error_text = out.read()
        logging.critical("One or more models did not validate:\n%s" % error_text)
    else:
        logging.info("All models validated.")
+
from djangoappengine.utils import on_production_server
# Model validation is only re-run on the dev server, where code changes
# between requests.
if not on_production_server:
    validate_models()

# NOTE(review): run_wsgi_app is imported but never used in this module.
from django.core.handlers.wsgi import WSGIHandler
from google.appengine.ext.webapp.util import run_wsgi_app
from django.conf import settings

def log_traceback(*args, **kwargs):
    # Signal receiver: log the full traceback of any unhandled request
    # exception so it shows up in the App Engine logs.
    import logging
    logging.exception('Exception in request:')

from django.core import signals
signals.got_request_exception.connect(log_traceback)

# Create a Django application for WSGI
application = WSGIHandler()

# Add the staticfiles handler if necessary
if settings.DEBUG and 'django.contrib.staticfiles' in settings.INSTALLED_APPS:
    from django.contrib.staticfiles.handlers import StaticFilesHandler
    application = StaticFilesHandler(application)

# Optionally wrap the application with Appstats RPC profiling middleware.
if getattr(settings, 'ENABLE_APPSTATS', False):
    from google.appengine.ext.appstats.recording import appstats_wsgi_middleware
    application = appstats_wsgi_middleware(application)
View
168 djangoappengine/main/main.py 100755 → 100644
@@ -1,94 +1,74 @@
-import os
-import sys
-
-# Add parent folder to sys.path, so we can import boot.
-# App Engine causes main.py to be reloaded if an exception gets raised
-# on the first request of a main.py instance, so don't add project_dir multiple
-# times.
-project_dir = os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
-if project_dir not in sys.path or sys.path.index(project_dir) > 0:
- while project_dir in sys.path:
- sys.path.remove(project_dir)
- sys.path.insert(0, project_dir)
-
-for path in sys.path[:]:
- if path != project_dir and os.path.isdir(os.path.join(path, 'django')):
- sys.path.remove(path)
- break
-
-# Remove the standard version of Django.
-if 'django' in sys.modules and sys.modules['django'].VERSION < (1, 2):
- for k in [k for k in sys.modules
-            if k.startswith('django.') or k == 'django']:
- del sys.modules[k]
-
-from djangoappengine.boot import setup_env, setup_logging, env_ext
-setup_env()