
First version.

0 parents commit d948c15c4e532003af7822a0b6861ca6e511cec5 unknown committed Nov 10, 2011
37 app.yaml
@@ -0,0 +1,37 @@
+application: put your own google app id here
+version: 1
+runtime: python27
+api_version: 1
+threadsafe: true
+
+libraries:
+- name: PIL
+ version: latest
+- name: markupsafe
+ version: latest
+- name: setuptools
+ version: latest
+- name: jinja2
+ version: latest
+
+builtins:
+- remote_api: on
+- datastore_admin: on
+- appstats: on
+- deferred: on
+
+inbound_services:
+- mail
+- channel_presence
+- warmup
+
+admin_console:
+ pages:
+ - name: Edit Domains
+ url: /account/admin/editdomains
+ - name: Edit Pages
+ url: /account/admin/editpages
+
+handlers:
+- url: /.*
+ script: main.application
9 appengine_config.py
@@ -0,0 +1,9 @@
+from gaesessions import SessionMiddleware
+def webapp_add_wsgi_middleware(app):
+ #from google.appengine.ext.appstats import recording
+ #app = recording.appstats_wsgi_middleware(app)
+ app = SessionMiddleware(app,
+ cookie_key='Generate your own key using os.urandom(64) offline\x04c\x9d(\x88\xcc\xf3!\xdd\x14\x13\x19\xa2\xec'
+ )
+ return app
+
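
The cookie_key above is the stock placeholder; as the inline comment says, generate a real secret offline. A minimal sketch (the script name is illustrative; run it on your own machine, never at app startup, since a key generated at runtime would invalidate all existing sessions on every restart):

    # generate_key.py - run once locally
    import os
    key = os.urandom(64)  # 64 random bytes, above the 32-byte minimum SessionMiddleware enforces
    print repr(key)       # paste this literal into appengine_config.py as cookie_key
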
515 gaesessions/__init__.py
@@ -0,0 +1,515 @@
+"""A fast, lightweight, and secure session WSGI middleware for use with GAE."""
+from Cookie import CookieError, SimpleCookie
+from base64 import b64decode, b64encode
+import datetime
+import hashlib
+import hmac
+import logging
+import pickle
+import os
+import threading
+import time
+
+from google.appengine.api import memcache
+from google.appengine.ext import db
+
+# Configurable cookie options
+COOKIE_NAME_PREFIX = "DgU" # identifies a cookie as being one used by gae-sessions (so you can set cookies too)
+COOKIE_PATH = "/"
+DEFAULT_COOKIE_ONLY_THRESH = 10240 # 10KB: GAE only allows ~16000B in HTTP header - leave ~6KB for other info
+DEFAULT_LIFETIME = datetime.timedelta(days=7)
+
+# constants
+SID_LEN = 43 # timestamp (10 chars) + underscore + md5 (32 hex chars)
+SIG_LEN = 44 # base 64 encoded HMAC-SHA256
+MAX_COOKIE_LEN = 4096
+EXPIRE_COOKIE_FMT = ' %s=; expires=Wed, 01-Jan-1970 00:00:00 GMT; Path=' + COOKIE_PATH
+COOKIE_FMT = ' ' + COOKIE_NAME_PREFIX + '%02d="%s"; %sPath=' + COOKIE_PATH + '; HttpOnly'
+COOKIE_FMT_SECURE = COOKIE_FMT + '; Secure'
+COOKIE_DATE_FMT = '%a, %d-%b-%Y %H:%M:%S GMT'
+COOKIE_OVERHEAD = len(COOKIE_FMT % (0, '', '')) + len('expires=Xxx, xx XXX XXXX XX:XX:XX GMT; ') + 150 # 150=safety margin (e.g., in case browser uses 4000 instead of 4096)
+MAX_DATA_PER_COOKIE = MAX_COOKIE_LEN - COOKIE_OVERHEAD
+
+_tls = threading.local()
+
+
+def get_current_session():
+ """Returns the session associated with the current request."""
+ return _tls.current_session
+
+
+def set_current_session(session):
+ """Sets the session associated with the current request."""
+ _tls.current_session = session
+
+
+def is_gaesessions_key(k):
+ return k.startswith(COOKIE_NAME_PREFIX)
+
+
+class SessionModel(db.Model):
+ """Contains session data. key_name is the session ID and pdump contains a
+ pickled dictionary which maps session variables to their values."""
+ pdump = db.BlobProperty()
+
+
+class Session(object):
+ """Manages loading, reading/writing key-value pairs, and saving of a session.
+
+ ``sid`` - if set, then the session for that sid (if any) is loaded. Otherwise,
+ sid will be loaded from the HTTP_COOKIE (if any).
+ """
+ DIRTY_BUT_DONT_PERSIST_TO_DB = 1
+
+ def __init__(self, sid=None, lifetime=DEFAULT_LIFETIME, no_datastore=False,
+ cookie_only_threshold=DEFAULT_COOKIE_ONLY_THRESH, cookie_key=None):
+ self._accessed = False
+ self.sid = None
+ self.cookie_keys = []
+ self.cookie_data = None
+ self.data = {}
+ self.dirty = False # has the session been changed?
+
+ self.lifetime = lifetime
+ self.no_datastore = no_datastore
+ self.cookie_only_thresh = cookie_only_threshold
+ self.base_key = cookie_key
+
+ if sid:
+ self.__set_sid(sid, False)
+ self.data = None
+ else:
+ self.__read_cookie()
+
+ @staticmethod
+ def __compute_hmac(base_key, sid, text):
+ """Computes the signature for text given base_key and sid."""
+ key = base_key + sid
+ return b64encode(hmac.new(key, text, hashlib.sha256).digest())
+
+ def __read_cookie(self):
+ """Reads the HTTP Cookie and loads the sid and data from it (if any)."""
+ try:
+ # check the cookie to see if a session has been started
+ cookie = SimpleCookie(os.environ['HTTP_COOKIE'])
+ self.cookie_keys = filter(is_gaesessions_key, cookie.keys())
+ if not self.cookie_keys:
+ return # no session yet
+ self.cookie_keys.sort()
+ data = ''.join(cookie[k].value for k in self.cookie_keys)
+ i = SIG_LEN + SID_LEN
+ sig, sid, b64pdump = data[:SIG_LEN], data[SIG_LEN:i], data[i:]
+ pdump = b64decode(b64pdump)
+ actual_sig = Session.__compute_hmac(self.base_key, sid, pdump)
+ if sig == actual_sig:
+ self.__set_sid(sid, False)
+ # check for expiration and terminate the session if it has expired
+ if self.get_expiration() != 0 and time.time() > self.get_expiration():
+ return self.terminate()
+
+ if pdump:
+ self.data = self.__decode_data(pdump)
+ else:
+ self.data = None # data is in memcache/db: load it on-demand
+ else:
+ logging.warn('cookie with invalid sig received from %s: %s' % (os.environ.get('REMOTE_ADDR'), b64pdump))
+ except (CookieError, KeyError, IndexError, TypeError):
+ # there is no cookie (i.e., no session) or the cookie is invalid
+ self.terminate(False)
+
+ def make_cookie_headers(self):
+ """Returns a list of cookie headers to send (if any)."""
+ # expire all cookies if the session has ended
+ if not self.sid:
+ return [EXPIRE_COOKIE_FMT % k for k in self.cookie_keys]
+
+ if self.cookie_data is None:
+ return [] # no cookie headers need to be sent
+
+ # build the cookie header(s): includes sig, sid, and cookie_data
+ if self.is_ssl_only():
+ m = MAX_DATA_PER_COOKIE - 8 # leave room for the 8-char '; Secure' suffix
+ fmt = COOKIE_FMT_SECURE
+ else:
+ m = MAX_DATA_PER_COOKIE
+ fmt = COOKIE_FMT
+ sig = Session.__compute_hmac(self.base_key, self.sid, self.cookie_data)
+ cv = sig + self.sid + b64encode(self.cookie_data)
+ num_cookies = 1 + (len(cv) - 1) / m # ceiling division (Python 2 integer /)
+ if self.get_expiration() > 0:
+ ed = "expires=%s; " % datetime.datetime.fromtimestamp(self.get_expiration()).strftime(COOKIE_DATE_FMT)
+ else:
+ ed = ''
+ cookies = [fmt % (i, cv[i * m:i * m + m], ed) for i in xrange(num_cookies)]
+
+ # expire old cookies which aren't needed anymore
+ old_cookies = xrange(num_cookies, len(self.cookie_keys))
+ key = COOKIE_NAME_PREFIX + '%02d'
+ cookies_to_ax = [EXPIRE_COOKIE_FMT % (key % i) for i in old_cookies]
+ return cookies + cookies_to_ax
+
+ def is_active(self):
+ """Returns True if this session is active (i.e., it has been assigned a
+ session ID and will be or has been persisted)."""
+ return self.sid is not None
+
+ def is_ssl_only(self):
+ """Returns True if cookies set by this session will include the "Secure"
+ attribute so that the client will only send them over a secure channel
+ (e.g., SSL)."""
+ return self.sid is not None and self.sid[-33] == 'S'
+
+ def is_accessed(self):
+ """Returns True if any value of this session has been accessed."""
+ return self._accessed
+
+ def ensure_data_loaded(self):
+ """Fetch the session data if it hasn't been retrieved it yet."""
+ self._accessed = True
+ if self.data is None and self.sid:
+ self.__retrieve_data()
+
+ def get_expiration(self):
+ """Returns the timestamp at which this session will expire."""
+ try:
+ return int(self.sid[:-33])
+ except:
+ return 0
+
+ def __make_sid(self, expire_ts=None, ssl_only=False):
+ """Returns a new session ID."""
+ # make a random ID (random.randrange() is 10x faster but less secure?)
+ if expire_ts is None:
+ expire_dt = datetime.datetime.now() + self.lifetime
+ expire_ts = int(time.mktime((expire_dt).timetuple()))
+ else:
+ expire_ts = int(expire_ts)
+ if ssl_only:
+ sep = 'S'
+ else:
+ sep = '_'
+ return ('%010d' % expire_ts) + sep + hashlib.md5(os.urandom(16)).hexdigest()
+
+ @staticmethod
+ def __encode_data(d):
+ """Returns a "pickled+" encoding of d. d values of type db.Model are
+ protobuf encoded before pickling to minimize CPU usage & data size."""
+ # separate protobufs so we'll know how to decode (they are just strings)
+ eP = {} # for models encoded as protobufs
+ eO = {} # for everything else
+ for k, v in d.iteritems():
+ if isinstance(v, db.Model):
+ eP[k] = db.model_to_protobuf(v)
+ else:
+ eO[k] = v
+ return pickle.dumps((eP, eO), 2)
+
+ @staticmethod
+ def __decode_data(pdump):
+ """Returns a data dictionary after decoding it from "pickled+" form."""
+ try:
+ eP, eO = pickle.loads(pdump)
+ for k, v in eP.iteritems():
+ eO[k] = db.model_from_protobuf(v)
+ except Exception, e:
+ logging.warn("failed to decode session data: %s" % e)
+ eO = {}
+ return eO
+
+ def regenerate_id(self, expiration_ts=None):
+ """Assigns the session a new session ID (data carries over). This
+ should be called whenever a user authenticates to prevent session
+ fixation attacks.
+
+ ``expiration_ts`` - The UNIX timestamp the session will expire at. If
+ omitted, the session expiration time will not be changed.
+ """
+ if self.sid or expiration_ts is not None:
+ self.ensure_data_loaded() # ensure we have the data before we delete it
+ if expiration_ts is None:
+ expiration_ts = self.get_expiration()
+ self.__set_sid(self.__make_sid(expiration_ts, self.is_ssl_only()))
+ self.dirty = True # ensure the data is written to the new session
+
+ def start(self, expiration_ts=None, ssl_only=False):
+ """Starts a new session. expiration specifies when it will expire. If
+ expiration is not specified, then self.lifetime will used to
+ determine the expiration date.
+
+ Normally this method does not need to be called directly - a session is
+ automatically started when the first value is added to the session.
+
+ ``expiration_ts`` - The UNIX timestamp the session will expire at. If
+ omitted, the session will expire after the default ``lifetime`` has passed
+ (as specified in ``SessionMiddleware``).
+
+ ``ssl_only`` - Whether to specify the "Secure" attribute on the cookie
+ so that the client will ONLY transfer the cookie over a secure channel.
+ """
+ self.dirty = True
+ self.data = {}
+ self.__set_sid(self.__make_sid(expiration_ts, ssl_only), True)
+
+ def terminate(self, clear_data=True):
+ """Deletes the session and its data, and expires the user's cookie."""
+ if clear_data:
+ self.__clear_data()
+ self.sid = None
+ self.data = {}
+ self.dirty = False
+ if self.cookie_keys:
+ self.cookie_data = '' # trigger the cookies to expire
+ else:
+ self.cookie_data = None
+
+ def __set_sid(self, sid, make_cookie=True):
+ """Sets the session ID, deleting the old session if one existed. The
+ session's data will remain intact (only the session ID changes)."""
+ if self.sid:
+ self.__clear_data()
+ self.sid = sid
+ self.db_key = db.Key.from_path(SessionModel.kind(), sid, namespace='')
+
+ # set the cookie if requested
+ if make_cookie:
+ self.cookie_data = '' # trigger the cookie to be sent
+
+ def __clear_data(self):
+ """Deletes this session from memcache and the datastore."""
+ if self.sid:
+ memcache.delete(self.sid, namespace='') # not really needed; it'll go away on its own
+ try:
+ db.delete(self.db_key)
+ except:
+ pass # either it wasn't in the db (maybe cookie/memcache-only) or db is down => cron will expire it
+
+ def __retrieve_data(self):
+ """Sets the data associated with this session after retrieving it from
+ memcache or the datastore. Assumes self.sid is set. Checks for session
+ expiration after getting the data."""
+ pdump = memcache.get(self.sid, namespace='')
+ if pdump is None:
+ # memcache lost it, go to the datastore
+ if self.no_datastore:
+ logging.info("can't find session data in memcache for sid=%s (using memcache only sessions)" % self.sid)
+ self.terminate(False) # we lost it; just kill the session
+ return
+ session_model_instance = db.get(self.db_key)
+ if session_model_instance:
+ pdump = session_model_instance.pdump
+ else:
+ logging.error("can't find session data in the datastore for sid=%s" % self.sid)
+ self.terminate(False) # we lost it; just kill the session
+ return
+ self.data = self.__decode_data(pdump)
+
+ def save(self, persist_even_if_using_cookie=False):
+ """Saves the data associated with this session IF any changes have been
+ made (specifically, if any mutator method such as __setitem__
+ has been called).
+
+ If the data is small enough it will be sent back to the user in a cookie
+ instead of using memcache and the datastore. If `persist_even_if_using_cookie`
+ evaluates to True, memcache and the datastore will also be used. If the
+ no_datastore option is set, then the datastore will never be used.
+
+ Normally this method does not need to be called directly - a session is
+ automatically saved at the end of the request if any changes were made.
+ """
+ if not self.sid:
+ return # no session is active
+ if not self.dirty:
+ return # nothing has changed
+ dirty = self.dirty
+ self.dirty = False # saving, so it won't be dirty anymore
+
+ # do the pickling ourselves b/c we need it for the datastore anyway
+ pdump = self.__encode_data(self.data)
+
+ # persist via cookies if it is reasonably small
+ if len(pdump) * 4 / 3 <= self.cookie_only_thresh: # 4/3 b/c base64 is ~33% bigger
+ self.cookie_data = pdump
+ if not persist_even_if_using_cookie:
+ return
+ elif self.cookie_keys:
+ # latest data will only be in the backend, so expire data cookies we set
+ self.cookie_data = ''
+
+ memcache.set(self.sid, pdump, namespace='', time=self.get_expiration()) # may fail if memcache is down
+
+ # persist the session to the datastore
+ if dirty is Session.DIRTY_BUT_DONT_PERSIST_TO_DB or self.no_datastore:
+ return
+ try:
+ SessionModel(key_name=self.sid, pdump=pdump).put()
+ except Exception, e:
+ logging.warning("unable to persist session to datastore for sid=%s (%s)" % (self.sid, e))
+
+ # Users may interact with the session through a dictionary-like interface.
+ def clear(self):
+ """Removes all data from the session (but does not terminate it)."""
+ if self.sid:
+ self.data = {}
+ self.dirty = True
+
+ def get(self, key, default=None):
+ """Retrieves a value from the session."""
+ self.ensure_data_loaded()
+ return self.data.get(key, default)
+
+ def has_key(self, key):
+ """Returns True if key is set."""
+ self.ensure_data_loaded()
+ return key in self.data
+
+ def pop(self, key, default=None):
+ """Removes key and returns its value, or default if key is not present."""
+ self.ensure_data_loaded()
+ self.dirty = True
+ return self.data.pop(key, default)
+
+ def pop_quick(self, key, default=None):
+ """Removes key and returns its value, or default if key is not present.
+ The change will only be persisted to memcache until another change
+ necessitates a write to the datastore."""
+ self.ensure_data_loaded()
+ if self.dirty is False:
+ self.dirty = Session.DIRTY_BUT_DONT_PERSIST_TO_DB
+ return self.data.pop(key, default)
+
+ def set_quick(self, key, value):
+ """Set a value named key on this session. The change will only be
+ persisted to memcache until another change necessitates a write to the
+ datastore. This will start a session if one is not already active."""
+ dirty = self.dirty
+ self[key] = value
+ if dirty is False or dirty is Session.DIRTY_BUT_DONT_PERSIST_TO_DB:
+ self.dirty = Session.DIRTY_BUT_DONT_PERSIST_TO_DB
+
+ def __getitem__(self, key):
+ """Returns the value associated with key on this session."""
+ self.ensure_data_loaded()
+ return self.data.__getitem__(key)
+
+ def __setitem__(self, key, value):
+ """Set a value named key on this session. This will start a session if
+ one is not already active."""
+ self.ensure_data_loaded()
+ if not self.sid:
+ self.start()
+ self.data.__setitem__(key, value)
+ self.dirty = True
+
+ def __delitem__(self, key):
+ """Deletes the value associated with key on this session."""
+ self.ensure_data_loaded()
+ self.data.__delitem__(key)
+ self.dirty = True
+
+ def __iter__(self):
+ """Returns an iterator over the keys (names) of the stored values."""
+ self.ensure_data_loaded()
+ return self.data.iterkeys()
+
+ def __contains__(self, key):
+ """Returns True if key is present on this session."""
+ self.ensure_data_loaded()
+ return self.data.__contains__(key)
+
+ def __str__(self):
+ """Returns a string representation of the session."""
+ if self.sid:
+ self.ensure_data_loaded()
+ return "SID=%s %s" % (self.sid, self.data)
+ else:
+ return "uninitialized session"
+
+
+class SessionMiddleware(object):
+ """WSGI middleware that adds session support.
+
+ ``cookie_key`` - A key used to secure cookies so users cannot modify their
+ content. Keys should be at least 32 bytes (RFC2104). Tip: generate your
+ key using ``os.urandom(64)`` but do this OFFLINE and copy/paste the output
+ into a string which you pass in as ``cookie_key``. If you use ``os.urandom()``
+ to dynamically generate your key at runtime then any existing sessions will
+ become junk every time your app starts up!
+
+ ``lifetime`` - ``datetime.timedelta`` that specifies how long a session may last. Defaults to 7 days.
+
+ ``no_datastore`` - By default all writes also go to the datastore in case
+ memcache is lost. Set to True to never use the datastore. This improves
+ write performance but sessions may occasionally be lost.
+
+ ``cookie_only_threshold`` - A size in bytes. If session data is less than this
+ threshold, then session data is kept only in a secure cookie. This avoids
+ memcache/datastore latency which is critical for small sessions. Larger
+ sessions are kept in memcache+datastore instead. Defaults to 10KB.
+ """
+ def __init__(self, app, cookie_key, lifetime=DEFAULT_LIFETIME, no_datastore=False, cookie_only_threshold=DEFAULT_COOKIE_ONLY_THRESH):
+ self.app = app
+ self.lifetime = lifetime
+ self.no_datastore = no_datastore
+ self.cookie_only_thresh = cookie_only_threshold
+ self.cookie_key = cookie_key
+ if not self.cookie_key:
+ raise ValueError("cookie_key MUST be specified")
+ if len(self.cookie_key) < 32:
+ raise ValueError("RFC2104 recommends you use at least a 32 character key. Try os.urandom(64) to make a key.")
+
+ def __call__(self, environ, start_response):
+ # initialize a session for the current user
+ _tls.current_session = Session(lifetime=self.lifetime, no_datastore=self.no_datastore, cookie_only_threshold=self.cookie_only_thresh, cookie_key=self.cookie_key)
+
+ # create a hook for us to insert a cookie into the response headers
+ def my_start_response(status, headers, exc_info=None):
+ _tls.current_session.save() # store the session if it was changed
+ for ch in _tls.current_session.make_cookie_headers():
+ headers.append(('Set-Cookie', ch))
+ return start_response(status, headers, exc_info)
+
+ # let the app do its thing
+ return self.app(environ, my_start_response)
+
+
+class DjangoSessionMiddleware(object):
+ """Django middleware that adds session support. You must specify the
+ session configuration parameters by modifying the call to ``SessionMiddleware``
+ in ``DjangoSessionMiddleware.__init__()`` since Django cannot call an
+ initialization method with parameters.
+ """
+ def __init__(self):
+ fake_app = lambda environ, start_response: start_response
+ self.wrapped_wsgi_middleware = SessionMiddleware(fake_app, cookie_key='you MUST change this')
+ self.response_handler = None
+
+ def process_request(self, request):
+ self.response_handler = self.wrapped_wsgi_middleware(None, lambda status, headers, exc_info: headers)
+ request.session = get_current_session() # for convenience
+
+ def process_response(self, request, response):
+ if self.response_handler:
+ session_headers = self.response_handler(None, [], None)
+ for k, v in session_headers:
+ response[k] = v
+ self.response_handler = None
+ if hasattr(request, 'session') and request.session.is_accessed():
+ from django.utils.cache import patch_vary_headers
+ logging.info("Varying")
+ patch_vary_headers(response, ('Cookie',))
+ return response
+
+
+def delete_expired_sessions():
+ """Deletes expired sessions from the datastore.
+ If there are more than 500 expired sessions, only 500 will be removed.
+ Returns True if all expired sessions have been removed.
+ """
+ now_str = unicode(int(time.time()))
+ q = db.Query(SessionModel, keys_only=True, namespace='')
+ key = db.Key.from_path('SessionModel', now_str + u'\ufffd', namespace='')
+ q.filter('__key__ < ', key)
+ results = q.fetch(500)
+ db.delete(results)
+ logging.info('gae-sessions: deleted %d expired sessions from the datastore' % len(results))
+ return len(results) < 500
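
A minimal sketch of how application code would consume this API (the handler names and user_id are illustrative, not part of this commit):

    from gaesessions import get_current_session

    def on_login(user_id):
        session = get_current_session()
        session.regenerate_id()    # recommended on authentication to prevent session fixation
        session['uid'] = user_id   # __setitem__ starts a session if none is active and marks it dirty

    def on_request():
        session = get_current_session()
        return session.get('uid')  # data is loaded lazily via ensure_data_loaded()
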
7 main.py
@@ -0,0 +1,7 @@
+import webapp2
+import melya.handlerhelpers
+
+application = webapp2.WSGIApplication([
+ (r'/api/(.*)', melya.handlerhelpers.GeneralApiHandler),
+ (r'.*', melya.handlerhelpers.GeneralPageHandler),
+])
0 melya/__init__.py
No changes.
161 melya/adminapi.py
@@ -0,0 +1,161 @@
+from handlerhelpers import ApiReq, RetType, getUserAndIsAdmin, RequireAdmin
+import google.appengine.ext.db as db
+from google.appengine.api import memcache
+import datamodel,json, logging, types
+from utils import getAllFromFromQuery
+_memcache = memcache.Client()
+
+@ApiReq()
+@RequireAdmin
+def admin_GetDomainList(req):
+ whichKey = req.get('key')
+ if whichKey:
+ all = datamodel.DB_Domains.get_by_id(int(whichKey)) # load a specific one
+ if not all: return RetType.JSONFAIL, {'text':'Domain not found by key'}
+ all = [all]
+ else:
+ all = getAllFromFromQuery(datamodel.DB_Domains.all())
+ res = [{'key':x.key().id(),'name':x.name, 'regex':x.regex, 'defaultTitle':x.defaultTitle, 'order':x.order, 'dateUpdated':str(x.dateUpdated)} for x in all]
+ return RetType.JSONSUCCESS, {'domains':res}
+
+@ApiReq()
+@RequireAdmin
+def admin_DeleteDomain(req):
+ whichKey = req.get('key')
+ dd = datamodel.DB_Domains.get_by_id(int(whichKey)) # load a specific one
+ if not dd: return RetType.JSONFAIL, {'text':'Domain not found by key'}
+ dd.delete()
+ return RetType.JSONSUCCESS
+
+@ApiReq()
+@RequireAdmin
+def admin_SaveDomain(req):
+ jsonobj = req.get('jsonobj')
+ if not jsonobj: return RetType.JSONFAIL
+ jsonobj = json.loads(jsonobj)
+
+ key = jsonobj.get('key')
+ name = jsonobj.get('name')
+ regex = jsonobj.get('regex')
+ dt = jsonobj.get('defaultTitle')
+ o = jsonobj.get('order')
+
+ if not name or not regex or not dt or not o: return RetType.JSONFAIL, {'text':'All fields must be valid'}
+
+ if key:
+ theDomain = datamodel.DB_Domains.get_by_id(int(key)) # load the old one
+ if not theDomain:
+ return RetType.JSONFAIL, {'text':'Domain not found by key'}
+ else:
+ theDomain = datamodel.DB_Domains() # create a new one.
+
+ theDomain.name = name
+ theDomain.regex = regex
+ theDomain.defaultTitle = dt
+ theDomain.order = float(o)
+
+ theDomain.put()
+
+ return RetType.JSONSUCCESS, {'domainKey':theDomain.key().id()}
+
+# The API for pages
+
+@ApiReq()
+@RequireAdmin
+def admin_GetPageList(req):
+ whichKey = req.get('key')
+ if whichKey:
+ all = datamodel.DB_Pages.get_by_id(int(whichKey)) # load a specific one
+ if not all: return RetType.JSONFAIL, {'text':'page not found by key'}
+ all = [all]
+ else:
+ all = getAllFromFromQuery(datamodel.DB_Pages.all())
+ res = [{'key':x.key().id(),'domainName':x.domainName, 'regex':x.regex, 'fileName':x.fileName, 'flags':x.flags,
+ 'order':x.order, 'dateUpdated':str(x.dateUpdated)} for x in all]
+ return RetType.JSONSUCCESS, {'pages':res}
+
+@ApiReq()
+@RequireAdmin
+def admin_DeletePage(req):
+ whichKey = req.get('key')
+ dd = datamodel.DB_Pages.get_by_id(int(whichKey)) # load a specific one
+ if not dd: return RetType.JSONFAIL, {'text':'Page not found by key'}
+ dd.delete()
+ return RetType.JSONSUCCESS
+
+@ApiReq()
+@RequireAdmin
+def admin_SavePage(req):
+ jsonobj = req.get('jsonobj')
+ if not jsonobj: return RetType.JSONFAIL
+ jsonobj = json.loads(jsonobj)
+
+ valDict = dict((x, jsonobj.get(x)) for x in ['order', 'domainName', 'regex', 'fileName', 'flags'])
+ nonNull = [valDict[x] for x in ['order', 'regex', 'fileName']] # which ones need to be non-null
+
+ if not all(nonNull): return RetType.JSONFAIL, {'text':'Fields must be valid'}
+ key = jsonobj.get('key')
+
+ if key:
+ thePage = datamodel.DB_Pages.get_by_id(int(key)) # load the old one
+ if not thePage:
+ return RetType.JSONFAIL, {'text':'Page not found by key'}
+ else:
+ thePage = datamodel.DB_Pages() # create a new one.
+
+ for x,y in valDict.items():
+ dmt = getattr(datamodel.DB_Pages, x)
+ if dmt.data_type == types.FloatType: # add bool?
+ setattr(thePage, x, float(y))
+ else:
+ setattr(thePage, x, y)
+
+
+ thePage.put()
+
+ return RetType.JSONSUCCESS, {'pageKey':thePage.key().id()}
+
+
+
+def execute_dyn_python_code(statement):
+ import traceback,sys
+ from cStringIO import StringIO
+ out = StringIO()
+
+ # the python compiler doesn't like network line endings
+ statement = statement.replace('\r\n', '\n')
+
+ # add a couple newlines at the end of the statement. this makes
+ # single-line expressions such as 'class Foo: pass' evaluate happily.
+ statement += '\n\n'
+
+ try:
+ old_stdout = sys.stdout
+ old_stderr = sys.stderr
+ try:
+ sys.stdout = out
+ sys.stderr = out
+
+ compiled = compile(statement, '<string>', 'exec')
+ exec compiled
+ finally:
+ sys.stdout = old_stdout
+ sys.stderr = old_stderr
+ except:
+ out.write(traceback.format_exc())
+
+ contents = out.getvalue()
+ out.close()
+ return contents
+
+@ApiReq()
+@RequireAdmin
+def admin_DangerousExecutePythonCode(req):
+ res = execute_dyn_python_code(req.get('code'))
+ return RetType.HEADERSANDRAW, {'Content-Type':'text/plain'}, res
+
+@ApiReq()
+@RequireAdmin
+def admin_FlushAllMemcache(req):
+ _memcache.flush_all()
+ return RetType.JSONSUCCESS
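
Every endpoint in this file follows the same pattern: @ApiReq() registers the function under a URL derived from its name (underscores become slashes, so admin_FlushAllMemcache is served at /api/admin/FlushAllMemcache), @RequireAdmin rejects non-admin callers, and the return value is a RetType function, optionally in a tuple with its arguments. A sketch of a hypothetical endpoint in the same style:

    @ApiReq()      # would be registered at /api/admin/Ping (illustrative, not in this commit)
    @RequireAdmin
    def admin_Ping(req):
        msg = req.get('msg')
        if not msg: return RetType.JSONFAIL, {'text': 'msg is required'}
        return RetType.JSONSUCCESS, {'echo': msg}
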
150 melya/datamodel.py
@@ -0,0 +1,150 @@
+import google.appengine.ext.db as db
+from utils import getGitBlobHash, getStrSortableHexNum
+
+class DB_User(db.Model):
+ """
+ Let GAE pick the key id, and use that as the user id.
+ """
+ dateAdded = db.DateTimeProperty(auto_now_add=True)
+ name = db.StringProperty()
+
+class DB_UserLoginAssoc(db.Model):
+ """
+ User login associations, e.g. google login/facebook/twitter...
+ the key name is 'g'+(google user id) for google login, 'f'+fbid, 't'+twitter id
+ """
+ uid = db.IntegerProperty() # key id of DB_User - no need for reference, since it's always DB_User's id
+
+class DB_UserData(db.Expando):
+ """
+ parent is DB_User
+ key_name is the info type, e.g. 'emails', etc...
+ all properties are dynamic
+ """
+ pass
+
+class DB_Domains(db.Expando):
+ """
+ GAE picks an id
+ """
+ name = db.StringProperty() # the name that is passed to the client
+ regex = db.StringProperty()
+ defaultTitle = db.StringProperty()
+ order = db.FloatProperty()
+ dateUpdated = db.DateTimeProperty(auto_now=True)
+
+ #_compiledRegex = None
+
+class DB_Pages(db.Expando):
+ domainName = db.StringProperty() # corresponds to the domain's name (key_name)
+ regex = db.StringProperty()
+ fileName = db.StringProperty()
+ order = db.FloatProperty()
+ dateUpdated = db.DateTimeProperty(auto_now=True)
+ flags = db.StringProperty() # special flags (comma separated) like 'jinja' and 'cache'
+ #_compiledRegex = None
+ #_parsedFlagList = frozenset of flags
+
+class DB_FileContent(db.Model):
+ """ The key_name is the hash (github sha1) hash of the data - getGitBlobHash()
+ """
+ date_added = db.DateTimeProperty(auto_now_add=True)
+ data = db.BlobProperty()
+
+ @classmethod
+ def CheckAndSave(cls, data):
+ """Return True,key if saved, otherwise False, Key if already exists."""
+ new_hash = getGitBlobHash(data)
+ theDataKey = cls.all(keys_only=True).filter('__key__ =', db.Key.from_path(u'DB_FileContent', new_hash)).get()
+ if theDataKey: return False, theDataKey
+ theDataKey = cls(key_name = new_hash, data = db.Blob(data)).put()
+ return True, theDataKey
+
+ @classmethod
+ def GetEmptyKey(cls):
+ return db.Key.from_path(u'DB_FileContent', u'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391')
+
+
+class DB_FileVersion(db.Model):
+ """
+ parent is DB_FileContent - so we can search with key_only to get the data
+ key_name is the version (same as version below)
+ """
+ filename = db.StringProperty() # the file name with path 'js/myfile.coffee' - shouldn't contain ':'
+ version = db.IntegerProperty() # the version generated by DB_JV_AtomicCounter
+ dateAdded = db.DateTimeProperty(auto_now_add=True)
+ generatedFrom = db.SelfReferenceProperty()
+ uid = db.IntegerProperty() # the id of DB_User
+ tags = db.StringListProperty() # tags should not be '_'
+ # the special index - we do this, so we don't have to have the user create an index entry in index.yaml
+ index = db.StringListProperty()
+ # otherwise we would need this index:
+ #- kind: DB_FileVersion
+ # properties:
+ # - name: filename
+ # - name: tags
+ # - name: version
+ # direction: desc
+
+
+ def updateIndex(self): # Make sure to call this before saving if not using db.put or db.put_async
+ # could use more than base 16, but don't care that much.
+ self.index = ['%s:%s:%s' % (self.filename, x, getStrSortableHexNum(self.version)) for x in self.tags]
+
+ def put(self, **kwargs):
+ self.updateIndex()
+ super(DB_FileVersion, self).put(**kwargs)
+
+ @classmethod
+ def getMostRecent(cls, filename, tag, keys_only=False):
+ max1 = '%s:%s~' % (filename, tag)
+ min1 = '%s:%s:' % (filename, tag)
+ return cls.all(keys_only=keys_only).filter('index >', min1).filter('index <', max1).order('-index').get()
+ #return cls.all(keys_only=keys_only).filter('filename =', fn).filter('tags =', 'a').order('-version').get()
+
+ @classmethod
+ def getSpecificVersion(cls, filename, tag, version, keys_only=False):
+ indexKey = '%s:%s:%s' % (filename, tag, getStrSortableHexNum(version))
+ return cls.all(keys_only=keys_only).filter('index =', indexKey).get()
+ #return cls.all(keys_only=keys_only).filter('filename =', filename).filter('tags =',tag).filter('version =', version).get()
+
+
+
+class DB_FileBuild(db.Model):
+ """
+ This is the generated version of the file used for caching (server and client) purposes
+ The key_name is the filename (from DB_FileVersion) + ':' + version tag ('z' or 'a' for now) + ':' + cacheVerNum
+ except for the latest version which doesn't have the last cacheVerNum (and doesn't have the data either)
+ """
+ data = db.BlobProperty() # the actual generated file
+ hashVal = db.StringProperty() # the getGitBlobHash() of data
+ cacheVerNum = db.IntegerProperty() # the earliest version number (fver) that had this hashVal
+ fileVerCheckTime = db.IntegerProperty() # the latest version (fver) at which we checked to see if data changed.
+ dateGenerated = db.DateTimeProperty(auto_now_add=True)
+
+
+class DB_JV_AtomicCounter(db.Model):
+ """
+ An atomic counter for file versions. It's slow, but it's hardly used, so it's ok.
+ key_name is counterName
+ currently using "fver" and "processCfgVer"
+ """
+ counter = db.IntegerProperty()
+
+ @classmethod
+ def GetNextCounter(cls, counterName):
+ def tx():
+ temp = cls.get_by_key_name(counterName)
+ if not temp: temp = cls(key_name=counterName, counter=0)
+ temp.counter += 1
+ temp.put()
+ return temp.counter
+ return db.run_in_transaction_custom_retries(10, tx)
+
+ @classmethod
+ def GetMostRecentlyReturnedCounterValue(cls, counterName):
+ temp = cls.get_by_key_name(counterName)
+ if not temp: return cls.GetNextCounter(counterName) # execute once if it's the first time
+ return temp.counter
+
+
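
The index list on DB_FileVersion deserves a worked example: updateIndex() packs filename, tag, and a sortable hex version into one string property, so both lookup styles need only a single indexed field. Assuming getStrSortableHexNum() zero-pads to a fixed width (utils.py is not shown in this diff, so the pad width below is a guess), version 26 of js/app.js with tags ['z', 'a'] would produce:

    # index = ['js/app.js:z:00001a', 'js/app.js:a:00001a']
    # getMostRecent('js/app.js', 'a') then scans  'js/app.js:a:' < index < 'js/app.js:a~'
    # in descending order and takes the first hit; every 'js/app.js:a:...' entry sorts
    # inside that range because ':' is below '~' in ASCII.
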
415 melya/fileapi.py
@@ -0,0 +1,415 @@
+from handlerhelpers import ApiReq, RetType, getUserAndIsAdmin, RequireAdmin, RequireAdminRaw
+import google.appengine.ext.db as db
+from google.appengine.api import users
+from utils import getGitBlobHash, getAllFromFromQuery,DB_SimpleToDict,chunks
+from collections import defaultdict
+from gaesessions import get_current_session
+import datamodel,datetime,logging, os,json
+from filegen import forceCacheRebuild
+
+_fileVerKey = 'fver'
+
+def createUserIfNeeded(req):
+ guser = users.get_current_user()
+ if not guser:
+ get_current_session().terminate()
+ return False
+
+ usergkey = 'g'+str(guser.user_id())
+ res = datamodel.DB_UserLoginAssoc.get_by_key_name(usergkey)
+ if not res:
+ muser = datamodel.DB_User(name=guser.nickname())
+ muser.put()
+ res = datamodel.DB_UserLoginAssoc(key_name=usergkey, uid = muser.key().id())
+ res.put()
+ res2 = datamodel.DB_UserData(parent=muser, key_name='emails')
+ res2.emails = [guser.email()]
+ res2.put()
+ else:
+ muser = datamodel.DB_User.get_by_id(res.uid)
+
+ get_current_session()['uid'] = muser.key().id()
+
+ return muser
+
+@ApiReq(allowGet=True)
+def admin_login(req):
+ muser = createUserIfNeeded(req)
+ if not muser: return RetType.RAW,("<a href=\"%s\">Login</a>." % users.create_login_url("/api/admin/login"))
+ if muser and users.is_current_user_admin(): return RetType.REDIRECT, '/api/admin/DoAdminStuff'
+ return RetType.RAW, 'Logged in'
+
+@ApiReq(allowGet=True)
+@RequireAdminRaw
+def admin_logout(req):
+ return RetType.REDIRECT, users.create_logout_url('/')
+
+@ApiReq()
+@RequireAdmin
+def admin_isAdmin(req):
+ return RetType.JSONSUCCESS
+
+# -------------- Upload Files...
+@ApiReq()
+@RequireAdminRaw
+def admin_UploadFilePage(req):
+ return RetType.RAW, """
+<form method='POST' enctype='multipart/form-data' action='/api/admin/UploadFileHandler'>
+Filename: <input type="text" name="filename"><br>
+<input type=file name="content"><br>
+<input type=submit value="Upload">
+</form>
+ """
+
+@ApiReq(allowGet=True)
+@RequireAdminRaw
+def admin_DoAdminStuff(req):
+ cmds_POST = [
+ ('RestoreFromBackupZip', 'Restore all files from backup'),
+ ('RestoreFromFileDomainAndPages', 'Restore all page and domain stuff in datastore'),
+ ('UploadFilePage', 'Upload a file'),
+ ('invalidateFileBuilds', 'Invalidate Builds'),
+ ('FlushAllMemcache', 'Flush all memcache'),
+ ('CreateAllFileZip','Create a zip file with all the files and all the versions'),
+ ('CreateBackupOfDomainAndPages', 'Create a file (json) with all the current page domains in the datastore'),
+ ('clearAllBuildData','Clear all file builds in case of problems'),
+
+ #('CreateBackupZipForTag', 'Create backup for a specific tag'),
+ ]
+ cmds_GET = [
+ ('/account/admin/editfiles','Edit files'),
+ ('/api/admin/setCurrentSessionVer?ver=-1','Switch to dev mode for files (most recent)'),
+ ('/api/admin/setCurrentSessionVer','Switch to live mode for files (head -a- version)'),
+ ('/api/admin/logout','Logout'),
+ ]
+ html_post = ["""<form method="POST" action="/api/admin/%s"><input type=submit value="%s"></form>""" % x for x in cmds_POST]
+ html_get = ["""<a href="%s">%s</a><br/>""" % x for x in cmds_GET]
+ return RetType.RAW, '\n'.join(html_post + html_get)
+
+@ApiReq()
+@RequireAdmin
+def admin_UploadFileHandler(req, user): # user is passed by RequireAdmin
+ fn = req.get('filename')
+ if not fn: return RetType.JSONFAIL, {'text':'No filename'}
+ content = req.get('content', None)
+ if not content or len(content) == 0:
+ if not req.get('allowemptyfile') or content != '':
+ return RetType.JSONFAIL, {'text':'No content or empty content'}
+
+
+ if isinstance(content, unicode):
+ logging.info('content is unicode, converting to 8bit using utf-8')
+ content = content.encode('utf8')
+ else:
+ content = str(content) # just in case?
+ new_hash = getGitBlobHash(content)
+ theDataKey = datamodel.DB_FileContent.all(keys_only=True).filter('__key__ =', db.Key.from_path(u'DB_FileContent', new_hash)).get()
+
+ if not theDataKey:
+ theDataKey = datamodel.DB_FileContent(key_name = new_hash, data = db.Blob(content)).put()
+ hashAlreadyExists = False
+ else:
+ hashAlreadyExists = True
+
+ latestVersion = datamodel.DB_FileVersion.getMostRecent(fn, 'z', keys_only=True)
+
+ if latestVersion and latestVersion.parent().name() == new_hash:
+ return RetType.JSONFAIL, {'text':'Failed because the latest version has the same hash'}
+
+ genFromFile = req.get('generatedFromFile')
+ genFromVer = req.get('generatedFromVer')
+
+ if genFromFile and genFromVer:
+ genFrom = datamodel.DB_FileVersion.getSpecificVersion(genFromFile, 'z', int(genFromVer), keys_only=True)
+ if not genFrom:
+ return RetType.JSONFAIL, {'text':'Generated from version doesn\'t exist'}
+ else:
+ genFrom = None
+
+ isFirstVersion = False if latestVersion else True
+
+ nextVersionNum = datamodel.DB_JV_AtomicCounter.GetNextCounter(_fileVerKey)
+
+ if isFirstVersion:
+ tags = ['z', 'a']
+ else:
+ tags = ['z']
+
+ newFileVer = datamodel.DB_FileVersion(parent=theDataKey, key_name=str(nextVersionNum),
+ filename=fn,version=nextVersionNum,uid=user.key().id(),tags=tags,
+ generatedFrom=genFrom)
+
+
+ newFileVer.put()
+
+ forceCacheRebuild(fn, 'z')
+
+ return RetType.JSONSUCCESS, {'ver': nextVersionNum, 'hash':new_hash, 'isFirstVersion':isFirstVersion, 'hashAlreadyExists':hashAlreadyExists, 'filename':fn}
+
+@ApiReq()
+@RequireAdmin
+def admin_invalidateFileBuilds(req):
+ # this will cause all the builds to be redone and updated, as needed.
+
+ # first, increment the file counter.
+ nextVersionNum = datamodel.DB_JV_AtomicCounter.GetNextCounter(_fileVerKey)
+
+ # next, force all the processes to get that new version number.
+ # so when filegen.getCurCacheVer gets called next time, the file version (fv) will be the new value
+ from inmemconfig import InAppMemConfig
+ pcd = InAppMemConfig.ForceVersionIncrement()
+
+ # that should be it
+ return RetType.JSONSUCCESS, {'configVer':pcd, 'fileVer':nextVersionNum}
+
+@ApiReq()
+@RequireAdmin
+def admin_clearAllBuildData(req):
+ all = getAllFromFromQuery(datamodel.DB_FileBuild.all(keys_only=True))
+
+ for x in chunks(all, 1000):
+ db.delete(x)
+
+ nextVersionNum = datamodel.DB_JV_AtomicCounter.GetNextCounter(_fileVerKey)
+
+ from inmemconfig import InAppMemConfig
+ InAppMemConfig.ForceVersionIncrement()
+
+ return RetType.JSONSUCCESS
+
+@ApiReq()
+@RequireAdmin
+def admin_getCleanFileContent(req):
+ fn = req.get('filename')
+ tag = 'a' if req.get('head') else 'z'
+ if req.get('ver'):
+ res = datamodel.DB_FileVersion.getSpecificVersion(fn, tag, int(req.get('ver')), keys_only=True)
+ else:
+ res = datamodel.DB_FileVersion.getMostRecent(fn, tag, keys_only=True)
+ if not res: return RetType.JSONFAIL, {'text':'File version not found'}
+ theDataEnt = db.get(res.parent())
+ if not theDataEnt: return RetType.JSONFAIL, {'text':'File version not found2'}
+ return RetType.JSONSUCCESS, {'data':theDataEnt.data}
+
+@ApiReq()
+@RequireAdmin
+def admin_adminTouchFileVersion(req):
+ fn = req.get('filename')
+ if not fn: return RetType.JSONFAIL, {'text':'No filename'}
+ latestVersion = datamodel.DB_FileVersion.getMostRecent(fn, 'z')
+ if not latestVersion: return RetType.JSONFAIL
+ nextVersionNum = datamodel.DB_JV_AtomicCounter.GetNextCounter(_fileVerKey)
+ latestVersion.version = nextVersionNum
+ latestVersion.put()
+ return RetType.JSONSUCCESS, {'newVer':nextVersionNum}
+
+@ApiReq()
+@RequireAdmin
+def admin_getAllFileVersions(req):
+ a = getAllFromFromQuery(datamodel.DB_FileVersion.all().order('version'))
+
+ d = defaultdict(list)
+ for x in a: d[x.filename].append([x.version]+x.tags)
+ #for x in a: d[x.filename].append(x.version)
+ return RetType.JSONSUCCESS, {'files':d}
+
+@ApiReq()
+@RequireAdmin
+def admin_setFileVersionTag(req):
+ fn = req.get('filename')
+ res = datamodel.DB_FileVersion.getSpecificVersion(fn, 'z', int(req.get('ver')))
+ if not res: return RetType.JSONFAIL, {'text':'File version not found'}
+
+ addtag = req.get('addtag')
+ deltag = req.get('deltag')
+ delversion = req.get('delversion')
+ if addtag:
+ if addtag in res.tags:
+ return RetType.JSONFAIL, {'text':'Tag already in file version'}
+ res.tags.append(addtag)
+ res.put()
+ return RetType.JSONSUCCESS
+
+ if deltag:
+ if deltag not in res.tags:
+ return RetType.JSONFAIL, {'text':'Tag not in file version'}
+ res.tags.remove(deltag)
+ res.put()
+ return RetType.JSONSUCCESS
+
+ if delversion and delversion == str(res.version):
+ # delete both datamodel.DB_FileVersion and datamodel.DB_FileContent if no one else is using it (and no tags)
+ if len(res.tags) > 1:
+ return RetType.JSONFAIL, {'text':'File is tagged, can\'t delete'}
+ theContentKey = res.key().parent()
+ theHash = theContentKey.name()
+
+ db.delete_async(res.key())
+
+ usedByOtherResult = (RetType.JSONSUCCESS, {'text':'Version deleted, but content was not deleted since it is used by another version'})
+ others = datamodel.DB_FileVersion.all(keys_only=True).ancestor(theContentKey).fetch(2)
+ if len(others) != 1: return usedByOtherResult
+ if others[0] != res.key(): return usedByOtherResult
+
+ db.delete_async(theContentKey)
+
+ return RetType.JSONSUCCESS
+
+ return RetType.JSONFAIL, {'text': 'Nothing to do?'}
+
+@ApiReq()
+@RequireAdmin
+def admin_CreateAllFileZip(req):
+ import cStringIO
+ import zipfile
+ zipstream = cStringIO.StringIO()
+ zf = zipfile.ZipFile(zipstream, 'w', zipfile.ZIP_DEFLATED)
+
+ allFiles = getAllFromFromQuery(datamodel.DB_FileVersion.all())
+
+ def cleanDate(dd): return str(dd).replace(':', '.').replace(' ', '-')
+
+ for x in allFiles:
+ content = x.parent().data
+ aa = x.filename.rsplit('.', 1)
+ aa.insert(1, '%s_%s%s' % (str(x.version).zfill(4), cleanDate(x.dateAdded), '_a' if 'a' in x.tags else '') )
+ tfn = '.'.join(aa)
+ zf.writestr(str(tfn), content)
+
+ zf.close()
+
+ outfile = 'onlinefiles-' + cleanDate(datetime.datetime.now()) + '.zip'
+
+ res = zipstream.getvalue()
+ return RetType.HEADERSANDRAW, {'Content-Type':'application/zip', 'Content-Disposition': 'attachment; filename="'+outfile+'"' }, res
+
+
+@ApiReq()
+@RequireAdmin
+def admin_CreateBackupOfDomainAndPages(req):
+ aa = getAllFromFromQuery(datamodel.DB_Domains.all())
+ bb = getAllFromFromQuery(datamodel.DB_Pages.all())
+ toIgnore = {'dateUpdated'}
+ res_domains = [DB_SimpleToDict(x, toIgnore) for x in aa]
+ res_pages = [DB_SimpleToDict(x, toIgnore) for x in bb]
+ return RetType.JSONSUCCESS, {'domains':res_domains, 'pages':res_pages}
+
+@ApiReq()
+@RequireAdmin
+def admin_RestoreFromFileDomainAndPages(req):
+ if datamodel.DB_Domains.all().get() or datamodel.DB_Pages.all().get():
+ return RetType.JSONFAIL, {'text':'Can\'t restore if there are already domains or pages in the datastore'}
+ file = os.path.join(os.path.dirname(__file__), 'zips/defaultdomainandpages.json')
+ with open(file) as f:
+ jsonobj = json.load(f)
+ tosave = [datamodel.DB_Domains(**x) for x in jsonobj.get('domains')] + \
+ [datamodel.DB_Pages(**x) for x in jsonobj.get('pages')]
+ db.put(tosave)
+ return RetType.JSONSUCCESS
+
+
+@ApiReq()
+@RequireAdminRaw
+def admin_CreateBackupZipForTag(req):
+ import cStringIO
+ import zipfile
+ def cleanDate(dd): return str(dd).replace(':', '.').replace(' ', '-')
+
+ whichTag = req.get('tag')
+ if not whichTag: return RetType.RAW, 'Must specify tag'
+
+ allFiles = getAllFromFromQuery(datamodel.DB_FileVersion.all().filter('tags =', whichTag))
+ allFiles.sort(key=lambda x:x.version, reverse=True)
+
+ zipstream = cStringIO.StringIO()
+ zf = zipfile.ZipFile(zipstream, 'w', zipfile.ZIP_DEFLATED)
+
+ alreadyWritten = set()
+ for x in allFiles:
+ if x.filename in alreadyWritten: continue
+ alreadyWritten.add(x.filename)
+ content = x.parent().data # reads data from datastore... slow
+ zf.writestr(str(x.filename), content)
+
+ zf.close()
+ res = zipstream.getvalue()
+ outfile = str('onlinefiles-%s-%s.zip' % (whichTag, cleanDate(datetime.datetime.now())))
+ return RetType.HEADERSANDRAW, {'Content-Type':'application/zip', 'Content-Disposition': 'attachment; filename="'+outfile+'"' }, res
+
+
+@ApiReq()
+@RequireAdmin
+def admin_RestoreFromBackupZip(req, user):
+ import zipfile
+ file = os.path.join(os.path.dirname(__file__), 'zips/orig.zip')
+ zf = zipfile.ZipFile(file, 'r')
+
+ allFiles = getAllFromFromQuery(datamodel.DB_FileVersion.all().order('version'))
+ byName = {} # most recent version.
+ for x in allFiles: byName[x.filename] = x
+
+ extractStats = {}
+ filesInZip = zf.namelist()
+
+ forceFiles = req.get('forceFiles')
+ if forceFiles == '__all__':
+ forceFiles = set(filesInZip)
+ elif forceFiles:
+ forceFiles = set(forceFiles.split(','))
+ else:
+ forceFiles = set()
+
+ for fiz in filesInZip:
+ extractStats[fiz] = ''
+ data = zf.read(fiz)
+ new_hash = getGitBlobHash(data)
+ if fiz in byName:
+ curVer = byName[fiz]
+ oldHash = curVer.key().parent().name()
+
+ if oldHash == new_hash:
+ extractStats[fiz] += 'Same version already the head version in the system. '
+ continue
+ else:
+ extractStats[fiz] += 'Different version in zip file and system head. '
+ if fiz not in forceFiles: continue
+
+ theDataKey = datamodel.DB_FileContent.all(keys_only=True).filter('__key__ =', db.Key.from_path(u'DB_FileContent', new_hash)).get()
+ if not theDataKey:
+ theDataKey = datamodel.DB_FileContent(key_name = new_hash, data = db.Blob(data)).put()
+ extractStats[fiz] += 'Added to cache. '
+ else:
+ extractStats[fiz] += 'Already in cache. '
+
+ nextVersionNum = datamodel.DB_JV_AtomicCounter.GetNextCounter(_fileVerKey)
+ tags = ['z', 'a'] # make it head
+ newFileVer = datamodel.DB_FileVersion(parent=theDataKey, key_name=str(nextVersionNum),
+ filename=fiz,version=nextVersionNum,uid=user.key().id(),tags=tags)
+ newFileVer.put()
+ extractStats[fiz] += 'Added version %s with hash: %s ' % (nextVersionNum, new_hash)
+ #forceCacheRebuild(fiz, 'z')
+
+ zf.close()
+ return RetType.JSONSUCCESS, {'stats':extractStats}
+
+@ApiReq(allowGet=True)
+def admin_setCurrentSessionVer(req):
+ # if you only want to allow people who know the password to use other versions, set require_password
+ require_password = None # maybe put this in the DB somewhere?
+ if require_password and req.get('password') != require_password: return RetType.JSONFAIL
+
+ session = get_current_session()
+ if req.get('getver'):
+ if 'jvdevver' in session:
+ return RetType.JSONSUCCESS, {'version':session['jvdevver']}
+ return RetType.JSONSUCCESS, {'version':None}
+
+ vv = req.get('ver')
+ if not vv:
+ if 'jvdevver' in session:
+ del session['jvdevver']
+ return RetType.JSONSUCCESS, {'version':None}
+ session['jvdevver'] = vv
+ return RetType.JSONSUCCESS, {'version':vv}
+
+
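
The setCurrentSessionVer endpoint drives the head-versus-dev switch used by GeneralPageHandler later in this commit: storing 'jvdevver' = '-1' in the session makes page serving use the most recent 'z'-tagged file versions instead of the 'a' (head) ones. The request semantics, sketched:

    # GET /api/admin/setCurrentSessionVer?ver=-1   -> session['jvdevver'] = '-1' (dev mode, 'z' tag)
    # GET /api/admin/setCurrentSessionVer          -> clears 'jvdevver' (back to live mode, 'a' tag)
    # GET /api/admin/setCurrentSessionVer?getver=1 -> reports the currently stored value
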
122 melya/filegen.py
@@ -0,0 +1,122 @@
+import google.appengine.ext.db as db
+from utils import getGitBlobHash
+from google.appengine.api import memcache
+import datamodel, logging, re
+from inmemconfig import InAppMemConfig
+_memcache = memcache.Client()
+
+jvDollarEscapeRe = re.compile(r'(\$\$jv:([^\$]+)\$\$)')
+
+def getJsFileString(fn, tag):
+ # TODO: cache?
+ latestVersion = datamodel.DB_FileVersion.getMostRecent(fn, tag, keys_only=True)
+ if not latestVersion:
+ logging.error('File not found: %s %s' % (fn, tag))
+ return ''
+ theHash = latestVersion.parent().name()
+ theVersion = latestVersion.name()
+ theHashCacheKey = 'hashFileCache%s' % theHash
+ theDataEnt = _memcache.get(theHashCacheKey)
+ if not theDataEnt:
+ prnt = latestVersion.parent()
+ theDataEnt = db.get(prnt)
+ if theDataEnt:
+ theDataEnt = theDataEnt.data
+ _memcache.set(theHashCacheKey, theDataEnt)
+
+ if not theDataEnt:
+ logging.info('File not found2: %s %s' % (fn, tag))
+ return ''
+
+ def func(matchobj):
+ tt = matchobj.group(2)
+ if tt == 'fn': return str(fn)
+ elif tt == 'ver': return str(theVersion)
+ elif tt == 'maxver': return str(InAppMemConfig.Current().fileVersion)
+ elif tt == 'tag': return tag
+ elif tt.startswith('inc:'):
+ xx = tt[4:]
+ if xx.startswith('curver:'): # something like $$jv:curver:a:js/myfile.js$$
+ xx = xx.split(':')
+ return str(getCurCacheVer(xx[2], xx[1]))
+ return getJsFileString(xx, tag)
+ else:
+ return '$$UNKNOWN JV ESCAPE$$'
+ restext = jvDollarEscapeRe.sub(func, theDataEnt)
+ return restext
+
+def forceCacheRebuild(filename, tag):
+ """
+ This should be called on saving of files (on tag 'z')
+ """
+ return getCurCacheVer(filename, tag, forceRebuild = True)
+
+
+def getCurCacheVer(filename, tag, forceRebuild = False):
+ """
+ tag is from the DB_FileVersion.tags field. right now it's 'z' or 'a'
+ """
+ fv = InAppMemConfig.Current().fileVersion
+ buildkey = '%s:%s:' % (filename,tag) # the front... no version...
+ fvmckey = 'fcv:%s:%s' % (fv, buildkey) # file cache version
+ res = _memcache.get(fvmckey) # res is a curcachever
+ if not forceRebuild and res: return res # fast track.
+
+ fb = datamodel.DB_FileBuild.get_by_key_name(buildkey)
+ if not forceRebuild and fb:
+ if fb.fileVerCheckTime == fv: # simply evicted from memcache
+ _memcache.set(fvmckey, fb.cacheVerNum)
+ return fb.cacheVerNum
+
+ data = getJsFileString(filename, tag)
+ hashVal = getGitBlobHash(data)
+ if fb.hashVal == hashVal:
+ # nothing changed... update fileVerCheckTime to current version
+ fb.fileVerCheckTime = fv
+ _memcache.set(fvmckey, fb.cacheVerNum)
+ db.put_async(fb) # don't care when we do it since it is idempotent
+ return fb.cacheVerNum
+
+ # it changed... fall through to update it.
+ else:
+ data = getJsFileString(filename, tag)
+ hashVal = getGitBlobHash(data)
+
+ theKey = '%s:%s:%s' % (filename, tag, fv)
+ memck = 'datagen' + theKey
+
+ # create a new build with the current specific version
+ fb = datamodel.DB_FileBuild(key_name = theKey, hashVal=hashVal, cacheVerNum=fv, fileVerCheckTime=fv, data=data)
+ # update the non-version one to point to the most current one.
+ fb2 = datamodel.DB_FileBuild(key_name = buildkey, hashVal=hashVal, cacheVerNum=fv, fileVerCheckTime=fv)
+ db.put([fb,fb2])
+
+ _memcache.set(fvmckey, fv) # fast track for this function...
+ _memcache.set(memck, data) # fast track for getCurCacheBlob
+ return fv
+
+def getCurCacheBlob(filename, tag, cacheVerNum, tryLowerNum=True):
+ theKey = '%s:%s:%s' % (filename, tag, cacheVerNum)
+ memck = 'datagen' + theKey
+ res = _memcache.get(memck)
+ if res: return res
+ fb = datamodel.DB_FileBuild.get_by_key_name(theKey)
+ if not fb:
+ if tryLowerNum:
+ cacheVerNum = getCurCacheVer(filename, tag)
+ return getCurCacheBlob(filename, tag, cacheVerNum, False)
+ logging.error('Version not found %s %s %s' % (filename, tag, cacheVerNum))
+ return '' # TODO: something smarter? exception?
+ _memcache.set(memck, fb.data)
+ return fb.data
+
+def getRawFileData(filename, tag):
+ # TODO: add caching
+ latestVersion = datamodel.DB_FileVersion.getMostRecent(filename, tag, keys_only=True)
+ prnt = latestVersion.parent()
+ theDataEnt = db.get(prnt)
+ if theDataEnt: return theDataEnt.data
+ return '' # TODO: exception?
+
+
+
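
The $$jv:...$$ escapes give stored files a small build-time template language, expanded recursively by getJsFileString() above. A sketch of a stored file body before expansion (the file names are illustrative):

    var BUILD = {file: '$$jv:fn$$', ver: '$$jv:ver$$', tag: '$$jv:tag$$'};
    $$jv:inc:js/util.js$$

Here fn expands to the file's own name, ver to its stored version number, maxver to the current global file version, tag to the build tag, inc:js/util.js to the expanded body of js/util.js for the same tag, and an inc:curver:a:js/util.js form to getCurCacheVer('js/util.js', 'a').
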
273 melya/handlerhelpers.py
@@ -0,0 +1,273 @@
+import email.Utils, mimetypes, time, re, inspect
+import webapp2,json,logging,os, zipfile
+from google.appengine.api import users,memcache
+import google.appengine.ext.db as db
+import datamodel
+from inmemconfig import InAppMemConfig
+from jintemps import renderWithJinja
+from filegen import getCurCacheBlob, getCurCacheVer, getRawFileData
+from gaesessions import get_current_session
+from functools import wraps
+
+jvDollarEscapeRe = re.compile(r'(\$\$jv:([^\$]+)\$\$)')
+_memcache = memcache.Client()
+_webFuncs = {}
+_webFuncsGet = {}
+
+
+class RetType:
+ """
+ This was going to be a simple enum, but I decided I could simply make it the functions themselves.
+ """
+ @classmethod
+ def JSONSUCCESS(cls, self, json_obj = None):
+ if json_obj:
+ json_obj['success'] = True
+ return self.response.out.write(json.dumps(json_obj, check_circular=False, separators=(',',':')))
+ else:
+ return self.response.out.write('{"success":true}')
+
+ @classmethod
+ def JSONFAIL(cls, self, json_obj = None):
+ if json_obj:
+ json_obj['success'] = False
+ return self.response.out.write(json.dumps(json_obj, check_circular=False, separators=(',',':')))
+ else:
+ return self.response.out.write('{"success":false}')
+
+ @classmethod
+ def REDIRECT(cls, self, redir = None):
+ if not redir:
+ return self.redirect('/')
+ else:
+ return self.redirect(redir)
+
+ @classmethod
+ def RAW(cls, self, data = None):
+ return self.response.out.write(data)
+
+ @classmethod
+ def NOTFOUND(cls, self):
+ return self.response.set_status(404)
+
+ @classmethod
+ def HEADERSANDRAW(cls, self, headers=None, data=None):
+ if headers:
+ for x,y in headers.items():
+ self.response.headers[x] = y
+ self.response.out.write(data)
+
+def getUserAndIsAdmin(req):
+ """
+ return (User, isAdmin bool)
+ """
+ guser = users.get_current_user()
+ if not guser: return None, False
+ usergkey = 'g'+str(guser.user_id())
+ res = datamodel.DB_UserLoginAssoc.get_by_key_name(usergkey)
+ if not res: return None, False
+ muser = datamodel.DB_User.get_by_id(res.uid)
+ return muser, users.is_current_user_admin()
+
+def GenerateRequireAdminLoginDelegate(failReturnValue):
+ def RequireAdminFunc(func):
+ expectsUser = 'user' in inspect.getargspec(func)[0]
+ @wraps(func)
+ def wrapped(req):
+ user, isAdmin = getUserAndIsAdmin(req)
+ if not isAdmin: return failReturnValue
+ if expectsUser:
+ return func(req, user=user)
+ return func(req)
+ return wrapped
+ return RequireAdminFunc
+
+RequireAdmin = GenerateRequireAdminLoginDelegate((RetType.JSONFAIL, {'needlogin':True}))
+RequireAdminRaw = GenerateRequireAdminLoginDelegate((RetType.RAW, 'Requires admin login'))
+
+# the decorator
+def ApiReq(urlName = None, allowGet=False):
+ if urlName and not isinstance(urlName, str):
+ raise Exception('Decorator used without parentheses. Please add parentheses, i.e. "@ApiReq()"')
+ def wwrap(func):
+ uu = urlName
+ if not uu:
+ uu = func.__name__.replace('_', '/')
+ if uu in _webFuncs:
+ raise Exception('Duplicate Request Type', uu)
+ _webFuncs[uu] = func
+ if allowGet: _webFuncsGet[uu] = func
+ return func
+ return wwrap
+
+
+# these are the built in APIs
+import melya.fileapi
+import melya.adminapi
+
+
+def SetCachingHeadersForResponse(response, max_age = 600):
+ response.headers['Expires'] = email.Utils.formatdate(time.time() + max_age, usegmt=True)
+ response.headers['Cache-Control'] = 'public, max-age=%d' % max_age
+
+
+
+class ZipHandler(webapp2.RequestHandler):
+ """Request handler serving static files from zipfiles. - copied from zipserve"""
+ zipfile_cache = {}
+ def ServeFromZipFile(self, zipfilename, name):
+ """Helper for the GET request handler.
+
+ This serves the contents of file 'name' from zipfile
+ 'zipfilename', logging a message and returning a 404 response if
+ either the zipfile cannot be opened or the named file cannot be
+ read from it.
+
+ Args:
+ zipfilename: The name of the zipfile.
+ name: The name within the zipfile.
+ """
+
+ zipfile_object = self.zipfile_cache.get(zipfilename)
+ if zipfile_object is None:
+ try:
+ zipfile_object = zipfile.ZipFile(zipfilename)
+ except (IOError, RuntimeError, zipfile.BadZipfile), err:
+ logging.error('Can\'t open zipfile %s: %s', zipfilename, err)
+ zipfile_object = ''
+ self.zipfile_cache[zipfilename] = zipfile_object
+ if zipfile_object == '':
+ self.error(404)
+ self.response.out.write('Not found')
+ return
+ try:
+ data = zipfile_object.read(name)
+ except (KeyError, RuntimeError), err:
+ self.error(404)
+ self.response.out.write('Not found')
+ return
+ content_type, encoding = mimetypes.guess_type(name)
+ if content_type:
+ self.response.headers['Content-Type'] = content_type
+ self.SetCachingHeaders()
+ self.response.out.write(data)
+
+ MAX_AGE = 600
+ PUBLIC = True
+
+ def SetCachingHeaders(self):
+ SetCachingHeadersForResponse(self.response, self.MAX_AGE)
+
+
+class GeneralApiHandler(ZipHandler): #webapp2.RequestHandler):
+ def post(self, command):
+ return self.get(command, isPost=True)
+
+ def get(self, command, isPost=False):
+ InAppMemConfig.UpdateIfNeeded()
+ if command.startswith('z/'): return self.zipsrv(command)
+ if command.startswith('d/'): return self.dynsrv(command)
+
+ if isPost and command in _webFuncs:
+ cmd = _webFuncs[command]
+ elif command in _webFuncsGet:
+ cmd = _webFuncsGet[command]
+ else:
+ cmd = None
+
+ if cmd:
+ res = cmd(self.request)
+ if not res:
+ logging.error('Must return one of the functions from RetType')
+ return self.response.set_status(500)
+ if not isinstance(res, tuple):
+ return res(self)
+ return res[0](self, *res[1:])
+
+
+ return self.response.set_status(404)
+
+ def dynsrv(self, command):
+ spl = command.split('/', 2) # ['d', 'a1234', 'js/myfile.js']
+ if len(spl) != 3 or len(spl[1]) == 0 or len(spl[2]) == 0: return self.response.set_status(404)
+ tag = spl[1][0]
+ cacheVerNum = spl[1][1:]
+ fileName = spl[2]
+
+ if not cacheVerNum: cacheVerNum = getCurCacheVer(fileName, tag)
+ res = getCurCacheBlob(fileName, tag, cacheVerNum)
+
+ content_type, encoding = mimetypes.guess_type(fileName)
+ if content_type:
+ self.response.headers['Content-Type'] = content_type
+
+ #if fileName.endswith('.js'):
+ # self.response.headers['Content-Type'] = 'text/javascript'
+ #elif fileName.endswith('.css'):
+ # self.response.headers['Content-Type'] = 'text/css'
+
+ if tag == 'a': self.SetCachingHeaders()
+ return self.response.out.write(res)
+
+ def zipsrv(self, command):
+ _, prefix, name = command.split('/', 2) # ['z', 'a1234', 'js/myfile.js']
+ file = os.path.join(os.path.dirname(__file__), 'zips/'+ prefix + '.zip')
+ self.ServeFromZipFile(file, name)
+
+
+
+
+class GeneralPageHandler(webapp2.RequestHandler):
+ def get(self):
+ InAppMemConfig.UpdateIfNeeded()
+ session = get_current_session()
+
+ curHost = self.request.host
+ curDomain = self.resolveDomain(curHost)
+ if not curDomain:
+ # TODO: remove
+ self.response.out.write('Domain not found: %s' % curHost)
+ return
+
+
+ curDomainName = curDomain.name
+ curPath = self.request.path
+ curPage = None
+ for p in InAppMemConfig.Current().pages:
+ if (not p.domainName or p.domainName == curDomainName) and p._compiledRegex.match(curPath):
+ curPage = p
+ break
+
+ if not curPage:
+ # TODO: remove
+ self.response.out.write('Page not found: %s' % curPath)
+ return
+
+ # Serve the head ('a') assets unless the session pins jvdevver to '-1', which selects the 'z' tag.
+ isHead = not session or session.get('jvdevver') != '-1'
+
+ curTag = 'a' if isHead else 'z'
+
+ if 'jinja' in curPage._parsedFlagList:
+ res = renderWithJinja(curPage.fileName, isHead=isHead, curPage=curPage, curDomain=curDomain, config=InAppMemConfig.Current(), curTag=curTag)
+ elif 'melya' in curPage._parsedFlagList: # Melya's js parsing...
+ res = getCurCacheBlob(curPage.fileName, curTag, InAppMemConfig.Current().fileVersion)
+ else:
+ res = getRawFileData(curPage.fileName, curTag)
+
+ if isHead and 'cache' in curPage._parsedFlagList:
+ SetCachingHeadersForResponse(self.response)
+
+ content_type, encoding = mimetypes.guess_type(curPage.fileName)
+ if content_type:
+ self.response.headers['Content-Type'] = content_type
+
+
+ return self.response.out.write(res)
+
+ @classmethod
+ def resolveDomain(cls, curHost):
+ for d in InAppMemConfig.Current().domains:
+ if d._compiledRegex.match(curHost): return d
+ return None
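
A minimal sketch of how these handlers could be mounted; the route patterns are assumptions, since the real table lives in the application's entry module:

```python
# Assumed URL layout -- adjust to the app's actual routes.
import webapp2

application = webapp2.WSGIApplication([
    (r'/api/(.*)', GeneralApiHandler),  # z/..., d/..., and named commands
    (r'/.*', GeneralPageHandler),       # catch-all: domain + page regex lookup
], debug=False)
```
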
89 melya/inmemconfig.py
@@ -0,0 +1,89 @@
+# in memory config
+import datetime, logging, datamodel, re
+from google.appengine.api import memcache
+
+_memcache = memcache.Client() # shared module-level client; used only for plain get/set, which avoids the Client's per-instance CAS state
+
+
+# Rate-limit version checks so a busy instance doesn't hit the same memcache
+# entry on every request.
+_checkMemcacheFreq = datetime.timedelta(seconds=1) # minimum interval between checks
+
+
+class InAppMemConfig(object):
+ _nextCheckTime = datetime.datetime.min
+ _processCfgVer = -1 # invalid version
+ _memcachekey = 'processCfgVer'
+ _currentConfig = None
+ _lastDomainUpdate = datetime.datetime.min
+ _domainsCache = []
+ _lastPageUpdate = datetime.datetime.min
+ _pagesCache = []
+
+ def __init__(self):
+ self.configInitTime = datetime.datetime.now()
+ self.configVersion = InAppMemConfig._processCfgVer
+ self.fileVersion = datamodel.DB_JV_AtomicCounter.GetMostRecentlyReturnedCounterValue('fver') # read from cache?
+ #logging.info('init inappmemconfig: %s %s %s' % (self.configInitTime, self.configVersion, self.fileVersion))
+
+ self.domains = self.UpdateDomains()
+ self.pages = self.UpdatePages()
+
+ @classmethod
+ def UpdateDomains(cls):
+ res = datamodel.DB_Domains.all().order('-dateUpdated').get()
+ if not res or res.dateUpdated == cls._lastDomainUpdate: return cls._domainsCache
+ domains = datamodel.DB_Domains.all().order('order').fetch(1000)
+ for x in domains: x._compiledRegex = re.compile(x.regex) # compile them for faster access
+ cls._domainsCache = domains # a single attribute assignment is atomic, so readers see the old or the new list, never a partial one
+ cls._lastDomainUpdate = res.dateUpdated
+ return cls._domainsCache
+
+ @classmethod
+ def UpdatePages(cls):
+ res = datamodel.DB_Pages.all().order('-dateUpdated').get()
+ if not res or res.dateUpdated == cls._lastPageUpdate: return cls._pagesCache
+ pages = datamodel.DB_Pages.all().order('order').fetch(1000)
+ for x in pages:
+ x._compiledRegex = re.compile(x.regex) # compile them for faster access
+ x._parsedFlagList = frozenset(x.flags.split(',')) if x.flags else frozenset()
+ cls._pagesCache = pages # single atomic assignment, as with the domains cache above
+ cls._lastPageUpdate = res.dateUpdated
+ return cls._pagesCache
+
+
+ @classmethod
+ def Current(cls):
+ if cls._currentConfig:
+ return cls._currentConfig
+ return cls.UpdateIfNeeded()
+
+ @classmethod
+ def UpdateIfNeeded(cls):
+ # Only consult memcache once per _checkMemcacheFreq so that high load
+ # doesn't hammer the same memcache entry on every request.
+ curTime = datetime.datetime.now()
+ if curTime <= cls._nextCheckTime: return cls._currentConfig
+ cls._nextCheckTime = curTime + _checkMemcacheFreq
+ pcd = _memcache.get(cls._memcachekey)
+ if pcd and pcd == cls._processCfgVer:
+ # if it didn't change in memcache, just return it.
+ return cls._currentConfig
+ pcd = datamodel.DB_JV_AtomicCounter.GetMostRecentlyReturnedCounterValue('processCfgVer')
+ if pcd != cls._processCfgVer:
+ cls._processCfgVer = pcd
+ # create a new one.
+ cls._currentConfig = InAppMemConfig()
+
+ _memcache.set(cls._memcachekey, pcd)
+
+ return cls._currentConfig
+
+ @classmethod
+ def ForceVersionIncrement(cls):
+ pcd = datamodel.DB_JV_AtomicCounter.GetNextCounter('processCfgVer')
+ _memcache.set(cls._memcachekey, pcd)
+ return pcd
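
The intended invalidation flow, as a sketch: an admin edit bumps the counter, and each instance rebuilds its config the next time it checks (at most `_checkMemcacheFreq` later):

```python
# After editing DB_Domains / DB_Pages rows:
InAppMemConfig.ForceVersionIncrement()  # bump the datastore counter + memcache
cfg = InAppMemConfig.UpdateIfNeeded()   # rebuilds once the rate limit allows;
                                        # other instances follow within ~1s
```
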
+
+
+
+
41 melya/jintemps.py
@@ -0,0 +1,41 @@
+from jinja2 import Environment, BaseLoader, TemplateNotFound
+import google.appengine.ext.db as db
+import datamodel
+from inmemconfig import InAppMemConfig
+from filegen import getCurCacheVer # registered as a Jinja global below
+
+class JVJinjaLoader(BaseLoader):
+ def __init__(self, isHead):
+ self.isHead = isHead
+
+ def get_source(self, environment, template):
+ fn = template
+ latestVersion = datamodel.DB_FileVersion.getMostRecent(fn, 'a' if self.isHead else 'z', keys_only=True)
+ if not latestVersion: raise TemplateNotFound(template)
+ prnt = latestVersion.parent()
+ theDataEnt = db.get(prnt)
+ if not theDataEnt: raise TemplateNotFound(template)
+ curTime = InAppMemConfig.Current().configInitTime
+ data = unicode(theDataEnt.data, 'utf8') # for now, treat all stored file data as UTF-8
+ return data, fn, lambda: InAppMemConfig.Current().configInitTime == curTime # uptodate callback: returns False (forcing a reload) once a newer config generation exists
+
+def createJinjaEnv(isHead):
+ res = Environment(loader=JVJinjaLoader(isHead), autoescape=True, auto_reload=True)
+ res.globals['getCurCacheVer'] = getCurCacheVer
+ return res
+
+jinjaEnvHead = createJinjaEnv(True)
+jinjaEnv = createJinjaEnv(False)
+
+def renderWithJinja(template, isHead=True, **kwds):
+ if isHead:
+ template = jinjaEnvHead.get_template(template)
+ else:
+ template = jinjaEnv.get_template(template)
+ return template.render(**kwds)
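
Callers such as GeneralPageHandler above use it like this (the template path is the one shipped in defaultdomainandpages.json below; extra keywords flow straight into `Template.render`):

```python
html = renderWithJinja('tmpl/catchall.html', isHead=True,
                       config=InAppMemConfig.Current(), curTag='a')
```
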
66 melya/utils.py
@@ -0,0 +1,66 @@
+import hashlib
+import google.appengine.ext.db as db
+
+def getGitBlobHash(blob): # the same hash as Git uses...
+ return hashlib.sha1('blob %s\0%s' %(len(blob), blob)).hexdigest()
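
This reproduces Git's blob-object hash, so values can be cross-checked against the git CLI:

```python
# $ printf 'hello\n' | git hash-object --stdin
# ce013625030ba8dba906f756967f9e9ca394464a
assert getGitBlobHash('hello\n') == 'ce013625030ba8dba906f756967f9e9ca394464a'
```
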
+
+# database object pretty print
+def DB_PrettyPrint(t, prefix='', already_printed = None):
+ if already_printed is None:
+ already_printed = set()
+ if isinstance(t, list):
+ return "\n".join([DB_PrettyPrint(x, prefix + ' ', already_printed) for x in t])
+ res = prefix + (str(t.key().name() or t.key().id()) + "\n")
+ tl = list(t.properties())
+ if isinstance(t, db.Expando):
+ tl.extend(t.dynamic_properties())
+ for x in tl:
+ a = getattr(t,x)
+ if isinstance(a, db.Model):
+ kk = str(a.key())
+ res += prefix + " %s: %s\n" % (x, kk)
+ if kk not in already_printed: # only recurse into each entity once
+ already_printed.add(kk)
+ res += DB_PrettyPrint(a, prefix + ' ', already_printed)
+ else:
+ res += prefix + " %s: %s\n" % (x, a)
+ return res
+
+def DB_SimpleToDict(t, ignore = frozenset()):
+ res = {}
+ tl = list(t.properties())
+ if isinstance(t, db.Expando):
+ tl.extend(t.dynamic_properties())
+ for x in tl:
+ if x in ignore: continue
+ a = getattr(t,x)
+ res[x] = a
+ return res
+
+def getAllFromFromQuery(theQuery):
+ """Drains a query in 1000-row pages via cursors (works around the old
+ 1000-result fetch cap)."""
+ res = []
+ while True:
+ x = theQuery.fetch(1000)
+ res.extend(x)
+ if len(x) < 1000: break
+ theQuery.with_cursor(theQuery.cursor())
+ return res
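
Typical use, draining a query past the old single-fetch cap (the query itself is illustrative):

```python
import datamodel

all_pages = getAllFromFromQuery(datamodel.DB_Pages.all().order('order'))
```
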
+
+def chunks(l, n):
+ """Yield successive n-sized chunks from l."""
+ for i in xrange(0, len(l), n):
+ yield l[i:i+n]
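
For example:

```python
assert list(chunks(range(5), 2)) == [[0, 1], [2, 3], [4]]
```
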
+
+def getStrSortableHexNum(a):
+ """Returns a hex-string representation of the non-negative integer a whose
+ plain ASCII sort order matches numeric order -- useful for datastore key
+ names or string properties that must sort numerically."""
+ if a < 0: raise ValueError('Does not support negative numbers')
+ c = 0
+ t = a
+ while t > 0xf: # one leading '~' per hex digit beyond the first;
+ c += 1 # '~' (0x7e) sorts after every hex digit
+ t >>= 4
+ return ('~' * c) + "{0:x}".format(a)
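
The leading '~'s are what make plain string comparison agree with numeric order, since '~' sorts after every hex digit:

```python
nums = [5, 10, 16, 255, 4096]
keys = [getStrSortableHexNum(n) for n in nums]
assert keys == ['5', 'a', '~10', '~ff', '~~~1000']
assert keys == sorted(keys)  # ASCII order matches numeric order
```
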
BIN melya/zips/ace20.zip
Binary file not shown.
27 melya/zips/defaultdomainandpages.json
@@ -0,0 +1,27 @@
+{
+ "domains":[
+ {
+ "regex":".*",
+ "order":100000.0,
+ "name":"catchall",
+ "defaultTitle":"Powered by the Melya framework"
+ }
+ ],
+ "pages":[
+ {
+ "regex":".*",
+ "fileName":"tmpl/catchall.html",
+ "flags":"jinja",
+ "order":100000.0,
+ "domainName":""
+ },
+ {
+ "regex":"/favicon.ico",
+ "fileName":"favicon.ico",
+ "flags":"",
+ "order":99999.0,
+ "domainName":""
+ }
+ ],
+ "success":true
+}
BIN melya/zips/orig.zip
Binary file not shown.
