- bookmarklet and js improvements

commit 8aca9e3c25b3c027522a7b0b1b295250fc352be7 1 parent e4b1a93
@zemanel authored
Showing with 34,720 additions and 219 deletions.
  1. +20 −1 app/app.yaml
  2. +4 −2 app/config.py
  3. +2 −2 app/lib/dist/dateutil/__init__.py
  4. +3 −3 app/lib/dist/dateutil/easter.py
  5. +20 −21 app/lib/dist/dateutil/parser.py
  6. +13 −12 app/lib/dist/dateutil/relativedelta.py
  7. +48 −54 app/lib/dist/dateutil/rrule.py
  8. +53 −42 app/lib/dist/dateutil/tz.py
  9. +15 −15 app/lib/dist/dateutil/tzwin.py
  10. +2 −1  app/lib/dist/dateutil/zoneinfo/__init__.py
  11. BIN  app/lib/dist/dateutil/zoneinfo/zoneinfo-2010g.tar.gz
  12. +1 −7 app/local_settings.py
  13. +1 −3 app/main.py
  14. +46 −21 app/mood/handlers.py
  15. +44 −7 app/mood/jobs.py
  16. +1 −0  app/mood/models.py
  17. +1 −1  app/mood/tasks.py
  18. +26 −26 app/templates/bookmarklet.js
  19. +1 −1  buildout.cfg
  20. BIN  var/downloads/dist/python-dateutil-1.5.tar.gz
  21. BIN  var/downloads/google_appengine_1.4.2.zip
  22. +468 −0 var/parts/app_lib/site.py
  23. 0  var/parts/app_lib/sitecustomize.py
  24. +460 −0 var/parts/buildout/site.py
  25. 0  var/parts/buildout/sitecustomize.py
  26. +3 −0  var/parts/google_appengine/BUGS
  27. +132 −0 var/parts/google_appengine/LICENSE
  28. +123 −0 var/parts/google_appengine/README
  29. +778 −0 var/parts/google_appengine/RELEASE_NOTES
  30. +3 −0  var/parts/google_appengine/VERSION
  31. +71 −0 var/parts/google_appengine/appcfg.py
  32. +71 −0 var/parts/google_appengine/bulkload_client.py
  33. +71 −0 var/parts/google_appengine/bulkloader.py
  34. +8 −0 var/parts/google_appengine/demos/guestbook/app.yaml
  35. +79 −0 var/parts/google_appengine/demos/guestbook/guestbook.py
  36. +71 −0 var/parts/google_appengine/dev_appserver.py
  37. +16 −0 var/parts/google_appengine/google/__init__.py
  38. +16 −0 var/parts/google_appengine/google/appengine/__init__.py
  39. +16 −0 var/parts/google_appengine/google/appengine/api/__init__.py
  40. +659 −0 var/parts/google_appengine/google/appengine/api/api_base_pb.py
  41. +165 −0 var/parts/google_appengine/google/appengine/api/apiproxy_rpc.py
  42. +80 −0 var/parts/google_appengine/google/appengine/api/apiproxy_stub.py
  43. +635 −0 var/parts/google_appengine/google/appengine/api/apiproxy_stub_map.py
  44. +99 −0 var/parts/google_appengine/google/appengine/api/app_logging.py
  45. +796 −0 var/parts/google_appengine/google/appengine/api/appinfo.py
  46. +58 −0 var/parts/google_appengine/google/appengine/api/appinfo_errors.py
  47. +232 −0 var/parts/google_appengine/google/appengine/api/appinfo_includes.py
  48. +20 −0 var/parts/google_appengine/google/appengine/api/blobstore/__init__.py
  49. +278 −0 var/parts/google_appengine/google/appengine/api/blobstore/blobstore.py
  50. +873 −0 var/parts/google_appengine/google/appengine/api/blobstore/blobstore_service_pb.py
  51. +289 −0 var/parts/google_appengine/google/appengine/api/blobstore/blobstore_stub.py
  52. +152 −0 var/parts/google_appengine/google/appengine/api/blobstore/file_blob_storage.py
  53. +172 −0 var/parts/google_appengine/google/appengine/api/capabilities/__init__.py
  54. +411 −0 var/parts/google_appengine/google/appengine/api/capabilities/capability_service_pb.py
  55. +53 −0 var/parts/google_appengine/google/appengine/api/capabilities/capability_stub.py
  56. +20 −0 var/parts/google_appengine/google/appengine/api/channel/__init__.py
  57. +177 −0 var/parts/google_appengine/google/appengine/api/channel/channel.py
  58. +427 −0 var/parts/google_appengine/google/appengine/api/channel/channel_service_pb.py
  59. +173 −0 var/parts/google_appengine/google/appengine/api/channel/channel_service_stub.py
  60. +132 −0 var/parts/google_appengine/google/appengine/api/croninfo.py
  61. +2,233 −0 var/parts/google_appengine/google/appengine/api/datastore.py
  62. +133 −0 var/parts/google_appengine/google/appengine/api/datastore_admin.py
  63. +343 −0 var/parts/google_appengine/google/appengine/api/datastore_entities.py
  64. +113 −0 var/parts/google_appengine/google/appengine/api/datastore_errors.py
  65. +1,282 −0 var/parts/google_appengine/google/appengine/api/datastore_file_stub.py
  66. +1,751 −0 var/parts/google_appengine/google/appengine/api/datastore_types.py
  67. +112 −0 var/parts/google_appengine/google/appengine/api/dosinfo.py
  68. +16 −0 var/parts/google_appengine/google/appengine/api/files/__init__.py
  69. +4,024 −0 var/parts/google_appengine/google/appengine/api/files/file_service_pb.py
  70. +384 −0 var/parts/google_appengine/google/appengine/api/files/file_service_stub.py
  71. +988 −0 var/parts/google_appengine/google/appengine/api/images/__init__.py
  72. +36 −0 var/parts/google_appengine/google/appengine/api/images/images_not_implemented_stub.py
  73. +2,520 −0 var/parts/google_appengine/google/appengine/api/images/images_service_pb.py
  74. +506 −0 var/parts/google_appengine/google/appengine/api/images/images_stub.py
  75. +16 −0 var/parts/google_appengine/google/appengine/api/labs/__init__.py
  76. +72 −0 var/parts/google_appengine/google/appengine/api/labs/taskqueue/__init__.py
  77. +342 −0 var/parts/google_appengine/google/appengine/api/lib_config.py
  78. +20 −0 var/parts/google_appengine/google/appengine/api/logservice/__init__.py
  79. +475 −0 var/parts/google_appengine/google/appengine/api/logservice/log_service_pb.py
  80. +44 −0 var/parts/google_appengine/google/appengine/api/logservice/logservice.py
  81. +1,182 −0 var/parts/google_appengine/google/appengine/api/mail.py
  82. +55 −0 var/parts/google_appengine/google/appengine/api/mail_errors.py
  83. +661 −0 var/parts/google_appengine/google/appengine/api/mail_service_pb.py
  84. +237 −0 var/parts/google_appengine/google/appengine/api/mail_stub.py
  85. +1,045 −0 var/parts/google_appengine/google/appengine/api/memcache/__init__.py
  86. +3,134 −0 var/parts/google_appengine/google/appengine/api/memcache/memcache_service_pb.py
  87. +331 −0 var/parts/google_appengine/google/appengine/api/memcache/memcache_stub.py
  88. +21 −0 var/parts/google_appengine/google/appengine/api/namespace_manager/__init__.py
  89. +122 −0 var/parts/google_appengine/google/appengine/api/namespace_manager/namespace_manager.py
  90. +32 −0 var/parts/google_appengine/google/appengine/api/oauth/__init__.py
  91. +190 −0 var/parts/google_appengine/google/appengine/api/oauth/oauth_api.py
  92. +20 −0 var/parts/google_appengine/google/appengine/api/prospective_search/__init__.py
  93. +99 −0 var/parts/google_appengine/google/appengine/api/prospective_search/error_pb.py
  94. +540 −0 var/parts/google_appengine/google/appengine/api/prospective_search/prospective_search.py
  95. +61 −0 var/parts/google_appengine/google/appengine/api/prospective_search/prospective_search_admin.py
  96. +1,962 −0 var/parts/google_appengine/google/appengine/api/prospective_search/prospective_search_pb.py
  97. +560 −0 var/parts/google_appengine/google/appengine/api/prospective_search/prospective_search_stub.py
  98. +303 −0 var/parts/google_appengine/google/appengine/api/queueinfo.py
  99. +71 −0 var/parts/google_appengine/google/appengine/api/quota.py
  100. +62 −0 var/parts/google_appengine/google/appengine/api/runtime.py
  101. +34 −0 var/parts/google_appengine/google/appengine/api/taskqueue/__init__.py
Sorry, we could not display the entire diff because too many files (4,099) changed.
21 app/app.yaml
@@ -24,7 +24,13 @@ error_handlers:
file: templates/error_handlers/timeout.html
handlers:
-
+
+- url: /admin/.*
+ script: $PYTHON_LIB/google/appengine/ext/admin
+ login: admin
+
+
+
- url: /jobs/.*
script: main.py
login: admin
@@ -51,3 +57,16 @@ handlers:
- url: /.*
script: main.py
+
+admin_console:
+ pages:
+ - name: Console
+ url: /admin/interactive
+ - name: Memcache
+ url: /admin/memcache
+ - name: XMPP
+ url: /admin/xmpp
+ - name: Inbound Mail
+ url: /admin/inboundmail
+
+
6 app/config.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
"""App configuration."""
import os
+DEVEL = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# fill in local_settings.py
ALCHEMY_API_KEY = None
@@ -14,6 +15,7 @@
'tipfy' : {}
}
-config['mood.alchemyapi'] = {
- 'API_KEY': ALCHEMY_API_KEY,
+config['mood'] = {
+ 'CACHE_ENABLED': not DEVEL,
+ 'ALCHEMYAPI_KEY': ALCHEMY_API_KEY,
}
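The old 'mood.alchemyapi' section is folded into a single 'mood' config block, and caching is keyed off the new DEVEL flag so nothing is cached on the dev server. A minimal sketch of how a tipfy handler reads these values, mirroring the lookups added in app/mood/handlers.py and app/mood/tasks.py below (the handler name here is hypothetical, not part of the commit):

from tipfy.app import Response
from tipfy.handler import RequestHandler

class MoodConfigExample(RequestHandler):
    def get(self):
        cfg = self.app.config['mood']
        cache_enabled = cfg['CACHE_ENABLED']  # False on the dev server, where DEVEL is True
        api_key = cfg['ALCHEMYAPI_KEY']       # value is filled in via local_settings.py
        return Response('caching=%s, key set=%s' % (cache_enabled, api_key is not None))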
4 app/lib/dist/dateutil/__init__.py
@@ -5,5 +5,5 @@
datetime module.
"""
__author__ = "Gustavo Niemeyer <gustavo@niemeyer.net>"
-__license__ = "Simplified BSD"
-__version__ = "2.0"
+__license__ = "PSF License"
+__version__ = "1.5"
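This and the following dateutil hunks swap the vendored copy from 2.0 back to 1.5: 2.0 is the Python 3 port, while the runtime targeted here is Python 2.5, so the Python 3 constructs (new-style raise, __next__, winreg, str checks) are reverted and buildout.cfg below pins python-dateutil to the 1.x line. A quick sanity check of the vendored copy, assuming app/lib/dist is already on sys.path:

# Illustrative check only; assumes the vendored app/lib/dist copy is importable.
import dateutil
assert dateutil.__version__ == "1.5", dateutil.__version__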
6 app/lib/dist/dateutil/easter.py
@@ -5,7 +5,7 @@
datetime module.
"""
__author__ = "Gustavo Niemeyer <gustavo@niemeyer.net>"
-__license__ = "Simplified BSD"
+__license__ = "PSF License"
import datetime
@@ -52,7 +52,7 @@ def easter(year, method=EASTER_WESTERN):
"""
if not (1 <= method <= 3):
- raise ValueError("invalid method")
+ raise ValueError, "invalid method"
# g - Golden year - 1
# c - Century
@@ -88,5 +88,5 @@ def easter(year, method=EASTER_WESTERN):
p = i-j+e
d = 1+(p+27+(p+6)//40)%31
m = 3+(p+26)//30
- return datetime.date(int(y), int(m), int(d))
+ return datetime.date(int(y),int(m),int(d))
41 app/lib/dist/dateutil/parser.py
@@ -6,22 +6,21 @@
datetime module.
"""
__author__ = "Gustavo Niemeyer <gustavo@niemeyer.net>"
-__license__ = "Simplified BSD"
+__license__ = "PSF License"
import datetime
import string
import time
import sys
import os
-import collections
try:
- from io import StringIO
+ from cStringIO import StringIO
except ImportError:
- from io import StringIO
+ from StringIO import StringIO
-from . import relativedelta
-from . import tz
+import relativedelta
+import tz
__all__ = ["parse", "parserinfo"]
@@ -40,7 +39,7 @@
class _timelex(object):
def __init__(self, instream):
- if isinstance(instream, str):
+ if isinstance(instream, basestring):
instream = StringIO(instream)
self.instream = instream
self.wordchars = ('abcdfeghijklmnopqrstuvwxyz'
@@ -134,7 +133,7 @@ def get_token(self):
def __iter__(self):
return self
- def __next__(self):
+ def next(self):
token = self.get_token()
if token is None:
raise StopIteration
@@ -156,7 +155,7 @@ def _repr(self, classname):
for attr in self.__slots__:
value = getattr(self, attr)
if value is not None:
- l.append("%s=%s" % (attr, repr(value)))
+ l.append("%s=%s" % (attr, `value`))
return "%s(%s)" % (classname, ", ".join(l))
def __repr__(self):
@@ -301,7 +300,7 @@ def parse(self, timestr, default=None,
second=0, microsecond=0)
res = self._parse(timestr, **kwargs)
if res is None:
- raise ValueError("unknown string format")
+ raise ValueError, "unknown string format"
repl = {}
for attr in ["year", "month", "day", "hour",
"minute", "second", "microsecond"]:
@@ -312,20 +311,20 @@ def parse(self, timestr, default=None,
if res.weekday is not None and not res.day:
ret = ret+relativedelta.relativedelta(weekday=res.weekday)
if not ignoretz:
- if isinstance(tzinfos, collections.Callable) or tzinfos and res.tzname in tzinfos:
- if isinstance(tzinfos, collections.Callable):
+ if callable(tzinfos) or tzinfos and res.tzname in tzinfos:
+ if callable(tzinfos):
tzdata = tzinfos(res.tzname, res.tzoffset)
else:
tzdata = tzinfos.get(res.tzname)
if isinstance(tzdata, datetime.tzinfo):
tzinfo = tzdata
- elif isinstance(tzdata, str):
+ elif isinstance(tzdata, basestring):
tzinfo = tz.tzstr(tzdata)
elif isinstance(tzdata, int):
tzinfo = tz.tzoffset(res.tzname, tzdata)
else:
- raise ValueError("offset must be tzinfo subclass, " \
- "tz string, or int offset")
+ raise ValueError, "offset must be tzinfo subclass, " \
+ "tz string, or int offset"
ret = ret.replace(tzinfo=tzinfo)
elif res.tzname and res.tzname in time.tzname:
ret = ret.replace(tzinfo=tz.tzlocal())
@@ -586,7 +585,7 @@ def _parse(self, timestr, dayfirst=None, yearfirst=None, fuzzy=False):
# Check for a numbered timezone
if res.hour is not None and l[i] in ('+', '-'):
- signal = (-1, 1)[l[i] == '+']
+ signal = (-1,1)[l[i] == '+']
i += 1
len_li = len(l[i])
if len_li == 4:
@@ -744,7 +743,7 @@ def parse(self, tzstr):
if l[i] in ('+', '-'):
# Yes, that's right. See the TZ variable
# documentation.
- signal = (1, -1)[l[i] == '+']
+ signal = (1,-1)[l[i] == '+']
i += 1
else:
signal = -1
@@ -802,15 +801,15 @@ def parse(self, tzstr):
x.time = int(l[i])
i += 2
if i < len_l:
- if l[i] in ('-', '+'):
- signal = (-1, 1)[l[i] == "+"]
+ if l[i] in ('-','+'):
+ signal = (-1,1)[l[i] == "+"]
i += 1
else:
signal = 1
res.dstoffset = (res.stdoffset+int(l[i]))*signal
elif (l.count(',') == 2 and l[i:].count('/') <= 2 and
- not [y for x in l[i:] if x not in (',', '/', 'J', 'M',
- '.', '-', ':')
+ not [y for x in l[i:] if x not in (',','/','J','M',
+ '.','-',':')
for y in x if y not in "0123456789"]):
for x in (res.start, res.end):
if l[i] == 'J':
25 app/lib/dist/dateutil/relativedelta.py
@@ -5,7 +5,7 @@
datetime module.
"""
__author__ = "Gustavo Niemeyer <gustavo@niemeyer.net>"
-__license__ = "Simplified BSD"
+__license__ = "PSF License"
import datetime
import calendar
@@ -113,9 +113,10 @@ def __init__(self, dt1=None, dt2=None,
yearday=None, nlyearday=None,
hour=None, minute=None, second=None, microsecond=None):
if dt1 and dt2:
- if (not isinstance(dt1, datetime.date)) or (not isinstance(dt2, datetime.date)):
- raise TypeError("relativedelta only diffs datetime/date")
- if not type(dt1) == type(dt2): #isinstance(dt1, type(dt2)):
+ if not isinstance(dt1, datetime.date) or \
+ not isinstance(dt2, datetime.date):
+ raise TypeError, "relativedelta only diffs datetime/date"
+ if type(dt1) is not type(dt2):
if not isinstance(dt1, datetime.datetime):
dt1 = datetime.datetime.fromordinal(dt1.toordinal())
elif not isinstance(dt2, datetime.datetime):
@@ -171,7 +172,7 @@ def __init__(self, dt1=None, dt2=None,
self.second = second
self.microsecond = microsecond
- if isinstance(weekday, int):
+ if type(weekday) is int:
self.weekday = weekdays[weekday]
else:
self.weekday = weekday
@@ -184,7 +185,7 @@ def __init__(self, dt1=None, dt2=None,
if yearday > 59:
self.leapdays = -1
if yday:
- ydayidx = [31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 366]
+ ydayidx = [31,59,90,120,151,181,212,243,273,304,334,366]
for idx, ydays in enumerate(ydayidx):
if yday <= ydays:
self.month = idx+1
@@ -194,7 +195,7 @@ def __init__(self, dt1=None, dt2=None,
self.day = yday-ydayidx[idx-1]
break
else:
- raise ValueError("invalid year day (%d)" % yday)
+ raise ValueError, "invalid year day (%d)" % yday
self._fix()
@@ -243,7 +244,7 @@ def _set_months(self, months):
def __radd__(self, other):
if not isinstance(other, datetime.date):
- raise TypeError("unsupported type for add operation")
+ raise TypeError, "unsupported type for add operation"
elif self._has_time and not isinstance(other, datetime.datetime):
other = datetime.datetime.fromordinal(other.toordinal())
year = (self.year or other.year)+self.years
@@ -289,7 +290,7 @@ def __rsub__(self, other):
def __add__(self, other):
if not isinstance(other, relativedelta):
- raise TypeError("unsupported type for add operation")
+ raise TypeError, "unsupported type for add operation"
return relativedelta(years=other.years+self.years,
months=other.months+self.months,
days=other.days+self.days,
@@ -309,7 +310,7 @@ def __add__(self, other):
def __sub__(self, other):
if not isinstance(other, relativedelta):
- raise TypeError("unsupported type for sub operation")
+ raise TypeError, "unsupported type for sub operation"
return relativedelta(years=other.years-self.years,
months=other.months-self.months,
days=other.days-self.days,
@@ -345,7 +346,7 @@ def __neg__(self):
second=self.second,
microsecond=self.microsecond)
- def __bool__(self):
+ def __nonzero__(self):
return not (not self.years and
not self.months and
not self.days and
@@ -425,7 +426,7 @@ def __repr__(self):
"hour", "minute", "second", "microsecond"]:
value = getattr(self, attr)
if value is not None:
- l.append("%s=%s" % (attr, repr(value)))
+ l.append("%s=%s" % (attr, `value`))
return "%s(%s)" % (self.__class__.__name__, ", ".join(l))
# vim:ts=4:sw=4:et
102 app/lib/dist/dateutil/rrule.py
@@ -5,12 +5,12 @@
datetime module.
"""
__author__ = "Gustavo Niemeyer <gustavo@niemeyer.net>"
-__license__ = "Simplified BSD"
+__license__ = "PSF License"
import itertools
import datetime
import calendar
-import _thread
+import thread
import sys
__all__ = ["rrule", "rruleset", "rrulestr",
@@ -22,15 +22,15 @@
M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30+
[7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7)
M365MASK = list(M366MASK)
-M29, M30, M31 = list(range(1, 30)), list(range(1, 31)), list(range(1, 32))
+M29, M30, M31 = range(1,30), range(1,31), range(1,32)
MDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7])
MDAY365MASK = list(MDAY366MASK)
-M29, M30, M31 = list(range(-29, 0)), list(range(-30, 0)), list(range(-31, 0))
+M29, M30, M31 = range(-29,0), range(-30,0), range(-31,0)
NMDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7])
NMDAY365MASK = list(NMDAY366MASK)
-M366RANGE = (0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366)
-M365RANGE = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365)
-WDAYMASK = [0, 1, 2, 3, 4, 5, 6]*55
+M366RANGE = (0,31,60,91,121,152,182,213,244,274,305,335,366)
+M365RANGE = (0,31,59,90,120,151,181,212,243,273,304,334,365)
+WDAYMASK = [0,1,2,3,4,5,6]*55
del M29, M30, M31, M365MASK[59], MDAY365MASK[59], NMDAY365MASK[31]
MDAY365MASK = tuple(MDAY365MASK)
M365MASK = tuple(M365MASK)
@@ -41,7 +41,7 @@
DAILY,
HOURLY,
MINUTELY,
- SECONDLY) = list(range(7))
+ SECONDLY) = range(7)
# Imported on demand.
easter = None
@@ -52,7 +52,7 @@ class weekday(object):
def __init__(self, weekday, n=None):
if n == 0:
- raise ValueError("Can't create weekday with n == 0")
+ raise ValueError, "Can't create weekday with n == 0"
self.weekday = weekday
self.n = n
@@ -83,7 +83,7 @@ class rrulebase:
def __init__(self, cache=False):
if cache:
self._cache = []
- self._cache_lock = _thread.allocate_lock()
+ self._cache_lock = thread.allocate_lock()
self._cache_gen = self._iter()
self._cache_complete = False
else:
@@ -112,7 +112,7 @@ def _iter_cached(self):
break
try:
for j in range(10):
- cache.append(next(gen))
+ cache.append(gen.next())
except StopIteration:
self._cache_gen = gen = None
self._cache_complete = True
@@ -133,13 +133,13 @@ def __getitem__(self, item):
else:
return list(itertools.islice(self,
item.start or 0,
- item.stop or sys.maxsize,
+ item.stop or sys.maxint,
item.step or 1))
elif item >= 0:
gen = iter(self)
try:
for i in range(item+1):
- res = next(gen)
+ res = gen.next()
except StopIteration:
raise IndexError
return res
@@ -250,13 +250,13 @@ def __init__(self, freq, dtstart=None,
self._until = until
if wkst is None:
self._wkst = calendar.firstweekday()
- elif isinstance(wkst, int):
+ elif type(wkst) is int:
self._wkst = wkst
else:
self._wkst = wkst.weekday
if bysetpos is None:
self._bysetpos = None
- elif isinstance(bysetpos, int):
+ elif type(bysetpos) is int:
if bysetpos == 0 or not (-366 <= bysetpos <= 366):
raise ValueError("bysetpos must be between 1 and 366, "
"or between -366 and -1")
@@ -280,14 +280,14 @@ def __init__(self, freq, dtstart=None,
# bymonth
if not bymonth:
self._bymonth = None
- elif isinstance(bymonth, int):
+ elif type(bymonth) is int:
self._bymonth = (bymonth,)
else:
self._bymonth = tuple(bymonth)
# byyearday
if not byyearday:
self._byyearday = None
- elif isinstance(byyearday, int):
+ elif type(byyearday) is int:
self._byyearday = (byyearday,)
else:
self._byyearday = tuple(byyearday)
@@ -295,7 +295,7 @@ def __init__(self, freq, dtstart=None,
if byeaster is not None:
if not easter:
from dateutil import easter
- if isinstance(byeaster, int):
+ if type(byeaster) is int:
self._byeaster = (byeaster,)
else:
self._byeaster = tuple(byeaster)
@@ -305,7 +305,7 @@ def __init__(self, freq, dtstart=None,
if not bymonthday:
self._bymonthday = ()
self._bynmonthday = ()
- elif isinstance(bymonthday, int):
+ elif type(bymonthday) is int:
if bymonthday < 0:
self._bynmonthday = (bymonthday,)
self._bymonthday = ()
@@ -318,7 +318,7 @@ def __init__(self, freq, dtstart=None,
# byweekno
if byweekno is None:
self._byweekno = None
- elif isinstance(byweekno, int):
+ elif type(byweekno) is int:
self._byweekno = (byweekno,)
else:
self._byweekno = tuple(byweekno)
@@ -326,7 +326,7 @@ def __init__(self, freq, dtstart=None,
if byweekday is None:
self._byweekday = None
self._bynweekday = None
- elif isinstance(byweekday, int):
+ elif type(byweekday) is int:
self._byweekday = (byweekday,)
self._bynweekday = None
elif hasattr(byweekday, "n"):
@@ -340,7 +340,7 @@ def __init__(self, freq, dtstart=None,
self._byweekday = []
self._bynweekday = []
for wday in byweekday:
- if isinstance(wday, int):
+ if type(wday) is int:
self._byweekday.append(wday)
elif not wday.n or freq > MONTHLY:
self._byweekday.append(wday.weekday)
@@ -358,7 +358,7 @@ def __init__(self, freq, dtstart=None,
self._byhour = (dtstart.hour,)
else:
self._byhour = None
- elif isinstance(byhour, int):
+ elif type(byhour) is int:
self._byhour = (byhour,)
else:
self._byhour = tuple(byhour)
@@ -368,7 +368,7 @@ def __init__(self, freq, dtstart=None,
self._byminute = (dtstart.minute,)
else:
self._byminute = None
- elif isinstance(byminute, int):
+ elif type(byminute) is int:
self._byminute = (byminute,)
else:
self._byminute = tuple(byminute)
@@ -378,7 +378,7 @@ def __init__(self, freq, dtstart=None,
self._bysecond = (dtstart.second,)
else:
self._bysecond = None
- elif isinstance(bysecond, int):
+ elif type(bysecond) is int:
self._bysecond = (bysecond,)
else:
self._bysecond = tuple(bysecond)
@@ -716,7 +716,7 @@ def rebuild(self, year, month):
# days from last year's last week number in
# this year.
if -1 not in rr._byweekno:
- lyearweekday = datetime.date(year-1, 1, 1).weekday()
+ lyearweekday = datetime.date(year-1,1,1).weekday()
lno1wkst = (7-lyearweekday+rr._wkst)%7
lyearlen = 365+calendar.isleap(year-1)
if lno1wkst >= 4:
@@ -768,7 +768,7 @@ def rebuild(self, year, month):
self.lastmonth = month
def ydayset(self, year, month, day):
- return list(range(self.yearlen)), 0, self.yearlen
+ return range(self.yearlen), 0, self.yearlen
def mdayset(self, year, month, day):
set = [None]*self.yearlen
@@ -833,20 +833,14 @@ def __init__(self, genlist, gen):
self.genlist = genlist
self.gen = gen
- def __next__(self):
+ def next(self):
try:
self.dt = self.gen()
except StopIteration:
self.genlist.remove(self)
- def __lt__(self, other):
- return self.dt < other.dt
-
- def __gt__(self, other):
- return self.dt > other.dt
-
- def __eq__(self, other):
- return self.dt == other.dt
+ def __cmp__(self, other):
+ return cmp(self.dt, other.dt)
def __init__(self, cache=False):
rrulebase.__init__(self, cache)
@@ -870,14 +864,14 @@ def exdate(self, exdate):
def _iter(self):
rlist = []
self._rdate.sort()
- self._genitem(rlist, iter(self._rdate).__next__)
- for gen in [iter(x).__next__ for x in self._rrule]:
+ self._genitem(rlist, iter(self._rdate).next)
+ for gen in [iter(x).next for x in self._rrule]:
self._genitem(rlist, gen)
rlist.sort()
exlist = []
self._exdate.sort()
- self._genitem(exlist, iter(self._exdate).__next__)
- for gen in [iter(x).__next__ for x in self._exrule]:
+ self._genitem(exlist, iter(self._exdate).next)
+ for gen in [iter(x).next for x in self._exrule]:
self._genitem(exlist, gen)
exlist.sort()
lastdt = None
@@ -886,13 +880,13 @@ def _iter(self):
ritem = rlist[0]
if not lastdt or lastdt != ritem.dt:
while exlist and exlist[0] < ritem:
- next(exlist[0])
+ exlist[0].next()
exlist.sort()
if not exlist or ritem != exlist[0]:
total += 1
yield ritem.dt
lastdt = ritem.dt
- next(ritem)
+ ritem.next()
rlist.sort()
self._len = total
@@ -938,7 +932,7 @@ def _handle_UNTIL(self, rrkwargs, name, value, **kwargs):
ignoretz=kwargs.get("ignoretz"),
tzinfos=kwargs.get("tzinfos"))
except ValueError:
- raise ValueError("invalid until date")
+ raise ValueError, "invalid until date"
def _handle_WKST(self, rrkwargs, name, value, **kwargs):
rrkwargs["wkst"] = self._weekday_map[value]
@@ -965,7 +959,7 @@ def _parse_rfc_rrule(self, line,
if line.find(':') != -1:
name, value = line.split(':')
if name != "RRULE":
- raise ValueError("unknown parameter name")
+ raise ValueError, "unknown parameter name"
else:
value = line
rrkwargs = {}
@@ -978,9 +972,9 @@ def _parse_rfc_rrule(self, line,
ignoretz=ignoretz,
tzinfos=tzinfos)
except AttributeError:
- raise ValueError("unknown parameter '%s'" % name)
+ raise ValueError, "unknown parameter '%s'" % name
except (KeyError, ValueError):
- raise ValueError("invalid '%s': %s" % (name, value))
+ raise ValueError, "invalid '%s': %s" % (name, value)
return rrule(dtstart=dtstart, cache=cache, **rrkwargs)
def _parse_rfc(self, s,
@@ -997,7 +991,7 @@ def _parse_rfc(self, s,
unfold = True
s = s.upper()
if not s.strip():
- raise ValueError("empty string")
+ raise ValueError, "empty string"
if unfold:
lines = s.splitlines()
i = 0
@@ -1032,36 +1026,36 @@ def _parse_rfc(self, s,
name, value = line.split(':', 1)
parms = name.split(';')
if not parms:
- raise ValueError("empty property name")
+ raise ValueError, "empty property name"
name = parms[0]
parms = parms[1:]
if name == "RRULE":
for parm in parms:
- raise ValueError("unsupported RRULE parm: "+parm)
+ raise ValueError, "unsupported RRULE parm: "+parm
rrulevals.append(value)
elif name == "RDATE":
for parm in parms:
if parm != "VALUE=DATE-TIME":
- raise ValueError("unsupported RDATE parm: "+parm)
+ raise ValueError, "unsupported RDATE parm: "+parm
rdatevals.append(value)
elif name == "EXRULE":
for parm in parms:
- raise ValueError("unsupported EXRULE parm: "+parm)
+ raise ValueError, "unsupported EXRULE parm: "+parm
exrulevals.append(value)
elif name == "EXDATE":
for parm in parms:
if parm != "VALUE=DATE-TIME":
- raise ValueError("unsupported RDATE parm: "+parm)
+ raise ValueError, "unsupported RDATE parm: "+parm
exdatevals.append(value)
elif name == "DTSTART":
for parm in parms:
- raise ValueError("unsupported DTSTART parm: "+parm)
+ raise ValueError, "unsupported DTSTART parm: "+parm
if not parser:
from dateutil import parser
dtstart = parser.parse(value, ignoretz=ignoretz,
tzinfos=tzinfos)
else:
- raise ValueError("unsupported property: "+name)
+ raise ValueError, "unsupported property: "+name
if (forceset or len(rrulevals) > 1 or
rdatevals or exrulevals or exdatevals):
if not parser and (rdatevals or exdatevals):
95 app/lib/dist/dateutil/tz.py
@@ -5,7 +5,7 @@
datetime module.
"""
__author__ = "Gustavo Niemeyer <gustavo@niemeyer.net>"
-__license__ = "Simplified BSD"
+__license__ = "PSF License"
import datetime
import struct
@@ -75,7 +75,7 @@ def __ne__(self, other):
def __repr__(self):
return "%s(%s, %s)" % (self.__class__.__name__,
- repr(self._name),
+ `self._name`,
self._offset.days*86400+self._offset.seconds)
__reduce__ = object.__reduce__
@@ -161,7 +161,7 @@ def __repr__(self):
for attr in self.__slots__:
value = getattr(self, attr)
if value is not None:
- l.append("%s=%s" % (attr, repr(value)))
+ l.append("%s=%s" % (attr, `value`))
return "%s(%s)" % (self.__class__.__name__, ", ".join(l))
def __eq__(self, other):
@@ -194,13 +194,13 @@ class tzfile(datetime.tzinfo):
# ftp://elsie.nci.nih.gov/pub/tz*.tar.gz
def __init__(self, fileobj):
- if isinstance(fileobj, str):
+ if isinstance(fileobj, basestring):
self._filename = fileobj
fileobj = open(fileobj)
elif hasattr(fileobj, "name"):
self._filename = fileobj.name
else:
- self._filename = repr(fileobj)
+ self._filename = `fileobj`
# From tzfile(5):
#
@@ -212,8 +212,8 @@ def __init__(self, fileobj):
# ``standard'' byte order (the high-order byte
# of the value is written first).
- if fileobj.read(4).decode() != "TZif":
- raise ValueError("magic not found")
+ if fileobj.read(4) != "TZif":
+ raise ValueError, "magic not found"
fileobj.read(16)
@@ -284,7 +284,7 @@ def __init__(self, fileobj):
for i in range(typecnt):
ttinfo.append(struct.unpack(">lbb", fileobj.read(6)))
- abbr = fileobj.read(charcnt).decode()
+ abbr = fileobj.read(charcnt)
# Then there are tzh_leapcnt pairs of four-byte
# values, written in standard byte order; the
@@ -360,7 +360,7 @@ def __init__(self, fileobj):
if not self._trans_list:
self._ttinfo_std = self._ttinfo_first = self._ttinfo_list[0]
else:
- for i in range(timecnt-1, -1, -1):
+ for i in range(timecnt-1,-1,-1):
tti = self._trans_idx[i]
if not self._ttinfo_std and not tti.isdst:
self._ttinfo_std = tti
@@ -465,11 +465,11 @@ def __ne__(self, other):
def __repr__(self):
- return "%s(%s)" % (self.__class__.__name__, repr(self._filename))
+ return "%s(%s)" % (self.__class__.__name__, `self._filename`)
def __reduce__(self):
if not os.path.isfile(self._filename):
- raise ValueError("Unpickable %s class" % self.__class__.__name__)
+ raise ValueError, "Unpickable %s class" % self.__class__.__name__
return (self.__class__, (self._filename,))
class tzrange(datetime.tzinfo):
@@ -524,7 +524,7 @@ def tzname(self, dt):
def _isdst(self, dt):
if not self._start_delta:
return False
- year = datetime.datetime(dt.year, 1, 1)
+ year = datetime.datetime(dt.year,1,1)
start = year+self._start_delta
end = year+self._end_delta
dt = dt.replace(tzinfo=None)
@@ -561,7 +561,7 @@ def __init__(self, s):
res = parser._parsetz(s)
if res is None:
- raise ValueError("unknown string format")
+ raise ValueError, "unknown string format"
# Here we break the compatibility with the TZ variable handling.
# GMT-3 actually *means* the timezone -3.
@@ -624,7 +624,7 @@ def _delta(self, x, isend=0):
return relativedelta.relativedelta(**kwargs)
def __repr__(self):
- return "%s(%s)" % (self.__class__.__name__, repr(self._s))
+ return "%s(%s)" % (self.__class__.__name__, `self._s`)
class _tzicalvtzcomp:
def __init__(self, tzoffsetfrom, tzoffsetto, isdst,
@@ -694,7 +694,7 @@ def tzname(self, dt):
return self._find_comp(dt).tzname
def __repr__(self):
- return "<tzicalvtz %s>" % repr(self._tzid)
+ return "<tzicalvtz %s>" % `self._tzid`
__reduce__ = object.__reduce__
@@ -704,37 +704,37 @@ def __init__(self, fileobj):
if not rrule:
from dateutil import rrule
- if isinstance(fileobj, str):
+ if isinstance(fileobj, basestring):
self._s = fileobj
fileobj = open(fileobj)
elif hasattr(fileobj, "name"):
self._s = fileobj.name
else:
- self._s = repr(fileobj)
+ self._s = `fileobj`
self._vtz = {}
self._parse_rfc(fileobj.read())
def keys(self):
- return list(self._vtz.keys())
+ return self._vtz.keys()
def get(self, tzid=None):
if tzid is None:
- keys = list(self._vtz.keys())
+ keys = self._vtz.keys()
if len(keys) == 0:
- raise ValueError("no timezones defined")
+ raise ValueError, "no timezones defined"
elif len(keys) > 1:
- raise ValueError("more than one timezone available")
+ raise ValueError, "more than one timezone available"
tzid = keys[0]
return self._vtz.get(tzid)
def _parse_offset(self, s):
s = s.strip()
if not s:
- raise ValueError("empty offset")
+ raise ValueError, "empty offset"
if s[0] in ('+', '-'):
- signal = (-1, +1)[s[0]=='+']
+ signal = (-1,+1)[s[0]=='+']
s = s[1:]
else:
signal = +1
@@ -743,12 +743,12 @@ def _parse_offset(self, s):
elif len(s) == 6:
return (int(s[:2])*3600+int(s[2:4])*60+int(s[4:]))*signal
else:
- raise ValueError("invalid offset: "+s)
+ raise ValueError, "invalid offset: "+s
def _parse_rfc(self, s):
lines = s.splitlines()
if not lines:
- raise ValueError("empty string")
+ raise ValueError, "empty string"
# Unfold
i = 0
@@ -772,7 +772,7 @@ def _parse_rfc(self, s):
name, value = line.split(':', 1)
parms = name.split(';')
if not parms:
- raise ValueError("empty property name")
+ raise ValueError, "empty property name"
name = parms[0].upper()
parms = parms[1:]
if invtz:
@@ -781,7 +781,7 @@ def _parse_rfc(self, s):
# Process component
pass
else:
- raise ValueError("unknown component: "+value)
+ raise ValueError, "unknown component: "+value
comptype = value
founddtstart = False
tzoffsetfrom = None
@@ -791,21 +791,27 @@ def _parse_rfc(self, s):
elif name == "END":
if value == "VTIMEZONE":
if comptype:
- raise ValueError("component not closed: "+comptype)
+ raise ValueError, \
+ "component not closed: "+comptype
if not tzid:
- raise ValueError("mandatory TZID not found")
+ raise ValueError, \
+ "mandatory TZID not found"
if not comps:
- raise ValueError("at least one component is needed")
+ raise ValueError, \
+ "at least one component is needed"
# Process vtimezone
self._vtz[tzid] = _tzicalvtz(tzid, comps)
invtz = False
elif value == comptype:
if not founddtstart:
- raise ValueError("mandatory DTSTART not found")
+ raise ValueError, \
+ "mandatory DTSTART not found"
if tzoffsetfrom is None:
- raise ValueError("mandatory TZOFFSETFROM not found")
+ raise ValueError, \
+ "mandatory TZOFFSETFROM not found"
if tzoffsetto is None:
- raise ValueError("mandatory TZOFFSETFROM not found")
+ raise ValueError, \
+ "mandatory TZOFFSETFROM not found"
# Process component
rr = None
if rrulelines:
@@ -819,7 +825,8 @@ def _parse_rfc(self, s):
comps.append(comp)
comptype = None
else:
- raise ValueError("invalid component end: "+value)
+ raise ValueError, \
+ "invalid component end: "+value
elif comptype:
if name == "DTSTART":
rrulelines.append(line)
@@ -828,36 +835,40 @@ def _parse_rfc(self, s):
rrulelines.append(line)
elif name == "TZOFFSETFROM":
if parms:
- raise ValueError("unsupported %s parm: %s "%(name, parms[0]))
+ raise ValueError, \
+ "unsupported %s parm: %s "%(name, parms[0])
tzoffsetfrom = self._parse_offset(value)
elif name == "TZOFFSETTO":
if parms:
- raise ValueError("unsupported TZOFFSETTO parm: "+parms[0])
+ raise ValueError, \
+ "unsupported TZOFFSETTO parm: "+parms[0]
tzoffsetto = self._parse_offset(value)
elif name == "TZNAME":
if parms:
- raise ValueError("unsupported TZNAME parm: "+parms[0])
+ raise ValueError, \
+ "unsupported TZNAME parm: "+parms[0]
tzname = value
elif name == "COMMENT":
pass
else:
- raise ValueError("unsupported property: "+name)
+ raise ValueError, "unsupported property: "+name
else:
if name == "TZID":
if parms:
- raise ValueError("unsupported TZID parm: "+parms[0])
+ raise ValueError, \
+ "unsupported TZID parm: "+parms[0]
tzid = value
elif name in ("TZURL", "LAST-MODIFIED", "COMMENT"):
pass
else:
- raise ValueError("unsupported property: "+name)
+ raise ValueError, "unsupported property: "+name
elif name == "BEGIN" and value == "VTIMEZONE":
tzid = None
comps = []
invtz = True
def __repr__(self):
- return "%s(%s)" % (self.__class__.__name__, repr(self._s))
+ return "%s(%s)" % (self.__class__.__name__, `self._s`)
if sys.platform != "win32":
TZFILES = ["/etc/localtime", "localtime"]
@@ -903,7 +914,7 @@ def gettz(name=None):
for path in TZPATHS:
filepath = os.path.join(path, name)
if not os.path.isfile(filepath):
- filepath = filepath.replace(' ', '_')
+ filepath = filepath.replace(' ','_')
if not os.path.isfile(filepath):
continue
try:
30 app/lib/dist/dateutil/tzwin.py
@@ -1,7 +1,7 @@
# This code was originally contributed by Jeffrey Harris.
import datetime
import struct
-import winreg
+import _winreg
__author__ = "Jeffrey Harris & Gustavo Niemeyer <gustavo@niemeyer.net>"
@@ -15,9 +15,9 @@
def _settzkeyname():
global TZKEYNAME
- handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
+ handle = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
try:
- winreg.OpenKey(handle, TZKEYNAMENT).Close()
+ _winreg.OpenKey(handle, TZKEYNAMENT).Close()
TZKEYNAME = TZKEYNAMENT
except WindowsError:
TZKEYNAME = TZKEYNAME9X
@@ -49,10 +49,10 @@ def tzname(self, dt):
def list():
"""Return a list of all time zones known to the system."""
- handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
- tzkey = winreg.OpenKey(handle, TZKEYNAME)
- result = [winreg.EnumKey(tzkey, i)
- for i in range(winreg.QueryInfoKey(tzkey)[0])]
+ handle = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
+ tzkey = _winreg.OpenKey(handle, TZKEYNAME)
+ result = [_winreg.EnumKey(tzkey, i)
+ for i in range(_winreg.QueryInfoKey(tzkey)[0])]
tzkey.Close()
handle.Close()
return result
@@ -79,8 +79,8 @@ class tzwin(tzwinbase):
def __init__(self, name):
self._name = name
- handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
- tzkey = winreg.OpenKey(handle, "%s\%s" % (TZKEYNAME, name))
+ handle = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
+ tzkey = _winreg.OpenKey(handle, "%s\%s" % (TZKEYNAME, name))
keydict = valuestodict(tzkey)
tzkey.Close()
handle.Close()
@@ -118,9 +118,9 @@ class tzwinlocal(tzwinbase):
def __init__(self):
- handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
+ handle = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
- tzlocalkey = winreg.OpenKey(handle, TZLOCALKEYNAME)
+ tzlocalkey = _winreg.OpenKey(handle, TZLOCALKEYNAME)
keydict = valuestodict(tzlocalkey)
tzlocalkey.Close()
@@ -128,7 +128,7 @@ def __init__(self):
self._dstname = keydict["DaylightName"].encode("iso-8859-1")
try:
- tzkey = winreg.OpenKey(handle, "%s\%s"%(TZKEYNAME, self._stdname))
+ tzkey = _winreg.OpenKey(handle, "%s\%s"%(TZKEYNAME, self._stdname))
_keydict = valuestodict(tzkey)
self._display = _keydict["Display"]
tzkey.Close()
@@ -165,7 +165,7 @@ def picknthweekday(year, month, dayofweek, hour, minute, whichweek):
"""dayofweek == 0 means Sunday, whichweek 5 means last instance"""
first = datetime.datetime(year, month, 1, hour, minute)
weekdayone = first.replace(day=((dayofweek-first.isoweekday())%7+1))
- for n in range(whichweek):
+ for n in xrange(whichweek):
dt = weekdayone+(whichweek-n)*ONEWEEK
if dt.month == month:
return dt
@@ -173,8 +173,8 @@ def picknthweekday(year, month, dayofweek, hour, minute, whichweek):
def valuestodict(key):
"""Convert a registry key's values to a dictionary."""
dict = {}
- size = winreg.QueryInfoKey(key)[1]
+ size = _winreg.QueryInfoKey(key)[1]
for i in range(size):
- data = winreg.EnumValue(key, i)
+ data = _winreg.EnumValue(key, i)
dict[data[0]] = data[1]
return dict
3  app/lib/dist/dateutil/zoneinfo/__init__.py
@@ -21,7 +21,8 @@ def __reduce__(self):
return (gettz, (self._filename,))
def getzoneinfofile():
- filenames = sorted(os.listdir(os.path.join(os.path.dirname(__file__))))
+ filenames = os.listdir(os.path.join(os.path.dirname(__file__)))
+ filenames.sort()
filenames.reverse()
for entry in filenames:
if entry.startswith("zoneinfo") and ".tar." in entry:
BIN  app/lib/dist/dateutil/zoneinfo/zoneinfo-2010g.tar.gz
Binary file not shown
0  app/lib/dist/dateutil/zoneinfo/zoneinfo-2011d.tar.gz
Sorry, we could not display the changes to this file because there were too many other changes to display.
8 app/local_settings.py
@@ -1,10 +1,4 @@
'''
'''
-import os
-
-if os.environ.get('SERVER_SOFTWARE', '').startswith('Dev'):
- ALCHEMY_API_KEY='58fbb0bcc99e5a7c9160b2a23fcedbea16c92a01' #zemanel@maybeitworks.com
-else:
- ALCHEMY_API_KEY='dd804ff71af6a6e01e7c35667f2dcfe3413a99e5' #zemanel@zemanel.eu
- #ALCHEMY_API_KEY='58fbb0bcc99e5a7c9160b2a23fcedbea16c92a01' #zemanel@maybeitworks.com
+ALCHEMY_API_KEY='dd804ff71af6a6e01e7c35667f2dcfe3413a99e5' #zemanel@zemanel.eu
4 app/main.py
@@ -6,8 +6,6 @@
from config import config
from urls import rules
-
-
def enable_appstats(app):
"""Enables appstats middleware."""
from google.appengine.ext.appstats.recording import \
@@ -26,7 +24,7 @@ def enable_jinja2_debugging():
# Instantiate the application.
app = App(rules=rules, config=config, debug=debug)
-#enable_appstats(app)
+enable_appstats(app)
enable_jinja2_debugging()
def main():
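With the call above uncommented, appstats now records every request. The body of enable_appstats is truncated in this hunk; a typical completion, following the stock tipfy boilerplate (assumed here, not shown by the diff), wraps the dispatcher with the SDK's recording middleware:

def enable_appstats(app):
    """Enables appstats middleware."""
    # Assumed completion of the truncated import above; appstats_wsgi_middleware
    # is the standard entry point in google.appengine.ext.appstats.recording.
    from google.appengine.ext.appstats.recording import \
        appstats_wsgi_middleware
    app.dispatch = appstats_wsgi_middleware(app.dispatch)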
67 app/mood/handlers.py
@@ -1,11 +1,11 @@
# -*- coding: utf-8 -*-
import logging
from django.utils import simplejson as json
-from google.appengine.api import memcache
from tipfy.app import Response
from tipfy.handler import RequestHandler
from tipfyext.jinja2 import Jinja2Mixin
+from google.appengine.api import memcache
from .models import NewsItem
@@ -13,33 +13,58 @@
class HomePage(RequestHandler, Jinja2Mixin):
def get(self):
- return self.render_response('home.html', **{
- 'baseurl' : self.url_for('home', _full=True),
- 'bookmarklet_src' : self.url_for('bookmarklet-js', _full=True),
- })
+ CACHE_ENABLED = self.app.config['mood']['CACHE_ENABLED']
+ memcachekey = "handler_home"
+ response = memcache.get(memcachekey)
+ if response is None:
+ response = self.render_response('home.html', **{
+ 'baseurl' : self.url_for('home', _full=True),
+ 'bookmarklet_src' : self.url_for('bookmarklet-js', _full=True),
+ })
+ if CACHE_ENABLED:
+ memcache.set(memcachekey, response, 60*60)
+ return response
class BookmarkletPage(RequestHandler, Jinja2Mixin):
def get(self):
- return self.render_response('bookmarklet.js', ** {
- 'baseurl' : self.url_for('home', _full=True),
- })
+ CACHE_ENABLED = self.app.config['mood']['CACHE_ENABLED']
+ memcachekey = "handler_bookmarklet"
+ response = memcache.get(memcachekey)
+ if response is None:
+ response = self.render_response('bookmarklet.js', ** {
+ 'baseurl' : self.url_for('home', _full=True),
+ })
+ if CACHE_ENABLED:
+ memcache.set(memcachekey, response, 60*60)
+ return response
class NewsItemDetail(RequestHandler, Jinja2Mixin):
def get(self, itemid):
'''Returns the json for a news item comment
'''
+ CACHE_ENABLED = self.app.config['mood']['CACHE_ENABLED']
jsoncallback = self.request.args.get('jsoncallback', None)
itemid = str(itemid)
- newsitem = NewsItem.get_by_key_name(itemid) or self.abort(404)
- json_response = json.dumps({
- 'itemid': itemid,
- 'is_sentiment_processed' : newsitem.is_sentiment_processed,
- 'sentiment_type': newsitem.sentiment_type,
- 'sentiment_score': newsitem.sentiment_score,
- 'sentiment_status': newsitem.sentiment_status,
- 'sentiment_status_info': newsitem.sentiment_status_info,
- }, indent=2)
- if jsoncallback is None:
- return Response(json_response)
- else:
- return Response("%s(%s)" % (jsoncallback, json_response))
+ memcachekey = "handler_newsitem_detail_%s" % itemid
+ response = memcache.get(memcachekey)
+ if response is None:
+ newsitem = NewsItem.get_by_key_name(itemid)
+ if newsitem is not None:
+ response = json.dumps({
+ 'itemid': itemid,
+ 'is_sentiment_processed' : newsitem.is_sentiment_processed,
+ 'sentiment_type': newsitem.sentiment_type,
+ 'sentiment_score': newsitem.sentiment_score,
+ 'sentiment_status': newsitem.sentiment_status,
+ 'sentiment_status_info': newsitem.sentiment_status_info,
+ }, indent=2)
+ else:
+ response = json.dumps({
+ 'itemid': itemid,
+ 'is_sentiment_processed' : False,
+ }, indent=2)
+ if jsoncallback is not None:
+ response = "%s(%s)" % (jsoncallback, response)
+ if CACHE_ENABLED:
+ memcache.set(memcachekey, response, 5*60)
+ return response
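All three handlers above now share the same read-through pattern against App Engine memcache: look the rendered response up by key, rebuild it on a miss, and only write it back when CACHE_ENABLED is set (i.e. outside the dev server); NewsItemDetail additionally applies the jsoncallback wrapper before caching. Reduced to its core (the key and TTL below are illustrative; the real handlers use per-handler keys with 60*60 or 5*60 second lifetimes):

from google.appengine.api import memcache

def cached_response(key, ttl, build, cache_enabled=True):
    """Return the cached value for key, rebuilding and re-caching it on a miss."""
    response = memcache.get(key)
    if response is None:
        response = build()          # e.g. render_response(...) or json.dumps(...)
        if cache_enabled:
            memcache.set(key, response, ttl)
    return response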
51 app/mood/jobs.py
@@ -1,7 +1,8 @@
# -*- coding: utf-8 -*-
import logging
-import rfc3339
import datetime
+import rfc3339
+
from django.utils import simplejson as json
from tipfy.app import Response
from tipfy.handler import RequestHandler
@@ -20,10 +21,9 @@ def get(self):
api = HNSearchAPI()
limit=100
try:
- now = datetime.datetime.now()
- now_rfc = rfc3339.rfc3339(now, utc=True)
- created_from = "%s-10MINUTES" % now_rfc
- created_to = now_rfc
+ created_from = "NOW-20MINUTES"
+ created_to = "NOW"
+
logging.info("Polling HNSearchAPI from %s to %s " % (created_from, created_to))
result = api.search(created_from=created_from, created_to=created_to, start=0, limit=0)
content = json.loads(result, encoding="utf-8")
@@ -48,14 +48,51 @@ def get(self):
self.abort(500)
return Response('OK', status=200)
+#class QueueHNSearchYesterdayJob(RequestHandler):
+# def get(self):
+# '''Poll HNSearch API for news comments
+# '''
+# queue = taskqueue.Queue(name='hnsearchapi')
+# api = HNSearchAPI()
+# limit=100
+# try:
+# now = datetime.datetime.now()
+# now_rfc = rfc3339.rfc3339(now, utc=True)
+# created_from = "%s-10MINUTES" % now_rfc
+# created_to = now_rfc
+#
+# logging.info("Polling HNSearchAPI from %s to %s " % (created_from, created_to))
+# result = api.search(created_from=created_from, created_to=created_to, start=0, limit=0)
+# content = json.loads(result, encoding="utf-8")
+# hits = int(content['hits'])
+# logger.info("Got %s hnsearch hits" % content['hits'])
+# if hits > 1000:
+# hits = 1000
+# logger.warn("Number of hits is over limit. Trimming to 1000")
+# for start in xrange(0, hits, limit):
+# #taskname = "poll-hnsearch-%s-%s" % (created_from, created_to)
+# params = {
+# 'created_from' : created_from,
+# 'created_to' : created_to,
+# 'start' : start,
+# 'limit' : limit,
+# }
+# task = taskqueue.Task(params=params, method="GET", url="/tasks/poll_hnsearch")
+# queue.add(task)
+# logging.info("Created task %s" % task.name)
+# except Exception, e:
+# logger.exception(e)
+# self.abort(500)
+# return Response('OK', status=200)
+
class QueueAlchemyTasksJob(RequestHandler):
def get(self):
'''Fills a GAP task queue with items sentiment analysis
'''
queue = taskqueue.Queue(name='alchemyapi')
- #items = NewsItem.all(keys_only=True).filter("is_sentiment_processed", False).filter("is_sentiment_queued", False).order('-create_ts').fetch(limit=100)
- items = NewsItem.all(keys_only=True).filter("is_sentiment_processed", False).order('-create_ts').fetch(limit=100)
+ items = NewsItem.all(keys_only=True).filter("is_sentiment_processed", False).filter("is_sentiment_queued", False).order('-create_ts').fetch(limit=100)
+ #items = NewsItem.all(keys_only=True).filter("is_sentiment_processed", False).order('-create_ts').fetch(limit=100)
for key in items:
keyname = key.name()
# queue sentiment analysis task
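The loop body that follows this comment is outside the hunk; based on the Queue/Task pattern used earlier in this file and the itemid parameter read by PollAlchemyTask in app/mood/tasks.py, the enqueue step looks roughly like the sketch below (the import path and task URL are assumptions, not shown by the diff):

from google.appengine.api.labs import taskqueue  # plain google.appengine.api.taskqueue on newer SDKs

queue = taskqueue.Queue(name='alchemyapi')
for key in items:
    # one sentiment-analysis task per unprocessed NewsItem key
    task = taskqueue.Task(params={'itemid': key.name()}, method="GET",
                          url="/tasks/poll_alchemy")  # hypothetical URL
    queue.add(task)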
1  app/mood/models.py
@@ -38,6 +38,7 @@ class NewsItem(db.Model):
#schema version
schema_version = db.IntegerProperty(default=1)
+ created_on = db.DateTimeProperty(auto_now_add=True)
#points = db.IntegerProperty()
#parent_id: will use parent of db model instance
2  app/mood/tasks.py
@@ -55,7 +55,7 @@ class PollAlchemyTask(RequestHandler):
'''Poll Alchemy API Sentimental analysis for processing a news comment item
'''
def get(self):
- apikey = self.app.config['mood.alchemyapi']['API_KEY']
+ apikey = self.app.config['mood']['ALCHEMYAPI_KEY']
itemid = self.request.args.get('itemid', None)
newsitem = NewsItem.get_by_key_name(itemid)
if newsitem is not None:
52 app/templates/bookmarklet.js
@@ -15,7 +15,7 @@
if (newsitem.is_sentiment_processed===true) {
var newDomNode = document.createElement('SPAN');
var color = null;
- newDomNode.id = "hnmood_"+newsitem.id;
+ newDomNode.id = "hnmood_"+newsitem.itemid;
if ('OK'===newsitem.sentiment_status) {
if (newsitem.sentiment_type==='positive') {
color = 'green';
@@ -24,12 +24,12 @@
} else if (newsitem.sentiment_type==='neutral') {
color = 'gray';
}
- newDomNode.innerHTML = '| <span style="color:'+color+';"> Sentiment analysis score:'+newsitem.sentiment_score+' ('+newsitem.sentiment_type+') </span>';
- dojo.place(newDomNode, linkNode, 'after');
+ newDomNode.innerHTML = '| <span style="color:'+color+';"> Sentiment analysis score:'+newsitem.sentiment_score+' ('+newsitem.sentiment_type+') </span> [itemid: '+newsitem.itemid+']';
+ dojo.place(newDomNode, linkNode.parentNode, 'last');
} else if ('ERROR'===newsitem.sentiment_status){
color = 'orange';
- newDomNode.innerHTML = '| <span style="color:'+color+';">Sentiment analysis error: '+newsitem.sentiment_status_info+'</span>';
- dojo.place(newDomNode, linkNode, 'after');
+ newDomNode.innerHTML = '| <span style="color:'+color+';">Sentiment analysis error: '+newsitem.sentiment_status_info+'</span> [itemid: '+newsitem.itemid+']';
+ dojo.place(newDomNode, linkNode.parentNode, 'last');
}
}
}
@@ -40,28 +40,28 @@
var newsitemId = null;
var newDomNodeId = null;
comments.forEach(function(commentNode, index, array){
- links = dojo.query(commentNode).query("a:last-child");
- if (links.length>0) {
- links.forEach(function(linkNode, index, array){
- newsitemId = linkNode.href.split('=')[1];
- newDomNodeId = "hnmood_"+newsitemId;
- // check if it was already processed
- if (dojo.query(commentNode).query('#'+newDomNodeId).length==0) {
- // Analisys node does not exist, create it
- var xhrArgs = {
- url: baseurl+'item/'+newsitemId,
- callbackParamName: "jsoncallback",
- load: dojo.partial(getCommentAnalisysCallback, linkNode),
- error: function(error) {
- if (window.console) {
- console.error(error);
- }
- }
+ links = dojo.query(commentNode).query("a").filter(':contains("link")');
+ //links.style('color', 'red');
+ //console.debug(links);
+ links.forEach(function(linkNode, index, array){
+ newsitemId = linkNode.href.split('=')[1];
+ newDomNodeId = "hnmood_"+newsitemId;
+ // check if it was already processed
+ if (dojo.query(commentNode).query('#'+newDomNodeId).length==0) {
+ // Analisys node does not exist, create it
+ var xhrArgs =
+ dojo.io.script.get({
+ url: baseurl+'item/'+newsitemId,
+ callbackParamName: "jsoncallback",
+ load: dojo.partial(getCommentAnalisysCallback, linkNode),
+ error: function(error) {
+ if (window.console) {
+ console.error(error);
+ }
}
- dojo.io.script.get(xhrArgs);
- }
- });
- }
+ });
+ }
+ });
});
}
2  buildout.cfg
@@ -56,7 +56,7 @@ eggs =
gaepytz
wtforms
rfc3339
- python-dateutil
+ python-dateutil< 2.0
# Don't copy files that match these glob patterns.
ignore-globs =
BIN  var/downloads/dist/python-dateutil-1.5.tar.gz
Binary file not shown
BIN  var/downloads/google_appengine_1.4.2.zip
Binary file not shown
468 var/parts/app_lib/site.py
@@ -0,0 +1,468 @@
+"""Append module search paths for third-party packages to sys.path.
+
+****************************************************************
+* This module is automatically imported during initialization. *
+****************************************************************
+
+In earlier versions of Python (up to 1.5a3), scripts or modules that
+needed to use site-specific modules would place ``import site''
+somewhere near the top of their code. Because of the automatic
+import, this is no longer necessary (but code that does it still
+works).
+
+This will append site-specific paths to the module search path. On
+Unix (including Mac OSX), it starts with sys.prefix and
+sys.exec_prefix (if different) and appends
+lib/python<version>/site-packages as well as lib/site-python.
+On other platforms (such as Windows), it tries each of the
+prefixes directly, as well as with lib/site-packages appended. The
+resulting directories, if they exist, are appended to sys.path, and
+also inspected for path configuration files.
+
+A path configuration file is a file whose name has the form
+<package>.pth; its contents are additional directories (one per line)
+to be added to sys.path. Non-existing directories (or
+non-directories) are never added to sys.path; no directory is added to
+sys.path more than once. Blank lines and lines beginning with
+'#' are skipped. Lines starting with 'import' are executed.
+
+For example, suppose sys.prefix and sys.exec_prefix are set to
+/usr/local and there is a directory /usr/local/lib/python2.5/site-packages
+with three subdirectories, foo, bar and spam, and two path
+configuration files, foo.pth and bar.pth. Assume foo.pth contains the
+following:
+
+ # foo package configuration
+ foo
+ bar
+ bletch
+
+and bar.pth contains:
+
+ # bar package configuration
+ bar
+
+Then the following directories are added to sys.path, in this order:
+
+ /usr/local/lib/python2.5/site-packages/bar
+ /usr/local/lib/python2.5/site-packages/foo
+
+Note that bletch is omitted because it doesn't exist; bar precedes foo
+because bar.pth comes alphabetically before foo.pth; and spam is
+omitted because it is not mentioned in either path configuration file.
+
+After these path manipulations, an attempt is made to import a module
+named sitecustomize, which can perform arbitrary additional
+site-specific customizations. If this import fails with an
+ImportError exception, it is silently ignored.
+
+"""
+
+import sys
+import os
+import __builtin__
+
+
+def makepath(*paths):
+ dir = os.path.abspath(os.path.join(*paths))
+ return dir, os.path.normcase(dir)
+
+def abs__file__():
+ """Set all module' __file__ attribute to an absolute path"""
+ for m in sys.modules.values():
+ if hasattr(m, '__loader__'):
+ continue # don't mess with a PEP 302-supplied __file__
+ try:
+ m.__file__ = os.path.abspath(m.__file__)
+ except AttributeError:
+ continue
+
+def removeduppaths():
+ """ Remove duplicate entries from sys.path along with making them
+ absolute"""
+ # This ensures that the initial path provided by the interpreter contains
+ # only absolute pathnames, even if we're running from the build directory.
+ L = []
+ known_paths = set()
+ for dir in sys.path:
+ # Filter out duplicate paths (on case-insensitive file systems also
+ # if they only differ in case); turn relative paths into absolute
+ # paths.
+ dir, dircase = makepath(dir)
+ if not dircase in known_paths:
+ L.append(dir)
+ known_paths.add(dircase)
+ sys.path[:] = L
+ return known_paths
+
+# XXX This should not be part of site.py, since it is needed even when
+# using the -S option for Python. See http://www.python.org/sf/586680
+def addbuilddir():
+ """Append ./build/lib.<platform> in case we're running in the build dir
+ (especially for Guido :-)"""
+ from distutils.util import get_platform
+ s = "build/lib.%s-%.3s" % (get_platform(), sys.version)
+ s = os.path.join(os.path.dirname(sys.path[-1]), s)
+ sys.path.append(s)
+
+def _init_pathinfo():
+ """Return a set containing all existing directory entries from sys.path"""
+ d = set()
+ for dir in sys.path:
+ try:
+ if os.path.isdir(dir):
+ dir, dircase = makepath(dir)
+ d.add(dircase)
+ except TypeError:
+ continue
+ return d
+
+def addpackage(sitedir, name, known_paths):
+ """Add a new path to known_paths by combining sitedir and 'name' or execute
+ sitedir if it starts with 'import'"""
+ if known_paths is None:
+ _init_pathinfo()
+ reset = 1
+ else:
+ reset = 0
+ fullname = os.path.join(sitedir, name)
+ try:
+ f = open(fullname, "rU")
+ except IOError:
+ return
+ try:
+ for line in f:
+ if line.startswith("#"):
+ continue
+ if line.startswith("import"):
+ exec line
+ continue
+ line = line.rstrip()
+ dir, dircase = makepath(sitedir, line)
+ if not dircase in known_paths and os.path.exists(dir):
+ sys.path.append(dir)
+ known_paths.add(dircase)
+ finally:
+ f.close()
+ if reset:
+ known_paths = None
+ return known_paths
+
+def addsitedir(sitedir, known_paths=None):
+ """Add 'sitedir' argument to sys.path if missing and handle .pth files in
+ 'sitedir'"""
+ if known_paths is None:
+ known_paths = _init_pathinfo()
+ reset = 1
+ else:
+ reset = 0
+ sitedir, sitedircase = makepath(sitedir)
+ if not sitedircase in known_paths:
+ sys.path.append(sitedir) # Add path component
+ try:
+ names = os.listdir(sitedir)
+ except os.error:
+ return
+ names.sort()
+ for name in names:
+ if name.endswith(os.extsep + "pth"):
+ addpackage(sitedir, name, known_paths)
+ if reset:
+ known_paths = None
+ return known_paths
+
+def addsitepackages(known_paths):
+ """Add site packages, as determined by zc.buildout.
+
+ See original_addsitepackages, below, for the original version."""
+ join = os.path.join
+ base = os.path.dirname(os.path.abspath(os.path.realpath(__file__)))
+ base = os.path.dirname(base)
+ base = os.path.dirname(base)
+ base = os.path.dirname(base)
+ setuptools_path = '/System/Library/Frameworks/Python.framework/Versions/2.5/Extras/lib/python'
+ sys.path.append(setuptools_path)
+ known_paths.add(os.path.normcase(setuptools_path))
+ import pkg_resources
+ buildout_paths = [
+ join(base, 'eggs/tipfy-1.0b1-py2.5.egg'),
+ join(base, 'eggs/Jinja2-2.5.5-py2.5.egg'),
+ join(base, 'eggs/Babel-0.9.6-py2.5.egg'),
+ join(base, 'eggs/gaepytz-2011c-py2.5.egg'),
+ join(base, 'eggs/WTForms-0.6.3-py2.5.egg'),
+ join(base, 'eggs/rfc3339-5-py2.5.egg'),
+ join(base, 'eggs/python_dateutil-1.5-py2.5.egg'),
+ '/System/Library/Frameworks/Python.framework/Versions/2.5/Extras/lib/python',
+ join(base, 'eggs/blinker-1.1-py2.5.egg'),
+ join(base, 'eggs/Werkzeug-0.6.2-py2.5.egg')
+ ]
+ for path in buildout_paths:
+ sitedir, sitedircase = makepath(path)
+ if not sitedircase in known_paths and os.path.exists(sitedir):
+ sys.path.append(sitedir)
+ known_paths.add(sitedircase)
+ pkg_resources.working_set.add_entry(sitedir)
+ sys.__egginsert = len(buildout_paths) # Support distribute.
+ original_paths = [
+ '/Library/Python/2.5/site-packages',
+ '/System/Library/Frameworks/Python.framework/Versions/2.5/Extras/lib/python/PyObjC',
+ '/System/Library/Frameworks/Python.framework/Versions/2.5/Extras/lib/python/wx-2.8-mac-unicode'
+ ]
+ for path in original_paths:
+ if path == setuptools_path or path not in known_paths:
+ addsitedir(path, known_paths)
+ return known_paths
+
+def original_addsitepackages(known_paths):
+ """Add site-packages (and possibly site-python) to sys.path"""
+ prefixes = [sys.prefix]
+ if sys.exec_prefix != sys.prefix:
+ prefixes.append(sys.exec_prefix)
+ for prefix in prefixes:
+ if prefix:
+ if sys.platform in ('os2emx', 'riscos'):
+ sitedirs = [os.path.join(prefix, "Lib", "site-packages")]
+ elif sys.platform == 'darwin' and prefix == sys.prefix:
+ sitedirs = [os.path.join("/Library/Python", sys.version[:3], "site-packages"), os.path.join(sys.prefix, "Extras", "lib", "python")]
+ elif os.sep == '/':
+ sitedirs = [os.path.join(prefix,
+ "lib",
+ "python" + sys.version[:3],
+ "site-packages"),
+ os.path.join(prefix, "lib", "site-python")]
+ else:
+ sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")]
+ if sys.platform == 'darwin':
+ # for framework builds *only* we add the standard Apple
+ # locations. Currently only per-user, but /Library and
+ # /Network/Library could be added too
+ if 'Python.framework' in prefix:
+ home = os.environ.get('HOME')
+ if home:
+ sitedirs.append(
+ os.path.join(home,
+ 'Library',
+ 'Python',
+ sys.version[:3],
+ 'site-packages'))
+ for sitedir in sitedirs:
+ if os.path.isdir(sitedir):
+ addsitedir(sitedir, known_paths)
+ return None
+
+
+def setBEGINLIBPATH():
+ """The OS/2 EMX port has optional extension modules that do double duty
+ as DLLs (and must use the .DLL file extension) for other extensions.
+ The library search path needs to be amended so these will be found
+ during module import. Use BEGINLIBPATH so that these are at the start
+ of the library search path.
+
+ """
+ dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
+ libpath = os.environ['BEGINLIBPATH'].split(';')
+ if libpath[-1]:
+ libpath.append(dllpath)
+ else:
+ libpath[-1] = dllpath
+ os.environ['BEGINLIBPATH'] = ';'.join(libpath)
+
+
+def setquit():
+ """Define new built-ins 'quit' and 'exit'.
+ These are simply strings that display a hint on how to exit.
+
+ """
+ if os.sep == ':':
+ eof = 'Cmd-Q'
+ elif os.sep == '\\':
+ eof = 'Ctrl-Z plus Return'
+ else:
+ eof = 'Ctrl-D (i.e. EOF)'
+
+ class Quitter(object):
+ def __init__(self, name):
+ self.name = name
+ def __repr__(self):
+ return 'Use %s() or %s to exit' % (self.name, eof)
+ def __call__(self, code=None):
+ # Shells like IDLE catch the SystemExit, but listen when their
+ # stdin wrapper is closed.
+ try:
+ sys.stdin.close()
+ except:
+ pass
+ raise SystemExit(code)
+ __builtin__.quit = Quitter('quit')
+ __builtin__.exit = Quitter('exit')
+
+
+class _Printer(object):
+ """interactive prompt objects for printing the license text, a list of
+ contributors and the copyright notice."""
+
+ MAXLINES = 23
+
+ def __init__(self, name, data, files=(), dirs=()):
+ self.__name = name
+ self.__data = data
+ self.__files = files
+ self.__dirs = dirs
+ self.__lines = None
+
+ def __setup(self):
+ if self.__lines:
+ return
+ data = None
+ for dir in self.__dirs:
+ for filename in self.__files:
+ filename = os.path.join(dir, filename)
+ try:
+ fp = file(filename, "rU")
+ data = fp.read()
+ fp.close()
+ break
+ except IOError:
+ pass
+ if data:
+ break
+ if not data:
+ data = self.__data
+ self.__lines = data.split('\n')
+ self.__linecnt = len(self.__lines)
+
+ def __repr__(self):
+ self.__setup()
+ if len(self.__lines) <= self.MAXLINES:
+ return "\n".join(self.__lines)
+ else:
+ return "Type %s() to see the full %s text" % ((self.__name,)*2)
+
+ def __call__(self):
+ self.__setup()
+ prompt = 'Hit Return for more, or q (and Return) to quit: '
+ lineno = 0
+ while 1:
+ try:
+ for i in range(lineno, lineno + self.MAXLINES):
+ print self.__lines[i]
+ except IndexError:
+ break
+ else:
+ lineno += self.MAXLINES
+ key = None
+ while key is None:
+ key = raw_input(prompt)
+ if key not in ('', 'q'):
+ key = None
+ if key == 'q':
+ break
+
+def setcopyright():
+ """Set 'copyright' and 'credits' in __builtin__"""
+ __builtin__.copyright = _Printer("copyright", sys.copyright)
+ if sys.platform[:4] == 'java':
+ __builtin__.credits = _Printer(
+ "credits",
+ "Jython is maintained by the Jython developers (www.jython.org).")
+ else:
+ __builtin__.credits = _Printer("credits", """\
+ Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
+ for supporting Python development. See www.python.org for more information.""")
+ here = os.path.dirname(os.__file__)
+ __builtin__.license = _Printer(
+ "license", "See http://www.python.org/%.3s/license.html" % sys.version,
+ ["LICENSE.txt", "LICENSE"],
+ [os.path.join(here, os.pardir), here, os.curdir])
+
+
+class _Helper(object):
+ """Define the built-in 'help'.
+ This is a wrapper around pydoc.help (with a twist).
+
+ """
+
+ def __repr__(self):
+ return "Type help() for interactive help, " \
+ "or help(object) for help about object."
+ def __call__(self, *args, **kwds):
+ import pydoc
+ return pydoc.help(*args, **kwds)
+
+def sethelper():
+ __builtin__.help = _Helper()
+
+def aliasmbcs():
+ """On Windows, some default encodings are not provided by Python,
+ while they are always available as "mbcs" in each locale. Make
+ them usable by aliasing to "mbcs" in such a case."""
+ if sys.platform == 'win32':
+ import locale, codecs
+ enc = locale.getdefaultlocale()[1]
+ if enc.startswith('cp'): # "cp***" ?
+ try:
+ codecs.lookup(enc)
+ except LookupError:
+ import encodings
+ encodings._cache[enc] = encodings._unknown
+ encodings.aliases.aliases[enc] = 'mbcs'
+
+def setencoding():
+ """Set the string encoding used by the Unicode implementation. The
+ default is 'ascii', but if you're willing to experiment, you can
+ change this."""
+ encoding = "ascii" # Default value set by _PyUnicode_Init()
+ if 0: