diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..06a4ea6
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,5 @@
+include README.rst
+include MANIFEST.in
+include campaign.ini
+recursive-include campaign/templates *.mako *.css
+
diff --git a/Makefile b/Makefile
index 7ceaaec..9bc9618 100644
--- a/Makefile
+++ b/Makefile
@@ -1,16 +1,21 @@
-APPNAME = geoip
+APPNAME = campaign
VE = virtualenv
PY = bin/python
PI = bin/pip
NO = bin/nosetests -s --with-xunit
+PS = bin/pserve
all: build
build:
$(VE) --no-site-packages .
+ bin/easy_install -U distribute
$(PI) install -r prod-reqs.txt
- $(PY) setup.py build
+ $(PY) setup.py develop
test:
$(NO) $(APPNAME)
+run:
+ $(PS) campaign-local.ini
+
diff --git a/campaign.ini b/campaign.ini
index e3a6065..7753641 100644
--- a/campaign.ini
+++ b/campaign.ini
@@ -19,6 +19,10 @@ db.db = /tmp/campaigns.sqlite
#db.password = snip
#db.db = campaign
+#dbg.traceback = False
+#dbg.break_unknown_exception = False
+#dbg.self_diag = False
+
beaker.session.cache_dir = %(here)s/data
beaker.session.key = campaign
beaker.session.secret = Secret.
diff --git a/campaign/__init__.py b/campaign/__init__.py
index f7e2412..b72b08d 100644
--- a/campaign/__init__.py
+++ b/campaign/__init__.py
@@ -3,6 +3,8 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""Main entry point
"""
+import logging
+
from pyramid.config import Configurator
from metlog.config import client_from_stream_config
from campaign.resources import Root
@@ -10,6 +12,8 @@
from mozsvc.config import load_into_settings
from mozsvc.middlewares import _resolve_name
+logger = logging.getLogger('campaign')
+
def get_group(group_name, dictionary):
if group_name is None:
@@ -29,6 +33,27 @@ def configure_from_settings(object_name, settings):
cls = _resolve_name(config.pop('backend'))
return cls(**config)
+def self_diag(config):
+ import warnings
+ import sys
+ import os
+ bad = False
+ if sys.version_info[:3] < (2,6,0) or sys.version_info[:3] >= (3,0,0):
+ warnings.warn('Please run this code under version '
+ '2.6 or 2.7 of python.');
+ bad |= True
+ templatePath = os.path.join(os.path.dirname(__file__), 'templates',
+ 'login.mako')
+ if not os.path.exists(templatePath):
+ warnings.warn(('Could not find required template. %s\n Your install ' %
+ templatePath) +
+ 'may be corrupt. Please reinstall.');
+ bad |= True
+ if not config.registry['storage'].health_check():
+ warnings.warn('Storage reported an error. Please check settings.');
+ bad |= True
+ return bad
+
def main(global_config, **settings):
load_into_settings(global_config['__file__'], settings)
@@ -44,6 +69,8 @@ def main(global_config, **settings):
open(global_config['__file__'], 'r'),
'metlog')
config.registry['metlog'] = metlog_client
+ if settings.get('dbg.self_diag', False):
+ self_diag(config)
return config.make_wsgi_app()
diff --git a/campaign/storage/__init__.py b/campaign/storage/__init__.py
index 13d1415..55e24d0 100644
--- a/campaign/storage/__init__.py
+++ b/campaign/storage/__init__.py
@@ -81,6 +81,10 @@ def normalize_announce(self, data):
# customize for each memory model
+ def health_check(self):
+ # Is the current memory model working?
+ return False
+
def del_announce(self, keys):
pass
diff --git a/campaign/storage/sql.py b/campaign/storage/sql.py
index ac1e345..7dae8b5 100644
--- a/campaign/storage/sql.py
+++ b/campaign/storage/sql.py
@@ -17,7 +17,7 @@ class Campaign(Base):
channel = Column('channel', String(24), index=True, nullable=True)
version = Column('version', Float, index=True, nullable=True)
platform = Column('platform', String(24), index=True, nullable=True)
- lang = Column('lang', String(24), index=True)
+ lang = Column('lang', String(24), index=True, nullable=True)
locale = Column('locale', String(24), index=True, nullable=True)
start_time = Column('start_time', Integer, index=True)
end_time = Column('end_time', Integer, index=True, nullable=True)
@@ -64,20 +64,42 @@ def _connect(self):
logging.error('Could not connect to db "%s"' % repr(e))
raise e
+ def health_check(self):
+ try:
+ healthy = True
+ with self.engine.begin() as conn:
+ conn.execute(("insert into %s (id, channel, platform, " %
+ self.__tablename__) +
+ "start_time, end_time, note, dest_url, author, created) " +
+ "values ('test', 'test', 'test', 0, 0, 'test', 'test', " +
+ "'test', 0)")
+ resp = conn.execute(("select id, note from %s where " %
+ self.__tablename__) + "id='test';")
+ if resp.fetchone() is None:
+ healthy = False
+ conn.execute("delete from %s where id='test';" %
+ self.__tablename__)
+ except Exception, e:
+ import warnings
+ warnings.warn(str(e))
+ return False
+ return healthy
+
def resolve(self, token):
if token is None:
return None
sql = 'select * from campaigns where id = :id'
items = self.engine.execute(text(sql), {'id': token})
- if items.rowcount == 0:
+ row = items.fetchone()
+ if items.rowcount == 0 or row is None:
return None
- result = dict(zip(items.keys(), items.fetchone()))
+ result = dict(zip(items.keys(), row))
return result
def put_announce(self, data):
if data.get('note') is None:
- raise StorageException('Nothing to do.')
+ raise StorageException('Incomplete record. Skipping.')
snip = self.normalize_announce(data)
campaign = Campaign(**snip)
self.session.add(campaign)
@@ -89,10 +111,19 @@ def get_announce(self, data):
# that they're going to want them.
params = {}
settings = self.config.get_settings()
- now = int(time.time())
+ # The window allows the db to cache the query for the length of the
+ # window. This is because the database won't cache a query if it
+ # differs from a previous one. The timestamp will cause the query to
+ # not be cached.
+ window = int(settings.get('db.query_window', 1))
+ if window == 0:
+ window = 1
+ now = int(time.time() / window )
sql =("select id, note from %s where " % self.__tablename__ +
- " coalesce(start_time, %s) < %s " % (now-1, now) +
- "and coalesce(end_time, %s) > %s " % (now+1, now))
+ " coalesce(round(start_time / %s), %s) < %s " % (window,
+ now-1, now) +
+ "and coalesce(round(end_time / %s), %s) > %s " % (window,
+ now+1, now))
if data.get('last_accessed'):
sql += "and created > :last_accessed "
params['last_accessed'] = int(data.get('last_accessed'))
@@ -108,10 +139,14 @@ def get_announce(self, data):
if data.get('locale'):
sql += "and coalesce(locale, :locale) = :locale "
params['locale'] = data.get('locale')
- if data.get('idle_time'):
- sql += "and coalesce(idle_time, :idle_time) = :idle_time "
- params['idle_time'] = data.get('idle_time')
+ if not data.get('idle_time'):
+ data['idle_time'] = 0
+ sql += "and coalesce(idle_time, 0) <= :idle_time "
+ params['idle_time'] = data.get('idle_time')
sql += " order by id"
+ if (settings.get('dbg.show_query', False)):
+ print sql;
+ print params;
items = self.engine.execute(text(sql), **dict(params))
result = []
for item in items:
@@ -139,5 +174,11 @@ def del_announce(self, keys):
#TODO: how do you safely do an "in (keys)" call?
sql = 'delete from %s where id = :key' % self.__tablename__
for key in keys:
- self.engine.execute(text(sql), {"key": key});
+ self.engine.execute(text(sql), {"key": key})
self.session.commit()
+
+ def purge(self):
+ sql = 'delete from %s;' % self.__tablename__
+ self.engine.execute(text(sql))
+ self.session.commit()
+
diff --git a/campaign/templates/login.mako b/campaign/templates/login.mako
index 9a08b67..7481bf4 100644
--- a/campaign/templates/login.mako
+++ b/campaign/templates/login.mako
@@ -11,8 +11,9 @@
-
${dnote['id']}
+
${strftime(time_format, localtime(dnote['created']))}
${dnote['start_time']}
${dnote['end_time']}
diff --git a/__init__.py b/campaign/tests/__init__.py
similarity index 59%
rename from __init__.py
rename to campaign/tests/__init__.py
index 5eb298d..bb253d6 100644
--- a/__init__.py
+++ b/campaign/tests/__init__.py
@@ -2,6 +2,12 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-def main(global_config, **settings):
- print 'starting app...'
- pass
+class TConfig:
+
+ def __init__(self, data):
+ self.settings = data
+
+ def get_settings(self):
+ return self.settings
+
+
diff --git a/campaign/tests/test_storage.py b/campaign/tests/test_storage.py
new file mode 100644
index 0000000..e8d5603
--- /dev/null
+++ b/campaign/tests/test_storage.py
@@ -0,0 +1,99 @@
+import json
+import time
+import unittest2
+from pprint import pprint
+from campaign.storage.sql import Storage
+from campaign.tests import TConfig
+
+class TestStorage(unittest2.TestCase):
+
+ now = int(time.time())
+
+ test_announce = {
+ 'start_time': int(now - 300),
+ 'end_time': int(now + 3000),
+ 'lang': 'en',
+ 'locale': 'US',
+ 'note': 'Text Body',
+ 'title': 'Test',
+ 'dest_url': 'http://example.com'
+ }
+
+ def setUp(self):
+ self.storage = Storage(config = TConfig({'db.type': 'sqlite',
+ 'db.db': ':memory:'}))
+
+ def tearDown(self):
+ self.storage.purge()
+
+ def test_announcement(self):
+ self.storage.put_announce(self.test_announce)
+ items = self.storage.get_all_announce()
+ self.failUnless(len(items) > 0)
+ self.failUnless(self.test_announce['note'] in items[0].note)
+ self.failUnless(self.test_announce['title'] in items[0].note)
+ self.failUnless(self.test_announce['dest_url'] in items[0].dest_url)
+
+ def update_note(self, announce, note_text):
+ return announce.copy()
+
+ def test_search(self):
+ """ Yes, this test does a lot of things. That's because I need
+ to exercise the search algo using a lot of records. """
+ # really wish that update allowed chaining.
+ updates = [{'lang':None, 'locale':None, 'title':'Everyone'},
+ {'platform':'a', 'channel':'a', 'title':'p:a;c:a'},
+ {'platform':'b', 'channel':'a', 'title':'p:b;c:a'},
+ {'platform':'a', 'start_time': self.now + 1,
+ 'end_time': self.now + 3, 'title':'notyet'},
+ {'platform':'a', 'end_time': self.now-5, 'title':'tooold'},
+ {'platform':'a', 'idle_time': 10, 'title': 'idle:10'},
+ {'platform':'a', 'channel':'b', 'lang':'a', 'locale':'a',
+ 'idle_time': 10, 'title': 'full_rec'}
+ ]
+ # load the database
+ for update in updates:
+ test = self.test_announce.copy()
+ test.update(update)
+ self.storage.put_announce(test)
+ data = {'platform':'f', 'channel':'f', 'version':0}
+ announce = self.storage.get_announce(data)
+ self.assertEqual(len(announce), 1)
+ self.assertEqual(announce[0]['title'], 'Everyone')
+ data = {'platform':'a', 'channel':'a'}
+ announce = self.storage.get_announce(data)
+ # only Everyone and p:a;c:a should be returned.
+ print "P&C check:"
+ self.assertEqual(len(announce), 2)
+
+ data = {'platform':'a', 'channel':'a', 'idle_time': 15}
+ announce = self.storage.get_announce(data)
+ print "Idle Check:"
+ self.assertEqual(len(announce), 3)
+
+ data = {'platform':'a', 'channel':'b'}
+ announce = self.storage.get_announce(data)
+ print "P&C2 check:"
+ self.assertEqual(len(announce), 1)
+ # Store the unique record data for the resolve check.
+ resolve_rec = announce[0]
+
+ data = {'platform':'a', 'channel':'a'}
+ time.sleep(max(0, self.now + 2 - int(time.time())))
+ print "Wake check: %s " % (int(time.time()) - self.now)
+ announce = self.storage.get_announce(data)
+ self.assertEqual(len(announce), 3);
+
+ time.sleep(max(0, self.now + 4 - int(time.time())))
+ print "Expire check: %s " % (int(time.time()) - self.now)
+ data = {'platform':'a', 'channel':'a'}
+ announce = self.storage.get_announce(data)
+ self.assertEqual(len(announce), 2);
+
+ # Since we have an ID for a unique record, query it to make
+ # sure records resolve.
+ print "resolve check: %s" % resolve_rec['id']
+ rec = self.storage.resolve(resolve_rec['id'])
+ self.assertEqual('Everyone', json.loads(rec['note'])['title'])
+
+#TODO: continue tests
diff --git a/campaign/tests/test_views.py b/campaign/tests/test_views.py
new file mode 100644
index 0000000..df7012d
--- /dev/null
+++ b/campaign/tests/test_views.py
@@ -0,0 +1,170 @@
+import mock
+import json
+import time
+import unittest2
+
+from pprint import pprint
+from pyramid import testing
+import pyramid.httpexceptions as http
+from nose.tools import eq_
+
+from campaign.storage.sql import Storage
+from campaign import views
+from campaign.tests import TConfig
+
+
+def Request(params=None, post=None, matchdict=None, headers=None,
+ registry=None, **kw):
+
+ class Errors(list):
+ def add(self, where, key, msg):
+ self.append((where, key, msg))
+
+ testing.DummyRequest.json_body = property(lambda s: json.loads(s.body))
+ request = testing.DummyRequest(params=params, post=post, headers=headers,
+ **kw)
+ request.route_url = lambda s, **kw: s.format(**kw)
+ if matchdict:
+ request.matchdict = matchdict
+ if registry:
+ request.registry.update(registry);
+ return request
+
+
+class FakeMetlog():
+
+ def metlog(self, **kw):
+ self.lastRec = {'type': kw['type'],
+ 'payload': kw['payload'],
+ 'fields': kw['fields']}
+
+
+class ViewTest(unittest2.TestCase):
+
+ now = int(time.time())
+
+ base_record = {
+ 'start_time': int(now),
+ 'end_time': int(now + 3000),
+ 'lang': 'en',
+ 'locale': 'US',
+ 'note': 'Body',
+ 'title': 'Title',
+ 'dest_url': 'http://example.com'
+ }
+
+ diffs = [
+ {'channel': None, 'platform': None, 'version': None, 'title':'all'},
+ {'channel': 'a', 'platform': None, 'version': None, 'title': 'ann'},
+ {'channel': 'a', 'platform': 'a', 'version': 0, 'title': 'aa0'},
+ {'channel': 'a', 'platform': 'a', 'version': 0, 'idle_time': 1,
+ 'title': 'aa0i1'},
+ {'channel': 'a', 'platform': 'b', 'version': 0, 'title': 'ab0'},
+ {'channel': 'b', 'platform': 'a', 'version': 2, 'title': 'ba2',
+ 'dest_url': 'http://example.org'},
+ ]
+
+ def req(self, matchdict={}, user_id=None, headers=None, **kw):
+ class Reg(dict):
+
+ settings = {}
+
+ def __init__(self, settings=None, **kw):
+ super(Reg, self).__init__(**kw)
+ if settings:
+ self.settings = settings
+
+
+ request = Request(headers=headers, **kw)
+ request.registry=Reg(settings=self.config.get_settings())
+ request.registry['storage'] = self.storage
+ request.registry['metlog'] = FakeMetlog()
+ request.registry['auth'] = mock.Mock()
+ request.registry['auth'].get_user_id.return_value=user_id
+ if matchdict:
+ request.matchdict.update(matchdict)
+ return request;
+
+
+ def setUp(self):
+ self.config = testing.setUp()
+ self.storage = Storage(config = TConfig({'db.type': 'sqlite',
+ 'db.db': '/tmp/foo.db'}))
+ for diff in self.diffs:
+ record = self.base_record.copy()
+ record.update(diff);
+ self.storage.put_announce(record);
+
+ def tearDown(self):
+ self.storage.purge()
+
+
+ def test_get_announcements(self):
+ # normal number
+ response = views.get_announcements(self.req(matchdict={'channel':'a',
+ 'platform': 'a', 'version': 0}))
+ eq_(len(response['announcements']), 3)
+ # idle number
+ response = views.get_announcements(self.req(matchdict={'channel':'a',
+ 'platform': 'a', 'version': 0, 'idle_time': 6}))
+ eq_(len(response['announcements']), 4)
+
+ def test_get_all(self):
+ self.assertRaises(http.HTTPUnauthorized,
+ views.get_all_announcements,
+ self.req())
+ # scan for include.js or 'test="login"' id?
+ # try with a 'valid' user id
+ self.assertRaises(http.HTTPUnauthorized,
+ views.get_all_announcements,
+ self.req(matchdict={}, user_id='invalid@example.com'))
+ # try successful json
+ req = self.req(matchdict={}, user_id='foo@mozilla.com')
+ req.accept_encoding = 'application/javascript'
+ try:
+ views.login(req)
+ except http.HTTPOk:
+ pass
+ response = views.get_all_announcements(req)
+ eq_(len(response['announcements']), 6)
+
+ def test_handle_redir(self):
+ # get a record
+ response = self.storage.get_announce({'channel':'b'})
+ record = response[0]
+ req = self.req(matchdict={'token': record['id']})
+ self.assertRaises(http.HTTPTemporaryRedirect,
+ views.handle_redir, req)
+ self.assertRaises(http.HTTPNotFound, views.handle_redir,
+ self.req(matchdict={'token': 'Invalid Token'}))
+
+ def test_admin_page(self):
+ req = self.req()
+ response = views.admin_page(req)
+ eq_(response.status_code, 403)
+ req = self.req(matchdict={}, user_id='foo@mozilla.com')
+ response = views.admin_page(req)
+ eq_(response.status_code, 200)
+ req.registry.settings.update({'auth.block_authoring': True})
+ self.assertRaises(http.HTTPNotFound, views.admin_page, req)
+
+ def test_manage_announce(self):
+ # test assertion post
+ req = self.req(matchdict={'channel':'c', 'title': 'Goat',
+ 'note': 'Ready for sacrifice'}, user_id='foo@mozilla.com')
+ response = views.manage_announce(req)
+ # test create
+ time.sleep (2) # Give the db a second to write the record.
+ response = views.get_announcements(self.req(matchdict={'channel':'c'}))
+ goat = None
+ for record in response['announcements']:
+ if record['title'] == 'Goat':
+ goat = record
+ break
+ self.assertIsNotNone(goat)
+ req = self.req(params={'delete': goat['id']},
+ user_id='foo@mozilla.com')
+ self.assertRaises(http.HTTPOk, views.del_announce, req)
+ time.sleep (2) # Give the db a second to write the record.
+ req = self.req(matchdict={'token': goat['id']})
+ self.assertRaises(http.HTTPNotFound, views.handle_redir, req)
diff --git a/campaign/views.py b/campaign/views.py
index 2f70255..a9785be 100644
--- a/campaign/views.py
+++ b/campaign/views.py
@@ -3,11 +3,11 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
""" Cornice services.
"""
-from campaign import LOG
+from campaign import logger, LOG
from campaign.auth.default import DefaultAuth
from mozsvc.metrics import Service
from mako.template import Template
-import pyramid.httpexceptions as err
+import pyramid.httpexceptions as http
from time import strptime
from webob import Response
import json
@@ -17,10 +17,10 @@
fetch = Service(name='fetch',
path='/announcements/{channel}/{platform}/{version}',
description='Fetcher')
-fetchall = Service(name="fetchall",
+get_all = Service(name="get_all",
path='/announcements/',
description='Fetch Everything')
-authorx = Service(name='authorx',
+author2 = Service(name='author2',
path='/author/{id}',
description='Authoring Interface with record')
author = Service(name='author',
@@ -32,22 +32,28 @@
logout = Service(name='logout',
path='/logout/',
description='logout')
+redirl = Service(name='redir2',
+ path='/redirect/{locale}/{token}',
+ description='redir with locale')
redir = Service(name='redir',
path='/redirect/{token}',
description='redir')
+root = Service(name='root',
+ path='/',
+ description='Default path')
_TMPL = os.path.join(os.path.dirname(__file__), 'templates')
def get_lang_loc(request):
- header = request.headers.get('Accept-Language', 'en-us')
+ header = request.headers.get('Accept-Language', 'en-US')
langloc = header.split(',')[0]
if ('-' in langloc):
(lang, loc) = langloc.split('-')
else:
(lang, loc) = (langloc, None)
- return {'lang': lang, 'locale': loc}
+ return {'lang': lang.lower(), 'locale': loc.upper() if loc else None}
def get_last_accessed(request):
@@ -57,7 +63,13 @@ def get_last_accessed(request):
last_accessed_str = request.headers.get('If-Modified-Since')
last_accessed = strptime(last_accessed_str)
except Exception, e:
- import pdb; pdb.set_trace()
+ settings = request.registry.settings
+ if settings.get('dbg.traceback', False):
+ import traceback
+ traceback.print_exc()
+ if settings.get('dbg.break_unknown_exception', False):
+ import pdb
+ pdb.set_trace()
request.registry['metlog'].metlog(type='campaign_error',
severity=LOG.ERROR,
payload='Exception: %s' % str(e))
@@ -65,17 +77,16 @@ def get_last_accessed(request):
def log_fetched(request, reply):
- logger = request.registry['metlog'].metlog
+ metlog = request.registry['metlog'].metlog
for item in reply['announcements']:
- continue; ## NOOP
- logger(type='campaign_log',
- severity=LOG.INFO,
- payload=json.dumps(item))
- #metlogger.metlog('msgtype', payload='payload')
+ metlog(type='campaign_log',
+ severity=LOG.NOTICE,
+ payload='fetched',
+ fields=json.dumps(item))
pass
@fetch.get()
-def get_snippets(request):
+def get_announcements(request):
"""Returns campaigns in JSON."""
# get the valid user information from the request.
metlog = request.registry.get('metlog')
@@ -85,12 +96,12 @@ def get_snippets(request):
last_accessed = get_last_accessed(request)
args.update(last_accessed)
reply = {'announcements': storage.get_announce(args)}
- metlog.metlog(type='campaign', payload='fetch', fields=args)
+ metlog.metlog(type='campaign', payload='fetch_query', fields=args)
if not len(reply):
if last_accessed:
- raise err.HTTPNotModified
+ raise http.HTTPNotModified
else:
- raise err.HTTPNoContent
+ raise http.HTTPNoContent
log_fetched(request, reply)
return reply
@@ -99,8 +110,9 @@ def get_template(name):
name = os.path.join(_TMPL, '%s.mako' % name)
try:
return Template(filename=name)
- except IOError:
- raise err.HTTPServerError
+ except IOError, e:
+ logger.error(str(e))
+ raise http.HTTPServerError
def get_file(name):
@@ -109,48 +121,58 @@ def get_file(name):
ff = open(name)
return ff.read()
except IOError:
- raise err.HTTPNotFound
-
+ raise http.HTTPNotFound
-def get_ideltime(request):
- return {'idle_time': int(request.params.get('idle', 0))}
-
-def authorized(request, email):
+def authorized(email, request):
if email is None:
return False
settings = request.registry.settings
try:
domains = json.loads(settings.get('auth.valid.domains',
- "['@mozilla.com', '@mozilla.org']"))
+ '["@mozilla.com", "@mozilla.org"]'))
for valid_domain in domains:
if email.lower().endswith(valid_domain):
return True
except TypeError, e:
pass
except Exception, e:
- import pdb; pdb.set_trace();
+ if settings.get('dbg.traceback', False):
+ import traceback
+ traceback.print_exc()
+ if settings.get('dbg.break_unknown_exception', False):
+ import pdb
+ pdb.set_trace()
pass
return False
-@fetchall.get()
-def fetchall_snippets(request):
- if not authorized(request, request.session.get('uid')):
- return login(request)
+@get_all.get()
+def get_all_announcements(request):
+ if not login(request):
+ raise http.HTTPUnauthorized
storage = request.registry.get('storage')
- tdata = {"notes": storage.get_all_announce()}
+ tdata = {"announcements": storage.get_all_announce()}
return tdata
@author.get()
-@authorx.get()
+@author2.get()
def admin_page(request, error=None):
- if not authorized(request, request.session.get('uid')):
- return login(request)
- tdata = fetchall_snippets(request)
+ if request.registry.settings.get('auth.block_authoring', False):
+ raise http.HTTPNotFound()
+ if not login(request):
+ return login_page(request)
+ tdata = get_all_announcements(request)
tdata['author'] = request.session['uid']
tdata['error'] = error
+ try:
+ if 'javascript' in request.accept_encoding:
+ if not error:
+ raise http.HTTPOk
+ raise http.HTTPConflict(json.dumps(error))
+ except AttributeError:
+ pass
template = get_template('main')
content_type = 'text/html'
reply = template.render(**tdata)
@@ -160,48 +182,62 @@ def admin_page(request, error=None):
# sad to use post for DELETE, but JQuery doesn't add args to DELETE for bulk.
@author.post()
-@authorx.post()
+@author2.post()
def manage_announce(request):
- if not authorized(request, request.session.get('uid')):
- return login(request)
+ args = request.params.copy()
+ args.update(request.matchdict)
+ if request.registry.settings.get('auth.block_authoring', False):
+ raise http.HTTPNotFound()
+ if not login(request):
+ raise http.HTTPUnauthorized
+ else:
+ # Clean up the login info
+ try:
+ del args['assertion']
+ del args['audience']
+ except KeyError:
+ pass
storage = request.registry.get('storage')
+ settings = request.registry.settings
session = request.session
- args = dict(request.params)
err = None
- if not args.get('author'):
- args['author'] = session.get('uid')
- if 'assertion' in args:
- """ Login request"""
- return admin_page(request)
if 'delete' in args or 'delete[]' in args:
try:
del_announce(request)
- except err.HTTPOk:
+ except http.HTTPOk:
pass
- except err.HTTPNotFound, e:
- import pdb; pdb.set_trace();
+ except http.HTTPNotFound, e:
pass
return admin_page(request)
- if not args.get('author'):
- args['author'] = session.get('uid')
try:
- storage.put_announce(args)
+ if args != None and len(args) > 0:
+ if not args.get('author'):
+ args['author'] = session.get('uid')
+ storage.put_announce(args)
except Exception, e:
- import pdb; pdb.set_trace()
+ if settings.get('dbg.traceback', False):
+ import traceback
+ traceback.print_exc()
+ if settings.get('dbg.break_unknown_exception', False):
+ import pdb
+ pdb.set_trace()
# display error page.
+ err = {'code': 1,
+ 'error': str(e)}
pass
- return admin_page(request);
+ return admin_page(request, err);
@author.delete()
def del_announce(request):
- if not authorized(request, request.session.get('uid')):
- return login(request)
+ if not login(request):
+ return login_page(request)
storage = request.registry.get('storage')
args = dict(request.params)
+ args.update(request.matchdict)
deleteables = args.get('delete', args.get('delete[]', '')).split(',')
if len(deleteables):
storage.del_announce(deleteables)
- raise err.HTTPOk
+ raise http.HTTPOk
@fstatic.get()
@@ -210,26 +246,56 @@ def get_static(request):
content_type = 'text/css')
return response
+@root.get()
+def boot_to_author(request):
+ raise http.HTTPTemporaryRedirect(location='/author/')
+
@logout.delete()
def logout_page(request):
session = request.session
if 'uid' in session:
del session['uid']
- session.persist()
- session.save()
+ try:
+ session.persist()
+ session.save()
+ except AttributeError:
+ pass
login_page(request)
-def login_page(request):
+def login_page(request, error=None):
session = request.session
- template = get_template('login')
- response = Response(str(template.render(audience=request.get('HTTP_HOST'))),
- status=403)
- if (session.get('uid')):
- del(session['uid'])
- session.persist()
- session.save()
- return response
+ try:
+ if 'javascript' in request.accept_encoding:
+ # Don't display the login page for javascript requests.
+ if not error:
+ raise http.HTTPForbidden
+ raise http.HTTPInternalServerError(str(error))
+ except AttributeError:
+ pass
+ try:
+ template = get_template('login')
+ response = Response(str(template.render(
+ audience=request.get('HTTP_HOST'))),
+ status=403)
+ if (session.get('uid')):
+ del(session['uid'])
+ try:
+ session.persist()
+ session.save()
+ except AttributeError:
+ pass # because testing
+ return response
+ except Exception, e:
+ settings = request.registry.settings
+ if settings.get('dbg.traceback', False):
+ import traceback
+ traceback.print_exc()
+ if settings.get('dbg.break_unknown_exception', False):
+ import pdb
+ pdb.set_trace()
+ logger.error(str(e))
+ raise http.HTTPServerError
def login(request, skipAuth=False):
params = dict(request.params.items())
@@ -239,31 +305,48 @@ def login(request, skipAuth=False):
except ValueError:
pass
try:
+ uid = request.session.get('uid')
+ if uid and authorized(uid, request):
+ return True
#config = request.registry.get('config', {})
auth = request.registry.get('auth', DefaultAuth)
email = auth.get_user_id(request)
if email is None:
- return login_page(request)
- if authorized(request, email):
+ return False
+ if authorized(email, request):
session = request.session
session['uid'] = email
- session.persist()
- session.save()
+ try:
+ session.persist()
+ session.save()
+ except AttributeError:
+ pass
else:
- return login_page(request)
+ return False
+ except IOError, e:
+ raise e
+ except http.HTTPServerError, e:
+ raise e
except Exception, e:
- import pdb; pdb.set_trace();
- print ('missing credentials? %s', str(e))
- return login_page(request)
+ settings = request.registry.settings
+ if settings.get('dbg.traceback', False):
+ import traceback
+ traceback.print_exc()
+ if settings.get('dbg.break_unknown_exception', False):
+ import pdb
+ pdb.set_trace()
+ logger.error(str(e))
+ return False
# User Logged in
- return manage_announce(request)
+ return True
@redir.get()
+@redirl.get()
def handle_redir(request):
metlog = request.registry.get('metlog')
storage = request.registry.get('storage')
data = storage.resolve(request.matchdict.get('token'));
if data is None:
- return err.HTTPNotFound
+ raise http.HTTPNotFound
metlog.metlog(type='campaign', payload='redirect', fields=data)
- return err.HTTPTemporaryRedirect(location=data['dest_url'])
+ raise http.HTTPTemporaryRedirect(location=data['dest_url'])
diff --git a/prod-reqs.txt b/prod-reqs.txt
index 2280a6b..e8927f2 100644
--- a/prod-reqs.txt
+++ b/prod-reqs.txt
@@ -21,6 +21,7 @@ distribute==0.6.28
docutils==0.9.1
metlog-py==0.9.7
-e git://github.com/mozilla-services/mozservices.git@f1310d1e9a07ea6ae7374deee5b1da4cbb8dea0a#egg=mozsvc-dev
+nose==1.2.1
paster==0.7
pyramid==1.4a2
pyramid-beaker==0.7
diff --git a/setup.py b/setup.py
index 41c49e5..a594466 100644
--- a/setup.py
+++ b/setup.py
@@ -13,7 +13,7 @@
setup(name='campaign',
- version=0.1,
+ version=0.5,
description='campaign',
long_description=README,
classifiers=[