Skip to content
This repository has been archived by the owner on Apr 12, 2018. It is now read-only.

Commit

Permalink
Merge pull request #7 from jrconlin/master
Browse files Browse the repository at this point in the history
SqlAlchemy fixes
  • Loading branch information
jrconlin committed Oct 5, 2013
2 parents ca5a79a + 874a5c2 commit 7801f12
Show file tree
Hide file tree
Showing 12 changed files with 186 additions and 169 deletions.
14 changes: 8 additions & 6 deletions campaign/__init__.py
Expand Up @@ -8,11 +8,16 @@
from mozsvc.config import load_into_settings
from mozsvc.middlewares import _resolve_name
from campaign.logger import Logging, LOG
from campaign.storage.metrics import Counter

logger = None
counter = None

# To prevent circular references, duplicate this func here.
def strToBool(s="False"):
    """Coerce a config value to a boolean.

    Booleans pass through unchanged. Anything else is stringified and
    compared case-insensitively against the accepted truthy spellings
    ("true", "1", "yes", "t"); everything else — including None — is
    False. The str() coercion fixes an AttributeError the original
    raised for non-string, non-bool input.
    """
    if isinstance(s, bool):
        return s
    return str(s).lower() in ("true", "1", "yes", "t")


def get_group(group_name, dictionary):
if group_name is None:
Expand Down Expand Up @@ -66,7 +71,6 @@ def main(global_config, **settings):
config.add_static_view(name='static', path='campaign:static')
config.scan("campaign.views")
logger = Logging(config, global_config['__file__'])
counter = Counter(config=config, logger=logger)
config.registry['storage'] = _resolve_name(
settings.get('db.backend',
'campaign.storage.sql.Storage'))(config=config,
Expand All @@ -75,11 +79,9 @@ def main(global_config, **settings):
'auth',
settings['config'].get_map('auth'))
config.registry['logger'] = logger
config.registry['counter'] = counter
if settings.get('dbg.self_diag', False):
config.registry['counter'] = config.registry['storage'].counter
if strToBool(settings.get('dbg.self_diag', False)):
self_diag(config)
config.registry['logger'].log('Starting up', fields='',
severity=LOG.INFORMATIONAL)
return config.make_wsgi_app()


16 changes: 11 additions & 5 deletions campaign/decorators.py
@@ -1,6 +1,7 @@
import json
import pyramid.httpexceptions as http
import random
import utils
from campaign import logger, LOG
from campaign.auth.default import DefaultAuth
from dateutil import parser
Expand Down Expand Up @@ -62,14 +63,18 @@ def authorized(self, email, request):
if not result:
return False
storage = request.registry.get('storage')
return storage.is_user(email)
if settings.get("db.checkAccount", True):
return storage.is_user(email)
else:
return True
except TypeError:
pass
except Exception:
if settings.get('dbg.traceback', False):
if utils.strToBool(settings.get('dbg.traceback', False)):
import traceback
traceback.print_exc()
if settings.get('dbg.break_unknown_exception', False):
if utils.strToBool(settings.get('dbg.break_unknown_exception',
False)):
import pdb
pdb.set_trace()
pass
Expand Down Expand Up @@ -107,10 +112,11 @@ def login(self, request, skipAuth=False):
raise e
except Exception, e:
settings = request.registry.settings
if settings.get('dbg.traceback', False):
if utils.strToBool(settings.get('dbg.traceback', False)):
import traceback
traceback.print_exc()
if settings.get('dbg.break_unknown_exception', False):
if utils.strToBool(settings.get('dbg.break_unknown_exception',
False)):
import pdb
pdb.set_trace()
logger.log(type='error', severity=LOG.ERROR, msg=str(e))
Expand Down
4 changes: 3 additions & 1 deletion campaign/logger.py
@@ -1,5 +1,6 @@
import logging
import json
import campaign.utils as utils
from __builtin__ import type as btype


Expand Down Expand Up @@ -28,6 +29,7 @@ class LoggingException(Exception):


class Logging(object):

metlog2log = [logging.CRITICAL, logging.CRITICAL, logging.CRITICAL,
logging.ERROR, logging.WARNING, logging.INFO,
logging.INFO, logging.DEBUG]
Expand All @@ -42,7 +44,7 @@ def __init__(self, config, settings_file):
self.loggername = settings.get('logging.name', 'campaign-manager')
self.logger = logging.getLogger(self.loggername)
self.logger.level = 1
if HEKA and settings.get('logging.use_heka', True):
if HEKA and utils.strToBool(settings.get('logging.use_heka', "true")):
self.heka = client_from_stream_config(
open(settings_file, 'r'),
'heka')
Expand Down
116 changes: 0 additions & 116 deletions campaign/storage/metrics.py

This file was deleted.

111 changes: 108 additions & 3 deletions campaign/storage/sql.py
@@ -1,11 +1,13 @@
import json
import time
import datetime
import uuid
import re
from . import StorageBase, StorageException, Base
from .metrics import Counter
from campaign.logger import LOG
from campaign.views import api_version
from sqlalchemy import (Column, Integer, String, Text, text)
from sqlalchemy import (Column, Integer, String, Text,
text)


class Users(Base):
Expand Down Expand Up @@ -43,6 +45,108 @@ class Campaign(Base):
status = Column('status', Integer)


class Scrapes(Base):
    # ORM model backing the Counter class: one row of hit counters per
    # campaign token in the `scrapes` table.
    __tablename__ = 'scrapes'

    # Campaign token string; primary key (see Counter.bulk_increment).
    id = Column('id', String(25), unique=True, primary_key=True)
    # Times the campaign was served (incremented by the 'fetched' command).
    served = Column('served', Integer, server_default=text('0'))
    # Redirect clicks (incremented by the 'redirect' command).
    clicks = Column('clicks', Integer, server_default=text('0'))
    # Unix timestamp of the most recent increment (set via `last=:last`
    # in Counter.bulk_increment); indexed.
    last = Column('last', Integer, index=True, server_default=text('0'))


class CounterException(Exception):
    """Error raised by counter (scrapes) storage operations."""


class Counter(StorageBase):
    """Hit-counter storage.

    Tracks per-campaign-token 'served' and 'clicks' tallies in the
    `scrapes` table, and can replay counts from an application log file
    via parse()/log().
    """
    __database__ = 'campaign'
    __tablename__ = 'scrapes'

    def __init__(self, config, logger, **kw):
        try:
            super(Counter, self).__init__(config, **kw)
            self.logger = logger
            self._connect()
            #TODO: add the most common index.
        except Exception as e:
            logger.log(msg='Could not initialize Storage "%s"' % str(e),
                       type='error', severity=LOG.CRITICAL)
            raise e

    def bulk_increment(self, conn, id, action, time=None):
        """Increment counter `action` for token `id` on an open connection.

        `time` defaults to "now". The original default `time=time.time()`
        was evaluated once at class-definition time, freezing the same
        timestamp into every subsequent call.
        """
        if time is None:
            import time as _time  # param `time` shadows the module here
            time = _time.time()
        # `action` is interpolated into the SQL below, so strip anything
        # non-alphanumeric to prevent injection.
        action = re.sub(r'[^0-9A-Za-z]', '', action)
        try:
            if (self.settings.get("db.type") == "sqlite"):
                # sqlite lacks ON DUPLICATE KEY UPDATE; this only ensures
                # the row exists — counters are not incremented here.
                conn.execute(text("insert or ignore into " +
                                  self.__tablename__ +
                                  " (id)" +
                                  " values (:id ); "),
                             {"id": id})
            else:
                # MySQL-style upsert: insert with count 1, or bump the
                # existing counter and refresh `last`.
                dml = text("insert into " + self.__tablename__
                           + " (id, %s) values (:id, 1) " % action
                           + " on duplicate key update %s=%s+1, last=:last;"
                           % (action, action))
                conn.execute(dml, {"id": id, "last": time})
        except Exception as e:
            # Best-effort: log and continue so one bad row doesn't abort
            # a bulk replay.
            self.logger.log(msg="Could not increment id: %s" % str(e),
                            type="error", severity=LOG.ERROR)

    def increment(self, id, action, time):
        """Increment a single counter inside its own transaction."""
        with self.engine.begin() as conn:
            self.bulk_increment(conn, id, action, time)

    def fetched(self, data, time=None):
        """Record a 'served' hit for each item's token.

        All items share one transaction and one timestamp (resolved here
        rather than per-item). `time=None` means "now" — same
        evaluated-once default bug fixed as in bulk_increment.
        """
        if time is None:
            import time as _time
            time = _time.time()
        with self.engine.begin() as conn:
            for item in data:
                self.bulk_increment(conn, item.get('token'), 'served', time)

    def redir(self, data, time=None):
        """Record a redirect click for `data['id']` (None time = now)."""
        if time is None:
            import time as _time
            time = _time.time()
        self.increment(data.get('id'), 'clicks', time)

    # Log-line markers mapped to handlers. Values are the plain functions
    # captured at class creation, hence the explicit f(self, data, ts)
    # call style in log().
    commands = {'redirect': redir,
                'fetched': fetched}

    def log(self, line):
        """Parse one app log line; on a known command marker, decode the
        JSON payload and dispatch to its handler.

        Re-raises the underlying exception after logging it.
        """
        for command in self.commands.keys():
            if command + ' :' in line:
                # Fix: `datetime` is the module (imported as
                # `import datetime`); strptime lives on the
                # datetime.datetime class. The original
                # `datetime.strptime(...)` raised AttributeError.
                dt = datetime.datetime.strptime(line.split(',')[0],
                                                '%Y-%m-%d %H:%M:%S')
                timestamp = int(time.mktime(dt.timetuple()))
                try:
                    data = json.loads(line.split(command + ' :')[1])
                    # Payloads are sometimes JSON-encoded more than once;
                    # unwrap until we get a real structure.
                    while (isinstance(data, basestring)):
                        data = json.loads(data)
                    self.commands.get(command)(self,
                                               data,
                                               timestamp)
                except Exception as e:
                    self.logger.log(msg="Could not log %s" % str(e),
                                    type="error", severity=LOG.ERROR)
                    raise e

    def report(self, id):
        """Return the counter row for token `id` as a dict, {} if absent."""
        with self.engine.begin() as conn:
            resp = conn.execute(text(("select * from %s " %
                                      self.__tablename__) +
                                     "where id = :id"), {'id': id})
            if resp.rowcount > 0:
                result = resp.fetchone()
                return dict(zip(resp.keys(), result))
            else:
                return {}

    def parse(self, logfile):
        """Replay every line of `logfile` through log().

        Errors are logged and swallowed (best-effort import, as in the
        original). Fix: the original never closed the file handle.
        """
        source = None
        try:
            source = open(logfile, 'r')
            for line in source:
                self.log(line)
        except Exception as e:
            self.logger.log(msg="Could not parse %s" % str(e),
                            type="error", severity=LOG.ERROR)
        finally:
            if source is not None:
                source.close()


class Storage(StorageBase):
__database__ = 'campaign'
__tablename__ = 'campaigns'
Expand All @@ -51,11 +155,12 @@ def __init__(self, config, logger, **kw):
try:
super(Storage, self).__init__(config, **kw)
self.logger = logger
self._connect()
# Store off a link to the main table.
self.campaigns = Base.metadata.tables.get(Campaign.__tablename__)
self.users = Base.metadata.tables.get(Users.__tablename__)
self.scrapes = Base.metadata.tables.get(Scrapes.__tablename__)
self.counter = Counter(config, logger, **kw)
self._connect()
#TODO: add the most common index.
except Exception, e:
logger.log(msg='Could not initialize Storage "%s"' % str(e),
Expand Down
2 changes: 1 addition & 1 deletion campaign/templates/main.mako
@@ -1,6 +1,6 @@
<!doctype html>
<%
from campaign.util import strToUTC
from campaign.utils import strToUTC
from time import (time, strftime, gmtime)
import json
Expand Down

0 comments on commit 7801f12

Please sign in to comment.