Merge pull request #4 from t2y/add-testing
add tox.ini to test pep8/flakes using pytest
llonchj committed Jul 3, 2016
2 parents 983637e + e94a5a6 commit f94c6d9
Showing 6 changed files with 92 additions and 68 deletions.
3 changes: 1 addition & 2 deletions scrapy_sentry/__init__.py
@@ -1,5 +1,4 @@
# Redirect Scrapy log messages to standard Python logger
from __future__ import absolute_import

from .utils import init

from .utils import init # noqa
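Note: init is imported here only so it is re-exported at the package level, which is exactly the pattern pyflakes reports as an unused import — hence the # noqa marker now that the test run includes --flakes. A minimal sketch of the downstream usage this re-export supports (the DSN is a placeholder, not a real project value):

from scrapy_sentry import init  # re-export provided by the __init__.py above

init(dsn="https://public:secret@sentry.example.com/1")  # placeholder DSN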
48 changes: 25 additions & 23 deletions scrapy_sentry/extensions.py
@@ -8,12 +8,13 @@
import os

from scrapy import signals, log
from scrapy.mail import MailSender
from scrapy.mail import MailSender # noqa
from scrapy.exceptions import NotConfigured

from raven import Client

from .utils import get_client, response_to_dict
from .utils import init, response_to_dict


class Log(object):
def __init__(self, dsn=None, *args, **kwargs):
@@ -27,14 +28,14 @@ def from_crawler(cls, crawler):
o = cls(dsn=dsn)
return o


class Signals(object):
def __init__(self, client=None, dsn=None, **kwargs):
self.client = client if client else Client(dsn)

@classmethod
def from_crawler(cls, crawler, client=None, dsn=None):
dsn = crawler.settings.get("SENTRY_DSN", None)
client = get_client(dsn)
o = cls(dsn=dsn)

sentry_signals = crawler.settings.get("SENTRY_SIGNALS", [])
@@ -45,55 +46,56 @@ def from_crawler(cls, crawler, client=None, dsn=None):
crawler.signals.connect(receiver, signal=signal)

return o

def signal_receiver(self, signal=None, sender=None, *args, **kwargs):
message = signal
extra = {
'sender': sender,
'signal': signal,
'args': args,
'kwargs': kwargs,
}
idents = []
'sender': sender,
'signal': signal,
'args': args,
'kwargs': kwargs,
}
msg = self.client.capture('Message', message=message, extra=extra)
ident = self.client.get_ident(msg)
return ident


class Errors(object):
def __init__(self, dsn=None, client=None, **kwargs):
self.client = client if client else Client(dsn)

@classmethod
def from_crawler(cls, crawler, client=None, dsn=None):
dsn = os.environ.get("SENTRY_DSN", crawler.settings.get("SENTRY_DSN", None))
dsn = os.environ.get(
"SENTRY_DSN", crawler.settings.get("SENTRY_DSN", None))
if dsn is None:
raise NotConfigured('No SENTRY_DSN configured')
o = cls(dsn=dsn)
crawler.signals.connect(o.spider_error, signal=signals.spider_error)
return o

def spider_error(self, failure, response, spider, signal=None, sender=None, *args, **kwargs):
def spider_error(self, failure, response, spider,
signal=None, sender=None, *args, **kwargs):
from six import StringIO
traceback = StringIO()
failure.printTraceback(file=traceback)

message = signal
res_dict = response_to_dict(response, spider, include_request=True)
extra = {
'sender': sender,
'spider': spider.name,
'signal': signal,
'failure': failure,
'response': response_to_dict(response, spider, include_request=True),
'traceback': "\n".join(traceback.getvalue().split("\n")[-5:]),
}
'sender': sender,
'spider': spider.name,
'signal': signal,
'failure': failure,
'response': res_dict,
'traceback': "\n".join(traceback.getvalue().split("\n")[-5:]),
}
msg = self.client.captureMessage(
message=u"[{}] {}".format(spider.name, repr(failure.value)),
extra=extra) #, stack=failure.stack)
extra=extra) # , stack=failure.stack)

ident = self.client.get_ident(msg)

l = spider.log if spider else log.msg
l("Sentry Exception ID '%s'" % ident, level=log.INFO)

return ident
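These extension classes are wired in through Scrapy's EXTENSIONS setting, and the from_crawler hooks above read SENTRY_DSN from the crawler settings (Errors checks the environment first). A hypothetical settings.py sketch — the DSN and the priority number are placeholders:

# settings.py (sketch): enable the Errors extension defined above
SENTRY_DSN = "https://public:secret@sentry.example.com/1"  # placeholder DSN
EXTENSIONS = {
    "scrapy_sentry.extensions.Errors": 10,  # arbitrary priority value
}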

25 changes: 14 additions & 11 deletions scrapy_sentry/middlewares.py
@@ -4,38 +4,41 @@
import sys

from scrapy import log
from scrapy.conf import settings
from scrapy.conf import settings # noqa
from scrapy.exceptions import NotConfigured

from .utils import get_client


class SentryMiddleware(object):
def __init__(self, dsn=None, client=None):
self.client = client if client else get_client(dsn)

@classmethod
def from_crawler(cls, crawler):
dsn = os.environ.get("SENTRY_DSN", crawler.settings.get("SENTRY_DSN", None))
dsn = os.environ.get(
"SENTRY_DSN", crawler.settings.get("SENTRY_DSN", None))
if dsn is None:
raise NotConfigured('No SENTRY_DSN configured')
return cls(dsn)

def trigger(self, exception, spider=None, extra={}):
extra = {
'spider': spider.name if spider else "",
}
msg = self.client.captureException(exc_info=sys.exc_info(), extra=extra)
'spider': spider.name if spider else "",
}
msg = self.client.captureException(
exc_info=sys.exc_info(), extra=extra)
ident = self.client.get_ident(msg)

l = spider.log if spider else log.msg
l("Sentry Exception ID '%s'" % ident, level=log.INFO)

return None

def process_exception(self, request, exception, spider):
return self.trigger(exception, spider,
extra={"spider":spider, "request":request})
return self.trigger(exception, spider,
extra={"spider": spider, "request": request})

def process_spider_exception(self, response, exception, spider):
return self.trigger(exception, spider,
extra={"spider":spider, "response":response})

return self.trigger(exception, spider,
extra={"spider": spider, "response": response})
11 changes: 7 additions & 4 deletions scrapy_sentry/utils.py
@@ -5,8 +5,8 @@
from twisted.python import log

from scrapy.conf import settings
from scrapy.http import Request, Headers
from scrapy.utils.reqser import request_to_dict, request_from_dict
from scrapy.http import Request, Headers # noqa
from scrapy.utils.reqser import request_to_dict, request_from_dict # noqa
from scrapy.responsetypes import responsetypes

from raven import Client
@@ -15,10 +15,12 @@

SENTRY_DSN = os.environ.get("SENTRY_DSN", None)


def get_client(dsn=None):
"""gets a scrapy client"""
return Client(dsn or settings.get("SENTRY_DSN", SENTRY_DSN))


def init(dsn=None):
"""Redirect Scrapy log messages to standard Python logger"""

@@ -32,7 +34,7 @@ def init(dsn=None):

handler = SentryHandler(dsn)
setup_logging(handler)


def response_to_dict(response, spider, include_request=True, **kwargs):
"""Returns a dict based on a response from a spider"""
@@ -47,12 +49,13 @@ def response_to_dict(response, spider, include_request=True, **kwargs):
d['request'] = request_to_dict(response.request, spider)
return d


def response_from_dict(response, spider=None, **kwargs):
"""Returns a dict based on a response from a spider"""
url = response.get("url")
status = response.get("status")
headers = Headers([(x, list(map(str, y))) for x, y in
response.get("headers").items()])
response.get("headers").items()])
body = response.get("body")

respcls = responsetypes.from_args(headers=headers, url=url)
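response_to_dict and response_from_dict form a serialization pair: the extensions attach the dict form to Sentry events, and that dict can be turned back into a Response. An illustrative round-trip, assuming the dict carries the url/status/headers/body keys the code above reads back (values are placeholders, and include_request=False so no spider object is needed):

from scrapy.http import HtmlResponse

from scrapy_sentry.utils import response_from_dict, response_to_dict

resp = HtmlResponse(url="http://example.com", body=b"<html></html>",
                    status=200, encoding="utf-8")
d = response_to_dict(resp, spider=None, include_request=False)
rebuilt = response_from_dict(d, spider=None)  # equivalent Response object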
63 changes: 35 additions & 28 deletions setup.py
@@ -1,30 +1,37 @@
from setuptools import setup, find_packages

setup(name='scrapy-sentry',
version='0.6.1',
description='Sentry component for Scrapy',
long_description=open('README.md').read(),
author='Jordi Llonch',
author_email='llonchj@gmail.com',
url='https://github.com/llonchj/scrapy-sentry',
packages=find_packages(),
license='BSD',
install_requires=['Scrapy>0.16', 'raven', 'six'],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Environment :: Console',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
]
)
setup(
name='scrapy-sentry',
version='0.6.1',
description='Sentry component for Scrapy',
long_description=open('README.md').read(),
author='Jordi Llonch',
author_email='llonchj@gmail.com',
url='https://github.com/llonchj/scrapy-sentry',
packages=find_packages(),
license='BSD',
install_requires=['Scrapy>0.16', 'raven', 'six'],
tests_require=[
'pytest-flakes',
'pytest-pep8',
'pytest',
'tox',
],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Environment :: Console',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
]
)
10 changes: 10 additions & 0 deletions tox.ini
@@ -0,0 +1,10 @@
[tox]
envlist = py27, py33, py34, py35

[testenv]
deps=
pytest
pytest-pep8
pytest-flakes

commands = py.test --pep8 --flakes -v scrapy_sentry
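The commands line maps directly onto a plain pytest invocation, so the same checks can be run without tox. A sketch, assuming pytest plus the pytest-pep8 and pytest-flakes plugins are installed in the active environment:

# run_checks.py (sketch): the equivalent of the tox commands line above
import sys

import pytest

sys.exit(pytest.main(["--pep8", "--flakes", "-v", "scrapy_sentry"]))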
