Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP

Comparing changes

Choose two branches to see what's changed or to start a new pull request. If you need to, you can also compare across forks.

Open a pull request

Create a new pull request by comparing changes across two branches. If you need to, you can also compare across forks.
base fork: ericholscher/django-test-utils
base: master
...
head fork: kkubasik/django-test-utils
compare: master
Checking mergeability… Don't worry, you can still create the pull request.
  • 11 commits
  • 4 files changed
  • 1 commit comment
  • 2 contributors
Commits on Jan 28, 2009
@initcrash initcrash follow redirects when crawling
Signed-off-by: Kevin Kubasik <kevin@kubasik.net>
aa990e4
Kevin Kubasik Handle less sane strings better 0666738
Commits on Mar 30, 2009
Kevin Kubasik local kevin commits ad6a5e9
Kevin Kubasik merging new crawl work d8d6fa1
Kevin Kubasik sloppy progress towards a more abstract middleware 0e1e62a
Kevin Kubasik Almost have default serializer working 844bcc3
Kevin Kubasik Untested, but closer to serializer working 5661aac
Commits on Mar 31, 2009
Kevin Kubasik updates 9d0c73f
Commits on Apr 06, 2009
Kevin Kubasik Merging from eric 984050d
Commits on Apr 07, 2009
Kevin Kubasik merging 2857cf9
Kevin Kubasik Merge branch 'new' of git://github.com/ericholscher/django-test-utils e812454
View
3  README
@@ -1,3 +1,6 @@
+aaiasldkfjdasdfssdf
+
+
Welcome to django-test-utils. The blog post announcing this project has more information about it and is located at http://ericholscher.com/projects/django-test-utils/
Documentation for django-testmaker can be found at http://ericholscher.com/projects/django-testmaker/
View
5 test_utils/crawler.py
@@ -21,7 +21,7 @@ class Crawler(object):
"""
This is a class that represents a URL crawler in python
"""
- def __init__(self, base_url, conf_urls={}, verbosity=1, **kwargs):
+ def __init__(self, base_url, conf_urls={}, verbosity=1,username=None,password=None, **kwargs):
self.base_url = base_url
self.conf_urls = conf_urls
self.verbosity = verbosity
@@ -31,7 +31,8 @@ def __init__(self, base_url, conf_urls={}, verbosity=1, **kwargs):
self.crawled = {}
self.c = Client(REMOTE_ADDR='127.0.0.1')
-
+ if username:
+ self.c.login(username=username,password=password)
self.plugins = []
for plug in Plugin.__subclasses__():
active = getattr(plug, 'active', True)
View
11 test_utils/management/commands/crawlurls.py
@@ -8,7 +8,7 @@
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
- make_option('-p', '--pdb', action='store_true', dest='pdb', default=False,
+ make_option('-d', '--pdb', action='store_true', dest='pdb', default=False,
help='Pass -p to drop into pdb on an error'),
make_option('-f', '--fixture', action='store_true', dest='fixtures', default=False,
help='Pass -f to create a fixture for the data.'),
@@ -18,6 +18,12 @@ class Command(BaseCommand):
help='Pass -t to time your requests.'),
make_option('-r', '--response', action='store_true', dest='response', default=False,
help='Pass -r to store the response objects.'),
+ make_option('-u', '--username', action='store_true', dest='username', default=False,
+ help='Pass -u to set a username'),
+ make_option('-p', '--password', action='store_true', dest='password', default=False,
+ help='Pass -p to set a password');
+
+
#TODO
make_option('-e', '--each', action='store', dest='each',
type='int',
@@ -52,6 +58,7 @@ def handle(self, *args, **options):
#Now we have all of our URLs to test
+ c = Crawler('/', conf_urls=conf_urls, verbosity=verbosity,username=username,password=password)
#c = Crawler('/', conf_urls=conf_urls, verbosity=verbosity)
- c = Patu('http://ericholscher.com', conf_urls=conf_urls, verbosity=verbosity)
+ #c = Patu('http://ericholscher.com', conf_urls=conf_urls, verbosity=verbosity)
c.run()
View
6 test_utils/management/commands/testmaker.py
@@ -66,6 +66,12 @@ def handle_app(self, app, **options):
handler = logging.FileHandler(test_file, 'a')
handler.setFormatter(logging.Formatter('%(message)s'))
log.addHandler(handler)
+
+ log_s = logging.getLogger('testserializer')
+ log_s.setLevel(logging.INFO)
+ handler_s = logging.FileHandler(serialize_file, 'a')
+ handler_s.setFormatter(logging.Formatter('%(message)s'))
+ log_s.addHandler(handler_s)
log_s = logging.getLogger('testserializer')
log_s.setLevel(logging.INFO)

Showing you all comments on commits in this comparison.

@ericholscher

You're importing copy and not using it. The docstring for slugify is wrong. I agree that the string handling could use work, but this commit isn’t ready to be committed.

Something went wrong with that request. Please try again.