Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse files

Testable searchable sphinx

  • Loading branch information...
commit 1ff31408323637bb833e5dd0112670b4bab22cb7 1 parent 125c8c1
@davedash davedash authored fwenzel committed
View
0  apps/__init__.py
No changes.
View
710 apps/feedback/fixtures/feedback/opinions.json
@@ -0,0 +1,710 @@
+[
+ {
+ "pk": 29,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "Let's see if this works.",
+ "created": "2010-06-22 00:38:17",
+ "url": "",
+ "positive": false,
+ "locale": "en-US",
+ "version": "3.7a6pre",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.3a6pre) Gecko/20100620 Minefield/3.7a6pre",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 28,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "I don't know what we're yelling about!",
+ "created": "2010-06-22 00:22:56",
+ "url": "http://www.google.com/",
+ "positive": false,
+ "locale": "en-US",
+ "version": "3.6.4",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.4) Gecko/20100611 Firefox/3.6.4",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 27,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "Why don't you go visit youtube.com",
+ "created": "2010-06-21 00:30:45",
+ "url": "",
+ "positive": true,
+ "locale": "en-US",
+ "version": "3.6.4",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.4) Gecko/20100611 Firefox/3.6.4",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 26,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "ohai!",
+ "created": "2010-06-19 04:39:30",
+ "url": "",
+ "positive": true,
+ "locale": "",
+ "version": "3.6.4",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; rv:1.9.2.4) Gecko/20100611 Firefox/3.6.4",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 25,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "I am trying to send an email to me@example.com and it does not work!",
+ "created": "2010-06-18 04:32:41",
+ "url": "",
+ "positive": true,
+ "locale": "en-US",
+ "version": "3.6.4",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.4) Gecko/20100611 Firefox/3.6.4",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 24,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "Firefox's improved stability makes it hard for me to claim I lost my homework due to a browser crash.",
+ "created": "2010-06-18 00:47:24",
+ "url": "",
+ "positive": false,
+ "locale": "en-US",
+ "version": "3.6.4",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.4) Gecko/20100611 Firefox/3.6.4",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 23,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "Firefox makes wasting my time on Facebook so much easier!",
+ "created": "2010-06-18 00:46:16",
+ "url": "",
+ "positive": true,
+ "locale": "en-US",
+ "version": "3.6.4",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.4) Gecko/20100611 Firefox/3.6.4",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 22,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "I have a problem with Google and I think it is Mozilla's fault.",
+ "created": "2010-06-18 00:44:58",
+ "url": "http://www.google.de/",
+ "positive": false,
+ "locale": "en-US",
+ "version": "3.6.4",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.4) Gecko/20100611 Firefox/3.6.4",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 21,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "I like Firefox more than Internet Explorer.",
+ "created": "2010-06-18 00:02:47",
+ "url": "",
+ "positive": true,
+ "locale": "en-US",
+ "version": "3.6.4",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.4) Gecko/20100611 Firefox/3.6.4",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 20,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "I don't heart Firefox Input",
+ "created": "2010-06-17 23:56:39",
+ "url": "http://example.com/meh",
+ "positive": false,
+ "locale": "en-US",
+ "version": "3.6.4",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.4) Gecko/20100611 Firefox/3.6.4",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 19,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "I \u2665 unicode",
+ "created": "2010-06-17 23:31:18",
+ "url": "",
+ "positive": true,
+ "locale": "en-US",
+ "version": "3.6.4",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.4) Gecko/20100611 Firefox/3.6.4",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 18,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "The more feedback, the better.",
+ "created": "2010-06-16 02:41:24",
+ "url": "",
+ "positive": true,
+ "locale": "en-US",
+ "version": "3.6.4",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.4) Gecko/20100527 Firefox/3.6.4",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 17,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "Happy happy, joy joy.",
+ "created": "2010-06-16 02:40:59",
+ "url": "",
+ "positive": true,
+ "locale": "en-US",
+ "version": "3.6.4",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.4) Gecko/20100527 Firefox/3.6.4",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 16,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "Happy happy, joy joy.",
+ "created": "2010-06-16 02:35:48",
+ "url": "",
+ "positive": true,
+ "locale": "en-US",
+ "version": "3.6.4",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.4) Gecko/20100527 Firefox/3.6.4",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 15,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "Happy happy, joy joy.",
+ "created": "2010-06-16 02:27:12",
+ "url": "",
+ "positive": true,
+ "locale": "en-US",
+ "version": "3.6.4",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.4) Gecko/20100527 Firefox/3.6.4",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 14,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "Happy happy, joy joy.",
+ "created": "2010-06-16 02:27:06",
+ "url": "",
+ "positive": true,
+ "locale": "en-US",
+ "version": "3.6.4",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.4) Gecko/20100527 Firefox/3.6.4",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 13,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "This version of Firefox is the best version ever.",
+ "created": "2010-06-15 05:21:21",
+ "url": "",
+ "positive": true,
+ "locale": "en-US",
+ "version": "3.6.4",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.4) Gecko/20100527 Firefox/3.6.4",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 12,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "I like all about it. Firefox is teh awesome.",
+ "created": "2010-06-10 02:14:58",
+ "url": "",
+ "positive": true,
+ "locale": "en-US",
+ "version": "3.6.3",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 11,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "I have something negative to say. Meh!",
+ "created": "2010-06-02 03:39:54",
+ "url": "",
+ "positive": false,
+ "locale": "en-US",
+ "version": "3.6.3",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 10,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "This is a super helpful feedback message. I don't like Firefox because a, b, and c are wrong with it.",
+ "created": "2010-06-01 08:54:40",
+ "url": "http://google.com/",
+ "positive": false,
+ "locale": "de",
+ "version": "3.6.3",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; de; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 9,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "mozilla firefox sucks. thats a good thing since i like it better than internet explorer.",
+ "created": "2010-06-01 02:38:39",
+ "url": "http://localhost:8100/thanks",
+ "positive": false,
+ "locale": "en-US",
+ "version": "3.6.3",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 8,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "having flash debugger version in firefox really sucks. Half the sites I go to have actionscript errors. sadface, g4tv.com =[",
+ "created": "2010-06-01 01:31:45",
+ "url": "",
+ "positive": true,
+ "locale": "en-US",
+ "version": "3.6.3",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 7,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "having flash debugger version in firefox really sucks. Half the sites I go to have actionscript errors. sadface, g4tv.com =[",
+ "created": "2010-06-01 01:31:22",
+ "url": "",
+ "positive": true,
+ "locale": "en-US",
+ "version": "3.6.3",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 6,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "Firefox for iPhone seems not possible at this moment, but the Firefox Home tool is also good enough. Mozilla is now developing a new app Firefox Home which could help to sync all the tabs settings, bookmarks and history to the iPhone. After simple setup, users would be able to browse those things on the iPhone, and view the websites with the internal Safari browser. Mozilla didn\u2019t announce the release day, but this tool will be free for sure. ",
+ "created": "2010-05-27 06:32:21",
+ "url": "",
+ "positive": true,
+ "locale": "en-US",
+ "version": "3.6.3",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 5,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "Firefox for iPhone seems not possible at this moment, but the Firefox Home tool is also good enough. Mozilla is now developing a new app Firefox Home which could help to sync all the tabs settings, bookmarks and history to the iPhone. After simple setup, users would be able to browse those things on the iPhone, and view the websites with the internal Safari browser. Mozilla didn\u2019t announce the release day, but this tool will be free for sure.",
+ "created": "2010-05-27 06:28:01",
+ "url": "",
+ "positive": true,
+ "locale": "en-US",
+ "version": "3.6.3",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 4,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "Internet Exploder has always been shit, Firefox got fat, Safari is great but Chrome wins the prize.",
+ "created": "2010-05-27 06:27:02",
+ "url": "",
+ "positive": false,
+ "locale": "en-US",
+ "version": "3.6.3",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 3,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "still nothing. i tried firefox and internet explorer - got the red X. i told you. yfrog sucks a nut. MRAW! :(",
+ "created": "2010-05-27 03:38:10",
+ "url": "http://www.mozilla.com/en-US/firefox/all-beta.html",
+ "positive": false,
+ "locale": "en-US",
+ "version": "3.6.3",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 2,
+ "model": "feedback.opinion",
+ "fields": {
+ "product": 1,
+ "description": "My alternative browser to #Firefox is now Internet Explorer, simply because Google #Chrome crashes way too often. It sucks! Shame on Google.",
+ "created": "2010-05-27 03:36:55",
+ "url": "http://www.mozilla.com/en-US/firefox/all-beta.html",
+ "positive": false,
+ "locale": "en-US",
+ "version": "3.6.3",
+ "user_agent": "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3",
+ "os": "mac"
+ }
+ },
+ {
+ "pk": 28,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "add_logentry",
+ "name": "Can add log entry",
+ "content_type": 10
+ }
+ },
+ {
+ "pk": 29,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "change_logentry",
+ "name": "Can change log entry",
+ "content_type": 10
+ }
+ },
+ {
+ "pk": 30,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "delete_logentry",
+ "name": "Can delete log entry",
+ "content_type": 10
+ }
+ },
+ {
+ "pk": 4,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "add_group",
+ "name": "Can add group",
+ "content_type": 2
+ }
+ },
+ {
+ "pk": 5,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "change_group",
+ "name": "Can change group",
+ "content_type": 2
+ }
+ },
+ {
+ "pk": 6,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "delete_group",
+ "name": "Can delete group",
+ "content_type": 2
+ }
+ },
+ {
+ "pk": 10,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "add_message",
+ "name": "Can add message",
+ "content_type": 4
+ }
+ },
+ {
+ "pk": 11,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "change_message",
+ "name": "Can change message",
+ "content_type": 4
+ }
+ },
+ {
+ "pk": 12,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "delete_message",
+ "name": "Can delete message",
+ "content_type": 4
+ }
+ },
+ {
+ "pk": 1,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "add_permission",
+ "name": "Can add permission",
+ "content_type": 1
+ }
+ },
+ {
+ "pk": 2,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "change_permission",
+ "name": "Can change permission",
+ "content_type": 1
+ }
+ },
+ {
+ "pk": 3,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "delete_permission",
+ "name": "Can delete permission",
+ "content_type": 1
+ }
+ },
+ {
+ "pk": 7,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "add_user",
+ "name": "Can add user",
+ "content_type": 3
+ }
+ },
+ {
+ "pk": 8,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "change_user",
+ "name": "Can change user",
+ "content_type": 3
+ }
+ },
+ {
+ "pk": 9,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "delete_user",
+ "name": "Can delete user",
+ "content_type": 3
+ }
+ },
+ {
+ "pk": 13,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "add_contenttype",
+ "name": "Can add content type",
+ "content_type": 5
+ }
+ },
+ {
+ "pk": 14,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "change_contenttype",
+ "name": "Can change content type",
+ "content_type": 5
+ }
+ },
+ {
+ "pk": 15,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "delete_contenttype",
+ "name": "Can delete content type",
+ "content_type": 5
+ }
+ },
+ {
+ "pk": 22,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "add_opinion",
+ "name": "Can add opinion",
+ "content_type": 8
+ }
+ },
+ {
+ "pk": 23,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "change_opinion",
+ "name": "Can change opinion",
+ "content_type": 8
+ }
+ },
+ {
+ "pk": 24,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "delete_opinion",
+ "name": "Can delete opinion",
+ "content_type": 8
+ }
+ },
+ {
+ "pk": 25,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "add_term",
+ "name": "Can add term",
+ "content_type": 9
+ }
+ },
+ {
+ "pk": 26,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "change_term",
+ "name": "Can change term",
+ "content_type": 9
+ }
+ },
+ {
+ "pk": 27,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "delete_term",
+ "name": "Can delete term",
+ "content_type": 9
+ }
+ },
+ {
+ "pk": 16,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "add_session",
+ "name": "Can add session",
+ "content_type": 6
+ }
+ },
+ {
+ "pk": 17,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "change_session",
+ "name": "Can change session",
+ "content_type": 6
+ }
+ },
+ {
+ "pk": 18,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "delete_session",
+ "name": "Can delete session",
+ "content_type": 6
+ }
+ },
+ {
+ "pk": 19,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "add_site",
+ "name": "Can add site",
+ "content_type": 7
+ }
+ },
+ {
+ "pk": 20,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "change_site",
+ "name": "Can change site",
+ "content_type": 7
+ }
+ },
+ {
+ "pk": 21,
+ "model": "auth.permission",
+ "fields": {
+ "codename": "delete_site",
+ "name": "Can delete site",
+ "content_type": 7
+ }
+ },
+ {
+ "pk": 1,
+ "model": "auth.user",
+ "fields": {
+ "username": "fred",
+ "first_name": "",
+ "last_name": "",
+ "is_active": true,
+ "is_superuser": true,
+ "is_staff": true,
+ "last_login": "2010-06-22 00:38:49",
+ "groups": [],
+ "user_permissions": [],
+ "password": "sha1$efa99$160da6091257e7996fb4f75d3a056ce67ea5d9ae",
+ "email": "freeed@gmail.com",
+ "date_joined": "2010-05-25 11:09:57"
+ }
+ }
+]
View
57 apps/search/client.py
@@ -0,0 +1,57 @@
+import socket
+import time
+
+from django.conf import settings
+
+import sphinxapi as sphinx
+from reporter.utils import crc32, manual_order
+from feedback.models import Opinion
+
+
class SearchError(Exception):
    """Raised when a Sphinx search cannot be completed (timeout or searchd error)."""
+
+
+class Client():
+
+ def __init__(self):
+ self.sphinx = sphinx.SphinxClient()
+ self.sphinx.SetServer(settings.SPHINX_HOST, settings.SPHINX_PORT)
+
+ def query(self, term, limit=10, offset=0, **kwargs):
+ """Submits formatted query, retrieves ids, returns Opinions."""
+ sc = self.sphinx
+
+ if isinstance(kwargs.get('product'), int):
+ sc.SetFilter('product', (kwargs['product'],))
+
+ if kwargs.get('version'):
+ sc.SetFilter('version', (crc32(kwargs['version']),))
+
+ if isinstance(kwargs.get('positive'), int):
+ sc.SetFilter('positive', (kwargs['positive'], ))
+
+ if kwargs.get('os'):
+ sc.SetFilter('os', (crc32(kwargs['os']),))
+
+ if kwargs.get('locale'):
+ sc.SetFilter('locale', (crc32(kwargs['locale']),))
+
+ if kwargs.get('date_end') and kwargs.get('date_start'):
+ start = int(time.mktime(kwargs['date_start'].timetuple()))
+ end = int(time.mktime(kwargs['date_end'].timetuple()))
+ sc.SetFilterRange('created', start, end)
+
+ try:
+ result = sc.Query(term, 'opinions')
+ except socket.timeout:
+ raise SearchError("Query has timed out.")
+ except Exception, e:
+ raise SearchError("Sphinx threw an unknown exception: %s" % e)
+
+ if sc.GetLastError():
+ raise SearchError(sc.GetLastError())
+
+
+ opinion_ids = [m['id'] for m in result['matches']]
+ return manual_order(Opinion.objects.all(), opinion_ids)
View
976 apps/search/sphinxapi.py
@@ -0,0 +1,976 @@
+#
+# $Id$
+#
+# Python version of Sphinx searchd client (Python API)
+#
+# Copyright (c) 2006-2008, Andrew Aksyonoff
+# Copyright (c) 2006, Mike Osadnik
+# All rights reserved
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License. You should have
+# received a copy of the GPL license along with this program; if you
+# did not, you can find it at http://www.gnu.org/
+#
+
+import sys
+import select
+import socket
+import re
+from struct import *
+
+
+# Zamboni customizations
+Z_SPHINX_TIMEOUT = 1
+
+# known searchd commands
+SEARCHD_COMMAND_SEARCH = 0
+SEARCHD_COMMAND_EXCERPT = 1
+SEARCHD_COMMAND_UPDATE = 2
+SEARCHD_COMMAND_KEYWORDS= 3
+SEARCHD_COMMAND_PERSIST = 4
+
+# current client-side command implementation versions
+VER_COMMAND_SEARCH = 0x116
+VER_COMMAND_EXCERPT = 0x100
+VER_COMMAND_UPDATE = 0x101
+VER_COMMAND_KEYWORDS = 0x100
+
+# known searchd status codes
+SEARCHD_OK = 0
+SEARCHD_ERROR = 1
+SEARCHD_RETRY = 2
+SEARCHD_WARNING = 3
+
+# known match modes
+SPH_MATCH_ALL = 0
+SPH_MATCH_ANY = 1
+SPH_MATCH_PHRASE = 2
+SPH_MATCH_BOOLEAN = 3
+SPH_MATCH_EXTENDED = 4
+SPH_MATCH_FULLSCAN = 5
+SPH_MATCH_EXTENDED2 = 6
+
+# known ranking modes (extended2 mode only)
+SPH_RANK_PROXIMITY_BM25 = 0 # default mode, phrase proximity major factor and BM25 minor one
+SPH_RANK_BM25 = 1 # statistical mode, BM25 ranking only (faster but worse quality)
+SPH_RANK_NONE = 2 # no ranking, all matches get a weight of 1
+SPH_RANK_WORDCOUNT = 3 # simple word-count weighting, rank is a weighted sum of per-field keyword occurence counts
+
+# known sort modes
+SPH_SORT_RELEVANCE = 0
+SPH_SORT_ATTR_DESC = 1
+SPH_SORT_ATTR_ASC = 2
+SPH_SORT_TIME_SEGMENTS = 3
+SPH_SORT_EXTENDED = 4
+SPH_SORT_EXPR = 5
+
+# known filter types
+SPH_FILTER_VALUES = 0
+SPH_FILTER_RANGE = 1
+SPH_FILTER_FLOATRANGE = 2
+
+# known attribute types
+SPH_ATTR_NONE = 0
+SPH_ATTR_INTEGER = 1
+SPH_ATTR_TIMESTAMP = 2
+SPH_ATTR_ORDINAL = 3
+SPH_ATTR_BOOL = 4
+SPH_ATTR_FLOAT = 5
+SPH_ATTR_BIGINT = 6
+SPH_ATTR_MULTI = 0X40000000L
+
+SPH_ATTR_TYPES = (SPH_ATTR_NONE,
+ SPH_ATTR_INTEGER,
+ SPH_ATTR_TIMESTAMP,
+ SPH_ATTR_ORDINAL,
+ SPH_ATTR_BOOL,
+ SPH_ATTR_FLOAT,
+ SPH_ATTR_BIGINT,
+ SPH_ATTR_MULTI)
+
+# known grouping functions
+SPH_GROUPBY_DAY = 0
+SPH_GROUPBY_WEEK = 1
+SPH_GROUPBY_MONTH = 2
+SPH_GROUPBY_YEAR = 3
+SPH_GROUPBY_ATTR = 4
+SPH_GROUPBY_ATTRPAIR = 5
+
+
+class SphinxClient:
+ def __init__ (self):
+ """
+ Create a new client object, and fill defaults.
+ """
+ self._host = 'localhost' # searchd host (default is "localhost")
+ self._port = 9312 # searchd port (default is 9312)
+ self._path = None # searchd unix-domain socket path
+ self._socket = None # opened lazily by _Connect()
+ self._offset = 0 # how much records to seek from result-set start (default is 0)
+ self._limit = 20 # how much records to return from result-set starting at offset (default is 20)
+ self._mode = SPH_MATCH_ALL # query matching mode (default is SPH_MATCH_ALL)
+ self._weights = [] # per-field weights (default is 1 for all fields)
+ self._sort = SPH_SORT_RELEVANCE # match sorting mode (default is SPH_SORT_RELEVANCE)
+ self._sortby = '' # attribute to sort by (default is "")
+ self._min_id = 0 # min ID to match (default is 0)
+ self._max_id = 0 # max ID to match (default is UINT_MAX)
+ self._filters = [] # search filters
+ self._groupby = '' # group-by attribute name
+ self._groupfunc = SPH_GROUPBY_DAY # group-by function (to pre-process group-by attribute value with)
+ self._groupsort = '@group desc' # group-by sorting clause (to sort groups in result set with)
+ self._groupdistinct = '' # group-by count-distinct attribute
+ self._maxmatches = 1000 # max matches to retrieve
+ self._cutoff = 0 # cutoff to stop searching at
+ self._retrycount = 0 # distributed retry count
+ self._retrydelay = 0 # distributed retry delay
+ self._anchor = {} # geographical anchor point
+ self._indexweights = {} # per-index weights
+ self._ranker = SPH_RANK_PROXIMITY_BM25 # ranking mode
+ self._maxquerytime = 0 # max query time, milliseconds (default is 0, do not limit)
+ self._fieldweights = {} # per-field-name weights
+ self._overrides = {} # per-query attribute values overrides
+ self._select = '*' # select-list (attributes or expressions, with optional aliases)
+
+ self._error = '' # last error message
+ self._warning = '' # last warning message
+ self._reqs = [] # requests array for multi-query
+
+ def __del__ (self):
+ """Close the open searchd socket, if any, when the client is destroyed."""
+ if self._socket:
+ self._socket.close()
+
+
+ def GetLastError (self):
+ """
+ Get last error message (string).
+ """
+ # Set by _Connect/_GetResponse on failure and refreshed by Query();
+ # an empty string means no error.
+ return self._error
+
+
+ def GetLastWarning (self):
+ """
+ Get last warning message (string).
+ """
+ # Set by _GetResponse on SEARCHD_WARNING / version skew and refreshed
+ # by Query(); an empty string means no warning.
+ return self._warning
+
+
+ def SetServer (self, host, port = None):
+ """
+ Set searchd server host and port.
+ """
+ assert(isinstance(host, str))
+ # A leading '/' or a 'unix://' prefix selects a unix-domain socket
+ # path; 'port' is ignored in that case.
+ if host.startswith('/'):
+ self._path = host
+ return
+ elif host.startswith('unix://'):
+ self._path = host[7:]
+ return
+ # TCP mode: a port is then required (the default None fails this assert).
+ assert(isinstance(port, int))
+ self._host = host
+ self._port = port
+ self._path = None
+
+
+ def _Connect (self):
+ """
+ INTERNAL METHOD, DO NOT CALL. Connects to searchd server.
+ """
+ if self._socket:
+ # we have a socket, but is it still alive?
+ sr, sw, _ = select.select ( [self._socket], [self._socket], [], 0 )
+
+ # this is how alive socket should look
+ if len(sr)==0 and len(sw)==1:
+ return self._socket
+
+ # oops, looks like it was closed, lets reopen
+ self._socket.close()
+ self._socket = None
+
+ try:
+ if self._path:
+ af = socket.AF_UNIX
+ addr = self._path
+ desc = self._path
+ else:
+ af = socket.AF_INET
+ addr = ( self._host, self._port )
+ desc = '%s;%s' % addr
+ sock = socket.socket ( af, socket.SOCK_STREAM )
+ sock.settimeout(Z_SPHINX_TIMEOUT)
+ sock.connect ( addr )
+ except socket.error, msg:
+ if sock:
+ sock.close()
+ self._error = 'connection to %s failed (%s)' % ( desc, msg )
+ return
+
+ v = unpack('>L', sock.recv(4))
+ if v<1:
+ sock.close()
+ self._error = 'expected searchd protocol version, got %s' % v
+ return
+
+ # all ok, send my version
+ sock.send(pack('>L', 1))
+ return sock
+
+
+ def _GetResponse (self, sock, client_ver):
+ """
+ INTERNAL METHOD, DO NOT CALL. Gets and checks response packet from searchd server.
+ """
+ # Response header: 16-bit status, 16-bit version, 32-bit body length.
+ # NOTE(review): recv(8) may return fewer than 8 bytes on a slow link,
+ # in which case unpack() raises struct.error -- confirm callers cope.
+ (status, ver, length) = unpack('>2HL', sock.recv(8))
+ response = ''
+ left = length
+ # Read until the advertised body length is consumed; recv() may return
+ # partial chunks, and an empty chunk means the peer closed early.
+ while left>0:
+ chunk = sock.recv(left)
+ if chunk:
+ response += chunk
+ left -= len(chunk)
+ else:
+ break
+
+ # Close the socket unless it is the client's stored persistent one.
+ if not self._socket:
+ sock.close()
+
+ # check response
+ read = len(response)
+ if not response or read!=length:
+ if length:
+ self._error = 'failed to read searchd response (status=%s, ver=%s, len=%s, read=%s)' \
+ % (status, ver, length, read)
+ else:
+ self._error = 'received zero-sized searchd response'
+ return None
+
+ # check status
+ # A warning reply carries a length-prefixed warning string before the body.
+ if status==SEARCHD_WARNING:
+ wend = 4 + unpack ( '>L', response[0:4] )[0]
+ self._warning = response[4:wend]
+ return response[wend:]
+
+ if status==SEARCHD_ERROR:
+ self._error = 'searchd error: '+response[4:]
+ return None
+
+ if status==SEARCHD_RETRY:
+ self._error = 'temporary searchd error: '+response[4:]
+ return None
+
+ if status!=SEARCHD_OK:
+ self._error = 'unknown status code %d' % status
+ return None
+
+ # check version
+ # An older server is tolerated; record a warning instead of failing.
+ if ver<client_ver:
+ self._warning = 'searchd command v.%d.%d older than client\'s v.%d.%d, some options might not work' \
+ % (ver>>8, ver&0xff, client_ver>>8, client_ver&0xff)
+
+ return response
+
+
+ def SetLimits (self, offset, limit, maxmatches=0, cutoff=0):
+ """
+ Set offset and count into result set, and optionally set max-matches and cutoff limits.
+ """
+ # 16777216 == 2**24, the protocol's paging ceiling.
+ assert ( type(offset) in [int,long] and 0<=offset<16777216 )
+ assert ( type(limit) in [int,long] and 0<limit<16777216 )
+ assert(maxmatches>=0)
+ self._offset = offset
+ self._limit = limit
+ # maxmatches=0 means "keep the current value".
+ if maxmatches>0:
+ self._maxmatches = maxmatches
+ # Negative cutoff values are silently ignored; 0 and above are stored.
+ if cutoff>=0:
+ self._cutoff = cutoff
+
+
+ def SetMaxQueryTime (self, maxquerytime):
+ """
+ Set maximum query time, in milliseconds, per-index. 0 means 'do not limit'.
+ """
+ # NOTE(review): the docstring says 0 means 'do not limit', but the
+ # assert below rejects 0 -- only positive values can be set here.
+ assert(isinstance(maxquerytime,int) and maxquerytime>0)
+ self._maxquerytime = maxquerytime
+
+
+ def SetMatchMode (self, mode):
+ """
+ Set matching mode.
+ """
+ # Must be one of the SPH_MATCH_* module constants.
+ assert(mode in [SPH_MATCH_ALL, SPH_MATCH_ANY, SPH_MATCH_PHRASE, SPH_MATCH_BOOLEAN, SPH_MATCH_EXTENDED, SPH_MATCH_FULLSCAN, SPH_MATCH_EXTENDED2])
+ self._mode = mode
+
+
+ def SetRankingMode (self, ranker):
+ """
+ Set ranking mode.
+ """
+ # Must be one of the SPH_RANK_* module constants (extended2 mode only).
+ assert(ranker in [SPH_RANK_PROXIMITY_BM25, SPH_RANK_BM25, SPH_RANK_NONE, SPH_RANK_WORDCOUNT])
+ self._ranker = ranker
+
+
+ def SetSortMode ( self, mode, clause='' ):
+ """
+ Set sorting mode.
+ """
+ # 'clause' carries the attribute/expression string used by the
+ # attribute-, extended- and expression-based sort modes.
+ assert ( mode in [SPH_SORT_RELEVANCE, SPH_SORT_ATTR_DESC, SPH_SORT_ATTR_ASC, SPH_SORT_TIME_SEGMENTS, SPH_SORT_EXTENDED, SPH_SORT_EXPR] )
+ assert ( isinstance ( clause, str ) )
+ self._sort = mode
+ self._sortby = clause
+
+
+ def SetWeights (self, weights):
+ """
+ Set per-field weights.
+ WARNING, DEPRECATED; do not use it! use SetFieldWeights() instead
+ """
+ # Kept only for backwards compatibility.
+ assert(isinstance(weights, list))
+ for w in weights:
+ assert(isinstance(w, int))
+ self._weights = weights
+
+
+ def SetFieldWeights (self, weights):
+ """
+ Bind per-field weights by name; expects (name,field_weight) dictionary as argument.
+ """
+ # Every key must be a str and every value an int; validated then stored as-is.
+ assert(isinstance(weights,dict))
+ for key,val in weights.items():
+ assert(isinstance(key,str))
+ assert(isinstance(val,int))
+ self._fieldweights = weights
+
+
+ def SetIndexWeights (self, weights):
+ """
+ Bind per-index weights by name; expects (name,index_weight) dictionary as argument.
+ """
+ # Every key must be a str and every value an int; validated then stored as-is.
+ assert(isinstance(weights,dict))
+ for key,val in weights.items():
+ assert(isinstance(key,str))
+ assert(isinstance(val,int))
+ self._indexweights = weights
+
+
+ def SetIDRange (self, minid, maxid):
+ """
+ Set IDs range to match.
+ Only match records if document ID is between 'minid' and 'maxid' (inclusive).
+ """
+ # long is accepted because document IDs may exceed the int range in Python 2.
+ assert(isinstance(minid, (int, long)))
+ assert(isinstance(maxid, (int, long)))
+ assert(minid<=maxid)
+ self._min_id = minid
+ self._max_id = maxid
+
+
+ def SetFilter ( self, attribute, values, exclude=0 ):
+ """
+ Set values set filter.
+ Only match records where 'attribute' value is in given 'values' set.
+ """
+ assert(isinstance(attribute, str))
+ # Any iterable of ints is accepted; 'exclude' is stored with the filter
+ # and forwarded to searchd.
+ assert iter(values)
+
+ for value in values:
+ assert(isinstance(value, (int,long)))
+
+ self._filters.append ( { 'type':SPH_FILTER_VALUES, 'attr':attribute, 'exclude':exclude, 'values':values } )
+
+
+ def SetFilterRange (self, attribute, min_, max_, exclude=0 ):
+ """
+ Set range filter.
+ Only match records if 'attribute' value is between 'min_' and 'max_' (inclusive).
+ """
+ assert(isinstance(attribute, str))
+ assert(isinstance(min_, (int,long)))
+ assert(isinstance(max_, (int,long)))
+ assert(min_<=max_)
+
+ self._filters.append ( { 'type':SPH_FILTER_RANGE, 'attr':attribute, 'exclude':exclude, 'min':min_, 'max':max_ } )
+
+
+ def SetFilterFloatRange (self, attribute, min_, max_, exclude=0 ):
+ """
+ Set float range filter.
+ Only match records if 'attribute' value is between 'min_' and 'max_' (inclusive).
+ """
+ assert(isinstance(attribute,str))
+ assert(isinstance(min_,float))
+ assert(isinstance(max_,float))
+ assert(min_ <= max_)
+ self._filters.append ( {'type':SPH_FILTER_FLOATRANGE, 'attr':attribute, 'exclude':exclude, 'min':min_, 'max':max_} )
+
+
+ def SetGeoAnchor (self, attrlat, attrlong, latitude, longitude):
+ """
+ Set anchor point for geosphere distance calculations.
+ NOTE(review): Sphinx expects latitude/longitude in radians -- confirm
+ against the Sphinx API documentation.
+ """
+ assert(isinstance(attrlat,str))
+ assert(isinstance(attrlong,str))
+ assert(isinstance(latitude,float))
+ assert(isinstance(longitude,float))
+ self._anchor['attrlat'] = attrlat
+ self._anchor['attrlong'] = attrlong
+ self._anchor['lat'] = latitude
+ self._anchor['long'] = longitude
+
+
+ def SetGroupBy ( self, attribute, func, groupsort='@group desc' ):
+ """
+ Set grouping attribute and function.
+ """
+ assert(isinstance(attribute, str))
+ # 'func' must be one of the SPH_GROUPBY_* module constants.
+ assert(func in [SPH_GROUPBY_DAY, SPH_GROUPBY_WEEK, SPH_GROUPBY_MONTH, SPH_GROUPBY_YEAR, SPH_GROUPBY_ATTR, SPH_GROUPBY_ATTRPAIR] )
+ assert(isinstance(groupsort, str))
+
+ self._groupby = attribute
+ self._groupfunc = func
+ self._groupsort = groupsort
+
+
+ def SetGroupDistinct (self, attribute):
+ """Set the count-distinct attribute for group-by queries."""
+ assert(isinstance(attribute,str))
+ self._groupdistinct = attribute
+
+
+ def SetRetries (self, count, delay=0):
+ """Set distributed retry count and delay."""
+ assert(isinstance(count,int) and count>=0)
+ assert(isinstance(delay,int) and delay>=0)
+ self._retrycount = count
+ self._retrydelay = delay
+
+
+ def SetOverride (self, name, type, values):
+ """
+ Set a per-query override of attribute 'name' values.
+ NOTE(review): the keys of 'values' are presumably document IDs mapping
+ to the overriding attribute values -- verify against the Sphinx docs.
+ """
+ assert(isinstance(name, str))
+ # 'type' shadows the builtin; kept for upstream API compatibility.
+ assert(type in SPH_ATTR_TYPES)
+ assert(isinstance(values, dict))
+
+ self._overrides[name] = {'name': name, 'type': type, 'values': values}
+
+ def SetSelect (self, select):
+ """Set the select-list: attributes or expressions, with optional aliases."""
+ assert(isinstance(select, str))
+ self._select = select
+
+
+ def ResetOverrides (self):
+ """Clear all per-query attribute overrides set via SetOverride()."""
+ self._overrides = {}
+
+
+ def ResetFilters (self):
+ """
+ Clear all filters (for multi-queries).
+ """
+ self._filters = []
+ # Also clears the geo anchor set by SetGeoAnchor().
+ self._anchor = {}
+
+
def ResetGroupBy (self):
    """
    Restore group-by settings to their defaults (for multi-queries).
    """
    (self._groupby, self._groupfunc,
     self._groupsort, self._groupdistinct) = ('', SPH_GROUPBY_DAY,
                                              '@group desc', '')
+
+
def Query (self, query, index='*', comment=''):
    """
    Connect to searchd server and run given search query.
    Returns None on failure; result set hash on success (see documentation for details).
    """
    # Convenience wrapper: a one-off query is just a batch of one.
    # Previously queued requests would change what RunQueries() returns,
    # so the batch must be empty here.
    assert(len(self._reqs)==0)
    self.AddQuery(query,index,comment)
    results = self.RunQueries()

    # RunQueries() returns None on network/IO failure.
    if not results or len(results)==0:
        return None
    # Surface the first (only) result's error/warning on the client.
    self._error = results[0]['error']
    self._warning = results[0]['warning']
    if results[0]['status'] == SEARCHD_ERROR:
        return None
    return results[0]
+
+
def AddQuery (self, query, index='*', comment=''):
    """
    Add query to batch.

    Serializes the client's current settings (paging, mode, sort,
    weights, filters, group-by, anchor, overrides, select-list)
    together with `query` into one binary request and appends it to
    the pending batch; RunQueries() transmits the whole batch.
    """
    # build request
    req = [pack('>5L', self._offset, self._limit, self._mode, self._ranker, self._sort)]
    req.append(pack('>L', len(self._sortby)))
    req.append(self._sortby)

    # searchd expects the query text as UTF-8 bytes
    if isinstance(query,unicode):
        query = query.encode('utf-8')
    assert(isinstance(query,str))

    req.append(pack('>L', len(query)))
    req.append(query)

    req.append(pack('>L', len(self._weights)))
    for w in self._weights:
        req.append(pack('>L', w))
    req.append(pack('>L', len(index)))
    req.append(index)
    req.append(pack('>L',1)) # id64 range marker
    req.append(pack('>Q', self._min_id))
    req.append(pack('>Q', self._max_id))

    # filters: per filter a length-prefixed attribute name, a type tag,
    # the payload (value list or range), then the exclude flag
    req.append ( pack ( '>L', len(self._filters) ) )
    for f in self._filters:
        req.append ( pack ( '>L', len(f['attr'])) + f['attr'])
        filtertype = f['type']
        req.append ( pack ( '>L', filtertype))
        if filtertype == SPH_FILTER_VALUES:
            req.append ( pack ('>L', len(f['values'])))
            for val in f['values']:
                req.append ( pack ('>q', val))
        elif filtertype == SPH_FILTER_RANGE:
            req.append ( pack ('>2q', f['min'], f['max']))
        elif filtertype == SPH_FILTER_FLOATRANGE:
            req.append ( pack ('>2f', f['min'], f['max']))
        req.append ( pack ( '>L', f['exclude'] ) )

    # group-by, max-matches, group-sort
    req.append ( pack ( '>2L', self._groupfunc, len(self._groupby) ) )
    req.append ( self._groupby )
    req.append ( pack ( '>2L', self._maxmatches, len(self._groupsort) ) )
    req.append ( self._groupsort )
    req.append ( pack ( '>LLL', self._cutoff, self._retrycount, self._retrydelay))
    req.append ( pack ( '>L', len(self._groupdistinct)))
    req.append ( self._groupdistinct)

    # anchor point: a 0/1 presence flag, then attribute names and coords
    if len(self._anchor) == 0:
        req.append ( pack ('>L', 0))
    else:
        attrlat, attrlong = self._anchor['attrlat'], self._anchor['attrlong']
        latitude, longitude = self._anchor['lat'], self._anchor['long']
        req.append ( pack ('>L', 1))
        req.append ( pack ('>L', len(attrlat)) + attrlat)
        req.append ( pack ('>L', len(attrlong)) + attrlong)
        req.append ( pack ('>f', latitude) + pack ('>f', longitude))

    # per-index weights
    req.append ( pack ('>L',len(self._indexweights)))
    for indx,weight in self._indexweights.items():
        req.append ( pack ('>L',len(indx)) + indx + pack ('>L',weight))

    # max query time
    req.append ( pack ('>L', self._maxquerytime) )

    # per-field weights
    req.append ( pack ('>L',len(self._fieldweights) ) )
    for field,weight in self._fieldweights.items():
        req.append ( pack ('>L',len(field)) + field + pack ('>L',weight) )

    # comment
    req.append ( pack('>L',len(comment)) + comment )

    # attribute overrides: per attribute, the id->value map encoded
    # according to the override's declared type
    req.append ( pack('>L', len(self._overrides)) )
    for v in self._overrides.values():
        req.extend ( ( pack('>L', len(v['name'])), v['name'] ) )
        req.append ( pack('>LL', v['type'], len(v['values'])) )
        for id, value in v['values'].iteritems():
            req.append ( pack('>Q', id) )
            if v['type'] == SPH_ATTR_FLOAT:
                req.append ( pack('>f', value) )
            elif v['type'] == SPH_ATTR_BIGINT:
                req.append ( pack('>q', value) )
            else:
                req.append ( pack('>l', value) )

    # select-list
    req.append ( pack('>L', len(self._select)) )
    req.append ( self._select )

    # send query, get response
    req = ''.join(req)

    self._reqs.append(req)
    return
+
+
def RunQueries (self):
    """
    Run queries batch.
    Returns None on network IO failure; or an array of result set hashes on success.
    """
    if len(self._reqs)==0:
        self._error = 'no queries defined, issue AddQuery() first'
        return None

    sock = self._Connect()
    if not sock:
        return None

    # Prepend the searchd header: command, version, total body length,
    # and how many queries are in the batch.
    req = ''.join(self._reqs)
    length = len(req)+4
    req = pack('>HHLL', SEARCHD_COMMAND_SEARCH, VER_COMMAND_SEARCH, length, len(self._reqs))+req
    # BUG FIX: socket.send() may transmit only part of the buffer;
    # sendall() loops until the whole request is written.
    sock.sendall(req)

    response = self._GetResponse(sock, VER_COMMAND_SEARCH)
    if not response:
        return None

    nreqs = len(self._reqs)

    # parse response: one result set per queued query, consumed
    # sequentially from the buffer at offset p
    max_ = len(response)
    p = 0

    results = []
    for i in range(0,nreqs,1):
        result = {}
        results.append(result)

        result['error'] = ''
        result['warning'] = ''
        status = unpack('>L', response[p:p+4])[0]
        p += 4
        result['status'] = status
        if status != SEARCHD_OK:
            # error/warning text is length-prefixed; a warning still
            # carries a result set, an error aborts this entry
            length = unpack('>L', response[p:p+4])[0]
            p += 4
            message = response[p:p+length]
            p += length

            if status == SEARCHD_WARNING:
                result['warning'] = message
            else:
                result['error'] = message
                continue

        # read schema: field names, then (attribute name, type) pairs
        fields = []
        attrs = []

        nfields = unpack('>L', response[p:p+4])[0]
        p += 4
        while nfields>0 and p<max_:
            nfields -= 1
            length = unpack('>L', response[p:p+4])[0]
            p += 4
            fields.append(response[p:p+length])
            p += length

        result['fields'] = fields

        nattrs = unpack('>L', response[p:p+4])[0]
        p += 4
        while nattrs>0 and p<max_:
            nattrs -= 1
            length = unpack('>L', response[p:p+4])[0]
            p += 4
            attr = response[p:p+length]
            p += length
            type_ = unpack('>L', response[p:p+4])[0]
            p += 4
            attrs.append([attr,type_])

        result['attrs'] = attrs

        # read match count and the 64-bit-document-ID flag
        count = unpack('>L', response[p:p+4])[0]
        p += 4
        id64 = unpack('>L', response[p:p+4])[0]
        p += 4

        # read matches
        result['matches'] = []
        while count>0 and p<max_:
            count -= 1
            if id64:
                doc, weight = unpack('>QL', response[p:p+12])
                p += 12
            else:
                doc, weight = unpack('>2L', response[p:p+8])
                p += 8

            # attribute values follow in schema order; the shared
            # trailing `p += 4` covers the common 4-byte slot, with the
            # bigint (+4) and multi-value (-4) branches adjusting it
            match = { 'id':doc, 'weight':weight, 'attrs':{} }
            for i in range(len(attrs)):
                if attrs[i][1] == SPH_ATTR_FLOAT:
                    match['attrs'][attrs[i][0]] = unpack('>f', response[p:p+4])[0]
                elif attrs[i][1] == SPH_ATTR_BIGINT:
                    match['attrs'][attrs[i][0]] = unpack('>q', response[p:p+8])[0]
                    p += 4
                elif attrs[i][1] == (SPH_ATTR_MULTI | SPH_ATTR_INTEGER):
                    match['attrs'][attrs[i][0]] = []
                    nvals = unpack('>L', response[p:p+4])[0]
                    p += 4
                    for n in range(0,nvals,1):
                        match['attrs'][attrs[i][0]].append(unpack('>L', response[p:p+4])[0])
                        p += 4
                    p -= 4
                else:
                    match['attrs'][attrs[i][0]] = unpack('>L', response[p:p+4])[0]
                p += 4

            result['matches'].append ( match )

        result['total'], result['total_found'], result['time'], words = unpack('>4L', response[p:p+16])

        # query time arrives in milliseconds; report seconds as a string
        result['time'] = '%.3f' % (result['time']/1000.0)
        p += 16

        # per-keyword statistics
        result['words'] = []
        while words>0:
            words -= 1
            length = unpack('>L', response[p:p+4])[0]
            p += 4
            word = response[p:p+length]
            p += length
            docs, hits = unpack('>2L', response[p:p+8])
            p += 8

            result['words'].append({'word':word, 'docs':docs, 'hits':hits})

    self._reqs = []
    return results
+
+
def BuildExcerpts (self, docs, index, words, opts=None):
    """
    Connect to searchd server and generate excerpts from given documents.

    `docs` is a list of document body strings, `index` names the index
    whose settings to use, `words` are the keywords to highlight, and
    `opts` tweaks formatting (before_match, after_match,
    chunk_separator, limit, around, plus boolean flags).
    Returns a list of excerpt strings, [] on a protocol error, or None
    if the connection fails.
    """
    if not opts:
        opts = {}
    if isinstance(words,unicode):
        words = words.encode('utf-8')

    assert(isinstance(docs, list))
    assert(isinstance(index, str))
    assert(isinstance(words, str))
    assert(isinstance(opts, dict))

    sock = self._Connect()

    if not sock:
        return None

    # fixup options
    opts.setdefault('before_match', '<b>')
    opts.setdefault('after_match', '</b>')
    opts.setdefault('chunk_separator', ' ... ')
    opts.setdefault('limit', 256)
    opts.setdefault('around', 5)

    # build request
    # v.1.0 req

    # boolean options fold into one flags bitmask
    flags = 1 # (remove spaces)
    if opts.get('exact_phrase'): flags |= 2
    if opts.get('single_passage'): flags |= 4
    if opts.get('use_boundaries'): flags |= 8
    if opts.get('weight_order'): flags |= 16

    # mode=0, flags
    req = [pack('>2L', 0, flags)]

    # req index
    req.append(pack('>L', len(index)))
    req.append(index)

    # req words
    req.append(pack('>L', len(words)))
    req.append(words)

    # options: strings length-prefixed, numeric options as 32-bit ints
    req.append(pack('>L', len(opts['before_match'])))
    req.append(opts['before_match'])

    req.append(pack('>L', len(opts['after_match'])))
    req.append(opts['after_match'])

    req.append(pack('>L', len(opts['chunk_separator'])))
    req.append(opts['chunk_separator'])

    req.append(pack('>L', int(opts['limit'])))
    req.append(pack('>L', int(opts['around'])))

    # documents
    req.append(pack('>L', len(docs)))
    for doc in docs:
        if isinstance(doc,unicode):
            doc = doc.encode('utf-8')
        assert(isinstance(doc, str))
        req.append(pack('>L', len(doc)))
        req.append(doc)

    req = ''.join(req)

    # send query, get response
    length = len(req)

    # add header
    req = pack('>2HL', SEARCHD_COMMAND_EXCERPT, VER_COMMAND_EXCERPT, length)+req
    # NOTE(review): send() may write only part of the buffer — consider
    # sock.sendall() here; `wrote` is unused.
    wrote = sock.send(req)

    response = self._GetResponse(sock, VER_COMMAND_EXCERPT )
    if not response:
        return []

    # parse response: one length-prefixed excerpt per input document
    pos = 0
    res = []
    rlen = len(response)

    for i in range(len(docs)):
        length = unpack('>L', response[pos:pos+4])[0]
        pos += 4

        if pos+length > rlen:
            self._error = 'incomplete reply'
            return []

        res.append(response[pos:pos+length])
        pos += length

    return res
+
+
def UpdateAttributes ( self, index, attrs, values ):
    """
    Update given attribute values on given documents in given indexes.
    Returns amount of updated documents (0 or more) on success, or -1 on failure
    (None if the connection itself cannot be established).

    'attrs' must be a list of strings.
    'values' must be a dict with int key (document ID) and list of int values (new attribute values).

    Example:
        res = cl.UpdateAttributes ( 'test1', [ 'group_id', 'date_added' ], { 2:[123,1000000000], 4:[456,1234567890] } )
    """
    assert ( isinstance ( index, str ) )
    assert ( isinstance ( attrs, list ) )
    assert ( isinstance ( values, dict ) )
    for attr in attrs:
        assert ( isinstance ( attr, str ) )
    # every document must supply exactly one new value per attribute
    for docid, entry in values.items():
        assert ( isinstance ( docid, int ) )
        assert ( isinstance ( entry, list ) )
        assert ( len(attrs)==len(entry) )
        for val in entry:
            assert ( isinstance ( val, int ) )

    # build request: index name, attribute list, then per-document
    # (64-bit id, one value per attribute) records
    req = [ pack('>L',len(index)), index ]

    req.append ( pack('>L',len(attrs)) )
    for attr in attrs:
        req.append ( pack('>L',len(attr)) + attr )

    req.append ( pack('>L',len(values)) )
    for docid, entry in values.items():
        req.append ( pack('>Q',docid) )
        for val in entry:
            req.append ( pack('>L',val) )

    # connect, send query, get response
    sock = self._Connect()
    if not sock:
        return None

    req = ''.join(req)
    length = len(req)
    req = pack ( '>2HL', SEARCHD_COMMAND_UPDATE, VER_COMMAND_UPDATE, length ) + req
    # NOTE(review): send() may write only part of the buffer — consider
    # sendall(); `wrote` is unused.
    wrote = sock.send ( req )

    response = self._GetResponse ( sock, VER_COMMAND_UPDATE )
    if not response:
        return -1

    # parse response: a single 32-bit count of updated documents
    updated = unpack ( '>L', response[0:4] )[0]
    return updated
+
+
def BuildKeywords ( self, query, index, hits ):
    """
    Connect to searchd server, and generate keywords list for a given query.
    Returns None on failure, or a list of keywords on success.

    Each entry is a dict with the 'tokenized' and 'normalized' forms of
    a keyword; when `hits` is true it also carries per-keyword 'docs'
    and 'hits' statistics.
    """
    assert ( isinstance ( query, str ) )
    assert ( isinstance ( index, str ) )
    assert ( isinstance ( hits, int ) )

    # build request: query, index name, and the want-statistics flag
    req = [ pack ( '>L', len(query) ) + query ]
    req.append ( pack ( '>L', len(index) ) + index )
    req.append ( pack ( '>L', hits ) )

    # connect, send query, get response
    sock = self._Connect()
    if not sock:
        return None

    req = ''.join(req)
    length = len(req)
    req = pack ( '>2HL', SEARCHD_COMMAND_KEYWORDS, VER_COMMAND_KEYWORDS, length ) + req
    # NOTE(review): send() may write only part of the buffer — consider
    # sendall(); `wrote` is unused.
    wrote = sock.send ( req )

    response = self._GetResponse ( sock, VER_COMMAND_KEYWORDS )
    if not response:
        return None

    # parse response: keyword count, then per keyword the tokenized and
    # normalized strings (length-prefixed) plus optional statistics
    res = []

    nwords = unpack ( '>L', response[0:4] )[0]
    p = 4
    max_ = len(response)

    while nwords>0 and p<max_:
        nwords -= 1

        length = unpack ( '>L', response[p:p+4] )[0]
        p += 4
        tokenized = response[p:p+length]
        p += length

        length = unpack ( '>L', response[p:p+4] )[0]
        p += 4
        normalized = response[p:p+length]
        p += length

        entry = { 'tokenized':tokenized, 'normalized':normalized }
        if hits:
            entry['docs'], entry['hits'] = unpack ( '>2L', response[p:p+8] )
            p += 8

        res.append ( entry )

    # if the loop ended before consuming every word, the reply was cut short
    if nwords>0 or p>max_:
        self._error = 'incomplete reply'
        return None

    return res
+
+ ### persistent connections
+
def Open(self):
    """
    Open a persistent connection to searchd and keep it on the client;
    subsequent commands reuse it instead of reconnecting.
    """
    if self._socket:
        self._error = 'already connected'
        return

    server = self._Connect()
    if not server:
        return

    # command, command version = 0, body length = 4, body = 1
    request = pack ( '>hhII', SEARCHD_COMMAND_PERSIST, 0, 4, 1 )
    server.send ( request )

    self._socket = server
+
def Close(self):
    """Close the persistent searchd connection opened via Open()."""
    if self._socket:
        self._socket.close()
        self._socket = None
    else:
        self._error = 'not connected'
+
def EscapeString(self, string):
    """
    Backslash-escape extended-query-syntax metacharacters in `string`
    so it can be embedded verbatim in a query.
    """
    # Same character set as before; the original class duplicated '='
    # and escaped characters that need no escaping inside [...].
    return re.sub(r'([=()|\-!@~"&/\\^$])', r"\\\1", string)
+
+#
+# $Id$
+#
View
11 apps/search/templates/search/search.html
@@ -9,7 +9,7 @@
{% block content %}
<div id="search_box" class="clearfix">
<div id="search_options">
- {% if page.object_list %}
+ {% if opinions %}
{% if query %}Description: <span class="opt">"{{ query }}"</span>;{% endif %}
Dates: <span class="opt">{{ form.cleaned_data.date_start }}
&ndash; {{ form.cleaned_data.date_end }}</span>
@@ -24,16 +24,14 @@
<div id="results" class="container">
<ul>
- {% if page.object_list %}
- {% with opinions = page.object_list %}
+ {% if opinions %}
{% include "dashboard/messages.html" %}
- {% endwith %}
{% else %}
<li>No search results found.</li>
{% endif %}
</ul>
- {% if page.object_list %}
+ {% if 0 %}
<nav class="pagination">
<span class="step-links">
{% if page.has_previous() %}
@@ -55,7 +53,7 @@
</nav>
{% endif %}
</div>
-
+{% if opinions %}
<div id="overview">
<div id="sentiment" class="container clearfix">
<h2>Overview</h2>
@@ -78,4 +76,5 @@
</div>
</div>
</div>
+{% endif %}
{% endblock %}
View
88 apps/search/tests.py
@@ -0,0 +1,88 @@
+import os
+import shutil
+import time
+import datetime
+
+from django.conf import settings
+
+from nose.tools import eq_
+import test_utils
+
+from search.client import Client
+from search.utils import start_sphinx, stop_sphinx, reindex
+
+
# TODO(davedash): liberate from Zamboni
class SphinxTestCase(test_utils.TransactionTestCase):
    """
    This test case type can setUp and tearDown the sphinx daemon. Use this
    when testing any feature that requires sphinx.

    The daemon is started at most once per process (guarded by the
    ``sphinx_is_running`` class flag) and stopped in tearDownClass().
    """

    fixtures = ['feedback/opinions']
    sphinx = True
    sphinx_is_running = False

    def setUp(self):
        super(SphinxTestCase, self).setUp()

        if not SphinxTestCase.sphinx_is_running:
            if (not settings.SPHINX_SEARCHD or
                not settings.SPHINX_INDEXER):  # pragma: no cover
                # BUG FIX: SkipTest was referenced without being
                # imported, so this path raised NameError instead of
                # skipping the test.
                from nose import SkipTest
                raise SkipTest()

            os.environ['DJANGO_ENVIRONMENT'] = 'test'

            # XXX: Path names need to be more clear.
            # Wipe stale index/log data so every run starts clean.
            if os.path.exists(settings.SPHINX_CATALOG_PATH):
                shutil.rmtree(settings.SPHINX_CATALOG_PATH)
            if os.path.exists(settings.SPHINX_LOG_PATH):
                shutil.rmtree(settings.SPHINX_LOG_PATH)

            os.makedirs(settings.SPHINX_LOG_PATH)
            os.makedirs(settings.SPHINX_CATALOG_PATH)

            reindex()
            start_sphinx()
            # give searchd a moment to come up before tests hit it
            time.sleep(1)
            SphinxTestCase.sphinx_is_running = True

    @classmethod
    def tearDownClass(cls):
        if SphinxTestCase.sphinx_is_running:
            stop_sphinx()
            SphinxTestCase.sphinx_is_running = False
+
+
def query(x='', **kwargs):
    """Run `x` through a fresh search Client and return the raw results."""
    # Was a lambda assignment (PEP 8 E731); a def is equivalent and
    # gives a proper name in tracebacks.
    return Client().query(x, **kwargs)


def num_results(x='', **kwargs):
    """Number of hits `x` produces (convenience wrapper around query())."""
    return len(query(x, **kwargs))


class SearchTest(SphinxTestCase):
    """Exercise Client query filters against the feedback/opinions fixture."""

    def test_query(self):
        eq_(num_results(), 20)

    def test_product_filter(self):
        eq_(num_results(product=1), 20)
        eq_(num_results(product=2), 0)

    def test_version_filter(self):
        eq_(num_results(version='3.6.3'), 11)
        eq_(num_results(version='3.6.4'), 16)

    def test_positive_filter(self):
        eq_(num_results(positive=1), 17)
        eq_(num_results(positive=0), 11)

    def test_os_filter(self):
        eq_(num_results(os='mac'), 20)
        eq_(num_results(os='palm'), 0)

    def test_locale_filter(self):
        eq_(num_results(locale='en-US'), 20)
        eq_(num_results(locale='de'), 1)

    def test_date_filter(self):
        start = datetime.datetime(2010, 5, 27)
        end = datetime.datetime(2010, 5, 28)
        eq_(num_results(date_start=start, date_end=end), 5)
        # (removed a stray trailing `pass` — it was dead code)
View
7 apps/search/urls.py
@@ -5,14 +5,11 @@
from feedback.models import Opinion
-from .forms import ReporterSearchForm
-from .views import OpinionSearchView
+import views
sqs = SearchQuerySet().models(Opinion)
urlpatterns = patterns('',
- url(r'^$', OpinionSearchView(searchqueryset=sqs,
- form_class=ReporterSearchForm),
- name='search'),
+ url(r'^$', views.index, name='search'),
)
View
39 apps/search/utils.py
@@ -0,0 +1,39 @@
+# TODO(davedash): liberate from zamboni
+
+import subprocess
+
+from django.conf import settings
+
def call(args):
    """Run `args` as a subprocess and return its (stdout, stderr) pair."""
    # Was a lambda assignment (PEP 8 E731); a def is equivalent.
    return subprocess.Popen(args, stdout=subprocess.PIPE).communicate()


def reindex(rotate=False):
    """
    Reindexes sphinx. Note this is only to be used in dev and test
    environments.
    """
    calls = [settings.SPHINX_INDEXER, '--all', '--config',
             settings.SPHINX_CONFIG_PATH]

    if rotate:  # pragma: no cover
        # --rotate hands the rebuilt index over to a running searchd
        calls.append('--rotate')

    call(calls)


def start_sphinx():
    """
    Starts sphinx. Note this is only to be used in dev and test environments.
    """
    call([settings.SPHINX_SEARCHD, '--config',
          settings.SPHINX_CONFIG_PATH])


def stop_sphinx():
    """
    Stops sphinx. Note this is only to be used in dev and test environments.
    """
    call([settings.SPHINX_SEARCHD, '--stop', '--config',
          settings.SPHINX_CONFIG_PATH])
View
83 apps/search/views.py
@@ -1,73 +1,26 @@
-from django.conf import settings
-from django.utils.hashcompat import md5_constructor
-
-from haystack.views import SearchView
import jingo
-from view_cache_utils import cache_page_with_prefix
-
-from feedback.models import Opinion, Term
-from feedback import stats, FIREFOX
-
-
-def search_view_cache_key(request):
- """Generate a cache key for a search view based on its GET parameters."""
- return md5_constructor(str(request.GET)).hexdigest()
-
-
-class OpinionSearchView(SearchView):
- def get_results(self):
- """If no query is selected, browse dataset."""
- if self.form.is_valid() and not self.query:
- return Opinion.objects.browse(**self.form.cleaned_data)
- else:
- return super(OpinionSearchView, self).get_results()
-
- def extra_context(self):
- """Gather sentiments/trends/demographic info for these search results."""
- extra = super(OpinionSearchView, self).extra_context()
-
- # TODO make sure this won't issue millions of queries
- opinion_pks = [ res.pk for res in self.results ]
-
- # Aggregates:
- opinions = Opinion.objects.filter(pk__in=opinion_pks)
-
- extra['sent'] = stats.sentiment(qs=opinions)
- extra['demo'] = stats.demographics(qs=opinions)
-
- frequent_terms = Term.objects.frequent().filter(
- used_in__in=opinion_pks)[:settings.TRENDS_COUNT]
- extra['terms'] = stats.frequent_terms(qs=frequent_terms)
- return extra
+from client import Client
+from forms import ReporterSearchForm
+from feedback import stats
- def create_response(self):
- """
- Generates the actual HttpResponse to send back to the user.
- The same as Haystack's stock SearchView, except for Jinja2 rendering.
- """
- (paginator, page) = self.build_page()
+def index(request):
+ form = ReporterSearchForm(request.GET)
+ form.is_valid()
+ query = form.cleaned_data.get('q', '')
+ search_opts = form.cleaned_data
+ c = Client()
+ opinions = c.query(query, **search_opts)
- context = {
- 'query': self.query,
- 'form': self.form,
- 'page': page,
- 'paginator': paginator,
- }
- context.update(self.extra_context())
- return jingo.render(self.request, self.template, context)
+ context = {
+ 'opinions': opinions,
+ 'form': form,
+ }
+ if opinions:
+ context['sent'] = stats.sentiment(qs=opinions)
+ context['demo'] = stats.demographics(qs=opinions)
- def __call__(self, request):
- """Main view entrypoint. Cached."""
+ return jingo.render(request, 'search/search.html', context)
- @cache_page_with_prefix(settings.CACHE_DEFAULT_PERIOD,
- search_view_cache_key)
- def cache_wrapper(request):
- """
- Cache decorator expects request, not self, to be the first
- positional argument, so let's cater to that by using a closure.
- """
- return super(OpinionSearchView, self).__call__(request)
- return cache_wrapper(request)
View
31 configs/sphinx/localsettings_django.py
@@ -0,0 +1,31 @@
import os
import sys

# Repo root: two directory levels up from this config directory.
SETTINGS_DIR = os.path.realpath(
    os.path.join(os.path.dirname(__file__), os.path.sep.join(('..',)*2)))

# Make the project settings module and its bundled libs importable.
sys.path.append(SETTINGS_DIR)
sys.path.append(os.path.join(SETTINGS_DIR,'lib'))

import settings_local as settings

# Pull MySQL connection info out of the Django DATABASES dict so the
# sphinx.conf generator can interpolate it.
s = settings.DATABASES['default']
MYSQL_PASS = s['PASSWORD']
MYSQL_USER = s['USER']
MYSQL_HOST = s.get('HOST', 'localhost')
MYSQL_NAME = s['NAME']

# a socket path in HOST means a local connection
if MYSQL_HOST.endswith('.sock'):
    MYSQL_HOST = 'localhost'

# point at the 'test_'-prefixed database when running under tests
if os.environ.get('DJANGO_ENVIRONMENT') == 'test':
    MYSQL_NAME = 'test_' + MYSQL_NAME


# Filesystem layout and listen ports for the sphinx daemon.
BASE_PATH = os.path.join(SETTINGS_DIR, 'tmp')
CATALOG_PATH = BASE_PATH + '/data/sphinx'
LOG_PATH = BASE_PATH + '/log/searchd'
ETC_PATH = BASE_PATH + '/etc'
LISTEN_PORT = 3314
MYSQL_LISTEN_PORT = 3309
MYSQL_LISTEN_HOST = 'localhost'
View
73 configs/sphinx/sphinx.conf
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+
+try:
+ from localsettings import *
+except ImportError:
+ from localsettings_django import *
+
+MYSQL_SOURCE_CONFIG = """
+ type = mysql
+ sql_host = %s
+ sql_user = %s
+ sql_pass = %s
+ sql_db = %s
+
+ sql_query_pre = SET NAMES utf8
+ sql_query_pre = SET SESSION query_cache_type=OFF
+""" % (MYSQL_HOST, MYSQL_USER, MYSQL_PASS, MYSQL_NAME,)
+
+CHARSET_DATA = """
+ charset_type = utf-8
+ charset_table = 0..9, U+41..U+5a->U+61..U+7a, U+61..U+7a, U+aa, U+b5, \
+ U+ba, U+c0..U+d6->U+e0..U+f6, U+d8..U+de->U+f8..U+fe, U+df..U+f6, \
+ U+f8..U+ff, U+100..U+12f/2, U+130->U+69, U+131, U+132..U+137/2, U+138, U+139..U+148/2, U+149, U+14a..U+177/2, U+178->U+ff, U+179..U+17e/2, U+17f..U+180, U+181->U+253, U+182..U+185/2, U+186->U+254, U+187..U+188/2, U+189..U+18a->U+256..U+257, U+18b..U+18c/2, U+18d, U+18e->U+1dd, U+18f->U+259, U+190->U+25b, U+191..U+192/2, U+193->U+260, U+194->U+263, U+195, U+196->U+269, U+197->U+268, U+198..U+199/2, U+19a..U+19b, U+19c->U+26f, U+19d->U+272, U+19e, U+19f->U+275, U+1a0..U+1a5/2, U+1a6->U+280, U+1a7..U+1a8/2, U+1a9->U+283, U+1aa..U+1ab, U+1ac..U+1ad/2, U+1ae->U+288, U+1af..U+1b0/2, U+1b1..U+1b2->U+28a..U+28b, U+1b3..U+1b6/2, U+1b7->U+292, U+1b8..U+1b9/2, U+1ba..U+1bb, U+1bc..U+1bd/2, U+1be..U+1c3, U+1c4->U+1c6, U+1c5..U+1c6/2, U+1c7->U+1c9, U+1c8..U+1c9/2, U+1ca->U+1cc, U+1cb..U+1dc/2, U+1dd, U+1de..U+1ef/2, U+1f0, U+1f1->U+1f3, U+1f2..U+1f5/2, U+1f6->U+195, U+1f7->U+1bf, U+1f8..U+21f/2, U+220->U+19e, U+221, U+222..U+233/2, U+234..U+23a, U+23b..U+23c/2, U+23d->U+19a, U+23e..U+240, U+241->U+294, U+250..U+2c1, U+2c6..U+2d1, U+2e0..U+2e4, U+2ee, U+37a, U+386..U+389->U+3ac..U+3af, U+38c..U+38e->U+3cc..U+3ce, U+390, U+391..U+3a1->U+3b1..U+3c1, U+3a3..U+3ab->U+3c3..U+3cb, U+3ac..U+3ce, U+3d0..U+3d7, U+3d8..U+3ef/2, U+3f0..U+3f3, U+3f4->U+3b8, U+3f5, U+3f7..U+3f8/2, U+3f9->U+3f2, U+3fa..U+3fb/2, U+3fc..U+3ff, U+400..U+40f->U+450..U+45f, U+410..U+42f->U+430..U+44f, U+430..U+45f, U+460..U+481/2, U+48a..U+4bf/2, U+4c0, U+4c1..U+4ce/2, U+4d0..U+4f9/2, U+500..U+50f/2, U+531..U+556->U+561..U+586, U+559, U+561..U+587, U+5d0..U+5ea, U+5f0..U+5f2, U+621..U+63a, U+640..U+64a, U+66e..U+66f, U+671..U+6d3, U+6d5, U+6e5..U+6e6, U+6ee..U+6ef, U+6fa..U+6fc, U+6ff, U+e01..U+e30, U+e32..U+e33, U+e40..U+e46, U+e81..U+e82, U+e84, U+e87..U+e88, U+e8a, U+e8d, U+e94..U+e97, U+e99..U+e9f, U+ea1..U+ea3, U+ea5, U+ea7, U+eaa..U+eab, U+ead..U+eb0, U+eb2..U+eb3, U+ebd, U+ec0..U+ec4, U+ec6, U+edc..U+edd, U+1000..U+1021, U+1023..U+1027, U+1029..U+102a, U+1050..U+1055, U+10a0..U+10c5->U+2d00..U+2d25, 
U+10d0..U+10fa, U+10fc, U+2d00..U+2d25, U+1d00..U+1dbf, U+1e00..U+1e95/2, U+1e96..U+1e9b, U+1ea0..U+1ef9/2, U+3005..U+3006, U+3031..U+3035, U+303b..U+303c, U+3041..U+3096, U+309d..U+309f, U+30a1..U+30fa, U+30fc..U+30ff, U+31f0..U+31ff
+ ngram_chars = U+3400..U+4DB5, U+4E00..U+9FA5, U+20000..U+2A6D6
+ ngram_len = 1
+ """
+
+config = """
+source opinions
+{
+""" + MYSQL_SOURCE_CONFIG + """
+ sql_query = \
+ SELECT \
+ id, positive, url, description, product, \
+ CRC32(version) as version, \
+ CRC32(os) as os, \
+ CRC32(locale) AS locale, \
+ UNIX_TIMESTAMP(created) AS created \
+ FROM feedback_opinion
+
+ sql_attr_bool = positive
+ sql_attr_uint = product
+ sql_attr_uint = locale
+ sql_attr_uint = version
+ sql_attr_uint = os
+ sql_attr_timestamp = created
+}
+"""
+
+config = config + """
+index opinions
+{
+ source = opinions
+ path = %s/opinions
+ morphology = stem_en
+ wordforms = %s/sphinx-wordforms.txt
+ %s
+}
+""" % (CATALOG_PATH, ETC_PATH, CHARSET_DATA)
+
+
+config = config + """
+searchd
+{
+ listen = %d
+ listen = %s:%d:mysql41
+ log = %s/searchd.log
+ query_log = %s/query.log
+ pid_file = %s/searchd.pid
+}
+""" % ((LISTEN_PORT, MYSQL_LISTEN_HOST, MYSQL_LISTEN_PORT) + (LOG_PATH,)*3)
+
+print config
View
1  requirements/dev.txt
@@ -3,3 +3,4 @@
# package for developers (testing, docs, etc.), it goes in this file.
-r prod.txt
+-e git://github.com/jbalogh/test-utils.git#egg=test-utils
View
15 settings.py
@@ -112,7 +112,7 @@ def JINJA_CONFIG():
ROOT_URLCONF = 'reporter.urls'
-INSTALLED_APPS = (
+INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
@@ -128,7 +128,7 @@ def JINJA_CONFIG():
'feedback',
'search',
'swearwords',
-)
+]
# Where to store product details
PROD_DETAILS_DIR = path('lib/product_details_json')
@@ -149,3 +149,14 @@ def JINJA_CONFIG():
# Number of items to show in the "Trends" box and Messages box.
MESSAGES_COUNT = 10
TRENDS_COUNT = 10
+
+SPHINX_HOST = '127.0.0.1'
+SPHINX_PORT = 3314
+SPHINX_SEARCHD = 'searchd'
+SPHINX_INDEXER = 'indexer'
+SPHINX_CATALOG_PATH = path('tmp/data/sphinx')
+SPHINX_LOG_PATH = path('tmp')
+SPHINX_CONFIG_PATH = path('configs/sphinx/sphinx.conf')
+
+
+TEST_RUNNER = 'test_utils.runner.RadicalTestSuiteRunner'
View
12 urls.py
@@ -1,5 +1,6 @@
-from django.conf.urls.defaults import *
+from django.conf.urls.defaults import url, patterns, include
from django.contrib import admin
+from django.conf import settings
import jingo
@@ -24,3 +25,12 @@ def _error_page(request, status):
(r'^robots\.txt$', jingo.render, {'template': 'robots.txt',
'mimetype': 'text/plain'}),
)
+
+
+if settings.DEBUG:
+ # Remove leading and trailing slashes so the regex matches.
+ media_url = settings.MEDIA_URL.lstrip('/').rstrip('/')
+ urlpatterns += patterns('',
+ (r'^%s/(?P<path>.*)$' % media_url, 'django.views.static.serve',
+ {'document_root': settings.MEDIA_ROOT}),
+ )
View
22 utils.py
@@ -0,0 +1,22 @@
+import zlib
+
+
# TODO(davedash): liberate this
def manual_order(qs, pks, pk_name='id'):
    """
    Given a query set and a list of primary keys, return a set of objects from
    the query set in that exact order.

    `pk_name` names the key column used both for filtering and for the
    MySQL FIELD() ordering expression.
    """
    if not pks:
        return qs.none()

    # BUG FIX: the filter previously hard-coded `id__in`, ignoring
    # pk_name even though the FIELD() expression used it.
    # NOTE(review): pks are interpolated into raw SQL via FIELD();
    # callers must pass integer keys only.
    objects = qs.filter(**{'%s__in' % pk_name: pks}).extra(
        select={'_manual': 'FIELD(%s, %s)'
                % (pk_name, ','.join(map(str, pks)))},
        order_by=['_manual'])

    return objects
+
+
def crc32(x):
    """
    Return the unsigned 32-bit CRC of `x`.

    zlib.crc32 can return negative values on Python 2; masking with
    0xffffffff normalizes the result to the unsigned range.
    """
    # Was a lambda assignment (PEP 8 E731); a def is equivalent.
    return zlib.crc32(x) & 0xffffffff
Please sign in to comment.
Something went wrong with that request. Please try again.