Permalink
Browse files

fixed sorting issue where the global sorts were being overwritten. added the query_queue parameters to example.ini.
  • Loading branch information...
1 parent 33fd4e9 commit 1861a04e454c736e49fe1b012c5a5aa3d26971d0 Steve Huffman committed Aug 22, 2008
View
@@ -41,13 +41,26 @@ email_db_host = 127.0.0.1
email_db_user = reddit
email_db_pass = password
+query_queue_db_name = query_queue
+query_queue_db_host = 127.0.0.1
+query_queue_db_user = reddit
+query_queue_db_pass = password
+
###
# Other magic settings
###
timezone = UTC
monitored_servers = localhost
+#query cache settings
+query_caches = 127.0.0.1:11211
+num_query_queue_workers =
+query_queue_worker =
+enable_doquery = False
+use_query_cache = False
+write_query_queue = False
+
stylesheet = reddit.css
stylesheet_rtl = reddit_rtl.css
View
@@ -705,19 +705,20 @@ def POST_vote(self, res, dir, thing, ip, vote_type):
organic = vote_type == 'organic'
v = Vote.vote(user, thing, dir, ip, spam, organic)
- #update last modified
- set_last_modified(c.user, 'liked')
- set_last_modified(c.user, 'disliked')
+ #update relevant caches
+ if isinstance(thing, Link):
+ sr = thing.subreddit_slow
+ set_last_modified(c.user, 'liked')
+ set_last_modified(c.user, 'disliked')
- #update the queries
- if g.write_query_queue:
- queries.new_vote(v)
+ if v.valid_thing:
+ expire_hot(sr)
+ if g.write_query_queue:
+ queries.new_vote(v)
# flag search indexer that something has changed
tc.changed(thing)
-
-
@Json
@validate(VUser(),
VModhash(),
@@ -194,7 +194,7 @@ def organic(self):
def query(self):
#no need to worry when working from the cache
if g.use_query_cache or c.site == Default:
- fix_listing = False
+ self.fix_listing = False
if c.site == Default:
user = c.user if c.user_is_loggedin else None
View
@@ -12,11 +12,16 @@
query_cache = g.query_cache
precompute_limit = 1000
-db_sorts = dict(hot = desc('_hot'),
- new = desc('_date'),
- top = desc('_score'),
- controversial = desc('_controversy'),
- old = asc('_date'))
+
+db_sorts = dict(hot = (desc, '_hot'),
+ new = (desc, '_date'),
+ top = (desc, '_score'),
+ controversial = (desc, '_controversy'),
+ old = (asc, '_date'))
+
+def db_sort(sort):
+ cls, col = db_sorts[sort]
+ return cls(col)
db_times = dict(all = None,
hour = Thing.c._date >= timeago('1 hour'),
@@ -114,7 +119,7 @@ def thing_cmp(t1, t2):
def get_links(sr, sort, time):
"""General link query for a subreddit."""
q = Link._query(Link.c.sr_id == sr._id,
- sort = db_sorts[sort])
+ sort = db_sort(sort))
if time != 'all':
q._filter(db_times[time])
return CachedResults(q)
@@ -123,7 +128,7 @@ def user_query(kind, user, sort, time):
"""General profile-page query."""
q = kind._query(kind.c.author_id == user._id,
kind.c._spam == (True, False),
- sort = db_sorts[sort])
+ sort = db_sort(sort))
if time != 'all':
q._filter(db_times[time])
return CachedResults(q)
@@ -363,7 +363,7 @@ def get_links(self, sort, time):
raise UserRequiredException
q = Link._query(self.c.author_id == c.user.friends,
- sort = queries.db_sorts[sort])
+ sort = queries.db_sort(sort))
if time != 'all':
q._filter(queries.db_times[time])
return q
@@ -381,7 +381,7 @@ def query_rules(self):
def get_links(self, sort, time):
from r2.models import Link
from r2.lib.db import queries
- q = Link._query(sort = queries.db_sorts[sort])
+ q = Link._query(sort = queries.db_sort(sort))
if time != 'all':
q._filter(queries.db_times[time])
return q
@@ -408,7 +408,7 @@ def get_links_srs(self, srs, sort, time):
return queries.merge_results(*results)
else:
- q = Link._query(sort = queries.db_sorts[sort])
+ q = Link._query(sort = queries.db_sort(sort))
if time != 'all':
q._filter(queries.db_times[time])
return q
View
@@ -116,10 +116,6 @@ def vote(cls, sub, obj, dir, ip, spam = False, organic = False):
if sub._id != obj.author_id:
incr_counts([sr])
- #expire the sr
- if kind == 'link' and v.valid_thing:
- expire_hot(sr)
-
return v
#TODO make this generic and put on multirelation?

0 comments on commit 1861a04

Please sign in to comment.