
Commit

hot gets own precompute_limit, minor
libertysoft3 committed Feb 9, 2020
1 parent aa335de commit 158882a
Showing 6 changed files with 10 additions and 7 deletions.
3 changes: 1 addition & 2 deletions install/reddit.sh
@@ -247,11 +247,10 @@ automoderator_account = automoderator
 [server:main]
 port = 8001
 # gunicorn settings, enable in /etc/init/reddit-paster.conf
-# set 'workers' (and haproxy's 'maxconn') to # of CPU cores for max high traffic performance
+# set workers (and haproxy's maxconn) to num CPU cores or less for max single server performance
 # workers = 2
 # max_requests = 500
 # timeout = 10
-# backlog = 64
 
 [live_config]
 # Specify global admins and permissions, each user should have one of admin, sponsor, or employee as their permission level
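The reworded workers comment is a sizing rule rather than code. A minimal sketch of the same rule, assuming a hypothetical helper name (`suggested_workers` is not part of the repo):

```python
# Hypothetical sketch of the sizing rule in the gunicorn comment above:
# cap workers (and haproxy's maxconn) at the machine's CPU core count.
import multiprocessing

def suggested_workers(configured=2):
    """Return a worker count no larger than the number of CPU cores."""
    return min(configured, multiprocessing.cpu_count())

if __name__ == '__main__':
    print(suggested_workers(configured=2))
```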
5 changes: 3 additions & 2 deletions r2/example.ini
@@ -881,9 +881,10 @@ remote_fetch_proxy_url = http://myproxy.example.com:1234
 # enables the imgur uploader. api client type "Anonymous usage without user authorization"
 imgur_client_id =
 
-# max browseable things in permacache for /s/x/top, /s/x/fun, /s/x/insightful, /s/x/comments, /user/x/comments, etc.
+# max history/browseable things in permacache for /s/x/top, /s/x/fun, /s/x/insightful, /s/x/comments, /user/x/comments, etc.
 precompute_limit = 1000
-# scale up with precompute_limit changes
+precompute_limit_hot = 1000
+# scale up with precompute_limit_hot changes
 hot_max_links_per_subreddit = 150
 # updates must also be made in the hot() pgsql function, see install/setup_postgres.sh
 # increase to slow down link turnover on hot pages, making them more score sensitive
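The "scale up with precompute_limit_hot changes" note suggests raising hot_max_links_per_subreddit together with the hot limit. One way to read that, assuming (this is an assumption, not stated in the commit) that the shipped 150:1000 ratio of the two defaults is kept:

```python
# Hypothetical helper for the "scale up with precompute_limit_hot changes"
# note; the proportional 150:1000 ratio is an assumption, not repo code.
DEFAULT_RATIO = 150.0 / 1000.0  # default hot_max_links_per_subreddit / precompute_limit_hot

def scaled_hot_max(precompute_limit_hot):
    """Suggest a hot_max_links_per_subreddit for a raised hot limit."""
    return int(precompute_limit_hot * DEFAULT_RATIO)

print(scaled_hot_max(2000))  # doubling the hot limit would suggest 300
```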
1 change: 1 addition & 0 deletions r2/r2/lib/app_globals.py
@@ -230,6 +230,7 @@ class Globals(object):
             'captcha_font_size',
             'banner_variants',
             'precompute_limit',
+            'precompute_limit_hot',
             'hot_max_links_per_subreddit',
         ],
 
2 changes: 1 addition & 1 deletion r2/r2/lib/normalized_hot.py
@@ -32,7 +32,7 @@
 
 # SaidIt: store over the default 1000 for the heavy sub muting use case
 MAX_PER_SUBREDDIT = g.hot_max_links_per_subreddit
-MAX_LINKS = g.precompute_limit
+MAX_LINKS = g.precompute_limit_hot
 
 def get_hot_tuples(sr_ids, ageweight=None):
     queries_by_sr_id = {sr_id: _get_links(sr_id, sort='hot', time='all')
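Taken together, the three hunks above wire a new integer setting end to end: example.ini declares precompute_limit_hot, app_globals.py lists it among the keys parsed as integers, and normalized_hot.py reads it from the Pylons globals object `g`. A self-contained sketch of that flow (`GlobalsSketch` and its dict input are illustrative stand-ins, not r2 code):

```python
# Illustrative stand-in for r2's Globals: keys listed in app_globals.py's
# int section are parsed from example.ini into integer attributes on `g`.
class GlobalsSketch(object):
    INT_KEYS = ('precompute_limit', 'precompute_limit_hot',
                'hot_max_links_per_subreddit')

    def __init__(self, ini_values):
        for key in self.INT_KEYS:
            setattr(self, key, int(ini_values[key]))

g = GlobalsSketch({'precompute_limit': '1000',
                   'precompute_limit_hot': '1000',
                   'hot_max_links_per_subreddit': '150'})

# normalized_hot.py can now size hot listings independently of other sorts:
MAX_PER_SUBREDDIT = g.hot_max_links_per_subreddit
MAX_LINKS = g.precompute_limit_hot
```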
4 changes: 3 additions & 1 deletion scripts/time_listings.sh
@@ -1,5 +1,7 @@
 #!/bin/bash
-# SAIDIT: making these run sequentially for performance
+
+# SAIDIT: run sequentially and sleep for max single server performance
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" > /dev/null 2>&1 && pwd )"
 /bin/bash $DIR/compute_time_listings link year "['hour', 'day', 'week', 'month', 'year']" 2>&1 | /usr/bin/logger -t compute_time_listings_link
+sleep 10
 /bin/bash $DIR/compute_time_listings comment year "['hour', 'day', 'week', 'month', 'year']" 2>&1 | /usr/bin/logger -t compute_time_listings_comment
2 changes: 1 addition & 1 deletion upstart/reddit-job-solr_links.conf
@@ -8,5 +8,5 @@ nice 10
 
 script
 . /etc/default/reddit
-wrap-job paster run $REDDIT_INI -c 'import r2.lib.providers.search.solr as cs; cs._rebuild_link_index()'
+wrap-job paster run $REDDIT_INI -c 'import r2.lib.providers.search.solr as cs; cs._rebuild_link_index(sleeptime=2)'
 end script
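The upstart job now passes sleeptime=2 to _rebuild_link_index, presumably throttling the Solr reindex with short pauses so it doesn't run flat out on a single server. The real implementation lives in r2.lib.providers.search.solr; the loop below is only an illustrative sketch of such a throttle, not the repo's code:

```python
import time

def rebuild_index_sketch(batches, sleeptime=2):
    """Illustrative throttle: sleep between indexing batches so a
    long reindex doesn't starve a single-server deploy."""
    for batch in batches:
        print('indexed %d items' % len(batch))  # stand-in for the Solr call
        if sleeptime:
            time.sleep(sleeptime)

rebuild_index_sketch([[1, 2, 3], [4, 5]], sleeptime=0)
```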
