# DO NOT EDIT THIS FILE
# This is a base template. To apply changes to your
# reddit instance, create a "myreddit.update" config
# file, then run 'make ini'. 'make ini' will combine
# this template with the myreddit.update file and create a
# 'myreddit.ini'. ('myreddit.update' is just an example;
# any name will do - e.g., 'foo.update' will create
# 'foo.ini')
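# For illustration only (hypothetical values): a minimal update file
# repeats just the sections and keys being overridden, e.g.
#   [DEFAULT]
#   domain = reddit.example.com
#   [secrets]
#   SECRET = <your own base64-encoded token>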
[secrets]
# the tokens in this section are base64 encoded
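# (the placeholder tokens below are just the base64 encoding of
# "abcdefghijklmnopqrstuvwxyz0123456789"; replace each with your own
# randomly generated, base64-encoded value)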
# general purpose secret
SECRET = YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXowMTIzNDU2Nzg5
# secret for /prefs/feeds
FEEDSECRET = YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXowMTIzNDU2Nzg5
# used for authenticating admin API calls w/o cookie
ADMINSECRET = YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXowMTIzNDU2Nzg5
# used to securely authenticate websocket requests to sutro
websocket = YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXowMTIzNDU2Nzg5
# secret for validating the cdn-provided client ip
cdn_ip_verification =
# secret for authenticating private media embeds
media_embed = YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXowMTIzNDU2Nzg5
# secret for authenticating controller#action name
action_name = YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXowMTIzNDU2Nzg5
# secret for email notification one-click unsubscribe links
email_notifications = YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXowMTIzNDU2Nzg5
# secrets for communicating with Stripe (optional payment processor)
stripe_webhook =
stripe_public_key =
stripe_secret_key =
# secrets for communicating with Authorize.net (optional payment processor;
# the authorizenetapi setting in the SELF-SERVE ADS section must also be set to enable it)
authorizenetname =
authorizenetkey =
# secret for communicating with Paypal (optional payment processor)
paypal_webhook =
# secret for communicating with Coinbase (optional payment processor)
coinbase_webhook =
# secret for communicating with RedditGifts (optional payment processor)
redditgifts_webhook =
# The campaign monitor API key for the newsletter
newsletter_api_key =
# event-collector key and secret
events_collector_key =
events_collector_secret =
[DEFAULT]
############################################ SITE-SPECIFIC OPTIONS
#### Appearance
# the site's tagline, used in the title and description
short_description = open source is awesome
# default site interface language (two letter character code)
site_lang = en
# default header image url
default_header_url = reddit.com.header.png
#### Domains
# the domain that this app expects to be accessed on
domain = reddit.local
# where third party media (embeds etc.) are hosted
# this should be different from `domain` for security
media_domain = %(domain)s
# the short domain (like redd.it)
shortdomain =
# if you use www for the old-timey feel, put it here
domain_prefix =
# subdomains that don't need special processing
reserved_subdomains = www, ssl, oauth
# subdomains that don't need special processing, and can't be part of `c.domain_prefix`.
# should be a subset of `reserved_subdomains`
ignored_subdomains = www, ssl, oauth
# subdomains that are not reddit instances
offsite_subdomains =
# https api endpoint (must be g.domain or a subdomain of g.domain)
https_endpoint =
# the domain for oauth-orized requests
oauth_domain =
# (secure) payment domain for self-serve ads
payment_domain = https://pay.reddit.local/
# base url where 300x250 ad units (sidebar) are hosted
ad_domain = http://reddit.local
# domain where sutro websocket server is hosted
websocket_host = %(domain)s
# domain to send stats to
stats_domain =
# URL to send event-collector events to
events_collector_url =
#### Accounts and Subreddits
# the user used for "system" operations and private messages
system_user = reddit
# the default subreddit for submissions
default_sr = frontpage
# account used for default feedback messaging (can be /r/subreddit)
admin_message_acct = reddit
# subreddit used for DMCA takedowns
takedown_sr = takedowns
# subreddit used for trending subreddits postings. Ignored if blank.
trending_sr =
# list of subreddits to always include in a user's front page (unless they unsubscribe)
automatic_reddits =
# special subreddit that only reddit gold subscribers can use
lounge_reddit =
# subreddits that have subscribers hidden
hide_subscribers_srs =
multi_icons = art and design, ask, books, business, cars, comics, cute animals, diy, entertainment, food and drink, funny, games, grooming, health, life advice, military, models pinup, music, news, philosophy, pictures and gifs, science, shopping, sports, style, tech, travel, unusual stories, video
#### Static Files
# if set, this is the domain used for static files served over http and https
# if not set, no domain will be specified and relative local URLs will be used instead
static_domain =
#### Ops
# if your webserver is a proxy running on a different instance within the same
# 10.0.0.0/8 network, have it set the X-Forwarded-For header and set this to true
trust_local_proxies = false
# Location (directory) for temp files for diff3 merging
# Empty will use python default for temp files
diff3_temp_location =
# which cdn provider to use; right now this is just used for getting an
# accurate client IP. options are:
# null - no cdn
# cloudflare - use cloudflare as configured for reddit.com
# or write your own!
cdn_provider = null
#### Analytics
# image to render to track pageviews
tracker_url = /pixel/of_destiny.png
# images to render to track sponsored links
adtracker_url = /pixel/of_doom.png
# image to render to track the ad frame
adframetracker_url = /pixel/of_defenestration.png
# redirector to bounce clicks off of on sponsored links for tracking
clicktracker_url = /click
event_clicktracker_url = /event_click
# url to request to track interaction statistics
uitracker_url = /pixel/of_discovery.png
# embeds pixel tracking url
eventtracker_url = /pixel/of_delight.png
anon_eventtracker_url = /pixel/of_diversity.png
# google analytics token
googleanalytics =
# google analytics events sampling rate. Valid values are 1-100.
# See https://developers.google.com/analytics/devguides/collection/gajs/methods/gaJSApiBasicConfiguration#_gat.GA_Tracker_._setSampleRate
googleanalytics_sample_rate = 50
# google analytics token for gold
googleanalytics_gold =
# google analytics events sampling rate for gold. Valid values are 1-100.
googleanalytics_sample_rate_gold = 100
# secret used for signing information on the above tracking pixels
tracking_secret = abcdefghijklmnopqrstuvwxyz0123456789
#### Wiki Pages
wiki_page_privacy_policy = privacypolicy
wiki_page_user_agreement = useragreement
wiki_page_registration_info = registration_info
wiki_page_gold_bottlecaps = gold_bottlecaps
wiki_page_stylesheets_everywhere =
#### Feature toggles
disable_ads = false
disable_captcha = false
disable_ratelimit = false
disable_require_admin_otp = false
disable_wiki = false
############################################ DEBUG
# global debug flag -- displays pylons stacktrace rather than 500 page on error when true
# NOTE: a pylons stacktrace allows remote code execution. make sure this is false in prod.
debug = false
# enables/disables whitespace removal in rendered html
template_debug = false
# enables/disables compiled template caching and template file mtime checking
reload_templates = true
# use uncompressed static files (out of /static/js and /static/css)
# rather than compressed files out of /static (for development if true)
uncompressedJS = true
# enable/disable verbose logging of SQL queries
sqlprinting = false
# directory to write cProfile stats dumps to (disabled if not set)
profile_directory =
# template names to record render timings for
timed_templates = Reddit, Link, Comment, LinkListing, NestedListing, SubredditTopBar
############################################ PLUGINS
# which plugins are enabled (they must be installed via setup.py first)
plugins =
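# e.g., to enable the plugins configured further down in this file (assuming
# they are installed): plugins = about, liveupdate, meatspace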
###### about
# set which subreddits the about page pulls its source data from
# make sure you create the subreddits as well, otherwise you'll get an error
about_sr_quotes = about_quotes
about_sr_images = about_images
# size limit on the about page slideshow
about_images_count = 50
about_images_min_score = 1
advertising_links_sr = advertising_links
wiki_page_selfserve_advertisers = advertising/advertisers
wiki_page_selfserve_content = advertising/content
wiki_page_selfserve_blurbs = advertising/blurbs
wiki_page_selfserve_quotes = advertising/quotes
wiki_page_selfserve_help = advertising/help
###### liveupdate
# the domain the activity pixel is hosted on
liveupdate_pixel_domain = %(domain)s
# maximum number of outstanding invites an event can have
liveupdate_invite_quota = 5
###### meatspace
# nothing!
############################################ LOGGING
# whether to print a "reddit app started" message at start
log_start = true
# enable/disable logging of exceptions and events via amqp/rabbitmq
amqp_logging = false
# exception reporter objects to give to ErrorMiddleware (see log.py)
error_reporters =
############################################ METRICS
# where to send metrics
statsd_addr =
# how often to send them [0.0 - 1.0]
statsd_sample_rate = 1.0
# percentage of stats for sampling (0-100)
stats_sample_rate = 1
############################################ MEDIA STORAGE
# which backend provider to use for media (thumbnails, subreddit stylesheets,
# subreddit images, app icons). options are:
# s3 - use amazon s3
# filesystem - write to local filesystem
# or write your own!
media_provider = s3
# s3 provider
# your s3 credentials -- if these are left blank, None is passed to boto,
# which will then look for credentials in its usual places (environment,
# config files, and instance metadata if running on ec2)
S3KEY_ID =
S3SECRET_KEY =
# May be one bucket, or many buckets separated by commas
s3_media_buckets =
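# e.g. (hypothetical bucket names): s3_media_buckets = myreddit-thumbs-a, myreddit-thumbs-b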
# Store direct (path-style) urls for images, rather than using the bucket name as the hostname
# For the bucket "mybucket" containing the image "helloworld.jpg", the stored url would be:
# true: http://{s3_media_domain}/mybucket/helloworld.jpg
# false: http://mybucket/helloworld.jpg
s3_media_direct = true
# Which S3 region to use
# If using the US region, the default is correct. Otherwise, use a region-specific
# endpoint as in http://docs.aws.amazon.com/AmazonS3/latest/dev/VirtualHosting.html
s3_media_domain = s3.amazonaws.com
# filesystem provider configuration
media_fs_root =
media_fs_base_url_http =
thumbnail_size = 70, 70
thumbnail_hidpi_scaling = 2
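# (with the values above, hidpi thumbnails would presumably be rendered at 140x140)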
############################################ EMERGENCY MODES
# emergency measures: makes the site read only
read_only_mode = false
# a modified read only mode used for cache shown during heavy load 503s
heavy_load_mode = false
# override default site language for things like pirate day
lang_override =
# enable/disable automatic creation of database tables/column families
db_create_tables = True
# are we allowed to write to databases at all?
disallow_db_writes = False
# disable custom subreddit stylesheets
css_killswitch = False
############################################ SCRAPER
# user agent for the scraper
useragent = Mozilla/5.0 (compatible; redditbot/1.0; +http://www.reddit.com/feedback)
# Embedly API Key. if no key is provided, the scraper will only fetch thumbnails.
embedly_api_key =
# A tuple of media_object types that will automatically be expanded on the comments view.
# Ex: liveupdate, custom
autoexpand_media_types = liveupdate
############################################ NEWSLETTER
# the Campaign Monitor list ID used for the newsletter
newsletter_list_id =
############################################ QUOTAS
# quota for various types of relations creatable in subreddits
sr_banned_quota = 10000
sr_moderator_invite_quota = 10000
sr_contributor_quota = 10000
sr_wikibanned_quota = 10000
sr_wikicontributor_quota = 10000
sr_quota_time = 7200
sr_invite_limit = 25
# delay before allowing a link to be shared
new_link_share_delay = 30 seconds
# max number of uploaded images per subreddit
max_sr_images = 50
############################################ RATELIMITS
# If true, send 429 responses on exceeded ratelimits
# If false, send headers only, but don't abort
# Only applies if tracking is enabled below
ENFORCE_RATELIMIT = false
# If true, store per-user request counts in ratelimits cache
RL_SITEWIDE_ENABLED = true
# How large of a burst window will users be allowed?
RL_RESET_MINUTES = 10
# What is the average request rate over the above time period?
RL_AVG_REQ_PER_SEC = 0.5
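# (with the defaults above, that works out to roughly 0.5 req/sec * 600 sec,
# i.e. about 300 requests per 10-minute window)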
# Same as above, but configured separately for connections via OAuth
RL_OAUTH_SITEWIDE_ENABLED = true
RL_OAUTH_RESET_MINUTES = 10
RL_OAUTH_AVG_REQ_PER_SEC = 0.5
# same as above, but for failed login attempts per account
RL_LOGIN_AVG_PER_SEC = .1
# user agent substrings to hard-ratelimit to a number of requests per ten second period
# example: agents = googlebot:10, appengine:2
agents =
# karma needed to avoid per-subreddit submission ratelimits
MIN_RATE_LIMIT_KARMA = 10
MIN_RATE_LIMIT_COMMENT_KARMA = 1
############################################ THRESHOLDS
# minimum item score to be considered for quota baskets
QUOTA_THRESHOLD = 5
# if the user has positive total karma, their per-subreddit karma will default to this, else 0
MIN_UP_KARMA = 1
# minimum user karma total values to show to non-admins
link_karma_display_floor = 1
comment_karma_display_floor = -100
# age at which links/comments become "archived" and can no longer be voted on, replied to, or reported
ARCHIVE_AGE = 180 days
# the maximum amount of time we use memcache to hide that a vote hasn't been
# asynchronously processed yet.
vote_queue_grace_period = 1 hour
# minimum age of an account (in days) for the "create a subreddit" button to show
min_membership_create_community = 30
# maximum age (in days) of items eligible for display on normalized hot pages (frontpage, multis, etc.)
HOT_PAGE_AGE = 1000
# how long to consider links eligible for the rising page
rising_period = 12 hours
# default number of comments shown
num_comments = 100
# max number of comments to show at once
max_comments = 500
max_comments_gold = 2500
# max number of parents to walk up the tree while uncollapsing replies in Q&A
# sort mode
max_comment_parent_walk = 20
# how deep do we go into the top listing when fetching /random
num_serendipity = 250
# number of subscriptions a user needs to have before the "my subreddits"
# dropdown is shown
sr_dropdown_threshold = 15
# Conflate visits to a comment page that happen within this many
# seconds of each other (gold "new comments" feature)
comment_visits_period = 600
# Max number of mentions to extract per comment
butler_max_mentions = 3
# Number of days to keep recent wiki revisions for
wiki_keep_recent_days = 7
# Max number of bytes for wiki pages
wiki_max_page_length_bytes = 262144
# Max wiki page name length
wiki_max_page_name_length = 128
# Max number of separators in a wiki page name
wiki_max_page_separators = 3
############################################ SEARCH
# endpoint for link search
CLOUDSEARCH_SEARCH_API =
# endpoint for link upload
CLOUDSEARCH_DOC_API =
# endpoint for subreddit search
CLOUDSEARCH_SUBREDDIT_SEARCH_API =
# endpoint for subreddit upload
CLOUDSEARCH_SUBREDDIT_DOC_API =
############################################ MEMCACHE
num_mc_clients = 5
# core memcache cluster, Things and various other stuff
memcaches = 127.0.0.1:11211
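# (the cache host settings in this section appear to accept a comma-separated
# list; e.g., hypothetical hosts: memcaches = 10.0.0.1:11211, 10.0.0.2:11211)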
# caches used for @memoize decorator magic
memoizecaches = 127.0.0.1:11211
# hosts to store memcache-based locks on
lockcaches = 127.0.0.1:11211
# hosts to store rendered template fragments in
rendercaches = 127.0.0.1:11211
# hosts to store entire rendered pages in
pagecaches = 127.0.0.1:11211
# hosts that cache permacache cassandra data
permacache_memcaches = 127.0.0.1:11211
# hosts that cache srmember rels (subset of Thing data)
srmembercaches = 127.0.0.1:11211
# hosts that cache relations between Things
relcaches = 127.0.0.1:11211
# a local cache that's not globally consistent and can have stale data (optional)
stalecaches =
# cache for tracking rate limit thresholds
ratelimitcaches = 127.0.0.1:11211
############################################ MISCELLANEOUS
# default localization for strings (when using python's locale.format)
# (mostly replaced by babel, this shouldn't be necessary to change)
locale = C
# storage timezone, should probably not be changed from UTC
timezone = UTC
# timezone for display of some data, deprecated
display_timezone = MST
# location of the static directory
static_path = /static/
# Just a list of words. Used by errorlog.py to make up names for new errors.
words_file = /usr/dict/words
# domains that we consider URLs case sensitive for repost detection purposes
case_sensitive_domains = i.imgur.com, youtube.com
# Domains that we know are friendly and host raw image files
known_image_domains = i.imgur.com, giant.gfycat.com, pbs.twimg.com, upload.wikimedia.org
# whether to load reddit private code (a hack until we structure it better)
import_private = false
# location of geoip service
geoip_location = http://127.0.0.1:5000
# account name that AutoModerator actions will be done by
automoderator_account =
############################################ AUTHENTICATION
# how to authenticate users. options are:
# cookie: standard cookie-based auth
# http: http basic authentication
# or write your own!
authentication_provider = cookie
# the work factor for bcrypt, increment this every time computers double in
# speed. don't worry, changing this won't break old passwords
bcrypt_work_factor = 12
# name of the cookie to drop with login information
login_cookie = reddit_session
# name of the admin cookie
admin_cookie = reddit_admin
# name of the otp cookie
otp_cookie = reddit_otp
# the maximum life of an admin cookie (seconds)
ADMIN_COOKIE_TTL = 32400
# the maximum amount of idle time for an admin cookie (seconds)
ADMIN_COOKIE_MAX_IDLE = 900
# the maximum life of an otp cookie
OTP_COOKIE_TTL = 604800
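# (the three values above correspond to 9 hours, 15 minutes of idle time, and 7 days)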
# funky app user agents exempt from the login CSRF prevention
exempt_login_user_agents =
# for the http provider:
# should we trust the authorization header and auto-register accounts?
auth_trust_http_authorization = false
############################################ SSL
# max-age for Strict Transport Security, setting this to 0 disables
# HSTS and revokes any previous HSTS grants.
hsts_max_age = 10886400
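# (the default of 10886400 seconds is 126 days, i.e. 18 weeks)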
############################################ CASSANDRA
# cassandra hosts
cassandra_seeds = 127.0.0.1:9160
# number of connections to keep open to the cassandra ring
cassandra_pool_size = 5
# default read/write consistency levels for Cassandra
cassandra_rcl = ONE
cassandra_wcl = ONE
# name of default connection pool to use when _connection_pool not specified
cassandra_default_pool = main
############################################ AMQP
amqp_host = localhost:5672
amqp_user = reddit
amqp_pass = reddit
amqp_virtual_host = /
############################################ ZOOKEEPER
# zookeeper is optional at the moment
zookeeper_connection_string =
zookeeper_username =
zookeeper_password =
############################################ EMAIL
smtp_server = localhost
# where to send alerts for exceptions, etc.
nerds_email = nerds@reddit.com
# the "from" address for link share emails
share_reply = noreply@reddit.com
# where to send feedback comments
feedback_email = reddit@gmail.com
# the "from" address for orangered notifications
notification_email = notifications@example.com
# email to ads team
ads_email = ad-ops@reddit.com
############################################ POSTGRES
db_user = reddit
db_pass = password
db_port = 5432
db_pool_size = 3
db_pool_overflow_size = 3
# list of all databases named in the subsequent table
databases = main, comment, email, authorize, award, hc, traffic
# format: <name>_db = db name, db host, user, pass, port, conn, overflow_conn
main_db = reddit, 127.0.0.1, *, *, *, *, *
comment_db = reddit, 127.0.0.1, *, *, *, *, *
comment2_db = reddit, 127.0.0.1, *, *, *, *, *
email_db = reddit, 127.0.0.1, *, *, *, *, *
authorize_db = reddit, 127.0.0.1, *, *, *, *, *
award_db = reddit, 127.0.0.1, *, *, *, *, *
hc_db = reddit, 127.0.0.1, *, *, *, *, *
traffic_db = reddit, 127.0.0.1, *, *, *, *, *
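# e.g., to point the main database at a dedicated host with explicit credentials
# (hypothetical values): main_db = reddit, 10.0.0.10, reddit, secret, 5432, *, *
# (the '*' entries appear to fall back to the db_user/db_pass/db_port/
# db_pool_size/db_pool_overflow_size defaults above)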
hardcache_categories = *:hc:hc
# this setting will prefix all of the table names
db_app_name = reddit
type_db = main
rel_type_db = main
hardcache_db = main
# definitions of what each table is (probably shouldn't change in .update files)
# things require no extra info. relation is followed by the names of the related tables
# a !typeid=# flag allows overriding of the type id for a thing/rel to avoid the db
# hit on app startup.
db_table_link = thing
db_table_account = thing
db_table_message = thing
db_table_comment = thing
db_table_subreddit = thing
db_table_srmember = relation, subreddit, account
db_table_friend = relation, account, account
db_table_inbox_account_comment = relation, account, comment
db_table_inbox_account_message = relation, account, message
db_table_moderatorinbox = relation, subreddit, message
db_table_report_account_link = relation, account, link
db_table_report_account_comment = relation, account, comment
db_table_report_account_message = relation, account, message
db_table_report_account_subreddit = relation, account, subreddit
db_table_award = thing
db_table_trophy = relation, account, award
db_table_jury_account_link = relation, account, link
db_table_flair = relation, subreddit, account
db_table_promocampaign = thing
# which servers to find each table on (likely to change in .update files)
# first server listed is assumed to be the master, all others are read-only slaves
# additionally, a "!avoid_master" flag may be added to specify that reads should use the slaves
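# e.g. (illustrative only, assuming a 'mainreplica' database were defined above):
#   db_servers_link = main, mainreplica, !avoid_master
# would write link data to 'main' while directing reads to the replica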
db_servers_link = main, main
db_servers_account = main
db_servers_message = main
db_servers_comment = comment
db_servers_subreddit = comment
db_servers_srmember = comment
db_servers_friend = comment
db_servers_inbox_account_comment = main
db_servers_inbox_account_message = main
db_servers_moderatorinbox = main
db_servers_report_account_link = main
db_servers_report_account_comment = comment
db_servers_report_account_message = main
db_servers_report_account_subreddit = main
db_servers_award = award
db_servers_trophy = award
db_servers_jury_account_link = main
db_servers_ad = main
db_servers_adsr = main
db_servers_flair = main
db_servers_promocampaign = main
############################################ GOLD
goldpayment_email =
# "thanks for subscribing to reddit gold" return address
goldsupport_email =
gold_month_price = 3.99
gold_year_price = 29.99
PAYPAL_BUTTONID_ONETIME_BYMONTH =
PAYPAL_BUTTONID_ONETIME_BYYEAR =
PAYPAL_BUTTONID_AUTORENEW_BYMONTH =
PAYPAL_BUTTONID_AUTORENEW_BYYEAR =
PAYPAL_BUTTONID_CREDDITS_BYMONTH =
PAYPAL_BUTTONID_CREDDITS_BYYEAR =
PAYPAL_BUTTONID_GIFTCODE_BYMONTH =
PAYPAL_BUTTONID_GIFTCODE_BYYEAR =
STRIPE_MONTHLY_GOLD_PLAN =
STRIPE_YEARLY_GOLD_PLAN =
COINBASE_BUTTONID_ONETIME_1MO =
COINBASE_BUTTONID_ONETIME_3MO =
COINBASE_BUTTONID_ONETIME_1YR =
COINBASE_BUTTONID_ONETIME_2YR =
COINBASE_BUTTONID_ONETIME_3YR =
############################################ SELF-SERVE ADS
selfserve_support_email = selfservesupport@mydomain.com
MAX_CAMPAIGNS_PER_LINK = 100
cpm_selfserve = 1.00
cpm_selfserve_geotarget_metro = 2.00
cpm_selfserve_collection = 0.75
authorizenetapi =
default_promote_bid = 50
min_promote_bid = 20
max_promote_bid = 9999
adserver_click_domain =
############################################ TRAFFIC
TRAFFIC_ACCESS_KEY =
TRAFFIC_SECRET_KEY =
RAW_LOG_DIR =
PROCESSED_DIR =
AGGREGATE_DIR =
AWS_LOG_DIR =
TRAFFIC_SRC_DIR =
TRAFFIC_LOG_HOSTS =
############################################ PERFORMANCE / SCALING
# should we split link votes into separate queues based on subreddit id?
# this helps with lock contention but isn't necessary on smaller sites
shard_link_vote_queues = false
# should we split comment tree processing into shards based on the link id?
# this helps with lock contention but isn't necessary on smaller sites
shard_commentstree_queues = false
# chance of a write to the query cache triggering pruning. increasing this will
# potentially slow down writes, but will keep the size of cached queries in check better
querycache_prune_chance = 0.05
# time for the page cache (for unlogged in users)
page_cache_time = 90
# time for the comment pane cache (for a subset of logged in users, see pages.py:CommentPane)
commentpane_cache_time = 120
[server:main]
use = egg:Paste#http
host = 0.0.0.0
port = %(http_port)s
[filter:gzip]
use = egg:r2#gzip
compress_level = 6
min_size = 800
[app:main]
use = egg:r2
cache_dir = %(here)s/data
[loggers]
keys = root
[logger_root]
level = WARNING
handlers = console
[handlers]
keys = console
[handler_console]
class = StreamHandler
args = (sys.stdout,)
[formatters]
keys = reddit
[formatter_reddit]
format = %(message)s
# the following configuration section makes up the "live" config. if zookeeper
# is enabled, then this configuration will be found by the app in zookeeper. to
# write it to zookeeper, use the writer script: scripts/write_live_config.
[live_config]
# permissions! each user should have one of admin, sponsor, or employee as their permission level
employees = reddit:admin
# links that get their own infrastructure (comma-delimited list of id36s)
fastlane_links =
# a message placed in the infobar
announcement_message =
# an info message placed in the sidebar
sidebar_message =
# an info message placed in the sidebar for gold users
gold_sidebar_message =
# probability of the subreddit suggester showing up in the spotlight box
# for users that have at some point edited their subscriptions:
spotlight_interest_sub_p = .05
# and for users that have not ever subscribed:
spotlight_interest_nosub_p = .1
# map of comment tree version to how frequently it should be chosen relative to
# the others
comment_tree_version_weights = 1:1, 2:0, 3:0
# enables/disables client side logging POSTs to /web/log/...
frontend_logging = true
# daily gold revenue goal (in pennies) for progress bar thing
gold_revenue_goal = 0
# messages to display in the "you're new here" welcome bar
# space-delimited list of strings with / to indicate newlines
welcomebar_messages =
# sample multireddits (displayed when a user has no multis)
listing_chooser_sample_multis = /user/reddit/m/hello, /user/reddit/m/world
# multi of subreddits to share with gold users
listing_chooser_gold_multi = /user/reddit/m/gold
# subreddit showcasing new multireddits
listing_chooser_explore_sr =
# subreddits that help people discover more subreddits (used in explore tab)
discovery_srs =
# historical cost to run a reddit server
pennies_per_server_second = 1970/1/1:1
# lowercased names of accounts that API clients use as "proxies" for gilding
proxy_gilding_accounts =
# Controversial item determination
# Criteria for an item to meet to be determined as controversial
cflag_min_votes = 7
cflag_lower_bound = 0.4
cflag_upper_bound = 0.6
# Karma requirements to disable captchas - must meet at least one
captcha_exempt_link_karma = 1
captcha_exempt_comment_karma = 1
# Requirements to allow creating a subreddit - must meet age req + at least one karma req
create_sr_account_age_days = 0
create_sr_link_karma = 0
create_sr_comment_karma = 0
# Sample rate for event-collector processing
events_collector_sample_rate = 0.0
#### Features
# Availability for the "force HTTPS" option
feature_allow_force_https = {"employee": true}
# Who is required to use HTTPS?
feature_require_https = off
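# e.g. (illustrative, using the same targeting syntax as above):
#   feature_require_https = {"employee": true}
# would presumably require HTTPS for employee accounts only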
# HSTS grants are disabled by default since they make it a pain for devs
# to use local HTTP services. Beware that this will disable grant revocation
# as well.
feature_give_hsts_grants = off
feature_multireddit_customizations = off