app_globals.py
# The contents of this file are subject to the Common Public Attribution
# License Version 1.0. (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
# License Version 1.1, but Sections 14 and 15 have been added to cover use of
# software over a computer network and provide for limited attribution for the
# Original Developer. In addition, Exhibit A has been modified to be consistent
# with Exhibit B.
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
# the specific language governing rights and limitations under the License.
#
# The Original Code is reddit.
#
# The Original Developer is the Initial Developer. The Initial Developer of
# the Original Code is reddit Inc.
#
# All portions of the code written by reddit are Copyright (c) 2006-2013 reddit
# Inc. All Rights Reserved.
###############################################################################
from datetime import datetime
from urlparse import urlparse
import base64
import ConfigParser
import locale
import json
import logging
import os
import signal
import site
import socket
import subprocess
import sys
from sqlalchemy import engine, event
import cssutils
import pkg_resources
import pytz
from r2.config import queues
from r2.lib.cache import (
    CacheChain,
    CassandraCache,
    CassandraCacheChain,
    CL_ONE,
    CL_QUORUM,
    CMemcache,
    HardCache,
    HardcacheChain,
    LocalCache,
    MemcacheChain,
    SelfEmptyingCache,
    StaleCacheChain,
)
from r2.lib.configparse import ConfigValue, ConfigValueParser
from r2.lib.contrib import ipaddress
from r2.lib.lock import make_lock_factory
from r2.lib.manager import db_manager
from r2.lib.plugin import PluginLoader
from r2.lib.providers import select_provider
from r2.lib.stats import Stats, CacheStats, StatsCollectingConnectionPool
from r2.lib.translation import get_active_langs, I18N_PATH
from r2.lib.utils import config_gold_price, thread_dump
LIVE_CONFIG_NODE = "/config/live"
SECRETS_NODE = "/config/secrets"


def extract_live_config(config, plugins):
    """Gets live config out of INI file and validates it according to spec."""
    # ConfigParser will include every value in DEFAULT (which paste abuses)
    # if we do this the way we're supposed to. sorry for the horribleness.
    live_config = config._sections["live_config"].copy()
    del live_config["__name__"]  # magic value used by ConfigParser

    # parse the config data including specs from plugins
    parsed = ConfigValueParser(live_config)
    parsed.add_spec(Globals.live_config_spec)
    for plugin in plugins:
        parsed.add_spec(plugin.live_config)

    return parsed


def _decode_secrets(secrets):
    return {key: base64.b64decode(value) for key, value in secrets.iteritems()}


def extract_secrets(config):
    # similarly to the live_config one above, if we just did
    # .options("secrets") we'd get back all the junk from DEFAULT too. bleh.
    secrets = config._sections["secrets"].copy()
    del secrets["__name__"]  # magic value used by ConfigParser
    return _decode_secrets(secrets)


def fetch_secrets(zk_client):
    node_data = zk_client.get(SECRETS_NODE)[0]
    secrets = json.loads(node_data)
    return _decode_secrets(secrets)
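
# Illustrative note (not part of the original file): both the [secrets] INI
# section and the JSON object stored at the /config/secrets ZooKeeper node map
# secret names to base64-encoded byte strings. A hypothetical entry such as
#   example_token = aHVudGVyMg==
# would decode via _decode_secrets to {"example_token": "hunter2"}.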


class Globals(object):
    spec = {
        ConfigValue.int: [
            'db_pool_size',
            'db_pool_overflow_size',
            'page_cache_time',
            'commentpane_cache_time',
            'num_mc_clients',
            'MAX_CAMPAIGNS_PER_LINK',
            'MIN_DOWN_LINK',
            'MIN_UP_KARMA',
            'MIN_DOWN_KARMA',
            'MIN_RATE_LIMIT_KARMA',
            'MIN_RATE_LIMIT_COMMENT_KARMA',
            'VOTE_AGE_LIMIT',
            'REPLY_AGE_LIMIT',
            'REPORT_AGE_LIMIT',
            'HOT_PAGE_AGE',
            'QUOTA_THRESHOLD',
            'ADMIN_COOKIE_TTL',
            'ADMIN_COOKIE_MAX_IDLE',
            'OTP_COOKIE_TTL',
            'num_comments',
            'max_comments',
            'max_comments_gold',
            'num_default_reddits',
            'max_sr_images',
            'num_serendipity',
            'sr_dropdown_threshold',
            'comment_visits_period',
            'min_membership_create_community',
            'bcrypt_work_factor',
            'cassandra_pool_size',
            'sr_banned_quota',
            'sr_wikibanned_quota',
            'sr_wikicontributor_quota',
            'sr_moderator_invite_quota',
            'sr_contributor_quota',
            'sr_quota_time',
            'sr_invite_limit',
            'wiki_keep_recent_days',
            'wiki_max_page_length_bytes',
            'wiki_max_page_name_length',
            'wiki_max_page_separators',
            'min_promote_future',
            'max_promote_future',
            'RL_RESET_MINUTES',
            'RL_OAUTH_RESET_MINUTES',
        ],
        ConfigValue.float: [
            'min_promote_bid',
            'max_promote_bid',
            'statsd_sample_rate',
            'querycache_prune_chance',
            'RL_AVG_REQ_PER_SEC',
            'RL_OAUTH_AVG_REQ_PER_SEC',
        ],
        ConfigValue.bool: [
            'debug',
            'log_start',
            'sqlprinting',
            'template_debug',
            'reload_templates',
            'uncompressedJS',
            'css_killswitch',
            'db_create_tables',
            'disallow_db_writes',
            'disable_ratelimit',
            'amqp_logging',
            'read_only_mode',
            'disable_wiki',
            'heavy_load_mode',
            'disable_captcha',
            'disable_ads',
            'disable_require_admin_otp',
            'static_pre_gzipped',
            'static_secure_pre_gzipped',
            'trust_local_proxies',
            'shard_link_vote_queues',
            'shard_commentstree_queues',
            'subreddit_stylesheets_static',
            'ENFORCE_RATELIMIT',
            'RL_SITEWIDE_ENABLED',
            'RL_OAUTH_SITEWIDE_ENABLED',
        ],
        ConfigValue.tuple: [
            'plugins',
            'stalecaches',
            'memcaches',
            'lockcaches',
            'permacache_memcaches',
            'rendercaches',
            'pagecaches',
            'memoizecaches',
            'srmembercaches',
            'ratelimitcaches',
            'cassandra_seeds',
            'admins',
            'sponsors',
            'employees',
            'automatic_reddits',
            'hardcache_categories',
            'case_sensitive_domains',
            'reserved_subdomains',
            'offsite_subdomains',
            'TRAFFIC_LOG_HOSTS',
            'exempt_login_user_agents',
            'timed_templates',
        ],
        ConfigValue.dict(ConfigValue.str, ConfigValue.int): [
            'agents',
        ],
        ConfigValue.str: [
            'wiki_page_registration_info',
            'wiki_page_privacy_policy',
            'wiki_page_user_agreement',
            'wiki_page_gold_bottlecaps',
        ],
        ConfigValue.choice: {
            'cassandra_rcl': {
                'ONE': CL_ONE,
                'QUORUM': CL_QUORUM
            },
            'cassandra_wcl': {
                'ONE': CL_ONE,
                'QUORUM': CL_QUORUM
            },
        },
        config_gold_price: [
            'gold_month_price',
            'gold_year_price',
            'cpm_selfserve',
            'cpm_selfserve_geotarget',
        ],
    }
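
    # Illustrative note (assumption, not from the original source): each key in
    # ``spec`` is a parser/validator applied to the raw INI strings when
    # ConfigValueParser.add_spec(self.spec) runs in __init__ below, so e.g. a
    # hypothetical line ``db_pool_size = 5`` in the app config would become the
    # int 5 on the globals object rather than the string "5".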

    live_config_spec = {
        ConfigValue.bool: [
            'frontend_logging',
        ],
        ConfigValue.float: [
            'spotlight_interest_sub_p',
            'spotlight_interest_nosub_p',
            'gold_revenue_goal',
        ],
        ConfigValue.tuple: [
            'fastlane_links',
            'listing_chooser_sample_multis',
            'discovery_srs',
        ],
        ConfigValue.str: [
            'listing_chooser_gold_multi',
            'listing_chooser_explore_sr',
        ],
        ConfigValue.dict(ConfigValue.int, ConfigValue.float): [
            'comment_tree_version_weights',
        ],
        ConfigValue.messages: [
            'welcomebar_messages',
            'sidebar_message',
            'gold_sidebar_message',
        ],
        ConfigValue.dict(ConfigValue.str, ConfigValue.float): [
            'pennies_per_server_second',
        ],
    }

    def __init__(self, global_conf, app_conf, paths, **extra):
        """
        Globals acts as a container for objects available throughout
        the life of the application.

        One instance of Globals is created by Pylons during
        application initialization and is available during requests
        via the 'g' variable.

        ``global_conf``
            The same variable used throughout ``config/middleware.py``
            namely, the variables from the ``[DEFAULT]`` section of the
            configuration file.

        ``app_conf``
            The same ``kw`` dictionary used throughout
            ``config/middleware.py`` namely, the variables from the
            section in the config file for your application.

        ``extra``
            The configuration returned from ``load_config`` in
            ``config/middleware.py`` which may be of use in the setup of
            your global variables.
        """
        global_conf.setdefault("debug", False)

        # reloading site ensures that we have a fresh sys.path to build our
        # working set off of. this means that forked worker processes won't get
        # the sys.path that was current when the master process was spawned
        # meaning that new plugins will be picked up on regular app reload
        # rather than having to restart the master process as well.
        reload(site)
        self.pkg_resources_working_set = pkg_resources.WorkingSet()

        self.config = ConfigValueParser(global_conf)
        self.config.add_spec(self.spec)
        self.plugins = PluginLoader(self.pkg_resources_working_set,
                                    self.config.get("plugins", []))

        self.stats = Stats(self.config.get('statsd_addr'),
                           self.config.get('statsd_sample_rate'))
        self.startup_timer = self.stats.get_timer("app_startup")
        self.startup_timer.start()

        self.paths = paths
        self.running_as_script = global_conf.get('running_as_script', False)

        # turn on for language support
        self.lang = getattr(self, 'site_lang', 'en')
        self.languages, self.lang_name = \
            get_active_langs(default_lang=self.lang)

        all_languages = self.lang_name.keys()
        all_languages.sort()
        self.all_languages = all_languages

        # set default time zone if one is not set
        tz = global_conf.get('timezone', 'UTC')
        self.tz = pytz.timezone(tz)
        dtz = global_conf.get('display_timezone', tz)
        self.display_tz = pytz.timezone(dtz)

        self.startup_timer.intermediate("init")

    def __getattr__(self, name):
        if not name.startswith('_') and name in self.config:
            return self.config[name]
        else:
            raise AttributeError
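
    # Illustrative note (assumption, not in the original file): because of the
    # __getattr__ proxy above, any validated config value can be read as an
    # attribute of the globals object, e.g. ``g.debug`` or ``g.domain``, which
    # is how the rest of the codebase refers to these settings during requests.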

    def setup(self):
        self.queues = queues.declare_queues(self)

        ################# PROVIDERS
        self.media_provider = select_provider(
            self.config,
            self.pkg_resources_working_set,
            "r2.provider.media",
            self.media_provider,
        )
        self.startup_timer.intermediate("providers")

        ################# CONFIGURATION
        # AMQP is required
        if not self.amqp_host:
            raise ValueError("amqp_host not set in the .ini")

        if not self.cassandra_seeds:
            raise ValueError("cassandra_seeds not set in the .ini")

        # heavy load mode is read only mode with a different infobar
        if self.heavy_load_mode:
            self.read_only_mode = True

        origin_prefix = self.domain_prefix + "." if self.domain_prefix else ""
        self.origin = "http://" + origin_prefix + self.domain

        self.trusted_domains = set([self.domain])
        if self.https_endpoint:
            https_url = urlparse(self.https_endpoint)
            self.trusted_domains.add(https_url.hostname)

        # load the unique hashed names of files under static
        static_files = os.path.join(self.paths.get('static_files'), 'static')
        names_file_path = os.path.join(static_files, 'names.json')
        if os.path.exists(names_file_path):
            with open(names_file_path) as handle:
                self.static_names = json.load(handle)
        else:
            self.static_names = {}

        # make python warnings go through the logging system
        logging.captureWarnings(capture=True)

        log = logging.getLogger('reddit')

        # when we're a script (paster run) just set up super simple logging
        if self.running_as_script:
            log.setLevel(logging.INFO)
            log.addHandler(logging.StreamHandler())

        # if in debug mode, override the logging level to DEBUG
        if self.debug:
            log.setLevel(logging.DEBUG)

        # attempt to figure out which pool we're in and add that to the
        # LogRecords.
        try:
            with open("/etc/ec2_asg", "r") as f:
                pool = f.read().strip()

            # clean up the pool name since we're putting stuff after "-"
            pool = pool.partition("-")[0]
        except IOError:
            pool = "reddit-app"
        self.log = logging.LoggerAdapter(log, {"pool": pool})

        # make cssutils use the real logging system
        csslog = logging.getLogger("cssutils")
        cssutils.log.setLog(csslog)

        # set locations
        self.locations = {}

        if not self.media_domain:
            self.media_domain = self.domain
        if self.media_domain == self.domain:
            print >> sys.stderr, ("Warning: g.media_domain == g.domain. " +
                                  "This may give untrusted content access to user cookies")

        for arg in sys.argv:
            tokens = arg.split("=")
            if len(tokens) == 2:
                k, v = tokens
                self.log.debug("Overriding g.%s to %s" % (k, v))
                setattr(self, k, v)

        self.reddit_host = socket.gethostname()
        self.reddit_pid = os.getpid()

        if hasattr(signal, 'SIGUSR1'):
            # not all platforms have user signals
            signal.signal(signal.SIGUSR1, thread_dump)

        locale.setlocale(locale.LC_ALL, self.locale)

        # Pre-calculate ratelimit values
        self.RL_RESET_SECONDS = self.config["RL_RESET_MINUTES"] * 60
        self.RL_MAX_REQS = int(self.config["RL_AVG_REQ_PER_SEC"] *
                               self.RL_RESET_SECONDS)

        self.RL_OAUTH_RESET_SECONDS = self.config["RL_OAUTH_RESET_MINUTES"] * 60
        self.RL_OAUTH_MAX_REQS = int(self.config["RL_OAUTH_AVG_REQ_PER_SEC"] *
                                     self.RL_OAUTH_RESET_SECONDS)
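
        # Illustrative arithmetic (hypothetical numbers, not from any shipped
        # config): with RL_RESET_MINUTES = 10 and RL_AVG_REQ_PER_SEC = 0.5, a
        # client gets RL_RESET_SECONDS = 600 and RL_MAX_REQS = int(0.5 * 600)
        # = 300 requests per ratelimit window; the OAUTH pair works the same way.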
self.startup_timer.intermediate("configuration")
################# ZOOKEEPER
# for now, zookeeper will be an optional part of the stack.
# if it's not configured, we will grab the expected config from the
# [live_config] section of the ini file
zk_hosts = self.config.get("zookeeper_connection_string")
if zk_hosts:
from r2.lib.zookeeper import (connect_to_zookeeper,
LiveConfig, LiveList)
zk_username = self.config["zookeeper_username"]
zk_password = self.config["zookeeper_password"]
self.zookeeper = connect_to_zookeeper(zk_hosts, (zk_username,
zk_password))
self.live_config = LiveConfig(self.zookeeper, LIVE_CONFIG_NODE)
self.secrets = fetch_secrets(self.zookeeper)
self.throttles = LiveList(self.zookeeper, "/throttles",
map_fn=ipaddress.ip_network,
reduce_fn=ipaddress.collapse_addresses)
else:
self.zookeeper = None
parser = ConfigParser.RawConfigParser()
parser.optionxform = str
parser.read([self.config["__file__"]])
self.live_config = extract_live_config(parser, self.plugins)
self.secrets = extract_secrets(parser)
self.throttles = tuple() # immutable since it's not real
self.startup_timer.intermediate("zookeeper")
################# MEMCACHE
num_mc_clients = self.num_mc_clients
# the main memcache pool. used for most everything.
self.memcache = CMemcache(
self.memcaches,
min_compress_len=50 * 1024,
num_clients=num_mc_clients,
)
# a pool just used for @memoize results
memoizecaches = CMemcache(
self.memoizecaches,
min_compress_len=50 * 1024,
num_clients=num_mc_clients,
)
# a pool just for srmember rels
srmembercaches = CMemcache(
self.srmembercaches,
min_compress_len=50 * 1024,
num_clients=num_mc_clients,
)
ratelimitcaches = CMemcache(
self.ratelimitcaches,
min_compress_len=96,
num_clients=num_mc_clients,
)
# a smaller pool of caches used only for distributed locks.
# TODO: move this to ZooKeeper
self.lock_cache = CMemcache(self.lockcaches,
num_clients=num_mc_clients)
self.make_lock = make_lock_factory(self.lock_cache, self.stats)
# memcaches used in front of the permacache CF in cassandra.
# XXX: this is a legacy thing; permacache was made when C* didn't have
# a row cache.
if self.permacache_memcaches:
permacache_memcaches = CMemcache(self.permacache_memcaches,
min_compress_len=50 * 1024,
num_clients=num_mc_clients)
else:
permacache_memcaches = None
# the stalecache is a memcached local to the current app server used
# for data that's frequently fetched but doesn't need to be fresh.
if self.stalecaches:
stalecaches = CMemcache(self.stalecaches,
num_clients=num_mc_clients)
else:
stalecaches = None
# rendercache holds rendered partial templates.
rendercaches = CMemcache(
self.rendercaches,
noreply=True,
no_block=True,
num_clients=num_mc_clients,
min_compress_len=480,
)
# pagecaches hold fully rendered pages
pagecaches = CMemcache(
self.pagecaches,
noreply=True,
no_block=True,
num_clients=num_mc_clients,
min_compress_len=1400,
)
self.startup_timer.intermediate("memcache")
################# CASSANDRA
keyspace = "reddit"
self.cassandra_pools = {
"main":
StatsCollectingConnectionPool(
keyspace,
stats=self.stats,
logging_name="main",
server_list=self.cassandra_seeds,
pool_size=self.cassandra_pool_size,
timeout=4,
max_retries=3,
prefill=False
),
}
permacache_cf = CassandraCache(
'permacache',
self.cassandra_pools[self.cassandra_default_pool],
read_consistency_level=self.cassandra_rcl,
write_consistency_level=self.cassandra_wcl
)
self.startup_timer.intermediate("cassandra")
################# POSTGRES
event.listens_for(engine.Engine, 'before_cursor_execute')(
self.stats.pg_before_cursor_execute)
event.listens_for(engine.Engine, 'after_cursor_execute')(
self.stats.pg_after_cursor_execute)
self.dbm = self.load_db_params()
self.startup_timer.intermediate("postgres")
################# CHAINS
# initialize caches. Any cache-chains built here must be added
# to cache_chains (closed around by reset_caches) so that they
# can properly reset their local components
cache_chains = {}
localcache_cls = (SelfEmptyingCache if self.running_as_script
else LocalCache)
if stalecaches:
self.cache = StaleCacheChain(
localcache_cls(),
stalecaches,
self.memcache,
)
else:
self.cache = MemcacheChain((localcache_cls(), self.memcache))
cache_chains.update(cache=self.cache)
if stalecaches:
self.memoizecache = StaleCacheChain(
localcache_cls(),
stalecaches,
memoizecaches,
)
else:
self.memoizecache = MemcacheChain(
(localcache_cls(), memoizecaches))
cache_chains.update(memoizecache=self.memoizecache)
if stalecaches:
self.srmembercache = StaleCacheChain(
localcache_cls(),
stalecaches,
srmembercaches,
)
else:
self.srmembercache = MemcacheChain(
(localcache_cls(), srmembercaches))
cache_chains.update(srmembercache=self.srmembercache)
self.ratelimitcache = MemcacheChain(
(localcache_cls(), ratelimitcaches))
cache_chains.update(ratelimitcaches=self.ratelimitcache)
self.rendercache = MemcacheChain((
localcache_cls(),
rendercaches,
))
cache_chains.update(rendercache=self.rendercache)
self.pagecache = MemcacheChain((
localcache_cls(),
pagecaches,
))
cache_chains.update(pagecache=self.pagecache)
# the thing_cache is used in tdb_cassandra.
self.thing_cache = CacheChain((localcache_cls(),))
cache_chains.update(thing_cache=self.thing_cache)
self.permacache = CassandraCacheChain(
localcache_cls(),
permacache_cf,
memcache=permacache_memcaches,
lock_factory=self.make_lock,
)
cache_chains.update(permacache=self.permacache)
# hardcache is used for various things that tend to expire
# TODO: replace hardcache w/ cassandra stuff
self.hardcache = HardcacheChain(
(localcache_cls(), self.memcache, HardCache(self)),
cache_negative_results=True,
)
cache_chains.update(hardcache=self.hardcache)
# I know this sucks, but we need non-request-threads to be
# able to reset the caches, so we need them be able to close
# around 'cache_chains' without being able to call getattr on
# 'g'
def reset_caches():
for name, chain in cache_chains.iteritems():
chain.reset()
chain.stats = CacheStats(self.stats, name)
self.cache_chains = cache_chains
self.reset_caches = reset_caches
self.reset_caches()
self.startup_timer.intermediate("cache_chains")
# try to set the source control revision numbers
self.versions = {}
r2_root = os.path.dirname(os.path.dirname(self.paths["root"]))
r2_gitdir = os.path.join(r2_root, ".git")
self.short_version = self.record_repo_version("r2", r2_gitdir)
if I18N_PATH:
i18n_git_path = os.path.join(os.path.dirname(I18N_PATH), ".git")
self.record_repo_version("i18n", i18n_git_path)
self.startup_timer.intermediate("revisions")

    def setup_complete(self):
        self.startup_timer.stop()
        self.stats.flush()

        if self.log_start:
            self.log.error(
                "%s:%s started %s at %s (took %.02fs)",
                self.reddit_host,
                self.reddit_pid,
                self.short_version,
                datetime.now().strftime("%H:%M:%S"),
                self.startup_timer.elapsed_seconds()
            )

    def record_repo_version(self, repo_name, git_dir):
        """Get the currently checked out git revision for a given repository,
        record it in g.versions, and return the short version of the hash."""
        try:
            subprocess.check_output
        except AttributeError:
            # python 2.6 compat
            pass
        else:
            try:
                revision = subprocess.check_output(["git",
                                                    "--git-dir", git_dir,
                                                    "rev-parse", "HEAD"])
            except subprocess.CalledProcessError, e:
                self.log.warning("Unable to fetch git revision: %r", e)
            else:
                self.versions[repo_name] = revision.rstrip()
                return revision[:7]

        return "(unknown)"

    def load_db_params(self):
        self.databases = tuple(ConfigValue.to_iter(self.config.raw_data['databases']))
        self.db_params = {}
        if not self.databases:
            return

        dbm = db_manager.db_manager()
        db_param_names = ('name', 'db_host', 'db_user', 'db_pass', 'db_port',
                          'pool_size', 'max_overflow')
        for db_name in self.databases:
            conf_params = ConfigValue.to_iter(self.config.raw_data[db_name + '_db'])
            params = dict(zip(db_param_names, conf_params))
            if params['db_user'] == "*":
                params['db_user'] = self.db_user
            if params['db_pass'] == "*":
                params['db_pass'] = self.db_pass
            if params['db_port'] == "*":
                params['db_port'] = self.db_port
            if params['pool_size'] == "*":
                params['pool_size'] = self.db_pool_size
            if params['max_overflow'] == "*":
                params['max_overflow'] = self.db_pool_overflow_size
            dbm.setup_db(db_name, g_override=self, **params)
            self.db_params[db_name] = params

        dbm.type_db = dbm.get_engine(self.config.raw_data['type_db'])
        dbm.relation_type_db = dbm.get_engine(self.config.raw_data['rel_type_db'])

        def split_flags(raw_params):
            params = []
            flags = {}

            for param in raw_params:
                if not param.startswith("!"):
                    params.append(param)
                else:
                    key, sep, value = param[1:].partition("=")
                    if sep:
                        flags[key] = value
                    else:
                        flags[key] = True

            return params, flags

        prefix = 'db_table_'
        self.predefined_type_ids = {}
        for k, v in self.config.raw_data.iteritems():
            if not k.startswith(prefix):
                continue

            params, table_flags = split_flags(ConfigValue.to_iter(v))
            name = k[len(prefix):]
            kind = params[0]
            server_list = self.config.raw_data["db_servers_" + name]
            engines, flags = split_flags(ConfigValue.to_iter(server_list))

            typeid = table_flags.get("typeid")
            if typeid:
                self.predefined_type_ids[name] = int(typeid)

            if kind == 'thing':
                dbm.add_thing(name, dbm.get_engines(engines),
                              **flags)
            elif kind == 'relation':
                dbm.add_relation(name, params[1], params[2],
                                 dbm.get_engines(engines),
                                 **flags)
        return dbm
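
    # Illustrative sketch of the INI layout load_db_params expects (the values
    # here are hypothetical, not from any shipped config): a key such as
    #   db_table_link = thing, !typeid=2
    # registers a thing table via dbm.add_thing("link", ...), while
    #   db_table_vote_account_link = relation, account, link
    # registers a relation via dbm.add_relation. A matching db_servers_<name>
    # key lists the engines for that table, and "!"-prefixed entries on it are
    # parsed by split_flags into keyword flags; "!typeid=N" on the db_table_*
    # line records a predefined type id.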

    def __del__(self):
        """
        Put any cleanup code to be run when the application finally exits
        here.
        """
        pass