forked from nprapps/anno-docs
-
Notifications
You must be signed in to change notification settings - Fork 0
/
app_config.py
executable file
·321 lines (267 loc) · 9.02 KB
/
app_config.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
#!/usr/bin/env python
# _*_ coding:utf-8 _*_
"""
Project-wide application configuration.
DO NOT STORE SECRETS, PASSWORDS, ETC. IN THIS FILE.
They will be exposed to users. Use environment variables instead.
See get_secrets() below for a fast way to access them.
"""
import logging
import os
from authomatic.providers import oauth2
from authomatic import Authomatic
"""
NAMES
"""
# Project name to be used in urls
# Use dashes, not underscores!
PROJECT_SLUG = 'debates'
# Project name to be used in file paths
PROJECT_FILENAME = 'debates'
# The name of the repository containing the source
REPOSITORY_NAME = 'debates'
GITHUB_USERNAME = 'wbez'
REPOSITORY_URL = 'git@github.com:%s/%s.git' % (
GITHUB_USERNAME, REPOSITORY_NAME)
REPOSITORY_ALT_URL = None # 'git@bitbucket.org:nprapps/%s.git' % REPOSITORY_NAME'
# Project name used for assets rig
# Should stay the same, even if PROJECT_SLUG changes
ASSETS_SLUG = 'debates'
# DEPLOY SETUP CONFIG
DEBATE_DIRECTORY_PREFIX = 'factchecks/'
CURRENT_DEBATE = 'wbez-debates'
SEAMUS_ID = '510629447' # SEAMUS PAGE ID FOR DEEP LINKING
try:
from local_settings import CURRENT_DEBATE
# Override SEAMUS_ID to generate the sharing list accordingly
from local_settings import SEAMUS_ID
except ImportError:
pass
"""
DEPLOYMENT
"""
PRODUCTION_S3_BUCKET = 'wbez-debates'
STAGING_S3_BUCKET = 'wbez-debates-stage'
ASSETS_S3_BUCKET = 'wbez-assets'
ARCHIVE_S3_BUCKET = 'wbez-assets'
DEFAULT_MAX_AGE = 20
RELOAD_TRIGGER = False
RELOAD_CHECK_INTERVAL = 60
PRODUCTION_SERVERS = ['54.211.94.130']
STAGING_SERVERS = ['54.167.46.85']
# Should code be deployed to the web/cron servers?
DEPLOY_TO_SERVERS = False
try:
# Override whether we should deploy to a cutom webserver
from local_settings import DEPLOY_TO_SERVERS
except ImportError:
pass
DEPLOY_STATIC_FACTCHECK = True
try:
# Override whether we are going to deploy a static factcheck
# from our local environment. Useful for non-live factchecks
from local_settings import DEPLOY_STATIC_FACTCHECK
except ImportError:
pass
SERVER_USER = 'ubuntu'
SERVER_PYTHON = 'python2.7'
SERVER_PROJECT_PATH = '/home/%s/apps/%s' % (SERVER_USER, PROJECT_FILENAME)
SERVER_REPOSITORY_PATH = '%s/repository' % SERVER_PROJECT_PATH
SERVER_VIRTUALENV_PATH = '%s/virtualenv' % SERVER_PROJECT_PATH
# Should the crontab file be installed on the servers?
# If True, DEPLOY_TO_SERVERS must also be True
DEPLOY_CRONTAB = False
# Should the service configurations be installed on the servers?
# If True, DEPLOY_TO_SERVERS must also be True
DEPLOY_SERVICES = False
UWSGI_SOCKET_PATH = '/tmp/%s.uwsgi.sock' % PROJECT_FILENAME
# Services are the server-side services we want to enable and configure.
# A three-tuple following this format:
# (service name, service deployment path, service config file extension)
SERVER_SERVICES = [
('deploy', '/etc/init', 'conf'),
]
# These variables will be set at runtime. See configure_targets() below
S3_BUCKET = None
S3_BASE_URL = None
S3_DEPLOY_URL = None
SERVERS = []
SERVER_BASE_URL = None
SERVER_LOG_PATH = None
DEBUG = True
LOG_LEVEL = None
"""
TEST AUTOINIT LOADER
"""
AUTOINIT_LOADER = False
"""
COPY EDITING
"""
COPY_GOOGLE_DOC_KEY = '1weq2hlKqAuo3sZZTAylbcF3WAN6jOntfIVk6hnEPyE0'
COPY_PATH = 'data/copy.xlsx'
TRANSCRIPT_HTML_PATH = 'data/transcript.html'
LOAD_COPY_INTERVAL = 10
"""
GOOGLE APPS SCRIPTS
"""
# PARENT_FOLDER_ID = '0B6C-jdxmvrJoM3JnZ1ZZUkhVQTg'
GAS_LOG_KEY = '1tUxTFa2J5IKIlOMLop9IA9eaZ6uDDhgh6KwxeLdgQGU' # Google app script logs spreadsheet key
TRANSCRIPT_GDOC_KEY = '1n395I0Qo7WsACZv1b918FZGrvLWmhQ9DwjpZ3rqFbRM' # Google app script google doc key
SCRIPT_PROJECT_NAME = 'factcheck_scripts' # Google app scripts project name
CSPAN = False
"""
SHARING
"""
SHARE_URL = 'http://%s/%s/' % (PRODUCTION_S3_BUCKET, PROJECT_SLUG)
"""
SERVICES
"""
NPR_GOOGLE_ANALYTICS = {
'ACCOUNT_ID': 'UA-369047-40',
'DOMAIN': 'wbez.org',
'TOPICS': '' # e.g. '[1014,3,1003,1002,1001]'
}
VIZ_GOOGLE_ANALYTICS = {
'ACCOUNT_ID': 'UA-369047-40'
}
"""
OAUTH
"""
GOOGLE_OAUTH_CREDENTIALS_PATH = '~/.google_oauth_credentials'
authomatic_config = {
'google': {
'id': 1,
'class_': oauth2.Google,
'consumer_key': os.environ.get('GOOGLE_OAUTH_CLIENT_ID'),
'consumer_secret': os.environ.get('GOOGLE_OAUTH_CONSUMER_SECRET'),
'scope': ['https://www.googleapis.com/auth/drive',
'https://www.googleapis.com/auth/userinfo.email',
'https://www.googleapis.com/auth/drive.scripts',
'https://www.googleapis.com/auth/documents',
'https://www.googleapis.com/auth/script.external_request',
'https://www.googleapis.com/auth/script.scriptapp',
'https://www.googleapis.com/auth/script.send_mail',
'https://www.googleapis.com/auth/script.storage',
'https://www.googleapis.com/auth/spreadsheets'],
'offline': True,
},
}
authomatic = Authomatic(authomatic_config, os.environ.get('AUTHOMATIC_SALT'))
"""
Logging
"""
LOG_FORMAT = '%(levelname)s:%(name)s:%(asctime)s: %(message)s'
"""
Utilities
"""
def get_secrets(project=None):
    """
    Return secrets for this project pulled from the environment.

    Secrets are expected to be stored as environment variables named
    '<project>_<KEY>'; the '<project>_' prefix is stripped from the
    keys of the returned dict.

    The original implementation matched any variable merely *starting
    with* the slug and then blindly stripped ``len(slug) + 1``
    characters, mangling keys that lacked the underscore separator
    (e.g. 'debatesX' became ''). We now require the explicit
    '<project>_' prefix.

    Args:
        project: prefix to match; defaults to PROJECT_SLUG.

    Returns:
        dict mapping stripped variable names to their string values.
    """
    prefix = '%s_' % (project or PROJECT_SLUG)
    return {
        k[len(prefix):]: v
        for k, v in os.environ.items()
        if k.startswith(prefix)
    }
def configure_targets(deployment_target):
    """
    Configure deployment targets. Abstracted so this can be
    overriden for rendering before deployment.

    deployment_target: 'production', 'staging', or anything else
    (treated as local development). Sets the module-level globals
    declared below as a side effect; returns None.
    """
    global S3_BUCKET
    global S3_BASE_URL
    global S3_DEPLOY_URL
    global SERVERS
    global SERVER_BASE_URL
    global SERVER_LOG_PATH
    global DEBUG
    global DEPLOYMENT_TARGET
    global LOG_LEVEL
    global ASSETS_MAX_AGE
    global TRANSCRIPT_GDOC_KEY
    global GAS_LOG_KEY
    global CSPAN
    if deployment_target == 'production':
        S3_BUCKET = PRODUCTION_S3_BUCKET
        S3_BASE_URL = 'https://s3.amazonaws.com/%s/%s%s' % (S3_BUCKET,
                                                            DEBATE_DIRECTORY_PREFIX,
                                                            CURRENT_DEBATE)
        S3_DEPLOY_URL = 's3://%s/%s' % (S3_BUCKET, PROJECT_SLUG)
        SERVERS = PRODUCTION_SERVERS
        SERVER_BASE_URL = '//%s/%s' % (SERVERS[0], PROJECT_SLUG)
        SERVER_LOG_PATH = '/var/log/%s' % PROJECT_FILENAME
        LOG_LEVEL = logging.INFO
        DEBUG = False
        # Long cache lifetime (1 day) for production assets
        ASSETS_MAX_AGE = 86400
        # PRODUCTION DOCUMENT
        TRANSCRIPT_GDOC_KEY = '1LZuK4-BnpQTLu6FdT52Tl6NpXUyAWd7iW50CL3__-wo'
        # PRODUCTION LOGS
        GAS_LOG_KEY = '1tUxTFa2J5IKIlOMLop9IA9eaZ6uDDhgh6KwxeLdgQGU'
    elif deployment_target == 'staging':
        S3_BUCKET = STAGING_S3_BUCKET
        S3_BASE_URL = 'https://s3.amazonaws.com/%s/%s%s' % (S3_BUCKET,
                                                            DEBATE_DIRECTORY_PREFIX,
                                                            CURRENT_DEBATE)
        S3_DEPLOY_URL = 's3://%s/%s' % (S3_BUCKET, PROJECT_SLUG)
        SERVERS = STAGING_SERVERS
        SERVER_BASE_URL = '//%s/%s' % (SERVERS[0], PROJECT_SLUG)
        SERVER_LOG_PATH = '/var/log/%s' % PROJECT_FILENAME
        LOG_LEVEL = logging.INFO
        DEBUG = True
        ASSETS_MAX_AGE = 20
        # STAGING DOCUMENT
        TRANSCRIPT_GDOC_KEY = '1n395I0Qo7WsACZv1b918FZGrvLWmhQ9DwjpZ3rqFbRM'
        # STAGING LOGS
        GAS_LOG_KEY = '1vpRgWpqGqW1p3yMv6nCixAjczc8cJr_TlMCTg52Ch9I'
    else:
        # Local development: serve everything from localhost
        S3_BUCKET = None
        S3_BASE_URL = '//127.0.0.1:8000'
        S3_DEPLOY_URL = None
        SERVERS = []
        SERVER_BASE_URL = 'http://127.0.0.1:8001/%s' % PROJECT_SLUG
        SERVER_LOG_PATH = '/tmp'
        LOG_LEVEL = logging.INFO
        DEBUG = True
        ASSETS_MAX_AGE = 20
        # DEVELOPMENT DOCUMENT
        TRANSCRIPT_GDOC_KEY = '1n395I0Qo7WsACZv1b918FZGrvLWmhQ9DwjpZ3rqFbRM'
        # DEVELOPMENT LOGS
        GAS_LOG_KEY = '1I7IUCUJHIWLW3c_E-ukfqIp4QxuvUoHqbEQIlKQFC7w'
    # NOTE: because these names are declared `global` above, the
    # function-level imports below rebind the module-level globals,
    # letting an optional local_settings module override the targets.
    # Override S3_BASE_URL to use another port locally for fab app
    try:
        from local_settings import S3_BASE_URL
    except ImportError:
        pass
    # Override TRANSCRIPT_GDOC_KEY to point to a different document
    try:
        from local_settings import TRANSCRIPT_GDOC_KEY
    except ImportError:
        pass
    # Override GAS_LOG_KEY to point to a different google app script log
    try:
        from local_settings import GAS_LOG_KEY
    except ImportError:
        pass
    # Override CSPAN if we need to
    try:
        from local_settings import CSPAN
    except ImportError:
        pass
    # If we are deploying a non live fact check:
    if DEPLOY_STATIC_FACTCHECK:
        # Override TRANSCRIPT_GDOC_KEY to point ALL environments to google doc
        try:
            from local_settings import TRANSCRIPT_GDOC_KEY
        except ImportError:
            pass
        # Override GAS_LOG_KEY to point to a different google app script log
        try:
            from local_settings import GAS_LOG_KEY
        except ImportError:
            pass
    # Record which target is active for the rest of the app
    DEPLOYMENT_TARGET = deployment_target
"""
Run automated configuration
"""
DEPLOYMENT_TARGET = os.environ.get('DEPLOYMENT_TARGET', None)
configure_targets(DEPLOYMENT_TARGET)