forked from streamlit/streamlit
-
Notifications
You must be signed in to change notification settings - Fork 0
/
config.yml
672 lines (568 loc) · 20.5 KB
/
config.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
# Python CircleCI 2.1 configuration file
#
# Check https://circleci.com/docs/2.0/language-python/ for more details
#
version: 2.1
orbs:
  # Slack orb: used by jobs below to post failure notifications.
  slack: circleci/slack@3.4.2
commands:
  # Reusable command: sync git submodules to their remote HEAD.
  update-submodules:
    steps:
      - run:
          # `git submodule update --remote` checks out the submodule repo's
          # HEAD (as opposed to whatever commit is specified in the
          # submodule). We want to ensure we're always testing against
          # the most recent commit in our component-template repo.
          name: Update submodules
          command: |
            git submodule init
            git submodule update --remote
pre-cache:
steps:
- run: &get_make_checksum
name: Get 'make' checksum
command: |
echo 'export SUDO="sudo"' >> $BASH_ENV
cp -f /usr/bin/make make.bin
md5sum make.bin > ~/make.md5
- run: &get_dot_checksum
name: Get 'dot' checksum
command: |
if [ -f /usr/bin/dot ] ; then
cp -f /usr/bin/dot dot.bin
md5sum dot.bin > ~/dot.md5
else
touch dot.bin
md5sum dot.bin > ~/dot.md5
rm -f dot.bin
fi
- run: &create_python_cache_key
# Combine hashes of the Python interpreter, Pipfile, and today's
# date into a file whose checksum will key the Python virtualenv.
#
# This means that our virtualenv cache will expire each day. We do
# this because we are not using a lockfile to pin dependencies -
# instead, each time CircleCI rebuilds the virtualenv, it uses the
# latest compatible version of each dependency (which mirrors what
# happens when a user installs Streamlit locally). So we expire our
# virtualenv cache daily to prevent it from getting far out of sync
# with what a fresh Streamlit installation would look like.
name: Create Python environment cache key
command: |
md5sum $(which python) > ~/python_cache_key.md5
md5sum lib/Pipfile >> ~/python_cache_key.md5
md5sum lib/test-requirements.txt >> ~/python_cache_key.md5
date +%F >> ~/python_cache_key.md5
- run: &create_yarn_cache_key
name: Create Yarn cache key
command: |
md5sum frontend/yarn.lock > ~/yarn.lock.md5
restore-from-cache:
steps:
- restore_cache: &restore_virtualenv
name: Restore virtualenv from cache
keys:
- v13-python-venv-{{ checksum "~/python_cache_key.md5" }}
- restore_cache: &restore_nvm
name: Restore nvm and node_modules from cache
keys:
- v13-nvm_node_modules-{{ checksum "~/yarn.lock.md5" }}
- restore_cache: &restore_make
name: Restore make from cache
keys:
- v13_make.bin-{{ checksum "~/make.md5" }}
- restore_cache: &restore_dot
name: Restore dot from cache
keys:
- v13_dot.bin-{{ checksum "~/dot.md5" }}
pre-make:
steps:
- run: &install_make
name: Install make
command: |
if [ -s make.bin ] ; then
echo "make.bin exists; not installing"
else
echo "/usr/bin/make doesn't exist; installing"
apt update
apt-get install -y make
cp -f /usr/bin/make make.bin
fi
${SUDO} cp -f make.bin /usr/bin/make
- run: &install_dot
name: Install dot
command: |
if [ -s dot.bin ] ; then
echo "dot.bin exists and is non zero"
else
echo "/usr/bin/dot doesn't exist, installing"
${SUDO} apt update
${SUDO} apt-get install -y graphviz
cp -f /usr/bin/dot dot.bin
fi
${SUDO} cp -f dot.bin /usr/bin/dot
make-init:
steps:
- run: &install_nodejs
name: Install NVM, Node.js, and Yarn
command: |
if [ ! -d ~/.nvm ] ; then
# install nodejs via nvm
curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.35.2/install.sh | bash
source "$HOME/.nvm/nvm.sh"
nvm install --lts=fermium
# install yarn
npm install -g yarn
fi
if [ ! -d frontend/node_modules ] ; then
source "$HOME/.nvm/nvm.sh"
make react-init
fi
echo 'export NVM_DIR="$HOME/.nvm"' >> $BASH_ENV
echo 'source "$NVM_DIR/nvm.sh"' >> $BASH_ENV
- run:
name: Install pyodbc dependencies
command: |
${SUDO} apt-get install -y unixodbc-dev
- run:
name: Install graphviz dependencies
command: |
${SUDO} apt update
${SUDO} apt-get install -y libgvc6
- run: &activate_virtualenv
name: Create virtualenv
command: |
echo 'Checking for virtualenv'
if [ ! -d venv ] ; then
# The virtualenv was NOT restored from cache. Create a new one.
python -m venv venv
source venv/bin/activate
pip install --upgrade pip
make setup
make pipenv-install
deactivate
else
# The virtualenv WAS restored from cache. Don't create a new one.
echo 'Virtualenv already exists, not creating'
fi
# Add 'activate venv' to $BASH_ENV. This means that our venv will be active
# for the remainder of the job ($BASH_ENV is evaluated at each step).
echo 'source venv/bin/activate' >> $BASH_ENV
- run: &generate_protobufs
name: Generate protobufs
command: |
# install protobuf v3
${SUDO} apt update
${SUDO} apt-get install -y gnupg
echo "deb http://ppa.launchpad.net/maarten-fonville/protobuf/ubuntu trusty main" | ${SUDO} tee /etc/apt/sources.list.d/protobuf.list
${SUDO} apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 4DEA8909DC6A13A3
${SUDO} apt update
${SUDO} apt-get install -y protobuf-compiler
# Generate protobufs
make protobuf
workflows:
  # Main CI workflow: test jobs run on every commit; nightly-release only
  # runs on dev tags (branches ignored) after all test jobs pass.
  circleci:
    jobs:
      - python-min-version: # Oldest supported Python minor version.
          filters:
            tags:
              only: /^([0-9]+\.){3}dev[0-9]+/ # 0.56.1.dev20201129
      - python-max-version: # Latest supported Python minor version.
          filters:
            tags:
              only: /^([0-9]+\.){3}dev[0-9]+/
      - cypress: # Non flaky Cypress tests
          filters:
            tags:
              only: /^([0-9]+\.){3}dev[0-9]+/
      - pr-preview: # Make a preview branch for each PR
          filters:
            tags:
              only: /^([0-9]+\.){3}dev[0-9]+/
      - nightly-release:
          requires:
            - python-min-version
            - python-max-version
            - cypress
          filters:
            tags:
              only: /^([0-9]+\.){3}dev[0-9]+/
            branches:
              ignore: /.*/
      # Uncomment to get a button that runs flaky tests on CircleCI:
      # - cypress-flaky-approval:
      #     type: approval
      #     requires:
      #       - python-max-version
      # - cypress: # Flaky Cypress tests
      #     name: cypress-flaky
      #     flaky: true
      #     requires:
      #       - cypress-flaky-approval
  # Scheduled workflow: tags a nightly build off develop every night.
  create-nightly-tag:
    triggers:
      - schedule:
          # Run job at 10.30pm PST or 11.30pm PDT
          cron: "30 6 * * *"
          filters:
            branches:
              only:
                - develop
    jobs:
      - create-tag
jobs:
  # Primary test job; also the anchor (&job-template) that python-min-version
  # inherits from. Some steps self-skip by checking ${CIRCLE_JOB}.
  python-max-version: &job-template
    docker:
      - image: circleci/python:3.9.7
    working_directory: ~/repo
    steps:
      - checkout:
          name: Checkout Streamlit code
      - update-submodules
      #################################################################
      # Pre Cache Steps
      #################################################################
      - pre-cache
      #################################################################
      # Restore from cache
      #################################################################
      - restore-from-cache
      #################################################################
      # Pre Make commands
      #################################################################
      - pre-make
      - save_cache:
          name: Save make to cache
          key: v13_make.bin-{{ checksum "~/make.md5" }}
          paths:
            - make.bin
      - save_cache:
          name: Save dot to cache
          key: v13_dot.bin-{{ checksum "~/dot.md5" }}
          paths:
            - dot.bin
      #################################################################
      # Run 'make init'
      #################################################################
      - make-init
      - run: &make_develop
          name: Run make develop
          command: |
            make develop
      #################################################################
      # Run linters
      #################################################################
      - run:
          name: Run linters
          command: |
            make jslint
            make pylint
      - store_test_results:
          path: frontend/test-reports
          when: always
      #################################################################
      # Run mypy. (Only executed in one job.)
      #################################################################
      - run:
          name: Run mypy
          command: |
            if [ "${CIRCLE_JOB}" != "python-max-version" ] ; then
              echo "Mypy is only run in python-max-version job"
            else
              scripts/mypy --report
            fi
      - store_test_results:
          path: lib/test-reports
          when: always
      #################################################################
      # Run make pycoverage
      #################################################################
      - run:
          name: Run python tests
          command: |
            make pycoverage
      - store_test_results:
          path: lib/test-reports
          when: always
      #################################################################
      # Run integration tests
      #################################################################
      - run:
          name: Run integration tests
          command: |
            make integration-tests
      #################################################################
      # Run CLI smoke tests
      #################################################################
      - run:
          name: CLI smoke tests
          command: |
            make cli-smoke-tests
      #################################################################
      # Run frontend tests. (Only executed in one job.)
      #################################################################
      - run:
          name: Run frontend tests
          command: |
            if [ "${CIRCLE_JOB}" != "python-max-version" ] ; then
              echo "Frontend tests are only run in python-max-version job"
            else
              make jstest
            fi
      #################################################################
      # Save cache for python virtualenv and node_modules.
      #################################################################
      - save_cache:
          name: Save virtualenv to cache
          key: v13-python-venv-{{ checksum "~/python_cache_key.md5" }}
          paths:
            - venv
      - save_cache:
          name: Save nvm and node_modules to cache
          key: v13-nvm_node_modules-{{ checksum "~/yarn.lock.md5" }}
          paths:
            - ~/.nvm
            - ~/.cache
            - frontend/node_modules
      # Only notify Slack for tag (nightly) builds.
      - when:
          condition: <<pipeline.git.tag>>
          steps:
            - slack/status:
                fail_only: true
                failure_message: ":blobonfire: Nightly job failed on unit tests"
# The following inherits from python-max-version. In a few cases, steps are skipped
# based on the name of the current job (see, e.g., "Run frontend tests").
python-min-version:
<<: *job-template
docker:
- image: circleci/python:3.7.12
create-tag:
docker:
- image: circleci/python:3.9.7
working_directory: ~/repo
steps:
- checkout:
name: Checkout Streamlit code
- update-submodules
- pre-cache
- restore-from-cache
- pre-make
- make-init
- run:
<<: *make_develop
- run:
name: Create tag
# TODO move git commands into script
command: |
git config user.email "jonathan@streamlit.io"
git config user.name "CircleCI"
TAG="$(./scripts/pypi_nightly_create_tag.py)"
./scripts/update_version.py $TAG
./scripts/update_name.py streamlit-nightly
git add lib/setup.py
git add frontend/package.json
git add lib/streamlit/__init__.py
git add lib/streamlit/version.py
git commit -m "Update version and project name in files"
git tag -a $TAG -m "Streamlit nightly $TAG"
git push origin $TAG
- slack/status:
fail_only: true
failure_message: ":blobonfire: Nightly job failed to create a tag"
nightly-release:
docker:
- image: circleci/python:3.9.7
resource_class: large
working_directory: ~/repo
steps:
- checkout:
name: Checkout Streamlit code
- update-submodules
- pre-cache
- restore-from-cache
- pre-make
- make-init
- run:
name: verify git tag vs. version
command: |
cd lib
python setup.py verify
# Password added to circleci environment
# https://ui.circleci.com/settings/project/github/streamlit/streamlit/environment-variables
- run:
name: init .pypirc
command: |
cd lib
echo -e "[pypi]" >> ~/.pypirc
echo -e "username = streamlit" >> ~/.pypirc
echo -e "password = $PYPI_PASSWORD" >> ~/.pypirc
- run:
name: create packages
no_output_timeout: 2h
command: |
${SUDO} apt-get install rsync
make package
- run:
name: upload to pypi
command: |
make distribute
- slack/status:
fail_only: true
failure_message: ":blobonfire: Nightly job failed to release"
pr-preview:
docker:
- image: circleci/python:3.9.7
resource_class: large
working_directory: ~/repo
steps:
- add_ssh_keys:
name: Add the read/write Github deploy key
fingerprints:
- "98:67:1f:37:d6:4d:21:f3:46:5d:e2:c9:a3:58:52:39"
- checkout:
name: Checkout Streamlit code
- update-submodules
- pre-cache
- restore-from-cache
- pre-make
- make-init
- run:
name: Create wheel file
no_output_timeout: 2h
command: |
${SUDO} apt-get install rsync
BUILD_AS_FAST_AS_POSSIBLE=1 make package
- run:
name: Set PREVIEW_BRANCH envvar
command: |
echo "export PREVIEW_BRANCH=$(
if [[ -n "${CIRCLE_PR_NUMBER}" ]]
then
echo "pr-${CIRCLE_PR_NUMBER}"
elif [[ -n "${CIRCLE_BRANCH}" ]]
then
echo "${CIRCLE_BRANCH}-preview"
elif [[ -n "${CIRCLE_TAG}" ]]
then
echo "tag-${CIRCLE_TAG}"
else
echo "main-preview"
fi
)" >> $BASH_ENV
- run:
name: Upload wheel to S3
command: |
${SUDO} apt-get install -y awscli
aws configure set aws_access_key_id ${CORE_PREVIEWS_S3_KEY_ID}
aws configure set aws_secret_access_key ${CORE_PREVIEWS_S3_SECRET_KEY}
cd lib/dist
export WHEELFILE="$(ls -t *.whl | head -n 1)"
aws s3 cp ${WHEELFILE} s3://core-previews/${PREVIEW_BRANCH}/ --acl public-read
cd ../..
echo -e "https://core-previews.s3-us-west-2.amazonaws.com/${PREVIEW_BRANCH}/${WHEELFILE}" >> S3_URL
- run:
name: Setup preview repo
command: |
git config --global user.email "core+streamlitbot-github@streamlit.io"
git config --global user.name "CircleCI"
git clone git@github.com:streamlit/core-previews.git
cd core-previews
git branch -D ${PREVIEW_BRANCH} &>/dev/null || true
git checkout -b ${PREVIEW_BRANCH}
cat ../S3_URL >> requirements.txt
git add .
git commit -m "Prepare core preview: ${PREVIEW_BRANCH}"
git push -f origin ${PREVIEW_BRANCH}
- run:
name: Ready to deploy!
command: |
echo -e "https://share.streamlit.io/deploy?repository=streamlit/core-previews&branch=${PREVIEW_BRANCH}&mainModule=streamlit_app.py"
cypress:
docker:
- image: circleci/python:3.7.11
parallelism: 20
resource_class: xlarge
working_directory: ~/repo
parameters:
flaky:
description: "Run flaky tests"
default: false
type: boolean
steps:
- checkout:
name: Checkout Streamlit code
- update-submodules
- pre-cache
- restore-from-cache
- pre-make
- make-init
- run:
<<: *make_develop
- run:
name: Install Cypress dependencies
command: |
${SUDO} apt-get install -y xvfb libgtk2.0-0 libnotify-dev libgconf-2-4 libnss3 libxss1 libasound2 jq curl
- run:
name: Init config
command: |
mkdir ~/.streamlit
MAPBOX_TOKEN=$(curl -sS https://data.streamlit.io/tokens.json | jq -r '.["mapbox-localhost"]')
echo '[mapbox]' > ~/.streamlit/config.toml
echo 'token = "'$MAPBOX_TOKEN'"' >> ~/.streamlit/config.toml
- run:
name: Init credentials
command: |
echo '[general]' > ~/.streamlit/credentials.toml
echo 'email = "test@streamlit.io"' >> ~/.streamlit/credentials.toml
- when:
condition: << parameters.flaky >>
steps:
- run:
name: Cypress
# See comment below, in the "unless << parameters.flaky >>".
command: |
cd frontend
TEST=$(circleci tests glob ../e2e/specs/*.spec.js | circleci tests split --split-by=timings)
../scripts/run_e2e_tests.py -a -f $TEST
- unless:
condition: << parameters.flaky >>
steps:
- run:
name: Cypress
# Need to run from frontend directory, because
# cypress-circleci-reporter reports paths relative to there.
#
# `circleci tests split` splits our long-running Cypress e2e
# tests into N different parallel jobs, so that they finish
# much faster than if we run them in sequence. CircleCI makes
# sure each container that calls `circleci test split` receives
# its share of the test files.
command: |
cd frontend
TEST=$(circleci tests glob ../e2e/specs/*.spec.js | circleci tests split --split-by=timings)
../scripts/run_e2e_tests.py -a $TEST
- store_test_results: &store_results
path: frontend/test_results/cypress
when: always
- store_artifacts: &store_videos
path: frontend/cypress/videos
- store_artifacts: &store_snapshots
path: frontend/cypress/snapshots
- save_cache:
name: Save virtualenv to cache
key: v13-python-venv-{{ checksum "~/python_cache_key.md5" }}
paths:
- venv
- save_cache:
name: Save nvm and node_modules to cache
key: v13-nvm_node_modules-{{ checksum "~/yarn.lock.md5" }}
paths:
- ~/.nvm
- ~/.cache
- frontend/node_modules
- when:
condition: <<pipeline.git.tag>>
steps:
- slack/status:
fail_only: true
failure_message: ":blobonfire: Nightly job failed on E2E tests"