52 lines (46 loc) · 2.09 KB
/
script-evaluation-test.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
---
# A nightly job which downloads script evaluation dumps from S3 and runs a regression test.
name: Script Evaluation Test
on:
  schedule:
    # Quoted so the `*` characters are unambiguously part of a plain string.
    - cron: '30 3 * * *' # 3:30am every day
  workflow_dispatch:
concurrency:
  group: script-evaluation-test
  # We only want at most one evaluation test running at a time
  cancel-in-progress: true
jobs:
  script-evaluation-test:
    runs-on: [self-hosted, default]
    steps:
      - name: Checkout
        uses: actions/checkout@v3.3.0
      - name: Quick Install Nix
        uses: nixbuild/nix-quick-install-action@v22
        with:
          # 2.14.1 seems to have issues, see https://github.com/nixbuild/nix-quick-install-action/issues/29
          nix_version: '2.13.3'
          nix_conf: |
            experimental-features = nix-command flakes
            accept-flake-config = true
      - name: Download and Unzip Dump Files
        # NOTE(review): `always()` makes this step run even if checkout failed,
        # which would then fail on the missing ./scripts path — confirm this is intended.
        if: always()
        # NOTE: the S3 location s3://plutus/mainnet-script-dump-1-35-4/ must match that in
        # plutus-apps/.github/script-evaluation-dump.yml
        run: |
          export LOCAL_DIR="$HOME/mainnet-script-dump-downloaded"
          nix develop --no-warn-dirty --accept-flake-config --command \
            bash ./scripts/s3-sync-unzip.sh s3://plutus/mainnet-script-dump-1-35-4/ \*.event.bz2
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: us-east-1
          AWS_ENDPOINT_URL: https://s3.devx.iog.io
      - name: Run
        # Run the test cases sequentially. This ensures we don't need to simultaneously store
        # multiple `ScriptEvaluationEvents`, which are large, in memory. Each test case
        # contains many script evaluation events, and those are run in parallel based on
        # the number of available processors.
        run: |
          export EVENT_DUMP_DIR="$HOME/mainnet-script-dump-downloaded"
          nix run --no-warn-dirty --accept-flake-config \
            .#x86_64-linux.plutus.library.plutus-project-925.hsPkgs.plutus-ledger-api.components.exes.evaluation-test -- --num-threads=1