Added Luau Benchmark Workflows (#530)
Commit da01056 (1 parent: c30ab06)
2 changed files: 211 additions, 0 deletions.
Luau Benchmarks workflow (new file, +109 lines):
name: Luau Benchmarks

on:
  push:
    branches:
      - master

    paths-ignore:
      - "docs/**"
      - "papers/**"
      - "rfcs/**"
      - "*.md"
      - "prototyping/**"

jobs:
  benchmarks-run:
    name: Run ${{ matrix.bench.title }}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest]
        bench:
          - {
              script: "run-benchmarks",
              timeout: 12,
              title: "Luau Benchmarks",
              cachegrindTitle: "Performance",
              cachegrindIterCount: 20,
            }
        benchResultsRepo:
          - { name: "luau-lang/benchmark-data", branch: "main" }

    runs-on: ${{ matrix.os }}
    steps:
      - name: Checkout Luau
        uses: actions/checkout@v3

      - name: Build Luau
        run: make config=release luau luau-analyze

      - uses: actions/setup-python@v3
        with:
          python-version: "3.9"
          architecture: "x64"

      - name: Install python dependencies
        run: |
          python -m pip install requests
          python -m pip install --user numpy scipy matplotlib ipython jupyter pandas sympy nose
      - name: Install valgrind
        run: |
          sudo apt-get install valgrind
      - name: Run benchmark
        run: |
          python bench/bench.py | tee ${{ matrix.bench.script }}-output.txt
      - name: Run ${{ matrix.bench.title }} (Cold Cachegrind)
        run: sudo bash ./scripts/run-with-cachegrind.sh python ./bench/bench.py "${{ matrix.bench.cachegrindTitle }}Cold" 1 | tee -a ${{ matrix.bench.script }}-output.txt

      - name: Run ${{ matrix.bench.title }} (Warm Cachegrind)
        run: sudo bash ./scripts/run-with-cachegrind.sh python ./bench/bench.py "${{ matrix.bench.cachegrindTitle }}" ${{ matrix.bench.cachegrindIterCount }} | tee -a ${{ matrix.bench.script }}-output.txt

      - name: Checkout Benchmark Results repository
        uses: actions/checkout@v3
        with:
          repository: ${{ matrix.benchResultsRepo.name }}
          ref: ${{ matrix.benchResultsRepo.branch }}
          token: ${{ secrets.BENCH_GITHUB_TOKEN }}
          path: "./gh-pages"

      - name: Store ${{ matrix.bench.title }} result
        uses: Roblox/rhysd-github-action-benchmark@v-luau
        with:
          name: ${{ matrix.bench.title }}
          tool: "benchmarkluau"
          output-file-path: ./${{ matrix.bench.script }}-output.txt
          external-data-json-path: ./gh-pages/dev/bench/data.json
          alert-threshold: 150%
          fail-threshold: 1000%
          fail-on-alert: false
          comment-on-alert: true
          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Store ${{ matrix.bench.title }} result
        uses: Roblox/rhysd-github-action-benchmark@v-luau
        with:
          name: ${{ matrix.bench.title }} (CacheGrind)
          tool: "roblox"
          output-file-path: ./${{ matrix.bench.script }}-output.txt
          external-data-json-path: ./gh-pages/dev/bench/data.json
          alert-threshold: 150%
          fail-threshold: 1000%
          fail-on-alert: false
          comment-on-alert: true
          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Push benchmark results
        run: |
          echo "Pushing benchmark results..."
          cd gh-pages
          git config user.name github-actions
          git config user.email github@users.noreply.github.com
          git add ./dev/bench/data.json
          git commit -m "Add benchmarks results for ${{ github.sha }}"
          git push
          cd ..
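For reference, the benchmark steps above can be approximated locally. This is a minimal sketch, not part of the commit, with the matrix values substituted by hand (script = run-benchmarks, cachegrindTitle = Performance, cachegrindIterCount = 20); it assumes an Ubuntu machine with Python available and is run from the repository root.

# Minimal local approximation of the workflow's build-and-benchmark steps
# (matrix values filled in by hand; assumes Ubuntu, run from the repo root).
make config=release luau luau-analyze
python -m pip install requests
sudo apt-get install valgrind

# Plain benchmark run, captured for the "benchmarkluau" tool.
python bench/bench.py | tee run-benchmarks-output.txt

# Cold (1 iteration) and warm (20 iterations) cachegrind passes, appended
# to the same output file for the "roblox" tool.
sudo bash ./scripts/run-with-cachegrind.sh python ./bench/bench.py "PerformanceCold" 1 | tee -a run-benchmarks-output.txt
sudo bash ./scripts/run-with-cachegrind.sh python ./bench/bench.py "Performance" 20 | tee -a run-benchmarks-output.txt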
scripts/run-with-cachegrind.sh (new file, +102 lines):
#!/bin/bash
set -euo pipefail
IFS=$'\n\t'

declare -A event_map
event_map[Ir]="TotalInstructionsExecuted,executions\n"
event_map[I1mr]="L1_InstrReadCacheMisses,misses/op\n"
event_map[ILmr]="LL_InstrReadCacheMisses,misses/op\n"
event_map[Dr]="TotalMemoryReads,reads\n"
event_map[D1mr]="L1_DataReadCacheMisses,misses/op\n"
event_map[DLmr]="LL_DataReadCacheMisses,misses/op\n"
event_map[Dw]="TotalMemoryWrites,writes\n"
event_map[D1mw]="L1_DataWriteCacheMisses,misses/op\n"
event_map[DLmw]="LL_DataWriteCacheMisses,misses/op\n"
event_map[Bc]="ConditionalBranchesExecuted,executions\n"
event_map[Bcm]="ConditionalBranchMispredictions,mispredictions/op\n"
event_map[Bi]="IndirectBranchesExecuted,executions\n"
event_map[Bim]="IndirectBranchMispredictions,mispredictions/op\n"

now_ms() {
    echo -n $(date +%s%N | cut -b1-13)
}

# Run cachegrind on a given benchmark and echo the results.
ITERATION_COUNT=$4
START_TIME=$(now_ms)

valgrind \
    --quiet \
    --tool=cachegrind \
    "$1" "$2" >/dev/null

TIME_ELAPSED=$(bc <<< "$(now_ms) - ${START_TIME}")

# Generate report using cg_annotate and extract the header and totals of the
# recorded events valgrind was configured to record.
CG_RESULTS=$(cg_annotate $(ls -t cachegrind.out.* | head -1))
CG_HEADERS=$(grep -B2 'PROGRAM TOTALS$' <<< "$CG_RESULTS" | head -1 | sed -E 's/\s+/\n/g' | sed '/^$/d')
CG_TOTALS=$(grep 'PROGRAM TOTALS$' <<< "$CG_RESULTS" | head -1 | grep -Po '[0-9,]+\s' | tr -d ', ')

TOTALS_ARRAY=($CG_TOTALS)
HEADERS_ARRAY=($CG_HEADERS)

declare -A header_map
for i in "${!TOTALS_ARRAY[@]}"; do
    header_map[${HEADERS_ARRAY[$i]}]=$i
done

# Map the results to the format that the benchmark script expects.
for i in "${!TOTALS_ARRAY[@]}"; do
    TOTAL=${TOTALS_ARRAY[$i]}

    # Labels and unit descriptions are packed together in the map.
    EVENT_TUPLE=${event_map[${HEADERS_ARRAY[$i]}]}
    IFS=$',' read -d '\n' -ra EVENT_VALUES < <(printf "%s" "$EVENT_TUPLE")
    EVENT_NAME="${EVENT_VALUES[0]}"
    UNIT="${EVENT_VALUES[1]}"

    case ${HEADERS_ARRAY[$i]} in
        I1mr | ILmr)
            REF=${TOTALS_ARRAY[header_map["Ir"]]}
            OPS_PER_SEC=$(bc -l <<< "$TOTAL / $REF")
            ;;

        D1mr | DLmr)
            REF=${TOTALS_ARRAY[header_map["Dr"]]}
            OPS_PER_SEC=$(bc -l <<< "$TOTAL / $REF")
            ;;

        D1mw | DLmw)
            REF=${TOTALS_ARRAY[header_map["Dw"]]}
            OPS_PER_SEC=$(bc -l <<< "$TOTAL / $REF")
            ;;

        Bcm)
            REF=${TOTALS_ARRAY[header_map["Bc"]]}
            OPS_PER_SEC=$(bc -l <<< "$TOTAL / $REF")
            ;;

        Bim)
            REF=${TOTALS_ARRAY[header_map["Bi"]]}
            OPS_PER_SEC=$(bc -l <<< "$TOTAL / $REF")
            ;;

        *)
            OPS_PER_SEC=$(bc -l <<< "$TOTAL")
            ;;
    esac

    STD_DEV="0%"
    RUNS="1"

    if [[ $OPS_PER_SEC =~ ^[+-]?[0-9]*$ ]]
    then # $OPS_PER_SEC is integer
        printf "%s#%s x %.0f %s ±%s (%d runs sampled)\n" \
            "$3" "$EVENT_NAME" "$OPS_PER_SEC" "$UNIT" "$STD_DEV" "$RUNS"
    else # $OPS_PER_SEC is float
        printf "%s#%s x %.10f %s ±%s (%d runs sampled)\n" \
            "$3" "$EVENT_NAME" "$OPS_PER_SEC" "$UNIT" "$STD_DEV" "$RUNS"
    fi

done
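The script takes four positional arguments: the program to profile ($1), its argument ($2), a title prefix ($3), and an iteration count ($4). It emits one line per event column reported by cg_annotate, in the format produced by the printf calls above. A hypothetical standalone invocation and the rough shape of its output, with counter values invented purely for illustration:

# Standalone invocation mirroring the workflow's warm cachegrind pass.
sudo bash ./scripts/run-with-cachegrind.sh python ./bench/bench.py "Performance" 20

# Example lines in the emitted format (numbers are made up):
#   Performance#TotalInstructionsExecuted x 123456789 executions ±0% (1 runs sampled)
#   Performance#L1_InstrReadCacheMisses x 0.0012345678 misses/op ±0% (1 runs sampled)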