[toil] Porting to sr.ht
- Add SSH secret for sr.ht
- [.builds/other-tests] Add zip package
- [toil] Add missing file for services/toil-web.sh deploy. I forgot that
  some of these need to be on the server
- Add 'srht-index' action to toil_web.py
Andy Chu committed Nov 19, 2020
1 parent 9f33c12 commit c8a3194
Showing 7 changed files with 166 additions and 47 deletions.
7 changes: 6 additions & 1 deletion .builds/other-tests.yml
@@ -1,8 +1,13 @@
image: debian/buster
packages:
- zip
- python2-dev # for posix_.so, etc.
secrets:
- 2678474d-b22b-449f-a19a-16cb403c94cd
tasks:
- other-tests: |
    cd oil
    services/toil-worker.sh run-other-tests
- publish-html: |
    cd oil
    services/travis.sh publish-html
    services/sourcehut.sh publish-html-assuming-ssh-key
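
For context: the secrets entry above is the UUID of an SSH key uploaded through the sr.ht web UI, as the comments in services/sourcehut.sh below explain. A plausible one-time setup is sketched here; the ssh-copy-id step and the travis_admin target are assumptions inferred from the scp destination in services/travis.sh, not part of this commit.

    # Generate the key pair locally (function added in services/sourcehut.sh below)
    services/sourcehut.sh keygen        # writes rsa_srht and rsa_srht.pub
    # Install the public half on the results server (assumed account and host)
    ssh-copy-id -i rsa_srht.pub travis_admin@travis-ci.oilshell.org
    # Upload the private half (rsa_srht) as a secret in the sr.ht web UI;
    # the UUID it is assigned is what appears under 'secrets:' above.
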
9 changes: 6 additions & 3 deletions benchmarks/time-test.sh
@@ -183,12 +183,15 @@ test-rusage() {
#cat $out
}

# Compare vs. /usr/bin/time
# Compare vs. /usr/bin/time.
test-maxrss() {
/usr/bin/time --format '%x %U %M' -- seq 1
if command -v time; then # Ignore this on continuous build
command time --format '%x %U %M' -- seq 1
fi

# Showing a discrepancy. FIXED!
time-tool --tsv --rusage -- seq 1
time-tool -o _tmp/maxrss --tsv --rusage -- seq 1
cat _tmp/maxrss
}

all-passing() {
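
A note on the command time form in test-rusage/test-maxrss above: bare time is the shell keyword, which has no --format option, so prefixing it with command makes the shell run the external time(1) instead. With GNU time, the format string prints the exit status (%x), user CPU seconds (%U), and maximum resident set size in kilobytes (%M). A standalone example; the output shown is only illustrative:

    command time --format '%x %U %M' -- seq 1    # prints something like: 0 0.00 2136
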
Empty file added services/__init__.py
Empty file.
79 changes: 79 additions & 0 deletions services/sourcehut.sh
@@ -0,0 +1,79 @@
#!/usr/bin/env bash
#
# Usage:
# ./sourcehut.sh <function name>

set -o nounset
set -o pipefail
set -o errexit

# Reuse some stuff
source services/travis.sh

# Relevant docs:
#
# https://man.sr.ht/tutorials/getting-started-with-builds.md
# https://man.sr.ht/builds.sr.ht/#secrets
#
# Basically, it supports up to 4 files called .builds/*.yml.
# And we need to upload an SSH key as secret via the web UI.

keygen() {
ssh-keygen -t rsa -b 4096 -C "andyc sr.ht" -f rsa_srht
}

#
# Run remotely
#

deploy-job-results() {
local job_id="$(date +%Y-%m-%d__%H-%M-%S)"

make-job-wwz $job_id

# Written by toil-worker.sh
# TODO:
# - Don't export these, just pass to env_to_json
# - if it exists, publish _tmp/spec/*.stats.txt and publish it?
# - osh failures and total failures
export TASK_RUN_START_TIME=$(cat _tmp/toil/task-run-start-time.txt)
export TASK_DEPLOY_START_TIME=$(date +%s)

services/env_to_json.py \
JOB_ID \
JOB_URL \
> $job_id.json

# So we don't have to unzip it
cp _tmp/toil/INDEX.tsv $job_id.tsv

# Copy wwz, tsv, json
scp-results 'srht-' $job_id.*

log ''
log "http://travis-ci.oilshell.org/srht-jobs/"
log "http://travis-ci.oilshell.org/srht-jobs/$job_id.wwz/"
log ''
}


publish-html-assuming-ssh-key() {
if true; then
deploy-job-results
else
deploy-test-wwz # dummy data that doesn't depend on the build
fi

write-jobs-raw 'srht-'

remote-rewrite-jobs-index 'srht-'

# note: we could speed jobs up by doing this separately?
remote-cleanup-jobs-index 'srht-'

# toil-worker.sh recorded this for us
return $(cat _tmp/toil/exit-status.txt)
}


"$@"
15 changes: 9 additions & 6 deletions services/toil-web.sh
@@ -20,18 +20,19 @@ source $REPO_ROOT/services/common.sh
# toil-web.sh
# doctools/
# services/
#
#


toil-web() {
PYTHONPATH=$REPO_ROOT $REPO_ROOT/services/toil_web.py "$@"
}

index() { toil-web index "$@"; }
cleanup() { toil-web cleanup "$@"; }

rewrite-jobs-index() {
### Atomic update of travis-ci.oilshell.org/jobs/
local dir=${1:-~/travis-ci.oilshell.org/jobs/}
local prefix=$1

local dir=~/travis-ci.oilshell.org/${prefix}jobs/

log "toil-web: Rewriting jobs/index.html"

@@ -41,16 +42,18 @@ rewrite-jobs-index() {
# 2020-03-20__...

# suppress SIGPIPE failure
{ ls $dir/*.json || true; } | tail -n -100 | index > $tmp
{ ls $dir/*.json || true; } | tail -n -100 | toil-web ${prefix}index > $tmp
echo status=${PIPESTATUS[@]}

mv -v $tmp $dir/index.html
}

cleanup-jobs-index() {
local dir=${1:-~/travis-ci.oilshell.org/jobs/}
local prefix=$1
local dry_run=${2:-true}

local dir=~/travis-ci.oilshell.org/${prefix}jobs/

# Pass it all JSON, and then it figures out what files to delete (TSV, etc.)
case $dry_run in
false)
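
With the new prefix parameter, rewrite-jobs-index and cleanup-jobs-index no longer take a directory argument; the prefix selects ~/travis-ci.oilshell.org/${prefix}jobs/. Roughly what the sshq calls in travis.sh and sourcehut.sh end up running on the server:

    toil-web/services/toil-web.sh rewrite-jobs-index 'srht-'           # rebuilds srht-jobs/index.html
    toil-web/services/toil-web.sh cleanup-jobs-index 'travis-' false   # dry_run=false: actually deletes old travis-jobs/ results
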
39 changes: 26 additions & 13 deletions services/toil_web.py
@@ -193,18 +193,7 @@ def ParseJobs(stdin):
</tr>
'''


def main(argv):
action = argv[1]

if action == 'index':

# Bust cache (e.g. Safari iPad seems to cache aggressively and doesn't
# have Ctrl-F5)
html_head.Write(sys.stdout, 'Recent Jobs',
css_urls=['../web/base.css?cache=0', '../web/toil.css?cache=0'])

print('''
INDEX_TOP = '''
<body class="width50">
<p id="home-link">
<a href="/">travis-ci.oilshell.org</a>
@@ -226,8 +215,32 @@ def main(argv):
-->
</tr>
</thead>
''')
'''


def main(argv):
action = argv[1]

if action == 'srht-index':

# Bust cache (e.g. Safari iPad seems to cache aggressively and doesn't
# have Ctrl-F5)
html_head.Write(sys.stdout, 'Recent Jobs',
css_urls=['../web/base.css?cache=0', '../web/toil.css?cache=0'])

print(INDEX_TOP)

rows = list(ParseJobs(sys.stdin))
#print(json.dump(rows, indent=2))

elif action == 'travis-index':

# Bust cache (e.g. Safari iPad seems to cache aggressively and doesn't
# have Ctrl-F5)
html_head.Write(sys.stdout, 'Recent Jobs',
css_urls=['../web/base.css?cache=0', '../web/toil.css?cache=0'])

print(INDEX_TOP)
rows = list(ParseJobs(sys.stdin))

# Sort by descending build number
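
The new 'srht-index' and 'travis-index' actions match the toil-web ${prefix}index call in rewrite-jobs-index above, so the prefix picks both the jobs directory and the action name. A hand-run equivalent of that pipeline, with the output path simplified (the script actually writes to a temp file and mv's it into place):

    ls ~/travis-ci.oilshell.org/srht-jobs/*.json | tail -n -100 |
      PYTHONPATH=$REPO_ROOT $REPO_ROOT/services/toil_web.py srht-index > index.html
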
64 changes: 40 additions & 24 deletions services/travis.sh
@@ -111,10 +111,13 @@ home-page() {
<ul>
<li>
<a href="jobs/">Jobs</a>
<a href="srht-jobs/">sr.ht Jobs</a>
</li>
<li>
<a href="builds/">Builds</a>
<a href="travis-jobs/">Travis Jobs</a>
</li>
<li>
<a href="builds/">Builds</a> (not yet implemented)
</li>
</ul>
@@ -158,16 +161,18 @@ sshq() {
}

remote-rewrite-jobs-index() {
sshq toil-web/services/toil-web.sh rewrite-jobs-index
local prefix=$1
sshq toil-web/services/toil-web.sh rewrite-jobs-index "$prefix"
}

remote-cleanup-jobs-index() {
local prefix=$1
# clean it up for real!
sshq toil-web/services/toil-web.sh cleanup-jobs-index '' false
sshq toil-web/services/toil-web.sh cleanup-jobs-index "$prefix" false
}

init-server-html() {
ssh $USER@$HOST mkdir -v -p $HOST/{jobs,web,builds/src}
ssh $USER@$HOST mkdir -v -p $HOST/{travis-jobs,srht-jobs,web,builds/src}

home-page > _tmp/index.html

@@ -185,14 +190,17 @@ decrypt-key() {

scp-results() {
# could also use Travis known_hosts addon?
local prefix=$1 # srht- or ''
shift

scp -o StrictHostKeyChecking=no "$@" \
travis_admin@travis-ci.oilshell.org:travis-ci.oilshell.org/jobs/
"travis_admin@travis-ci.oilshell.org:travis-ci.oilshell.org/${prefix}jobs/"
}

list-remote-results() {
# could also use Travis known_hosts addon?
local prefix=$1
ssh -o StrictHostKeyChecking=no \
travis_admin@travis-ci.oilshell.org ls 'travis-ci.oilshell.org/jobs/'
travis_admin@travis-ci.oilshell.org ls "travis-ci.oilshell.org/${prefix}jobs/"
}

# Dummy that doesn't depend on results
@@ -212,7 +220,7 @@ EOF

zip $wwz env.txt index.html build/*.txt

scp-results $wwz
scp-results '' $wwz
}

format-wwz-index() {
@@ -325,7 +333,7 @@ deploy-job-results() {
cp _tmp/toil/INDEX.tsv $job_id.tsv

# Copy wwz, tsv, json
scp-results $job_id.*
scp-results '' $job_id.*

log ''
log "http://travis-ci.oilshell.org/jobs/"
@@ -379,9 +387,10 @@ EOF

write-jobs-raw() {
### Rewrite travis-ci.oilshell.org/jobs/raw.html
local prefix=$1

log "Listing remote .wwz"
list-remote-results > _tmp/listing.txt
list-remote-results "$prefix" > _tmp/listing.txt
ls -l _tmp/listing.txt

# Pass all .wwz files in reverse order.
@@ -393,33 +402,37 @@ write-jobs-raw() {

log "Copying raw.html"

scp-results _tmp/raw.html
scp-results "$prefix" _tmp/raw.html
}

publish-html() {
local privkey=/tmp/rsa_travis

decrypt-key $privkey
chmod 600 $privkey
eval "$(ssh-agent -s)"
ssh-add $privkey

publish-html-assuming-ssh-key() {
if true; then
deploy-job-results
else
deploy-test-wwz # dummy data that doesn't depend on the build
fi

write-jobs-raw
remote-rewrite-jobs-index
write-jobs-raw 'travis-'
remote-rewrite-jobs-index 'travis-'

# note: we could speed jobs up by doing this separately?
remote-cleanup-jobs-index
remote-cleanup-jobs-index 'travis-'

# toil-worker.sh recorded this for us
return $(cat _tmp/toil/exit-status.txt)
}

publish-html() {
local privkey=/tmp/rsa_travis

decrypt-key $privkey
chmod 600 $privkey
eval "$(ssh-agent -s)"
ssh-add $privkey

publish-html-assuming-ssh-key
}

#
# Maintenance
#
@@ -429,4 +442,7 @@ delete-caches() {
travis cache -d
}

"$@"
if test $(basename $0) = 'travis.sh'; then
"$@"
fi
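
The basename guard above replaces the bare "$@" dispatch because services/sourcehut.sh now does source services/travis.sh near its top; without the check, sourcing would execute travis.sh's trailing "$@" with sourcehut.sh's own arguments, before sourcehut.sh has even defined its functions. With the guard, each script dispatches only when invoked directly:

    services/travis.sh publish-html            # basename is travis.sh, so "$@" runs here
    services/sourcehut.sh deploy-job-results   # travis.sh is only sourced; sourcehut.sh's own "$@" dispatches
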
