#!/usr/bin/env bash
#
# Wrapper for soil/web.py.
#
# Usage:
#   soil/web.sh <function name>
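#
# Example (any function below can be invoked this way; the run ID here is
# hypothetical):
#
#   soil/web.sh rewrite-jobs-index github- 1234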

set -o nounset
set -o pipefail
set -o errexit

shopt -s nullglob  # for list-json

REPO_ROOT=$(cd $(dirname $0)/.. && pwd)
readonly REPO_ROOT

source $REPO_ROOT/soil/common.sh

# Number of jobs to show and keep. Each git commit produces more than 15 jobs
# (e.g. soil/worker.sh JOB-dummy is one of them), so 4000 jobs covers roughly
# the last 250 commits.
readonly NUM_JOBS=4000

soil-web() {
  PYTHONPATH=$REPO_ROOT $REPO_ROOT/soil/web.py "$@"
}

# Bug fix for another race: 'ls $dir/*/*.json' is racy, because the shell
# expands names of files that may no longer exist by the time 'ls' runs, and
# then 'ls' fails! Looping over the glob ourselves avoids running 'ls' on
# stale names.
list-json() {
  local dir=$1  # e.g. travis-ci.oilshell.org/github-jobs

  for name in $dir/*/*.json; do
    echo $name
  done
}

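# Usage sketch, with the example path from the comment above:
#
#   list-json travis-ci.oilshell.org/github-jobs | tail -n 5
#
# With nullglob set, an empty dir produces no output rather than a literal
# glob string.
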
rewrite-jobs-index() {
  ### Atomic update of travis-ci.oilshell.org/jobs/
  local prefix=$1
  local run_id=$2  # pass GITHUB_RUN_NUMBER or git-$hash

  local dir=$SOIL_HOST_DIR/${prefix}jobs

  log "soil-web: Rewriting ${prefix}jobs/index.html"

  # Fix for bug #1169: don't create the temp file on a different file system,
  # which /tmp may be.
  #
  # When the source and target are on different file systems, I believe 'mv'
  # falls back to 'cp', which has this race condition:
  #
  # https://unix.stackexchange.com/questions/116280/cannot-create-regular-file-filename-file-exists

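  # A sketch of the safe pattern used below: write the temp file in the SAME
  # directory as the target, so the final 'mv' is a rename(2), which is atomic
  # within one file system ('generate-html' is a stand-in for the real
  # pipeline):
  #
  #   generate-html > $dir/$$.index.html  # $$ makes the name unique per process
  #   mv $dir/$$.index.html $dir/index.html
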
  # Limit to the last NUM_JOBS jobs. The glob is in alphabetical order, and
  # jobs look like 2020-03-20__..., so alphabetical order is also
  # chronological order.

  local index_tmp=$dir/$$.index.html  # index of every job in every run
  local run_index_tmp=$dir/$$.runs.html  # only the jobs in this run/commit

  list-json $dir \
    | tail -n -$NUM_JOBS \
    | soil-web ${prefix}index $index_tmp $run_index_tmp $run_id

  echo "rewrite index status = ${PIPESTATUS[@]}"

  mv -v $index_tmp $dir/index.html

  mkdir -v -p $dir/$run_id  # this could be a new commit hash, etc.
  mv -v $run_index_tmp $dir/$run_id/index.html
}

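# Example (hypothetical GitHub run number):
#
#   rewrite-jobs-index github- 3722
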
cleanup-jobs-index() {
  local prefix=$1
  local dry_run=${2:-true}

  local dir=$SOIL_HOST_DIR/${prefix}jobs

  # Pass it all the JSON, and it figures out what files to delete (TSV, etc.)
  case $dry_run in
    false)
      # Bug fix: There's a race here when 2 jobs complete at the same time.
      # Use rm -f to ignore failure if the file was already deleted.

      list-json $dir | soil-web cleanup $NUM_JOBS | xargs --no-run-if-empty -- rm -f -v
      ;;
    true)
      list-json $dir | soil-web cleanup $NUM_JOBS
      ;;
    *)
      log 'Expected true or false for dry_run'
  esac
}

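# Examples: preview what would be deleted, then actually delete:
#
#   cleanup-jobs-index github-        # dry run (the default)
#   cleanup-jobs-index github- false  # really delete
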
test-cleanup() {
  # the 999 jobs are the oldest

  soil-web cleanup 2 <<EOF
travis-ci.oilshell.org/github-jobs/999/one.json
travis-ci.oilshell.org/github-jobs/999/two.json
travis-ci.oilshell.org/github-jobs/999/three.json
travis-ci.oilshell.org/github-jobs/1000/one.json
travis-ci.oilshell.org/github-jobs/1000/two.json
travis-ci.oilshell.org/github-jobs/1001/one.json
travis-ci.oilshell.org/github-jobs/1001/two.json
travis-ci.oilshell.org/github-jobs/1001/three.json
EOF
}

cleanup-status-api() {
  ### Clean up the files used for maybe-merge

  local dry_run=${1:-true}

  local dir=$SOIL_HOST_DIR/status-api/github

  pushd $dir
  case $dry_run in
    false)
      # delete all but the last 30
      ls | head -n -30 | xargs --no-run-if-empty -- rm -r -f -v
      ;;
    true)
      ls | head -n -30
      ;;
    *)
      log 'Expected true or false for dry_run'
  esac
  popd
}

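# Note on the idiom above: 'head -n -30' prints every line EXCEPT the last
# 30, so everything but the 30 newest entries (in 'ls' sort order) is
# selected for deletion.
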
event-job-done() {
  ### "Server side" handler

  local prefix=$1  # 'github-' or 'srht-'
  local run_id=$2  # $GITHUB_RUN_NUMBER or git-$hash

  rewrite-jobs-index $prefix $run_id

  # note: we could speed jobs up by doing this separately?
  cleanup-jobs-index $prefix false
}

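# Example invocation, e.g. after a GitHub run finishes (run number is
# hypothetical):
#
#   soil/web.sh event-job-done github- 3722
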
#
# Dev Tools
#

sync-testdata() {
  local dest=_tmp/github-jobs/

  rsync --archive --verbose \
    $SOIL_USER@$SOIL_HOST:$SOIL_HOST/github-jobs/ $dest

  # 2023-04: 3.2 GB of files! Probably can reduce this

  du --si -s $dest
}

copy-web() {
  ### For relative URLs to work

  cp -r -v web/ _tmp/
}

local-test() {
  ### Use the sync'd testdata
  local dir=${1:-_tmp/github-jobs}

  local index=$dir/index.html

  local run_id=3722
  local run_index=$dir/$run_id/index.html

  list-json $dir | soil-web github-index $index $run_index $run_id

  echo "Wrote $index and $run_index"
}

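# A typical local workflow (sketch): sync-testdata, then copy-web, then
# local-test, and open _tmp/github-jobs/index.html in a browser.
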
hello() {
  echo "hi from $0"
  echo
  whoami
  hostname
}
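
# Dispatch: run the function named by the first argument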
"$@"