#!/bin/sh
: ${TEST_BOARDS_AVAILABLE:="esp32-wroom-32 nrf52dk samr21-xpro"}
: ${TEST_BOARDS_LLVM_COMPILE:="iotlab-m3 native nrf52dk mulle nucleo-f401re samr21-xpro slstk3402a"}
export RIOT_CI_BUILD=1
export STATIC_TESTS=${STATIC_TESTS:-1}
export CFLAGS_DBG=""
export DLCACHE_DIR=${DLCACHE_DIR:-~/.dlcache}
export ENABLE_TEST_CACHE=${ENABLE_TEST_CACHE:-1}
# This is a workaround for a bug in CCACHE which interacts very badly with
# some features of RIOT and of murdock. The result is that ccache is
# ineffective (i.e. objects are never reused, resulting in an extreme cache
# miss rate) and murdock becomes slow.
#
# - CCACHE thinks that -gz by itself enables debugging, which is not true.
#   See https://github.com/ccache/ccache/issues/464
# - When debug info is included, CCACHE hashes the file paths, as these
#   influence the debug information (the name of compile units and/or their
#   "comp_dir" attribute).
# - RIOT does not set -fdebug-prefix-map. This is not that easy, as it may not
#   be supported by every toolchain (some are quite old).
# - Murdock builds PRs in a different directory each time.
#
# It is only the combination of these factors that causes this bug.
export OPTIONAL_CFLAGS_BLACKLIST="-gz"
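# A hypothetical alternative (sketch only, not used here): if every supported
# toolchain understood it, RIOT's build system could map the build directory
# to a fixed prefix, e.g.
#   CFLAGS += -fdebug-prefix-map=$(RIOTBASE)=/riot
# so debug paths (and thus ccache hashes) no longer depend on where the PR is
# built. Until then, dropping "-gz" via the blacklist above is sufficient.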
NIGHTLY=${NIGHTLY:-0}
RUN_TESTS=${RUN_TESTS:-${NIGHTLY}}
DWQ_ENV="-E BOARDS -E APPS -E NIGHTLY -E RUN_TESTS -E ENABLE_TEST_CACHE
-E TEST_HASH -E CI_PULL_LABELS"
check_label() {
local label="${1}"
[ -z "${CI_PULL_LABELS}" ] && return 1
echo "${CI_PULL_LABELS}" | grep -q "${label}"
return $?
}
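# Usage sketch (hypothetical label set): with CI_PULL_LABELS="CI: run tests",
#   check_label "CI: run tests"          # returns 0 (label present)
#   check_label "CI: skip compile test"  # returns 1 (label absent)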
[ "$RUN_TESTS" != "1" ] && {
check_label "CI: run tests" && RUN_TESTS=1
}
[ "$ENABLE_TEST_CACHE" = "1" ] && {
check_label "CI: disable test cache" && export ENABLE_TEST_CACHE=0
}
error() {
echo "$@"
exit 1
}
# true if "$2" starts with "$1", false otherwise
startswith() {
case "${2}" in
${1}*) true ;;
*) false ;;
esac
}
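# Usage sketch (hypothetical paths):
#   startswith "/tmp/build" "/tmp/build/app.elf"    # true
#   startswith "/tmp/build" "/home/user/app.elf"    # false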
# if MURDOCK_HOOK is set, this function will execute it and pass on all of its
# parameters. Should the hook script exit with a non-zero exit code, hook()
# makes this script exit with an error, too.
# hook() will be called from different locations of this script.
# currently, the only caller is "run_test", which calls "hook run_test_pre".
# More hooks will be added as needed.
hook() {
if [ -n "${MURDOCK_HOOK}" ]; then
echo "- executing hook $1"
"${MURDOCK_HOOK}" "$@" || {
error "$0: hook \"${MURDOCK_HOOK} $@\" failed!"
}
echo "- hook $1 finished"
fi
}
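# Example (assuming a hypothetical MURDOCK_HOOK=/path/to/hook.sh):
#   hook run_test_pre
# runs "/path/to/hook.sh run_test_pre" and aborts this script with an error
# if the hook exits non-zero.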
# true if word "$1" is in the list of words "$2", false otherwise
# uses grep -w, thus only alphanumerics and "_" count as word characters
# (so the word "def" matches within "abc-def")
is_in_list() {
[ $# -ne 2 ] && return 1
local needle="$1"
local haystack="$2"
echo "$haystack" | grep -q -w "$needle"
}
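# Usage sketch:
#   is_in_list "nrf52dk" "esp32-wroom-32 nrf52dk samr21-xpro"   # true
#   is_in_list "nrf52" "esp32-wroom-32 nrf52dk samr21-xpro"     # false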
# grep wrapper that doesn't return an error when nothing matches
_grep() {
grep "$@"
true
}
_greplist() {
if [ $# -eq 0 ]; then
echo cat
else
echo -n "_grep -E ($1"
shift
for i in $*; do
echo -n "|$i"
done
echo ")"
fi
}
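# Sketch of what _greplist produces: "$(_greplist foo bar)" expands to
# "_grep -E (foo|bar)", so e.g. "get_apps | $(_greplist foo bar)" keeps only
# lines matching foo or bar; with no arguments it expands to plain "cat"
# (pass everything through).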
# get list of all app directories
get_apps() {
make -f makefiles/app_dirs.inc.mk info-applications \
| $(_greplist $APPS) | sort
}
# take app dir as parameter, print all boards that are supported
# Only print for boards in $BOARDS.
get_supported_boards() {
local appdir=$1
local boards="$(make --no-print-directory -C$appdir info-boards-supported 2>/dev/null || echo broken)"
if [ "$boards" = broken ]; then
echo "makefile_broken"
return
fi
for board in $boards
do
echo $board
done | $(_greplist $BOARDS)
}
get_supported_toolchains() {
local appdir=$1
local board=$2
local toolchains="gnu"
if is_in_list "${board}" "${TEST_BOARDS_LLVM_COMPILE}"; then
toolchains="$(make -s --no-print-directory -C${appdir} BOARD=${board} \
info-toolchains-supported 2> /dev/null | grep -o -e "llvm" -e "gnu")"
fi
echo "${toolchains}"
}
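# Example (hypothetical board): for a board listed in TEST_BOARDS_LLVM_COMPILE
# this may print both "gnu" and "llvm"; for any other board it prints "gnu".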
# given an app dir as parameter, print "$appdir $board:$toolchain" for each
# supported board and toolchain. Only print for boards in $BOARDS.
# if extra args are given, they will be prepended to each output line.
get_app_board_toolchain_pairs() {
local appdir=$1
local boards="$(get_supported_boards $appdir)"
# collect extra arguments into prefix variable
shift
local prefix="$*"
if [ "$boards" = makefile_broken ]; then
echo "$appdir makefile_broken"
return
fi
for board in ${boards}
do
for toolchain in $(get_supported_toolchains $appdir $board)
do
echo $prefix $appdir $board:$toolchain
done
done | $(_greplist $BOARDS)
}
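# Example (hypothetical app/board): when called as
#   get_app_board_toolchain_pairs examples/hello-world ./.murdock compile
# this may print lines like
#   ./.murdock compile examples/hello-world samr21-xpro:gnu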
# use dwqc to create full "appdir board toolchain" compile job list
get_compile_jobs() {
check_label "CI: skip compile test" && return
get_apps | \
dwqc ${DWQ_ENV} -s \
${DWQ_JOBID:+--subjob} \
"$0 get_app_board_toolchain_pairs \${1} $0 compile"
}
print_worker() {
[ -n "$DWQ_WORKER" ] && \
echo "-- running on worker ${DWQ_WORKER} thread ${DWQ_WORKER_THREAD}, build number $DWQ_WORKER_BUILDNUM."
}
test_hash_calc() {
local bindir=$1
# Why cut twice?
# "test-input-hash.sha1" contains a list of lines of the form
# "<hash> <filename>".
# We need to strip the filename, as it contains the full path name,
# which differs depending on the build machine.
#
# After piping through sha1sum, we get "<hash> -". The trailing " -" has to go
# so we are spared the hassle of escaping the resulting hash.
cat ${bindir}/test-input-hash.sha1 | cut -f1 -d' ' | sha1sum | cut -f1 -d' '
}
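# Sketch (hypothetical file content): if test-input-hash.sha1 contains
#   d1e2a3...  /some/build/path/app.elf
#   4b5c6d...  /some/build/path/app.hex
# only the hash column is fed to sha1sum, so the result is independent of the
# build machine's path names.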
test_cache_get() {
test "${ENABLE_TEST_CACHE}" = "1" || return 1
test -n "$(redis-cli get $1)" > /dev/null
}
test_cache_put() {
redis-cli set "$1" ok
}
# compile one app for one board with one toolchain. delete intermediates.
compile() {
local appdir=$1
local board=$(echo $2 | cut -f 1 -d':')
local toolchain=$(echo $2 | cut -f 2 -d':')
[ "$board" = "makefile_broken" ] && {
echo "$0: There seems to be a problem in \"$appdir\" while getting supported boards!"
echo "$0: testing \"make -C$appdir info-boards-supported\"..."
make -C$appdir info-boards-supported && echo "$0: success. no idea what's wrong." || echo "$0: failed!"
exit 1
}
# set build directory. CI ensures only one build at a time in $(pwd).
export BINDIR="$(pwd)/build"
export PKGDIRBASE="${BINDIR}/pkg"
# Pre-build cleanup
rm -rf ${BINDIR}
print_worker
# sanity checks
[ $# -ne 2 ] && error "$0: compile: invalid parameters (expected \$appdir \$board:\$toolchain)"
[ ! -d "$appdir" ] && error "$0: compile: error: application directory \"$appdir\" doesn't exist"
[ ! -d "boards/$board" ] && error "$0: compile: error: board directory \"boards/$board\" doesn't exist"
# compile
CCACHE_BASEDIR="$(pwd)" BOARD=$board TOOLCHAIN=$toolchain RIOT_CI_BUILD=1 \
make -C${appdir} clean all test-input-hash -j${JOBS:-4}
RES=$?
# run tests
if [ $RES -eq 0 ]; then
if [ $RUN_TESTS -eq 1 -o "$board" = "native" ]; then
if [ -f "${BINDIR}/.test" ]; then
if [ "$board" = "native" ]; then
BOARD=$board make -C${appdir} test
RES=$?
elif is_in_list "$board" "$TEST_BOARDS_AVAILABLE"; then
test_hash=$(test_hash_calc "$BINDIR")
echo "-- test_hash=$test_hash"
if test_cache_get $test_hash; then
echo "-- skipping test due to positive cache hit"
else
BOARD=$board TOOLCHAIN=$toolchain TEST_HASH=$test_hash \
make -C${appdir} test-murdock
RES=$?
fi
fi
fi
fi
fi
if [ -d ${BINDIR} ]
then
echo "-- build directory size: $(du -sh ${BINDIR} | cut -f1)"
# cleanup
rm -rf ${BINDIR}
fi
return $RES
}
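# Usage sketch (hypothetical app/board):
#   ./.murdock compile tests/shell samr21-xpro:gnu
# builds tests/shell for samr21-xpro with the gnu toolchain and, where
# enabled and supported, runs its tests.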
test_job() {
local appdir=$1
local board=$(echo $2 | cut -f 1 -d':')
local toolchain=$(echo $2 | cut -f 2 -d':')
# interpret any extra arguments as file names.
# They will be sent along with the job to the test worker
# and stored in the application's binary folder.
shift 2
local files=""
for filename in "$@"; do
# check if the file is within $(BINDIR)
if startswith "${BINDIR}" "${filename}"; then
# get the path relative to $(BINDIR)
local relpath="$(realpath --relative-to ${BINDIR} ${filename})"
else
error "$0: error: extra test files not within \${BINDIR}!"
fi
# set remote file path.
# currently, the test workers build in the default build path.
local remote_bindir="${appdir}/bin/${board}"
files="${files} --file ${filename}:${remote_bindir}/${relpath}"
done
dwqc \
${DWQ_ENV} \
${DWQ_JOBID:+--subjob} \
--queue ${TEST_QUEUE:-$board} \
--maxfail 1 \
$files \
"./.murdock run_test $appdir $board:$toolchain"
}
run_test() {
local appdir=$1
local board=$(echo $2 | cut -f 1 -d':')
local toolchain=$(echo $2 | cut -f 2 -d':')
print_worker
echo "-- executing tests for $appdir on $board (compiled with $toolchain toolchain):"
hook run_test_pre
# do flashing and building of termdeps simultaneously
BOARD=$board TOOLCHAIN=${toolchain} make -C$appdir flash-only termdeps -j2
# now run the actual test
BOARD=$board TOOLCHAIN=${toolchain} make -C$appdir test
RES=$?
if [ $RES -eq 0 -a -n "$TEST_HASH" ]; then
echo -n "-- saving test result to cache: "
test_cache_put $TEST_HASH
fi
return $RES
}
# execute static tests
static_tests() {
print_worker
./dist/tools/ci/static_tests.sh
}
get_non_compile_jobs() {
[ "$STATIC_TESTS" = "1" ] && \
echo "$0 static_tests"
}
get_jobs() {
get_non_compile_jobs
get_compile_jobs
}
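# Dispatch: the first argument names a function defined above; any remaining
# arguments are passed to it, e.g. "./.murdock get_jobs" or
# "./.murdock compile <appdir> <board>:<toolchain>".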
$*