# .azure-pipelines.yml
variables:
  windows_vm: windows-2019
  ubuntu_vm: ubuntu-22.04
  macos_vm: macOS-12
  ci_runner_image: trini/u-boot-gitlab-ci-runner:jammy-20230624-20Jul2023
  # Add the '-u 0' option for Azure pipelines; otherwise we get a "permission
  # denied" error when it tries to "useradd -m -u 1001 vsts_azpcontainer",
  # since our $(ci_runner_image) user is not root.
  container_option: -u 0
  work_dir: /u
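  # work_dir is the path at which the source tree is mounted inside the
  # containers started via the 'docker run -v $PWD:$(work_dir)' invocations below.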

stages:
- stage: testsuites
  jobs:
  - job: tools_only_windows
    displayName: 'Ensure host tools build for Windows'
    pool:
      vmImage: $(windows_vm)
    steps:
      - powershell: |
          (New-Object Net.WebClient).DownloadFile("https://github.com/msys2/msys2-installer/releases/download/2021-06-04/msys2-base-x86_64-20210604.sfx.exe", "sfx.exe")
        displayName: 'Install MSYS2'
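      # MSYS2 is updated twice on purpose: the first 'pacman -Syuu' pass may
      # only update the core runtime, so a second pass is needed to bring the
      # remaining packages up to date.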
      - script: |
          sfx.exe -y -o%CD:~0,2%\
          %CD:~0,2%\msys64\usr\bin\bash -lc " "
          %CD:~0,2%\msys64\usr\bin\bash -lc "pacman --noconfirm -Syuu"
          %CD:~0,2%\msys64\usr\bin\bash -lc "pacman --noconfirm -Syuu"
        displayName: 'Update MSYS2'
      - script: |
          %CD:~0,2%\msys64\usr\bin\bash -lc "pacman --noconfirm --needed -Sy make gcc bison flex diffutils openssl-devel libgnutls-devel libutil-linux-devel"
        displayName: 'Install Toolchain'
      - script: |
          echo make tools-only_defconfig tools-only > build-tools.sh
          %CD:~0,2%\msys64\usr\bin\bash -lc "bash build-tools.sh"
        displayName: 'Build Host Tools'
        env:
          # Tell MSYS2 we need a POSIX emulation layer
          MSYSTEM: MSYS
          # Tell MSYS2 not to ‘cd’ our startup directory to HOME
          CHERE_INVOKING: yes

  - job: tools_only_macOS
    displayName: 'Ensure host tools build for macOS'
    pool:
      vmImage: $(macos_vm)
    steps:
      - script: brew install make ossp-uuid
        displayName: Brew install dependencies
      - script: |
          gmake tools-only_config tools-only \
            HOSTCFLAGS="-I/usr/local/opt/openssl@1.1/include" \
            HOSTLDFLAGS="-L/usr/local/opt/openssl@1.1/lib" \
            -j$(sysctl -n hw.logicalcpu)
        displayName: 'Perform tools-only build'

  - job: check_for_new_CONFIG_symbols_outside_Kconfig
    displayName: 'Check for new CONFIG symbols outside Kconfig'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      # If grep succeeds and finds a match, the test fails, as we should
      # have no matches.
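      # A hypothetical offending line would be adding '#define CONFIG_FOO 1'
      # to a header outside the excluded paths; such symbols must be declared
      # in Kconfig instead.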
      - script: git grep -E '^#[[:blank:]]*(define|undef)[[:blank:]]*CONFIG_'
                  :^doc/ :^arch/arm/dts/ :^scripts/kconfig/lkc.h
                  :^include/linux/kconfig.h :^tools/ && exit 1 || exit 0

  - job: cppcheck
    displayName: 'Static code analysis with cppcheck'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: cppcheck -j$(nproc) --force --quiet --inline-suppr .

  - job: docs
    displayName: 'Build documentation'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          virtualenv -p /usr/bin/python3 /tmp/venvhtml
          . /tmp/venvhtml/bin/activate
          pip install -r doc/sphinx/requirements.txt
          make htmldocs KDOC_WERROR=1
          make infodocs

  - job: todo
    displayName: 'Search for TODO within source tree'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: grep -r TODO .
      - script: grep -r FIXME .
      - script: grep -r HACK . | grep -v HACKKIT

  - job: sloccount
    displayName: 'Some statistics about the code base'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: sloccount .

  - job: maintainers
    displayName: 'Ensure all configs have MAINTAINERS entries'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          ./tools/buildman/buildman --maintainer-check || exit 0

  - job: tools_only
    displayName: 'Ensure host tools build'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          make tools-only_config tools-only -j$(nproc)

  - job: envtools
    displayName: 'Ensure env tools build'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          make tools-only_config envtools -j$(nproc)

  - job: utils
    displayName: 'Run binman, buildman, dtoc, Kconfig and patman testsuites'
    pool:
      vmImage: $(ubuntu_vm)
    steps:
      - script: |
          cat << "EOF" > build.sh
          cd $(work_dir)
          git config --global user.name "Azure Pipelines"
          git config --global user.email bmeng.cn@gmail.com
          git config --global --add safe.directory $(work_dir)
          export USER=azure
          virtualenv -p /usr/bin/python3 /tmp/venv
          . /tmp/venv/bin/activate
          pip install -r test/py/requirements.txt
          pip install -r tools/buildman/requirements.txt
          export UBOOT_TRAVIS_BUILD_DIR=/tmp/sandbox_spl
          export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
          export PATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc:${PATH}
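          # Build sandbox_spl first: the testsuites below use its tools
          # (via --toolpath) and the pylibfdt produced by this build.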
          ./tools/buildman/buildman -T0 -o ${UBOOT_TRAVIS_BUILD_DIR} -w --board sandbox_spl
          set -ex
          ./tools/binman/binman --toolpath ${UBOOT_TRAVIS_BUILD_DIR}/tools test
          ./tools/buildman/buildman -t
          ./tools/dtoc/dtoc -t
          ./tools/patman/patman test
          make O=${UBOOT_TRAVIS_BUILD_DIR} testconfig
          EOF
          cat build.sh
          # We cannot use "container" like the other jobs above, as buildman
          # seems to hang forever in the pre-configured "container" environment
          docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh

  - job: nokia_rx51_test
    displayName: 'Run tests for Nokia RX-51 (aka N900)'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          mkdir nokia_rx51_tmp
          ln -s /opt/nokia/u-boot-gen-combined nokia_rx51_tmp/
          ln -s /opt/nokia/qemu-n900.tar.gz nokia_rx51_tmp/
          ln -s /opt/nokia/kernel_2.6.28-20103103+0m5_armel.deb nokia_rx51_tmp/
          ln -s /opt/nokia/libc6_2.5.1-1eglibc27+0m5_armel.deb nokia_rx51_tmp/
          ln -s /opt/nokia/busybox_1.10.2.legal-1osso30+0m5_armel.deb nokia_rx51_tmp/
          ln -s /opt/nokia/qemu-system-arm nokia_rx51_tmp/
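          # The QEMU binary, image-generation tool and Maemo packages linked
          # above are pre-installed in the CI image under /opt/nokia.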
          export PATH=/opt/gcc-13.1.0-nolibc/arm-linux-gnueabi/bin:$PATH
          test/nokia_rx51_test.sh

  - job: pylint
    displayName: Check for any pylint regressions
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          git config --global --add safe.directory $(work_dir)
          export USER=azure
          pip install -r test/py/requirements.txt
          pip install -r tools/buildman/requirements.txt
          pip install asteval pylint==2.12.2 pyopenssl
          export PATH=${PATH}:~/.local/bin
          echo "[MASTER]" >> .pylintrc
          echo "load-plugins=pylint.extensions.docparams" >> .pylintrc
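          # The docparams extension makes pylint also check docstring
          # parameter documentation; 'make pylint_err' below is what actually
          # reports any new pylint problems.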
          export UBOOT_TRAVIS_BUILD_DIR=/tmp/sandbox_spl
          ./tools/buildman/buildman -T0 -o ${UBOOT_TRAVIS_BUILD_DIR} -w --board sandbox_spl
          set -ex
          pylint --version
          export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
          make pylint_err

  - job: check_for_pre_schema_tags
    displayName: 'Check for pre-schema driver model tags'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      # If grep succeeds and finds a match, the test fails, as we should
      # have no matches.
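      # The old "u-boot,dm-*" device tree properties were replaced by the
      # schema-compliant "bootph-*" properties, so none should appear in
      # .dts/.dtsi files any more.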
      - script: git grep u-boot,dm- -- '*.dts*' && exit 1 || exit 0

  - job: check_packing_of_python_tools
    displayName: 'Check we can package the Python tools'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: make pip

- stage: test_py
  jobs:
  - job: test_py
    displayName: 'test.py'
    pool:
      vmImage: $(ubuntu_vm)
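    # Each matrix entry defines a board to build and test: TEST_PY_BD is the
    # board name passed to buildman and test.py, TEST_PY_ID an optional --id
    # argument, TEST_PY_TEST_SPEC a pytest -k expression, OVERRIDE extra
    # buildman options and BUILD_ENV extra environment for the build.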
    strategy:
      matrix:
        sandbox:
          TEST_PY_BD: "sandbox"
        sandbox_clang:
          TEST_PY_BD: "sandbox"
          OVERRIDE: "-O clang-16"
        sandbox_nolto:
          TEST_PY_BD: "sandbox"
          BUILD_ENV: "NO_LTO=1"
        sandbox_spl:
          TEST_PY_BD: "sandbox_spl"
          TEST_PY_TEST_SPEC: "test_ofplatdata or test_handoff or test_spl"
        sandbox_vpl:
          TEST_PY_BD: "sandbox_vpl"
          TEST_PY_TEST_SPEC: "vpl or test_spl"
        sandbox_noinst:
          TEST_PY_BD: "sandbox_noinst"
          TEST_PY_TEST_SPEC: "test_ofplatdata or test_handoff or test_spl"
        sandbox_flattree:
          TEST_PY_BD: "sandbox_flattree"
        sandbox_trace:
          TEST_PY_BD: "sandbox"
          BUILD_ENV: "FTRACE=1 NO_LTO=1"
          TEST_PY_TEST_SPEC: "trace"
          OVERRIDE: "-a CONFIG_TRACE=y -a CONFIG_TRACE_EARLY=y -a CONFIG_TRACE_EARLY_SIZE=0x01000000"
        coreboot:
          TEST_PY_BD: "coreboot"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        evb_ast2500:
          TEST_PY_BD: "evb-ast2500"
          TEST_PY_ID: "--id qemu"
        evb_ast2600:
          TEST_PY_BD: "evb-ast2600"
          TEST_PY_ID: "--id qemu"
        vexpress_ca9x4:
          TEST_PY_BD: "vexpress_ca9x4"
          TEST_PY_ID: "--id qemu"
        integratorcp_cm926ejs:
          TEST_PY_BD: "integratorcp_cm926ejs"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_arm:
          TEST_PY_BD: "qemu_arm"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_arm64:
          TEST_PY_BD: "qemu_arm64"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_m68k:
          TEST_PY_BD: "M5208EVBE"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
          OVERRIDE: "-a CONFIG_M68K_QEMU=y -a ~CONFIG_MCFTMR"
        qemu_malta:
          TEST_PY_BD: "malta"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_maltael:
          TEST_PY_BD: "maltael"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_malta64:
          TEST_PY_BD: "malta64"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_malta64el:
          TEST_PY_BD: "malta64el"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_ppce500:
          TEST_PY_BD: "qemu-ppce500"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv32:
          TEST_PY_BD: "qemu-riscv32"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv64:
          TEST_PY_BD: "qemu-riscv64"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv32_spl:
          TEST_PY_BD: "qemu-riscv32_spl"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv64_spl:
          TEST_PY_BD: "qemu-riscv64_spl"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_x86:
          TEST_PY_BD: "qemu-x86"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_x86_64:
          TEST_PY_BD: "qemu-x86_64"
          TEST_PY_TEST_SPEC: "not sleep"
        r2dplus_i82557c:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id i82557c_qemu"
        r2dplus_pcnet:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id pcnet_qemu"
        r2dplus_rtl8139:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id rtl8139_qemu"
        r2dplus_tulip:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id tulip_qemu"
        sifive_unleashed_sdcard:
          TEST_PY_BD: "sifive_unleashed"
          TEST_PY_ID: "--id sdcard_qemu"
        sifive_unleashed_spi-nor:
          TEST_PY_BD: "sifive_unleashed"
          TEST_PY_ID: "--id spi-nor_qemu"
        xilinx_zynq_virt:
          TEST_PY_BD: "xilinx_zynq_virt"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        xilinx_versal_virt:
          TEST_PY_BD: "xilinx_versal_virt"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        xtfpga:
          TEST_PY_BD: "xtfpga"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
    steps:
      - script: |
          cat << EOF > test.sh
          set -ex
          # make environment variables available as tests are running inside a container
          export WORK_DIR="${WORK_DIR}"
          export TEST_PY_BD="${TEST_PY_BD}"
          export TEST_PY_ID="${TEST_PY_ID}"
          export TEST_PY_TEST_SPEC="${TEST_PY_TEST_SPEC}"
          export OVERRIDE="${OVERRIDE}"
          export BUILD_ENV="${BUILD_ENV}"
          EOF
          cat << "EOF" >> test.sh
          # the below corresponds to .gitlab-ci.yml "before_script"
          cd ${WORK_DIR}
          git config --global --add safe.directory ${WORK_DIR}
          git clone --depth=1 https://source.denx.de/u-boot/u-boot-test-hooks /tmp/uboot-test-hooks
          ln -s travis-ci /tmp/uboot-test-hooks/bin/`hostname`
          ln -s travis-ci /tmp/uboot-test-hooks/py/`hostname`
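          # Build minimal 32-bit and 64-bit x86 GRUB EFI images, used by the
          # EFI/distro boot tests.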
          grub-mkimage --prefix=\"\" -o ~/grub_x86.efi -O i386-efi normal  echo lsefimmap lsefi lsefisystab efinet tftp minicmd
          grub-mkimage --prefix=\"\" -o ~/grub_x64.efi -O x86_64-efi normal  echo lsefimmap lsefi lsefisystab efinet tftp minicmd
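          # The QEMU RISC-V SPL and SiFive Unleashed configurations need
          # OpenSBI firmware; point $OPENSBI at the matching fw_dynamic.bin
          # (ilp32 for 32-bit, lp64 for 64-bit).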
          if [[ "${TEST_PY_BD}" == "qemu-riscv32_spl" ]]; then
              wget -O - https://github.com/riscv-software-src/opensbi/releases/download/v1.2/opensbi-1.2-rv-bin.tar.xz | tar -C /tmp -xJ;
              export OPENSBI=/tmp/opensbi-1.2-rv-bin/share/opensbi/ilp32/generic/firmware/fw_dynamic.bin;
          fi
          if [[ "${TEST_PY_BD}" == "qemu-riscv64_spl" ]] || [[ "${TEST_PY_BD}" == "sifive_unleashed" ]]; then
              wget -O - https://github.com/riscv-software-src/opensbi/releases/download/v1.2/opensbi-1.2-rv-bin.tar.xz | tar -C /tmp -xJ;
              export OPENSBI=/tmp/opensbi-1.2-rv-bin/share/opensbi/lp64/generic/firmware/fw_dynamic.bin;
          fi
          # the below corresponds to .gitlab-ci.yml "script"
          cd ${WORK_DIR}
          export UBOOT_TRAVIS_BUILD_DIR=/tmp/${TEST_PY_BD};
          if [ -n "${BUILD_ENV}" ]; then
              export ${BUILD_ENV};
          fi
          pip install -r tools/buildman/requirements.txt
          tools/buildman/buildman -o ${UBOOT_TRAVIS_BUILD_DIR} -w -E -W -e --board ${TEST_PY_BD} ${OVERRIDE}
          cp ~/grub_x86.efi ${UBOOT_TRAVIS_BUILD_DIR}/
          cp ~/grub_x64.efi ${UBOOT_TRAVIS_BUILD_DIR}/
          cp /opt/grub/grubriscv64.efi ${UBOOT_TRAVIS_BUILD_DIR}/grub_riscv64.efi
          cp /opt/grub/grubaa64.efi ${UBOOT_TRAVIS_BUILD_DIR}/grub_arm64.efi
          cp /opt/grub/grubarm.efi ${UBOOT_TRAVIS_BUILD_DIR}/grub_arm.efi
          # create sdcard / spi-nor images for sifive unleashed using genimage
          if [[ "${TEST_PY_BD}" == "sifive_unleashed" ]]; then
              mkdir -p root;
              cp ${UBOOT_TRAVIS_BUILD_DIR}/spl/u-boot-spl.bin .;
              cp ${UBOOT_TRAVIS_BUILD_DIR}/u-boot.itb .;
              rm -rf tmp;
              genimage --inputpath . --config board/sifive/unleashed/genimage_sdcard.cfg;
              cp images/sdcard.img ${UBOOT_TRAVIS_BUILD_DIR}/;
              rm -rf tmp;
              genimage --inputpath . --config board/sifive/unleashed/genimage_spi-nor.cfg;
              cp images/spi-nor.img ${UBOOT_TRAVIS_BUILD_DIR}/;
          fi
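          # For the coreboot target, fetch a prebuilt coreboot.rom and the
          # cbfstool utility, then insert u-boot.bin into the ROM as the
          # coreboot payload.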
          if [[ "${TEST_PY_BD}" == "coreboot" ]]; then
              wget -O - "https://drive.google.com/uc?id=1x6nrtWIyIRPLS2cQBwYTnT2TbOI8UjmM&export=download" |xz -dc >${UBOOT_TRAVIS_BUILD_DIR}/coreboot.rom;
              wget -O - "https://drive.google.com/uc?id=149Cz-5SZXHNKpi9xg6R_5XITWohu348y&export=download" >cbfstool;
              chmod a+x cbfstool;
              ./cbfstool ${UBOOT_TRAVIS_BUILD_DIR}/coreboot.rom add-flat-binary -f ${UBOOT_TRAVIS_BUILD_DIR}/u-boot.bin -n fallback/payload -c LZMA -l 0x1110000 -e 0x1110000;
          fi
          virtualenv -p /usr/bin/python3 /tmp/venv
          . /tmp/venv/bin/activate
          pip install -r test/py/requirements.txt
          pip install pytest-azurepipelines
          export PATH=/opt/qemu/bin:/tmp/uboot-test-hooks/bin:${PATH};
          export PYTHONPATH=/tmp/uboot-test-hooks/py/travis-ci;
          # "${var:+"-k $var"}" expands to "" if $var is empty, "-k $var" if not
          ./test/py/test.py -ra -o cache_dir="$UBOOT_TRAVIS_BUILD_DIR"/.pytest_cache --bd ${TEST_PY_BD} ${TEST_PY_ID} ${TEST_PY_TEST_SPEC:+"-k ${TEST_PY_TEST_SPEC}"} --build-dir "$UBOOT_TRAVIS_BUILD_DIR" --report-dir "$UBOOT_TRAVIS_BUILD_DIR";
          # the below corresponds to .gitlab-ci.yml "after_script"
          rm -rf /tmp/uboot-test-hooks /tmp/venv
          EOF
          cat test.sh
          # make the current directory writable by the uboot user inside the container,
          # as sandbox testing needs to create files like SPI flash images, etc.
          # (TODO: clean this up in the future)
          chmod 777 .
          # Filesystem tests need extra docker args to run
          set --
          if [[ "${TEST_PY_BD}" == "sandbox" ]]; then
              # mount -o loop needs the loop devices
              if modprobe loop; then
                  for d in $(find /dev -maxdepth 1 -name 'loop*'); do
                      set -- "$@" --device $d:$d
                  done
              fi
              # Needed for mount syscall (for guestmount as well)
              set -- "$@" --cap-add SYS_ADMIN
              # Default apparmor profile denies mounts
              set -- "$@" --security-opt apparmor=unconfined
          fi
          # Some tests using libguestfs-tools need the fuse device to run
          docker run "$@" --device /dev/fuse:/dev/fuse -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/test.sh
        retryCountOnTaskFailure: 2 # QEMU may be too slow, etc.

- stage: world_build
  jobs:
  - job: build_the_world
    displayName: 'Build the World'
    pool:
      vmImage: $(ubuntu_vm)
    strategy:
      # Use almost the same target division as in .travis.yml, only with the
      # small build jobs (arc/nios2/m68k/microblaze/xtensa) merged into one.
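      # BUILDMAN holds the buildman board-selection expression for each matrix
      # entry; OVERRIDE optionally passes extra options (e.g. -a/-O) through
      # to buildman.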
      matrix:
        arc_nios2_m68k_microblaze_xtensa:
          BUILDMAN: "arc nios2 microblaze m68k xtensa"
        amlogic:
          BUILDMAN: "amlogic"
        arm11_arm7_arm920t_arm946es:
          BUILDMAN: "arm11 arm7 arm920t arm946es"
        arm926ejs:
          BUILDMAN: "arm926ejs -x freescale,siemens,at91,kirkwood,omap"
        at91_non_armv7:
          BUILDMAN: "at91 -x armv7"
        at91_non_arm926ejs:
          BUILDMAN: "at91 -x arm926ejs"
        boundary_engicam_toradex:
          BUILDMAN: "boundary engicam toradex"
        arm_bcm:
          BUILDMAN: "bcm -x mips"
        nxp_arm32:
          BUILDMAN: "freescale -x powerpc,m68k,aarch64,ls101,ls102,ls104,ls108,ls20,lx216"
        nxp_ls101x_ls108x:
          BUILDMAN: "freescale&ls101 freescale&ls108"
        nxp_ls102x:
          BUILDMAN: "freescale&ls102 -x keymile"
        nxp_ls104x:
          BUILDMAN: "freescale&ls104"
        nxp_ls20xx_lx216x:
          BUILDMAN: "freescale&ls20 freescale&lx216"
        imx6:
          BUILDMAN: "mx6 -x boundary,engicam,freescale,technexion,toradex"
        imx:
          BUILDMAN: "mx -x mx6,imx8,freescale,technexion,toradex"
        imx8_imx9:
          BUILDMAN: "imx8 imx9 -x engicam,technexion,toradex"
        keymiles_siemens_technexion:
          BUILDMAN: "keymile siemens technexion"
        keystone2_keystone3:
          BUILDMAN: "k2 k3 -x siemens,toradex"
        sandbox_asan:
          BUILDMAN: "sandbox"
          OVERRIDE: "-a ASAN"
        sandbox_clang_asan:
          BUILDMAN: "sandbox"
          OVERRIDE: "-O clang-16 -a ASAN"
        samsung_socfpga_renesas:
          BUILDMAN: "samsung socfpga renesas"
        sun4i_sun9i:
          BUILDMAN: "sun4i sun9i"
        sun5i_sun6i:
          BUILDMAN: "sun5i sun6i"
        sun7i:
          BUILDMAN: "sun7i"
        sun8i:
          BUILDMAN: "sun8i"
        sun50i:
          BUILDMAN: "sun50i"
        arm_catch_all:
          BUILDMAN: "arm -x arm11,arm7,arm9,aarch64,at91,bcm,freescale,kirkwood,mvebu,renesas,siemens,tegra,uniphier,mx,samsung,sunxi,am33xx,omap,toradex,socfpga,k2,k3,zynq"
        sandbox_x86:
          BUILDMAN: "sandbox x86"
        kirkwood_mvebu_uniphier:
          BUILDMAN: "kirkwood mvebu uniphier"
        mips:
          BUILDMAN: "mips"
        powerpc:
          BUILDMAN: "powerpc -x keymile"
        tegra:
          BUILDMAN: "tegra -x toradex"
        am33xx_omap:
          BUILDMAN: "am33xx omap -x siemens"
        aarch64_catch_all:
          BUILDMAN: "aarch64 -x amlogic,bcm,imx8,imx9,k3,tegra,ls1,ls2,lx216,mvebu,uniphier,renesas,sunxi,samsung,socfpga,rk,versal,zynq"
        rk_non_rockchip_64bit:
          BUILDMAN: "rk&aarch64 -x rockchip"
        rk_rockchip_64bit:
          BUILDMAN: "rk&aarch64&rockchip"
        zynq_zynqmp_versal:
          BUILDMAN: "zynq&armv7 versal zynqmp&aarch64"
        riscv:
          BUILDMAN: "riscv"
    steps:
      - script: |
          cat << EOF > build.sh
          set -ex
          cd ${WORK_DIR}
          # make environment variables available as the build is running inside a container
          export BUILDMAN="${BUILDMAN}"
          git config --global --add safe.directory ${WORK_DIR}
          pip install -r tools/buildman/requirements.txt
          EOF
          cat << "EOF" >> build.sh
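          # If the world build fails, re-run buildman in summary mode so the
          # problems show up in the log, then exit with the original return
          # code.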
          if [[ "${BUILDMAN}" != "" ]]; then
              ret=0;
              tools/buildman/buildman -o /tmp -PEWM ${BUILDMAN} ${OVERRIDE} || ret=$?;
              if [[ $ret -ne 0 ]]; then
                  tools/buildman/buildman -o /tmp -seP ${BUILDMAN};
                  exit $ret;
              fi;
          fi
          EOF
          cat build.sh
          docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh