imx8: Select boot device dynamically
[platform/kernel/u-boot.git] / .azure-pipelines.yml
variables:
  windows_vm: vs2017-win2016
  ubuntu_vm: ubuntu-18.04
  ci_runner_image: trini/u-boot-gitlab-ci-runner:bionic-20200403-27Apr2020
  # Add '-u 0' option for Azure pipelines, otherwise we get "permission
  # denied" error when it tries to "useradd -m -u 1001 vsts_azpcontainer",
  # since our $(ci_runner_image) user is not root.
  container_option: -u 0
  # Mount point for the source tree inside the CI container.
  work_dir: /u

jobs:
  - job: tools_only_windows
    displayName: 'Ensure host tools build for Windows'
    pool:
      vmImage: $(windows_vm)
    strategy:
      matrix:
        i686:
          MSYS_DIR: msys32
          BASE_REPO: msys2-ci-base-i686
        x86_64:
          MSYS_DIR: msys64
          BASE_REPO: msys2-ci-base
    steps:
      # %CD:~0,2% is the drive letter of the working directory (e.g. "C:"),
      # so MSYS2 lands at the drive root.
      - script: |
          git clone https://github.com/msys2/$(BASE_REPO).git %CD:~0,2%\$(MSYS_DIR)
        displayName: 'Install MSYS2'
      - script: |
          set PATH=%CD:~0,2%\$(MSYS_DIR)\usr\bin;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem
          %CD:~0,2%\$(MSYS_DIR)\usr\bin\pacman --noconfirm -Syyuu
        displayName: 'Update MSYS2'
      - script: |
          set PATH=%CD:~0,2%\$(MSYS_DIR)\usr\bin;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem
          %CD:~0,2%\$(MSYS_DIR)\usr\bin\pacman --noconfirm --needed -S make gcc bison diffutils openssl-devel
        displayName: 'Install Toolchain'
      - script: |
          set PATH=C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem
          echo make tools-only_defconfig tools-only NO_SDL=1 > build-tools.sh
          %CD:~0,2%\$(MSYS_DIR)\usr\bin\bash -lc "bash build-tools.sh"
        displayName: 'Build Host Tools'
        env:
          # Tell MSYS2 we need a POSIX emulation layer
          MSYSTEM: MSYS
          # Tell MSYS2 not to 'cd' our startup directory to HOME.
          # Quoted so the YAML parser keeps the literal string "yes"
          # rather than coercing it to a YAML 1.1 boolean.
          CHERE_INVOKING: "yes"

47   - job: cppcheck
48     displayName: 'Static code analysis with cppcheck'
49     pool:
50       vmImage: $(ubuntu_vm)
51     container:
52       image: $(ci_runner_image)
53       options: $(container_option)
54     steps:
55       - script: cppcheck -j$(nproc) --force --quiet --inline-suppr .
56
57   - job: htmldocs
58     displayName: 'Build HTML documentation'
59     pool:
60       vmImage: $(ubuntu_vm)
61     container:
62       image: $(ci_runner_image)
63       options: $(container_option)
64     steps:
65       - script: make htmldocs
66
67   - job: todo
68     displayName: 'Search for TODO within source tree'
69     pool:
70       vmImage: $(ubuntu_vm)
71     container:
72       image: $(ci_runner_image)
73       options: $(container_option)
74     steps:
75       - script: grep -r TODO .
76       - script: grep -r FIXME .
77       - script: grep -r HACK . | grep -v HACKKIT
78
79   - job: sloccount
80     displayName: 'Some statistics about the code base'
81     pool:
82       vmImage: $(ubuntu_vm)
83     container:
84       image: $(ci_runner_image)
85       options: $(container_option)
86     steps:
87       - script: sloccount .
88
89   - job: maintainers
90     displayName: 'Ensure all configs have MAINTAINERS entries'
91     pool:
92       vmImage: $(ubuntu_vm)
93     container:
94       image: $(ci_runner_image)
95       options: $(container_option)
96     steps:
97       - script: |
98           if [ `./tools/genboardscfg.py -f 2>&1 | wc -l` -ne 0 ]; then exit 1; fi
99
100   - job: tools_only
101     displayName: 'Ensure host tools build'
102     pool:
103       vmImage: $(ubuntu_vm)
104     container:
105       image: $(ci_runner_image)
106       options: $(container_option)
107     steps:
108       - script: |
109           make tools-only_config tools-only -j$(nproc)
110
111   - job: envtools
112     displayName: 'Ensure env tools build'
113     pool:
114       vmImage: $(ubuntu_vm)
115     container:
116       image: $(ci_runner_image)
117       options: $(container_option)
118     steps:
119       - script: |
120           make tools-only_config envtools -j$(nproc)
121
122   - job: utils
123     displayName: 'Run binman, buildman, dtoc, Kconfig and patman testsuites'
124     pool:
125       vmImage: $(ubuntu_vm)
126     steps:
127       - script: |
128           cat << EOF > build.sh
129           set -ex
130           cd ${WORK_DIR}
131           EOF
132           cat << "EOF" >> build.sh
133           git config --global user.name "Azure Pipelines"
134           git config --global user.email bmeng.cn@gmail.com
135           export USER=azure
136           virtualenv -p /usr/bin/python3 /tmp/venv
137           . /tmp/venv/bin/activate
138           pip install pyelftools pytest
139           export UBOOT_TRAVIS_BUILD_DIR=/tmp/sandbox_spl
140           export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
141           export PATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc:${PATH}
142           ./tools/buildman/buildman -o ${UBOOT_TRAVIS_BUILD_DIR} -w sandbox_spl
143           ./tools/binman/binman --toolpath ${UBOOT_TRAVIS_BUILD_DIR}/tools test
144           ./tools/buildman/buildman -t
145           ./tools/dtoc/dtoc -t
146           ./tools/patman/patman --test
147           make O=${UBOOT_TRAVIS_BUILD_DIR} testconfig
148           EOF
149           cat build.sh
150           # We cannot use "container" like other jobs above, as buildman
151           # seems to hang forever with pre-configured "container" environment
152           docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh
153
154   - job: test_py
155     displayName: 'test.py'
156     pool:
157       vmImage: $(ubuntu_vm)
158     strategy:
159       matrix:
160         sandbox:
161           TEST_PY_BD: "sandbox"
162         sandbox_clang:
163           TEST_PY_BD: "sandbox"
164           OVERRIDE: "-O clang-10"
165         sandbox_spl:
166           TEST_PY_BD: "sandbox_spl"
167           TEST_PY_TEST_SPEC: "test_ofplatdata or test_handoff"
168         sandbox_flattree:
169           TEST_PY_BD: "sandbox_flattree"
170         evb_ast2500:
171           TEST_PY_BD: "evb-ast2500"
172           TEST_PY_ID: "--id qemu"
173         vexpress_ca15_tc2:
174           TEST_PY_BD: "vexpress_ca15_tc2"
175           TEST_PY_ID: "--id qemu"
176         vexpress_ca9x4:
177           TEST_PY_BD: "vexpress_ca9x4"
178           TEST_PY_ID: "--id qemu"
179         integratorcp_cm926ejs:
180           TEST_PY_BD: "integratorcp_cm926ejs"
181           TEST_PY_ID: "--id qemu"
182           TEST_PY_TEST_SPEC: "not sleep"
183         qemu_arm:
184           TEST_PY_BD: "qemu_arm"
185           TEST_PY_TEST_SPEC: "not sleep"
186         qemu_arm64:
187           TEST_PY_BD: "qemu_arm64"
188           TEST_PY_TEST_SPEC: "not sleep"
189         qemu_mips:
190           TEST_PY_BD: "qemu_mips"
191           TEST_PY_TEST_SPEC: "not sleep"
192         qemu_mipsel:
193           TEST_PY_BD: "qemu_mipsel"
194           TEST_PY_TEST_SPEC: "not sleep"
195         qemu_mips64:
196           TEST_PY_BD: "qemu_mips64"
197           TEST_PY_TEST_SPEC: "not sleep"
198         qemu_mips64el:
199           TEST_PY_BD: "qemu_mips64el"
200           TEST_PY_TEST_SPEC: "not sleep"
201         qemu_ppce500:
202           TEST_PY_BD: "qemu-ppce500"
203           TEST_PY_TEST_SPEC: "not sleep"
204         qemu_riscv32:
205           TEST_PY_BD: "qemu-riscv32"
206           TEST_PY_TEST_SPEC: "not sleep"
207         qemu_riscv64:
208           TEST_PY_BD: "qemu-riscv64"
209           TEST_PY_TEST_SPEC: "not sleep"
210         qemu_riscv32_spl:
211           TEST_PY_BD: "qemu-riscv32_spl"
212           TEST_PY_TEST_SPEC: "not sleep"
213         qemu_riscv64_spl:
214           TEST_PY_BD: "qemu-riscv64_spl"
215           TEST_PY_TEST_SPEC: "not sleep"
216         qemu_x86:
217           TEST_PY_BD: "qemu-x86"
218           TEST_PY_TEST_SPEC: "not sleep"
219         qemu_x86_64:
220           TEST_PY_BD: "qemu-x86_64"
221           TEST_PY_TEST_SPEC: "not sleep"
222         xilinx_zynq_virt:
223           TEST_PY_BD: "xilinx_zynq_virt"
224           TEST_PY_ID: "--id qemu"
225           TEST_PY_TEST_SPEC: "not sleep"
226         xilinx_versal_virt:
227           TEST_PY_BD: "xilinx_versal_virt"
228           TEST_PY_ID: "--id qemu"
229           TEST_PY_TEST_SPEC: "not sleep"
230         xtfpga:
231           TEST_PY_BD: "xtfpga"
232           TEST_PY_ID: "--id qemu"
233           TEST_PY_TEST_SPEC: "not sleep"
234     steps:
235       - script: |
236           cat << EOF > test.sh
237           set -ex
238           # make environment variables available as tests are running inside a container
239           export WORK_DIR="${WORK_DIR}"
240           export TEST_PY_BD="${TEST_PY_BD}"
241           export TEST_PY_ID="${TEST_PY_ID}"
242           export TEST_PY_TEST_SPEC="${TEST_PY_TEST_SPEC}"
243           export OVERRIDE="${OVERRIDE}"
244           EOF
245           cat << "EOF" >> test.sh
246           # the below corresponds to .gitlab-ci.yml "before_script"
247           cd ${WORK_DIR}
248           git clone --depth=1 git://github.com/swarren/uboot-test-hooks.git /tmp/uboot-test-hooks
249           ln -s travis-ci /tmp/uboot-test-hooks/bin/`hostname`
250           ln -s travis-ci /tmp/uboot-test-hooks/py/`hostname`
251           grub-mkimage --prefix=\"\" -o ~/grub_x86.efi -O i386-efi normal  echo lsefimmap lsefi lsefisystab efinet tftp minicmd
252           grub-mkimage --prefix=\"\" -o ~/grub_x64.efi -O x86_64-efi normal  echo lsefimmap lsefi lsefisystab efinet tftp minicmd
253           cp /opt/grub/grubriscv64.efi ~/grub_riscv64.efi
254           cp /opt/grub/grubriscv32.efi ~/grub_riscv32.efi
255           cp /opt/grub/grubaa64.efi ~/grub_arm64.efi
256           cp /opt/grub/grubarm.efi ~/grub_arm.efi
257           if [[ "${TEST_PY_BD}" == "qemu-riscv32_spl" ]]; then
258               wget -O - https://github.com/riscv/opensbi/releases/download/v0.6/opensbi-0.6-rv32-bin.tar.xz | tar -C /tmp -xJ;
259               export OPENSBI=/tmp/opensbi-0.6-rv32-bin/platform/qemu/virt/firmware/fw_dynamic.bin;
260           fi
261           if [[ "${TEST_PY_BD}" == "qemu-riscv64_spl" ]]; then
262               wget -O - https://github.com/riscv/opensbi/releases/download/v0.6/opensbi-0.6-rv64-bin.tar.xz | tar -C /tmp -xJ;
263               export OPENSBI=/tmp/opensbi-0.6-rv64-bin/platform/qemu/virt/firmware/fw_dynamic.bin;
264           fi
265           # the below corresponds to .gitlab-ci.yml "script"
266           cd ${WORK_DIR}
267           export UBOOT_TRAVIS_BUILD_DIR=/tmp/${TEST_PY_BD};
268           tools/buildman/buildman -o ${UBOOT_TRAVIS_BUILD_DIR} -w -E -W -e --board ${TEST_PY_BD} ${OVERRIDE}
269           virtualenv -p /usr/bin/python3 /tmp/venv
270           . /tmp/venv/bin/activate
271           pip install -r test/py/requirements.txt
272           export PATH=/opt/qemu/bin:/tmp/uboot-test-hooks/bin:${PATH};
273           export PYTHONPATH=/tmp/uboot-test-hooks/py/travis-ci;
274           # "${var:+"-k $var"}" expands to "" if $var is empty, "-k $var" if not
275           ./test/py/test.py --bd ${TEST_PY_BD} ${TEST_PY_ID} ${TEST_PY_TEST_SPEC:+"-k ${TEST_PY_TEST_SPEC}"} --build-dir "$UBOOT_TRAVIS_BUILD_DIR";
276           # the below corresponds to .gitlab-ci.yml "after_script"
277           rm -rf /tmp/uboot-test-hooks /tmp/venv
278           EOF
279           cat test.sh
280           # make current directory writeable to uboot user inside the container
281           # as sandbox testing need create files like spi flash images, etc.
282           # (TODO: clean up this in the future)
283           chmod 777 .
284           docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/test.sh
285
286   - job: build_the_world
287     displayName: 'Build the World'
288     pool:
289       vmImage: $(ubuntu_vm)
290     strategy:
291       # Use almost the same target division in .travis.yml, only merged
292       # 4 small build jobs (arc/microblaze/nds32/xtensa) into one.
293       matrix:
294         arc_microblaze_nds32_xtensa:
295           BUILDMAN: "arc microblaze nds32 xtensa"
296         arm11_arm7_arm920t_arm946es:
297           BUILDMAN: "arm11 arm7 arm920t arm946es"
298         arm926ejs:
299           BUILDMAN: "arm926ejs -x freescale,siemens,at91,kirkwood,spear,omap"
300         at91_non_armv7:
301           BUILDMAN: "at91 -x armv7"
302         at91_non_arm926ejs:
303           BUILDMAN: "at91 -x arm926ejs"
304         boundary_engicam_toradex:
305           BUILDMAN: "boundary engicam toradex"
306         arm_bcm:
307           BUILDMAN: "bcm -x mips"
308         nxp_arm32:
309           BUILDMAN: "freescale -x powerpc,m68k,aarch64,ls101,ls102,ls104,ls108,ls20,lx216"
310         nxp_ls101x:
311           BUILDMAN: "freescale&ls101"
312         nxp_ls102x:
313           BUILDMAN: "freescale&ls102"
314         nxp_ls104x:
315           BUILDMAN: "freescale&ls104"
316         nxp_ls108x:
317           BUILDMAN: "freescale&ls108"
318         nxp_ls20xx:
319           BUILDMAN: "freescale&ls20"
320         nxp_lx216x:
321           BUILDMAN: "freescale&lx216"
322         imx6:
323           BUILDMAN: "mx6 -x boundary,engicam,freescale,technexion,toradex"
324         imx:
325           BUILDMAN: "mx -x mx6,freescale,technexion,toradex"
326         keystone2_keystone3:
327           BUILDMAN: "k2 k3"
328         samsung_socfpga:
329           BUILDMAN: "samsung socfpga"
330         spear:
331           BUILDMAN: "spear"
332         sun4i:
333           BUILDMAN: "sun4i"
334         sun5i:
335           BUILDMAN: "sun5i"
336         sun6i:
337           BUILDMAN: "sun6i"
338         sun7i:
339           BUILDMAN: "sun7i"
340         sun8i_32bit:
341           BUILDMAN: "sun8i&armv7"
342         sun8i_64bit:
343           BUILDMAN: "sun8i&aarch64"
344         sun9i:
345           BUILDMAN: "sun9i"
346         sun50i:
347           BUILDMAN: "sun50i"
348         arm_catch_all:
349           BUILDMAN: "arm -x arm11,arm7,arm9,aarch64,at91,bcm,freescale,kirkwood,mvebu,siemens,tegra,uniphier,mx,samsung,sunxi,am33xx,omap,rk,toradex,socfpga,k2,k3,zynq"
350         sandbox_x86:
351           BUILDMAN: "sandbox x86"
352         technexion:
353           BUILDMAN: "technexion"
354         kirkwood:
355           BUILDMAN: "kirkwood"
356         mvebu:
357           BUILDMAN: "mvebu"
358         m68k:
359           BUILDMAN: "m68k"
360         mips:
361           BUILDMAN: "mips"
362         non_fsl_ppc:
363           BUILDMAN: "powerpc -x freescale"
364         mpc85xx_freescale:
365           BUILDMAN: "mpc85xx&freescale -x t208xrdb -x t4qds -x t102* -x p1_p2_rdb_pc -x p1010rdb -x corenet_ds -x b4860qds -x bsc91*"
366         t208xrdb_corenet_ds:
367           BUILDMAN: "t208xrdb corenet_ds"
368         fsl_ppc:
369           BUILDMAN: "t4qds b4860qds mpc83xx&freescale mpc86xx&freescale"
370         t102x:
371           BUILDMAN: "t102*"
372         p1_p2_rdb_pc:
373           BUILDMAN: "p1_p2_rdb_pc"
374         p1010rdb_bsc91:
375           BUILDMAN: "p1010rdb bsc91"
376         siemens:
377           BUILDMAN: "siemens"
378         tegra:
379           BUILDMAN: "tegra -x toradex"
380         am33xx_no_siemens:
381           BUILDMAN: "am33xx -x siemens"
382         omap:
383           BUILDMAN: "omap"
384         uniphier:
385           BUILDMAN: "uniphier"
386         aarch64_catch_all:
387           BUILDMAN: "aarch64 -x bcm,k3,tegra,ls1,ls2,lx216,mvebu,uniphier,sunxi,samsung,socfpga,rk,versal,zynq"
388         rockchip:
389           BUILDMAN: "rk"
390         sh:
391           BUILDMAN: "sh -x arm"
392         zynq:
393           BUILDMAN: "zynq&armv7"
394         zynqmp_versal:
395           BUILDMAN: "versal|zynqmp&aarch64"
396         riscv:
397           BUILDMAN: "riscv"
398     steps:
399       - script: |
400           cat << EOF > build.sh
401           set -ex
402           cd ${WORK_DIR}
403           # make environment variables available as tests are running inside a container
404           export BUILDMAN="${BUILDMAN}"
405           EOF
406           cat << "EOF" >> build.sh
407           if [[ "${BUILDMAN}" != "" ]]; then
408               ret=0;
409               tools/buildman/buildman -o /tmp -P -W ${BUILDMAN} ${OVERRIDE} || ret=$?;
410               if [[ $ret -ne 0 ]]; then
411                   tools/buildman/buildman -o /tmp -seP ${BUILDMAN};
412                   exit $ret;
413               fi;
414           fi
415           EOF
416           cat build.sh
417           docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh