2 windows_vm: vs2017-win2016
3 ubuntu_vm: ubuntu-18.04
# NOTE(review): both hosted images above have since been retired by Azure
# Pipelines (vs2017-win2016 and ubuntu-18.04); pipelines pinned to them will
# eventually fail to schedule — confirm and migrate to supported vmImage names.
4 ci_runner_image: trini/u-boot-gitlab-ci-runner:bionic-20200311-10Apr2020
5 # Add the '-u 0' option for Azure pipelines, otherwise we get a "permission
6 # denied" error when it tries to "useradd -m -u 1001 vsts_azpcontainer",
7 # since our $(ci_runner_image) user is not root.
12 - job: tools_only_windows
13 displayName: 'Ensure host tools build for Windows'
15 vmImage: $(windows_vm)
20 BASE_REPO: msys2-ci-base-i686
23 BASE_REPO: msys2-ci-base
26 git clone https://github.com/msys2/$(BASE_REPO).git %CD:~0,2%\$(MSYS_DIR)
27 displayName: 'Install MSYS2'
29 set PATH=%CD:~0,2%\$(MSYS_DIR)\usr\bin;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem
30 %CD:~0,2%\$(MSYS_DIR)\usr\bin\pacman --noconfirm -Syyuu
31 displayName: 'Update MSYS2'
33 set PATH=%CD:~0,2%\$(MSYS_DIR)\usr\bin;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem
34 %CD:~0,2%\$(MSYS_DIR)\usr\bin\pacman --noconfirm --needed -S make gcc bison diffutils openssl-devel
35 displayName: 'Install Toolchain'
37 set PATH=C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem
38 echo make tools-only_defconfig tools-only NO_SDL=1 > build-tools.sh
39 %CD:~0,2%\$(MSYS_DIR)\usr\bin\bash -lc "bash build-tools.sh"
40 displayName: 'Build Host Tools'
42 # Tell MSYS2 we need a POSIX emulation layer
44 # Tell MSYS2 not to 'cd' our startup directory to HOME
48 displayName: 'Static code analysis with cppcheck'
52 image: $(ci_runner_image)
53 options: $(container_option)
55 - script: cppcheck --force --quiet --inline-suppr .
58 displayName: 'Build HTML documentation'
62 image: $(ci_runner_image)
63 options: $(container_option)
65 - script: make htmldocs
68 displayName: 'Search for TODO within source tree'
72 image: $(ci_runner_image)
73 options: $(container_option)
75 - script: grep -r TODO .
76 - script: grep -r FIXME .
77 - script: grep -r HACK . | grep -v HACKKIT
80 displayName: 'Some statistics about the code base'
84 image: $(ci_runner_image)
85 options: $(container_option)
90 displayName: 'Ensure all configs have MAINTAINERS entries'
94 image: $(ci_runner_image)
95 options: $(container_option)
98 if [ `./tools/genboardscfg.py -f 2>&1 | wc -l` -ne 0 ]; then exit 1; fi
101 displayName: 'Ensure host tools build'
103 vmImage: $(ubuntu_vm)
105 image: $(ci_runner_image)
106 options: $(container_option)
109 make tools-only_config tools-only -j$(nproc)
112 displayName: 'Ensure env tools build'
114 vmImage: $(ubuntu_vm)
116 image: $(ci_runner_image)
117 options: $(container_option)
120 make tools-only_config envtools -j$(nproc)
123 displayName: 'Run binman, buildman, dtoc, Kconfig and patman testsuites'
125 vmImage: $(ubuntu_vm)
128 cat << EOF > build.sh
132 cat << "EOF" >> build.sh
# Quoted "EOF" delimiter: everything below is appended to build.sh verbatim,
# so ${...} expands when build.sh runs inside the container, not here.
133 git config --global user.name "Azure Pipelines"
134 git config --global user.email bmeng.cn@gmail.com
# Run the tool testsuites inside an isolated python3 virtualenv.
136 virtualenv -p /usr/bin/python3 /tmp/venv
137 . /tmp/venv/bin/activate
138 pip install pyelftools pytest
# Matches the output of "buildman -o /tmp" below — presumably buildman's
# work directory layout (/tmp/.bm-work/<board>); confirm against buildman docs.
139 export UBOOT_TRAVIS_BUILD_DIR=/tmp/.bm-work/sandbox_spl
140 export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
141 export PATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc:${PATH}
# Build sandbox_spl first, then run the binman/buildman/patman testsuites
# and the Kconfig test against that build.
142 ./tools/buildman/buildman -o /tmp -P sandbox_spl
143 ./tools/binman/binman --toolpath ${UBOOT_TRAVIS_BUILD_DIR}/tools test
144 ./tools/buildman/buildman -t
146 ./tools/patman/patman --test
147 make O=${UBOOT_TRAVIS_BUILD_DIR} testconfig
150 # We cannot use "container" like the other jobs above, as buildman
151 # seems to hang forever in the pre-configured "container" environment
152 docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh
155 displayName: 'test.py'
157 vmImage: $(ubuntu_vm)
161 TEST_PY_BD: "sandbox"
162 BUILDMAN: "^sandbox$"
164 TEST_PY_BD: "sandbox"
165 BUILDMAN: "^sandbox$"
166 OVERRIDE: "-O clang-7"
168 TEST_PY_BD: "sandbox_spl"
169 TEST_PY_TEST_SPEC: "test_ofplatdata"
170 BUILDMAN: "^sandbox_spl$"
172 TEST_PY_BD: "sandbox_flattree"
173 BUILDMAN: "^sandbox_flattree$"
175 TEST_PY_BD: "evb-ast2500"
176 TEST_PY_ID: "--id qemu"
177 BUILDMAN: "^evb-ast2500$"
179 TEST_PY_BD: "vexpress_ca15_tc2"
180 TEST_PY_ID: "--id qemu"
181 BUILDMAN: "^vexpress_ca15_tc2$"
183 TEST_PY_BD: "vexpress_ca9x4"
184 TEST_PY_ID: "--id qemu"
185 BUILDMAN: "^vexpress_ca9x4$"
186 integratorcp_cm926ejs:
187 TEST_PY_BD: "integratorcp_cm926ejs"
188 TEST_PY_ID: "--id qemu"
189 TEST_PY_TEST_SPEC: "not sleep"
190 BUILDMAN: "^integratorcp_cm926ejs$"
192 TEST_PY_BD: "qemu_arm"
193 TEST_PY_TEST_SPEC: "not sleep"
194 BUILDMAN: "^qemu_arm$"
196 TEST_PY_BD: "qemu_arm64"
197 TEST_PY_TEST_SPEC: "not sleep"
198 BUILDMAN: "^qemu_arm64$"
200 TEST_PY_BD: "qemu_mips"
201 TEST_PY_TEST_SPEC: "not sleep"
202 BUILDMAN: "^qemu_mips$"
204 TEST_PY_BD: "qemu_mipsel"
205 TEST_PY_TEST_SPEC: "not sleep"
206 BUILDMAN: "^qemu_mipsel$"
208 TEST_PY_BD: "qemu_mips64"
209 TEST_PY_TEST_SPEC: "not sleep"
210 BUILDMAN: "^qemu_mips64$"
212 TEST_PY_BD: "qemu_mips64el"
213 TEST_PY_TEST_SPEC: "not sleep"
214 BUILDMAN: "^qemu_mips64el$"
216 TEST_PY_BD: "qemu-ppce500"
217 TEST_PY_TEST_SPEC: "not sleep"
218 BUILDMAN: "^qemu-ppce500$"
220 TEST_PY_BD: "qemu-riscv32"
221 TEST_PY_TEST_SPEC: "not sleep"
222 BUILDMAN: "^qemu-riscv32$"
224 TEST_PY_BD: "qemu-riscv64"
225 TEST_PY_TEST_SPEC: "not sleep"
226 BUILDMAN: "^qemu-riscv64$"
228 TEST_PY_BD: "qemu-x86"
229 TEST_PY_TEST_SPEC: "not sleep"
230 BUILDMAN: "^qemu-x86$"
232 TEST_PY_BD: "qemu-x86_64"
233 TEST_PY_TEST_SPEC: "not sleep"
234 BUILDMAN: "^qemu-x86_64$"
236 TEST_PY_BD: "xilinx_zynq_virt"
237 TEST_PY_ID: "--id qemu"
238 TEST_PY_TEST_SPEC: "not sleep"
239 BUILDMAN: "^xilinx_zynq_virt$"
241 TEST_PY_BD: "xilinx_versal_virt"
242 TEST_PY_ID: "--id qemu"
243 TEST_PY_TEST_SPEC: "not sleep"
244 BUILDMAN: "^xilinx_versal_virt$"
247 TEST_PY_ID: "--id qemu"
248 TEST_PY_TEST_SPEC: "not sleep"
254 # make environment variables available as tests are running inside a container
255 export WORK_DIR="${WORK_DIR}"
256 export TEST_PY_BD="${TEST_PY_BD}"
257 export TEST_PY_ID="${TEST_PY_ID}"
258 export TEST_PY_TEST_SPEC="${TEST_PY_TEST_SPEC}"
259 export BUILDMAN="${BUILDMAN}"
260 export OVERRIDE="${OVERRIDE}"
262 cat << "EOF" >> test.sh
263 # the below corresponds to .gitlab-ci.yml "before_script"
# Clone over https:// — GitHub has disabled the unauthenticated git://
# protocol, so a git:// clone here fails outright.
265 git clone --depth=1 https://github.com/swarren/uboot-test-hooks.git /tmp/uboot-test-hooks
266 ln -s travis-ci /tmp/uboot-test-hooks/bin/`hostname`
267 ln -s travis-ci /tmp/uboot-test-hooks/py/`hostname`
268 grub-mkimage --prefix=\"\" -o ~/grub_x86.efi -O i386-efi normal echo lsefimmap lsefi lsefisystab efinet tftp minicmd
269 grub-mkimage --prefix=\"\" -o ~/grub_x64.efi -O x86_64-efi normal echo lsefimmap lsefi lsefisystab efinet tftp minicmd
270 cp /opt/grub/grubriscv64.efi ~/grub_riscv64.efi
271 cp /opt/grub/grubriscv32.efi ~/grub_riscv32.efi
272 cp /opt/grub/grubaa64.efi ~/grub_arm64.efi
273 cp /opt/grub/grubarm.efi ~/grub_arm.efi
274 # the below corresponds to .gitlab-ci.yml "script"
# Build the board with buildman first (skipped when BUILDMAN is empty).
# Exit code 129 is tolerated — NOTE(review): presumably buildman's
# "warnings only" status; confirm against buildman documentation.
276 if [[ "${BUILDMAN}" != "" ]]; then
278 tools/buildman/buildman -o /tmp -P -E ${BUILDMAN} ${OVERRIDE} || ret=$?;
279 if [[ $ret -ne 0 && $ret -ne 129 ]]; then
# On a real failure, re-run buildman to print the error/warning summary.
280 tools/buildman/buildman -o /tmp -sdeP ${BUILDMAN};
# Set up an isolated python3 virtualenv with the test.py requirements.
284 virtualenv -p /usr/bin/python3 /tmp/venv
285 . /tmp/venv/bin/activate
286 pip install -r test/py/requirements.txt
# Point test.py at the buildman output and at the qemu / test-hook binaries.
287 export UBOOT_TRAVIS_BUILD_DIR=/tmp/.bm-work/${TEST_PY_BD};
288 export PATH=/opt/qemu/bin:/tmp/uboot-test-hooks/bin:${PATH};
289 export PYTHONPATH=/tmp/uboot-test-hooks/py/travis-ci;
# Run test.py only when a board was selected; TEST_PY_TEST_SPEC defaults to
# a match-everything expression when unset.
290 if [[ "${TEST_PY_BD}" != "" ]]; then
291 ./test/py/test.py --bd ${TEST_PY_BD} ${TEST_PY_ID} -k "${TEST_PY_TEST_SPEC:-not a_test_which_does_not_exist}" --build-dir "$UBOOT_TRAVIS_BUILD_DIR";
293 if [[ $ret -ne 0 ]]; then
297 # the below corresponds to .gitlab-ci.yml "after_script"
298 rm -rf /tmp/uboot-test-hooks /tmp/venv
301 # make current directory writeable to uboot user inside the container
302 # as sandbox testing need create files like spi flash images, etc.
303 # (TODO: clean up this in the future)
305 docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/test.sh
307 - job: build_the_world
308 displayName: 'Build the World'
310 vmImage: $(ubuntu_vm)
312 # Use almost the same target division in .travis.yml, only merged
313 # 4 small build jobs (arc/microblaze/nds32/xtensa) into one.
315 arc_microblaze_nds32_xtensa:
316 BUILDMAN: "arc microblaze nds32 xtensa"
317 arm11_arm7_arm920t_arm946es:
318 BUILDMAN: "arm11 arm7 arm920t arm946es"
320 BUILDMAN: "arm926ejs -x freescale,siemens,at91,kirkwood,spear,omap"
322 BUILDMAN: "at91 -x armv7"
324 BUILDMAN: "at91 -x arm926ejs"
325 boundary_engicam_toradex:
326 BUILDMAN: "boundary engicam toradex"
328 BUILDMAN: "bcm -x mips"
330 BUILDMAN: "freescale -x powerpc,m68k,aarch64,ls101,ls102,ls104,ls108,ls20,lx216"
332 BUILDMAN: "freescale&ls101"
334 BUILDMAN: "freescale&ls102"
336 BUILDMAN: "freescale&ls104"
338 BUILDMAN: "freescale&ls108"
340 BUILDMAN: "freescale&ls20"
342 BUILDMAN: "freescale&lx216"
344 BUILDMAN: "mx6 -x boundary,engicam,freescale,technexion,toradex"
346 BUILDMAN: "mx -x mx6,freescale,technexion,toradex"
350 BUILDMAN: "samsung socfpga"
362 BUILDMAN: "sun8i&armv7"
364 BUILDMAN: "sun8i&aarch64"
370 BUILDMAN: "arm -x arm11,arm7,arm9,aarch64,at91,bcm,freescale,kirkwood,mvebu,siemens,tegra,uniphier,mx,samsung,sunxi,am33xx,omap,rk,toradex,socfpga,k2,k3,zynq"
372 BUILDMAN: "sandbox x86"
374 BUILDMAN: "technexion"
384 BUILDMAN: "powerpc -x freescale"
386 BUILDMAN: "mpc85xx&freescale -x t208xrdb -x t4qds -x t102* -x p1_p2_rdb_pc -x p1010rdb -x corenet_ds -x b4860qds -x bsc91*"
388 BUILDMAN: "t208xrdb corenet_ds"
390 BUILDMAN: "t4qds b4860qds mpc83xx&freescale mpc86xx&freescale"
394 BUILDMAN: "p1_p2_rdb_pc"
396 BUILDMAN: "p1010rdb bsc91"
400 BUILDMAN: "tegra -x toradex"
402 BUILDMAN: "am33xx -x siemens"
408 BUILDMAN: "aarch64 -x bcm,k3,tegra,ls1,ls2,lx216,mvebu,uniphier,sunxi,samsung,socfpga,rk,versal,zynq"
412 BUILDMAN: "sh -x arm"
414 BUILDMAN: "zynq&armv7"
416 BUILDMAN: "versal|zynqmp&aarch64"
421 cat << EOF > build.sh
# Unquoted EOF above: ${BUILDMAN} expands NOW, baking the matrix value of
# this job into build.sh before it is run inside the container.
424 # make environment variables available as tests are running inside a container
425 export BUILDMAN="${BUILDMAN}"
427 cat << "EOF" >> build.sh
# Quoted "EOF": appended verbatim; ${...} expands when build.sh runs.
# Exit code 129 from buildman is tolerated — NOTE(review): presumably its
# "warnings only" status; confirm against buildman documentation. Any other
# non-zero code triggers a summary re-run (-sdeP) and fails the job.
428 if [[ "${BUILDMAN}" != "" ]]; then
430 tools/buildman/buildman -o /tmp -P -E ${BUILDMAN} ${OVERRIDE} || ret=$?;
431 if [[ $ret -ne 0 && $ret -ne 129 ]]; then
432 tools/buildman/buildman -o /tmp -sdeP ${BUILDMAN};
# Run the generated script in the CI image; the source tree is bind-mounted
# at $(work_dir) rather than using the "container:" resource (see the note
# on the test.py job about buildman hanging there).
438 docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh