# configs: enable eSDHC device module for P5040DS board
# [platform/kernel/u-boot.git] / .azure-pipelines.yml
---
# Global pipeline variables shared by all jobs below.
variables:
  windows_vm: vs2015-win2012r2
  ubuntu_vm: ubuntu-18.04
  ci_runner_image: trini/u-boot-gitlab-ci-runner:bionic-20191010-20Oct2019
  # Add '-u 0' options for Azure pipelines, otherwise we get "permission
  # denied" error when it tries to "useradd -m -u 1001 vsts_azpcontainer",
  # since our $(ci_runner_image) user is not root.
  container_option: -u 0
  work_dir: /u
jobs:
  # Build the U-Boot host tools under MSYS2 on Windows, for both i686
  # and x86_64 MSYS2 environments.
  - job: tools_only_windows
    displayName: 'Ensure host tools build for Windows'
    pool:
      vmImage: $(windows_vm)
    strategy:
      matrix:
        i686:
          MSYS_DIR: msys32
          BASE_REPO: msys2-ci-base-i686
        x86_64:
          MSYS_DIR: msys64
          BASE_REPO: msys2-ci-base
    steps:
      - script: |
          git clone https://github.com/msys2/$(BASE_REPO).git %CD:~0,2%\$(MSYS_DIR)
        displayName: 'Install MSYS2'
      - script: |
          set PATH=%CD:~0,2%\$(MSYS_DIR)\usr\bin;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem
          %CD:~0,2%\$(MSYS_DIR)\usr\bin\pacman --noconfirm -Syyuu
        displayName: 'Update MSYS2'
      - script: |
          set PATH=%CD:~0,2%\$(MSYS_DIR)\usr\bin;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem
          %CD:~0,2%\$(MSYS_DIR)\usr\bin\pacman --noconfirm --needed -S make gcc bison diffutils openssl-devel
        displayName: 'Install Toolchain'
      - script: |
          set PATH=C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem
          echo make tools-only_defconfig tools-only NO_SDL=1 > build-tools.sh
          %CD:~0,2%\$(MSYS_DIR)\usr\bin\bash -lc "bash build-tools.sh"
        displayName: 'Build Host Tools'
        env:
          # Tell MSYS2 we need a POSIX emulation layer
          MSYSTEM: MSYS
          # Tell MSYS2 not to 'cd' our startup directory to HOME.
          # Quoted: a bare "yes" is a YAML 1.1 boolean and could reach the
          # environment as "True" instead of the literal string MSYS2 expects.
          CHERE_INVOKING: "yes"
47   - job: cppcheck
48     displayName: 'Static code analysis with cppcheck'
49     pool:
50       vmImage: $(ubuntu_vm)
51     container:
52       image: $(ci_runner_image)
53       options: $(container_option)
54     steps:
55       - script: cppcheck --force --quiet --inline-suppr .
56
57   - job: todo
58     displayName: 'Search for TODO within source tree'
59     pool:
60       vmImage: $(ubuntu_vm)
61     container:
62       image: $(ci_runner_image)
63       options: $(container_option)
64     steps:
65       - script: grep -r TODO .
66       - script: grep -r FIXME .
67       - script: grep -r HACK . | grep -v HACKKIT
68
69   - job: sloccount
70     displayName: 'Some statistics about the code base'
71     pool:
72       vmImage: $(ubuntu_vm)
73     container:
74       image: $(ci_runner_image)
75       options: $(container_option)
76     steps:
77       - script: sloccount .
78
79   - job: maintainers
80     displayName: 'Ensure all configs have MAINTAINERS entries'
81     pool:
82       vmImage: $(ubuntu_vm)
83     container:
84       image: $(ci_runner_image)
85       options: $(container_option)
86     steps:
87       - script: |
88           if [ `./tools/genboardscfg.py -f 2>&1 | wc -l` -ne 0 ]; then exit 1; fi
89
90   - job: tools_only
91     displayName: 'Ensure host tools build'
92     pool:
93       vmImage: $(ubuntu_vm)
94     container:
95       image: $(ci_runner_image)
96       options: $(container_option)
97     steps:
98       - script: |
99           make tools-only_config tools-only -j$(nproc)
100
101   - job: envtools
102     displayName: 'Ensure env tools build'
103     pool:
104       vmImage: $(ubuntu_vm)
105     container:
106       image: $(ci_runner_image)
107       options: $(container_option)
108     steps:
109       - script: |
110           make tools-only_config envtools -j$(nproc)
111
112   - job: utils
113     displayName: 'Run binman, buildman, dtoc and patman testsuites'
114     pool:
115       vmImage: $(ubuntu_vm)
116     steps:
117       - script: |
118           cat << EOF > build.sh
119           set -ex
120           cd ${WORK_DIR}
121           EOF
122           cat << "EOF" >> build.sh
123           git config --global user.name "Azure Pipelines"
124           git config --global user.email bmeng.cn@gmail.com
125           export USER=azure
126           virtualenv /tmp/venv
127           . /tmp/venv/bin/activate
128           pip install pyelftools
129           export UBOOT_TRAVIS_BUILD_DIR=/tmp/.bm-work/sandbox_spl
130           export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
131           export PATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc:${PATH}
132           ./tools/buildman/buildman -o /tmp -P sandbox_spl
133           ./tools/binman/binman --toolpath ${UBOOT_TRAVIS_BUILD_DIR}/tools test
134           ./tools/buildman/buildman -t
135           ./tools/dtoc/dtoc -t
136           ./tools/patman/patman --test
137           EOF
138           cat build.sh
139           # We cannot use "container" like other jobs above, as buildman
140           # seems to hang forever with pre-configured "container" environment
141           docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh
142
143   - job: test_py
144     displayName: 'test.py'
145     pool:
146       vmImage: $(ubuntu_vm)
147     strategy:
148       matrix:
149         sandbox:
150           TEST_PY_BD: "sandbox"
151           BUILDMAN: "^sandbox$"
152         sandbox_clang:
153           TEST_PY_BD: "sandbox"
154           BUILDMAN: "^sandbox$"
155           OVERRIDE: "-O clang-7"
156         sandbox_spl:
157           TEST_PY_BD: "sandbox_spl"
158           TEST_PY_TEST_SPEC: "test_ofplatdata"
159           BUILDMAN: "^sandbox_spl$"
160         sandbox_flattree:
161           TEST_PY_BD: "sandbox_flattree"
162           BUILDMAN: "^sandbox_flattree$"
163         evb_ast2500:
164           TEST_PY_BD: "evb-ast2500"
165           TEST_PY_ID: "--id qemu"
166           BUILDMAN: "^evb-ast2500$"
167         vexpress_ca15_tc2:
168           TEST_PY_BD: "vexpress_ca15_tc2"
169           TEST_PY_ID: "--id qemu"
170           BUILDMAN: "^vexpress_ca15_tc2$"
171         vexpress_ca9x4:
172           TEST_PY_BD: "vexpress_ca9x4"
173           TEST_PY_ID: "--id qemu"
174           BUILDMAN: "^vexpress_ca9x4$"
175         integratorcp_cm926ejs:
176           TEST_PY_BD: "integratorcp_cm926ejs"
177           TEST_PY_ID: "--id qemu"
178           TEST_PY_TEST_SPEC: "not sleep"
179           BUILDMAN: "^integratorcp_cm926ejs$"
180         qemu_arm:
181           TEST_PY_BD: "qemu_arm"
182           TEST_PY_TEST_SPEC: "not sleep"
183           BUILDMAN: "^qemu_arm$"
184         qemu_arm64:
185           TEST_PY_BD: "qemu_arm64"
186           TEST_PY_TEST_SPEC: "not sleep"
187           BUILDMAN: "^qemu_arm64$"
188         qemu_mips:
189           TEST_PY_BD: "qemu_mips"
190           TEST_PY_TEST_SPEC: "not sleep"
191           BUILDMAN: "^qemu_mips$"
192         qemu_mipsel:
193           TEST_PY_BD: "qemu_mipsel"
194           TEST_PY_TEST_SPEC: "not sleep"
195           BUILDMAN: "^qemu_mipsel$"
196         qemu_mips64:
197           TEST_PY_BD: "qemu_mips64"
198           TEST_PY_TEST_SPEC: "not sleep"
199           BUILDMAN: "^qemu_mips64$"
200         qemu_mips64el:
201           TEST_PY_BD: "qemu_mips64el"
202           TEST_PY_TEST_SPEC: "not sleep"
203           BUILDMAN: "^qemu_mips64el$"
204         qemu_ppce500:
205           TEST_PY_BD: "qemu-ppce500"
206           TEST_PY_TEST_SPEC: "not sleep"
207           BUILDMAN: "^qemu-ppce500$"
208         qemu_riscv64:
209           TEST_PY_BD: "qemu-riscv64"
210           TEST_PY_TEST_SPEC: "not sleep"
211           BUILDMAN: "^qemu-riscv64$"
212         qemu_x86:
213           TEST_PY_BD: "qemu-x86"
214           TEST_PY_TEST_SPEC: "not sleep"
215           BUILDMAN: "^qemu-x86$"
216         qemu_x86_64:
217           TEST_PY_BD: "qemu-x86_64"
218           TEST_PY_TEST_SPEC: "not sleep"
219           BUILDMAN: "^qemu-x86_64$"
220         zynq_zc702:
221           TEST_PY_BD: "zynq_zc702"
222           TEST_PY_ID: "--id qemu"
223           TEST_PY_TEST_SPEC: "not sleep"
224           BUILDMAN: "^zynq_zc702$"
225         xilinx_versal_virt:
226           TEST_PY_BD: "xilinx_versal_virt"
227           TEST_PY_ID: "--id qemu"
228           TEST_PY_TEST_SPEC: "not sleep"
229           BUILDMAN: "^xilinx_versal_virt$"
230         xtfpga:
231           TEST_PY_BD: "xtfpga"
232           TEST_PY_ID: "--id qemu"
233           TEST_PY_TEST_SPEC: "not sleep"
234           BUILDMAN: "^xtfpga$"
235     steps:
236       - script: |
237           cat << EOF > test.sh
238           set -ex
239           # make environment variables available as tests are running inside a container
240           export WORK_DIR="${WORK_DIR}"
241           export TEST_PY_BD="${TEST_PY_BD}"
242           export TEST_PY_ID="${TEST_PY_ID}"
243           export TEST_PY_TEST_SPEC="${TEST_PY_TEST_SPEC}"
244           export BUILDMAN="${BUILDMAN}"
245           export OVERRIDE="${OVERRIDE}"
246           EOF
247           cat << "EOF" >> test.sh
248           # the below corresponds to .gitlab-ci.yml "before_script"
249           cd ${WORK_DIR}
250           git clone --depth=1 git://github.com/swarren/uboot-test-hooks.git /tmp/uboot-test-hooks
251           ln -s travis-ci /tmp/uboot-test-hooks/bin/`hostname`
252           ln -s travis-ci /tmp/uboot-test-hooks/py/`hostname`
253           grub-mkimage --prefix=\"\" -o ~/grub_x86.efi -O i386-efi normal  echo lsefimmap lsefi lsefisystab efinet tftp minicmd
254           grub-mkimage --prefix=\"\" -o ~/grub_x64.efi -O x86_64-efi normal  echo lsefimmap lsefi lsefisystab efinet tftp minicmd
255           mkdir ~/grub2-arm
256           cd ~/grub2-arm; wget -O - http://download.opensuse.org/ports/armv7hl/distribution/leap/42.2/repo/oss/suse/armv7hl/grub2-arm-efi-2.02~beta2-87.1.armv7hl.rpm | rpm2cpio | cpio -di
257           mkdir ~/grub2-arm64
258           cd ~/grub2-arm64; wget -O - http://download.opensuse.org/ports/aarch64/distribution/leap/42.2/repo/oss/suse/aarch64/grub2-arm64-efi-2.02~beta2-87.1.aarch64.rpm | rpm2cpio | cpio -di
259           # the below corresponds to .gitlab-ci.yml "script"
260           cd ${WORK_DIR}
261           if [[ "${BUILDMAN}" != "" ]]; then
262               ret=0;
263               tools/buildman/buildman -o /tmp -P -E ${BUILDMAN} ${OVERRIDE} || ret=$?;
264               if [[ $ret -ne 0 && $ret -ne 129 ]]; then
265                   tools/buildman/buildman -o /tmp -sdeP ${BUILDMAN};
266                   exit $ret;
267               fi;
268           fi
269           virtualenv -p /usr/bin/python3 /tmp/venv
270           . /tmp/venv/bin/activate
271           pip install -r test/py/requirements.txt
272           export UBOOT_TRAVIS_BUILD_DIR=/tmp/.bm-work/${TEST_PY_BD};
273           export PATH=/opt/qemu/bin:/tmp/uboot-test-hooks/bin:${PATH};
274           export PYTHONPATH=/tmp/uboot-test-hooks/py/travis-ci;
275           if [[ "${TEST_PY_BD}" != "" ]]; then
276               ./test/py/test.py --bd ${TEST_PY_BD} ${TEST_PY_ID} -k "${TEST_PY_TEST_SPEC:-not a_test_which_does_not_exist}" --build-dir "$UBOOT_TRAVIS_BUILD_DIR";
277               ret=$?;
278               if [[ $ret -ne 0 ]]; then
279                   exit $ret;
280               fi;
281           fi
282           # the below corresponds to .gitlab-ci.yml "after_script"
283           rm -rf ~/grub2* /tmp/uboot-test-hooks /tmp/venv
284           EOF
285           cat test.sh
286           # make current directory writeable to uboot user inside the container
287           # as sandbox testing need create files like spi flash images, etc.
288           # (TODO: clean up this in the future)
289           chmod 777 .
290           docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/test.sh
291
292   - job: build_the_world
293     displayName: 'Build the World'
294     pool:
295       vmImage: $(ubuntu_vm)
296     strategy:
297       # Use almost the same target division in .travis.yml, only merged
298       # 4 small build jobs (arc/microblaze/nds32/xtensa) into one.
299       matrix:
300         arc_microblaze_nds32_xtensa:
301           BUILDMAN: "arc microblaze nds32 xtensa"
302         arm11_arm7_arm920t_arm946es:
303           BUILDMAN: "arm11 arm7 arm920t arm946es"
304         arm926ejs:
305           BUILDMAN: "arm926ejs -x freescale,siemens,at91,kirkwood,spear,omap"
306         at91_non_armv7:
307           BUILDMAN: "at91 -x armv7"
308         at91_non_arm926ejs:
309           BUILDMAN: "at91 -x arm926ejs"
310         boundary_engicam_toradex:
311           BUILDMAN: "boundary engicam toradex"
312         arm_bcm:
313           BUILDMAN: "bcm -x mips"
314         nxp_arm32:
315           BUILDMAN: "freescale -x powerpc,m68k,aarch64,ls101,ls102,ls104,ls108,ls20,lx216"
316         nxp_ls101x:
317           BUILDMAN: "freescale&ls101"
318         nxp_ls102x:
319           BUILDMAN: "freescale&ls102"
320         nxp_ls104x:
321           BUILDMAN: "freescale&ls104"
322         nxp_ls108x:
323           BUILDMAN: "freescale&ls108"
324         nxp_ls20xx:
325           BUILDMAN: "freescale&ls20"
326         nxp_lx216x:
327           BUILDMAN: "freescale&lx216"
328         imx6:
329           BUILDMAN: "mx6 -x boundary,engicam,freescale,technexion,toradex"
330         imx:
331           BUILDMAN: "mx -x mx6,freescale,technexion,toradex"
332         keystone2_keystone3:
333           BUILDMAN: "k2 k3"
334         samsung_socfpga:
335           BUILDMAN: "samsung socfpga"
336         spear:
337           BUILDMAN: "spear"
338         sun4i:
339           BUILDMAN: "sun4i"
340         sun5i:
341           BUILDMAN: "sun5i"
342         sun6i:
343           BUILDMAN: "sun6i"
344         sun7i:
345           BUILDMAN: "sun7i"
346         sun8i_32bit:
347           BUILDMAN: "sun8i&armv7"
348         sun8i_64bit:
349           BUILDMAN: "sun8i&aarch64"
350         sun9i:
351           BUILDMAN: "sun9i"
352         sun50i:
353           BUILDMAN: "sun50i"
354         arm_catch_all:
355           BUILDMAN: "arm -x arm11,arm7,arm9,aarch64,at91,bcm,freescale,kirkwood,mvebu,siemens,tegra,uniphier,mx,samsung,sunxi,am33xx,omap,rockchip,toradex,socfpga,k2,k3,zynq"
356         sandbox_x86:
357           BUILDMAN: "sandbox x86"
358         technexion:
359           BUILDMAN: "technexion"
360         kirkwood:
361           BUILDMAN: "kirkwood"
362         mvebu:
363           BUILDMAN: "mvebu"
364         m68k:
365           BUILDMAN: "m68k"
366         mips:
367           BUILDMAN: "mips"
368         non_fsl_ppc:
369           BUILDMAN: "powerpc -x freescale"
370         mpc85xx_freescale:
371           BUILDMAN: "mpc85xx&freescale -x t208xrdb -x t4qds -x t102* -x p1_p2_rdb_pc -x p1010rdb -x corenet_ds -x b4860qds -x bsc91*"
372         t208xrdb_corenet_ds:
373           BUILDMAN: "t208xrdb corenet_ds"
374         fsl_ppc:
375           BUILDMAN: "t4qds b4860qds mpc83xx&freescale mpc86xx&freescale"
376         t102x:
377           BUILDMAN: "t102*"
378         p1_p2_rdb_pc:
379           BUILDMAN: "p1_p2_rdb_pc"
380         p1010rdb_bsc91:
381           BUILDMAN: "p1010rdb bsc91"
382         siemens:
383           BUILDMAN: "siemens"
384         tegra:
385           BUILDMAN: "tegra -x toradex"
386         am33xx_no_siemens:
387           BUILDMAN: "am33xx -x siemens"
388         omap:
389           BUILDMAN: "omap"
390         uniphier:
391           BUILDMAN: "uniphier"
392         aarch64_catch_all:
393           BUILDMAN: "aarch64 -x bcm,k3,tegra,ls1,ls2,mvebu,uniphier,sunxi,samsung,rockchip,versal,zynq"
394         rockchip:
395           BUILDMAN: "rockchip"
396         sh:
397           BUILDMAN: "sh -x arm"
398         zynq:
399           BUILDMAN: "zynq&armv7"
400         zynqmp_versal:
401           BUILDMAN: "versal|zynqmp&aarch64"
402         riscv:
403           BUILDMAN: "riscv"
404     steps:
405       - script: |
406           cat << EOF > build.sh
407           set -ex
408           cd ${WORK_DIR}
409           # make environment variables available as tests are running inside a container
410           export BUILDMAN="${BUILDMAN}"
411           EOF
412           cat << "EOF" >> build.sh
413           if [[ "${BUILDMAN}" != "" ]]; then
414               ret=0;
415               tools/buildman/buildman -o /tmp -P -E ${BUILDMAN} ${OVERRIDE} || ret=$?;
416               if [[ $ret -ne 0 && $ret -ne 129 ]]; then
417                   tools/buildman/buildman -o /tmp -sdeP ${BUILDMAN};
418                   exit $ret;
419               fi;
420           fi
421           EOF
422           cat build.sh
423           docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh