variables:
  windows_vm: vs2017-win2016
  ubuntu_vm: ubuntu-18.04
  ci_runner_image: trini/u-boot-gitlab-ci-runner:bionic-20200112-21Feb2020
  # Add the '-u 0' option for Azure pipelines, otherwise we get a "permission
  # denied" error when it tries to "useradd -m -u 1001 vsts_azpcontainer",
  # since our $(ci_runner_image) user is not root.
  container_option: -u 0
  work_dir: /u

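# Most of the Linux jobs below run inside $(ci_runner_image). For local
# debugging, roughly the same environment can be started by hand with
# something like the following (a sketch, assuming Docker is installed
# on the host):
#   docker run $(container_option) -it -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash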
jobs:
  - job: tools_only_windows
    displayName: 'Ensure host tools build for Windows'
    pool:
      vmImage: $(windows_vm)
    strategy:
      matrix:
        i686:
          MSYS_DIR: msys32
          BASE_REPO: msys2-ci-base-i686
        x86_64:
          MSYS_DIR: msys64
          BASE_REPO: msys2-ci-base
    steps:
      - script: |
          git clone https://github.com/msys2/$(BASE_REPO).git %CD:~0,2%\$(MSYS_DIR)
        displayName: 'Install MSYS2'
      - script: |
          set PATH=%CD:~0,2%\$(MSYS_DIR)\usr\bin;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem
          %CD:~0,2%\$(MSYS_DIR)\usr\bin\pacman --noconfirm -Syyuu
        displayName: 'Update MSYS2'
      - script: |
          set PATH=%CD:~0,2%\$(MSYS_DIR)\usr\bin;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem
          %CD:~0,2%\$(MSYS_DIR)\usr\bin\pacman --noconfirm --needed -S make gcc bison diffutils openssl-devel
        displayName: 'Install Toolchain'
      - script: |
          set PATH=C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem
          echo make tools-only_defconfig tools-only NO_SDL=1 > build-tools.sh
          %CD:~0,2%\$(MSYS_DIR)\usr\bin\bash -lc "bash build-tools.sh"
        displayName: 'Build Host Tools'
        env:
          # Tell MSYS2 we need a POSIX emulation layer
          MSYSTEM: MSYS
          # Tell MSYS2 not to 'cd' our startup directory to HOME
          CHERE_INVOKING: yes
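          # Roughly the same build can be reproduced by hand from cmd.exe with
          # something like the following (a sketch, assuming MSYS2 is installed
          # in C:\msys64):
          #   set MSYSTEM=MSYS
          #   set CHERE_INVOKING=yes
          #   C:\msys64\usr\bin\bash -lc "make tools-only_defconfig tools-only NO_SDL=1"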

  - job: cppcheck
    displayName: 'Static code analysis with cppcheck'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: cppcheck --force --quiet --inline-suppr .

  - job: htmldocs
    displayName: 'Build HTML documentation'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: make htmldocs

  - job: todo
    displayName: 'Search for TODO within source tree'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: grep -r TODO .
      - script: grep -r FIXME .
      - script: grep -r HACK . | grep -v HACKKIT

  - job: sloccount
    displayName: 'Some statistics about the code base'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: sloccount .

  - job: maintainers
    displayName: 'Ensure all configs have MAINTAINERS entries'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
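          # genboardscfg.py is silent when every board has a MAINTAINERS entry,
          # so any output (warnings about missing entries) fails the check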
          if [ `./tools/genboardscfg.py -f 2>&1 | wc -l` -ne 0 ]; then exit 1; fi

  - job: tools_only
    displayName: 'Ensure host tools build'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          make tools-only_config tools-only -j$(nproc)

  - job: envtools
    displayName: 'Ensure env tools build'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          make tools-only_config envtools -j$(nproc)

  - job: utils
    displayName: 'Run binman, buildman, dtoc and patman testsuites'
    pool:
      vmImage: $(ubuntu_vm)
    steps:
      - script: |
          cat << EOF > build.sh
          set -ex
          cd ${WORK_DIR}
          EOF
          cat << "EOF" >> build.sh
          git config --global user.name "Azure Pipelines"
          git config --global user.email bmeng.cn@gmail.com
          export USER=azure
          virtualenv -p /usr/bin/python3 /tmp/venv
          . /tmp/venv/bin/activate
          pip install pyelftools
          export UBOOT_TRAVIS_BUILD_DIR=/tmp/.bm-work/sandbox_spl
          export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
          export PATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc:${PATH}
          ./tools/buildman/buildman -o /tmp -P sandbox_spl
          ./tools/binman/binman --toolpath ${UBOOT_TRAVIS_BUILD_DIR}/tools test
          ./tools/buildman/buildman -t
          ./tools/dtoc/dtoc -t
          ./tools/patman/patman --test
          EOF
          cat build.sh
          # We cannot use "container" like the other jobs above, as buildman
          # seems to hang forever with the pre-configured "container" environment
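          # so the image is started by hand instead, with the source tree
          # mounted at $(work_dir) where build.sh expects to find it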
          docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh

  - job: test_py
    displayName: 'test.py'
    pool:
      vmImage: $(ubuntu_vm)
    strategy:
      matrix:
        sandbox:
          TEST_PY_BD: "sandbox"
          BUILDMAN: "^sandbox$"
        sandbox_clang:
          TEST_PY_BD: "sandbox"
          BUILDMAN: "^sandbox$"
          OVERRIDE: "-O clang-7"
        sandbox_spl:
          TEST_PY_BD: "sandbox_spl"
          TEST_PY_TEST_SPEC: "test_ofplatdata"
          BUILDMAN: "^sandbox_spl$"
        sandbox_flattree:
          TEST_PY_BD: "sandbox_flattree"
          BUILDMAN: "^sandbox_flattree$"
        evb_ast2500:
          TEST_PY_BD: "evb-ast2500"
          TEST_PY_ID: "--id qemu"
          BUILDMAN: "^evb-ast2500$"
        vexpress_ca15_tc2:
          TEST_PY_BD: "vexpress_ca15_tc2"
          TEST_PY_ID: "--id qemu"
          BUILDMAN: "^vexpress_ca15_tc2$"
        vexpress_ca9x4:
          TEST_PY_BD: "vexpress_ca9x4"
          TEST_PY_ID: "--id qemu"
          BUILDMAN: "^vexpress_ca9x4$"
        integratorcp_cm926ejs:
          TEST_PY_BD: "integratorcp_cm926ejs"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
          BUILDMAN: "^integratorcp_cm926ejs$"
        qemu_arm:
          TEST_PY_BD: "qemu_arm"
          TEST_PY_TEST_SPEC: "not sleep"
          BUILDMAN: "^qemu_arm$"
        qemu_arm64:
          TEST_PY_BD: "qemu_arm64"
          TEST_PY_TEST_SPEC: "not sleep"
          BUILDMAN: "^qemu_arm64$"
        qemu_mips:
          TEST_PY_BD: "qemu_mips"
          TEST_PY_TEST_SPEC: "not sleep"
          BUILDMAN: "^qemu_mips$"
        qemu_mipsel:
          TEST_PY_BD: "qemu_mipsel"
          TEST_PY_TEST_SPEC: "not sleep"
          BUILDMAN: "^qemu_mipsel$"
        qemu_mips64:
          TEST_PY_BD: "qemu_mips64"
          TEST_PY_TEST_SPEC: "not sleep"
          BUILDMAN: "^qemu_mips64$"
        qemu_mips64el:
          TEST_PY_BD: "qemu_mips64el"
          TEST_PY_TEST_SPEC: "not sleep"
          BUILDMAN: "^qemu_mips64el$"
        qemu_ppce500:
          TEST_PY_BD: "qemu-ppce500"
          TEST_PY_TEST_SPEC: "not sleep"
          BUILDMAN: "^qemu-ppce500$"
        qemu_riscv64:
          TEST_PY_BD: "qemu-riscv64"
          TEST_PY_TEST_SPEC: "not sleep"
          BUILDMAN: "^qemu-riscv64$"
        qemu_x86:
          TEST_PY_BD: "qemu-x86"
          TEST_PY_TEST_SPEC: "not sleep"
          BUILDMAN: "^qemu-x86$"
        qemu_x86_64:
          TEST_PY_BD: "qemu-x86_64"
          TEST_PY_TEST_SPEC: "not sleep"
          BUILDMAN: "^qemu-x86_64$"
        zynq_zc702:
          TEST_PY_BD: "zynq_zc702"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
          BUILDMAN: "^zynq_zc702$"
        xilinx_versal_virt:
          TEST_PY_BD: "xilinx_versal_virt"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
          BUILDMAN: "^xilinx_versal_virt$"
        xtfpga:
          TEST_PY_BD: "xtfpga"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
          BUILDMAN: "^xtfpga$"
    steps:
      - script: |
          cat << EOF > test.sh
          set -ex
          # make environment variables available as tests are running inside a container
          export WORK_DIR="${WORK_DIR}"
          export TEST_PY_BD="${TEST_PY_BD}"
          export TEST_PY_ID="${TEST_PY_ID}"
          export TEST_PY_TEST_SPEC="${TEST_PY_TEST_SPEC}"
          export BUILDMAN="${BUILDMAN}"
          export OVERRIDE="${OVERRIDE}"
          EOF
          cat << "EOF" >> test.sh
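          # everything below is appended through a quoted heredoc, so it is
          # written to test.sh verbatim and only evaluated inside the container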
          # the below corresponds to .gitlab-ci.yml "before_script"
          cd ${WORK_DIR}
          git clone --depth=1 git://github.com/swarren/uboot-test-hooks.git /tmp/uboot-test-hooks
          ln -s travis-ci /tmp/uboot-test-hooks/bin/`hostname`
          ln -s travis-ci /tmp/uboot-test-hooks/py/`hostname`
          grub-mkimage --prefix=\"\" -o ~/grub_x86.efi -O i386-efi normal  echo lsefimmap lsefi lsefisystab efinet tftp minicmd
          grub-mkimage --prefix=\"\" -o ~/grub_x64.efi -O x86_64-efi normal  echo lsefimmap lsefi lsefisystab efinet tftp minicmd
          cp /opt/grub/grubriscv64.efi ~/grub_riscv64.efi
          cp /opt/grub/grubaa64.efi ~/grub_arm64.efi
          cp /opt/grub/grubarm.efi ~/grub_arm.efi
          # the below corresponds to .gitlab-ci.yml "script"
          cd ${WORK_DIR}
          if [[ "${BUILDMAN}" != "" ]]; then
              ret=0;
              tools/buildman/buildman -o /tmp -P -E ${BUILDMAN} ${OVERRIDE} || ret=$?;
              if [[ $ret -ne 0 && $ret -ne 129 ]]; then
                  tools/buildman/buildman -o /tmp -sdeP ${BUILDMAN};
                  exit $ret;
              fi;
          fi
          virtualenv -p /usr/bin/python3 /tmp/venv
          . /tmp/venv/bin/activate
          pip install -r test/py/requirements.txt
          export UBOOT_TRAVIS_BUILD_DIR=/tmp/.bm-work/${TEST_PY_BD};
          export PATH=/opt/qemu/bin:/tmp/uboot-test-hooks/bin:${PATH};
          export PYTHONPATH=/tmp/uboot-test-hooks/py/travis-ci;
          if [[ "${TEST_PY_BD}" != "" ]]; then
              ./test/py/test.py --bd ${TEST_PY_BD} ${TEST_PY_ID} -k "${TEST_PY_TEST_SPEC:-not a_test_which_does_not_exist}" --build-dir "$UBOOT_TRAVIS_BUILD_DIR";
              ret=$?;
              if [[ $ret -ne 0 ]]; then
                  exit $ret;
              fi;
          fi
          # the below corresponds to .gitlab-ci.yml "after_script"
          rm -rf /tmp/uboot-test-hooks /tmp/venv
          EOF
          cat test.sh
          # make the current directory writeable for the uboot user inside the container,
          # as sandbox testing needs to create files like SPI flash images, etc.
          # (TODO: clean this up in the future)
          chmod 777 .
          docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/test.sh

  - job: build_the_world
    displayName: 'Build the World'
    pool:
      vmImage: $(ubuntu_vm)
    strategy:
      # Use almost the same target division as in .travis.yml, only merging
      # 4 small build jobs (arc/microblaze/nds32/xtensa) into one.
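      # Each matrix entry sets BUILDMAN to a buildman board-selection expression:
      # separate terms are ORed together, '&' requires both sides to match and
      # '-x' excludes matching boards (a rough summary; tools/buildman/README
      # has the exact rules).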
      matrix:
        arc_microblaze_nds32_xtensa:
          BUILDMAN: "arc microblaze nds32 xtensa"
        arm11_arm7_arm920t_arm946es:
          BUILDMAN: "arm11 arm7 arm920t arm946es"
        arm926ejs:
          BUILDMAN: "arm926ejs -x freescale,siemens,at91,kirkwood,spear,omap"
        at91_non_armv7:
          BUILDMAN: "at91 -x armv7"
        at91_non_arm926ejs:
          BUILDMAN: "at91 -x arm926ejs"
        boundary_engicam_toradex:
          BUILDMAN: "boundary engicam toradex"
        arm_bcm:
          BUILDMAN: "bcm -x mips"
        nxp_arm32:
          BUILDMAN: "freescale -x powerpc,m68k,aarch64,ls101,ls102,ls104,ls108,ls20,lx216"
        nxp_ls101x:
          BUILDMAN: "freescale&ls101"
        nxp_ls102x:
          BUILDMAN: "freescale&ls102"
        nxp_ls104x:
          BUILDMAN: "freescale&ls104"
        nxp_ls108x:
          BUILDMAN: "freescale&ls108"
        nxp_ls20xx:
          BUILDMAN: "freescale&ls20"
        nxp_lx216x:
          BUILDMAN: "freescale&lx216"
        imx6:
          BUILDMAN: "mx6 -x boundary,engicam,freescale,technexion,toradex"
        imx:
          BUILDMAN: "mx -x mx6,freescale,technexion,toradex"
        keystone2_keystone3:
          BUILDMAN: "k2 k3"
        samsung_socfpga:
          BUILDMAN: "samsung socfpga"
        spear:
          BUILDMAN: "spear"
        sun4i:
          BUILDMAN: "sun4i"
        sun5i:
          BUILDMAN: "sun5i"
        sun6i:
          BUILDMAN: "sun6i"
        sun7i:
          BUILDMAN: "sun7i"
        sun8i_32bit:
          BUILDMAN: "sun8i&armv7"
        sun8i_64bit:
          BUILDMAN: "sun8i&aarch64"
        sun9i:
          BUILDMAN: "sun9i"
        sun50i:
          BUILDMAN: "sun50i"
        arm_catch_all:
          BUILDMAN: "arm -x arm11,arm7,arm9,aarch64,at91,bcm,freescale,kirkwood,mvebu,siemens,tegra,uniphier,mx,samsung,sunxi,am33xx,omap,rk,toradex,socfpga,k2,k3,zynq"
        sandbox_x86:
          BUILDMAN: "sandbox x86"
        technexion:
          BUILDMAN: "technexion"
        kirkwood:
          BUILDMAN: "kirkwood"
        mvebu:
          BUILDMAN: "mvebu"
        m68k:
          BUILDMAN: "m68k"
        mips:
          BUILDMAN: "mips"
        non_fsl_ppc:
          BUILDMAN: "powerpc -x freescale"
        mpc85xx_freescale:
          BUILDMAN: "mpc85xx&freescale -x t208xrdb -x t4qds -x t102* -x p1_p2_rdb_pc -x p1010rdb -x corenet_ds -x b4860qds -x bsc91*"
        t208xrdb_corenet_ds:
          BUILDMAN: "t208xrdb corenet_ds"
        fsl_ppc:
          BUILDMAN: "t4qds b4860qds mpc83xx&freescale mpc86xx&freescale"
        t102x:
          BUILDMAN: "t102*"
        p1_p2_rdb_pc:
          BUILDMAN: "p1_p2_rdb_pc"
        p1010rdb_bsc91:
          BUILDMAN: "p1010rdb bsc91"
        siemens:
          BUILDMAN: "siemens"
        tegra:
          BUILDMAN: "tegra -x toradex"
        am33xx_no_siemens:
          BUILDMAN: "am33xx -x siemens"
        omap:
          BUILDMAN: "omap"
        uniphier:
          BUILDMAN: "uniphier"
        aarch64_catch_all:
          BUILDMAN: "aarch64 -x bcm,k3,tegra,ls1,ls2,lx216,mvebu,uniphier,sunxi,samsung,socfpga,rk,versal,zynq"
        rockchip:
          BUILDMAN: "rk"
        sh:
          BUILDMAN: "sh -x arm"
        zynq:
          BUILDMAN: "zynq&armv7"
        zynqmp_versal:
          BUILDMAN: "versal|zynqmp&aarch64"
        riscv:
          BUILDMAN: "riscv"
    steps:
      - script: |
          cat << EOF > build.sh
          set -ex
          cd ${WORK_DIR}
          # make environment variables available as the build is running inside a container
          export BUILDMAN="${BUILDMAN}"
          EOF
          cat << "EOF" >> build.sh
          if [[ "${BUILDMAN}" != "" ]]; then
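              # buildman's exit code 129 is tolerated here; on any other
              # failure, re-run buildman with -sdeP to print a summary of the
              # errors and then propagate the original exit code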
              ret=0;
              tools/buildman/buildman -o /tmp -P -E ${BUILDMAN} ${OVERRIDE} || ret=$?;
              if [[ $ret -ne 0 && $ret -ne 129 ]]; then
                  tools/buildman/buildman -o /tmp -sdeP ${BUILDMAN};
                  exit $ret;
              fi;
          fi
          EOF
          cat build.sh
          docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh