.azure-pipelines.yml

variables:
  windows_vm: windows-2019
  ubuntu_vm: ubuntu-22.04
  macos_vm: macOS-12
  ci_runner_image: trini/u-boot-gitlab-ci-runner:jammy-20230624-20Jul2023
  # Add '-u 0' options for Azure pipelines, otherwise we get "permission
  # denied" error when it tries to "useradd -m -u 1001 vsts_azpcontainer",
  # since our $(ci_runner_image) user is not root.
  container_option: -u 0
  work_dir: /u
stages:
- stage: testsuites
  jobs:
  - job: tools_only_windows
    displayName: 'Ensure host tools build for Windows'
    pool:
      vmImage: $(windows_vm)
    steps:
      - powershell: |
          (New-Object Net.WebClient).DownloadFile("https://github.com/msys2/msys2-installer/releases/download/2021-06-04/msys2-base-x86_64-20210604.sfx.exe", "sfx.exe")
        displayName: 'Install MSYS2'
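      # Note: the empty 'bash -lc " "' run below just initializes the fresh
      # MSYS2 install; 'pacman -Syuu' is run twice because the first pass may
      # only upgrade the core runtime before the remaining packages can be
      # updated.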
      - script: |
          sfx.exe -y -o%CD:~0,2%\
          %CD:~0,2%\msys64\usr\bin\bash -lc " "
          %CD:~0,2%\msys64\usr\bin\bash -lc "pacman --noconfirm -Syuu"
          %CD:~0,2%\msys64\usr\bin\bash -lc "pacman --noconfirm -Syuu"
        displayName: 'Update MSYS2'
      - script: |
          %CD:~0,2%\msys64\usr\bin\bash -lc "pacman --noconfirm --needed -Sy make gcc bison flex diffutils openssl-devel libgnutls-devel libutil-linux-devel"
        displayName: 'Install Toolchain'
      - script: |
          echo make tools-only_defconfig tools-only > build-tools.sh
          %CD:~0,2%\msys64\usr\bin\bash -lc "bash build-tools.sh"
        displayName: 'Build Host Tools'
        env:
          # Tell MSYS2 we need a POSIX emulation layer
          MSYSTEM: MSYS
          # Tell MSYS2 not to 'cd' our startup directory to HOME
          CHERE_INVOKING: yes
  - job: tools_only_macOS
    displayName: 'Ensure host tools build for macOS X'
    pool:
      vmImage: $(macos_vm)
    steps:
      - script: brew install make ossp-uuid
        displayName: Brew install dependencies
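      # HOSTCFLAGS/HOSTLDFLAGS point the build at Homebrew's openssl@1.1; the
      # /usr/local prefix assumes an Intel-based runner (Apple-silicon Homebrew
      # installs under /opt/homebrew instead).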
      - script: |
          gmake tools-only_config tools-only \
            HOSTCFLAGS="-I/usr/local/opt/openssl@1.1/include" \
            HOSTLDFLAGS="-L/usr/local/opt/openssl@1.1/lib" \
            -j$(sysctl -n hw.logicalcpu)
        displayName: 'Perform tools-only build'
  - job: check_for_new_CONFIG_symbols_outside_Kconfig
    displayName: 'Check for new CONFIG symbols outside Kconfig'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      # If grep succeeds and finds a match the test fails as we should
      # have no matches.
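      # The ':^' pathspecs exclude directories that legitimately contain such
      # lines (documentation, device trees, Kconfig internals and host tools).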
      - script: git grep -E '^#[[:blank:]]*(define|undef)[[:blank:]]*CONFIG_'
          :^doc/ :^arch/arm/dts/ :^scripts/kconfig/lkc.h
          :^include/linux/kconfig.h :^tools/ && exit 1 || exit 0
  - job: cppcheck
    displayName: 'Static code analysis with cppcheck'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
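      # --force checks all preprocessor configurations and --inline-suppr
      # honours suppression comments in the sources.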
      - script: cppcheck -j$(nproc) --force --quiet --inline-suppr .
  - job: docs
    displayName: 'Build documentation'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
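      # Sphinx and its dependencies are installed into a throwaway virtualenv;
      # KDOC_WERROR=1 is intended to make kernel-doc warnings fail the build.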
      - script: |
          virtualenv -p /usr/bin/python3 /tmp/venvhtml
          . /tmp/venvhtml/bin/activate
          pip install -r doc/sphinx/requirements.txt
          make htmldocs KDOC_WERROR=1
          make infodocs
  - job: todo
    displayName: 'Search for TODO within source tree'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: grep -r TODO .
      - script: grep -r FIXME .
      - script: grep -r HACK . | grep -v HACKKIT
  - job: sloccount
    displayName: 'Some statistics about the code base'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: sloccount .
  - job: maintainers
    displayName: 'Ensure all configs have MAINTAINERS entries'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
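      # '|| exit 0' keeps this check advisory for now: missing MAINTAINERS
      # entries are reported in the log but do not fail the job.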
      - script: |
          ./tools/buildman/buildman --maintainer-check || exit 0
  - job: tools_only
    displayName: 'Ensure host tools build'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          make tools-only_config tools-only -j$(nproc)
  - job: envtools
    displayName: 'Ensure env tools build'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          make tools-only_config envtools -j$(nproc)
  - job: utils
    displayName: 'Run binman, buildman, dtoc, Kconfig and patman testsuites'
    pool:
      vmImage: $(ubuntu_vm)
    steps:
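      # build.sh is generated on the host and then executed inside the CI
      # image via a plain 'docker run' (see the comment above the docker
      # invocation below for why the 'container' resource is not used here).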
      - script: |
          cat << "EOF" > build.sh
          cd $(work_dir)
          git config --global user.name "Azure Pipelines"
          git config --global user.email bmeng.cn@gmail.com
          git config --global --add safe.directory $(work_dir)
          export USER=azure
          virtualenv -p /usr/bin/python3 /tmp/venv
          . /tmp/venv/bin/activate
          pip install -r test/py/requirements.txt
          pip install -r tools/buildman/requirements.txt
          export UBOOT_TRAVIS_BUILD_DIR=/tmp/sandbox_spl
          export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
          export PATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc:${PATH}
          ./tools/buildman/buildman -T0 -o ${UBOOT_TRAVIS_BUILD_DIR} -w --board sandbox_spl
          set -ex
          ./tools/binman/binman --toolpath ${UBOOT_TRAVIS_BUILD_DIR}/tools test
          ./tools/buildman/buildman -t
          ./tools/dtoc/dtoc -t
          ./tools/patman/patman test
          make O=${UBOOT_TRAVIS_BUILD_DIR} testconfig
          EOF
          cat build.sh
          # We cannot use "container" like the other jobs above, as buildman
          # seems to hang forever in the pre-configured "container" environment
          docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh
  - job: pylint
    displayName: Check for any pylint regressions
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
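      # Pylint is pinned and the docparams extension is enabled via a local
      # .pylintrc so that missing parameter documentation is reported too.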
      - script: |
          git config --global --add safe.directory $(work_dir)
          export USER=azure
          pip install -r test/py/requirements.txt
          pip install -r tools/buildman/requirements.txt
          pip install asteval pylint==2.12.2 pyopenssl
          export PATH=${PATH}:~/.local/bin
          echo "[MASTER]" >> .pylintrc
          echo "load-plugins=pylint.extensions.docparams" >> .pylintrc
          export UBOOT_TRAVIS_BUILD_DIR=/tmp/sandbox_spl
          ./tools/buildman/buildman -T0 -o ${UBOOT_TRAVIS_BUILD_DIR} -w --board sandbox_spl
          set -ex
          pylint --version
          export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
          make pylint_err
  - job: check_for_pre_schema_tags
    displayName: 'Check for pre-schema driver model tags'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      # If grep succeeds and finds a match the test fails as we should
      # have no matches.
      - script: git grep u-boot,dm- -- '*.dts*' && exit 1 || exit 0
  - job: check_packing_of_python_tools
    displayName: 'Check we can package the Python tools'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: make pip
- stage: test_py
  jobs:
  - job: test_py
    displayName: 'test.py'
    pool:
      vmImage: $(ubuntu_vm)
    strategy:
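      # Each matrix entry selects a board to build and test: TEST_PY_BD is the
      # board name, TEST_PY_ID an optional '--id' argument for test.py,
      # TEST_PY_TEST_SPEC a pytest '-k' expression, OVERRIDE extra buildman
      # arguments and BUILD_ENV extra environment for the build.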
      matrix:
        sandbox:
          TEST_PY_BD: "sandbox"
        sandbox_clang:
          TEST_PY_BD: "sandbox"
          OVERRIDE: "-O clang-16"
        sandbox_nolto:
          TEST_PY_BD: "sandbox"
          BUILD_ENV: "NO_LTO=1"
        sandbox_spl:
          TEST_PY_BD: "sandbox_spl"
          TEST_PY_TEST_SPEC: "test_ofplatdata or test_handoff or test_spl"
        sandbox_vpl:
          TEST_PY_BD: "sandbox_vpl"
          TEST_PY_TEST_SPEC: "vpl or test_spl"
        sandbox_noinst:
          TEST_PY_BD: "sandbox_noinst"
          TEST_PY_TEST_SPEC: "test_ofplatdata or test_handoff or test_spl"
        sandbox_flattree:
          TEST_PY_BD: "sandbox_flattree"
        sandbox_trace:
          TEST_PY_BD: "sandbox"
          BUILD_ENV: "FTRACE=1 NO_LTO=1"
          TEST_PY_TEST_SPEC: "trace"
          OVERRIDE: "-a CONFIG_TRACE=y -a CONFIG_TRACE_EARLY=y -a CONFIG_TRACE_EARLY_SIZE=0x01000000"
        coreboot:
          TEST_PY_BD: "coreboot"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        evb_ast2500:
          TEST_PY_BD: "evb-ast2500"
          TEST_PY_ID: "--id qemu"
        evb_ast2600:
          TEST_PY_BD: "evb-ast2600"
          TEST_PY_ID: "--id qemu"
        vexpress_ca9x4:
          TEST_PY_BD: "vexpress_ca9x4"
          TEST_PY_ID: "--id qemu"
        integratorcp_cm926ejs:
          TEST_PY_BD: "integratorcp_cm926ejs"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_arm:
          TEST_PY_BD: "qemu_arm"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_arm64:
          TEST_PY_BD: "qemu_arm64"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_m68k:
          TEST_PY_BD: "M5208EVBE"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
          OVERRIDE: "-a CONFIG_M68K_QEMU=y -a ~CONFIG_MCFTMR"
        qemu_malta:
          TEST_PY_BD: "malta"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_maltael:
          TEST_PY_BD: "maltael"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_malta64:
          TEST_PY_BD: "malta64"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_malta64el:
          TEST_PY_BD: "malta64el"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_ppce500:
          TEST_PY_BD: "qemu-ppce500"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv32:
          TEST_PY_BD: "qemu-riscv32"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv64:
          TEST_PY_BD: "qemu-riscv64"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv32_spl:
          TEST_PY_BD: "qemu-riscv32_spl"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv64_spl:
          TEST_PY_BD: "qemu-riscv64_spl"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_x86:
          TEST_PY_BD: "qemu-x86"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_x86_64:
          TEST_PY_BD: "qemu-x86_64"
          TEST_PY_TEST_SPEC: "not sleep"
        r2dplus_i82557c:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id i82557c_qemu"
        r2dplus_pcnet:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id pcnet_qemu"
        r2dplus_rtl8139:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id rtl8139_qemu"
        r2dplus_tulip:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id tulip_qemu"
        sifive_unleashed_sdcard:
          TEST_PY_BD: "sifive_unleashed"
          TEST_PY_ID: "--id sdcard_qemu"
        sifive_unleashed_spi-nor:
          TEST_PY_BD: "sifive_unleashed"
          TEST_PY_ID: "--id spi-nor_qemu"
        xilinx_zynq_virt:
          TEST_PY_BD: "xilinx_zynq_virt"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        xilinx_versal_virt:
          TEST_PY_BD: "xilinx_versal_virt"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        xtfpga:
          TEST_PY_BD: "xtfpga"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
    steps:
      - script: |
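          # The first here-document (unquoted EOF) is expanded on the Azure
          # agent so the matrix variables are baked into test.sh; the second
          # (quoted "EOF") is copied verbatim and only expands when test.sh
          # runs inside the container.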
          cat << EOF > test.sh
          set -ex
          # make environment variables available as tests are running inside a container
          export WORK_DIR="${WORK_DIR}"
          export TEST_PY_BD="${TEST_PY_BD}"
          export TEST_PY_ID="${TEST_PY_ID}"
          export TEST_PY_TEST_SPEC="${TEST_PY_TEST_SPEC}"
          export OVERRIDE="${OVERRIDE}"
          export BUILD_ENV="${BUILD_ENV}"
          EOF
          cat << "EOF" >> test.sh
          # the below corresponds to .gitlab-ci.yml "before_script"
          cd ${WORK_DIR}
          git config --global --add safe.directory ${WORK_DIR}
          git clone --depth=1 https://source.denx.de/u-boot/u-boot-test-hooks /tmp/uboot-test-hooks
          ln -s travis-ci /tmp/uboot-test-hooks/bin/`hostname`
          ln -s travis-ci /tmp/uboot-test-hooks/py/`hostname`
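          # Build minimal GRUB EFI images for x86; these are used by the
          # EFI/distro boot tests in test.py.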
          grub-mkimage --prefix=\"\" -o ~/grub_x86.efi -O i386-efi normal echo lsefimmap lsefi lsefisystab efinet tftp minicmd
          grub-mkimage --prefix=\"\" -o ~/grub_x64.efi -O x86_64-efi normal echo lsefimmap lsefi lsefisystab efinet tftp minicmd
          if [[ "${TEST_PY_BD}" == "qemu-riscv32_spl" ]]; then
              wget -O - https://github.com/riscv-software-src/opensbi/releases/download/v1.2/opensbi-1.2-rv-bin.tar.xz | tar -C /tmp -xJ;
              export OPENSBI=/tmp/opensbi-1.2-rv-bin/share/opensbi/ilp32/generic/firmware/fw_dynamic.bin;
          fi
          if [[ "${TEST_PY_BD}" == "qemu-riscv64_spl" ]] || [[ "${TEST_PY_BD}" == "sifive_unleashed" ]]; then
              wget -O - https://github.com/riscv-software-src/opensbi/releases/download/v1.2/opensbi-1.2-rv-bin.tar.xz | tar -C /tmp -xJ;
              export OPENSBI=/tmp/opensbi-1.2-rv-bin/share/opensbi/lp64/generic/firmware/fw_dynamic.bin;
          fi
          # the below corresponds to .gitlab-ci.yml "script"
          cd ${WORK_DIR}
          export UBOOT_TRAVIS_BUILD_DIR=/tmp/${TEST_PY_BD};
          if [ -n "${BUILD_ENV}" ]; then
              export ${BUILD_ENV};
          fi
          pip install -r tools/buildman/requirements.txt
          tools/buildman/buildman -o ${UBOOT_TRAVIS_BUILD_DIR} -w -E -W -e --board ${TEST_PY_BD} ${OVERRIDE}
          cp ~/grub_x86.efi ${UBOOT_TRAVIS_BUILD_DIR}/
          cp ~/grub_x64.efi ${UBOOT_TRAVIS_BUILD_DIR}/
          cp /opt/grub/grubriscv64.efi ${UBOOT_TRAVIS_BUILD_DIR}/grub_riscv64.efi
          cp /opt/grub/grubaa64.efi ${UBOOT_TRAVIS_BUILD_DIR}/grub_arm64.efi
          cp /opt/grub/grubarm.efi ${UBOOT_TRAVIS_BUILD_DIR}/grub_arm.efi
          # create sdcard / spi-nor images for sifive unleashed using genimage
          if [[ "${TEST_PY_BD}" == "sifive_unleashed" ]]; then
              mkdir -p root;
              cp ${UBOOT_TRAVIS_BUILD_DIR}/spl/u-boot-spl.bin .;
              cp ${UBOOT_TRAVIS_BUILD_DIR}/u-boot.itb .;
              rm -rf tmp;
              genimage --inputpath . --config board/sifive/unleashed/genimage_sdcard.cfg;
              cp images/sdcard.img ${UBOOT_TRAVIS_BUILD_DIR}/;
              rm -rf tmp;
              genimage --inputpath . --config board/sifive/unleashed/genimage_spi-nor.cfg;
              cp images/spi-nor.img ${UBOOT_TRAVIS_BUILD_DIR}/;
          fi
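          # For coreboot, fetch a prebuilt coreboot.rom plus cbfstool and add
          # the freshly built u-boot.bin as the coreboot payload.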
          if [[ "${TEST_PY_BD}" == "coreboot" ]]; then
              wget -O - "https://drive.google.com/uc?id=1uJ2VkUQ8czWFZmhJQ90Tp8V_zrJ6BrBH&export=download" |xz -dc >${UBOOT_TRAVIS_BUILD_DIR}/coreboot.rom;
              wget -O - "https://drive.google.com/uc?id=149Cz-5SZXHNKpi9xg6R_5XITWohu348y&export=download" >cbfstool;
              chmod a+x cbfstool;
              ./cbfstool ${UBOOT_TRAVIS_BUILD_DIR}/coreboot.rom add-flat-binary -f ${UBOOT_TRAVIS_BUILD_DIR}/u-boot.bin -n fallback/payload -c LZMA -l 0x1110000 -e 0x1110000;
          fi
          virtualenv -p /usr/bin/python3 /tmp/venv
          . /tmp/venv/bin/activate
          pip install -r test/py/requirements.txt
          pip install pytest-azurepipelines
          export PATH=/opt/qemu/bin:/tmp/uboot-test-hooks/bin:${PATH};
          export PYTHONPATH=/tmp/uboot-test-hooks/py/travis-ci;
          # "${var:+"-k $var"}" expands to "" if $var is empty, "-k $var" if not
          ./test/py/test.py -ra -o cache_dir="$UBOOT_TRAVIS_BUILD_DIR"/.pytest_cache --bd ${TEST_PY_BD} ${TEST_PY_ID} ${TEST_PY_TEST_SPEC:+"-k ${TEST_PY_TEST_SPEC}"} --build-dir "$UBOOT_TRAVIS_BUILD_DIR" --report-dir "$UBOOT_TRAVIS_BUILD_DIR";
          # the below corresponds to .gitlab-ci.yml "after_script"
          rm -rf /tmp/uboot-test-hooks /tmp/venv
          EOF
          cat test.sh
          # Make the current directory writable by the uboot user inside the
          # container, as sandbox testing needs to create files such as SPI
          # flash images, etc. (TODO: clean this up in the future)
          chmod 777 .
          # Filesystem tests need extra docker args to run
          set --
          if [[ "${TEST_PY_BD}" == "sandbox" ]]; then
              # mount -o loop needs the loop devices
              if modprobe loop; then
                  for d in $(find /dev -maxdepth 1 -name 'loop*'); do
                      set -- "$@" --device $d:$d
                  done
              fi
              # Needed for mount syscall (for guestmount as well)
              set -- "$@" --cap-add SYS_ADMIN
              # Default apparmor profile denies mounts
              set -- "$@" --security-opt apparmor=unconfined
          fi
          # Some tests using libguestfs-tools need the fuse device to run
          docker run "$@" --device /dev/fuse:/dev/fuse -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/test.sh
        retryCountOnTaskFailure: 2 # QEMU may be too slow, etc.
- stage: world_build
  jobs:
  - job: build_the_world
    timeoutInMinutes: 0 # Use the maximum allowed
    displayName: 'Build the World'
    pool:
      vmImage: $(ubuntu_vm)
    strategy:
      # Use almost the same target division as in .travis.yml, with only the
      # three small build jobs (arc/microblaze/xtensa) merged into one.
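      # Each BUILDMAN value is a buildman board-selection expression: listed
      # terms are ORed together, '&' requires a board to match all joined
      # criteria and '-x' excludes matching boards.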
      matrix:
        arc_nios2_m68k_microblaze_xtensa:
          BUILDMAN: "arc nios2 microblaze m68k xtensa"
        amlogic:
          BUILDMAN: "amlogic"
        arm11_arm7_arm920t_arm946es:
          BUILDMAN: "arm11 arm7 arm920t arm946es"
        arm926ejs:
          BUILDMAN: "arm926ejs -x freescale,siemens,at91,kirkwood,omap"
        at91_non_armv7:
          BUILDMAN: "at91 -x armv7"
        at91_non_arm926ejs:
          BUILDMAN: "at91 -x arm926ejs"
        boundary_engicam_toradex:
          BUILDMAN: "boundary engicam toradex"
        arm_bcm:
          BUILDMAN: "bcm -x mips"
        nxp_arm32:
          BUILDMAN: "freescale -x powerpc,m68k,aarch64,ls101,ls102,ls104,ls108,ls20,lx216"
        nxp_ls101x_ls108x:
          BUILDMAN: "freescale&ls101 freescale&ls108"
        nxp_ls102x:
          BUILDMAN: "freescale&ls102 -x keymile"
        nxp_ls104x:
          BUILDMAN: "freescale&ls104"
        nxp_ls20xx_lx216x:
          BUILDMAN: "freescale&ls20 freescale&lx216"
        imx6:
          BUILDMAN: "mx6 -x boundary,engicam,freescale,technexion,toradex"
        imx:
          BUILDMAN: "mx -x mx6,imx8,freescale,technexion,toradex"
        imx8_imx9:
          BUILDMAN: "imx8 imx9 -x engicam,technexion,toradex"
        keymiles_siemens_technexion:
          BUILDMAN: "keymile siemens technexion"
        keystone2_keystone3:
          BUILDMAN: "k2 k3 -x siemens,toradex"
        sandbox_asan:
          BUILDMAN: "sandbox"
          OVERRIDE: "-a ASAN"
        sandbox_clang_asan:
          BUILDMAN: "sandbox"
          OVERRIDE: "-O clang-16 -a ASAN"
        samsung_socfpga_renesas:
          BUILDMAN: "samsung socfpga renesas"
        sun4i_sun9i:
          BUILDMAN: "sun4i sun9i"
        sun5i_sun6i:
          BUILDMAN: "sun5i sun6i"
        sun7i:
          BUILDMAN: "sun7i"
        sun8i:
          BUILDMAN: "sun8i"
        sun50i:
          BUILDMAN: "sun50i"
        arm_catch_all:
          BUILDMAN: "arm -x arm11,arm7,arm9,aarch64,at91,bcm,freescale,kirkwood,mvebu,renesas,siemens,tegra,uniphier,mx,samsung,sunxi,am33xx,omap,toradex,socfpga,k2,k3,zynq"
        sandbox_x86:
          BUILDMAN: "sandbox x86"
        kirkwood_mvebu_uniphier:
          BUILDMAN: "kirkwood mvebu uniphier"
        mips:
          BUILDMAN: "mips"
        powerpc:
          BUILDMAN: "powerpc -x keymile"
        tegra:
          BUILDMAN: "tegra -x toradex"
        am33xx_omap:
          BUILDMAN: "am33xx omap -x siemens"
        aarch64_catch_all:
          BUILDMAN: "aarch64 -x amlogic,bcm,imx8,imx9,k3,tegra,ls1,ls2,lx216,mvebu,uniphier,renesas,sunxi,samsung,socfpga,rk,versal,zynq"
        rk_non_rockchip_64bit:
          BUILDMAN: "rk&aarch64 -x rockchip"
        rk_rockchip_64bit:
          BUILDMAN: "rk&aarch64&rockchip"
        zynq_zynqmp_versal:
          BUILDMAN: "zynq&armv7 versal zynqmp&aarch64"
        riscv:
          BUILDMAN: "riscv"
    steps:
      - script: |
          cat << EOF > build.sh
          set -ex
          cd ${WORK_DIR}
          # make environment variables available as the build runs inside a container
          export BUILDMAN="${BUILDMAN}"
          git config --global --add safe.directory ${WORK_DIR}
          pip install -r tools/buildman/requirements.txt
          EOF
          cat << "EOF" >> build.sh
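          # If buildman reports errors, re-run it in summary mode (-se) so the
          # failing boards and their errors appear in the log, then return the
          # original exit code.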
          if [[ "${BUILDMAN}" != "" ]]; then
              ret=0;
              tools/buildman/buildman -o /tmp -PEWM ${BUILDMAN} ${OVERRIDE} || ret=$?;
              if [[ $ret -ne 0 ]]; then
                  tools/buildman/buildman -o /tmp -seP ${BUILDMAN};
                  exit $ret;
              fi;
          fi
          EOF
          cat build.sh
          docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh