Compare commits
	
		
			254 Commits
		
	
	
		
			mesa-22.3.
			...
			texman_0_1
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | bcc4804410 | ||
|  | a03b239041 | ||
|  | 4890c7ed87 | ||
|  | 17c33d6f04 | ||
|  | 5ab599600e | ||
|  | db4b6fd759 | ||
|  | dbb2f30ad3 | ||
|  | 9c84dfe156 | ||
|  | 8d86325516 | ||
|  | 8ecd83f6cd | ||
|  | cff7e025ed | ||
|  | 3de9a9bd1d | ||
|  | e6cae6e6f6 | ||
|  | 9cf5945b83 | ||
|  | d6ddc33f07 | ||
|  | ec32d129b7 | ||
|  | cd41c27446 | ||
|  | af6a480117 | ||
|  | 14e9700621 | ||
|  | 7fb177c367 | ||
|  | d65cda4ce3 | ||
|  | 460a375d85 | ||
|  | c3c5652602 | ||
|  | e1998baef8 | ||
|  | 3e980901b0 | ||
|  | 14fe63a12b | ||
|  | fbd147c0b5 | ||
|  | 7b7c54e94c | ||
|  | 84dd9c8112 | ||
|  | bba85343e7 | ||
|  | 6484b373c3 | ||
|  | 245a3c54c7 | ||
|  | 1266e633cc | ||
|  | 86042f53f2 | ||
|  | 068062f997 | ||
|  | c62af8e3b7 | ||
|  | 8e5650d7f4 | ||
|  | 65f1cf2cbf | ||
|  | f04f5e990a | ||
|  | 1854ebe77e | ||
|  | 3500f2e3c4 | ||
|  | cfeefc9b81 | ||
|  | 3ad6adfb57 | ||
|  | 8a126f1166 | ||
|  | 6315aabcf2 | ||
|  | a8c2344364 | ||
|  | 2929b2569b | ||
|  | f14790d7b0 | ||
|  | 8fb2e61801 | ||
|  | b3c94f9d9d | ||
|  | 5b889f7f2d | ||
|  | 0f531b5b48 | ||
|  | c6482c0d41 | ||
|  | b71748efd1 | ||
|  | 1c4b9edc41 | ||
|  | 2b464d93d1 | ||
|  | 497f80ac34 | ||
|  | 2be748769b | ||
|  | 45a1083ee7 | ||
|  | 3a45baa70d | ||
|  | 0bc3a7ac2e | ||
|  | 08f0579176 | ||
|  | d689cd0715 | ||
|  | e957f39f67 | ||
|  | 416f09d3e6 | ||
|  | 40cc5d36fd | ||
|  | 62f9613dbc | ||
|  | 7f31255eb2 | ||
|  | 78382b4bbe | ||
|  | bd8e90857c | ||
|  | b7cc448ff8 | ||
|  | 0ea582acbe | ||
|  | 05c824db87 | ||
|  | 829f6909bd | ||
|  | 34f1ebc72f | ||
|  | c6e586ee0c | ||
|  | 9180d932c6 | ||
|  | 95df1b67a2 | ||
|  | bc400c3f30 | ||
|  | 3a3db0e9ec | ||
|  | 613e395ab8 | ||
|  | 86c5de8b85 | ||
|  | 44c8c42ab9 | ||
|  | 31a82a663c | ||
|  | dafdf6512e | ||
|  | 9caacb015a | ||
|  | 2887770534 | ||
|  | fb440c9f82 | ||
|  | 576c9c310f | ||
|  | ec1db9db1f | ||
|  | e1f70cf5e2 | ||
|  | a7cdbf5c38 | ||
|  | fe37adfde3 | ||
|  | 315396ac6c | ||
|  | a7252f42ae | ||
|  | bffc66109e | ||
|  | 0089c7f4ee | ||
|  | 54f435340b | ||
|  | 66d887d605 | ||
|  | c6dc5cc8ab | ||
|  | e0412bb726 | ||
|  | cc5cbd1bca | ||
|  | 93fbfa5781 | ||
|  | 30709caad6 | ||
|  | a647198f65 | ||
|  | 7ad6ea6ff2 | ||
|  | ceee2c45cd | ||
|  | 26dc161fde | ||
|  | b176ef05f2 | ||
|  | 6d6e7a08c7 | ||
|  | eb75ec2349 | ||
|  | 2a8bd4e329 | ||
|  | c247268499 | ||
|  | b2cb8920c2 | ||
|  | af4d93f256 | ||
|  | c8dd839acb | ||
|  | e43c3c38fb | ||
|  | 9194782fdc | ||
|  | ec36d5b537 | ||
|  | 6e5da1a860 | ||
|  | 21f6e8a2d0 | ||
|  | 18772c9e88 | ||
|  | 5df4283b53 | ||
|  | 6734bab3b2 | ||
|  | 0a5fea5f96 | ||
|  | 9b2762d7b9 | ||
|  | 571bf7a71b | ||
|  | 4ab3b9aa76 | ||
|  | 88c018ee47 | ||
|  | cead52ef68 | ||
|  | 9cf0ed6059 | ||
|  | 2c1c8dccf7 | ||
|  | b87f9f6056 | ||
|  | 427bf0171d | ||
|  | 98c5445c75 | ||
|  | 15ffb3bbab | ||
|  | 97b570b999 | ||
|  | e8e6374a16 | ||
|  | d9f31f16c0 | ||
|  | 1173198f51 | ||
|  | 01a56d23e6 | ||
|  | bf050799ac | ||
|  | 3697eca493 | ||
|  | de2b3506ea | ||
|  | bfbe2eb6e9 | ||
|  | 8bd395f601 | ||
|  | 735925d15b | ||
|  | 448a3456a9 | ||
|  | e176b11b74 | ||
|  | cdc8771893 | ||
|  | 789399d3f4 | ||
|  | dfa8624e83 | ||
|  | 36b64bbd4f | ||
|  | 651985f813 | ||
|  | 9f2b49a7b3 | ||
|  | 05c6361b0b | ||
|  | 9621817253 | ||
|  | 7e2e4a6b1a | ||
|  | 656c71ca3f | ||
|  | 6ce7055dcc | ||
|  | c848a65b5e | ||
|  | 1ff99fcd50 | ||
|  | 67342052d2 | ||
|  | ccf52b6784 | ||
|  | 78aa58c914 | ||
|  | 4291f24abd | ||
|  | f4211722f1 | ||
|  | 71380a1325 | ||
|  | 33e55c6a7d | ||
|  | cd32164cc6 | ||
|  | 7c1348ac8f | ||
|  | 2560061cfe | ||
|  | 0c047b8847 | ||
|  | c64a08c286 | ||
|  | bb409e88d8 | ||
|  | af69eefcba | ||
|  | d9c071adec | ||
|  | 833d19c21d | ||
|  | 7245757174 | ||
|  | 3d96cb7d48 | ||
|  | f565ab2dd5 | ||
|  | 7ad0c51f48 | ||
|  | fea684c533 | ||
|  | 4e8845fc0c | ||
|  | ffcc1cdb41 | ||
|  | afc90075ba | ||
|  | 79acea2708 | ||
|  | a12063a540 | ||
|  | 9826d83439 | ||
|  | 84f7ded925 | ||
|  | 59dd8ad752 | ||
|  | 4afd26982e | ||
|  | 17e4a7ea17 | ||
|  | 3d67655cce | ||
|  | b5d7c69884 | ||
|  | b6759e2be7 | ||
|  | af853b0da7 | ||
|  | 8d349798da | ||
|  | 4aea628194 | ||
|  | 008ddf628a | ||
|  | 716496f263 | ||
|  | b3b325e560 | ||
|  | 382ce985ac | ||
|  | 54b7eca7a0 | ||
|  | f74e06bf11 | ||
|  | 9c02649d18 | ||
|  | 2c34704e41 | ||
|  | 85d0041ff0 | ||
|  | 67ff8b354e | ||
|  | 08e7957571 | ||
|  | 9c1f7089d8 | ||
|  | 5381ac5f11 | ||
|  | a6f78d4eee | ||
|  | ae695e3566 | ||
|  | c82c3335e1 | ||
|  | f8246f504b | ||
|  | 499458bcdb | ||
|  | 37a53fd6a0 | ||
|  | 5ec38fb2ea | ||
|  | 11c0215bf8 | ||
|  | 6a13b6c346 | ||
|  | a103097ee5 | ||
|  | 7efad0d84c | ||
|  | 42c88cd072 | ||
|  | 2541c54e79 | ||
|  | 33529e3d8e | ||
|  | 84c1b82081 | ||
|  | a97a1439ae | ||
|  | 2fd2910010 | ||
|  | fbbda155e0 | ||
|  | a49c3c0fae | ||
|  | baf5998d59 | ||
|  | 4c5acef241 | ||
|  | 9839e272cf | ||
|  | b57e79ff14 | ||
|  | 43824acb4e | ||
|  | d8f509e749 | ||
|  | eb91c93c2e | ||
|  | 79de983b6f | ||
|  | fdb3acf016 | ||
|  | 2807d1f58a | ||
|  | 93f913926e | ||
|  | 33ca04f379 | ||
|  | 398cb30c72 | ||
|  | f67bb30314 | ||
|  | 4578d7b9f0 | ||
|  | dcdfc154c3 | ||
|  | 696ba32779 | ||
|  | dcfe55539f | ||
|  | ff84b1f1b2 | ||
|  | 3bca9c47f4 | ||
|  | d4d7fdb43b | ||
|  | d65dab5777 | ||
|  | 638ca019ef | 
| @@ -1,18 +0,0 @@ | |||||||
| ((nil . ((show-trailing-whitespace . t))) |  | ||||||
|  (prog-mode |  | ||||||
|   (indent-tabs-mode . nil) |  | ||||||
|   (tab-width . 8) |  | ||||||
|   (c-basic-offset . 3) |  | ||||||
|   (c-file-style . "stroustrup") |  | ||||||
|   (fill-column . 78) |  | ||||||
|   (eval . (progn |  | ||||||
| 	    (c-set-offset 'case-label '0) |  | ||||||
| 	    (c-set-offset 'innamespace '0) |  | ||||||
| 	    (c-set-offset 'inline-open '0))) |  | ||||||
|   (whitespace-style face indentation) |  | ||||||
|   (whitespace-line-column . 79) |  | ||||||
|   (eval ignore-errors |  | ||||||
|         (require 'whitespace) |  | ||||||
|         (whitespace-mode 1))) |  | ||||||
|  (makefile-mode (indent-tabs-mode . t)) |  | ||||||
|  ) |  | ||||||
| @@ -1,44 +0,0 @@ | |||||||
| # To use this config on you editor, follow the instructions at: |  | ||||||
| # http://editorconfig.org |  | ||||||
|  |  | ||||||
| root = true |  | ||||||
|  |  | ||||||
| [*] |  | ||||||
| charset = utf-8 |  | ||||||
| insert_final_newline = true |  | ||||||
| tab_width = 8 |  | ||||||
|  |  | ||||||
| [*.{c,h,cpp,hpp,cc,hh}] |  | ||||||
| indent_style = space |  | ||||||
| indent_size = 3 |  | ||||||
| max_line_length = 78 |  | ||||||
|  |  | ||||||
| [{Makefile*,*.mk}] |  | ||||||
| indent_style = tab |  | ||||||
|  |  | ||||||
| [*.py] |  | ||||||
| indent_style = space |  | ||||||
| indent_size = 4 |  | ||||||
|  |  | ||||||
| [*.yml] |  | ||||||
| indent_style = space |  | ||||||
| indent_size = 2 |  | ||||||
|  |  | ||||||
| [*.rst] |  | ||||||
| indent_style = space |  | ||||||
| indent_size = 3 |  | ||||||
|  |  | ||||||
| [*.patch] |  | ||||||
| trim_trailing_whitespace = false |  | ||||||
|  |  | ||||||
| [{meson.build,meson_options.txt}] |  | ||||||
| indent_style = space |  | ||||||
| indent_size = 2 |  | ||||||
|  |  | ||||||
| [*.ps1] |  | ||||||
| indent_style = space |  | ||||||
| indent_size = 2 |  | ||||||
|  |  | ||||||
| [*.rs] |  | ||||||
| indent_style = space |  | ||||||
| indent_size = 4 |  | ||||||
							
								
								
									
										6
									
								
								.gitattributes
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										6
									
								
								.gitattributes
									
									
									
									
										vendored
									
									
								
							| @@ -1,6 +0,0 @@ | |||||||
| *.csv eol=crlf |  | ||||||
| * text=auto |  | ||||||
| *.jpg binary |  | ||||||
| *.png binary |  | ||||||
| *.gif binary |  | ||||||
| *.ico binary |  | ||||||
							
								
								
									
										59
									
								
								.github/workflows/macos.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										59
									
								
								.github/workflows/macos.yml
									
									
									
									
										vendored
									
									
								
							| @@ -1,59 +0,0 @@ | |||||||
| name: macOS-CI |  | ||||||
| on: push |  | ||||||
|  |  | ||||||
| permissions: |  | ||||||
|   contents: read |  | ||||||
|  |  | ||||||
| jobs: |  | ||||||
|   macOS-CI: |  | ||||||
|     strategy: |  | ||||||
|       matrix: |  | ||||||
|         glx_option: ['dri', 'xlib'] |  | ||||||
|     runs-on: macos-11 |  | ||||||
|     env: |  | ||||||
|       GALLIUM_DUMP_CPU: true |  | ||||||
|       MESON_EXEC: /Users/runner/Library/Python/3.11/bin/meson |  | ||||||
|     steps: |  | ||||||
|     - name: Checkout |  | ||||||
|       uses: actions/checkout@v3 |  | ||||||
|     - name: Install Dependencies |  | ||||||
|       run: | |  | ||||||
|         cat > Brewfile <<EOL |  | ||||||
|         brew "bison" |  | ||||||
|         brew "expat" |  | ||||||
|         brew "gettext" |  | ||||||
|         brew "libx11" |  | ||||||
|         brew "libxcb" |  | ||||||
|         brew "libxdamage" |  | ||||||
|         brew "libxext" |  | ||||||
|         brew "ninja" |  | ||||||
|         brew "pkg-config" |  | ||||||
|         brew "python@3.10" |  | ||||||
|         EOL |  | ||||||
|  |  | ||||||
|         brew update |  | ||||||
|         brew bundle --verbose |  | ||||||
|     - name: Install Mako and meson |  | ||||||
|       run: pip3 install --user mako meson |  | ||||||
|     - name: Configure |  | ||||||
|       run: | |  | ||||||
|         cat > native_config <<EOL |  | ||||||
|         [binaries] |  | ||||||
|         llvm-config = '/usr/local/opt/llvm/bin/llvm-config' |  | ||||||
|         EOL |  | ||||||
|         $MESON_EXEC . build --native-file=native_config -Dbuild-tests=true -Dosmesa=true -Dgallium-drivers=swrast -Dglx=${{ matrix.glx_option }} |  | ||||||
|     - name: Build |  | ||||||
|       run: $MESON_EXEC compile -C build |  | ||||||
|     - name: Test |  | ||||||
|       run: $MESON_EXEC test -C build --print-errorlogs |  | ||||||
|     - name: Install |  | ||||||
|       run: $MESON_EXEC install -C build --destdir $PWD/install |  | ||||||
|     - name: 'Upload Artifact' |  | ||||||
|       if: always() |  | ||||||
|       uses: actions/upload-artifact@v3 |  | ||||||
|       with: |  | ||||||
|         name: macos-${{ matrix.glx_option }}-result |  | ||||||
|         path: | |  | ||||||
|           build/meson-logs/ |  | ||||||
|           install/ |  | ||||||
|         retention-days: 5 |  | ||||||
							
								
								
									
										4
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -1,4 +0,0 @@ | |||||||
| *.pyc |  | ||||||
| *.pyo |  | ||||||
| *.out |  | ||||||
| /build |  | ||||||
							
								
								
									
										302
									
								
								.gitlab-ci.yml
									
									
									
									
									
								
							
							
						
						
									
										302
									
								
								.gitlab-ci.yml
									
									
									
									
									
								
							| @@ -1,302 +0,0 @@ | |||||||
| variables: |  | ||||||
|   FDO_UPSTREAM_REPO: mesa/mesa |  | ||||||
|   MESA_TEMPLATES_COMMIT: &ci-templates-commit d5aa3941aa03c2f716595116354fb81eb8012acb |  | ||||||
|   CI_PRE_CLONE_SCRIPT: |- |  | ||||||
|           set -o xtrace |  | ||||||
|           wget -q -O download-git-cache.sh ${CI_PROJECT_URL}/-/raw/${CI_COMMIT_SHA}/.gitlab-ci/download-git-cache.sh |  | ||||||
|           bash download-git-cache.sh |  | ||||||
|           rm download-git-cache.sh |  | ||||||
|           set +o xtrace |  | ||||||
|   CI_JOB_JWT_FILE: /minio_jwt |  | ||||||
|   MINIO_HOST: s3.freedesktop.org |  | ||||||
|   # per-pipeline artifact storage on MinIO |  | ||||||
|   PIPELINE_ARTIFACTS_BASE: ${MINIO_HOST}/artifacts/${CI_PROJECT_PATH}/${CI_PIPELINE_ID} |  | ||||||
|   # per-job artifact storage on MinIO |  | ||||||
|   JOB_ARTIFACTS_BASE: ${PIPELINE_ARTIFACTS_BASE}/${CI_JOB_ID} |  | ||||||
|   # reference images stored for traces |  | ||||||
|   PIGLIT_REPLAY_REFERENCE_IMAGES_BASE: "${MINIO_HOST}/mesa-tracie-results/$FDO_UPSTREAM_REPO" |  | ||||||
|   # Individual CI farm status, set to "offline" to disable jobs |  | ||||||
|   # running on a particular CI farm (ie. for outages, etc): |  | ||||||
|   FD_FARM: "online" |  | ||||||
|   COLLABORA_FARM: "online" |  | ||||||
|   MICROSOFT_FARM: "online" |  | ||||||
|   LIMA_FARM: "online" |  | ||||||
|   IGALIA_FARM: "online" |  | ||||||
|   ANHOLT_FARM: "online" |  | ||||||
|  |  | ||||||
| default: |  | ||||||
|   before_script: |  | ||||||
|     - echo -e "\e[0Ksection_start:$(date +%s):unset_env_vars_section[collapsed=true]\r\e[0KUnsetting vulnerable environment variables" |  | ||||||
|     - echo -n "${CI_JOB_JWT}" > "${CI_JOB_JWT_FILE}" |  | ||||||
|     - unset CI_JOB_JWT |  | ||||||
|     - echo -e "\e[0Ksection_end:$(date +%s):unset_env_vars_section\r\e[0K" |  | ||||||
|  |  | ||||||
|   after_script: |  | ||||||
|     - > |  | ||||||
|       set +x |  | ||||||
|  |  | ||||||
|       test -e "${CI_JOB_JWT_FILE}" && |  | ||||||
|       export CI_JOB_JWT="$(<${CI_JOB_JWT_FILE})" && |  | ||||||
|       rm "${CI_JOB_JWT_FILE}" |  | ||||||
|  |  | ||||||
|   # Retry build or test jobs up to twice when the gitlab-runner itself fails somehow. |  | ||||||
|   retry: |  | ||||||
|     max: 2 |  | ||||||
|     when: |  | ||||||
|       - runner_system_failure |  | ||||||
|  |  | ||||||
| include: |  | ||||||
|   - project: 'freedesktop/ci-templates' |  | ||||||
|     ref: ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2 |  | ||||||
|     file: |  | ||||||
|       - '/templates/ci-fairy.yml' |  | ||||||
|   - project: 'freedesktop/ci-templates' |  | ||||||
|     ref: *ci-templates-commit |  | ||||||
|     file: |  | ||||||
|       - '/templates/debian.yml' |  | ||||||
|       - '/templates/fedora.yml' |  | ||||||
|   - local: '.gitlab-ci/image-tags.yml' |  | ||||||
|   - local: '.gitlab-ci/lava/lava-gitlab-ci.yml' |  | ||||||
|   - local: '.gitlab-ci/container/gitlab-ci.yml' |  | ||||||
|   - local: '.gitlab-ci/build/gitlab-ci.yml' |  | ||||||
|   - local: '.gitlab-ci/test/gitlab-ci.yml' |  | ||||||
|   - local: '.gitlab-ci/test-source-dep.yml' |  | ||||||
|   - local: 'src/amd/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/broadcom/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/etnaviv/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/freedreno/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/drivers/crocus/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/drivers/d3d12/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/drivers/i915/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/drivers/lima/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/drivers/llvmpipe/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/drivers/nouveau/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/drivers/softpipe/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/drivers/virgl/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/drivers/zink/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/frontends/lavapipe/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/intel/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/microsoft/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/panfrost/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/virtio/ci/gitlab-ci.yml' |  | ||||||
|  |  | ||||||
| stages: |  | ||||||
|   - sanity |  | ||||||
|   - container |  | ||||||
|   - git-archive |  | ||||||
|   - build-x86_64 |  | ||||||
|   - build-misc |  | ||||||
|   - lint |  | ||||||
|   - amd |  | ||||||
|   - intel |  | ||||||
|   - nouveau |  | ||||||
|   - arm |  | ||||||
|   - broadcom |  | ||||||
|   - freedreno |  | ||||||
|   - etnaviv |  | ||||||
|   - software-renderer |  | ||||||
|   - layered-backends |  | ||||||
|   - deploy |  | ||||||
|  |  | ||||||
|  |  | ||||||
| # YAML anchors for rule conditions |  | ||||||
| # -------------------------------- |  | ||||||
| .rules-anchors: |  | ||||||
|   rules: |  | ||||||
|     # Pipeline for forked project branch |  | ||||||
|     - if: &is-forked-branch '$CI_COMMIT_BRANCH && $CI_PROJECT_NAMESPACE != "mesa"' |  | ||||||
|       when: manual |  | ||||||
|     # Forked project branch / pre-merge pipeline not for Marge bot |  | ||||||
|     - if: &is-forked-branch-or-pre-merge-not-for-marge '$CI_PROJECT_NAMESPACE != "mesa" || ($GITLAB_USER_LOGIN != "marge-bot" && $CI_PIPELINE_SOURCE == "merge_request_event")' |  | ||||||
|       when: manual |  | ||||||
|     # Pipeline runs for the main branch of the upstream Mesa project |  | ||||||
|     - if: &is-mesa-main '$CI_PROJECT_NAMESPACE == "mesa" && $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH && $CI_COMMIT_BRANCH' |  | ||||||
|       when: always |  | ||||||
|     # Post-merge pipeline |  | ||||||
|     - if: &is-post-merge '$CI_PROJECT_NAMESPACE == "mesa" && $CI_COMMIT_BRANCH' |  | ||||||
|       when: on_success |  | ||||||
|     # Post-merge pipeline, not for Marge Bot |  | ||||||
|     - if: &is-post-merge-not-for-marge '$CI_PROJECT_NAMESPACE == "mesa" && $GITLAB_USER_LOGIN != "marge-bot" && $CI_COMMIT_BRANCH' |  | ||||||
|       when: on_success |  | ||||||
|     # Pre-merge pipeline |  | ||||||
|     - if: &is-pre-merge '$CI_PIPELINE_SOURCE == "merge_request_event"' |  | ||||||
|       when: on_success |  | ||||||
|     # Pre-merge pipeline for Marge Bot |  | ||||||
|     - if: &is-pre-merge-for-marge '$GITLAB_USER_LOGIN == "marge-bot" && $CI_PIPELINE_SOURCE == "merge_request_event"' |  | ||||||
|       when: on_success |  | ||||||
|  |  | ||||||
|  |  | ||||||
| .docs-base: |  | ||||||
|   extends: |  | ||||||
|     - .fdo.ci-fairy |  | ||||||
|     - .build-rules |  | ||||||
|   script: |  | ||||||
|   - apk --no-cache add graphviz doxygen |  | ||||||
|   - pip3 install sphinx===5.1.1 breathe===4.34.0 mako===1.2.3 sphinx_rtd_theme===1.0.0 |  | ||||||
|   - docs/doxygen-wrapper.py --out-dir=docs/doxygen_xml |  | ||||||
|   - sphinx-build -W -b html docs public |  | ||||||
|  |  | ||||||
| pages: |  | ||||||
|   extends: .docs-base |  | ||||||
|   stage: deploy |  | ||||||
|   artifacts: |  | ||||||
|     paths: |  | ||||||
|     - public |  | ||||||
|   needs: [] |  | ||||||
|   rules: |  | ||||||
|     - !reference [.no_scheduled_pipelines-rules, rules] |  | ||||||
|     - if: *is-mesa-main |  | ||||||
|       changes: &docs-or-ci |  | ||||||
|       - docs/**/* |  | ||||||
|       - .gitlab-ci.yml |  | ||||||
|       when: always |  | ||||||
|     # Other cases default to never |  | ||||||
|  |  | ||||||
| test-docs: |  | ||||||
|   extends: .docs-base |  | ||||||
|   # Cancel job if a newer commit is pushed to the same branch |  | ||||||
|   interruptible: true |  | ||||||
|   stage: deploy |  | ||||||
|   needs: [] |  | ||||||
|   rules: |  | ||||||
|     - !reference [.no_scheduled_pipelines-rules, rules] |  | ||||||
|     - if: *is-forked-branch |  | ||||||
|       changes: *docs-or-ci |  | ||||||
|       when: manual |  | ||||||
|     # Other cases default to never |  | ||||||
|  |  | ||||||
| test-docs-mr: |  | ||||||
|   extends: |  | ||||||
|     - test-docs |  | ||||||
|   needs: |  | ||||||
|     - sanity |  | ||||||
|   artifacts: |  | ||||||
|     expose_as: 'Documentation preview' |  | ||||||
|     paths: |  | ||||||
|     - public/ |  | ||||||
|   rules: |  | ||||||
|     - if: *is-pre-merge |  | ||||||
|       changes: *docs-or-ci |  | ||||||
|       when: on_success |  | ||||||
|     # Other cases default to never |  | ||||||
|  |  | ||||||
| # When to automatically run the CI for build jobs |  | ||||||
| .build-rules: |  | ||||||
|   rules: |  | ||||||
|     - !reference [.no_scheduled_pipelines-rules, rules] |  | ||||||
|     # If any files affecting the pipeline are changed, build/test jobs run |  | ||||||
|     # automatically once all dependency jobs have passed |  | ||||||
|     - changes: &all_paths |  | ||||||
|       - VERSION |  | ||||||
|       - bin/git_sha1_gen.py |  | ||||||
|       - bin/install_megadrivers.py |  | ||||||
|       - bin/meson_get_version.py |  | ||||||
|       - bin/symbols-check.py |  | ||||||
|       # GitLab CI |  | ||||||
|       - .gitlab-ci.yml |  | ||||||
|       - .gitlab-ci/**/* |  | ||||||
|       # Meson |  | ||||||
|       - meson* |  | ||||||
|       - build-support/**/* |  | ||||||
|       - subprojects/**/* |  | ||||||
|       # Source code |  | ||||||
|       - include/**/* |  | ||||||
|       - src/**/* |  | ||||||
|       when: on_success |  | ||||||
|     # Otherwise, build/test jobs won't run because no rule matched. |  | ||||||
|  |  | ||||||
|  |  | ||||||
| .ci-deqp-artifacts: |  | ||||||
|   artifacts: |  | ||||||
|     name: "mesa_${CI_JOB_NAME}" |  | ||||||
|     when: always |  | ||||||
|     untracked: false |  | ||||||
|     paths: |  | ||||||
|       # Watch out!  Artifacts are relative to the build dir. |  | ||||||
|       # https://gitlab.com/gitlab-org/gitlab-ce/commit/8788fb925706cad594adf6917a6c5f6587dd1521 |  | ||||||
|       - artifacts |  | ||||||
|       - _build/meson-logs/*.txt |  | ||||||
|       - _build/meson-logs/strace |  | ||||||
|  |  | ||||||
| .container-rules: |  | ||||||
|   rules: |  | ||||||
|     - !reference [.no_scheduled_pipelines-rules, rules] |  | ||||||
|     # Run pipeline by default in the main project if any CI pipeline |  | ||||||
|     # configuration files were changed, to ensure docker images are up to date |  | ||||||
|     - if: *is-post-merge |  | ||||||
|       changes: |  | ||||||
|       - .gitlab-ci.yml |  | ||||||
|       - .gitlab-ci/**/* |  | ||||||
|       when: on_success |  | ||||||
|     # Run pipeline by default if it was triggered by Marge Bot, is for a |  | ||||||
|     # merge request, and any files affecting the pipeline were changed |  | ||||||
|     - if: *is-pre-merge-for-marge |  | ||||||
|       changes: |  | ||||||
|         *all_paths |  | ||||||
|       when: on_success |  | ||||||
|     # Run pipeline by default in the main project if it was not triggered by |  | ||||||
|     # Marge Bot, and any files affecting the pipeline were changed |  | ||||||
|     - if: *is-post-merge-not-for-marge |  | ||||||
|       changes: |  | ||||||
|         *all_paths |  | ||||||
|       when: on_success |  | ||||||
|     # Allow triggering jobs manually in other cases if any files affecting the |  | ||||||
|     # pipeline were changed |  | ||||||
|     - changes: |  | ||||||
|         *all_paths |  | ||||||
|       when: manual |  | ||||||
|     # Otherwise, container jobs won't run because no rule matched. |  | ||||||
|  |  | ||||||
| # Git archive |  | ||||||
|  |  | ||||||
| make git archive: |  | ||||||
|   extends: |  | ||||||
|     - .fdo.ci-fairy |  | ||||||
|   stage: git-archive |  | ||||||
|   rules: |  | ||||||
|     - !reference [.scheduled_pipeline-rules, rules] |  | ||||||
|   # ensure we are running on packet |  | ||||||
|   tags: |  | ||||||
|     - packet.net |  | ||||||
|   script: |  | ||||||
|     # Compactify the .git directory |  | ||||||
|     - git gc --aggressive |  | ||||||
|     # compress the current folder |  | ||||||
|     - tar -cvzf ../$CI_PROJECT_NAME.tar.gz . |  | ||||||
|  |  | ||||||
|     - ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" ../$CI_PROJECT_NAME.tar.gz https://$MINIO_HOST/git-cache/$CI_PROJECT_NAMESPACE/$CI_PROJECT_NAME/$CI_PROJECT_NAME.tar.gz |  | ||||||
|  |  | ||||||
|  |  | ||||||
| # Sanity checks of MR settings and commit logs |  | ||||||
| sanity: |  | ||||||
|   extends: |  | ||||||
|     - .fdo.ci-fairy |  | ||||||
|   stage: sanity |  | ||||||
|   rules: |  | ||||||
|     - if: *is-pre-merge |  | ||||||
|       when: on_success |  | ||||||
|     # Other cases default to never |  | ||||||
|   variables: |  | ||||||
|     GIT_STRATEGY: none |  | ||||||
|   script: |  | ||||||
|     # ci-fairy check-commits --junit-xml=check-commits.xml |  | ||||||
|     - ci-fairy check-merge-request --require-allow-collaboration --junit-xml=check-merge-request.xml |  | ||||||
|   artifacts: |  | ||||||
|     when: on_failure |  | ||||||
|     reports: |  | ||||||
|       junit: check-*.xml |  | ||||||
|  |  | ||||||
| # Rules for tests that should not block merging, but should be available to |  | ||||||
| # optionally run with the "play" button in the UI in pre-merge non-marge |  | ||||||
| # pipelines.  This should appear in "extends:" after any includes of |  | ||||||
| # test-source-dep.yml rules, so that these rules replace those. |  | ||||||
| .test-manual-mr: |  | ||||||
|   rules: |  | ||||||
|     - !reference [.no_scheduled_pipelines-rules, rules] |  | ||||||
|     - if: *is-forked-branch-or-pre-merge-not-for-marge |  | ||||||
|       changes: |  | ||||||
|         *all_paths |  | ||||||
|       when: manual |  | ||||||
|   variables: |  | ||||||
|     JOB_TIMEOUT: 80 |  | ||||||
|  |  | ||||||
| @@ -1,17 +0,0 @@ | |||||||
| # Note: skips lists for CI are just a list of lines that, when |  | ||||||
| # non-zero-length and not starting with '#', will regex match to |  | ||||||
| # delete lines from the test list.  Be careful. |  | ||||||
|  |  | ||||||
| # These are tremendously slow (pushing toward a minute), and aren't |  | ||||||
| # reliable to be run in parallel with other tests due to CPU-side timing. |  | ||||||
| dEQP-GLES[0-9]*.functional.flush_finish.* |  | ||||||
|  |  | ||||||
| # piglit: WGL is Windows-only |  | ||||||
| wgl@.* |  | ||||||
|  |  | ||||||
| # These are sensitive to CPU timing, and would need to be run in isolation |  | ||||||
| # on the system rather than in parallel with other tests. |  | ||||||
| glx@glx_arb_sync_control@timing.* |  | ||||||
|  |  | ||||||
| # This test is not built with waffle, while we do build tests with waffle |  | ||||||
| spec@!opengl 1.1@windowoverlap |  | ||||||
| @@ -1,66 +0,0 @@ | |||||||
| version: 1 |  | ||||||
|  |  | ||||||
| # Rules to match for a machine to qualify |  | ||||||
| target: |  | ||||||
| {% if tags %} |  | ||||||
|   tags: |  | ||||||
| {% for tag in tags %} |  | ||||||
|     - '{{ tag | trim }}' |  | ||||||
| {% endfor %} |  | ||||||
| {% endif %} |  | ||||||
|  |  | ||||||
| timeouts: |  | ||||||
|   first_console_activity:  # This limits the time it can take to receive the first console log |  | ||||||
|     minutes: {{ timeout_first_minutes }} |  | ||||||
|     retries: {{ timeout_first_retries }} |  | ||||||
|   console_activity:        # Reset every time we receive a message from the logs |  | ||||||
|     minutes: {{ timeout_minutes }} |  | ||||||
|     retries: {{ timeout_retries }} |  | ||||||
|   boot_cycle: |  | ||||||
|     minutes: {{ timeout_boot_minutes }} |  | ||||||
|     retries: {{ timeout_boot_retries }} |  | ||||||
|   overall:                 # Maximum time the job can take, not overrideable by the "continue" deployment |  | ||||||
|     minutes: {{ timeout_overall_minutes }} |  | ||||||
|     retries: 0 |  | ||||||
|     # no retries possible here |  | ||||||
|  |  | ||||||
| console_patterns: |  | ||||||
|     session_end: |  | ||||||
|         regex: >- |  | ||||||
|           {{ session_end_regex }} |  | ||||||
|     session_reboot: |  | ||||||
|         regex: >- |  | ||||||
|           {{ session_reboot_regex }} |  | ||||||
|     job_success: |  | ||||||
|         regex: >- |  | ||||||
|           {{ job_success_regex }} |  | ||||||
|     job_warn: |  | ||||||
|         regex: >- |  | ||||||
|           {{ job_warn_regex }} |  | ||||||
|  |  | ||||||
| # Environment to deploy |  | ||||||
| deployment: |  | ||||||
|   # Initial boot |  | ||||||
|   start: |  | ||||||
|     kernel: |  | ||||||
|       url: '{{ kernel_url }}' |  | ||||||
|       cmdline: > |  | ||||||
|         SALAD.machine_id={{ '{{' }} machine_id }} |  | ||||||
|         console={{ '{{' }} local_tty_device }},115200 earlyprintk=vga,keep |  | ||||||
|         loglevel={{ log_level }} no_hash_pointers |  | ||||||
|         b2c.service="--privileged --tls-verify=false --pid=host docker://{{ '{{' }} fdo_proxy_registry }}/mupuf/valve-infra/telegraf-container:latest" b2c.hostname=dut-{{ '{{' }} machine.full_name }} |  | ||||||
|         b2c.container="-ti --tls-verify=false docker://{{ '{{' }} fdo_proxy_registry }}/mupuf/valve-infra/machine_registration:latest check" |  | ||||||
|         b2c.ntp_peer=10.42.0.1 b2c.pipefail b2c.cache_device=auto b2c.poweroff_delay={{ poweroff_delay }} |  | ||||||
|         b2c.minio="gateway,{{ '{{' }} minio_url }},{{ '{{' }} job_bucket_access_key }},{{ '{{' }} job_bucket_secret_key }}" |  | ||||||
|         b2c.volume="{{ '{{' }} job_bucket }}-results,mirror=gateway/{{ '{{' }} job_bucket }},pull_on=pipeline_start,push_on=changes,overwrite{% for excl in job_volume_exclusions %},exclude={{ excl }}{% endfor %},expiration=pipeline_end,preserve" |  | ||||||
| {% for volume in volumes %} |  | ||||||
|         b2c.volume={{ volume }} |  | ||||||
| {% endfor %} |  | ||||||
|         b2c.container="-v {{ '{{' }} job_bucket }}-results:{{ working_dir }} -w {{ working_dir }} {% for mount_volume in mount_volumes %} -v {{ mount_volume }}{% endfor %} --tls-verify=false docker://{{ local_container }} {{ container_cmd }}" |  | ||||||
|         {% if cmdline_extras is defined %} |  | ||||||
|         {{ cmdline_extras }} |  | ||||||
|         {% endif %} |  | ||||||
|  |  | ||||||
|     initramfs: |  | ||||||
|       url: '{{ initramfs_url }}' |  | ||||||
|  |  | ||||||
| @@ -1,105 +0,0 @@ | |||||||
#!/usr/bin/env python3

# Copyright © 2022 Valve Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the next
# paragraph) shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.

from jinja2 import Environment, FileSystemLoader
from argparse import ArgumentParser
from os import environ, path
import json


# Render a boot2container (b2c) job file: fill the given jinja2 template with
# the values passed on the command line and write the result next to the
# current working directory, named after the template minus its last suffix.

parser = ArgumentParser()
parser.add_argument('--ci-job-id')
parser.add_argument('--container-cmd')
parser.add_argument('--initramfs-url')
parser.add_argument('--job-success-regex')
parser.add_argument('--job-warn-regex')
parser.add_argument('--kernel-url')
parser.add_argument('--log-level', type=int)
parser.add_argument('--poweroff-delay', type=int)
parser.add_argument('--session-end-regex')
parser.add_argument('--session-reboot-regex')
parser.add_argument('--tags', nargs='?', default='')
parser.add_argument('--template', default='b2c.yml.jinja2.jinja2')
parser.add_argument('--timeout-boot-minutes', type=int)
parser.add_argument('--timeout-boot-retries', type=int)
parser.add_argument('--timeout-first-minutes', type=int)
parser.add_argument('--timeout-first-retries', type=int)
parser.add_argument('--timeout-minutes', type=int)
parser.add_argument('--timeout-overall-minutes', type=int)
parser.add_argument('--timeout-retries', type=int)
parser.add_argument('--job-volume-exclusions', nargs='?', default='')
parser.add_argument('--volume', action='append')
parser.add_argument('--mount-volume', action='append')
parser.add_argument('--local-container', default=environ.get('B2C_LOCAL_CONTAINER', 'alpine:latest'))
parser.add_argument('--working-dir')
args = parser.parse_args()

env = Environment(loader=FileSystemLoader(path.dirname(args.template)),
                  trim_blocks=True, lstrip_blocks=True)

template = env.get_template(path.basename(args.template))

values = {}

# Most options map 1:1 onto template variables; copy them over in bulk
# instead of one hand-written assignment per option.
for opt in ('ci_job_id', 'container_cmd', 'initramfs_url',
            'job_success_regex', 'job_warn_regex', 'kernel_url',
            'log_level', 'poweroff_delay', 'session_end_regex',
            'session_reboot_regex', 'template', 'timeout_boot_minutes',
            'timeout_boot_retries', 'timeout_first_minutes',
            'timeout_first_retries', 'timeout_minutes',
            'timeout_overall_minutes', 'timeout_retries', 'working_dir'):
    values[opt] = getattr(args, opt)

# --tags accepts either a JSON list or a plain comma-separated string.
try:
    values['tags'] = json.loads(args.tags)
except json.decoder.JSONDecodeError:
    values['tags'] = args.tags.split(",")

if len(args.job_volume_exclusions) > 0:
    exclusions = args.job_volume_exclusions.split(",")
    values['job_volume_exclusions'] = [excl for excl in exclusions if len(excl) > 0]
if args.volume is not None:
    values['volumes'] = args.volume
if args.mount_volume is not None:
    values['mount_volumes'] = args.mount_volume

assert len(args.local_container) > 0
values['local_container'] = args.local_container.replace(
    # Use the gateway's pull-through registry cache to reduce load on fd.o.
    'registry.freedesktop.org', '{{ fdo_proxy_registry }}'
)

if 'B2C_KERNEL_CMDLINE_EXTRAS' in environ:
    values['cmdline_extras'] = environ['B2C_KERNEL_CMDLINE_EXTRAS']

# BUG FIX: the output handle used to be closed manually (and not at all if
# render() raised); use a context manager so it is always flushed and closed.
with open(path.splitext(path.basename(args.template))[0], "w") as f:
    f.write(template.render(values))
| @@ -1,2 +0,0 @@ | |||||||
| [*.sh] |  | ||||||
| indent_size = 2 |  | ||||||
| @@ -1,26 +0,0 @@ | |||||||
#!/bin/sh

# This test script groups together a bunch of fast dEQP variant runs
# to amortize the cost of rebooting the board.

set -ex

EXIT=0

# Run reset tests without parallelism:
if ! env \
  DEQP_RESULTS_DIR=results/reset \
  FDO_CI_CONCURRENT=1 \
  DEQP_CASELIST_FILTER='.*reset.*' \
  /install/deqp-runner.sh; then
    EXIT=1
fi

# Then run everything else with parallelism:
if ! env \
  DEQP_RESULTS_DIR=results/nonrobustness \
  DEQP_CASELIST_INV_FILTER='.*reset.*' \
  /install/deqp-runner.sh; then
    EXIT=1
fi

# BUG FIX: EXIT was recorded above but the script previously fell off the
# end and always exited 0, so a failing phase still reported success to CI.
exit $EXIT
| @@ -1,13 +0,0 @@ | |||||||
#!/bin/sh

# Init entrypoint for bare-metal devices; chains the two common init stages.

# First stage: very basic setup to bring up network and /dev etc.  The
# second stage (which runs the actual jobs) only starts if stage 1 worked.
if /init-stage1.sh; then
    /init-stage2.sh
fi

# Keep PID 1 alive until the job would have timed out anyway, so we don't
# spew a "init exited" panic.
sleep 6000
| @@ -1,17 +0,0 @@ | |||||||
#!/bin/bash

# Power off a bare-metal board by disabling its port on the PoE switch over
# SNMP.  Requires BM_POE_ADDRESS (switch host) and BM_POE_INTERFACE (port
# index) from the gitlab-runner environment.

if [ -z "$BM_POE_INTERFACE" ]; then
    echo "Must supply the PoE Interface to power down"
    exit 1
fi

if [ -z "$BM_POE_ADDRESS" ]; then
    echo "Must supply the PoE Switch host"
    exit 1
fi

# Per-port power-admin OID under the Cisco enterprise tree; writing integer 4
# disables power (NOTE(review): presumably Cisco's PoE port MIB — confirm
# against the switch documentation).  The unused SNMP_ON definition was
# removed; only the power-up script needs it.
SNMP_KEY="1.3.6.1.4.1.9.9.402.1.2.1.1.1.$BM_POE_INTERFACE"
SNMP_OFF="i 4"

snmpset -v2c -r 3 -t 30 -cmesaci $BM_POE_ADDRESS $SNMP_KEY $SNMP_OFF
| @@ -1,21 +0,0 @@ | |||||||
#!/bin/bash

# Power-cycle a bare-metal board behind a PoE switch over SNMP: disable the
# port, let the board discharge, then re-enable it.

if [ -z "$BM_POE_INTERFACE" ]; then
    echo "Must supply the PoE Interface to power up"
    exit 1
fi

if [ -z "$BM_POE_ADDRESS" ]; then
    echo "Must supply the PoE Switch host"
    exit 1
fi

set -ex

# Per-port power-admin OID for this switch port; integer 1 = on, 4 = off.
SNMP_KEY="1.3.6.1.4.1.9.9.402.1.2.1.1.1.$BM_POE_INTERFACE"
SNMP_ON="i 1"
SNMP_OFF="i 4"

set_port_power() {
    snmpset -v2c -r 3 -t 10 -cmesaci $BM_POE_ADDRESS $SNMP_KEY $1
}

set_port_power "$SNMP_OFF"
sleep 3s
set_port_power "$SNMP_ON"
| @@ -1,102 +0,0 @@ | |||||||
#!/bin/bash

# Boot script for Chrome OS devices attached to a servo debug connector, using
# NFS and TFTP to boot.

# We're run from the root of the repo, make a helper var for our paths
BM=$CI_PROJECT_DIR/install/bare-metal
CI_COMMON=$CI_PROJECT_DIR/install/common

# Runner config checks
if [ -z "$BM_SERIAL" ]; then
  echo "Must set BM_SERIAL in your gitlab-runner config.toml [[runners]] environment"
  echo "This is the CPU serial device."
  exit 1
fi

# BUG FIX: this check is for BM_SERIAL_EC, but its message used to name
# BM_SERIAL, pointing people at the wrong variable.
if [ -z "$BM_SERIAL_EC" ]; then
  echo "Must set BM_SERIAL_EC in your gitlab-runner config.toml [[runners]] environment"
  echo "This is the EC serial device for controlling board power"
  exit 1
fi

if [ ! -d /nfs ]; then
  echo "NFS rootfs directory needs to be mounted at /nfs by the gitlab runner"
  exit 1
fi

if [ ! -d /tftp ]; then
  echo "TFTP directory for this board needs to be mounted at /tftp by the gitlab runner"
  exit 1
fi

# job config checks
if [ -z "$BM_KERNEL" ]; then
  echo "Must set BM_KERNEL to your board's kernel FIT image"
  exit 1
fi

if [ -z "$BM_ROOTFS" ]; then
  echo "Must set BM_ROOTFS to your board's rootfs directory in the job's variables"
  exit 1
fi

if [ -z "$BM_CMDLINE" ]; then
  echo "Must set BM_CMDLINE to your board's kernel command line arguments"
  exit 1
fi

set -ex

# Clear out any previous run's artifacts.
rm -rf results/
mkdir -p results

# Create the rootfs in the NFS directory.  rm to make sure it's in a pristine
# state, since it's volume-mounted on the host.
rsync -a --delete $BM_ROOTFS/ /nfs/
mkdir -p /nfs/results
. $BM/rootfs-setup.sh /nfs

# Put the kernel/dtb image and the boot command line in the tftp directory for
# the board to find.  For normal Mesa development, we build the kernel and
# store it in the docker container that this script is running in.
#
# However, container builds are expensive, so when you're hacking on the
# kernel, it's nice to be able to skip the half hour container build and plus
# moving that container to the runner.  So, if BM_KERNEL is a URL, fetch it
# instead of looking in the container.  Note that the kernel build should be
# the output of:
#
# make Image.lzma
#
# mkimage \
#  -A arm64 \
#  -f auto \
#  -C lzma \
#  -d arch/arm64/boot/Image.lzma \
#  -b arch/arm64/boot/dts/qcom/sdm845-cheza-r3.dtb \
#  cheza-image.img

rm -rf /tftp/*
if echo "$BM_KERNEL" | grep -q http; then
  apt install -y wget
  wget $BM_KERNEL -O /tftp/vmlinuz
else
  cp $BM_KERNEL /tftp/vmlinuz
fi
echo "$BM_CMDLINE" > /tftp/cmdline

# Drive the boot over the servo serial consoles; exit code 0/1 is the test
# result, anything else is an infrastructure flake handled by the runner.
set +e
python3 $BM/cros_servo_run.py \
        --cpu $BM_SERIAL \
        --ec $BM_SERIAL_EC \
        --test-timeout ${TEST_PHASE_TIMEOUT:-20}
ret=$?
set -e

# Bring artifacts back from the NFS dir to the build dir where gitlab-runner
# will look for them.
cp -Rp /nfs/results/. results/

exit $ret
| @@ -1,180 +0,0 @@ | |||||||
|  |  | ||||||
| #!/usr/bin/env python3 |  | ||||||
| # |  | ||||||
| # Copyright © 2020 Google LLC |  | ||||||
| # |  | ||||||
| # Permission is hereby granted, free of charge, to any person obtaining a |  | ||||||
| # copy of this software and associated documentation files (the "Software"), |  | ||||||
| # to deal in the Software without restriction, including without limitation |  | ||||||
| # the rights to use, copy, modify, merge, publish, distribute, sublicense, |  | ||||||
| # and/or sell copies of the Software, and to permit persons to whom the |  | ||||||
| # Software is furnished to do so, subject to the following conditions: |  | ||||||
| # |  | ||||||
| # The above copyright notice and this permission notice (including the next |  | ||||||
| # paragraph) shall be included in all copies or substantial portions of the |  | ||||||
| # Software. |  | ||||||
| # |  | ||||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |  | ||||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |  | ||||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL |  | ||||||
| # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |  | ||||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |  | ||||||
| # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |  | ||||||
| # IN THE SOFTWARE. |  | ||||||
|  |  | ||||||
| import argparse |  | ||||||
| import queue |  | ||||||
| import re |  | ||||||
| from serial_buffer import SerialBuffer |  | ||||||
| import sys |  | ||||||
| import threading |  | ||||||
|  |  | ||||||
|  |  | ||||||
class CrosServoRun:
    """Drives one boot-and-test cycle of a Chrome OS board through its servo
    debug connector, watching the CPU and EC consoles to classify the run.

    run() returns 0 (job passed), 1 (job failed) or 2 (infrastructure flake;
    the caller should retry the whole boot).
    """

    def __init__(self, cpu, ec, test_timeout):
        # cpu/ec are serial device paths; test_timeout is in seconds.
        self.cpu_ser = SerialBuffer(
            cpu, "results/serial.txt", "R SERIAL-CPU> ")
        # Merge the EC serial into the cpu_ser's line stream so that we can
        # effectively poll on both at the same time and not have to worry
        # about draining two queues.
        self.ec_ser = SerialBuffer(
            ec, "results/serial-ec.txt", "R SERIAL-EC> ", line_queue=self.cpu_ser.line_queue)
        self.test_timeout = test_timeout

    def close(self):
        """Release both serial ports."""
        self.ec_ser.close()
        self.cpu_ser.close()

    def ec_write(self, s):
        """Send a command string to the EC console, echoing it to our log."""
        print("W SERIAL-EC> %s" % s)
        self.ec_ser.serial.write(s.encode())

    def cpu_write(self, s):
        """Send a command string to the CPU console, echoing it to our log."""
        print("W SERIAL-CPU> %s" % s)
        self.cpu_ser.serial.write(s.encode())

    def print_error(self, message):
        """Print message in red so it stands out in the CI job log."""
        RED = '\033[0;31m'
        NO_COLOR = '\033[0m'
        print(RED + message + NO_COLOR)

    def run(self):
        """Reboot the board, wait through the bootloader, then scan the
        kernel/test console output until a result (or known flake) appears."""
        # Flush any partial commands in the EC's prompt, then ask for a reboot.
        self.ec_write("\n")
        self.ec_write("reboot\n")

        bootloader_done = False
        # This is emitted right when the bootloader pauses to check for input.
        # Emit a ^N character to request network boot, because we don't have a
        # direct-to-netboot firmware on cheza.
        for line in self.cpu_ser.lines(timeout=120, phase="bootloader"):
            if re.search("load_archive: loading locale_en.bin", line):
                self.cpu_write("\016")
                bootloader_done = True
                break

            # If the board has a netboot firmware and we made it to booting the
            # kernel, proceed to processing of the test run.
            if re.search("Booting Linux", line):
                bootloader_done = True
                break

            # The Cheza boards have issues with failing to bring up power to
            # the system sometimes, possibly dependent on ambient temperature
            # in the farm.
            if re.search("POWER_GOOD not seen in time", line):
                self.print_error(
                    "Detected intermittent poweron failure, restarting run...")
                return 2

        if not bootloader_done:
            print("Failed to make it through bootloader, restarting run...")
            return 2

        tftp_failures = 0
        for line in self.cpu_ser.lines(timeout=self.test_timeout, phase="test"):
            if re.search("---. end Kernel panic", line):
                return 1

            # The Cheza firmware seems to occasionally get stuck looping in
            # this error state during TFTP booting, possibly based on amount of
            # network traffic around it, but it'll usually recover after a
            # reboot.
            if re.search("R8152: Bulk read error 0xffffffbf", line):
                tftp_failures += 1
                if tftp_failures >= 100:
                    self.print_error(
                        "Detected intermittent tftp failure, restarting run...")
                    return 2

            # There are very infrequent bus errors during power management transitions
            # on cheza, which we don't expect to be the case on future boards.
            if re.search("Kernel panic - not syncing: Asynchronous SError Interrupt", line):
                self.print_error(
                    "Detected cheza power management bus error, restarting run...")
                return 2

            # If the network device dies, it's probably not graphics's fault, just try again.
            if re.search("NETDEV WATCHDOG", line):
                self.print_error(
                    "Detected network device failure, restarting run...")
                return 2

            # These HFI response errors started appearing with the introduction
            # of piglit runs.  CosmicPenguin says:
            #
            # "message ID 106 isn't a thing, so likely what happened is that we
            # got confused when parsing the HFI queue.  If it happened on only
            # one run, then memory corruption could be a possible clue"
            #
            # Given that it seems to trigger randomly near a GPU fault and then
            # break many tests after that, just restart the whole run.
            if re.search("a6xx_hfi_send_msg.*Unexpected message id .* on the response queue", line):
                # NOTE(review): this message duplicates the SError branch above
                # and probably should say "HFI response error" — confirm before
                # changing, since CI tooling may scrape these log lines.
                self.print_error(
                    "Detected cheza power management bus error, restarting run...")
                return 2

            if re.search("coreboot.*bootblock starting", line):
                self.print_error(
                    "Detected spontaneous reboot, restarting run...")
                return 2

            if re.search("arm-smmu 5040000.iommu: TLB sync timed out -- SMMU may be deadlocked", line):
                self.print_error("Detected cheza MMU fail, restarting run...")
                return 2

            # BUG FIX: the pattern was a plain string with an invalid "\S"
            # escape (a SyntaxWarning/DeprecationWarning on modern Python);
            # use a raw string for the regex.
            result = re.search(r"hwci: mesa: (\S*)", line)
            if result:
                if result.group(1) == "pass":
                    return 0
                else:
                    return 1

        self.print_error(
            "Reached the end of the CPU serial log without finding a result")
        return 2
|  |  | ||||||
|  |  | ||||||
def main():
    """Parse the CLI options and retry the servo boot/test cycle until it
    yields a definitive result, which becomes our exit status."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--cpu', type=str,
                        help='CPU Serial device', required=True)
    parser.add_argument(
        '--ec', type=str, help='EC Serial device', required=True)
    parser.add_argument(
        '--test-timeout', type=int, help='Test phase timeout (minutes)', required=True)
    args = parser.parse_args()

    # run() returning 2 means "infrastructure flake, boot again"; any other
    # value is the real pass/fail result to report.
    retval = 2
    while retval == 2:
        servo = CrosServoRun(args.cpu, args.ec, args.test_timeout * 60)
        retval = servo.run()

        # power down the CPU on the device
        servo.ec_write("power off\n")
        servo.close()

    sys.exit(retval)
| @@ -1,10 +0,0 @@ | |||||||
#!/bin/bash

# Switch off the given eth008 relay channel for this runner's board.

relay=$1

[ -n "$relay" ] || { echo "Must supply a relay arg"; exit 1; }

$CI_PROJECT_DIR/install/bare-metal/eth008-power-relay.py $ETH_HOST $ETH_PORT off $relay
| @@ -1,28 +0,0 @@ | |||||||
#!/usr/bin/python3

# Drive a single relay channel on an eth008 Ethernet relay board.
# Usage: eth008-power-relay.py <host> <port> <on|off> <relay-number>
# Exits 1 if the board reports the command failed.

import sys
import socket

host = sys.argv[1]
port = sys.argv[2]
mode = sys.argv[3]
relay = sys.argv[4]
msg = None

# Command byte: 0x20 switches a relay on, anything other than "on" (i.e.
# "off") uses 0x21 to switch it off.
if mode == "on":
    msg = b'\x20'
else:
    msg = b'\x21'

# Relay number, then a trailing byte (NOTE(review): presumably a pulse-time
# field that 0 disables — confirm against the eth008 protocol docs).
msg += int(relay).to_bytes(1, 'big')
msg += b'\x00'

c = socket.create_connection((host, int(port)))
c.sendall(msg)

# The board answers with a single status byte.
data = c.recv(1)
c.close()

# BUG FIX: indexing a bytes object yields an int, so the previous
# `data[0] == b'\x01'` comparison was always False and command failures were
# silently ignored (and an empty reply raised IndexError).  Treat a missing
# reply or a 0x01 status as failure.
if not data or data[0] == 1:
    print('Command failed')
    sys.exit(1)
| @@ -1,12 +0,0 @@ | |||||||
#!/bin/bash

# Power-cycle the given eth008 relay channel: off, settle for 5s, back on.

relay=$1

[ -n "$relay" ] || { echo "Must supply a relay arg"; exit 1; }

POWER_RELAY=$CI_PROJECT_DIR/install/bare-metal/eth008-power-relay.py

$POWER_RELAY $ETH_HOST $ETH_PORT off $relay
sleep 5
$POWER_RELAY $ETH_HOST $ETH_PORT on $relay
| @@ -1,30 +0,0 @@ | |||||||
#!/bin/bash

# Poll a growing log file until it contains one of the expected strings,
# failing if the string found was registered as an error.
#
# Usage: expect-output.sh <file> [-f <wait-string>]... [-e <error-string>]...

set -e

STRINGS=$(mktemp)
ERRORS=$(mktemp)

trap "rm $STRINGS; rm $ERRORS;" EXIT

FILE=$1
shift 1

# -f registers a "wait for this" string; -e registers a string that both ends
# the wait and marks the run as failed.
while getopts "f:e:" opt; do
  case $opt in
    f) echo "$OPTARG" >> "$STRINGS";;
    e) echo "$OPTARG" >> "$STRINGS" ; echo "$OPTARG" >> "$ERRORS";;
  esac
done
shift $((OPTIND -1))

echo "Waiting for $FILE to say one of following strings"
cat "$STRINGS"

# `egrep` is deprecated; `grep -E` is the standard spelling.  Variables are
# now quoted so paths with spaces don't break the match.
while ! grep -E -wf "$STRINGS" "$FILE"; do
  sleep 2
done

if grep -E -wf "$ERRORS" "$FILE"; then
  exit 1
fi
| @@ -1,154 +0,0 @@ | |||||||
#!/bin/bash

# Boot a bare-metal board over fastboot: build an initramfs (or NFS root),
# pack a boot image, and drive the boot/test cycle via fastboot_run.py.

BM=$CI_PROJECT_DIR/install/bare-metal
CI_COMMON=$CI_PROJECT_DIR/install/common

# Runner config checks: we need either a serial device or a script that
# produces the serial log for us.
if [ -z "$BM_SERIAL" -a -z "$BM_SERIAL_SCRIPT" ]; then
  echo "Must set BM_SERIAL OR BM_SERIAL_SCRIPT in your gitlab-runner config.toml [[runners]] environment"
  echo "BM_SERIAL:"
  echo "  This is the serial device to talk to for waiting for fastboot to be ready and logging from the kernel."
  echo "BM_SERIAL_SCRIPT:"
  echo "  This is a shell script to talk to for waiting for fastboot to be ready and logging from the kernel."
  exit 1
fi

if [ -z "$BM_POWERUP" ]; then
  echo "Must set BM_POWERUP in your gitlab-runner config.toml [[runners]] environment"
  echo "This is a shell script that should reset the device and begin its boot sequence"
  echo "such that it pauses at fastboot."
  exit 1
fi

if [ -z "$BM_POWERDOWN" ]; then
  echo "Must set BM_POWERDOWN in your gitlab-runner config.toml [[runners]] environment"
  echo "This is a shell script that should power off the device."
  exit 1
fi

if [ -z "$BM_FASTBOOT_SERIAL" ]; then
  echo "Must set BM_FASTBOOT_SERIAL in your gitlab-runner config.toml [[runners]] environment"
  echo "This must be the a stable-across-resets fastboot serial number."
  exit 1
fi

if [ -z "$BM_KERNEL" ]; then
  echo "Must set BM_KERNEL to your board's kernel vmlinuz or Image.gz in the job's variables:"
  exit 1
fi

if [ -z "$BM_DTB" ]; then
  echo "Must set BM_DTB to your board's DTB file in the job's variables:"
  exit 1
fi

if [ -z "$BM_ROOTFS" ]; then
  echo "Must set BM_ROOTFS to your board's rootfs directory in the job's variables:"
  exit 1
fi

# An NFS root in the kernel command line switches us to the NFS flow below
# instead of packing the rootfs into the initramfs.
if echo $BM_CMDLINE | grep -q "root=/dev/nfs"; then
  BM_FASTBOOT_NFSROOT=1
fi

set -ex

# Clear out any previous run's artifacts.
rm -rf results/
mkdir -p results/

if [ -n "$BM_FASTBOOT_NFSROOT" ]; then
  # Create the rootfs in the NFS directory.  rm to make sure it's in a pristine
  # state, since it's volume-mounted on the host.
  rsync -a --delete $BM_ROOTFS/ /nfs/
  mkdir -p /nfs/results
  . $BM/rootfs-setup.sh /nfs

  # Root on NFS, no need for an initramfs — but fastboot still wants a
  # ramdisk, so hand it an empty gzipped cpio.
  rm -f rootfs.cpio.gz
  touch rootfs.cpio
  gzip rootfs.cpio
else
  # Create the rootfs in a temp dir
  rsync -a --delete $BM_ROOTFS/ rootfs/
  . $BM/rootfs-setup.sh rootfs

  # Finally, pack it up into a cpio rootfs.  Skip the vulkan CTS since none of
  # these devices use it and it would take up space in the initrd.

  if [ -n "$PIGLIT_PROFILES" ]; then
    EXCLUDE_FILTER="deqp|arb_gpu_shader5|arb_gpu_shader_fp64|arb_gpu_shader_int64|glsl-4.[0123456]0|arb_tessellation_shader"
  else
    EXCLUDE_FILTER="piglit|python"
  fi

  pushd rootfs
  find -H | \
    egrep -v "external/(openglcts|vulkancts|amber|glslang|spirv-tools)" |
    egrep -v "traces-db|apitrace|renderdoc" | \
    egrep -v $EXCLUDE_FILTER | \
    cpio -H newc -o | \
    xz --check=crc32 -T4 - > $CI_PROJECT_DIR/rootfs.cpio.gz
  popd
fi

# Make the combined kernel image and dtb for passing to fastboot.  For normal
# Mesa development, we build the kernel and store it in the docker container
# that this script is running in.
#
# However, container builds are expensive, so when you're hacking on the
# kernel, it's nice to be able to skip the half hour container build and plus
# moving that container to the runner.  So, if BM_KERNEL+BM_DTB are URLs,
# fetch them instead of looking in the container.
if echo "$BM_KERNEL $BM_DTB" | grep -q http; then
  apt install -y wget

  wget $BM_KERNEL -O kernel
  wget $BM_DTB -O dtb

  cat kernel dtb > Image.gz-dtb
  rm kernel
else
  cat $BM_KERNEL $BM_DTB > Image.gz-dtb
  cp $BM_DTB dtb
fi

export PATH=$BM:$PATH

mkdir -p artifacts
mkbootimg.py \
  --kernel Image.gz-dtb \
  --ramdisk rootfs.cpio.gz \
  --dtb dtb \
  --cmdline "$BM_CMDLINE" \
  $BM_MKBOOT_PARAMS \
  --header_version 2 \
  -o artifacts/fastboot.img

rm Image.gz-dtb dtb

# Start background command for talking to serial if we have one.
if [ -n "$BM_SERIAL_SCRIPT" ]; then
  $BM_SERIAL_SCRIPT > results/serial-output.txt &

  while [ ! -e results/serial-output.txt ]; do
    sleep 1
  done
fi

# Run the boot; pass/fail (or flake-retry) is decided inside fastboot_run.py,
# and its exit code becomes ours.
set +e
$BM/fastboot_run.py \
  --dev="$BM_SERIAL" \
  --test-timeout ${TEST_PHASE_TIMEOUT:-20} \
  --fbserial="$BM_FASTBOOT_SERIAL" \
  --powerup="$BM_POWERUP" \
  --powerdown="$BM_POWERDOWN"
ret=$?
set -e

if [ -n "$BM_FASTBOOT_NFSROOT" ]; then
  # Bring artifacts back from the NFS dir to the build dir where gitlab-runner
  # will look for them.
  cp -Rp /nfs/results/. results/
fi

exit $ret
| @@ -1,164 +0,0 @@ | |||||||
| #!/usr/bin/env python3 |  | ||||||
| # |  | ||||||
| # Copyright © 2020 Google LLC |  | ||||||
| # |  | ||||||
| # Permission is hereby granted, free of charge, to any person obtaining a |  | ||||||
| # copy of this software and associated documentation files (the "Software"), |  | ||||||
| # to deal in the Software without restriction, including without limitation |  | ||||||
| # the rights to use, copy, modify, merge, publish, distribute, sublicense, |  | ||||||
| # and/or sell copies of the Software, and to permit persons to whom the |  | ||||||
| # Software is furnished to do so, subject to the following conditions: |  | ||||||
| # |  | ||||||
| # The above copyright notice and this permission notice (including the next |  | ||||||
| # paragraph) shall be included in all copies or substantial portions of the |  | ||||||
| # Software. |  | ||||||
| # |  | ||||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |  | ||||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |  | ||||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL |  | ||||||
| # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |  | ||||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |  | ||||||
| # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |  | ||||||
| # IN THE SOFTWARE. |  | ||||||
|  |  | ||||||
| import argparse |  | ||||||
| import subprocess |  | ||||||
| import re |  | ||||||
| from serial_buffer import SerialBuffer |  | ||||||
| import sys |  | ||||||
| import threading |  | ||||||
|  |  | ||||||
|  |  | ||||||
class FastbootRun:
    """Drives one bare-metal test run over fastboot.

    Powers the board up, waits on the serial console for the bootloader's
    fastboot prompt, boots the test image, then scans the serial output for
    a result.  run() returns 0 on pass, 1 on fail, and 2 when the attempt
    should be retried (timeout, crash during boot, spontaneous reboot,
    lockup, ...).
    """

    def __init__(self, args, test_timeout):
        self.powerup = args.powerup
        # Mirror everything read from the console into the results dir.
        self.ser = SerialBuffer(
            args.dev, "results/serial-output.txt", "R SERIAL> ")
        self.fastboot = "fastboot boot -s {ser} artifacts/fastboot.img".format(
            ser=args.fbserial)
        self.test_timeout = test_timeout

    def close(self):
        # Release the serial port so a fresh FastbootRun can reopen it.
        self.ser.close()

    def print_error(self, message):
        """Prints message in red so it stands out in the CI log."""
        RED = '\033[0;31m'
        NO_COLOR = '\033[0m'
        print(RED + message + NO_COLOR)

    def logged_system(self, cmd, timeout=60):
        """Runs cmd through the shell, echoing it first.

        Returns the command's exit status, or 2 (retry) if it did not
        finish within timeout seconds.
        """
        print("Running '{}'".format(cmd))
        try:
            return subprocess.call(cmd, shell=True, timeout=timeout)
        except subprocess.TimeoutExpired:
            self.print_error("timeout, restarting run...")
            return 2

    def run(self):
        """Performs one power-up/boot/test cycle; see the class docstring
        for the meaning of the return codes."""
        if ret := self.logged_system(self.powerup):
            return ret

        # Phase 1: wait up to 2 minutes for the bootloader's fastboot prompt.
        fastboot_ready = False
        for line in self.ser.lines(timeout=2 * 60, phase="bootloader"):
            if re.search(r"fastboot: processing commands", line) or \
                    re.search(r"Listening for fastboot command on", line):
                fastboot_ready = True
                break

            if re.search(r"data abort", line):
                self.print_error(
                    "Detected crash during boot, restarting run...")
                return 2

        if not fastboot_ready:
            self.print_error(
                "Failed to get to fastboot prompt, restarting run...")
            return 2

        if ret := self.logged_system(self.fastboot):
            return ret

        # Phase 2: watch the test run.  print_more_lines is -1 while no GPU
        # hang has been seen; once a hang is detected we let a few extra
        # context lines through before giving up and retrying.
        print_more_lines = -1
        for line in self.ser.lines(timeout=self.test_timeout, phase="test"):
            if print_more_lines == 0:
                return 2
            if print_more_lines > 0:
                print_more_lines -= 1

            if re.search(r"---. end Kernel panic", line):
                return 1

            # The db820c boards intermittently reboot.  Just restart the run
            # when if we see a reboot after we got past fastboot.
            if re.search(r"PON REASON", line):
                self.print_error(
                    "Detected spontaneous reboot, restarting run...")
                return 2

            # db820c sometimes wedges around iommu fault recovery
            if re.search(r"watchdog: BUG: soft lockup - CPU.* stuck", line):
                self.print_error(
                    "Detected kernel soft lockup, restarting run...")
                return 2

            # If the network device dies, it's probably not graphics's fault, just try again.
            if re.search(r"NETDEV WATCHDOG", line):
                self.print_error(
                    "Detected network device failure, restarting run...")
                return 2

            # A3xx recovery doesn't quite work. Sometimes the GPU will get
            # wedged and recovery will fail (because power can't be reset?)
            # This assumes that the jobs are sufficiently well-tested that GPU
            # hangs aren't always triggered, so just try again. But print some
            # more lines first so that we get better information on the cause
            # of the hang. Once a hang happens, it's pretty chatty.
            if "[drm:adreno_recover] *ERROR* gpu hw init failed: -22" in line:
                self.print_error(
                    "Detected GPU hang, restarting run...")
                if print_more_lines == -1:
                    print_more_lines = 30

            # Raw string: "\S" in a plain literal is a deprecated escape
            # sequence (SyntaxWarning on modern Python).
            result = re.search(r"hwci: mesa: (\S*)", line)
            if result:
                if result.group(1) == "pass":
                    return 0
                else:
                    return 1

        self.print_error(
            "Reached the end of the CPU serial log without finding a result, restarting run...")
        return 2
|  |  | ||||||
|  |  | ||||||
def main():
    """Entry point: parse CLI options and retry runs until a final result."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--dev', type=str, help='Serial device (otherwise reading from serial-output.txt)')
    parser.add_argument('--powerup', type=str,
                        help='shell command for rebooting', required=True)
    parser.add_argument('--powerdown', type=str,
                        help='shell command for powering off', required=True)
    parser.add_argument('--fbserial', type=str,
                        help='fastboot serial number of the board', required=True)
    parser.add_argument('--test-timeout', type=int,
                        help='Test phase timeout (minutes)', required=True)
    args = parser.parse_args()

    timeout_s = args.test_timeout * 60
    while True:
        # A fresh FastbootRun per attempt so the serial port is reopened.
        fastboot = FastbootRun(args, timeout_s)
        retval = fastboot.run()
        fastboot.close()
        # 2 means "transient failure, try again"; anything else is final.
        if retval != 2:
            break

    fastboot.logged_system(args.powerdown)

    sys.exit(retval)
|  |  | ||||||
|  |  | ||||||
# Run the CLI entry point only when executed as a script.
if __name__ == '__main__':
    main()
| @@ -1,10 +0,0 @@ | |||||||
#!/bin/bash

# Cuts power to a bare-metal board by switching its power relay off.
# Usage: <script> <relay-number>

relay="$1"

if [ -z "$relay" ]; then
    echo "Must supply a relay arg" >&2
    exit 1
fi

# Quote expansions: paths and arguments may contain spaces.
"$CI_PROJECT_DIR"/install/bare-metal/google-power-relay.py off "$relay"
| @@ -1,19 +0,0 @@ | |||||||
#!/usr/bin/python3

# Toggles one relay on the USB relay board controlling board power.
# Usage: google-power-relay.py <on|off> <relay-number>

import sys
import serial

# Fail with a usage message instead of an IndexError traceback.
if len(sys.argv) != 3:
    sys.exit("usage: google-power-relay.py <on|off> <relay>")

mode = sys.argv[1]
relay = sys.argv[2]

# our relays are "off" means "board is powered".
mode_swap = {
    "on": "off",
    "off": "on",
}
# Reject unknown modes explicitly instead of raising KeyError.
if mode not in mode_swap:
    sys.exit("unknown mode '{}' (expected 'on' or 'off')".format(mode))
mode = mode_swap[mode]

ser = serial.Serial('/dev/ttyACM0', 115200, timeout=2)
command = "relay {} {}\n\r".format(mode, relay)
ser.write(command.encode())
ser.close()
| @@ -1,12 +0,0 @@ | |||||||
#!/bin/bash

# Power-cycles a bare-metal board: relay off, brief pause, relay on.
# Usage: <script> <relay-number>

relay="$1"

if [ -z "$relay" ]; then
    echo "Must supply a relay arg" >&2
    exit 1
fi

# Quote expansions: paths and arguments may contain spaces.
"$CI_PROJECT_DIR"/install/bare-metal/google-power-relay.py off "$relay"
# Give the board time to fully discharge before powering back on.
sleep 5
"$CI_PROJECT_DIR"/install/bare-metal/google-power-relay.py on "$relay"
| @@ -1,569 +0,0 @@ | |||||||
| #!/usr/bin/env python3 |  | ||||||
| # |  | ||||||
| # Copyright 2015, The Android Open Source Project |  | ||||||
| # |  | ||||||
| # Licensed under the Apache License, Version 2.0 (the "License"); |  | ||||||
| # you may not use this file except in compliance with the License. |  | ||||||
| # You may obtain a copy of the License at |  | ||||||
| # |  | ||||||
| #     http://www.apache.org/licenses/LICENSE-2.0 |  | ||||||
| # |  | ||||||
| # Unless required by applicable law or agreed to in writing, software |  | ||||||
| # distributed under the License is distributed on an "AS IS" BASIS, |  | ||||||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |  | ||||||
| # See the License for the specific language governing permissions and |  | ||||||
| # limitations under the License. |  | ||||||
| """Creates the boot image.""" |  | ||||||
| from argparse import (ArgumentParser, ArgumentTypeError, |  | ||||||
|                       FileType, RawDescriptionHelpFormatter) |  | ||||||
| from hashlib import sha1 |  | ||||||
| from os import fstat |  | ||||||
| from struct import pack |  | ||||||
| import array |  | ||||||
| import collections |  | ||||||
| import os |  | ||||||
| import re |  | ||||||
| import subprocess |  | ||||||
| import tempfile |  | ||||||
# Constant and structure definition is in
# system/tools/mkbootimg/include/bootimg/bootimg.h
BOOT_MAGIC = 'ANDROID!'
BOOT_MAGIC_SIZE = 8
BOOT_NAME_SIZE = 16
BOOT_ARGS_SIZE = 512
BOOT_EXTRA_ARGS_SIZE = 1024
# Fixed header sizes (bytes) for each boot image header version.
BOOT_IMAGE_HEADER_V1_SIZE = 1648
BOOT_IMAGE_HEADER_V2_SIZE = 1660
BOOT_IMAGE_HEADER_V3_SIZE = 1580
# v3+ headers are padded to this fixed page size regardless of --pagesize.
BOOT_IMAGE_HEADER_V3_PAGESIZE = 4096
BOOT_IMAGE_HEADER_V4_SIZE = 1584
BOOT_IMAGE_V4_SIGNATURE_SIZE = 4096
# vendor_boot image constants (header v3+).
VENDOR_BOOT_MAGIC = 'VNDRBOOT'
VENDOR_BOOT_MAGIC_SIZE = 8
VENDOR_BOOT_NAME_SIZE = BOOT_NAME_SIZE
VENDOR_BOOT_ARGS_SIZE = 2048
VENDOR_BOOT_IMAGE_HEADER_V3_SIZE = 2112
VENDOR_BOOT_IMAGE_HEADER_V4_SIZE = 2128
# Numeric vendor ramdisk types accepted by --ramdisk_type.
VENDOR_RAMDISK_TYPE_NONE = 0
VENDOR_RAMDISK_TYPE_PLATFORM = 1
VENDOR_RAMDISK_TYPE_RECOVERY = 2
VENDOR_RAMDISK_TYPE_DLKM = 3
VENDOR_RAMDISK_NAME_SIZE = 32
VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE = 16
VENDOR_RAMDISK_TABLE_ENTRY_V4_SIZE = 108
# Names with special meaning, mustn't be specified in --ramdisk_name.
VENDOR_RAMDISK_NAME_BLOCKLIST = {b'default'}
PARSER_ARGUMENT_VENDOR_RAMDISK_FRAGMENT = '--vendor_ramdisk_fragment'
def filesize(f):
    """Size in bytes of the open file object f.

    Returns 0 when f is None or is not backed by a real file descriptor
    (e.g. fileno()/fstat fails with OSError).
    """
    if f is None:
        return 0
    try:
        fd = f.fileno()
        stat_result = fstat(fd)
    except OSError:
        return 0
    return stat_result.st_size
def update_sha(sha, f):
    """Feeds f's contents, then its size as a packed uint32, into sha.

    A missing/None file contributes only a zero size.  f is rewound to the
    start afterwards so later stages can re-read it.
    """
    if not f:
        sha.update(pack('I', 0))
        return
    sha.update(f.read())
    f.seek(0)
    sha.update(pack('I', filesize(f)))
def pad_file(f, padding):
    """Zero-fills f so its current position is a multiple of padding.

    The bitmask arithmetic assumes padding is a power of two.
    """
    remainder = f.tell() & (padding - 1)
    if remainder:
        # '<n>x' packs n zero bytes.
        f.write(pack('{}x'.format(padding - remainder)))
def get_number_of_pages(image_size, page_size):
    """Number of whole pages needed to hold image_size bytes (ceiling division)."""
    return -(-image_size // page_size)
def get_recovery_dtbo_offset(args):
    """Byte offset of the recovery DTBO inside the boot image.

    The DTBO follows the header page and the page-aligned kernel, ramdisk
    and second-stage images.
    """
    pages = 1  # the header occupies one full page
    for image in (args.kernel, args.ramdisk, args.second):
        pages += get_number_of_pages(filesize(image), args.pagesize)
    return pages * args.pagesize
def write_header_v3_and_above(args):
    """Writes the v3/v4 boot image header to args.output.

    v3 dropped the legacy load-address/second/pagesize fields; the header
    is padded to the fixed BOOT_IMAGE_HEADER_V3_PAGESIZE.  Field order
    below matches bootimg.h and must not be changed.
    """
    if args.header_version > 3:
        boot_header_size = BOOT_IMAGE_HEADER_V4_SIZE
    else:
        boot_header_size = BOOT_IMAGE_HEADER_V3_SIZE
    args.output.write(pack(f'{BOOT_MAGIC_SIZE}s', BOOT_MAGIC.encode()))
    # kernel size in bytes
    args.output.write(pack('I', filesize(args.kernel)))
    # ramdisk size in bytes
    args.output.write(pack('I', filesize(args.ramdisk)))
    # os version and patch level
    args.output.write(pack('I', (args.os_version << 11) | args.os_patch_level))
    args.output.write(pack('I', boot_header_size))
    # reserved
    args.output.write(pack('4I', 0, 0, 0, 0))
    # version of boot image header
    args.output.write(pack('I', args.header_version))
    args.output.write(pack(f'{BOOT_ARGS_SIZE + BOOT_EXTRA_ARGS_SIZE}s',
                           args.cmdline))
    if args.header_version >= 4:
        # The signature used to verify boot image v4.
        args.output.write(pack('I', BOOT_IMAGE_V4_SIGNATURE_SIZE))
    pad_file(args.output, BOOT_IMAGE_HEADER_V3_PAGESIZE)
def write_vendor_boot_header(args):
    """Writes the vendor_boot header (v3 or v4) to args.vendor_boot,
    padded to args.pagesize.

    Raises ValueError if the DTB image is missing or empty.  Field order
    below matches bootimg.h and must not be changed.
    """
    if filesize(args.dtb) == 0:
        raise ValueError('DTB image must not be empty.')
    if args.header_version > 3:
        # v4 carries multiple ramdisk fragments; the total size was
        # accumulated while parsing the vendor ramdisk arguments.
        vendor_ramdisk_size = args.vendor_ramdisk_total_size
        vendor_boot_header_size = VENDOR_BOOT_IMAGE_HEADER_V4_SIZE
    else:
        vendor_ramdisk_size = filesize(args.vendor_ramdisk)
        vendor_boot_header_size = VENDOR_BOOT_IMAGE_HEADER_V3_SIZE
    args.vendor_boot.write(pack(f'{VENDOR_BOOT_MAGIC_SIZE}s',
                                VENDOR_BOOT_MAGIC.encode()))
    # version of boot image header
    args.vendor_boot.write(pack('I', args.header_version))
    # flash page size
    args.vendor_boot.write(pack('I', args.pagesize))
    # kernel physical load address
    args.vendor_boot.write(pack('I', args.base + args.kernel_offset))
    # ramdisk physical load address
    args.vendor_boot.write(pack('I', args.base + args.ramdisk_offset))
    # ramdisk size in bytes
    args.vendor_boot.write(pack('I', vendor_ramdisk_size))
    args.vendor_boot.write(pack(f'{VENDOR_BOOT_ARGS_SIZE}s',
                                args.vendor_cmdline))
    # kernel tags physical load address
    args.vendor_boot.write(pack('I', args.base + args.tags_offset))
    # asciiz product name
    args.vendor_boot.write(pack(f'{VENDOR_BOOT_NAME_SIZE}s', args.board))
    # header size in bytes
    args.vendor_boot.write(pack('I', vendor_boot_header_size))
    # dtb size in bytes
    args.vendor_boot.write(pack('I', filesize(args.dtb)))
    # dtb physical load address
    args.vendor_boot.write(pack('Q', args.base + args.dtb_offset))
    if args.header_version > 3:
        vendor_ramdisk_table_size = (args.vendor_ramdisk_table_entry_num *
                                     VENDOR_RAMDISK_TABLE_ENTRY_V4_SIZE)
        # vendor ramdisk table size in bytes
        args.vendor_boot.write(pack('I', vendor_ramdisk_table_size))
        # number of vendor ramdisk table entries
        args.vendor_boot.write(pack('I', args.vendor_ramdisk_table_entry_num))
        # vendor ramdisk table entry size in bytes
        args.vendor_boot.write(pack('I', VENDOR_RAMDISK_TABLE_ENTRY_V4_SIZE))
        # bootconfig section size in bytes
        args.vendor_boot.write(pack('I', filesize(args.vendor_bootconfig)))
    pad_file(args.vendor_boot, args.pagesize)
def write_header(args):
    """Writes the boot image header for args.header_version (0 through 4).

    v3/v4 are delegated to write_header_v3_and_above() (which returns
    None).  For v0-v2 the header is written to args.output and the packed
    32-byte SHA-1 image id is returned.  Raises ValueError on unsupported
    header versions or (v2+) an empty DTB.
    """
    if args.header_version > 4:
        raise ValueError(
            f'Boot header version {args.header_version} not supported')
    if args.header_version in {3, 4}:
        return write_header_v3_and_above(args)
    # Load addresses are zeroed when the corresponding image is absent.
    ramdisk_load_address = ((args.base + args.ramdisk_offset)
                            if filesize(args.ramdisk) > 0 else 0)
    second_load_address = ((args.base + args.second_offset)
                           if filesize(args.second) > 0 else 0)
    args.output.write(pack(f'{BOOT_MAGIC_SIZE}s', BOOT_MAGIC.encode()))
    # kernel size in bytes
    args.output.write(pack('I', filesize(args.kernel)))
    # kernel physical load address
    args.output.write(pack('I', args.base + args.kernel_offset))
    # ramdisk size in bytes
    args.output.write(pack('I', filesize(args.ramdisk)))
    # ramdisk physical load address
    args.output.write(pack('I', ramdisk_load_address))
    # second bootloader size in bytes
    args.output.write(pack('I', filesize(args.second)))
    # second bootloader physical load address
    args.output.write(pack('I', second_load_address))
    # kernel tags physical load address
    args.output.write(pack('I', args.base + args.tags_offset))
    # flash page size
    args.output.write(pack('I', args.pagesize))
    # version of boot image header
    args.output.write(pack('I', args.header_version))
    # os version and patch level
    args.output.write(pack('I', (args.os_version << 11) | args.os_patch_level))
    # asciiz product name
    args.output.write(pack(f'{BOOT_NAME_SIZE}s', args.board))
    args.output.write(pack(f'{BOOT_ARGS_SIZE}s', args.cmdline))
    # The image id is a SHA-1 over each image's contents followed by its size.
    sha = sha1()
    update_sha(sha, args.kernel)
    update_sha(sha, args.ramdisk)
    update_sha(sha, args.second)
    if args.header_version > 0:
        update_sha(sha, args.recovery_dtbo)
    if args.header_version > 1:
        update_sha(sha, args.dtb)
    img_id = pack('32s', sha.digest())
    args.output.write(img_id)
    args.output.write(pack(f'{BOOT_EXTRA_ARGS_SIZE}s', args.extra_cmdline))
    if args.header_version > 0:
        if args.recovery_dtbo:
            # recovery dtbo size in bytes
            args.output.write(pack('I', filesize(args.recovery_dtbo)))
            # recovery dtbo offset in the boot image
            args.output.write(pack('Q', get_recovery_dtbo_offset(args)))
        else:
            # Set to zero if no recovery dtbo
            args.output.write(pack('I', 0))
            args.output.write(pack('Q', 0))
    # Populate boot image header size for header versions 1 and 2.
    if args.header_version == 1:
        args.output.write(pack('I', BOOT_IMAGE_HEADER_V1_SIZE))
    elif args.header_version == 2:
        args.output.write(pack('I', BOOT_IMAGE_HEADER_V2_SIZE))
    if args.header_version > 1:
        if filesize(args.dtb) == 0:
            raise ValueError('DTB image must not be empty.')
        # dtb size in bytes
        args.output.write(pack('I', filesize(args.dtb)))
        # dtb physical load address
        args.output.write(pack('Q', args.base + args.dtb_offset))
    pad_file(args.output, args.pagesize)
    return img_id
class AsciizBytes:
    """Argparse type converter: encodes a string as NUL-terminated bytes.

    Values whose encoded form (terminator included) exceeds bufsize are
    rejected with an ArgumentTypeError:

    >>> AsciizBytes(bufsize=4)('foo')
    b'foo\\x00'
    >>> AsciizBytes(bufsize=4)('foob')
    Traceback (most recent call last):
        ...
    argparse.ArgumentTypeError: Encoded asciiz length exceeded: max 4, got 5
    """

    def __init__(self, bufsize):
        # Maximum encoded length, NUL terminator included.
        self.bufsize = bufsize

    def __call__(self, arg):
        encoded = arg.encode() + b'\x00'
        if len(encoded) > self.bufsize:
            raise ArgumentTypeError(
                'Encoded asciiz length exceeded: '
                f'max {self.bufsize}, got {len(encoded)}')
        return encoded
class VendorRamdiskTableBuilder:
    """Vendor ramdisk table builder.
    Attributes:
        entries: A list of VendorRamdiskTableEntry namedtuple.
        ramdisk_total_size: Total size in bytes of all ramdisks in the table.
    """
    VendorRamdiskTableEntry = collections.namedtuple(  # pylint: disable=invalid-name
        'VendorRamdiskTableEntry',
        ['ramdisk_path', 'ramdisk_size', 'ramdisk_offset', 'ramdisk_type',
         'ramdisk_name', 'board_id'])
    def __init__(self):
        self.entries = []
        self.ramdisk_total_size = 0
        # Names already added, used to reject duplicates in add_entry().
        self.ramdisk_names = set()
    def add_entry(self, ramdisk_path, ramdisk_type, ramdisk_name, board_id):
        """Appends one ramdisk fragment to the table.

        Raises ValueError for a banned or duplicate name, or a board_id
        whose length is not VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE.
        """
        # Strip any trailing null for simple comparison.
        stripped_ramdisk_name = ramdisk_name.rstrip(b'\x00')
        if stripped_ramdisk_name in VENDOR_RAMDISK_NAME_BLOCKLIST:
            raise ValueError(
                f'Banned vendor ramdisk name: {stripped_ramdisk_name}')
        if stripped_ramdisk_name in self.ramdisk_names:
            raise ValueError(
                f'Duplicated vendor ramdisk name: {stripped_ramdisk_name}')
        self.ramdisk_names.add(stripped_ramdisk_name)
        if board_id is None:
            board_id = array.array(
                'I', [0] * VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE)
        else:
            board_id = array.array('I', board_id)
        if len(board_id) != VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE:
            raise ValueError('board_id size must be '
                             f'{VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE}')
        with open(ramdisk_path, 'rb') as f:
            ramdisk_size = filesize(f)
        # The entry's offset is the running total *before* this ramdisk.
        self.entries.append(self.VendorRamdiskTableEntry(
            ramdisk_path, ramdisk_size, self.ramdisk_total_size, ramdisk_type,
            ramdisk_name, board_id))
        self.ramdisk_total_size += ramdisk_size
    def write_ramdisks_padded(self, fout, alignment):
        # Concatenates all ramdisk files, then pads once to alignment.
        for entry in self.entries:
            with open(entry.ramdisk_path, 'rb') as f:
                fout.write(f.read())
        pad_file(fout, alignment)
    def write_entries_padded(self, fout, alignment):
        # Writes one fixed-size table entry per ramdisk, then pads once.
        for entry in self.entries:
            fout.write(pack('I', entry.ramdisk_size))
            fout.write(pack('I', entry.ramdisk_offset))
            fout.write(pack('I', entry.ramdisk_type))
            fout.write(pack(f'{VENDOR_RAMDISK_NAME_SIZE}s',
                            entry.ramdisk_name))
            fout.write(entry.board_id)
        pad_file(fout, alignment)
def write_padded_file(f_out, f_in, padding):
    """Copies f_in into f_out, padding f_out to the next padding boundary.

    A None f_in writes nothing at all (no padding either).
    """
    if f_in is not None:
        f_out.write(f_in.read())
        pad_file(f_out, padding)
def parse_int(x):
    """Parses an integer literal, inferring the base from its prefix
    (0x/0o/0b or plain decimal)."""
    return int(x, base=0)
def parse_os_version(x):
    """Packs an 'A[.B[.C]]' OS version string into the boot header's
    three 7-bit fields; unparseable strings become 0."""
    match = re.search(r'^(\d{1,3})(?:\.(\d{1,3})(?:\.(\d{1,3}))?)?', x)
    if not match:
        return 0
    # Missing minor/patch groups default to 0.
    major, minor, patch = (int(g) if g else 0 for g in match.groups())
    # 7 bits allocated for each field
    assert major < 128
    assert minor < 128
    assert patch < 128
    return (major << 14) | (minor << 7) | patch
def parse_os_patch_level(x):
    """Packs a 'YYYY-MM[-DD]' security patch level into the header's
    11-bit (year-since-2000, month) field; unparseable strings become 0."""
    match = re.search(r'^(\d{4})-(\d{2})(?:-(\d{2}))?', x)
    if not match:
        return 0
    year = int(match.group(1)) - 2000
    month = int(match.group(2))
    # 7 bits for the year offset, 4 for the month; the day is discarded.
    assert 0 <= year < 128
    assert 0 < month <= 12
    return (year << 4) | month
def parse_vendor_ramdisk_type(x):
    """Maps a symbolic ramdisk type name (case-insensitive) to its numeric
    value, falling back to parsing x as an integer literal."""
    type_dict = {
        'none': VENDOR_RAMDISK_TYPE_NONE,
        'platform': VENDOR_RAMDISK_TYPE_PLATFORM,
        'recovery': VENDOR_RAMDISK_TYPE_RECOVERY,
        'dlkm': VENDOR_RAMDISK_TYPE_DLKM,
    }
    try:
        return type_dict[x.lower()]
    except KeyError:
        return parse_int(x)
def get_vendor_boot_v4_usage():
    """Returns the argparse epilog text documenting the vendor boot
    v4-only repeatable argument groups."""
    return """vendor boot version 4 arguments:
  --ramdisk_type {none,platform,recovery,dlkm}
                        specify the type of the ramdisk
  --ramdisk_name NAME
                        specify the name of the ramdisk
  --board_id{0..15} NUMBER
                        specify the value of the board_id vector, defaults to 0
  --vendor_ramdisk_fragment VENDOR_RAMDISK_FILE
                        path to the vendor ramdisk file
  These options can be specified multiple times, where each vendor ramdisk
  option group ends with a --vendor_ramdisk_fragment option.
  Each option group appends an additional ramdisk to the vendor boot image.
"""
def parse_vendor_ramdisk_args(args, args_list):
    """Parses vendor ramdisk specific arguments.
    Args:
        args: An argparse.Namespace object. Parsed results are stored into this
            object.
        args_list: A list of argument strings to be parsed.
    Returns:
        A list argument strings that are not parsed by this method.
    """
    # A small sub-parser applied once per --vendor_ramdisk_fragment group.
    parser = ArgumentParser(add_help=False)
    parser.add_argument('--ramdisk_type', type=parse_vendor_ramdisk_type,
                        default=VENDOR_RAMDISK_TYPE_NONE)
    parser.add_argument('--ramdisk_name',
                        type=AsciizBytes(bufsize=VENDOR_RAMDISK_NAME_SIZE),
                        required=True)
    for i in range(VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE):
        parser.add_argument(f'--board_id{i}', type=parse_int, default=0)
    parser.add_argument(PARSER_ARGUMENT_VENDOR_RAMDISK_FRAGMENT, required=True)
    unknown_args = []
    vendor_ramdisk_table_builder = VendorRamdiskTableBuilder()
    # A plain --vendor_ramdisk becomes the first (unnamed, platform-type)
    # table entry.
    if args.vendor_ramdisk is not None:
        vendor_ramdisk_table_builder.add_entry(
            args.vendor_ramdisk.name, VENDOR_RAMDISK_TYPE_PLATFORM, b'', None)
    # Each group of options ends with --vendor_ramdisk_fragment FILE; slice
    # args_list one group at a time.
    while PARSER_ARGUMENT_VENDOR_RAMDISK_FRAGMENT in args_list:
        # +2 keeps both the flag and its value inside this group's slice.
        idx = args_list.index(PARSER_ARGUMENT_VENDOR_RAMDISK_FRAGMENT) + 2
        vendor_ramdisk_args = args_list[:idx]
        args_list = args_list[idx:]
        ramdisk_args, extra_args = parser.parse_known_args(vendor_ramdisk_args)
        ramdisk_args_dict = vars(ramdisk_args)
        unknown_args.extend(extra_args)
        ramdisk_path = ramdisk_args.vendor_ramdisk_fragment
        ramdisk_type = ramdisk_args.ramdisk_type
        ramdisk_name = ramdisk_args.ramdisk_name
        board_id = [ramdisk_args_dict[f'board_id{i}']
                    for i in range(VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE)]
        vendor_ramdisk_table_builder.add_entry(ramdisk_path, ramdisk_type,
                                               ramdisk_name, board_id)
    # Anything after the last fragment is returned for the main parser.
    if len(args_list) > 0:
        unknown_args.extend(args_list)
    args.vendor_ramdisk_total_size = (vendor_ramdisk_table_builder
                                      .ramdisk_total_size)
    args.vendor_ramdisk_table_entry_num = len(vendor_ramdisk_table_builder
                                              .entries)
    args.vendor_ramdisk_table_builder = vendor_ramdisk_table_builder
    return unknown_args
def parse_cmdline():
    """Parse the command line arguments and return an argparse namespace.

    A first pass parses only --header_version, because the allowed size of
    --cmdline depends on the boot header version.  The full parser is then
    built and run; any leftover arguments are handed to
    parse_vendor_ramdisk_args() when a v4 vendor boot image is requested.

    Returns:
        The argparse namespace.  For header versions < 3, args.cmdline is
        split into args.cmdline / args.extra_cmdline before returning.

    Raises:
        ValueError: if unrecognized arguments remain after parsing.
    """
    # Pre-parse --header_version only; it decides the cmdline buffer size.
    version_parser = ArgumentParser(add_help=False)
    version_parser.add_argument('--header_version', type=parse_int, default=0)
    if version_parser.parse_known_args()[0].header_version < 3:
        # For boot header v0 to v2, the kernel commandline field is split into
        # two fields, cmdline and extra_cmdline. Both fields are asciiz strings,
        # so we minus one here to ensure the encoded string plus the
        # null-terminator can fit in the buffer size.
        cmdline_size = BOOT_ARGS_SIZE + BOOT_EXTRA_ARGS_SIZE - 1
    else:
        cmdline_size = BOOT_ARGS_SIZE + BOOT_EXTRA_ARGS_SIZE

    parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter,
                            epilog=get_vendor_boot_v4_usage())
    parser.add_argument('--kernel', type=FileType('rb'),
                        help='path to the kernel')
    parser.add_argument('--ramdisk', type=FileType('rb'),
                        help='path to the ramdisk')
    parser.add_argument('--second', type=FileType('rb'),
                        help='path to the second bootloader')
    parser.add_argument('--dtb', type=FileType('rb'), help='path to the dtb')
    # --recovery_dtbo and --recovery_acpio are aliases writing to the same
    # destination, hence the mutually exclusive group.
    dtbo_group = parser.add_mutually_exclusive_group()
    dtbo_group.add_argument('--recovery_dtbo', type=FileType('rb'),
                            help='path to the recovery DTBO')
    dtbo_group.add_argument('--recovery_acpio', type=FileType('rb'),
                            metavar='RECOVERY_ACPIO', dest='recovery_dtbo',
                            help='path to the recovery ACPIO')
    parser.add_argument('--cmdline', type=AsciizBytes(bufsize=cmdline_size),
                        default='', help='kernel command line arguments')
    parser.add_argument('--vendor_cmdline',
                        type=AsciizBytes(bufsize=VENDOR_BOOT_ARGS_SIZE),
                        default='',
                        help='vendor boot kernel command line arguments')
    parser.add_argument('--base', type=parse_int, default=0x10000000,
                        help='base address')
    parser.add_argument('--kernel_offset', type=parse_int, default=0x00008000,
                        help='kernel offset')
    parser.add_argument('--ramdisk_offset', type=parse_int, default=0x01000000,
                        help='ramdisk offset')
    parser.add_argument('--second_offset', type=parse_int, default=0x00f00000,
                        help='second bootloader offset')
    parser.add_argument('--dtb_offset', type=parse_int, default=0x01f00000,
                        help='dtb offset')
    parser.add_argument('--os_version', type=parse_os_version, default=0,
                        help='operating system version')
    parser.add_argument('--os_patch_level', type=parse_os_patch_level,
                        default=0, help='operating system patch level')
    parser.add_argument('--tags_offset', type=parse_int, default=0x00000100,
                        help='tags offset')
    parser.add_argument('--board', type=AsciizBytes(bufsize=BOOT_NAME_SIZE),
                        default='', help='board name')
    # Page size is restricted to powers of two between 2 KiB and 16 KiB.
    parser.add_argument('--pagesize', type=parse_int,
                        choices=[2**i for i in range(11, 15)], default=2048,
                        help='page size')
    parser.add_argument('--id', action='store_true',
                        help='print the image ID on standard output')
    parser.add_argument('--header_version', type=parse_int, default=0,
                        help='boot image header version')
    parser.add_argument('-o', '--output', type=FileType('wb'),
                        help='output file name')
    parser.add_argument('--gki_signing_algorithm',
                        help='GKI signing algorithm to use')
    parser.add_argument('--gki_signing_key',
                        help='path to RSA private key file')
    parser.add_argument('--gki_signing_signature_args',
                        help='other hash arguments passed to avbtool')
    parser.add_argument('--gki_signing_avbtool_path',
                        help='path to avbtool for boot signature generation')
    parser.add_argument('--vendor_boot', type=FileType('wb'),
                        help='vendor boot output file name')
    parser.add_argument('--vendor_ramdisk', type=FileType('rb'),
                        help='path to the vendor ramdisk')
    parser.add_argument('--vendor_bootconfig', type=FileType('rb'),
                        help='path to the vendor bootconfig file')

    args, extra_args = parser.parse_known_args()
    # v4 vendor boot images take repeatable per-ramdisk argument groups that
    # the main parser cannot express; they are consumed from extra_args here.
    if args.vendor_boot is not None and args.header_version > 3:
        extra_args = parse_vendor_ramdisk_args(args, extra_args)
    if len(extra_args) > 0:
        raise ValueError(f'Unrecognized arguments: {extra_args}')

    if args.header_version < 3:
        # Split the combined cmdline buffer back into the two asciiz fields
        # of the v0-v2 header, re-adding the null terminator to the first.
        args.extra_cmdline = args.cmdline[BOOT_ARGS_SIZE-1:]
        args.cmdline = args.cmdline[:BOOT_ARGS_SIZE-1] + b'\x00'
        assert len(args.cmdline) <= BOOT_ARGS_SIZE
        assert len(args.extra_cmdline) <= BOOT_EXTRA_ARGS_SIZE

    return args
def add_boot_image_signature(args, pagesize):
    """Adds the boot image signature.

    Note that the signature will only be verified in VTS to ensure a
    generic boot.img is used. It will not be used by the device
    bootloader at boot time. The bootloader should only verify
    the boot vbmeta at the end of the boot partition (or in the top-level
    vbmeta partition) via the Android Verified Boot process, when the
    device boots.

    Args:
        args: parsed argparse namespace; reads args.output and the
            gki_signing_* options.
        pagesize: alignment in bytes used to pad the appended signature.

    Raises:
        ValueError: if the generated signature exceeds
            BOOT_IMAGE_V4_SIGNATURE_SIZE.
        subprocess.CalledProcessError: if avbtool fails.
    """
    args.output.flush()  # Flush the buffer for signature calculation.

    # Appends zeros if the signing key is not specified.
    if not args.gki_signing_key or not args.gki_signing_algorithm:
        zeros = b'\x00' * BOOT_IMAGE_V4_SIGNATURE_SIZE
        args.output.write(zeros)
        pad_file(args.output, pagesize)
        return

    avbtool = 'avbtool'  # Used from otatools.zip or Android build env.
    # We need to specify the path of avbtool in build/core/Makefile.
    # Because avbtool is not guaranteed to be in $PATH there.
    if args.gki_signing_avbtool_path:
        avbtool = args.gki_signing_avbtool_path

    # Need to specify a value of --partition_size for avbtool to work.
    # We use 64 MB below, but avbtool will not resize the boot image to
    # this size because --do_not_append_vbmeta_image is also specified.
    avbtool_cmd = [
        avbtool, 'add_hash_footer',
        '--partition_name', 'boot',
        '--partition_size', str(64 * 1024 * 1024),
        '--image', args.output.name,
        '--algorithm', args.gki_signing_algorithm,
        '--key', args.gki_signing_key,
        '--salt', 'd00df00d']  # TODO: use a hash of kernel/ramdisk as the salt.

    # Additional arguments passed to avbtool.
    if args.gki_signing_signature_args:
        avbtool_cmd += args.gki_signing_signature_args.split()

    # Outputs the signed vbmeta to a separate file, then append to boot.img
    # as the boot signature.
    with tempfile.TemporaryDirectory() as temp_out_dir:
        boot_signature_output = os.path.join(temp_out_dir, 'boot_signature')
        avbtool_cmd += ['--do_not_append_vbmeta_image',
                        '--output_vbmeta_image', boot_signature_output]
        subprocess.check_call(avbtool_cmd)
        with open(boot_signature_output, 'rb') as boot_signature:
            if filesize(boot_signature) > BOOT_IMAGE_V4_SIGNATURE_SIZE:
                # Fixed typo in the error message ("sigature" -> "signature").
                raise ValueError(
                    f'boot signature size is > {BOOT_IMAGE_V4_SIGNATURE_SIZE}')
            write_padded_file(args.output, boot_signature, pagesize)
def write_data(args, pagesize):
    """Write the boot image payload sections, each padded to pagesize.

    Kernel, ramdisk and second-stage bootloader are always written;
    recovery DTBO (v1-v2), dtb (v2) and the boot signature (v4+) are
    appended depending on the header version.
    """
    out = args.output
    for section in (args.kernel, args.ramdisk, args.second):
        write_padded_file(out, section, pagesize)

    if 0 < args.header_version < 3:
        write_padded_file(out, args.recovery_dtbo, pagesize)
    if args.header_version == 2:
        write_padded_file(out, args.dtb, pagesize)
    if args.header_version >= 4:
        add_boot_image_signature(args, pagesize)
def write_vendor_boot_data(args):
    """Write the vendor_boot payload for the requested header version.

    For v3, a single vendor ramdisk plus dtb are written; for v4+, the
    ramdisk table builder emits multiple ramdisks and their table entries,
    followed by the vendor bootconfig.
    """
    out = args.vendor_boot
    pagesize = args.pagesize
    if args.header_version <= 3:
        write_padded_file(out, args.vendor_ramdisk, pagesize)
        write_padded_file(out, args.dtb, pagesize)
    else:
        builder = args.vendor_ramdisk_table_builder
        builder.write_ramdisks_padded(out, pagesize)
        write_padded_file(out, args.dtb, pagesize)
        builder.write_entries_padded(out, pagesize)
        write_padded_file(out, args.vendor_bootconfig, pagesize)
def main():
    """Build boot.img and/or vendor_boot.img according to the CLI arguments."""
    args = parse_cmdline()

    if args.vendor_boot is not None:
        # vendor_boot only exists for header versions 3 and 4.
        if args.header_version not in {3, 4}:
            raise ValueError(
                '--vendor_boot not compatible with given header version')
        if args.header_version == 3 and args.vendor_ramdisk is None:
            raise ValueError('--vendor_ramdisk missing or invalid')
        write_vendor_boot_header(args)
        write_vendor_boot_data(args)

    if args.output is not None:
        # The second-stage bootloader field was dropped in header v3.
        if args.second is not None and args.header_version > 2:
            raise ValueError(
                '--second not compatible with given header version')
        img_id = write_header(args)
        # v3+ boot images always use the fixed header page size for padding.
        pagesize = (BOOT_IMAGE_HEADER_V3_PAGESIZE if args.header_version > 2
                    else args.pagesize)
        write_data(args, pagesize)
        if args.id and img_id is not None:
            print('0x' + ''.join('{:02x}'.format(octet) for octet in img_id))


if __name__ == '__main__':
    main()
| @@ -1,17 +0,0 @@ | |||||||
#!/bin/bash

# Power DOWN the device attached to a PoE switch port via SNMP.
# Requires BM_POE_INTERFACE (switch port number) and BM_POE_ADDRESS
# (switch hostname/IP) in the environment.

if [ -z "$BM_POE_INTERFACE" ]; then
    # Fixed message: this script powers the port down, not up.
    echo "Must supply the PoE Interface to power down"
    exit 1
fi

if [ -z "$BM_POE_ADDRESS" ]; then
    echo "Must supply the PoE Switch host"
    exit 1
fi

# OID of the PoE port-state object; the port index is 48 + interface number.
# Value "i 1" switches the port on, "i 2" switches it off.
SNMP_KEY="SNMPv2-SMI::mib-2.105.1.1.1.3.1.$((48 + BM_POE_INTERFACE))"
SNMP_ON="i 1"
SNMP_OFF="i 2"

# Serialize switch access across concurrent jobs with a lock file.
flock /var/run/poe.lock -c "snmpset -v2c -r 3 -t 30 -cmesaci $BM_POE_ADDRESS $SNMP_KEY $SNMP_OFF"
| @@ -1,19 +0,0 @@ | |||||||
#!/bin/bash

# Power-cycle the device attached to a PoE switch port via SNMP:
# switch the port off, wait briefly, then switch it back on.
# Requires BM_POE_INTERFACE (switch port number) and BM_POE_ADDRESS
# (switch hostname/IP) in the environment.

if [ -z "$BM_POE_INTERFACE" ]; then
    echo "Must supply the PoE Interface to power up"
    exit 1
fi

if [ -z "$BM_POE_ADDRESS" ]; then
    echo "Must supply the PoE Switch host"
    exit 1
fi

# OID of the PoE port-state object; the port index is 48 + interface number.
# Value "i 1" switches the port on, "i 2" switches it off.
# (Arithmetic expansion replaces the legacy backtick `expr` call.)
SNMP_KEY="SNMPv2-SMI::mib-2.105.1.1.1.3.1.$((48 + BM_POE_INTERFACE))"
SNMP_ON="i 1"
SNMP_OFF="i 2"

# Serialize switch access across concurrent jobs with a lock file.
flock /var/run/poe.lock -c "snmpset -v2c -r 3 -t 30 -cmesaci $BM_POE_ADDRESS $SNMP_KEY $SNMP_OFF"
sleep 3s
flock /var/run/poe.lock -c "snmpset -v2c -r 3 -t 30 -cmesaci $BM_POE_ADDRESS $SNMP_KEY $SNMP_ON"
| @@ -1,162 +0,0 @@ | |||||||
#!/bin/bash

# Boot script for devices attached to a PoE switch, using NFS for the root
# filesystem.
#
# Flow: validate runner/job configuration, populate the NFS rootfs and TFTP
# boot files, then run poe_run.py (which power-cycles the board and watches
# the serial console) with retries, and finally collect results.

# We're run from the root of the repo, make a helper var for our paths
BM=$CI_PROJECT_DIR/install/bare-metal
CI_COMMON=$CI_PROJECT_DIR/install/common

# Runner config checks
if [ -z "$BM_SERIAL" ]; then
  echo "Must set BM_SERIAL in your gitlab-runner config.toml [[runners]] environment"
  echo "This is the serial port to listen the device."
  exit 1
fi

if [ -z "$BM_POE_ADDRESS" ]; then
  echo "Must set BM_POE_ADDRESS in your gitlab-runner config.toml [[runners]] environment"
  echo "This is the PoE switch address to connect for powering up/down devices."
  exit 1
fi

if [ -z "$BM_POE_INTERFACE" ]; then
  echo "Must set BM_POE_INTERFACE in your gitlab-runner config.toml [[runners]] environment"
  echo "This is the PoE switch interface where the device is connected."
  exit 1
fi

if [ -z "$BM_POWERUP" ]; then
  echo "Must set BM_POWERUP in your gitlab-runner config.toml [[runners]] environment"
  echo "This is a shell script that should power up the device and begin its boot sequence."
  exit 1
fi

if [ -z "$BM_POWERDOWN" ]; then
  echo "Must set BM_POWERDOWN in your gitlab-runner config.toml [[runners]] environment"
  echo "This is a shell script that should power off the device."
  exit 1
fi

if [ ! -d /nfs ]; then
  echo "NFS rootfs directory needs to be mounted at /nfs by the gitlab runner"
  exit 1
fi

if [ ! -d /tftp ]; then
  echo "TFTP directory for this board needs to be mounted at /tftp by the gitlab runner"
  exit 1
fi

# job config checks
if [ -z "$BM_ROOTFS" ]; then
  echo "Must set BM_ROOTFS to your board's rootfs directory in the job's variables"
  exit 1
fi

if [ -z "$BM_BOOTFS" ]; then
  echo "Must set /boot files for the TFTP boot in the job's variables"
  exit 1
fi

if [ -z "$BM_CMDLINE" ]; then
  echo "Must set BM_CMDLINE to your board's kernel command line arguments"
  exit 1
fi

if [ -z "$BM_BOOTCONFIG" ]; then
  echo "Must set BM_BOOTCONFIG to your board's required boot configuration arguments"
  exit 1
fi

# Echo commands and abort on first failure from here on.
set -ex

# Clear out any previous run's artifacts.
rm -rf results/
mkdir -p results

# Create the rootfs in the NFS directory.  rm to make sure it's in a pristine
# state, since it's volume-mounted on the host.
rsync -a --delete $BM_ROOTFS/ /nfs/

# If BM_BOOTFS is an URL, download it
if echo $BM_BOOTFS | grep -q http; then
  apt install -y wget
  wget ${FDO_HTTP_CACHE_URI:-}$BM_BOOTFS -O /tmp/bootfs.tar
  BM_BOOTFS=/tmp/bootfs.tar
fi

# If BM_BOOTFS is a file, assume it is a tarball and uncompress it
if [ -f $BM_BOOTFS ]; then
  mkdir -p /tmp/bootfs
  tar xf $BM_BOOTFS -C /tmp/bootfs
  BM_BOOTFS=/tmp/bootfs
fi

# Install kernel modules (it could be either in /lib/modules or
# /usr/lib/modules, but we want to install in the latter)
[ -d $BM_BOOTFS/usr/lib/modules ] && rsync -a $BM_BOOTFS/usr/lib/modules/ /nfs/usr/lib/modules/
[ -d $BM_BOOTFS/lib/modules ] && rsync -a $BM_BOOTFS/lib/modules/ /nfs/lib/modules/

# Install kernel image + bootloader files
rsync -aL --delete $BM_BOOTFS/boot/ /tftp/

# Set up the pxelinux config for Jetson Nano
mkdir -p /tftp/pxelinux.cfg
cat <<EOF >/tftp/pxelinux.cfg/default-arm-tegra210-p3450-0000
PROMPT 0
TIMEOUT 30
DEFAULT primary
MENU TITLE jetson nano boot options
LABEL primary
      MENU LABEL CI kernel on TFTP
      LINUX Image
      FDT tegra210-p3450-0000.dtb
      APPEND \${cbootargs} $BM_CMDLINE
EOF

# Set up the pxelinux config for Jetson TK1
cat <<EOF >/tftp/pxelinux.cfg/default-arm-tegra124-jetson-tk1
PROMPT 0
TIMEOUT 30
DEFAULT primary
MENU TITLE jetson TK1 boot options
LABEL primary
      MENU LABEL CI kernel on TFTP
      LINUX zImage
      FDT tegra124-jetson-tk1.dtb
      APPEND \${cbootargs} $BM_CMDLINE
EOF

# Create the rootfs in the NFS directory
mkdir -p /nfs/results
. $BM/rootfs-setup.sh /nfs

echo "$BM_CMDLINE" > /tftp/cmdline.txt

# Add some required options in config.txt
# NOTE(review): BM_BOOTCONFIG is used as the printf FORMAT string, so any
# '%' or '\' sequences in it are interpreted — confirm this is intended.
printf "$BM_BOOTCONFIG" >> /tftp/config.txt

# Retry the boot/test cycle: poe_run.py exits with 2 when the boot sequence
# was never detected (or a known flaky-boot signature was seen), in which
# case we power-cycle and try again, up to ATTEMPTS times.
set +e
ATTEMPTS=10
while [ $((ATTEMPTS--)) -gt 0 ]; do
  python3 $BM/poe_run.py \
          --dev="$BM_SERIAL" \
          --powerup="$BM_POWERUP" \
          --powerdown="$BM_POWERDOWN" \
          --test-timeout ${TEST_PHASE_TIMEOUT:-20}
  ret=$?

  if [ $ret -eq 2 ]; then
    echo "Did not detect boot sequence, retrying..."
  else
    ATTEMPTS=0
  fi
done
set -e

# Bring artifacts back from the NFS dir to the build dir where gitlab-runner
# will look for them.
cp -Rp /nfs/results/. results/

# Propagate the result of the last poe_run.py attempt as our exit status.
exit $ret
| @@ -1,115 +0,0 @@ | |||||||
| #!/usr/bin/env python3 |  | ||||||
| # |  | ||||||
| # Copyright © 2020 Igalia, S.L. |  | ||||||
| # |  | ||||||
| # Permission is hereby granted, free of charge, to any person obtaining a |  | ||||||
| # copy of this software and associated documentation files (the "Software"), |  | ||||||
| # to deal in the Software without restriction, including without limitation |  | ||||||
| # the rights to use, copy, modify, merge, publish, distribute, sublicense, |  | ||||||
| # and/or sell copies of the Software, and to permit persons to whom the |  | ||||||
| # Software is furnished to do so, subject to the following conditions: |  | ||||||
| # |  | ||||||
| # The above copyright notice and this permission notice (including the next |  | ||||||
| # paragraph) shall be included in all copies or substantial portions of the |  | ||||||
| # Software. |  | ||||||
| # |  | ||||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |  | ||||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |  | ||||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL |  | ||||||
| # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |  | ||||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |  | ||||||
| # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |  | ||||||
| # IN THE SOFTWARE. |  | ||||||
|  |  | ||||||
| import argparse |  | ||||||
| import os |  | ||||||
| import re |  | ||||||
| from serial_buffer import SerialBuffer |  | ||||||
| import sys |  | ||||||
| import threading |  | ||||||
|  |  | ||||||
|  |  | ||||||
class PoERun:
    """Runs one boot-and-test cycle of a PoE-powered device.

    Powers the board up with the given shell command, then watches its
    serial console for a boot marker and a test result line.
    """

    def __init__(self, args, test_timeout):
        # Shell commands (strings) used to power the board up/down.
        self.powerup = args.powerup
        self.powerdown = args.powerdown
        # Serial console reader; also logs to results/serial-output.txt.
        self.ser = SerialBuffer(
            args.dev, "results/serial-output.txt", "")
        # Timeout (seconds) for the test phase of the serial log.
        self.test_timeout = test_timeout

    def print_error(self, message):
        """Print message to stdout in red."""
        RED = '\033[0;31m'
        NO_COLOR = '\033[0m'
        print(RED + message + NO_COLOR)

    def logged_system(self, cmd):
        """Run cmd through the shell, echoing it first; returns the exit status."""
        print("Running '{}'".format(cmd))
        return os.system(cmd)

    def run(self):
        """Execute one power-up/boot/test cycle.

        Returns:
            0 on test pass, 1 on failure (power-up error, kernel panic,
            GPU hang or test fail), 2 when the run should be retried
            (boot never detected, known flaky-boot signatures, or the
            serial log ended without a result).
        """
        if self.logged_system(self.powerup) != 0:
            return 1

        boot_detected = False
        for line in self.ser.lines(timeout=5 * 60, phase="bootloader"):
            if re.search("Booting Linux", line):
                boot_detected = True
                break

        if not boot_detected:
            self.print_error(
                "Something wrong; couldn't detect the boot start up sequence")
            return 2

        for line in self.ser.lines(timeout=self.test_timeout, phase="test"):
            if re.search("---. end Kernel panic", line):
                return 1

            # Binning memory problems
            if re.search("binner overflow mem", line):
                self.print_error("Memory overflow in the binner; GPU hang")
                return 1

            if re.search("nouveau 57000000.gpu: bus: MMIO read of 00000000 FAULT at 137000", line):
                self.print_error("nouveau jetson boot bug, retrying.")
                return 2

            # network fail on tk1
            if re.search("NETDEV WATCHDOG:.* transmit queue 0 timed out", line):
                self.print_error("nouveau jetson tk1 network fail, retrying.")
                return 2

            # Raw string avoids the invalid-escape-sequence warning for \S.
            result = re.search(r"hwci: mesa: (\S*)", line)
            if result:
                if result.group(1) == "pass":
                    return 0
                else:
                    return 1

        self.print_error(
            "Reached the end of the CPU serial log without finding a result")
        return 2
|  |  | ||||||
|  |  | ||||||
def main():
    """Parse the CLI, run one PoE boot/test cycle, power down, and exit."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--dev', type=str, required=True,
                        help='Serial device to monitor')
    parser.add_argument('--powerup', type=str, required=True,
                        help='shell command for rebooting')
    parser.add_argument('--powerdown', type=str, required=True,
                        help='shell command for powering off')
    parser.add_argument('--test-timeout', type=int, required=True,
                        help='Test phase timeout (minutes)')
    args = parser.parse_args()

    # The timeout is given in minutes; PoERun expects seconds.
    runner = PoERun(args, args.test_timeout * 60)
    exit_code = runner.run()

    # Always power the board off, whatever the test outcome was.
    runner.logged_system(args.powerdown)

    sys.exit(exit_code)


if __name__ == '__main__':
    main()
| @@ -1,30 +0,0 @@ | |||||||
#!/bin/bash

# Populate a bare-metal rootfs directory with the CI init scripts, job
# environment and the freshly built Mesa install tree.
#
# NOTE(review): relies on $BM, $CI_COMMON, $CI_JOB_JWT_FILE and
# $CI_PROJECT_DIR being exported by the calling script — confirm callers.

# Destination rootfs directory (e.g. /nfs), passed as the first argument.
rootfs_dst=$1

mkdir -p $rootfs_dst/results

# Set up the init script that brings up the system.
cp $BM/bm-init.sh $rootfs_dst/init
cp $CI_COMMON/init*.sh $rootfs_dst/

# Make JWT token available as file in the bare-metal storage to enable access
# to MinIO
cp "${CI_JOB_JWT_FILE}" "${rootfs_dst}${CI_JOB_JWT_FILE}"

cp $CI_COMMON/capture-devcoredump.sh $rootfs_dst/
cp $CI_COMMON/intel-gpu-freq.sh $rootfs_dst/

# Disable command echoing so secrets in the generated env file are not logged.
set +x

# Pass through relevant env vars from the gitlab job to the baremetal init script
"$CI_COMMON"/generate-env.sh > $rootfs_dst/set-job-env-vars.sh
chmod +x $rootfs_dst/set-job-env-vars.sh
echo "Variables passed through:"
cat $rootfs_dst/set-job-env-vars.sh

set -x

# Add the Mesa drivers we built, and make a consistent symlink to them.
mkdir -p $rootfs_dst/$CI_PROJECT_DIR
rsync -aH --delete $CI_PROJECT_DIR/install/ $rootfs_dst/$CI_PROJECT_DIR/install/
| @@ -1,185 +0,0 @@ | |||||||
| #!/usr/bin/env python3 |  | ||||||
| # |  | ||||||
| # Copyright © 2020 Google LLC |  | ||||||
| # |  | ||||||
| # Permission is hereby granted, free of charge, to any person obtaining a |  | ||||||
| # copy of this software and associated documentation files (the "Software"), |  | ||||||
| # to deal in the Software without restriction, including without limitation |  | ||||||
| # the rights to use, copy, modify, merge, publish, distribute, sublicense, |  | ||||||
| # and/or sell copies of the Software, and to permit persons to whom the |  | ||||||
| # Software is furnished to do so, subject to the following conditions: |  | ||||||
| # |  | ||||||
| # The above copyright notice and this permission notice (including the next |  | ||||||
| # paragraph) shall be included in all copies or substantial portions of the |  | ||||||
| # Software. |  | ||||||
| # |  | ||||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |  | ||||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |  | ||||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL |  | ||||||
| # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |  | ||||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |  | ||||||
| # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |  | ||||||
| # IN THE SOFTWARE. |  | ||||||
|  |  | ||||||
| import argparse |  | ||||||
| from datetime import datetime, timezone |  | ||||||
| import queue |  | ||||||
| import serial |  | ||||||
| import threading |  | ||||||
| import time |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class SerialBuffer: |  | ||||||
|     def __init__(self, dev, filename, prefix, timeout=None, line_queue=None): |  | ||||||
|         self.filename = filename |  | ||||||
|         self.dev = dev |  | ||||||
|  |  | ||||||
|         if dev: |  | ||||||
|             self.f = open(filename, "wb+") |  | ||||||
|             self.serial = serial.Serial(dev, 115200, timeout=timeout) |  | ||||||
|         else: |  | ||||||
|             self.f = open(filename, "rb") |  | ||||||
|             self.serial = None |  | ||||||
|  |  | ||||||
|         self.byte_queue = queue.Queue() |  | ||||||
|         # allow multiple SerialBuffers to share a line queue so you can merge |  | ||||||
|         # servo's CPU and EC streams into one thing to watch the boot/test |  | ||||||
|         # progress on. |  | ||||||
|         if line_queue: |  | ||||||
|             self.line_queue = line_queue |  | ||||||
|         else: |  | ||||||
|             self.line_queue = queue.Queue() |  | ||||||
|         self.prefix = prefix |  | ||||||
|         self.timeout = timeout |  | ||||||
|         self.sentinel = object() |  | ||||||
|         self.closing = False |  | ||||||
|  |  | ||||||
|         if self.dev: |  | ||||||
|             self.read_thread = threading.Thread( |  | ||||||
|                 target=self.serial_read_thread_loop, daemon=True) |  | ||||||
|         else: |  | ||||||
|             self.read_thread = threading.Thread( |  | ||||||
|                 target=self.serial_file_read_thread_loop, daemon=True) |  | ||||||
|         self.read_thread.start() |  | ||||||
|  |  | ||||||
|         self.lines_thread = threading.Thread( |  | ||||||
|             target=self.serial_lines_thread_loop, daemon=True) |  | ||||||
|         self.lines_thread.start() |  | ||||||
|  |  | ||||||
|     def close(self): |  | ||||||
|         self.closing = True |  | ||||||
|         if self.serial: |  | ||||||
|             self.serial.cancel_read() |  | ||||||
|         self.read_thread.join() |  | ||||||
|         self.lines_thread.join() |  | ||||||
|         if self.serial: |  | ||||||
|             self.serial.close() |  | ||||||
|  |  | ||||||
|     # Thread that just reads the bytes from the serial device to try to keep from |  | ||||||
|     # buffer overflowing it. If nothing is received in 1 minute, it finalizes. |  | ||||||
|     def serial_read_thread_loop(self): |  | ||||||
|         greet = "Serial thread reading from %s\n" % self.dev |  | ||||||
|         self.byte_queue.put(greet.encode()) |  | ||||||
|  |  | ||||||
|         while not self.closing: |  | ||||||
|             try: |  | ||||||
|                 b = self.serial.read() |  | ||||||
|                 if len(b) == 0: |  | ||||||
|                     break |  | ||||||
|                 self.byte_queue.put(b) |  | ||||||
|             except Exception as err: |  | ||||||
|                 print(self.prefix + str(err)) |  | ||||||
|                 break |  | ||||||
|         self.byte_queue.put(self.sentinel) |  | ||||||
|  |  | ||||||
|     # Thread that just reads the bytes from the file of serial output that some |  | ||||||
|     # other process is appending to. |  | ||||||
|     def serial_file_read_thread_loop(self): |  | ||||||
|         greet = "Serial thread reading from %s\n" % self.filename |  | ||||||
|         self.byte_queue.put(greet.encode()) |  | ||||||
|  |  | ||||||
|         while not self.closing: |  | ||||||
|             line = self.f.readline() |  | ||||||
|             if line: |  | ||||||
|                 self.byte_queue.put(line) |  | ||||||
|             else: |  | ||||||
|                 time.sleep(0.1) |  | ||||||
|         self.byte_queue.put(self.sentinel) |  | ||||||
|  |  | ||||||
|     # Thread that processes the stream of bytes to 1) log to stdout, 2) log to |  | ||||||
|     # file, 3) add to the queue of lines to be read by program logic |  | ||||||
|  |  | ||||||
|     def serial_lines_thread_loop(self): |  | ||||||
|         line = bytearray() |  | ||||||
|         while True: |  | ||||||
|             bytes = self.byte_queue.get(block=True) |  | ||||||
|  |  | ||||||
|             if bytes == self.sentinel: |  | ||||||
|                 self.read_thread.join() |  | ||||||
|                 self.line_queue.put(self.sentinel) |  | ||||||
|                 break |  | ||||||
|  |  | ||||||
|             # Write our data to the output file if we're the ones reading from |  | ||||||
|             # the serial device |  | ||||||
|             if self.dev: |  | ||||||
|                 self.f.write(bytes) |  | ||||||
|                 self.f.flush() |  | ||||||
|  |  | ||||||
|             for b in bytes: |  | ||||||
|                 line.append(b) |  | ||||||
|                 if b == b'\n'[0]: |  | ||||||
|                     line = line.decode(errors="replace") |  | ||||||
|  |  | ||||||
|                     time = datetime.now().strftime('%y-%m-%d %H:%M:%S') |  | ||||||
|                     print("{endc}{time} {prefix}{line}".format( |  | ||||||
|                         time=time, prefix=self.prefix, line=line, endc='\033[0m'), flush=True, end='') |  | ||||||
|  |  | ||||||
|                     self.line_queue.put(line) |  | ||||||
|                     line = bytearray() |  | ||||||
|  |  | ||||||
|     def lines(self, timeout=None, phase=None): |  | ||||||
|         start_time = time.monotonic() |  | ||||||
|         while True: |  | ||||||
|             read_timeout = None |  | ||||||
|             if timeout: |  | ||||||
|                 read_timeout = timeout - (time.monotonic() - start_time) |  | ||||||
|                 if read_timeout <= 0: |  | ||||||
|                     print("read timeout waiting for serial during {}".format(phase)) |  | ||||||
|                     self.close() |  | ||||||
|                     break |  | ||||||
|  |  | ||||||
|             try: |  | ||||||
|                 line = self.line_queue.get(timeout=read_timeout) |  | ||||||
|             except queue.Empty: |  | ||||||
|                 print("read timeout waiting for serial during {}".format(phase)) |  | ||||||
|                 self.close() |  | ||||||
|                 break |  | ||||||
|  |  | ||||||
|             if line == self.sentinel: |  | ||||||
|                 print("End of serial output") |  | ||||||
|                 self.lines_thread.join() |  | ||||||
|                 break |  | ||||||
|  |  | ||||||
|             yield line |  | ||||||
|  |  | ||||||
|  |  | ||||||
def main():
    """Entry point: attach a SerialBuffer to a device/file and discard the
    produced lines — this tool is used purely for its logging side effect."""
    parser = argparse.ArgumentParser()

    parser.add_argument('--dev', type=str, help='Serial device')
    parser.add_argument('--file', type=str, required=True,
                        help='Filename for serial output')
    parser.add_argument('--prefix', type=str, nargs='?',
                        help='Prefix for logging serial to stdout')

    args = parser.parse_args()

    buffer = SerialBuffer(args.dev, args.file, args.prefix or "")
    # We're just using this as a logger, so eat the produced lines and drop
    # them
    for _ in buffer.lines():
        pass
|  |  | ||||||
|  |  | ||||||
# Allow running this module directly as a standalone serial logger.
if __name__ == '__main__':
    main()
| @@ -1,41 +0,0 @@ | |||||||
#!/usr/bin/python3

# Copyright © 2020 Christian Gmeiner
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the next
# paragraph) shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# Tiny script to read bytes from telnet, and write the output to stdout, with a
# buffer in between so we don't lose serial output from its buffer.
#

import sys
import telnetlib

host = sys.argv[1]
port = sys.argv[2]

tn = telnetlib.Telnet(host, port, 1000000)

try:
    while True:
        # Renamed from "bytes", which shadowed the builtin.
        data = tn.read_some()
        # read_some() returns b'' only when EOF is hit; the original looped
        # forever and its tn.close() after "while True" was unreachable.
        if not data:
            break
        sys.stdout.buffer.write(data)
        sys.stdout.flush()
finally:
    tn.close()
							
								
								
									
										2
									
								
								.gitlab-ci/bin/.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.gitlab-ci/bin/.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -1,2 +0,0 @@ | |||||||
| schema.graphql |  | ||||||
| gitlab_gql.py.cache.db |  | ||||||
| @@ -1,301 +0,0 @@ | |||||||
| #!/usr/bin/env python3 |  | ||||||
| # Copyright © 2020 - 2022 Collabora Ltd. |  | ||||||
| # Authors: |  | ||||||
| #   Tomeu Vizoso <tomeu.vizoso@collabora.com> |  | ||||||
| #   David Heidelberg <david.heidelberg@collabora.com> |  | ||||||
| # |  | ||||||
| # SPDX-License-Identifier: MIT |  | ||||||
|  |  | ||||||
| """ |  | ||||||
| Helper script to restrict running only required CI jobs |  | ||||||
| and show the job(s) logs. |  | ||||||
| """ |  | ||||||
|  |  | ||||||
| import argparse |  | ||||||
| import re |  | ||||||
| import sys |  | ||||||
| import time |  | ||||||
| from concurrent.futures import ThreadPoolExecutor |  | ||||||
| from functools import partial |  | ||||||
| from itertools import chain |  | ||||||
| from typing import Optional |  | ||||||
|  |  | ||||||
| import gitlab |  | ||||||
| from colorama import Fore, Style |  | ||||||
| from gitlab_common import get_gitlab_project, read_token, wait_for_pipeline |  | ||||||
| from gitlab_gql import GitlabGQL, create_job_needs_dag, filter_dag, print_dag |  | ||||||
|  |  | ||||||
| REFRESH_WAIT_LOG = 10 |  | ||||||
| REFRESH_WAIT_JOBS = 6 |  | ||||||
|  |  | ||||||
| URL_START = "\033]8;;" |  | ||||||
| URL_END = "\033]8;;\a" |  | ||||||
|  |  | ||||||
| STATUS_COLORS = { |  | ||||||
|     "created": "", |  | ||||||
|     "running": Fore.BLUE, |  | ||||||
|     "success": Fore.GREEN, |  | ||||||
|     "failed": Fore.RED, |  | ||||||
|     "canceled": Fore.MAGENTA, |  | ||||||
|     "manual": "", |  | ||||||
|     "pending": "", |  | ||||||
|     "skipped": "", |  | ||||||
| } |  | ||||||
|  |  | ||||||
| COMPLETED_STATUSES = ["success", "failed"] |  | ||||||
|  |  | ||||||
|  |  | ||||||
def print_job_status(job) -> None:
    """It prints a nice, colored job status with a link to the job."""
    # Canceled jobs are noise; skip them entirely.
    if job.status == "canceled":
        return

    link = f"{URL_START}{job.web_url}\a{job.name}{URL_END}"
    print(f"{STATUS_COLORS[job.status]}🞋 job {link} :: {job.status}{Style.RESET_ALL}")
|  |  | ||||||
|  |  | ||||||
def print_job_status_change(job) -> None:
    """It reports job status changes."""
    # Canceled jobs are noise; skip them entirely.
    if job.status == "canceled":
        return

    link = f"{URL_START}{job.web_url}\a{job.name}{URL_END}"
    print(f"{STATUS_COLORS[job.status]}🗘 job {link} has new status: {job.status}{Style.RESET_ALL}")
|  |  | ||||||
|  |  | ||||||
def pretty_wait(sec: int) -> None:
    """shows progressbar in dots"""
    remaining = sec
    while remaining > 0:
        # \r keeps the countdown on a single line.
        print(f"⏲  {remaining} seconds", end="\r")
        time.sleep(1)
        remaining -= 1
|  |  | ||||||
|  |  | ||||||
def monitor_pipeline(
    project,
    pipeline,
    target_job: Optional[str],
    dependencies,
    force_manual: bool,
    stress: bool,
) -> tuple[Optional[int], Optional[int]]:
    """Monitors pipeline and delegate canceling jobs.

    Polls the pipeline's jobs in a loop: plays manual target/dependency
    jobs, retries finished target jobs in stress mode, cancels jobs that
    are neither targets nor dependencies, and reports status changes.

    Returns (target_job_id, exit_status): target_job_id is set only when
    exactly one target job is running (so its log can be tailed by the
    caller); exit_status is 1 on failure/cancel, 0 when every target is
    success/manual, None while undecided.  In stress mode it never returns.
    """
    statuses = {}          # job id -> last reported status (all jobs)
    target_statuses = {}   # job id -> last reported status (target jobs only)
    stress_succ = 0
    stress_fail = 0

    if target_job:
        target_jobs_regex = re.compile(target_job.strip())

    while True:
        to_cancel = []
        for job in pipeline.jobs.list(all=True, sort="desc"):
            # target jobs
            if target_job and target_jobs_regex.match(job.name):
                if force_manual and job.status == "manual":
                    enable_job(project, job, True)

                # In stress mode, tally the outcome and immediately retry.
                if stress and job.status in ["success", "failed"]:
                    if job.status == "success":
                        stress_succ += 1
                    if job.status == "failed":
                        stress_fail += 1
                    retry_job(project, job)

                # NOTE(review): `not in` performs a substring test against the
                # stored status string; GitLab's status names don't collide as
                # substrings so this behaves like `!=`, but confirm that was
                # the intent.
                if (job.id not in target_statuses) or (
                    job.status not in target_statuses[job.id]
                ):
                    print_job_status_change(job)
                    target_statuses[job.id] = job.status
                else:
                    print_job_status(job)

                continue

            # all jobs
            if (job.id not in statuses) or (job.status not in statuses[job.id]):
                print_job_status_change(job)
                statuses[job.id] = job.status

            # dependencies and cancelling the rest
            if job.name in dependencies:
                if job.status == "manual":
                    enable_job(project, job, False)

            elif target_job and job.status not in [
                "canceled",
                "success",
                "failed",
                "skipped",
            ]:
                to_cancel.append(job)

        if target_job:
            cancel_jobs(project, to_cancel)

        if stress:
            print(
                "∑ succ: " + str(stress_succ) + "; fail: " + str(stress_fail),
                flush=False,
            )
            pretty_wait(REFRESH_WAIT_JOBS)
            continue

        print("---------------------------------", flush=False)

        # Exactly one target job and it is running: hand its id back so the
        # caller can stream its log.
        if len(target_statuses) == 1 and {"running"}.intersection(
            target_statuses.values()
        ):
            return next(iter(target_statuses)), None

        if {"failed", "canceled"}.intersection(target_statuses.values()):
            return None, 1

        if {"success", "manual"}.issuperset(target_statuses.values()):
            return None, 0

        pretty_wait(REFRESH_WAIT_JOBS)
|  |  | ||||||
|  |  | ||||||
def enable_job(project, job, target: bool) -> None:
    """enable manual job"""
    # A lazy job object is enough to issue the play() call.
    project.jobs.get(job.id, lazy=True).play()
    jtype = "🞋 " if target else "(dependency)"
    print(Fore.MAGENTA + f"{jtype} job {job.name} manually enabled" + Style.RESET_ALL)
|  |  | ||||||
|  |  | ||||||
def retry_job(project, job) -> None:
    """retry job"""
    pjob = project.jobs.get(job.id, lazy=True)
    pjob.retry()
    jtype = "↻"
    # Fix: the original printed "manually enabled" (copy/paste from
    # enable_job); this job was retried, not enabled.
    print(Fore.MAGENTA + f"{jtype} job {job.name} retried" + Style.RESET_ALL)
|  |  | ||||||
|  |  | ||||||
def cancel_job(project, job) -> None:
    """Cancel GitLab job"""
    # A lazy job object is enough to issue the cancel() call.
    project.jobs.get(job.id, lazy=True).cancel()
    print(f"♲ {job.name}")
|  |  | ||||||
|  |  | ||||||
def cancel_jobs(project, to_cancel) -> None:
    """Cancel unwanted GitLab jobs"""
    # Nothing to do for an empty list.
    if not to_cancel:
        return

    # Cancel in parallel; each worker calls cancel_job(project, job).
    with ThreadPoolExecutor(max_workers=6) as executor:
        executor.map(partial(cancel_job, project), to_cancel)
|  |  | ||||||
|  |  | ||||||
def print_log(project, job_id) -> None:
    """Print job log into output"""
    printed_lines = 0
    while True:
        job = project.jobs.get(job_id)

        # GitLab's REST API doesn't offer pagination for logs, so we have to refetch it all
        log_lines = job.trace().decode("unicode_escape").splitlines()
        for fresh_line in log_lines[printed_lines:]:
            print(fresh_line)
        printed_lines = len(log_lines)

        if job.status in COMPLETED_STATUSES:
            print(Fore.GREEN + f"Job finished: {job.web_url}" + Style.RESET_ALL)
            return
        pretty_wait(REFRESH_WAIT_LOG)
|  |  | ||||||
|  |  | ||||||
def parse_args() -> argparse.Namespace:
    """Parse command-line arguments.

    Fix: the original was annotated -> None even though it returns the
    parsed argparse.Namespace.
    """
    parser = argparse.ArgumentParser(
        description="Tool to trigger a subset of container jobs "
        + "and monitor the progress of a test job",
        epilog="Example: mesa-monitor.py --rev $(git rev-parse HEAD) "
        + '--target ".*traces" ',
    )
    parser.add_argument("--target", metavar="target-job", help="Target job")
    parser.add_argument(
        "--rev", metavar="revision", help="repository git revision", required=True
    )
    parser.add_argument(
        "--token",
        metavar="token",
        help="force GitLab token, otherwise it's read from ~/.config/gitlab-token",
    )
    parser.add_argument(
        "--force-manual", action="store_true", help="Force jobs marked as manual"
    )
    parser.add_argument("--stress", action="store_true", help="Stresstest job(s)")
    return parser.parse_args()
|  |  | ||||||
|  |  | ||||||
def find_dependencies(target_job: str, project_path: str, sha: str) -> set[str]:
    """Return the names of all jobs the matching target job(s) need.

    NOTE(review): despite the str annotation, the caller passes a gitlab
    project object here (path_with_namespace is read below) — confirm and
    consider renaming/retyping; kept as-is to preserve the interface.
    """
    gql_instance = GitlabGQL()
    # Build the full transitive job-needs DAG for the pipeline at `sha`.
    dag, _ = create_job_needs_dag(
        gql_instance, {"projectPath": project_path.path_with_namespace, "sha": sha}
    )

    # Keep only jobs whose name matches the target regex, then show them.
    target_dep_dag = filter_dag(dag, target_job)
    print(Fore.YELLOW)
    print("Detected job dependencies:")
    print()
    print_dag(target_dep_dag)
    print(Fore.RESET)
    # Flatten the per-job dependency sets into one set of job names.
    return set(chain.from_iterable(target_dep_dag.values()))
|  |  | ||||||
|  |  | ||||||
if __name__ == "__main__":
    try:
        t_start = time.perf_counter()

        args = parse_args()

        # Token comes from --token or from ~/.config/gitlab-token.
        token = read_token(args.token)

        gl = gitlab.Gitlab(url="https://gitlab.freedesktop.org", private_token=token)

        cur_project = get_gitlab_project(gl, "mesa")

        print(f"Revision: {args.rev}")
        pipe = wait_for_pipeline(cur_project, args.rev)
        print(f"Pipeline: {pipe.web_url}")
        # With --target, compute the dependency jobs that must be kept alive.
        deps = set()
        if args.target:
            print("🞋 job: " + Fore.BLUE + args.target + Style.RESET_ALL)
            deps = find_dependencies(
                target_job=args.target, sha=args.rev, project_path=cur_project
            )
        # Watch the pipeline; returns the running target job's id (if there
        # is exactly one) and the exit status for this script.
        target_job_id, ret = monitor_pipeline(
            cur_project, pipe, args.target, deps, args.force_manual, args.stress
        )

        # Tail the target job's log until it completes.
        if target_job_id:
            print_log(cur_project, target_job_id)

        t_end = time.perf_counter()
        spend_minutes = (t_end - t_start) / 60
        print(f"⏲ Duration of script execution: {spend_minutes:0.1f} minutes")

        sys.exit(ret)
    except KeyboardInterrupt:
        sys.exit(1)
| @@ -1,11 +0,0 @@ | |||||||
#!/bin/sh

# Helper script to download the schema GraphQL from Gitlab to enable IDEs to
# assist the developer to edit gql files

SOURCE_DIR=$(dirname "$(realpath "$0")")

(
    # Fix: quote the path so directories containing spaces don't break cd.
    cd "$SOURCE_DIR" || exit 1
    gql-cli https://gitlab.freedesktop.org/api/graphql --print-schema > schema.graphql
)
| @@ -1,42 +0,0 @@ | |||||||
| #!/usr/bin/env python3 |  | ||||||
| # Copyright © 2020 - 2022 Collabora Ltd. |  | ||||||
| # Authors: |  | ||||||
| #   Tomeu Vizoso <tomeu.vizoso@collabora.com> |  | ||||||
| #   David Heidelberg <david.heidelberg@collabora.com> |  | ||||||
| # |  | ||||||
| # SPDX-License-Identifier: MIT |  | ||||||
| '''Shared functions between the scripts.''' |  | ||||||
|  |  | ||||||
| import os |  | ||||||
| import time |  | ||||||
| from typing import Optional |  | ||||||
|  |  | ||||||
|  |  | ||||||
def get_gitlab_project(glab, name: str):
    """Finds a specified gitlab project for given user.

    Fix: the original ignored `name` and always fetched "mesa"; the
    requested project is now looked up (existing callers pass "mesa", so
    their behavior is unchanged).
    """
    glab.auth()
    username = glab.user.username
    return glab.projects.get(f"{username}/{name}")
|  |  | ||||||
|  |  | ||||||
def read_token(token_arg: Optional[str]) -> str:
    """pick token from args or file

    Fix: the original leaked the open file handle; a context manager now
    guarantees it is closed.
    """
    if token_arg:
        return token_arg
    token_path = os.path.expanduser("~/.config/gitlab-token")
    with open(token_path, encoding="utf-8") as token_file:
        # Only the first line holds the token.
        return token_file.readline().rstrip()
|  |  | ||||||
|  |  | ||||||
def wait_for_pipeline(project, sha: str):
    """await until pipeline appears in Gitlab"""
    print("⏲ for the pipeline to appear..", end="")
    pipelines = project.pipelines.list(sha=sha)
    while not pipelines:
        # One dot per failed poll, then retry after a second.
        print("", end=".", flush=True)
        time.sleep(1)
        pipelines = project.pipelines.list(sha=sha)
    print("", flush=True)
    return pipelines[0]
| @@ -1,303 +0,0 @@ | |||||||
| #!/usr/bin/env python3 |  | ||||||
|  |  | ||||||
| import re |  | ||||||
| from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser, Namespace |  | ||||||
| from dataclasses import dataclass, field |  | ||||||
| from os import getenv |  | ||||||
| from pathlib import Path |  | ||||||
| from typing import Any, Iterable, Optional, Pattern, Union |  | ||||||
|  |  | ||||||
| import yaml |  | ||||||
| from filecache import DAY, filecache |  | ||||||
| from gql import Client, gql |  | ||||||
| from gql.transport.aiohttp import AIOHTTPTransport |  | ||||||
| from graphql import DocumentNode |  | ||||||
|  |  | ||||||
| Dag = dict[str, list[str]] |  | ||||||
| TOKEN_DIR = Path(getenv("XDG_CONFIG_HOME") or Path.home() / ".config") |  | ||||||
|  |  | ||||||
|  |  | ||||||
def get_token_from_default_dir() -> str:
    """Return the path of the gitlab-token file in the config directory.

    Fixes two issues in the original: Path.resolve() only raises
    FileNotFoundError with strict=True, so the except branch was dead code,
    and a Path was returned despite the -> str annotation.
    """
    token_file = TOKEN_DIR / "gitlab-token"
    try:
        return str(token_file.resolve(strict=True))
    except FileNotFoundError as ex:
        print(
            f"Could not find {token_file}, please provide a token file as an argument"
        )
        raise ex
|  |  | ||||||
|  |  | ||||||
def get_project_root_dir():
    """Return the repository root, sanity-checked by the presence of
    .gitlab-ci.yml (this file lives three levels below the root)."""
    root_path = Path(__file__).parents[2].resolve()
    assert (root_path / ".gitlab-ci.yml").exists()
    return root_path
|  |  | ||||||
|  |  | ||||||
@dataclass
class GitlabGQL:
    """Thin wrapper around a gql Client bound to GitLab's GraphQL endpoint.

    Query results are cached on disk for one day via @filecache.
    """

    # Both are created in __post_init__, hence excluded from __init__.
    _transport: Any = field(init=False)
    client: Client = field(init=False)
    url: str = "https://gitlab.freedesktop.org/api/graphql"
    token: Optional[str] = None

    def __post_init__(self):
        self._setup_gitlab_gql_client()

    def _setup_gitlab_gql_client(self) -> Client:
        # NOTE(review): annotated -> Client but returns None; the client is
        # stored on self.client instead — confirm and fix the annotation.
        # Select your transport with a defined url endpoint
        headers = {}
        if self.token:
            headers["Authorization"] = f"Bearer {self.token}"
        self._transport = AIOHTTPTransport(url=self.url, headers=headers)

        # Create a GraphQL client using the defined transport
        self.client = Client(
            transport=self._transport, fetch_schema_from_transport=True
        )

    @filecache(DAY)
    def query(
        self, gql_file: Union[Path, str], params: dict[str, Any]
    ) -> dict[str, Any]:
        """Execute the query stored in `gql_file` (relative to this script)
        with `params` as GraphQL variables; results are cached for a day."""
        # Provide a GraphQL query
        source_path = Path(__file__).parent
        pipeline_query_file = source_path / gql_file

        query: DocumentNode
        with open(pipeline_query_file, "r") as f:
            pipeline_query = f.read()
            query = gql(pipeline_query)

        # Execute the query on the transport
        return self.client.execute(query, variable_values=params)

    def invalidate_query_cache(self):
        """Drop the on-disk @filecache database backing query()."""
        self.query._db.clear()
|  |  | ||||||
|  |  | ||||||
def create_job_needs_dag(
    gl_gql: GitlabGQL, params
) -> tuple[Dag, dict[str, dict[str, Any]]]:
    """Build the transitive job-needs DAG for a pipeline.

    Returns (dag, jobs): `dag` maps each job name to the full transitive
    set of job names it needs; `jobs` maps job names to their raw GraphQL
    data (with "needs" popped off).
    """
    result = gl_gql.query("pipeline_details.gql", params)
    pipeline = result["project"]["pipeline"]
    if not pipeline:
        raise RuntimeError(f"Could not find any pipelines for {params}")

    dag = {}
    jobs = {}
    for stage in pipeline["stages"]["nodes"]:
        for group in stage["groups"]["nodes"]:
            for job in group["jobs"]["nodes"]:
                direct_needs = job.pop("needs")["nodes"]
                jobs[job["name"]] = job
                dag[job["name"]] = {need["name"] for need in direct_needs}

    # Expand every job's direct needs into the transitive closure.
    for name in dag:
        closure = dag[name]
        while True:
            expanded = {grand for dep in closure for grand in dag[dep]}
            if closure.issuperset(expanded):
                break
            closure = closure.union(expanded)
        dag[name] = closure

    return dag, jobs
|  |  | ||||||
|  |  | ||||||
def filter_dag(dag: Dag, regex: Pattern) -> Dag:
    """Keep only the jobs whose name matches the regex (match from start)."""
    filtered = {}
    for job, needs in dag.items():
        if re.match(regex, job):
            filtered[job] = needs
    return filtered
|  |  | ||||||
|  |  | ||||||
def print_dag(dag: Dag) -> None:
    """Pretty-print each job followed by its needs, one blank-separated block per job."""
    for job, needs in dag.items():
        joined = ' '.join(needs)
        print(f"{job}:\n\t{joined}\n")
|  |  | ||||||
|  |  | ||||||
def fetch_merged_yaml(gl_gql: GitlabGQL, params) -> dict[Any]:
    """Ask GitLab to merge the repo's .gitlab-ci.yml and return it parsed.

    Raises ValueError (after dropping the stale cache entry) when GitLab
    returns no merged YAML, e.g. for a SHA unknown to the remote.
    """
    gitlab_yml_file = get_project_root_dir() / ".gitlab-ci.yml"
    params["content"] = Path(gitlab_yml_file).read_text().strip()
    raw_response = gl_gql.query("job_details.gql", params)
    merged_yaml = raw_response["ciConfig"]["mergedYaml"]
    if merged_yaml:
        return yaml.safe_load(merged_yaml)

    gl_gql.invalidate_query_cache()
    raise ValueError(
        """
    Could not fetch any content for merged YAML,
    please verify if the git SHA exists in remote.
    Maybe you forgot to `git push`?  """
    )
|  |  | ||||||
|  |  | ||||||
def recursive_fill(job, relationship_field, target_data, acc_data: dict, merged_yaml):
    """Accumulate `target_data` dicts along the job's inheritance chain.

    Walks the parent jobs referenced by `relationship_field` (e.g.
    "extends"), merging each parent's `target_data` (e.g. "variables") into
    acc_data, with the current job's own entries taking precedence.

    Fix: the recursive call dropped relationship_field and target_data,
    raising TypeError whenever a job actually had parents.
    """
    if relatives := job.get(relationship_field):
        # A single parent may be given as a plain string rather than a list.
        if isinstance(relatives, str):
            relatives = [relatives]

        for relative in relatives:
            parent_job = merged_yaml[relative]
            acc_data = recursive_fill(
                parent_job, relationship_field, target_data, acc_data, merged_yaml
            )

    # The job's own entries override anything inherited above.
    acc_data |= job.get(target_data, {})

    return acc_data
|  |  | ||||||
|  |  | ||||||
def get_variables(job, merged_yaml, project_path, sha) -> dict[str, str]:
    """Collect the fully-expanded variable environment for a job."""
    tags_path = get_project_root_dir() / ".gitlab-ci" / "image-tags.yml"
    image_tags = yaml.safe_load(tags_path.read_text())

    # Later sources override earlier ones: image tags < global < job.
    variables = image_tags["variables"]
    variables |= merged_yaml["variables"]
    variables |= job["variables"]
    variables["CI_PROJECT_PATH"] = project_path
    variables["CI_PROJECT_NAME"] = project_path.split("/")[1]
    variables["CI_REGISTRY_IMAGE"] = "registry.freedesktop.org/${CI_PROJECT_PATH}"
    variables["CI_COMMIT_SHA"] = sha

    # Keep substituting $VAR references until a fixed point is reached.
    while recurse_among_variables_space(variables):
        pass

    return variables
|  |  | ||||||
|  |  | ||||||
# Based on: https://stackoverflow.com/a/2158532/1079223
def flatten(xs):
    """Recursively yield the leaves of arbitrarily nested iterables.

    Strings and bytes are treated as leaves, not as iterables.
    """
    for item in xs:
        if not isinstance(item, Iterable) or isinstance(item, (str, bytes)):
            yield item
        else:
            yield from flatten(item)
|  |  | ||||||
|  |  | ||||||
def get_full_script(job) -> list[str]:
    """Concatenate before_script, script and after_script into one list of
    lines, each section preceded by a header comment and followed by a
    blank entry."""
    script = []
    for section in ("before_script", "script", "after_script"):
        script.append(f"# {section}")
        script.extend(flatten(job.get(section, [])))
        script.append("")

    return script
|  |  | ||||||
|  |  | ||||||
def recurse_among_variables_space(var_graph) -> bool:
    """One pass of $VAR / ${VAR} substitution over the variable dict.

    Returns True when a substitution changed something, so callers can loop
    until a fixed point is reached.
    """
    updated = False
    for var, value in var_graph.items():
        value = str(value)
        dep_vars = []
        # Collect every $NAME / ${NAME} reference, keeping only names that
        # actually exist in the dict.
        if match := re.findall(r"(\$[{]?[\w\d_]*[}]?)", value):
            all_dep_vars = [v.lstrip("${").rstrip("}") for v in match]
            # print(value, match, all_dep_vars)
            dep_vars = [v for v in all_dep_vars if v in var_graph]

        for dep_var in dep_vars:
            dep_value = str(var_graph[dep_var])
            new_value = var_graph[var]
            # Substitute both the braced and the bare reference forms.
            new_value = new_value.replace(f"${{{dep_var}}}", dep_value)
            new_value = new_value.replace(f"${dep_var}", dep_value)
            var_graph[var] = new_value
            # NOTE(review): this compares the dependency's value with the
            # substituted result rather than this variable's old vs. new
            # value — confirm the intended change-detection semantics.
            updated |= dep_value != new_value

    return updated
|  |  | ||||||
|  |  | ||||||
def get_job_final_definiton(job_name, merged_yaml, project_path, sha):
    """Print the resolved variables, full script and container image for a
    job.  (The public name keeps its historical spelling so existing
    callers keep working.)"""
    job = merged_yaml[job_name]
    variables = get_variables(job, merged_yaml, project_path, sha)

    print("# --------- variables ---------------")
    for name, value in sorted(variables.items()):
        print(f"export {name}={value!r}")

    # TODO: Recurse into needs to get full script
    # TODO: maybe create a extra yaml file to avoid too much rework
    print("\n\n# --------- full script ---------------")
    print("\n".join(get_full_script(job)))

    image = variables.get("MESA_IMAGE")
    if image:
        print("\n\n# --------- container image ---------------")
        print(image)
|  |  | ||||||
|  |  | ||||||
def parse_args() -> Namespace:
    """Build the CLI, parse argv and attach the GitLab token's content to
    the returned namespace as `gitlab_token`."""
    parser = ArgumentParser(
        formatter_class=ArgumentDefaultsHelpFormatter,
        description="CLI and library with utility functions to debug jobs via Gitlab GraphQL",
        epilog=f"""Example:
        {Path(__file__).name} --rev $(git rev-parse HEAD) --print-job-dag""",
    )
    parser.add_argument("-pp", "--project-path", type=str, default="mesa/mesa")
    parser.add_argument("--sha", "--rev", type=str, required=True)
    parser.add_argument(
        "--regex",
        type=str,
        required=False,
        help="Regex pattern for the job name to be considered",
    )
    parser.add_argument("--print-dag", action="store_true", help="Print job needs DAG")
    parser.add_argument(
        "--print-merged-yaml",
        action="store_true",
        help="Print the resulting YAML for the specific SHA",
    )
    parser.add_argument(
        "--print-job-manifest", type=str, help="Print the resulting job data"
    )
    parser.add_argument(
        "--gitlab-token-file",
        type=str,
        default=get_token_from_default_dir(),
        help="force GitLab token, otherwise it's read from $XDG_CONFIG_HOME/gitlab-token",
    )

    parsed = parser.parse_args()
    # Read the token file eagerly so downstream code gets the secret itself.
    parsed.gitlab_token = Path(parsed.gitlab_token_file).read_text()
    return parsed
|  |  | ||||||
|  |  | ||||||
def main():
    """CLI entry point: run the debug actions requested on the command line."""
    args = parse_args()
    gl_gql = GitlabGQL(token=args.gitlab_token)
    params = {"projectPath": args.project_path, "sha": args.sha}

    if args.print_dag:
        dag, jobs = create_job_needs_dag(gl_gql, params)

        if args.regex:
            dag = filter_dag(dag, re.compile(args.regex))
        print_dag(dag)

    # Fetch the merged YAML at most once, even when both options need it
    # (the original fetched it twice, doubling the GraphQL round-trips).
    if args.print_merged_yaml or args.print_job_manifest:
        merged_yaml = fetch_merged_yaml(gl_gql, params)

        if args.print_merged_yaml:
            print(merged_yaml)

        if args.print_job_manifest:
            get_job_final_definiton(
                args.print_job_manifest, merged_yaml, args.project_path, args.sha
            )
|  |  | ||||||
|  |  | ||||||
# Run the CLI only when executed directly, not when imported as a library.
if __name__ == "__main__":
    main()
| @@ -1,7 +0,0 @@ | |||||||
# Fetch the merged (post include/extends resolution) CI YAML for a project
# revision, plus any linting errors GitLab reports for it.
query getCiConfigData($projectPath: ID!, $sha: String, $content: String!) {
  ciConfig(projectPath: $projectPath, sha: $sha, content: $content) {
    errors
    mergedYaml
    __typename
  }
}
| @@ -1,86 +0,0 @@ | |||||||
# Reusable summary of a linked (upstream/downstream) pipeline: identity,
# status, the job that triggered it, and its owning project.
fragment LinkedPipelineData on Pipeline {
  id
  iid
  path
  cancelable
  retryable
  userPermissions {
    updatePipeline
  }
  status: detailedStatus {
    id
    group
    label
    icon
  }
  sourceJob {
    id
    name
  }
  project {
    id
    name
    fullPath
  }
}

# Fetch the full topology of the pipeline for a SHA: linked pipelines,
# stages, job groups, and each job's `needs` edges (used to build the DAG).
query getPipelineDetails($projectPath: ID!, $sha: String!) {
  project(fullPath: $projectPath) {
    id
    pipeline(sha: $sha) {
      id
      iid
      complete
      downstream {
        nodes {
          ...LinkedPipelineData
        }
      }
      upstream {
        ...LinkedPipelineData
      }
      stages {
        nodes {
          id
          name
          status: detailedStatus {
            id
            action {
              id
              icon
              path
              title
            }
          }
          groups {
            nodes {
              id
              status: detailedStatus {
                id
                label
                group
                icon
              }
              name
              size
              jobs {
                nodes {
                  id
                  name
                  kind
                  scheduledAt
                  needs {
                    nodes {
                      id
                      name
                    }
                  }
                }
              }
            }
          }
        }
      }
    }
  }
}
| @@ -1,8 +0,0 @@ | |||||||
| aiohttp==3.8.1 |  | ||||||
| colorama==0.4.5 |  | ||||||
| filecache==0.81 |  | ||||||
| gql==3.4.0 |  | ||||||
| python-gitlab==3.5.0 |  | ||||||
| PyYAML==6.0 |  | ||||||
| ruamel.yaml.clib==0.2.6 |  | ||||||
| ruamel.yaml==0.17.21 |  | ||||||
| @@ -1,140 +0,0 @@ | |||||||
| #!/usr/bin/env python3 |  | ||||||
| # Copyright © 2022 Collabora Ltd. |  | ||||||
| # Authors: |  | ||||||
| #   David Heidelberg <david.heidelberg@collabora.com> |  | ||||||
| # |  | ||||||
| # SPDX-License-Identifier: MIT |  | ||||||
|  |  | ||||||
| """ |  | ||||||
| Helper script to update traces checksums |  | ||||||
| """ |  | ||||||
|  |  | ||||||
| import argparse |  | ||||||
| import bz2 |  | ||||||
| import glob |  | ||||||
| import re |  | ||||||
| import json |  | ||||||
| import sys |  | ||||||
| from ruamel.yaml import YAML |  | ||||||
|  |  | ||||||
| import gitlab |  | ||||||
| from gitlab_common import get_gitlab_project, read_token, wait_for_pipeline |  | ||||||
|  |  | ||||||
|  |  | ||||||
| DESCRIPTION_FILE = "export PIGLIT_REPLAY_DESCRIPTION_FILE='.*/install/(.*)'$" |  | ||||||
| DEVICE_NAME = "export PIGLIT_REPLAY_DEVICE_NAME='(.*)'$" |  | ||||||
|  |  | ||||||
|  |  | ||||||
def gather_results(
    project,
    pipeline,
) -> None:
    """Gather results from failed trace jobs and update checksums in-tree.

    Scans the pipeline for failed ``*-traces`` jobs; for each, extracts the
    device name and traces YAML path from the job log, downloads the job's
    ``results.json.bz2`` artifact, and rewrites the matching local traces
    YAML file with the new render checksums.
    """

    target_jobs_regex = re.compile(".*-traces([:].*)?$")

    for job in pipeline.jobs.list(all=True, sort="desc"):
        if target_jobs_regex.match(job.name) and job.status == "failed":
            cur_job = project.jobs.get(job.id)
            # get variables
            print(f"👁 Looking through logs for the device variable and traces.yml file in {job.name}...")
            log = cur_job.trace().decode("unicode_escape").splitlines()
            filename: str = ''
            dev_name: str = ''
            for logline in log:
                desc_file = re.search(DESCRIPTION_FILE, logline)
                device_name = re.search(DEVICE_NAME, logline)
                if desc_file:
                    filename = desc_file.group(1)
                if device_name:
                    dev_name = device_name.group(1)

            # NOTE(review): this aborts the whole scan rather than skipping
            # one job — confirm that is intended.
            if not filename or not dev_name:
                print("! Couldn't find device name or YML file in the logs!")
                return

            # Bug fix: report the file that was actually found instead of
            # the literal "(unknown)".
            print(f"👁 Found {dev_name} and file {filename}")

            # find filename in Mesa source
            traces_file = glob.glob('./**/' + filename, recursive=True)
            if not traces_file:
                # Robustness: the YAML referenced in the log may not exist in
                # this checkout; skip instead of crashing with IndexError.
                print(f"! Couldn't find {filename} in the local tree!")
                continue
            # write into it
            with open(traces_file[0], 'r', encoding='utf-8') as target_file:
                yaml = YAML()
                yaml.compact(seq_seq=False, seq_map=False)
                yaml.version = 1,2
                yaml.width = 2048  # do not break the text fields
                yaml.default_flow_style = None
                target = yaml.load(target_file)

                # parse artifact
                results_json_bz2 = cur_job.artifact(path="results/results.json.bz2", streamed=False)
                results_json = bz2.decompress(results_json_bz2).decode("utf-8")
                results = json.loads(results_json)

                for _, value in results["tests"].items():
                    if (
                        not value['images'] or
                        not value['images'][0] or
                        "image_desc" not in value['images'][0]
                    ):
                        continue

                    trace: str = value['images'][0]['image_desc']
                    checksum: str = value['images'][0]['checksum_render']

                    # NOTE(review): message says "Abort" but the loop
                    # continues — confirm which is intended.
                    if not checksum:
                        print(f"Trace {trace} checksum is missing! Abort.")
                        continue

                    if checksum == "error":
                        print(f"Trace {trace} crashed")
                        continue

                    if target['traces'][trace][dev_name].get('checksum') == checksum:
                        continue

                    if "label" in target['traces'][trace][dev_name]:
                        print(f'{trace}: {dev_name}: has label: {target["traces"][trace][dev_name]["label"]}, is it still right?')

                    target['traces'][trace][dev_name]['checksum'] = checksum

            with open(traces_file[0], 'w', encoding='utf-8') as target_file:
                yaml.dump(target, target_file)
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
def parse_args() -> argparse.Namespace:
    """Parse command line arguments.

    Returns:
        argparse.Namespace with ``rev`` (required git revision) and
        ``token`` (optional GitLab token override; None when not given).
    """
    # Bug fix: the return annotation said ``None`` but the function
    # returns the parsed Namespace.
    parser = argparse.ArgumentParser(
        description="Tool to generate patch from checksums ",
        epilog="Example: update_traces_checksum.py --rev $(git rev-parse HEAD) "
    )
    parser.add_argument(
        "--rev", metavar="revision", help="repository git revision", required=True
    )
    parser.add_argument(
        "--token",
        metavar="token",
        help="force GitLab token, otherwise it's read from ~/.config/gitlab-token",
    )
    return parser.parse_args()
|  |  | ||||||
|  |  | ||||||
# Script entry point: locate the pipeline for the given revision and update
# local trace checksums from its failed trace jobs. Exits 1 on Ctrl-C.
if __name__ == "__main__":
    try:
        args = parse_args()

        # Falls back to ~/.config/gitlab-token when --token is not given
        # (handled inside read_token — see gitlab_common).
        token = read_token(args.token)

        gl = gitlab.Gitlab(url="https://gitlab.freedesktop.org", private_token=token)

        cur_project = get_gitlab_project(gl, "mesa")

        print(f"Revision: {args.rev}")
        # NOTE(review): presumably blocks until the pipeline for this
        # revision is available — confirm against gitlab_common.
        pipe = wait_for_pipeline(cur_project, args.rev)
        print(f"Pipeline: {pipe.web_url}")
        gather_results(cur_project, pipe)

        sys.exit()
    except KeyboardInterrupt:
        sys.exit(1)
| @@ -1,628 +0,0 @@ | |||||||
# Shared between windows and Linux build jobs: common artifact collection
# and cancellation when a newer commit supersedes the branch.
.build-common:
  extends: .build-rules
  # Cancel job if a newer commit is pushed to the same branch
  interruptible: true
  artifacts:
    name: "mesa_${CI_JOB_NAME}"
    when: always
    paths:
      - _build/meson-logs/*.txt
      - _build/meson-logs/strace
      - shader-db

# Just Linux: layers transparent ccache use on top of .build-common,
# printing cache stats in collapsed log sections before and after the build.
.build-linux:
  extends: .build-common
  variables:
    CCACHE_COMPILERCHECK: "content"
    CCACHE_COMPRESS: "true"
    CCACHE_DIR: /cache/mesa/ccache
  # Use ccache transparently, and print stats before/after
  before_script:
    - !reference [default, before_script]
    - export PATH="/usr/lib/ccache:$PATH"
    - export CCACHE_BASEDIR="$PWD"
    - echo -e "\e[0Ksection_start:$(date +%s):ccache_before[collapsed=true]\r\e[0Kccache stats before build"
    - ccache --show-stats
    - echo -e "\e[0Ksection_end:$(date +%s):ccache_before\r\e[0K"
  after_script:
    - echo -e "\e[0Ksection_start:$(date +%s):ccache_after[collapsed=true]\r\e[0Kccache stats after build"
    - ccache --show-stats
    - echo -e "\e[0Ksection_end:$(date +%s):ccache_after\r\e[0K"
    - !reference [default, after_script]

# Windows builds run on tagged Windows docker runners and cache the meson
# subproject package downloads between runs.
.build-windows:
  extends: .build-common
  tags:
    - windows
    - docker
    - "2022"
    - mesa
  cache:
    key: ${CI_JOB_NAME}
    paths:
      - subprojects/packagecache

# Base template for Linux x86_64 meson builds.
.meson-build:
  extends:
    - .build-linux
    - .use-debian/x86_build
  stage: build-x86_64
  variables:
    LLVM_VERSION: 11
  script:
    - .gitlab-ci/meson/build.sh

# Base template for mingw (Windows cross) meson builds done on Linux.
.meson-build_mingw:
  extends:
    - .build-linux
    - .use-debian/x86_build_mingw
    - .use-wine
  stage: build-x86_64
  script:
    - .gitlab-ci/meson/build.sh
|  |  | ||||||
# Main amd64 build that also produces the deqp test artifacts consumed by
# the test stages (MINIO_ARTIFACT_NAME: mesa-amd64).
debian-testing:
  extends:
    - .meson-build
    - .ci-deqp-artifacts
  variables:
    UNWIND: "enabled"
    DRI_LOADERS: >
      -D glx=dri
      -D gbm=enabled
      -D egl=enabled
      -D platforms=x11
    GALLIUM_ST: >
      -D dri3=enabled
      -D gallium-va=enabled
    GALLIUM_DRIVERS: "swrast,virgl,radeonsi,zink,crocus,iris,i915"
    VULKAN_DRIVERS: "swrast,amd,intel,virtio-experimental"
    BUILDTYPE: "debugoptimized"
    EXTRA_OPTION: >
      -D spirv-to-dxil=true
      -D valgrind=false
    MINIO_ARTIFACT_NAME: mesa-amd64
    LLVM_VERSION: "13"
  script:
    - .gitlab-ci/meson/build.sh
    - .gitlab-ci/prepare-artifacts.sh
  artifacts:
    reports:
      junit: artifacts/ci_scripts_report.xml

# Same build with AddressSanitizer; uploads no artifact (empty
# MINIO_ARTIFACT_NAME overrides the parent's).
debian-testing-asan:
  extends:
    - debian-testing
  variables:
    C_ARGS: >
      -Wno-error=stringop-truncation
    EXTRA_OPTION: >
      -D b_sanitize=address
      -D valgrind=false
      -D tools=dlclose-skip
    MINIO_ARTIFACT_NAME: ""
    ARTIFACTS_DEBUG_SYMBOLS: 1

# MemorySanitizer build; based on debian-clang (defined below) since msan
# requires clang.
debian-testing-msan:
  extends:
    - debian-clang
  variables:
    # l_undef is incompatible with msan
    EXTRA_OPTION:
      -D b_sanitize=memory
      -D b_lundef=false
    MINIO_ARTIFACT_NAME: ""
    ARTIFACTS_DEBUG_SYMBOLS: 1
    # Don't run all the tests yet:
    # GLSL has some issues in sexpression reading.
    # gtest has issues in its test initialization.
    MESON_TEST_ARGS: "--suite glcpp --suite gallium  --suite format"
    # Freedreno dropped because freedreno tools fail at msan.
    GALLIUM_DRIVERS: "iris,nouveau,kmsro,r300,r600,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,radeonsi,tegra,d3d12,crocus"
    VULKAN_DRIVERS: intel,amd,broadcom,virtio-experimental

# Shared base for the OpenCL (clover/rusticl) testing builds: headless
# swrast-only configuration.
.debian-cl-testing:
  extends:
    - .meson-build
    - .ci-deqp-artifacts
  variables:
    LLVM_VERSION: "13"
    UNWIND: "enabled"
    DRI_LOADERS: >
      -D glx=disabled
      -D egl=disabled
      -D gbm=disabled
    GALLIUM_DRIVERS: "swrast"
    BUILDTYPE: "debugoptimized"
    EXTRA_OPTION: >
      -D valgrind=false
  script:
    - .gitlab-ci/meson/build.sh
    - .gitlab-ci/prepare-artifacts.sh

# Clover OpenCL ICD build for testing.
debian-clover-testing:
  extends:
    - .debian-cl-testing
  variables:
    GALLIUM_ST: >
      -D gallium-opencl=icd
      -D opencl-spirv=true

# Rusticl OpenCL build for testing.
debian-rusticl-testing:
  extends:
    - .debian-cl-testing
  variables:
    GALLIUM_ST: >
      -D gallium-rusticl=true
      -D opencl-spirv=true
|  |  | ||||||
# Broad build-coverage job; also runs the CI's own linters (shellcheck,
# yamllint, lava pytest) and shader-db before/after the build.
debian-build-testing:
  extends: .meson-build
  variables:
    UNWIND: "enabled"
    DRI_LOADERS: >
      -D glx=dri
      -D gbm=enabled
      -D egl=enabled
      -D platforms=x11,wayland
    GALLIUM_ST: >
      -D dri3=enabled
      -D gallium-extra-hud=true
      -D gallium-vdpau=enabled
      -D gallium-omx=bellagio
      -D gallium-va=enabled
      -D gallium-xa=enabled
      -D gallium-nine=true
      -D gallium-opencl=disabled
      -D gallium-rusticl=false
    GALLIUM_DRIVERS: "iris,nouveau,kmsro,r300,r600,freedreno,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,d3d12,asahi,crocus"
    VULKAN_DRIVERS: swrast
    EXTRA_OPTION: >
      -D spirv-to-dxil=true
      -D osmesa=true
      -D tools=drm-shim,etnaviv,freedreno,glsl,intel,intel-ui,nir,nouveau,lima,panfrost,asahi
  script:
    - .gitlab-ci/lava/lava-pytest.sh
    - .gitlab-ci/run-shellcheck.sh
    - .gitlab-ci/run-yamllint.sh
    - .gitlab-ci/meson/build.sh
    - .gitlab-ci/run-shader-db.sh

# Test a release build with -Werror so new warnings don't sneak in.
debian-release:
  extends: .meson-build
  variables:
    LLVM_VERSION: "13"
    UNWIND: "enabled"
    DRI_LOADERS: >
      -D glx=dri
      -D gbm=enabled
      -D egl=enabled
      -D platforms=x11,wayland
    GALLIUM_ST: >
      -D dri3=enabled
      -D gallium-extra-hud=true
      -D gallium-vdpau=enabled
      -D gallium-omx=disabled
      -D gallium-va=enabled
      -D gallium-xa=enabled
      -D gallium-nine=false
      -D gallium-opencl=disabled
      -D gallium-rusticl=false
      -D llvm=enabled
    GALLIUM_DRIVERS: "i915,iris,nouveau,kmsro,freedreno,r300,svga,swrast,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,d3d12,crocus"
    VULKAN_DRIVERS: "amd,imagination-experimental,microsoft-experimental"
    BUILDTYPE: "release"
    EXTRA_OPTION: >
      -D spirv-to-dxil=true
      -D osmesa=true
      -D tools=all
      -D intel-clc=enabled
      -D imagination-srv=true
  script:
    - .gitlab-ci/meson/build.sh
|  |  | ||||||
# Release build on Fedora with distro LLVM (LLVM_VERSION left empty) and
# glvnd/selinux enabled; some -Werror warnings downgraded for GCC there.
fedora-release:
  extends:
    - .meson-build
    - .use-fedora/x86_build
  variables:
    BUILDTYPE: "release"
    C_ARGS: >
      -Wno-error=array-bounds
      -Wno-error=stringop-overread
      -Wno-error=uninitialized
    CPP_ARGS: >
      -Wno-error=array-bounds
    DRI_LOADERS: >
      -D glx=dri
      -D gbm=enabled
      -D egl=enabled
      -D glvnd=true
      -D platforms=x11,wayland
    # intel-clc disabled, we need llvm-spirv-translator 13.0+, Fedora 34 only packages 12.0.
    EXTRA_OPTION: >
      -D osmesa=true
      -D selinux=true
      -D tools=drm-shim,etnaviv,freedreno,glsl,intel,nir,nouveau,lima,panfrost,imagination
      -D vulkan-layers=device-select,overlay
      -D intel-clc=disabled
      -D imagination-srv=true
    GALLIUM_DRIVERS: "crocus,etnaviv,freedreno,iris,kmsro,lima,nouveau,panfrost,r300,r600,radeonsi,svga,swrast,tegra,v3d,vc4,virgl,zink"
    GALLIUM_ST: >
      -D dri3=enabled
      -D gallium-extra-hud=true
      -D gallium-vdpau=enabled
      -D gallium-omx=disabled
      -D gallium-va=enabled
      -D gallium-xa=enabled
      -D gallium-nine=false
      -D gallium-opencl=icd
      -D gallium-rusticl=false
      -D gles1=disabled
      -D gles2=enabled
      -D llvm=enabled
      -D microsoft-clc=disabled
      -D shared-llvm=enabled
    LLVM_VERSION: ""
    UNWIND: "disabled"
    VULKAN_DRIVERS: "amd,broadcom,freedreno,intel,imagination-experimental"
  script:
    - .gitlab-ci/meson/build.sh
|  |  | ||||||
# Android cross-builds (aarch64 then x86_64) against the NDK sysroot;
# PKG_CONFIG_LIBDIR is pointed at a bogus path to block host .pc files.
debian-android:
  extends:
    - .meson-cross
    - .use-debian/android_build
  variables:
    UNWIND: "disabled"
    C_ARGS: >
      -Wno-error=asm-operand-widths
      -Wno-error=constant-conversion
      -Wno-error=enum-conversion
      -Wno-error=initializer-overrides
      -Wno-error=missing-braces
      -Wno-error=sometimes-uninitialized
      -Wno-error=unused-function
    CPP_ARGS: >
      -Wno-error=deprecated-declarations
    DRI_LOADERS: >
      -D glx=disabled
      -D gbm=disabled
      -D egl=enabled
      -D platforms=android
    EXTRA_OPTION: >
      -D android-stub=true
      -D llvm=disabled
      -D platform-sdk-version=29
      -D valgrind=false
    GALLIUM_ST: >
      -D dri3=disabled
      -D gallium-vdpau=disabled
      -D gallium-omx=disabled
      -D gallium-va=disabled
      -D gallium-xa=disabled
      -D gallium-nine=false
      -D gallium-opencl=disabled
      -D gallium-rusticl=false
    LLVM_VERSION: ""
    PKG_CONFIG_LIBDIR: "/disable/non/android/system/pc/files"
  script:
    - PKG_CONFIG_PATH=/usr/local/lib/aarch64-linux-android/pkgconfig/:/android-ndk-r21d/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/lib/aarch64-linux-android/pkgconfig/ CROSS=aarch64-linux-android GALLIUM_DRIVERS=etnaviv,freedreno,lima,panfrost,vc4,v3d VULKAN_DRIVERS=freedreno,broadcom,virtio-experimental .gitlab-ci/meson/build.sh
    # x86_64 build:
    # Can't do Intel because gen_decoder.c currently requires libexpat, which
    # is not a dependency that AOSP wants to accept.  Can't do Radeon Gallium
    # drivers because they requires LLVM, which we don't have an Android build
    # of.
    - PKG_CONFIG_PATH=/usr/local/lib/x86_64-linux-android/pkgconfig/:/android-ndk-r21d/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/lib/x86_64-linux-android/pkgconfig/ CROSS=x86_64-linux-android GALLIUM_DRIVERS=iris VULKAN_DRIVERS=amd,intel .gitlab-ci/meson/build.sh
|  |  | ||||||
# Base template for cross-compiled builds in the build-misc stage.
.meson-cross:
  extends:
    - .meson-build
  stage: build-misc
  variables:
    UNWIND: "disabled"
    DRI_LOADERS: >
      -D glx=dri
      -D gbm=enabled
      -D egl=enabled
      -D platforms=x11
      -D osmesa=false
    GALLIUM_ST: >
      -D dri3=enabled
      -D gallium-vdpau=disabled
      -D gallium-omx=disabled
      -D gallium-va=disabled
      -D gallium-xa=disabled
      -D gallium-nine=false

# Arm builds: native on aarch64 runners, using the debian/arm_build image.
.meson-arm:
  extends:
    - .meson-cross
    - .use-debian/arm_build
  needs:
    - debian/arm_build
  variables:
    VULKAN_DRIVERS: freedreno,broadcom
    GALLIUM_DRIVERS: "etnaviv,freedreno,kmsro,lima,nouveau,panfrost,swrast,tegra,v3d,vc4,zink"
    BUILDTYPE: "debugoptimized"
  tags:
    - aarch64

# 32-bit Arm cross build producing the mesa-armhf test artifact.
debian-armhf:
  extends:
    - .meson-arm
    - .ci-deqp-artifacts
  variables:
    CROSS: armhf
    EXTRA_OPTION: >
      -D llvm=disabled
      -D valgrind=false
    MINIO_ARTIFACT_NAME: mesa-armhf
  script:
    - .gitlab-ci/meson/build.sh
    - .gitlab-ci/prepare-artifacts.sh

# Native arm64 build producing the mesa-arm64 test artifact.
debian-arm64:
  extends:
    - .meson-arm
    - .ci-deqp-artifacts
  variables:
    VULKAN_DRIVERS: "freedreno,broadcom,panfrost,imagination-experimental"
    EXTRA_OPTION: >
      -D llvm=disabled
      -D valgrind=false
      -D imagination-srv=true
    MINIO_ARTIFACT_NAME: mesa-arm64
  script:
    - .gitlab-ci/meson/build.sh
    - .gitlab-ci/prepare-artifacts.sh

# arm64 AddressSanitizer variant; skips the mesa:compiler test suite.
debian-arm64-asan:
  extends:
    - debian-arm64
  variables:
    EXTRA_OPTION: >
      -D llvm=disabled
      -D b_sanitize=address
      -D valgrind=false
      -D tools=dlclose-skip
    ARTIFACTS_DEBUG_SYMBOLS: 1
    MINIO_ARTIFACT_NAME: mesa-arm64-asan
    MESON_TEST_ARGS: "--no-suite mesa:compiler"

# arm64 build-only coverage job (amd Vulkan + extra tools).
debian-arm64-build-test:
  extends:
    - .meson-arm
    - .ci-deqp-artifacts
  variables:
    VULKAN_DRIVERS: "amd"
    EXTRA_OPTION: >
      -Dtools=panfrost,imagination
  script:
    - .gitlab-ci/meson/build.sh
|  |  | ||||||
# Broad clang build (CC/CXX overridden) with nearly all drivers enabled.
# NOTE(review): EXTRA_OPTION lists "-D imagination-srv=true" twice —
# harmless duplication, but worth cleaning up.
debian-clang:
  extends: .meson-build
  variables:
    LLVM_VERSION: "13"
    UNWIND: "enabled"
    GALLIUM_DUMP_CPU: "true"
    C_ARGS: >
      -Wno-error=constant-conversion
      -Wno-error=enum-conversion
      -Wno-error=implicit-const-int-float-conversion
      -Wno-error=initializer-overrides
      -Wno-error=sometimes-uninitialized
    CPP_ARGS: >
      -Wno-error=c99-designator
      -Wno-error=implicit-const-int-float-conversion
      -Wno-error=overloaded-virtual
      -Wno-error=tautological-constant-out-of-range-compare
      -Wno-error=unused-const-variable
      -Wno-error=unused-private-field
    DRI_LOADERS: >
      -D glx=dri
      -D gbm=enabled
      -D egl=enabled
      -D glvnd=true
      -D platforms=x11,wayland
    GALLIUM_ST: >
      -D dri3=enabled
      -D gallium-extra-hud=true
      -D gallium-vdpau=enabled
      -D gallium-omx=bellagio
      -D gallium-va=enabled
      -D gallium-xa=enabled
      -D gallium-nine=true
      -D gallium-opencl=icd
      -D gles1=enabled
      -D gles2=enabled
      -D llvm=enabled
      -D microsoft-clc=enabled
      -D shared-llvm=enabled
      -D opencl-spirv=true
    GALLIUM_DRIVERS: "iris,nouveau,kmsro,r300,r600,freedreno,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,radeonsi,tegra,d3d12,crocus,i915,asahi"
    VULKAN_DRIVERS: intel,amd,freedreno,broadcom,virtio-experimental,swrast,panfrost,imagination-experimental,microsoft-experimental
    EXTRA_OPTION:
      -D spirv-to-dxil=true
      -D osmesa=true
      -D imagination-srv=true
      -D tools=drm-shim,etnaviv,freedreno,glsl,intel,intel-ui,nir,nouveau,lima,panfrost,asahi,imagination
      -D vulkan-layers=device-select,overlay
      -D build-aco-tests=true
      -D intel-clc=enabled
      -D imagination-srv=true
    CC: clang
    CXX: clang++

# Release-buildtype variant of the clang build with xlib GLX.
debian-clang-release:
  extends: debian-clang
  variables:
    BUILDTYPE: "release"
    DRI_LOADERS: >
      -D glx=xlib
      -D platforms=x11,wayland
|  |  | ||||||
| windows-vs2019: |  | ||||||
|   extends: |  | ||||||
|     - .build-windows |  | ||||||
|     - .use-windows_build_vs2019 |  | ||||||
|     - .windows-build-rules |  | ||||||
|   stage: build-misc |  | ||||||
|   script: |  | ||||||
|     - pwsh -ExecutionPolicy RemoteSigned .\.gitlab-ci\windows\mesa_build.ps1 |  | ||||||
|   artifacts: |  | ||||||
|     paths: |  | ||||||
|       - _build/meson-logs/*.txt |  | ||||||
|       - _install/ |  | ||||||
|  |  | ||||||
| .debian-cl: |  | ||||||
|   extends: .meson-build |  | ||||||
|   variables: |  | ||||||
|     LLVM_VERSION: "13" |  | ||||||
|     UNWIND: "enabled" |  | ||||||
|     DRI_LOADERS: > |  | ||||||
|       -D glx=disabled |  | ||||||
|       -D egl=disabled |  | ||||||
|       -D gbm=disabled |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -D valgrind=false |  | ||||||
|  |  | ||||||
| debian-clover: |  | ||||||
|   extends: .debian-cl |  | ||||||
|   variables: |  | ||||||
|     GALLIUM_DRIVERS: "r600,radeonsi,swrast" |  | ||||||
|     GALLIUM_ST: > |  | ||||||
|       -D dri3=disabled |  | ||||||
|       -D gallium-vdpau=disabled |  | ||||||
|       -D gallium-omx=disabled |  | ||||||
|       -D gallium-va=disabled |  | ||||||
|       -D gallium-xa=disabled |  | ||||||
|       -D gallium-nine=false |  | ||||||
|       -D gallium-opencl=icd |  | ||||||
|       -D gallium-rusticl=false |  | ||||||
|  |  | ||||||
| debian-rusticl: |  | ||||||
|   extends: .debian-cl |  | ||||||
|   variables: |  | ||||||
|     GALLIUM_DRIVERS: "iris,swrast" |  | ||||||
|     GALLIUM_ST: > |  | ||||||
|       -D dri3=disabled |  | ||||||
|       -D gallium-vdpau=disabled |  | ||||||
|       -D gallium-omx=disabled |  | ||||||
|       -D gallium-va=disabled |  | ||||||
|       -D gallium-xa=disabled |  | ||||||
|       -D gallium-nine=false |  | ||||||
|       -D gallium-opencl=disabled |  | ||||||
|       -D gallium-rusticl=true |  | ||||||
|  |  | ||||||
| debian-vulkan: |  | ||||||
|   extends: .meson-build |  | ||||||
|   variables: |  | ||||||
|     LLVM_VERSION: "13" |  | ||||||
|     UNWIND: "disabled" |  | ||||||
|     DRI_LOADERS: > |  | ||||||
|       -D glx=disabled |  | ||||||
|       -D gbm=disabled |  | ||||||
|       -D egl=disabled |  | ||||||
|       -D platforms=x11,wayland |  | ||||||
|       -D osmesa=false |  | ||||||
|     GALLIUM_ST: > |  | ||||||
|       -D dri3=enabled |  | ||||||
|       -D gallium-vdpau=disabled |  | ||||||
|       -D gallium-omx=disabled |  | ||||||
|       -D gallium-va=disabled |  | ||||||
|       -D gallium-xa=disabled |  | ||||||
|       -D gallium-nine=false |  | ||||||
|       -D gallium-opencl=disabled |  | ||||||
|       -D gallium-rusticl=false |  | ||||||
|       -D b_sanitize=undefined |  | ||||||
|       -D c_args=-fno-sanitize-recover=all |  | ||||||
|       -D cpp_args=-fno-sanitize-recover=all |  | ||||||
|     UBSAN_OPTIONS: "print_stacktrace=1" |  | ||||||
|     VULKAN_DRIVERS: intel,amd,freedreno,broadcom,virtio-experimental,imagination-experimental,microsoft-experimental |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -D vulkan-layers=device-select,overlay |  | ||||||
|       -D build-aco-tests=true |  | ||||||
|       -D intel-clc=disabled |  | ||||||
|       -D imagination-srv=true |  | ||||||
|  |  | ||||||
| debian-i386: |  | ||||||
|   extends: |  | ||||||
|     - .meson-cross |  | ||||||
|     - .use-debian/i386_build |  | ||||||
|   variables: |  | ||||||
|     CROSS: i386 |  | ||||||
|     VULKAN_DRIVERS: intel,amd,swrast,virtio-experimental |  | ||||||
|     GALLIUM_DRIVERS: "iris,nouveau,r300,r600,radeonsi,swrast,virgl,zink,crocus" |  | ||||||
|     LLVM_VERSION: 13 |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -D vulkan-layers=device-select,overlay |  | ||||||
|  |  | ||||||
| debian-s390x: |  | ||||||
|   extends: |  | ||||||
|     - debian-ppc64el |  | ||||||
|     - .use-debian/s390x_build |  | ||||||
|     - .s390x-rules |  | ||||||
|   tags: |  | ||||||
|     - kvm |  | ||||||
|   variables: |  | ||||||
|     CROSS: s390x |  | ||||||
|     GALLIUM_DRIVERS: "swrast,zink" |  | ||||||
|     LLVM_VERSION: 13 |  | ||||||
|     VULKAN_DRIVERS: "swrast" |  | ||||||
|  |  | ||||||
| debian-ppc64el: |  | ||||||
|   extends: |  | ||||||
|     - .meson-cross |  | ||||||
|     - .use-debian/ppc64el_build |  | ||||||
|     - .ppc64el-rules |  | ||||||
|   variables: |  | ||||||
|     CROSS: ppc64el |  | ||||||
|     GALLIUM_DRIVERS: "nouveau,radeonsi,swrast,virgl,zink" |  | ||||||
|     VULKAN_DRIVERS: "amd,swrast" |  | ||||||
|  |  | ||||||
| debian-mingw32-x86_64: |  | ||||||
|   extends: .meson-build_mingw |  | ||||||
|   stage: build-misc |  | ||||||
|   variables: |  | ||||||
|     UNWIND: "disabled" |  | ||||||
|     C_ARGS: > |  | ||||||
|       -Wno-error=format |  | ||||||
|       -Wno-error=format-extra-args |  | ||||||
|       -Wno-error=deprecated-declarations |  | ||||||
|       -Wno-error=unused-function |  | ||||||
|       -Wno-error=unused-variable |  | ||||||
|       -Wno-error=unused-but-set-variable |  | ||||||
|       -Wno-error=unused-value |  | ||||||
|       -Wno-error=switch |  | ||||||
|       -Wno-error=parentheses |  | ||||||
|       -Wno-error=missing-prototypes |  | ||||||
|       -Wno-error=sign-compare |  | ||||||
|       -Wno-error=narrowing |  | ||||||
|       -Wno-error=overflow |  | ||||||
|     CPP_ARGS: $C_ARGS |  | ||||||
|     GALLIUM_DRIVERS: "swrast,d3d12,zink" |  | ||||||
|     VULKAN_DRIVERS: "swrast,amd,microsoft-experimental" |  | ||||||
|     GALLIUM_ST: > |  | ||||||
|       -D gallium-opencl=icd |  | ||||||
|       -D gallium-rusticl=false |  | ||||||
|       -D opencl-spirv=true |  | ||||||
|       -D microsoft-clc=enabled |  | ||||||
|       -D static-libclc=all |  | ||||||
|       -D llvm=enabled |  | ||||||
|       -D gallium-va=true |  | ||||||
|       -D video-codecs=h264dec,h264enc,h265dec,h265enc,vc1dec |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -D min-windows-version=7 |  | ||||||
|       -D spirv-to-dxil=true |  | ||||||
|       -D gles1=enabled |  | ||||||
|       -D gles2=enabled |  | ||||||
|       -D osmesa=true |  | ||||||
|       -D cpp_rtti=true |  | ||||||
|       -D shared-glapi=enabled |  | ||||||
|       -D zlib=enabled |  | ||||||
|       --cross-file=.gitlab-ci/x86_64-w64-mingw32 |  | ||||||
| @@ -1,14 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
|  |  | ||||||
| while true; do |  | ||||||
|   devcds=`find /sys/devices/virtual/devcoredump/ -name data 2>/dev/null` |  | ||||||
|   for i in $devcds; do |  | ||||||
|     echo "Found a devcoredump at $i." |  | ||||||
|     if cp $i /results/first.devcore; then |  | ||||||
|       echo 1 > $i |  | ||||||
|       echo "Saved to the job artifacts at /first.devcore" |  | ||||||
|       exit 0 |  | ||||||
|     fi |  | ||||||
|   done |  | ||||||
|   sleep 10 |  | ||||||
| done |  | ||||||
| @@ -1,123 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| for var in \ |  | ||||||
|     ACO_DEBUG \ |  | ||||||
|     ASAN_OPTIONS \ |  | ||||||
|     BASE_SYSTEM_FORK_HOST_PREFIX \ |  | ||||||
|     BASE_SYSTEM_MAINLINE_HOST_PREFIX \ |  | ||||||
|     CI_COMMIT_BRANCH \ |  | ||||||
|     CI_COMMIT_REF_NAME \ |  | ||||||
|     CI_COMMIT_TITLE \ |  | ||||||
|     CI_JOB_ID \ |  | ||||||
|     CI_JOB_JWT_FILE \ |  | ||||||
|     CI_JOB_NAME \ |  | ||||||
|     CI_JOB_URL \ |  | ||||||
|     CI_MERGE_REQUEST_SOURCE_BRANCH_NAME \ |  | ||||||
|     CI_MERGE_REQUEST_TITLE \ |  | ||||||
|     CI_NODE_INDEX \ |  | ||||||
|     CI_NODE_TOTAL \ |  | ||||||
|     CI_PAGES_DOMAIN \ |  | ||||||
|     CI_PIPELINE_ID \ |  | ||||||
|     CI_PIPELINE_URL \ |  | ||||||
|     CI_PROJECT_DIR \ |  | ||||||
|     CI_PROJECT_NAME \ |  | ||||||
|     CI_PROJECT_PATH \ |  | ||||||
|     CI_PROJECT_ROOT_NAMESPACE \ |  | ||||||
|     CI_RUNNER_DESCRIPTION \ |  | ||||||
|     CI_SERVER_URL \ |  | ||||||
|     CROSVM_GALLIUM_DRIVER \ |  | ||||||
|     CROSVM_GPU_ARGS \ |  | ||||||
|     DEQP_BIN_DIR \ |  | ||||||
|     DEQP_CASELIST_FILTER \ |  | ||||||
|     DEQP_CASELIST_INV_FILTER \ |  | ||||||
|     DEQP_CONFIG \ |  | ||||||
|     DEQP_EXPECTED_RENDERER \ |  | ||||||
|     DEQP_FRACTION \ |  | ||||||
|     DEQP_HEIGHT \ |  | ||||||
|     DEQP_RESULTS_DIR \ |  | ||||||
|     DEQP_RUNNER_OPTIONS \ |  | ||||||
|     DEQP_SUITE \ |  | ||||||
|     DEQP_TEMP_DIR \ |  | ||||||
|     DEQP_VARIANT \ |  | ||||||
|     DEQP_VER \ |  | ||||||
|     DEQP_WIDTH \ |  | ||||||
|     DEVICE_NAME \ |  | ||||||
|     DRIVER_NAME \ |  | ||||||
|     EGL_PLATFORM \ |  | ||||||
|     ETNA_MESA_DEBUG \ |  | ||||||
|     FDO_CI_CONCURRENT \ |  | ||||||
|     FDO_UPSTREAM_REPO \ |  | ||||||
|     FD_MESA_DEBUG \ |  | ||||||
|     FLAKES_CHANNEL \ |  | ||||||
|     FREEDRENO_HANGCHECK_MS \ |  | ||||||
|     GALLIUM_DRIVER \ |  | ||||||
|     GALLIVM_PERF \ |  | ||||||
|     GPU_VERSION \ |  | ||||||
|     GTEST \ |  | ||||||
|     GTEST_FAILS \ |  | ||||||
|     GTEST_FRACTION \ |  | ||||||
|     GTEST_RESULTS_DIR \ |  | ||||||
|     GTEST_RUNNER_OPTIONS \ |  | ||||||
|     GTEST_SKIPS \ |  | ||||||
|     HWCI_FREQ_MAX \ |  | ||||||
|     HWCI_KERNEL_MODULES \ |  | ||||||
|     HWCI_KVM \ |  | ||||||
|     HWCI_START_XORG \ |  | ||||||
|     HWCI_TEST_SCRIPT \ |  | ||||||
|     IR3_SHADER_DEBUG \ |  | ||||||
|     JOB_ARTIFACTS_BASE \ |  | ||||||
|     JOB_RESULTS_PATH \ |  | ||||||
|     JOB_ROOTFS_OVERLAY_PATH \ |  | ||||||
|     KERNEL_IMAGE_BASE_URL \ |  | ||||||
|     KERNEL_IMAGE_NAME \ |  | ||||||
|     LD_LIBRARY_PATH \ |  | ||||||
|     LP_NUM_THREADS \ |  | ||||||
|     MESA_BASE_TAG \ |  | ||||||
|     MESA_BUILD_PATH \ |  | ||||||
|     MESA_DEBUG \ |  | ||||||
|     MESA_GLES_VERSION_OVERRIDE \ |  | ||||||
|     MESA_GLSL_VERSION_OVERRIDE \ |  | ||||||
|     MESA_GL_VERSION_OVERRIDE \ |  | ||||||
|     MESA_IMAGE \ |  | ||||||
|     MESA_IMAGE_PATH \ |  | ||||||
|     MESA_IMAGE_TAG \ |  | ||||||
|     MESA_LOADER_DRIVER_OVERRIDE \ |  | ||||||
|     MESA_TEMPLATES_COMMIT \ |  | ||||||
|     MESA_VK_IGNORE_CONFORMANCE_WARNING \ |  | ||||||
|     MESA_SPIRV_LOG_LEVEL \ |  | ||||||
|     MINIO_HOST \ |  | ||||||
|     MINIO_RESULTS_UPLOAD \ |  | ||||||
|     NIR_DEBUG \ |  | ||||||
|     PAN_I_WANT_A_BROKEN_VULKAN_DRIVER \ |  | ||||||
|     PAN_MESA_DEBUG \ |  | ||||||
|     PIGLIT_FRACTION \ |  | ||||||
|     PIGLIT_NO_WINDOW \ |  | ||||||
|     PIGLIT_OPTIONS \ |  | ||||||
|     PIGLIT_PLATFORM \ |  | ||||||
|     PIGLIT_PROFILES \ |  | ||||||
|     PIGLIT_REPLAY_ARTIFACTS_BASE_URL \ |  | ||||||
|     PIGLIT_REPLAY_DESCRIPTION_FILE \ |  | ||||||
|     PIGLIT_REPLAY_DEVICE_NAME \ |  | ||||||
|     PIGLIT_REPLAY_EXTRA_ARGS \ |  | ||||||
|     PIGLIT_REPLAY_LOOP_TIMES \ |  | ||||||
|     PIGLIT_REPLAY_REFERENCE_IMAGES_BASE \ |  | ||||||
|     PIGLIT_REPLAY_SUBCOMMAND \ |  | ||||||
|     PIGLIT_RESULTS \ |  | ||||||
|     PIGLIT_TESTS \ |  | ||||||
|     PIPELINE_ARTIFACTS_BASE \ |  | ||||||
|     RADV_DEBUG \ |  | ||||||
|     RADV_PERFTEST \ |  | ||||||
|     SKQP_ASSETS_DIR \ |  | ||||||
|     SKQP_BACKENDS \ |  | ||||||
|     TU_DEBUG \ |  | ||||||
|     VIRGL_HOST_API \ |  | ||||||
|     WAFFLE_PLATFORM \ |  | ||||||
|     VK_CPU \ |  | ||||||
|     VK_DRIVER \ |  | ||||||
|     VK_ICD_FILENAMES \ |  | ||||||
|     VKD3D_PROTON_RESULTS \ |  | ||||||
|     ; do |  | ||||||
|   if [ -n "${!var+x}" ]; then |  | ||||||
|     echo "export $var=${!var@Q}" |  | ||||||
|   fi |  | ||||||
| done |  | ||||||
| @@ -1,23 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
|  |  | ||||||
| # Very early init, used to make sure devices and network are set up and |  | ||||||
| # reachable. |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| cd / |  | ||||||
|  |  | ||||||
| mount -t proc none /proc |  | ||||||
| mount -t sysfs none /sys |  | ||||||
| mount -t debugfs none /sys/kernel/debug |  | ||||||
| mount -t devtmpfs none /dev || echo possibly already mounted |  | ||||||
| mkdir -p /dev/pts |  | ||||||
| mount -t devpts devpts /dev/pts |  | ||||||
| mount -t tmpfs tmpfs /tmp |  | ||||||
|  |  | ||||||
| echo "nameserver 8.8.8.8" > /etc/resolv.conf |  | ||||||
| [ -z "$NFS_SERVER_IP" ] || echo "$NFS_SERVER_IP caching-proxy" >> /etc/hosts |  | ||||||
|  |  | ||||||
| # Set the time so we can validate certificates before we fetch anything; |  | ||||||
| # however as not all DUTs have network, make this non-fatal. |  | ||||||
| for i in 1 2 3; do sntp -sS pool.ntp.org && break || sleep 2; done || true |  | ||||||
| @@ -1,164 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
|  |  | ||||||
| # Make sure to kill itself and all the children process from this script on |  | ||||||
| # exiting, since any console output may interfere with LAVA signals handling, |  | ||||||
| # which based on the log console. |  | ||||||
| cleanup() { |  | ||||||
|   if [ "$BACKGROUND_PIDS" = "" ]; then |  | ||||||
|     return 0 |  | ||||||
|   fi |  | ||||||
|  |  | ||||||
|   set +x |  | ||||||
|   echo "Killing all child processes" |  | ||||||
|   for pid in $BACKGROUND_PIDS |  | ||||||
|   do |  | ||||||
|     kill "$pid" 2>/dev/null || true |  | ||||||
|   done |  | ||||||
|  |  | ||||||
|   # Sleep just a little to give enough time for subprocesses to be gracefully |  | ||||||
|   # killed. Then apply a SIGKILL if necessary. |  | ||||||
|   sleep 5 |  | ||||||
|   for pid in $BACKGROUND_PIDS |  | ||||||
|   do |  | ||||||
|     kill -9 "$pid" 2>/dev/null || true |  | ||||||
|   done |  | ||||||
|  |  | ||||||
|   BACKGROUND_PIDS= |  | ||||||
|   set -x |  | ||||||
| } |  | ||||||
| trap cleanup INT TERM EXIT |  | ||||||
|  |  | ||||||
| # Space separated values with the PIDS of the processes started in the |  | ||||||
| # background by this script |  | ||||||
| BACKGROUND_PIDS= |  | ||||||
|  |  | ||||||
|  |  | ||||||
| # Second-stage init, used to set up devices and our job environment before |  | ||||||
| # running tests. |  | ||||||
|  |  | ||||||
| . /set-job-env-vars.sh |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| # Set up any devices required by the jobs |  | ||||||
| [ -z "$HWCI_KERNEL_MODULES" ] || { |  | ||||||
|     echo -n $HWCI_KERNEL_MODULES | xargs -d, -n1 /usr/sbin/modprobe |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Load the KVM module specific to the detected CPU virtualization extensions: |  | ||||||
| # - vmx for Intel VT |  | ||||||
| # - svm for AMD-V |  | ||||||
| # |  | ||||||
| # Additionally, download the kernel image to boot the VM via HWCI_TEST_SCRIPT. |  | ||||||
| # |  | ||||||
| if [ "$HWCI_KVM" = "true" ]; then |  | ||||||
|     unset KVM_KERNEL_MODULE |  | ||||||
|     grep -qs '\bvmx\b' /proc/cpuinfo && KVM_KERNEL_MODULE=kvm_intel || { |  | ||||||
|         grep -qs '\bsvm\b' /proc/cpuinfo && KVM_KERNEL_MODULE=kvm_amd |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     [ -z "${KVM_KERNEL_MODULE}" ] && \ |  | ||||||
|         echo "WARNING: Failed to detect CPU virtualization extensions" || \ |  | ||||||
|         modprobe ${KVM_KERNEL_MODULE} |  | ||||||
|  |  | ||||||
|     mkdir -p /lava-files |  | ||||||
|     wget -S --progress=dot:giga -O /lava-files/${KERNEL_IMAGE_NAME} \ |  | ||||||
|         "${KERNEL_IMAGE_BASE_URL}/${KERNEL_IMAGE_NAME}" |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # Fix prefix confusion: the build installs to $CI_PROJECT_DIR, but we expect |  | ||||||
| # it in /install |  | ||||||
| ln -sf $CI_PROJECT_DIR/install /install |  | ||||||
| export LD_LIBRARY_PATH=/install/lib |  | ||||||
| export LIBGL_DRIVERS_PATH=/install/lib/dri |  | ||||||
|  |  | ||||||
| # Store Mesa's disk cache under /tmp, rather than sending it out over NFS. |  | ||||||
| export XDG_CACHE_HOME=/tmp |  | ||||||
|  |  | ||||||
| # Make sure Python can find all our imports |  | ||||||
| export PYTHONPATH=$(python3 -c "import sys;print(\":\".join(sys.path))") |  | ||||||
|  |  | ||||||
| if [ "$HWCI_FREQ_MAX" = "true" ]; then |  | ||||||
|   # Ensure initialization of the DRM device (needed by MSM) |  | ||||||
|   head -0 /dev/dri/renderD128 |  | ||||||
|  |  | ||||||
|   # Disable GPU frequency scaling |  | ||||||
|   DEVFREQ_GOVERNOR=`find /sys/devices -name governor | grep gpu || true` |  | ||||||
|   test -z "$DEVFREQ_GOVERNOR" || echo performance > $DEVFREQ_GOVERNOR || true |  | ||||||
|  |  | ||||||
|   # Disable CPU frequency scaling |  | ||||||
|   echo performance | tee -a /sys/devices/system/cpu/cpufreq/policy*/scaling_governor || true |  | ||||||
|  |  | ||||||
|   # Disable GPU runtime power management |  | ||||||
|   GPU_AUTOSUSPEND=`find /sys/devices -name autosuspend_delay_ms | grep gpu | head -1` |  | ||||||
|   test -z "$GPU_AUTOSUSPEND" || echo -1 > $GPU_AUTOSUSPEND || true |  | ||||||
|   # Lock Intel GPU frequency to 70% of the maximum allowed by hardware |  | ||||||
|   # and enable throttling detection & reporting. |  | ||||||
|   # Additionally, set the upper limit for CPU scaling frequency to 65% of the |  | ||||||
|   # maximum permitted, as an additional measure to mitigate thermal throttling. |  | ||||||
|   ./intel-gpu-freq.sh -s 70% --cpu-set-max 65% -g all -d |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # Increase freedreno hangcheck timer because it's right at the edge of the |  | ||||||
| # spilling tests timing out (and some traces, too) |  | ||||||
| if [ -n "$FREEDRENO_HANGCHECK_MS" ]; then |  | ||||||
|     echo $FREEDRENO_HANGCHECK_MS | tee -a /sys/kernel/debug/dri/128/hangcheck_period_ms |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # Start a little daemon to capture the first devcoredump we encounter.  (They |  | ||||||
| # expire after 5 minutes, so we poll for them). |  | ||||||
| /capture-devcoredump.sh & |  | ||||||
| BACKGROUND_PIDS="$! $BACKGROUND_PIDS" |  | ||||||
|  |  | ||||||
| # If we want Xorg to be running for the test, then we start it up before the |  | ||||||
| # HWCI_TEST_SCRIPT because we need to use xinit to start X (otherwise |  | ||||||
| # without using -displayfd you can race with Xorg's startup), but xinit will eat |  | ||||||
| # your client's return code |  | ||||||
| if [ -n "$HWCI_START_XORG" ]; then |  | ||||||
|   echo "touch /xorg-started; sleep 100000" > /xorg-script |  | ||||||
|   env \ |  | ||||||
|     xinit /bin/sh /xorg-script -- /usr/bin/Xorg -noreset -s 0 -dpms -logfile /Xorg.0.log & |  | ||||||
|   BACKGROUND_PIDS="$! $BACKGROUND_PIDS" |  | ||||||
|  |  | ||||||
|   # Wait for xorg to be ready for connections. |  | ||||||
|   for i in 1 2 3 4 5; do |  | ||||||
|     if [ -e /xorg-started ]; then |  | ||||||
|       break |  | ||||||
|     fi |  | ||||||
|     sleep 5 |  | ||||||
|   done |  | ||||||
|   export DISPLAY=:0 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| RESULT=fail |  | ||||||
| set +e |  | ||||||
| sh -c "$HWCI_TEST_SCRIPT" |  | ||||||
| EXIT_CODE=$? |  | ||||||
| set -e |  | ||||||
|  |  | ||||||
| # Let's make sure the results are always stored in current working directory |  | ||||||
| mv -f ${CI_PROJECT_DIR}/results ./ 2>/dev/null || true |  | ||||||
|  |  | ||||||
| [ ${EXIT_CODE} -ne 0 ] || rm -rf results/trace/"$PIGLIT_REPLAY_DEVICE_NAME" |  | ||||||
|  |  | ||||||
| # Make sure that capture-devcoredump is done before we start trying to tar up |  | ||||||
| # artifacts -- if it's writing while tar is reading, tar will throw an error and |  | ||||||
| # kill the job. |  | ||||||
| cleanup |  | ||||||
|  |  | ||||||
| # upload artifacts |  | ||||||
| if [ -n "$MINIO_RESULTS_UPLOAD" ]; then |  | ||||||
|   tar --zstd -cf results.tar.zst results/; |  | ||||||
|   ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" results.tar.zst https://"$MINIO_RESULTS_UPLOAD"/results.tar.zst; |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # We still need to echo the hwci: mesa message, as some scripts rely on it, such |  | ||||||
| # as the python ones inside the bare-metal folder |  | ||||||
| [ ${EXIT_CODE} -eq 0 ] && RESULT=pass |  | ||||||
|  |  | ||||||
| set +x |  | ||||||
| echo "hwci: mesa: $RESULT" |  | ||||||
| # Sleep a bit to avoid kernel dump message interleave from LAVA ENDTC signal |  | ||||||
| sleep 1 |  | ||||||
| exit $EXIT_CODE |  | ||||||
| @@ -1,758 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
| # |  | ||||||
| # This is an utility script to manage Intel GPU frequencies. |  | ||||||
| # It can be used for debugging performance problems or trying to obtain a stable |  | ||||||
| # frequency while benchmarking. |  | ||||||
| # |  | ||||||
| # Note the Intel i915 GPU driver allows to change the minimum, maximum and boost |  | ||||||
| # frequencies in steps of 50 MHz via: |  | ||||||
| # |  | ||||||
| # /sys/class/drm/card<n>/<freq_info> |  | ||||||
| # |  | ||||||
| # Where <n> is the DRM card index and <freq_info> one of the following: |  | ||||||
| # |  | ||||||
| # - gt_max_freq_mhz (enforced maximum freq) |  | ||||||
| # - gt_min_freq_mhz (enforced minimum freq) |  | ||||||
| # - gt_boost_freq_mhz (enforced boost freq) |  | ||||||
| # |  | ||||||
| # The hardware capabilities can be accessed via: |  | ||||||
| # |  | ||||||
| # - gt_RP0_freq_mhz (supported maximum freq) |  | ||||||
| # - gt_RPn_freq_mhz (supported minimum freq) |  | ||||||
| # - gt_RP1_freq_mhz (most efficient freq) |  | ||||||
| # |  | ||||||
| # The current frequency can be read from: |  | ||||||
| # - gt_act_freq_mhz (the actual GPU freq) |  | ||||||
| # - gt_cur_freq_mhz (the last requested freq) |  | ||||||
| # |  | ||||||
| # Also note that in addition to GPU management, the script offers the |  | ||||||
| # possibility to adjust CPU operating frequencies. However, this is currently |  | ||||||
| # limited to just setting the maximum scaling frequency as percentage of the |  | ||||||
| # maximum frequency allowed by the hardware. |  | ||||||
| # |  | ||||||
| # Copyright (C) 2022 Collabora Ltd. |  | ||||||
| # Author: Cristian Ciocaltea <cristian.ciocaltea@collabora.com> |  | ||||||
| # |  | ||||||
| # SPDX-License-Identifier: MIT |  | ||||||
| # |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Constants |  | ||||||
| # |  | ||||||
|  |  | ||||||
| # GPU |  | ||||||
| DRM_FREQ_SYSFS_PATTERN="/sys/class/drm/card%d/gt_%s_freq_mhz" |  | ||||||
| ENF_FREQ_INFO="max min boost" |  | ||||||
| CAP_FREQ_INFO="RP0 RPn RP1" |  | ||||||
| ACT_FREQ_INFO="act cur" |  | ||||||
| THROTT_DETECT_SLEEP_SEC=2 |  | ||||||
| THROTT_DETECT_PID_FILE_PATH=/tmp/thrott-detect.pid |  | ||||||
|  |  | ||||||
| # CPU |  | ||||||
| CPU_SYSFS_PREFIX=/sys/devices/system/cpu |  | ||||||
| CPU_PSTATE_SYSFS_PATTERN="${CPU_SYSFS_PREFIX}/intel_pstate/%s" |  | ||||||
| CPU_FREQ_SYSFS_PATTERN="${CPU_SYSFS_PREFIX}/cpu%s/cpufreq/%s_freq" |  | ||||||
| CAP_CPU_FREQ_INFO="cpuinfo_max cpuinfo_min" |  | ||||||
| ENF_CPU_FREQ_INFO="scaling_max scaling_min" |  | ||||||
| ACT_CPU_FREQ_INFO="scaling_cur" |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Global variables. |  | ||||||
| # |  | ||||||
| unset INTEL_DRM_CARD_INDEX |  | ||||||
| unset GET_ACT_FREQ GET_ENF_FREQ GET_CAP_FREQ |  | ||||||
| unset SET_MIN_FREQ SET_MAX_FREQ |  | ||||||
| unset MONITOR_FREQ |  | ||||||
| unset CPU_SET_MAX_FREQ |  | ||||||
| unset DETECT_THROTT |  | ||||||
| unset DRY_RUN |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Simple printf based stderr logger. |  | ||||||
| # |  | ||||||
| log() { |  | ||||||
|     local msg_type=$1 |  | ||||||
|  |  | ||||||
|     shift |  | ||||||
|     printf "%s: %s: " "${msg_type}" "${0##*/}" >&2 |  | ||||||
|     printf "$@" >&2 |  | ||||||
|     printf "\n" >&2 |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Helper to print sysfs path for the given card index and freq info. |  | ||||||
| # |  | ||||||
| # arg1: Frequency info sysfs name, one of *_FREQ_INFO constants above |  | ||||||
| # arg2: Video card index, defaults to INTEL_DRM_CARD_INDEX |  | ||||||
| # |  | ||||||
| print_freq_sysfs_path() { |  | ||||||
|     printf ${DRM_FREQ_SYSFS_PATTERN} "${2:-${INTEL_DRM_CARD_INDEX}}" "$1" |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Helper to set INTEL_DRM_CARD_INDEX for the first identified Intel video card. |  | ||||||
| # |  | ||||||
| identify_intel_gpu() { |  | ||||||
|     local i=0 vendor path |  | ||||||
|  |  | ||||||
|     while [ ${i} -lt 16 ]; do |  | ||||||
|         [ -c "/dev/dri/card$i" ] || { |  | ||||||
|             i=$((i + 1)) |  | ||||||
|             continue |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         path=$(print_freq_sysfs_path "" ${i}) |  | ||||||
|         path=${path%/*}/device/vendor |  | ||||||
|  |  | ||||||
|         [ -r "${path}" ] && read vendor < "${path}" && \ |  | ||||||
|             [ "${vendor}" = "0x8086" ] && INTEL_DRM_CARD_INDEX=$i && return 0 |  | ||||||
|  |  | ||||||
|         i=$((i + 1)) |  | ||||||
|     done |  | ||||||
|  |  | ||||||
|     return 1 |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Read the specified freq info from sysfs. |  | ||||||
| # |  | ||||||
| # arg1: Flag (y/n) to also enable printing the freq info. |  | ||||||
| # arg2...: Frequency info sysfs name(s), see *_FREQ_INFO constants above |  | ||||||
| # return: Global variable(s) FREQ_${arg} containing the requested information |  | ||||||
| # |  | ||||||
| read_freq_info() { |  | ||||||
|     local var val info path print=0 ret=0 |  | ||||||
|  |  | ||||||
|     [ "$1" = "y" ] && print=1 |  | ||||||
|     shift |  | ||||||
|  |  | ||||||
|     while [ $# -gt 0 ]; do |  | ||||||
|         info=$1 |  | ||||||
|         shift |  | ||||||
|         var=FREQ_${info} |  | ||||||
|         path=$(print_freq_sysfs_path "${info}") |  | ||||||
|  |  | ||||||
|         [ -r ${path} ] && read ${var} < ${path} || { |  | ||||||
|             log ERROR "Failed to read freq info from: %s" "${path}" |  | ||||||
|             ret=1 |  | ||||||
|             continue |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         [ -n "${var}" ] || { |  | ||||||
|             log ERROR "Got empty freq info from: %s" "${path}" |  | ||||||
|             ret=1 |  | ||||||
|             continue |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         [ ${print} -eq 1 ] && { |  | ||||||
|             eval val=\$${var} |  | ||||||
|             printf "%6s: %4s MHz\n" "${info}" "${val}" |  | ||||||
|         } |  | ||||||
|     done |  | ||||||
|  |  | ||||||
|     return ${ret} |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Display requested info. |  | ||||||
| # |  | ||||||
| print_freq_info() { |  | ||||||
|     local req_freq |  | ||||||
|  |  | ||||||
|     [ -n "${GET_CAP_FREQ}" ] && { |  | ||||||
|         printf "* Hardware capabilities\n" |  | ||||||
|         read_freq_info y ${CAP_FREQ_INFO} |  | ||||||
|         printf "\n" |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     [ -n "${GET_ENF_FREQ}" ] && { |  | ||||||
|         printf "* Enforcements\n" |  | ||||||
|         read_freq_info y ${ENF_FREQ_INFO} |  | ||||||
|         printf "\n" |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     [ -n "${GET_ACT_FREQ}" ] && { |  | ||||||
|         printf "* Actual\n" |  | ||||||
|         read_freq_info y ${ACT_FREQ_INFO} |  | ||||||
|         printf "\n" |  | ||||||
|     } |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Helper to print frequency value as requested by user via '-s, --set' option. |  | ||||||
| # arg1: user requested freq value |  | ||||||
| # |  | ||||||
| compute_freq_set() { |  | ||||||
|     local val |  | ||||||
|  |  | ||||||
|     case "$1" in |  | ||||||
|     +) |  | ||||||
|         val=${FREQ_RP0} |  | ||||||
|         ;; |  | ||||||
|     -) |  | ||||||
|         val=${FREQ_RPn} |  | ||||||
|         ;; |  | ||||||
|     *%) |  | ||||||
|         val=$((${1%?} * ${FREQ_RP0} / 100)) |  | ||||||
|         # Adjust freq to comply with 50 MHz increments |  | ||||||
|         val=$((val / 50 * 50)) |  | ||||||
|         ;; |  | ||||||
|     *[!0-9]*) |  | ||||||
|         log ERROR "Cannot set freq to invalid value: %s" "$1" |  | ||||||
|         return 1 |  | ||||||
|         ;; |  | ||||||
|     "") |  | ||||||
|         log ERROR "Cannot set freq to unspecified value" |  | ||||||
|         return 1 |  | ||||||
|         ;; |  | ||||||
|     *) |  | ||||||
|         # Adjust freq to comply with 50 MHz increments |  | ||||||
|         val=$(($1 / 50 * 50)) |  | ||||||
|         ;; |  | ||||||
|     esac |  | ||||||
|  |  | ||||||
|     printf "%s" "${val}" |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Helper for set_freq(). |  | ||||||
| # |  | ||||||
| set_freq_max() { |  | ||||||
|     log INFO "Setting GPU max freq to %s MHz" "${SET_MAX_FREQ}" |  | ||||||
|  |  | ||||||
|     read_freq_info n min || return $? |  | ||||||
|  |  | ||||||
|     [ ${SET_MAX_FREQ} -gt ${FREQ_RP0} ] && { |  | ||||||
|         log ERROR "Cannot set GPU max freq (%s) to be greater than hw max freq (%s)" \ |  | ||||||
|             "${SET_MAX_FREQ}" "${FREQ_RP0}" |  | ||||||
|         return 1 |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     [ ${SET_MAX_FREQ} -lt ${FREQ_RPn} ] && { |  | ||||||
|         log ERROR "Cannot set GPU max freq (%s) to be less than hw min freq (%s)" \ |  | ||||||
|             "${SET_MIN_FREQ}" "${FREQ_RPn}" |  | ||||||
|         return 1 |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     [ ${SET_MAX_FREQ} -lt ${FREQ_min} ] && { |  | ||||||
|         log ERROR "Cannot set GPU max freq (%s) to be less than min freq (%s)" \ |  | ||||||
|             "${SET_MAX_FREQ}" "${FREQ_min}" |  | ||||||
|         return 1 |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     [ -z "${DRY_RUN}" ] || return 0 |  | ||||||
|  |  | ||||||
|     printf "%s" ${SET_MAX_FREQ} | tee $(print_freq_sysfs_path max) \ |  | ||||||
|         $(print_freq_sysfs_path boost) > /dev/null |  | ||||||
|     [ $? -eq 0 ] || { |  | ||||||
|         log ERROR "Failed to set GPU max frequency" |  | ||||||
|         return 1 |  | ||||||
|     } |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Helper for set_freq(). |  | ||||||
| # |  | ||||||
| set_freq_min() { |  | ||||||
|     log INFO "Setting GPU min freq to %s MHz" "${SET_MIN_FREQ}" |  | ||||||
|  |  | ||||||
|     read_freq_info n max || return $? |  | ||||||
|  |  | ||||||
|     [ ${SET_MIN_FREQ} -gt ${FREQ_max} ] && { |  | ||||||
|         log ERROR "Cannot set GPU min freq (%s) to be greater than max freq (%s)" \ |  | ||||||
|             "${SET_MIN_FREQ}" "${FREQ_max}" |  | ||||||
|         return 1 |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     [ ${SET_MIN_FREQ} -lt ${FREQ_RPn} ] && { |  | ||||||
|         log ERROR "Cannot set GPU min freq (%s) to be less than hw min freq (%s)" \ |  | ||||||
|             "${SET_MIN_FREQ}" "${FREQ_RPn}" |  | ||||||
|         return 1 |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     [ -z "${DRY_RUN}" ] || return 0 |  | ||||||
|  |  | ||||||
|     printf "%s" ${SET_MIN_FREQ} > $(print_freq_sysfs_path min) |  | ||||||
|     [ $? -eq 0 ] || { |  | ||||||
|         log ERROR "Failed to set GPU min frequency" |  | ||||||
|         return 1 |  | ||||||
|     } |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Set min or max or both GPU frequencies to the user indicated values. |  | ||||||
| # |  | ||||||
set_freq() {
    # Get hw max & min frequencies (RP0/RPn), needed by the helpers below.
    read_freq_info n RP0 RPn || return $?

    # Resolve symbolic/percentage requests into concrete MHz values.
    [ -z "${SET_MAX_FREQ}" ] || {
        SET_MAX_FREQ=$(compute_freq_set "${SET_MAX_FREQ}")
        [ -z "${SET_MAX_FREQ}" ] && return 1
    }

    [ -z "${SET_MIN_FREQ}" ] || {
        SET_MIN_FREQ=$(compute_freq_set "${SET_MIN_FREQ}")
        [ -z "${SET_MIN_FREQ}" ] && return 1
    }

    #
    # Ensure correct operation order, to avoid setting min freq
    # to a value which is larger than max freq.
    #
    # E.g.:
    #   crt_min=crt_max=600; new_min=new_max=700
    #   > operation order: max=700; min=700
    #
    #   crt_min=crt_max=600; new_min=new_max=500
    #   > operation order: min=500; max=500
    #
    if [ -n "${SET_MAX_FREQ}" ] && [ -n "${SET_MIN_FREQ}" ]; then
        [ ${SET_MAX_FREQ} -lt ${SET_MIN_FREQ} ] && {
            log ERROR "Cannot set GPU max freq to be less than min freq"
            return 1
        }

        read_freq_info n min || return $?

        if [ ${SET_MAX_FREQ} -lt ${FREQ_min} ]; then
            set_freq_min || return $?
            set_freq_max
        else
            set_freq_max || return $?
            set_freq_min
        fi
    elif [ -n "${SET_MAX_FREQ}" ]; then
        set_freq_max
    elif [ -n "${SET_MIN_FREQ}" ]; then
        set_freq_min
    else
        # Fix: log() is called with a level as first argument everywhere
        # else; the original call here omitted it.
        log ERROR "Unexpected call to set_freq()"
        return 1
    fi
}
|  |  | ||||||
| # |  | ||||||
| # Helper for detect_throttling(). |  | ||||||
| # |  | ||||||
#
# Helper for detect_throttling().
# Prints the detector PID on stdout when a live detector process exists;
# prints nothing (and cleans up any stale PID file) otherwise.
#
get_thrott_detect_pid() {
    # No PID file means no detector was started: not an error.
    [ -e ${THROTT_DETECT_PID_FILE_PATH} ] || return 0

    local pid
    read pid < ${THROTT_DETECT_PID_FILE_PATH} || {
        log ERROR "Failed to read pid from: %s" "${THROTT_DETECT_PID_FILE_PATH}"
        return 1
    }

    # Accept the PID only if it belongs to a live process whose cmdline
    # mentions this script's name (guards against stale files / PID reuse).
    local proc_path=/proc/${pid:-invalid}/cmdline
    [ -r ${proc_path} ] && grep -qs "${0##*/}" ${proc_path} && {
        printf "%s" "${pid}"
        return 0
    }

    # Remove orphaned PID file. It is a plain file, so -f suffices;
    # the original -rf was unnecessarily broad.
    rm -f ${THROTT_DETECT_PID_FILE_PATH}
    return 1
}
|  |  | ||||||
| # |  | ||||||
| # Control detection and reporting of GPU throttling events. |  | ||||||
| # arg1: start - run throttle detector in background |  | ||||||
| #       stop - stop throttle detector process, if any |  | ||||||
| #       status - verify if throttle detector is running |  | ||||||
| # |  | ||||||
detect_throttling() {
    local pid
    # Look up an already-running detector; empty when none exists.
    pid=$(get_thrott_detect_pid)

    case "$1" in
    status)
        printf "Throttling detector is "
        [ -z "${pid}" ] && printf "not running\n" && return 0
        printf "running (pid=%s)\n" ${pid}
        ;;

    stop)
        # Nothing to stop when no detector is running.
        [ -z "${pid}" ] && return 0

        log INFO "Stopping throttling detector (pid=%s)" "${pid}"
        # Graceful kill first; force-kill only if still alive after 1s.
        kill ${pid}; sleep 1; kill -0 ${pid} 2>/dev/null && kill -9 ${pid}
        rm -rf ${THROTT_DETECT_PID_FILE_PATH}
        ;;

    start)
        [ -n "${pid}" ] && {
            log WARN "Throttling detector is already running (pid=%s)" ${pid}
            return 0
        }

        # Detector loop runs in a background subshell; it polls the GPU
        # frequencies every THROTT_DETECT_SLEEP_SEC seconds.
        (
            read_freq_info n RPn || exit $?

            while true; do
                sleep ${THROTT_DETECT_SLEEP_SEC}
                read_freq_info n act min cur || exit $?

                #
                # The throttling seems to occur when act freq goes below min.
                # However, it's necessary to exclude the idle states, where
                # act freq normally reaches RPn and cur goes below min.
                #
                [ ${FREQ_act} -lt ${FREQ_min} ] && \
                [ ${FREQ_act} -gt ${FREQ_RPn} ] && \
                [ ${FREQ_cur} -ge ${FREQ_min} ] && \
                    printf "GPU throttling detected: act=%s min=%s cur=%s RPn=%s\n" \
                    ${FREQ_act} ${FREQ_min} ${FREQ_cur} ${FREQ_RPn}
            done
        ) &

        # Record the detector PID so later stop/status calls can find it.
        pid=$!
        log INFO "Started GPU throttling detector (pid=%s)" ${pid}

        printf "%s\n" ${pid} > ${THROTT_DETECT_PID_FILE_PATH} || \
            log WARN "Failed to write throttle detector PID file"
        ;;
    esac
}
|  |  | ||||||
| # |  | ||||||
| # Retrieve the list of online CPUs. |  | ||||||
| # |  | ||||||
#
# Print the space-separated indices of all online CPUs.
#
get_online_cpus() {
    local entry idx

    # cpu0 is assumed to always be online (it typically exposes no
    # 'online' attribute), so it is emitted unconditionally.
    printf "0"

    # grep prints "<path>:1" for each online CPU; the index sits between
    # ".../cpu" and "/online" in the path.
    for entry in $(grep 1 ${CPU_SYSFS_PREFIX}/cpu*/online); do
        idx=${entry##*/cpu}
        printf " %s" ${idx%%/*}
    done
}
|  |  | ||||||
| # |  | ||||||
| # Helper to print sysfs path for the given CPU index and freq info. |  | ||||||
| # |  | ||||||
| # arg1: Frequency info sysfs name, one of *_CPU_FREQ_INFO constants above |  | ||||||
| # arg2: CPU index |  | ||||||
| # |  | ||||||
print_cpu_freq_sysfs_path() {
    # Quote the pattern so printf receives it as a single format argument,
    # immune to word splitting and pathname expansion.
    printf "${CPU_FREQ_SYSFS_PATTERN}" "$2" "$1"
}
|  |  | ||||||
| # |  | ||||||
| # Read the specified CPU freq info from sysfs. |  | ||||||
| # |  | ||||||
| # arg1: CPU index |  | ||||||
| # arg2: Flag (y/n) to also enable printing the freq info. |  | ||||||
| # arg3...: Frequency info sysfs name(s), see *_CPU_FREQ_INFO constants above |  | ||||||
| # return: Global variable(s) CPU_FREQ_${arg} containing the requested information |  | ||||||
| # |  | ||||||
read_cpu_freq_info() {
    local var val info path cpu_index print=0 ret=0

    cpu_index=$1
    [ "$2" = "y" ] && print=1
    shift 2

    # Each remaining argument names one cpufreq sysfs attribute to read
    # into a CPU_FREQ_<name> global.
    while [ $# -gt 0 ]; do
        info=$1
        shift
        var=CPU_FREQ_${info}
        path=$(print_cpu_freq_sysfs_path "${info}" ${cpu_index})

        [ -r ${path} ] && read ${var} < ${path} || {
            log ERROR "Failed to read CPU freq info from: %s" "${path}"
            ret=1
            continue
        }

        # Fix: test the *value* of the dynamically named variable. The
        # original tested "${var}", i.e. the variable name itself, which
        # is never empty, so the check could never trigger.
        eval val=\$${var}
        [ -n "${val}" ] || {
            log ERROR "Got empty CPU freq info from: %s" "${path}"
            ret=1
            continue
        }

        # cpufreq sysfs attributes report frequencies in kHz, not Hz.
        [ ${print} -eq 1 ] && printf "%6s: %4s kHz\n" "${info}" "${val}"
    done

    return ${ret}
}
|  |  | ||||||
| # |  | ||||||
| # Helper to print freq. value as requested by user via '--cpu-set-max' option. |  | ||||||
| # arg1: user requested freq value |  | ||||||
| # |  | ||||||
#
# Helper to print freq. value as requested by user via '--cpu-set-max' option.
# arg1: user requested freq value (+, -, or N%)
#
compute_cpu_freq_set() {
    local freq

    case "$1" in
    "")
        log ERROR "Cannot set CPU freq to unspecified value"
        return 1
        ;;
    +)
        # Hardware maximum.
        freq=${CPU_FREQ_cpuinfo_max}
        ;;
    -)
        # Hardware minimum.
        freq=${CPU_FREQ_cpuinfo_min}
        ;;
    *%)
        # Percentage of the hardware maximum.
        freq=$((${1%?} * ${CPU_FREQ_cpuinfo_max} / 100))
        ;;
    *[!0-9]*)
        log ERROR "Cannot set CPU freq to invalid value: %s" "$1"
        return 1
        ;;
    *)
        log ERROR "Cannot set CPU freq to custom value; use +, -, or % instead"
        return 1
        ;;
    esac

    printf "%s" "${freq}"
}
|  |  | ||||||
| # |  | ||||||
| # Adjust CPU max scaling frequency. |  | ||||||
| # |  | ||||||
set_cpu_freq_max() {
    local target_freq res=0
    # Translate the requested value into a percentage for intel_pstate.
    case "${CPU_SET_MAX_FREQ}" in
    +)
        target_freq=100
        ;;
    -)
        target_freq=1
        ;;
    *%)
        target_freq=${CPU_SET_MAX_FREQ%?}
        ;;
    *)
        log ERROR "Invalid CPU freq"
        return 1
        ;;
    esac

    # When the intel_pstate driver is present, cap its max perf percentage.
    local pstate_info=$(printf "${CPU_PSTATE_SYSFS_PATTERN}" max_perf_pct)
    [ -e "${pstate_info}" ] && {
        log INFO "Setting intel_pstate max perf to %s" "${target_freq}%"
        printf "%s" "${target_freq}" > "${pstate_info}"
        [ $? -eq 0 ] || {
            log ERROR "Failed to set intel_pstate max perf"
            res=1
        }
    }

    # Also cap the per-CPU cpufreq scaling limit for every online CPU.
    local cpu_index
    for cpu_index in $(get_online_cpus); do
        read_cpu_freq_info ${cpu_index} n ${CAP_CPU_FREQ_INFO} || { res=$?; continue; }

        target_freq=$(compute_cpu_freq_set "${CPU_SET_MAX_FREQ}")
        # Fix: record an error explicitly. The original used res=$?, which
        # captured the status of the (successful) [ -z ] test itself, so
        # compute failures were silently dropped.
        [ -z "${target_freq}" ] && { res=1; continue; }

        # cpufreq scaling values are in kHz.
        log INFO "Setting CPU%s max scaling freq to %s kHz" ${cpu_index} "${target_freq}"
        [ -n "${DRY_RUN}" ] && continue

        printf "%s" ${target_freq} > $(print_cpu_freq_sysfs_path scaling_max ${cpu_index})
        [ $? -eq 0 ] || {
            res=1
            log ERROR "Failed to set CPU%s max scaling frequency" ${cpu_index}
        }
    done

    return ${res}
}
|  |  | ||||||
| # |  | ||||||
| # Show help message. |  | ||||||
| # |  | ||||||
print_usage() {
    # Fixes vs the original text: removed the duplicated '-r, --reset'
    # entry, closed the '--cpu-set-max' brace correctly, and made the
    # '-d' option line match the '-x, --xxx' style of the other options.
    cat <<EOF
Usage: ${0##*/} [OPTION]...

A script to manage Intel GPU frequencies. Can be used for debugging performance
problems or trying to obtain a stable frequency while benchmarking.

Note Intel GPUs only accept specific frequencies, usually multiples of 50 MHz.

Options:
  -g, --get [act|enf|cap|all]
                        Get frequency information: active (default), enforced,
                        hardware capabilities or all of them.

  -s, --set [{min|max}=]{FREQUENCY[%]|+|-}
                        Set min or max frequency to the given value (MHz).
                        Append '%' to interpret FREQUENCY as % of hw max.
                        Use '+' or '-' to set frequency to hardware max or min.
                        Omit min/max prefix to set both frequencies.

  -r, --reset           Reset frequencies to hardware defaults.

  -m, --monitor [act|enf|cap|all]
                        Monitor the indicated frequencies via 'watch' utility.
                        See '-g, --get' option for more details.

  -d, --detect-thrott [start|stop|status]
                        Start (default operation) the throttling detector
                        as a background process. Use 'stop' or 'status' to
                        terminate the detector process or verify its status.

  --cpu-set-max {FREQUENCY%|+|-}
                        Set CPU max scaling frequency as % of hw max.
                        Use '+' or '-' to set frequency to hardware max or min.

  --dry-run             See what the script will do without applying any
                        frequency changes.

  -h, --help            Display this help text and exit.
EOF
}
|  |  | ||||||
| # |  | ||||||
| # Parse user input for '-g, --get' option. |  | ||||||
| # Returns 0 if a value has been provided, otherwise 1. |  | ||||||
| # |  | ||||||
#
# Parse user input for '-g, --get' option.
# Returns 0 if a value has been provided, otherwise 1.
#
parse_option_get() {
    local rc=0

    case "$1" in
    all)
        # 'all' selects every frequency category at once.
        GET_ACT_FREQ=1
        GET_ENF_FREQ=1
        GET_CAP_FREQ=1
        ;;
    act) GET_ACT_FREQ=1;;
    enf) GET_ENF_FREQ=1;;
    cap) GET_CAP_FREQ=1;;
    ""|-*)
        # The next token is another option (or missing): default to the
        # active frequency and tell the caller not to consume the token.
        GET_ACT_FREQ=1
        rc=1
        ;;
    *)
        print_usage
        exit 1
        ;;
    esac

    return ${rc}
}
|  |  | ||||||
| # |  | ||||||
| # Validate user input for '-s, --set' option. |  | ||||||
| # arg1: input value to be validated |  | ||||||
| # arg2: optional flag indicating input is restricted to % |  | ||||||
| # |  | ||||||
validate_option_set() {
    case "$1" in
    +|-|[0-9]%|[0-9][0-9]%|100%)
        # Symbolic min/max or a percentage. Fix: the original only matched
        # 1- and 2-digit percentages, so the legitimate '100%' fell into
        # the non-digit pattern below and was rejected.
        return 0
        ;;
    *[!0-9]*|"")
        # Anything else containing a non-digit, or an empty value: invalid.
        print_usage
        exit 1
        ;;
    esac

    # A plain numeric MHz value remains; it is only allowed when the caller
    # did not restrict input to percentages/symbolic values (arg2 unset).
    [ -z "$2" ] || { print_usage; exit 1; }
}
|  |  | ||||||
| # |  | ||||||
| # Parse script arguments. |  | ||||||
| # |  | ||||||
# Running with no arguments is a usage error.
[ $# -eq 0 ] && { print_usage; exit 1; }

while [ $# -gt 0 ]; do
    case "$1" in
    -g|--get)
        # parse_option_get() returns 0 only when "$2" was a real value,
        # in which case it must be consumed with an extra shift.
        parse_option_get "$2" && shift
        ;;

    -s|--set)
        shift
        # Optional min=/max= prefix selects which limit to set; a bare
        # value sets both limits to the same frequency.
        case "$1" in
        min=*)
            SET_MIN_FREQ=${1#min=}
            validate_option_set "${SET_MIN_FREQ}"
            ;;
        max=*)
            SET_MAX_FREQ=${1#max=}
            validate_option_set "${SET_MAX_FREQ}"
            ;;
        *)
            SET_MIN_FREQ=$1
            validate_option_set "${SET_MIN_FREQ}"
            SET_MAX_FREQ=${SET_MIN_FREQ}
            ;;
        esac
        ;;

    -r|--reset)
        # Reset is implemented as "min to hw min, max to hw max".
        RESET_FREQ=1
        SET_MIN_FREQ="-"
        SET_MAX_FREQ="+"
        ;;

    -m|--monitor)
        # Default to monitoring the active frequency; an explicit value
        # in "$2" overrides it and is consumed.
        MONITOR_FREQ=act
        parse_option_get "$2" && MONITOR_FREQ=$2 && shift
        ;;

    -d|--detect-thrott)
        # Default operation is 'start'; an explicit sub-command in "$2"
        # overrides it and is consumed.
        DETECT_THROTT=start
        case "$2" in
        start|stop|status)
            DETECT_THROTT=$2
            shift
            ;;
        esac
        ;;

    --cpu-set-max)
        shift
        CPU_SET_MAX_FREQ=$1
        # 'restricted' limits input to %, '+' or '-' (no raw MHz values).
        validate_option_set "${CPU_SET_MAX_FREQ}" restricted
        ;;

    --dry-run)
        DRY_RUN=1
        ;;

    -h|--help)
        print_usage
        exit 0
        ;;

    *)
        print_usage
        exit 1
        ;;
    esac

    shift
done
|  |  | ||||||
| # |  | ||||||
| # Main |  | ||||||
| # |  | ||||||
RET=0

# Nothing to manage on machines without an Intel GPU; exit successfully.
identify_intel_gpu || {
    log INFO "No Intel GPU detected"
    exit 0
}

# Apply any requested GPU frequency changes, then always print freq info.
# Failures are recorded in RET but do not abort the remaining actions.
[ -n "${SET_MIN_FREQ}${SET_MAX_FREQ}" ] && { set_freq || RET=$?; }
print_freq_info

[ -n "${DETECT_THROTT}" ] && detect_throttling ${DETECT_THROTT}

[ -n "${CPU_SET_MAX_FREQ}" ] && { set_cpu_freq_max || RET=$?; }

# Monitoring mode replaces this process with 'watch' and never returns.
[ -n "${MONITOR_FREQ}" ] && {
    log INFO "Entering frequency monitoring mode"
    sleep 2
    exec watch -d -n 1 "$0" -g "${MONITOR_FREQ}"
}

exit ${RET}
| @@ -1,21 +0,0 @@ | |||||||
#!/bin/sh

set -ex

_XORG_SCRIPT="/xorg-script"
_FLAG_FILE="/xorg-started"

# The client script run under xinit: drop the flag file, then keep the
# X session alive.
echo "touch ${_FLAG_FILE}; sleep 100000" > "${_XORG_SCRIPT}"

# An optional first argument points at an alternative Mesa install prefix.
if [ -n "$1" ]; then
    export LD_LIBRARY_PATH="${1}/lib"
    export LIBGL_DRIVERS_PATH="${1}/lib/dri"
fi

xinit /bin/sh "${_XORG_SCRIPT}" -- /usr/bin/Xorg vt45 -noreset -s 0 -dpms -logfile /Xorg.0.log &

# Wait (up to 5 x 5s) for Xorg to be ready for connections.
_attempt=0
while [ ${_attempt} -lt 5 ]; do
    [ -e "${_FLAG_FILE}" ] && break
    sleep 5
    _attempt=$((_attempt + 1))
done
| @@ -1,63 +0,0 @@ | |||||||
| CONFIG_LOCALVERSION_AUTO=y |  | ||||||
| CONFIG_DEBUG_KERNEL=y |  | ||||||
|  |  | ||||||
| # abootimg with a 'dummy' rootfs fails with root=/dev/nfs |  | ||||||
| CONFIG_BLK_DEV_INITRD=n |  | ||||||
|  |  | ||||||
| CONFIG_DEVFREQ_GOV_PERFORMANCE=y |  | ||||||
| CONFIG_DEVFREQ_GOV_POWERSAVE=y |  | ||||||
| CONFIG_DEVFREQ_GOV_USERSPACE=y |  | ||||||
| CONFIG_DEVFREQ_GOV_PASSIVE=y |  | ||||||
| CONFIG_DEVFREQ_GOV_SIMPLE_ONDEMAND=y |  | ||||||
|  |  | ||||||
| CONFIG_DRM=y |  | ||||||
| CONFIG_DRM_ETNAVIV=y |  | ||||||
| CONFIG_DRM_ROCKCHIP=y |  | ||||||
| CONFIG_DRM_PANFROST=y |  | ||||||
| CONFIG_DRM_LIMA=y |  | ||||||
| CONFIG_DRM_PANEL_SIMPLE=y |  | ||||||
| CONFIG_PWM_CROS_EC=y |  | ||||||
| CONFIG_BACKLIGHT_PWM=y |  | ||||||
|  |  | ||||||
| CONFIG_ROCKCHIP_CDN_DP=n |  | ||||||
|  |  | ||||||
| CONFIG_SPI_ROCKCHIP=y |  | ||||||
| CONFIG_PWM_ROCKCHIP=y |  | ||||||
| CONFIG_PHY_ROCKCHIP_DP=y |  | ||||||
| CONFIG_DWMAC_ROCKCHIP=y |  | ||||||
|  |  | ||||||
| CONFIG_MFD_RK808=y |  | ||||||
| CONFIG_REGULATOR_RK808=y |  | ||||||
| CONFIG_RTC_DRV_RK808=y |  | ||||||
| CONFIG_COMMON_CLK_RK808=y |  | ||||||
|  |  | ||||||
| CONFIG_REGULATOR_FAN53555=y |  | ||||||
| CONFIG_REGULATOR=y |  | ||||||
|  |  | ||||||
| CONFIG_REGULATOR_VCTRL=y |  | ||||||
|  |  | ||||||
| CONFIG_KASAN=n |  | ||||||
| CONFIG_KASAN_INLINE=n |  | ||||||
| CONFIG_STACKTRACE=n |  | ||||||
|  |  | ||||||
| CONFIG_TMPFS=y |  | ||||||
|  |  | ||||||
| CONFIG_PROVE_LOCKING=n |  | ||||||
| CONFIG_DEBUG_LOCKDEP=n |  | ||||||
| CONFIG_SOFTLOCKUP_DETECTOR=n |  | ||||||
| CONFIG_BOOTPARAM_SOFTLOCKUP_PANIC=n |  | ||||||
|  |  | ||||||
| CONFIG_FW_LOADER_COMPRESS=y |  | ||||||
|  |  | ||||||
| CONFIG_USB_USBNET=y |  | ||||||
| CONFIG_NETDEVICES=y |  | ||||||
| CONFIG_USB_NET_DRIVERS=y |  | ||||||
| CONFIG_USB_RTL8152=y |  | ||||||
| CONFIG_USB_NET_AX8817X=y |  | ||||||
| CONFIG_USB_NET_SMSC95XX=y |  | ||||||
|  |  | ||||||
| # TK1 |  | ||||||
| CONFIG_ARM_TEGRA_DEVFREQ=y |  | ||||||
|  |  | ||||||
| # 32-bit build failure |  | ||||||
| CONFIG_DRM_MSM=n |  | ||||||
| @@ -1,173 +0,0 @@ | |||||||
| CONFIG_LOCALVERSION_AUTO=y |  | ||||||
| CONFIG_DEBUG_KERNEL=y |  | ||||||
|  |  | ||||||
| # abootimg with a 'dummy' rootfs fails with root=/dev/nfs |  | ||||||
| CONFIG_BLK_DEV_INITRD=n |  | ||||||
|  |  | ||||||
| CONFIG_DEVFREQ_GOV_PERFORMANCE=y |  | ||||||
| CONFIG_DEVFREQ_GOV_POWERSAVE=y |  | ||||||
| CONFIG_DEVFREQ_GOV_USERSPACE=y |  | ||||||
| CONFIG_DEVFREQ_GOV_PASSIVE=y |  | ||||||
|  |  | ||||||
| CONFIG_DRM=y |  | ||||||
| CONFIG_DRM_ROCKCHIP=y |  | ||||||
| CONFIG_DRM_PANFROST=y |  | ||||||
| CONFIG_DRM_LIMA=y |  | ||||||
| CONFIG_DRM_PANEL_SIMPLE=y |  | ||||||
| CONFIG_DRM_PANEL_EDP=y |  | ||||||
| CONFIG_DRM_MSM=y |  | ||||||
| CONFIG_DRM_ETNAVIV=y |  | ||||||
| CONFIG_DRM_I2C_ADV7511=y |  | ||||||
| CONFIG_PWM_CROS_EC=y |  | ||||||
| CONFIG_BACKLIGHT_PWM=y |  | ||||||
|  |  | ||||||
| CONFIG_ROCKCHIP_CDN_DP=n |  | ||||||
|  |  | ||||||
| CONFIG_SPI_ROCKCHIP=y |  | ||||||
| CONFIG_PWM_ROCKCHIP=y |  | ||||||
| CONFIG_PHY_ROCKCHIP_DP=y |  | ||||||
| CONFIG_DWMAC_ROCKCHIP=y |  | ||||||
| CONFIG_STMMAC_ETH=y |  | ||||||
| CONFIG_TYPEC_FUSB302=y |  | ||||||
| CONFIG_TYPEC=y |  | ||||||
| CONFIG_TYPEC_TCPM=y |  | ||||||
|  |  | ||||||
| # MSM platform bits |  | ||||||
|  |  | ||||||
| # For CONFIG_QCOM_LMH |  | ||||||
| CONFIG_OF=y |  | ||||||
|  |  | ||||||
| CONFIG_QCOM_COMMAND_DB=y |  | ||||||
| CONFIG_QCOM_RPMHPD=y |  | ||||||
| CONFIG_QCOM_RPMPD=y |  | ||||||
| CONFIG_SDM_GPUCC_845=y |  | ||||||
| CONFIG_SDM_VIDEOCC_845=y |  | ||||||
| CONFIG_SDM_DISPCC_845=y |  | ||||||
| CONFIG_SDM_LPASSCC_845=y |  | ||||||
| CONFIG_SDM_CAMCC_845=y |  | ||||||
| CONFIG_RESET_QCOM_PDC=y |  | ||||||
| CONFIG_DRM_TI_SN65DSI86=y |  | ||||||
| CONFIG_I2C_QCOM_GENI=y |  | ||||||
| CONFIG_SPI_QCOM_GENI=y |  | ||||||
| CONFIG_PHY_QCOM_QUSB2=y |  | ||||||
| CONFIG_PHY_QCOM_QMP=y |  | ||||||
| CONFIG_QCOM_CLK_APCC_MSM8996=y |  | ||||||
| CONFIG_QCOM_LLCC=y |  | ||||||
| CONFIG_QCOM_LMH=y |  | ||||||
| CONFIG_QCOM_SPMI_TEMP_ALARM=y |  | ||||||
| CONFIG_QCOM_WDT=y |  | ||||||
| CONFIG_POWER_RESET_QCOM_PON=y |  | ||||||
| CONFIG_RTC_DRV_PM8XXX=y |  | ||||||
| CONFIG_INTERCONNECT=y |  | ||||||
| CONFIG_INTERCONNECT_QCOM=y |  | ||||||
| CONFIG_INTERCONNECT_QCOM_SDM845=y |  | ||||||
| CONFIG_INTERCONNECT_QCOM_MSM8916=y |  | ||||||
| CONFIG_INTERCONNECT_QCOM_OSM_L3=y |  | ||||||
| CONFIG_INTERCONNECT_QCOM_SC7180=y |  | ||||||
| CONFIG_CRYPTO_DEV_QCOM_RNG=y |  | ||||||
| CONFIG_SC_DISPCC_7180=y |  | ||||||
| CONFIG_SC_GPUCC_7180=y |  | ||||||
|  |  | ||||||
| # db410c ethernet |  | ||||||
| CONFIG_USB_RTL8152=y |  | ||||||
| # db820c ethernet |  | ||||||
| CONFIG_ATL1C=y |  | ||||||
|  |  | ||||||
| CONFIG_ARCH_ALPINE=n |  | ||||||
| CONFIG_ARCH_BCM2835=n |  | ||||||
| CONFIG_ARCH_BCM_IPROC=n |  | ||||||
| CONFIG_ARCH_BERLIN=n |  | ||||||
| CONFIG_ARCH_BRCMSTB=n |  | ||||||
| CONFIG_ARCH_EXYNOS=n |  | ||||||
| CONFIG_ARCH_K3=n |  | ||||||
| CONFIG_ARCH_LAYERSCAPE=n |  | ||||||
| CONFIG_ARCH_LG1K=n |  | ||||||
| CONFIG_ARCH_HISI=n |  | ||||||
| CONFIG_ARCH_MVEBU=n |  | ||||||
| CONFIG_ARCH_SEATTLE=n |  | ||||||
| CONFIG_ARCH_SYNQUACER=n |  | ||||||
| CONFIG_ARCH_RENESAS=n |  | ||||||
| CONFIG_ARCH_R8A774A1=n |  | ||||||
| CONFIG_ARCH_R8A774C0=n |  | ||||||
| CONFIG_ARCH_R8A7795=n |  | ||||||
| CONFIG_ARCH_R8A7796=n |  | ||||||
| CONFIG_ARCH_R8A77965=n |  | ||||||
| CONFIG_ARCH_R8A77970=n |  | ||||||
| CONFIG_ARCH_R8A77980=n |  | ||||||
| CONFIG_ARCH_R8A77990=n |  | ||||||
| CONFIG_ARCH_R8A77995=n |  | ||||||
| CONFIG_ARCH_STRATIX10=n |  | ||||||
| CONFIG_ARCH_TEGRA=n |  | ||||||
| CONFIG_ARCH_SPRD=n |  | ||||||
| CONFIG_ARCH_THUNDER=n |  | ||||||
| CONFIG_ARCH_THUNDER2=n |  | ||||||
| CONFIG_ARCH_UNIPHIER=n |  | ||||||
| CONFIG_ARCH_VEXPRESS=n |  | ||||||
| CONFIG_ARCH_XGENE=n |  | ||||||
| CONFIG_ARCH_ZX=n |  | ||||||
| CONFIG_ARCH_ZYNQMP=n |  | ||||||
|  |  | ||||||
| # Strip out some stuff we don't need for graphics testing, to reduce |  | ||||||
| # the build. |  | ||||||
| CONFIG_CAN=n |  | ||||||
| CONFIG_WIRELESS=n |  | ||||||
| CONFIG_RFKILL=n |  | ||||||
| CONFIG_WLAN=n |  | ||||||
|  |  | ||||||
| CONFIG_REGULATOR_FAN53555=y |  | ||||||
| CONFIG_REGULATOR=y |  | ||||||
|  |  | ||||||
| CONFIG_REGULATOR_VCTRL=y |  | ||||||
|  |  | ||||||
| CONFIG_KASAN=n |  | ||||||
| CONFIG_KASAN_INLINE=n |  | ||||||
| CONFIG_STACKTRACE=n |  | ||||||
|  |  | ||||||
| CONFIG_TMPFS=y |  | ||||||
|  |  | ||||||
| CONFIG_PROVE_LOCKING=n |  | ||||||
| CONFIG_DEBUG_LOCKDEP=n |  | ||||||
| CONFIG_SOFTLOCKUP_DETECTOR=y |  | ||||||
| CONFIG_BOOTPARAM_SOFTLOCKUP_PANIC=y |  | ||||||
|  |  | ||||||
| CONFIG_DETECT_HUNG_TASK=y |  | ||||||
|  |  | ||||||
| CONFIG_FW_LOADER_COMPRESS=y |  | ||||||
| CONFIG_FW_LOADER_USER_HELPER=n |  | ||||||
|  |  | ||||||
| CONFIG_USB_USBNET=y |  | ||||||
| CONFIG_NETDEVICES=y |  | ||||||
| CONFIG_USB_NET_DRIVERS=y |  | ||||||
| CONFIG_USB_RTL8152=y |  | ||||||
| CONFIG_USB_NET_AX8817X=y |  | ||||||
| CONFIG_USB_NET_SMSC95XX=y |  | ||||||
|  |  | ||||||
| # For amlogic |  | ||||||
| CONFIG_MESON_GXL_PHY=y |  | ||||||
| CONFIG_MDIO_BUS_MUX_MESON_G12A=y |  | ||||||
| CONFIG_DRM_MESON=y |  | ||||||
|  |  | ||||||
| # For Mediatek |  | ||||||
| CONFIG_DRM_MEDIATEK=y |  | ||||||
| CONFIG_PWM_MEDIATEK=y |  | ||||||
| CONFIG_DRM_MEDIATEK_HDMI=y |  | ||||||
| CONFIG_GNSS=y |  | ||||||
| CONFIG_GNSS_MTK_SERIAL=y |  | ||||||
| CONFIG_HW_RANDOM=y |  | ||||||
| CONFIG_HW_RANDOM_MTK=y |  | ||||||
| CONFIG_MTK_DEVAPC=y |  | ||||||
| CONFIG_PWM_MTK_DISP=y |  | ||||||
| CONFIG_MTK_CMDQ=y |  | ||||||
|  |  | ||||||
| # For nouveau.  Note that DRM must be a module so that it's loaded after NFS is up to provide the firmware. |  | ||||||
| CONFIG_ARCH_TEGRA=y |  | ||||||
| CONFIG_DRM_NOUVEAU=m |  | ||||||
| CONFIG_DRM_TEGRA=m |  | ||||||
| CONFIG_R8169=y |  | ||||||
| CONFIG_STAGING=y |  | ||||||
| CONFIG_DRM_TEGRA_STAGING=y |  | ||||||
| CONFIG_TEGRA_HOST1X=y |  | ||||||
| CONFIG_ARM_TEGRA_DEVFREQ=y |  | ||||||
| CONFIG_TEGRA_SOCTHERM=y |  | ||||||
| CONFIG_DRM_TEGRA_DEBUG=y |  | ||||||
| CONFIG_PWM_TEGRA=y |  | ||||||
| @@ -1,55 +0,0 @@ | |||||||
#!/bin/bash

set -e
set -o xtrace

# Fetch the arm-built rootfs image and unpack it in our x86 container (saves
# network transfer, disk usage, and runtime on test jobs)

# Download the named kernel artifacts and device-tree blobs from
# ${ARTIFACTS_URL} into /baremetal-files.
# arg1...: artifact file names (kernel images and .dtb files)
download_baremetal_files() {
    mkdir -p /baremetal-files
    pushd /baremetal-files

    local artifact
    for artifact in "$@"; do
        wget "${ARTIFACTS_URL}/${artifact}"
    done

    popd
}

# Prefer artifacts from the upstream repo when present; fall back to the
# current project's own artifacts otherwise.
# shellcheck disable=SC2154 # arch is assigned in previous scripts
if wget -q --method=HEAD "${ARTIFACTS_PREFIX}/${FDO_UPSTREAM_REPO}/${ARTIFACTS_SUFFIX}/${arch}/done"; then
  ARTIFACTS_URL="${ARTIFACTS_PREFIX}/${FDO_UPSTREAM_REPO}/${ARTIFACTS_SUFFIX}/${arch}"
else
  ARTIFACTS_URL="${ARTIFACTS_PREFIX}/${CI_PROJECT_PATH}/${ARTIFACTS_SUFFIX}/${arch}"
fi

wget "${ARTIFACTS_URL}"/lava-rootfs.tar.zst -O rootfs.tar.zst
mkdir -p /rootfs-"$arch"
tar -C /rootfs-"$arch" '--exclude=./dev/*' --zstd -xf rootfs.tar.zst
rm rootfs.tar.zst

# Per-architecture kernels and DTBs for the baremetal test runners.
if [[ $arch == "arm64" ]]; then
    download_baremetal_files Image Image.gz cheza-kernel \
        apq8016-sbc.dtb apq8096-db820c.dtb \
        tegra210-p3450-0000.dtb imx8mq-nitrogen.dtb
elif [[ $arch == "armhf" ]]; then
    download_baremetal_files zImage \
        imx6q-cubox-i.dtb tegra124-jetson-tk1.dtb
fi
| @@ -1,19 +0,0 @@ | |||||||
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting

set -ex

# Pinned apitrace revision built into the CI images.
APITRACE_VERSION="790380e05854d5c9d315555444ffcc7acb8f4037"

git clone https://github.com/apitrace/apitrace.git --single-branch --no-checkout /apitrace
pushd /apitrace
git checkout "$APITRACE_VERSION"
git submodule update --init --depth 1 --recursive

# Configure and build only the CLI tools; the GUI is not needed here.
cmake -S . -B _build -G Ninja -DCMAKE_BUILD_TYPE=Release -DENABLE_GUI=False -DENABLE_WAFFLE=on $EXTRA_CMAKE_ARGS
cmake --build _build --parallel --target apitrace eglretrace

# Keep only the stripped binaries and delete everything else to shrink
# the resulting image.
mkdir build
for tool in apitrace eglretrace; do
    cp "_build/${tool}" build
done
${STRIP_CMD:-strip} build/*
find . -not -path './build' -not -path './build/*' -delete
popd
| @@ -1,41 +0,0 @@ | |||||||
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting

set -ex

git config --global user.email "mesa@example.com"
git config --global user.name "Mesa CI"

# Pinned crosvm revision built into the CI images.
CROSVM_VERSION=acd262cb42111c53b580a67355e795775545cced
git clone --single-branch -b main --no-checkout https://chromium.googlesource.com/crosvm/crosvm /platform/crosvm
pushd /platform/crosvm
git checkout "$CROSVM_VERSION"
git submodule update --init

# Replace the bundled virglrenderer submodule with a pinned upstream
# revision and install it system-wide before building crosvm against it.
VIRGLRENDERER_VERSION=3c5a9bbb7464e0e91e446991055300f4f989f6a9
rm -rf third_party/virglrenderer
git clone --single-branch -b master --no-checkout https://gitlab.freedesktop.org/virgl/virglrenderer.git third_party/virglrenderer
pushd third_party/virglrenderer
git checkout "$VIRGLRENDERER_VERSION"
meson build/ -Drender-server=true -Drender-server-worker=process -Dvenus-experimental=true $EXTRA_MESON_ARGS
ninja -C build install
popd

# bindgen (pinned version) is installed first; crosvm's build uses it to
# generate Rust bindings.
RUSTFLAGS='-L native=/usr/local/lib' cargo install \
  bindgen \
  -j ${FDO_CI_CONCURRENT:-4} \
  --root /usr/local \
  --version 0.60.1 \
  $EXTRA_CARGO_ARGS

# Build and install crosvm itself with GPU/virgl support enabled.
RUSTFLAGS='-L native=/usr/local/lib' cargo install \
  -j ${FDO_CI_CONCURRENT:-4} \
  --locked \
  --features 'default-no-sandbox gpu x virgl_renderer virgl_renderer_next' \
  --path . \
  --root /usr/local \
  $EXTRA_CARGO_ARGS

popd

# The source tree is no longer needed once the binaries are installed.
rm -rf /platform/crosvm
| @@ -1,31 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| if [ -n "${DEQP_RUNNER_GIT_TAG}${DEQP_RUNNER_GIT_REV}" ]; then |  | ||||||
|     # Build and install from source |  | ||||||
|     DEQP_RUNNER_CARGO_ARGS="--git ${DEQP_RUNNER_GIT_URL:-https://gitlab.freedesktop.org/anholt/deqp-runner.git}" |  | ||||||
|  |  | ||||||
|     if [ -n "${DEQP_RUNNER_GIT_TAG}" ]; then |  | ||||||
|         DEQP_RUNNER_CARGO_ARGS="--tag ${DEQP_RUNNER_GIT_TAG} ${DEQP_RUNNER_CARGO_ARGS}" |  | ||||||
|     else |  | ||||||
|         DEQP_RUNNER_CARGO_ARGS="--rev ${DEQP_RUNNER_GIT_REV} ${DEQP_RUNNER_CARGO_ARGS}" |  | ||||||
|     fi |  | ||||||
|  |  | ||||||
|     DEQP_RUNNER_CARGO_ARGS="${DEQP_RUNNER_CARGO_ARGS} ${EXTRA_CARGO_ARGS}" |  | ||||||
| else |  | ||||||
|     # Install from package registry |  | ||||||
|     DEQP_RUNNER_CARGO_ARGS="--version 0.15.0 ${EXTRA_CARGO_ARGS} -- deqp-runner" |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| cargo install --locked  \ |  | ||||||
|     -j ${FDO_CI_CONCURRENT:-4} \ |  | ||||||
|     --root /usr/local \ |  | ||||||
|     ${DEQP_RUNNER_CARGO_ARGS} |  | ||||||
|  |  | ||||||
| # remove unused test runners to shrink images for the Mesa CI build (not kernel, |  | ||||||
| # which chooses its own deqp branch) |  | ||||||
| if [ -z "${DEQP_RUNNER_GIT_TAG}${DEQP_RUNNER_GIT_REV}" ]; then |  | ||||||
|     rm -f /usr/local/bin/igt-runner |  | ||||||
| fi |  | ||||||
| @@ -1,98 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| git config --global user.email "mesa@example.com" |  | ||||||
| git config --global user.name "Mesa CI" |  | ||||||
| git clone \ |  | ||||||
|     https://github.com/KhronosGroup/VK-GL-CTS.git \ |  | ||||||
|     -b vulkan-cts-1.3.3.0 \ |  | ||||||
|     --depth 1 \ |  | ||||||
|     /VK-GL-CTS |  | ||||||
| pushd /VK-GL-CTS |  | ||||||
|  |  | ||||||
| # Apply a patch to update zlib link to an available version. |  | ||||||
| # vulkan-cts-1.3.3.0 uses zlib 1.2.12 which was removed from zlib server due to |  | ||||||
| # a CVE. See https://zlib.net/ |  | ||||||
| # FIXME: Remove this patch when uprev to 1.3.4.0+ |  | ||||||
| wget -O- https://github.com/KhronosGroup/VK-GL-CTS/commit/6bb2e7d64261bedb503947b1b251b1eeeb49be73.patch | |  | ||||||
|     git am - |  | ||||||
|  |  | ||||||
| # --insecure is due to SSL cert failures hitting sourceforge for zlib and |  | ||||||
| # libpng (sigh).  The archives get their checksums checked anyway, and git |  | ||||||
| # always goes through ssh or https. |  | ||||||
| python3 external/fetch_sources.py --insecure |  | ||||||
|  |  | ||||||
| mkdir -p /deqp |  | ||||||
|  |  | ||||||
| # Save the testlog stylesheets: |  | ||||||
| cp doc/testlog-stylesheet/testlog.{css,xsl} /deqp |  | ||||||
| popd |  | ||||||
|  |  | ||||||
| pushd /deqp |  | ||||||
| # When including EGL/X11 testing, do that build first and save off its |  | ||||||
| # deqp-egl binary. |  | ||||||
| cmake -S /VK-GL-CTS -B . -G Ninja \ |  | ||||||
|       -DDEQP_TARGET=x11_egl_glx \ |  | ||||||
|       -DCMAKE_BUILD_TYPE=Release \ |  | ||||||
|       $EXTRA_CMAKE_ARGS |  | ||||||
| ninja modules/egl/deqp-egl |  | ||||||
| cp /deqp/modules/egl/deqp-egl /deqp/modules/egl/deqp-egl-x11 |  | ||||||
|  |  | ||||||
|  |  | ||||||
| cmake -S /VK-GL-CTS -B . -G Ninja \ |  | ||||||
|       -DDEQP_TARGET=${DEQP_TARGET:-x11_glx} \ |  | ||||||
|       -DCMAKE_BUILD_TYPE=Release \ |  | ||||||
|       $EXTRA_CMAKE_ARGS |  | ||||||
| ninja |  | ||||||
|  |  | ||||||
| mv /deqp/modules/egl/deqp-egl-x11 /deqp/modules/egl/deqp-egl |  | ||||||
|  |  | ||||||
| # Copy out the mustpass lists we want. |  | ||||||
| mkdir /deqp/mustpass |  | ||||||
| for mustpass in $(< /VK-GL-CTS/external/vulkancts/mustpass/main/vk-default.txt) ; do |  | ||||||
|     cat /VK-GL-CTS/external/vulkancts/mustpass/main/$mustpass \ |  | ||||||
|         >> /deqp/mustpass/vk-master.txt |  | ||||||
| done |  | ||||||
|  |  | ||||||
| cp \ |  | ||||||
|     /deqp/external/openglcts/modules/gl_cts/data/mustpass/gles/aosp_mustpass/3.2.6.x/*.txt \ |  | ||||||
|     /deqp/mustpass/. |  | ||||||
| cp \ |  | ||||||
|     /deqp/external/openglcts/modules/gl_cts/data/mustpass/egl/aosp_mustpass/3.2.6.x/egl-master.txt \ |  | ||||||
|     /deqp/mustpass/. |  | ||||||
| cp \ |  | ||||||
|     /deqp/external/openglcts/modules/gl_cts/data/mustpass/gles/khronos_mustpass/3.2.6.x/*-master.txt \ |  | ||||||
|     /deqp/mustpass/. |  | ||||||
| cp \ |  | ||||||
|     /deqp/external/openglcts/modules/gl_cts/data/mustpass/gl/khronos_mustpass/4.6.1.x/*-master.txt \ |  | ||||||
|     /deqp/mustpass/. |  | ||||||
| cp \ |  | ||||||
|     /deqp/external/openglcts/modules/gl_cts/data/mustpass/gl/khronos_mustpass_single/4.6.1.x/*-single.txt \ |  | ||||||
|     /deqp/mustpass/. |  | ||||||
|  |  | ||||||
| # Save *some* executor utils, but otherwise strip things down |  | ||||||
| # to reduct deqp build size: |  | ||||||
| mkdir /deqp/executor.save |  | ||||||
| cp /deqp/executor/testlog-to-* /deqp/executor.save |  | ||||||
| rm -rf /deqp/executor |  | ||||||
| mv /deqp/executor.save /deqp/executor |  | ||||||
|  |  | ||||||
| # Remove other mustpass files, since we saved off the ones we wanted to conventient locations above. |  | ||||||
| rm -rf /deqp/external/openglcts/modules/gl_cts/data/mustpass |  | ||||||
| rm -rf /deqp/external/vulkancts/modules/vulkan/vk-master* |  | ||||||
| rm -rf /deqp/external/vulkancts/modules/vulkan/vk-default |  | ||||||
|  |  | ||||||
| rm -rf /deqp/external/openglcts/modules/cts-runner |  | ||||||
| rm -rf /deqp/modules/internal |  | ||||||
| rm -rf /deqp/execserver |  | ||||||
| rm -rf /deqp/framework |  | ||||||
| # shellcheck disable=SC2038,SC2185 # TODO: rewrite find |  | ||||||
| find -iname '*cmake*' -o -name '*ninja*' -o -name '*.o' -o -name '*.a' | xargs rm -rf |  | ||||||
| ${STRIP_CMD:-strip} external/vulkancts/modules/vulkan/deqp-vk |  | ||||||
| ${STRIP_CMD:-strip} external/openglcts/modules/glcts |  | ||||||
| ${STRIP_CMD:-strip} modules/*/deqp-* |  | ||||||
| du -sh ./* |  | ||||||
| rm -rf /VK-GL-CTS |  | ||||||
| popd |  | ||||||
| @@ -1,14 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| git clone https://github.com/ValveSoftware/Fossilize.git |  | ||||||
| cd Fossilize |  | ||||||
| git checkout 16fba1b8b5d9310126bb02323d7bae3227338461 |  | ||||||
| git submodule update --init |  | ||||||
| mkdir build |  | ||||||
| cd build |  | ||||||
| cmake -S .. -B . -G Ninja -DCMAKE_BUILD_TYPE=Release |  | ||||||
| ninja -C . install |  | ||||||
| cd ../.. |  | ||||||
| rm -rf Fossilize |  | ||||||
| @@ -1,19 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| GFXRECONSTRUCT_VERSION=5ed3caeecc46e976c4df31e263df8451ae176c26 |  | ||||||
|  |  | ||||||
| git clone https://github.com/LunarG/gfxreconstruct.git \ |  | ||||||
|     --single-branch \ |  | ||||||
|     -b master \ |  | ||||||
|     --no-checkout \ |  | ||||||
|     /gfxreconstruct |  | ||||||
| pushd /gfxreconstruct |  | ||||||
| git checkout "$GFXRECONSTRUCT_VERSION" |  | ||||||
| git submodule update --init |  | ||||||
| git submodule update |  | ||||||
| cmake -S . -B _build -G Ninja -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX:PATH=/gfxreconstruct/build -DBUILD_WERROR=OFF |  | ||||||
| cmake --build _build --parallel --target tools/{replay,info}/install/strip |  | ||||||
| find . -not -path './build' -not -path './build/*' -delete |  | ||||||
| popd |  | ||||||
| @@ -1,16 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| PARALLEL_DEQP_RUNNER_VERSION=fe557794b5dadd8dbf0eae403296625e03bda18a |  | ||||||
|  |  | ||||||
| git clone https://gitlab.freedesktop.org/mesa/parallel-deqp-runner --single-branch -b master --no-checkout /parallel-deqp-runner |  | ||||||
| pushd /parallel-deqp-runner |  | ||||||
| git checkout "$PARALLEL_DEQP_RUNNER_VERSION" |  | ||||||
| meson . _build |  | ||||||
| ninja -C _build hang-detection |  | ||||||
| mkdir -p build/bin |  | ||||||
| install _build/hang-detection build/bin |  | ||||||
| strip build/bin/* |  | ||||||
| find . -not -path './build' -not -path './build/*' -delete |  | ||||||
| popd |  | ||||||
| @@ -1,53 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| mkdir -p kernel |  | ||||||
| wget -qO- ${KERNEL_URL} | tar -xj --strip-components=1 -C kernel |  | ||||||
| pushd kernel |  | ||||||
|  |  | ||||||
| # The kernel doesn't like the gold linker (or the old lld in our debians). |  | ||||||
| # Sneak in some override symlinks during kernel build until we can update |  | ||||||
| # debian (they'll get blown away by the rm of the kernel dir at the end). |  | ||||||
| mkdir -p ld-links |  | ||||||
| for i in /usr/bin/*-ld /usr/bin/ld; do |  | ||||||
|     i=$(basename $i) |  | ||||||
|     ln -sf /usr/bin/$i.bfd ld-links/$i |  | ||||||
| done |  | ||||||
|  |  | ||||||
| NEWPATH=$(pwd)/ld-links |  | ||||||
| export PATH=$NEWPATH:$PATH |  | ||||||
|  |  | ||||||
| KERNEL_FILENAME=$(basename $KERNEL_URL) |  | ||||||
| export LOCALVERSION="$KERNEL_FILENAME" |  | ||||||
| ./scripts/kconfig/merge_config.sh ${DEFCONFIG} ../.gitlab-ci/container/${KERNEL_ARCH}.config |  | ||||||
| make ${KERNEL_IMAGE_NAME} |  | ||||||
| for image in ${KERNEL_IMAGE_NAME}; do |  | ||||||
|     cp arch/${KERNEL_ARCH}/boot/${image} /lava-files/. |  | ||||||
| done |  | ||||||
|  |  | ||||||
| if [[ -n ${DEVICE_TREES} ]]; then |  | ||||||
|     make dtbs |  | ||||||
|     cp ${DEVICE_TREES} /lava-files/. |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| make modules |  | ||||||
| INSTALL_MOD_PATH=/lava-files/rootfs-${DEBIAN_ARCH}/ make modules_install |  | ||||||
|  |  | ||||||
| if [[ ${DEBIAN_ARCH} = "arm64" ]]; then |  | ||||||
|     make Image.lzma |  | ||||||
|     mkimage \ |  | ||||||
|         -f auto \ |  | ||||||
|         -A arm \ |  | ||||||
|         -O linux \ |  | ||||||
|         -d arch/arm64/boot/Image.lzma \ |  | ||||||
|         -C lzma\ |  | ||||||
|         -b arch/arm64/boot/dts/qcom/sdm845-cheza-r3.dtb \ |  | ||||||
|         /lava-files/cheza-kernel |  | ||||||
|     KERNEL_IMAGE_NAME+=" cheza-kernel" |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| popd |  | ||||||
| rm -rf kernel |  | ||||||
|  |  | ||||||
| @@ -1,30 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| export LLVM_CONFIG="llvm-config-11" |  | ||||||
|  |  | ||||||
| $LLVM_CONFIG --version |  | ||||||
|  |  | ||||||
| git config --global user.email "mesa@example.com" |  | ||||||
| git config --global user.name "Mesa CI" |  | ||||||
| git clone \ |  | ||||||
|     https://github.com/llvm/llvm-project \ |  | ||||||
|     --depth 1 \ |  | ||||||
|     -b llvmorg-12.0.0-rc3 \ |  | ||||||
|     /llvm-project |  | ||||||
|  |  | ||||||
| mkdir /libclc |  | ||||||
| pushd /libclc |  | ||||||
| cmake -S /llvm-project/libclc -B . -G Ninja -DLLVM_CONFIG=$LLVM_CONFIG -DLIBCLC_TARGETS_TO_BUILD="spirv-mesa3d-;spirv64-mesa3d-" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr -DLLVM_SPIRV=/usr/bin/llvm-spirv |  | ||||||
| ninja |  | ||||||
| ninja install |  | ||||||
| popd |  | ||||||
|  |  | ||||||
| # workaroud cmake vs debian packaging. |  | ||||||
| mkdir -p /usr/lib/clc |  | ||||||
| ln -s /usr/share/clc/spirv64-mesa3d-.spv /usr/lib/clc/ |  | ||||||
| ln -s /usr/share/clc/spirv-mesa3d-.spv /usr/lib/clc/ |  | ||||||
|  |  | ||||||
| du -sh ./* |  | ||||||
| rm -rf /libclc /llvm-project |  | ||||||
| @@ -1,14 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| export LIBDRM_VERSION=libdrm-2.4.110 |  | ||||||
|  |  | ||||||
| wget https://dri.freedesktop.org/libdrm/"$LIBDRM_VERSION".tar.xz |  | ||||||
| tar -xvf "$LIBDRM_VERSION".tar.xz && rm "$LIBDRM_VERSION".tar.xz |  | ||||||
| cd "$LIBDRM_VERSION" |  | ||||||
| meson build -D vc4=false -D freedreno=false -D etnaviv=false $EXTRA_MESON_ARGS |  | ||||||
| ninja -C build install |  | ||||||
| cd .. |  | ||||||
| rm -rf "$LIBDRM_VERSION" |  | ||||||
| @@ -1,19 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| wget https://github.com/KhronosGroup/SPIRV-LLVM-Translator/archive/refs/tags/v13.0.0.tar.gz |  | ||||||
| tar -xvf v13.0.0.tar.gz && rm v13.0.0.tar.gz |  | ||||||
|  |  | ||||||
| mkdir SPIRV-LLVM-Translator-13.0.0/build |  | ||||||
| pushd SPIRV-LLVM-Translator-13.0.0/build |  | ||||||
| cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr |  | ||||||
| ninja |  | ||||||
| ninja install |  | ||||||
| # For some reason llvm-spirv is not installed by default |  | ||||||
| ninja llvm-spirv |  | ||||||
| cp tools/llvm-spirv/llvm-spirv /usr/bin/ |  | ||||||
| popd |  | ||||||
|  |  | ||||||
| du -sh SPIRV-LLVM-Translator-13.0.0 |  | ||||||
| rm -rf SPIRV-LLVM-Translator-13.0.0 |  | ||||||
| @@ -1,12 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| MOLD_VERSION="1.6.0" |  | ||||||
|  |  | ||||||
| git clone -b v"$MOLD_VERSION" --single-branch --depth 1 https://github.com/rui314/mold.git |  | ||||||
| cd mold |  | ||||||
| make |  | ||||||
| make install |  | ||||||
| cd .. |  | ||||||
| rm -rf mold |  | ||||||
| @@ -1,31 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| git clone https://gitlab.freedesktop.org/mesa/piglit.git --single-branch --no-checkout /piglit |  | ||||||
| pushd /piglit |  | ||||||
| git checkout 591c91865012de4224bea551eac5d2274acf06ad |  | ||||||
|  |  | ||||||
| # TODO: Remove the following patch when piglit commit got past |  | ||||||
| # 1cd716180cfb6ef0c1fc54702460ef49e5115791 |  | ||||||
| git apply $OLDPWD/.gitlab-ci/piglit/build-piglit_backport-s3-migration.diff |  | ||||||
|  |  | ||||||
| patch -p1 <$OLDPWD/.gitlab-ci/piglit/disable-vs_in.diff |  | ||||||
| cmake -S . -B . -G Ninja -DCMAKE_BUILD_TYPE=Release $PIGLIT_OPTS $EXTRA_CMAKE_ARGS |  | ||||||
| ninja $PIGLIT_BUILD_TARGETS |  | ||||||
| # shellcheck disable=SC2038,SC2185 # TODO: rewrite find |  | ||||||
| find -name .git -o -name '*ninja*' -o -iname '*cmake*' -o -name '*.[chao]' | xargs rm -rf |  | ||||||
| rm -rf target_api |  | ||||||
| if [ "$PIGLIT_BUILD_TARGETS" = "piglit_replayer" ]; then |  | ||||||
|     # shellcheck disable=SC2038,SC2185 # TODO: rewrite find |  | ||||||
|     find ! -regex "^\.$" \ |  | ||||||
|          ! -regex "^\.\/piglit.*" \ |  | ||||||
|          ! -regex "^\.\/framework.*" \ |  | ||||||
|          ! -regex "^\.\/bin$" \ |  | ||||||
|          ! -regex "^\.\/bin\/replayer\.py" \ |  | ||||||
|          ! -regex "^\.\/templates.*" \ |  | ||||||
|          ! -regex "^\.\/tests$" \ |  | ||||||
|          ! -regex "^\.\/tests\/replay\.py" 2>/dev/null | xargs rm -rf |  | ||||||
| fi |  | ||||||
| popd |  | ||||||
| @@ -1,38 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| # Note that this script is not actually "building" rust, but build- is the |  | ||||||
| # convention for the shared helpers for putting stuff in our containers. |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| # cargo (and rustup) wants to store stuff in $HOME/.cargo, and binaries in |  | ||||||
| # $HOME/.cargo/bin.  Make bin a link to a public bin directory so the commands |  | ||||||
| # are just available to all build jobs. |  | ||||||
| mkdir -p "$HOME"/.cargo |  | ||||||
| ln -s /usr/local/bin "$HOME"/.cargo/bin |  | ||||||
|  |  | ||||||
| # Rusticl requires at least Rust 1.59.0 |  | ||||||
| # |  | ||||||
| # Also, oick a specific snapshot from rustup so the compiler doesn't drift on |  | ||||||
| # us. |  | ||||||
| RUST_VERSION=1.59.0-2022-02-24 |  | ||||||
|  |  | ||||||
| # For rust in Mesa, we use rustup to install.  This lets us pick an arbitrary |  | ||||||
| # version of the compiler, rather than whatever the container's Debian comes |  | ||||||
| # with. |  | ||||||
| wget https://sh.rustup.rs -O - | sh -s -- \ |  | ||||||
|    --default-toolchain $RUST_VERSION \ |  | ||||||
|    --profile minimal \ |  | ||||||
|    -y |  | ||||||
|  |  | ||||||
| rustup component add rustfmt |  | ||||||
|  |  | ||||||
| # Set up a config script for cross compiling -- cargo needs your system cc for |  | ||||||
| # linking in cross builds, but doesn't know what you want to use for system cc. |  | ||||||
| cat > /root/.cargo/config <<EOF |  | ||||||
| [target.armv7-unknown-linux-gnueabihf] |  | ||||||
| linker = "arm-linux-gnueabihf-gcc" |  | ||||||
|  |  | ||||||
| [target.aarch64-unknown-linux-gnu] |  | ||||||
| linker = "aarch64-linux-gnu-gcc" |  | ||||||
| EOF |  | ||||||
| @@ -1,97 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # |  | ||||||
| # Copyright (C) 2022 Collabora Limited |  | ||||||
| # Author: Guilherme Gallo <guilherme.gallo@collabora.com> |  | ||||||
| # |  | ||||||
| # Permission is hereby granted, free of charge, to any person obtaining a |  | ||||||
| # copy of this software and associated documentation files (the "Software"), |  | ||||||
| # to deal in the Software without restriction, including without limitation |  | ||||||
| # the rights to use, copy, modify, merge, publish, distribute, sublicense, |  | ||||||
| # and/or sell copies of the Software, and to permit persons to whom the |  | ||||||
| # Software is furnished to do so, subject to the following conditions: |  | ||||||
| # |  | ||||||
| # The above copyright notice and this permission notice (including the next |  | ||||||
| # paragraph) shall be included in all copies or substantial portions of the |  | ||||||
| # Software. |  | ||||||
| # |  | ||||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |  | ||||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |  | ||||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL |  | ||||||
| # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |  | ||||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |  | ||||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |  | ||||||
| # SOFTWARE. |  | ||||||
|  |  | ||||||
|  |  | ||||||
| create_gn_args() { |  | ||||||
|     # gn can be configured to cross-compile skia and its tools |  | ||||||
|     # It is important to set the target_cpu to guarantee the intended target |  | ||||||
|     # machine |  | ||||||
|     cp "${BASE_ARGS_GN_FILE}" "${SKQP_OUT_DIR}"/args.gn |  | ||||||
|     echo "target_cpu = \"${SKQP_ARCH}\"" >> "${SKQP_OUT_DIR}"/args.gn |  | ||||||
| } |  | ||||||
|  |  | ||||||
|  |  | ||||||
| download_skia_source() { |  | ||||||
|     if [ -z ${SKIA_DIR+x} ] |  | ||||||
|     then |  | ||||||
|         return 1 |  | ||||||
|     fi |  | ||||||
|  |  | ||||||
|     # Skia cloned from https://android.googlesource.com/platform/external/skqp |  | ||||||
|     # has all needed assets tracked on git-fs |  | ||||||
|     SKQP_REPO=https://android.googlesource.com/platform/external/skqp |  | ||||||
|     SKQP_BRANCH=android-cts-11.0_r7 |  | ||||||
|  |  | ||||||
|     git clone --branch "${SKQP_BRANCH}" --depth 1 "${SKQP_REPO}" "${SKIA_DIR}" |  | ||||||
| } |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| SCRIPT_DIR=$(realpath "$(dirname "$0")") |  | ||||||
| SKQP_PATCH_DIR="${SCRIPT_DIR}" |  | ||||||
| BASE_ARGS_GN_FILE="${SCRIPT_DIR}/build-skqp_base.gn" |  | ||||||
|  |  | ||||||
| SKQP_ARCH=${SKQP_ARCH:-x64} |  | ||||||
| SKIA_DIR=${SKIA_DIR:-$(mktemp -d)} |  | ||||||
| SKQP_OUT_DIR=${SKIA_DIR}/out/${SKQP_ARCH} |  | ||||||
| SKQP_INSTALL_DIR=${SKQP_INSTALL_DIR:-/skqp} |  | ||||||
| SKQP_ASSETS_DIR="${SKQP_INSTALL_DIR}/assets" |  | ||||||
| SKQP_BINARIES=(skqp list_gpu_unit_tests list_gms) |  | ||||||
|  |  | ||||||
| download_skia_source |  | ||||||
|  |  | ||||||
| pushd "${SKIA_DIR}" |  | ||||||
|  |  | ||||||
| # Apply all skqp patches for Mesa CI |  | ||||||
| cat "${SKQP_PATCH_DIR}"/build-skqp_*.patch | |  | ||||||
|     patch -p1 |  | ||||||
|  |  | ||||||
| # Fetch some needed build tools needed to build skia/skqp. |  | ||||||
| # Basically, it clones repositories with commits SHAs from ${SKIA_DIR}/DEPS |  | ||||||
| # directory. |  | ||||||
| python tools/git-sync-deps |  | ||||||
|  |  | ||||||
| mkdir -p "${SKQP_OUT_DIR}" |  | ||||||
| mkdir -p "${SKQP_INSTALL_DIR}" |  | ||||||
|  |  | ||||||
| create_gn_args |  | ||||||
|  |  | ||||||
| # Build and install skqp binaries |  | ||||||
| bin/gn gen "${SKQP_OUT_DIR}" |  | ||||||
|  |  | ||||||
| for BINARY in "${SKQP_BINARIES[@]}" |  | ||||||
| do |  | ||||||
|     /usr/bin/ninja -C "${SKQP_OUT_DIR}" "${BINARY}" |  | ||||||
|     # Strip binary, since gn is not stripping it even when `is_debug == false` |  | ||||||
|     ${STRIP_CMD:-strip} "${SKQP_OUT_DIR}/${BINARY}" |  | ||||||
|     install -m 0755 "${SKQP_OUT_DIR}/${BINARY}" "${SKQP_INSTALL_DIR}" |  | ||||||
| done |  | ||||||
|  |  | ||||||
| # Move assets to the target directory, which will reside in rootfs. |  | ||||||
| mv platform_tools/android/apps/skqp/src/main/assets/ "${SKQP_ASSETS_DIR}" |  | ||||||
|  |  | ||||||
| popd |  | ||||||
| rm -Rf "${SKIA_DIR}" |  | ||||||
|  |  | ||||||
| set +ex |  | ||||||
| @@ -1,13 +0,0 @@ | |||||||
| diff --git a/BUILD.gn b/BUILD.gn |  | ||||||
| index d2b1407..7b60c90 100644 |  | ||||||
| --- a/BUILD.gn |  | ||||||
| +++ b/BUILD.gn |  | ||||||
| @@ -144,7 +144,7 @@ config("skia_public") { |  | ||||||
|   |  | ||||||
|  # Skia internal APIs, used by Skia itself and a few test tools. |  | ||||||
|  config("skia_private") { |  | ||||||
| -  visibility = [ ":*" ] |  | ||||||
| +  visibility = [ "*" ] |  | ||||||
|   |  | ||||||
|    include_dirs = [ |  | ||||||
|      "include/private", |  | ||||||
| @@ -1,47 +0,0 @@ | |||||||
| cc = "clang" |  | ||||||
| cxx = "clang++" |  | ||||||
|  |  | ||||||
| extra_cflags = [ "-DSK_ENABLE_DUMP_GPU", "-DSK_BUILD_FOR_SKQP" ] |  | ||||||
| extra_cflags_cc = [ |  | ||||||
|         "-Wno-error", |  | ||||||
|  |  | ||||||
|         # skqp build process produces a lot of compilation warnings, silencing |  | ||||||
|         # most of them to remove clutter and avoid the CI job log to exceed the |  | ||||||
|         # maximum size |  | ||||||
|  |  | ||||||
|         # GCC flags |  | ||||||
|         "-Wno-redundant-move", |  | ||||||
|         "-Wno-suggest-override", |  | ||||||
|         "-Wno-class-memaccess", |  | ||||||
|         "-Wno-deprecated-copy", |  | ||||||
|         "-Wno-uninitialized", |  | ||||||
|  |  | ||||||
|         # Clang flags |  | ||||||
|         "-Wno-macro-redefined", |  | ||||||
|         "-Wno-anon-enum-enum-conversion", |  | ||||||
|         "-Wno-suggest-destructor-override", |  | ||||||
|         "-Wno-return-std-move-in-c++11", |  | ||||||
|         "-Wno-extra-semi-stmt", |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
| cc_wrapper = "ccache" |  | ||||||
|  |  | ||||||
| is_debug = false |  | ||||||
|  |  | ||||||
| skia_enable_fontmgr_android = false |  | ||||||
| skia_enable_fontmgr_empty = true |  | ||||||
| skia_enable_pdf = false |  | ||||||
| skia_enable_skottie = false |  | ||||||
|  |  | ||||||
| skia_skqp_global_error_tolerance = 8 |  | ||||||
| skia_tools_require_resources = true |  | ||||||
|  |  | ||||||
| skia_use_dng_sdk = false |  | ||||||
| skia_use_expat = true |  | ||||||
| skia_use_icu = false |  | ||||||
| skia_use_libheif = false |  | ||||||
| skia_use_lua = false |  | ||||||
| skia_use_piex = false |  | ||||||
| skia_use_vulkan = true |  | ||||||
|  |  | ||||||
| target_os = "linux" |  | ||||||
| @@ -1,68 +0,0 @@ | |||||||
| diff --git a/bin/fetch-gn b/bin/fetch-gn |  | ||||||
| index d5e94a2..59c4591 100755 |  | ||||||
| --- a/bin/fetch-gn |  | ||||||
| +++ b/bin/fetch-gn |  | ||||||
| @@ -5,39 +5,44 @@ |  | ||||||
|  # Use of this source code is governed by a BSD-style license that can be |  | ||||||
|  # found in the LICENSE file. |  | ||||||
|   |  | ||||||
| -import hashlib |  | ||||||
|  import os |  | ||||||
| +import platform |  | ||||||
|  import shutil |  | ||||||
|  import stat |  | ||||||
|  import sys |  | ||||||
| -import urllib2 |  | ||||||
| +import tempfile |  | ||||||
| +import zipfile |  | ||||||
| + |  | ||||||
| +if sys.version_info[0] < 3: |  | ||||||
| +  from urllib2 import urlopen |  | ||||||
| +else: |  | ||||||
| +  from urllib.request import urlopen |  | ||||||
|   |  | ||||||
|  os.chdir(os.path.join(os.path.dirname(__file__), os.pardir)) |  | ||||||
|   |  | ||||||
| -dst = 'bin/gn.exe' if 'win32' in sys.platform else 'bin/gn' |  | ||||||
| +gnzip = os.path.join(tempfile.mkdtemp(), 'gn.zip') |  | ||||||
| +with open(gnzip, 'wb') as f: |  | ||||||
| +  OS  = {'darwin': 'mac', 'linux': 'linux', 'linux2': 'linux', 'win32': 'windows'}[sys.platform] |  | ||||||
| +  cpu = {'amd64': 'amd64', 'arm64': 'arm64', 'x86_64': 'amd64', 'aarch64': 'arm64'}[platform.machine().lower()] |  | ||||||
|   |  | ||||||
| -sha1 = '2f27ff0b6118e5886df976da5effa6003d19d1ce' if 'linux'  in sys.platform else \ |  | ||||||
| -       '9be792dd9010ce303a9c3a497a67bcc5ac8c7666' if 'darwin' in sys.platform else \ |  | ||||||
| -       'eb69be2d984b4df60a8c21f598135991f0ad1742'  # Windows |  | ||||||
| +  rev = 'd62642c920e6a0d1756316d225a90fd6faa9e21e' |  | ||||||
| +  url = 'https://chrome-infra-packages.appspot.com/dl/gn/gn/{}-{}/+/git_revision:{}'.format( |  | ||||||
| +          OS,cpu,rev) |  | ||||||
| +  f.write(urlopen(url).read()) |  | ||||||
|   |  | ||||||
| -def sha1_of_file(path): |  | ||||||
| -  h = hashlib.sha1() |  | ||||||
| -  if os.path.isfile(path): |  | ||||||
| -    with open(path, 'rb') as f: |  | ||||||
| -      h.update(f.read()) |  | ||||||
| -  return h.hexdigest() |  | ||||||
| +gn = 'gn.exe' if 'win32' in sys.platform else 'gn' |  | ||||||
| +with zipfile.ZipFile(gnzip, 'r') as f: |  | ||||||
| +  f.extract(gn, 'bin') |  | ||||||
|   |  | ||||||
| -if sha1_of_file(dst) != sha1: |  | ||||||
| -  with open(dst, 'wb') as f: |  | ||||||
| -    f.write(urllib2.urlopen('https://chromium-gn.storage-download.googleapis.com/' + sha1).read()) |  | ||||||
| +gn = os.path.join('bin', gn) |  | ||||||
|   |  | ||||||
| -  os.chmod(dst, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | |  | ||||||
| -                stat.S_IRGRP                | stat.S_IXGRP | |  | ||||||
| -                stat.S_IROTH                | stat.S_IXOTH ) |  | ||||||
| +os.chmod(gn, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | |  | ||||||
| +             stat.S_IRGRP                | stat.S_IXGRP | |  | ||||||
| +             stat.S_IROTH                | stat.S_IXOTH ) |  | ||||||
|   |  | ||||||
|  # We'll also copy to a path that depot_tools' GN wrapper will expect to find the binary. |  | ||||||
|  copy_path = 'buildtools/linux64/gn' if 'linux'  in sys.platform else \ |  | ||||||
|              'buildtools/mac/gn'     if 'darwin' in sys.platform else \ |  | ||||||
|              'buildtools/win/gn.exe' |  | ||||||
|  if os.path.isdir(os.path.dirname(copy_path)): |  | ||||||
| -  shutil.copy(dst, copy_path) |  | ||||||
| +  shutil.copy(gn, copy_path) |  | ||||||
| @@ -1,142 +0,0 @@ | |||||||
| Patch based from diff with skia repository from commit |  | ||||||
| 013397884c73959dc07cb0a26ee742b1cdfbda8a |  | ||||||
|  |  | ||||||
| Adds support for Python3, but removes the constraint of only SHA based refs in |  | ||||||
| DEPS |  | ||||||
| diff --git a/tools/git-sync-deps b/tools/git-sync-deps |  | ||||||
| index c7379c0b5c..f63d4d9ccf 100755 |  | ||||||
| --- a/tools/git-sync-deps |  | ||||||
| +++ b/tools/git-sync-deps |  | ||||||
| @@ -43,7 +43,7 @@ def git_executable(): |  | ||||||
|        A string suitable for passing to subprocess functions, or None. |  | ||||||
|    """ |  | ||||||
|    envgit = os.environ.get('GIT_EXECUTABLE') |  | ||||||
| -  searchlist = ['git'] |  | ||||||
| +  searchlist = ['git', 'git.bat'] |  | ||||||
|    if envgit: |  | ||||||
|      searchlist.insert(0, envgit) |  | ||||||
|    with open(os.devnull, 'w') as devnull: |  | ||||||
| @@ -94,21 +94,25 @@ def is_git_toplevel(git, directory): |  | ||||||
|    try: |  | ||||||
|      toplevel = subprocess.check_output( |  | ||||||
|        [git, 'rev-parse', '--show-toplevel'], cwd=directory).strip() |  | ||||||
| -    return os.path.realpath(directory) == os.path.realpath(toplevel) |  | ||||||
| +    return os.path.realpath(directory) == os.path.realpath(toplevel.decode()) |  | ||||||
|    except subprocess.CalledProcessError: |  | ||||||
|      return False |  | ||||||
|   |  | ||||||
|   |  | ||||||
| -def status(directory, checkoutable): |  | ||||||
| -  def truncate(s, length): |  | ||||||
| +def status(directory, commithash, change): |  | ||||||
| +  def truncate_beginning(s, length): |  | ||||||
| +    return s if len(s) <= length else '...' + s[-(length-3):] |  | ||||||
| +  def truncate_end(s, length): |  | ||||||
|      return s if len(s) <= length else s[:(length - 3)] + '...' |  | ||||||
| + |  | ||||||
|    dlen = 36 |  | ||||||
| -  directory = truncate(directory, dlen) |  | ||||||
| -  checkoutable = truncate(checkoutable, 40) |  | ||||||
| -  sys.stdout.write('%-*s @ %s\n' % (dlen, directory, checkoutable)) |  | ||||||
| +  directory = truncate_beginning(directory, dlen) |  | ||||||
| +  commithash = truncate_end(commithash, 40) |  | ||||||
| +  symbol = '>' if change else '@' |  | ||||||
| +  sys.stdout.write('%-*s %s %s\n' % (dlen, directory, symbol, commithash)) |  | ||||||
|   |  | ||||||
|   |  | ||||||
| -def git_checkout_to_directory(git, repo, checkoutable, directory, verbose): |  | ||||||
| +def git_checkout_to_directory(git, repo, commithash, directory, verbose): |  | ||||||
|    """Checkout (and clone if needed) a Git repository. |  | ||||||
|   |  | ||||||
|    Args: |  | ||||||
| @@ -117,8 +121,7 @@ def git_checkout_to_directory(git, repo, checkoutable, directory, verbose): |  | ||||||
|      repo (string) the location of the repository, suitable |  | ||||||
|           for passing to `git clone`. |  | ||||||
|   |  | ||||||
| -    checkoutable (string) a tag, branch, or commit, suitable for |  | ||||||
| -                 passing to `git checkout` |  | ||||||
| +    commithash (string) a commit, suitable for passing to `git checkout` |  | ||||||
|   |  | ||||||
|      directory (string) the path into which the repository |  | ||||||
|                should be checked out. |  | ||||||
| @@ -129,7 +132,12 @@ def git_checkout_to_directory(git, repo, checkoutable, directory, verbose): |  | ||||||
|    """ |  | ||||||
|    if not os.path.isdir(directory): |  | ||||||
|      subprocess.check_call( |  | ||||||
| -      [git, 'clone', '--quiet', repo, directory]) |  | ||||||
| +      [git, 'clone', '--quiet', '--no-checkout', repo, directory]) |  | ||||||
| +    subprocess.check_call([git, 'checkout', '--quiet', commithash], |  | ||||||
| +                          cwd=directory) |  | ||||||
| +    if verbose: |  | ||||||
| +      status(directory, commithash, True) |  | ||||||
| +    return |  | ||||||
|   |  | ||||||
|    if not is_git_toplevel(git, directory): |  | ||||||
|      # if the directory exists, but isn't a git repo, you will modify |  | ||||||
| @@ -145,11 +153,11 @@ def git_checkout_to_directory(git, repo, checkoutable, directory, verbose): |  | ||||||
|    with open(os.devnull, 'w') as devnull: |  | ||||||
|      # If this fails, we will fetch before trying again.  Don't spam user |  | ||||||
|      # with error infomation. |  | ||||||
| -    if 0 == subprocess.call([git, 'checkout', '--quiet', checkoutable], |  | ||||||
| +    if 0 == subprocess.call([git, 'checkout', '--quiet', commithash], |  | ||||||
|                              cwd=directory, stderr=devnull): |  | ||||||
|        # if this succeeds, skip slow `git fetch`. |  | ||||||
|        if verbose: |  | ||||||
| -        status(directory, checkoutable)  # Success. |  | ||||||
| +        status(directory, commithash, False)  # Success. |  | ||||||
|        return |  | ||||||
|   |  | ||||||
|    # If the repo has changed, always force use of the correct repo. |  | ||||||
| @@ -159,18 +167,24 @@ def git_checkout_to_directory(git, repo, checkoutable, directory, verbose): |  | ||||||
|   |  | ||||||
|    subprocess.check_call([git, 'fetch', '--quiet'], cwd=directory) |  | ||||||
|   |  | ||||||
| -  subprocess.check_call([git, 'checkout', '--quiet', checkoutable], cwd=directory) |  | ||||||
| +  subprocess.check_call([git, 'checkout', '--quiet', commithash], cwd=directory) |  | ||||||
|   |  | ||||||
|    if verbose: |  | ||||||
| -    status(directory, checkoutable)  # Success. |  | ||||||
| +    status(directory, commithash, True)  # Success. |  | ||||||
|   |  | ||||||
|   |  | ||||||
|  def parse_file_to_dict(path): |  | ||||||
|    dictionary = {} |  | ||||||
| -  execfile(path, dictionary) |  | ||||||
| +  with open(path) as f: |  | ||||||
| +    exec('def Var(x): return vars[x]\n' + f.read(), dictionary) |  | ||||||
|    return dictionary |  | ||||||
|   |  | ||||||
|   |  | ||||||
| +def is_sha1_sum(s): |  | ||||||
| +  """SHA1 sums are 160 bits, encoded as lowercase hexadecimal.""" |  | ||||||
| +  return len(s) == 40 and all(c in '0123456789abcdef' for c in s) |  | ||||||
| + |  | ||||||
| + |  | ||||||
|  def git_sync_deps(deps_file_path, command_line_os_requests, verbose): |  | ||||||
|    """Grab dependencies, with optional platform support. |  | ||||||
|   |  | ||||||
| @@ -204,19 +218,19 @@ def git_sync_deps(deps_file_path, command_line_os_requests, verbose): |  | ||||||
|          raise Exception('%r is parent of %r' % (other_dir, directory)) |  | ||||||
|    list_of_arg_lists = [] |  | ||||||
|    for directory in sorted(dependencies): |  | ||||||
| -    if not isinstance(dependencies[directory], basestring): |  | ||||||
| +    if not isinstance(dependencies[directory], str): |  | ||||||
|        if verbose: |  | ||||||
| -        print 'Skipping "%s".' % directory |  | ||||||
| +        sys.stdout.write( 'Skipping "%s".\n' % directory) |  | ||||||
|        continue |  | ||||||
|      if '@' in dependencies[directory]: |  | ||||||
| -      repo, checkoutable = dependencies[directory].split('@', 1) |  | ||||||
| +      repo, commithash = dependencies[directory].split('@', 1) |  | ||||||
|      else: |  | ||||||
| -      raise Exception("please specify commit or tag") |  | ||||||
| +      raise Exception("please specify commit") |  | ||||||
|   |  | ||||||
|      relative_directory = os.path.join(deps_file_directory, directory) |  | ||||||
|   |  | ||||||
|      list_of_arg_lists.append( |  | ||||||
| -      (git, repo, checkoutable, relative_directory, verbose)) |  | ||||||
| +      (git, repo, commithash, relative_directory, verbose)) |  | ||||||
|   |  | ||||||
|    multithread(git_checkout_to_directory, list_of_arg_lists) |  | ||||||
|   |  | ||||||
| @@ -1,41 +0,0 @@ | |||||||
| diff --git a/tools/skqp/src/skqp.cpp b/tools/skqp/src/skqp.cpp |  | ||||||
| index 50ed9db01d..938217000d 100644 |  | ||||||
| --- a/tools/skqp/src/skqp.cpp |  | ||||||
| +++ b/tools/skqp/src/skqp.cpp |  | ||||||
| @@ -448,7 +448,7 @@ inline void write(SkWStream* wStream, const T& text) { |  | ||||||
|   |  | ||||||
|  void SkQP::makeReport() { |  | ||||||
|      SkASSERT_RELEASE(fAssetManager); |  | ||||||
| -    int glesErrorCount = 0, vkErrorCount = 0, gles = 0, vk = 0; |  | ||||||
| +    int glErrorCount = 0, glesErrorCount = 0, vkErrorCount = 0, gl = 0, gles = 0, vk = 0; |  | ||||||
|   |  | ||||||
|      if (!sk_isdir(fReportDirectory.c_str())) { |  | ||||||
|          SkDebugf("Report destination does not exist: '%s'\n", fReportDirectory.c_str()); |  | ||||||
| @@ -460,6 +460,7 @@ void SkQP::makeReport() { |  | ||||||
|      htmOut.writeText(kDocHead); |  | ||||||
|      for (const SkQP::RenderResult& run : fRenderResults) { |  | ||||||
|          switch (run.fBackend) { |  | ||||||
| +            case SkQP::SkiaBackend::kGL: ++gl; break; |  | ||||||
|              case SkQP::SkiaBackend::kGLES: ++gles; break; |  | ||||||
|              case SkQP::SkiaBackend::kVulkan: ++vk; break; |  | ||||||
|              default: break; |  | ||||||
| @@ -477,15 +478,17 @@ void SkQP::makeReport() { |  | ||||||
|          } |  | ||||||
|          write(&htmOut, SkStringPrintf("  f(%s);\n", str.c_str())); |  | ||||||
|          switch (run.fBackend) { |  | ||||||
| +            case SkQP::SkiaBackend::kGL: ++glErrorCount; break; |  | ||||||
|              case SkQP::SkiaBackend::kGLES: ++glesErrorCount; break; |  | ||||||
|              case SkQP::SkiaBackend::kVulkan: ++vkErrorCount; break; |  | ||||||
|              default: break; |  | ||||||
|          } |  | ||||||
|      } |  | ||||||
|      htmOut.writeText(kDocMiddle); |  | ||||||
| -    write(&htmOut, SkStringPrintf("<p>gles errors: %d (of %d)</br>\n" |  | ||||||
| +    write(&htmOut, SkStringPrintf("<p>gl errors: %d (of %d)</br>\n" |  | ||||||
| +                                  "gles errors: %d (of %d)</br>\n" |  | ||||||
|                                    "vk errors: %d (of %d)</p>\n", |  | ||||||
| -                                  glesErrorCount, gles, vkErrorCount, vk)); |  | ||||||
| +                                  glErrorCount, gl, glesErrorCount, gles, vkErrorCount, vk)); |  | ||||||
|      htmOut.writeText(kDocTail); |  | ||||||
|      SkFILEWStream unitOut(SkOSPath::Join(fReportDirectory.c_str(), kUnitTestReportPath).c_str()); |  | ||||||
|      SkASSERT_RELEASE(unitOut.isValid()); |  | ||||||
| @@ -1,13 +0,0 @@ | |||||||
| diff --git a/gn/BUILDCONFIG.gn b/gn/BUILDCONFIG.gn |  | ||||||
| index 454334a..1797594 100644 |  | ||||||
| --- a/gn/BUILDCONFIG.gn |  | ||||||
| +++ b/gn/BUILDCONFIG.gn |  | ||||||
| @@ -80,7 +80,7 @@ if (current_cpu == "") { |  | ||||||
|  is_clang = is_android || is_ios || is_mac || |  | ||||||
|             (cc == "clang" && cxx == "clang++") || clang_win != "" |  | ||||||
|  if (!is_clang && !is_win) { |  | ||||||
| -  is_clang = exec_script("gn/is_clang.py", |  | ||||||
| +  is_clang = exec_script("//gn/is_clang.py", |  | ||||||
|                           [ |  | ||||||
|                             cc, |  | ||||||
|                             cxx, |  | ||||||
| @@ -1,18 +0,0 @@ | |||||||
| Nima-Cpp is not available anymore inside googlesource, revert to github one |  | ||||||
| Simulates `git revert 49233d2521054037ded7d760427c4a0dc1e11356` |  | ||||||
|  |  | ||||||
| diff --git a/DEPS b/DEPS |  | ||||||
| index 7e0b941..c88b064 100644 |  | ||||||
| --- a/DEPS |  | ||||||
| +++ b/DEPS |  | ||||||
| @@ -33,8 +33,8 @@ deps = { |  | ||||||
|    #"third_party/externals/v8"              : "https://chromium.googlesource.com/v8/v8.git@5f1ae66d5634e43563b2d25ea652dfb94c31a3b4", |  | ||||||
|    "third_party/externals/wuffs"           : "https://skia.googlesource.com/external/github.com/google/wuffs.git@fda3c4c9863d9f9fcec58ae66508c4621fc71ea5", |  | ||||||
|    "third_party/externals/zlib"            : "https://chromium.googlesource.com/chromium/src/third_party/zlib@47af7c547f8551bd25424e56354a2ae1e9062859", |  | ||||||
| -  "third_party/externals/Nima-Cpp"        : "https://skia.googlesource.com/external/github.com/2d-inc/Nima-Cpp.git@4bd02269d7d1d2e650950411325eafa15defb084", |  | ||||||
| -  "third_party/externals/Nima-Math-Cpp"   : "https://skia.googlesource.com/external/github.com/2d-inc/Nima-Math-Cpp.git@e0c12772093fa8860f55358274515b86885f0108", |  | ||||||
| +  "third_party/externals/Nima-Cpp"        : "https://github.com/2d-inc/Nima-Cpp.git@4bd02269d7d1d2e650950411325eafa15defb084", |  | ||||||
| +  "third_party/externals/Nima-Math-Cpp"   : "https://github.com/2d-inc/Nima-Math-Cpp.git@e0c12772093fa8860f55358274515b86885f0108", |  | ||||||
|   |  | ||||||
|    "../src": { |  | ||||||
|      "url": "https://chromium.googlesource.com/chromium/src.git@ccf3465732e5d5363f0e44a8fac54550f62dd1d0", |  | ||||||
| @@ -1,18 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| git config --global user.email "mesa@example.com" |  | ||||||
| git config --global user.name "Mesa CI" |  | ||||||
| git clone \ |  | ||||||
|     https://github.com/intel/libva-utils.git \ |  | ||||||
|     -b 2.13.0 \ |  | ||||||
|     --depth 1 \ |  | ||||||
|     /va-utils |  | ||||||
|  |  | ||||||
| pushd /va-utils |  | ||||||
| meson build -D tests=true  -Dprefix=/va $EXTRA_MESON_ARGS |  | ||||||
| ninja -C build install |  | ||||||
| popd |  | ||||||
| rm -rf /va-utils |  | ||||||
| @@ -1,39 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| VKD3D_PROTON_COMMIT="5b73139f182d86cd58a757e4b5f0d4cfad96d319" |  | ||||||
|  |  | ||||||
| VKD3D_PROTON_DST_DIR="/vkd3d-proton-tests" |  | ||||||
| VKD3D_PROTON_SRC_DIR="/vkd3d-proton-src" |  | ||||||
| VKD3D_PROTON_BUILD_DIR="/vkd3d-proton-$VKD3D_PROTON_VERSION" |  | ||||||
|  |  | ||||||
| function build_arch { |  | ||||||
|   local arch="$1" |  | ||||||
|   shift |  | ||||||
|  |  | ||||||
|   meson "$@"                               \ |  | ||||||
|         -Denable_tests=true                \ |  | ||||||
|         --buildtype release                \ |  | ||||||
|         --prefix "$VKD3D_PROTON_DST_DIR"   \ |  | ||||||
|         --strip                            \ |  | ||||||
|         --bindir "x${arch}"                \ |  | ||||||
|         --libdir "x${arch}"                \ |  | ||||||
|         "$VKD3D_PROTON_BUILD_DIR/build.${arch}" |  | ||||||
|  |  | ||||||
|   ninja -C "$VKD3D_PROTON_BUILD_DIR/build.${arch}" install |  | ||||||
|  |  | ||||||
|   install -D -m755 -t "${VKD3D_PROTON_DST_DIR}/x${arch}/bin" "$VKD3D_PROTON_BUILD_DIR/build.${arch}/tests/d3d12" |  | ||||||
| } |  | ||||||
|  |  | ||||||
| git clone https://github.com/HansKristian-Work/vkd3d-proton.git --single-branch -b master --no-checkout "$VKD3D_PROTON_SRC_DIR" |  | ||||||
| pushd "$VKD3D_PROTON_SRC_DIR" |  | ||||||
| git checkout "$VKD3D_PROTON_COMMIT" |  | ||||||
| git submodule update --init --recursive |  | ||||||
| git submodule update --recursive |  | ||||||
| build_arch 64 |  | ||||||
| build_arch 86 |  | ||||||
| popd |  | ||||||
|  |  | ||||||
| rm -rf "$VKD3D_PROTON_BUILD_DIR" |  | ||||||
| rm -rf "$VKD3D_PROTON_SRC_DIR" |  | ||||||
| @@ -1,22 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| export LIBWAYLAND_VERSION="1.18.0" |  | ||||||
| export WAYLAND_PROTOCOLS_VERSION="1.24" |  | ||||||
|  |  | ||||||
| git clone https://gitlab.freedesktop.org/wayland/wayland |  | ||||||
| cd wayland |  | ||||||
| git checkout "$LIBWAYLAND_VERSION" |  | ||||||
| meson -Ddocumentation=false -Ddtd_validation=false -Dlibraries=true _build |  | ||||||
| ninja -C _build install |  | ||||||
| cd .. |  | ||||||
| rm -rf wayland |  | ||||||
|  |  | ||||||
| git clone https://gitlab.freedesktop.org/wayland/wayland-protocols |  | ||||||
| cd wayland-protocols |  | ||||||
| git checkout "$WAYLAND_PROTOCOLS_VERSION" |  | ||||||
| meson _build |  | ||||||
| ninja -C _build install |  | ||||||
| cd .. |  | ||||||
| rm -rf wayland-protocols |  | ||||||
| @@ -1,10 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
|  |  | ||||||
| if test -f /etc/debian_version; then |  | ||||||
|     apt-get autoremove -y --purge |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # Clean up any build cache for rust. |  | ||||||
| rm -rf /.cargo |  | ||||||
|  |  | ||||||
| ccache --show-stats |  | ||||||
| @@ -1,46 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
|  |  | ||||||
| if test -f /etc/debian_version; then |  | ||||||
|     CCACHE_PATH=/usr/lib/ccache |  | ||||||
| else |  | ||||||
|     CCACHE_PATH=/usr/lib64/ccache |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # Common setup among container builds before we get to building code. |  | ||||||
|  |  | ||||||
| export CCACHE_COMPILERCHECK=content |  | ||||||
| export CCACHE_COMPRESS=true |  | ||||||
| export CCACHE_DIR=/cache/$CI_PROJECT_NAME/ccache |  | ||||||
| export PATH=$CCACHE_PATH:$PATH |  | ||||||
|  |  | ||||||
| # CMake ignores $PATH, so we have to force CC/GCC to the ccache versions. |  | ||||||
| export CC="${CCACHE_PATH}/gcc" |  | ||||||
| export CXX="${CCACHE_PATH}/g++" |  | ||||||
|  |  | ||||||
| # When not using the mold linker (e.g. unsupported architecture), force |  | ||||||
| # linkers to gold, since it's so much faster for building.  We can't use |  | ||||||
| # lld because we're on old debian and it's buggy.  ming fails meson builds |  | ||||||
| # with it with "meson.build:21:0: ERROR: Unable to determine dynamic linker" |  | ||||||
| find /usr/bin -name \*-ld -o -name ld | \ |  | ||||||
|     grep -v mingw | \ |  | ||||||
|     xargs -n 1 -I '{}' ln -sf '{}.gold' '{}' |  | ||||||
|  |  | ||||||
| ccache --show-stats |  | ||||||
|  |  | ||||||
| # Make a wrapper script for ninja to always include the -j flags |  | ||||||
| { |  | ||||||
|     echo '#!/bin/sh -x' |  | ||||||
|     # shellcheck disable=SC2016 |  | ||||||
|     echo '/usr/bin/ninja -j${FDO_CI_CONCURRENT:-4} "$@"' |  | ||||||
| } > /usr/local/bin/ninja |  | ||||||
| chmod +x /usr/local/bin/ninja |  | ||||||
|  |  | ||||||
| # Set MAKEFLAGS so that all make invocations in container builds include the |  | ||||||
| # flags (doesn't apply to non-container builds, but we don't run make there) |  | ||||||
| export MAKEFLAGS="-j${FDO_CI_CONCURRENT:-4}" |  | ||||||
|  |  | ||||||
| # make wget to try more than once, when download fails or timeout |  | ||||||
| echo -e "retry_connrefused = on\n" \ |  | ||||||
|         "read_timeout = 300\n" \ |  | ||||||
|         "tries = 4\n" \ |  | ||||||
|         "wait_retry = 32" >> /etc/wgetrc |  | ||||||
| @@ -1,35 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| ndk=$1 |  | ||||||
| arch=$2 |  | ||||||
| cpu_family=$3 |  | ||||||
| cpu=$4 |  | ||||||
| cross_file="/cross_file-$arch.txt" |  | ||||||
|  |  | ||||||
| # armv7 has the toolchain split between two names. |  | ||||||
| arch2=${5:-$2} |  | ||||||
|  |  | ||||||
| # Note that we disable C++ exceptions, because Mesa doesn't use exceptions, |  | ||||||
| # and allowing it in code generation means we get unwind symbols that break |  | ||||||
| # the libEGL and driver symbol tests. |  | ||||||
|  |  | ||||||
| cat > "$cross_file" <<EOF |  | ||||||
| [binaries] |  | ||||||
| ar = '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/$arch-ar' |  | ||||||
| c = ['ccache', '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch2}29-clang', '-fno-exceptions', '-fno-unwind-tables', '-fno-asynchronous-unwind-tables'] |  | ||||||
| cpp = ['ccache', '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch2}29-clang++', '-fno-exceptions', '-fno-unwind-tables', '-fno-asynchronous-unwind-tables'] |  | ||||||
| c_ld = 'lld' |  | ||||||
| cpp_ld = 'lld' |  | ||||||
| strip = '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/$arch-strip' |  | ||||||
| pkgconfig = ['/usr/bin/pkg-config'] |  | ||||||
|  |  | ||||||
| [host_machine] |  | ||||||
| system = 'linux' |  | ||||||
| cpu_family = '$cpu_family' |  | ||||||
| cpu = '$cpu' |  | ||||||
| endian = 'little' |  | ||||||
|  |  | ||||||
| [properties] |  | ||||||
| needs_exe_wrapper = true |  | ||||||
|  |  | ||||||
| EOF |  | ||||||
| @@ -1,39 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| # Makes a .pc file in the Android NDK for meson to find its libraries. |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| ndk="$1" |  | ||||||
| pc="$2" |  | ||||||
| cflags="$3" |  | ||||||
| libs="$4" |  | ||||||
| version="$5" |  | ||||||
|  |  | ||||||
| sysroot=$ndk/toolchains/llvm/prebuilt/linux-x86_64/sysroot |  | ||||||
|  |  | ||||||
| for arch in \ |  | ||||||
|         x86_64-linux-android \ |  | ||||||
|         i686-linux-android \ |  | ||||||
|         aarch64-linux-android \ |  | ||||||
|         arm-linux-androideabi; do |  | ||||||
|     pcdir=$sysroot/usr/lib/$arch/pkgconfig |  | ||||||
|     mkdir -p $pcdir |  | ||||||
|  |  | ||||||
|     cat >$pcdir/$pc <<EOF |  | ||||||
| prefix=$sysroot |  | ||||||
| exec_prefix=$sysroot |  | ||||||
| libdir=$sysroot/usr/lib/$arch/29 |  | ||||||
| sharedlibdir=$sysroot/usr/lib/$arch |  | ||||||
| includedir=$sysroot/usr/include |  | ||||||
|  |  | ||||||
| Name: zlib |  | ||||||
| Description: zlib compression library |  | ||||||
| Version: $version |  | ||||||
|  |  | ||||||
| Requires: |  | ||||||
| Libs: -L$sysroot/usr/lib/$arch/29 $libs |  | ||||||
| Cflags: -I$sysroot/usr/include $cflags |  | ||||||
| EOF |  | ||||||
| done |  | ||||||
| @@ -1,53 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| arch=$1 |  | ||||||
| cross_file="/cross_file-$arch.txt" |  | ||||||
| /usr/share/meson/debcrossgen --arch "$arch" -o "$cross_file" |  | ||||||
| # Explicitly set ccache path for cross compilers |  | ||||||
| sed -i "s|/usr/bin/\([^-]*\)-linux-gnu\([^-]*\)-g|/usr/lib/ccache/\\1-linux-gnu\\2-g|g" "$cross_file" |  | ||||||
| if [ "$arch" = "i386" ]; then |  | ||||||
|     # Work around a bug in debcrossgen that should be fixed in the next release |  | ||||||
|     sed -i "s|cpu_family = 'i686'|cpu_family = 'x86'|g" "$cross_file" |  | ||||||
| fi |  | ||||||
| # Rely on qemu-user being configured in binfmt_misc on the host |  | ||||||
| # shellcheck disable=SC1003 # how this sed doesn't seems to work for me locally |  | ||||||
| sed -i -e '/\[properties\]/a\' -e "needs_exe_wrapper = False" "$cross_file" |  | ||||||
|  |  | ||||||
| # Add a line for rustc, which debcrossgen is missing. |  | ||||||
| cc=$(sed -n 's|c = .\(.*\).|\1|p' < "$cross_file") |  | ||||||
| if [[ "$arch" = "arm64" ]]; then |  | ||||||
|     rust_target=aarch64-unknown-linux-gnu |  | ||||||
| elif [[ "$arch" = "armhf" ]]; then |  | ||||||
|     rust_target=armv7-unknown-linux-gnueabihf |  | ||||||
| elif [[ "$arch" = "i386" ]]; then |  | ||||||
|     rust_target=i686-unknown-linux-gnu |  | ||||||
| elif [[ "$arch" = "ppc64el" ]]; then |  | ||||||
|     rust_target=powerpc64le-unknown-linux-gnu |  | ||||||
| elif [[ "$arch" = "s390x" ]]; then |  | ||||||
|     rust_target=s390x-unknown-linux-gnu |  | ||||||
| else |  | ||||||
|     echo "Needs rustc target mapping" |  | ||||||
| fi |  | ||||||
| # shellcheck disable=SC1003 # how this sed doesn't seems to work for me locally |  | ||||||
| sed -i -e '/\[binaries\]/a\' -e "rust = ['rustc', '--target=$rust_target', '-C', 'linker=$cc']" "$cross_file" |  | ||||||
|  |  | ||||||
| # Set up cmake cross compile toolchain file for dEQP builds |  | ||||||
| toolchain_file="/toolchain-$arch.cmake" |  | ||||||
| if [[ "$arch" = "arm64" ]]; then |  | ||||||
|     GCC_ARCH="aarch64-linux-gnu" |  | ||||||
|     DE_CPU="DE_CPU_ARM_64" |  | ||||||
| elif [[ "$arch" = "armhf" ]]; then |  | ||||||
|     GCC_ARCH="arm-linux-gnueabihf" |  | ||||||
|     DE_CPU="DE_CPU_ARM" |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [[ -n "$GCC_ARCH" ]]; then |  | ||||||
|     { |  | ||||||
|         echo "set(CMAKE_SYSTEM_NAME Linux)"; |  | ||||||
|         echo "set(CMAKE_SYSTEM_PROCESSOR arm)"; |  | ||||||
|         echo "set(CMAKE_C_COMPILER /usr/lib/ccache/$GCC_ARCH-gcc)"; |  | ||||||
|         echo "set(CMAKE_CXX_COMPILER /usr/lib/ccache/$GCC_ARCH-g++)"; |  | ||||||
|         echo "set(ENV{PKG_CONFIG} \"/usr/bin/$GCC_ARCH-pkg-config\")"; |  | ||||||
|         echo "set(DE_CPU $DE_CPU)"; |  | ||||||
|     } > "$toolchain_file" |  | ||||||
| fi |  | ||||||
| @@ -1,323 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC2140  # ugly array, remove later |  | ||||||
| # shellcheck disable=SC2288  # ugly array, remove later |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| if [ $DEBIAN_ARCH = arm64 ]; then |  | ||||||
|     ARCH_PACKAGES="firmware-qcom-media |  | ||||||
|                    firmware-linux-nonfree |  | ||||||
|                    libfontconfig1 |  | ||||||
|                    libgl1 |  | ||||||
|                    libglu1-mesa |  | ||||||
|                    libvulkan-dev |  | ||||||
|     " |  | ||||||
| elif [ $DEBIAN_ARCH = amd64 ]; then |  | ||||||
|     # Add llvm 13 to the build image |  | ||||||
|     apt-get -y install --no-install-recommends wget gnupg2 software-properties-common |  | ||||||
|     apt-key add /llvm-snapshot.gpg.key |  | ||||||
|     add-apt-repository "deb https://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-13 main" |  | ||||||
|     # Debian bullseye has older wine 5.0, we want >= 7.0 for traces. |  | ||||||
|     apt-key add /winehq.gpg.key |  | ||||||
|     apt-add-repository https://dl.winehq.org/wine-builds/debian/ |  | ||||||
|  |  | ||||||
|  |  | ||||||
|     ARCH_PACKAGES="firmware-amd-graphics |  | ||||||
|                    inetutils-syslogd |  | ||||||
|                    iptables |  | ||||||
|                    libcap2 |  | ||||||
|                    libfontconfig1 |  | ||||||
|                    libelf1 |  | ||||||
|                    libfdt1 |  | ||||||
|                    libgl1 |  | ||||||
|                    libglu1-mesa |  | ||||||
|                    libllvm13 |  | ||||||
|                    libllvm11 |  | ||||||
|                    libva2 |  | ||||||
|                    libva-drm2 |  | ||||||
|                    libvulkan-dev |  | ||||||
|                    socat |  | ||||||
|                    spirv-tools |  | ||||||
|                    sysvinit-core |  | ||||||
|                   " |  | ||||||
|  |  | ||||||
| elif [ $DEBIAN_ARCH = armhf ]; then |  | ||||||
|     ARCH_PACKAGES="firmware-misc-nonfree |  | ||||||
|                   " |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| INSTALL_CI_FAIRY_PACKAGES="git |  | ||||||
|                            python3-dev |  | ||||||
|                            python3-pip |  | ||||||
|                            python3-setuptools |  | ||||||
|                            python3-wheel |  | ||||||
|                            " |  | ||||||
|  |  | ||||||
| apt-get update |  | ||||||
| apt-get -y install --no-install-recommends \ |  | ||||||
|     $ARCH_PACKAGES \ |  | ||||||
|     $INSTALL_CI_FAIRY_PACKAGES \ |  | ||||||
|     $EXTRA_LOCAL_PACKAGES \ |  | ||||||
|     bash \ |  | ||||||
|     ca-certificates \ |  | ||||||
|     firmware-realtek \ |  | ||||||
|     initramfs-tools \ |  | ||||||
|     jq \ |  | ||||||
|     libasan6 \ |  | ||||||
|     libexpat1 \ |  | ||||||
|     libpng16-16 \ |  | ||||||
|     libpython3.9 \ |  | ||||||
|     libsensors5 \ |  | ||||||
|     libvulkan1 \ |  | ||||||
|     libwaffle-1-0 \ |  | ||||||
|     libx11-6 \ |  | ||||||
|     libx11-xcb1 \ |  | ||||||
|     libxcb-dri2-0 \ |  | ||||||
|     libxcb-dri3-0 \ |  | ||||||
|     libxcb-glx0 \ |  | ||||||
|     libxcb-present0 \ |  | ||||||
|     libxcb-randr0 \ |  | ||||||
|     libxcb-shm0 \ |  | ||||||
|     libxcb-sync1 \ |  | ||||||
|     libxcb-xfixes0 \ |  | ||||||
|     libxdamage1 \ |  | ||||||
|     libxext6 \ |  | ||||||
|     libxfixes3 \ |  | ||||||
|     libxkbcommon0 \ |  | ||||||
|     libxrender1 \ |  | ||||||
|     libxshmfence1 \ |  | ||||||
|     libxxf86vm1 \ |  | ||||||
|     netcat-openbsd \ |  | ||||||
|     python3 \ |  | ||||||
|     python3-lxml \ |  | ||||||
|     python3-mako \ |  | ||||||
|     python3-numpy \ |  | ||||||
|     python3-packaging \ |  | ||||||
|     python3-pil \ |  | ||||||
|     python3-renderdoc \ |  | ||||||
|     python3-requests \ |  | ||||||
|     python3-simplejson \ |  | ||||||
|     python3-yaml \ |  | ||||||
|     sntp \ |  | ||||||
|     strace \ |  | ||||||
|     waffle-utils \ |  | ||||||
|     wget \ |  | ||||||
|     xinit \ |  | ||||||
|     xserver-xorg-core \ |  | ||||||
|     zstd |  | ||||||
|  |  | ||||||
|  |  | ||||||
| if [ "$DEBIAN_ARCH" = "amd64" ]; then |  | ||||||
|   # workaround wine needing 32-bit |  | ||||||
|   # https://bugs.winehq.org/show_bug.cgi?id=53393 |  | ||||||
|   apt-get install -y --no-remove wine-stable-amd64  # a requirement for wine-stable |  | ||||||
|   WINE_PKG="wine-stable" |  | ||||||
|   WINE_PKG_DROP="wine-stable-i386" |  | ||||||
|   apt download "${WINE_PKG}" |  | ||||||
|   dpkg --ignore-depends="${WINE_PKG_DROP}" -i "${WINE_PKG}"*.deb |  | ||||||
|   rm "${WINE_PKG}"*.deb |  | ||||||
|   sed -i "/${WINE_PKG_DROP}/d" /var/lib/dpkg/status |  | ||||||
|   apt-get install -y --no-remove winehq-stable  # symlinks-only, depends on wine-stable |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # Needed for ci-fairy, this revision is able to upload files to |  | ||||||
| # MinIO and doesn't depend on git |  | ||||||
| pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2 |  | ||||||
|  |  | ||||||
| # Needed for manipulation with traces yaml files. |  | ||||||
| pip3 install yq |  | ||||||
|  |  | ||||||
| apt-get purge -y \ |  | ||||||
|         $INSTALL_CI_FAIRY_PACKAGES |  | ||||||
|  |  | ||||||
| passwd root -d |  | ||||||
| chsh -s /bin/sh |  | ||||||
|  |  | ||||||
| cat > /init <<EOF |  | ||||||
| #!/bin/sh |  | ||||||
| export PS1=lava-shell: |  | ||||||
| exec sh |  | ||||||
| EOF |  | ||||||
| chmod +x  /init |  | ||||||
|  |  | ||||||
| ####################################################################### |  | ||||||
| # Strip the image to a small minimal system without removing the debian |  | ||||||
| # toolchain. |  | ||||||
|  |  | ||||||
| # Copy timezone file and remove tzdata package |  | ||||||
| rm -rf /etc/localtime |  | ||||||
| cp /usr/share/zoneinfo/Etc/UTC /etc/localtime |  | ||||||
|  |  | ||||||
| UNNEEDED_PACKAGES=" |  | ||||||
|         libfdisk1 |  | ||||||
|         " |  | ||||||
|  |  | ||||||
| export DEBIAN_FRONTEND=noninteractive |  | ||||||
|  |  | ||||||
| # Removing unused packages |  | ||||||
| for PACKAGE in ${UNNEEDED_PACKAGES} |  | ||||||
| do |  | ||||||
| 	echo ${PACKAGE} |  | ||||||
| 	if ! apt-get remove --purge --yes "${PACKAGE}" |  | ||||||
| 	then |  | ||||||
| 		echo "WARNING: ${PACKAGE} isn't installed" |  | ||||||
| 	fi |  | ||||||
| done |  | ||||||
|  |  | ||||||
| apt-get autoremove --yes || true |  | ||||||
|  |  | ||||||
| # Dropping logs |  | ||||||
| rm -rf /var/log/* |  | ||||||
|  |  | ||||||
| # Dropping documentation, localization, i18n files, etc |  | ||||||
| rm -rf /usr/share/doc/* |  | ||||||
| rm -rf /usr/share/locale/* |  | ||||||
| rm -rf /usr/share/X11/locale/* |  | ||||||
| rm -rf /usr/share/man |  | ||||||
| rm -rf /usr/share/i18n/* |  | ||||||
| rm -rf /usr/share/info/* |  | ||||||
| rm -rf /usr/share/lintian/* |  | ||||||
| rm -rf /usr/share/common-licenses/* |  | ||||||
| rm -rf /usr/share/mime/* |  | ||||||
|  |  | ||||||
| # Dropping reportbug scripts |  | ||||||
| rm -rf /usr/share/bug |  | ||||||
|  |  | ||||||
| # Drop udev hwdb not required on a stripped system |  | ||||||
| rm -rf /lib/udev/hwdb.bin /lib/udev/hwdb.d/* |  | ||||||
|  |  | ||||||
| # Drop all gconv conversions && binaries |  | ||||||
| rm -rf usr/bin/iconv |  | ||||||
| rm -rf usr/sbin/iconvconfig |  | ||||||
| rm -rf usr/lib/*/gconv/ |  | ||||||
|  |  | ||||||
| # Remove libusb database |  | ||||||
| rm -rf usr/sbin/update-usbids |  | ||||||
| rm -rf var/lib/usbutils/usb.ids |  | ||||||
| rm -rf usr/share/misc/usb.ids |  | ||||||
|  |  | ||||||
| rm -rf /root/.pip |  | ||||||
|  |  | ||||||
| ####################################################################### |  | ||||||
| # Crush into a minimal production image to be deployed via some type of image |  | ||||||
| # updating system. |  | ||||||
| # IMPORTANT: The Debian system is not longer functional at this point, |  | ||||||
| # for example, apt and dpkg will stop working |  | ||||||
|  |  | ||||||
| UNNEEDED_PACKAGES="apt libapt-pkg6.0 "\ |  | ||||||
| "ncurses-bin ncurses-base libncursesw6 libncurses6 "\ |  | ||||||
| "perl-base "\ |  | ||||||
| "debconf libdebconfclient0 "\ |  | ||||||
| "e2fsprogs e2fslibs libfdisk1 "\ |  | ||||||
| "insserv "\ |  | ||||||
| "udev "\ |  | ||||||
| "init-system-helpers "\ |  | ||||||
| "cpio "\ |  | ||||||
| "passwd "\ |  | ||||||
| "libsemanage1 libsemanage-common "\ |  | ||||||
| "libsepol1 "\ |  | ||||||
| "gpgv "\ |  | ||||||
| "hostname "\ |  | ||||||
| "adduser "\ |  | ||||||
| "debian-archive-keyring "\ |  | ||||||
| "libegl1-mesa-dev "\ |  | ||||||
| "libegl-mesa0 "\ |  | ||||||
| "libgl1-mesa-dev "\ |  | ||||||
| "libgl1-mesa-dri "\ |  | ||||||
| "libglapi-mesa "\ |  | ||||||
| "libgles2-mesa-dev "\ |  | ||||||
| "libglx-mesa0 "\ |  | ||||||
| "mesa-common-dev "\ |  | ||||||
| "gnupg2 "\ |  | ||||||
| "software-properties-common " \ |  | ||||||
|  |  | ||||||
| # Removing unneeded packages |  | ||||||
| for PACKAGE in ${UNNEEDED_PACKAGES} |  | ||||||
| do |  | ||||||
| 	echo "Forcing removal of ${PACKAGE}" |  | ||||||
| 	if ! dpkg --purge --force-remove-essential --force-depends "${PACKAGE}" |  | ||||||
| 	then |  | ||||||
| 		echo "WARNING: ${PACKAGE} isn't installed" |  | ||||||
| 	fi |  | ||||||
| done |  | ||||||
|  |  | ||||||
| # Show what's left package-wise before dropping dpkg itself |  | ||||||
| COLUMNS=300 dpkg-query -W --showformat='${Installed-Size;10}\t${Package}\n' | sort -k1,1n |  | ||||||
|  |  | ||||||
| # Drop dpkg |  | ||||||
| dpkg --purge --force-remove-essential --force-depends  dpkg |  | ||||||
|  |  | ||||||
| # No apt or dpkg, no need for its configuration archives |  | ||||||
| rm -rf etc/apt |  | ||||||
| rm -rf etc/dpkg |  | ||||||
|  |  | ||||||
| # Drop directories not part of ostree |  | ||||||
| # Note that /var needs to exist as ostree bind mounts the deployment /var over |  | ||||||
| # it |  | ||||||
| rm -rf var/* srv share |  | ||||||
|  |  | ||||||
| # ca-certificates are in /etc drop the source |  | ||||||
| rm -rf usr/share/ca-certificates |  | ||||||
|  |  | ||||||
| # No need for completions |  | ||||||
| rm -rf usr/share/bash-completion |  | ||||||
|  |  | ||||||
| # No zsh, no need for comletions |  | ||||||
| rm -rf usr/share/zsh/vendor-completions |  | ||||||
|  |  | ||||||
| # drop gcc python helpers |  | ||||||
| rm -rf usr/share/gcc |  | ||||||
|  |  | ||||||
| # Drop sysvinit leftovers |  | ||||||
| rm -rf etc/init.d |  | ||||||
| rm -rf etc/rc[0-6S].d |  | ||||||
|  |  | ||||||
| # Drop upstart helpers |  | ||||||
| rm -rf etc/init |  | ||||||
|  |  | ||||||
| # Various xtables helpers |  | ||||||
| rm -rf usr/lib/xtables |  | ||||||
|  |  | ||||||
| # Drop all locales |  | ||||||
| # TODO: only remaining locale is actually "C". Should we really remove it? |  | ||||||
| rm -rf usr/lib/locale/* |  | ||||||
|  |  | ||||||
| # partition helpers |  | ||||||
| rm -rf usr/sbin/*fdisk |  | ||||||
|  |  | ||||||
| # local compiler |  | ||||||
| rm -rf usr/bin/localedef |  | ||||||
|  |  | ||||||
| # Systemd dns resolver |  | ||||||
| find usr etc -name '*systemd-resolve*' -prune -exec rm -r {} \; |  | ||||||
|  |  | ||||||
| # Systemd network configuration |  | ||||||
| find usr etc -name '*networkd*' -prune -exec rm -r {} \; |  | ||||||
|  |  | ||||||
| # systemd ntp client |  | ||||||
| find usr etc -name '*timesyncd*' -prune -exec rm -r {} \; |  | ||||||
|  |  | ||||||
| # systemd hw database manager |  | ||||||
| find usr etc -name '*systemd-hwdb*' -prune -exec rm -r {} \; |  | ||||||
|  |  | ||||||
| # No need for fuse |  | ||||||
| find usr etc -name '*fuse*' -prune -exec rm -r {} \; |  | ||||||
|  |  | ||||||
| # lsb init function leftovers |  | ||||||
| rm -rf usr/lib/lsb |  | ||||||
|  |  | ||||||
| # Only needed when adding libraries |  | ||||||
| rm -rf usr/sbin/ldconfig* |  | ||||||
|  |  | ||||||
| # Games, unused |  | ||||||
| rmdir usr/games |  | ||||||
|  |  | ||||||
| # Remove pam module to authenticate against a DB |  | ||||||
| # plus libdb-5.3.so that is only used by this pam module |  | ||||||
| rm -rf usr/lib/*/security/pam_userdb.so |  | ||||||
| rm -rf usr/lib/*/libdb-5.3.so |  | ||||||
|  |  | ||||||
| # remove NSS support for nis, nisplus and hesiod |  | ||||||
| rm -rf usr/lib/*/libnss_hesiod* |  | ||||||
| rm -rf usr/lib/*/libnss_nis* |  | ||||||
| @@ -1,81 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
| set -o xtrace |  | ||||||
|  |  | ||||||
| export DEBIAN_FRONTEND=noninteractive |  | ||||||
|  |  | ||||||
| # Ephemeral packages (installed for this script and removed again at the end) |  | ||||||
| STABLE_EPHEMERAL=" \ |  | ||||||
|         " |  | ||||||
|  |  | ||||||
| dpkg --add-architecture $arch |  | ||||||
| apt-get update |  | ||||||
|  |  | ||||||
| apt-get install -y --no-remove \ |  | ||||||
|         $STABLE_EPHEMERAL \ |  | ||||||
|         crossbuild-essential-$arch \ |  | ||||||
|         libelf-dev:$arch \ |  | ||||||
|         libexpat1-dev:$arch \ |  | ||||||
|         libpciaccess-dev:$arch \ |  | ||||||
|         libstdc++6:$arch \ |  | ||||||
|         libvulkan-dev:$arch \ |  | ||||||
|         libx11-dev:$arch \ |  | ||||||
|         libx11-xcb-dev:$arch \ |  | ||||||
|         libxcb-dri2-0-dev:$arch \ |  | ||||||
|         libxcb-dri3-dev:$arch \ |  | ||||||
|         libxcb-glx0-dev:$arch \ |  | ||||||
|         libxcb-present-dev:$arch \ |  | ||||||
|         libxcb-randr0-dev:$arch \ |  | ||||||
|         libxcb-shm0-dev:$arch \ |  | ||||||
|         libxcb-xfixes0-dev:$arch \ |  | ||||||
|         libxdamage-dev:$arch \ |  | ||||||
|         libxext-dev:$arch \ |  | ||||||
|         libxrandr-dev:$arch \ |  | ||||||
|         libxshmfence-dev:$arch \ |  | ||||||
|         libxxf86vm-dev:$arch \ |  | ||||||
|         wget |  | ||||||
|  |  | ||||||
| if [[ $arch != "armhf" ]]; then |  | ||||||
|     # See the list of available architectures in https://apt.llvm.org/bullseye/dists/llvm-toolchain-bullseye-13/main/ |  | ||||||
|     if [[ $arch == "s390x" ]] || [[ $arch == "i386" ]] || [[ $arch == "arm64" ]]; then |  | ||||||
|         LLVM=13 |  | ||||||
|     else |  | ||||||
|         LLVM=11 |  | ||||||
|     fi |  | ||||||
|  |  | ||||||
|     # llvm-*-tools:$arch conflicts with python3:amd64. Install dependencies only |  | ||||||
|     # with apt-get, then force-install llvm-*-{dev,tools}:$arch with dpkg to get |  | ||||||
|     # around this. |  | ||||||
|     apt-get install -y --no-remove --no-install-recommends \ |  | ||||||
|             libclang-cpp${LLVM}:$arch \ |  | ||||||
|             libffi-dev:$arch \ |  | ||||||
|             libgcc-s1:$arch \ |  | ||||||
|             libtinfo-dev:$arch \ |  | ||||||
|             libz3-dev:$arch \ |  | ||||||
|             llvm-${LLVM}:$arch \ |  | ||||||
|             zlib1g |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/create-cross-file.sh $arch |  | ||||||
|  |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/container_pre_build.sh |  | ||||||
|  |  | ||||||
|  |  | ||||||
| # dependencies where we want a specific version |  | ||||||
| EXTRA_MESON_ARGS="--cross-file=/cross_file-${arch}.txt -D libdir=lib/$(dpkg-architecture -A $arch -qDEB_TARGET_MULTIARCH)" |  | ||||||
| . .gitlab-ci/container/build-libdrm.sh |  | ||||||
|  |  | ||||||
| apt-get purge -y \ |  | ||||||
|         $STABLE_EPHEMERAL |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/container_post_build.sh |  | ||||||
|  |  | ||||||
| # This needs to be done after container_post_build.sh, or apt-get breaks in there |  | ||||||
| if [[ $arch != "armhf" ]]; then |  | ||||||
|     apt-get download llvm-${LLVM}-{dev,tools}:$arch |  | ||||||
|     dpkg -i --force-depends llvm-${LLVM}-*_${arch}.deb |  | ||||||
|     rm llvm-${LLVM}-*_${arch}.deb |  | ||||||
| fi |  | ||||||
| @@ -1,107 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| EPHEMERAL="\ |  | ||||||
|          autoconf \ |  | ||||||
|          rdfind \ |  | ||||||
|          unzip \ |  | ||||||
|          " |  | ||||||
|  |  | ||||||
| apt-get install -y --no-remove $EPHEMERAL |  | ||||||
|  |  | ||||||
| # Fetch the NDK and extract just the toolchain we want. |  | ||||||
| ndk=android-ndk-r21d |  | ||||||
| wget -O $ndk.zip https://dl.google.com/android/repository/$ndk-linux-x86_64.zip |  | ||||||
| unzip -d / $ndk.zip "$ndk/toolchains/llvm/*" |  | ||||||
| rm $ndk.zip |  | ||||||
| # Since it was packed as a zip file, symlinks/hardlinks got turned into |  | ||||||
| # duplicate files.  Turn them into hardlinks to save on container space. |  | ||||||
| rdfind -makehardlinks true -makeresultsfile false /android-ndk-r21d/ |  | ||||||
| # Drop some large tools we won't use in this build. |  | ||||||
| find /android-ndk-r21d/ -type f | grep -E -i "clang-check|clang-tidy|lldb" | xargs rm -f |  | ||||||
|  |  | ||||||
| sh .gitlab-ci/container/create-android-ndk-pc.sh /$ndk zlib.pc "" "-lz" "1.2.3" |  | ||||||
|  |  | ||||||
| sh .gitlab-ci/container/create-android-cross-file.sh /$ndk x86_64-linux-android x86_64 x86_64 |  | ||||||
| sh .gitlab-ci/container/create-android-cross-file.sh /$ndk i686-linux-android x86 x86 |  | ||||||
| sh .gitlab-ci/container/create-android-cross-file.sh /$ndk aarch64-linux-android arm armv8 |  | ||||||
| sh .gitlab-ci/container/create-android-cross-file.sh /$ndk arm-linux-androideabi arm armv7hl armv7a-linux-androideabi |  | ||||||
|  |  | ||||||
| # Not using build-libdrm.sh because we don't want its cleanup after building |  | ||||||
| # each arch.  Fetch and extract now. |  | ||||||
| export LIBDRM_VERSION=libdrm-2.4.110 |  | ||||||
| wget https://dri.freedesktop.org/libdrm/$LIBDRM_VERSION.tar.xz |  | ||||||
| tar -xf $LIBDRM_VERSION.tar.xz && rm $LIBDRM_VERSION.tar.xz |  | ||||||
|  |  | ||||||
| for arch in \ |  | ||||||
|         x86_64-linux-android \ |  | ||||||
|         i686-linux-android \ |  | ||||||
|         aarch64-linux-android \ |  | ||||||
|         arm-linux-androideabi ; do |  | ||||||
|  |  | ||||||
|     cd $LIBDRM_VERSION |  | ||||||
|     rm -rf build-$arch |  | ||||||
|     meson build-$arch \ |  | ||||||
|           --cross-file=/cross_file-$arch.txt \ |  | ||||||
|           --libdir=lib/$arch \ |  | ||||||
|           -Dlibkms=false \ |  | ||||||
|           -Dnouveau=false \ |  | ||||||
|           -Dvc4=false \ |  | ||||||
|           -Detnaviv=false \ |  | ||||||
|           -Dfreedreno=false \ |  | ||||||
|           -Dintel=false \ |  | ||||||
|           -Dcairo-tests=false \ |  | ||||||
|           -Dvalgrind=false |  | ||||||
|     ninja -C build-$arch install |  | ||||||
|     cd .. |  | ||||||
| done |  | ||||||
|  |  | ||||||
| rm -rf $LIBDRM_VERSION |  | ||||||
|  |  | ||||||
| export LIBELF_VERSION=libelf-0.8.13 |  | ||||||
| wget https://fossies.org/linux/misc/old/$LIBELF_VERSION.tar.gz |  | ||||||
|  |  | ||||||
| # Not 100% sure who runs the mirror above so be extra careful |  | ||||||
| if ! echo "4136d7b4c04df68b686570afa26988ac ${LIBELF_VERSION}.tar.gz" | md5sum -c -; then |  | ||||||
|     echo "Checksum failed" |  | ||||||
|     exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| tar -xf ${LIBELF_VERSION}.tar.gz |  | ||||||
| cd $LIBELF_VERSION |  | ||||||
|  |  | ||||||
| # Work around a bug in the original configure not enabling __LIBELF64. |  | ||||||
| autoreconf |  | ||||||
|  |  | ||||||
| for arch in \ |  | ||||||
|         x86_64-linux-android \ |  | ||||||
|         i686-linux-android \ |  | ||||||
|         aarch64-linux-android \ |  | ||||||
|         arm-linux-androideabi ; do |  | ||||||
|  |  | ||||||
|     ccarch=${arch} |  | ||||||
|     if [ "${arch}" ==  'arm-linux-androideabi' ] |  | ||||||
|     then |  | ||||||
|        ccarch=armv7a-linux-androideabi |  | ||||||
|     fi |  | ||||||
|  |  | ||||||
|     export CC=/android-ndk-r21d/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch}-ar |  | ||||||
|     export CC=/android-ndk-r21d/toolchains/llvm/prebuilt/linux-x86_64/bin/${ccarch}29-clang |  | ||||||
|     export CXX=/android-ndk-r21d/toolchains/llvm/prebuilt/linux-x86_64/bin/${ccarch}29-clang++ |  | ||||||
|     export LD=/android-ndk-r21d/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch}-ld |  | ||||||
|     export RANLIB=/android-ndk-r21d/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch}-ranlib |  | ||||||
|  |  | ||||||
|     # The configure script doesn't know about android, but doesn't really use the host anyway it |  | ||||||
|     # seems |  | ||||||
|     ./configure --host=x86_64-linux-gnu  --disable-nls --disable-shared \ |  | ||||||
|                 --libdir=/usr/local/lib/${arch} |  | ||||||
|     make install |  | ||||||
|     make distclean |  | ||||||
| done |  | ||||||
|  |  | ||||||
| cd .. |  | ||||||
| rm -rf $LIBELF_VERSION |  | ||||||
|  |  | ||||||
| apt-get purge -y $EPHEMERAL |  | ||||||
| @@ -1,86 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
| set -o xtrace |  | ||||||
|  |  | ||||||
| apt-get -y install ca-certificates |  | ||||||
| sed -i -e 's/http:\/\/deb/https:\/\/deb/g' /etc/apt/sources.list |  | ||||||
| echo 'deb https://deb.debian.org/debian buster main' >/etc/apt/sources.list.d/buster.list |  | ||||||
| apt-get update |  | ||||||
|  |  | ||||||
| # Ephemeral packages (installed for this script and removed again at |  | ||||||
| # the end) |  | ||||||
| STABLE_EPHEMERAL=" \ |  | ||||||
|         libssl-dev \ |  | ||||||
|         " |  | ||||||
|  |  | ||||||
| apt-get -y install \ |  | ||||||
| 	${EXTRA_LOCAL_PACKAGES} \ |  | ||||||
| 	${STABLE_EPHEMERAL} \ |  | ||||||
| 	autoconf \ |  | ||||||
| 	automake \ |  | ||||||
| 	bc \ |  | ||||||
| 	bison \ |  | ||||||
| 	ccache \ |  | ||||||
| 	cmake \ |  | ||||||
| 	debootstrap \ |  | ||||||
| 	fastboot \ |  | ||||||
| 	flex \ |  | ||||||
| 	g++ \ |  | ||||||
| 	git \ |  | ||||||
| 	glslang-tools \ |  | ||||||
| 	kmod \ |  | ||||||
| 	libasan6 \ |  | ||||||
| 	libdrm-dev \ |  | ||||||
| 	libelf-dev \ |  | ||||||
| 	libexpat1-dev \ |  | ||||||
| 	libvulkan-dev \ |  | ||||||
| 	libx11-dev \ |  | ||||||
| 	libx11-xcb-dev \ |  | ||||||
| 	libxcb-dri2-0-dev \ |  | ||||||
| 	libxcb-dri3-dev \ |  | ||||||
| 	libxcb-glx0-dev \ |  | ||||||
| 	libxcb-present-dev \ |  | ||||||
| 	libxcb-randr0-dev \ |  | ||||||
| 	libxcb-shm0-dev \ |  | ||||||
| 	libxcb-xfixes0-dev \ |  | ||||||
| 	libxdamage-dev \ |  | ||||||
| 	libxext-dev \ |  | ||||||
| 	libxrandr-dev \ |  | ||||||
| 	libxshmfence-dev \ |  | ||||||
| 	libxxf86vm-dev \ |  | ||||||
| 	llvm-11-dev \ |  | ||||||
| 	meson \ |  | ||||||
| 	pkg-config \ |  | ||||||
| 	python3-mako \ |  | ||||||
| 	python3-pil \ |  | ||||||
| 	python3-pip \ |  | ||||||
| 	python3-requests \ |  | ||||||
| 	python3-setuptools \ |  | ||||||
| 	u-boot-tools \ |  | ||||||
| 	wget \ |  | ||||||
| 	xz-utils \ |  | ||||||
| 	zlib1g-dev \ |  | ||||||
| 	zstd |  | ||||||
|  |  | ||||||
| # Not available anymore in bullseye |  | ||||||
| apt-get install -y --no-remove -t buster \ |  | ||||||
|         android-sdk-ext4-utils |  | ||||||
|  |  | ||||||
| pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2 |  | ||||||
|  |  | ||||||
| arch=armhf |  | ||||||
| . .gitlab-ci/container/cross_build.sh |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/container_pre_build.sh |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/build-mold.sh |  | ||||||
|  |  | ||||||
| # dependencies where we want a specific version |  | ||||||
| EXTRA_MESON_ARGS= |  | ||||||
| . .gitlab-ci/container/build-libdrm.sh |  | ||||||
|  |  | ||||||
| apt-get purge -y $STABLE_EPHEMERAL |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/container_post_build.sh |  | ||||||
| @@ -1,45 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
| set -o xtrace |  | ||||||
|  |  | ||||||
| ############### Install packages for baremetal testing |  | ||||||
| apt-get install -y ca-certificates |  | ||||||
| sed -i -e 's/http:\/\/deb/https:\/\/deb/g' /etc/apt/sources.list |  | ||||||
| apt-get update |  | ||||||
|  |  | ||||||
| apt-get install -y --no-remove \ |  | ||||||
|         cpio \ |  | ||||||
|         fastboot \ |  | ||||||
|         netcat \ |  | ||||||
|         procps \ |  | ||||||
|         python3-distutils \ |  | ||||||
|         python3-minimal \ |  | ||||||
|         python3-serial \ |  | ||||||
|         rsync \ |  | ||||||
|         snmp \ |  | ||||||
|         wget \ |  | ||||||
|         zstd |  | ||||||
|  |  | ||||||
| # setup SNMPv2 SMI MIB |  | ||||||
| wget https://raw.githubusercontent.com/net-snmp/net-snmp/master/mibs/SNMPv2-SMI.txt \ |  | ||||||
|     -O /usr/share/snmp/mibs/SNMPv2-SMI.txt |  | ||||||
|  |  | ||||||
| arch=arm64 . .gitlab-ci/container/baremetal_build.sh |  | ||||||
| arch=armhf . .gitlab-ci/container/baremetal_build.sh |  | ||||||
|  |  | ||||||
| # This firmware file from Debian bullseye causes hangs |  | ||||||
| wget https://git.kernel.org/pub/scm/linux/kernel/git/firmware/linux-firmware.git/plain/qcom/a530_pfp.fw?id=d5f9eea5a251d43412b07f5295d03e97b89ac4a5 \ |  | ||||||
|      -O /rootfs-arm64/lib/firmware/qcom/a530_pfp.fw |  | ||||||
|  |  | ||||||
| mkdir -p /baremetal-files/jetson-nano/boot/ |  | ||||||
| ln -s \ |  | ||||||
|     /baremetal-files/Image \ |  | ||||||
|     /baremetal-files/tegra210-p3450-0000.dtb \ |  | ||||||
|     /baremetal-files/jetson-nano/boot/ |  | ||||||
|  |  | ||||||
| mkdir -p /baremetal-files/jetson-tk1/boot/ |  | ||||||
| ln -s \ |  | ||||||
|     /baremetal-files/zImage \ |  | ||||||
|     /baremetal-files/tegra124-jetson-tk1.dtb \ |  | ||||||
|     /baremetal-files/jetson-tk1/boot/ |  | ||||||
| @@ -1,5 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| arch=i386 |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/cross_build.sh |  | ||||||
| @@ -1,52 +0,0 @@ | |||||||
| -----BEGIN PGP PUBLIC KEY BLOCK----- |  | ||||||
| Version: GnuPG v1.4.12 (GNU/Linux) |  | ||||||
|  |  | ||||||
| mQINBFE9lCwBEADi0WUAApM/mgHJRU8lVkkw0CHsZNpqaQDNaHefD6Rw3S4LxNmM |  | ||||||
| EZaOTkhP200XZM8lVdbfUW9xSjA3oPldc1HG26NjbqqCmWpdo2fb+r7VmU2dq3NM |  | ||||||
| R18ZlKixiLDE6OUfaXWKamZsXb6ITTYmgTO6orQWYrnW6ckYHSeaAkW0wkDAryl2 |  | ||||||
| B5v8aoFnQ1rFiVEMo4NGzw4UX+MelF7rxaaregmKVTPiqCOSPJ1McC1dHFN533FY |  | ||||||
| Wh/RVLKWo6npu+owtwYFQW+zyQhKzSIMvNujFRzhIxzxR9Gn87MoLAyfgKEzrbbT |  | ||||||
| DhqqNXTxS4UMUKCQaO93TzetX/EBrRpJj+vP640yio80h4Dr5pAd7+LnKwgpTDk1 |  | ||||||
| G88bBXJAcPZnTSKu9I2c6KY4iRNbvRz4i+ZdwwZtdW4nSdl2792L7Sl7Nc44uLL/ |  | ||||||
| ZqkKDXEBF6lsX5XpABwyK89S/SbHOytXv9o4puv+65Ac5/UShspQTMSKGZgvDauU |  | ||||||
| cs8kE1U9dPOqVNCYq9Nfwinkf6RxV1k1+gwtclxQuY7UpKXP0hNAXjAiA5KS5Crq |  | ||||||
| 7aaJg9q2F4bub0mNU6n7UI6vXguF2n4SEtzPRk6RP+4TiT3bZUsmr+1ktogyOJCc |  | ||||||
| Ha8G5VdL+NBIYQthOcieYCBnTeIH7D3Sp6FYQTYtVbKFzmMK+36ERreL/wARAQAB |  | ||||||
| tD1TeWx2ZXN0cmUgTGVkcnUgLSBEZWJpYW4gTExWTSBwYWNrYWdlcyA8c3lsdmVz |  | ||||||
| dHJlQGRlYmlhbi5vcmc+iQI4BBMBAgAiBQJRPZQsAhsDBgsJCAcDAgYVCAIJCgsE |  | ||||||
| FgIDAQIeAQIXgAAKCRAVz00Yr090Ibx+EADArS/hvkDF8juWMXxh17CgR0WZlHCC |  | ||||||
| 9CTBWkg5a0bNN/3bb97cPQt/vIKWjQtkQpav6/5JTVCSx2riL4FHYhH0iuo4iAPR |  | ||||||
| udC7Cvg8g7bSPrKO6tenQZNvQm+tUmBHgFiMBJi92AjZ/Qn1Shg7p9ITivFxpLyX |  | ||||||
| wpmnF1OKyI2Kof2rm4BFwfSWuf8Fvh7kDMRLHv+MlnK/7j/BNpKdozXxLcwoFBmn |  | ||||||
| l0WjpAH3OFF7Pvm1LJdf1DjWKH0Dc3sc6zxtmBR/KHHg6kK4BGQNnFKujcP7TVdv |  | ||||||
| gMYv84kun14pnwjZcqOtN3UJtcx22880DOQzinoMs3Q4w4o05oIF+sSgHViFpc3W |  | ||||||
| R0v+RllnH05vKZo+LDzc83DQVrdwliV12eHxrMQ8UYg88zCbF/cHHnlzZWAJgftg |  | ||||||
| hB08v1BKPgYRUzwJ6VdVqXYcZWEaUJmQAPuAALyZESw94hSo28FAn0/gzEc5uOYx |  | ||||||
| K+xG/lFwgAGYNb3uGM5m0P6LVTfdg6vDwwOeTNIExVk3KVFXeSQef2ZMkhwA7wya |  | ||||||
| KJptkb62wBHFE+o9TUdtMCY6qONxMMdwioRE5BYNwAsS1PnRD2+jtlI0DzvKHt7B |  | ||||||
| MWd8hnoUKhMeZ9TNmo+8CpsAtXZcBho0zPGz/R8NlJhAWpdAZ1CmcPo83EW86Yq7 |  | ||||||
| BxQUKnNHcwj2ebkCDQRRPZQsARAA4jxYmbTHwmMjqSizlMJYNuGOpIidEdx9zQ5g |  | ||||||
| zOr431/VfWq4S+VhMDhs15j9lyml0y4ok215VRFwrAREDg6UPMr7ajLmBQGau0Fc |  | ||||||
| bvZJ90l4NjXp5p0NEE/qOb9UEHT7EGkEhaZ1ekkWFTWCgsy7rRXfZLxB6sk7pzLC |  | ||||||
| DshyW3zjIakWAnpQ5j5obiDy708pReAuGB94NSyb1HoW/xGsGgvvCw4r0w3xPStw |  | ||||||
| F1PhmScE6NTBIfLliea3pl8vhKPlCh54Hk7I8QGjo1ETlRP4Qll1ZxHJ8u25f/ta |  | ||||||
| RES2Aw8Hi7j0EVcZ6MT9JWTI83yUcnUlZPZS2HyeWcUj+8nUC8W4N8An+aNps9l/ |  | ||||||
| 21inIl2TbGo3Yn1JQLnA1YCoGwC34g8QZTJhElEQBN0X29ayWW6OdFx8MDvllbBV |  | ||||||
| ymmKq2lK1U55mQTfDli7S3vfGz9Gp/oQwZ8bQpOeUkc5hbZszYwP4RX+68xDPfn+ |  | ||||||
| M9udl+qW9wu+LyePbW6HX90LmkhNkkY2ZzUPRPDHZANU5btaPXc2H7edX4y4maQa |  | ||||||
| xenqD0lGh9LGz/mps4HEZtCI5CY8o0uCMF3lT0XfXhuLksr7Pxv57yue8LLTItOJ |  | ||||||
| d9Hmzp9G97SRYYeqU+8lyNXtU2PdrLLq7QHkzrsloG78lCpQcalHGACJzrlUWVP/ |  | ||||||
| fN3Ht3kAEQEAAYkCHwQYAQIACQUCUT2ULAIbDAAKCRAVz00Yr090IbhWEADbr50X |  | ||||||
| OEXMIMGRLe+YMjeMX9NG4jxs0jZaWHc/WrGR+CCSUb9r6aPXeLo+45949uEfdSsB |  | ||||||
| pbaEdNWxF5Vr1CSjuO5siIlgDjmT655voXo67xVpEN4HhMrxugDJfCa6z97P0+ML |  | ||||||
| PdDxim57uNqkam9XIq9hKQaurxMAECDPmlEXI4QT3eu5qw5/knMzDMZj4Vi6hovL |  | ||||||
| wvvAeLHO/jsyfIdNmhBGU2RWCEZ9uo/MeerPHtRPfg74g+9PPfP6nyHD2Wes6yGd |  | ||||||
| oVQwtPNAQD6Cj7EaA2xdZYLJ7/jW6yiPu98FFWP74FN2dlyEA2uVziLsfBrgpS4l |  | ||||||
| tVOlrO2YzkkqUGrybzbLpj6eeHx+Cd7wcjI8CalsqtL6cG8cUEjtWQUHyTbQWAgG |  | ||||||
| 5VPEgIAVhJ6RTZ26i/G+4J8neKyRs4vz+57UGwY6zI4AB1ZcWGEE3Bf+CDEDgmnP |  | ||||||
| LSwbnHefK9IljT9XU98PelSryUO/5UPw7leE0akXKB4DtekToO226px1VnGp3Bov |  | ||||||
| 1GBGvpHvL2WizEwdk+nfk8LtrLzej+9FtIcq3uIrYnsac47Pf7p0otcFeTJTjSq3 |  | ||||||
| krCaoG4Hx0zGQG2ZFpHrSrZTVy6lxvIdfi0beMgY6h78p6M9eYZHQHc02DjFkQXN |  | ||||||
| bXb5c6gCHESH5PXwPU4jQEE7Ib9J6sbk7ZT2Mw== |  | ||||||
| =j+4q |  | ||||||
| -----END PGP PUBLIC KEY BLOCK----- |  | ||||||
| @@ -1,5 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| arch=ppc64el |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/cross_build.sh |  | ||||||
| @@ -1,16 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
|  |  | ||||||
| arch=s390x |  | ||||||
|  |  | ||||||
| # Ephemeral packages (installed for this script and removed again at the end) |  | ||||||
| STABLE_EPHEMERAL="libssl-dev" |  | ||||||
|  |  | ||||||
| apt-get -y install "$STABLE_EPHEMERAL" |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/build-mold.sh |  | ||||||
|  |  | ||||||
| apt-get purge -y "$STABLE_EPHEMERAL" |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/cross_build.sh |  | ||||||
| @@ -1,53 +0,0 @@ | |||||||
| -----BEGIN PGP PUBLIC KEY BLOCK----- |  | ||||||
|  |  | ||||||
| mQGNBFwOmrgBDAC9FZW3dFpew1hwDaqRfdQQ1ABcmOYu1NKZHwYjd+bGvcR2LRGe |  | ||||||
| R5dfRqG1Uc/5r6CPCMvnWxFprymkqKEADn8eFn+aCnPx03HrhA+lNEbciPfTHylt |  | ||||||
| NTTuRua7YpJIgEOjhXUbxXxnvF8fhUf5NJpJg6H6fPQARUW+5M//BlVgwn2jhzlW |  | ||||||
| U+uwgeJthhiuTXkls9Yo3EoJzmkUih+ABZgvaiBpr7GZRw9GO1aucITct0YDNTVX |  | ||||||
| KA6el78/udi5GZSCKT94yY9ArN4W6NiOFCLV7MU5d6qMjwGFhfg46NBv9nqpGinK |  | ||||||
| 3NDjqCevKouhtKl2J+nr3Ju3Spzuv6Iex7tsOqt+XdZCoY+8+dy3G5zbJwBYsMiS |  | ||||||
| rTNF55PHtBH1S0QK5OoN2UR1ie/aURAyAFEMhTzvFB2B2v7C0IKIOmYMEG+DPMs9 |  | ||||||
| FQs/vZ1UnAQgWk02ZiPryoHfjFO80+XYMrdWN+RSo5q9ODClloaKXjqI/aWLGirm |  | ||||||
| KXw2R8tz31go3NMAEQEAAbQnV2luZUhRIHBhY2thZ2VzIDx3aW5lLWRldmVsQHdp |  | ||||||
| bmVocS5vcmc+iQHOBBMBCgA4AhsDBQsJCAcCBhUKCQgLAgQWAgMBAh4BAheAFiEE |  | ||||||
| 1D9kAUU2nFHXht3qdvGiD/mHZy8FAlwOmyUACgkQdvGiD/mHZy/zkwv7B+nKFlDY |  | ||||||
| Bzz/7j0gqIODbs5FRZRtuf/IuPP3vZdWlNfAW/VyaLtVLJCM/mmaf/O6/gJ+D+E9 |  | ||||||
| BBoSmHdHzBBOQHIj5IbRedynNcHT5qXsdBeU2ZPR50sdE+jmukvw3Wa5JijoDgUu |  | ||||||
| LGLGtU48Z3JsBXQ54OlnTZXQ2SMFhRUa10JANXSJQ+QY2Wo2Pi2+MEAHcrd71A2S |  | ||||||
| 0mT2DQSSBQ92c6WPfUpOSBawd8P0ipT7rVFNLJh8HVQGyEWxPl8ecDEHoVfG2rdV |  | ||||||
| D0ADbNLx9031UUwpUicO6vW/2Ec7c3VNG1cpOtyNTw/lEgvsXOh3GQs/DvFvMy/h |  | ||||||
| QzaeF3Qq6cAPlKuxieJe4lLYFBTmCAT4iB1J8oeFs4G7ScfZH4+4NBe3VGoeCD/M |  | ||||||
| Wl+qxntAroblxiFuqtPJg+NKZYWBzkptJNhnrBxcBnRinGZLw2k/GR/qPMgsR2L4 |  | ||||||
| cP+OUuka+R2gp9oDVTZTyMowz+ROIxnEijF50pkj2VBFRB02rfiMp7q6iQIzBBAB |  | ||||||
| CgAdFiEE2iNXmnTUrZr50/lFzvrI6q8XUZ0FAlwOm3AACgkQzvrI6q8XUZ3KKg/+ |  | ||||||
| MD8CgvLiHEX90fXQ23RZQRm2J21w3gxdIen/N8yJVIbK7NIgYhgWfGWsGQedtM7D |  | ||||||
| hMwUlDSRb4rWy9vrXBaiZoF3+nK9AcLvPChkZz28U59Jft6/l0gVrykey/ERU7EV |  | ||||||
| w1Ie1eRu0tRSXsKvMZyQH8897iHZ7uqoJgyk8U8CvSW+V80yqLB2M8Tk8ECZq34f |  | ||||||
| HqUIGs4Wo0UZh0vV4+dEQHBh1BYpmmWl+UPf7nzNwFWXu/EpjVhkExRqTnkEJ+Ai |  | ||||||
| OxbtrRn6ETKzpV4DjyifqQF639bMIem7DRRf+mkcrAXetvWkUkE76e3E9KLvETCZ |  | ||||||
| l4SBfgqSZs2vNngmpX6Qnoh883aFo5ZgVN3v6uTS+LgTwMt/XlnDQ7+Zw+ehCZ2R |  | ||||||
| CO21Y9Kbw6ZEWls/8srZdCQ2LxnyeyQeIzsLnqT/waGjQj35i4exzYeWpojVDb3r |  | ||||||
| tvvOALYGVlSYqZXIALTx2/tHXKLHyrn1C0VgHRnl+hwv7U49f7RvfQXpx47YQN/C |  | ||||||
| PWrpbG69wlKuJptr+olbyoKAWfl+UzoO8vLMo5njWQNAoAwh1H8aFUVNyhtbkRuq |  | ||||||
| l0kpy1Cmcq8uo6taK9lvYp8jak7eV8lHSSiGUKTAovNTwfZG2JboGV4/qLDUKvpa |  | ||||||
| lPp2xVpF9MzA8VlXTOzLpSyIVxZnPTpL+xR5P9WQjMS5AY0EXA6auAEMAMReKL89 |  | ||||||
| 0z0SL+/i/geB/agfG/k6AXiG2a9kVWeIjAqFwHKl9W/DTNvOqCDgAt51oiHGRRjt |  | ||||||
| 1Xm3XZD4p+GM1uZWn9qIFL49Gt5x94TqdrsKTVCJr0Kazn2mKQc7aja0zac+WtZG |  | ||||||
| OFn7KbniuAcwtC780cyikfmmExLI1/Vjg+NiMlMtZfpK6FIW+ulPiDQPdzIhVppx |  | ||||||
| w9/KlR2Fvh4TbzDsUqkFQSSAFdQ65BWgvzLpZHdKO/ILpDkThLbipjtvbBv/pHKM |  | ||||||
| O/NFTNoYkJ3cNW/kfcynwV+4AcKwdRz2A3Mez+g5TKFYPZROIbayOo01yTMLfz2p |  | ||||||
| jcqki/t4PACtwFOhkAs+MYPPyZDUkTFcEJQCPDstkAgmJWI3K2qELtDOLQyps3WY |  | ||||||
| Mfp+mntOdc8bKjFTMcCEk1zcm14K4Oms+w6dw2UnYsX1FAYYhPm8HUYwE4kP8M+D |  | ||||||
| 9HGLMjLqqF/kanlCFZs5Avx3mDSAx6zS8vtNdGh+64oDNk4x4A2j8GTUuQARAQAB |  | ||||||
| iQG8BBgBCgAmFiEE1D9kAUU2nFHXht3qdvGiD/mHZy8FAlwOmrgCGwwFCQPCZwAA |  | ||||||
| CgkQdvGiD/mHZy9FnAwAgfUkxsO53Pm2iaHhtF4+BUc8MNJj64Jvm1tghr6PBRtM |  | ||||||
| hpbvvN8SSOFwYIsS+2BMsJ2ldox4zMYhuvBcgNUlix0G0Z7h1MjftDdsLFi1DNv2 |  | ||||||
| J9dJ9LdpWdiZbyg4Sy7WakIZ/VvH1Znd89Imo7kCScRdXTjIw2yCkotE5lK7A6Ns |  | ||||||
| NbVuoYEN+dbGioF4csYehnjTdojwF/19mHFxrXkdDZ/V6ZYFIFxEsxL8FEuyI4+o |  | ||||||
| LC3DFSA4+QAFdkjGFXqFPlaEJxWt5d7wk0y+tt68v+ulkJ900BvR+OOMqQURwrAi |  | ||||||
| iP3I28aRrMjZYwyqHl8i/qyIv+WRakoDKV+wWteR5DmRAPHmX2vnlPlCmY8ysR6J |  | ||||||
| 2jUAfuDFVu4/qzJe6vw5tmPJMdfvy0W5oogX6sEdin5M5w2b3WrN8nXZcjbWymqP |  | ||||||
| 6jCdl6eoCCkKNOIbr/MMSkd2KqAqDVM5cnnlQ7q+AXzwNpj3RGJVoBxbS0nn9JWY |  | ||||||
| QNQrWh9rAcMIGT+b1le0 |  | ||||||
| =4lsa |  | ||||||
| -----END PGP PUBLIC KEY BLOCK----- |  | ||||||
| @@ -1,16 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
| set -o xtrace |  | ||||||
|  |  | ||||||
| # Installing wine, need this for testing mingw or nine |  | ||||||
|  |  | ||||||
| apt-get update |  | ||||||
| apt-get install -y --no-remove \ |  | ||||||
|       wine \ |  | ||||||
|       wine64 \ |  | ||||||
|       xvfb |  | ||||||
|  |  | ||||||
| # Used to initialize the Wine environment to reduce build time |  | ||||||
| wine64 whoami.exe |  | ||||||
|  |  | ||||||
| @@ -1,92 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
| set -o xtrace |  | ||||||
|  |  | ||||||
| export DEBIAN_FRONTEND=noninteractive |  | ||||||
|  |  | ||||||
| apt-get install -y ca-certificates gnupg2 software-properties-common |  | ||||||
|  |  | ||||||
| # Add llvm 13 to the build image |  | ||||||
| apt-key add .gitlab-ci/container/debian/llvm-snapshot.gpg.key |  | ||||||
| add-apt-repository "deb https://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-13 main" |  | ||||||
|  |  | ||||||
| sed -i -e 's/http:\/\/deb/https:\/\/deb/g' /etc/apt/sources.list |  | ||||||
|  |  | ||||||
| # Ephemeral packages (installed for this script and removed again at |  | ||||||
| # the end) |  | ||||||
| STABLE_EPHEMERAL=" \ |  | ||||||
|         python3-pip \ |  | ||||||
|         python3-setuptools \ |  | ||||||
|         " |  | ||||||
|  |  | ||||||
| apt-get update |  | ||||||
|  |  | ||||||
| apt-get install -y --no-remove \ |  | ||||||
|         $STABLE_EPHEMERAL \ |  | ||||||
|         bison \ |  | ||||||
|         ccache \ |  | ||||||
|         dpkg-cross \ |  | ||||||
|         findutils \ |  | ||||||
|         flex \ |  | ||||||
|         g++ \ |  | ||||||
|         cmake \ |  | ||||||
|         gcc \ |  | ||||||
|         git \ |  | ||||||
|         glslang-tools \ |  | ||||||
|         kmod \ |  | ||||||
|         libclang-13-dev \ |  | ||||||
|         libclang-11-dev \ |  | ||||||
|         libelf-dev \ |  | ||||||
|         libepoxy-dev \ |  | ||||||
|         libexpat1-dev \ |  | ||||||
|         libgtk-3-dev \ |  | ||||||
|         libllvm13 \ |  | ||||||
|         libllvm11 \ |  | ||||||
|         libomxil-bellagio-dev \ |  | ||||||
|         libpciaccess-dev \ |  | ||||||
|         libunwind-dev \ |  | ||||||
|         libva-dev \ |  | ||||||
|         libvdpau-dev \ |  | ||||||
|         libvulkan-dev \ |  | ||||||
|         libx11-dev \ |  | ||||||
|         libx11-xcb-dev \ |  | ||||||
|         libxext-dev \ |  | ||||||
|         libxml2-utils \ |  | ||||||
|         libxrandr-dev \ |  | ||||||
|         libxrender-dev \ |  | ||||||
|         libxshmfence-dev \ |  | ||||||
|         libxxf86vm-dev \ |  | ||||||
|         make \ |  | ||||||
|         meson \ |  | ||||||
|         pkg-config \ |  | ||||||
|         python3-mako \ |  | ||||||
|         python3-pil \ |  | ||||||
|         python3-ply \ |  | ||||||
|         python3-requests \ |  | ||||||
|         qemu-user \ |  | ||||||
|         valgrind \ |  | ||||||
|         wget \ |  | ||||||
|         x11proto-dri2-dev \ |  | ||||||
|         x11proto-gl-dev \ |  | ||||||
|         x11proto-randr-dev \ |  | ||||||
|         xz-utils \ |  | ||||||
|         zlib1g-dev \ |  | ||||||
| 	zstd |  | ||||||
|  |  | ||||||
| # Needed for ci-fairy, this revision is able to upload files to MinIO |  | ||||||
| pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2 |  | ||||||
|  |  | ||||||
| # We need at least 0.61.4 for proper Rust |  | ||||||
| pip3 install meson==0.61.5 |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/build-rust.sh |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/debian/x86_build-base-wine.sh |  | ||||||
|  |  | ||||||
| ############### Uninstall ephemeral packages |  | ||||||
|  |  | ||||||
| apt-get purge -y $STABLE_EPHEMERAL |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/container_post_build.sh |  | ||||||
| @@ -1,77 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
|  |  | ||||||
| # Pull packages from msys2 repository that can be directly used. |  | ||||||
| # We can use https://packages.msys2.org/ to retrieve the newest package |  | ||||||
| mkdir ~/tmp |  | ||||||
| pushd ~/tmp |  | ||||||
| MINGW_PACKET_LIST=" |  | ||||||
| mingw-w64-x86_64-headers-git-10.0.0.r14.ga08c638f8-1-any.pkg.tar.zst |  | ||||||
| mingw-w64-x86_64-vulkan-loader-1.3.211-1-any.pkg.tar.zst |  | ||||||
| mingw-w64-x86_64-libelf-0.8.13-6-any.pkg.tar.zst |  | ||||||
| mingw-w64-x86_64-zlib-1.2.12-1-any.pkg.tar.zst |  | ||||||
| mingw-w64-x86_64-zstd-1.5.2-2-any.pkg.tar.zst |  | ||||||
| " |  | ||||||
|  |  | ||||||
| for i in $MINGW_PACKET_LIST |  | ||||||
| do |  | ||||||
|   wget -q https://mirror.msys2.org/mingw/mingw64/$i |  | ||||||
|   tar xf $i --strip-components=1 -C /usr/x86_64-w64-mingw32/ |  | ||||||
| done |  | ||||||
| popd |  | ||||||
| rm -rf ~/tmp |  | ||||||
|  |  | ||||||
| mkdir -p /usr/x86_64-w64-mingw32/bin |  | ||||||
|  |  | ||||||
| # The output of `wine64 llvm-config --system-libs --cxxflags mcdisassembler` |  | ||||||
| # containes absolute path like '-IZ:' |  | ||||||
| # The sed is used to replace `-IZ:/usr/x86_64-w64-mingw32/include` |  | ||||||
| # to `-I/usr/x86_64-w64-mingw32/include` |  | ||||||
|  |  | ||||||
| # Debian's pkg-config wrapers for mingw are broken, and there's no sign that |  | ||||||
| # they're going to be fixed, so we'll just have to fix it ourselves |  | ||||||
| # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=930492 |  | ||||||
| cat >/usr/x86_64-w64-mingw32/bin/pkg-config <<EOF |  | ||||||
| #!/bin/sh |  | ||||||
|  |  | ||||||
| PKG_CONFIG_LIBDIR=/usr/x86_64-w64-mingw32/lib/pkgconfig:/usr/x86_64-w64-mingw32/share/pkgconfig pkg-config \$@ |  | ||||||
| EOF |  | ||||||
| chmod +x /usr/x86_64-w64-mingw32/bin/pkg-config |  | ||||||
|  |  | ||||||
| cat >/usr/x86_64-w64-mingw32/bin/llvm-config <<EOF |  | ||||||
| #!/bin/sh |  | ||||||
| wine64 llvm-config \$@ | sed -e "s,Z:/,/,gi" |  | ||||||
| EOF |  | ||||||
| chmod +x /usr/x86_64-w64-mingw32/bin/llvm-config |  | ||||||
|  |  | ||||||
| cat >/usr/x86_64-w64-mingw32/bin/clang <<EOF |  | ||||||
| #!/bin/sh |  | ||||||
| wine64 clang \$@ |  | ||||||
| EOF |  | ||||||
| chmod +x /usr/x86_64-w64-mingw32/bin/clang |  | ||||||
|  |  | ||||||
| cat >/usr/x86_64-w64-mingw32/bin/llvm-as <<EOF |  | ||||||
| #!/bin/sh |  | ||||||
| wine64 llvm-as \$@ |  | ||||||
| EOF |  | ||||||
| chmod +x /usr/x86_64-w64-mingw32/bin/llvm-as |  | ||||||
|  |  | ||||||
| cat >/usr/x86_64-w64-mingw32/bin/llvm-link <<EOF |  | ||||||
| #!/bin/sh |  | ||||||
| wine64 llvm-link \$@ |  | ||||||
| EOF |  | ||||||
| chmod +x /usr/x86_64-w64-mingw32/bin/llvm-link |  | ||||||
|  |  | ||||||
| cat >/usr/x86_64-w64-mingw32/bin/opt <<EOF |  | ||||||
| #!/bin/sh |  | ||||||
| wine64 opt \$@ |  | ||||||
| EOF |  | ||||||
| chmod +x /usr/x86_64-w64-mingw32/bin/opt |  | ||||||
|  |  | ||||||
| cat >/usr/x86_64-w64-mingw32/bin/llvm-spirv <<EOF |  | ||||||
| #!/bin/sh |  | ||||||
| wine64 llvm-spirv \$@ |  | ||||||
| EOF |  | ||||||
| chmod +x /usr/x86_64-w64-mingw32/bin/llvm-spirv |  | ||||||
| @@ -1,126 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
|  |  | ||||||
| # Building libdrm (libva dependency) |  | ||||||
| . .gitlab-ci/container/build-libdrm.sh |  | ||||||
|  |  | ||||||
| wd=$PWD |  | ||||||
| CMAKE_TOOLCHAIN_MINGW_PATH=$wd/.gitlab-ci/container/debian/x86_mingw-toolchain.cmake |  | ||||||
| mkdir -p ~/tmp |  | ||||||
| pushd ~/tmp |  | ||||||
|  |  | ||||||
| # Building DirectX-Headers |  | ||||||
| git clone https://github.com/microsoft/DirectX-Headers -b v1.606.4 --depth 1 |  | ||||||
| mkdir -p DirectX-Headers/build |  | ||||||
| pushd DirectX-Headers/build |  | ||||||
| meson .. \ |  | ||||||
| --backend=ninja \ |  | ||||||
| --buildtype=release -Dbuild-test=false \ |  | ||||||
| -Dprefix=/usr/x86_64-w64-mingw32/ \ |  | ||||||
| --cross-file=$wd/.gitlab-ci/x86_64-w64-mingw32 |  | ||||||
|  |  | ||||||
| ninja install |  | ||||||
| popd |  | ||||||
|  |  | ||||||
| # Building libva |  | ||||||
| git clone https://github.com/intel/libva |  | ||||||
| pushd libva/ |  | ||||||
| # Checking out commit hash with libva-win32 support |  | ||||||
| # This feature will be released with libva version 2.17 |  | ||||||
| git checkout 2579eb0f77897dc01a02c1e43defc63c40fd2988 |  | ||||||
| popd |  | ||||||
| # libva already has a build dir in their repo, use builddir instead |  | ||||||
| mkdir -p libva/builddir |  | ||||||
| pushd libva/builddir |  | ||||||
| meson .. \ |  | ||||||
| --backend=ninja \ |  | ||||||
| --buildtype=release \ |  | ||||||
| -Dprefix=/usr/x86_64-w64-mingw32/ \ |  | ||||||
| --cross-file=$wd/.gitlab-ci/x86_64-w64-mingw32 |  | ||||||
|  |  | ||||||
| ninja install |  | ||||||
| popd |  | ||||||
|  |  | ||||||
| export VULKAN_SDK_VERSION=1.3.211.0 |  | ||||||
|  |  | ||||||
| # Building SPIRV Tools |  | ||||||
| git clone -b sdk-$VULKAN_SDK_VERSION --depth=1 \ |  | ||||||
| https://github.com/KhronosGroup/SPIRV-Tools SPIRV-Tools |  | ||||||
|  |  | ||||||
| git clone -b sdk-$VULKAN_SDK_VERSION --depth=1 \ |  | ||||||
| https://github.com/KhronosGroup/SPIRV-Headers SPIRV-Tools/external/SPIRV-Headers |  | ||||||
|  |  | ||||||
| mkdir -p SPIRV-Tools/build |  | ||||||
| pushd SPIRV-Tools/build |  | ||||||
| cmake .. \ |  | ||||||
| -DCMAKE_TOOLCHAIN_FILE=$CMAKE_TOOLCHAIN_MINGW_PATH \ |  | ||||||
| -DCMAKE_INSTALL_PREFIX=/usr/x86_64-w64-mingw32/ \ |  | ||||||
| -GNinja -DCMAKE_BUILD_TYPE=Release \ |  | ||||||
| -DCMAKE_CROSSCOMPILING=1 \ |  | ||||||
| -DCMAKE_POLICY_DEFAULT_CMP0091=NEW |  | ||||||
|  |  | ||||||
| ninja install |  | ||||||
| popd |  | ||||||
|  |  | ||||||
| # Building LLVM |  | ||||||
| git clone -b release/14.x --depth=1 \ |  | ||||||
| https://github.com/llvm/llvm-project llvm-project |  | ||||||
|  |  | ||||||
| git clone -b v14.0.0 --depth=1 \ |  | ||||||
| https://github.com/KhronosGroup/SPIRV-LLVM-Translator llvm-project/llvm/projects/SPIRV-LLVM-Translator |  | ||||||
|  |  | ||||||
| mkdir llvm-project/build |  | ||||||
| pushd llvm-project/build |  | ||||||
| cmake ../llvm \ |  | ||||||
| -DCMAKE_TOOLCHAIN_FILE=$CMAKE_TOOLCHAIN_MINGW_PATH \ |  | ||||||
| -DCMAKE_INSTALL_PREFIX=/usr/x86_64-w64-mingw32/ \ |  | ||||||
| -GNinja -DCMAKE_BUILD_TYPE=Release \ |  | ||||||
| -DCMAKE_CROSSCOMPILING=1 \ |  | ||||||
| -DLLVM_ENABLE_RTTI=ON \ |  | ||||||
| -DCROSS_TOOLCHAIN_FLAGS_NATIVE=-DLLVM_EXTERNAL_SPIRV_HEADERS_SOURCE_DIR=$PWD/../../SPIRV-Tools/external/SPIRV-Headers \ |  | ||||||
| -DLLVM_EXTERNAL_SPIRV_HEADERS_SOURCE_DIR=$PWD/../../SPIRV-Tools/external/SPIRV-Headers \ |  | ||||||
| -DLLVM_ENABLE_PROJECTS="clang" \ |  | ||||||
| -DLLVM_TARGETS_TO_BUILD="AMDGPU;X86" \ |  | ||||||
| -DLLVM_OPTIMIZED_TABLEGEN=TRUE \ |  | ||||||
| -DLLVM_ENABLE_ASSERTIONS=TRUE \ |  | ||||||
| -DLLVM_INCLUDE_UTILS=OFF \ |  | ||||||
| -DLLVM_INCLUDE_RUNTIMES=OFF \ |  | ||||||
| -DLLVM_INCLUDE_TESTS=OFF \ |  | ||||||
| -DLLVM_INCLUDE_EXAMPLES=OFF \ |  | ||||||
| -DLLVM_INCLUDE_GO_TESTS=OFF \ |  | ||||||
| -DLLVM_INCLUDE_BENCHMARKS=OFF \ |  | ||||||
| -DLLVM_BUILD_LLVM_C_DYLIB=OFF \ |  | ||||||
| -DLLVM_ENABLE_DIA_SDK=OFF \ |  | ||||||
| -DCLANG_BUILD_TOOLS=ON \ |  | ||||||
| -DLLVM_SPIRV_INCLUDE_TESTS=OFF |  | ||||||
|  |  | ||||||
| ninja install |  | ||||||
| popd |  | ||||||
|  |  | ||||||
| # Building libclc |  | ||||||
| mkdir llvm-project/build-libclc |  | ||||||
| pushd llvm-project/build-libclc |  | ||||||
| cmake ../libclc \ |  | ||||||
| -DCMAKE_TOOLCHAIN_FILE=$CMAKE_TOOLCHAIN_MINGW_PATH \ |  | ||||||
| -DCMAKE_INSTALL_PREFIX=/usr/x86_64-w64-mingw32/ \ |  | ||||||
| -GNinja -DCMAKE_BUILD_TYPE=Release \ |  | ||||||
| -DCMAKE_CROSSCOMPILING=1 \ |  | ||||||
| -DCMAKE_POLICY_DEFAULT_CMP0091=NEW \ |  | ||||||
| -DCMAKE_CXX_FLAGS="-m64" \ |  | ||||||
| -DLLVM_CONFIG="/usr/x86_64-w64-mingw32/bin/llvm-config" \ |  | ||||||
| -DLLVM_CLANG="/usr/x86_64-w64-mingw32/bin/clang" \ |  | ||||||
| -DLLVM_AS="/usr/x86_64-w64-mingw32/bin/llvm-as" \ |  | ||||||
| -DLLVM_LINK="/usr/x86_64-w64-mingw32/bin/llvm-link" \ |  | ||||||
| -DLLVM_OPT="/usr/x86_64-w64-mingw32/bin/opt" \ |  | ||||||
| -DLLVM_SPIRV="/usr/x86_64-w64-mingw32/bin/llvm-spirv" \ |  | ||||||
| -DLIBCLC_TARGETS_TO_BUILD="spirv-mesa3d-;spirv64-mesa3d-" |  | ||||||
|  |  | ||||||
| ninja install |  | ||||||
| popd |  | ||||||
|  |  | ||||||
| popd # ~/tmp |  | ||||||
|  |  | ||||||
| # Cleanup ~/tmp |  | ||||||
| rm -rf ~/tmp |  | ||||||
| @@ -1,13 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
| set -o xtrace |  | ||||||
|  |  | ||||||
| apt-get update |  | ||||||
| apt-get install -y --no-remove \ |  | ||||||
|         zstd \ |  | ||||||
|         g++-mingw-w64-i686 \ |  | ||||||
|         g++-mingw-w64-x86-64 |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/debian/x86_build-mingw-patch.sh |  | ||||||
| . .gitlab-ci/container/debian/x86_build-mingw-source-deps.sh |  | ||||||
| @@ -1,108 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
| set -o xtrace |  | ||||||
|  |  | ||||||
| export DEBIAN_FRONTEND=noninteractive |  | ||||||
|  |  | ||||||
| # Ephemeral packages (installed for this script and removed again at the end) |  | ||||||
| STABLE_EPHEMERAL=" \ |  | ||||||
|       autoconf \ |  | ||||||
|       automake \ |  | ||||||
|       autotools-dev \ |  | ||||||
|       bzip2 \ |  | ||||||
|       libtool \ |  | ||||||
|       libssl-dev \ |  | ||||||
|       python3-pip \ |  | ||||||
|       " |  | ||||||
|  |  | ||||||
| apt-get update |  | ||||||
|  |  | ||||||
| apt-get install -y --no-remove \ |  | ||||||
|       $STABLE_EPHEMERAL \ |  | ||||||
|       check \ |  | ||||||
|       clang \ |  | ||||||
|       libasan6 \ |  | ||||||
|       libarchive-dev \ |  | ||||||
|       libclang-cpp13-dev \ |  | ||||||
|       libclang-cpp11-dev \ |  | ||||||
|       libgbm-dev \ |  | ||||||
|       libglvnd-dev \ |  | ||||||
|       liblua5.3-dev \ |  | ||||||
|       libxcb-dri2-0-dev \ |  | ||||||
|       libxcb-dri3-dev \ |  | ||||||
|       libxcb-glx0-dev \ |  | ||||||
|       libxcb-present-dev \ |  | ||||||
|       libxcb-randr0-dev \ |  | ||||||
|       libxcb-shm0-dev \ |  | ||||||
|       libxcb-sync-dev \ |  | ||||||
|       libxcb-xfixes0-dev \ |  | ||||||
|       libxcb1-dev \ |  | ||||||
|       libxml2-dev \ |  | ||||||
|       llvm-13-dev \ |  | ||||||
|       llvm-11-dev \ |  | ||||||
|       ocl-icd-opencl-dev \ |  | ||||||
|       python3-freezegun \ |  | ||||||
|       python3-pytest \ |  | ||||||
|       procps \ |  | ||||||
|       spirv-tools \ |  | ||||||
|       shellcheck \ |  | ||||||
|       strace \ |  | ||||||
|       time \ |  | ||||||
|       yamllint \ |  | ||||||
|       zstd |  | ||||||
|  |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/container_pre_build.sh |  | ||||||
|  |  | ||||||
| # dependencies where we want a specific version |  | ||||||
| export              XORG_RELEASES=https://xorg.freedesktop.org/releases/individual |  | ||||||
|  |  | ||||||
| export         XORGMACROS_VERSION=util-macros-1.19.0 |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/build-mold.sh |  | ||||||
|  |  | ||||||
| wget $XORG_RELEASES/util/$XORGMACROS_VERSION.tar.bz2 |  | ||||||
| tar -xvf $XORGMACROS_VERSION.tar.bz2 && rm $XORGMACROS_VERSION.tar.bz2 |  | ||||||
| cd $XORGMACROS_VERSION; ./configure; make install; cd .. |  | ||||||
| rm -rf $XORGMACROS_VERSION |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/build-llvm-spirv.sh |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/build-libclc.sh |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/build-libdrm.sh |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/build-wayland.sh |  | ||||||
|  |  | ||||||
| pushd /usr/local |  | ||||||
| git clone https://gitlab.freedesktop.org/mesa/shader-db.git --depth 1 |  | ||||||
| rm -rf shader-db/.git |  | ||||||
| cd shader-db |  | ||||||
| make |  | ||||||
| popd |  | ||||||
|  |  | ||||||
| git clone https://github.com/microsoft/DirectX-Headers -b v1.606.4 --depth 1 |  | ||||||
| mkdir -p DirectX-Headers/build |  | ||||||
| pushd DirectX-Headers/build |  | ||||||
| meson .. --backend=ninja --buildtype=release -Dbuild-test=false |  | ||||||
| ninja |  | ||||||
| ninja install |  | ||||||
| popd |  | ||||||
| rm -rf DirectX-Headers |  | ||||||
|  |  | ||||||
| pip3 install git+https://git.lavasoftware.org/lava/lavacli@3db3ddc45e5358908bc6a17448059ea2340492b7 |  | ||||||
|  |  | ||||||
| # install bindgen |  | ||||||
| RUSTFLAGS='-L native=/usr/local/lib' cargo install \ |  | ||||||
|   bindgen --version 0.59.2 \ |  | ||||||
|   -j ${FDO_CI_CONCURRENT:-4} \ |  | ||||||
|   --root /usr/local |  | ||||||
|  |  | ||||||
| ############### Uninstall the build software |  | ||||||
|  |  | ||||||
| apt-get purge -y \ |  | ||||||
|       $STABLE_EPHEMERAL |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/container_post_build.sh |  | ||||||
| @@ -1,8 +0,0 @@ | |||||||
| set(CMAKE_SYSTEM_NAME Windows) |  | ||||||
| set(CMAKE_SYSTEM_PROCESSOR x86_64) |  | ||||||
|  |  | ||||||
| set(CMAKE_SYSROOT /usr/x86_64-w64-mingw32/) |  | ||||||
| set(ENV{PKG_CONFIG} /usr/x86_64-w64-mingw32/bin/pkg-config) |  | ||||||
|  |  | ||||||
| set(CMAKE_C_COMPILER x86_64-w64-mingw32-gcc-posix) |  | ||||||
| set(CMAKE_CXX_COMPILER x86_64-w64-mingw32-g++-posix) |  | ||||||
| @@ -1,160 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
| set -o xtrace |  | ||||||
|  |  | ||||||
| export DEBIAN_FRONTEND=noninteractive |  | ||||||
|  |  | ||||||
| apt-get install -y ca-certificates gnupg2 software-properties-common |  | ||||||
|  |  | ||||||
| sed -i -e 's/http:\/\/deb/https:\/\/deb/g' /etc/apt/sources.list |  | ||||||
|  |  | ||||||
| # Ephemeral packages (installed for this script and removed again at |  | ||||||
| # the end) |  | ||||||
| STABLE_EPHEMERAL=" \ |  | ||||||
|       autoconf \ |  | ||||||
|       automake \ |  | ||||||
|       bc \ |  | ||||||
|       bison \ |  | ||||||
|       bzip2 \ |  | ||||||
|       ccache \ |  | ||||||
|       cmake \ |  | ||||||
|       clang-11 \ |  | ||||||
|       flex \ |  | ||||||
|       glslang-tools \ |  | ||||||
|       g++ \ |  | ||||||
|       libasound2-dev \ |  | ||||||
|       libcap-dev \ |  | ||||||
|       libclang-cpp11-dev \ |  | ||||||
|       libegl-dev \ |  | ||||||
|       libelf-dev \ |  | ||||||
|       libepoxy-dev \ |  | ||||||
|       libgbm-dev \ |  | ||||||
|       libpciaccess-dev \ |  | ||||||
|       libvulkan-dev \ |  | ||||||
|       libwayland-dev \ |  | ||||||
|       libx11-xcb-dev \ |  | ||||||
|       libxext-dev \ |  | ||||||
|       llvm-13-dev \ |  | ||||||
|       llvm-11-dev \ |  | ||||||
|       make \ |  | ||||||
|       meson \ |  | ||||||
|       patch \ |  | ||||||
|       pkg-config \ |  | ||||||
|       protobuf-compiler \ |  | ||||||
|       python3-dev \ |  | ||||||
|       python3-pip \ |  | ||||||
|       python3-setuptools \ |  | ||||||
|       python3-wheel \ |  | ||||||
|       spirv-tools \ |  | ||||||
|       wayland-protocols \ |  | ||||||
|       xz-utils \ |  | ||||||
|       " |  | ||||||
|  |  | ||||||
| # Add llvm 13 to the build image |  | ||||||
| apt-key add .gitlab-ci/container/debian/llvm-snapshot.gpg.key |  | ||||||
| add-apt-repository "deb https://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-13 main" |  | ||||||
|  |  | ||||||
| apt-get update |  | ||||||
| apt-get dist-upgrade -y |  | ||||||
|  |  | ||||||
| apt-get install -y \ |  | ||||||
|       sysvinit-core |  | ||||||
|  |  | ||||||
| apt-get install -y --no-remove \ |  | ||||||
|       git \ |  | ||||||
|       git-lfs \ |  | ||||||
|       inetutils-syslogd \ |  | ||||||
|       iptables \ |  | ||||||
|       jq \ |  | ||||||
|       libasan6 \ |  | ||||||
|       libexpat1 \ |  | ||||||
|       libllvm13 \ |  | ||||||
|       libllvm11 \ |  | ||||||
|       liblz4-1 \ |  | ||||||
|       libpng16-16 \ |  | ||||||
|       libpython3.9 \ |  | ||||||
|       libvulkan1 \ |  | ||||||
|       libwayland-client0 \ |  | ||||||
|       libwayland-server0 \ |  | ||||||
|       libxcb-ewmh2 \ |  | ||||||
|       libxcb-randr0 \ |  | ||||||
|       libxcb-xfixes0 \ |  | ||||||
|       libxkbcommon0 \ |  | ||||||
|       libxrandr2 \ |  | ||||||
|       libxrender1 \ |  | ||||||
|       python3-mako \ |  | ||||||
|       python3-numpy \ |  | ||||||
|       python3-packaging \ |  | ||||||
|       python3-pil \ |  | ||||||
|       python3-requests \ |  | ||||||
|       python3-six \ |  | ||||||
|       python3-yaml \ |  | ||||||
|       socat \ |  | ||||||
|       vulkan-tools \ |  | ||||||
|       waffle-utils \ |  | ||||||
|       wget \ |  | ||||||
|       xauth \ |  | ||||||
|       xvfb \ |  | ||||||
|       zlib1g \ |  | ||||||
|       zstd |  | ||||||
|  |  | ||||||
| apt-get install -y --no-install-recommends \ |  | ||||||
|       $STABLE_EPHEMERAL |  | ||||||
|  |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/container_pre_build.sh |  | ||||||
|  |  | ||||||
| ############### Build kernel |  | ||||||
|  |  | ||||||
| export DEFCONFIG="arch/x86/configs/x86_64_defconfig" |  | ||||||
| export KERNEL_IMAGE_NAME=bzImage |  | ||||||
| export KERNEL_ARCH=x86_64 |  | ||||||
| export DEBIAN_ARCH=amd64 |  | ||||||
|  |  | ||||||
| mkdir -p /lava-files/ |  | ||||||
| . .gitlab-ci/container/build-kernel.sh |  | ||||||
|  |  | ||||||
| # Needed for ci-fairy, this revision is able to upload files to MinIO |  | ||||||
| # and doesn't depend on git |  | ||||||
| pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2 |  | ||||||
|  |  | ||||||
| # Needed for manipulation with traces yaml files. |  | ||||||
| pip3 install yq |  | ||||||
|  |  | ||||||
| # Needed for crosvm compilation. |  | ||||||
| update-alternatives --install /usr/bin/clang clang /usr/bin/clang-11 100 |  | ||||||
|  |  | ||||||
| ############### Build LLVM-SPIRV translator |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/build-llvm-spirv.sh |  | ||||||
|  |  | ||||||
| ############### Build libclc |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/build-libclc.sh |  | ||||||
|  |  | ||||||
| ############### Build libdrm |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/build-libdrm.sh |  | ||||||
|  |  | ||||||
| ############### Build Wayland |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/build-wayland.sh |  | ||||||
|  |  | ||||||
| ############### Build Crosvm |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/build-rust.sh |  | ||||||
| . .gitlab-ci/container/build-crosvm.sh |  | ||||||
|  |  | ||||||
| ############### Build dEQP runner |  | ||||||
| . .gitlab-ci/container/build-deqp-runner.sh |  | ||||||
|  |  | ||||||
| rm -rf /root/.cargo |  | ||||||
| rm -rf /root/.rustup |  | ||||||
|  |  | ||||||
| ccache --show-stats |  | ||||||
|  |  | ||||||
| apt-get purge -y $STABLE_EPHEMERAL |  | ||||||
|  |  | ||||||
| apt-get autoremove -y --purge |  | ||||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user