Compare commits

128 commits: mesa-22.0. ... mesa-21.2.

Commit SHA1s, in the order listed on the compare page:
33595f88d6 c0623dbe16 2eb92dec11 18b65515a6 ecfa127381 90645f3eea 432964005b a5dec10d83
1d02d0743d 5b4b4b9ef6 7deef80ef6 1a1cf756d2 58bf0165ca 9fc8ae0cd5 5b99334ba3 76a317170e
97955560fd 149473db82 720645a5b3 7c2c2b9d2a 075ec9e608 538e9f93c5 2ca05ac293 5b17ed9781
ce0e8e022d 14f09b60e5 ca5fbe8517 39ffd918cd 4bb8e29a28 4524e8bff8 bda17c7388 1396ddcc4e
8b4a8972a8 4f7b4ba7f8 940cb9ebe9 f3a523a9be f9107dbf71 ebcd657099 72eeeba333 947fd891bf
cfe3e2ff53 ad0ba78934 bfc6597375 d06433f883 5577fb807e 6705d498f4 d8402f2ff0 0e7985a7de
1b886f20a7 b795fc4a28 731f989135 319818e9fd 0028e826c5 fc9650d1f3 0eb551f658 f27415c223
83ddb66d8a fdc5858caf d23a2a0128 fe2d7f3089 25b71cfa10 acbcb4f093 bfefde2248 f9bae7e537
55761919e4 d054db55e6 f2a07762df cb9df156a6 053b80c70f cee6a91d69 5bd5f23579 a7bf3f5262
765ef56f34 277718fa8c 84be95c634 a408f780c1 b850f72628 0342e7f91f a7199a03ab 5a49aee6b3
08781845fe 98d88943e3 b47de6e919 9d0be7b613 f2bdec9ecd 83d94cfe71 8447b1d52b ba9e463c65
f62606c825 25b8c5e4b0 00b732a566 cea26a9813 3b1f5b2d5a 400c224efe d341882e13 588ee0fd3f
8e00c1956d 4be9824dc9 3cb96be239 04e8aeac6d 742c35d17c 3c5987e2b4 2193a19631 8653cbd0aa
bfd3fcd3af 5be2d785ae 9af1aa306f 22941ec612 bbeb420157 a63d23c4c2 c511014205 c84ea77f76
91f9753405 8a33436b88 75ea264543 a2c2e1ff6c 94ea5e0d8e 6b6553c49c 1b7de4bffa 3eb7232a9b
7bc923e54b eec1f4092c 18ed841f2a 7fbea74c23 bf260384b9 7d2802b80c 0ca42c89e7 b6c34d6cec
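The commit list above and the per-file diffs below can also be produced from a local checkout of the Mesa repository. The commands are only a sketch: the remote name "origin" is an assumption, and <old-ref> and <new-ref> stand for the two refs being compared (shown truncated above as "mesa-22.0." and "mesa-21.2.").

    # Fetch both refs being compared (remote and ref names are assumptions).
    git fetch origin
    # Commits reachable from <new-ref> but not from <old-ref>: the "128 Commits" list.
    git log --oneline <old-ref>..<new-ref>
    # Per-file changes between the two refs, as in the file diffs that follow.
    git diff <old-ref>...<new-ref>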
.gitattributes (vendored, 6 lines changed)
@@ -1,6 +0,0 @@
*.csv eol=crlf
* text=auto
*.jpg binary
*.png binary
*.gif binary
*.ico binary
.gitlab-ci.yml (313 lines changed)
@@ -3,9 +3,7 @@ variables:
MESA_TEMPLATES_COMMIT: &ci-templates-commit 290b79e0e78eab67a83766f4e9691be554fc4afd
CI_PRE_CLONE_SCRIPT: |-
set -o xtrace
wget -q -O download-git-cache.sh ${CI_PROJECT_URL}/-/raw/${CI_COMMIT_SHA}/.gitlab-ci/download-git-cache.sh
bash download-git-cache.sh
rm download-git-cache.sh
/usr/bin/wget -q -O- ${CI_PROJECT_URL}/-/raw/${CI_COMMIT_SHA}/.gitlab-ci/download-git-cache.sh | bash -
set +o xtrace
MINIO_HOST: minio-packet.freedesktop.org
# per-pipeline artifact storage on MinIO
@@ -14,29 +12,10 @@ variables:
JOB_ARTIFACTS_BASE: ${PIPELINE_ARTIFACTS_BASE}/${CI_JOB_ID}
# reference images stored for traces
PIGLIT_REPLAY_REFERENCE_IMAGES_BASE: "${MINIO_HOST}/mesa-tracie-results/$FDO_UPSTREAM_REPO"
# Individual CI farm status, set to "offline" to disable jobs
# running on a particular CI farm (ie. for outages, etc):
FD_FARM: "online"

default:
before_script:
- echo -e "\e[0Ksection_start:$(date +%s):unset_env_vars_section[collapsed=true]\r\e[0KUnsetting vulnerable environment variables"
- export CI_JOB_JWT_FILE="${CI_JOB_JWT_FILE:-$(mktemp)}"
- echo -n "${CI_JOB_JWT}" > "${CI_JOB_JWT_FILE}"
- unset CI_JOB_JWT
- echo -e "\e[0Ksection_end:$(date +%s):unset_env_vars_section\r\e[0K"

after_script:
- >
set +x

test -e "${CI_JOB_JWT_FILE}" &&
export CI_JOB_JWT="$(<${CI_JOB_JWT_FILE})" &&
rm "${CI_JOB_JWT_FILE}"

include:
- project: 'freedesktop/ci-templates'
ref: 34f4ade99434043f88e164933f570301fd18b125
ref: 79c325922670137e8f0a4dc5f6f097e0eb57c1af
file:
- '/templates/ci-fairy.yml'
- project: 'freedesktop/ci-templates'
@@ -44,31 +23,26 @@ include:
file:
- '/templates/debian.yml'
- '/templates/fedora.yml'
- local: '.gitlab-ci/image-tags.yml'
- local: '.gitlab-ci/lava/lava-gitlab-ci.yml'
- local: '.gitlab-ci/test-source-dep.yml'
- local: 'src/amd/ci/gitlab-ci.yml'
- local: 'src/broadcom/ci/gitlab-ci.yml'
- local: 'src/etnaviv/ci/gitlab-ci.yml'
- local: 'src/freedreno/ci/gitlab-ci.yml'
- local: 'src/gallium/drivers/crocus/ci/gitlab-ci.yml'
- local: 'src/gallium/drivers/d3d12/ci/gitlab-ci.yml'
- local: 'src/gallium/drivers/i915/ci/gitlab-ci.yml'
- local: 'src/gallium/drivers/iris/ci/gitlab-ci.yml'
- local: 'src/gallium/drivers/lima/ci/gitlab-ci.yml'
- local: 'src/gallium/drivers/llvmpipe/ci/gitlab-ci.yml'
- local: 'src/gallium/drivers/panfrost/ci/gitlab-ci.yml'
- local: 'src/gallium/drivers/radeonsi/ci/gitlab-ci.yml'
- local: 'src/gallium/drivers/softpipe/ci/gitlab-ci.yml'
- local: 'src/gallium/drivers/virgl/ci/gitlab-ci.yml'
- local: 'src/gallium/drivers/zink/ci/gitlab-ci.yml'
- local: 'src/gallium/frontends/lavapipe/ci/gitlab-ci.yml'
- local: 'src/intel/ci/gitlab-ci.yml'
- local: 'src/microsoft/ci/gitlab-ci.yml'
- local: 'src/panfrost/ci/gitlab-ci.yml'

stages:
- sanity
- container
- container-2
- git-archive
- build-x86_64
- build-misc
@@ -77,10 +51,10 @@ stages:
- arm
- broadcom
- freedreno
- etnaviv
- software-renderer
- layered-backends
- deploy
- success

# Generic rule to not run the job during scheduled pipelines
# ----------------------------------------------------------
@@ -203,6 +177,25 @@ test-docs-mr:
when:
- runner_system_failure

success:
stage: success
image: debian:stable-slim
rules:
- *ignore_scheduled_pipelines
- if: *is-post-merge
when: never
- if: *is-for-marge
changes: *docs-or-ci
when: never
- changes: *all_paths
when: never
- if: *is-pre-merge
when: on_success
variables:
GIT_STRATEGY: none
script:
- echo "Dummy job to make sure every merge request pipeline runs at least one job"

.ci-deqp-artifacts:
artifacts:
@@ -229,10 +222,8 @@ test-docs-mr:
FDO_DISTRIBUTION_TAG: "${MESA_IMAGE_TAG}--${MESA_BASE_TAG}--${MESA_TEMPLATES_COMMIT}"

.set-image:
extends:
- .incorporate-templates-commit
variables:
MESA_IMAGE: "$CI_REGISTRY_IMAGE/${MESA_IMAGE_PATH}:${FDO_DISTRIBUTION_TAG}"
MESA_IMAGE: "$CI_REGISTRY_IMAGE/${MESA_IMAGE_PATH}:${MESA_IMAGE_TAG}--${MESA_TEMPLATES_COMMIT}"
image: "$MESA_IMAGE"

.set-image-base-tag:
@@ -240,7 +231,7 @@ test-docs-mr:
- .set-image
- .incorporate-base-tag+templates-commit
variables:
MESA_IMAGE: "$CI_REGISTRY_IMAGE/${MESA_IMAGE_PATH}:${FDO_DISTRIBUTION_TAG}"
MESA_IMAGE: "$CI_REGISTRY_IMAGE/${MESA_IMAGE_PATH}:${MESA_IMAGE_TAG}--${MESA_BASE_TAG}--${MESA_TEMPLATES_COMMIT}"

# Build the CI docker images.
@@ -306,6 +297,7 @@ test-docs-mr:
- .incorporate-base-tag+templates-commit
# Don't want the .container rules
- .ci-run-policy
stage: container-2

# Debian 11 based x86 build image base
debian/x86_build-base:
@@ -313,14 +305,14 @@ debian/x86_build-base:
- .fdo.container-build@debian
- .container
variables:
MESA_IMAGE_TAG: &debian-x86_build-base ${DEBIAN_BASE_TAG}
MESA_IMAGE_TAG: &debian-x86_build-base "2021-07-02-bump-libdrm"

.use-debian/x86_build-base:
extends:
- .fdo.container-build@debian
- .use-base-image
variables:
MESA_BASE_IMAGE: ${DEBIAN_X86_BUILD_BASE_IMAGE}
MESA_BASE_IMAGE: "debian/x86_build-base"
MESA_BASE_TAG: *debian-x86_build-base
MESA_ARTIFACTS_BASE_TAG: *debian-x86_build-base
needs:
@@ -331,14 +323,14 @@ debian/x86_build:
extends:
- .use-debian/x86_build-base
variables:
MESA_IMAGE_TAG: &debian-x86_build ${DEBIAN_BUILD_TAG}
MESA_IMAGE_TAG: &debian-x86_build "2021-07-02-bump-libdrm"

.use-debian/x86_build:
extends:
- .set-image-base-tag
variables:
MESA_BASE_TAG: *debian-x86_build-base
MESA_IMAGE_PATH: ${DEBIAN_X86_BUILD_IMAGE_PATH}
MESA_IMAGE_PATH: "debian/x86_build"
MESA_IMAGE_TAG: *debian-x86_build
needs:
- debian/x86_build
@@ -348,7 +340,7 @@ debian/i386_build:
extends:
- .use-debian/x86_build-base
variables:
MESA_IMAGE_TAG: &debian-i386_build ${DEBIAN_BUILD_TAG}
MESA_IMAGE_TAG: &debian-i386_build "2021-07-02-bump-libdrm"

.use-debian/i386_build:
extends:
@@ -365,7 +357,7 @@ debian/ppc64el_build:
extends:
- .use-debian/x86_build-base
variables:
MESA_IMAGE_TAG: &debian-ppc64el_build ${DEBIAN_BUILD_TAG}
MESA_IMAGE_TAG: &debian-ppc64el_build "2021-07-02-bump-libdrm"

.use-debian/ppc64el_build:
extends:
@@ -382,7 +374,7 @@ debian/s390x_build:
extends:
- .use-debian/x86_build-base
variables:
MESA_IMAGE_TAG: &debian-s390x_build ${DEBIAN_BUILD_TAG}
MESA_IMAGE_TAG: &debian-s390x_build "2021-07-02-bump-libdrm"

.use-debian/s390x_build:
extends:
@@ -399,7 +391,7 @@ debian/android_build:
extends:
- .use-debian/x86_build-base
variables:
MESA_IMAGE_TAG: &debian-android_build ${DEBIAN_BUILD_TAG}
MESA_IMAGE_TAG: &debian-android_build "2021-07-02-bump-libdrm"

.use-debian/android_build:
extends:
@@ -415,14 +407,14 @@ debian/android_build:
debian/x86_test-base:
extends: debian/x86_build-base
variables:
MESA_IMAGE_TAG: &debian-x86_test-base ${DEBIAN_BASE_TAG}
MESA_IMAGE_TAG: &debian-x86_test-base "2021-07-02-bump-libdrm"

.use-debian/x86_test-base:
extends:
- .fdo.container-build@debian
- .use-base-image
variables:
MESA_BASE_IMAGE: ${DEBIAN_X86_TEST_BASE_IMAGE}
MESA_BASE_IMAGE: "debian/x86_test-base"
MESA_BASE_TAG: *debian-x86_test-base
needs:
- debian/x86_test-base
@@ -432,14 +424,14 @@ debian/x86_test-gl:
extends: .use-debian/x86_test-base
variables:
FDO_DISTRIBUTION_EXEC: 'env KERNEL_URL=${KERNEL_URL} FDO_CI_CONCURRENT=${FDO_CI_CONCURRENT} bash .gitlab-ci/container/${CI_JOB_NAME}.sh'
KERNEL_URL: &kernel-rootfs-url "https://gitlab.freedesktop.org/gfx-ci/linux/-/archive/v5.16-for-mesa-ci-bd8b344492bfd/linux-v5.16-for-mesa-ci-bd8b344492bfd.tar.bz2"
MESA_IMAGE_TAG: &debian-x86_test-gl ${DEBIAN_X86_TEST_GL_TAG}
KERNEL_URL: &kernel-rootfs-url "https://gitlab.freedesktop.org/gfx-ci/linux/-/archive/v5.13-rc5-for-mesa-ci-27df41f1e0cf/linux-v5.13-rc5-for-mesa-ci-27df41f1e0cf.tar.bz2"
MESA_IMAGE_TAG: &debian-x86_test-gl "2021-07-12-dmabuf"

# Debian 11 based x86 test image for VK
debian/x86_test-vk:
extends: .use-debian/x86_test-base
variables:
MESA_IMAGE_TAG: &debian-x86_test-vk ${DEBIAN_X86_TEST_VK_TAG}
MESA_IMAGE_TAG: &debian-x86_test-vk "2021-07-02-bump-libdrm"

# Debian 11 based ARM build image
debian/arm_build:
@@ -449,7 +441,7 @@ debian/arm_build:
tags:
- aarch64
variables:
MESA_IMAGE_TAG: &debian-arm_build ${DEBIAN_BASE_TAG}
MESA_IMAGE_TAG: &debian-arm_build "2021-07-02-bump-libdrm"

.use-debian/arm_build:
extends:
@@ -469,7 +461,7 @@ fedora/x86_build:
- .container
variables:
FDO_DISTRIBUTION_VERSION: 34
MESA_IMAGE_TAG: &fedora-x86_build ${FEDORA_X86_BUILD_TAG}
MESA_IMAGE_TAG: &fedora-x86_build "2021-07-08-bump-libdrm"

.use-fedora/x86_build:
extends:
@@ -484,11 +476,11 @@ fedora/x86_build:
.kernel+rootfs:
extends:
- .ci-run-policy
stage: container
stage: container-2
variables:
GIT_STRATEGY: fetch
KERNEL_URL: *kernel-rootfs-url
MESA_ROOTFS_TAG: &kernel-rootfs ${KERNEL_ROOTFS_TAG}
MESA_ROOTFS_TAG: &kernel-rootfs "2021-07-12-dmabuf"
DISTRIBUTION_TAG: &distribution-tag-arm "${MESA_ROOTFS_TAG}--${MESA_ARTIFACTS_TAG}--${MESA_TEMPLATES_COMMIT}"
script:
- .gitlab-ci/container/lava_build.sh
@@ -535,6 +527,7 @@ debian/arm_test:
- .container
# Don't want the .container rules
- .ci-run-policy
stage: build-misc
needs:
- kernel+rootfs_arm64
- kernel+rootfs_armhf
@@ -542,7 +535,7 @@ debian/arm_test:
FDO_DISTRIBUTION_EXEC: 'env ARTIFACTS_PREFIX=https://${MINIO_HOST}/mesa-lava ARTIFACTS_SUFFIX=${MESA_ROOTFS_TAG}--${MESA_ARM_BUILD_TAG}--${MESA_TEMPLATES_COMMIT} CI_PROJECT_PATH=${CI_PROJECT_PATH} FDO_CI_CONCURRENT=${FDO_CI_CONCURRENT} FDO_UPSTREAM_REPO=${FDO_UPSTREAM_REPO} bash .gitlab-ci/container/${CI_JOB_NAME}.sh'
FDO_DISTRIBUTION_TAG: "${MESA_IMAGE_TAG}--${MESA_ROOTFS_TAG}--${MESA_ARM_BUILD_TAG}--${MESA_TEMPLATES_COMMIT}"
MESA_ARM_BUILD_TAG: *debian-arm_build
MESA_IMAGE_TAG: &debian-arm_test ${DEBIAN_BASE_TAG}
MESA_IMAGE_TAG: &debian-arm_test "2021-07-02-bump-libdrm"
MESA_ROOTFS_TAG: *kernel-rootfs

.use-debian/arm_test:
@@ -568,78 +561,36 @@ debian/arm_test:
# then in the build stage by executing inside Docker.

.windows-docker-vs2019:
extends:
- .set-image
variables:
MESA_IMAGE: "$CI_REGISTRY_IMAGE/${MESA_IMAGE_PATH}:${MESA_IMAGE_TAG}"
MESA_IMAGE_PATH: "windows/x64_build"
MESA_IMAGE_TAG: "2021-06-24-zink-msvc"
MESA_UPSTREAM_IMAGE: "$CI_REGISTRY/$FDO_UPSTREAM_REPO/$MESA_IMAGE_PATH:${MESA_IMAGE_TAG}"

.windows_container_build:
inherit:
default: false
windows_build_vs2019:
extends:
- .container
- .windows-docker-vs2019
stage: container
variables:
GIT_STRATEGY: fetch # we do actually need the full repository though
MESA_BASE_IMAGE: None
timeout: 2h 30m # LLVM + piglit takes ages
tags:
- windows
- shell
- "1809"
- mesa
script:
- .\.gitlab-ci\windows\mesa_container.ps1 $CI_REGISTRY $CI_REGISTRY_USER $CI_REGISTRY_PASSWORD $MESA_IMAGE $MESA_UPSTREAM_IMAGE ${DOCKERFILE} ${MESA_BASE_IMAGE}

windows_build_vs2019:
inherit:
default: false
extends:
- .windows_container_build
variables:
MESA_IMAGE_PATH: &windows_build_image_path ${WINDOWS_X64_BUILD_PATH}
MESA_IMAGE_TAG: &windows_build_image_tag ${WINDOWS_X64_BUILD_TAG}
DOCKERFILE: Dockerfile_build
timeout: 2h 30m # LLVM takes ages

windows_test_vs2019:
inherit:
default: false
extends:
- .windows_container_build
# Don't want the .container rules
- .ci-run-policy
variables:
MESA_IMAGE_PATH: &windows_test_image_path ${WINDOWS_X64_TEST_PATH}
MESA_IMAGE_TAG: &windows_test_image_tag ${WINDOWS_X64_BUILD_TAG}--${WINDOWS_X64_TEST_TAG}
DOCKERFILE: Dockerfile_test
# Right now this only needs the VS install to get DXIL.dll. Maybe see about decoupling this at some point
MESA_BASE_IMAGE_PATH: *windows_build_image_path
MESA_BASE_IMAGE_TAG: *windows_build_image_tag
MESA_BASE_IMAGE: "$CI_REGISTRY_IMAGE/${MESA_BASE_IMAGE_PATH}:${MESA_BASE_IMAGE_TAG}"
script:
- .\.gitlab-ci\windows\mesa_container.ps1 $CI_REGISTRY $CI_REGISTRY_USER $CI_REGISTRY_PASSWORD $MESA_IMAGE $MESA_UPSTREAM_IMAGE Dockerfile_test ${MESA_BASE_IMAGE}
needs:
- windows_build_vs2019
- .\.gitlab-ci\windows\mesa_container.ps1 $CI_REGISTRY $CI_REGISTRY_USER $CI_REGISTRY_PASSWORD $MESA_IMAGE $MESA_UPSTREAM_IMAGE

.use-windows_build_vs2019:
inherit:
default: false
extends: .windows-docker-vs2019
image: "$MESA_IMAGE"
variables:
MESA_IMAGE_PATH: *windows_build_image_path
MESA_IMAGE_TAG: *windows_build_image_tag
needs:
- windows_build_vs2019

.use-windows_test_vs2019:
inherit:
default: false
extends: .windows-docker-vs2019
image: "$MESA_IMAGE"
variables:
MESA_IMAGE_PATH: *windows_test_image_path
MESA_IMAGE_TAG: *windows_test_image_tag

# Git archive

@@ -659,8 +610,8 @@ make git archive:
# compress the current folder
- tar -cvzf ../$CI_PROJECT_NAME.tar.gz .

# login with the JWT token file
- ci-fairy minio login --token-file "${CI_JOB_JWT_FILE}"
# login with the JWT token
- ci-fairy minio login $CI_JOB_JWT
- ci-fairy minio cp ../$CI_PROJECT_NAME.tar.gz minio://$MINIO_HOST/git-cache/$CI_PROJECT_NAMESPACE/$CI_PROJECT_NAME/$CI_PROJECT_NAME.tar.gz

@@ -708,13 +659,11 @@ sanity:
CCACHE_DIR: /cache/mesa/ccache
# Use ccache transparently, and print stats before/after
before_script:
- !reference [default, before_script]
- export PATH="/usr/lib/ccache:$PATH"
- export CCACHE_BASEDIR="$PWD"
- ccache --show-stats
after_script:
- ccache --show-stats
- !reference [default, after_script]

.build-windows:
extends: .build-common
@@ -751,9 +700,8 @@ debian-testing:
-D platforms=x11
GALLIUM_ST: >
-D dri3=enabled
-D gallium-va=enabled
GALLIUM_DRIVERS: "swrast,virgl,radeonsi,zink,crocus,iris,i915"
VULKAN_DRIVERS: "swrast,amd,intel"
GALLIUM_DRIVERS: "swrast,virgl,radeonsi,zink,iris,i915"
VULKAN_DRIVERS: "swrast,amd"
BUILDTYPE: "debugoptimized"
EXTRA_OPTION: >
-D valgrind=false
@@ -771,28 +719,9 @@ debian-testing-asan:
EXTRA_OPTION: >
-D b_sanitize=address
-D valgrind=false
-D tools=dlclose-skip
MINIO_ARTIFACT_NAME: ""
ARTIFACTS_DEBUG_SYMBOLS: 1

debian-testing-msan:
extends:
- debian-clang
variables:
# l_undef is incompatible with msan
EXTRA_OPTION:
-D b_sanitize=memory
-D b_lundef=false
MINIO_ARTIFACT_NAME: ""
ARTIFACTS_DEBUG_SYMBOLS: 1
# Don't run all the tests yet:
# GLSL has some issues in sexpression reading.
# gtest has issues in its test initialization.
MESON_TEST_ARGS: "--suite glcpp --suite gallium --suite format"
# Freedreno dropped because freedreno tools fail at msan.
GALLIUM_DRIVERS: "iris,nouveau,kmsro,r300,r600,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,radeonsi,tegra,d3d12,crocus"
VULKAN_DRIVERS: intel,amd,broadcom,virtio-experimental

debian-clover-testing:
extends:
- .meson-build
@@ -833,7 +762,7 @@ debian-gallium:
-D gallium-xa=enabled
-D gallium-nine=true
-D gallium-opencl=disabled
GALLIUM_DRIVERS: "iris,nouveau,kmsro,r300,r600,freedreno,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,d3d12,asahi,crocus"
GALLIUM_DRIVERS: "iris,nouveau,kmsro,r300,r600,freedreno,swr,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,d3d12,asahi,crocus"
VULKAN_DRIVERS: swrast
EXTRA_OPTION: >
-D osmesa=true
@@ -841,6 +770,7 @@ debian-gallium:
script:
- .gitlab-ci/meson/build.sh
- .gitlab-ci/run-shader-db.sh
- src/freedreno/.gitlab-ci/run-fdtools.sh

# Test a release build with -Werror so new warnings don't sneak in.
debian-release:
@@ -864,7 +794,6 @@ debian-release:
-D gallium-opencl=disabled
-D llvm=false
GALLIUM_DRIVERS: "i915,iris,nouveau,kmsro,freedreno,r300,svga,swrast,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,d3d12,crocus"
VULKAN_DRIVERS: "amd"
BUILDTYPE: "release"
EXTRA_OPTION: >
-D osmesa=true
@@ -885,6 +814,7 @@ fedora-release:
-Wno-error=uninitialized
CPP_ARGS: >
-Wno-error=array-bounds
DRI_DRIVERS: "nouveau,i915,i965,r100,r200"
DRI_LOADERS: >
-D glx=dri
-D gbm=enabled
@@ -925,15 +855,20 @@ debian-android:
variables:
UNWIND: "disabled"
C_ARGS: >
-Wno-error=absolute-value
-Wno-error=asm-operand-widths
-Wno-error=constant-conversion
-Wno-error=enum-conversion
-Wno-error=extern-initializer
-Wno-error=initializer-overrides
-Wno-error=missing-braces
-Wno-error=sometimes-uninitialized
-Wno-error=tautological-constant-out-of-range-compare
-Wno-error=unused-function
-Wno-error=unused-label
CPP_ARGS: >
-Wno-error=deprecated-declarations
-Wno-error=non-virtual-dtor
DRI_LOADERS: >
-D glx=disabled
-D gbm=disabled
@@ -959,10 +894,9 @@ debian-android:
- PKG_CONFIG_PATH=/usr/local/lib/aarch64-linux-android/pkgconfig/:/android-ndk-r21d/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/lib/aarch64-linux-android/pkgconfig/ CROSS=aarch64-linux-android GALLIUM_DRIVERS=etnaviv,freedreno,lima,panfrost,vc4,v3d VULKAN_DRIVERS=freedreno,broadcom,virtio-experimental .gitlab-ci/meson/build.sh
# x86_64 build:
# Can't do Intel because gen_decoder.c currently requires libexpat, which
# is not a dependency that AOSP wants to accept. Can't do Radeon Gallium
# drivers because they requires LLVM, which we don't have an Android build
# of.
- PKG_CONFIG_PATH=/usr/local/lib/x86_64-linux-android/pkgconfig/:/android-ndk-r21d/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/lib/x86_64-linux-android/pkgconfig/ CROSS=x86_64-linux-android GALLIUM_DRIVERS=iris VULKAN_DRIVERS=amd,intel .gitlab-ci/meson/build.sh
# is not a dependency that AOSP wants to accept. Can't do Radeon because
# it requires LLVM, which we don't have an Android build of.
# - PKG_CONFIG_PATH=/usr/local/lib/x86_64-linux-android/pkgconfig/ CROSS=x86_64-linux-android GALLIUM_DRIVERS=iris VULKAN_DRIVERS=intel .gitlab-ci/meson/build.sh

.meson-cross:
extends:
@@ -1017,7 +951,7 @@ debian-arm64:
- .meson-arm
- .ci-deqp-artifacts
variables:
VULKAN_DRIVERS: "freedreno,broadcom,panfrost"
VULKAN_DRIVERS: "freedreno,broadcom"
EXTRA_OPTION: >
-D llvm=disabled
-D valgrind=false
@@ -1036,7 +970,6 @@ debian-arm64-asan:
-D llvm=disabled
-D b_sanitize=address
-D valgrind=false
-D tools=dlclose-skip
ARTIFACTS_DEBUG_SYMBOLS: 1
MINIO_ARTIFACT_NAME: mesa-arm64-asan
MESON_TEST_ARGS: "--no-suite mesa:compiler"
@@ -1057,25 +990,46 @@ debian-clang:
variables:
UNWIND: "enabled"
C_ARGS: >
-Wno-error=absolute-value
-Wno-error=constant-conversion
-Wno-error=constant-logical-operand
-Wno-error=enum-conversion
-Wno-error=gnu-variable-sized-type-not-at-end
-Wno-error=implicit-const-int-float-conversion
-Wno-error=initializer-overrides
-Wno-error=sometimes-uninitialized
-Wno-error=unused-function
CPP_ARGS: >
-Wno-error=c99-designator
-Wno-error=deprecated-declarations
-Wno-error=implicit-const-int-float-conversion
-Wno-error=misleading-indentation
-Wno-error=mismatched-tags
-Wno-error=missing-braces
-Wno-error=overloaded-virtual
-Wno-error=self-assign
-Wno-error=sometimes-uninitialized
-Wno-error=tautological-constant-compare
-Wno-error=tautological-constant-out-of-range-compare
-Wno-error=tautological-overlap-compare
-Wno-error=typedef-redefinition
-Wno-error=unused-function
-Wno-error=unused-private-field
-Wno-error=xor-used-as-pow
CPP_ARGS: >
-Wno-error=c99-designator
-Wno-error=constant-logical-operand
-Wno-error=deprecated-declarations
-Wno-error=implicit-const-int-float-conversion
-Wno-error=mismatched-tags
-Wno-error=missing-braces
-Wno-error=overloaded-virtual
-Wno-error=self-assign
-Wno-error=sometimes-uninitialized
-Wno-error=tautological-constant-out-of-range-compare
-Wno-error=unused-const-variable
-Wno-error=unused-private-field
-Wno-error=unused-value
-Wno-error=unused-variable
DRI_LOADERS: >
-D glvnd=true
GALLIUM_DRIVERS: "iris,nouveau,kmsro,r300,r600,freedreno,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,radeonsi,tegra,d3d12,crocus,i915,asahi"
VULKAN_DRIVERS: intel,amd,freedreno,broadcom,virtio-experimental,swrast,panfrost
DRI_DRIVERS: "auto"
GALLIUM_DRIVERS: "iris,nouveau,kmsro,r300,r600,freedreno,swr,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,radeonsi,tegra,d3d12,crocus"
VULKAN_DRIVERS: intel,amd,freedreno,broadcom,virtio-experimental
CC: clang
CXX: clang++

@@ -1092,6 +1046,29 @@ windows-vs2019:
- _build/meson-logs/*.txt
- _install/

test-d3d12-windows:
extends:
- .build-windows
- .use-windows_build_vs2019
- .windows-test-rules
stage: layered-backends
dependencies:
- windows-vs2019
needs:
- windows-vs2019
variables:
GIT_STRATEGY: none # testing doesn't build anything from source
GALLIUM_DRIVER: d3d12
PIGLIT_PROFILE: quick_gl
PIGLIT_TESTS: -x nv_copy_depth_to_color -x repeat-wait -x arb_timer_query@timestamp-get
script:
- . _install/piglit_run.ps1
artifacts:
when: on_failure
name: "mesa_${CI_JOB_NAME}"
paths:
- summary/

debian-clover:
extends: .meson-build
variables:
@@ -1152,6 +1129,7 @@ debian-i386:
CROSS: i386
VULKAN_DRIVERS: intel,amd,swrast,virtio-experimental
GALLIUM_DRIVERS: "iris,nouveau,r300,r600,radeonsi,swrast,virgl,zink,crocus"
DRI_DRIVERS: "i915,i965,r100,r200,nouveau"
EXTRA_OPTION: >
-D vulkan-layers=device-select,overlay

@@ -1188,6 +1166,7 @@ debian-mingw32-x86_64:
-Wno-error=format
-Wno-error=format-extra-args
CPP_ARGS: $C_ARGS
DRI_DRIVERS: ""
GALLIUM_DRIVERS: "swrast"
EXTRA_OPTION: >
-Dllvm=disabled
@@ -1203,7 +1182,6 @@ debian-mingw32-x86_64:
variables:
GIT_STRATEGY: none # testing doesn't build anything from source
before_script:
- !reference [default, before_script]
# Note: Build dir (and thus install) may be dirty due to GIT_STRATEGY
- rm -rf install
- tar -xf artifacts/install.tar
@@ -1220,7 +1198,7 @@ debian-mingw32-x86_64:
- .set-image-base-tag
variables:
MESA_BASE_TAG: *debian-x86_test-base
MESA_IMAGE_PATH: ${DEBIAN_X86_TEST_IMAGE_PATH}
MESA_IMAGE_PATH: "debian/x86_test-gl"
MESA_IMAGE_TAG: *debian-x86_test-gl
needs:
- debian/x86_test-gl
@@ -1262,16 +1240,15 @@ debian-mingw32-x86_64:

.piglit-test:
artifacts:
when: on_failure
name: "mesa_${CI_JOB_NAME}"
paths:
- results
reports:
junit: results/junit.xml
- results/summary/
- results/*.txt
variables:
PIGLIT_NO_WINDOW: 1
HWCI_TEST_SCRIPT: "/install/piglit/piglit-runner.sh"
script:
- install/piglit/piglit-runner.sh
- install/piglit/run.sh

.piglit-traces-test:
extends:
@@ -1285,15 +1262,11 @@ debian-mingw32-x86_64:
name: "mesa_${CI_JOB_NAME}"
reports:
junit: results/junit.xml
paths:
- results/summary/
- results/*.txt
variables:
CI_JOB_JWT_FILE: /minio_jwt # JWT file on piglit jobs should be available on / partition
PIGLIT_REPLAY_EXTRA_ARGS: --keep-image --db-path ${CI_PROJECT_DIR}/replayer-db/ --minio_host=minio-packet.freedesktop.org --minio_bucket=mesa-tracie-public --role-session-name=${CI_PROJECT_PATH}:${CI_JOB_ID} --jwt-file=${CI_JOB_JWT_FILE}
CI_JOB_JWT_FILE: /minio_jwt # JWT file on piglit jobs should be available on / partition
script:
- install/piglit/piglit-traces.sh
PIGLIT_PROFILES: replay
PIGLIT_REPLAY_UPLOAD_TO_MINIO: 1
PIGLIT_REPLAY_EXTRA_ARGS: --keep-image --db-path ${CI_PROJECT_DIR}/replayer-db/
PIGLIT_JUNIT_RESULTS: 1

.deqp-test:
script:
@@ -1334,7 +1307,6 @@ debian-mingw32-x86_64:
- when: never
variables:
GIT_STRATEGY: none
JOB_TIMEOUT: 80

# The above .test-manual rules doesn't allow the jobs to be available for MRs
# but we should have an option to have manual jobs in MRs as well.
@@ -1348,7 +1320,6 @@ debian-mingw32-x86_64:
- when: never
variables:
GIT_STRATEGY: none
JOB_TIMEOUT: 80

.baremetal-test:
extends:
@@ -1358,7 +1329,6 @@ debian-mingw32-x86_64:
interruptible: true
stage: test
before_script:
- !reference [default, before_script]
# Use this instead of gitlab's artifacts download because it hits packet.net
# instead of fd.o. Set FDO_HTTP_CACHE_URI to an http cache for your test lab to
# improve it even more (see https://docs.mesa3d.org/ci/bare-metal.html for
@@ -1379,19 +1349,11 @@ debian-mingw32-x86_64:
extends:
- .baremetal-test
variables:
BM_ROOTFS: /rootfs-armhf
MINIO_ARTIFACT_NAME: mesa-armhf

.baremetal-test-arm64:
extends:
- .baremetal-test
variables:
BM_ROOTFS: /rootfs-arm64
MINIO_ARTIFACT_NAME: mesa-arm64

.baremetal-arm64-asan-test:
variables:
DEQP_RUNNER_OPTIONS: "--env LD_PRELOAD=libasan.so.6:/install/lib/libdlclose-skip.so"
TEST_LD_PRELOAD: libasan.so.6
MINIO_ARTIFACT_NAME: mesa-arm64-asan
needs:
- debian/arm_test
@@ -1401,9 +1363,4 @@ debian-mingw32-x86_64:
.baremetal-deqp-test:
variables:
HWCI_TEST_SCRIPT: "/install/deqp-runner.sh"
FDO_CI_CONCURRENT: 0 # Default to number of CPUs

.baremetal-skqp-test:
variables:
HWCI_START_XORG: 1
HWCI_TEST_SCRIPT: "/install/skqp-runner.sh"
DEQP_PARALLEL: 0 # Default to number of CPUs
|
@@ -10,7 +10,7 @@ EXIT=0
|
||||
# Run reset tests without parallelism:
|
||||
if ! env \
|
||||
DEQP_RESULTS_DIR=results/reset \
|
||||
FDO_CI_CONCURRENT=1 \
|
||||
DEQP_PARALLEL=1 \
|
||||
DEQP_CASELIST_FILTER='.*reset.*' \
|
||||
/install/deqp-runner.sh; then
|
||||
EXIT=1
|
||||
|
.gitlab-ci/bare-metal/arm64_a630_gles_others.sh (new executable file, 81 lines)
@@ -0,0 +1,81 @@
#!/bin/sh

# This test script groups together a bunch of fast dEQP variant runs
# to amortize the cost of rebooting the board.

set -ex

EXIT=0

# Test rendering with the gmem path forced when possible (~1 minute)
if ! env \
DEQP_RESULTS_DIR=results/gmem \
DEQP_VER=gles31 \
DEQP_FRACTION=5 \
FD_MESA_DEBUG=nobypass \
/install/deqp-runner.sh; then
EXIT=1
fi

# Test rendering with the bypass path forced when possible (~1 minute)
if ! env \
DEQP_RESULTS_DIR=results/bypass \
DEQP_VER=gles31 \
DEQP_FRACTION=5 \
FD_MESA_DEBUG=nogmem \
GPU_VERSION=freedreno-a630-bypass \
/install/deqp-runner.sh; then
EXIT=1
fi

# Test rendering with the UBO-to-constants optimization disabled (~1 minute)
if ! env \
DEQP_RESULTS_DIR=results/nouboopt \
DEQP_VER=gles31 \
IR3_SHADER_DEBUG=nouboopt \
DEQP_CASELIST_FILTER="functional.*ubo" \
/install/deqp-runner.sh; then
EXIT=1
fi

# gles3-565nozs mustpass (~20s)
if ! env \
DEQP_RESULTS_DIR=results/gles3-565nozs \
DEQP_VER=gles3 \
DEQP_CONFIG="rgb565d0s0ms0" \
DEQP_VARIANT="565-no-depth-no-stencil" \
/install/deqp-runner.sh; then
EXIT=1
fi

# gles31-565nozs mustpass (~1s)
if ! env \
DEQP_RESULTS_DIR=results/gles31-565nozs \
DEQP_VER=gles31 \
DEQP_CONFIG="rgb565d0s0ms0" \
DEQP_VARIANT="565-no-depth-no-stencil" \
/install/deqp-runner.sh; then
EXIT=1
fi

# gles3-multisample mustpass -- disabled pending https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/1859
# if ! env \
#  DEQP_RESULTS_DIR=results/gles3-multisample \
#  DEQP_VER=gles3 \
#  DEQP_CONFIG="rgba8888d24s8ms4" \
#  DEQP_VARIANT="multisample" \
#  /install/deqp-runner.sh; then
#  EXIT=1
# fi

# gles31-multisample mustpass -- disabled pending https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/1859
# if ! env \
#  DEQP_RESULTS_DIR=results/gles31-multisample \
#  DEQP_VER=gles31 \
#  DEQP_CONFIG="rgba8888d24s8ms4" \
#  DEQP_VARIANT="multisample" \
#  /install/deqp-runner.sh; then
#  EXIT=1
# fi

exit $EXIT
@@ -50,18 +50,12 @@ class CrosServoRun:
target=self.iter_feed_queue, daemon=True, args=(self.cpu_ser.lines(),))
self.iter_feed_cpu.start()

def close(self):
self.ec_ser.close()
self.cpu_ser.close()
self.iter_feed_ec.join()
self.iter_feed_cpu.join()

# Feed lines from our serial queues into the merged queue, marking when our
# input is done.
def iter_feed_queue(self, it):
for i in it:
self.serial_queue.put(i)
self.serial_queue.put(self.sentinel)
self.serial_queue.put(sentinel)

# Return the next line from the queue, counting how many threads have
# terminated and joining when done
@@ -132,12 +126,6 @@ class CrosServoRun:
self.print_error("Detected cheza power management bus error, restarting run...")
return 2

# If the network device dies, it's probably not graphics's fault, just try again.
if re.search("NETDEV WATCHDOG", line):
self.print_error(
"Detected network device failure, restarting run...")
return 2

# These HFI response errors started appearing with the introduction
# of piglit runs. CosmicPenguin says:
#
@@ -151,11 +139,6 @@ class CrosServoRun:
self.print_error("Detected cheza power management bus error, restarting run...")
return 2

if re.search("coreboot.*bootblock starting", line):
self.print_error(
"Detected spontaneous reboot, restarting run...")
return 2

result = re.search("hwci: mesa: (\S*)", line)
if result:
if result.group(1) == "pass":
@@ -185,8 +168,6 @@ def main():
# power down the CPU on the device
servo.ec_write("power off\n")

servo.close()

sys.exit(retval)
@@ -1,10 +0,0 @@
#!/bin/bash

relay=$1

if [ -z "$relay" ]; then
echo "Must supply a relay arg"
exit 1
fi

$CI_PROJECT_DIR/install/bare-metal/eth008-power-relay.py $ETH_HOST $ETH_PORT off $relay
@@ -1,28 +0,0 @@
#!/usr/bin/python3

import sys
import socket

host = sys.argv[1]
port = sys.argv[2]
mode = sys.argv[3]
relay = sys.argv[4]
msg = None

if mode == "on":
msg = b'\x20'
else:
msg = b'\x21'

msg += int(relay).to_bytes(1, 'big')
msg += b'\x00'

c = socket.create_connection((host, int(port)))
c.sendall(msg)

data = c.recv(1)
c.close()

if data[0] == b'\x01':
print('Command failed')
sys.exit(1)
@@ -1,12 +0,0 @@
#!/bin/bash

relay=$1

if [ -z "$relay" ]; then
echo "Must supply a relay arg"
exit 1
fi

$CI_PROJECT_DIR/install/bare-metal/eth008-power-relay.py $ETH_HOST $ETH_PORT off $relay
sleep 5
$CI_PROJECT_DIR/install/bare-metal/eth008-power-relay.py $ETH_HOST $ETH_PORT on $relay
@@ -36,9 +36,6 @@ class FastbootRun:
self.ser = SerialBuffer(args.dev, "results/serial-output.txt", "R SERIAL> ", timeout=600)
self.fastboot="fastboot boot -s {ser} artifacts/fastboot.img".format(ser=args.fbserial)

def close(self):
self.ser.close()

def print_error(self, message):
RED = '\033[0;31m'
NO_COLOR = '\033[0m'
@@ -70,13 +67,7 @@ class FastbootRun:
if self.logged_system(self.fastboot) != 0:
return 1

print_more_lines = -1
for line in self.ser.lines():
if print_more_lines == 0:
return 2
if print_more_lines > 0:
print_more_lines -= 1

if re.search("---. end Kernel panic", line):
return 1

@@ -92,24 +83,6 @@ class FastbootRun:
"Detected kernel soft lockup, restarting run...")
return 2

# If the network device dies, it's probably not graphics's fault, just try again.
if re.search("NETDEV WATCHDOG", line):
self.print_error(
"Detected network device failure, restarting run...")
return 2

# A3xx recovery doesn't quite work. Sometimes the GPU will get
# wedged and recovery will fail (because power can't be reset?)
# This assumes that the jobs are sufficiently well-tested that GPU
# hangs aren't always triggered, so just try again. But print some
# more lines first so that we get better information on the cause
# of the hang. Once a hang happens, it's pretty chatty.
if "[drm:adreno_recover] *ERROR* gpu hw init failed: -22" in line:
self.print_error(
"Detected GPU hang, restarting run...")
if print_more_lines == -1:
print_more_lines = 30

result = re.search("hwci: mesa: (\S*)", line)
if result:
if result.group(1) == "pass":
@@ -132,7 +105,6 @@ def main():

while True:
retval = fastboot.run()
fastboot.close()
if retval != 2:
break
@@ -8,20 +8,15 @@ mkdir -p $rootfs_dst/results
cp $BM/bm-init.sh $rootfs_dst/init
cp $CI_COMMON/init*.sh $rootfs_dst/

# Make JWT token available as file in the bare-metal storage to enable access
# to MinIO
cp "${CI_JOB_JWT_FILE}" "${rootfs_dst}${CI_JOB_JWT_FILE}"

cp $CI_COMMON/capture-devcoredump.sh $rootfs_dst/

set +x

# Pass through relevant env vars from the gitlab job to the baremetal init script
"$CI_COMMON"/generate-env.sh > $rootfs_dst/set-job-env-vars.sh
chmod +x $rootfs_dst/set-job-env-vars.sh
echo "Variables passed through:"
cat $rootfs_dst/set-job-env-vars.sh

echo "export CI_JOB_JWT=${CI_JOB_JWT@Q}" >> $rootfs_dst/set-job-env-vars.sh
set -x

# Add the Mesa drivers we built, and make a consistent symlink to them.
@@ -28,6 +28,7 @@ import serial
import threading
import time


class SerialBuffer:
def __init__(self, dev, filename, prefix, timeout = None):
self.filename = filename
@@ -35,17 +36,15 @@ class SerialBuffer:

if dev:
self.f = open(filename, "wb+")
self.serial = serial.Serial(dev, 115200, timeout=timeout)
self.serial = serial.Serial(dev, 115200, timeout=timeout if timeout else 10)
else:
self.f = open(filename, "rb")
self.serial = None

self.byte_queue = queue.Queue()
self.line_queue = queue.Queue()
self.prefix = prefix
self.timeout = timeout
self.sentinel = object()
self.closing = False

if self.dev:
self.read_thread = threading.Thread(
@@ -59,31 +58,24 @@ class SerialBuffer:
target=self.serial_lines_thread_loop, daemon=True)
self.lines_thread.start()

def close(self):
self.closing = True
if self.serial:
self.serial.cancel_read()
self.read_thread.join()
self.lines_thread.join()
if self.serial:
self.serial.close()

# Thread that just reads the bytes from the serial device to try to keep from
# buffer overflowing it. If nothing is received in 1 minute, it finalizes.
def serial_read_thread_loop(self):
greet = "Serial thread reading from %s\n" % self.dev
self.byte_queue.put(greet.encode())

while not self.closing:
while True:
try:
b = self.serial.read()
if len(b) == 0:
if len(b) > 0:
self.byte_queue.put(b)
elif self.timeout:
self.byte_queue.put(self.sentinel)
break
self.byte_queue.put(b)
except Exception as err:
print(self.prefix + str(err))
self.byte_queue.put(self.sentinel)
break
self.byte_queue.put(self.sentinel)

# Thread that just reads the bytes from the file of serial output that some
# other process is appending to.
@@ -91,13 +83,12 @@ class SerialBuffer:
greet = "Serial thread reading from %s\n" % self.filename
self.byte_queue.put(greet.encode())

while not self.closing:
while True:
line = self.f.readline()
if line:
self.byte_queue.put(line)
else:
time.sleep(0.1)
self.byte_queue.put(self.sentinel)

# Thread that processes the stream of bytes to 1) log to stdout, 2) log to
# file, 3) add to the queue of lines to be read by program logic
@@ -1,41 +0,0 @@
#!/usr/bin/python3

# Copyright © 2020 Christian Gmeiner
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the next
# paragraph) shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# Tiny script to read bytes from telnet, and write the output to stdout, with a
# buffer in between so we don't lose serial output from its buffer.
#

import sys
import telnetlib

host=sys.argv[1]
port=sys.argv[2]

tn = telnetlib.Telnet(host, port, 1000000)

while True:
bytes = tn.read_some()
sys.stdout.buffer.write(bytes)
sys.stdout.flush()

tn.close()
@@ -5,11 +5,8 @@ for var in \
BASE_SYSTEM_FORK_HOST_PREFIX \
BASE_SYSTEM_MAINLINE_HOST_PREFIX \
CI_COMMIT_BRANCH \
CI_COMMIT_REF_NAME \
CI_COMMIT_TITLE \
CI_JOB_ID \
CI_JOB_JWT_FILE \
CI_JOB_NAME \
CI_JOB_URL \
CI_MERGE_REQUEST_SOURCE_BRANCH_NAME \
CI_MERGE_REQUEST_TITLE \
@@ -17,47 +14,33 @@ for var in \
CI_NODE_TOTAL \
CI_PAGES_DOMAIN \
CI_PIPELINE_ID \
CI_PIPELINE_URL \
CI_PROJECT_DIR \
CI_PROJECT_NAME \
CI_PROJECT_PATH \
CI_PROJECT_ROOT_NAMESPACE \
CI_RUNNER_DESCRIPTION \
CI_SERVER_URL \
CROSVM_GALLIUM_DRIVER \
CROSVM_GPU_ARGS \
CROSVM_TEST_SCRIPT \
DEQP_BIN_DIR \
DEQP_CASELIST_FILTER \
DEQP_CASELIST_INV_FILTER \
DEQP_CONFIG \
DEQP_EXPECTED_RENDERER \
DEQP_FRACTION \
DEQP_HEIGHT \
DEQP_NO_SAVE_RESULTS \
DEQP_PARALLEL \
DEQP_RESULTS_DIR \
DEQP_RUNNER_OPTIONS \
DEQP_SUITE \
DEQP_TEMP_DIR \
DEQP_VARIANT \
DEQP_VER \
DEQP_WIDTH \
DEVICE_NAME \
DRIVER_NAME \
EGL_PLATFORM \
ETNA_MESA_DEBUG \
FDO_CI_CONCURRENT \
FDO_UPSTREAM_REPO \
FD_MESA_DEBUG \
FLAKES_CHANNEL \
GALLIUM_DRIVER \
GALLIVM_PERF \
GPU_VERSION \
GTEST \
GTEST_FAILS \
GTEST_FRACTION \
GTEST_RESULTS_DIR \
GTEST_RUNNER_OPTIONS \
GTEST_SKIPS \
HWCI_FREQ_MAX \
HWCI_KERNEL_MODULES \
HWCI_START_XORG \
@@ -66,44 +49,34 @@ for var in \
JOB_ARTIFACTS_BASE \
JOB_RESULTS_PATH \
JOB_ROOTFS_OVERLAY_PATH \
LD_LIBRARY_PATH \
LP_NUM_THREADS \
MESA_BASE_TAG \
MESA_BUILD_PATH \
MESA_DEBUG \
MESA_GLES_VERSION_OVERRIDE \
MESA_GLSL_VERSION_OVERRIDE \
MESA_GL_VERSION_OVERRIDE \
MESA_IMAGE \
MESA_IMAGE_PATH \
MESA_IMAGE_TAG \
MESA_TEMPLATES_COMMIT \
MESA_GLSL_VERSION_OVERRIDE \
MESA_GLES_VERSION_OVERRIDE \
MESA_VK_IGNORE_CONFORMANCE_WARNING \
MINIO_HOST \
NIR_DEBUG \
PAN_I_WANT_A_BROKEN_VULKAN_DRIVER \
NIR_VALIDATE \
PAN_MESA_DEBUG \
PIGLIT_FRACTION \
PIGLIT_JUNIT_RESULTS \
PIGLIT_NO_WINDOW \
PIGLIT_OPTIONS \
PIGLIT_PLATFORM \
PIGLIT_PROFILES \
PIGLIT_REPLAY_ARTIFACTS_BASE_URL \
PIGLIT_REPLAY_SUBCOMMAND \
PIGLIT_REPLAY_DESCRIPTION_FILE \
PIGLIT_REPLAY_DEVICE_NAME \
PIGLIT_REPLAY_EXTRA_ARGS \
PIGLIT_REPLAY_REFERENCE_IMAGES_BASE \
PIGLIT_REPLAY_SUBCOMMAND \
PIGLIT_REPLAY_UPLOAD_TO_MINIO \
PIGLIT_RESULTS \
PIGLIT_TESTS \
PIPELINE_ARTIFACTS_BASE \
SKQP_ASSETS_DIR \
SKQP_BACKENDS \
TEST_LD_PRELOAD \
TU_DEBUG \
VIRGL_HOST_API \
VK_CPU \
VK_DRIVER \
VK_ICD_FILENAMES \
; do
if [ -n "${!var+x}" ]; then
echo "export $var=${!var@Q}"
@@ -23,9 +23,6 @@ export XDG_CACHE_HOME=/tmp
export PYTHONPATH=$(python3 -c "import sys;print(\":\".join(sys.path))")

if [ "$HWCI_FREQ_MAX" = "true" ]; then
# Ensure initialization of the DRM device (needed by MSM)
head -0 /dev/dri/renderD128

# Disable GPU frequency scaling
DEVFREQ_GOVERNOR=`find /sys/devices -name governor | grep gpu || true`
test -z "$DEVFREQ_GOVERNOR" || echo performance > $DEVFREQ_GOVERNOR || true
@@ -71,7 +68,7 @@ fi
MINIO=$(cat /proc/cmdline | tr ' ' '\n' | grep minio_results | cut -d '=' -f 2 || true)
if [ -n "$MINIO" ]; then
tar -czf results.tar.gz results/;
ci-fairy minio login --token-file "${CI_JOB_JWT_FILE}";
ci-fairy minio login "$CI_JOB_JWT";
ci-fairy minio cp results.tar.gz minio://"$MINIO"/results.tar.gz;
fi
@@ -11,7 +11,6 @@ CONFIG_DEVFREQ_GOV_PASSIVE=y
CONFIG_DEVFREQ_GOV_SIMPLE_ONDEMAND=y

CONFIG_DRM=y
CONFIG_DRM_ETNAVIV=y
CONFIG_DRM_ROCKCHIP=y
CONFIG_DRM_PANFROST=y
CONFIG_DRM_LIMA=y
@@ -14,9 +14,9 @@ CONFIG_DRM_ROCKCHIP=y
CONFIG_DRM_PANFROST=y
CONFIG_DRM_LIMA=y
CONFIG_DRM_PANEL_SIMPLE=y
CONFIG_DRM_PANEL_EDP=y
CONFIG_DRM_MSM=y
CONFIG_DRM_I2C_ADV7511=y
CONFIG_DRM_I2C_ADV7533=y
CONFIG_PWM_CROS_EC=y
CONFIG_BACKLIGHT_PWM=y

@@ -32,11 +32,6 @@ CONFIG_TYPEC=y
CONFIG_TYPEC_TCPM=y

# MSM platform bits

# For CONFIG_QCOM_LMH
CONFIG_OF=y

CONFIG_QCOM_COMMAND_DB=y
CONFIG_QCOM_RPMHPD=y
CONFIG_QCOM_RPMPD=y
CONFIG_SDM_GPUCC_845=y
@@ -50,11 +45,9 @@ CONFIG_I2C_QCOM_GENI=y
CONFIG_SPI_QCOM_GENI=y
CONFIG_PHY_QCOM_QUSB2=y
CONFIG_PHY_QCOM_QMP=y
CONFIG_QCOM_CLK_APCC_MSM8996=y
CONFIG_QCOM_LLCC=y
CONFIG_QCOM_LMH=y
CONFIG_QCOM_SPMI_TEMP_ALARM=y
CONFIG_QCOM_WDT=y
CONFIG_QCOM_CLK_APCC_MSM8996=y
CONFIG_POWER_RESET_QCOM_PON=y
CONFIG_RTC_DRV_PM8XXX=y
CONFIG_INTERCONNECT=y
@@ -63,9 +56,8 @@ CONFIG_INTERCONNECT_QCOM_SDM845=y
CONFIG_INTERCONNECT_QCOM_MSM8916=y
CONFIG_INTERCONNECT_QCOM_OSM_L3=y
CONFIG_INTERCONNECT_QCOM_SC7180=y
CONFIG_QCOM_WDT=y
CONFIG_CRYPTO_DEV_QCOM_RNG=y
CONFIG_SC_DISPCC_7180=y
CONFIG_SC_GPUCC_7180=y

# db410c ethernet
CONFIG_USB_RTL8152=y
@@ -144,15 +136,12 @@ CONFIG_USB_NET_SMSC95XX=y
# For amlogic
CONFIG_MESON_GXL_PHY=y
CONFIG_MDIO_BUS_MUX_MESON_G12A=y
CONFIG_DRM_MESON=y

# For Mediatek
CONFIG_DRM_MEDIATEK=y
CONFIG_PWM_MEDIATEK=y
CONFIG_DRM_MEDIATEK_HDMI=y
CONFIG_GNSS=y
CONFIG_GNSS_MTK_SERIAL=y
CONFIG_HW_RANDOM=y
CONFIG_HW_RANDOM_MTK=y
CONFIG_MTK_DEVAPC=y
CONFIG_PWM_MTK_DISP=y
@@ -31,18 +31,5 @@ if [[ $arch == "arm64" ]]; then
wget ${ARTIFACTS_URL}/$DTB
done

popd
elif [[ $arch == "armhf" ]]; then
mkdir -p /baremetal-files
pushd /baremetal-files

wget ${ARTIFACTS_URL}/zImage

DEVICE_TREES="imx6q-cubox-i.dtb"

for DTB in $DEVICE_TREES; do
wget ${ARTIFACTS_URL}/$DTB
done

popd
fi
@@ -2,35 +2,49 @@

set -ex

CROSVM_VERSION=d2b6a64dd31c92a284a905c0f2483d0b222b1220
git clone --single-branch -b for-mesa-ci --no-checkout https://gitlab.freedesktop.org/tomeu/crosvm.git /platform/crosvm
pushd /platform/crosvm
git checkout "$CROSVM_VERSION"
git submodule update --init
# Pull down repositories that crosvm depends on to cros checkout-like locations.
CROS_ROOT=/
THIRD_PARTY_ROOT=$CROS_ROOT/third_party
mkdir -p $THIRD_PARTY_ROOT
AOSP_EXTERNAL_ROOT=$CROS_ROOT/aosp/external
mkdir -p $AOSP_EXTERNAL_ROOT
PLATFORM2_ROOT=/platform2

VIRGLRENDERER_VERSION=e420a5aab92de8fb42fad50762f0ac3b5fcb3bfb
rm -rf third_party/virglrenderer
git clone --single-branch -b master --no-checkout https://gitlab.freedesktop.org/virgl/virglrenderer.git third_party/virglrenderer
pushd third_party/virglrenderer
git checkout "$VIRGLRENDERER_VERSION"
meson build/ $EXTRA_MESON_ARGS
ninja -C build install
PLATFORM2_COMMIT=2079dd5fcd61f1ac39e2fc16595956617f3f1e9e
git clone --single-branch --no-checkout https://chromium.googlesource.com/chromiumos/platform2 $PLATFORM2_ROOT
pushd $PLATFORM2_ROOT
git checkout $PLATFORM2_COMMIT
popd

RUSTFLAGS='-L native=/usr/local/lib' cargo install \
bindgen \
-j ${FDO_CI_CONCURRENT:-4} \
--root /usr/local \
$EXTRA_CARGO_ARGS
# minijail does not exist in upstream linux distros.
MINIJAIL_COMMIT=5f9e3001c61626d2863dad91248ba8496c3ef511
git clone --single-branch --no-checkout https://android.googlesource.com/platform/external/minijail $AOSP_EXTERNAL_ROOT/minijail
pushd $AOSP_EXTERNAL_ROOT/minijail
git checkout $MINIJAIL_COMMIT
make
cp libminijail.so /usr/lib/x86_64-linux-gnu/
popd

# Pull the cras library for audio access.
ADHD_COMMIT=5068bdd18b51de8f2d5bcff754cdecda80de8f44
git clone --single-branch --no-checkout https://chromium.googlesource.com/chromiumos/third_party/adhd $THIRD_PARTY_ROOT/adhd
pushd $THIRD_PARTY_ROOT/adhd
git checkout $ADHD_COMMIT
popd

CROSVM_VERSION=f70350ba51e9631e3b7fe711c0296e041a61a499
git clone --single-branch --no-checkout https://chromium.googlesource.com/chromiumos/platform/crosvm /platform/crosvm
pushd /platform/crosvm
git checkout "$CROSVM_VERSION"

RUSTFLAGS='-L native=/usr/local/lib' cargo install \
-j ${FDO_CI_CONCURRENT:-4} \
--locked \
--features 'default-no-sandbox gpu x virgl_renderer virgl_renderer_next' \
--features 'default-no-sandbox gpu x virtio-gpu-next' \
--path . \
--root /usr/local \
$EXTRA_CARGO_ARGS

popd

rm -rf $PLATFORM2_ROOT $AOSP_EXTERNAL_ROOT/minijail $THIRD_PARTY_ROOT/adhd $THIRD_PARTY_ROOT/rust-vmm /platform/crosvm
rm -rf $PLATFORM2_ROOT $AOSP_EXTERNAL_ROOT/minijail $THIRD_PARTY_ROOT/adhd /platform/crosvm
@@ -1,24 +1,9 @@
#!/bin/sh
#!/bin/bash

set -ex

if [ -n "${DEQP_RUNNER_GIT_TAG}${DEQP_RUNNER_GIT_REV}" ]; then
# Build and install from source
DEQP_RUNNER_CARGO_ARGS="--git ${DEQP_RUNNER_GIT_URL:-https://gitlab.freedesktop.org/anholt/deqp-runner.git}"

if [ -n "${DEQP_RUNNER_GIT_TAG}" ]; then
DEQP_RUNNER_CARGO_ARGS="--tag ${DEQP_RUNNER_GIT_TAG} ${DEQP_RUNNER_CARGO_ARGS}"
else
DEQP_RUNNER_CARGO_ARGS="--rev ${DEQP_RUNNER_GIT_REV} ${DEQP_RUNNER_CARGO_ARGS}"
fi

DEQP_RUNNER_CARGO_ARGS="${DEQP_RUNNER_CARGO_ARGS} ${EXTRA_CARGO_ARGS}"
else
# Install from package registry
DEQP_RUNNER_CARGO_ARGS="--version 0.11.0 ${EXTRA_CARGO_ARGS} -- deqp-runner"
fi

cargo install --locked \
-j ${FDO_CI_CONCURRENT:-4} \
--root /usr/local \
${DEQP_RUNNER_CARGO_ARGS}
cargo install --locked deqp-runner \
-j ${FDO_CI_CONCURRENT:-4} \
--version 0.7.2 \
--root /usr/local \
$EXTRA_CARGO_ARGS
@@ -6,7 +6,7 @@ git config --global user.email "mesa@example.com"
git config --global user.name "Mesa CI"
git clone \
https://github.com/KhronosGroup/VK-GL-CTS.git \
-b vulkan-cts-1.2.7.2 \
-b vulkan-cts-1.2.6.2 \
--depth 1 \
/VK-GL-CTS
pushd /VK-GL-CTS
@@ -43,10 +43,8 @@ mv /deqp/modules/egl/deqp-egl-x11 /deqp/modules/egl/deqp-egl

# Copy out the mustpass lists we want.
mkdir /deqp/mustpass
for mustpass in $(< /VK-GL-CTS/external/vulkancts/mustpass/master/vk-default.txt) ; do
cat /VK-GL-CTS/external/vulkancts/mustpass/master/$mustpass \
>> /deqp/mustpass/vk-master.txt
done
cp /VK-GL-CTS/external/vulkancts/mustpass/master/vk-default.txt \
/deqp/mustpass/vk-master.txt

cp \
/deqp/external/openglcts/modules/gl_cts/data/mustpass/gles/aosp_mustpass/3.2.6.x/*.txt \
@@ -68,11 +66,7 @@ cp /deqp/executor/testlog-to-* /deqp/executor.save
rm -rf /deqp/executor
mv /deqp/executor.save /deqp/executor

# Remove other mustpass files, since we saved off the ones we wanted to conventient locations above.
rm -rf /deqp/external/openglcts/modules/gl_cts/data/mustpass
rm -rf /deqp/external/vulkancts/modules/vulkan/vk-master*
rm -rf /deqp/external/vulkancts/modules/vulkan/vk-default

rm -rf /deqp/external/openglcts/modules/cts-runner
rm -rf /deqp/modules/internal
rm -rf /deqp/execserver
@@ -2,7 +2,7 @@
set -ex

export LIBDRM_VERSION=libdrm-2.4.109
export LIBDRM_VERSION=libdrm-2.4.107

wget https://dri.freedesktop.org/libdrm/$LIBDRM_VERSION.tar.xz
tar -xvf $LIBDRM_VERSION.tar.xz && rm $LIBDRM_VERSION.tar.xz
@@ -4,7 +4,7 @@ set -ex
git clone https://gitlab.freedesktop.org/mesa/piglit.git --single-branch --no-checkout /piglit
pushd /piglit
git checkout af1785f31f65622d9b1ca1c08c75cf140bc7ed22
git checkout 11025faf96df23debbefd3678fe959eaa35a50f0
patch -p1 <$OLDPWD/.gitlab-ci/piglit/disable-vs_in.diff
cmake -S . -B . -G Ninja -DCMAKE_BUILD_TYPE=Release $PIGLIT_OPTS $EXTRA_CMAKE_ARGS
ninja $PIGLIT_BUILD_TARGETS
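# Hedged usage sketch, not part of the build script: piglit can be exercised
# from its build tree once ninja finishes; the profile and results path below
# are illustrative only.
./piglit run sanity /tmp/piglit-sanity
./piglit summary console /tmp/piglit-sanity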
@@ -15,10 +15,10 @@ ln -s /usr/local/bin $HOME/.cargo/bin
# version of the compiler, rather than whatever the container's Debian comes
# with.
#
# Pick the rust compiler (1.48) available in Debian stable, and pick a specific
# Pick the rust compiler (1.41) available in Debian stable, and pick a specific
# snapshot from rustup so the compiler doesn't drift on us.
wget https://sh.rustup.rs -O - | \
    sh -s -- -y --default-toolchain 1.49.0-2020-12-31
    sh -s -- -y --default-toolchain 1.41.1-2020-02-27

# Set up a config script for cross compiling -- cargo needs your system cc for
# linking in cross builds, but doesn't know what you want to use for system cc.
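# Hedged sketch of the kind of cargo configuration the comment above describes;
# the exact file the CI writes is not shown in this diff, so the target triple
# and linker below are illustrative only.
mkdir -p "$HOME"/.cargo
cat > "$HOME"/.cargo/config <<'EOF'
[target.armv7-unknown-linux-gnueabihf]
linker = "arm-linux-gnueabihf-gcc"
EOF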
@@ -1,82 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
create_gn_args() {
|
||||
cp "${BASE_ARGS_GN_FILE}" "${SKQP_OUT_DIR}"/args.gn
|
||||
echo "target_cpu = \"${SKQP_ARCH}\"" >> "${SKQP_OUT_DIR}"/args.gn
|
||||
}
|
||||
|
||||
download_skqp_models() (
|
||||
# The download_model.py script needs a checksum file to know which version of
# the models to download.
|
||||
|
||||
# This is the most recent commit available in the skia repository with a
|
||||
# valid files.checksum
|
||||
SKIA_LAST_SKQP_CUT_COMMIT_SHA=ccf5f0d75b6a6b54756f2c62d57e3730eed8aa45
|
||||
git fetch origin "${SKIA_LAST_SKQP_CUT_COMMIT_SHA}:refs/remotes/origin/${SKIA_LAST_SKQP_CUT_COMMIT_SHA}"
|
||||
git checkout "${SKIA_LAST_SKQP_CUT_COMMIT_SHA}" -- \
|
||||
platform_tools/android/apps/skqp/src/main/assets/files.checksum
|
||||
|
||||
# The following patch transforms download_model.py from python2 to python3.
|
||||
git apply "${DOWNLOAD_MODEL_PATCH_FILE}"
|
||||
python3 tools/skqp/download_model.py
|
||||
|
||||
# Copy resources from skia to skqp directory
|
||||
python3 tools/skqp/setup_resources
|
||||
)
|
||||
|
||||
set -ex
|
||||
|
||||
SCRIPT_DIR=$(realpath "$(dirname "$0")")
|
||||
FETCH_GN_PATCH_FILE="${SCRIPT_DIR}/build-skqp_fetch-gn.patch"
|
||||
BASE_ARGS_GN_FILE="${SCRIPT_DIR}/build-skqp_base.gn"
|
||||
DOWNLOAD_MODEL_PATCH_FILE="${SCRIPT_DIR}/build-skqp_download_model.patch"
|
||||
|
||||
SKQP_ARCH=${SKQP_ARCH:-x64}
|
||||
SKIA_DIR=${SKIA_DIR:-$(mktemp -d)}
|
||||
SKQP_DIR=${SKQP_DIR:-$(mktemp -d)}
|
||||
SKQP_OUT_DIR=${SKIA_DIR}/out/${SKQP_ARCH}
|
||||
SKQP_INSTALL_DIR=/skqp
|
||||
SKQP_ASSETS_DIR="${SKQP_INSTALL_DIR}/assets"
|
||||
# Build list_gpu_unit_tests so that the unittests.txt file can be updated to
# match the target hardware.
|
||||
SKQP_BINARIES=(skqp list_gpu_unit_tests)
|
||||
|
||||
# Using a recent release version to mitigate instability during test phase
|
||||
SKIA_COMMIT_SHA="canvaskit/0.32.0"
|
||||
|
||||
git clone 'https://skia.googlesource.com/skia/' \
|
||||
--single-branch \
|
||||
-b "${SKIA_COMMIT_SHA}" \
|
||||
"${SKIA_DIR}"
|
||||
|
||||
pushd "${SKIA_DIR}"
|
||||
|
||||
git apply "${FETCH_GN_PATCH_FILE}"
|
||||
# Fetch the build tools needed to build skia/skqp.
# Basically, it clones the repositories at the commit SHAs listed in
# ${SKIA_DIR}/DEPS.
|
||||
python3 tools/git-sync-deps
|
||||
|
||||
mkdir -p "${SKQP_OUT_DIR}"
|
||||
mkdir -p "${SKQP_INSTALL_DIR}"
|
||||
|
||||
create_gn_args
|
||||
|
||||
# Build and install skqp binaries
|
||||
bin/gn gen "${SKQP_OUT_DIR}"
|
||||
|
||||
for BINARY in "${SKQP_BINARIES[@]}"
|
||||
do
|
||||
/usr/bin/ninja -C "${SKQP_OUT_DIR}" "${BINARY}"
|
||||
install -m 0755 "${SKQP_OUT_DIR}/${BINARY}" "${SKQP_INSTALL_DIR}"
|
||||
done
|
||||
|
||||
# Acquire assets and move them to the target directory.
|
||||
download_skqp_models
|
||||
mv platform_tools/android/apps/skqp/src/main/assets/ "${SKQP_ASSETS_DIR}"
|
||||
|
||||
popd
|
||||
rm -Rf "${SKQP_DIR}"
|
||||
rm -Rf "${SKIA_DIR}"
|
||||
|
||||
set +ex
|
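# Hedged check sketch, not part of the script above: after the install loop and
# the assets move, the skqp install dir should hold both binaries plus an assets
# tree, which is what the rootfs build later packages for the test devices.
ls -l /skqp/skqp /skqp/list_gpu_unit_tests
ls /skqp/assets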
@@ -1,25 +0,0 @@
|
||||
cc = "gcc"
|
||||
cxx = "g++"
|
||||
|
||||
extra_cflags = [ "-DSK_ENABLE_DUMP_GPU", "-DSK_BUILD_FOR_SKQP" ]
|
||||
extra_cflags_cc = [ "-static", "-Wno-error", "-Wno-macro-redefined", "-Wno-suggest-destructor-override", "-Wno-suggest-override" ]
|
||||
|
||||
is_debug = false
|
||||
|
||||
skia_enable_fontmgr_android = false
|
||||
skia_enable_fontmgr_empty = true
|
||||
skia_enable_pdf = false
|
||||
skia_enable_skottie = false
|
||||
|
||||
skia_skqp_global_error_tolerance = 8
|
||||
skia_tools_require_resources = true
|
||||
|
||||
skia_use_dng_sdk = false
|
||||
skia_use_expat = true
|
||||
skia_use_icu = false
|
||||
skia_use_libheif = false
|
||||
skia_use_lua = false
|
||||
skia_use_piex = false
|
||||
skia_use_vulkan = true
|
||||
|
||||
target_os = "linux"
|
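# Hedged sketch, not part of the args.gn file above: create_gn_args copies this
# base file into the gn output directory and appends target_cpu, after which gn
# resolves the final argument set; the paths below are illustrative.
mkdir -p out/arm64
cp build-skqp_base.gn out/arm64/args.gn
echo 'target_cpu = "arm64"' >> out/arm64/args.gn
bin/gn gen out/arm64
bin/gn args out/arm64 --list --short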
@@ -1,22 +0,0 @@
|
||||
diff --git a/tools/skqp/download_model.py b/tools/skqp/download_model.py
|
||||
index fb0020e481..a5d8a03754 100755
|
||||
--- a/tools/skqp/download_model.py
|
||||
+++ b/tools/skqp/download_model.py
|
||||
@@ -10,7 +10,7 @@ import os
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
-import urllib2
|
||||
+import urllib.request, urllib.error, urllib.parse
|
||||
|
||||
def checksum(path):
|
||||
if not os.path.exists(path):
|
||||
@@ -33,7 +33,7 @@ def download(md5, path):
|
||||
pass # ignore race condition
|
||||
url = 'https://storage.googleapis.com/skia-skqp-assets/' + md5
|
||||
with open(path, 'wb') as o:
|
||||
- shutil.copyfileobj(urllib2.urlopen(url), o)
|
||||
+ shutil.copyfileobj(urllib.request.urlopen(url), o)
|
||||
|
||||
def tmp(prefix):
|
||||
fd, path = tempfile.mkstemp(prefix=prefix)
|
@@ -1,13 +0,0 @@
|
||||
diff --git a/bin/fetch-gn b/bin/fetch-gn
|
||||
index b4bb14c630..59c4591a30 100755
|
||||
--- a/bin/fetch-gn
|
||||
+++ b/bin/fetch-gn
|
||||
@@ -23,7 +23,7 @@ os.chdir(os.path.join(os.path.dirname(__file__), os.pardir))
|
||||
gnzip = os.path.join(tempfile.mkdtemp(), 'gn.zip')
|
||||
with open(gnzip, 'wb') as f:
|
||||
OS = {'darwin': 'mac', 'linux': 'linux', 'linux2': 'linux', 'win32': 'windows'}[sys.platform]
|
||||
- cpu = {'amd64': 'amd64', 'arm64': 'arm64', 'x86_64': 'amd64'}[platform.machine().lower()]
|
||||
+ cpu = {'amd64': 'amd64', 'arm64': 'arm64', 'x86_64': 'amd64', 'aarch64': 'arm64'}[platform.machine().lower()]
|
||||
|
||||
rev = 'd62642c920e6a0d1756316d225a90fd6faa9e21e'
|
||||
url = 'https://chrome-infra-packages.appspot.com/dl/gn/gn/{}-{}/+/git_revision:{}'.format(
|
@@ -1,17 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
git config --global user.email "mesa@example.com"
|
||||
git config --global user.name "Mesa CI"
|
||||
git clone \
|
||||
https://github.com/intel/libva-utils.git \
|
||||
-b 2.13.0 \
|
||||
--depth 1 \
|
||||
/va-utils
|
||||
|
||||
pushd /va-utils
|
||||
meson build -D tests=true -Dprefix=/va $EXTRA_MESON_ARGS
|
||||
ninja -C build install
|
||||
popd
|
||||
rm -rf /va-utils
|
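# Hedged note in script form, not part of the file above: because meson ran with
# -Dprefix=/va, the libva-utils test binaries install under /va/bin, which is the
# location the rootfs build copies them from further down in this comparison.
ls /va/bin/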
.gitlab-ci/container/build-virglrenderer.sh (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
mkdir -p /epoxy
|
||||
pushd /epoxy
|
||||
wget -qO- https://github.com/anholt/libepoxy/releases/download/1.5.8/libepoxy-1.5.8.tar.xz | tar -xJ --strip-components=1
|
||||
meson build/ $EXTRA_MESON_ARGS
|
||||
ninja -C build install
|
||||
popd
|
||||
rm -rf /epoxy
|
||||
|
||||
VIRGLRENDERER_VERSION=08e11a495429c222f150b6d6f8c4936f2f0e0759
|
||||
git clone https://gitlab.freedesktop.org/virgl/virglrenderer.git --single-branch --no-checkout /virglrenderer
|
||||
pushd /virglrenderer
|
||||
git checkout "$VIRGLRENDERER_VERSION"
|
||||
meson build/ $EXTRA_MESON_ARGS
|
||||
ninja -C build install
|
||||
popd
|
||||
rm -rf /virglrenderer
|
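# Hedged usage sketch, not part of the new file above: both meson invocations
# honour EXTRA_MESON_ARGS, so a cross build could route a cross file through it;
# the cross-file path here is illustrative only.
EXTRA_MESON_ARGS="--cross-file /cross_file-armhf.txt" \
    bash .gitlab-ci/container/build-virglrenderer.sh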
@@ -1,22 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
export LIBWAYLAND_VERSION="1.18.0"
|
||||
export WAYLAND_PROTOCOLS_VERSION="1.24"
|
||||
|
||||
git clone https://gitlab.freedesktop.org/wayland/wayland
|
||||
cd wayland
|
||||
git checkout "$LIBWAYLAND_VERSION"
|
||||
meson -Ddocumentation=false -Ddtd_validation=false -Dlibraries=true _build
|
||||
ninja -C _build install
|
||||
cd ..
|
||||
rm -rf wayland
|
||||
|
||||
git clone https://gitlab.freedesktop.org/wayland/wayland-protocols
|
||||
cd wayland-protocols
|
||||
git checkout "$WAYLAND_PROTOCOLS_VERSION"
|
||||
meson _build
|
||||
ninja -C _build install
|
||||
cd ..
|
||||
rm -rf wayland-protocols
|
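# Hedged check sketch, not in the script above: after both installs, pkg-config
# should report the pinned versions (the exact prefix depends on how meson was
# configured).
pkg-config --modversion wayland-server
pkg-config --modversion wayland-protocols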
@@ -17,7 +17,7 @@ cat >$cross_file <<EOF
|
||||
[binaries]
|
||||
ar = '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/$arch-ar'
|
||||
c = ['ccache', '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch2}29-clang', '-fno-exceptions', '-fno-unwind-tables', '-fno-asynchronous-unwind-tables']
|
||||
cpp = ['ccache', '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch2}29-clang++', '-fno-exceptions', '-fno-unwind-tables', '-fno-asynchronous-unwind-tables']
|
||||
cpp = ['ccache', '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch2}29-clang++', '-fno-exceptions', '-fno-unwind-tables', '-fno-asynchronous-unwind-tables', '-static-libstdc++']
|
||||
c_ld = 'lld'
|
||||
cpp_ld = 'lld'
|
||||
strip = '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/$arch-strip'
|
||||
|
@@ -3,17 +3,11 @@
|
||||
set -ex
|
||||
|
||||
if [ $DEBIAN_ARCH = arm64 ]; then
|
||||
ARCH_PACKAGES="firmware-qcom-media
|
||||
libfontconfig1
|
||||
libgl1
|
||||
libglu1-mesa
|
||||
"
|
||||
ARCH_PACKAGES="firmware-qcom-media"
|
||||
elif [ $DEBIAN_ARCH = amd64 ]; then
|
||||
ARCH_PACKAGES="firmware-amd-graphics
|
||||
libelf1
|
||||
libllvm11
|
||||
libva2
|
||||
libva-drm2
|
||||
"
|
||||
fi
|
||||
|
||||
@@ -75,7 +69,7 @@ apt-get -y install --no-install-recommends \
|
||||
|
||||
# Needed for ci-fairy, this revision is able to upload files to
|
||||
# MinIO and doesn't depend on git
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@34f4ade99434043f88e164933f570301fd18b125
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@0f1abc24c043e63894085a6bd12f14263e8b29eb
|
||||
|
||||
apt-get purge -y \
|
||||
$INSTALL_CI_FAIRY_PACKAGES
|
||||
@@ -150,8 +144,6 @@ rm -rf usr/sbin/update-usbids
|
||||
rm -rf var/lib/usbutils/usb.ids
|
||||
rm -rf usr/share/misc/usb.ids
|
||||
|
||||
rm -rf /root/.pip
|
||||
|
||||
#######################################################################
|
||||
# Crush into a minimal production image to be deployed via some type of image
|
||||
# updating system.
|
||||
|
@@ -3,7 +3,6 @@
|
||||
set -ex
|
||||
|
||||
EPHEMERAL="\
|
||||
autoconf \
|
||||
rdfind \
|
||||
unzip \
|
||||
"
|
||||
@@ -30,7 +29,7 @@ sh .gitlab-ci/container/create-android-cross-file.sh /$ndk arm-linux-androideabi
|
||||
|
||||
# Not using build-libdrm.sh because we don't want its cleanup after building
|
||||
# each arch. Fetch and extract now.
|
||||
export LIBDRM_VERSION=libdrm-2.4.109
|
||||
export LIBDRM_VERSION=libdrm-2.4.102
|
||||
wget https://dri.freedesktop.org/libdrm/$LIBDRM_VERSION.tar.xz
|
||||
tar -xf $LIBDRM_VERSION.tar.xz && rm $LIBDRM_VERSION.tar.xz
|
||||
|
||||
@@ -51,56 +50,11 @@ for arch in \
|
||||
-Detnaviv=false \
|
||||
-Dfreedreno=false \
|
||||
-Dintel=false \
|
||||
-Dcairo-tests=false \
|
||||
-Dvalgrind=false
|
||||
-Dcairo-tests=false
|
||||
ninja -C build-$arch install
|
||||
cd ..
|
||||
done
|
||||
|
||||
rm -rf $LIBDRM_VERSION
|
||||
|
||||
export LIBELF_VERSION=libelf-0.8.13
|
||||
wget https://fossies.org/linux/misc/old/$LIBELF_VERSION.tar.gz
|
||||
|
||||
# Not 100% sure who runs the mirror above so be extra careful
|
||||
if ! echo "4136d7b4c04df68b686570afa26988ac ${LIBELF_VERSION}.tar.gz" | md5sum -c -; then
|
||||
echo "Checksum failed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
tar -xf ${LIBELF_VERSION}.tar.gz
|
||||
cd $LIBELF_VERSION
|
||||
|
||||
# Work around a bug in the original configure not enabling __LIBELF64.
|
||||
autoreconf
|
||||
|
||||
for arch in \
|
||||
x86_64-linux-android \
|
||||
i686-linux-android \
|
||||
aarch64-linux-android \
|
||||
arm-linux-androideabi ; do
|
||||
|
||||
ccarch=${arch}
|
||||
if [ "${arch}" == 'arm-linux-androideabi' ]
|
||||
then
|
||||
ccarch=armv7a-linux-androideabi
|
||||
fi
|
||||
|
||||
export CC=/android-ndk-r21d/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch}-ar
|
||||
export CC=/android-ndk-r21d/toolchains/llvm/prebuilt/linux-x86_64/bin/${ccarch}29-clang
|
||||
export CXX=/android-ndk-r21d/toolchains/llvm/prebuilt/linux-x86_64/bin/${ccarch}29-clang++
|
||||
export LD=/android-ndk-r21d/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch}-ld
|
||||
export RANLIB=/android-ndk-r21d/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch}-ranlib
|
||||
|
||||
# The configure script doesn't know about Android, but it doesn't seem to
# really use the host triplet anyway.
|
||||
./configure --host=x86_64-linux-gnu --disable-nls --disable-shared \
|
||||
--libdir=/usr/local/lib/${arch}
|
||||
make install
|
||||
make distclean
|
||||
done
|
||||
|
||||
cd ..
|
||||
rm -rf $LIBELF_VERSION
|
||||
|
||||
apt-get purge -y $EPHEMERAL
|
||||
|
@@ -43,6 +43,7 @@ apt-get -y install \
|
||||
llvm-11-dev \
|
||||
meson \
|
||||
pkg-config \
|
||||
python-is-python3 \
|
||||
python3-mako \
|
||||
python3-pil \
|
||||
python3-pip \
|
||||
@@ -57,7 +58,7 @@ apt-get -y install \
|
||||
apt-get install -y --no-remove -t buster \
|
||||
android-sdk-ext4-utils
|
||||
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@34f4ade99434043f88e164933f570301fd18b125
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@6f5af7e5574509726c79109e3c147cee95e81366
|
||||
|
||||
arch=armhf
|
||||
. .gitlab-ci/container/cross_build.sh
|
||||
|
@@ -14,6 +14,7 @@ apt-get install -y --no-remove \
|
||||
fastboot \
|
||||
netcat \
|
||||
procps \
|
||||
python-is-python3 \
|
||||
python3-distutils \
|
||||
python3-minimal \
|
||||
python3-serial \
|
||||
|
@@ -58,11 +58,13 @@ apt-get install -y --no-remove \
|
||||
make \
|
||||
meson \
|
||||
pkg-config \
|
||||
python-is-python3 \
|
||||
python3-mako \
|
||||
python3-pil \
|
||||
python3-requests \
|
||||
qemu-user \
|
||||
valgrind \
|
||||
wayland-protocols \
|
||||
wget \
|
||||
wine64 \
|
||||
x11proto-dri2-dev \
|
||||
@@ -72,7 +74,7 @@ apt-get install -y --no-remove \
|
||||
zlib1g-dev
|
||||
|
||||
# Needed for ci-fairy, this revision is able to upload files to MinIO
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@34f4ade99434043f88e164933f570301fd18b125
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@6f5af7e5574509726c79109e3c147cee95e81366
|
||||
|
||||
############### Uninstall ephemeral packages
|
||||
|
||||
|
@@ -11,6 +11,8 @@ STABLE_EPHEMERAL=" \
|
||||
automake \
|
||||
autotools-dev \
|
||||
bzip2 \
|
||||
cmake \
|
||||
libgbm-dev \
|
||||
libtool \
|
||||
python3-pip \
|
||||
"
|
||||
@@ -21,13 +23,10 @@ apt-get update
|
||||
|
||||
apt-get install -y --no-remove \
|
||||
$STABLE_EPHEMERAL \
|
||||
check \
|
||||
clang \
|
||||
cmake \
|
||||
libasan6 \
|
||||
libarchive-dev \
|
||||
libclang-cpp11-dev \
|
||||
libgbm-dev \
|
||||
libglvnd-dev \
|
||||
libllvmspirvlib-dev \
|
||||
liblua5.3-dev \
|
||||
@@ -68,8 +67,10 @@ chmod +x /usr/local/bin/x86_64-w64-mingw32-pkg-config
|
||||
|
||||
# dependencies where we want a specific version
|
||||
export XORG_RELEASES=https://xorg.freedesktop.org/releases/individual
|
||||
export WAYLAND_RELEASES=https://wayland.freedesktop.org/releases
|
||||
|
||||
export XORGMACROS_VERSION=util-macros-1.19.0
|
||||
export LIBWAYLAND_VERSION=wayland-1.18.0
|
||||
|
||||
wget $XORG_RELEASES/util/$XORGMACROS_VERSION.tar.bz2
|
||||
tar -xvf $XORGMACROS_VERSION.tar.bz2 && rm $XORGMACROS_VERSION.tar.bz2
|
||||
@@ -78,7 +79,11 @@ rm -rf $XORGMACROS_VERSION
|
||||
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
. .gitlab-ci/container/build-wayland.sh
|
||||
wget $WAYLAND_RELEASES/$LIBWAYLAND_VERSION.tar.xz
|
||||
tar -xvf $LIBWAYLAND_VERSION.tar.xz && rm $LIBWAYLAND_VERSION.tar.xz
|
||||
cd $LIBWAYLAND_VERSION; ./configure --enable-libraries --without-host-scanner --disable-documentation --disable-dtd-validation; make install; cd ..
|
||||
rm -rf $LIBWAYLAND_VERSION
|
||||
|
||||
|
||||
pushd /usr/local
|
||||
git clone https://gitlab.freedesktop.org/mesa/shader-db.git --depth 1
|
||||
|
@@ -41,6 +41,7 @@ apt-get install -y --no-remove \
|
||||
libxkbcommon0 \
|
||||
libxrandr2 \
|
||||
libxrender1 \
|
||||
python-is-python3 \
|
||||
python3-mako \
|
||||
python3-numpy \
|
||||
python3-packaging \
|
||||
@@ -59,7 +60,7 @@ apt-get install -y --no-install-recommends \
|
||||
|
||||
# Needed for ci-fairy, this revision is able to upload files to MinIO
|
||||
# and doesn't depend on git
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@34f4ade99434043f88e164933f570301fd18b125
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@0f1abc24c043e63894085a6bd12f14263e8b29eb
|
||||
|
||||
############### Build dEQP runner
|
||||
. .gitlab-ci/container/build-deqp-runner.sh
|
||||
|
@@ -12,6 +12,7 @@ STABLE_EPHEMERAL=" \
|
||||
bc \
|
||||
bison \
|
||||
bzip2 \
|
||||
cargo \
|
||||
ccache \
|
||||
clang-11 \
|
||||
cmake \
|
||||
@@ -22,7 +23,6 @@ STABLE_EPHEMERAL=" \
|
||||
libcap-dev \
|
||||
libclang-cpp11-dev \
|
||||
libelf-dev \
|
||||
libexpat1-dev \
|
||||
libfdt-dev \
|
||||
libgbm-dev \
|
||||
libgles2-mesa-dev \
|
||||
@@ -32,6 +32,7 @@ STABLE_EPHEMERAL=" \
|
||||
libudev-dev \
|
||||
libvulkan-dev \
|
||||
libwaffle-dev \
|
||||
libwayland-dev \
|
||||
libx11-xcb-dev \
|
||||
libxcb-dri2-0-dev \
|
||||
libxext-dev \
|
||||
@@ -45,18 +46,20 @@ STABLE_EPHEMERAL=" \
|
||||
patch \
|
||||
pkg-config \
|
||||
python3-distutils \
|
||||
wayland-protocols \
|
||||
wget \
|
||||
xz-utils \
|
||||
"
|
||||
|
||||
apt-get install -y --no-remove \
|
||||
$STABLE_EPHEMERAL \
|
||||
clinfo \
|
||||
inetutils-syslogd \
|
||||
iptables \
|
||||
libclang-common-11-dev \
|
||||
libclang-cpp11 \
|
||||
libcap2 \
|
||||
libegl1 \
|
||||
libepoxy-dev \
|
||||
libfdt1 \
|
||||
libllvmspirvlib11 \
|
||||
libxcb-shm0 \
|
||||
@@ -65,27 +68,11 @@ apt-get install -y --no-remove \
|
||||
python3-renderdoc \
|
||||
python3-simplejson \
|
||||
spirv-tools \
|
||||
sysvinit-core \
|
||||
wget
|
||||
sysvinit-core
|
||||
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
|
||||
############### Build libdrm
|
||||
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
############### Build Wayland
|
||||
|
||||
. .gitlab-ci/container/build-wayland.sh
|
||||
|
||||
############### Build Crosvm
|
||||
|
||||
. .gitlab-ci/container/build-rust.sh
|
||||
. .gitlab-ci/container/build-crosvm.sh
|
||||
rm -rf /root/.cargo
|
||||
rm -rf /root/.rustup
|
||||
|
||||
############### Build kernel
|
||||
|
||||
export DEFCONFIG="arch/x86/configs/x86_64_defconfig"
|
||||
@@ -96,14 +83,27 @@ export DEBIAN_ARCH=amd64
|
||||
mkdir -p /lava-files/
|
||||
. .gitlab-ci/container/build-kernel.sh
|
||||
|
||||
############### Build libdrm
|
||||
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
############### Build libclc
|
||||
|
||||
. .gitlab-ci/container/build-libclc.sh
|
||||
|
||||
############### Build virglrenderer
|
||||
|
||||
. .gitlab-ci/container/build-virglrenderer.sh
|
||||
|
||||
############### Build piglit
|
||||
|
||||
PIGLIT_OPTS="-DPIGLIT_BUILD_CL_TESTS=ON -DPIGLIT_BUILD_DMA_BUF_TESTS=ON" . .gitlab-ci/container/build-piglit.sh
|
||||
|
||||
############### Build Crosvm
|
||||
|
||||
. .gitlab-ci/container/build-crosvm.sh
|
||||
rm -rf /root/.cargo
|
||||
|
||||
############### Build dEQP GL
|
||||
|
||||
DEQP_TARGET=surfaceless . .gitlab-ci/container/build-deqp.sh
|
||||
|
@@ -13,7 +13,6 @@ STABLE_EPHEMERAL=" \
|
||||
g++-mingw-w64-i686-posix \
|
||||
g++-mingw-w64-x86-64-posix \
|
||||
glslang-tools \
|
||||
libexpat1-dev \
|
||||
libgbm-dev \
|
||||
libgles2-mesa-dev \
|
||||
liblz4-dev \
|
||||
@@ -21,6 +20,7 @@ STABLE_EPHEMERAL=" \
|
||||
libudev-dev \
|
||||
libvulkan-dev \
|
||||
libwaffle-dev \
|
||||
libwayland-dev \
|
||||
libx11-xcb-dev \
|
||||
libxcb-ewmh-dev \
|
||||
libxcb-keysyms1-dev \
|
||||
@@ -120,14 +120,6 @@ wine \
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
|
||||
############### Build libdrm
|
||||
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
############### Build Wayland
|
||||
|
||||
. .gitlab-ci/container/build-wayland.sh
|
||||
|
||||
############### Build parallel-deqp-runner's hang-detection tool
|
||||
|
||||
. .gitlab-ci/container/build-hang-detection.sh
|
||||
@@ -154,6 +146,10 @@ setup_wine "/vkd3d-proton-wine64"
|
||||
|
||||
. .gitlab-ci/container/build-vkd3d-proton.sh
|
||||
|
||||
############### Build libdrm
|
||||
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
############### Uninstall the build software
|
||||
|
||||
ccache --show-stats
|
||||
|
@@ -40,6 +40,9 @@ dnf install -y --setopt=install_weak_deps=False \
|
||||
"pkgconfig(pciaccess)" \
|
||||
"pkgconfig(vdpau)" \
|
||||
"pkgconfig(vulkan)" \
|
||||
"pkgconfig(wayland-egl-backend)" \
|
||||
"pkgconfig(wayland-protocols)" \
|
||||
"pkgconfig(wayland-scanner)" \
|
||||
"pkgconfig(x11)" \
|
||||
"pkgconfig(x11-xcb)" \
|
||||
"pkgconfig(xcb)" \
|
||||
@@ -71,8 +74,10 @@ dnf install -y --setopt=install_weak_deps=False \
|
||||
|
||||
# dependencies where we want a specific version
|
||||
export XORG_RELEASES=https://xorg.freedesktop.org/releases/individual
|
||||
export WAYLAND_RELEASES=https://wayland.freedesktop.org/releases
|
||||
|
||||
export XORGMACROS_VERSION=util-macros-1.19.0
|
||||
export LIBWAYLAND_VERSION=wayland-1.18.0
|
||||
|
||||
wget $XORG_RELEASES/util/$XORGMACROS_VERSION.tar.bz2
|
||||
tar -xvf $XORGMACROS_VERSION.tar.bz2 && rm $XORGMACROS_VERSION.tar.bz2
|
||||
@@ -81,7 +86,11 @@ rm -rf $XORGMACROS_VERSION
|
||||
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
. .gitlab-ci/container/build-wayland.sh
|
||||
wget $WAYLAND_RELEASES/$LIBWAYLAND_VERSION.tar.xz
|
||||
tar -xvf $LIBWAYLAND_VERSION.tar.xz && rm $LIBWAYLAND_VERSION.tar.xz
|
||||
cd $LIBWAYLAND_VERSION; ./configure --enable-libraries --without-host-scanner --disable-documentation --disable-dtd-validation; make install; cd ..
|
||||
rm -rf $LIBWAYLAND_VERSION
|
||||
|
||||
|
||||
pushd /usr/local
|
||||
git clone https://gitlab.freedesktop.org/mesa/shader-db.git --depth 1
|
||||
|
@@ -27,22 +27,19 @@ if [[ "$DEBIAN_ARCH" = "arm64" ]]; then
|
||||
KERNEL_ARCH="arm64"
|
||||
DEFCONFIG="arch/arm64/configs/defconfig"
|
||||
DEVICE_TREES="arch/arm64/boot/dts/rockchip/rk3399-gru-kevin.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/amlogic/meson-gxl-s805x-libretech-ac.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/amlogic/meson-gxl-s905x-libretech-cc.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/allwinner/sun50i-h6-pine-h64.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/amlogic/meson-gxm-khadas-vim2.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/qcom/apq8016-sbc.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/qcom/apq8096-db820c.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/amlogic/meson-g12b-a311d-khadas-vim3.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/mediatek/mt8183-kukui-jacuzzi-juniper-sku16.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/qcom/sc7180-trogdor-lazor-limozeen-nots.dtb"
|
||||
KERNEL_IMAGE_NAME="Image"
|
||||
elif [[ "$DEBIAN_ARCH" = "armhf" ]]; then
|
||||
GCC_ARCH="arm-linux-gnueabihf"
|
||||
KERNEL_ARCH="arm"
|
||||
DEFCONFIG="arch/arm/configs/multi_v7_defconfig"
|
||||
DEVICE_TREES="arch/arm/boot/dts/rk3288-veyron-jaq.dtb"
|
||||
DEVICE_TREES+=" arch/arm/boot/dts/sun8i-h3-libretech-all-h3-cc.dtb"
|
||||
DEVICE_TREES+=" arch/arm/boot/dts/imx6q-cubox-i.dtb"
|
||||
DEVICE_TREES="arch/arm/boot/dts/rk3288-veyron-jaq.dtb arch/arm/boot/dts/sun8i-h3-libretech-all-h3-cc.dtb"
|
||||
KERNEL_IMAGE_NAME="zImage"
|
||||
. .gitlab-ci/container/create-cross-file.sh armhf
|
||||
else
|
||||
@@ -51,7 +48,6 @@ else
|
||||
DEFCONFIG="arch/x86/configs/x86_64_defconfig"
|
||||
DEVICE_TREES=""
|
||||
KERNEL_IMAGE_NAME="bzImage"
|
||||
ARCH_PACKAGES="libva-dev"
|
||||
fi
|
||||
|
||||
# Determine if we're in a cross build.
|
||||
@@ -73,7 +69,6 @@ fi
|
||||
|
||||
apt-get update
|
||||
apt-get install -y --no-remove \
|
||||
${ARCH_PACKAGES} \
|
||||
automake \
|
||||
bc \
|
||||
cmake \
|
||||
@@ -82,12 +77,8 @@ apt-get install -y --no-remove \
|
||||
glslang-tools \
|
||||
libdrm-dev \
|
||||
libegl1-mesa-dev \
|
||||
libfontconfig-dev \
|
||||
libgbm-dev \
|
||||
libgl-dev \
|
||||
libgles2-mesa-dev \
|
||||
libglu1-mesa-dev \
|
||||
libglx-dev \
|
||||
libpng-dev \
|
||||
libssl-dev \
|
||||
libudev-dev \
|
||||
@@ -97,14 +88,11 @@ apt-get install -y --no-remove \
|
||||
libx11-xcb-dev \
|
||||
libxcb-dri2-0-dev \
|
||||
libxkbcommon-dev \
|
||||
ninja-build \
|
||||
patch \
|
||||
python-is-python3 \
|
||||
python3-distutils \
|
||||
python3-mako \
|
||||
python3-numpy \
|
||||
python3-serial \
|
||||
unzip \
|
||||
wget
|
||||
|
||||
|
||||
@@ -139,7 +127,8 @@ rm -rf /apitrace
|
||||
############### Build dEQP runner
|
||||
. .gitlab-ci/container/build-deqp-runner.sh
|
||||
mkdir -p /lava-files/rootfs-${DEBIAN_ARCH}/usr/bin
|
||||
mv /usr/local/bin/*-runner /lava-files/rootfs-${DEBIAN_ARCH}/usr/bin/.
|
||||
mv /usr/local/bin/deqp-runner /lava-files/rootfs-${DEBIAN_ARCH}/usr/bin/.
|
||||
mv /usr/local/bin/piglit-runner /lava-files/rootfs-${DEBIAN_ARCH}/usr/bin/.
|
||||
|
||||
|
||||
############### Build dEQP
|
||||
@@ -148,22 +137,10 @@ DEQP_TARGET=surfaceless . .gitlab-ci/container/build-deqp.sh
|
||||
mv /deqp /lava-files/rootfs-${DEBIAN_ARCH}/.
|
||||
|
||||
|
||||
############### Build SKQP
|
||||
if [[ "$DEBIAN_ARCH" = "arm64" ]]; then
|
||||
SKQP_ARCH="arm64" . .gitlab-ci/container/build-skqp.sh
|
||||
mv /skqp /lava-files/rootfs-${DEBIAN_ARCH}/.
|
||||
fi
|
||||
|
||||
|
||||
############### Build piglit
|
||||
PIGLIT_OPTS="-DPIGLIT_BUILD_DMA_BUF_TESTS=ON" . .gitlab-ci/container/build-piglit.sh
|
||||
mv /piglit /lava-files/rootfs-${DEBIAN_ARCH}/.
|
||||
|
||||
############### Build libva tests
|
||||
if [[ "$DEBIAN_ARCH" = "amd64" ]]; then
|
||||
. .gitlab-ci/container/build-va-tools.sh
|
||||
mv /va/bin/* /lava-files/rootfs-${DEBIAN_ARCH}/usr/bin/
|
||||
fi
|
||||
|
||||
############### Build libdrm
|
||||
EXTRA_MESON_ARGS+=" -D prefix=/libdrm"
|
||||
@@ -174,7 +151,6 @@ EXTRA_MESON_ARGS+=" -D prefix=/libdrm"
|
||||
|
||||
############### Delete rust, since the tests won't be compiling anything.
|
||||
rm -rf /root/.cargo
|
||||
rm -rf /root/.rustup
|
||||
|
||||
############### Create rootfs
|
||||
set +e
|
||||
@@ -201,8 +177,6 @@ rm /lava-files/rootfs-${DEBIAN_ARCH}/create-rootfs.sh
|
||||
# created.
|
||||
mkdir -p /lava-files/rootfs-${DEBIAN_ARCH}/usr/lib/$GCC_ARCH
|
||||
find /libdrm/ -name lib\*\.so\* | xargs cp -t /lava-files/rootfs-${DEBIAN_ARCH}/usr/lib/$GCC_ARCH/.
|
||||
mkdir -p /lava-files/rootfs-${DEBIAN_ARCH}/libdrm/
|
||||
cp -Rp /libdrm/share /lava-files/rootfs-${DEBIAN_ARCH}/libdrm/share
|
||||
rm -rf /libdrm
|
||||
|
||||
|
||||
@@ -220,7 +194,7 @@ popd
|
||||
. .gitlab-ci/container/container_post_build.sh
|
||||
|
||||
############### Upload the files!
|
||||
ci-fairy minio login --token-file "${CI_JOB_JWT_FILE}"
|
||||
ci-fairy minio login $CI_JOB_JWT
|
||||
FILES_TO_UPLOAD="lava-rootfs.tgz \
|
||||
$KERNEL_IMAGE_NAME"
|
||||
|
||||
|
@@ -1,11 +1,6 @@
|
||||
CONFIG_LOCALVERSION_AUTO=y
|
||||
CONFIG_DEBUG_KERNEL=y
|
||||
|
||||
CONFIG_PWM=y
|
||||
CONFIG_PM_DEVFREQ=y
|
||||
CONFIG_OF=y
|
||||
CONFIG_CROS_EC=y
|
||||
|
||||
# abootimg with a 'dummy' rootfs fails with root=/dev/nfs
|
||||
CONFIG_BLK_DEV_INITRD=n
|
||||
|
||||
@@ -62,7 +57,7 @@ CONFIG_X86_AMD_FREQ_SENSITIVITY=y
|
||||
CONFIG_PINCTRL=y
|
||||
CONFIG_PINCTRL_AMD=y
|
||||
CONFIG_DRM_AMDGPU=m
|
||||
CONFIG_DRM_AMDGPU_SI=y
|
||||
CONFIG_DRM_AMDGPU_SI=m
|
||||
CONFIG_DRM_AMDGPU_USERPTR=y
|
||||
CONFIG_DRM_AMD_ACP=n
|
||||
CONFIG_ACPI_WMI=y
|
||||
|
@@ -1 +1,2 @@
|
||||
lp_test_arit
|
||||
lp_test_format
|
||||
|
@@ -1,38 +1,25 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -e
|
||||
|
||||
export DEQP_TEMP_DIR="$1"
|
||||
set -ex
|
||||
|
||||
mount -t proc none /proc
|
||||
mount -t sysfs none /sys
|
||||
mount -t devtmpfs none /dev || echo possibly already mounted
|
||||
mkdir -p /dev/pts
|
||||
mount -t devpts devpts /dev/pts
|
||||
mount -t tmpfs tmpfs /tmp
|
||||
|
||||
. $DEQP_TEMP_DIR/crosvm-env.sh
|
||||
. /crosvm-env.sh
|
||||
|
||||
# The .gitlab-ci.yml script variable uses paths relative to the install
# directory, so change to that dir before running `crosvm-script`.
|
||||
cd "${CI_PROJECT_DIR}"
|
||||
# / is ro
|
||||
export PIGLIT_REPLAY_EXTRA_ARGS="$PIGLIT_REPLAY_EXTRA_ARGS --db-path /tmp/replayer-db"
|
||||
|
||||
# The exception is the dEQP binary, since it needs to run from the directory
|
||||
# it's in
|
||||
if [ -d "${DEQP_BIN_DIR}" ]
|
||||
then
|
||||
cd "${DEQP_BIN_DIR}"
|
||||
if sh $CROSVM_TEST_SCRIPT; then
|
||||
touch /results/success
|
||||
fi
|
||||
|
||||
dmesg --level crit,err,warn -w >> $DEQP_TEMP_DIR/stderr &
|
||||
|
||||
set +e
|
||||
stdbuf -oL sh $DEQP_TEMP_DIR/crosvm-script.sh 2>> $DEQP_TEMP_DIR/stderr >> $DEQP_TEMP_DIR/stdout
|
||||
echo $? > $DEQP_TEMP_DIR/exit_code
|
||||
set -e
|
||||
|
||||
sync
|
||||
sleep 1
|
||||
|
||||
poweroff -d -n -f || true
|
||||
|
||||
sleep 1 # Just in case init would exit before the kernel shuts down the VM
|
||||
sleep 10 # Just in case init would exit before the kernel shuts down the VM
|
||||
|
||||
exit 1
|
||||
|
@@ -1,58 +1,45 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -ex
|
||||
set -x
|
||||
|
||||
# This script can be called concurrently, pass arguments and env in a
|
||||
# per-instance tmp dir
|
||||
DEQP_TEMP_DIR=$(mktemp -d /tmp.XXXXXXXXXX)
|
||||
export DEQP_TEMP_DIR
|
||||
ln -sf $CI_PROJECT_DIR/install /install
|
||||
|
||||
# The dEQP binary needs to run from the directory it's in
|
||||
if [ -n "${1##*.sh}" ] && [ -z "${1##*"deqp"*}" ]; then
|
||||
DEQP_BIN_DIR=$(dirname "$1")
|
||||
export DEQP_BIN_DIR
|
||||
fi
|
||||
export LD_LIBRARY_PATH=$CI_PROJECT_DIR/install/lib/
|
||||
export EGL_PLATFORM=surfaceless
|
||||
|
||||
# Securely pass the current variables to the crosvm environment
|
||||
CI_COMMON="$CI_PROJECT_DIR"/install/common
|
||||
echo "Variables passed through:"
|
||||
"${CI_COMMON}"/generate-env.sh | tee ${DEQP_TEMP_DIR}/crosvm-env.sh
|
||||
export -p > /crosvm-env.sh
|
||||
export GALLIUM_DRIVER="$CROSVM_GALLIUM_DRIVER"
|
||||
export LIBGL_ALWAYS_SOFTWARE="true"
|
||||
|
||||
CROSVM_KERNEL_ARGS="quiet console=null root=my_root rw rootfstype=virtiofs init=$CI_PROJECT_DIR/install/crosvm-init.sh ip=192.168.30.2::192.168.30.1:255.255.255.0:crosvm:eth0 -- $DEQP_TEMP_DIR"
|
||||
CROSVM_KERNEL_ARGS="root=my_root rw rootfstype=virtiofs loglevel=3 init=$CI_PROJECT_DIR/install/crosvm-init.sh ip=192.168.30.2::192.168.30.1:255.255.255.0:crosvm:eth0"
|
||||
|
||||
# Set the crosvm-script as the arguments of the current script.
|
||||
echo "$@" > $DEQP_TEMP_DIR/crosvm-script.sh
|
||||
# Temporary results dir because from the guest we cannot write to /
|
||||
mkdir -p /results
|
||||
mount -t tmpfs tmpfs /results
|
||||
|
||||
mkdir -p /piglit/.gitlab-ci/piglit
|
||||
mount -t tmpfs tmpfs /piglit/.gitlab-ci/piglit
|
||||
|
||||
unset DISPLAY
|
||||
unset XDG_RUNTIME_DIR
|
||||
|
||||
/usr/sbin/iptables-legacy -w -t nat -A POSTROUTING -o eth0 -j MASQUERADE
|
||||
/usr/sbin/iptables-legacy -t nat -A POSTROUTING -o eth0 -j MASQUERADE
|
||||
echo 1 > /proc/sys/net/ipv4/ip_forward
|
||||
|
||||
# Send output from guest to host
|
||||
touch $DEQP_TEMP_DIR/stderr $DEQP_TEMP_DIR/stdout
|
||||
tail -f $DEQP_TEMP_DIR/stderr >> /dev/stderr &
|
||||
ERR_TAIL_PID=$!
|
||||
tail -f $DEQP_TEMP_DIR/stdout >> /dev/stdout &
|
||||
OUT_TAIL_PID=$!
|
||||
# Crosvm wants this
|
||||
syslogd > /dev/null
|
||||
|
||||
trap "exit \$exit_code" INT TERM
|
||||
trap "exit_code=\$?; kill $ERR_TAIL_PID $OUT_TAIL_PID; rm -rf $DEQP_TEMP_DIR" EXIT
|
||||
|
||||
# We aren't testing LLVMPipe here, so we don't need to validate NIR on the host
|
||||
NIR_DEBUG="novalidate" LIBGL_ALWAYS_SOFTWARE="true" GALLIUM_DRIVER="$CROSVM_GALLIUM_DRIVER" crosvm run \
|
||||
--gpu "$CROSVM_GPU_ARGS" \
|
||||
crosvm run \
|
||||
--gpu gles=false,backend=3d,egl=true,surfaceless=true \
|
||||
-m 4096 \
|
||||
-c 2 \
|
||||
-c 4 \
|
||||
--disable-sandbox \
|
||||
--shared-dir /:my_root:type=fs:writeback=true:timeout=60:cache=always \
|
||||
--host_ip=192.168.30.1 --netmask=255.255.255.0 --mac "AA:BB:CC:00:00:12" \
|
||||
-p "$CROSVM_KERNEL_ARGS" \
|
||||
/lava-files/bzImage > $DEQP_TEMP_DIR/crosvm 2>&1
|
||||
/lava-files/bzImage
|
||||
|
||||
RET=$(cat $DEQP_TEMP_DIR/exit_code || true)
|
||||
mkdir -p $CI_PROJECT_DIR/results
|
||||
mv /results/* $CI_PROJECT_DIR/results/.
|
||||
|
||||
# Got no exit code from the script, show crosvm output to help with debugging
|
||||
[ -n "$RET" ] || cat $DEQP_TEMP_DIR/crosvm || true
|
||||
|
||||
exit ${RET:-1}
|
||||
test -f $CI_PROJECT_DIR/results/success
|
||||
|
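# Hedged usage sketch, not part of the runner above: the script writes its
# arguments out as the crosvm-script executed inside the guest, so a caller
# typically wraps the real test command with it; the command below is
# illustrative only.
install/crosvm-runner.sh install/deqp-runner.sh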
@@ -6,12 +6,5 @@
|
||||
# reliable to be run in parallel with other tests due to CPU-side timing.
|
||||
dEQP-GLES[0-9]*.functional.flush_finish.*
|
||||
|
||||
# piglit: WGL is Windows-only
|
||||
wgl@.*
|
||||
|
||||
# These are sensitive to CPU timing, and would need to be run in isolation
|
||||
# on the system rather than in parallel with other tests.
|
||||
glx@glx_arb_sync_control@timing.*
|
||||
|
||||
# This test is not built with waffle, while we do build tests with waffle
|
||||
spec@!opengl 1.1@windowoverlap
|
||||
# https://gitlab.freedesktop.org/mesa/mesa/-/issues/4575
|
||||
dEQP-VK.wsi.display.get_display_plane_capabilities
|
@@ -2,11 +2,30 @@
|
||||
|
||||
set -ex
|
||||
|
||||
# Needed so configuration files can contain paths to files in /install
|
||||
ln -sf $CI_PROJECT_DIR/install /install
|
||||
DEQP_WIDTH=${DEQP_WIDTH:-256}
|
||||
DEQP_HEIGHT=${DEQP_HEIGHT:-256}
|
||||
DEQP_CONFIG=${DEQP_CONFIG:-rgba8888d24s8ms0}
|
||||
DEQP_VARIANT=${DEQP_VARIANT:-master}
|
||||
|
||||
DEQP_OPTIONS="$DEQP_OPTIONS --deqp-surface-width=$DEQP_WIDTH --deqp-surface-height=$DEQP_HEIGHT"
|
||||
DEQP_OPTIONS="$DEQP_OPTIONS --deqp-surface-type=${DEQP_SURFACE_TYPE:-pbuffer}"
|
||||
DEQP_OPTIONS="$DEQP_OPTIONS --deqp-gl-config-name=$DEQP_CONFIG"
|
||||
DEQP_OPTIONS="$DEQP_OPTIONS --deqp-visibility=hidden"
|
||||
|
||||
if [ -z "$DEQP_VER" ]; then
|
||||
echo 'DEQP_VER must be set to something like "gles2", "gles31-khr" or "vk" for the test run'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$DEQP_VER" = "vk" ]; then
|
||||
if [ -z "$VK_DRIVER" ]; then
|
||||
echo 'VK_DRIVER must be set to something like "radeon" or "intel" for the test run'
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$GPU_VERSION" ]; then
|
||||
echo 'GPU_VERSION must be set to something like "llvmpipe" or "freedreno-a630" (the name used in .gitlab-ci/gpu-version-*.txt)'
|
||||
echo 'GPU_VERSION must be set to something like "llvmpipe" or "freedreno-a630" (the name used in .gitlab-ci/deqp-gpu-version-*.txt)'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
@@ -17,103 +36,145 @@ export LD_LIBRARY_PATH=`pwd`/install/lib/
|
||||
export EGL_PLATFORM=surfaceless
|
||||
export VK_ICD_FILENAMES=`pwd`/install/share/vulkan/icd.d/"$VK_DRIVER"_icd.${VK_CPU:-`uname -m`}.json
|
||||
|
||||
# the runner was failing to look for libkms in /usr/local/lib for some reason
|
||||
# I never figured out.
|
||||
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib
|
||||
|
||||
RESULTS=`pwd`/${DEQP_RESULTS_DIR:-results}
|
||||
mkdir -p $RESULTS
|
||||
|
||||
HANG_DETECTION_CMD=""
|
||||
|
||||
if [ -z "$DEQP_SUITE" ]; then
|
||||
if [ -z "$DEQP_VER" ]; then
|
||||
echo 'DEQP_SUITE must be set to the name of your deqp-gpu_version.toml, or DEQP_VER must be set to something like "gles2", "gles31-khr" or "vk" for the test run'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
DEQP_WIDTH=${DEQP_WIDTH:-256}
|
||||
DEQP_HEIGHT=${DEQP_HEIGHT:-256}
|
||||
DEQP_CONFIG=${DEQP_CONFIG:-rgba8888d24s8ms0}
|
||||
DEQP_VARIANT=${DEQP_VARIANT:-master}
|
||||
|
||||
DEQP_OPTIONS="$DEQP_OPTIONS --deqp-surface-width=$DEQP_WIDTH --deqp-surface-height=$DEQP_HEIGHT"
|
||||
DEQP_OPTIONS="$DEQP_OPTIONS --deqp-surface-type=${DEQP_SURFACE_TYPE:-pbuffer}"
|
||||
DEQP_OPTIONS="$DEQP_OPTIONS --deqp-gl-config-name=$DEQP_CONFIG"
|
||||
DEQP_OPTIONS="$DEQP_OPTIONS --deqp-visibility=hidden"
|
||||
|
||||
if [ "$DEQP_VER" = "vk" -a -z "$VK_DRIVER" ]; then
|
||||
echo 'VK_DRIVER must be set to something like "radeon" or "intel" for the test run'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Generate test case list file.
|
||||
if [ "$DEQP_VER" = "vk" ]; then
|
||||
MUSTPASS=/deqp/mustpass/vk-$DEQP_VARIANT.txt
|
||||
DEQP=/deqp/external/vulkancts/modules/vulkan/deqp-vk
|
||||
HANG_DETECTION_CMD="/parallel-deqp-runner/build/bin/hang-detection"
|
||||
elif [ "$DEQP_VER" = "gles2" -o "$DEQP_VER" = "gles3" -o "$DEQP_VER" = "gles31" -o "$DEQP_VER" = "egl" ]; then
|
||||
MUSTPASS=/deqp/mustpass/$DEQP_VER-$DEQP_VARIANT.txt
|
||||
DEQP=/deqp/modules/$DEQP_VER/deqp-$DEQP_VER
|
||||
elif [ "$DEQP_VER" = "gles2-khr" -o "$DEQP_VER" = "gles3-khr" -o "$DEQP_VER" = "gles31-khr" -o "$DEQP_VER" = "gles32-khr" ]; then
|
||||
MUSTPASS=/deqp/mustpass/$DEQP_VER-$DEQP_VARIANT.txt
|
||||
DEQP=/deqp/external/openglcts/modules/glcts
|
||||
else
|
||||
MUSTPASS=/deqp/mustpass/$DEQP_VER-$DEQP_VARIANT.txt
|
||||
DEQP=/deqp/external/openglcts/modules/glcts
|
||||
fi
|
||||
|
||||
cp $MUSTPASS /tmp/case-list.txt
|
||||
|
||||
# If the caselist is too long to run in a reasonable amount of time, let the job
|
||||
# specify what fraction (1/n) of the caselist we should run. Note: N~M is a gnu
|
||||
# sed extension to match every nth line (first line is #1).
|
||||
if [ -n "$DEQP_FRACTION" ]; then
|
||||
sed -ni 1~$DEQP_FRACTION"p" /tmp/case-list.txt
|
||||
fi
|
||||
|
||||
# If the job is parallel at the gitlab job level, take the corresponding fraction
|
||||
# of the caselist.
|
||||
if [ -n "$CI_NODE_INDEX" ]; then
|
||||
sed -ni $CI_NODE_INDEX~$CI_NODE_TOTAL"p" /tmp/case-list.txt
|
||||
fi
|
||||
|
||||
if [ -n "$DEQP_CASELIST_FILTER" ]; then
|
||||
sed -ni "/$DEQP_CASELIST_FILTER/p" /tmp/case-list.txt
|
||||
fi
|
||||
|
||||
if [ -n "$DEQP_CASELIST_INV_FILTER" ]; then
|
||||
sed -ni "/$DEQP_CASELIST_INV_FILTER/!p" /tmp/case-list.txt
|
||||
fi
|
||||
|
||||
if [ ! -s /tmp/case-list.txt ]; then
|
||||
echo "Caselist generation failed"
|
||||
exit 1
|
||||
fi
|
||||
# Generate test case list file.
|
||||
if [ "$DEQP_VER" = "vk" ]; then
|
||||
cp /deqp/mustpass/vk-$DEQP_VARIANT.txt /tmp/case-list.txt
|
||||
DEQP=/deqp/external/vulkancts/modules/vulkan/deqp-vk
|
||||
HANG_DETECTION_CMD="/parallel-deqp-runner/build/bin/hang-detection"
|
||||
elif [ "$DEQP_VER" = "gles2" -o "$DEQP_VER" = "gles3" -o "$DEQP_VER" = "gles31" -o "$DEQP_VER" = "egl" ]; then
|
||||
cp /deqp/mustpass/$DEQP_VER-$DEQP_VARIANT.txt /tmp/case-list.txt
|
||||
DEQP=/deqp/modules/$DEQP_VER/deqp-$DEQP_VER
|
||||
SUITE=dEQP
|
||||
elif [ "$DEQP_VER" = "gles2-khr" -o "$DEQP_VER" = "gles3-khr" -o "$DEQP_VER" = "gles31-khr" -o "$DEQP_VER" = "gles32-khr" ]; then
|
||||
cp /deqp/mustpass/$DEQP_VER-$DEQP_VARIANT.txt /tmp/case-list.txt
|
||||
DEQP=/deqp/external/openglcts/modules/glcts
|
||||
SUITE=dEQP
|
||||
else
|
||||
cp /deqp/mustpass/$DEQP_VER-$DEQP_VARIANT.txt /tmp/case-list.txt
|
||||
DEQP=/deqp/external/openglcts/modules/glcts
|
||||
SUITE=KHR
|
||||
fi
|
||||
|
||||
if [ -e "$INSTALL/$GPU_VERSION-fails.txt" ]; then
|
||||
DEQP_RUNNER_OPTIONS="$DEQP_RUNNER_OPTIONS --baseline $INSTALL/$GPU_VERSION-fails.txt"
|
||||
# If the caselist is too long to run in a reasonable amount of time, let the job
|
||||
# specify what fraction (1/n) of the caselist we should run. Note: N~M is a gnu
|
||||
# sed extension to match every nth line (first line is #1).
|
||||
if [ -n "$DEQP_FRACTION" ]; then
|
||||
sed -ni 1~$DEQP_FRACTION"p" /tmp/case-list.txt
|
||||
fi
|
||||
|
||||
# If the job is parallel at the gitlab job level, take the corresponding fraction
|
||||
# of the caselist.
|
||||
if [ -n "$CI_NODE_INDEX" ]; then
|
||||
sed -ni $CI_NODE_INDEX~$CI_NODE_TOTAL"p" /tmp/case-list.txt
|
||||
fi
|
||||
|
||||
if [ -n "$DEQP_CASELIST_FILTER" ]; then
|
||||
sed -ni "/$DEQP_CASELIST_FILTER/p" /tmp/case-list.txt
|
||||
fi
|
||||
|
||||
if [ -n "$DEQP_CASELIST_INV_FILTER" ]; then
|
||||
sed -ni "/$DEQP_CASELIST_INV_FILTER/!p" /tmp/case-list.txt
|
||||
fi
|
||||
|
||||
if [ ! -s /tmp/case-list.txt ]; then
|
||||
echo "Caselist generation failed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -e "$INSTALL/deqp-$GPU_VERSION-fails.txt" ]; then
|
||||
DEQP_RUNNER_OPTIONS="$DEQP_RUNNER_OPTIONS --baseline $INSTALL/deqp-$GPU_VERSION-fails.txt"
|
||||
fi
|
||||
|
||||
# Default to an empty known flakes file if it doesn't exist.
|
||||
touch $INSTALL/$GPU_VERSION-flakes.txt
|
||||
touch $INSTALL/deqp-$GPU_VERSION-flakes.txt
|
||||
|
||||
|
||||
if [ -n "$VK_DRIVER" ] && [ -e "$INSTALL/$VK_DRIVER-skips.txt" ]; then
|
||||
DEQP_SKIPS="$DEQP_SKIPS $INSTALL/$VK_DRIVER-skips.txt"
|
||||
fi
|
||||
|
||||
if [ -n "$GALLIUM_DRIVER" ] && [ -e "$INSTALL/$GALLIUM_DRIVER-skips.txt" ]; then
|
||||
DEQP_SKIPS="$DEQP_SKIPS $INSTALL/$GALLIUM_DRIVER-skips.txt"
|
||||
fi
|
||||
|
||||
if [ -n "$DRIVER_NAME" ] && [ -e "$INSTALL/$DRIVER_NAME-skips.txt" ]; then
|
||||
DEQP_SKIPS="$DEQP_SKIPS $INSTALL/$DRIVER_NAME-skips.txt"
|
||||
fi
|
||||
|
||||
if [ -e "$INSTALL/$GPU_VERSION-skips.txt" ]; then
|
||||
DEQP_SKIPS="$DEQP_SKIPS $INSTALL/$GPU_VERSION-skips.txt"
|
||||
if [ -e "$INSTALL/deqp-$GPU_VERSION-skips.txt" ]; then
|
||||
DEQP_SKIPS="$INSTALL/deqp-$GPU_VERSION-skips.txt"
|
||||
fi
|
||||
|
||||
set +e
|
||||
|
||||
if [ -n "$DEQP_PARALLEL" ]; then
|
||||
JOB="--jobs $DEQP_PARALLEL"
|
||||
elif [ -n "$FDO_CI_CONCURRENT" ]; then
|
||||
JOB="--jobs $FDO_CI_CONCURRENT"
|
||||
else
|
||||
JOB="--jobs 4"
|
||||
fi
|
||||
|
||||
# If this CI lab lacks artifacts support, print the whole list of failures/flakes.
|
||||
if [ -n "$DEQP_NO_SAVE_RESULTS" ]; then
|
||||
SUMMARY_LIMIT="--summary-limit 0"
|
||||
fi
|
||||
|
||||
run_cts() {
|
||||
deqp=$1
|
||||
caselist=$2
|
||||
output=$3
|
||||
deqp-runner \
|
||||
run \
|
||||
--deqp $deqp \
|
||||
--output $RESULTS \
|
||||
--caselist $caselist \
|
||||
--skips $INSTALL/deqp-all-skips.txt $DEQP_SKIPS \
|
||||
--flakes $INSTALL/deqp-$GPU_VERSION-flakes.txt \
|
||||
--testlog-to-xml /deqp/executor/testlog-to-xml \
|
||||
$JOB \
|
||||
$SUMMARY_LIMIT \
|
||||
$DEQP_RUNNER_OPTIONS \
|
||||
-- \
|
||||
$DEQP_OPTIONS
|
||||
}
|
||||
|
||||
parse_renderer() {
|
||||
RENDERER=`grep -A1 TestCaseResult.\*info.renderer $RESULTS/deqp-info.qpa | grep '<Text' | sed 's|.*<Text>||g' | sed 's|</Text>||g'`
|
||||
VERSION=`grep -A1 TestCaseResult.\*info.version $RESULTS/deqp-info.qpa | grep '<Text' | sed 's|.*<Text>||g' | sed 's|</Text>||g'`
|
||||
echo "Renderer: $RENDERER"
|
||||
echo "Version: $VERSION "
|
||||
|
||||
if ! echo $RENDERER | grep -q $DEQP_EXPECTED_RENDERER; then
|
||||
echo "Expected GL_RENDERER $DEQP_EXPECTED_RENDERER"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
check_renderer() {
|
||||
if echo $DEQP_VER | grep -q egl; then
|
||||
return
|
||||
fi
|
||||
echo "Capturing renderer info for GLES driver sanity checks"
|
||||
# If you're having trouble loading your driver, uncommenting this may help
|
||||
# debug.
|
||||
# export EGL_LOG_LEVEL=debug
|
||||
VERSION=`echo $DEQP_VER | cut -d '-' -f1 | tr '[a-z]' '[A-Z]'`
|
||||
export LD_PRELOAD=$TEST_LD_PRELOAD
|
||||
$DEQP $DEQP_OPTIONS --deqp-case=$SUITE-$VERSION.info.\* --deqp-log-filename=$RESULTS/deqp-info.qpa
|
||||
export LD_PRELOAD=
|
||||
parse_renderer
|
||||
}
|
||||
|
||||
check_vk_device_name() {
|
||||
echo "Capturing device info for VK driver sanity checks"
|
||||
export LD_PRELOAD=$TEST_LD_PRELOAD
|
||||
$DEQP $DEQP_OPTIONS --deqp-case=dEQP-VK.info.device --deqp-log-filename=$RESULTS/deqp-info.qpa
|
||||
export LD_PRELOAD=
|
||||
DEVICENAME=`grep deviceName $RESULTS/deqp-info.qpa | sed 's|deviceName: ||g'`
|
||||
echo "deviceName: $DEVICENAME"
|
||||
if ! echo $DEVICENAME | grep -q "$DEQP_EXPECTED_RENDERER"; then
|
||||
echo "Expected deviceName $DEQP_EXPECTED_RENDERER"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
report_load() {
|
||||
echo "System load: $(cut -d' ' -f1-3 < /proc/loadavg)"
|
||||
echo "# of CPU cores: $(cat /proc/cpuinfo | grep processor | wc -l)"
|
||||
@@ -136,47 +197,27 @@ if [ "$GALLIUM_DRIVER" = "virpipe" ]; then
|
||||
fi
|
||||
|
||||
GALLIUM_DRIVER=llvmpipe \
|
||||
GALLIVM_PERF="nopt,no_filter_hacks" \
|
||||
virgl_test_server $VTEST_ARGS >$RESULTS/vtest-log.txt 2>&1 &
|
||||
|
||||
sleep 1
|
||||
fi
|
||||
|
||||
if [ -z "$DEQP_SUITE" ]; then
|
||||
if [ -n "$DEQP_EXPECTED_RENDERER" ]; then
|
||||
export DEQP_RUNNER_OPTIONS="$DEQP_RUNNER_OPTIONS --renderer-check "$DEQP_EXPECTED_RENDERER""
|
||||
fi
|
||||
if [ $DEQP_VER != vk -a $DEQP_VER != egl ]; then
|
||||
export DEQP_RUNNER_OPTIONS="$DEQP_RUNNER_OPTIONS --version-check `cat $INSTALL/VERSION | sed 's/[() ]/./g'`"
|
||||
fi
|
||||
|
||||
deqp-runner \
|
||||
run \
|
||||
--deqp $DEQP \
|
||||
--output $RESULTS \
|
||||
--caselist /tmp/case-list.txt \
|
||||
--skips $INSTALL/all-skips.txt $DEQP_SKIPS \
|
||||
--flakes $INSTALL/$GPU_VERSION-flakes.txt \
|
||||
--testlog-to-xml /deqp/executor/testlog-to-xml \
|
||||
--jobs ${FDO_CI_CONCURRENT:-4} \
|
||||
$DEQP_RUNNER_OPTIONS \
|
||||
-- \
|
||||
$DEQP_OPTIONS
|
||||
if [ $DEQP_VER = vk ]; then
|
||||
quiet check_vk_device_name
|
||||
else
|
||||
deqp-runner \
|
||||
suite \
|
||||
--suite $INSTALL/deqp-$DEQP_SUITE.toml \
|
||||
--output $RESULTS \
|
||||
--skips $INSTALL/all-skips.txt $DEQP_SKIPS \
|
||||
--flakes $INSTALL/$GPU_VERSION-flakes.txt \
|
||||
--testlog-to-xml /deqp/executor/testlog-to-xml \
|
||||
--fraction-start $CI_NODE_INDEX \
|
||||
--fraction $CI_NODE_TOTAL \
|
||||
--jobs ${FDO_CI_CONCURRENT:-4} \
|
||||
$DEQP_RUNNER_OPTIONS
|
||||
quiet check_renderer
|
||||
fi
|
||||
|
||||
RESULTS_CSV=$RESULTS/results.csv
|
||||
FAILURES_CSV=$RESULTS/failures.csv
|
||||
|
||||
export LD_PRELOAD=$TEST_LD_PRELOAD
|
||||
|
||||
run_cts $DEQP /tmp/case-list.txt $RESULTS_CSV
|
||||
DEQP_EXITCODE=$?
|
||||
|
||||
export LD_PRELOAD=
|
||||
quiet report_load
|
||||
|
||||
# Remove all but the first 50 individual XML files uploaded as artifacts, to
|
||||
@@ -191,8 +232,8 @@ find $RESULTS -name \*.xml \
|
||||
-exec cp /deqp/testlog.css /deqp/testlog.xsl "$RESULTS/" ";" \
|
||||
-quit
|
||||
|
||||
deqp-runner junit \
|
||||
--testsuite dEQP \
|
||||
$HANG_DETECTION_CMD deqp-runner junit \
|
||||
--testsuite $DEQP_VER \
|
||||
--results $RESULTS/failures.csv \
|
||||
--output $RESULTS/junit.xml \
|
||||
--limit 50 \
|
||||
@@ -203,8 +244,8 @@ if [ -n "$FLAKES_CHANNEL" ]; then
|
||||
python3 $INSTALL/report-flakes.py \
|
||||
--host irc.oftc.net \
|
||||
--port 6667 \
|
||||
--results $RESULTS/results.csv \
|
||||
--known-flakes $INSTALL/$GPU_VERSION-flakes.txt \
|
||||
--results $RESULTS_CSV \
|
||||
--known-flakes $INSTALL/deqp-$GPU_VERSION-flakes.txt \
|
||||
--channel "$FLAKES_CHANNEL" \
|
||||
--runner "$CI_RUNNER_DESCRIPTION" \
|
||||
--job "$CI_JOB_ID" \
|
||||
|
@@ -1,70 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -ex
|
||||
|
||||
INSTALL=`pwd`/install
|
||||
|
||||
# Set up the driver environment.
|
||||
export LD_LIBRARY_PATH=`pwd`/install/lib/
|
||||
export LIBVA_DRIVERS_PATH=`pwd`/install/lib/dri/
|
||||
# libva spams driver open info by default, and that happens per testcase.
|
||||
export LIBVA_MESSAGING_LEVEL=1
|
||||
|
||||
if [ -e "$INSTALL/$GPU_VERSION-fails.txt" ]; then
|
||||
GTEST_RUNNER_OPTIONS="$GTEST_RUNNER_OPTIONS --baseline $INSTALL/$GPU_VERSION-fails.txt"
|
||||
fi
|
||||
|
||||
# Default to an empty known flakes file if it doesn't exist.
|
||||
touch $INSTALL/$GPU_VERSION-flakes.txt
|
||||
|
||||
if [ -n "$GALLIUM_DRIVER" ] && [ -e "$INSTALL/$GALLIUM_DRIVER-skips.txt" ]; then
|
||||
GTEST_SKIPS="$GTEST_SKIPS --skips $INSTALL/$GALLIUM_DRIVER-skips.txt"
|
||||
fi
|
||||
|
||||
if [ -n "$DRIVER_NAME" ] && [ -e "$INSTALL/$DRIVER_NAME-skips.txt" ]; then
|
||||
GTEST_SKIPS="$GTEST_SKIPS --skips $INSTALL/$DRIVER_NAME-skips.txt"
|
||||
fi
|
||||
|
||||
if [ -e "$INSTALL/$GPU_VERSION-skips.txt" ]; then
|
||||
GTEST_SKIPS="$GTEST_SKIPS --skips $INSTALL/$GPU_VERSION-skips.txt"
|
||||
fi
|
||||
|
||||
set +e
|
||||
|
||||
gtest-runner \
|
||||
run \
|
||||
--gtest $GTEST \
|
||||
--output ${GTEST_RESULTS_DIR:-results} \
|
||||
--jobs ${FDO_CI_CONCURRENT:-4} \
|
||||
$GTEST_SKIPS \
|
||||
--flakes $INSTALL/$GPU_VERSION-flakes.txt \
|
||||
--fraction-start ${CI_NODE_INDEX:-1} \
|
||||
--fraction $((${CI_NODE_TOTAL:-1} * ${GTEST_FRACTION:-1})) \
|
||||
--env "LD_PRELOAD=$TEST_LD_PRELOAD" \
|
||||
$GTEST_RUNNER_OPTIONS
|
||||
|
||||
GTEST_EXITCODE=$?
|
||||
|
||||
deqp-runner junit \
|
||||
--testsuite gtest \
|
||||
--results $RESULTS/failures.csv \
|
||||
--output $RESULTS/junit.xml \
|
||||
--limit 50 \
|
||||
--template "See https://$CI_PROJECT_ROOT_NAMESPACE.pages.freedesktop.org/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/results/{{testcase}}.xml"
|
||||
|
||||
# Report the flakes to the IRC channel for monitoring (if configured):
|
||||
if [ -n "$FLAKES_CHANNEL" ]; then
|
||||
python3 $INSTALL/report-flakes.py \
|
||||
--host irc.oftc.net \
|
||||
--port 6667 \
|
||||
--results $RESULTS/results.csv \
|
||||
--known-flakes $INSTALL/$GPU_VERSION-flakes.txt \
|
||||
--channel "$FLAKES_CHANNEL" \
|
||||
--runner "$CI_RUNNER_DESCRIPTION" \
|
||||
--job "$CI_JOB_ID" \
|
||||
--url "$CI_JOB_URL" \
|
||||
--branch "${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME:-$CI_COMMIT_BRANCH}" \
|
||||
--branch-title "${CI_MERGE_REQUEST_TITLE:-$CI_COMMIT_TITLE}"
|
||||
fi
|
||||
|
||||
exit $GTEST_EXITCODE
|
@@ -1,21 +0,0 @@
|
||||
variables:
|
||||
DEBIAN_X86_BUILD_BASE_IMAGE: "debian/x86_build-base"
|
||||
DEBIAN_BASE_TAG: "2022-01-02-deqp-runner"
|
||||
|
||||
DEBIAN_X86_BUILD_IMAGE_PATH: "debian/x86_build"
|
||||
DEBIAN_BUILD_TAG: "2021-12-31-keep-cmake"
|
||||
|
||||
DEBIAN_X86_TEST_BASE_IMAGE: "debian/x86_test-base"
|
||||
|
||||
DEBIAN_X86_TEST_IMAGE_PATH: "debian/x86_test-gl"
|
||||
DEBIAN_X86_TEST_GL_TAG: "2022-01-18-kernel"
|
||||
DEBIAN_X86_TEST_VK_TAG: "2022-01-02-deqp-runner"
|
||||
|
||||
FEDORA_X86_BUILD_TAG: "2021-12-31-refactor"
|
||||
KERNEL_ROOTFS_TAG: "2022-01-18-kernel"
|
||||
|
||||
WINDOWS_X64_BUILD_PATH: "windows/x64_build"
|
||||
WINDOWS_X64_BUILD_TAG: "2022-20-02-base_split"
|
||||
|
||||
WINDOWS_X64_TEST_PATH: "windows/x64_test"
|
||||
WINDOWS_X64_TEST_TAG: "2022-20-02-base_split"
|
@@ -5,7 +5,8 @@
|
||||
interruptible: true
|
||||
variables:
|
||||
GIT_STRATEGY: none # testing doesn't build anything from source
|
||||
FDO_CI_CONCURRENT: 6 # should be replaced by per-machine definitions
|
||||
DEQP_PARALLEL: 6 # should be replaced by per-machine definitions
|
||||
DEQP_NO_SAVE_RESULTS: 1 # but why not ... ?
|
||||
DEQP_VER: gles2
|
||||
# proxy used to cache data locally
|
||||
FDO_HTTP_CACHE_URI: "http://caching-proxy/cache/?uri="
|
||||
@@ -18,7 +19,6 @@
|
||||
JOB_ROOTFS_OVERLAY_PATH: "${JOB_ARTIFACTS_BASE}/job-rootfs-overlay.tar.gz"
|
||||
JOB_RESULTS_PATH: "${JOB_ARTIFACTS_BASE}/results.tar.gz"
|
||||
PIGLIT_NO_WINDOW: 1
|
||||
VISIBILITY_GROUP: "Collabora+fdo"
|
||||
script:
|
||||
- ./artifacts/lava/lava-submit.sh
|
||||
artifacts:
|
||||
@@ -85,7 +85,7 @@
|
||||
|
||||
.lava-traces-base:
|
||||
variables:
|
||||
HWCI_TEST_SCRIPT: "/install/piglit/piglit-traces.sh"
|
||||
HWCI_TEST_SCRIPT: "/install/piglit/run.sh"
|
||||
artifacts:
|
||||
reports:
|
||||
junit: results/junit.xml
|
||||
|
@@ -14,12 +14,15 @@ fi
|
||||
rm -rf results
|
||||
mkdir -p results/job-rootfs-overlay/
|
||||
|
||||
# LAVA always uploads to MinIO when necessary as we don't have direct upload
|
||||
# from the DUT
|
||||
export PIGLIT_REPLAY_UPLOAD_TO_MINIO=1
|
||||
cp artifacts/ci-common/capture-devcoredump.sh results/job-rootfs-overlay/
|
||||
cp artifacts/ci-common/init-*.sh results/job-rootfs-overlay/
|
||||
artifacts/ci-common/generate-env.sh > results/job-rootfs-overlay/set-job-env-vars.sh
|
||||
|
||||
tar zcf job-rootfs-overlay.tar.gz -C results/job-rootfs-overlay/ .
|
||||
ci-fairy minio login --token-file "${CI_JOB_JWT_FILE}"
|
||||
ci-fairy minio login "${CI_JOB_JWT}"
|
||||
ci-fairy minio cp job-rootfs-overlay.tar.gz "minio://${JOB_ROOTFS_OVERLAY_PATH}"
|
||||
|
||||
touch results/lava.log
|
||||
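# Hedged check sketch, not part of lava-submit.sh: the overlay tarball built
# above can be listed before upload to confirm the init scripts and generated
# environment file actually made it in.
tar tzf job-rootfs-overlay.tar.gz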
@@ -31,14 +34,12 @@ artifacts/lava/lava_job_submitter.py \
|
||||
--mesa-build-url "${FDO_HTTP_CACHE_URI:-}https://${MESA_BUILD_PATH}" \
|
||||
--job-rootfs-overlay-url "${FDO_HTTP_CACHE_URI:-}https://${JOB_ROOTFS_OVERLAY_PATH}" \
|
||||
--job-artifacts-base ${JOB_ARTIFACTS_BASE} \
|
||||
--job-timeout ${JOB_TIMEOUT:-30} \
|
||||
--first-stage-init artifacts/ci-common/init-stage1.sh \
|
||||
--ci-project-dir ${CI_PROJECT_DIR} \
|
||||
--device-type ${DEVICE_TYPE} \
|
||||
--dtb ${DTB} \
|
||||
--jwt-file "${CI_JOB_JWT_FILE}" \
|
||||
--jwt "${CI_JOB_JWT}" \
|
||||
--kernel-image-name ${KERNEL_IMAGE_NAME} \
|
||||
--kernel-image-type "${KERNEL_IMAGE_TYPE}" \
|
||||
--boot-method ${BOOT_METHOD} \
|
||||
--visibility-group ${VISIBILITY_GROUP} \
|
||||
--lava-tags "${LAVA_TAGS}" >> results/lava.log
|
||||
|
@@ -25,32 +25,18 @@
|
||||
"""Send a job to LAVA, track it and collect log back"""
|
||||
|
||||
import argparse
|
||||
import pathlib
|
||||
import lavacli
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import traceback
|
||||
import urllib.parse
|
||||
import xmlrpc
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import lavacli
|
||||
import yaml
|
||||
|
||||
from datetime import datetime
|
||||
from lavacli.utils import loader
|
||||
|
||||
# Timeout in minutes to decide if the device from the dispatched LAVA job has
|
||||
# hung or not due to the lack of new log output.
|
||||
DEVICE_HANGING_TIMEOUT_MIN = 5
|
||||
|
||||
# How many seconds the script should wait before trying a new polling iteration to
|
||||
# check if the dispatched LAVA job is running or waiting in the job queue.
|
||||
WAIT_FOR_DEVICE_POLLING_TIME_SEC = 10
|
||||
|
||||
# How many seconds to wait between log output LAVA RPC calls.
|
||||
LOG_POLLING_TIME_SEC = 5
|
||||
|
||||
# How many retries should be made when a timeout happens.
|
||||
NUMBER_OF_RETRIES_TIMEOUT_DETECTION = 2
|
||||
|
||||
|
||||
def print_log(msg):
|
||||
print("{}: {}".format(datetime.now(), msg))
|
||||
@@ -59,31 +45,19 @@ def fatal_err(msg):
|
||||
print_log(msg)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def hide_sensitive_data(yaml_data, hide_tag="HIDEME"):
|
||||
out_data = ""
|
||||
|
||||
for line in yaml_data.splitlines(True):
|
||||
if hide_tag in line:
|
||||
continue
|
||||
out_data += line
|
||||
|
||||
return out_data
|
||||
|
||||
|
||||
def generate_lava_yaml(args):
|
||||
# General metadata and permissions, plus also inexplicably kernel arguments
|
||||
values = {
|
||||
'job_name': 'mesa: {}'.format(args.pipeline_info),
|
||||
'device_type': args.device_type,
|
||||
'visibility': { 'group': [ args.visibility_group ] },
|
||||
'visibility': { 'group': [ 'Collabora+fdo'] },
|
||||
'priority': 75,
|
||||
'context': {
|
||||
'extra_nfsroot_args': ' init=/init rootwait minio_results={}'.format(args.job_artifacts_base)
|
||||
},
|
||||
'timeouts': {
|
||||
'job': {
|
||||
'minutes': args.job_timeout
|
||||
'minutes': 30
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -123,7 +97,7 @@ def generate_lava_yaml(args):
|
||||
# skeleton test definition: only declaring each job as a single 'test'
|
||||
# since LAVA's test parsing is not useful to us
|
||||
test = {
|
||||
'timeout': { 'minutes': args.job_timeout },
|
||||
'timeout': { 'minutes': 30 },
|
||||
'failure_retry': 1,
|
||||
'definitions': [ {
|
||||
'name': 'mesa',
|
||||
@@ -138,7 +112,7 @@ def generate_lava_yaml(args):
|
||||
'format': 'Lava-Test Test Definition 1.0',
|
||||
},
|
||||
'parse': {
|
||||
'pattern': r'hwci: (?P<test_case_id>\S*):\s+(?P<result>(pass|fail))'
|
||||
'pattern': 'hwci: (?P<test_case_id>\S*):\s+(?P<result>(pass|fail))'
|
||||
},
|
||||
'run': {
|
||||
},
|
||||
@@ -152,22 +126,15 @@ def generate_lava_yaml(args):
    # - fetch and unpack per-job environment from lava-submit.sh
    # - exec .gitlab-ci/common/init-stage2.sh
    init_lines = []

    with open(args.first_stage_init, 'r') as init_sh:
        init_lines += [ x.rstrip() for x in init_sh if not x.startswith('#') and x.rstrip() ]

    with open(args.jwt_file) as jwt_file:
        init_lines += [
            "set +x",
            f'echo -n "{jwt_file.read()}" > "{args.jwt_file}" # HIDEME',
            "set -x",
        ]

    init_lines += [
        'mkdir -p {}'.format(args.ci_project_dir),
        'wget -S --progress=dot:giga -O- {} | tar -xz -C {}'.format(args.mesa_build_url, args.ci_project_dir),
        'wget -S --progress=dot:giga -O- {} | tar -xz -C /'.format(args.job_rootfs_overlay_url),
        f'echo "export CI_JOB_JWT_FILE={args.jwt_file}" >> /set-job-env-vars.sh',
        'set +x',
        'export CI_JOB_JWT="{}"'.format(args.jwt),
        'set -x',
        'exec /init-stage2.sh',
    ]
    test['definitions'][0]['repository']['run']['steps'] = init_lines
@@ -243,44 +210,19 @@ def get_job_results(proxy, job_id, test_suite, test_case):

    return True

def wait_until_job_is_started(proxy, job_id):
    print_log(f"Waiting for job {job_id} to start.")
    current_state = "Submitted"
    waiting_states = ["Submitted", "Scheduling", "Scheduled"]
    while current_state in waiting_states:
        job_state = _call_proxy(proxy.scheduler.job_state, job_id)
        current_state = job_state["job_state"]

        time.sleep(WAIT_FOR_DEVICE_POLLING_TIME_SEC)
    print_log(f"Job {job_id} started.")

def follow_job_execution(proxy, job_id):
    line_count = 0
    finished = False
    last_time_logs = datetime.now()
    while not finished:
        (finished, data) = _call_proxy(proxy.scheduler.jobs.logs, job_id, line_count)
        logs = yaml.load(str(data), Loader=loader(False))
        if logs:
            # Reset the timeout
            last_time_logs = datetime.now()
            for line in logs:
                print("{} {}".format(line["dt"], line["msg"]))

            line_count += len(logs)

        else:
            time_limit = timedelta(minutes=DEVICE_HANGING_TIMEOUT_MIN)
            if datetime.now() - last_time_logs > time_limit:
                print_log("LAVA job {} doesn't advance (machine got hung?). Retry.".format(job_id))
                return False

            # `proxy.scheduler.jobs.logs` does not block, even when there is no
            # new log to be fetched. To avoid dosing the LAVA dispatcher
            # machine, let's add a sleep to save them some stamina.
            time.sleep(LOG_POLLING_TIME_SEC)

    return True

def show_job_data(proxy, job_id):
    show = _call_proxy(proxy.scheduler.jobs.show, job_id)
@@ -304,7 +246,9 @@ def main(args):
    yaml_file = generate_lava_yaml(args)

    if args.dump_yaml:
        print(hide_sensitive_data(generate_lava_yaml(args)))
        censored_args = args
        censored_args.jwt = "jwt-hidden"
        print(generate_lava_yaml(censored_args))

    if args.validate_only:
        ret = validate_job(proxy, yaml_file)
@@ -313,29 +257,26 @@ def main(args):
        print("LAVA job definition validated successfully")
        return

    retry_count = NUMBER_OF_RETRIES_TIMEOUT_DETECTION

    while retry_count >= 0:
    while True:
        job_id = submit_job(proxy, yaml_file)

        print_log("LAVA job id: {}".format(job_id))

        wait_until_job_is_started(proxy, job_id)

        if not follow_job_execution(proxy, job_id):
            print_log(f"Job {job_id} has timed out. Cancelling it.")
            # Cancel the job as it is considered unreachable by Mesa CI.
            proxy.scheduler.jobs.cancel(job_id)

            retry_count -= 1
            continue
        follow_job_execution(proxy, job_id)

        show_job_data(proxy, job_id)

        if get_job_results(proxy, job_id, "0_mesa", "mesa") == True:
            break

def create_parser():

if __name__ == '__main__':
    # given that we proxy from DUT -> LAVA dispatcher -> LAVA primary -> us ->
    # GitLab runner -> GitLab primary -> user, safe to say we don't need any
    # more buffering
    sys.stdout.reconfigure(line_buffering=True)
    sys.stderr.reconfigure(line_buffering=True)
    parser = argparse.ArgumentParser("LAVA job submitter")

    parser.add_argument("--pipeline-info")
@@ -343,7 +284,6 @@ def create_parser():
    parser.add_argument("--mesa-build-url")
    parser.add_argument("--job-rootfs-overlay-url")
    parser.add_argument("--job-artifacts-base")
    parser.add_argument("--job-timeout", type=int)
    parser.add_argument("--first-stage-init")
    parser.add_argument("--ci-project-dir")
    parser.add_argument("--device-type")
@@ -352,21 +292,9 @@ def create_parser():
    parser.add_argument("--kernel-image-type", nargs='?', default="")
    parser.add_argument("--boot-method")
    parser.add_argument("--lava-tags", nargs='?', default="")
    parser.add_argument("--jwt-file", type=pathlib.Path)
    parser.add_argument("--jwt")
    parser.add_argument("--validate-only", action='store_true')
    parser.add_argument("--dump-yaml", action='store_true')
    parser.add_argument("--visibility-group")

    return parser

if __name__ == "__main__":
    # given that we proxy from DUT -> LAVA dispatcher -> LAVA primary -> us ->
    # GitLab runner -> GitLab primary -> user, safe to say we don't need any
    # more buffering
    sys.stdout.reconfigure(line_buffering=True)
    sys.stderr.reconfigure(line_buffering=True)

    parser = create_parser()

    parser.set_defaults(func=main)
    args = parser.parse_args()
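A condensed, self-contained sketch of the retry flow added in the newer main() above: submit a job, follow its log, and when the device stops producing output, cancel the LAVA job and retry a bounded number of times. The submit/follow/cancel stubs are placeholders, not the real LAVA XML-RPC calls:

    import random

    NUMBER_OF_RETRIES_TIMEOUT_DETECTION = 2

    def submit_job():                    # placeholder for proxy.scheduler.jobs.submit(...)
        return random.randint(1, 10_000)

    def follow_job_execution(job_id):    # placeholder: False means "device hung"
        return random.random() > 0.3

    def cancel_job(job_id):              # placeholder for proxy.scheduler.jobs.cancel(...)
        print(f"cancelling hung job {job_id}")

    retry_count = NUMBER_OF_RETRIES_TIMEOUT_DETECTION
    while retry_count >= 0:
        job_id = submit_job()
        if not follow_job_execution(job_id):
            cancel_job(job_id)
            retry_count -= 1
            continue
        break   # job produced a result; stop retrying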
@@ -68,6 +68,7 @@ meson _build --native-file=native.file \
-D cpp_args="$(echo -n $CPP_ARGS)" \
-D libunwind=${UNWIND} \
${DRI_LOADERS} \
-D dri-drivers=${DRI_DRIVERS:-[]} \
${GALLIUM_ST} \
-D gallium-drivers=${GALLIUM_DRIVERS:-[]} \
-D vulkan-drivers=${VULKAN_DRIVERS:-[]} \
6  .gitlab-ci/piglit/piglit-all-skips.txt  Normal file
@@ -0,0 +1,6 @@
# WGL is Windows-only
wgl@.*

# These are sensitive to CPU timing, and would need to be run in isolation
# on the system rather than in parallel with other tests.
glx@glx_arb_sync_control@timing.*
@@ -3,7 +3,7 @@
set -ex

if [ -z "$GPU_VERSION" ]; then
  echo 'GPU_VERSION must be set to something like "llvmpipe" or "freedreno-a630" (the name used in your ci/gpu-version-*.txt)'
  echo 'GPU_VERSION must be set to something like "llvmpipe" or "freedreno-a630" (the name used in your ci/piglit-gpu-version-*.txt)'
  exit 1
fi

@@ -17,22 +17,6 @@ export VK_ICD_FILENAMES=`pwd`/install/share/vulkan/icd.d/"$VK_DRIVER"_icd.${VK_C
RESULTS=`pwd`/${PIGLIT_RESULTS_DIR:-results}
mkdir -p $RESULTS

if [ "$GALLIUM_DRIVER" = "virpipe" ]; then
  # deqp is to use virpipe, and virgl_test_server llvmpipe
  export GALLIUM_DRIVER="$GALLIUM_DRIVER"

  VTEST_ARGS="--use-egl-surfaceless"
  if [ "$VIRGL_HOST_API" = "GLES" ]; then
    VTEST_ARGS="$VTEST_ARGS --use-gles"
  fi

  GALLIUM_DRIVER=llvmpipe \
  GALLIVM_PERF="nopt" \
  virgl_test_server $VTEST_ARGS >$RESULTS/vtest-log.txt 2>&1 &

  sleep 1
fi

if [ -n "$PIGLIT_FRACTION" -o -n "$CI_NODE_INDEX" ]; then
  FRACTION=`expr ${PIGLIT_FRACTION:-1} \* ${CI_NODE_TOTAL:-1}`
  PIGLIT_RUNNER_OPTIONS="$PIGLIT_RUNNER_OPTIONS --fraction $FRACTION"
@@ -44,45 +28,47 @@ if [ -n "$CI_NODE_INDEX" ]; then
  PIGLIT_RUNNER_OPTIONS="$PIGLIT_RUNNER_OPTIONS --fraction-start ${CI_NODE_INDEX}"
fi
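The block above shards the piglit caselist across parallel GitLab job nodes. A small illustrative sketch of how --fraction and --fraction-start partition the tests (all numbers invented for the example):

    # Illustrative only: PIGLIT_FRACTION=2 with CI_NODE_TOTAL=4 parallel nodes.
    PIGLIT_FRACTION = 2
    CI_NODE_TOTAL = 4
    CI_NODE_INDEX = 3                            # this node's 1-based index

    FRACTION = PIGLIT_FRACTION * CI_NODE_TOTAL   # the `expr` line above: 8

    tests = [f"test_{i}" for i in range(32)]     # stand-in for the real caselist
    mine = tests[CI_NODE_INDEX - 1::FRACTION]    # roughly what this node ends up running
    print(FRACTION, mine)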

if [ -e "$INSTALL/$GPU_VERSION-fails.txt" ]; then
  PIGLIT_RUNNER_OPTIONS="$PIGLIT_RUNNER_OPTIONS --baseline $INSTALL/$GPU_VERSION-fails.txt"
if [ -e "$INSTALL/piglit-$GPU_VERSION-fails.txt" ]; then
  PIGLIT_RUNNER_OPTIONS="$PIGLIT_RUNNER_OPTIONS --baseline $INSTALL/piglit-$GPU_VERSION-fails.txt"
fi

# Default to an empty known flakes file if it doesn't exist.
touch $INSTALL/$GPU_VERSION-flakes.txt
touch $INSTALL/piglit-$GPU_VERSION-flakes.txt

if [ -n "$VK_DRIVER" ] && [ -e "$INSTALL/$VK_DRIVER-skips.txt" ]; then
  PIGLIT_SKIPS="$PIGLIT_SKIPS $INSTALL/$VK_DRIVER-skips.txt"
fi

if [ -n "$GALLIUM_DRIVER" ] && [ -e "$INSTALL/$GALLIUM_DRIVER-skips.txt" ]; then
  PIGLIT_SKIPS="$PIGLIT_SKIPS $INSTALL/$GALLIUM_DRIVER-skips.txt"
fi

if [ -n "$DRIVER_NAME" ] && [ -e "$INSTALL/$DRIVER_NAME-skips.txt" ]; then
  PIGLIT_SKIPS="$PIGLIT_SKIPS $INSTALL/$DRIVER_NAME-skips.txt"
fi

if [ -e "$INSTALL/$GPU_VERSION-skips.txt" ]; then
  PIGLIT_SKIPS="$PIGLIT_SKIPS $INSTALL/$GPU_VERSION-skips.txt"
if [ -e "$INSTALL/piglit-$GPU_VERSION-skips.txt" ]; then
  PIGLIT_SKIPS="$INSTALL/piglit-$GPU_VERSION-skips.txt"
fi

set +e

piglit-runner \
  run \
  --piglit-folder /piglit \
  --output $RESULTS \
  --jobs ${FDO_CI_CONCURRENT:-4} \
  --skips $INSTALL/all-skips.txt $PIGLIT_SKIPS \
  --flakes $INSTALL/$GPU_VERSION-flakes.txt \
  --profile $PIGLIT_PROFILES \
  --process-isolation \
  $PIGLIT_RUNNER_OPTIONS \
  -v -v
if [ -n "$PIGLIT_PARALLEL" ]; then
  PIGLIT_RUNNER_OPTIONS="$PIGLIT_RUNNER_OPTIONS --jobs $PIGLIT_PARALLEL"
elif [ -n "$FDO_CI_CONCURRENT" ]; then
  PIGLIT_RUNNER_OPTIONS="$PIGLIT_RUNNER_OPTIONS --jobs $FDO_CI_CONCURRENT"
else
  PIGLIT_RUNNER_OPTIONS="$PIGLIT_RUNNER_OPTIONS --jobs 4"
fi

RESULTS_CSV=$RESULTS/results.csv
FAILURES_CSV=$RESULTS/failures.csv

export LD_PRELOAD=$TEST_LD_PRELOAD

piglit-runner \
  run \
  --piglit-folder /piglit \
  --output $RESULTS \
  --skips $INSTALL/piglit/piglit-all-skips.txt $PIGLIT_SKIPS \
  --flakes $INSTALL/piglit-$GPU_VERSION-flakes.txt \
  --profile $PIGLIT_PROFILES \
  --process-isolation \
  $PIGLIT_RUNNER_OPTIONS \
  -v -v

PIGLIT_EXITCODE=$?

export LD_PRELOAD=

deqp-runner junit \
  --testsuite $PIGLIT_PROFILES \
  --results $RESULTS/failures.csv \
@@ -95,8 +81,8 @@ if [ -n "$FLAKES_CHANNEL" ]; then
  python3 $INSTALL/report-flakes.py \
    --host irc.oftc.net \
    --port 6667 \
    --results $RESULTS/results.csv \
    --known-flakes $INSTALL/$GPU_VERSION-flakes.txt \
    --results $RESULTS_CSV \
    --known-flakes $INSTALL/piglit-$GPU_VERSION-flakes.txt \
    --channel "$FLAKES_CHANNEL" \
    --runner "$CI_RUNNER_DESCRIPTION" \
    --job "$CI_JOB_ID" \
@@ -40,17 +40,19 @@ if [ "$VK_DRIVER" ]; then
|
||||
# Set the Vulkan driver to use.
|
||||
export VK_ICD_FILENAMES="$INSTALL/share/vulkan/icd.d/${VK_DRIVER}_icd.x86_64.json"
|
||||
|
||||
# Set environment for Wine.
|
||||
export WINEDEBUG="-all"
|
||||
export WINEPREFIX="/dxvk-wine64"
|
||||
export WINEESYNC=1
|
||||
if [ "x$PIGLIT_PROFILES" = "xreplay" ]; then
|
||||
# Set environment for Wine.
|
||||
export WINEDEBUG="-all"
|
||||
export WINEPREFIX="/dxvk-wine64"
|
||||
export WINEESYNC=1
|
||||
|
||||
# Set environment for DXVK.
|
||||
export DXVK_LOG_LEVEL="none"
|
||||
export DXVK_STATE_CACHE=0
|
||||
# Set environment for DXVK.
|
||||
export DXVK_LOG_LEVEL="none"
|
||||
export DXVK_STATE_CACHE=0
|
||||
|
||||
# Set environment for gfxreconstruct executables.
|
||||
export PATH="/gfxreconstruct/build/bin:$PATH"
|
||||
# Set environment for gfxreconstruct executables.
|
||||
export PATH="/gfxreconstruct/build/bin:$PATH"
|
||||
fi
|
||||
|
||||
SANITY_MESA_VERSION_CMD="vulkaninfo"
|
||||
|
||||
@@ -75,12 +77,14 @@ else
|
||||
|
||||
### GL/ES ###
|
||||
|
||||
# Set environment for apitrace executable.
|
||||
export PATH="/apitrace/build:$PATH"
|
||||
if [ "x$PIGLIT_PROFILES" = "xreplay" ]; then
|
||||
# Set environment for apitrace executable.
|
||||
export PATH="/apitrace/build:$PATH"
|
||||
|
||||
# Our rootfs may not have "less", which apitrace uses during
|
||||
# apitrace dump
|
||||
export PAGER=cat
|
||||
# Our rootfs may not have "less", which apitrace uses during
|
||||
# apitrace dump
|
||||
export PAGER=cat
|
||||
fi
|
||||
|
||||
SANITY_MESA_VERSION_CMD="wflinfo"
|
||||
|
||||
@@ -103,6 +107,7 @@ else
|
||||
|
||||
LD_LIBRARY_PATH="$__LD_LIBRARY_PATH" \
|
||||
GALLIUM_DRIVER=llvmpipe \
|
||||
GALLIVM_PERF="nopt,no_filter_hacks" \
|
||||
VTEST_USE_EGL_SURFACELESS=1 \
|
||||
VTEST_USE_GLES=1 \
|
||||
virgl_test_server >"$RESULTS"/vtest-log.txt 2>&1 &
|
||||
@@ -128,6 +133,13 @@ fi
|
||||
# If the job is parallel at the gitlab job level, will take the corresponding
|
||||
# fraction of the caselist.
|
||||
if [ -n "$CI_NODE_INDEX" ]; then
|
||||
|
||||
if [ "$PIGLIT_PROFILES" != "${PIGLIT_PROFILES% *}" ]; then
|
||||
FAILURE_MESSAGE=$(printf "%s" "Can't parallelize piglit with multiple profiles")
|
||||
quiet print_red printf "%s\n" "$FAILURE_MESSAGE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
USE_CASELIST=1
|
||||
fi
|
||||
|
||||
@@ -148,6 +160,18 @@ replay_minio_upload_images() {
|
||||
else
|
||||
__MINIO_PATH="$JOB_ARTIFACTS_BASE"
|
||||
__DESTINATION_FILE_PATH="$__MINIO_TRACES_PREFIX/${line##*-}"
|
||||
# Adding to the JUnit the direct link to the diff page in
|
||||
# the dashboard
|
||||
__PIGLIT_TESTCASE_CLASSNAME="piglit\.trace\.$PIGLIT_REPLAY_DEVICE_NAME\.$(dirname $__TRACE | sed 's%/%\\.%g;s@%@\\%@')"
|
||||
__PIGLIT_TESTCASE_NAME="$(basename $__TRACE | sed 's%\.%_%g;s@%@\\%@')"
|
||||
__DASHBOARD_URL="https://tracie.freedesktop.org/dashboard/imagediff/${CI_PROJECT_PATH}/${CI_JOB_ID}/${__TRACE}"
|
||||
__START_TEST_PATTERN='<testcase classname="'"${__PIGLIT_TESTCASE_CLASSNAME}"'" name="'"${__PIGLIT_TESTCASE_NAME}"'" status="fail"'
|
||||
__REPLACE_TEST_PATTERN='</system-out><failure type="fail"/></testcase>'
|
||||
# Replace in the range between __START_TEST_PATTERN and
|
||||
# __REPLACE_TEST_PATTERN leaving __START_TEST_PATTERN out
|
||||
# from the substitution
|
||||
sed '\%'"${__START_TEST_PATTERN}"'%,\%'"${__REPLACE_TEST_PATTERN}"'%{\%'"${__START_TEST_PATTERN}"'%b;s%'"${__REPLACE_TEST_PATTERN}"'%</system-out><failure type="fail">To view the image differences visit: '"${__DASHBOARD_URL}"'</failure></testcase>%}' \
|
||||
-i "$RESULTS"/junit.xml
|
||||
fi
|
||||
|
||||
ci-fairy minio cp $MINIO_ARGS "$RESULTS/$__PREFIX/$line" \
|
||||
@@ -164,7 +188,7 @@ cd /piglit
|
||||
|
||||
if [ -n "$USE_CASELIST" ]; then
|
||||
PIGLIT_TESTS=$(printf "%s" "$PIGLIT_TESTS")
|
||||
PIGLIT_GENTESTS="./piglit print-cmd $PIGLIT_TESTS replay --format \"{name}\" > /tmp/case-list.txt"
|
||||
PIGLIT_GENTESTS="./piglit print-cmd $PIGLIT_TESTS $PIGLIT_PROFILES --format \"{name}\" > /tmp/case-list.txt"
|
||||
RUN_GENTESTS="export LD_LIBRARY_PATH=$__LD_LIBRARY_PATH; $PIGLIT_GENTESTS"
|
||||
|
||||
eval $RUN_GENTESTS
|
||||
@@ -178,7 +202,7 @@ PIGLIT_OPTIONS=$(printf "%s" "$PIGLIT_OPTIONS")
|
||||
|
||||
PIGLIT_TESTS=$(printf "%s" "$PIGLIT_TESTS")
|
||||
|
||||
PIGLIT_CMD="./piglit run --timeout 300 -j${FDO_CI_CONCURRENT:-4} $PIGLIT_OPTIONS $PIGLIT_TESTS replay "$(/usr/bin/printf "%q" "$RESULTS")
|
||||
PIGLIT_CMD="./piglit run --timeout 300 -j${FDO_CI_CONCURRENT:-4} $PIGLIT_OPTIONS $PIGLIT_TESTS $PIGLIT_PROFILES "$(/usr/bin/printf "%q" "$RESULTS")
|
||||
|
||||
RUN_CMD="export LD_LIBRARY_PATH=$__LD_LIBRARY_PATH; $SANITY_MESA_VERSION_CMD && $HANG_DETECTION_CMD $PIGLIT_CMD"
|
||||
|
||||
@@ -186,7 +210,7 @@ if [ "$RUN_CMD_WRAPPER" ]; then
|
||||
RUN_CMD="set +e; $RUN_CMD_WRAPPER "$(/usr/bin/printf "%q" "$RUN_CMD")"; set -e"
|
||||
fi
|
||||
|
||||
ci-fairy minio login $MINIO_ARGS --token-file "${CI_JOB_JWT_FILE}"
|
||||
FAILURE_MESSAGE=$(printf "%s" "Unexpected change in results:")
|
||||
|
||||
eval $RUN_CMD
|
||||
|
||||
@@ -196,9 +220,12 @@ fi
|
||||
|
||||
ARTIFACTS_BASE_URL="https://${CI_PROJECT_ROOT_NAMESPACE}.${CI_PAGES_DOMAIN}/-/${CI_PROJECT_NAME}/-/jobs/${CI_JOB_ID}/artifacts"
|
||||
|
||||
./piglit summary aggregate "$RESULTS" -o junit.xml
|
||||
if [ ${PIGLIT_JUNIT_RESULTS:-0} -eq 1 ]; then
|
||||
./piglit summary aggregate "$RESULTS" -o junit.xml
|
||||
FAILURE_MESSAGE=$(printf "${FAILURE_MESSAGE}\n%s" "Check the JUnit report for failures at: ${ARTIFACTS_BASE_URL}/results/junit.xml")
|
||||
fi
|
||||
|
||||
PIGLIT_RESULTS="${PIGLIT_RESULTS:-replay}"
|
||||
PIGLIT_RESULTS="${PIGLIT_RESULTS:-$PIGLIT_PROFILES}"
|
||||
RESULTSFILE="$RESULTS/$PIGLIT_RESULTS.txt"
|
||||
mkdir -p .gitlab-ci/piglit
|
||||
./piglit summary console "$RESULTS"/results.json.bz2 \
|
||||
@@ -207,28 +234,51 @@ mkdir -p .gitlab-ci/piglit
|
||||
| sed '/^summary:/Q' \
|
||||
> $RESULTSFILE
|
||||
|
||||
__PREFIX="trace/$PIGLIT_REPLAY_DEVICE_NAME"
|
||||
__MINIO_PATH="$PIGLIT_REPLAY_ARTIFACTS_BASE_URL"
|
||||
__MINIO_TRACES_PREFIX="traces"
|
||||
if [ "x$PIGLIT_PROFILES" = "xreplay" ] \
|
||||
&& [ ${PIGLIT_REPLAY_UPLOAD_TO_MINIO:-0} -eq 1 ]; then
|
||||
|
||||
if [ "x$PIGLIT_REPLAY_SUBCOMMAND" != "xprofile" ]; then
|
||||
quiet replay_minio_upload_images
|
||||
ci-fairy minio login $MINIO_ARGS $CI_JOB_JWT
|
||||
|
||||
__PREFIX="trace/$PIGLIT_REPLAY_DEVICE_NAME"
|
||||
__MINIO_PATH="$PIGLIT_REPLAY_ARTIFACTS_BASE_URL"
|
||||
__MINIO_TRACES_PREFIX="traces"
|
||||
|
||||
if [ "x$PIGLIT_REPLAY_SUBCOMMAND" != "xprofile" ]; then
|
||||
quiet replay_minio_upload_images
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -n "$USE_CASELIST" ]; then
|
||||
# Just filter the expected results based on the tests that were actually
|
||||
# executed, and switch to the version with no summary
|
||||
cat ".gitlab-ci/piglit/$PIGLIT_RESULTS.txt.orig" | sed '/^summary:/Q' | rev \
|
||||
| cut -f2- -d: | rev | sed "s/$/:/g" > /tmp/executed.txt
|
||||
|
||||
if [ ! -s $RESULTSFILE ]; then
|
||||
grep -F -f /tmp/executed.txt "$INSTALL/$PIGLIT_RESULTS.txt" \
|
||||
> ".gitlab-ci/piglit/$PIGLIT_RESULTS.txt.baseline" || true
|
||||
elif [ -f "$INSTALL/$PIGLIT_RESULTS.txt" ]; then
|
||||
cp "$INSTALL/$PIGLIT_RESULTS.txt" \
|
||||
".gitlab-ci/piglit/$PIGLIT_RESULTS.txt.baseline"
|
||||
else
|
||||
touch ".gitlab-ci/piglit/$PIGLIT_RESULTS.txt.baseline"
|
||||
fi
|
||||
|
||||
if diff -q ".gitlab-ci/piglit/$PIGLIT_RESULTS.txt.baseline" $RESULTSFILE; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
./piglit summary html --exclude-details=pass \
|
||||
"$RESULTS"/summary "$RESULTS"/results.json.bz2
|
||||
|
||||
if [ "x$PIGLIT_PROFILES" = "xreplay" ]; then
|
||||
find "$RESULTS"/summary -type f -name "*.html" -print0 \
|
||||
| xargs -0 sed -i 's%<img src="file://'"${RESULTS}"'.*-\([0-9a-f]*\)\.png%<img src="https://'"${JOB_ARTIFACTS_BASE}"'/traces/\1.png%g'
|
||||
find "$RESULTS"/summary -type f -name "*.html" -print0 \
|
||||
| xargs -0 sed -i 's%<img src="file://%<img src="https://'"${PIGLIT_REPLAY_REFERENCE_IMAGES_BASE}"'/%g'
|
||||
fi
|
||||
|
||||
quiet print_red echo "Failures in traces:"
|
||||
cat $RESULTSFILE
|
||||
quiet print_red echo "Review the image changes and get the new checksums at: ${ARTIFACTS_BASE_URL}/results/summary/problems.html"
|
||||
FAILURE_MESSAGE=$(printf "${FAILURE_MESSAGE}\n%s" "Check the HTML summary for problems at: ${ARTIFACTS_BASE_URL}/results/summary/problems.html")
|
||||
|
||||
quiet print_red printf "%s\n" "$FAILURE_MESSAGE"
|
||||
quiet diff --color=always -u ".gitlab-ci/piglit/$PIGLIT_RESULTS.txt.baseline" $RESULTSFILE
|
||||
exit 1
|
@@ -18,7 +18,7 @@ if [ -n "$CROSS" ]; then
|
||||
else
|
||||
STRIP="strip"
|
||||
fi
|
||||
if [ -z "$ARTIFACTS_DEBUG_SYMBOLS" ]; then
|
||||
if [ -z "$ARTIFACTS_DEBUG_SYMBOLS"]; then
|
||||
find install -name \*.so -exec $STRIP {} \;
|
||||
fi
|
||||
|
||||
@@ -31,13 +31,13 @@ cp -Rp .gitlab-ci/piglit install/
|
||||
cp -Rp .gitlab-ci/fossils.yml install/
|
||||
cp -Rp .gitlab-ci/fossils install/
|
||||
cp -Rp .gitlab-ci/fossilize-runner.sh install/
|
||||
cp -Rp .gitlab-ci/deqp-runner.sh install/
|
||||
cp -Rp .gitlab-ci/crosvm-runner.sh install/
|
||||
cp -Rp .gitlab-ci/crosvm-init.sh install/
|
||||
cp -Rp .gitlab-ci/*.txt install/
|
||||
cp -Rp .gitlab-ci/deqp-*.txt install/
|
||||
cp -Rp .gitlab-ci/report-flakes.py install/
|
||||
cp -Rp .gitlab-ci/vkd3d-proton install/
|
||||
cp -Rp .gitlab-ci/*-runner.sh install/
|
||||
find . -path \*/ci/\*.txt \
|
||||
-o -path \*/ci/\*.toml \
|
||||
-o -path \*/ci/\*traces\*.yml \
|
||||
| xargs -I '{}' cp -p '{}' install/
|
||||
|
||||
@@ -52,6 +52,6 @@ if [ -n "$MINIO_ARTIFACT_NAME" ]; then
|
||||
# Pass needed files to the test stage
|
||||
MINIO_ARTIFACT_NAME="$MINIO_ARTIFACT_NAME.tar.gz"
|
||||
gzip -c artifacts/install.tar > ${MINIO_ARTIFACT_NAME}
|
||||
ci-fairy minio login --token-file "${CI_JOB_JWT_FILE}"
|
||||
ci-fairy minio login $CI_JOB_JWT
|
||||
ci-fairy minio cp ${MINIO_ARTIFACT_NAME} minio://${PIPELINE_ARTIFACTS_BASE}/${MINIO_ARTIFACT_NAME}
|
||||
fi
|
||||
|
@@ -1,85 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
copy_tests_files() (
|
||||
SKQP_BACKEND="${1}"
|
||||
SKQP_FILE_PREFIX="${INSTALL}/${GPU_VERSION}-skqp"
|
||||
|
||||
if echo "${SKQP_BACKEND}" | grep -qE 'gl(es)?'
|
||||
then
|
||||
SKQP_RENDER_TESTS_FILE="${SKQP_FILE_PREFIX}-${SKQP_BACKEND}_rendertests.txt"
|
||||
cp "${SKQP_RENDER_TESTS_FILE}" "${SKQP_ASSETS_DIR}"/skqp/rendertests.txt
|
||||
return 0
|
||||
fi
|
||||
|
||||
# The unittests.txt path is hardcoded inside assets directory,
|
||||
# that is why it needs to be a special case.
|
||||
if echo "${SKQP_BACKEND}" | grep -qE "unitTest"
|
||||
then
|
||||
cp "${SKQP_FILE_PREFIX}_unittests.txt" "${SKQP_ASSETS_DIR}"/skqp/unittests.txt
|
||||
fi
|
||||
)
|
||||
|
||||
set -ex
|
||||
|
||||
# Needed so configuration files can contain paths to files in /install
|
||||
ln -sf "$CI_PROJECT_DIR"/install /install
|
||||
|
||||
INSTALL=${PWD}/install
|
||||
|
||||
if [ -z "$GPU_VERSION" ]; then
|
||||
echo 'GPU_VERSION must be set to something like "llvmpipe" or "freedreno-a630" (the name used in .gitlab-ci/gpu-version-*.txt)'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
SKQP_ASSETS_DIR=/skqp/assets
|
||||
SKQP_RESULTS_DIR="${SKQP_RESULTS_DIR:-results}"
|
||||
|
||||
mkdir "${SKQP_ASSETS_DIR}"/skqp
|
||||
|
||||
SKQP_EXITCODE=0
|
||||
for SKQP_BACKEND in ${SKQP_BACKENDS}
|
||||
do
|
||||
set -e
|
||||
copy_tests_files "${SKQP_BACKEND}"
|
||||
|
||||
set +e
|
||||
SKQP_BACKEND_RESULTS_DIR="${SKQP_RESULTS_DIR}"/"${SKQP_BACKEND}"
|
||||
mkdir -p "${SKQP_BACKEND_RESULTS_DIR}"
|
||||
/skqp/skqp "${SKQP_ASSETS_DIR}" '' "${SKQP_BACKEND_RESULTS_DIR}" "${SKQP_BACKEND}_"
|
||||
BACKEND_EXITCODE=$?
|
||||
|
||||
if [ ! $BACKEND_EXITCODE -eq 0 ]
|
||||
then
|
||||
echo "skqp failed on ${SKQP_BACKEND} tests with ${BACKEND_EXITCODE} exit code."
|
||||
fi
|
||||
|
||||
# Propagate error codes to leverage the final job result
|
||||
SKQP_EXITCODE=$(( SKQP_EXITCODE | BACKEND_EXITCODE ))
|
||||
done
|
||||
|
||||
set +x
|
||||
|
||||
# Unit tests produce empty HTML reports, guide the user to check the TXT file.
|
||||
if echo "${SKQP_BACKENDS}" | grep -qE "unitTest"
|
||||
then
|
||||
# Remove the empty HTML report to avoid confusion
|
||||
rm -f "${SKQP_RESULTS_DIR}"/unitTest/report.html
|
||||
|
||||
echo "See skqp unit test results at:"
|
||||
echo "https://$CI_PROJECT_ROOT_NAMESPACE.pages.freedesktop.org/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/${SKQP_RESULTS_DIR}/unitTest/unit_tests.txt"
|
||||
fi
|
||||
|
||||
for REPORT in "${SKQP_RESULTS_DIR}"/**/report.html
|
||||
do
|
||||
BACKEND_NAME=$(echo "${REPORT}" | sed 's@.*/\([^/]*\)/report.html@\1@')
|
||||
echo "See skqp ${BACKEND_NAME} render tests report at:"
|
||||
echo "https://$CI_PROJECT_ROOT_NAMESPACE.pages.freedesktop.org/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/${REPORT}"
|
||||
done
|
||||
|
||||
# If there is no report available, tell the user that something is wrong.
|
||||
if [ ! -f "${REPORT}" ]
|
||||
then
|
||||
echo "No skqp report available. Probably some fatal error has occured during the skqp execution."
|
||||
fi
|
||||
|
||||
exit $SKQP_EXITCODE
|
@@ -18,7 +18,6 @@
|
||||
- .gitlab-ci/**/*
|
||||
- include/**/*
|
||||
- meson.build
|
||||
- .gitattributes
|
||||
- src/*
|
||||
- src/compiler/**/*
|
||||
- src/drm-shim/**/*
|
||||
@@ -31,6 +30,10 @@
|
||||
- src/loader/**/*
|
||||
- src/mapi/**/*
|
||||
- src/mesa/*
|
||||
- src/mesa/drivers/*
|
||||
- src/mesa/drivers/common/**/*
|
||||
- src/mesa/drivers/dri/*
|
||||
- src/mesa/drivers/dri/common/**/*
|
||||
- src/mesa/main/**/*
|
||||
- src/mesa/math/**/*
|
||||
- src/mesa/program/**/*
|
||||
@@ -38,10 +41,11 @@
|
||||
- src/mesa/state_tracker/**/*
|
||||
- src/mesa/swrast/**/*
|
||||
- src/mesa/swrast_setup/**/*
|
||||
- src/mesa/tnl/**/*
|
||||
- src/mesa/tnl_dd/**/*
|
||||
- src/mesa/vbo/**/*
|
||||
- src/mesa/x86/**/*
|
||||
- src/mesa/x86-64/**/*
|
||||
- src/tool/**/*
|
||||
- src/util/**/*
|
||||
|
||||
.vulkan-rules:
|
||||
@@ -128,7 +132,6 @@
|
||||
- .gitlab-ci.yml
|
||||
- .gitlab-ci/**/*
|
||||
- meson.build
|
||||
- .gitattributes
|
||||
- include/**/*
|
||||
- src/compiler/**/*
|
||||
- src/include/**/*
|
||||
@@ -148,8 +151,6 @@
|
||||
.freedreno-rules:
|
||||
stage: freedreno
|
||||
rules:
|
||||
- if: '$FD_FARM == "offline"'
|
||||
when: never
|
||||
- *ignore_scheduled_pipelines
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
@@ -160,7 +161,7 @@
|
||||
- changes:
|
||||
*vulkan_file_list
|
||||
when: on_success
|
||||
- changes: &freedreno_file_list
|
||||
- changes:
|
||||
# Note: when https://gitlab.com/gitlab-org/gitlab/-/issues/198688
|
||||
# is supported, we can change the src/freedreno/ rule to explicitly
|
||||
# exclude tools
|
||||
@@ -170,57 +171,6 @@
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
# Unfortunately YAML doesn't let us concatenate arrays, so we have to do the
|
||||
# rules duplication manually
|
||||
.freedreno-rules-restricted:
|
||||
stage: freedreno
|
||||
rules:
|
||||
- if: '$FD_FARM == "offline"'
|
||||
when: never
|
||||
# If the triggerer has access to the restricted traces and if it is pre-merge
|
||||
- if: '($GITLAB_USER_LOGIN !~ "/^(robclark|anholt|flto|cwabbott0|Danil|tomeu)$/") &&
|
||||
($GITLAB_USER_LOGIN != "marge-bot" || $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME != $CI_COMMIT_REF_NAME)'
|
||||
when: never
|
||||
- *ignore_scheduled_pipelines
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*vulkan_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*freedreno_file_list
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
# Unfortunately YAML doesn't let us concatenate arrays, so we have to do the
|
||||
# rules duplication manually
|
||||
.freedreno-rules-performance:
|
||||
stage: freedreno
|
||||
rules:
|
||||
- if: '$FD_FARM == "offline"'
|
||||
when: never
|
||||
- *ignore_scheduled_pipelines
|
||||
# Run only on pre-merge pipelines from Marge
|
||||
- if: '$GITLAB_USER_LOGIN != "marge-bot" || $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME != $CI_COMMIT_REF_NAME'
|
||||
when: never
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: manual
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: manual
|
||||
- changes:
|
||||
*vulkan_file_list
|
||||
when: manual
|
||||
- changes:
|
||||
*freedreno_file_list
|
||||
when: manual
|
||||
- when: never
|
||||
|
||||
.panfrost-midgard-rules:
|
||||
stage: arm
|
||||
rules:
|
||||
@@ -236,7 +186,6 @@
|
||||
- src/gallium/winsys/panfrost/**/*
|
||||
when: on_success
|
||||
- changes: &panfrost_common_file_list
|
||||
- src/panfrost/ci/*
|
||||
- src/panfrost/include/*
|
||||
- src/panfrost/lib/*
|
||||
- src/panfrost/shared/*
|
||||
@@ -263,12 +212,6 @@
|
||||
- changes:
|
||||
*panfrost_gallium_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*vulkan_file_list
|
||||
when: on_success
|
||||
- changes: &panfrost_vulkan_file_list
|
||||
- src/panfrost/vulkan/*
|
||||
when: on_success
|
||||
- changes:
|
||||
- src/panfrost/bifrost/**/*
|
||||
when: on_success
|
||||
@@ -395,25 +338,6 @@
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
.radeonsi-vaapi-rules:
|
||||
stage: amd
|
||||
rules:
|
||||
- *ignore_scheduled_pipelines
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*radeonsi_file_list
|
||||
when: on_success
|
||||
- changes: &radeon_vcn_file_list
|
||||
- src/gallium/frontends/va/**/*
|
||||
- src/gallium/drivers/radeon/**/*
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
.i915g-rules:
|
||||
stage: intel
|
||||
rules:
|
||||
@@ -431,23 +355,6 @@
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
.crocus-rules:
|
||||
stage: intel
|
||||
rules:
|
||||
- *ignore_scheduled_pipelines
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
- src/gallium/drivers/crocus/**/*
|
||||
- src/gallium/winsys/crocus/**/*
|
||||
- src/intel/**/*
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
.iris-rules:
|
||||
stage: intel
|
||||
rules:
|
||||
@@ -458,33 +365,13 @@
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes: &iris_file_list
|
||||
- changes:
|
||||
- src/gallium/drivers/iris/**/*
|
||||
- src/gallium/winsys/iris/**/*
|
||||
- src/intel/**/*
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
# Unfortunately YAML doesn't let us concatenate arrays, so we have to do the
|
||||
# rules duplication manually
|
||||
.iris-rules-performance:
|
||||
stage: intel
|
||||
rules:
|
||||
- *ignore_scheduled_pipelines
|
||||
# Run only on pre-merge pipelines from Marge
|
||||
- if: '$GITLAB_USER_LOGIN != "marge-bot" || $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME != $CI_COMMIT_REF_NAME'
|
||||
when: never
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: manual
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: manual
|
||||
- changes:
|
||||
*iris_file_list
|
||||
when: manual
|
||||
- when: never
|
||||
|
||||
.anv-rules:
|
||||
stage: intel
|
||||
rules:
|
||||
@@ -529,9 +416,6 @@
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*softpipe_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*lavapipe_file_list
|
||||
when: on_success
|
||||
@@ -573,26 +457,6 @@
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
.etnaviv-rules:
|
||||
stage: etnaviv
|
||||
rules:
|
||||
- *ignore_scheduled_pipelines
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
- src/etnaviv/**/*
|
||||
- src/gallium/drivers/etnaviv/**/*
|
||||
- src/gallium/winsys/etnaviv/**/*
|
||||
- src/gallium/auxiliary/renderonly/**/*
|
||||
- src/gallium/winsys/kmsro/**/*
|
||||
- src/gallium/winsys/etnaviv/**/*
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
# Rules for unusual architectures that only build a subset of drivers
|
||||
.ppc64el-rules:
|
||||
rules:
|
||||
|
@@ -9,5 +9,5 @@ ENV ErrorActionPreference='Stop'
|
||||
COPY mesa_deps_vs2019.ps1 C:\
|
||||
RUN C:\mesa_deps_vs2019.ps1
|
||||
|
||||
COPY mesa_deps_build.ps1 C:\
|
||||
RUN C:\mesa_deps_build.ps1
|
||||
COPY mesa_deps.ps1 C:\
|
||||
RUN C:\mesa_deps.ps1
|
@@ -1,7 +0,0 @@
|
||||
# escape=`
|
||||
|
||||
ARG base_image
|
||||
FROM ${base_image}
|
||||
|
||||
COPY mesa_deps_test.ps1 C:\
|
||||
RUN C:\mesa_deps_test.ps1
|
@@ -9,7 +9,7 @@ Write-Host "Compiling Mesa"
|
||||
$builddir = New-Item -ItemType Directory -Name "_build"
|
||||
$installdir = New-Item -ItemType Directory -Name "_install"
|
||||
Push-Location $builddir.FullName
|
||||
cmd.exe /C "C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && meson --default-library=shared -Dzlib:default_library=static --buildtype=release -Db_ndebug=false -Dc_std=c17 -Dcpp_std=vc++latest -Db_vscrt=mt --cmake-prefix-path=`"C:\llvm-10`" --pkg-config-path=`"C:\llvm-10\lib\pkgconfig;C:\llvm-10\share\pkgconfig;C:\spirv-tools\lib\pkgconfig`" --prefix=`"$installdir`" -Dllvm=enabled -Dshared-llvm=disabled -Dvulkan-drivers=swrast,amd -Dgallium-drivers=swrast,d3d12,zink -Dshared-glapi=enabled -Dgles2=enabled -Dmicrosoft-clc=enabled -Dstatic-libclc=all -Dspirv-to-dxil=true -Dbuild-tests=true -Dwerror=true -Dwarning_level=2 -Dzlib:warning_level=1 -Dlibelf:warning_level=1 && ninja -j32 install && meson test --num-processes 32"
|
||||
cmd.exe /C "C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && meson --default-library=shared -Dzlib:default_library=static --buildtype=release -Db_ndebug=false -Dc_std=c17 -Dcpp_std=vc++latest -Db_vscrt=mt --cmake-prefix-path=`"C:\llvm-10`" --pkg-config-path=`"C:\llvm-10\lib\pkgconfig;C:\llvm-10\share\pkgconfig;C:\spirv-tools\lib\pkgconfig`" --prefix=`"$installdir`" -Dllvm=enabled -Dshared-llvm=disabled -Dvulkan-drivers=swrast,amd -Dgallium-drivers=swrast,d3d12,zink -Dmicrosoft-clc=enabled -Dstatic-libclc=all -Dbuild-tests=true -Dwerror=true -Dwarning_level=2 -Dzlib:warning_level=1 -Dlibelf:warning_level=1 && ninja -j32 install && meson test --num-processes 32"
|
||||
$buildstatus = $?
|
||||
Pop-Location
|
||||
|
||||
@@ -21,8 +21,4 @@ if (!$buildstatus) {
|
||||
}
|
||||
|
||||
Copy-Item ".\.gitlab-ci\windows\piglit_run.ps1" -Destination $installdir
|
||||
|
||||
Copy-Item ".\.gitlab-ci\windows\spirv2dxil_check.ps1" -Destination $installdir
|
||||
Copy-Item ".\.gitlab-ci\windows\spirv2dxil_run.ps1" -Destination $installdir
|
||||
|
||||
Get-ChildItem -Recurse -Filter "ci" | Get-ChildItem -Filter "*.txt" | Copy-Item -Destination $installdir
|
||||
Copy-Item ".\.gitlab-ci\windows\quick_gl.txt" -Destination $installdir
|
||||
|
@@ -6,8 +6,6 @@ $registry_username = $args[1]
|
||||
$registry_password = $args[2]
|
||||
$registry_user_image = $args[3]
|
||||
$registry_central_image = $args[4]
|
||||
$build_dockerfile = $args[5]
|
||||
$registry_base_image = $args[6]
|
||||
|
||||
Set-Location -Path ".\.gitlab-ci\windows"
|
||||
|
||||
@@ -41,7 +39,7 @@ if ($?) {
|
||||
}
|
||||
|
||||
Write-Host "No image found at $registry_user_image or $registry_central_image; rebuilding"
|
||||
docker --config "windows-docker.conf" build --no-cache -t "$registry_user_image" -f "$build_dockerfile" --build-arg base_image="$registry_base_image" .
|
||||
docker --config "windows-docker.conf" build --no-cache -t "$registry_user_image" .
|
||||
if (!$?) {
|
||||
Write-Host "Container build failed"
|
||||
docker --config "windows-docker.conf" logout "$registry_uri"
|
||||
|
@@ -129,8 +129,6 @@ if (!$buildstatus) {
|
||||
Exit 1
|
||||
}
|
||||
|
||||
# See https://gitlab.freedesktop.org/mesa/mesa/-/issues/3855
|
||||
# Until that's resolved, we need the vulkan-runtime as a build dependency to be able to run any unit tests on GL
|
||||
Get-Date
|
||||
Write-Host "Downloading Vulkan-Runtime"
|
||||
Invoke-WebRequest -Uri 'https://sdk.lunarg.com/sdk/download/latest/windows/vulkan-runtime.exe' -OutFile 'C:\vulkan-runtime.exe' | Out-Null
|
||||
@@ -142,5 +140,66 @@ if (!$?) {
|
||||
}
|
||||
Remove-Item C:\vulkan-runtime.exe -Force
|
||||
|
||||
Get-Date
|
||||
Write-Host "Downloading Freeglut"
|
||||
|
||||
$freeglut_zip = 'freeglut-MSVC.zip'
|
||||
$freeglut_url = "https://www.transmissionzero.co.uk/files/software/development/GLUT/$freeglut_zip"
|
||||
|
||||
For ($i = 0; $i -lt 5; $i++) {
|
||||
Invoke-WebRequest -Uri $freeglut_url -OutFile $freeglut_zip
|
||||
$freeglut_downloaded = $?
|
||||
if ($freeglut_downloaded) {
|
||||
Break
|
||||
}
|
||||
}
|
||||
|
||||
if (!$freeglut_downloaded) {
|
||||
Write-Host "Failed to download Freeglut"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
Get-Date
|
||||
Write-Host "Installing Freeglut"
|
||||
Expand-Archive $freeglut_zip -DestinationPath C:\
|
||||
if (!$?) {
|
||||
Write-Host "Failed to install Freeglut"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
Get-Date
|
||||
Write-Host "Downloading glext.h"
|
||||
New-Item -ItemType Directory -Path ".\glext" -Name "GL"
|
||||
$ProgressPreference = "SilentlyContinue"
|
||||
Invoke-WebRequest -Uri 'https://www.khronos.org/registry/OpenGL/api/GL/glext.h' -OutFile '.\glext\GL\glext.h' | Out-Null
|
||||
|
||||
Get-Date
|
||||
Write-Host "Cloning Piglit"
|
||||
git clone --no-progress --single-branch --no-checkout https://gitlab.freedesktop.org/mesa/piglit.git 'C:\src\piglit'
|
||||
if (!$?) {
|
||||
Write-Host "Failed to clone Piglit repository"
|
||||
Exit 1
|
||||
}
|
||||
Push-Location -Path C:\src\piglit
|
||||
git checkout b0bbeb876a506e0ee689dd7e17cee374c8284058
|
||||
Pop-Location
|
||||
|
||||
Get-Date
|
||||
$piglit_build = New-Item -ItemType Directory -Path "C:\src\piglit" -Name "build"
|
||||
Push-Location -Path $piglit_build.FullName
|
||||
Write-Host "Compiling Piglit"
|
||||
cmd.exe /C 'C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && cmake .. -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX="C:\Piglit" -DGLUT_INCLUDE_DIR=C:\freeglut\include -DGLUT_glut_LIBRARY_RELEASE=C:\freeglut\lib\x64\freeglut.lib -DGLEXT_INCLUDE_DIR=.\glext && ninja -j32'
|
||||
$buildstatus = $?
|
||||
ninja -j32 install | Out-Null
|
||||
$installstatus = $?
|
||||
Pop-Location
|
||||
Remove-Item -Recurse -Path $piglit_build
|
||||
if (!$buildstatus -Or !$installstatus) {
|
||||
Write-Host "Failed to compile or install Piglit"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
Copy-Item -Path C:\freeglut\bin\x64\freeglut.dll -Destination C:\Piglit\lib\piglit\bin\freeglut.dll
|
||||
|
||||
Get-Date
|
||||
Write-Host "Complete"
|
@@ -1,70 +0,0 @@
|
||||
Get-Date
|
||||
Write-Host "Downloading Freeglut"
|
||||
|
||||
$freeglut_zip = 'freeglut-MSVC.zip'
|
||||
$freeglut_url = "https://www.transmissionzero.co.uk/files/software/development/GLUT/$freeglut_zip"
|
||||
|
||||
For ($i = 0; $i -lt 5; $i++) {
|
||||
Invoke-WebRequest -Uri $freeglut_url -OutFile $freeglut_zip
|
||||
$freeglut_downloaded = $?
|
||||
if ($freeglut_downloaded) {
|
||||
Break
|
||||
}
|
||||
}
|
||||
|
||||
if (!$freeglut_downloaded) {
|
||||
Write-Host "Failed to download Freeglut"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
Get-Date
|
||||
Write-Host "Installing Freeglut"
|
||||
Expand-Archive $freeglut_zip -DestinationPath C:\
|
||||
if (!$?) {
|
||||
Write-Host "Failed to install Freeglut"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
Get-Date
|
||||
Write-Host "Downloading glext.h"
|
||||
New-Item -ItemType Directory -Path ".\glext" -Name "GL"
|
||||
$ProgressPreference = "SilentlyContinue"
|
||||
Invoke-WebRequest -Uri 'https://www.khronos.org/registry/OpenGL/api/GL/glext.h' -OutFile '.\glext\GL\glext.h' | Out-Null
|
||||
|
||||
Get-Date
|
||||
Write-Host "Cloning Piglit"
|
||||
git clone --no-progress --single-branch --no-checkout https://gitlab.freedesktop.org/mesa/piglit.git 'C:\src\piglit'
|
||||
if (!$?) {
|
||||
Write-Host "Failed to clone Piglit repository"
|
||||
Exit 1
|
||||
}
|
||||
Push-Location -Path C:\src\piglit
|
||||
git checkout f7f2a6c2275cae023a27b6cc81be3dda8c99492d
|
||||
Pop-Location
|
||||
|
||||
Get-Date
|
||||
$piglit_build = New-Item -ItemType Directory -Path "C:\src\piglit" -Name "build"
|
||||
Push-Location -Path $piglit_build.FullName
|
||||
Write-Host "Compiling Piglit"
|
||||
cmd.exe /C 'C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && cmake .. -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX="C:\Piglit" -DGLUT_INCLUDE_DIR=C:\freeglut\include -DGLUT_glut_LIBRARY_RELEASE=C:\freeglut\lib\x64\freeglut.lib -DGLEXT_INCLUDE_DIR=.\glext && ninja -j32'
|
||||
$buildstatus = $?
|
||||
ninja -j32 install | Out-Null
|
||||
$installstatus = $?
|
||||
Pop-Location
|
||||
Remove-Item -Recurse -Path $piglit_build
|
||||
if (!$buildstatus -Or !$installstatus) {
|
||||
Write-Host "Failed to compile or install Piglit"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
Copy-Item -Path C:\freeglut\bin\x64\freeglut.dll -Destination C:\Piglit\lib\piglit\bin\freeglut.dll
|
||||
|
||||
Get-Date
|
||||
Write-Host "Cloning spirv-samples"
|
||||
git clone --no-progress --single-branch --no-checkout https://github.com/dneto0/spirv-samples.git C:\spirv-samples\
|
||||
Push-Location -Path C:\spirv-samples\
|
||||
git checkout 7ac0ad5a7fe0ec884faba1dc2916028d0268eeef
|
||||
Pop-Location
|
||||
|
||||
Get-Date
|
||||
Write-Host "Complete"
|
@@ -1,15 +1,13 @@
|
||||
$env:PIGLIT_NO_FAST_SKIP = 1
|
||||
|
||||
Copy-Item -Path _install\bin\opengl32.dll -Destination C:\Piglit\lib\piglit\bin\opengl32.dll
|
||||
Copy-Item -Path _install\bin\libgallium_wgl.dll -Destination C:\Piglit\lib\piglit\bin\libgallium_wgl.dll
|
||||
Copy-Item -Path _install\bin\libglapi.dll -Destination C:\Piglit\lib\piglit\bin\libglapi.dll
|
||||
|
||||
# Run this using VsDevCmd.bat to ensure DXIL.dll is in %PATH%
|
||||
cmd.exe /C "C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && py -3 C:\Piglit\bin\piglit.py run `"$env:PIGLIT_PROFILE`" $env:PIGLIT_OPTIONS $env:PIGLIT_TESTS .\results"
|
||||
|
||||
py -3 C:\Piglit\bin\piglit.py summary console .\results | Select -SkipLast 1 | Select-String -NotMatch -Pattern ': pass' | Set-Content -Path .\result.txt
|
||||
|
||||
$reference = Get-Content ".\_install\$env:PIGLIT_RESULTS.txt"
|
||||
$reference = Get-Content ".\_install\$env:PIGLIT_PROFILE.txt"
|
||||
$result = Get-Content .\result.txt
|
||||
if (-Not ($reference -And $result)) {
|
||||
Exit 1
|
||||
|
File diff suppressed because it is too large
@@ -1,54 +0,0 @@
|
||||
# Ensure that dxil.dll in on the %PATH%
|
||||
$dxil_dll = cmd.exe /C "C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 -no_logo && where dxil.dll" 2>&1
|
||||
if ($dxil_dll -notmatch "dxil.dll$") {
|
||||
Write-Output "Couldn't get path to dxil.dll"
|
||||
exit 1
|
||||
}
|
||||
$env:Path = "$(Split-Path $dxil_dll);$env:Path"
|
||||
|
||||
$exec_mode_to_stage = @{ Fragment = "fragment"; Vertex = "vertex"; GLCompute = "compute" }
|
||||
|
||||
$spvasm_files = (Get-ChildItem C:\spirv-samples\spvasm\*.spvasm) | Sort-Object Name
|
||||
foreach ($spvasm in $spvasm_files) {
|
||||
$test_name = "Test:$($spvasm.Name):"
|
||||
$spvfile = ($spvasm -replace '\.spvasm$', '.spv')
|
||||
$content = Get-Content $spvasm
|
||||
$spv_version = "1.0"
|
||||
if ($content | Where-Object { $_ -match 'Version:\s(\d+\.\d+)' }) {
|
||||
$spv_version = $Matches[1]
|
||||
}
|
||||
|
||||
$as_output = C:\spirv-tools\bin\spirv-as.exe --target-env spv$spv_version --preserve-numeric-ids -o $spvfile $spvasm 2>&1 | % { if ($_ -is [System.Management.Automation.ErrorRecord]) { $_.Exception.Message } else { $_ } } | Out-String
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-Output "$test_name Skip: Unable to assemble shader"
|
||||
Write-Output "$as_output`n"
|
||||
continue
|
||||
}
|
||||
|
||||
$entry_points = $content | Select-String -Pattern '^OpEntryPoint\s(\w+)[^"]+"(\w+)"' | Select-Object -ExpandProperty Matches -First 1
|
||||
if ($entry_points.Count -eq 0) {
|
||||
Write-Output "$test_name Skip"
|
||||
Write-Output "No OpEntryPoint not found`n"
|
||||
continue
|
||||
}
|
||||
|
||||
foreach ($match in $entry_points) {
|
||||
$exec_mode, $entry_point = $match.Groups[1].Value, $match.Groups[2].Value
|
||||
$subtest = "$test_name$entry_point|${exec_mode}:"
|
||||
$stage = $exec_mode_to_stage[$exec_mode]
|
||||
if ($stage -eq '') {
|
||||
Write-Output "$subtest Fail: Unknown shader type ($exec_mode)"
|
||||
continue
|
||||
}
|
||||
|
||||
$s2d_output = .\_install\bin\spirv2dxil.exe -v -e "$entry_point" -s "$stage" -o NUL $spvfile 2>&1 | ForEach-Object { if ($_ -is [System.Management.Automation.ErrorRecord]) { $_.Exception.Message } else { $_ } } | Out-String
|
||||
if ($LASTEXITCODE -eq 0) {
|
||||
Write-Output "$subtest Pass"
|
||||
}
|
||||
else {
|
||||
Write-Output "$subtest Fail"
|
||||
$sanitized_output = $s2d_output -replace ', file .+, line \d+' -replace ' In file .+:\d+'
|
||||
Write-Output "$sanitized_output`n"
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,16 +0,0 @@
|
||||
. .\_install\spirv2dxil_check.ps1 2>&1 | Set-Content -Path .\spirv2dxil_results.txt
|
||||
$reference = Get-Content .\_install\spirv2dxil_reference.txt
|
||||
$result = Get-Content .\spirv2dxil_results.txt
|
||||
if (-Not ($reference -And $result)) {
|
||||
Exit 1
|
||||
}
|
||||
|
||||
$diff = Compare-Object -ReferenceObject $reference -DifferenceObject $result
|
||||
if (-Not $diff) {
|
||||
Exit 0
|
||||
}
|
||||
|
||||
Write-Host "Unexpected change in results:"
|
||||
Write-Output $diff | Format-Table -Property SideIndicator, InputObject -Wrap
|
||||
|
||||
Exit 1
|
5  .mailmap
@@ -107,8 +107,6 @@ Bruce Cherniak <bruce.cherniak@intel.com>
|
||||
|
||||
Bruce Merry <bmerry@users.sourceforge.net> <bmerry@gmail.com>
|
||||
|
||||
Caio Oliveira <caio.oliveira@intel.com>
|
||||
|
||||
Carl-Philip Hänsch <cphaensch@googlemail.com>
|
||||
Carl-Philip Hänsch <cphaensch@googlemail.com> <s3734770@mail.zih.tu-dresden.de>
|
||||
Carl-Philip Hänsch <cphaensch@googlemail.com> <carli@carli-laptop.(none)>
|
||||
@@ -297,8 +295,7 @@ Jan Vesely <jano.vesely@gmail.com> Jan Vesely <jan.vesely@rutgers.edu>
|
||||
|
||||
Jan Zielinski <jan.zielinski@intel.com> jzielins <jan.zielinski@intel.com>
|
||||
|
||||
Jason Ekstrand <jason.ekstrand@collabora.com> <jason@jlekstrand.net>
|
||||
Jason Ekstrand <jason.ekstrand@collabora.com> <jason.ekstrand@intel.com>
|
||||
Jason Ekstrand <jason@jlekstrand.net> <jason.ekstrand@intel.com>
|
||||
|
||||
Jeremy Huddleston <jeremyhu@apple.com>
|
||||
Jeremy Huddleston <jeremyhu@apple.com> <jeremyhu@freedesktop.org>
|
||||
|
27483  .pick_status.json
File diff suppressed because it is too large
131  CODEOWNERS
@@ -1,131 +0,0 @@
|
||||
# This file contains the GitLab handle of the maintainers/reviewers for
|
||||
# a given file:
|
||||
# https://docs.gitlab.com/ce/user/project/code_owners.html
|
||||
#
|
||||
# Consider these as the list of people who want to be involved in MRs
|
||||
# touching these files/folders, and whom you can ask your questions and
|
||||
# tag in issues.
|
||||
#
|
||||
# As of GitLab 14.3, all features surrounding this file are premium-only,
|
||||
# which means this file is only read by humans for now.
|
||||
#
|
||||
# Paths starting with a `/` are relative to the git root, otherwise they
|
||||
# can match any substring of the file's path.
|
||||
# If multiple lines match, only the last one applies; there is no
|
||||
# accumulation.
|
||||
|
||||
|
||||
##################
|
||||
# INFRASTRUCTURE #
|
||||
##################
|
||||
|
||||
# Build system - Meson
|
||||
meson.build @dbaker @eric
|
||||
/meson_options.txt @dbaker @eric
|
||||
/docs/meson.rst @dbaker @eric
|
||||
|
||||
# Build system - Android
|
||||
/android/ @roman.stratiienko
|
||||
|
||||
# Compatibility headers
|
||||
/include/c99* @evelikov
|
||||
/include/c11* @eric
|
||||
|
||||
# Documentation
|
||||
/docs/ @eric @evelikov
|
||||
|
||||
|
||||
##########
|
||||
# COMMON #
|
||||
##########
|
||||
|
||||
# NIR
|
||||
/src/compiler/nir/ @jekstrand
|
||||
|
||||
# Vulkan
|
||||
/src/vulkan/ @eric @jekstrand
|
||||
/include/vulkan/ @eric @jekstrand
|
||||
|
||||
|
||||
#############
|
||||
# PLATFORMS #
|
||||
#############
|
||||
|
||||
# EGL
|
||||
/src/egl/ @eric @evelikov
|
||||
/include/EGL/ @eric @evelikov
|
||||
|
||||
# EGL - Android support
|
||||
/src/egl/drivers/dri2/platform_android.c @robh @tfiga
|
||||
|
||||
# EGL - Device support
|
||||
/src/egl/drivers/dri2/platform_device.c @evelikov
|
||||
|
||||
# EGL - Wayland support
|
||||
/src/egl/wayland/ @daniels @eric
|
||||
/src/egl/drivers/dri2/platform_wayland.c @daniels @eric
|
||||
|
||||
# Gallium targets
|
||||
/src/gallium/targets/ @evelikov
|
||||
|
||||
# GLX
|
||||
/src/glx/ @ajax
|
||||
/include/GL/glx* @ajax
|
||||
|
||||
# GLVND
|
||||
/src/egl/main/eglglvnd.c @kbrenneman
|
||||
/src/egl/main/egldispatchstubs.* @kbrenneman
|
||||
/src/egl/generate/ @kbrenneman
|
||||
/src/glx/*glvnd* @kbrenneman
|
||||
|
||||
# Haiku
|
||||
/include/HaikuGL/ @kallisti5
|
||||
/src/egl/drivers/haiku/ @kallisti5
|
||||
/src/gallium/frontends/hgl/ @kallisti5
|
||||
/src/gallium/targets/haiku-softpipe/ @kallisti5
|
||||
/src/gallium/winsys/sw/hgl/ @kallisti5
|
||||
/src/hgl/ @kallisti5
|
||||
|
||||
# Loader - DRI/classic
|
||||
/src/loader/ @evelikov
|
||||
|
||||
# Loader - Gallium
|
||||
/src/gallium/auxiliary/pipe-loader/ @evelikov
|
||||
/src/gallium/auxiliary/target-helpers/ @evelikov
|
||||
|
||||
# Vulkan WSI - Display
|
||||
/src/vulkan/wsi/wsi_common_display.* @keithp
|
||||
/src/*/vulkan/*_wsi_display.c @keithp
|
||||
|
||||
|
||||
###########
|
||||
# Drivers #
|
||||
###########
|
||||
|
||||
# Asahi
|
||||
/src/asahi/ @alyssa
|
||||
/src/gallium/drivers/asahi/ @alyssa
|
||||
|
||||
# Freedreno
|
||||
/src/gallium/drivers/freedreno/ @robclark
|
||||
|
||||
# Intel
|
||||
/include/drm-uapi/i915_drm.h @kwg @llandwerlin @jekstrand @idr
|
||||
/include/pci_ids/i*_pci_ids.h @kwg @llandwerlin @jekstrand @idr
|
||||
/src/intel/ @kwg @llandwerlin @jekstrand @idr
|
||||
/src/gallium/winsys/iris/ @kwg @llandwerlin @jekstrand @idr
|
||||
/src/gallium/drivers/iris/ @kwg @llandwerlin @jekstrand @idr
|
||||
/src/gallium/drivers/i915/ @anholt
|
||||
|
||||
# Microsoft
|
||||
/src/microsoft/ @jenatali
|
||||
/src/gallium/drivers/d3d12/ @jenatali
|
||||
|
||||
# Panfrost
|
||||
/src/panfrost/ @alyssa
|
||||
/src/panfrost/vulkan/ @bbrezillon
|
||||
/src/gallium/drivers/panfrost/ @alyssa
|
||||
|
||||
# VMware
|
||||
/src/gallium/drivers/svga/ @brianp @charmainel
|
||||
/src/gallium/winsys/svga/ @thomash @drawat
|
114  REVIEWERS  Normal file
@@ -0,0 +1,114 @@
|
||||
Overview:
|
||||
|
||||
This file is similar in syntax (or more precisly a subset) of what is
|
||||
used by the MAINTAINERS file in the linux kernel.
|
||||
The purpose is not exactly the same the MAINTAINERS file in the linux
|
||||
kernel, as there are not official/formal maintainers of different
|
||||
subsystems in mesa, but is meant to give an idea of who to CC for
|
||||
various patches for review.
|
||||
|
||||
Descriptions of section entries:
|
||||
|
||||
R: Designated reviewer: FullName <address@domain>
|
||||
These reviewers should be CCed on patches.
|
||||
F: Files and directories with wildcard patterns.
|
||||
A trailing slash includes all files and subdirectory files.
|
||||
F: drivers/net/ all files in and below drivers/net
|
||||
F: drivers/net/* all files in drivers/net, but not below
|
||||
F: */net/* all files in "any top level directory"/net
|
||||
One pattern per line. Multiple F: lines acceptable.
|
||||
|
||||
Maintainers List (try to look for most precise areas first)
|
||||
|
||||
Note: this is an opt-in system, I have not tried to add anyone who hasn't
|
||||
either asked me or sent a patch to add themselves.
|
||||
|
||||
-----------------------------------
|
||||
|
||||
NIR
|
||||
R: Jason Ekstrand <jason@jlekstrand.net>
|
||||
F: src/compiler/nir/
|
||||
|
||||
DOCUMENTATION
|
||||
R: Emil Velikov <emil.l.velikov@gmail.com>
|
||||
R: Eric Engestrom <eric@engestrom.ch>
|
||||
F: docs/
|
||||
|
||||
COMPATIBILITY HEADERS
|
||||
R: Emil Velikov <emil.l.velikov@gmail.com>
|
||||
F: include/c99*
|
||||
|
||||
DRI LOADER
|
||||
R: Emil Velikov <emil.l.velikov@gmail.com>
|
||||
F: src/loader/
|
||||
|
||||
EGL
|
||||
R: Eric Engestrom <eric@engestrom.ch>
|
||||
R: Emil Velikov <emil.l.velikov@gmail.com>
|
||||
F: src/egl/
|
||||
F: include/EGL/
|
||||
|
||||
HAIKU
|
||||
R: Alexander von Gluck IV <kallisti5@unixzen.com>
|
||||
F: include/HaikuGL/
|
||||
F: src/egl/drivers/haiku/
|
||||
F: src/gallium/frontends/hgl/
|
||||
F: src/gallium/targets/haiku-softpipe/
|
||||
F: src/gallium/winsys/sw/hgl/
|
||||
F: src/hgl/
|
||||
|
||||
GALLIUM LOADER
|
||||
R: Emil Velikov <emil.l.velikov@gmail.com>
|
||||
F: src/gallium/auxiliary/pipe-loader/
|
||||
F: src/gallium/auxiliary/target-helpers/
|
||||
|
||||
GALLIUM TARGETS
|
||||
R: Emil Velikov <emil.l.velikov@gmail.com>
|
||||
F: src/gallium/targets/
|
||||
|
||||
ANDROID BUILD
|
||||
R: Emil Velikov <emil.l.velikov@gmail.com>
|
||||
R: Rob Herring <robh@kernel.org>
|
||||
F: CleanSpec.mk
|
||||
F: */Android.*mk
|
||||
F: */Makefile.sources
|
||||
|
||||
MESON BUILD
|
||||
R: Dylan Baker <dylan@pnwbakers.com>
|
||||
R: Eric Engestrom <eric@engestrom.ch>
|
||||
F: */meson.build
|
||||
F: meson.build
|
||||
F: meson_options.txt
|
||||
|
||||
ANDROID EGL SUPPORT
|
||||
R: Rob Herring <robh@kernel.org>
|
||||
R: Tomasz Figa <tfiga@chromium.org>
|
||||
F: src/egl/drivers/dri2/platform_android.c
|
||||
|
||||
WAYLAND EGL SUPPORT
|
||||
R: Daniel Stone <daniels@collabora.com>
|
||||
F: src/egl/wayland/*
|
||||
F: src/egl/drivers/dri2/platform_wayland.c
|
||||
|
||||
FREEDRENO
|
||||
R: Rob Clark <robclark@freedesktop.org>
|
||||
F: src/gallium/drivers/freedreno/
|
||||
|
||||
GLX
|
||||
R: Adam Jackson <ajax@redhat.com>
|
||||
F: src/glx/
|
||||
|
||||
VULKAN
|
||||
R: Eric Engestrom <eric@engestrom.ch>
|
||||
F: src/vulkan/
|
||||
F: include/vulkan/
|
||||
|
||||
VMWARE DRIVER
|
||||
R: Brian Paul <brianp@vmware.com>
|
||||
R: Charmaine Lee <charmainel@vmware.com>
|
||||
F: src/gallium/drivers/svga/
|
||||
|
||||
VMWARE WINSYS CODE
|
||||
R: Thomas Hellstrom <thellstrom@vmware.com>
|
||||
R: Deepak Rawat <drawat@vmware.com>
|
||||
F: src/gallium/winsys/svga/
|
@@ -26,13 +26,10 @@ ifneq ($(filter true, $(BOARD_MESA3D_USES_MESON_BUILD)),)
|
||||
LOCAL_PATH := $(call my-dir)
|
||||
MESA3D_TOP := $(dir $(LOCAL_PATH))
|
||||
|
||||
LIBDRM_VERSION = $(shell cat external/libdrm/meson.build | grep -o "\<version\>\s*:\s*'\w*\.\w*\.\w*'" | grep -o "\w*\.\w*\.\w*" | head -1)
|
||||
|
||||
MESA_VK_LIB_SUFFIX_amd := radeon
|
||||
MESA_VK_LIB_SUFFIX_intel := intel
|
||||
MESA_VK_LIB_SUFFIX_freedreno := freedreno
|
||||
MESA_VK_LIB_SUFFIX_broadcom := broadcom
|
||||
MESA_VK_LIB_SUFFIX_panfrost := panfrost
|
||||
MESA_VK_LIB_SUFFIX_virtio-experimental := virtio
|
||||
MESA_VK_LIB_SUFFIX_swrast := lvp
|
||||
|
||||
@@ -41,14 +38,11 @@ include $(CLEAR_VARS)
|
||||
LOCAL_SHARED_LIBRARIES := libc libdl libdrm libm liblog libcutils libz libc++ libnativewindow libsync libhardware
|
||||
LOCAL_STATIC_LIBRARIES := libexpat libarect libelf
|
||||
LOCAL_HEADER_LIBRARIES := libnativebase_headers hwvulkan_headers libbacktrace_headers
|
||||
MESON_GEN_PKGCONFIGS := backtrace cutils expat hardware libdrm:$(LIBDRM_VERSION) nativewindow sync zlib:1.2.11 libelf
|
||||
LOCAL_CFLAGS += $(BOARD_MESA3D_CFLAGS)
|
||||
MESON_GEN_PKGCONFIGS := backtrace cutils expat hardware libdrm:2.4.105 nativewindow sync zlib:1.2.11 libelf
|
||||
|
||||
ifneq ($(filter swrast,$(BOARD_MESA3D_GALLIUM_DRIVERS) $(BOARD_MESA3D_VULKAN_DRIVERS)),)
|
||||
ifeq ($(BOARD_MESA3D_FORCE_SOFTPIPE),)
|
||||
ifneq ($(filter swr swrast,$(BOARD_MESA3D_GALLIUM_DRIVERS) $(BOARD_MESA3D_VULKAN_DRIVERS)),)
|
||||
MESON_GEN_LLVM_STUB := true
|
||||
endif
|
||||
endif
|
||||
|
||||
ifneq ($(filter zink,$(BOARD_MESA3D_GALLIUM_DRIVERS)),)
|
||||
LOCAL_SHARED_LIBRARIES += libvulkan
|
||||
@@ -57,35 +51,31 @@ endif
|
||||
|
||||
ifneq ($(filter iris,$(BOARD_MESA3D_GALLIUM_DRIVERS)),)
|
||||
LOCAL_SHARED_LIBRARIES += libdrm_intel
|
||||
MESON_GEN_PKGCONFIGS += libdrm_intel:$(LIBDRM_VERSION)
|
||||
MESON_GEN_PKGCONFIGS += libdrm_intel:2.4.105
|
||||
endif
|
||||
|
||||
ifneq ($(filter radeonsi amd,$(BOARD_MESA3D_GALLIUM_DRIVERS) $(BOARD_MESA3D_VULKAN_DRIVERS)),)
|
||||
MESON_GEN_LLVM_STUB := true
|
||||
LOCAL_CFLAGS += -DFORCE_BUILD_AMDGPU # instructs LLVM to declare LLVMInitializeAMDGPU* functions
|
||||
LOCAL_SHARED_LIBRARIES += libdrm_amdgpu
|
||||
MESON_GEN_PKGCONFIGS += libdrm_amdgpu:$(LIBDRM_VERSION)
|
||||
MESON_GEN_PKGCONFIGS += libdrm_amdgpu:2.4.105
|
||||
endif
|
||||
|
||||
ifneq ($(filter radeonsi r300 r600,$(BOARD_MESA3D_GALLIUM_DRIVERS)),)
|
||||
LOCAL_SHARED_LIBRARIES += libdrm_radeon
|
||||
MESON_GEN_PKGCONFIGS += libdrm_radeon:$(LIBDRM_VERSION)
|
||||
MESON_GEN_PKGCONFIGS += libdrm_radeon:2.4.105
|
||||
endif
|
||||
|
||||
ifneq ($(filter nouveau,$(BOARD_MESA3D_GALLIUM_DRIVERS)),)
|
||||
LOCAL_SHARED_LIBRARIES += libdrm_nouveau
|
||||
MESON_GEN_PKGCONFIGS += libdrm_nouveau:$(LIBDRM_VERSION)
|
||||
endif
|
||||
|
||||
ifneq ($(filter d3d12,$(BOARD_MESA3D_GALLIUM_DRIVERS)),)
|
||||
LOCAL_HEADER_LIBRARIES += DirectX-Headers
|
||||
LOCAL_STATIC_LIBRARIES += DirectX-Guids
|
||||
MESON_GEN_PKGCONFIGS += DirectX-Headers
|
||||
MESON_GEN_PKGCONFIGS += libdrm_nouveau:2.4.105
|
||||
endif
|
||||
|
||||
ifneq ($(MESON_GEN_LLVM_STUB),)
|
||||
MESON_LLVM_VERSION := 12.0.0
|
||||
LOCAL_SHARED_LIBRARIES += libLLVM12
|
||||
MESON_LLVM_VERSION := 11.0.0
|
||||
# Required for swr gallium target
|
||||
MESON_LLVM_IRBUILDER_PATH := external/llvm-project/llvm/include/llvm/IR/IRBuilder.h
|
||||
LOCAL_SHARED_LIBRARIES += libLLVM11
|
||||
endif
|
||||
|
||||
ifeq ($(shell test $(PLATFORM_SDK_VERSION) -ge 30; echo $$?), 0)
|
||||
|
@@ -93,7 +93,6 @@ MESON_GEN_NINJA := \
|
||||
-Dvulkan-drivers=$(subst $(space),$(comma),$(subst radeon,amd,$(BOARD_MESA3D_VULKAN_DRIVERS))) \
|
||||
-Dgbm=enabled \
|
||||
-Degl=enabled \
|
||||
-Dcpp_rtti=false \
|
||||
|
||||
MESON_BUILD := PATH=/usr/bin:/bin:/sbin:$$PATH ninja -C $(MESON_OUT_DIR)/build
|
||||
|
||||
@@ -129,6 +128,7 @@ $(MESON_GEN_FILES_TARGET): PRIVATE_C_INCLUDES := $(my_c_includes)
|
||||
$(MESON_GEN_FILES_TARGET): PRIVATE_IMPORTED_INCLUDES := $(imported_includes)
|
||||
$(MESON_GEN_FILES_TARGET): PRIVATE_LDFLAGS := $(my_ldflags)
|
||||
$(MESON_GEN_FILES_TARGET): PRIVATE_LDLIBS := $(my_ldlibs)
|
||||
$(MESON_GEN_FILES_TARGET): PRIVATE_TARGET_GLOBAL_LDFLAGS := $(my_target_global_ldflags)
|
||||
$(MESON_GEN_FILES_TARGET): PRIVATE_TIDY_CHECKS := $(my_tidy_checks)
|
||||
$(MESON_GEN_FILES_TARGET): PRIVATE_TIDY_FLAGS := $(my_tidy_flags)
|
||||
$(MESON_GEN_FILES_TARGET): PRIVATE_ARFLAGS := $(my_arflags)
|
||||
@@ -139,11 +139,6 @@ $(MESON_GEN_FILES_TARGET): PRIVATE_ALL_OBJECTS := $(strip $(all_objects))
|
||||
|
||||
$(MESON_GEN_FILES_TARGET): PRIVATE_ARM_CFLAGS := $(normal_objects_cflags)
|
||||
|
||||
$(MESON_GEN_FILES_TARGET): PRIVATE_TARGET_GLOBAL_CFLAGS := $(my_target_global_cflags)
|
||||
$(MESON_GEN_FILES_TARGET): PRIVATE_TARGET_GLOBAL_CONLYFLAGS := $(my_target_global_conlyflags)
|
||||
$(MESON_GEN_FILES_TARGET): PRIVATE_TARGET_GLOBAL_CPPFLAGS := $(my_target_global_cppflags)
|
||||
$(MESON_GEN_FILES_TARGET): PRIVATE_TARGET_GLOBAL_LDFLAGS := $(my_target_global_ldflags)
|
||||
|
||||
$(MESON_GEN_FILES_TARGET): PRIVATE_TARGET_LIBCRT_BUILTINS := $(my_target_libcrt_builtins)
|
||||
$(MESON_GEN_FILES_TARGET): PRIVATE_TARGET_LIBATOMIC := $(my_target_libatomic)
|
||||
$(MESON_GEN_FILES_TARGET): PRIVATE_TARGET_CRTBEGIN_SO_O := $(my_target_crtbegin_so_o)
|
||||
@@ -257,7 +252,8 @@ ifneq ($(MESON_GEN_LLVM_STUB),)
|
||||
mkdir -p $(dir $@)/subprojects/llvm/
|
||||
echo -e "project('llvm', 'cpp', version : '$(MESON_LLVM_VERSION)')\n" \
|
||||
"dep_llvm = declare_dependency()\n" \
|
||||
"has_rtti = false\n" > $(dir $@)/subprojects/llvm/meson.build
|
||||
"has_rtti = false\n" \
|
||||
"irbuilder_h = files('$(AOSP_ABSOLUTE_PATH)/$(MESON_LLVM_IRBUILDER_PATH)')" > $(dir $@)/subprojects/llvm/meson.build
|
||||
endif
|
||||
$(MESON_GEN_NINJA)
|
||||
$(MESON_BUILD)
|
||||
@@ -276,13 +272,12 @@ $(MESON_OUT_DIR)/install/.install.timestamp: $(MESON_OUT_DIR)/.build.timestamp
|
||||
touch $@
|
||||
|
||||
$($(M_TARGET_PREFIX)MESA3D_LIBGBM_BIN) $(MESA3D_GLES_BINS): $(MESON_OUT_DIR)/install/.install.timestamp
|
||||
echo "Build $@"
|
||||
touch $@
|
||||
echo "Build $@"\
|
||||
|
||||
define vulkan_target
|
||||
$(M_TARGET_PREFIX)MESA3D_VULKAN_$1_BIN := $(MESON_OUT_DIR)/install/usr/local/lib/libvulkan_$(MESA_VK_LIB_SUFFIX_$1).so
|
||||
$(MESON_OUT_DIR)/install/usr/local/lib/libvulkan_$(MESA_VK_LIB_SUFFIX_$1).so: $(MESON_OUT_DIR)/install/.install.timestamp
|
||||
touch $(MESON_OUT_DIR)/install/usr/local/lib/libvulkan_$(MESA_VK_LIB_SUFFIX_$1).so
|
||||
echo $@
|
||||
|
||||
endef
|
||||
|
||||
@@ -297,4 +292,3 @@ $($(M_TARGET_PREFIX)TARGET_OUT_VENDOR_SHARED_LIBRARIES)/dri/.symlinks.timestamp:
|
||||
|
||||
$($(M_TARGET_PREFIX)MESA3D_GALLIUM_DRI_BIN): $(TARGET_OUT_VENDOR)/$(MESA3D_LIB_DIR)/dri/.symlinks.timestamp
|
||||
echo "Build $@"
|
||||
touch $@
|
||||
|
151 bin/auto-pick.py
@@ -1,151 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# SPDX-License-Identifier: MIT
|
||||
# Copyright © 2022 Intel Corporation
|
||||
|
||||
"""Tool that automatically applies patches and tests them as possible."""
|
||||
|
||||
from __future__ import annotations
|
||||
import asyncio
|
||||
import sys
|
||||
import typing
|
||||
|
||||
from pick import core
|
||||
|
||||
import aiohttp
|
||||
|
||||
|
||||
async def revert() -> None:
|
||||
await reset('HEAD~')
|
||||
|
||||
|
||||
async def reset(to: str = 'HEAD') -> None:
|
||||
p = await asyncio.create_subprocess_exec(
|
||||
'git', 'reset', '--hard', to,
|
||||
stdout=asyncio.subprocess.DEVNULL,
|
||||
stderr=asyncio.subprocess.DEVNULL,
|
||||
)
|
||||
await p.wait()
|
||||
|
||||
|
||||
async def git_push(commit: typing.Optional[core.Commit], commits: typing.List[core.Commit],
|
||||
force: bool = False) -> None:
|
||||
cmd = ['git', 'push']
|
||||
if force:
|
||||
cmd.append('-f')
|
||||
p = await asyncio.create_subprocess_exec(
|
||||
*cmd,
|
||||
stdout=asyncio.subprocess.DEVNULL,
|
||||
stderr=asyncio.subprocess.DEVNULL,
|
||||
)
|
||||
if await p.wait() != 0:
|
||||
print('    Critical Error: failed to push to gitlab')
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
async def set_need_manual_resolution(commit: core.Commit, commits: typing.List[core.Commit], force_push: bool = True) -> None:
|
||||
commit.resolution = core.Resolution.MANUAL_RESOLUTION
|
||||
core.save(commits)
|
||||
await core.commit_state(message=f'Mark {commit.sha} as needing manual resolution')
|
||||
await git_push(commit, commits, force_push)
|
||||
|
||||
|
||||
async def main(loop: asyncio.BaseEventLoop) -> None:
|
||||
commits = await core.update_commits()
|
||||
new_commits = [c for c in commits if
|
||||
c.nominated and c.resolution is core.Resolution.UNRESOLVED]
|
||||
failed: typing.Set[str] = set()
|
||||
|
||||
print(' Sanity testing', flush=True)
|
||||
p = await asyncio.create_subprocess_exec(
|
||||
'meson', 'setup', '--reconfigure', 'builddir',
|
||||
stdout=asyncio.subprocess.DEVNULL,
|
||||
stderr=asyncio.subprocess.DEVNULL,
|
||||
)
|
||||
if await p.wait() != 0:
|
||||
print('ERROR: sanity check failed!')
|
||||
sys.exit(2)
|
||||
|
||||
with open('VERSION', 'r') as f:
|
||||
version = f.read().split('-')[0].strip()
|
||||
version = '.'.join(version.split('.')[:2])
|
||||
url = 'https://gitlab.freedesktop.org/api/v4/projects/176/pipelines'
|
||||
params = {
|
||||
'ref': f'staging/{version}',
|
||||
'per_page': '1',
|
||||
}
|
||||
|
||||
lock = asyncio.Lock()
|
||||
|
||||
for commit in reversed(new_commits):
|
||||
async with lock:
|
||||
print(f'Commit: {commit.sha}: {commit.description}')
|
||||
if commit.because_sha in failed:
|
||||
# This isn't actually failed, but in a case like:
|
||||
# C requires B, B requires A, A fails to apply
|
||||
# We want C to be excluded as well
|
||||
failed.add(commit.sha)
|
||||
print(' Not applying because the commit it fixes was not applied successfully')
|
||||
continue
|
||||
result, _ = await commit.apply()
|
||||
if not result:
|
||||
failed.add(commit.sha)
|
||||
print(f' FAILED to apply: {commit.sha}: {commit.description}')
|
||||
await reset()
|
||||
await set_need_manual_resolution(commit, commits, force_push=False)
|
||||
continue
|
||||
|
||||
print(' Compiling project', flush=True)
|
||||
# TODO: make builddir configurable?
|
||||
p = await asyncio.create_subprocess_exec(
|
||||
'ninja', '-C', 'builddir', 'test',
|
||||
stdout=asyncio.subprocess.DEVNULL,
|
||||
stderr=asyncio.subprocess.DEVNULL,
|
||||
)
|
||||
if await p.wait() != 0:
|
||||
failed.add(commit.sha)
|
||||
print(f' FAILED to compile: {commit.sha}: {commit.description}, reverting')
|
||||
await revert()
|
||||
await set_need_manual_resolution(commit, commits, force_push=False)
|
||||
continue
|
||||
|
||||
print(' Pushing update to git', flush=True)
|
||||
# update the commit log with merged so that we don't force push and
|
||||
# hide the gitlab pipeline results.
|
||||
commit.resolution = core.Resolution.MERGED
|
||||
core.save(commits)
|
||||
await core.commit_state(amend=True)
|
||||
await git_push(commit, commits)
|
||||
|
||||
print('  Waiting for CI to finish: ', end='', flush=True)
|
||||
async with aiohttp.ClientSession(loop=loop) as session:
|
||||
async with session.get(url, params=params) as response:
|
||||
content = await response.json()
|
||||
id_ = content[0]['id']
|
||||
while True:
|
||||
async with session.get(f'{url}/{id_}') as response:
|
||||
content = await response.json()
|
||||
status: str = content['status']
|
||||
if status in {'created', 'waiting_for_resources', 'preparing', 'pending',
|
||||
'running', 'scheduled'}:
|
||||
print('.', end='', flush=True)
|
||||
await asyncio.sleep(60)
|
||||
continue
|
||||
elif status == 'success':
|
||||
print(f'\n Successfully applied: {commit.sha}')
|
||||
break
|
||||
else:
|
||||
if status == 'failed':
|
||||
print(f'\n CI Failed: {commit.sha}')
|
||||
else:
|
||||
print(f'\n Unexpected CI status "{status}": {commit.sha}')
|
||||
failed.add(commit.sha)
|
||||
await revert()
|
||||
await set_need_manual_resolution(commit, commits)
|
||||
break
|
||||
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.run_until_complete(main(loop))
|
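For reference, the pipeline polling in auto-pick.py above boils down to the following stand-alone sketch; the project id (176) and the ``staging/<version>`` ref come from the script itself, everything else is illustrative.

.. code-block:: python

   import asyncio
   import aiohttp

   async def latest_pipeline_status(version: str) -> str:
       # Same endpoint and parameters auto-pick.py uses to find the newest
       # staging pipeline, followed by a second request for its status.
       url = 'https://gitlab.freedesktop.org/api/v4/projects/176/pipelines'
       params = {'ref': f'staging/{version}', 'per_page': '1'}
       async with aiohttp.ClientSession() as session:
           async with session.get(url, params=params) as response:
               pipelines = await response.json()
           async with session.get(f'{url}/{pipelines[0]["id"]}') as response:
               return (await response.json())['status']

   print(asyncio.run(latest_pipeline_status('21.2')))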
@@ -105,7 +105,7 @@ def release_candidate(args: RCArguments) -> None:
|
||||
|
||||
data = read_calendar()
|
||||
|
||||
with CALENDAR_CSV.open('w', newline='') as f:
|
||||
with CALENDAR_CSV.open('w') as f:
|
||||
writer = csv.writer(f)
|
||||
writer.writerows(data)
|
||||
|
||||
@@ -147,7 +147,7 @@ def final_release(args: FinalArguments) -> None:
|
||||
data = read_calendar()
|
||||
date = _calculate_next_release_date(not args.zero_released)
|
||||
|
||||
with CALENDAR_CSV.open('w', newline='') as f:
|
||||
with CALENDAR_CSV.open('w') as f:
|
||||
writer = csv.writer(f)
|
||||
writer.writerows(data)
|
||||
|
||||
@@ -199,7 +199,7 @@ def extend(args: ExtendArguments) -> None:
|
||||
|
||||
current = read_calendar()
|
||||
|
||||
with CALENDAR_CSV.open('w', newline='') as f:
|
||||
with CALENDAR_CSV.open('w') as f:
|
||||
writer = csv.writer(f)
|
||||
with write_existing(writer, current) as row:
|
||||
# Get rid of -rcX as well
|
||||
|
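The hunks above toggle the ``newline=''`` argument when the release calendar CSV is rewritten. A minimal sketch of why that argument matters with ``csv.writer``, assuming nothing beyond the standard library; the row content is made up.

.. code-block:: python

   import csv

   rows = [['22.2', '2022-08-03', '22.2.0-rc1'], ['22.2', '2022-08-10', '22.2.0-rc2']]

   # csv.writer emits its own '\r\n' line terminators.  Opening the file with
   # newline='' stops the text layer from translating them again, which on
   # Windows would otherwise produce '\r\r\n' and blank lines between rows.
   with open('calendar.csv', 'w', newline='') as f:
       csv.writer(f).writerows(rows)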
@@ -182,7 +182,9 @@ async def gather_commits(version: str) -> str:
|
||||
return out.decode().strip()
|
||||
|
||||
|
||||
async def parse_issues(commits: str) -> typing.List[str]:
|
||||
async def gather_bugs(version: str) -> typing.List[str]:
|
||||
commits = await gather_commits(version)
|
||||
|
||||
issues: typing.List[str] = []
|
||||
for commit in commits.split('\n'):
|
||||
sha, message = commit.split(maxsplit=1)
|
||||
@@ -191,24 +193,17 @@ async def parse_issues(commits: str) -> typing.List[str]:
|
||||
stdout=asyncio.subprocess.PIPE)
|
||||
_out, _ = await p.communicate()
|
||||
out = _out.decode().split('\n')
|
||||
|
||||
for line in reversed(out):
|
||||
if line.startswith('Closes:'):
|
||||
bug = line.lstrip('Closes:').strip()
|
||||
if bug.startswith('https://gitlab.freedesktop.org/mesa/mesa'):
|
||||
# This means we have a bug in the form "Closes: https://..."
|
||||
issues.append(os.path.basename(urllib.parse.urlparse(bug).path))
|
||||
elif ',' in bug:
|
||||
issues.extend([b.strip().lstrip('#') for b in bug.split(',')])
|
||||
elif bug.startswith('#'):
|
||||
issues.append(bug.lstrip('#'))
|
||||
|
||||
return issues
|
||||
|
||||
|
||||
async def gather_bugs(version: str) -> typing.List[str]:
|
||||
commits = await gather_commits(version)
|
||||
issues = await parse_issues(commits)
|
||||
break
|
||||
else:
|
||||
raise Exception('No closes found?')
|
||||
if bug.startswith('h'):
|
||||
# This means we have a bug in the form "Closes: https://..."
|
||||
issues.append(os.path.basename(urllib.parse.urlparse(bug).path))
|
||||
else:
|
||||
issues.append(bug.lstrip('#'))
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
async with aiohttp.ClientSession(loop=loop) as session:
|
||||
|
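The hunk above reworks how ``Closes:`` trailers are turned into issue numbers. A condensed, stand-alone sketch of the three accepted forms (plain ``#N``, a full mesa/mesa issue URL, and a comma-separated list), written against the logic shown; it is illustrative rather than the exact helper used by the script.

.. code-block:: python

   import os
   import urllib.parse

   def issues_from_closes(line: str) -> list:
       bug = line[len('Closes:'):].strip()
       if bug.startswith('https://gitlab.freedesktop.org/mesa/mesa'):
           # Full issue URL: keep only the trailing issue number.
           return [os.path.basename(urllib.parse.urlparse(bug).path)]
       if ',' in bug:
           # Several issues on one line: "Closes: #1, #2"
           return [b.strip().lstrip('#') for b in bug.split(',')]
       return [bug.lstrip('#')]

   assert issues_from_closes('Closes: #1, #2') == ['1', '2']
   assert issues_from_closes(
       'Closes: https://gitlab.freedesktop.org/mesa/mesa/-/issues/3456') == ['3456']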
@@ -1,4 +1,4 @@
|
||||
# Copyright © 2019,2021 Intel Corporation
|
||||
# Copyright © 2019 Intel Corporation
|
||||
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
@@ -18,19 +18,8 @@
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
import sys
|
||||
import textwrap
|
||||
import typing
|
||||
|
||||
import pytest
|
||||
|
||||
# AsyncMock is new in 3.8, so if we're using an older version we need the
|
||||
# backported version of mock
|
||||
if sys.version_info >= (3, 8):
|
||||
from unittest import mock
|
||||
else:
|
||||
import mock
|
||||
|
||||
from .gen_release_notes import *
|
||||
|
||||
|
||||
@@ -69,93 +58,3 @@ async def test_gather_commits():
|
||||
version = '19.2.0'
|
||||
out = await gather_commits(version)
|
||||
assert out
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize(
|
||||
'content, bugs',
|
||||
[
|
||||
# It is important to have the title on a new line, as
|
||||
# textwrap.dedent won't work otherwise.
|
||||
|
||||
# Test the `Closes: #N` syntax
|
||||
(
|
||||
'''\
|
||||
A commit
|
||||
|
||||
It has a message in it
|
||||
|
||||
Closes: #1
|
||||
''',
|
||||
['1'],
|
||||
),
|
||||
|
||||
# Test the Full url
|
||||
(
|
||||
'''\
|
||||
A commit with no body
|
||||
|
||||
Closes: https://gitlab.freedesktop.org/mesa/mesa/-/issues/3456
|
||||
''',
|
||||
['3456'],
|
||||
),
|
||||
|
||||
# Test projects that are not mesa
|
||||
(
|
||||
'''\
|
||||
A commit for libdrm
|
||||
|
||||
Closes: https://gitlab.freedesktop.org/mesa/drm/-/3456
|
||||
''',
|
||||
[],
|
||||
),
|
||||
(
|
||||
'''\
|
||||
A commit for for something else completely
|
||||
|
||||
Closes: https://github.com/Organiztion/project/1234
|
||||
''',
|
||||
[],
|
||||
),
|
||||
|
||||
# Test multiple issues on one line
|
||||
(
|
||||
'''\
|
||||
Fix many bugs
|
||||
|
||||
Closes: #1, #2
|
||||
''',
|
||||
['1', '2'],
|
||||
),
|
||||
|
||||
# Test multiple closes
|
||||
(
|
||||
'''\
|
||||
Fix many bugs
|
||||
|
||||
Closes: #1
|
||||
Closes: #2
|
||||
''',
|
||||
['1', '2'],
|
||||
),
|
||||
(
|
||||
'''\
|
||||
With long form
|
||||
|
||||
Closes: https://gitlab.freedesktop.org/mesa/mesa/-/issues/3456
|
||||
Closes: https://gitlab.freedesktop.org/mesa/mesa/-/issues/3457
|
||||
Closes: https://gitlab.freedesktop.org/mesa/mesa/-/issues/3458
|
||||
''',
|
||||
['3456', '3457', '3458'],
|
||||
),
|
||||
])
|
||||
async def test_parse_issues(content: str, bugs: typing.List[str]) -> None:
|
||||
mock_com = mock.AsyncMock(return_value=(textwrap.dedent(content).encode(), ''))
|
||||
mock_p = mock.Mock()
|
||||
mock_p.communicate = mock_com
|
||||
mock_exec = mock.AsyncMock(return_value=mock_p)
|
||||
|
||||
with mock.patch('bin.gen_release_notes.asyncio.create_subprocess_exec', mock_exec), \
|
||||
mock.patch('bin.gen_release_notes.gather_commits', mock.AsyncMock(return_value='sha\n')):
|
||||
ids = await parse_issues('1234 not used')
|
||||
assert set(ids) == set(bugs)
|
||||
|
@@ -22,6 +22,7 @@
|
||||
|
||||
"""Script to install megadriver symlinks for meson."""
|
||||
|
||||
from __future__ import print_function
|
||||
import argparse
|
||||
import os
|
||||
|
||||
|
@@ -47,7 +47,7 @@ SOURCES = [
|
||||
'api': 'khr',
|
||||
'inc_folder': 'KHR',
|
||||
'sources': [
|
||||
Source('include/KHR/khrplatform.h', 'https://github.com/KhronosGroup/EGL-Registry/raw/main/api/KHR/khrplatform.h'),
|
||||
Source('include/KHR/khrplatform.h', 'https://github.com/KhronosGroup/EGL-Registry/raw/master/api/KHR/khrplatform.h'),
|
||||
],
|
||||
},
|
||||
|
||||
@@ -55,10 +55,10 @@ SOURCES = [
|
||||
'api': 'egl',
|
||||
'inc_folder': 'EGL',
|
||||
'sources': [
|
||||
Source('src/egl/generate/egl.xml', 'https://github.com/KhronosGroup/EGL-Registry/raw/main/api/egl.xml'),
|
||||
Source('include/EGL/egl.h', 'https://github.com/KhronosGroup/EGL-Registry/raw/main/api/EGL/egl.h'),
|
||||
Source('include/EGL/eglplatform.h', 'https://github.com/KhronosGroup/EGL-Registry/raw/main/api/EGL/eglplatform.h'),
|
||||
Source('include/EGL/eglext.h', 'https://github.com/KhronosGroup/EGL-Registry/raw/main/api/EGL/eglext.h'),
|
||||
Source('src/egl/generate/egl.xml', 'https://github.com/KhronosGroup/EGL-Registry/raw/master/api/egl.xml'),
|
||||
Source('include/EGL/egl.h', 'https://github.com/KhronosGroup/EGL-Registry/raw/master/api/EGL/egl.h'),
|
||||
Source('include/EGL/eglplatform.h', 'https://github.com/KhronosGroup/EGL-Registry/raw/master/api/EGL/eglplatform.h'),
|
||||
Source('include/EGL/eglext.h', 'https://github.com/KhronosGroup/EGL-Registry/raw/master/api/EGL/eglext.h'),
|
||||
Source('include/EGL/eglextchromium.h', 'https://chromium.googlesource.com/chromium/src/+/refs/heads/master/ui/gl/EGL/eglextchromium.h?format=TEXT'),
|
||||
Source('include/EGL/eglext_angle.h', 'https://chromium.googlesource.com/angle/angle/+/refs/heads/master/include/EGL/eglext_angle.h?format=TEXT'),
|
||||
Source('include/EGL/eglmesaext.h', None),
|
||||
@@ -69,11 +69,11 @@ SOURCES = [
|
||||
'api': 'gl',
|
||||
'inc_folder': 'GL',
|
||||
'sources': [
|
||||
Source('src/mapi/glapi/registry/gl.xml', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/xml/gl.xml'),
|
||||
Source('include/GL/glcorearb.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GL/glcorearb.h'),
|
||||
Source('include/GL/glext.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GL/glext.h'),
|
||||
Source('include/GL/glxext.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GL/glxext.h'),
|
||||
Source('include/GL/wglext.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GL/wglext.h'),
|
||||
Source('src/mapi/glapi/registry/gl.xml', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/master/xml/gl.xml'),
|
||||
Source('include/GL/glcorearb.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/master/api/GL/glcorearb.h'),
|
||||
Source('include/GL/glext.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/master/api/GL/glext.h'),
|
||||
Source('include/GL/glxext.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/master/api/GL/glxext.h'),
|
||||
Source('include/GL/wglext.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/master/api/GL/wglext.h'),
|
||||
Source('include/GL/gl.h', None), # FIXME: I don't know what the canonical source is
|
||||
Source('include/GL/glx.h', None), # FIXME: I don't know what the canonical source is
|
||||
Source('include/GL/internal/', None),
|
||||
@@ -86,10 +86,10 @@ SOURCES = [
|
||||
'api': 'gles1',
|
||||
'inc_folder': 'GLES',
|
||||
'sources': [
|
||||
Source('include/GLES/gl.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES/gl.h'),
|
||||
Source('include/GLES/glplatform.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES/glplatform.h'),
|
||||
Source('include/GLES/glext.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES/glext.h'),
|
||||
Source('include/GLES/egl.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES/egl.h'),
|
||||
Source('include/GLES/gl.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/master/api/GLES/gl.h'),
|
||||
Source('include/GLES/glplatform.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/master/api/GLES/glplatform.h'),
|
||||
Source('include/GLES/glext.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/master/api/GLES/glext.h'),
|
||||
Source('include/GLES/egl.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/master/api/GLES/egl.h'),
|
||||
],
|
||||
},
|
||||
|
||||
@@ -97,9 +97,9 @@ SOURCES = [
|
||||
'api': 'gles2',
|
||||
'inc_folder': 'GLES2',
|
||||
'sources': [
|
||||
Source('include/GLES2/gl2.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES2/gl2.h'),
|
||||
Source('include/GLES2/gl2platform.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES2/gl2platform.h'),
|
||||
Source('include/GLES2/gl2ext.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES2/gl2ext.h'),
|
||||
Source('include/GLES2/gl2.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/master/api/GLES2/gl2.h'),
|
||||
Source('include/GLES2/gl2platform.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/master/api/GLES2/gl2platform.h'),
|
||||
Source('include/GLES2/gl2ext.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/master/api/GLES2/gl2ext.h'),
|
||||
],
|
||||
},
|
||||
|
||||
@@ -107,10 +107,10 @@ SOURCES = [
|
||||
'api': 'gles3',
|
||||
'inc_folder': 'GLES3',
|
||||
'sources': [
|
||||
Source('include/GLES3/gl3.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES3/gl3.h'),
|
||||
Source('include/GLES3/gl31.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES3/gl31.h'),
|
||||
Source('include/GLES3/gl32.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES3/gl32.h'),
|
||||
Source('include/GLES3/gl3platform.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES3/gl3platform.h'),
|
||||
Source('include/GLES3/gl3.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/master/api/GLES3/gl3.h'),
|
||||
Source('include/GLES3/gl31.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/master/api/GLES3/gl31.h'),
|
||||
Source('include/GLES3/gl32.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/master/api/GLES3/gl32.h'),
|
||||
Source('include/GLES3/gl3platform.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/master/api/GLES3/gl3platform.h'),
|
||||
Source('include/GLES3/gl3ext.h', None), # FIXME: I don't know what the canonical source is
|
||||
],
|
||||
},
|
||||
@@ -155,35 +155,26 @@ SOURCES = [
|
||||
'api': 'vulkan',
|
||||
'inc_folder': 'vulkan',
|
||||
'sources': [
|
||||
Source('src/vulkan/registry/vk.xml', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/registry/vk.xml'),
|
||||
Source('include/vulkan/vulkan.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan.h'),
|
||||
Source('include/vulkan/vulkan_core.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_core.h'),
|
||||
Source('include/vulkan/vulkan_beta.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_beta.h'),
|
||||
Source('include/vulkan/vk_icd.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vk_icd.h'),
|
||||
Source('include/vulkan/vk_layer.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vk_layer.h'),
|
||||
Source('include/vulkan/vk_platform.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vk_platform.h'),
|
||||
Source('include/vulkan/vulkan_android.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_android.h'),
|
||||
Source('include/vulkan/vulkan_directfb.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_directfb.h'),
|
||||
Source('include/vulkan/vulkan_fuchsia.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_fuchsia.h'),
|
||||
Source('include/vulkan/vulkan_ggp.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_ggp.h'),
|
||||
Source('include/vulkan/vulkan_ios.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_ios.h'),
|
||||
Source('include/vulkan/vulkan_macos.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_macos.h'),
|
||||
Source('include/vulkan/vulkan_metal.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_metal.h'),
|
||||
Source('include/vulkan/vulkan_screen.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_screen.h'),
|
||||
Source('include/vulkan/vulkan_vi.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_vi.h'),
|
||||
Source('include/vulkan/vulkan_wayland.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_wayland.h'),
|
||||
Source('include/vulkan/vulkan_win32.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_win32.h'),
|
||||
Source('include/vulkan/vulkan_xcb.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_xcb.h'),
|
||||
Source('include/vulkan/vulkan_xlib.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_xlib.h'),
|
||||
Source('include/vulkan/vulkan_xlib_xrandr.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_xlib_xrandr.h'),
|
||||
Source('src/vulkan/registry/vk.xml', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/master/registry/vk.xml'),
|
||||
Source('include/vulkan/vulkan.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/master/include/vulkan/vulkan.h'),
|
||||
Source('include/vulkan/vulkan_core.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/master/include/vulkan/vulkan_core.h'),
|
||||
Source('include/vulkan/vulkan_beta.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/master/include/vulkan/vulkan_beta.h'),
|
||||
Source('include/vulkan/vk_icd.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/master/include/vulkan/vk_icd.h'),
|
||||
Source('include/vulkan/vk_layer.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/master/include/vulkan/vk_layer.h'),
|
||||
Source('include/vulkan/vk_platform.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/master/include/vulkan/vk_platform.h'),
|
||||
Source('include/vulkan/vulkan_android.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/master/include/vulkan/vulkan_android.h'),
|
||||
Source('include/vulkan/vulkan_fuchsia.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/master/include/vulkan/vulkan_fuchsia.h'),
|
||||
Source('include/vulkan/vulkan_ggp.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/master/include/vulkan/vulkan_ggp.h'),
|
||||
Source('include/vulkan/vulkan_ios.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/master/include/vulkan/vulkan_ios.h'),
|
||||
Source('include/vulkan/vulkan_macos.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/master/include/vulkan/vulkan_macos.h'),
|
||||
Source('include/vulkan/vulkan_metal.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/master/include/vulkan/vulkan_metal.h'),
|
||||
Source('include/vulkan/vulkan_vi.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/master/include/vulkan/vulkan_vi.h'),
|
||||
Source('include/vulkan/vulkan_wayland.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/master/include/vulkan/vulkan_wayland.h'),
|
||||
Source('include/vulkan/vulkan_win32.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/master/include/vulkan/vulkan_win32.h'),
|
||||
Source('include/vulkan/vulkan_xcb.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/master/include/vulkan/vulkan_xcb.h'),
|
||||
Source('include/vulkan/vulkan_xlib.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/master/include/vulkan/vulkan_xlib.h'),
|
||||
Source('include/vulkan/vulkan_xlib_xrandr.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/master/include/vulkan/vulkan_xlib_xrandr.h'),
|
||||
Source('include/vulkan/vk_android_native_buffer.h', 'https://android.googlesource.com/platform/frameworks/native/+/master/vulkan/include/vulkan/vk_android_native_buffer.h?format=TEXT'),
|
||||
Source('include/vk_video/vulkan_video_codec_h264std.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vk_video/vulkan_video_codec_h264std.h'),
|
||||
Source('include/vk_video/vulkan_video_codec_h264std_decode.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vk_video/vulkan_video_codec_h264std_decode.h'),
|
||||
Source('include/vk_video/vulkan_video_codec_h264std_encode.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vk_video/vulkan_video_codec_h264std_encode.h'),
|
||||
Source('include/vk_video/vulkan_video_codec_h265std.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vk_video/vulkan_video_codec_h265std.h'),
|
||||
Source('include/vk_video/vulkan_video_codec_h265std_decode.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vk_video/vulkan_video_codec_h265std_decode.h'),
|
||||
Source('include/vk_video/vulkan_video_codec_h265std_encode.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vk_video/vulkan_video_codec_h265std_encode.h'),
|
||||
Source('include/vk_video/vulkan_video_codecs_common.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vk_video/vulkan_video_codecs_common.h'),
|
||||
Source('include/vulkan/.editorconfig', None),
|
||||
],
|
||||
},
|
||||
|
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python3
|
||||
#!/usr/bin/env python
|
||||
# encoding=utf-8
|
||||
# Copyright © 2017 Intel Corporation
|
||||
|
||||
@@ -20,6 +20,7 @@
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
from __future__ import print_function
|
||||
import os
|
||||
|
||||
|
||||
|
@@ -20,10 +20,8 @@
|
||||
|
||||
"""Core data structures and routines for pick."""
|
||||
|
||||
from __future__ import annotations
|
||||
import asyncio
|
||||
import enum
|
||||
import itertools
|
||||
import json
|
||||
import pathlib
|
||||
import re
|
||||
@@ -44,6 +42,7 @@ if typing.TYPE_CHECKING:
|
||||
nominated: bool
|
||||
nomination_type: typing.Optional[int]
|
||||
resolution: typing.Optional[int]
|
||||
main_sha: typing.Optional[str]
|
||||
because_sha: typing.Optional[str]
|
||||
|
||||
IS_FIX = re.compile(r'^\s*fixes:\s*([a-f0-9]{6,40})', flags=re.MULTILINE | re.IGNORECASE)
|
||||
@@ -82,7 +81,6 @@ class Resolution(enum.Enum):
|
||||
DENOMINATED = 2
|
||||
BACKPORTED = 3
|
||||
NOTNEEDED = 4
|
||||
MANUAL_RESOLUTION = 5
|
||||
|
||||
|
||||
async def commit_state(*, amend: bool = False, message: str = 'Update') -> bool:
|
||||
@@ -120,6 +118,7 @@ class Commit:
|
||||
nominated: bool = attr.ib(False)
|
||||
nomination_type: typing.Optional[NominationType] = attr.ib(None)
|
||||
resolution: Resolution = attr.ib(Resolution.UNRESOLVED)
|
||||
main_sha: typing.Optional[str] = attr.ib(None)
|
||||
because_sha: typing.Optional[str] = attr.ib(None)
|
||||
|
||||
def to_json(self) -> 'CommitDict':
|
||||
@@ -132,22 +131,14 @@ class Commit:
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, data: 'CommitDict') -> 'Commit':
|
||||
c = cls(data['sha'], data['description'], data['nominated'], because_sha=data['because_sha'])
|
||||
c = cls(data['sha'], data['description'], data['nominated'], main_sha=data['main_sha'], because_sha=data['because_sha'])
|
||||
if data['nomination_type'] is not None:
|
||||
c.nomination_type = NominationType(data['nomination_type'])
|
||||
if data['resolution'] is not None:
|
||||
c.resolution = Resolution(data['resolution'])
|
||||
return c
|
||||
|
||||
def date(self) -> str:
|
||||
# Show commit date, ie. when the commit actually landed
|
||||
# (as opposed to when it was first written)
|
||||
return subprocess.check_output(
|
||||
['git', 'show', '--no-patch', '--format=%cs', self.sha],
|
||||
stderr=subprocess.DEVNULL
|
||||
).decode("ascii").strip()
|
||||
|
||||
async def apply(self) -> typing.Tuple[bool, str]:
|
||||
async def apply(self, ui: 'UI') -> typing.Tuple[bool, str]:
|
||||
# FIXME: This isn't really enough if we fail to cherry-pick because the
|
||||
# git tree will still be dirty
|
||||
async with COMMIT_LOCK:
|
||||
@@ -162,6 +153,10 @@ class Commit:
|
||||
return (False, err.decode())
|
||||
|
||||
self.resolution = Resolution.MERGED
|
||||
await ui.feedback(f'{self.sha} ({self.description}) applied successfully')
|
||||
|
||||
# Append the changes to the .pickstatus.json file
|
||||
ui.save()
|
||||
v = await commit_state(amend=True)
|
||||
return (v, '')
|
||||
|
||||
@@ -336,9 +331,7 @@ async def resolve_fixes(commits: typing.List['Commit'], previous: typing.List['C
|
||||
|
||||
|
||||
async def gather_commits(version: str, previous: typing.List['Commit'],
|
||||
new: typing.List[typing.Tuple[str, str]],
|
||||
cb: typing.Optional[typing.Callable[[], None]] = None
|
||||
) -> typing.List['Commit']:
|
||||
new: typing.List[typing.Tuple[str, str]], cb) -> typing.List['Commit']:
|
||||
# We create an array of the final size up front, then we pass that array
|
||||
# to the "inner" co-routine, which is turned into a list of tasks and
|
||||
# collected by asyncio.gather. We do this to allow the tasks to be
|
||||
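The comment above describes the pattern of allocating the result list up front and letting each task fill in its own slot, so ordering is preserved regardless of completion order. A minimal stand-alone sketch of that pattern, unrelated to the real ``resolve_nomination`` work:

.. code-block:: python

   import asyncio

   async def worker(index: int, results: list) -> None:
       await asyncio.sleep(0)          # stand-in for the real per-commit work
       results[index] = index * index  # each task writes only its own slot

   async def main() -> None:
       results = [None] * 5
       await asyncio.gather(*(worker(i, results) for i in range(5)))
       print(results)                  # [0, 1, 4, 9, 16]

   asyncio.run(main())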
@@ -349,10 +342,9 @@ async def gather_commits(version: str, previous: typing.List['Commit'],
|
||||
|
||||
async def inner(commit: 'Commit', version: str,
|
||||
commits: typing.List[typing.Optional['Commit']],
|
||||
index: int, cb: typing.Optional[typing.Callable[[], None]]) -> None:
|
||||
index: int, cb) -> None:
|
||||
commits[index] = await resolve_nomination(commit, version)
|
||||
if cb:
|
||||
cb()
|
||||
cb()
|
||||
|
||||
for i, (sha, desc) in enumerate(new):
|
||||
tasks.append(asyncio.ensure_future(
|
||||
@@ -371,27 +363,6 @@ async def gather_commits(version: str, previous: typing.List['Commit'],
|
||||
return commits
|
||||
|
||||
|
||||
async def update_commits() -> typing.List[Commit]:
|
||||
"""Gather all new commits and update the on-disk cache.
|
||||
"""
|
||||
commits = load()
|
||||
with open('VERSION', 'r') as f:
|
||||
version = '.'.join(f.read().split('.')[:2])
|
||||
if commits:
|
||||
sha = commits[0].sha
|
||||
else:
|
||||
sha = f'{version}-branchpoint'
|
||||
|
||||
if new := await get_new_commits(sha):
|
||||
collected_commits = await gather_commits(version, commits, new)
|
||||
else:
|
||||
collected_commits = []
|
||||
|
||||
all_commits = list(itertools.chain(collected_commits, commits))
|
||||
save(all_commits)
|
||||
return all_commits
|
||||
|
||||
|
||||
def load() -> typing.List['Commit']:
|
||||
if not pick_status_json.exists():
|
||||
return []
|
||||
|
@@ -43,8 +43,9 @@ PALETTE = [
|
||||
|
||||
class RootWidget(urwid.Frame):
|
||||
|
||||
def __init__(self, *args, ui: 'UI', **kwargs):
|
||||
def __init__(self, *args, ui: 'UI' = None, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
assert ui is not None
|
||||
self.ui = ui
|
||||
|
||||
def keypress(self, size: int, key: str) -> typing.Optional[str]:
|
||||
@@ -66,20 +67,16 @@ class CommitWidget(urwid.Text):
|
||||
_selectable = True
|
||||
|
||||
def __init__(self, ui: 'UI', commit: 'core.Commit'):
|
||||
assert commit.nomination_type is not None
|
||||
reason = commit.nomination_type.name.ljust(6)
|
||||
super().__init__(f'{commit.date()} {reason} {commit.sha[:10]} {commit.description}')
|
||||
super().__init__(f'{commit.sha[:10]} {commit.description}')
|
||||
self.ui = ui
|
||||
self.commit = commit
|
||||
|
||||
async def apply(self) -> None:
|
||||
async with self.ui.git_lock:
|
||||
result, err = await self.commit.apply()
|
||||
result, err = await self.commit.apply(self.ui)
|
||||
if not result:
|
||||
self.ui.chp_failed(self, err)
|
||||
else:
|
||||
self.ui.feedback(f'{self.commit.sha} ({self.commit.description}) applied successfully.')
|
||||
self.ui.save()
|
||||
self.ui.remove_commit(self)
|
||||
|
||||
async def denominate(self) -> None:
|
||||
@@ -173,7 +170,7 @@ class UI:
|
||||
self.mainloop.widget = o
|
||||
|
||||
for commit in reversed(list(itertools.chain(self.new_commits, self.previous_commits))):
|
||||
if commit.nominated and commit.resolution in {core.Resolution.UNRESOLVED, core.Resolution.MANUAL_RESOLUTION}:
|
||||
if commit.nominated and commit.resolution is core.Resolution.UNRESOLVED:
|
||||
b = urwid.AttrMap(CommitWidget(self, commit), None, focus_map='reversed')
|
||||
self.commit_list.append(b)
|
||||
self.save()
|
||||
@@ -214,7 +211,7 @@ class UI:
|
||||
else:
|
||||
raise RuntimeError(f"Couldn't find {sha}")
|
||||
|
||||
await commit.apply()
|
||||
await commit.apply(self)
|
||||
|
||||
q = urwid.Edit("Commit sha\n")
|
||||
ok_btn = urwid.Button('Ok')
|
||||
|
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python3
|
||||
#!/usr/bin/env python
|
||||
|
||||
import argparse
|
||||
import os
|
||||
@@ -21,9 +21,6 @@ PLATFORM_SYMBOLS = [
|
||||
'_end',
|
||||
'_fini',
|
||||
'_init',
|
||||
'_fbss',
|
||||
'_fdata',
|
||||
'_ftext',
|
||||
]
|
||||
|
||||
def get_symbols_nm(nm, lib):
|
||||
@@ -73,7 +70,7 @@ def get_symbols_dumpbin(dumpbin, lib):
|
||||
continue
|
||||
symbol_name = fields[3]
|
||||
# De-mangle symbols
|
||||
if symbol_name[0] == '_' and '@' in symbol_name:
|
||||
if symbol_name[0] == '_':
|
||||
symbol_name = symbol_name[1:].split('@')[0]
|
||||
symbols.append(symbol_name)
|
||||
return symbols
|
||||
|
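The hunk above changes the check used before stripping the leading underscore and ``@N`` suffix from dumpbin output. A small sketch of the stricter variant, with illustrative symbol names; 32-bit Windows stdcall exports are decorated as ``_name@N``.

.. code-block:: python

   def demangle(symbol_name: str) -> str:
       # Stricter variant shown above: only strip when both the leading
       # underscore and the stdcall '@N' suffix are present.
       if symbol_name[0] == '_' and '@' in symbol_name:
           return symbol_name[1:].split('@')[0]
       return symbol_name

   assert demangle('_glClear@4') == 'glClear'   # stdcall-decorated export
   assert demangle('glFlush') == 'glFlush'      # left untouched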
@@ -8,11 +8,9 @@ if [ ! -e .git ]; then
|
||||
fi
|
||||
|
||||
if [ ! -d platform-hardware-libhardware ]; then
|
||||
git clone --depth 1 https://android.googlesource.com/platform/frameworks/native platform-frameworks-native
|
||||
git clone --depth 1 https://android.googlesource.com/platform/hardware/libhardware platform-hardware-libhardware
|
||||
git clone --depth 1 https://android.googlesource.com/platform/system/core platform-system-core
|
||||
git clone --depth 1 https://android.googlesource.com/platform/system/logging platform-system-logging
|
||||
git clone --depth 1 https://android.googlesource.com/platform/system/unwinding platform-system-unwinding
|
||||
git clone --depth 1 https://android.googlesource.com/platform/frameworks/native platform-frameworks-native
|
||||
fi
|
||||
|
||||
dest=include/android_stub
|
||||
@@ -27,14 +25,14 @@ mkdir ${dest}
|
||||
|
||||
# These directories contains mostly only the files we need, so copy wholesale
|
||||
|
||||
cp -av \
|
||||
platform-frameworks-native/libs/nativewindow/include/vndk \
|
||||
platform-frameworks-native/libs/nativebase/include/nativebase \
|
||||
platform-system-core/libsync/include/ndk \
|
||||
cp -av platform-frameworks-native/libs/nativewindow/include/vndk \
|
||||
platform-system-core/libsync/include/sync \
|
||||
platform-system-core/libsync/include/ndk \
|
||||
platform-system-core/libbacktrace/include/backtrace \
|
||||
platform-system-core/libsystem/include/system \
|
||||
platform-system-logging/liblog/include/log \
|
||||
platform-system-unwinding/libbacktrace/include/backtrace \
|
||||
platform-system-core/liblog/include/log \
|
||||
platform-frameworks-native/libs/nativewindow/include/apex \
|
||||
platform-frameworks-native/libs/nativebase/include/nativebase \
|
||||
${dest}
|
||||
|
||||
|
||||
@@ -45,16 +43,15 @@ cp -av platform-hardware-libhardware/include/hardware/{hardware,gralloc,gralloc1
|
||||
cp -av platform-frameworks-native/vulkan/include/hardware/hwvulkan.h ${dest}/hardware
|
||||
|
||||
mkdir ${dest}/cutils
|
||||
cp -av platform-system-core/libcutils/include/cutils/{compiler,log,native_handle,properties,trace}.h ${dest}/cutils
|
||||
cp -av platform-system-core/libcutils/include/cutils/{log,native_handle,properties}.h ${dest}/cutils
|
||||
|
||||
|
||||
# include/android has files from a few different projects
|
||||
|
||||
mkdir ${dest}/android
|
||||
cp -av \
|
||||
platform-frameworks-native/libs/nativewindow/include/android/* \
|
||||
cp -av platform-frameworks-native/libs/nativewindow/include/android/* \
|
||||
platform-frameworks-native/libs/arect/include/android/* \
|
||||
platform-system-core/liblog/include/android/* \
|
||||
platform-system-core/libsync/include/android/* \
|
||||
platform-system-logging/liblog/include/android/* \
|
||||
${dest}/android
|
||||
|
||||
|
@@ -1,9 +0,0 @@
|
||||
/drivers/vmware-guest.html /drivers/svga3d.html 301
|
||||
/gallium/drivers/freedreno.html /drivers/freedreno.html 301
|
||||
/gallium/drivers/freedreno/ir3-notes.html /drivers/freedreno/ir3-notes.html 301
|
||||
/gallium/drivers/llvmpipe.html /drivers/llvmpipe.html 301
|
||||
/gallium/drivers/zink.html /drivers/zink.html 301
|
||||
/llvmpipe.html /drivers/llvmpipe.html 301
|
||||
/postprocess.html /gallium/postprocess.html 301
|
||||
/versions.html /relnotes.html 301
|
||||
/vmware-guest.html /drivers/vmware-guest.html 301
|
@@ -1,8 +1,12 @@
|
||||
Report a Bug
|
||||
============
|
||||
|
||||
The Mesa bug database is hosted on
|
||||
`freedesktop.org <https://freedesktop.org>`__. The old bug database on
|
||||
SourceForge is no longer used.
|
||||
|
||||
To file a Mesa bug, go to `GitLab on
|
||||
freedesktop.org <https://gitlab.freedesktop.org/mesa/mesa/-/issues>`__.
|
||||
freedesktop.org <https://gitlab.freedesktop.org/mesa/mesa/-/issues>`__
|
||||
|
||||
Please follow these bug reporting guidelines:
|
||||
|
||||
|
@@ -52,41 +52,6 @@ The three GitLab CI systems currently integrated are:
|
||||
LAVA
|
||||
docker
|
||||
|
||||
Application traces replay
|
||||
-------------------------
|
||||
|
||||
The CI replays application traces with various drivers in two different jobs. The first
|
||||
job replays traces listed in ``src/<driver>/ci/traces-<driver>.yml`` files and if any
|
||||
of those traces fail the pipeline fails as well. The second job replays traces listed in
|
||||
``src/<driver>/ci/restricted-traces-<driver>.yml`` and it is allowed to fail. This second
|
||||
job is only created when the pipeline is triggered by `marge-bot` or any other user that
|
||||
has been granted access to these traces.
|
||||
|
||||
A traces YAML file also includes a ``download-url`` pointing to the MinIO
instance from which the traces are downloaded. While the first job should always work with
publicly accessible traces, the second job may point to a URL with restricted access.
|
||||
|
||||
Restricted traces are those that have been made available to Mesa developers without a
|
||||
license to redistribute at will, and thus should not be exposed to the public. Failing to
|
||||
access that URL would not prevent the pipeline from passing, therefore forks made by
|
||||
contributors without permissions to download non-redistributable traces can be merged
|
||||
without friction.
|
||||
|
||||
As an aside, only maintainers of such non-redistributable traces are responsible for
|
||||
ensuring that replays are successful, since other contributors would not be able to
|
||||
download and test them by themselves.
|
||||
|
||||
Those Mesa contributors that believe they could have permission to access such
|
||||
non-redistributable traces can request permission from Daniel Stone <daniels@collabora.com>.
|
||||
|
||||
gitlab.freedesktop.org accounts that are to be granted access to these traces will be
|
||||
added to the OPA policy for the MinIO repository as per
|
||||
https://gitlab.freedesktop.org/freedesktop/helm-gitlab-config/-/commit/a3cd632743019f68ac8a829267deb262d9670958 .
|
||||
|
||||
For the jobs to be created in personal repositories, the name of the user's account needs
to be added to the rules attribute of the Gitlab CI job that accesses the restricted
traces.
|
||||
|
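As a rough illustration of the ``download-url`` key mentioned above, the snippet below reads it out of a driver's traces file; a minimal sketch assuming PyYAML and the ``src/<driver>/ci/traces-<driver>.yml`` naming, with the rest of the trace layout left alone.

.. code-block:: python

   import yaml

   with open('src/freedreno/ci/traces-freedreno.yml') as f:
       traces = yaml.safe_load(f)

   # The first job downloads every trace from this (public) MinIO location.
   print('traces are fetched from:', traces['download-url'])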
||||
Intel CI
|
||||
--------
|
||||
|
||||
@@ -242,27 +207,3 @@ directory. You can hack on mesa and iterate testing the build with:
|
||||
.. code-block:: console
|
||||
|
||||
sudo docker run --rm -v `pwd`:/mesa $IMAGE ninja -C /mesa/_build
|
||||
|
||||
|
||||
Conformance Tests
|
||||
-----------------
|
||||
|
||||
Some conformance tests require special treatment to be maintained on Gitlab CI.
|
||||
This section lists their documentation pages.
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
skqp
|
||||
|
||||
|
||||
Updating Gitlab CI Linux Kernel
|
||||
-------------------------------
|
||||
|
||||
Gitlab CI usually runs a bleeding-edge kernel. The following documentation has
|
||||
instructions on how to uprev the Linux Kernel in the Gitlab CI ecosystem.
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
kernel
|
||||
|
@@ -1,121 +0,0 @@
|
||||
Upreving Linux Kernel
|
||||
=====================
|
||||
|
||||
Occasionally, the Gitlab CI needs a Linux Kernel update to enable new kernel
features, device drivers, bug fixes, etc. in CI jobs.
|
||||
Kernel uprevs in Gitlab CI are relatively simple, but prone to lots of
|
||||
side-effects since many devices from different platforms are involved in the
|
||||
pipeline.
|
||||
|
||||
Kernel repository
|
||||
-----------------
|
||||
|
||||
The Linux Kernel used in the Gitlab CI is stored at the following repository:
|
||||
https://gitlab.freedesktop.org/gfx-ci/linux
|
||||
|
||||
The Mesa kernel commonly carries patches that have not been merged into Linux
mainline, which is why Mesa has its own kernel version that should be used
as the base for newer kernels.
|
||||
|
||||
So, one should base the kernel uprev on the last tag used in the Mesa CI;
please refer to the `KERNEL_URL` variable in `.gitlab-ci.yml`.
|
||||
Every tag has a standard naming: `vX.YZ-for-mesa-ci-<commit_short_SHA>`, which
|
||||
can be created via the command:
|
||||
|
||||
:code:`git tag vX.YZ-for-mesa-ci-$(git rev-parse --short HEAD)`
|
||||
|
||||
Building Kernel
|
||||
---------------
|
||||
|
||||
When Mesa CI generates a new rootfs image, the Linux Kernel is built based on
|
||||
the script located at `.gitlab-ci/build-kernel.sh`.
|
||||
|
||||
Updating Kconfigs
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
When a Kernel uprev happens, it is worth compiling and cross-compiling the
|
||||
Kernel locally, in order to update the Kconfigs accordingly. Remember that the
|
||||
resulting Kconfig is a merge between *Mesa CI Kconfig* and *Linux tree
|
||||
defconfig* made via `merge_config.sh` script located at Linux Kernel tree.
|
||||
|
||||
Kconfigs location
|
||||
"""""""""""""""""
|
||||
|
||||
+------------+--------------------------------------------+-------------------------------------+
|
||||
| Platform | Mesa CI Kconfig location | Linux tree defconfig |
|
||||
+============+============================================+=====================================+
|
||||
| arm | .gitlab-ci/container/arm.config | arch/arm/configs/multi_v7_defconfig |
|
||||
+------------+--------------------------------------------+-------------------------------------+
|
||||
| arm64 | .gitlab-ci/container/arm64.config | arch/arm64/configs/defconfig |
|
||||
+------------+--------------------------------------------+-------------------------------------+
|
||||
| x86-64 | .gitlab-ci/container/x86_64.config | arch/x86/configs/x86_64_defconfig |
|
||||
+------------+--------------------------------------------+-------------------------------------+
|
||||
|
||||
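As an illustration of the merge described above, the resulting Kconfig can be reproduced locally with `merge_config.sh` from the Linux tree, fed the two files from the table; a minimal sketch assuming a Linux checkout in ``linux/`` next to the Mesa checkout, with arm64 picked as the example platform.

.. code-block:: python

   import subprocess

   # Merge the Linux defconfig with the Mesa CI fragment for arm64
   # (paths taken from the table above; the directory layout is assumed).
   subprocess.run(
       ['./scripts/kconfig/merge_config.sh',
        'arch/arm64/configs/defconfig',
        '../mesa/.gitlab-ci/container/arm64.config'],
       cwd='linux', check=True)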
Updating image tags
|
||||
-------------------
|
||||
|
||||
Every kernel uprev should update 3 image tags, located in two files.
|
||||
|
||||
:code:`.gitlab-ci.yml` tag
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
- **KERNEL_URL** for the location of the new kernel
|
||||
|
||||
:code:`.gitlab-ci/image-tags.yml` tags
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
- **KERNEL_ROOTFS_TAG** to rebuild rootfs with the new kernel
|
||||
- **DEBIAN_X86_TEST_GL_TAG** to ensure that the new rootfs is being used by the Gitlab x86 jobs
|
||||
|
||||
Development routine
|
||||
-------------------
|
||||
|
||||
1. Compile the newer kernel locally for each platform.
|
||||
2. Compile device trees for ARM platforms
|
||||
3. Update Kconfigs. Are new Kconfigs necessary? Is CONFIG_XYZ_BLA deprecated? Does the `merge_config.sh` override an important config?
|
||||
4. Push a new development branch to `Kernel repository`_ based on the latest kernel tag used in Gitlab CI
|
||||
5. Hack `build-kernel.sh` script to clone kernel from your development branch
|
||||
6. Update image tags. See `Updating image tags`_
|
||||
7. Run the entire CI pipeline; all the automatic jobs should be green. If some job is red or taking too long, you will need to investigate it and probably ask for help.
|
||||
|
||||
When the Kernel uprev is stable
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
1. Push a new tag to Mesa CI `Kernel repository`_
|
||||
2. Update KERNEL_URL in the `debian/x86_test-gl` job definition
|
||||
3. Open a merge request, if it is not opened yet
|
||||
|
||||
Tips and Tricks
|
||||
---------------
|
||||
|
||||
Compare pipelines
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
To have the most confidence that a kernel uprev does not break anything in Mesa,
|
||||
it is suggested that one run the entire CI pipeline to check whether the update affected the manual CI jobs.
|
||||
|
||||
Step-by-step
|
||||
""""""""""""
|
||||
|
||||
1. Create a local branch at the same git ref (should be the main branch) from which the kernel uprev branch was created.
|
||||
2. Push this test branch
|
||||
3. Run the entire pipeline against the test branch, even the manual jobs
|
||||
4. Now do the same for the kernel uprev branch
|
||||
5. Compare the job results. If a CI job turned red on your uprev branch, it means that the kernel update broke the test. Otherwise, it should be fine.
|
||||
|
||||
Bare-metal custom kernels
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Some CI jobs support plugging in a custom kernel by simply changing a variable.
This is great, since rebuilding the kernel and rootfs may take dozens of minutes.
|
||||
|
||||
For example, the freedreno jobs' `gitlab.yml` manifest supports a variable named
|
||||
`BM_KERNEL`. If one puts a gz-compressed kernel URL there, the job will use that
|
||||
kernel to boot the freedreno bare-metal devices. The same works for `BM_DTB` in
|
||||
the case of device tree binaries.
|
||||
|
||||
Careful reading of the job logs
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Sometimes a job may turn red for reasons unrelated to the kernel update, e.g.
a LAVA `tftp` timeout, problems with the freedesktop servers, etc.
|
||||
So it is important to see the reason why the job turned red, and retry it if an
|
||||
infrastructure error has happened.
|
101 docs/ci/skqp.rst
@@ -1,101 +0,0 @@
|
||||
skqp
|
||||
====
|
||||
|
||||
`skqp <https://skia.org/docs/dev/testing/skqp/>`_ stands for SKIA Quality
|
||||
Program conformance tests. Basically, it has sets of rendering tests and unit
|
||||
tests to ensure that `SKIA <https://skia.org/>`_ is meeting its design specifications on a specific
|
||||
device.
|
||||
|
||||
The rendering tests support the GL, GLES and Vulkan backends and exercise various
rendering scenarios, while the unit tests check GPU behavior without rendering images.
|
||||
|
||||
Tests
|
||||
-----
|
||||
|
||||
Render tests design
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
It is worth noting that `rendertests.txt` can carry some detail about each test
expectation: each test can have a max pixel error count, which tells skqp that it
is OK to have at most that number of errors for that test. See also:
|
||||
https://github.com/google/skia/blob/main/tools/skqp/README_ALGORITHM.md
|
||||
|
||||
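A minimal sketch of reading those per-test thresholds, assuming the simple ``name,max_error`` line format described in the skqp documentation linked above (the exact format is owned by skqp, not Mesa):

.. code-block:: python

   def read_rendertests(path):
       thresholds = {}
       with open(path) as f:
           for line in f:
               line = line.strip()
               if not line or line.startswith('#'):
                   continue
               name, _, max_error = line.partition(',')
               # An empty threshold is treated as 0 here; the real semantics
               # are defined by skqp itself.
               thresholds[name] = int(max_error) if max_error else 0
       return thresholds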
.. _test-location:
|
||||
|
||||
Location
|
||||
^^^^^^^^
|
||||
|
||||
Each `rendertests.txt` and `unittest.txt` file must be located inside a specific
|
||||
subdirectory inside skqp assets directory.
|
||||
|
||||
+--------------+--------------------------------------------+
|
||||
| Test type | Location |
|
||||
+==============+============================================+
|
||||
| Render tests | `${SKQP_ASSETS_DIR}/skqp/rendertests.txt` |
|
||||
+--------------+--------------------------------------------+
|
||||
| Unit tests | `${SKQP_ASSETS_DIR}/skqp/unittests.txt` |
|
||||
+--------------+--------------------------------------------+
|
||||
|
||||
The `skqp-runner.sh` script will make the necessary modifications to separate
`rendertests.txt` for each backend-driver combination, as long as the test files are located in the expected place:
|
||||
|
||||
+--------------+----------------------------------------------------------------------------------------------+
|
||||
| Test type | Location |
|
||||
+==============+==============================================================================================+
|
||||
| Render tests | `${MESA_REPOSITORY_DIR}/src/${GPU_DRIVER}/ci/${GPU_VERSION}-${SKQP_BACKEND}_rendertests.txt` |
|
||||
+--------------+----------------------------------------------------------------------------------------------+
|
||||
| Unit tests | `${MESA_REPOSITORY_DIR}/src/${GPU_DRIVER}/ci/${GPU_VERSION}_unittests.txt` |
|
||||
+--------------+----------------------------------------------------------------------------------------------+
|
||||
|
||||
Where `SKQP_BACKEND` can be:
|
||||
|
||||
- gl: for GL backend
|
||||
- gles: for GLES backend
|
||||
- vk: for Vulkan backend
|
||||
|
||||
Example file
|
||||
""""""""""""
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
src/freedreno/ci/freedreno-a630-skqp-gl_rendertests.txt
|
||||
|
||||
- GPU_DRIVER: `freedreno`
|
||||
- GPU_VERSION: `freedreno-a630`
|
||||
- SKQP_BACKEND: `gl`
|
||||
|
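Putting the placeholders from the table together, a small sketch of where `skqp-runner.sh` expects to find the files for a given driver/GPU/backend triple; the freedreno values repeat the example above.

.. code-block:: python

   import os

   def expected_test_files(repo, gpu_driver, gpu_version, skqp_backend):
       ci_dir = os.path.join(repo, 'src', gpu_driver, 'ci')
       return (
           os.path.join(ci_dir, f'{gpu_version}-{skqp_backend}_rendertests.txt'),
           os.path.join(ci_dir, f'{gpu_version}_unittests.txt'),
       )

   print(expected_test_files('.', 'freedreno', 'freedreno-a630', 'gl')[0])
   # ./src/freedreno/ci/freedreno-a630-gl_rendertests.txt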
||||
.. _rendertests-design:
|
||||
|
||||
skqp reports
|
||||
------------
|
||||
|
||||
skqp generates reports after finishing its execution. They are located in the job
artifacts results directory and are divided into subdirectories by rendering test
backend and unit tests. The job log has links to every generated report in order to
facilitate skqp debugging.
|
||||
|
||||
Maintaining skqp on Mesa CI
|
||||
---------------------------
|
||||
|
||||
skqp is built alongside another binary, namely `list_gpu_unit_tests`, which is
located in the same folder as the `skqp` binary.
|
||||
|
||||
This binary will generate the expected `unittests.txt` for the target GPU, so
|
||||
ideally it should be executed on every skqp update and when a new device
|
||||
receives skqp CI jobs.
|
||||
|
||||
1. Generate target unit tests for the current GPU with :code:`./list_gpu_unit_tests > unittests.txt`
|
||||
|
||||
2. Run skqp job
|
||||
|
||||
3. If there is a failing or crashing unit test, remove it from the corresponding `unittests.txt`
|
||||
|
||||
4. If there is a crashing render test, remove it from the corresponding `rendertests.txt`
|
||||
|
||||
5. If there is a failing render test, visually inspect the result from the HTML report
|
||||
- If the render result is OK, update the max error count for that test
|
||||
- Otherwise, put `-1` in the same threshold field, as seen in :ref:`rendertests-design`
|
||||
|
||||
6. Remember to put the new tests files to the locations cited in :ref:`test-location`
|
@@ -1,4 +1,4 @@
|
||||
set $proxy_authorization '';
|
||||
set $authorization '';
|
||||
|
||||
set_by_lua $proxyuri '
|
||||
unescaped = ngx.unescape_uri(ngx.var.arg_uri);
|
||||
@@ -13,12 +13,10 @@ set_by_lua $proxyuri '
|
||||
host = it[3];
|
||||
query = it[4];
|
||||
|
||||
if ngx.var.http_authorization and ngx.var.http_authorization ~= "" then
|
||||
ngx.var.proxy_authorization = ngx.var.http_authorization;
|
||||
elseif authstring then
|
||||
if authstring then
|
||||
auth = string.sub(authstring, 0, -2);
|
||||
auth64 = ngx.encode_base64(auth);
|
||||
ngx.var.proxy_authorization = "Basic " .. auth64;
|
||||
ngx.var.authorization = "Basic " .. auth64;
|
||||
end
|
||||
|
||||
-- Default to / if none is set to avoid using the request_uri query
|
||||
@@ -30,7 +28,7 @@ set_by_lua $proxyuri '
|
||||
';
|
||||
|
||||
add_header X-GG-Cache-Status $upstream_cache_status;
|
||||
proxy_set_header Authorization $proxy_authorization;
|
||||
proxy_set_header Authorization $authorization;
|
||||
|
||||
proxy_pass $proxyuri;
|
||||
# Redirect back to ourselves on 301 replies
|
||||
|
@@ -128,5 +128,5 @@ Basic formatting guidelines
|
||||
prefer the use of ``bool``, ``true``, and ``false`` over
|
||||
``GLboolean``, ``GL_TRUE``, and ``GL_FALSE``. In C code, this may
|
||||
mean that ``#include <stdbool.h>`` needs to be added. The
|
||||
``try_emit_*`` method ``src/mesa/state_tracker/st_glsl_to_tgsi.cpp``
|
||||
can serve as an example.
|
||||
``try_emit_*`` methods in ``src/mesa/program/ir_to_mesa.cpp`` and
|
||||
``src/mesa/state_tracker/st_glsl_to_tgsi.cpp`` can serve as examples.
|
||||
|
14 docs/conf.py
@@ -135,6 +135,20 @@ html_extra_path = [
|
||||
]
|
||||
|
||||
html_redirects = [
|
||||
('drivers/vmware-guest', 'drivers/svga3d.html'),
|
||||
('gallium/drivers/freedreno', 'drivers/freedreno.html'),
|
||||
('gallium/drivers/freedreno/ir3-notes', 'drivers/freedreno/ir3-notes.html'),
|
||||
('gallium/drivers/llvmpipe', 'drivers/llvmpipe.html'),
|
||||
('gallium/drivers/openswr', 'drivers/openswr.html'),
|
||||
('gallium/drivers/openswr/faq', 'drivers/openswr/faq.html'),
|
||||
('gallium/drivers/openswr/knobs', 'drivers/openswr/knobs.html'),
|
||||
('gallium/drivers/openswr/profiling', 'drivers/openswr/profiling.html'),
|
||||
('gallium/drivers/openswr/usage', 'drivers/openswr/usage.html'),
|
||||
('gallium/drivers/zink', 'drivers/zink.html'),
|
||||
('llvmpipe', 'drivers/llvmpipe.html'),
|
||||
('postprocess', 'gallium/postprocess.html'),
|
||||
('versions', 'relnotes.html'),
|
||||
('vmware-guest', 'drivers/vmware-guest.html'),
|
||||
('webmaster', 'https://www.mesa3d.org/website/'),
|
||||
]
|
||||
|
||||
|
@@ -23,7 +23,7 @@ To add a new GL extension to Mesa you have to do at least the following.
  functions and enums to the ``gl_API.xml`` file. Then, a bunch of
  source files must be regenerated by executing the corresponding
  Python scripts.
- Add a new entry to the ``gl_extensions`` struct in ``consts_exts.h`` if
- Add a new entry to the ``gl_extensions`` struct in ``mtypes.h`` if
  the extension requires driver capabilities not already exposed by
  another extension.
- Add a new entry to the ``src/mesa/main/extensions_table.h`` file.
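For instance, the new driver-capability flag would be added roughly like this
(``MESA_hypothetical_ext`` is a made-up name used purely for illustration; the
real ``extensions_table.h`` entry should be copied from a neighboring line,
since its columns encode which APIs expose the extension):

.. code-block:: c

   /* Normally provided by the GL headers; repeated here only so the
    * sketch is self-contained. */
   typedef unsigned char GLboolean;

   /* In the gl_extensions struct (consts_exts.h or mtypes.h depending on
    * the branch): one boolean per driver-exposed capability. */
   struct gl_extensions
   {
      /* ... existing extension flags ... */
      GLboolean MESA_hypothetical_ext;   /* illustrative placeholder */
   };

A driver then typically sets this flag at context creation when the hardware
supports the feature, and the extension-string machinery exposes it from there.
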
@@ -1,232 +0,0 @@
ANV
===

Debugging
---------

Here are a few debug environment variables specific to Anv:

:envvar:`ANV_ENABLE_PIPELINE_CACHE`
   If defined to ``0`` or ``false``, this will disable pipeline
   caching, forcing Anv to reparse and recompile any VkShaderModule
   (SPIRV) it is given.
:envvar:`ANV_DISABLE_SECONDARY_CMD_BUFFER_CALLS`
   If defined to ``1`` or ``true``, this will prevent usage of self
   modifying command buffers to implement ``vkCmdExecuteCommands``. As
   a result of this, it will also disable ``VK_KHR_performance_query``.
:envvar:`ANV_ALWAYS_BINDLESS`
   If defined to ``1`` or ``true``, this forces all descriptor sets to
   use the internal `Bindless model`_.
:envvar:`ANV_QUEUE_THREAD_DISABLE`
   If defined to ``1`` or ``true``, this disables support for timeline
   semaphores.
:envvar:`ANV_USERSPACE_RELOCS`
   If defined to ``1`` or ``true``, this forces Anv to always do
   kernel relocations in command buffers. This should only have an
   effect on hardware that doesn't support soft-pinning (Ivybridge,
   Haswell, Cherryview).
:envvar:`ANV_PRIMITIVE_REPLICATION_MAX_VIEWS`
   Specifies up to how many view shaders can be lowered to handle
   VK_KHR_multiview. Beyond this number, multiview is implemented
   using instanced rendering. If unspecified, the value defaults to
   ``2``.


Experimental features
---------------------

:envvar:`ANV_EXPERIMENTAL_NV_MESH_SHADER`
   If defined to ``1`` or ``true``, this advertises support for the
   VK_NV_mesh_shader extension on platforms that have hardware
   support for it.
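All of these switches follow the same on/off convention. A minimal sketch of
how such a boolean toggle can be read (purely illustrative; this is not Anv's
actual implementation):

.. code-block:: c

   #include <stdbool.h>
   #include <stdlib.h>
   #include <string.h>

   /* Treat "1"/"true" as enabled and "0"/"false" as disabled,
    * falling back to a default when the variable is unset. */
   static bool
   env_toggle(const char *name, bool default_value)
   {
      const char *s = getenv(name);
      if (s == NULL)
         return default_value;
      if (!strcmp(s, "1") || !strcmp(s, "true"))
         return true;
      if (!strcmp(s, "0") || !strcmp(s, "false"))
         return false;
      return default_value;
   }

   /* e.g.: bool bindless = env_toggle("ANV_ALWAYS_BINDLESS", false); */
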
.. _`Bindless model`:

Binding Model
-------------

Here is the Anv bindless binding model that was implemented for the
descriptor indexing feature of Vulkan 1.2:

.. graphviz::

   digraph G {
      fontcolor="black";
      compound=true;

      subgraph cluster_1 {
         label = "Binding Table (HW)";

         bgcolor="cornflowerblue";

         node [ style=filled,shape="record",fillcolor="white",
                label="RT0" ] n0;
         node [ label="RT1" ] n1;
         node [ label="dynbuf0"] n2;
         node [ label="set0" ] n3;
         node [ label="set1" ] n4;
         node [ label="set2" ] n5;

         n0 -> n1 -> n2 -> n3 -> n4 -> n5 [style=invis];
      }
      subgraph cluster_2 {
         label = "Descriptor Set 0";

         bgcolor="burlywood3";
         fixedsize = true;

         node [ style=filled,shape="record",fillcolor="white", fixedsize = true, width=4,
                label="binding 0 - STORAGE_IMAGE\n anv_storage_image_descriptor" ] n8;
         node [ label="binding 1 - COMBINED_IMAGE_SAMPLER\n anv_sampled_image_descriptor" ] n9;
         node [ label="binding 2 - UNIFORM_BUFFER\n anv_address_range_descriptor" ] n10;
         node [ label="binding 3 - UNIFORM_TEXEL_BUFFER\n anv_storage_image_descriptor" ] n11;

         n8 -> n9 -> n10 -> n11 [style=invis];
      }
      subgraph cluster_5 {
         label = "Vulkan Objects"

         fontcolor="black";
         bgcolor="darkolivegreen4";

         subgraph cluster_6 {
            label = "VkImageView";

            bgcolor=darkolivegreen3;
            node [ style=filled,shape="box",fillcolor="white", fixedsize = true, width=2,
                   label="surface_state" ] n12;
         }
         subgraph cluster_7 {
            label = "VkSampler";

            bgcolor=darkolivegreen3;
            node [ style=filled,shape="box",fillcolor="white", fixedsize = true, width=2,
                   label="sample_state" ] n13;
         }
         subgraph cluster_8 {
            label = "VkImageView";
            bgcolor="darkolivegreen3";

            node [ style=filled,shape="box",fillcolor="white", fixedsize = true, width=2,
                   label="surface_state" ] n14;
         }
         subgraph cluster_9 {
            label = "VkBuffer";
            bgcolor=darkolivegreen3;

            node [ style=filled,shape="box",fillcolor="white", fixedsize = true, width=2,
                   label="address" ] n15;
         }
         subgraph cluster_10 {
            label = "VkBufferView";

            bgcolor=darkolivegreen3;
            node [ style=filled,shape="box",fillcolor="white", fixedsize = true, width=2,
                   label="surface_state" ] n16;
         }

         n12 -> n13 -> n14 -> n15 -> n16 [style=invis];
      }

      subgraph cluster_11 {
         subgraph cluster_12 {
            label = "CommandBuffer state stream";

            bgcolor="gold3";
            node [ style=filled,shape="box",fillcolor="white", fixedsize = true, width=2,
                   label="surface_state" ] n17;
            node [ label="surface_state" ] n18;
            node [ label="surface_state" ] n19;

            n17 -> n18 -> n19 [style=invis];
         }
      }

      n3 -> n8 [lhead=cluster_2];

      n8 -> n12;
      n9 -> n13;
      n9 -> n14;
      n10 -> n15;
      n11 -> n16;

      n0 -> n17;
      n1 -> n18;
      n2 -> n19;
   }
The HW binding table is generated when the draw or dispatch commands
are emitted. Here are the types of entries one can find in the binding
table:

- The currently bound descriptor sets, one entry per descriptor set
  (our limit is 8).

- For dynamic buffers, one entry per dynamic buffer.

- For draw commands, render target entries if needed.

The entries of the HW binding table for descriptor sets are
RENDER_SURFACE_STATE, similar to what you would have for a normal
uniform buffer. The shader will emit reads of this buffer first to get
the information it needs to access a surface/sampler/etc., and then
emits the appropriate message using the information gathered from the
descriptor set buffer.

Each binding type entry gets an associated structure in memory
(``anv_storage_image_descriptor``, ``anv_sampled_image_descriptor``,
``anv_address_range_descriptor``, ``anv_storage_image_descriptor``).
This is the information read by the shader.
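Conceptually, each of these is a small fixed-size record the shader can load
directly. A buffer-style descriptor, for example, boils down to little more
than a GPU address plus a size; the struct and field names below are
illustrative only, not the driver's real definitions:

.. code-block:: c

   #include <stdint.h>

   /* Illustrative shape of an address-range style record read by the
    * shader from the descriptor set buffer. See anv_private.h for the
    * actual anv_*_descriptor definitions. */
   struct example_address_range_descriptor {
      uint64_t address;   /* GPU address of the buffer */
      uint32_t range;     /* size in bytes */
      uint32_t pad;
   };
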
.. _`Descriptor Set Memory Layout`:

Descriptor Set Memory Layout
----------------------------

Here is a representation of how the descriptor set bindings, and each
element in each binding, are mapped to the descriptor set memory:

.. graphviz::

   digraph structs {
      node [shape=record];
      rankdir=LR;

      struct1 [label="Descriptor Set | \
                      <b0> binding 0\n STORAGE_IMAGE \n (array_length=3) | \
                      <b1> binding 1\n COMBINED_IMAGE_SAMPLER \n (array_length=2) | \
                      <b2> binding 2\n UNIFORM_BUFFER \n (array_length=1) | \
                      <b3> binding 3\n UNIFORM_TEXEL_BUFFER \n (array_length=1)"];
      struct2 [label="Descriptor Set Memory | \
                      <b0e0> anv_storage_image_descriptor|\
                      <b0e1> anv_storage_image_descriptor|\
                      <b0e2> anv_storage_image_descriptor|\
                      <b1e0> anv_sampled_image_descriptor|\
                      <b1e1> anv_sampled_image_descriptor|\
                      <b2e0> anv_address_range_descriptor|\
                      <b3e0> anv_storage_image_descriptor"];

      struct1:b0 -> struct2:b0e0;
      struct1:b0 -> struct2:b0e1;
      struct1:b0 -> struct2:b0e2;
      struct1:b1 -> struct2:b1e0;
      struct1:b1 -> struct2:b1e1;
      struct1:b2 -> struct2:b2e0;
      struct1:b3 -> struct2:b3e0;
   }

Each binding in the descriptor set is allocated an array of
``anv_*_descriptor`` data structures. The type of ``anv_*_descriptor``
used for a binding is selected based on the ``VkDescriptorType`` of
the binding.

The value of ``anv_descriptor_set_binding_layout::descriptor_offset``
is a byte offset from the descriptor set memory to the associated
binding. ``anv_descriptor_set_binding_layout::array_size`` is the
number of ``anv_*_descriptor`` elements in the descriptor set memory
from that offset for the binding.
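As a rough sketch of the addressing these two fields imply (everything below
except ``descriptor_offset`` and ``array_size`` is made up for illustration
and does not mirror the driver's real types):

.. code-block:: c

   #include <assert.h>
   #include <stddef.h>
   #include <stdint.h>

   struct example_binding_layout {
      uint32_t descriptor_offset;   /* byte offset into descriptor set memory */
      uint32_t array_size;          /* number of descriptor elements */
      uint32_t descriptor_stride;   /* size of the anv_*_descriptor used here */
   };

   /* Returns a pointer to element 'index' of a binding inside the
    * descriptor set memory block. */
   static void *
   descriptor_element(void *set_mem,
                      const struct example_binding_layout *layout,
                      uint32_t index)
   {
      assert(index < layout->array_size);
      return (char *)set_mem + layout->descriptor_offset +
             (size_t)index * layout->descriptor_stride;
   }
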
@@ -22,7 +22,7 @@ cuts taken to get things up and running (which are mostly not inherent to
the xml schema, and should not be too difficult to remove from the py and
decode/disasm utility):

* Maximum "field" size is 64b
* Maximum "bitset" size is 64b
* Fixed instruction size

Often times, especially when new functionality is added in later gens
@@ -184,9 +184,6 @@ decoding. The display template consists of references to fields (which may
be derived fields) specified as ``{FIELDNAME}`` and other characters
which are just echoed through to the resulting decoded bitset.

It is possible to define a line column alignment value per field to influence
the visual output. It needs to be specified as ``{FIELDNAME:align=xx}``.

The ``<override>`` element will be described in the next section, but it
provides for both different decoded instruction syntax/mnemonics (when
simply providing a different display template string) as well as instruction
@@ -110,6 +110,8 @@ Here are some known caveats in OpenGL support:

- ``glPolygonMode()`` with ``GL_LINE`` is not supported. This is not part of
  OpenGL ES 2.0 and so it is not possible to reverse engineer.
- Texture wrapping with ``GL_CLAMP_TO_BORDER`` is not supported. This is not
  part of OpenGL ES 2.0 and so it is not possible to reverse engineer.

- Precision limitations in fragment shaders:
Some files were not shown because too many files have changed in this diff.