Compare commits

..

11 Commits

Author SHA1 Message Date
Joshua Ashton
aa5bc3e41f wsi: Implement linux-drm-syncobj-v1
This implements explicit sync with linux-drm-syncobj-v1 for the
Wayland WSI.

Signed-off-by: Joshua Ashton <joshua@froggi.es>
2024-03-27 17:49:03 +00:00
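For context, a minimal sketch of what driving linux-drm-syncobj-v1 from a Wayland client looks like, assuming the generated wp_linux_drm_syncobj_v1 client headers and a DRM syncobj fd already exported from the driver's timeline semaphore. The identifiers and structure here are illustrative, not the actual Mesa WSI code paths.

```c
#include <stdint.h>
#include <wayland-client.h>
#include "linux-drm-syncobj-v1-client-protocol.h"

/* Sketch: attach explicit-sync timeline points to a wl_surface commit.
 * `manager` is assumed to have been bound from the registry; in real WSI
 * code the surface/timeline objects would be created once and cached. */
static void
present_with_explicit_sync(struct wl_surface *surface,
                           struct wl_buffer *buffer,
                           struct wp_linux_drm_syncobj_manager_v1 *manager,
                           int syncobj_fd,
                           uint64_t acquire_point,
                           uint64_t release_point)
{
   struct wp_linux_drm_syncobj_surface_v1 *sync_surface =
      wp_linux_drm_syncobj_manager_v1_get_surface(manager, surface);
   struct wp_linux_drm_syncobj_timeline_v1 *timeline =
      wp_linux_drm_syncobj_manager_v1_import_timeline(manager, syncobj_fd);

   wl_surface_attach(surface, buffer, 0, 0);
   /* The compositor waits for this point before sampling the buffer... */
   wp_linux_drm_syncobj_surface_v1_set_acquire_point(sync_surface, timeline,
                                                     acquire_point >> 32,
                                                     (uint32_t)acquire_point);
   /* ...and signals this point once it has finished reading it. */
   wp_linux_drm_syncobj_surface_v1_set_release_point(sync_surface, timeline,
                                                     release_point >> 32,
                                                     (uint32_t)release_point);
   wl_surface_commit(surface);
}
```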
Joshua Ashton
e4e3436d45 wsi: Add common infrastructure for explicit sync
Signed-off-by: Joshua Ashton <joshua@froggi.es>
2024-03-27 17:49:03 +00:00
Joshua Ashton
becb5d5161 wsi: Get timeline semaphore exportable handle types
We need to know this for explicit sync

Signed-off-by: Joshua Ashton <joshua@froggi.es>
2024-03-27 17:44:16 +00:00
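A hedged sketch of the kind of query this implies, using the core Vulkan 1.1/1.2 external-semaphore entry points to find out which handle types a timeline semaphore can be exported as; this is illustrative rather than the actual wsi_device code.

```c
#include <vulkan/vulkan.h>

/* Returns the handle types a timeline semaphore on this physical device
 * can be exported as (e.g. an opaque fd usable as a DRM syncobj). */
static VkExternalSemaphoreHandleTypeFlags
get_exportable_timeline_handle_types(VkPhysicalDevice pdev)
{
   const VkSemaphoreTypeCreateInfo type_info = {
      .sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
      .semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE,
   };
   static const VkExternalSemaphoreHandleTypeFlagBits candidates[] = {
      VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
      VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
   };
   VkExternalSemaphoreHandleTypeFlags exportable = 0;

   for (unsigned i = 0; i < 2; i++) {
      const VkPhysicalDeviceExternalSemaphoreInfo info = {
         .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
         .pNext = &type_info,
         .handleType = candidates[i],
      };
      VkExternalSemaphoreProperties props = {
         .sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
      };
      vkGetPhysicalDeviceExternalSemaphoreProperties(pdev, &info, &props);
      if (props.externalSemaphoreFeatures &
          VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT)
         exportable |= candidates[i];
   }
   return exportable;
}
```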
Joshua Ashton
06c2af994b wsi: Track CPU side present ordering via a serial
We will use this in our heuristics to pick the optimal buffer in AcquireNextImageKHR

Signed-off-by: Joshua Ashton <joshua@froggi.es>
2024-03-25 21:00:54 +00:00
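A toy illustration of the idea, assuming made-up field and function names: stamp every present with an incrementing CPU-side serial, then prefer the image that was presented longest ago when acquiring.

```c
#include <stdbool.h>
#include <stdint.h>

struct image_slot {
   bool     acquired;       /* currently owned by the application */
   uint64_t present_serial; /* CPU-side order in which it was last presented */
};

/* Called on QueuePresent: record the submission order. */
static void
mark_presented(struct image_slot *img, uint64_t *next_serial)
{
   img->acquired = false;
   img->present_serial = (*next_serial)++;
}

/* Called on AcquireNextImage: of the images not currently held by the app,
 * prefer the one presented longest ago (lowest serial), since it is the
 * most likely to already be idle. */
static int
pick_image(struct image_slot *imgs, int count)
{
   int best = -1;
   for (int i = 0; i < count; i++) {
      if (imgs[i].acquired)
         continue;
      if (best < 0 || imgs[i].present_serial < imgs[best].present_serial)
         best = i;
   }
   return best; /* -1 if every image is still acquired */
}
```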
Joshua Ashton
d9cbc79941 wsi: Add acquired member to wsi_image
Tracks whether this wsi_image has been acquired by the app

Signed-off-by: Joshua Ashton <joshua@froggi.es>
2024-03-25 21:00:54 +00:00
Joshua Ashton
e209b02b97 wsi: Track if timeline semaphores are supported
This will be needed before we expose and use explicit sync.

Even if the host Wayland compositor supports timeline semaphores, in the
case of Venus etc., the underlying driver may not.

Signed-off-by: Joshua Ashton <joshua@froggi.es>
2024-03-22 00:24:26 +00:00
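A short sketch of the check this implies: query the driver's own timeline-semaphore feature bit rather than trusting the compositor side. Illustrative only, using the core Vulkan 1.2 feature struct.

```c
#include <stdbool.h>
#include <vulkan/vulkan.h>

/* True only if the ICD itself reports timeline semaphore support; e.g.
 * Venus may lack it even when the host compositor supports
 * linux-drm-syncobj-v1. */
static bool
driver_has_timeline_semaphores(VkPhysicalDevice pdev)
{
   VkPhysicalDeviceTimelineSemaphoreFeatures timeline = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES,
   };
   VkPhysicalDeviceFeatures2 features = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
      .pNext = &timeline,
   };
   vkGetPhysicalDeviceFeatures2(pdev, &features);
   return timeline.timelineSemaphore == VK_TRUE;
}
```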
Joshua Ashton
8a098f591b build: Add linux-drm-syncobj-v1 wayland protocol
Signed-off-by: Joshua Ashton <joshua@froggi.es>
2024-03-22 00:24:26 +00:00
Joshua Ashton
754f52e1e1 wsi: Add explicit_sync to wsi_drm_image_params
Allow the WSI frontend to request explicit sync buffers.

Signed-off-by: Joshua Ashton <joshua@froggi.es>
2024-03-22 00:24:26 +00:00
Joshua Ashton
00dba3992c wsi: Add explicit_sync to wsi_image_info
Will be used in future for specifying explicit sync for Vulkan WSI when supported.

Additionally cleans up wsi_create_buffer_blit_context, etc.

Signed-off-by: Joshua Ashton <joshua@froggi.es>
2024-03-22 00:24:26 +00:00
Joshua Ashton
9c8f205131 wsi: Pass wsi_drm_image_params to wsi_configure_prime_image
Signed-off-by: Joshua Ashton <joshua@froggi.es>
2024-03-20 17:21:27 +00:00
Joshua Ashton
f17f43b149 wsi: Pass wsi_drm_image_params to wsi_configure_native_image
No need to split this out into function parameters; it's just less clean.

Signed-off-by: Joshua Ashton <joshua@froggi.es>
2024-03-20 17:21:26 +00:00
3849 changed files with 148291 additions and 363315 deletions

.gitignore vendored
View File

@@ -1,4 +1,3 @@
.cache
.vscode* .vscode*
*.pyc *.pyc
*.pyo *.pyo

View File

@@ -33,7 +33,7 @@ workflow:
# merge pipeline # merge pipeline
- if: &is-merge-attempt $GITLAB_USER_LOGIN == "marge-bot" && $CI_PIPELINE_SOURCE == "merge_request_event" - if: &is-merge-attempt $GITLAB_USER_LOGIN == "marge-bot" && $CI_PIPELINE_SOURCE == "merge_request_event"
variables: variables:
KERNEL_IMAGE_BASE: https://${S3_HOST}/${S3_KERNEL_BUCKET}/${KERNEL_REPO}/${KERNEL_TAG} KERNEL_IMAGE_BASE: https://${S3_HOST}/mesa-lava/${KERNEL_REPO}/${KERNEL_TAG}
MESA_CI_PERFORMANCE_ENABLED: 1 MESA_CI_PERFORMANCE_ENABLED: 1
VALVE_INFRA_VANGOGH_JOB_PRIORITY: "" # Empty tags are ignored by gitlab VALVE_INFRA_VANGOGH_JOB_PRIORITY: "" # Empty tags are ignored by gitlab
# post-merge pipeline # post-merge pipeline
@@ -41,24 +41,24 @@ workflow:
# nightly pipeline # nightly pipeline
- if: &is-scheduled-pipeline $CI_PIPELINE_SOURCE == "schedule" - if: &is-scheduled-pipeline $CI_PIPELINE_SOURCE == "schedule"
variables: variables:
KERNEL_IMAGE_BASE: https://${S3_HOST}/${S3_KERNEL_BUCKET}/${KERNEL_REPO}/${KERNEL_TAG} KERNEL_IMAGE_BASE: https://${S3_HOST}/mesa-lava/${KERNEL_REPO}/${KERNEL_TAG}
JOB_PRIORITY: 50 JOB_PRIORITY: 50
VALVE_INFRA_VANGOGH_JOB_PRIORITY: priority:low VALVE_INFRA_VANGOGH_JOB_PRIORITY: priority:low
# pipeline for direct pushes that bypassed the CI # pipeline for direct pushes that bypassed the CI
- if: &is-direct-push $CI_PROJECT_NAMESPACE == "mesa" && $CI_PIPELINE_SOURCE == "push" && $GITLAB_USER_LOGIN != "marge-bot" - if: &is-direct-push $CI_PROJECT_NAMESPACE == "mesa" && $CI_PIPELINE_SOURCE == "push" && $GITLAB_USER_LOGIN != "marge-bot"
variables: variables:
KERNEL_IMAGE_BASE: https://${S3_HOST}/${S3_KERNEL_BUCKET}/${KERNEL_REPO}/${KERNEL_TAG} KERNEL_IMAGE_BASE: https://${S3_HOST}/mesa-lava/${KERNEL_REPO}/${KERNEL_TAG}
JOB_PRIORITY: 70 JOB_PRIORITY: 40
VALVE_INFRA_VANGOGH_JOB_PRIORITY: priority:low VALVE_INFRA_VANGOGH_JOB_PRIORITY: priority:low
# pre-merge or fork pipeline # pre-merge or fork pipeline
- if: $FORCE_KERNEL_TAG != null - if: $FORCE_KERNEL_TAG != null
variables: variables:
KERNEL_IMAGE_BASE: https://${S3_HOST}/${S3_KERNEL_BUCKET}/${KERNEL_REPO}/${FORCE_KERNEL_TAG} KERNEL_IMAGE_BASE: https://${S3_HOST}/mesa-lava/${KERNEL_REPO}/${FORCE_KERNEL_TAG}
JOB_PRIORITY: 50 JOB_PRIORITY: 50
VALVE_INFRA_VANGOGH_JOB_PRIORITY: priority:low VALVE_INFRA_VANGOGH_JOB_PRIORITY: priority:low
- if: $FORCE_KERNEL_TAG == null - if: $FORCE_KERNEL_TAG == null
variables: variables:
KERNEL_IMAGE_BASE: https://${S3_HOST}/${S3_KERNEL_BUCKET}/${KERNEL_REPO}/${KERNEL_TAG} KERNEL_IMAGE_BASE: https://${S3_HOST}/mesa-lava/${KERNEL_REPO}/${KERNEL_TAG}
JOB_PRIORITY: 50 JOB_PRIORITY: 50
VALVE_INFRA_VANGOGH_JOB_PRIORITY: priority:low VALVE_INFRA_VANGOGH_JOB_PRIORITY: priority:low
@@ -72,24 +72,14 @@ variables:
bash download-git-cache.sh bash download-git-cache.sh
rm download-git-cache.sh rm download-git-cache.sh
set +o xtrace set +o xtrace
S3_JWT_FILE: /s3_jwt CI_JOB_JWT_FILE: /minio_jwt
S3_HOST: s3.freedesktop.org S3_HOST: s3.freedesktop.org
# This bucket is used to fetch the kernel image
S3_KERNEL_BUCKET: mesa-rootfs
# Bucket for git cache
S3_GITCACHE_BUCKET: git-cache
# Bucket for the pipeline artifacts pushed to S3
S3_ARTIFACTS_BUCKET: artifacts
# Buckets for traces
S3_TRACIE_RESULTS_BUCKET: mesa-tracie-results
S3_TRACIE_PUBLIC_BUCKET: mesa-tracie-public
S3_TRACIE_PRIVATE_BUCKET: mesa-tracie-private
# per-pipeline artifact storage on MinIO # per-pipeline artifact storage on MinIO
PIPELINE_ARTIFACTS_BASE: ${S3_HOST}/${S3_ARTIFACTS_BUCKET}/${CI_PROJECT_PATH}/${CI_PIPELINE_ID} PIPELINE_ARTIFACTS_BASE: ${S3_HOST}/artifacts/${CI_PROJECT_PATH}/${CI_PIPELINE_ID}
# per-job artifact storage on MinIO # per-job artifact storage on MinIO
JOB_ARTIFACTS_BASE: ${PIPELINE_ARTIFACTS_BASE}/${CI_JOB_ID} JOB_ARTIFACTS_BASE: ${PIPELINE_ARTIFACTS_BASE}/${CI_JOB_ID}
# reference images stored for traces # reference images stored for traces
PIGLIT_REPLAY_REFERENCE_IMAGES_BASE: "${S3_HOST}/${S3_TRACIE_RESULTS_BUCKET}/$FDO_UPSTREAM_REPO" PIGLIT_REPLAY_REFERENCE_IMAGES_BASE: "${S3_HOST}/mesa-tracie-results/$FDO_UPSTREAM_REPO"
# For individual CI farm status see .ci-farms folder # For individual CI farm status see .ci-farms folder
# Disable farm with `git mv .ci-farms{,-disabled}/$farm_name` # Disable farm with `git mv .ci-farms{,-disabled}/$farm_name`
# Re-enable farm with `git mv .ci-farms{-disabled,}/$farm_name` # Re-enable farm with `git mv .ci-farms{-disabled,}/$farm_name`
@@ -97,22 +87,15 @@ variables:
ARTIFACTS_BASE_URL: https://${CI_PROJECT_ROOT_NAMESPACE}.${CI_PAGES_DOMAIN}/-/${CI_PROJECT_NAME}/-/jobs/${CI_JOB_ID}/artifacts ARTIFACTS_BASE_URL: https://${CI_PROJECT_ROOT_NAMESPACE}.${CI_PAGES_DOMAIN}/-/${CI_PROJECT_NAME}/-/jobs/${CI_JOB_ID}/artifacts
# Python scripts for structured logger # Python scripts for structured logger
PYTHONPATH: "$PYTHONPATH:$CI_PROJECT_DIR/install" PYTHONPATH: "$PYTHONPATH:$CI_PROJECT_DIR/install"
# Drop once deqp-runner is upgraded to > 0.18.0
MESA_VK_ABORT_ON_DEVICE_LOSS: 1
# Avoid the wall of "Unsupported SPIR-V capability" warnings in CI job log, hiding away useful output
MESA_SPIRV_LOG_LEVEL: error
default: default:
id_tokens:
S3_JWT:
aud: https://s3.freedesktop.org
before_script: before_script:
- > - >
export SCRIPTS_DIR=$(mktemp -d) && export SCRIPTS_DIR=$(mktemp -d) &&
curl -L -s --retry 4 -f --retry-all-errors --retry-delay 60 -O --output-dir "${SCRIPTS_DIR}" "${CI_PROJECT_URL}/-/raw/${CI_COMMIT_SHA}/.gitlab-ci/setup-test-env.sh" && curl -L -s --retry 4 -f --retry-all-errors --retry-delay 60 -O --output-dir "${SCRIPTS_DIR}" "${CI_PROJECT_URL}/-/raw/${CI_COMMIT_SHA}/.gitlab-ci/setup-test-env.sh" &&
. ${SCRIPTS_DIR}/setup-test-env.sh && . ${SCRIPTS_DIR}/setup-test-env.sh &&
echo -n "${S3_JWT}" > "${S3_JWT_FILE}" && echo -n "${CI_JOB_JWT}" > "${CI_JOB_JWT_FILE}" &&
unset CI_JOB_JWT S3_JWT # Unsetting vulnerable env variables unset CI_JOB_JWT # Unsetting vulnerable env variables
after_script: after_script:
# Work around https://gitlab.com/gitlab-org/gitlab/-/issues/20338 # Work around https://gitlab.com/gitlab-org/gitlab/-/issues/20338
@@ -121,9 +104,9 @@ default:
- > - >
set +x set +x
test -e "${S3_JWT_FILE}" && test -e "${CI_JOB_JWT_FILE}" &&
export S3_JWT="$(<${S3_JWT_FILE})" && export CI_JOB_JWT="$(<${CI_JOB_JWT_FILE})" &&
rm "${S3_JWT_FILE}" rm "${CI_JOB_JWT_FILE}"
# Retry when job fails. Failed jobs can be found in the Mesa CI Daily Reports: # Retry when job fails. Failed jobs can be found in the Mesa CI Daily Reports:
# https://gitlab.freedesktop.org/mesa/mesa/-/issues/?sort=created_date&state=opened&label_name%5B%5D=CI%20daily # https://gitlab.freedesktop.org/mesa/mesa/-/issues/?sort=created_date&state=opened&label_name%5B%5D=CI%20daily
@@ -276,7 +259,8 @@ make git archive:
# compress the current folder # compress the current folder
- tar -cvzf ../$CI_PROJECT_NAME.tar.gz . - tar -cvzf ../$CI_PROJECT_NAME.tar.gz .
- ci-fairy s3cp --token-file "${S3_JWT_FILE}" ../$CI_PROJECT_NAME.tar.gz https://$S3_HOST/git-cache/$CI_PROJECT_NAMESPACE/$CI_PROJECT_NAME/$CI_PROJECT_NAME.tar.gz - ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" ../$CI_PROJECT_NAME.tar.gz https://$S3_HOST/git-cache/$CI_PROJECT_NAMESPACE/$CI_PROJECT_NAME/$CI_PROJECT_NAME.tar.gz
# Sanity checks of MR settings and commit logs # Sanity checks of MR settings and commit logs
sanity: sanity:
@@ -297,9 +281,9 @@ sanity:
image_tags=( image_tags=(
DEBIAN_BASE_TAG DEBIAN_BASE_TAG
DEBIAN_BUILD_TAG DEBIAN_BUILD_TAG
DEBIAN_TEST_ANDROID_TAG DEBIAN_X86_64_TEST_ANDROID_TAG
DEBIAN_TEST_GL_TAG DEBIAN_X86_64_TEST_GL_TAG
DEBIAN_TEST_VK_TAG DEBIAN_X86_64_TEST_VK_TAG
ALPINE_X86_64_BUILD_TAG ALPINE_X86_64_BUILD_TAG
ALPINE_X86_64_LAVA_SSH_TAG ALPINE_X86_64_LAVA_SSH_TAG
FEDORA_X86_64_BUILD_TAG FEDORA_X86_64_BUILD_TAG
@@ -326,22 +310,6 @@ sanity:
- placeholder-job - placeholder-job
mr-label-maker-test:
extends:
- .fdo.ci-fairy
stage: sanity
rules:
- !reference [.mr-label-maker-rules, rules]
variables:
GIT_STRATEGY: fetch
timeout: 10m
script:
- set -eu
- python3 -m venv .venv
- source .venv/bin/activate
- pip install git+https://gitlab.freedesktop.org/freedesktop/mr-label-maker
- mr-label-maker --dry-run --mr $CI_MERGE_REQUEST_IID
# Jobs that need to pass before spending hardware resources on further testing # Jobs that need to pass before spending hardware resources on further testing
.required-for-hardware-jobs: .required-for-hardware-jobs:
needs: needs:

View File

@@ -5,25 +5,17 @@ target:
id: '{{ ci_runner_id }}' id: '{{ ci_runner_id }}'
timeouts: timeouts:
first_console_activity: # This limits the time it can take to receive the first console log first_console_activity: # This limits the time it can take to receive the first console log
minutes: {{ timeout_first_console_activity_minutes | default(0, true) }} minutes: {{ timeout_first_minutes }}
seconds: {{ timeout_first_console_activity_seconds | default(0, true) }} retries: {{ timeout_first_retries }}
retries: {{ timeout_first_console_activity_retries }}
console_activity: # Reset every time we receive a message from the logs console_activity: # Reset every time we receive a message from the logs
minutes: {{ timeout_console_activity_minutes | default(0, true) }} minutes: {{ timeout_minutes }}
seconds: {{ timeout_console_activity_seconds | default(0, true) }} retries: {{ timeout_retries }}
retries: {{ timeout_console_activity_retries }}
boot_cycle: boot_cycle:
minutes: {{ timeout_boot_minutes | default(0, true) }} minutes: {{ timeout_boot_minutes }}
seconds: {{ timeout_boot_seconds | default(0, true) }}
retries: {{ timeout_boot_retries }} retries: {{ timeout_boot_retries }}
overall: # Maximum time the job can take, not overrideable by the "continue" deployment overall: # Maximum time the job can take, not overrideable by the "continue" deployment
minutes: {{ timeout_overall_minutes | default(0, true) }} minutes: {{ timeout_overall_minutes }}
seconds: {{ timeout_overall_seconds | default(0, true) }}
retries: 0 retries: 0
# no retries possible here # no retries possible here
@@ -39,38 +31,29 @@ console_patterns:
job_success: job_success:
regex: >- regex: >-
{{ job_success_regex }} {{ job_success_regex }}
{% if job_warn_regex %}
job_warn: job_warn:
regex: >- regex: >-
{{ job_warn_regex }} {{ job_warn_regex }}
{% endif %}
# Environment to deploy # Environment to deploy
deployment: deployment:
# Initial boot # Initial boot
start: start:
storage:
http:
- path: "/b2c-extra-args"
data: >
b2c.pipefail b2c.poweroff_delay={{ poweroff_delay }}
b2c.minio="gateway,{{ '{{' }} minio_url }},{{ '{{' }} job_bucket_access_key }},{{ '{{' }} job_bucket_secret_key }}"
b2c.volume="{{ '{{' }} job_bucket }}-results,mirror=gateway/{{ '{{' }} job_bucket }},pull_on=pipeline_start,push_on=changes,overwrite{% for excl in job_volume_exclusions %},exclude={{ excl }}{% endfor %},remove,expiration=pipeline_end,preserve"
{% for volume in volumes %}
b2c.volume={{ volume }}
{% endfor %}
b2c.service="--privileged --tls-verify=false --pid=host docker://{{ '{{' }} fdo_proxy_registry }}/gfx-ci/ci-tron/telegraf:latest" b2c.hostname=dut-{{ '{{' }} machine.full_name }}
b2c.container="-v {{ '{{' }} job_bucket }}-results:{{ working_dir }} -w {{ working_dir }} {% for mount_volume in mount_volumes %} -v {{ mount_volume }}{% endfor %} --tls-verify=false docker://{{ local_container }} {{ container_cmd }}"
kernel: kernel:
url: '{{ kernel_url }}' url: '{{ kernel_url }}'
# NOTE: b2c.cache_device should not be here, but this works around
# a limitation of b2c which will be removed in the next release
cmdline: > cmdline: >
SALAD.machine_id={{ '{{' }} machine_id }} SALAD.machine_id={{ '{{' }} machine_id }}
console={{ '{{' }} local_tty_device }},115200 console={{ '{{' }} local_tty_device }},115200 earlyprintk=vga,keep
b2c.cache_device=auto b2c.ntp_peer=10.42.0.1 loglevel={{ log_level }} no_hash_pointers
b2c.extra_args_url={{ '{{' }} job.http.url }}/b2c-extra-args b2c.service="--privileged --tls-verify=false --pid=host docker://{{ '{{' }} fdo_proxy_registry }}/gfx-ci/ci-tron/telegraf:latest" b2c.hostname=dut-{{ '{{' }} machine.full_name }}
b2c.container="-ti --tls-verify=false docker://{{ '{{' }} fdo_proxy_registry }}/gfx-ci/ci-tron/machine-registration:latest check"
b2c.ntp_peer=10.42.0.1 b2c.pipefail b2c.cache_device=auto b2c.poweroff_delay={{ poweroff_delay }}
b2c.minio="gateway,{{ '{{' }} minio_url }},{{ '{{' }} job_bucket_access_key }},{{ '{{' }} job_bucket_secret_key }}"
b2c.volume="{{ '{{' }} job_bucket }}-results,mirror=gateway/{{ '{{' }} job_bucket }},pull_on=pipeline_start,push_on=changes,overwrite{% for excl in job_volume_exclusions %},exclude={{ excl }}{% endfor %},remove,expiration=pipeline_end,preserve"
{% for volume in volumes %}
b2c.volume={{ volume }}
{% endfor %}
b2c.container="-v {{ '{{' }} job_bucket }}-results:{{ working_dir }} -w {{ working_dir }} {% for mount_volume in mount_volumes %} -v {{ mount_volume }}{% endfor %} --tls-verify=false docker://{{ local_container }} {{ container_cmd }}"
{% if kernel_cmdline_extras is defined %} {% if kernel_cmdline_extras is defined %}
{{ kernel_cmdline_extras }} {{ kernel_cmdline_extras }}
{% endif %} {% endif %}
@@ -78,7 +61,3 @@ deployment:
initramfs: initramfs:
url: '{{ initramfs_url }}' url: '{{ initramfs_url }}'
{% if dtb_url is defined %}
dtb:
url: '{{ dtb_url }}'
{% endif %}

View File

@@ -10,7 +10,7 @@ if [ -z "$BM_POE_ADDRESS" ]; then
exit 1 exit 1
fi fi
SNMP_KEY="SNMPv2-SMI::mib-2.105.1.1.1.3.1.$((${BM_POE_BASE:-0} + BM_POE_INTERFACE))" SNMP_KEY="SNMPv2-SMI::mib-2.105.1.1.1.3.1.$((48 + BM_POE_INTERFACE))"
SNMP_OFF="i 2" SNMP_OFF="i 2"
flock /var/run/poe.lock -c "snmpset -v2c -r 3 -t 30 -cmesaci $BM_POE_ADDRESS $SNMP_KEY $SNMP_OFF" flock /var/run/poe.lock -c "snmpset -v2c -r 3 -t 30 -cmesaci $BM_POE_ADDRESS $SNMP_KEY $SNMP_OFF"

View File

@@ -10,7 +10,7 @@ if [ -z "$BM_POE_ADDRESS" ]; then
exit 1 exit 1
fi fi
SNMP_KEY="SNMPv2-SMI::mib-2.105.1.1.1.3.1.$((${BM_POE_BASE:-0} + BM_POE_INTERFACE))" SNMP_KEY="SNMPv2-SMI::mib-2.105.1.1.1.3.1.$((48 + BM_POE_INTERFACE))"
SNMP_ON="i 1" SNMP_ON="i 1"
SNMP_OFF="i 2" SNMP_OFF="i 2"

View File

@@ -64,7 +64,7 @@ class PoERun:
if not boot_detected: if not boot_detected:
self.print_error( self.print_error(
"Something wrong; couldn't detect the boot start up sequence") "Something wrong; couldn't detect the boot start up sequence")
return 2 return 1
self.logger.create_job_phase("test") self.logger.create_job_phase("test")
for line in self.ser.lines(timeout=self.test_timeout, phase="test"): for line in self.ser.lines(timeout=self.test_timeout, phase="test"):

View File

@@ -13,7 +13,7 @@ date +'%F %T'
# Make JWT token available as file in the bare-metal storage to enable access # Make JWT token available as file in the bare-metal storage to enable access
# to MinIO # to MinIO
cp "${S3_JWT_FILE}" "${rootfs_dst}${S3_JWT_FILE}" cp "${CI_JOB_JWT_FILE}" "${rootfs_dst}${CI_JOB_JWT_FILE}"
date +'%F %T' date +'%F %T'

View File

@@ -17,6 +17,7 @@
paths: paths:
- _build/meson-logs/*.txt - _build/meson-logs/*.txt
- _build/meson-logs/strace - _build/meson-logs/strace
- shader-db
- artifacts - artifacts
# Just Linux # Just Linux
@@ -70,14 +71,13 @@ debian-testing:
-D glx=dri -D glx=dri
-D gbm=enabled -D gbm=enabled
-D egl=enabled -D egl=enabled
-D glvnd=disabled
-D platforms=x11,wayland -D platforms=x11,wayland
GALLIUM_ST: > GALLIUM_ST: >
-D dri3=enabled -D dri3=enabled
-D gallium-nine=true -D gallium-nine=true
-D gallium-va=enabled -D gallium-va=enabled
-D gallium-rusticl=true -D gallium-rusticl=true
GALLIUM_DRIVERS: "llvmpipe,softpipe,virgl,radeonsi,zink,crocus,iris,i915,r300,svga" GALLIUM_DRIVERS: "swrast,virgl,radeonsi,zink,crocus,iris,i915,r300"
VULKAN_DRIVERS: "swrast,amd,intel,intel_hasvk,virtio,nouveau" VULKAN_DRIVERS: "swrast,amd,intel,intel_hasvk,virtio,nouveau"
BUILDTYPE: "debugoptimized" BUILDTYPE: "debugoptimized"
EXTRA_OPTION: > EXTRA_OPTION: >
@@ -138,7 +138,7 @@ debian-testing-msan:
# GLSL has some issues in sexpression reading. # GLSL has some issues in sexpression reading.
# gtest has issues in its test initialization. # gtest has issues in its test initialization.
MESON_TEST_ARGS: "--suite glcpp --suite format" MESON_TEST_ARGS: "--suite glcpp --suite format"
GALLIUM_DRIVERS: "freedreno,iris,nouveau,r300,r600,llvmpipe,softpipe,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,radeonsi,tegra,d3d12,crocus" GALLIUM_DRIVERS: "freedreno,iris,nouveau,kmsro,r300,r600,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,radeonsi,tegra,d3d12,crocus"
VULKAN_DRIVERS: intel,amd,broadcom,virtio VULKAN_DRIVERS: intel,amd,broadcom,virtio
# Do a host build for intel-clc (msan complains about # Do a host build for intel-clc (msan complains about
# uninitialized values in the LLVM libs) # uninitialized values in the LLVM libs)
@@ -154,9 +154,7 @@ debian-testing-msan:
-D intel-clc=enabled -D intel-clc=enabled
-D install-intel-clc=true -D install-intel-clc=true
# Disabled because it is unacceptably slow and blocks too many MRs debian-build-testing:
# Should be re-enabled once this problem has been fixed.
.debian-build-testing:
extends: .meson-build extends: .meson-build
variables: variables:
BUILDTYPE: debug BUILDTYPE: debug
@@ -165,7 +163,6 @@ debian-testing-msan:
-D glx=dri -D glx=dri
-D gbm=enabled -D gbm=enabled
-D egl=enabled -D egl=enabled
-D glvnd=disabled
-D platforms=x11,wayland -D platforms=x11,wayland
GALLIUM_ST: > GALLIUM_ST: >
-D dri3=enabled -D dri3=enabled
@@ -174,9 +171,9 @@ debian-testing-msan:
-D gallium-omx=bellagio -D gallium-omx=bellagio
-D gallium-va=enabled -D gallium-va=enabled
-D gallium-xa=enabled -D gallium-xa=enabled
-D gallium-nine=false -D gallium-nine=true
-D gallium-rusticl=false -D gallium-rusticl=false
GALLIUM_DRIVERS: "iris,nouveau,r300,r600,freedreno,llvmpipe,softpipe,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,d3d12,asahi,crocus" GALLIUM_DRIVERS: "iris,nouveau,kmsro,r300,r600,freedreno,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,d3d12,asahi,crocus"
VULKAN_DRIVERS: swrast VULKAN_DRIVERS: swrast
EXTRA_OPTION: > EXTRA_OPTION: >
-D spirv-to-dxil=true -D spirv-to-dxil=true
@@ -184,7 +181,6 @@ debian-testing-msan:
-D tools=drm-shim,etnaviv,freedreno,glsl,intel,intel-ui,nir,nouveau,lima,panfrost,asahi -D tools=drm-shim,etnaviv,freedreno,glsl,intel,intel-ui,nir,nouveau,lima,panfrost,asahi
-D b_lto=true -D b_lto=true
LLVM_VERSION: 15 LLVM_VERSION: 15
S3_ARTIFACT_NAME: debian-build-testing
script: | script: |
section_start lava-pytest "lava-pytest" section_start lava-pytest "lava-pytest"
.gitlab-ci/lava/lava-pytest.sh .gitlab-ci/lava/lava-pytest.sh
@@ -192,29 +188,11 @@ debian-testing-msan:
.gitlab-ci/run-shellcheck.sh .gitlab-ci/run-shellcheck.sh
section_switch yamllint "yamllint" section_switch yamllint "yamllint"
.gitlab-ci/run-yamllint.sh .gitlab-ci/run-yamllint.sh
section_end yamllint section_switch meson "meson"
.gitlab-ci/meson/build.sh .gitlab-ci/meson/build.sh
.gitlab-ci/prepare-artifacts.sh section_switch shader-db "shader-db"
timeout: 15m
# Disabled because it currently needs debian-build-testing
.shader-db:
stage: code-validation
extends:
- .use-debian/x86_64_build
- .container+build-rules
needs:
- .debian-build-testing
variables:
S3_ARTIFACT_NAME: debian-build-testing
before_script:
- !reference [.download_s3, before_script]
script: |
.gitlab-ci/run-shader-db.sh .gitlab-ci/run-shader-db.sh
artifacts: timeout: 30m
paths:
- shader-db
timeout: 15m
# Test a release build with -Werror so new warnings don't sneak in. # Test a release build with -Werror so new warnings don't sneak in.
debian-release: debian-release:
@@ -228,7 +206,6 @@ debian-release:
-D glx=dri -D glx=dri
-D gbm=enabled -D gbm=enabled
-D egl=enabled -D egl=enabled
-D glvnd=disabled
-D platforms=x11,wayland -D platforms=x11,wayland
GALLIUM_ST: > GALLIUM_ST: >
-D dri3=enabled -D dri3=enabled
@@ -240,7 +217,7 @@ debian-release:
-D gallium-nine=false -D gallium-nine=false
-D gallium-rusticl=false -D gallium-rusticl=false
-D llvm=enabled -D llvm=enabled
GALLIUM_DRIVERS: "i915,iris,nouveau,freedreno,r300,svga,llvmpipe,softpipe,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,d3d12,crocus" GALLIUM_DRIVERS: "i915,iris,nouveau,kmsro,freedreno,r300,svga,swrast,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,d3d12,crocus"
VULKAN_DRIVERS: "amd,imagination-experimental,microsoft-experimental" VULKAN_DRIVERS: "amd,imagination-experimental,microsoft-experimental"
EXTRA_OPTION: > EXTRA_OPTION: >
-D spirv-to-dxil=true -D spirv-to-dxil=true
@@ -270,10 +247,10 @@ alpine-build-testing:
-D glx=disabled -D glx=disabled
-D gbm=enabled -D gbm=enabled
-D egl=enabled -D egl=enabled
-D glvnd=disabled -D glvnd=false
-D platforms=wayland -D platforms=wayland
LLVM_VERSION: "16" LLVM_VERSION: "16"
GALLIUM_DRIVERS: "crocus,etnaviv,freedreno,iris,lima,nouveau,panfrost,r300,r600,radeonsi,svga,llvmpipe,softpipe,tegra,v3d,vc4,virgl,zink" GALLIUM_DRIVERS: "crocus,etnaviv,freedreno,iris,kmsro,lima,nouveau,panfrost,r300,r600,radeonsi,svga,swrast,tegra,v3d,vc4,virgl,zink"
GALLIUM_ST: > GALLIUM_ST: >
-D dri3=enabled -D dri3=enabled
-D gallium-extra-hud=true -D gallium-extra-hud=true
@@ -310,7 +287,7 @@ fedora-release:
-D glx=dri -D glx=dri
-D gbm=enabled -D gbm=enabled
-D egl=enabled -D egl=enabled
-D glvnd=enabled -D glvnd=true
-D platforms=x11,wayland -D platforms=x11,wayland
EXTRA_OPTION: > EXTRA_OPTION: >
-D b_lto=true -D b_lto=true
@@ -321,7 +298,7 @@ fedora-release:
-D intel-rt=enabled -D intel-rt=enabled
-D imagination-srv=true -D imagination-srv=true
-D teflon=true -D teflon=true
GALLIUM_DRIVERS: "crocus,etnaviv,freedreno,i915,iris,lima,nouveau,panfrost,r300,r600,radeonsi,svga,llvmpipe,softpipe,tegra,v3d,vc4,virgl,zink" GALLIUM_DRIVERS: "crocus,etnaviv,freedreno,i915,iris,kmsro,lima,nouveau,panfrost,r300,r600,radeonsi,svga,swrast,tegra,v3d,vc4,virgl,zink"
GALLIUM_ST: > GALLIUM_ST: >
-D dri3=enabled -D dri3=enabled
-D gallium-extra-hud=true -D gallium-extra-hud=true
@@ -363,7 +340,6 @@ debian-android:
-D glx=disabled -D glx=disabled
-D gbm=disabled -D gbm=disabled
-D egl=enabled -D egl=enabled
-D glvnd=disabled
-D platforms=android -D platforms=android
EXTRA_OPTION: > EXTRA_OPTION: >
-D android-stub=true -D android-stub=true
@@ -433,7 +409,7 @@ debian-android:
- debian/arm64_build - debian/arm64_build
variables: variables:
VULKAN_DRIVERS: freedreno,broadcom VULKAN_DRIVERS: freedreno,broadcom
GALLIUM_DRIVERS: "etnaviv,freedreno,lima,nouveau,panfrost,llvmpipe,softpipe,tegra,v3d,vc4,zink" GALLIUM_DRIVERS: "etnaviv,freedreno,kmsro,lima,nouveau,panfrost,swrast,tegra,v3d,vc4,zink"
BUILDTYPE: "debugoptimized" BUILDTYPE: "debugoptimized"
tags: tags:
- aarch64 - aarch64
@@ -444,10 +420,6 @@ debian-arm32:
- .ci-deqp-artifacts - .ci-deqp-artifacts
variables: variables:
CROSS: armhf CROSS: armhf
DRI_LOADERS:
-D glvnd=disabled
# remove llvmpipe from the .meson-arm list because here we have llvm=disabled
GALLIUM_DRIVERS: "etnaviv,freedreno,lima,nouveau,panfrost,softpipe,tegra,v3d,vc4,zink"
EXTRA_OPTION: > EXTRA_OPTION: >
-D llvm=disabled -D llvm=disabled
-D valgrind=disabled -D valgrind=disabled
@@ -463,8 +435,6 @@ debian-arm32-asan:
extends: extends:
- debian-arm32 - debian-arm32
variables: variables:
DRI_LOADERS:
-D glvnd=disabled
EXTRA_OPTION: > EXTRA_OPTION: >
-D llvm=disabled -D llvm=disabled
-D b_sanitize=address -D b_sanitize=address
@@ -483,16 +453,13 @@ debian-arm64:
-Wno-error=array-bounds -Wno-error=array-bounds
-Wno-error=stringop-truncation -Wno-error=stringop-truncation
VULKAN_DRIVERS: "freedreno,broadcom,panfrost,imagination-experimental" VULKAN_DRIVERS: "freedreno,broadcom,panfrost,imagination-experimental"
DRI_LOADERS:
-D glvnd=disabled
EXTRA_OPTION: > EXTRA_OPTION: >
-D llvm=disabled
-D valgrind=disabled -D valgrind=disabled
-D imagination-srv=true -D imagination-srv=true
-D perfetto=true -D perfetto=true
-D freedreno-kmds=msm,virtio -D freedreno-kmds=msm,virtio
-D teflon=true -D teflon=true
GALLIUM_ST:
-D gallium-rusticl=true
S3_ARTIFACT_NAME: mesa-arm64-default-${BUILDTYPE} S3_ARTIFACT_NAME: mesa-arm64-default-${BUILDTYPE}
script: script:
- .gitlab-ci/meson/build.sh - .gitlab-ci/meson/build.sh
@@ -502,9 +469,8 @@ debian-arm64-asan:
extends: extends:
- debian-arm64 - debian-arm64
variables: variables:
DRI_LOADERS:
-D glvnd=disabled
EXTRA_OPTION: > EXTRA_OPTION: >
-D llvm=disabled
-D b_sanitize=address -D b_sanitize=address
-D valgrind=disabled -D valgrind=disabled
-D tools=dlclose-skip -D tools=dlclose-skip
@@ -518,8 +484,6 @@ debian-arm64-build-test:
- .ci-deqp-artifacts - .ci-deqp-artifacts
variables: variables:
VULKAN_DRIVERS: "amd" VULKAN_DRIVERS: "amd"
DRI_LOADERS:
-D glvnd=disabled
EXTRA_OPTION: > EXTRA_OPTION: >
-Dtools=panfrost,imagination -Dtools=panfrost,imagination
@@ -558,7 +522,7 @@ debian-clang:
-D glx=dri -D glx=dri
-D gbm=enabled -D gbm=enabled
-D egl=enabled -D egl=enabled
-D glvnd=enabled -D glvnd=true
-D platforms=x11,wayland -D platforms=x11,wayland
GALLIUM_ST: > GALLIUM_ST: >
-D dri3=enabled -D dri3=enabled
@@ -575,7 +539,7 @@ debian-clang:
-D shared-llvm=enabled -D shared-llvm=enabled
-D opencl-spirv=true -D opencl-spirv=true
-D shared-glapi=enabled -D shared-glapi=enabled
GALLIUM_DRIVERS: "iris,nouveau,r300,r600,freedreno,llvmpipe,softpipe,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,radeonsi,tegra,d3d12,crocus,i915,asahi" GALLIUM_DRIVERS: "iris,nouveau,kmsro,r300,r600,freedreno,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,radeonsi,tegra,d3d12,crocus,i915,asahi"
VULKAN_DRIVERS: intel,amd,freedreno,broadcom,virtio,swrast,panfrost,imagination-experimental,microsoft-experimental,nouveau VULKAN_DRIVERS: intel,amd,freedreno,broadcom,virtio,swrast,panfrost,imagination-experimental,microsoft-experimental,nouveau
EXTRA_OPTION: EXTRA_OPTION:
-D spirv-to-dxil=true -D spirv-to-dxil=true
@@ -640,7 +604,6 @@ debian-vulkan:
-D opengl=false -D opengl=false
-D gles1=disabled -D gles1=disabled
-D gles2=disabled -D gles2=disabled
-D glvnd=disabled
-D platforms=x11,wayland -D platforms=x11,wayland
-D osmesa=false -D osmesa=false
GALLIUM_ST: > GALLIUM_ST: >
@@ -670,10 +633,8 @@ debian-x86_32:
BUILDTYPE: debug BUILDTYPE: debug
CROSS: i386 CROSS: i386
VULKAN_DRIVERS: intel,amd,swrast,virtio VULKAN_DRIVERS: intel,amd,swrast,virtio
GALLIUM_DRIVERS: "iris,nouveau,r300,r600,radeonsi,llvmpipe,softpipe,virgl,zink,crocus,d3d12" GALLIUM_DRIVERS: "iris,nouveau,r300,r600,radeonsi,swrast,virgl,zink,crocus,d3d12"
LLVM_VERSION: 15 LLVM_VERSION: 15
DRI_LOADERS:
-D glvnd=disabled
EXTRA_OPTION: > EXTRA_OPTION: >
-D vulkan-layers=device-select,overlay -D vulkan-layers=device-select,overlay
-D intel-clc=system -D intel-clc=system
@@ -698,11 +659,9 @@ debian-s390x:
- kvm - kvm
variables: variables:
CROSS: s390x CROSS: s390x
GALLIUM_DRIVERS: "llvmpipe,softpipe,zink" GALLIUM_DRIVERS: "swrast,zink"
LLVM_VERSION: 15 LLVM_VERSION: 15
VULKAN_DRIVERS: "swrast" VULKAN_DRIVERS: "swrast"
DRI_LOADERS:
-D glvnd=disabled
debian-ppc64el: debian-ppc64el:
extends: extends:
@@ -712,7 +671,5 @@ debian-ppc64el:
variables: variables:
BUILDTYPE: debug BUILDTYPE: debug
CROSS: ppc64el CROSS: ppc64el
GALLIUM_DRIVERS: "nouveau,radeonsi,llvmpipe,softpipe,virgl,zink" GALLIUM_DRIVERS: "nouveau,radeonsi,swrast,virgl,zink"
VULKAN_DRIVERS: "amd,swrast" VULKAN_DRIVERS: "amd,swrast"
DRI_LOADERS:
-D glvnd=disabled

View File

@@ -1,139 +1,130 @@
#!/bin/bash #!/bin/bash
VARS=( for var in \
ACO_DEBUG ACO_DEBUG \
ARTIFACTS_BASE_URL ARTIFACTS_BASE_URL \
ASAN_OPTIONS ASAN_OPTIONS \
BASE_SYSTEM_FORK_HOST_PREFIX BASE_SYSTEM_FORK_HOST_PREFIX \
BASE_SYSTEM_MAINLINE_HOST_PREFIX BASE_SYSTEM_MAINLINE_HOST_PREFIX \
CI_COMMIT_BRANCH CI_COMMIT_BRANCH \
CI_COMMIT_REF_NAME CI_COMMIT_REF_NAME \
CI_COMMIT_TITLE CI_COMMIT_TITLE \
CI_JOB_ID CI_JOB_ID \
S3_JWT_FILE CI_JOB_JWT_FILE \
CI_JOB_STARTED_AT CI_JOB_STARTED_AT \
CI_JOB_NAME CI_JOB_NAME \
CI_JOB_URL CI_JOB_URL \
CI_MERGE_REQUEST_SOURCE_BRANCH_NAME CI_MERGE_REQUEST_SOURCE_BRANCH_NAME \
CI_MERGE_REQUEST_TITLE CI_MERGE_REQUEST_TITLE \
CI_NODE_INDEX CI_NODE_INDEX \
CI_NODE_TOTAL CI_NODE_TOTAL \
CI_PAGES_DOMAIN CI_PAGES_DOMAIN \
CI_PIPELINE_ID CI_PIPELINE_ID \
CI_PIPELINE_URL CI_PIPELINE_URL \
CI_PROJECT_DIR CI_PROJECT_DIR \
CI_PROJECT_NAME CI_PROJECT_NAME \
CI_PROJECT_PATH CI_PROJECT_PATH \
CI_PROJECT_ROOT_NAMESPACE CI_PROJECT_ROOT_NAMESPACE \
CI_RUNNER_DESCRIPTION CI_RUNNER_DESCRIPTION \
CI_SERVER_URL CI_SERVER_URL \
CROSVM_GALLIUM_DRIVER CROSVM_GALLIUM_DRIVER \
CROSVM_GPU_ARGS CROSVM_GPU_ARGS \
CURRENT_SECTION CURRENT_SECTION \
DEQP_BIN_DIR DEQP_BIN_DIR \
DEQP_CONFIG DEQP_CONFIG \
DEQP_EXPECTED_RENDERER DEQP_EXPECTED_RENDERER \
DEQP_FRACTION DEQP_FRACTION \
DEQP_HEIGHT DEQP_HEIGHT \
DEQP_RESULTS_DIR DEQP_RESULTS_DIR \
DEQP_RUNNER_OPTIONS DEQP_RUNNER_OPTIONS \
DEQP_SUITE DEQP_SUITE \
DEQP_TEMP_DIR DEQP_TEMP_DIR \
DEQP_VER DEQP_VER \
DEQP_WIDTH DEQP_WIDTH \
DEVICE_NAME DEVICE_NAME \
DRIVER_NAME DRIVER_NAME \
EGL_PLATFORM EGL_PLATFORM \
ETNA_MESA_DEBUG ETNA_MESA_DEBUG \
FDO_CI_CONCURRENT FDO_CI_CONCURRENT \
FDO_UPSTREAM_REPO FDO_UPSTREAM_REPO \
FD_MESA_DEBUG FD_MESA_DEBUG \
FLAKES_CHANNEL FLAKES_CHANNEL \
FREEDRENO_HANGCHECK_MS FREEDRENO_HANGCHECK_MS \
GALLIUM_DRIVER GALLIUM_DRIVER \
GALLIVM_PERF GALLIVM_PERF \
GPU_VERSION GPU_VERSION \
GTEST GTEST \
GTEST_FAILS GTEST_FAILS \
GTEST_FRACTION GTEST_FRACTION \
GTEST_RESULTS_DIR GTEST_RESULTS_DIR \
GTEST_RUNNER_OPTIONS GTEST_RUNNER_OPTIONS \
GTEST_SKIPS GTEST_SKIPS \
HWCI_FREQ_MAX HWCI_FREQ_MAX \
HWCI_KERNEL_MODULES HWCI_KERNEL_MODULES \
HWCI_KVM HWCI_KVM \
HWCI_START_WESTON HWCI_START_WESTON \
HWCI_START_XORG HWCI_START_XORG \
HWCI_TEST_SCRIPT HWCI_TEST_SCRIPT \
IR3_SHADER_DEBUG IR3_SHADER_DEBUG \
JOB_ARTIFACTS_BASE JOB_ARTIFACTS_BASE \
JOB_RESULTS_PATH JOB_RESULTS_PATH \
JOB_ROOTFS_OVERLAY_PATH JOB_ROOTFS_OVERLAY_PATH \
KERNEL_IMAGE_BASE KERNEL_IMAGE_BASE \
KERNEL_IMAGE_NAME KERNEL_IMAGE_NAME \
LD_LIBRARY_PATH LD_LIBRARY_PATH \
LIBGL_ALWAYS_SOFTWARE LP_NUM_THREADS \
LP_NUM_THREADS MESA_BASE_TAG \
MESA_BASE_TAG MESA_BUILD_PATH \
MESA_BUILD_PATH MESA_DEBUG \
MESA_DEBUG MESA_GLES_VERSION_OVERRIDE \
MESA_GLES_VERSION_OVERRIDE MESA_GLSL_VERSION_OVERRIDE \
MESA_GLSL_VERSION_OVERRIDE MESA_GL_VERSION_OVERRIDE \
MESA_GL_VERSION_OVERRIDE MESA_IMAGE \
MESA_IMAGE MESA_IMAGE_PATH \
MESA_IMAGE_PATH MESA_IMAGE_TAG \
MESA_IMAGE_TAG MESA_LOADER_DRIVER_OVERRIDE \
MESA_LOADER_DRIVER_OVERRIDE MESA_TEMPLATES_COMMIT \
MESA_SPIRV_LOG_LEVEL MESA_VK_IGNORE_CONFORMANCE_WARNING \
MESA_TEMPLATES_COMMIT S3_HOST \
MESA_VK_ABORT_ON_DEVICE_LOSS S3_RESULTS_UPLOAD \
MESA_VK_IGNORE_CONFORMANCE_WARNING NIR_DEBUG \
S3_HOST PAN_I_WANT_A_BROKEN_VULKAN_DRIVER \
S3_RESULTS_UPLOAD PAN_MESA_DEBUG \
NIR_DEBUG PANVK_DEBUG \
PAN_I_WANT_A_BROKEN_VULKAN_DRIVER PIGLIT_FRACTION \
PAN_MESA_DEBUG PIGLIT_NO_WINDOW \
PANVK_DEBUG PIGLIT_OPTIONS \
PIGLIT_FRACTION PIGLIT_PLATFORM \
PIGLIT_NO_WINDOW PIGLIT_PROFILES \
PIGLIT_OPTIONS PIGLIT_REPLAY_ARTIFACTS_BASE_URL \
PIGLIT_PLATFORM PIGLIT_REPLAY_DEVICE_NAME \
PIGLIT_PROFILES PIGLIT_REPLAY_EXTRA_ARGS \
PIGLIT_REPLAY_ANGLE_TAG PIGLIT_REPLAY_LOOP_TIMES \
PIGLIT_REPLAY_ARTIFACTS_BASE_URL PIGLIT_REPLAY_REFERENCE_IMAGES_BASE \
PIGLIT_REPLAY_DEVICE_NAME PIGLIT_REPLAY_SUBCOMMAND \
PIGLIT_REPLAY_EXTRA_ARGS PIGLIT_RESULTS \
PIGLIT_REPLAY_LOOP_TIMES PIGLIT_TESTS \
PIGLIT_REPLAY_REFERENCE_IMAGES_BASE PIGLIT_TRACES_FILE \
PIGLIT_REPLAY_SUBCOMMAND PIPELINE_ARTIFACTS_BASE \
PIGLIT_RESULTS RADEON_DEBUG \
PIGLIT_TESTS RADV_DEBUG \
PIGLIT_TRACES_FILE RADV_PERFTEST \
PIPELINE_ARTIFACTS_BASE SKQP_ASSETS_DIR \
RADEON_DEBUG SKQP_BACKENDS \
RADV_DEBUG TU_DEBUG \
RADV_PERFTEST USE_ANGLE \
SKQP_ASSETS_DIR VIRGL_HOST_API \
SKQP_BACKENDS WAFFLE_PLATFORM \
TU_DEBUG VK_CPU \
USE_ANGLE VK_DRIVER \
VIRGL_HOST_API VK_ICD_FILENAMES \
VIRGL_RENDER_SERVER VKD3D_PROTON_RESULTS \
WAFFLE_PLATFORM VKD3D_CONFIG \
VK_DRIVER VKD3D_TEST_EXCLUDE \
VKD3D_PROTON_RESULTS ZINK_DESCRIPTORS \
VKD3D_CONFIG ZINK_DEBUG \
VKD3D_TEST_EXCLUDE LVP_POISON_MEMORY \
ZINK_DESCRIPTORS ; do
ZINK_DEBUG
LVP_POISON_MEMORY
# Dead code within Mesa CI, but required by virglrender CI
# (because they include our files in their CI)
VK_DRIVER_FILES
)
for var in "${VARS[@]}"; do
if [ -n "${!var+x}" ]; then if [ -n "${!var+x}" ]; then
echo "export $var=${!var@Q}" echo "export $var=${!var@Q}"
fi fi

View File

@@ -113,7 +113,7 @@ export PYTHONPATH=$(python3 -c "import sys;print(\":\".join(sys.path))")
if [ -n "$MESA_LOADER_DRIVER_OVERRIDE" ]; then if [ -n "$MESA_LOADER_DRIVER_OVERRIDE" ]; then
rm /install/lib/dri/!($MESA_LOADER_DRIVER_OVERRIDE)_dri.so rm /install/lib/dri/!($MESA_LOADER_DRIVER_OVERRIDE)_dri.so
fi fi
ls -1 /install/lib/dri/*_dri.so || true ls -1 /install/lib/dri/*_dri.so
if [ "$HWCI_FREQ_MAX" = "true" ]; then if [ "$HWCI_FREQ_MAX" = "true" ]; then
# Ensure initialization of the DRM device (needed by MSM) # Ensure initialization of the DRM device (needed by MSM)
@@ -158,9 +158,6 @@ if [ -x /capture-devcoredump.sh ]; then
BACKGROUND_PIDS="$! $BACKGROUND_PIDS" BACKGROUND_PIDS="$! $BACKGROUND_PIDS"
fi fi
ARCH=$(uname -m)
export VK_DRIVER_FILES="/install/share/vulkan/icd.d/${VK_DRIVER}_icd.$ARCH.json"
# If we want Xorg to be running for the test, then we start it up before the # If we want Xorg to be running for the test, then we start it up before the
# HWCI_TEST_SCRIPT because we need to use xinit to start X (otherwise # HWCI_TEST_SCRIPT because we need to use xinit to start X (otherwise
# without using -displayfd you can race with Xorg's startup), but xinit will eat # without using -displayfd you can race with Xorg's startup), but xinit will eat
@@ -168,6 +165,7 @@ export VK_DRIVER_FILES="/install/share/vulkan/icd.d/${VK_DRIVER}_icd.$ARCH.json"
if [ -n "$HWCI_START_XORG" ]; then if [ -n "$HWCI_START_XORG" ]; then
echo "touch /xorg-started; sleep 100000" > /xorg-script echo "touch /xorg-started; sleep 100000" > /xorg-script
env \ env \
VK_ICD_FILENAMES="/install/share/vulkan/icd.d/${VK_DRIVER}_icd.$(uname -m).json" \
xinit /bin/sh /xorg-script -- /usr/bin/Xorg -noreset -s 0 -dpms -logfile /Xorg.0.log & xinit /bin/sh /xorg-script -- /usr/bin/Xorg -noreset -s 0 -dpms -logfile /Xorg.0.log &
BACKGROUND_PIDS="$! $BACKGROUND_PIDS" BACKGROUND_PIDS="$! $BACKGROUND_PIDS"
@@ -194,6 +192,7 @@ if [ -n "$HWCI_START_WESTON" ]; then
mkdir -p /tmp/.X11-unix mkdir -p /tmp/.X11-unix
env \ env \
VK_ICD_FILENAMES="/install/share/vulkan/icd.d/${VK_DRIVER}_icd.$(uname -m).json" \
weston -Bheadless-backend.so --use-gl -Swayland-0 --xwayland --idle-time=0 & weston -Bheadless-backend.so --use-gl -Swayland-0 --xwayland --idle-time=0 &
BACKGROUND_PIDS="$! $BACKGROUND_PIDS" BACKGROUND_PIDS="$! $BACKGROUND_PIDS"
@@ -218,7 +217,7 @@ cleanup
# upload artifacts # upload artifacts
if [ -n "$S3_RESULTS_UPLOAD" ]; then if [ -n "$S3_RESULTS_UPLOAD" ]; then
tar --zstd -cf results.tar.zst results/; tar --zstd -cf results.tar.zst results/;
ci-fairy s3cp --token-file "${S3_JWT_FILE}" results.tar.zst https://"$S3_RESULTS_UPLOAD"/results.tar.zst; ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" results.tar.zst https://"$S3_RESULTS_UPLOAD"/results.tar.zst;
fi fi
# We still need to echo the hwci: mesa message, as some scripts rely on it, such # We still need to echo the hwci: mesa message, as some scripts rely on it, such

View File

@@ -18,7 +18,7 @@ DEPS=(
bash bash
bison bison
ccache ccache
"clang${LLVM_VERSION}-dev" clang16-dev
cmake cmake
clang-dev clang-dev
coreutils coreutils
@@ -29,13 +29,10 @@ DEPS=(
git git
gettext gettext
glslang glslang
graphviz
linux-headers linux-headers
"llvm${LLVM_VERSION}-static" llvm16-static
"llvm${LLVM_VERSION}-dev" llvm16-dev
meson meson
mold
musl-dev
expat-dev expat-dev
elfutils-dev elfutils-dev
libdrm-dev libdrm-dev
@@ -44,13 +41,8 @@ DEPS=(
libpciaccess-dev libpciaccess-dev
zlib-dev zlib-dev
python3-dev python3-dev
py3-clang
py3-cparser
py3-mako py3-mako
py3-packaging
py3-pip
py3-ply py3-ply
py3-yaml
vulkan-headers vulkan-headers
spirv-tools-dev spirv-tools-dev
util-macros util-macros
@@ -60,8 +52,6 @@ DEPS=(
apk --no-cache add "${DEPS[@]}" "${EPHEMERAL[@]}" apk --no-cache add "${DEPS[@]}" "${EPHEMERAL[@]}"
pip3 install --break-system-packages sphinx===5.1.1 hawkmoth===0.16.0
. .gitlab-ci/container/build-llvm-spirv.sh . .gitlab-ci/container/build-llvm-spirv.sh
. .gitlab-ci/container/build-libclc.sh . .gitlab-ci/container/build-libclc.sh

View File

@@ -1,12 +1,8 @@
#!/usr/bin/env bash #!/usr/bin/env bash
# When changing this file, you need to bump the following
# .gitlab-ci/image-tags.yml tags:
# KERNEL_ROOTFS_TAG
set -ex set -ex
ANGLE_REV="1409a05a81e3ccb279142433a2b987bc330f555b" ANGLE_REV="0518a3ff4d4e7e5b2ce8203358f719613a31c118"
# DEPOT tools # DEPOT tools
git clone --depth 1 https://chromium.googlesource.com/chromium/tools/depot_tools.git git clone --depth 1 https://chromium.googlesource.com/chromium/tools/depot_tools.git

View File

@@ -3,8 +3,8 @@
# When changing this file, you need to bump the following # When changing this file, you need to bump the following
# .gitlab-ci/image-tags.yml tags: # .gitlab-ci/image-tags.yml tags:
# DEBIAN_TEST_GL_TAG # DEBIAN_X86_64_TEST_GL_TAG
# DEBIAN_TEST_VK_TAG # DEBIAN_X86_64_TEST_VK_TAG
# KERNEL_ROOTFS_TAG # KERNEL_ROOTFS_TAG
set -ex set -ex

View File

@@ -1,20 +0,0 @@
#!/usr/bin/env bash
# shellcheck disable=SC2086 # we want word splitting
BINDGEN_VER=0.65.1
CBINDGEN_VER=0.26.0
# bindgen
RUSTFLAGS='-L native=/usr/local/lib' cargo install \
bindgen-cli --version ${BINDGEN_VER} \
--locked \
-j ${FDO_CI_CONCURRENT:-4} \
--root /usr/local
# cbindgen
RUSTFLAGS='-L native=/usr/local/lib' cargo install \
cbindgen --version ${CBINDGEN_VER} \
--locked \
-j ${FDO_CI_CONCURRENT:-4} \
--root /usr/local

View File

@@ -3,9 +3,9 @@
# When changing this file, you need to bump the following # When changing this file, you need to bump the following
# .gitlab-ci/image-tags.yml tags: # .gitlab-ci/image-tags.yml tags:
# DEBIAN_TEST_ANDROID_TAG # DEBIAN_X86_64_TEST_ANDROID_TAG
# DEBIAN_TEST_GL_TAG # DEBIAN_X86_64_TEST_GL_TAG
# DEBIAN_TEST_VK_TAG # DEBIAN_X86_64_TEST_VK_TAG
# KERNEL_ROOTFS_TAG # KERNEL_ROOTFS_TAG
set -ex set -ex

View File

@@ -3,9 +3,9 @@
# When changing this file, you need to bump the following # When changing this file, you need to bump the following
# .gitlab-ci/image-tags.yml tags: # .gitlab-ci/image-tags.yml tags:
# DEBIAN_TEST_ANDROID_TAG # DEBIAN_X86_64_TEST_ANDROID_TAG
# DEBIAN_TEST_GL_TAG # DEBIAN_X86_64_TEST_GL_TAG
# DEBIAN_TEST_VK_TAG # DEBIAN_X86_64_TEST_VK_TAG
# KERNEL_ROOTFS_TAG # KERNEL_ROOTFS_TAG
set -ex -o pipefail set -ex -o pipefail
@@ -16,9 +16,9 @@ set -ex -o pipefail
# - the GL release produces `glcts`, and # - the GL release produces `glcts`, and
# - the GLES release produces `deqp-gles*` and `deqp-egl` # - the GLES release produces `deqp-gles*` and `deqp-egl`
DEQP_VK_VERSION=1.3.8.2 DEQP_VK_VERSION=1.3.7.0
DEQP_GL_VERSION=4.6.4.1 DEQP_GL_VERSION=4.6.4.0
DEQP_GLES_VERSION=3.2.10.1 DEQP_GLES_VERSION=3.2.10.0
# Patches to VulkanCTS may come from commits in their repo (listed in # Patches to VulkanCTS may come from commits in their repo (listed in
# cts_commits_to_backport) or patch files stored in our repo (in the patch # cts_commits_to_backport) or patch files stored in our repo (in the patch
@@ -28,18 +28,28 @@ DEQP_GLES_VERSION=3.2.10.1
# shellcheck disable=SC2034 # shellcheck disable=SC2034
vk_cts_commits_to_backport=( vk_cts_commits_to_backport=(
# Fix more ASAN errors due to missing virtual destructors # Take multiview into account for task shader inv. stats
dd40bcfef1b4035ea55480b6fd4d884447120768 22aa3f4c59f6e1d4daebd5a8c9c05bce6cd3b63b
# Remove "unused shader stages" tests # Remove illegal mesh shader query tests
7dac86c6bbd15dec91d7d9a98cd6dd57c11092a7 2a87f7b25dc27188be0f0a003b2d7aef69d9002e
# Emit point size from "many indirect draws" test # Relax fragment shader invocations result verifications
771e56d1c4d03e073ddb7f1200ad6d57e0a0c979 0d8bf6a2715f95907e9cf86a86876ff1f26c66fe
# Fix several issues in dynamic rendering basic tests
c5453824b498c981c6ba42017d119f5de02a3e34
# Add setVisible for VulkanWindowDirectDrm
a8466bf6ea98f6cd6733849ad8081775318a3e3e
) )
# shellcheck disable=SC2034 # shellcheck disable=SC2034
vk_cts_patch_files=( vk_cts_patch_files=(
# Derivate subgroup fix
# https://github.com/KhronosGroup/VK-GL-CTS/pull/442
build-deqp-vk_Use-subgroups-helper-in-derivate-tests.patch
build-deqp-vk_Add-missing-subgroup-support-checks-for-linear-derivate-tests.patch
) )
if [ "${DEQP_TARGET}" = 'android' ]; then if [ "${DEQP_TARGET}" = 'android' ]; then
@@ -67,6 +77,8 @@ fi
# shellcheck disable=SC2034 # shellcheck disable=SC2034
# GLES builds also EGL # GLES builds also EGL
gles_cts_commits_to_backport=( gles_cts_commits_to_backport=(
# Implement support for the EGL_EXT_config_select_group extension
88ba9ac270db5be600b1ecacbc6d9db0c55d5be4
) )
# shellcheck disable=SC2034 # shellcheck disable=SC2034
@@ -206,7 +218,7 @@ if [ "${DEQP_TARGET}" != 'android' ]; then
if [ "${DEQP_API}" = 'VK' ]; then if [ "${DEQP_API}" = 'VK' ]; then
for mustpass in $(< /VK-GL-CTS/external/vulkancts/mustpass/main/vk-default.txt) ; do for mustpass in $(< /VK-GL-CTS/external/vulkancts/mustpass/main/vk-default.txt) ; do
cat /VK-GL-CTS/external/vulkancts/mustpass/main/$mustpass \ cat /VK-GL-CTS/external/vulkancts/mustpass/main/$mustpass \
>> /deqp/mustpass/vk-main.txt >> /deqp/mustpass/vk-master.txt
done done
fi fi
@@ -241,7 +253,7 @@ fi
# Remove other mustpass files, since we saved off the ones we wanted to conventient locations above. # Remove other mustpass files, since we saved off the ones we wanted to conventient locations above.
rm -rf /deqp/external/**/mustpass/ rm -rf /deqp/external/**/mustpass/
rm -rf /deqp/external/vulkancts/modules/vulkan/vk-main* rm -rf /deqp/external/vulkancts/modules/vulkan/vk-master*
rm -rf /deqp/external/vulkancts/modules/vulkan/vk-default rm -rf /deqp/external/vulkancts/modules/vulkan/vk-default
rm -rf /deqp/external/openglcts/modules/cts-runner rm -rf /deqp/external/openglcts/modules/cts-runner

View File

@@ -7,7 +7,7 @@
set -ex set -ex
git clone https://github.com/microsoft/DirectX-Headers -b v1.613.1 --depth 1 git clone https://github.com/microsoft/DirectX-Headers -b v1.611.0 --depth 1
pushd DirectX-Headers pushd DirectX-Headers
meson setup build --backend=ninja --buildtype=release -Dbuild-test=false $EXTRA_MESON_ARGS meson setup build --backend=ninja --buildtype=release -Dbuild-test=false $EXTRA_MESON_ARGS
meson install -C build meson install -C build

View File

@@ -2,7 +2,7 @@
# When changing this file, you need to bump the following # When changing this file, you need to bump the following
# .gitlab-ci/image-tags.yml tags: # .gitlab-ci/image-tags.yml tags:
# DEBIAN_TEST_VK_TAG # DEBIAN_X86_64_TEST_VK_TAG
# KERNEL_ROOTFS_TAG # KERNEL_ROOTFS_TAG
set -ex set -ex

View File

@@ -1,11 +1,10 @@
#!/usr/bin/env bash #!/usr/bin/env bash
# Script used for Android and Fedora builds (Debian builds get their libdrm version # Script used for Android and Fedora builds
# from https://gitlab.freedesktop.org/gfx-ci/ci-deb-repo - see PKG_REPO_REV)
# shellcheck disable=SC2086 # we want word splitting # shellcheck disable=SC2086 # we want word splitting
set -ex set -ex
export LIBDRM_VERSION=libdrm-2.4.122 export LIBDRM_VERSION=libdrm-2.4.119
curl -L -O --retry 4 -f --retry-all-errors --retry-delay 60 \ curl -L -O --retry 4 -f --retry-all-errors --retry-delay 60 \
https://dri.freedesktop.org/libdrm/"$LIBDRM_VERSION".tar.xz https://dri.freedesktop.org/libdrm/"$LIBDRM_VERSION".tar.xz

View File

@@ -4,13 +4,12 @@ set -ex
# When changing this file, you need to bump the following # When changing this file, you need to bump the following
# .gitlab-ci/image-tags.yml tags: # .gitlab-ci/image-tags.yml tags:
# ALPINE_X86_64_BUILD_TAG
# DEBIAN_BASE_TAG # DEBIAN_BASE_TAG
# DEBIAN_BUILD_TAG # DEBIAN_BUILD_TAG
# FEDORA_X86_64_BUILD_TAG # FEDORA_X86_64_BUILD_TAG
# KERNEL_ROOTFS_TAG # KERNEL_ROOTFS_TAG
MOLD_VERSION="2.32.0" MOLD_VERSION="2.4.1"
git clone -b v"$MOLD_VERSION" --single-branch --depth 1 https://github.com/rui314/mold.git git clone -b v"$MOLD_VERSION" --single-branch --depth 1 https://github.com/rui314/mold.git
pushd mold pushd mold

View File

@@ -2,7 +2,7 @@
# When changing this file, you need to bump the following # When changing this file, you need to bump the following
# .gitlab-ci/image-tags.yml tags: # .gitlab-ci/image-tags.yml tags:
# DEBIAN_TEST_GL_TAG # DEBIAN_X86_64_TEST_GL_TAG
set -ex -o pipefail set -ex -o pipefail

View File

@@ -4,11 +4,11 @@ set -ex
# When changing this file, you need to bump the following # When changing this file, you need to bump the following
# .gitlab-ci/image-tags.yml tags: # .gitlab-ci/image-tags.yml tags:
# DEBIAN_TEST_GL_TAG # DEBIAN_X86_64_TEST_GL_TAG
# DEBIAN_TEST_VK_TAG # DEBIAN_X86_64_TEST_VK_TAG
# KERNEL_ROOTFS_TAG # KERNEL_ROOTFS_TAG
REV="582f5490a124c27c26d3a452fee03a8c85fa9a5c" REV="1e631479c0b477006dd7561c55e06269d2878d8d"
git clone https://gitlab.freedesktop.org/mesa/piglit.git --single-branch --no-checkout /piglit git clone https://gitlab.freedesktop.org/mesa/piglit.git --single-branch --no-checkout /piglit
pushd /piglit pushd /piglit

View File

@@ -2,11 +2,11 @@
# When changing this file, you need to bump the following # When changing this file, you need to bump the following
# .gitlab-ci/image-tags.yml tags: # .gitlab-ci/image-tags.yml tags:
# DEBIAN_TEST_VK_TAG # DEBIAN_X86_64_TEST_VK_TAG
# KERNEL_ROOTFS_TAG # KERNEL_ROOTFS_TAG
set -ex set -ex
VKD3D_PROTON_COMMIT="3d46c082906c77544385d10801e4c0184f0385d9" VKD3D_PROTON_COMMIT="a0ccc383937903f4ca0997ce53e41ccce7f2f2ec"
VKD3D_PROTON_DST_DIR="/vkd3d-proton-tests" VKD3D_PROTON_DST_DIR="/vkd3d-proton-tests"
VKD3D_PROTON_SRC_DIR="/vkd3d-proton-src" VKD3D_PROTON_SRC_DIR="/vkd3d-proton-src"

View File

@@ -2,12 +2,12 @@
# When changing this file, you need to bump the following # When changing this file, you need to bump the following
# .gitlab-ci/image-tags.yml tags: # .gitlab-ci/image-tags.yml tags:
# DEBIAN_TEST_GL_TAG # DEBIAN_X86_64_TEST_GL_TAG
# KERNEL_ROOTFS_TAG # KERNEL_ROOTFS_TAG:
set -ex set -ex
VALIDATION_TAG="v1.3.289" VALIDATION_TAG="snapshot-2024wk06"
git clone -b "$VALIDATION_TAG" --single-branch --depth 1 https://github.com/KhronosGroup/Vulkan-ValidationLayers.git git clone -b "$VALIDATION_TAG" --single-branch --depth 1 https://github.com/KhronosGroup/Vulkan-ValidationLayers.git
pushd Vulkan-ValidationLayers pushd Vulkan-ValidationLayers

View File

@@ -3,17 +3,8 @@
set -ex set -ex
# When changing this file, you need to bump the following
# .gitlab-ci/image-tags.yml tags:
# DEBIAN_BUILD_TAG
# DEBIAN_TEST_ANDROID_TAG
# DEBIAN_TEST_GL_TAG
# DEBIAN_TEST_VK_TAG
# FEDORA_X86_64_BUILD_TAG
# KERNEL_ROOTFS_TAG
export LIBWAYLAND_VERSION="1.21.0" export LIBWAYLAND_VERSION="1.21.0"
export WAYLAND_PROTOCOLS_VERSION="1.34" export WAYLAND_PROTOCOLS_VERSION="1.31"
git clone https://gitlab.freedesktop.org/wayland/wayland git clone https://gitlab.freedesktop.org/wayland/wayland
cd wayland cd wayland

View File

@@ -22,7 +22,7 @@ cpp = ['ccache', '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch2}${sdk_v
c_ld = 'lld' c_ld = 'lld'
cpp_ld = 'lld' cpp_ld = 'lld'
strip = '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-strip' strip = '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-strip'
pkg-config = ['/usr/bin/pkgconf'] pkgconfig = ['/usr/bin/pkgconf']
[host_machine] [host_machine]
system = 'android' system = 'android'

View File

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
set -e
arch=armhf . .gitlab-ci/container/debian/arm_test.sh

View File

@@ -26,7 +26,6 @@ DEPS=(
ccache ccache
cmake cmake
curl curl
"clang-${LLVM_VERSION}"
fastboot fastboot
flatbuffers-compiler flatbuffers-compiler
flex flex
@@ -34,15 +33,11 @@ DEPS=(
git git
glslang-tools glslang-tools
kmod kmod
"libclang-${LLVM_VERSION}-dev"
"libclang-cpp${LLVM_VERSION}-dev"
"libclang-common-${LLVM_VERSION}-dev"
libasan8 libasan8
libdrm-dev libdrm-dev
libelf-dev libelf-dev
libexpat1-dev libexpat1-dev
libflatbuffers-dev libflatbuffers-dev
"libllvm${LLVM_VERSION}"
libvulkan-dev libvulkan-dev
libx11-dev libx11-dev
libx11-xcb-dev libx11-xcb-dev
@@ -69,7 +64,6 @@ DEPS=(
python3-mako python3-mako
python3-pil python3-pil
python3-pip python3-pip
python3-pycparser
python3-requests python3-requests
python3-setuptools python3-setuptools
u-boot-tools u-boot-tools
@@ -91,16 +85,6 @@ arch=armhf
. .gitlab-ci/container/build-wayland.sh . .gitlab-ci/container/build-wayland.sh
. .gitlab-ci/container/build-llvm-spirv.sh
. .gitlab-ci/container/build-libclc.sh
. .gitlab-ci/container/install-meson.sh
. .gitlab-ci/container/build-rust.sh
. .gitlab-ci/container/build-bindgen.sh
apt-get purge -y "${EPHEMERAL[@]}" apt-get purge -y "${EPHEMERAL[@]}"
. .gitlab-ci/container/container_post_build.sh . .gitlab-ci/container/container_post_build.sh

View File

@@ -1,4 +0,0 @@
#!/usr/bin/env bash
DEBIAN_ARCH=arm64 \
. .gitlab-ci/container/debian/test-base.sh

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
. .gitlab-ci/container/debian/test-gl.sh

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
. .gitlab-ci/container/debian/test-vk.sh

View File

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
set -e
arch=arm64 . .gitlab-ci/container/debian/arm_test.sh

View File

@@ -1,5 +0,0 @@
#!/usr/bin/env bash
set -e
arch=armhf . .gitlab-ci/container/debian/baremetal_arm_test.sh

View File

@@ -1,5 +0,0 @@
#!/usr/bin/env bash
set -e
arch=arm64 . .gitlab-ci/container/debian/baremetal_arm_test.sh

View File

@@ -1,160 +0,0 @@
#!/usr/bin/env bash
# shellcheck disable=SC2086 # we want word splitting
# When changing this file, you need to bump the following
# .gitlab-ci/image-tags.yml tags:
# DEBIAN_BASE_TAG
set -e
set -o xtrace
export DEBIAN_FRONTEND=noninteractive
apt-get install -y ca-certificates gnupg2 software-properties-common
sed -i -e 's/http:\/\/deb/https:\/\/deb/g' /etc/apt/sources.list.d/*
echo "deb [trusted=yes] https://gitlab.freedesktop.org/gfx-ci/ci-deb-repo/-/raw/${PKG_REPO_REV}/ ${FDO_DISTRIBUTION_VERSION%-*} main" | tee /etc/apt/sources.list.d/gfx-ci_.list
export LLVM_VERSION="${LLVM_VERSION:=15}"
# Ephemeral packages (installed for this script and removed again at the end)
EPHEMERAL=(
autoconf
automake
bc
bison
bzip2
ccache
cmake
"clang-${LLVM_VERSION}"
dpkg-dev
flex
glslang-tools
g++
libasound2-dev
libcap-dev
"libclang-cpp${LLVM_VERSION}-dev"
libdrm-dev
libegl-dev
libelf-dev
libepoxy-dev
libgbm-dev
libpciaccess-dev
libssl-dev
libvulkan-dev
libwayland-dev
libx11-xcb-dev
libxext-dev
"llvm-${LLVM_VERSION}-dev"
make
meson
openssh-server
patch
pkgconf
protobuf-compiler
python3-dev
python3-pip
python3-setuptools
python3-wheel
spirv-tools
wayland-protocols
xz-utils
)
DEPS=(
apt-utils
curl
git
git-lfs
inetutils-syslogd
iptables
jq
libasan8
libdrm2
libexpat1
"libllvm${LLVM_VERSION}"
liblz4-1
libpng16-16
libpython3.11
libvulkan1
libwayland-client0
libwayland-server0
libxcb-ewmh2
libxcb-randr0
libxcb-xfixes0
libxkbcommon0
libxrandr2
libxrender1
python3-mako
python3-numpy
python3-packaging
python3-pil
python3-requests
python3-six
python3-yaml
socat
vulkan-tools
waffle-utils
xauth
xvfb
zlib1g
zstd
)
apt-get update
apt-get dist-upgrade -y
apt-get install --purge -y \
sysvinit-core libelogind0
apt-get install -y --no-remove "${DEPS[@]}"
apt-get install -y --no-install-recommends "${EPHEMERAL[@]}"
. .gitlab-ci/container/container_pre_build.sh
############### Download prebuilt kernel
if [ "$DEBIAN_ARCH" = amd64 ]; then
export KERNEL_IMAGE_NAME=bzImage
mkdir -p /lava-files/
. .gitlab-ci/container/download-prebuilt-kernel.sh
fi
# Needed for ci-fairy, this revision is able to upload files to MinIO
# and doesn't depend on git
pip3 install --break-system-packages git+http://gitlab.freedesktop.org/freedesktop/ci-templates@ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2
# Needed for manipulation with traces yaml files.
pip3 install --break-system-packages yq
. .gitlab-ci/container/build-mold.sh
############### Build LLVM-SPIRV translator
. .gitlab-ci/container/build-llvm-spirv.sh
############### Build libclc
. .gitlab-ci/container/build-libclc.sh
############### Build Wayland
. .gitlab-ci/container/build-wayland.sh
############### Build Crosvm
. .gitlab-ci/container/build-rust.sh
. .gitlab-ci/container/build-crosvm.sh
############### Build dEQP runner
. .gitlab-ci/container/build-deqp-runner.sh
apt-get purge -y "${EPHEMERAL[@]}"
rm -rf /root/.rustup
. .gitlab-ci/container/container_post_build.sh

View File

@@ -1,124 +0,0 @@
#!/usr/bin/env bash
# shellcheck disable=SC2086 # we want word splitting
set -e
set -o xtrace
export DEBIAN_FRONTEND=noninteractive
export LLVM_VERSION="${LLVM_VERSION:=15}"
apt-get install -y libelogind0 # this interfere with systemd deps, install separately
# Ephemeral packages (installed for this script and removed again at the end)
EPHEMERAL=(
bzip2
ccache
"clang-${LLVM_VERSION}"
cmake
dpkg-dev
g++
glslang-tools
libasound2-dev
libcap-dev
"libclang-cpp${LLVM_VERSION}-dev"
libdrm-dev
libgles2-mesa-dev
libgtest-dev
libpciaccess-dev
libpng-dev
libudev-dev
libvulkan-dev
libwaffle-dev
libwayland-dev
libx11-xcb-dev
libxcb-dri2-0-dev
libxcb-dri3-dev
libxcb-present-dev
libxfixes-dev
libxkbcommon-dev
libxrandr-dev
libxrender-dev
"llvm-${LLVM_VERSION}-dev"
make
meson
ocl-icd-opencl-dev
patch
pkgconf
python3-distutils
xz-utils
)
DEPS=(
clinfo
iptables
kmod
"libclang-common-${LLVM_VERSION}-dev"
"libclang-cpp${LLVM_VERSION}"
libcap2
libegl1
libepoxy0
libfdt1
libxcb-shm0
ocl-icd-libopencl1
python3-lxml
python3-renderdoc
python3-simplejson
spirv-tools
sysvinit-core
weston
xwayland
)
apt-get update
apt-get install -y --no-remove "${DEPS[@]}" "${EPHEMERAL[@]}" \
$EXTRA_LOCAL_PACKAGES
. .gitlab-ci/container/container_pre_build.sh
############### Build piglit
PIGLIT_OPTS="-DPIGLIT_USE_WAFFLE=ON
-DPIGLIT_USE_GBM=ON
-DPIGLIT_USE_WAYLAND=ON
-DPIGLIT_USE_X11=ON
-DPIGLIT_BUILD_GLX_TESTS=ON
-DPIGLIT_BUILD_EGL_TESTS=ON
-DPIGLIT_BUILD_WGL_TESTS=OFF
-DPIGLIT_BUILD_GL_TESTS=ON
-DPIGLIT_BUILD_GLES1_TESTS=ON
-DPIGLIT_BUILD_GLES2_TESTS=ON
-DPIGLIT_BUILD_GLES3_TESTS=ON
-DPIGLIT_BUILD_CL_TESTS=ON
-DPIGLIT_BUILD_VK_TESTS=ON
-DPIGLIT_BUILD_DMA_BUF_TESTS=ON" \
. .gitlab-ci/container/build-piglit.sh
############### Build dEQP GL
DEQP_API=GL \
DEQP_TARGET=surfaceless \
. .gitlab-ci/container/build-deqp.sh
DEQP_API=GLES \
DEQP_TARGET=surfaceless \
. .gitlab-ci/container/build-deqp.sh
############### Build apitrace
. .gitlab-ci/container/build-apitrace.sh
############### Build validation layer for zink
. .gitlab-ci/container/build-vulkan-validation.sh
############### Build nine tests
. .gitlab-ci/container/build-ninetests.sh
############### Uninstall the build software
apt-get purge -y "${EPHEMERAL[@]}"
. .gitlab-ci/container/container_post_build.sh

View File

@@ -1,145 +0,0 @@
#!/usr/bin/env bash
# The relative paths in this file only become valid at runtime.
# shellcheck disable=SC1091
# shellcheck disable=SC2086 # we want word splitting
set -e
set -o xtrace
export DEBIAN_FRONTEND=noninteractive
apt-get install -y libelogind0 # this interferes with systemd deps; install separately
# Ephemeral packages (installed for this script and removed again at the end)
EPHEMERAL=(
ccache
cmake
dpkg-dev
g++
glslang-tools
libexpat1-dev
gnupg2
libdrm-dev
libgbm-dev
libgles2-mesa-dev
liblz4-dev
libpciaccess-dev
libudev-dev
libvulkan-dev
libwaffle-dev
libx11-xcb-dev
libxcb-dri2-0-dev
libxcb-ewmh-dev
libxcb-keysyms1-dev
libxkbcommon-dev
libxrandr-dev
libxrender-dev
libzstd-dev
meson
p7zip
patch
pkgconf
python3-dev
python3-distutils
python3-pip
python3-setuptools
python3-wheel
software-properties-common
wine64-tools
xz-utils
)
DEPS=(
curl
libepoxy0
libxcb-shm0
pciutils
python3-lxml
python3-simplejson
sysvinit-core
weston
xwayland
wine
wine64
xinit
xserver-xorg-video-amdgpu
xserver-xorg-video-ati
)
apt-get update
apt-get install -y --no-remove --no-install-recommends \
"${DEPS[@]}" "${EPHEMERAL[@]}"
############### Install DXVK
. .gitlab-ci/container/setup-wine.sh "/dxvk-wine64"
. .gitlab-ci/container/install-wine-dxvk.sh
############### Install apitrace binaries for wine
. .gitlab-ci/container/install-wine-apitrace.sh
# Add the apitrace path to the registry
wine \
reg add "HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Session Manager\Environment" \
/v Path \
/t REG_EXPAND_SZ \
/d "C:\windows\system32;C:\windows;C:\windows\system32\wbem;Z:\apitrace-msvc-win64\bin" \
/f
############### Building ...
. .gitlab-ci/container/container_pre_build.sh
############### Build parallel-deqp-runner's hang-detection tool
. .gitlab-ci/container/build-hang-detection.sh
############### Build piglit replayer
# We don't run any _piglit_ Vulkan tests in the containers.
PIGLIT_OPTS="-DPIGLIT_USE_WAFFLE=ON
-DPIGLIT_USE_GBM=OFF
-DPIGLIT_USE_WAYLAND=OFF
-DPIGLIT_USE_X11=OFF
-DPIGLIT_BUILD_GLX_TESTS=OFF
-DPIGLIT_BUILD_EGL_TESTS=OFF
-DPIGLIT_BUILD_WGL_TESTS=OFF
-DPIGLIT_BUILD_GL_TESTS=OFF
-DPIGLIT_BUILD_GLES1_TESTS=OFF
-DPIGLIT_BUILD_GLES2_TESTS=OFF
-DPIGLIT_BUILD_GLES3_TESTS=OFF
-DPIGLIT_BUILD_CL_TESTS=OFF
-DPIGLIT_BUILD_VK_TESTS=OFF
-DPIGLIT_BUILD_DMA_BUF_TESTS=OFF" \
PIGLIT_BUILD_TARGETS="piglit_replayer" \
. .gitlab-ci/container/build-piglit.sh
############### Build Fossilize
. .gitlab-ci/container/build-fossilize.sh
############### Build dEQP VK
DEQP_API=VK \
DEQP_TARGET=default \
. .gitlab-ci/container/build-deqp.sh
############### Build apitrace
. .gitlab-ci/container/build-apitrace.sh
############### Build gfxreconstruct
. .gitlab-ci/container/build-gfxreconstruct.sh
############### Build VKD3D-Proton
. .gitlab-ci/container/setup-wine.sh "/vkd3d-proton-wine64"
. .gitlab-ci/container/build-vkd3d-proton.sh
############### Uninstall the build software
apt-get purge -y "${EPHEMERAL[@]}"
. .gitlab-ci/container/container_post_build.sh


@@ -28,7 +28,6 @@ DEPS=(
"clang-${LLVM_VERSION}" "clang-${LLVM_VERSION}"
"clang-format-${LLVM_VERSION}" "clang-format-${LLVM_VERSION}"
dpkg-cross dpkg-cross
dpkg-dev
findutils findutils
flex flex
flatbuffers-compiler flatbuffers-compiler
@@ -71,10 +70,8 @@ DEPS=(
python3-pil python3-pil
python3-pip python3-pip
python3-ply python3-ply
python3-pycparser
python3-requests python3-requests
python3-setuptools python3-setuptools
python3-yaml
qemu-user qemu-user
valgrind valgrind
x11proto-dri2-dev x11proto-dri2-dev
@@ -97,7 +94,8 @@ apt-get install -y --no-remove "${DEPS[@]}" "${EPHEMERAL[@]}" \
# Needed for ci-fairy, this revision is able to upload files to S3 # Needed for ci-fairy, this revision is able to upload files to S3
pip3 install --break-system-packages git+http://gitlab.freedesktop.org/freedesktop/ci-templates@ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2 pip3 install --break-system-packages git+http://gitlab.freedesktop.org/freedesktop/ci-templates@ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2
. .gitlab-ci/container/install-meson.sh # We need at least 1.3.1 for rusticl
pip3 install --break-system-packages 'meson==1.3.1'
. .gitlab-ci/container/build-rust.sh . .gitlab-ci/container/build-rust.sh


@@ -81,10 +81,15 @@ rm -rf $XORGMACROS_VERSION
. .gitlab-ci/container/build-directx-headers.sh . .gitlab-ci/container/build-directx-headers.sh
. .gitlab-ci/container/build-bindgen.sh
python3 -m pip install --break-system-packages -r .gitlab-ci/lava/requirements.txt python3 -m pip install --break-system-packages -r .gitlab-ci/lava/requirements.txt
# install bindgen
RUSTFLAGS='-L native=/usr/local/lib' cargo install \
bindgen-cli --version 0.62.0 \
--locked \
-j ${FDO_CI_CONCURRENT:-4} \
--root /usr/local
############### Uninstall the build software ############### Uninstall the build software
apt-get purge -y "${EPHEMERAL[@]}" apt-get purge -y "${EPHEMERAL[@]}"


@@ -1,4 +1,160 @@
#!/usr/bin/env bash #!/usr/bin/env bash
# shellcheck disable=SC2086 # we want word splitting
DEBIAN_ARCH=amd64 \ # When changing this file, you need to bump the following
. .gitlab-ci/container/debian/test-base.sh # .gitlab-ci/image-tags.yml tags:
# DEBIAN_BASE_TAG
set -e
set -o xtrace
export DEBIAN_FRONTEND=noninteractive
apt-get install -y ca-certificates gnupg2 software-properties-common
sed -i -e 's/http:\/\/deb/https:\/\/deb/g' /etc/apt/sources.list.d/*
echo "deb [trusted=yes] https://gitlab.freedesktop.org/gfx-ci/ci-deb-repo/-/raw/${PKG_REPO_REV}/ ${FDO_DISTRIBUTION_VERSION%-*} main" | tee /etc/apt/sources.list.d/gfx-ci_.list
export LLVM_VERSION="${LLVM_VERSION:=15}"
# Ephemeral packages (installed for this script and removed again at the end)
EPHEMERAL=(
autoconf
automake
bc
bison
bzip2
ccache
cmake
"clang-${LLVM_VERSION}"
flex
glslang-tools
g++
libasound2-dev
libcap-dev
"libclang-cpp${LLVM_VERSION}-dev"
libdrm-dev
libegl-dev
libelf-dev
libepoxy-dev
libgbm-dev
libpciaccess-dev
libssl-dev
libvulkan-dev
libwayland-dev
libx11-xcb-dev
libxext-dev
"llvm-${LLVM_VERSION}-dev"
make
meson
openssh-server
patch
pkgconf
protobuf-compiler
python3-dev
python3-pip
python3-setuptools
python3-wheel
spirv-tools
wayland-protocols
xz-utils
)
DEPS=(
apt-utils
curl
git
git-lfs
inetutils-syslogd
iptables
jq
libasan8
libdrm2
libexpat1
"libllvm${LLVM_VERSION}"
liblz4-1
libpng16-16
libpython3.11
libvulkan1
libwayland-client0
libwayland-server0
libxcb-ewmh2
libxcb-randr0
libxcb-xfixes0
libxkbcommon0
libxrandr2
libxrender1
python3-mako
python3-numpy
python3-packaging
python3-pil
python3-requests
python3-six
python3-yaml
socat
vulkan-tools
waffle-utils
xauth
xvfb
zlib1g
zstd
)
apt-get update
apt-get dist-upgrade -y
apt-get install --purge -y \
sysvinit-core libelogind0
apt-get install -y --no-remove "${DEPS[@]}"
apt-get install -y --no-install-recommends "${EPHEMERAL[@]}"
. .gitlab-ci/container/container_pre_build.sh
############### Build kernel
export DEFCONFIG="arch/x86/configs/x86_64_defconfig"
export KERNEL_IMAGE_NAME=bzImage
export KERNEL_ARCH=x86_64
export DEBIAN_ARCH=amd64
mkdir -p /lava-files/
. .gitlab-ci/container/build-kernel.sh
# Needed for ci-fairy; this revision is able to upload files to MinIO
# and doesn't depend on git
pip3 install --break-system-packages git+http://gitlab.freedesktop.org/freedesktop/ci-templates@ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2
# Needed for manipulating the traces yaml files.
pip3 install --break-system-packages yq
. .gitlab-ci/container/build-mold.sh
############### Build LLVM-SPIRV translator
. .gitlab-ci/container/build-llvm-spirv.sh
############### Build libclc
. .gitlab-ci/container/build-libclc.sh
############### Build Wayland
. .gitlab-ci/container/build-wayland.sh
############### Build Crosvm
. .gitlab-ci/container/build-rust.sh
. .gitlab-ci/container/build-crosvm.sh
############### Build dEQP runner
. .gitlab-ci/container/build-deqp-runner.sh
apt-get purge -y "${EPHEMERAL[@]}"
rm -rf /root/.rustup
. .gitlab-ci/container/container_post_build.sh


@@ -1,3 +1,109 @@
#!/usr/bin/env bash #!/usr/bin/env bash
# shellcheck disable=SC2086 # we want word splitting
. .gitlab-ci/container/debian/test-gl.sh set -e
set -o xtrace
export DEBIAN_FRONTEND=noninteractive
export LLVM_VERSION="${LLVM_VERSION:=15}"
apt-get install -y libelogind0 # this interferes with systemd deps; install separately
# Ephemeral packages (installed for this script and removed again at the end)
EPHEMERAL=(
bzip2
ccache
"clang-${LLVM_VERSION}"
cmake
g++
glslang-tools
libasound2-dev
libcap-dev
"libclang-cpp${LLVM_VERSION}-dev"
libdrm-dev
libgles2-mesa-dev
libgtest-dev
libpciaccess-dev
libpng-dev
libudev-dev
libvulkan-dev
libwaffle-dev
libwayland-dev
libx11-xcb-dev
libxcb-dri2-0-dev
libxcb-dri3-dev
libxcb-present-dev
libxfixes-dev
libxkbcommon-dev
libxrandr-dev
libxrender-dev
"llvm-${LLVM_VERSION}-dev"
make
meson
ocl-icd-opencl-dev
patch
pkgconf
python3-distutils
xz-utils
)
DEPS=(
clinfo
iptables
kmod
"libclang-common-${LLVM_VERSION}-dev"
"libclang-cpp${LLVM_VERSION}"
libcap2
libegl1
libepoxy0
libfdt1
libxcb-shm0
ocl-icd-libopencl1
python3-lxml
python3-renderdoc
python3-simplejson
spirv-tools
sysvinit-core
weston
xwayland
)
apt-get update
apt-get install -y --no-remove "${DEPS[@]}" "${EPHEMERAL[@]}" \
$EXTRA_LOCAL_PACKAGES
. .gitlab-ci/container/container_pre_build.sh
############### Build piglit
PIGLIT_OPTS="-DPIGLIT_BUILD_GLX_TESTS=ON -DPIGLIT_BUILD_CL_TESTS=ON -DPIGLIT_BUILD_DMA_BUF_TESTS=ON" . .gitlab-ci/container/build-piglit.sh
############### Build dEQP GL
DEQP_API=GL \
DEQP_TARGET=surfaceless \
. .gitlab-ci/container/build-deqp.sh
DEQP_API=GLES \
DEQP_TARGET=surfaceless \
. .gitlab-ci/container/build-deqp.sh
############### Build apitrace
. .gitlab-ci/container/build-apitrace.sh
############### Build validation layer for zink
. .gitlab-ci/container/build-vulkan-validation.sh
############### Build nine tests
. .gitlab-ci/container/build-ninetests.sh
############### Uninstall the build software
apt-get purge -y "${EPHEMERAL[@]}"
. .gitlab-ci/container/container_post_build.sh


@@ -1,3 +1,128 @@
#!/usr/bin/env bash #!/usr/bin/env bash
# The relative paths in this file only become valid at runtime.
# shellcheck disable=SC1091
# shellcheck disable=SC2086 # we want word splitting
. .gitlab-ci/container/debian/test-vk.sh set -e
set -o xtrace
export DEBIAN_FRONTEND=noninteractive
apt-get install -y libelogind0 # this interferes with systemd deps; install separately
# Ephemeral packages (installed for this script and removed again at the end)
EPHEMERAL=(
ccache
cmake
g++
glslang-tools
libexpat1-dev
gnupg2
libdrm-dev
libgbm-dev
libgles2-mesa-dev
liblz4-dev
libpciaccess-dev
libudev-dev
libvulkan-dev
libwaffle-dev
libx11-xcb-dev
libxcb-ewmh-dev
libxcb-keysyms1-dev
libxkbcommon-dev
libxrandr-dev
libxrender-dev
libzstd-dev
meson
p7zip
patch
pkgconf
python3-dev
python3-distutils
python3-pip
python3-setuptools
python3-wheel
software-properties-common
wine64-tools
xz-utils
)
DEPS=(
curl
libepoxy0
libxcb-shm0
pciutils
python3-lxml
python3-simplejson
sysvinit-core
weston
xwayland
wine
wine64
xinit
xserver-xorg-video-amdgpu
xserver-xorg-video-ati
)
apt-get update
apt-get install -y --no-remove --no-install-recommends \
"${DEPS[@]}" "${EPHEMERAL[@]}"
############### Install DXVK
. .gitlab-ci/container/setup-wine.sh "/dxvk-wine64"
. .gitlab-ci/container/install-wine-dxvk.sh
############### Install apitrace binaries for wine
. .gitlab-ci/container/install-wine-apitrace.sh
# Add the apitrace path to the registry
wine \
reg add "HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Session Manager\Environment" \
/v Path \
/t REG_EXPAND_SZ \
/d "C:\windows\system32;C:\windows;C:\windows\system32\wbem;Z:\apitrace-msvc-win64\bin" \
/f
############### Building ...
. .gitlab-ci/container/container_pre_build.sh
############### Build parallel-deqp-runner's hang-detection tool
. .gitlab-ci/container/build-hang-detection.sh
############### Build piglit replayer
PIGLIT_BUILD_TARGETS="piglit_replayer" . .gitlab-ci/container/build-piglit.sh
############### Build Fossilize
. .gitlab-ci/container/build-fossilize.sh
############### Build dEQP VK
DEQP_API=VK \
DEQP_TARGET=default \
. .gitlab-ci/container/build-deqp.sh
############### Build apitrace
. .gitlab-ci/container/build-apitrace.sh
############### Build gfxreconstruct
. .gitlab-ci/container/build-gfxreconstruct.sh
############### Build VKD3D-Proton
. .gitlab-ci/container/setup-wine.sh "/vkd3d-proton-wine64"
. .gitlab-ci/container/build-vkd3d-proton.sh
############### Uninstall the build software
apt-get purge -y "${EPHEMERAL[@]}"
. .gitlab-ci/container/container_post_build.sh


@@ -27,7 +27,6 @@ EPHEMERAL=(
DEPS=( DEPS=(
bindgen bindgen
bison bison
cbindgen
ccache ccache
clang-devel clang-devel
flex flex
@@ -77,8 +76,6 @@ DEPS=(
python3-devel python3-devel
python3-mako python3-mako
python3-ply python3-ply
python3-pycparser
python3-yaml
rust-packaging rust-packaging
vulkan-headers vulkan-headers
spirv-tools-devel spirv-tools-devel
@@ -102,7 +99,8 @@ tar -xvf $XORGMACROS_VERSION.tar.bz2 && rm $XORGMACROS_VERSION.tar.bz2
cd $XORGMACROS_VERSION; ./configure; make install; cd .. cd $XORGMACROS_VERSION; ./configure; make install; cd ..
rm -rf $XORGMACROS_VERSION rm -rf $XORGMACROS_VERSION
. .gitlab-ci/container/install-meson.sh # We need at least 1.3.1 for rusticl
pip install meson==1.3.1
. .gitlab-ci/container/build-mold.sh . .gitlab-ci/container/build-mold.sh


@@ -40,16 +40,10 @@
# repository's container registry, so that the image from the main # repository's container registry, so that the image from the main
# repository's registry will be used there as well. # repository's registry will be used there as well.
.debian-container-version: .debian-container:
variables: variables:
FDO_DISTRIBUTION_VERSION: bookworm-slim FDO_DISTRIBUTION_VERSION: bookworm-slim
.debian-container:
extends:
- .fdo.container-build@debian
- .container
- .debian-container-version
.container: .container:
stage: container stage: container
extends: extends:
@@ -69,6 +63,8 @@
# Debian based x86_64 build image base # Debian based x86_64 build image base
debian/x86_64_build-base: debian/x86_64_build-base:
extends: extends:
- .fdo.container-build@debian
- .container
- .debian-container - .debian-container
variables: variables:
MESA_IMAGE_TAG: &debian-x86_64_build-base "${DEBIAN_BASE_TAG}--${PKG_REPO_REV}" MESA_IMAGE_TAG: &debian-x86_64_build-base "${DEBIAN_BASE_TAG}--${PKG_REPO_REV}"
@@ -80,7 +76,7 @@ debian/x86_64_build-base:
.use-debian/x86_64_build-base: .use-debian/x86_64_build-base:
extends: extends:
- .fdo.container-build@debian - .fdo.container-build@debian
- .debian-container-version - .debian-container
- .use-base-image - .use-base-image
variables: variables:
MESA_BASE_IMAGE: ${DEBIAN_X86_64_BUILD_BASE_IMAGE} MESA_BASE_IMAGE: ${DEBIAN_X86_64_BUILD_BASE_IMAGE}
@@ -182,15 +178,14 @@ debian/android_build:
# Debian based x86_64 test image base # Debian based x86_64 test image base
debian/x86_64_test-base: debian/x86_64_test-base:
extends: extends: debian/x86_64_build-base
- .debian-container
variables: variables:
MESA_IMAGE_TAG: &debian-x86_64_test-base "${DEBIAN_BASE_TAG}--${PKG_REPO_REV}--${KERNEL_TAG}" MESA_IMAGE_TAG: &debian-x86_64_test-base "${DEBIAN_BASE_TAG}--${PKG_REPO_REV}--${KERNEL_TAG}"
.use-debian/x86_64_test-base: .use-debian/x86_64_test-base:
extends: extends:
- .fdo.container-build@debian - .fdo.container-build@debian
- .debian-container-version - .debian-container
- .use-base-image - .use-base-image
variables: variables:
MESA_BASE_IMAGE: ${DEBIAN_X86_64_TEST_BASE_IMAGE} MESA_BASE_IMAGE: ${DEBIAN_X86_64_TEST_BASE_IMAGE}
@@ -198,33 +193,11 @@ debian/x86_64_test-base:
needs: needs:
- debian/x86_64_test-base - debian/x86_64_test-base
# Debian based aarch64 test image base
debian/arm64_test-base:
tags:
- aarch64
extends:
- .debian-container
variables:
MESA_IMAGE_TAG: &debian-arm64_test-base "${DEBIAN_BASE_TAG}--${PKG_REPO_REV}"
.use-debian/arm64_test-base:
tags:
- aarch64
extends:
- .fdo.container-build@debian
- .debian-container-version
- .use-base-image
variables:
MESA_BASE_IMAGE: ${DEBIAN_ARM64_TEST_BASE_IMAGE}
MESA_BASE_TAG: *debian-arm64_test-base
needs:
- debian/arm64_test-base
# Debian based x86_64 test image for GL # Debian based x86_64 test image for GL
debian/x86_64_test-gl: debian/x86_64_test-gl:
extends: .use-debian/x86_64_test-base extends: .use-debian/x86_64_test-base
variables: variables:
MESA_IMAGE_TAG: &debian-x86_64_test-gl ${DEBIAN_TEST_GL_TAG} MESA_IMAGE_TAG: &debian-x86_64_test-gl ${DEBIAN_X86_64_TEST_GL_TAG}
.use-debian/x86_64_test-gl: .use-debian/x86_64_test-gl:
extends: extends:
@@ -240,7 +213,7 @@ debian/x86_64_test-gl:
debian/x86_64_test-vk: debian/x86_64_test-vk:
extends: .use-debian/x86_64_test-base extends: .use-debian/x86_64_test-base
variables: variables:
MESA_IMAGE_TAG: &debian-x86_64_test-vk ${DEBIAN_TEST_VK_TAG} MESA_IMAGE_TAG: &debian-x86_64_test-vk ${DEBIAN_X86_64_TEST_VK_TAG}
.use-debian/x86_64_test-vk: .use-debian/x86_64_test-vk:
extends: extends:
@@ -253,10 +226,10 @@ debian/x86_64_test-vk:
- debian/x86_64_test-vk - debian/x86_64_test-vk
# Debian based x86_64 test image for Android # Debian based x86_64 test image for Android
.debian/x86_64_test-android: debian/x86_64_test-android:
extends: .use-debian/x86_64_test-base extends: .use-debian/x86_64_test-base
variables: variables:
MESA_IMAGE_TAG: &debian-x86_64_test-android ${DEBIAN_TEST_ANDROID_TAG} MESA_IMAGE_TAG: &debian-x86_64_test-android ${DEBIAN_X86_64_TEST_ANDROID_TAG}
ANDROID_NDK: android-ndk-r25b ANDROID_NDK: android-ndk-r25b
.use-debian/x86_64_test-android: .use-debian/x86_64_test-android:
@@ -269,52 +242,12 @@ debian/x86_64_test-vk:
needs: needs:
- debian/x86_64_test-android - debian/x86_64_test-android
# Debian based aarch64 test image for GL
debian/arm64_test-gl:
tags:
- aarch64
extends: .use-debian/arm64_test-base
variables:
MESA_IMAGE_TAG: &debian-arm64_test-gl ${DEBIAN_TEST_GL_TAG}
.use-debian/arm64_test-gl:
tags:
- aarch64
extends:
- .set-image-base-tag
variables:
MESA_BASE_TAG: *debian-arm64_test-base
MESA_IMAGE_PATH: ${DEBIAN_ARM64_TEST_IMAGE_GL_PATH}
MESA_IMAGE_TAG: *debian-arm64_test-gl
needs:
- debian/arm64_test-gl
# Debian based aarch64 test image for VK
debian/arm64_test-vk:
tags:
- aarch64
extends: .use-debian/arm64_test-base
variables:
MESA_IMAGE_TAG: &debian-arm64_test-vk ${DEBIAN_TEST_VK_TAG}
.use-debian/arm64_test-vk:
tags:
- aarch64
extends:
- .set-image-base-tag
variables:
MESA_BASE_TAG: *debian-arm64_test-base
MESA_IMAGE_PATH: ${DEBIAN_ARM64_TEST_IMAGE_VK_PATH}
MESA_IMAGE_TAG: *debian-arm64_test-vk
needs:
- debian/arm64_test-vk
# Debian based ARM build image # Debian based ARM build image
debian/arm64_build: debian/arm64_build:
extends: extends:
- .fdo.container-build@debian - .fdo.container-build@debian
- .container - .container
- .debian-container-version - .debian-container
tags: tags:
- aarch64 - aarch64
variables: variables:
@@ -346,14 +279,6 @@ alpine/x86_64_build:
- .alpine/x86_64_build-base - .alpine/x86_64_build-base
variables: variables:
MESA_IMAGE_TAG: &alpine-x86_64_build ${ALPINE_X86_64_BUILD_TAG} MESA_IMAGE_TAG: &alpine-x86_64_build ${ALPINE_X86_64_BUILD_TAG}
rules:
# Note: the next three lines must remain in that order, so that the rules
# in `linkcheck-docs` catch nightly pipelines before the rules in `pages`
# exclude them.
- !reference [linkcheck-docs, rules]
- !reference [pages, rules]
- !reference [test-docs, rules]
- !reference [.container, rules]
.use-alpine/x86_64_build: .use-alpine/x86_64_build:
extends: extends:
@@ -393,9 +318,8 @@ fedora/x86_64_build:
.kernel+rootfs: .kernel+rootfs:
extends: extends:
- .container+build-rules - .container+build-rules
- .debian-container-version - .debian-container
stage: container stage: container
timeout: 90m
variables: variables:
GIT_STRATEGY: fetch GIT_STRATEGY: fetch
MESA_ROOTFS_TAG: &kernel-rootfs ${KERNEL_ROOTFS_TAG} MESA_ROOTFS_TAG: &kernel-rootfs ${KERNEL_ROOTFS_TAG}
@@ -439,59 +363,59 @@ kernel+rootfs_arm32:
MESA_ROOTFS_TAG: *kernel-rootfs MESA_ROOTFS_TAG: *kernel-rootfs
# x86_64 image with ARM64 & ARM32 kernel & rootfs for baremetal testing # x86_64 image with ARM64 & ARM32 kernel & rootfs for baremetal testing
.debian/baremetal_arm_test: .debian/arm_test:
extends: extends:
- .fdo.container-build@debian - .fdo.container-build@debian
- .container - .container
- .debian-container-version - .debian-container
# Don't want the .container rules # Don't want the .container rules
- .container+build-rules - .container+build-rules
variables: variables:
FDO_DISTRIBUTION_TAG: "${MESA_IMAGE_TAG}--${MESA_ROOTFS_TAG}--${KERNEL_TAG}--${MESA_TEMPLATES_COMMIT}" FDO_DISTRIBUTION_TAG: "${MESA_IMAGE_TAG}--${MESA_ROOTFS_TAG}--${KERNEL_TAG}--${MESA_TEMPLATES_COMMIT}"
ARTIFACTS_PREFIX: "https://${S3_HOST}/${S3_KERNEL_BUCKET}" ARTIFACTS_PREFIX: "https://${S3_HOST}/mesa-lava"
ARTIFACTS_SUFFIX: "${MESA_ROOTFS_TAG}--${KERNEL_TAG}--${MESA_ARTIFACTS_TAG}--${MESA_TEMPLATES_COMMIT}" ARTIFACTS_SUFFIX: "${MESA_ROOTFS_TAG}--${KERNEL_TAG}--${MESA_ARTIFACTS_TAG}--${MESA_TEMPLATES_COMMIT}"
MESA_ARTIFACTS_TAG: *debian-arm64_build MESA_ARTIFACTS_TAG: *debian-arm64_build
MESA_ROOTFS_TAG: *kernel-rootfs MESA_ROOTFS_TAG: *kernel-rootfs
debian/baremetal_arm32_test: debian/arm32_test:
extends: extends:
- .debian/baremetal_arm_test - .debian/arm_test
needs: needs:
- kernel+rootfs_arm32 - kernel+rootfs_arm32
variables: variables:
MESA_IMAGE_TAG: &debian-arm32_test "${DEBIAN_BASE_TAG}--${PKG_REPO_REV}" MESA_IMAGE_TAG: &debian-arm32_test "${DEBIAN_BASE_TAG}--${PKG_REPO_REV}"
debian/baremetal_arm64_test: debian/arm64_test:
extends: extends:
- .debian/baremetal_arm_test - .debian/arm_test
needs: needs:
- kernel+rootfs_arm64 - kernel+rootfs_arm64
variables: variables:
MESA_IMAGE_TAG: &debian-arm64_test "${DEBIAN_BASE_TAG}--${PKG_REPO_REV}" MESA_IMAGE_TAG: &debian-arm64_test "${DEBIAN_BASE_TAG}--${PKG_REPO_REV}"
.use-debian/baremetal_arm_test: .use-debian/arm_test:
variables: variables:
MESA_ROOTFS_TAG: *kernel-rootfs MESA_ROOTFS_TAG: *kernel-rootfs
.use-debian/baremetal_arm32_test: .use-debian/arm32_test:
image: "$CI_REGISTRY_IMAGE/${MESA_IMAGE_PATH}:${MESA_IMAGE_TAG}--${MESA_ROOTFS_TAG}--${KERNEL_TAG}--${MESA_TEMPLATES_COMMIT}" image: "$CI_REGISTRY_IMAGE/${MESA_IMAGE_PATH}:${MESA_IMAGE_TAG}--${MESA_ROOTFS_TAG}--${KERNEL_TAG}--${MESA_TEMPLATES_COMMIT}"
extends: extends:
- .use-debian/baremetal_arm_test - .use-debian/arm_test
variables: variables:
MESA_IMAGE_PATH: "debian/baremetal_arm32_test" MESA_IMAGE_PATH: "debian/arm32_test"
MESA_IMAGE_TAG: *debian-arm32_test MESA_IMAGE_TAG: *debian-arm32_test
needs: needs:
- debian/baremetal_arm_test - debian/arm_test
.use-debian/baremetal_arm64_test: .use-debian/arm64_test:
image: "$CI_REGISTRY_IMAGE/${MESA_IMAGE_PATH}:${MESA_IMAGE_TAG}--${MESA_ROOTFS_TAG}--${KERNEL_TAG}--${MESA_TEMPLATES_COMMIT}" image: "$CI_REGISTRY_IMAGE/${MESA_IMAGE_PATH}:${MESA_IMAGE_TAG}--${MESA_ROOTFS_TAG}--${KERNEL_TAG}--${MESA_TEMPLATES_COMMIT}"
extends: extends:
- .use-debian/baremetal_arm_test - .use-debian/arm_test
variables: variables:
MESA_IMAGE_PATH: "debian/baremetal_arm64_test" MESA_IMAGE_PATH: "debian/arm64_test"
MESA_IMAGE_TAG: *debian-arm64_test MESA_IMAGE_TAG: *debian-arm64_test
needs: needs:
- debian/baremetal_arm_test - debian/arm_test
# Native Windows docker builds # Native Windows docker builds
# #


@@ -1,11 +0,0 @@
#!/usr/bin/env bash
# When changing this file, you need to bump the following
# .gitlab-ci/image-tags.yml tags:
# DEBIAN_BUILD_TAG
# FEDORA_X86_64_BUILD_TAG
rm -f /usr/lib/python3.*/EXTERNALLY-MANAGED
# We need at least 1.4.0 for rusticl
pip3 install 'meson==1.4.0'


@@ -14,7 +14,7 @@ export LLVM_VERSION="${LLVM_VERSION:=15}"
check_minio() check_minio()
{ {
S3_PATH="${S3_HOST}/${S3_KERNEL_BUCKET}/$1/${DISTRIBUTION_TAG}/${DEBIAN_ARCH}" S3_PATH="${S3_HOST}/mesa-lava/$1/${DISTRIBUTION_TAG}/${DEBIAN_ARCH}"
if curl -L --retry 4 -f --retry-delay 60 -s -X HEAD \ if curl -L --retry 4 -f --retry-delay 60 -s -X HEAD \
"https://${S3_PATH}/done"; then "https://${S3_PATH}/done"; then
echo "Remote files are up-to-date, skip rebuilding them." echo "Remote files are up-to-date, skip rebuilding them."
@@ -31,8 +31,6 @@ check_minio "${CI_PROJECT_PATH}"
. .gitlab-ci/container/build-rust.sh . .gitlab-ci/container/build-rust.sh
if [[ "$DEBIAN_ARCH" = "arm64" ]]; then if [[ "$DEBIAN_ARCH" = "arm64" ]]; then
BUILD_CL="ON"
BUILD_VK="ON"
GCC_ARCH="aarch64-linux-gnu" GCC_ARCH="aarch64-linux-gnu"
KERNEL_ARCH="arm64" KERNEL_ARCH="arm64"
SKQP_ARCH="arm64" SKQP_ARCH="arm64"
@@ -54,8 +52,6 @@ if [[ "$DEBIAN_ARCH" = "arm64" ]]; then
KERNEL_IMAGE_NAME="Image" KERNEL_IMAGE_NAME="Image"
elif [[ "$DEBIAN_ARCH" = "armhf" ]]; then elif [[ "$DEBIAN_ARCH" = "armhf" ]]; then
BUILD_CL="OFF"
BUILD_VK="OFF"
GCC_ARCH="arm-linux-gnueabihf" GCC_ARCH="arm-linux-gnueabihf"
KERNEL_ARCH="arm" KERNEL_ARCH="arm"
SKQP_ARCH="arm" SKQP_ARCH="arm"
@@ -80,8 +76,6 @@ elif [[ "$DEBIAN_ARCH" = "armhf" ]]; then
libxkbcommon-dev:armhf libxkbcommon-dev:armhf
) )
else else
BUILD_CL="ON"
BUILD_VK="ON"
GCC_ARCH="x86_64-linux-gnu" GCC_ARCH="x86_64-linux-gnu"
KERNEL_ARCH="x86_64" KERNEL_ARCH="x86_64"
SKQP_ARCH="x64" SKQP_ARCH="x64"
@@ -89,7 +83,7 @@ else
DEVICE_TREES="" DEVICE_TREES=""
KERNEL_IMAGE_NAME="bzImage" KERNEL_IMAGE_NAME="bzImage"
CONTAINER_ARCH_PACKAGES=( CONTAINER_ARCH_PACKAGES=(
libasound2-dev libcap-dev libfdt-dev libva-dev p7zip wine libasound2-dev libcap-dev libfdt-dev libva-dev wayland-protocols p7zip wine
) )
fi fi
@@ -150,15 +144,9 @@ CONTAINER_EPHEMERAL=(
python3-serial python3-serial
python3-venv python3-venv
unzip unzip
wayland-protocols
zstd zstd
) )
[ "$BUILD_CL" == "ON" ] && CONTAINER_EPHEMERAL+=(
ocl-icd-opencl-dev
)
echo "deb [trusted=yes] https://gitlab.freedesktop.org/gfx-ci/ci-deb-repo/-/raw/${PKG_REPO_REV}/ ${FDO_DISTRIBUTION_VERSION%-*} main" | tee /etc/apt/sources.list.d/gfx-ci_.list echo "deb [trusted=yes] https://gitlab.freedesktop.org/gfx-ci/ci-deb-repo/-/raw/${PKG_REPO_REV}/ ${FDO_DISTRIBUTION_VERSION%-*} main" | tee /etc/apt/sources.list.d/gfx-ci_.list
apt-get update apt-get update
@@ -202,6 +190,7 @@ PKG_DEP=(
# arch dependent rootfs packages # arch dependent rootfs packages
[ "$DEBIAN_ARCH" = "arm64" ] && PKG_ARCH=( [ "$DEBIAN_ARCH" = "arm64" ] && PKG_ARCH=(
libgl1 libglu1-mesa libgl1 libglu1-mesa
libvulkan-dev
firmware-linux-nonfree firmware-qcom-media firmware-linux-nonfree firmware-qcom-media
libfontconfig1 libfontconfig1
) )
@@ -214,6 +203,7 @@ PKG_DEP=(
spirv-tools spirv-tools
libelf1 libfdt1 "libllvm${LLVM_VERSION}" libelf1 libfdt1 "libllvm${LLVM_VERSION}"
libva2 libva-drm2 libva2 libva-drm2
libvulkan-dev
socat socat
sysvinit-core sysvinit-core
wine wine
@@ -222,16 +212,6 @@ PKG_DEP=(
firmware-misc-nonfree firmware-misc-nonfree
) )
[ "$BUILD_CL" == "ON" ] && PKG_ARCH+=(
clinfo
"libclang-cpp${LLVM_VERSION}"
"libclang-common-${LLVM_VERSION}-dev"
ocl-icd-libopencl1
)
[ "$BUILD_VK" == "ON" ] && PKG_ARCH+=(
libvulkan-dev
)
mmdebstrap \ mmdebstrap \
--variant=apt \ --variant=apt \
--arch="${DEBIAN_ARCH}" \ --arch="${DEBIAN_ARCH}" \
@@ -264,17 +244,6 @@ fi
STRIP_CMD="${GCC_ARCH}-strip" STRIP_CMD="${GCC_ARCH}-strip"
mkdir -p $ROOTFS/usr/lib/$GCC_ARCH mkdir -p $ROOTFS/usr/lib/$GCC_ARCH
############### Build libclc
if [ "$BUILD_CL" = "ON" ]; then
rm -rf /usr/lib/clc/*
. .gitlab-ci/container/build-libclc.sh
mkdir -p $ROOTFS/usr/{share,lib}/clc
mv /usr/share/clc/spirv*-mesa3d-.spv $ROOTFS/usr/share/clc/
ln -s /usr/share/clc/spirv64-mesa3d-.spv $ROOTFS/usr/lib/clc/
ln -s /usr/share/clc/spirv-mesa3d-.spv $ROOTFS/usr/lib/clc/
fi
############### Build Vulkan validation layer (for zink) ############### Build Vulkan validation layer (for zink)
if [ "$DEBIAN_ARCH" = "amd64" ]; then if [ "$DEBIAN_ARCH" = "amd64" ]; then
. .gitlab-ci/container/build-vulkan-validation.sh . .gitlab-ci/container/build-vulkan-validation.sh
@@ -311,7 +280,7 @@ DEQP_API=GLES \
DEQP_TARGET=surfaceless \ DEQP_TARGET=surfaceless \
. .gitlab-ci/container/build-deqp.sh . .gitlab-ci/container/build-deqp.sh
[ "$BUILD_VK" == "ON" ] && DEQP_API=VK \ DEQP_API=VK \
DEQP_TARGET=default \ DEQP_TARGET=default \
. .gitlab-ci/container/build-deqp.sh . .gitlab-ci/container/build-deqp.sh
@@ -326,21 +295,7 @@ if [[ "$DEBIAN_ARCH" = "arm64" ]] \
fi fi
############### Build piglit ############### Build piglit
PIGLIT_OPTS="-DPIGLIT_USE_WAFFLE=ON PIGLIT_OPTS="-DPIGLIT_BUILD_DMA_BUF_TESTS=ON -DPIGLIT_BUILD_GLX_TESTS=ON" . .gitlab-ci/container/build-piglit.sh
-DPIGLIT_USE_GBM=ON
-DPIGLIT_USE_WAYLAND=ON
-DPIGLIT_USE_X11=ON
-DPIGLIT_BUILD_GLX_TESTS=ON
-DPIGLIT_BUILD_EGL_TESTS=ON
-DPIGLIT_BUILD_WGL_TESTS=OFF
-DPIGLIT_BUILD_GL_TESTS=ON
-DPIGLIT_BUILD_GLES1_TESTS=ON
-DPIGLIT_BUILD_GLES2_TESTS=ON
-DPIGLIT_BUILD_GLES3_TESTS=ON
-DPIGLIT_BUILD_CL_TESTS=$BUILD_CL
-DPIGLIT_BUILD_VK_TESTS=$BUILD_VK
-DPIGLIT_BUILD_DMA_BUF_TESTS=ON" \
. .gitlab-ci/container/build-piglit.sh
mv /piglit $ROOTFS/. mv /piglit $ROOTFS/.
############### Build libva tests ############### Build libva tests
@@ -372,8 +327,8 @@ if [[ -e ".gitlab-ci/local/build-rootfs.sh" ]]; then
fi fi
############### Download prebuilt kernel ############### Build kernel
. .gitlab-ci/container/download-prebuilt-kernel.sh . .gitlab-ci/container/build-kernel.sh
############### Delete rust, since the tests won't be compiling anything. ############### Delete rust, since the tests won't be compiling anything.
rm -rf /root/.cargo rm -rf /root/.cargo
@@ -410,8 +365,8 @@ popd
. .gitlab-ci/container/container_post_build.sh . .gitlab-ci/container/container_post_build.sh
ci-fairy s3cp --token-file "${S3_JWT_FILE}" /lava-files/"${ROOTFSTAR}" \ ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" /lava-files/"${ROOTFSTAR}" \
https://${S3_PATH}/"${ROOTFSTAR}" https://${S3_PATH}/"${ROOTFSTAR}"
touch /lava-files/done touch /lava-files/done
ci-fairy s3cp --token-file "${S3_JWT_FILE}" /lava-files/done https://${S3_PATH}/done ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" /lava-files/done https://${S3_PATH}/done
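The script above relies on a simple done-marker protocol: the rootfs tarball is uploaded first and a "done" file is uploaded last, so check_minio() only has to probe that single URL to decide whether the rebuild can be skipped. A minimal illustrative sketch of that probe in Python (the helper name rootfs_is_current and the use of requests are assumptions for illustration; the CI itself uses the curl command shown in check_minio):

import requests


def rootfs_is_current(s3_host: str, bucket: str, project_path: str,
                      distribution_tag: str, debian_arch: str) -> bool:
    # The rootfs build uploads the "done" marker only after every other
    # artifact, so a successful HEAD on that one URL means the whole
    # rootfs for this tag is already present and the rebuild can be skipped.
    url = (f"https://{s3_host}/{bucket}/{project_path}/"
           f"{distribution_tag}/{debian_arch}/done")
    try:
        return requests.head(url, timeout=60, allow_redirects=True).ok
    except requests.RequestException:
        return False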


@@ -0,0 +1,29 @@
From 7c9aa6f846f9f2f0d70b5c4a8e7c99a3d31b3b1a Mon Sep 17 00:00:00 2001
From: Rob Clark <robdclark@chromium.org>
Date: Sat, 27 Jan 2024 10:59:00 -0800
Subject: [PATCH] Add missing subgroup support checks for linear derivate tests
Some of these tests require subgroup ops support, but didn't bother
checking whether they were supported. Add these missing checks.
---
.../vulkan/shaderrender/vktShaderRenderDerivateTests.cpp | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderDerivateTests.cpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderDerivateTests.cpp
index 3253505958..709044f2e8 100644
--- a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderDerivateTests.cpp
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderDerivateTests.cpp
@@ -1145,6 +1145,13 @@ LinearDerivateCase::~LinearDerivateCase (void)
TestInstance* LinearDerivateCase::createInstance (Context& context) const
{
DE_ASSERT(m_uniformSetup != DE_NULL);
+ if (m_fragmentTmpl.find("gl_SubgroupInvocationID") != std::string::npos) {
+ if (!subgroups::areQuadOperationsSupportedForStages(context, VK_SHADER_STAGE_FRAGMENT_BIT))
+ throw tcu::NotSupportedError("test requires VK_SUBGROUP_FEATURE_QUAD_BIT");
+
+ if (subgroups::getSubgroupSize(context) < 4)
+ throw tcu::NotSupportedError("test requires subgroupSize >= 4");
+ }
return new LinearDerivateCaseInstance(context, *m_uniformSetup, m_definitions, m_values);
}


@@ -0,0 +1,56 @@
From ed3794c975d284a5453ae33ae59dd1541a9eb804 Mon Sep 17 00:00:00 2001
From: Rob Clark <robdclark@chromium.org>
Date: Sat, 27 Jan 2024 10:57:28 -0800
Subject: [PATCH] Use subgroups helper in derivate tests
For the tests that need subgroup ops, use the existing subgroups helper,
rather than open-coding the same checks.
---
.../vktShaderRenderDerivateTests.cpp | 23 ++++---------------
1 file changed, 5 insertions(+), 18 deletions(-)
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderDerivateTests.cpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderDerivateTests.cpp
index a8bb5a3ba7..3253505958 100644
--- a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderDerivateTests.cpp
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderDerivateTests.cpp
@@ -31,6 +31,7 @@
#include "vktShaderRenderDerivateTests.hpp"
#include "vktShaderRender.hpp"
+#include "subgroups/vktSubgroupsTestsUtils.hpp"
#include "vkImageUtil.hpp"
#include "vkQueryUtil.hpp"
@@ -707,28 +708,14 @@ tcu::TestStatus TriangleDerivateCaseInstance::iterate (void)
{
const std::string errorPrefix = m_definitions.inNonUniformControlFlow ? "Derivatives in dynamic control flow" :
"Manual derivatives with subgroup operations";
- if (!m_context.contextSupports(vk::ApiVersion(0, 1, 1, 0)))
- throw tcu::NotSupportedError(errorPrefix + " require Vulkan 1.1");
-
- vk::VkPhysicalDeviceSubgroupProperties subgroupProperties;
- deMemset(&subgroupProperties, 0, sizeof(subgroupProperties));
- subgroupProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES;
-
- vk::VkPhysicalDeviceProperties2 properties2;
- deMemset(&properties2, 0, sizeof(properties2));
- properties2.sType = vk::VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
- properties2.pNext = &subgroupProperties;
-
- m_context.getInstanceInterface().getPhysicalDeviceProperties2(m_context.getPhysicalDevice(), &properties2);
+ if (!subgroups::areQuadOperationsSupportedForStages(m_context, VK_SHADER_STAGE_FRAGMENT_BIT))
+ throw tcu::NotSupportedError(errorPrefix + " tests require VK_SUBGROUP_FEATURE_QUAD_BIT");
- if (subgroupProperties.subgroupSize < 4)
+ if (subgroups::getSubgroupSize(m_context) < 4)
throw tcu::NotSupportedError(errorPrefix + " require subgroupSize >= 4");
- if ((subgroupProperties.supportedOperations & VK_SUBGROUP_FEATURE_BALLOT_BIT) == 0)
+ if (!subgroups::isSubgroupFeatureSupportedForDevice(m_context, VK_SUBGROUP_FEATURE_BALLOT_BIT))
throw tcu::NotSupportedError(errorPrefix + " tests require VK_SUBGROUP_FEATURE_BALLOT_BIT");
-
- if (isSubgroupFunc(m_definitions.func) && (subgroupProperties.supportedOperations & VK_SUBGROUP_FEATURE_QUAD_BIT) == 0)
- throw tcu::NotSupportedError(errorPrefix + " tests require VK_SUBGROUP_FEATURE_QUAD_BIT");
}
setup();


@@ -96,9 +96,9 @@ set +e -x
NIR_DEBUG="novalidate" \ NIR_DEBUG="novalidate" \
LIBGL_ALWAYS_SOFTWARE=${CROSVM_LIBGL_ALWAYS_SOFTWARE} \ LIBGL_ALWAYS_SOFTWARE=${CROSVM_LIBGL_ALWAYS_SOFTWARE} \
GALLIUM_DRIVER=${CROSVM_GALLIUM_DRIVER} \ GALLIUM_DRIVER=${CROSVM_GALLIUM_DRIVER} \
VK_DRIVER_FILES=$CI_PROJECT_DIR/install/share/vulkan/icd.d/${CROSVM_VK_DRIVER}_icd.x86_64.json \ VK_ICD_FILENAMES=$CI_PROJECT_DIR/install/share/vulkan/icd.d/${CROSVM_VK_DRIVER}_icd.x86_64.json \
crosvm --no-syslog run \ crosvm --no-syslog run \
--gpu "${CROSVM_GPU_ARGS}" --gpu-render-server "path=${VIRGL_RENDER_SERVER:-/usr/local/libexec/virgl_render_server}" \ --gpu "${CROSVM_GPU_ARGS}" --gpu-render-server "path=/usr/local/libexec/virgl_render_server" \
-m "${CROSVM_MEMORY:-4096}" -c "${CROSVM_CPU:-2}" --disable-sandbox \ -m "${CROSVM_MEMORY:-4096}" -c "${CROSVM_CPU:-2}" --disable-sandbox \
--shared-dir /:my_root:type=fs:writeback=true:timeout=60:cache=always \ --shared-dir /:my_root:type=fs:writeback=true:timeout=60:cache=always \
--net "host-ip=192.168.30.1,netmask=255.255.255.0,mac=AA:BB:CC:00:00:12" \ --net "host-ip=192.168.30.1,netmask=255.255.255.0,mac=AA:BB:CC:00:00:12" \


@@ -18,8 +18,7 @@ INSTALL=$(realpath -s "$PWD"/install)
# Set up the driver environment. # Set up the driver environment.
export LD_LIBRARY_PATH="$INSTALL"/lib/:$LD_LIBRARY_PATH export LD_LIBRARY_PATH="$INSTALL"/lib/:$LD_LIBRARY_PATH
export EGL_PLATFORM=surfaceless export EGL_PLATFORM=surfaceless
ARCH=$(uname -m) export VK_ICD_FILENAMES="$PWD"/install/share/vulkan/icd.d/"$VK_DRIVER"_icd.${VK_CPU:-$(uname -m)}.json
export VK_DRIVER_FILES="$PWD"/install/share/vulkan/icd.d/"$VK_DRIVER"_icd."$ARCH".json
export OCL_ICD_VENDORS="$PWD"/install/etc/OpenCL/vendors/ export OCL_ICD_VENDORS="$PWD"/install/etc/OpenCL/vendors/
if [ -n "$USE_ANGLE" ]; then if [ -n "$USE_ANGLE" ]; then
@@ -60,7 +59,7 @@ if [ -z "$DEQP_SUITE" ]; then
# Generate test case list file. # Generate test case list file.
if [ "$DEQP_VER" = "vk" ]; then if [ "$DEQP_VER" = "vk" ]; then
MUSTPASS=/deqp/mustpass/vk-main.txt MUSTPASS=/deqp/mustpass/vk-master.txt
DEQP=/deqp/external/vulkancts/modules/vulkan/deqp-vk DEQP=/deqp/external/vulkancts/modules/vulkan/deqp-vk
elif [ "$DEQP_VER" = "gles2" ] || [ "$DEQP_VER" = "gles3" ] || [ "$DEQP_VER" = "gles31" ] || [ "$DEQP_VER" = "egl" ]; then elif [ "$DEQP_VER" = "gles2" ] || [ "$DEQP_VER" = "gles3" ] || [ "$DEQP_VER" = "gles31" ] || [ "$DEQP_VER" = "egl" ]; then
MUSTPASS=/deqp/mustpass/$DEQP_VER-main.txt MUSTPASS=/deqp/mustpass/$DEQP_VER-main.txt
@@ -170,7 +169,7 @@ fi
uncollapsed_section_switch deqp "deqp: deqp-runner" uncollapsed_section_switch deqp "deqp: deqp-runner"
# Print the detailed version with the list of backports and local patches # Print the detailed version with the list of backports and local patches
for api in vk gl gles; do for api in vk gl; do
deqp_version_log=/deqp/version-$api deqp_version_log=/deqp/version-$api
if [ -r "$deqp_version_log" ]; then if [ -r "$deqp_version_log" ]; then
cat "$deqp_version_log" cat "$deqp_version_log"


@@ -18,7 +18,7 @@ TMP_DIR=$(mktemp -d)
echo "$(date +"%F %T") Downloading archived master..." echo "$(date +"%F %T") Downloading archived master..."
if ! /usr/bin/wget \ if ! /usr/bin/wget \
-O "$TMP_DIR/$CI_PROJECT_NAME.tar.gz" \ -O "$TMP_DIR/$CI_PROJECT_NAME.tar.gz" \
"https://${S3_HOST}/${S3_GITCACHE_BUCKET}/${FDO_UPSTREAM_REPO}/$CI_PROJECT_NAME.tar.gz"; "https://${S3_HOST}/git-cache/${FDO_UPSTREAM_REPO}/$CI_PROJECT_NAME.tar.gz";
then then
echo "Repository cache not available" echo "Repository cache not available"
exit exit


@@ -237,25 +237,6 @@
when: never when: never
- !reference [.freedreno-farm-rules, rules] - !reference [.freedreno-farm-rules, rules]
.vmware-farm-rules:
rules:
- exists: [ .ci-farms-disabled/vmware ]
when: never
- changes: [ .ci-farms-disabled/vmware ]
if: '$CI_PIPELINE_SOURCE != "schedule"'
when: on_success
- changes: [ .ci-farms-disabled/* ]
if: '$CI_PIPELINE_SOURCE != "schedule"'
when: never
.vmware-farm-manual-rules:
rules:
- exists: [ .ci-farms-disabled/vmware ]
when: never
- changes: [ .ci-farms-disabled/vmware ]
if: '$CI_PIPELINE_SOURCE != "schedule"'
when: never
- !reference [.vmware-farm-rules, rules]
.ondracka-farm-rules: .ondracka-farm-rules:
rules: rules:
@@ -330,10 +311,6 @@
changes: [ .ci-farms-disabled/ondracka ] changes: [ .ci-farms-disabled/ondracka ]
exists: [ .ci-farms-disabled/ondracka ] exists: [ .ci-farms-disabled/ondracka ]
when: never when: never
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
changes: [ .ci-farms-disabled/vmware ]
exists: [ .ci-farms-disabled/vmware ]
when: never
# Any other change to ci-farms/* means some farm is getting re-enabled. # Any other change to ci-farms/* means some farm is getting re-enabled.
# Run jobs in Marge pipelines (and let it fallback to manual otherwise) # Run jobs in Marge pipelines (and let it fallback to manual otherwise)
- if: '$CI_PIPELINE_SOURCE == "merge_request_event" && $GITLAB_USER_LOGIN == "marge-bot"' - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && $GITLAB_USER_LOGIN == "marge-bot"'


@@ -11,7 +11,7 @@ INSTALL=$PWD/install
# Set up the driver environment. # Set up the driver environment.
export LD_LIBRARY_PATH="$INSTALL/lib/" export LD_LIBRARY_PATH="$INSTALL/lib/"
export VK_DRIVER_FILES="$INSTALL/share/vulkan/icd.d/${VK_DRIVER}_icd.x86_64.json" export VK_ICD_FILENAMES="$INSTALL/share/vulkan/icd.d/${VK_DRIVER}_icd.x86_64.json"
# To store Fossilize logs on failure. # To store Fossilize logs on failure.
RESULTS="$PWD/results" RESULTS="$PWD/results"


@@ -13,37 +13,34 @@
variables: variables:
DEBIAN_X86_64_BUILD_BASE_IMAGE: "debian/x86_64_build-base" DEBIAN_X86_64_BUILD_BASE_IMAGE: "debian/x86_64_build-base"
DEBIAN_BASE_TAG: "20240717-pyyaml-3" DEBIAN_BASE_TAG: "20240307-virglcrosvm"
DEBIAN_X86_64_BUILD_IMAGE_PATH: "debian/x86_64_build" DEBIAN_X86_64_BUILD_IMAGE_PATH: "debian/x86_64_build"
DEBIAN_BUILD_TAG: "20240717-pyyaml-3" DEBIAN_BUILD_TAG: "20240301-mold"
DEBIAN_X86_64_TEST_BASE_IMAGE: "debian/x86_64_test-base" DEBIAN_X86_64_TEST_BASE_IMAGE: "debian/x86_64_test-base"
DEBIAN_ARM64_TEST_BASE_IMAGE: "debian/arm64_test-base"
DEBIAN_X86_64_TEST_IMAGE_GL_PATH: "debian/x86_64_test-gl" DEBIAN_X86_64_TEST_IMAGE_GL_PATH: "debian/x86_64_test-gl"
DEBIAN_ARM64_TEST_IMAGE_GL_PATH: "debian/arm64_test-gl"
DEBIAN_X86_64_TEST_IMAGE_VK_PATH: "debian/x86_64_test-vk" DEBIAN_X86_64_TEST_IMAGE_VK_PATH: "debian/x86_64_test-vk"
DEBIAN_ARM64_TEST_IMAGE_VK_PATH: "debian/arm64_test-vk"
DEBIAN_X86_64_TEST_ANDROID_IMAGE_PATH: "debian/x86_64_test-android" DEBIAN_X86_64_TEST_ANDROID_IMAGE_PATH: "debian/x86_64_test-android"
DEBIAN_TEST_ANDROID_TAG: "20240423-deqp" DEBIAN_X86_64_TEST_ANDROID_TAG: "20240311-runner"
DEBIAN_TEST_GL_TAG: "20240713-piglit-58" DEBIAN_X86_64_TEST_GL_TAG: "20240313-ninetests"
DEBIAN_TEST_VK_TAG: "20240713-piglit-58" DEBIAN_X86_64_TEST_VK_TAG: "20240317-direct_drm"
KERNEL_ROOTFS_TAG: "20240713-piglit-58" KERNEL_ROOTFS_TAG: "20240317-direct_drm"
ALPINE_X86_64_BUILD_TAG: "20240717-pyyaml" ALPINE_X86_64_BUILD_TAG: "20240208-libclc-5"
ALPINE_X86_64_LAVA_SSH_TAG: "20240401-wlproto" ALPINE_X86_64_LAVA_SSH_TAG: "20230626-v1"
FEDORA_X86_64_BUILD_TAG: "20240717-pyyaml" FEDORA_X86_64_BUILD_TAG: "20240301-mold"
KERNEL_TAG: "v6.6.21-mesa-f8ea" KERNEL_TAG: "v6.6.21-mesa-19fc"
KERNEL_REPO: "gfx-ci/linux" KERNEL_REPO: "gfx-ci/linux"
PKG_REPO_REV: "bca9635d" PKG_REPO_REV: "3cc12a2a"
WINDOWS_X64_MSVC_PATH: "windows/x86_64_msvc" WINDOWS_X64_MSVC_PATH: "windows/x86_64_msvc"
WINDOWS_X64_MSVC_TAG: "20240717-pyyaml-3" WINDOWS_X64_MSVC_TAG: "20231222-msvc"
WINDOWS_X64_BUILD_PATH: "windows/x86_64_build" WINDOWS_X64_BUILD_PATH: "windows/x86_64_build"
WINDOWS_X64_BUILD_TAG: "20240717-pyyaml-3" WINDOWS_X64_BUILD_TAG: "20240117-vulkan-sdk"
WINDOWS_X64_TEST_PATH: "windows/x86_64_test" WINDOWS_X64_TEST_PATH: "windows/x86_64_test"
WINDOWS_X64_TEST_TAG: "20240717-pyyaml-3" WINDOWS_X64_TEST_TAG: "20240117-vulkan-sdk"


@@ -5,36 +5,24 @@ class MesaCIException(Exception):
pass pass
class MesaCIRetriableException(MesaCIException): class MesaCITimeoutError(MesaCIException):
pass
class MesaCITimeoutError(MesaCIRetriableException):
def __init__(self, *args, timeout_duration: timedelta) -> None: def __init__(self, *args, timeout_duration: timedelta) -> None:
super().__init__(*args) super().__init__(*args)
self.timeout_duration = timeout_duration self.timeout_duration = timeout_duration
class MesaCIRetryError(MesaCIRetriableException): class MesaCIRetryError(MesaCIException):
def __init__(self, *args, retry_count: int, last_job: None) -> None: def __init__(self, *args, retry_count: int, last_job: None) -> None:
super().__init__(*args) super().__init__(*args)
self.retry_count = retry_count self.retry_count = retry_count
self.last_job = last_job self.last_job = last_job
class MesaCIFatalException(MesaCIException): class MesaCIParseException(MesaCIException):
"""Exception raised when the Mesa CI script encounters a fatal error that
prevents the script from continuing."""
def __init__(self, *args) -> None:
super().__init__(*args)
class MesaCIParseException(MesaCIRetriableException):
pass pass
class MesaCIKnownIssueException(MesaCIRetriableException): class MesaCIKnownIssueException(MesaCIException):
"""Exception raised when the Mesa CI script finds something in the logs that """Exception raised when the Mesa CI script finds something in the logs that
is known to cause the LAVA job to eventually fail""" is known to cause the LAVA job to eventually fail"""

View File

@@ -11,7 +11,7 @@ variables:
# proxy used to cache data locally # proxy used to cache data locally
FDO_HTTP_CACHE_URI: "http://caching-proxy/cache/?uri=" FDO_HTTP_CACHE_URI: "http://caching-proxy/cache/?uri="
# base system generated by the container build job, shared between many pipelines # base system generated by the container build job, shared between many pipelines
BASE_SYSTEM_HOST_PREFIX: "${S3_HOST}/${S3_KERNEL_BUCKET}" BASE_SYSTEM_HOST_PREFIX: "${S3_HOST}/mesa-lava"
BASE_SYSTEM_MAINLINE_HOST_PATH: "${BASE_SYSTEM_HOST_PREFIX}/${FDO_UPSTREAM_REPO}/${DISTRIBUTION_TAG}/${DEBIAN_ARCH}" BASE_SYSTEM_MAINLINE_HOST_PATH: "${BASE_SYSTEM_HOST_PREFIX}/${FDO_UPSTREAM_REPO}/${DISTRIBUTION_TAG}/${DEBIAN_ARCH}"
BASE_SYSTEM_FORK_HOST_PATH: "${BASE_SYSTEM_HOST_PREFIX}/${CI_PROJECT_PATH}/${DISTRIBUTION_TAG}/${DEBIAN_ARCH}" BASE_SYSTEM_FORK_HOST_PATH: "${BASE_SYSTEM_HOST_PREFIX}/${CI_PROJECT_PATH}/${DISTRIBUTION_TAG}/${DEBIAN_ARCH}"
# per-job build artifacts # per-job build artifacts


@@ -30,7 +30,7 @@ artifacts/ci-common/generate-env.sh | tee results/job-rootfs-overlay/set-job-env
section_end variables section_end variables
tar zcf job-rootfs-overlay.tar.gz -C results/job-rootfs-overlay/ . tar zcf job-rootfs-overlay.tar.gz -C results/job-rootfs-overlay/ .
ci-fairy s3cp --token-file "${S3_JWT_FILE}" job-rootfs-overlay.tar.gz "https://${JOB_ROOTFS_OVERLAY_PATH}" ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" job-rootfs-overlay.tar.gz "https://${JOB_ROOTFS_OVERLAY_PATH}"
ARTIFACT_URL="${FDO_HTTP_CACHE_URI:-}https://${PIPELINE_ARTIFACTS_BASE}/${S3_ARTIFACT_NAME:?}.tar.zst" ARTIFACT_URL="${FDO_HTTP_CACHE_URI:-}https://${PIPELINE_ARTIFACTS_BASE}/${S3_ARTIFACT_NAME:?}.tar.zst"
@@ -49,9 +49,8 @@ PYTHONPATH=artifacts/ artifacts/lava/lava_job_submitter.py \
--first-stage-init artifacts/ci-common/init-stage1.sh \ --first-stage-init artifacts/ci-common/init-stage1.sh \
--ci-project-dir "${CI_PROJECT_DIR}" \ --ci-project-dir "${CI_PROJECT_DIR}" \
--device-type "${DEVICE_TYPE}" \ --device-type "${DEVICE_TYPE}" \
--farm "${FARM}" \
--dtb-filename "${DTB}" \ --dtb-filename "${DTB}" \
--jwt-file "${S3_JWT_FILE}" \ --jwt-file "${CI_JOB_JWT_FILE}" \
--kernel-image-name "${KERNEL_IMAGE_NAME}" \ --kernel-image-name "${KERNEL_IMAGE_NAME}" \
--kernel-image-type "${KERNEL_IMAGE_TYPE}" \ --kernel-image-type "${KERNEL_IMAGE_TYPE}" \
--boot-method "${BOOT_METHOD}" \ --boot-method "${BOOT_METHOD}" \


@@ -16,7 +16,7 @@ import sys
import time import time
from collections import defaultdict from collections import defaultdict
from dataclasses import dataclass, fields from dataclasses import dataclass, fields
from datetime import datetime, timedelta, timezone from datetime import datetime, timedelta
from os import environ, getenv, path from os import environ, getenv, path
from typing import Any, Optional from typing import Any, Optional
@@ -25,8 +25,6 @@ from lavacli.utils import flow_yaml as lava_yaml
from lava.exceptions import ( from lava.exceptions import (
MesaCIException, MesaCIException,
MesaCIFatalException,
MesaCIRetriableException,
MesaCIParseException, MesaCIParseException,
MesaCIRetryError, MesaCIRetryError,
MesaCITimeoutError, MesaCITimeoutError,
@@ -60,7 +58,7 @@ except ImportError as e:
# Timeout in seconds to decide if the device from the dispatched LAVA job has # Timeout in seconds to decide if the device from the dispatched LAVA job has
# hung or not due to the lack of new log output. # hung or not due to the lack of new log output.
DEVICE_HANGING_TIMEOUT_SEC = int(getenv("DEVICE_HANGING_TIMEOUT_SEC", 5 * 60)) DEVICE_HANGING_TIMEOUT_SEC = int(getenv("DEVICE_HANGING_TIMEOUT_SEC", 5*60))
# How many seconds the script should wait before try a new polling iteration to # How many seconds the script should wait before try a new polling iteration to
# check if the dispatched LAVA job is running or waiting in the job queue. # check if the dispatched LAVA job is running or waiting in the job queue.
@@ -83,29 +81,18 @@ NUMBER_OF_RETRIES_TIMEOUT_DETECTION = int(
getenv("LAVA_NUMBER_OF_RETRIES_TIMEOUT_DETECTION", 2) getenv("LAVA_NUMBER_OF_RETRIES_TIMEOUT_DETECTION", 2)
) )
CI_JOB_TIMEOUT_SEC = int(getenv("CI_JOB_TIMEOUT", 3600))
# How many seconds the script will wait to let LAVA run the job and give the final details.
EXPECTED_JOB_DURATION_SEC = int(getenv("EXPECTED_JOB_DURATION_SEC", 60 * 10))
# CI_JOB_STARTED is given by GitLab CI/CD in UTC timezone by default.
CI_JOB_STARTED_AT_RAW = getenv("CI_JOB_STARTED_AT", "")
CI_JOB_STARTED_AT: datetime = (
datetime.fromisoformat(CI_JOB_STARTED_AT_RAW)
if CI_JOB_STARTED_AT_RAW
else datetime.now(timezone.utc)
)
def raise_exception_from_metadata(metadata: dict, job_id: int) -> None: def raise_exception_from_metadata(metadata: dict, job_id: int) -> None:
""" """
Investigate infrastructure errors from the job metadata. Investigate infrastructure errors from the job metadata.
If it finds an error, raise it as MesaCIRetriableException. If it finds an error, raise it as MesaCIException.
""" """
if "result" not in metadata or metadata["result"] != "fail": if "result" not in metadata or metadata["result"] != "fail":
return return
if "error_type" in metadata: if "error_type" in metadata:
error_type = metadata["error_type"] error_type = metadata["error_type"]
if error_type == "Infrastructure": if error_type == "Infrastructure":
raise MesaCIRetriableException( raise MesaCIException(
f"LAVA job {job_id} failed with Infrastructure Error. Retry." f"LAVA job {job_id} failed with Infrastructure Error. Retry."
) )
if error_type == "Job": if error_type == "Job":
@@ -113,12 +100,12 @@ def raise_exception_from_metadata(metadata: dict, job_id: int) -> None:
# with mal-formed job definitions. As we are always validating the # with mal-formed job definitions. As we are always validating the
# jobs, only the former is probable to happen. E.g.: When some LAVA # jobs, only the former is probable to happen. E.g.: When some LAVA
# action timed out more times than expected in job definition. # action timed out more times than expected in job definition.
raise MesaCIRetriableException( raise MesaCIException(
f"LAVA job {job_id} failed with JobError " f"LAVA job {job_id} failed with JobError "
"(possible LAVA timeout misconfiguration/bug). Retry." "(possible LAVA timeout misconfiguration/bug). Retry."
) )
if "case" in metadata and metadata["case"] == "validate": if "case" in metadata and metadata["case"] == "validate":
raise MesaCIRetriableException( raise MesaCIException(
f"LAVA job {job_id} failed validation (possible download error). Retry." f"LAVA job {job_id} failed validation (possible download error). Retry."
) )
@@ -195,6 +182,7 @@ def is_job_hanging(job, max_idle_time):
def parse_log_lines(job, log_follower, new_log_lines): def parse_log_lines(job, log_follower, new_log_lines):
if log_follower.feed(new_log_lines): if log_follower.feed(new_log_lines):
# If we had non-empty log data, we can assure that the device is alive. # If we had non-empty log data, we can assure that the device is alive.
job.heartbeat() job.heartbeat()
@@ -212,6 +200,7 @@ def parse_log_lines(job, log_follower, new_log_lines):
def fetch_new_log_lines(job): def fetch_new_log_lines(job):
# The XMLRPC binary packet may be corrupted, causing a YAML scanner error. # The XMLRPC binary packet may be corrupted, causing a YAML scanner error.
# Retry the log fetching several times before exposing the error. # Retry the log fetching several times before exposing the error.
for _ in range(5): for _ in range(5):
@@ -227,28 +216,14 @@ def submit_job(job):
try: try:
job.submit() job.submit()
except Exception as mesa_ci_err: except Exception as mesa_ci_err:
raise MesaCIRetriableException( raise MesaCIException(
f"Could not submit LAVA job. Reason: {mesa_ci_err}" f"Could not submit LAVA job. Reason: {mesa_ci_err}"
) from mesa_ci_err ) from mesa_ci_err
def wait_for_job_get_started(job, attempt_no): def wait_for_job_get_started(job):
print_log(f"Waiting for job {job.job_id} to start.") print_log(f"Waiting for job {job.job_id} to start.")
while not job.is_started(): while not job.is_started():
current_job_duration_sec: int = int(
(datetime.now(timezone.utc) - CI_JOB_STARTED_AT).total_seconds()
)
remaining_time_sec: int = max(0, CI_JOB_TIMEOUT_SEC - current_job_duration_sec)
if remaining_time_sec < EXPECTED_JOB_DURATION_SEC:
job.cancel()
raise MesaCIFatalException(
f"{CONSOLE_LOG['BOLD']}"
f"{CONSOLE_LOG['FG_YELLOW']}"
f"Job {job.job_id} only has {remaining_time_sec} seconds "
"remaining to run, but it is expected to take at least "
f"{EXPECTED_JOB_DURATION_SEC} seconds."
f"{CONSOLE_LOG['RESET']}",
)
time.sleep(WAIT_FOR_DEVICE_POLLING_TIME_SEC) time.sleep(WAIT_FOR_DEVICE_POLLING_TIME_SEC)
job.refresh_log() job.refresh_log()
print_log(f"Job {job.job_id} started.") print_log(f"Job {job.job_id} started.")
@@ -324,7 +299,7 @@ def execute_job_with_retries(
try: try:
job_log["submitter_start_time"] = datetime.now().isoformat() job_log["submitter_start_time"] = datetime.now().isoformat()
submit_job(job) submit_job(job)
wait_for_job_get_started(job, attempt_no) wait_for_job_get_started(job)
log_follower: LogFollower = bootstrap_log_follower() log_follower: LogFollower = bootstrap_log_follower()
follow_job_execution(job, log_follower) follow_job_execution(job, log_follower)
return job return job
@@ -343,8 +318,6 @@ def execute_job_with_retries(
f"Finished executing LAVA job in the attempt #{attempt_no}" f"Finished executing LAVA job in the attempt #{attempt_no}"
f"{CONSOLE_LOG['RESET']}" f"{CONSOLE_LOG['RESET']}"
) )
if job.exception and not isinstance(job.exception, MesaCIRetriableException):
break
return last_failed_job return last_failed_job
@@ -388,7 +361,6 @@ class LAVAJobSubmitter(PathResolver):
boot_method: str boot_method: str
ci_project_dir: str ci_project_dir: str
device_type: str device_type: str
farm: str
job_timeout_min: int # The job timeout in minutes job_timeout_min: int # The job timeout in minutes
build_url: str = None build_url: str = None
dtb_filename: str = None dtb_filename: str = None
@@ -499,9 +471,8 @@ class LAVAJobSubmitter(PathResolver):
if not last_attempt_job: if not last_attempt_job:
# No job was run, something bad happened # No job was run, something bad happened
STRUCTURAL_LOG["job_combined_status"] = "script_crash" STRUCTURAL_LOG["job_combined_status"] = "script_crash"
current_exception = str(sys.exc_info()[1]) current_exception = str(sys.exc_info()[0])
STRUCTURAL_LOG["job_combined_fail_reason"] = current_exception STRUCTURAL_LOG["job_combined_fail_reason"] = current_exception
print(f"Interrupting the script. Reason: {current_exception}")
raise SystemExit(1) raise SystemExit(1)
STRUCTURAL_LOG["job_combined_status"] = last_attempt_job.status STRUCTURAL_LOG["job_combined_status"] = last_attempt_job.status
@@ -517,7 +488,6 @@ class StructuredLoggerWrapper:
def _init_logger(self): def _init_logger(self):
STRUCTURAL_LOG["fixed_tags"] = self.__submitter.lava_tags STRUCTURAL_LOG["fixed_tags"] = self.__submitter.lava_tags
STRUCTURAL_LOG["dut_job_type"] = self.__submitter.device_type STRUCTURAL_LOG["dut_job_type"] = self.__submitter.device_type
STRUCTURAL_LOG["farm"] = self.__submitter.farm
STRUCTURAL_LOG["job_combined_fail_reason"] = None STRUCTURAL_LOG["job_combined_fail_reason"] = None
STRUCTURAL_LOG["job_combined_status"] = "not_submitted" STRUCTURAL_LOG["job_combined_status"] = "not_submitted"
STRUCTURAL_LOG["dut_attempt_counter"] = 0 STRUCTURAL_LOG["dut_attempt_counter"] = 0
@@ -539,6 +509,7 @@ class StructuredLoggerWrapper:
def logger_context(self): def logger_context(self):
context = contextlib.nullcontext() context = contextlib.nullcontext()
try: try:
global STRUCTURAL_LOG global STRUCTURAL_LOG
STRUCTURAL_LOG = StructuredLogger( STRUCTURAL_LOG = StructuredLogger(
self.__submitter.structured_log_file, truncate=True self.__submitter.structured_log_file, truncate=True
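One side of the wait_for_job_get_started hunk above adds a guard that cancels the LAVA job when the GitLab job has too little time left to run it. That guard is plain arithmetic over GitLab-provided values; a minimal sketch, assuming an illustrative helper name (the real check lives inline in the function shown above):

from datetime import datetime, timezone


def should_cancel(ci_job_started_at: datetime, ci_job_timeout_sec: int,
                  expected_job_duration_sec: int = 600) -> bool:
    # Example: a job started 55 minutes into a 60 minute timeout has only
    # 300 s left, which is below the expected 600 s, so it is cancelled
    # up front instead of being killed by GitLab halfway through.
    elapsed_sec = (datetime.now(timezone.utc) - ci_job_started_at).total_seconds()
    remaining_sec = max(0, ci_job_timeout_sec - int(elapsed_sec))
    return remaining_sec < expected_job_duration_sec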


@@ -23,10 +23,3 @@ KNOWN_ISSUE_R8152_PATTERNS: tuple[str, ...] = (
# This is considered noise, since LAVA produces this log after receiving a package of feedback # This is considered noise, since LAVA produces this log after receiving a package of feedback
# messages. # messages.
LOG_DEBUG_FEEDBACK_NOISE = "Listened to connection for namespace 'dut' done" LOG_DEBUG_FEEDBACK_NOISE = "Listened to connection for namespace 'dut' done"
A6XX_GPU_RECOVERY_WATCH_PERIOD_MIN = 3
A6XX_GPU_RECOVERY_FAILURE_MAX_COUNT = 30
A6XX_GPU_RECOVERY_FAILURE_MESSAGE = (
"cx gdsc didn't collapse",
"Timeout waiting for GMU OOB",
)
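The a6xx recovery constants above pair a set of kernel log messages with a failure-count threshold and a watch period. A purely illustrative sketch of the kind of sliding-window check such constants enable (this helper class is hypothetical; the submitter's actual detector is not shown in this diff):

from datetime import datetime, timedelta

# Mirrors of the constants above, so the sketch is self-contained.
A6XX_GPU_RECOVERY_WATCH_PERIOD_MIN = 3
A6XX_GPU_RECOVERY_FAILURE_MAX_COUNT = 30
A6XX_GPU_RECOVERY_FAILURE_MESSAGE = (
    "cx gdsc didn't collapse",
    "Timeout waiting for GMU OOB",
)


class GpuRecoveryWatch:
    """Counts a6xx recovery failures and reports when the DUT looks stuck."""

    def __init__(self) -> None:
        self.window_start = datetime.now()
        self.failures = 0

    def feed(self, log_line: str) -> bool:
        # Start a fresh window once the watch period has elapsed.
        now = datetime.now()
        if now - self.window_start > timedelta(minutes=A6XX_GPU_RECOVERY_WATCH_PERIOD_MIN):
            self.window_start, self.failures = now, 0
        if any(msg in log_line for msg in A6XX_GPU_RECOVERY_FAILURE_MESSAGE):
            self.failures += 1
        # Too many recovery failures within the window: treat the DUT as hung.
        return self.failures >= A6XX_GPU_RECOVERY_FAILURE_MAX_COUNT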


@@ -6,7 +6,6 @@ from typing import Any, Optional
 from lava.exceptions import (
     MesaCIException,
-    MesaCIRetriableException,
     MesaCIKnownIssueException,
     MesaCIParseException,
     MesaCITimeoutError,

@@ -35,7 +34,7 @@ class LAVAJob:
         self._is_finished = False
         self.log: dict[str, Any] = log
         self.status = "not_submitted"
-        self.__exception: Optional[Exception] = None
+        self.__exception: Optional[str] = None

     def heartbeat(self) -> None:
         self.last_log_time: datetime = datetime.now()

@@ -64,13 +63,13 @@ class LAVAJob:
         return self._is_finished

     @property
-    def exception(self) -> Optional[Exception]:
+    def exception(self) -> str:
         return self.__exception

     @exception.setter
     def exception(self, exception: Exception) -> None:
-        self.__exception = exception
-        self.log["dut_job_fail_reason"] = repr(self.__exception)
+        self.__exception = repr(exception)
+        self.log["dut_job_fail_reason"] = self.__exception

     def validate(self) -> Optional[dict]:
         """Returns a dict with errors, if the validation fails.

@@ -177,15 +176,11 @@ class LAVAJob:
             self.status = "canceled"
         elif isinstance(exception, MesaCITimeoutError):
             self.status = "hung"
-        elif isinstance(exception, MesaCIRetriableException):
+        elif isinstance(exception, MesaCIException):
             self.status = "failed"
         elif isinstance(exception, KeyboardInterrupt):
             self.status = "interrupted"
             print_log("LAVA job submitter was interrupted. Cancelling the job.")
             raise
-        elif isinstance(exception, MesaCIException):
-            self.status = "interrupted"
-            print_log("LAVA job submitter was interrupted. Cancelling the job.")
-            raise
         else:
             self.status = "job_submitter_error"


@@ -15,8 +15,6 @@ from lava.utils.uart_job_definition import (
     fastboot_deploy_actions,
     tftp_boot_action,
     tftp_deploy_actions,
-    qemu_boot_action,
-    qemu_deploy_actions,
     uart_test_actions,
 )

@@ -34,10 +32,6 @@ class LAVAJobDefinition:
     def __init__(self, job_submitter: "LAVAJobSubmitter") -> None:
         self.job_submitter: "LAVAJobSubmitter" = job_submitter
-        # NFS args provided by LAVA
-        self.lava_nfs_args: str = "root=/dev/nfs rw nfsroot=$NFS_SERVER_IP:$NFS_ROOTFS,tcp,hard,v3 ip=dhcp"
-        # extra_nfsroot_args appends to cmdline
-        self.extra_nfsroot_args: str = " init=/init rootwait usbcore.quirks=0bda:8153:k"

     def has_ssh_support(self) -> bool:
         if FORCE_UART:

@@ -61,11 +55,11 @@
         actions for the LAVA job submission.
         """
         args = self.job_submitter
-        values = self.generate_metadata()
         nfsrootfs = {
             "url": f"{args.rootfs_url_prefix}/lava-rootfs.tar.zst",
             "compression": "zstd",
         }
+        values = self.generate_metadata()

         init_stage1_steps = self.init_stage1_steps()
         artifact_download_steps = self.artifact_download_steps()

@@ -77,9 +71,6 @@
         if args.boot_method == "fastboot":
             deploy_actions = fastboot_deploy_actions(self, nfsrootfs)
             boot_action = fastboot_boot_action(args)
-        elif args.boot_method == "qemu-nfs":
-            deploy_actions = qemu_deploy_actions(self, nfsrootfs)
-            boot_action = qemu_boot_action(args)
         else: # tftp
             deploy_actions = tftp_deploy_actions(self, nfsrootfs)
             boot_action = tftp_boot_action(args)

@@ -126,7 +117,7 @@
             "device_type": self.job_submitter.device_type,
             "visibility": {"group": [self.job_submitter.visibility_group]},
             "priority": JOB_PRIORITY,
-            "context": {"extra_nfsroot_args": self.extra_nfsroot_args},
+            "context": {"extra_nfsroot_args": " init=/init rootwait usbcore.quirks=0bda:8153:k"},
             "timeouts": {
                 "job": {"minutes": self.job_submitter.job_timeout_min},
                 "actions": {

@@ -151,10 +142,6 @@
         if self.job_submitter.lava_tags:
             values["tags"] = self.job_submitter.lava_tags.split(",")
-        # QEMU lava jobs mandate proper arch value in the context
-        if self.job_submitter.boot_method == "qemu-nfs":
-            values["context"]["arch"] = self.job_submitter.mesa_job_name.split(":")[1]

         return values

     def attach_kernel_and_dtb(self, deploy_field):

@@ -197,7 +184,7 @@
                 "set +x # HIDE_START",
                 f'echo -n "{jwt_file.read()}" > "{self.job_submitter.jwt_file}"',
                 "set -x # HIDE_END",
-                f'echo "export S3_JWT_FILE={self.job_submitter.jwt_file}" >> /set-job-env-vars.sh',
+                f'echo "export CI_JOB_JWT_FILE={self.job_submitter.jwt_file}" >> /set-job-env-vars.sh',
             ]
         else:
             download_steps += [

@@ -216,13 +203,7 @@
         # - exec .gitlab-ci/common/init-stage2.sh
         with open(self.job_submitter.first_stage_init, "r") as init_sh:
-            # For vmware farm, patch nameserver as 8.8.8.8 is off limit.
-            # This is temporary and will be reverted once the farm is moved.
-            if self.job_submitter.mesa_job_name.startswith("vmware-"):
-                run_steps += [x.rstrip().replace("nameserver 8.8.8.8", "nameserver 10.25.198.110") for x in init_sh if not x.startswith("#") and x.rstrip()]
-            else:
-                run_steps += [x.rstrip() for x in init_sh if not x.startswith("#") and x.rstrip()]
+            run_steps += [x.rstrip() for x in init_sh if not x.startswith("#") and x.rstrip()]

         # We cannot distribute the Adreno 660 shader firmware inside rootfs,
         # since the license isn't bundled inside the repository
         if self.job_submitter.device_type == "sm8350-hdk":
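
The dense comprehension in the last hunk drops comments and blank lines from the first-stage init script; the removed branch additionally rewrites the nameserver for vmware-prefixed jobs. An equivalent, more explicit sketch of the same filtering (the function and the example path are hypothetical, the addresses are copied from the hunk):

def load_init_steps(path: str, patch_nameserver: bool = False) -> list[str]:
    """Read an init script, dropping comments and blank lines.

    Optionally rewrite the hard-coded nameserver, mirroring the vmware-farm
    special case shown in the hunk above.
    """
    steps: list[str] = []
    with open(path, "r") as init_sh:
        for line in init_sh:
            line = line.rstrip()
            if not line or line.startswith("#"):
                continue
            if patch_nameserver:
                line = line.replace("nameserver 8.8.8.8", "nameserver 10.25.198.110")
            steps.append(line)
    return steps

# Example (hypothetical path):
# run_steps = load_init_steps("init-stage1.sh", patch_nameserver=job_name.startswith("vmware-"))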


@@ -1,9 +1,8 @@
 from __future__ import annotations

 import re
-from datetime import datetime, timedelta
 from dataclasses import dataclass, field
-from typing import TYPE_CHECKING, Any, Optional, Sequence
+from typing import TYPE_CHECKING, Any, Sequence

 if TYPE_CHECKING:
     from lava.utils import LogFollower

@@ -14,9 +13,6 @@ from lava.utils.constants import (
     KNOWN_ISSUE_R8152_MAX_CONSECUTIVE_COUNTER,
     LOG_DEBUG_FEEDBACK_NOISE,
     KNOWN_ISSUE_R8152_PATTERNS,
-    A6XX_GPU_RECOVERY_WATCH_PERIOD_MIN,
-    A6XX_GPU_RECOVERY_FAILURE_MESSAGE,
-    A6XX_GPU_RECOVERY_FAILURE_MAX_COUNT,
 )
 from lava.utils.log_section import LogSectionType

@@ -33,8 +29,6 @@ class LAVALogHints:
     log_follower: LogFollower
     r8152_issue_consecutive_counter: int = field(default=0, init=False)
     reboot_counter: int = field(default=0, init=False)
-    a6xx_gpu_recovery_fail_counter: int = field(default=0, init=False)
-    a6xx_gpu_first_fail_time: Optional[datetime] = field(default=None, init=False)

     def raise_known_issue(self, message) -> None:
         raise MesaCIKnownIssueException(

@@ -50,7 +44,6 @@
                 continue
             self.detect_r8152_issue(line)
             self.detect_forced_reboot(line)
-            self.detect_a6xx_gpu_recovery_failure(line)

     def detect_r8152_issue(self, line):
         if self.log_follower.phase in (

@@ -84,23 +77,3 @@
             self.raise_known_issue(
                 "Forced reboot detected during test phase, failing the job..."
             )
-
-    # If the a6xx gpu repeatedly fails to recover over a short period of time,
-    # then successful recovery is unlikely so cancel the job preemptively.
-    def detect_a6xx_gpu_recovery_failure(self, line: dict[str, Any]) -> None:
-        if search_known_issue_patterns(A6XX_GPU_RECOVERY_FAILURE_MESSAGE, line["msg"]):
-            time_of_failure = datetime.fromisoformat(line["dt"])
-            self.a6xx_gpu_recovery_fail_counter += 1
-
-            if self.a6xx_gpu_first_fail_time is None:
-                self.a6xx_gpu_first_fail_time = time_of_failure
-
-            if self.a6xx_gpu_recovery_fail_counter == A6XX_GPU_RECOVERY_FAILURE_MAX_COUNT:
-                time_since_first_fail = time_of_failure - self.a6xx_gpu_first_fail_time
-                if time_since_first_fail <= timedelta(minutes=A6XX_GPU_RECOVERY_WATCH_PERIOD_MIN):
-                    self.raise_known_issue(
-                        "Repeated GPU recovery failure detected: cancelling the job"
-                    )
-                else:
-                    self.a6xx_gpu_first_fail_time = None
-                    self.a6xx_gpu_recovery_fail_counter = 0
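
The detect_a6xx_gpu_recovery_failure method above only raises once A6XX_GPU_RECOVERY_FAILURE_MAX_COUNT matching messages land inside an A6XX_GPU_RECOVERY_WATCH_PERIOD_MIN-minute window. A stripped-down sketch of that windowed-counter pattern, detached from the LAVA classes and constants module so it can run standalone:

from datetime import datetime, timedelta
from typing import Optional

MAX_COUNT = 30          # mirrors A6XX_GPU_RECOVERY_FAILURE_MAX_COUNT
WATCH_PERIOD_MIN = 3    # mirrors A6XX_GPU_RECOVERY_WATCH_PERIOD_MIN

class WindowedFailureDetector:
    """Raise only if MAX_COUNT failures occur within the watch period."""
    def __init__(self) -> None:
        self.counter = 0
        self.first_fail_time: Optional[datetime] = None

    def record_failure(self, when: datetime) -> None:
        self.counter += 1
        if self.first_fail_time is None:
            self.first_fail_time = when
        if self.counter == MAX_COUNT:
            if when - self.first_fail_time <= timedelta(minutes=WATCH_PERIOD_MIN):
                raise RuntimeError("repeated GPU recovery failure, cancelling")
            # Failures too spread out: start a fresh window.
            self.first_fail_time = None
            self.counter = 0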


@@ -18,7 +18,6 @@ def fastboot_deploy_actions(
     job_definition: "LAVAJobDefinition", nfsrootfs
 ) -> tuple[dict[str, Any], ...]:
     args = job_definition.job_submitter
-    cmdline = f"{job_definition.lava_nfs_args}{job_definition.extra_nfsroot_args}"
     fastboot_deploy_nfs = {
         "timeout": {"minutes": 10},
         "to": "nfs",

@@ -40,7 +39,7 @@
         "steps": [
             f"cat Image.gz {args.dtb_filename}.dtb > Image.gz+dtb",
             "mkbootimg --kernel Image.gz+dtb"
-            + f' --cmdline "{cmdline}"'
+            + ' --cmdline "root=/dev/nfs rw nfsroot=$NFS_SERVER_IP:$NFS_ROOTFS,tcp,hard rootwait ip=dhcp init=/init"'
             + " --pagesize 4096 --base 0x80000000 -o boot.img",
         ],
     }

@@ -83,24 +82,6 @@ def tftp_deploy_actions(job_definition: "LAVAJobDefinition", nfsrootfs) -> tuple
     return (tftp_deploy,)

-
-def qemu_deploy_actions(job_definition: "LAVAJobDefinition", nfsrootfs) -> tuple[dict[str, Any]]:
-    args = job_definition.job_submitter
-    qemu_deploy = {
-        "timeout": {"minutes": 5},
-        "to": "nfs",
-        "images": {
-            "kernel": {
-                "image_arg": "-kernel {kernel}",
-                "url": f"{args.kernel_url_prefix}/{args.kernel_image_name}",
-            },
-            "nfsrootfs": nfsrootfs,
-        },
-    }
-    job_definition.attach_external_modules(qemu_deploy)
-
-    return (qemu_deploy,)
-

 def uart_test_actions(
     args: "LAVAJobSubmitter", init_stage1_steps: list[str], artifact_download_steps: list[str]
 ) -> tuple[dict[str, Any]]:

@@ -159,16 +140,6 @@
     return tftp_boot

-
-def qemu_boot_action(args: "LAVAJobSubmitter") -> dict[str, Any]:
-    qemu_boot = {
-        "failure_retry": NUMBER_OF_ATTEMPTS_LAVA_BOOT,
-        "method": args.boot_method,
-        "prompts": ["lava-shell:"],
-    }
-
-    return qemu_boot
-

 def fastboot_boot_action(args: "LAVAJobSubmitter") -> dict[str, Any]:
     fastboot_boot = {
         "timeout": {"minutes": 2},


@@ -104,7 +104,7 @@ rm -rf _build
 meson setup _build \
       --native-file=native.file \
       --wrap-mode=nofallback \
-      --force-fallback-for perfetto,syn,paste,pest,pest_derive,pest_generator,pest_meta,roxmltree,indexmap \
+      --force-fallback-for perfetto,syn \
      ${CROSS+--cross "$CROSS_FILE"} \
      -D prefix=$PWD/install \
      -D libdir=lib \


@@ -13,8 +13,7 @@ INSTALL="$PWD/install"

 # Set up the driver environment.
 export LD_LIBRARY_PATH="$INSTALL/lib/"
 export EGL_PLATFORM=surfaceless
-ARCH=$(uname -m)
-export VK_DRIVER_FILES="$INSTALL/share/vulkan/icd.d/${VK_DRIVER}_icd.$ARCH.json"
+export VK_ICD_FILENAMES="$INSTALL/share/vulkan/icd.d/${VK_DRIVER}_icd.${VK_CPU:-$(uname -m)}.json"

 RESULTS=$PWD/${PIGLIT_RESULTS_DIR:-results}
 mkdir -p $RESULTS
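
One side of this hunk exports VK_DRIVER_FILES, the other the older VK_ICD_FILENAMES spelling of the same Vulkan loader override. A hedged sketch of building that environment for a subprocess, setting both names so either loader generation honours it; the path layout is copied from the hunk, the helper itself is hypothetical:

import os
import platform

def vulkan_icd_env(install_dir: str, vk_driver: str) -> dict[str, str]:
    """Return an environment pointing the Vulkan loader at a locally built ICD."""
    arch = platform.machine()  # e.g. "x86_64", like $(uname -m) in the script
    icd_json = os.path.join(install_dir, "share", "vulkan", "icd.d",
                            f"{vk_driver}_icd.{arch}.json")
    env = dict(os.environ)
    env["VK_DRIVER_FILES"] = icd_json      # current loader variable
    env["VK_ICD_FILENAMES"] = icd_json     # deprecated spelling, still honoured
    return env

# Example: subprocess.run(["vulkaninfo"], env=vulkan_icd_env("install", "radeon"))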


@@ -8,7 +8,7 @@ set -ex
 export PAGER=cat # FIXME: export everywhere

 INSTALL=$(realpath -s "$PWD"/install)
-S3_ARGS="--token-file ${S3_JWT_FILE}"
+S3_ARGS="--token-file ${CI_JOB_JWT_FILE}"

 RESULTS=$(realpath -s "$PWD"/results)
 mkdir -p "$RESULTS"

@@ -54,8 +54,7 @@ if [ -n "${VK_DRIVER}" ]; then
     export DXVK_LOG="$RESULTS/dxvk"
     [ -d "$DXVK_LOG" ] || mkdir -pv "$DXVK_LOG"
     export DXVK_STATE_CACHE=0
-    ARCH=$(uname -m)
-    export VK_DRIVER_FILES="$INSTALL/share/vulkan/icd.d/${VK_DRIVER}_icd.$ARCH.json"
+    export VK_ICD_FILENAMES="$INSTALL/share/vulkan/icd.d/${VK_DRIVER}_icd.${VK_CPU:-$(uname -m)}.json"
 fi

 # Sanity check to ensure that our environment is sufficient to make our tests

@@ -118,7 +117,8 @@ else
     mkdir -p /tmp/.X11-unix

     env \
-      weston -Bheadless-backend.so --use-gl -Swayland-0 --xwayland --idle-time=0 &
+      VK_ICD_FILENAMES="/install/share/vulkan/icd.d/${VK_DRIVER}_icd.$(uname -m).json" \
+      weston -Bheadless-backend.so --use-gl -Swayland-0 --xwayland --idle-time=0 &

     while [ ! -S "$WESTON_X11_SOCK" ]; do sleep 1; done
 }

@@ -189,15 +189,6 @@
 # run.
 rm -rf replayer-db

-# ANGLE: download compiled ANGLE runtime and the compiled restricted traces (all-in-one package)
-if [ -n "$PIGLIT_REPLAY_ANGLE_TAG" ]; then
-  ARCH="amd64"
-  FILE="angle-bin-${ARCH}-${PIGLIT_REPLAY_ANGLE_TAG}.tar.zst"
-  ci-fairy s3cp $S3_ARGS "https://s3.freedesktop.org/mesa-tracie-private/${FILE}" "${FILE}"
-  mkdir -p replayer-db/angle
-  tar --zstd -xf ${FILE} -C replayer-db/angle/
-fi
-
 if ! eval $RUN_CMD;
 then
     printf "%s\n" "Found $(cat /tmp/version.txt), expected $MESA_VERSION"


@@ -38,6 +38,7 @@ cp -Rp .gitlab-ci/fossilize-runner.sh install/
 cp -Rp .gitlab-ci/crosvm-init.sh install/
 cp -Rp .gitlab-ci/*.txt install/
 cp -Rp .gitlab-ci/report-flakes.py install/
+cp -Rp .gitlab-ci/valve install/
 cp -Rp .gitlab-ci/vkd3d-proton install/
 cp -Rp .gitlab-ci/setup-test-env.sh install/
 cp -Rp .gitlab-ci/*-runner.sh install/

@@ -60,7 +61,7 @@ if [ -n "$S3_ARTIFACT_NAME" ]; then
   # Pass needed files to the test stage
   S3_ARTIFACT_NAME="$S3_ARTIFACT_NAME.tar.zst"
   zstd artifacts/install.tar -o ${S3_ARTIFACT_NAME}
-  ci-fairy s3cp --token-file "${S3_JWT_FILE}" ${S3_ARTIFACT_NAME} https://${PIPELINE_ARTIFACTS_BASE}/${S3_ARTIFACT_NAME}
+  ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" ${S3_ARTIFACT_NAME} https://${PIPELINE_ARTIFACTS_BASE}/${S3_ARTIFACT_NAME}
 fi

 section_end prepare-artifacts


@@ -10,7 +10,7 @@ export LD_LIBRARY_PATH=$LIBDIR

 cd /usr/local/shader-db

-for driver in freedreno intel lima v3d vc4; do
+for driver in freedreno intel v3d vc4; do
     section_start shader-db-${driver} "Running shader-db for $driver"
     env LD_PRELOAD="$LIBDIR/lib${driver}_noop_drm_shim.so" \
         ./run -j"${FDO_CI_CONCURRENT:-4}" ./shaders \

@@ -19,14 +19,14 @@ for driver in freedreno intel lima v3d vc4; do
 done

 # Run shader-db over a number of supported chipsets for nouveau
-for chipset in 40 a3 c0 e4 f0 134 162; do
-    section_start shader-db-nouveau-${chipset} "Running shader-db for nouveau - ${chipset}"
-    env LD_PRELOAD="$LIBDIR/libnouveau_noop_drm_shim.so" \
-        NOUVEAU_CHIPSET=${chipset} \
-        ./run -j"${FDO_CI_CONCURRENT:-4}" ./shaders \
-        > "$ARTIFACTSDIR/nouveau-${chipset}-shader-db.txt"
-    section_end shader-db-nouveau-${chipset}
-done
+#for chipset in 40 a3 c0 e4 f0 134 162; do
+#    section_start shader-db-nouveau-${chipset} "Running shader-db for nouveau - ${chipset}"
+#    env LD_PRELOAD="$LIBDIR/libnouveau_noop_drm_shim.so" \
+#        NOUVEAU_CHIPSET=${chipset} \
+#        ./run -j"${FDO_CI_CONCURRENT:-4}" ./shaders \
+#        > "$ARTIFACTSDIR/nouveau-${chipset}-shader-db.txt"
+#    section_end shader-db-nouveau-${chipset}
+#done

 # Run shader-db for r300 (RV370 and RV515)
 for chipset in 0x5460 0x7140; do


@@ -14,14 +14,6 @@ function x_off {

 # TODO: implement x_on !

-export JOB_START_S=$(date -u +"%s" -d "${CI_JOB_STARTED_AT:?}")
-
-function get_current_minsec {
-    DATE_S=$(date -u +"%s")
-    CURR_TIME=$((DATE_S-JOB_START_S))
-    printf "%02d:%02d" $((CURR_TIME/60)) $((CURR_TIME%60))
-}
-
 function error {
     x_off 2>/dev/null
     RED="\e[0;31m"

@@ -29,7 +21,10 @@ function error {
     # we force the following to be not in a section
     section_end $CURRENT_SECTION

-    CURR_MINSEC=$(get_current_minsec)
+    DATE_S=$(date -u +"%s")
+    JOB_START_S=$(date -u +"%s" -d "${CI_JOB_STARTED_AT:?}")
+    CURR_TIME=$((DATE_S-JOB_START_S))
+    CURR_MINSEC="$(printf "%02d" $((CURR_TIME/60))):$(printf "%02d" $((CURR_TIME%60)))"
     echo -e "\n${RED}[${CURR_MINSEC}] ERROR: $*${ENDCOLOR}\n"
     [ "$state_x" -eq 0 ] || set -x
 }

@@ -47,7 +42,10 @@ function build_section_start {
     CYAN="\e[0;36m"
     ENDCOLOR="\e[0m"

-    CURR_MINSEC=$(get_current_minsec)
+    DATE_S=$(date -u +"%s")
+    JOB_START_S=$(date -u +"%s" -d "${CI_JOB_STARTED_AT:?}")
+    CURR_TIME=$((DATE_S-JOB_START_S))
+    CURR_MINSEC="$(printf "%02d" $((CURR_TIME/60))):$(printf "%02d" $((CURR_TIME%60)))"
     echo -e "\n\e[0Ksection_start:$(date +%s):$section_name$section_params\r\e[0K${CYAN}[${CURR_MINSEC}] $*${ENDCOLOR}\n"
 }

@@ -89,7 +87,6 @@ function uncollapsed_section_switch {
 }

 export -f x_off
-export -f get_current_minsec
 export -f error
 export -f trap_err
 export -f build_section_start
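
Both variants of error and build_section_start print the elapsed job time as MM:SS computed from CI_JOB_STARTED_AT; one side merely factors it into a get_current_minsec helper. The same computation in a small Python sketch, only to make the arithmetic explicit (the environment variable name is the one used in the hunk):

import os
from datetime import datetime, timezone
from typing import Optional

def current_minsec(now: Optional[datetime] = None) -> str:
    """Elapsed time since CI_JOB_STARTED_AT, formatted as MM:SS."""
    raw = os.environ["CI_JOB_STARTED_AT"].replace("Z", "+00:00")  # tolerate the Z suffix
    started = datetime.fromisoformat(raw)
    now = now or datetime.now(timezone.utc)
    elapsed = int((now - started).total_seconds())
    return f"{elapsed // 60:02d}:{elapsed % 60:02d}"

# Example: with CI_JOB_STARTED_AT="2024-03-27T17:49:03+00:00" this prints something like "05:42"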


@@ -6,11 +6,6 @@
   rules:
     - if: &is-scheduled-pipeline '$CI_PIPELINE_SOURCE == "schedule"'
       when: on_success
-
-# Override of the `default: retry:` settings, which automatically retries jobs
-# if one of the tests result didn't match its expectation; this override
-# disables that, but keeps the auto-retry for infrastructure failures.
-.no-auto-retry:
   retry:
     max: 1
     # Don't retry on script_failure, job_execution_timeout, runner_unsupported,

@@ -40,7 +35,7 @@
 .restricted-rules:
   rules:
     # If the triggerer has access to the restricted traces and if it is pre-merge
-    - if: '($GITLAB_USER_LOGIN !~ "/^(robclark|anholt|flto|cwabbott0|Danil|tomeu|okias|gallo|kwg|llanderwelin|zmike|vigneshraman)$/") &&
+    - if: '($GITLAB_USER_LOGIN !~ "/^(robclark|anholt|flto|cwabbott0|Danil|tomeu|okias|gallo|kwg|majanes|llanderwelin|zmike|vigneshraman)$/") &&
           ($GITLAB_USER_LOGIN != "marge-bot" || $CI_COMMIT_BRANCH)'
       when: never

@@ -72,7 +67,7 @@
 # Same core dependencies for doing manual runs.
 .core-manual-rules:
-  extends: .no-auto-retry
+  retry: !reference [.scheduled_pipeline-rules, retry]
   rules:
     # We only want manual jobs to show up when it's not marge's pre-merge CI
     # run, otherwise she'll wait until her timeout. The exception is

@@ -130,7 +125,7 @@
   extends:
     - .piglit-performance-base
   needs:
-    - debian/baremetal_arm64_test
+    - debian/arm64_test
     - debian-arm64-release

 .piglit-performance:x86_64:

@@ -179,7 +174,7 @@
     - !reference [.gallium-core-rules, rules]

 .gl-manual-rules:
-  extends: .no-auto-retry
+  retry: !reference [.scheduled_pipeline-rules, retry]
   rules:
     - !reference [.core-manual-rules, rules]
     - changes:

@@ -198,7 +193,7 @@
       when: on_success

 .vulkan-manual-rules:
-  extends: .no-auto-retry
+  retry: !reference [.scheduled_pipeline-rules, retry]
   rules:
     - !reference [.core-manual-rules, rules]
     - changes:

@@ -232,10 +227,7 @@
 .lint-rustfmt-rules:
   rules:
     - !reference [.never-post-merge-rules, rules]
-    - !reference [.no_scheduled_pipelines-rules, rules]
-    - changes:
-        - .gitlab-ci.yml
-        - .gitlab-ci/**/*
+    - !reference [.core-rules, rules]
     # in merge pipeline, formatting checks are not allowed to fail
     - if: $GITLAB_USER_LOGIN == "marge-bot" && $CI_PIPELINE_SOURCE == "merge_request_event"
       changes: &rust_file_list

@@ -246,13 +238,3 @@
     - changes: *rust_file_list
       when: on_success
       allow_failure: true
-
-# Rules for .mr-label-maker.yml
-.mr-label-maker-rules:
-  rules:
-    - !reference [.never-post-merge-rules, rules]
-    - !reference [.no_scheduled_pipelines-rules, rules]
-    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
-      changes:
-        - .mr-label-maker.yml
-      when: on_success


@@ -43,7 +43,7 @@ rustfmt:
- rustfmt --verbose src/**/lib.rs - rustfmt --verbose src/**/lib.rs
- rustfmt --verbose src/**/main.rs - rustfmt --verbose src/**/main.rs
python-test: .test-check:
# Cancel job if a newer commit is pushed to the same branch # Cancel job if a newer commit is pushed to the same branch
interruptible: true interruptible: true
stage: code-validation stage: code-validation
@@ -52,6 +52,10 @@ python-test:
variables: variables:
GIT_STRATEGY: fetch GIT_STRATEGY: fetch
timeout: 10m timeout: 10m
python-test:
extends:
- .test-check
script: script:
- cd bin/ci - cd bin/ci
- pip install --break-system-packages -r test/requirements.txt - pip install --break-system-packages -r test/requirements.txt
@@ -59,18 +63,8 @@ python-test:
rules: rules:
- !reference [.disable-farm-mr-rules, rules] - !reference [.disable-farm-mr-rules, rules]
- !reference [.never-post-merge-rules, rules] - !reference [.never-post-merge-rules, rules]
- if: $CI_PIPELINE_SOURCE == "schedule" - changes:
when: on_success
- if: $CI_PIPELINE_SOURCE == "push" && $CI_PROJECT_NAMESPACE == "mesa" && $GITLAB_USER_LOGIN != "marge-bot"
when: on_success
- if: $GITLAB_USER_LOGIN == "marge-bot"
changes: &bin_ci_files
- .gitlab-ci.yml
- .gitlab-ci/**/*
- bin/ci/**/* - bin/ci/**/*
when: on_success
- changes: *bin_ci_files
when: manual
.test-gl: .test-gl:
extends: extends:
@@ -123,11 +117,31 @@ python-test:
paths: paths:
- results/ - results/
.b2c-vkd3d-proton-test: .vkd3d-proton-test:
artifacts:
when: on_failure
name: "mesa_${CI_JOB_NAME}"
paths:
- results/vkd3d-proton.log
script:
- ./install/vkd3d-proton/run.sh
.piglit-test:
artifacts:
name: "mesa_${CI_JOB_NAME}"
paths:
- results
reports:
junit: results/junit.xml
variables: variables:
HWCI_TEST_SCRIPT: ./install/vkd3d-proton/run.sh PIGLIT_NO_WINDOW: 1
HWCI_TEST_SCRIPT: "/install/piglit/piglit-runner.sh"
script:
- install/piglit/piglit-runner.sh
.piglit-traces-test: .piglit-traces-test:
extends:
- .piglit-test
artifacts: artifacts:
when: on_failure when: on_failure
name: "mesa_${CI_JOB_NAME}" name: "mesa_${CI_JOB_NAME}"
@@ -138,11 +152,9 @@ python-test:
exclude: exclude:
- results/*.shader_cache - results/*.shader_cache
variables: variables:
PIGLIT_REPLAY_EXTRA_ARGS: --db-path ${CI_PROJECT_DIR}/replayer-db/ --minio_bucket=mesa-tracie-public --jwt-file=${CI_JOB_JWT_FILE}
# until we overcome Infrastructure issues, give traces extra 5 min before timeout # until we overcome Infrastructure issues, give traces extra 5 min before timeout
DEVICE_HANGING_TIMEOUT_SEC: 600 DEVICE_HANGING_TIMEOUT_SEC: 600
PIGLIT_REPLAY_EXTRA_ARGS: --db-path ${CI_PROJECT_DIR}/replayer-db/ --minio_bucket=${S3_TRACIE_PUBLIC_BUCKET} --jwt-file=${S3_JWT_FILE}
PIGLIT_NO_WINDOW: 1
HWCI_TEST_SCRIPT: "/install/piglit/piglit-runner.sh"
script: script:
- section_start variables "Variables passed through:" - section_start variables "Variables passed through:"
- install/common/generate-env.sh - install/common/generate-env.sh
@@ -168,7 +180,11 @@ python-test:
paths: paths:
- results/ - results/
.download_s3: .baremetal-test:
extends:
- .test
# Cancel job if a newer commit is pushed to the same branch
interruptible: true
before_script: before_script:
- !reference [default, before_script] - !reference [default, before_script]
# Use this instead of gitlab's artifacts download because it hits packet.net # Use this instead of gitlab's artifacts download because it hits packet.net
@@ -180,14 +196,6 @@ python-test:
- rm -rf install - rm -rf install
- (set -x; curl -L --retry 4 -f --retry-all-errors --retry-delay 60 ${FDO_HTTP_CACHE_URI:-}https://${PIPELINE_ARTIFACTS_BASE}/${S3_ARTIFACT_NAME}.tar.zst | tar --zstd -x) - (set -x; curl -L --retry 4 -f --retry-all-errors --retry-delay 60 ${FDO_HTTP_CACHE_URI:-}https://${PIPELINE_ARTIFACTS_BASE}/${S3_ARTIFACT_NAME}.tar.zst | tar --zstd -x)
- section_end artifacts_download - section_end artifacts_download
.baremetal-test:
extends:
- .test
# Cancel job if a newer commit is pushed to the same branch
interruptible: true
before_script:
- !reference [.download_s3, before_script]
variables: variables:
BM_ROOTFS: /rootfs-${DEBIAN_ARCH} BM_ROOTFS: /rootfs-${DEBIAN_ARCH}
artifacts: artifacts:
@@ -205,12 +213,12 @@ python-test:
.baremetal-test-arm32: .baremetal-test-arm32:
extends: extends:
- .baremetal-test - .baremetal-test
- .use-debian/baremetal_arm32_test - .use-debian/arm32_test
variables: variables:
DEBIAN_ARCH: armhf DEBIAN_ARCH: armhf
S3_ARTIFACT_NAME: mesa-arm32-default-debugoptimized S3_ARTIFACT_NAME: mesa-arm32-default-debugoptimized
needs: needs:
- debian/baremetal_arm32_test - debian/arm32_test
- job: debian-arm32 - job: debian-arm32
artifacts: false artifacts: false
- !reference [.required-for-hardware-jobs, needs] - !reference [.required-for-hardware-jobs, needs]
@@ -219,12 +227,12 @@ python-test:
.baremetal-test-arm64: .baremetal-test-arm64:
extends: extends:
- .baremetal-test - .baremetal-test
- .use-debian/baremetal_arm64_test - .use-debian/arm64_test
variables: variables:
DEBIAN_ARCH: arm64 DEBIAN_ARCH: arm64
S3_ARTIFACT_NAME: mesa-arm64-default-debugoptimized S3_ARTIFACT_NAME: mesa-arm64-default-debugoptimized
needs: needs:
- debian/baremetal_arm64_test - debian/arm64_test
- job: debian-arm64 - job: debian-arm64
artifacts: false artifacts: false
- !reference [.required-for-hardware-jobs, needs] - !reference [.required-for-hardware-jobs, needs]
@@ -233,12 +241,12 @@ python-test:
.baremetal-arm32-asan-test: .baremetal-arm32-asan-test:
extends: extends:
- .baremetal-test - .baremetal-test
- .use-debian/baremetal_arm32_test - .use-debian/arm32_test
variables: variables:
DEQP_RUNNER_OPTIONS: "--env LD_PRELOAD=libasan.so.8:/install/lib/libdlclose-skip.so" DEQP_RUNNER_OPTIONS: "--env LD_PRELOAD=libasan.so.8:/install/lib/libdlclose-skip.so"
S3_ARTIFACT_NAME: mesa-arm32-asan-debugoptimized S3_ARTIFACT_NAME: mesa-arm32-asan-debugoptimized
needs: needs:
- debian/baremetal_arm32_test - debian/arm32_test
- job: debian-arm32-asan - job: debian-arm32-asan
artifacts: false artifacts: false
- !reference [.required-for-hardware-jobs, needs] - !reference [.required-for-hardware-jobs, needs]
@@ -246,12 +254,12 @@ python-test:
.baremetal-arm64-asan-test: .baremetal-arm64-asan-test:
extends: extends:
- .baremetal-test - .baremetal-test
- .use-debian/baremetal_arm64_test - .use-debian/arm64_test
variables: variables:
DEQP_RUNNER_OPTIONS: "--env LD_PRELOAD=libasan.so.8:/install/lib/libdlclose-skip.so" DEQP_RUNNER_OPTIONS: "--env LD_PRELOAD=libasan.so.8:/install/lib/libdlclose-skip.so"
S3_ARTIFACT_NAME: mesa-arm64-asan-debugoptimized S3_ARTIFACT_NAME: mesa-arm64-asan-debugoptimized
needs: needs:
- debian/baremetal_arm64_test - debian/arm64_test
- job: debian-arm64-asan - job: debian-arm64-asan
artifacts: false artifacts: false
- !reference [.required-for-hardware-jobs, needs] - !reference [.required-for-hardware-jobs, needs]
@@ -275,24 +283,28 @@ python-test:
# built as part of the CI in the boot2container project. # built as part of the CI in the boot2container project.
image: registry.freedesktop.org/gfx-ci/ci-tron/mesa-trigger:2024-01-05.1 image: registry.freedesktop.org/gfx-ci/ci-tron/mesa-trigger:2024-01-05.1
timeout: 1h 40m timeout: 1h 40m
rules:
- if: $FORCE_KERNEL_TAG != null
when: never
variables: variables:
# No need by default to pull the whole repo # No need by default to pull the whole repo
GIT_STRATEGY: none GIT_STRATEGY: none
# boot2container initrd configuration parameters. # boot2container initrd configuration parameters.
B2C_KERNEL_URL: 'https://gitlab.freedesktop.org/gfx-ci/ci-tron/-/package_files/519/download' # Linux 6.1 B2C_KERNEL_URL: 'https://gitlab.freedesktop.org/gfx-ci/ci-tron/-/package_files/519/download' # Linux 6.1
B2C_INITRAMFS_URL: 'https://gitlab.freedesktop.org/mupuf/boot2container/-/releases/v0.9.10/downloads/initramfs.linux_amd64.cpio.xz' B2C_INITRAMFS_URL: 'https://gitlab.freedesktop.org/mupuf/boot2container/-/releases/v0.9.10/downloads/initramfs.linux_amd64.cpio.xz'
B2C_JOB_SUCCESS_REGEX: 'hwci: mesa: pass\r$' B2C_JOB_SUCCESS_REGEX: '\[.*\]: Execution is over, pipeline status: 0\r$'
B2C_JOB_WARN_REGEX: '\*ERROR\* ring .* timeout'
B2C_LOG_LEVEL: 6 B2C_LOG_LEVEL: 6
B2C_POWEROFF_DELAY: 15 B2C_POWEROFF_DELAY: 15
B2C_SESSION_END_REGEX: '^.*It''s now safe to turn off your computer\r$' B2C_SESSION_END_REGEX: '^.*It''s now safe to turn off your computer\r$'
B2C_SESSION_REBOOT_REGEX: '' B2C_SESSION_REBOOT_REGEX: ''
B2C_TIMEOUT_BOOT_MINUTES: 45 B2C_TIMEOUT_BOOT_MINUTES: 45
B2C_TIMEOUT_BOOT_RETRIES: 0 B2C_TIMEOUT_BOOT_RETRIES: 0
B2C_TIMEOUT_FIRST_CONSOLE_ACTIVITY_MINUTES: 2 B2C_TIMEOUT_FIRST_MINUTES: 2
B2C_TIMEOUT_FIRST_CONSOLE_ACTIVITY_RETRIES: 3 B2C_TIMEOUT_FIRST_RETRIES: 3
B2C_TIMEOUT_CONSOLE_ACTIVITY_MINUTES: 5 B2C_TIMEOUT_MINUTES: 5
B2C_TIMEOUT_OVERALL_MINUTES: 90 B2C_TIMEOUT_OVERALL_MINUTES: 90
B2C_TIMEOUT_CONSOLE_ACTIVITY_RETRIES: 0 B2C_TIMEOUT_RETRIES: 0
B2C_JOB_VOLUME_EXCLUSIONS: "*.shader_cache,install/*,*/install/*,*/vkd3d-proton.cache*,vkd3d-proton.cache*,*.qpa" B2C_JOB_VOLUME_EXCLUSIONS: "*.shader_cache,install/*,*/install/*,*/vkd3d-proton.cache*,vkd3d-proton.cache*,*.qpa"
# As noted in the top description, we make a distinction between the # As noted in the top description, we make a distinction between the
@@ -385,7 +397,7 @@ python-test:
reports: reports:
junit: results/**/junit.xml junit: results/**/junit.xml
.b2c-x86_64-test-vk: .b2c-test-vk:
extends: extends:
- .use-debian/x86_64_test-vk - .use-debian/x86_64_test-vk
- .b2c-test - .b2c-test
@@ -394,7 +406,7 @@ python-test:
- debian-testing - debian-testing
- !reference [.required-for-hardware-jobs, needs] - !reference [.required-for-hardware-jobs, needs]
.b2c-x86_64-test-gl: .b2c-test-gl:
extends: extends:
- .use-debian/x86_64_test-gl - .use-debian/x86_64_test-gl
- .b2c-test - .b2c-test
@@ -402,21 +414,3 @@ python-test:
- debian/x86_64_test-gl - debian/x86_64_test-gl
- debian-testing - debian-testing
- !reference [.required-for-hardware-jobs, needs] - !reference [.required-for-hardware-jobs, needs]
.b2c-arm64-test-vk:
extends:
- .use-debian/arm64_test-vk
- .b2c-test
needs:
- debian/arm64_test-vk
- debian-arm64
- !reference [.required-for-hardware-jobs, needs]
.b2c-arm64-test-gl:
extends:
- .use-debian/arm64_test-gl
- .b2c-test
needs:
- debian/arm64_test-gl
- debian-arm64
- !reference [.required-for-hardware-jobs, needs]


@@ -15,7 +15,7 @@ from typing import Generator
from unittest.mock import MagicMock, patch from unittest.mock import MagicMock, patch
import pytest import pytest
from lava.exceptions import MesaCIException, MesaCIRetryError, MesaCIFatalException from lava.exceptions import MesaCIException, MesaCIRetryError
from lava.lava_job_submitter import ( from lava.lava_job_submitter import (
DEVICE_HANGING_TIMEOUT_SEC, DEVICE_HANGING_TIMEOUT_SEC,
NUMBER_OF_RETRIES_TIMEOUT_DETECTION, NUMBER_OF_RETRIES_TIMEOUT_DETECTION,
@@ -24,7 +24,6 @@ from lava.lava_job_submitter import (
bootstrap_log_follower, bootstrap_log_follower,
follow_job_execution, follow_job_execution,
retriable_follow_job, retriable_follow_job,
wait_for_job_get_started,
) )
from lava.utils import LogSectionType from lava.utils import LogSectionType
@@ -84,7 +83,7 @@ def lava_job_submitter(
def test_submit_and_follow_respects_exceptions(mock_sleep, mock_proxy, exception): def test_submit_and_follow_respects_exceptions(mock_sleep, mock_proxy, exception):
with pytest.raises(MesaCIException): with pytest.raises(MesaCIException):
proxy = mock_proxy(side_effect=exception) proxy = mock_proxy(side_effect=exception)
job = LAVAJob(proxy, "") job = LAVAJob(proxy, '')
log_follower = bootstrap_log_follower() log_follower = bootstrap_log_follower()
follow_job_execution(job, log_follower) follow_job_execution(job, log_follower)
@@ -166,13 +165,21 @@ PROXY_SCENARIOS = {
mock_logs(result="pass"), mock_logs(result="pass"),
does_not_raise(), does_not_raise(),
"pass", "pass",
{"testsuite_results": [generate_testsuite_result(result="pass")]}, {
"testsuite_results": [
generate_testsuite_result(result="pass")
]
},
), ),
"no retries, but testsuite fails": ( "no retries, but testsuite fails": (
mock_logs(result="fail"), mock_logs(result="fail"),
does_not_raise(), does_not_raise(),
"fail", "fail",
{"testsuite_results": [generate_testsuite_result(result="fail")]}, {
"testsuite_results": [
generate_testsuite_result(result="fail")
]
},
), ),
"no retries, one testsuite fails": ( "no retries, one testsuite fails": (
generate_n_logs(n=1, tick_fn=0, result="fail"), generate_n_logs(n=1, tick_fn=0, result="fail"),
@@ -181,7 +188,7 @@ PROXY_SCENARIOS = {
{ {
"testsuite_results": [ "testsuite_results": [
generate_testsuite_result(result="fail"), generate_testsuite_result(result="fail"),
generate_testsuite_result(result="pass"), generate_testsuite_result(result="pass")
] ]
}, },
), ),
@@ -258,27 +265,6 @@ def test_simulate_a_long_wait_to_start_a_job(
assert delta_time.total_seconds() >= wait_time assert delta_time.total_seconds() >= wait_time
LONG_LAVA_QUEUE_SCENARIOS = {
"no_time_to_run": (0, pytest.raises(MesaCIFatalException)),
"enough_time_to_run": (9999999999, does_not_raise()),
}
@pytest.mark.parametrize(
"job_timeout, expectation",
LONG_LAVA_QUEUE_SCENARIOS.values(),
ids=LONG_LAVA_QUEUE_SCENARIOS.keys(),
)
def test_wait_for_job_get_started_no_time_to_run(monkeypatch, job_timeout, expectation):
monkeypatch.setattr("lava.lava_job_submitter.CI_JOB_TIMEOUT_SEC", job_timeout)
job = MagicMock()
# Make it escape the loop
job.is_started.side_effect = (False, False, True)
with expectation as e:
wait_for_job_get_started(job, 1)
if e:
job.cancel.assert_called_with()
CORRUPTED_LOG_SCENARIOS = { CORRUPTED_LOG_SCENARIOS = {
"too much subsequent corrupted data": ( "too much subsequent corrupted data": (
@@ -452,7 +438,9 @@ def test_job_combined_status(
"lava.lava_job_submitter.retriable_follow_job" "lava.lava_job_submitter.retriable_follow_job"
) as mock_retriable_follow_job, patch( ) as mock_retriable_follow_job, patch(
"lava.lava_job_submitter.LAVAJobSubmitter._LAVAJobSubmitter__prepare_submission" "lava.lava_job_submitter.LAVAJobSubmitter._LAVAJobSubmitter__prepare_submission"
) as mock_prepare_submission, patch("sys.exit"): ) as mock_prepare_submission, patch(
"sys.exit"
):
from lava.lava_job_submitter import STRUCTURAL_LOG from lava.lava_job_submitter import STRUCTURAL_LOG
mock_retriable_follow_job.return_value = MagicMock(status=finished_job_status) mock_retriable_follow_job.return_value = MagicMock(status=finished_job_status)
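
The removed LONG_LAVA_QUEUE_SCENARIOS block earlier in this file's diff exercised wait_for_job_get_started, which cancels a job still sitting in the LAVA queue when the remaining GitLab job time (CI_JOB_TIMEOUT_SEC) can no longer fit it. A self-contained sketch of that kind of guard; the function name, signature, and constant below are illustrative, not the submitter's actual API:

import time

CI_JOB_TIMEOUT_SEC = 3600  # illustrative value; the real script derives it from the CI environment

def wait_until_started(job, deadline_margin_sec: int, poll_sec: int = 5) -> None:
    """Poll until the job starts, cancelling if the CI job would time out first."""
    started_waiting = time.monotonic()
    while not job.is_started():
        waited = time.monotonic() - started_waiting
        if waited + deadline_margin_sec > CI_JOB_TIMEOUT_SEC:
            job.cancel()
            raise RuntimeError("not enough CI time left to run the job")
        time.sleep(poll_sec)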


@@ -16,13 +16,7 @@ from lava.utils import (
     fix_lava_gitlab_section_log,
     hide_sensitive_data,
 )
-from lava.utils.constants import (
-    KNOWN_ISSUE_R8152_MAX_CONSECUTIVE_COUNTER,
-    A6XX_GPU_RECOVERY_WATCH_PERIOD_MIN,
-    A6XX_GPU_RECOVERY_FAILURE_MESSAGE,
-    A6XX_GPU_RECOVERY_FAILURE_MAX_COUNT,
-)
-from lava.utils.lava_log_hints import LAVALogHints
+from lava.utils.constants import KNOWN_ISSUE_R8152_MAX_CONSECUTIVE_COUNTER

 from ..lava.helpers import (
     create_lava_yaml_msg,
@@ -396,44 +390,3 @@ def test_detect_failure(messages, expectation):
lf = LogFollower(starting_section=boot_section) lf = LogFollower(starting_section=boot_section)
with expectation: with expectation:
lf.feed(messages) lf.feed(messages)
def test_detect_a6xx_gpu_recovery_failure(frozen_time):
log_follower = LogFollower()
lava_log_hints = LAVALogHints(log_follower=log_follower)
failure_message = {
"dt": datetime.now().isoformat(),
"msg": A6XX_GPU_RECOVERY_FAILURE_MESSAGE[0],
"lvl": "feedback",
}
with pytest.raises(MesaCIKnownIssueException):
for _ in range(A6XX_GPU_RECOVERY_FAILURE_MAX_COUNT):
lava_log_hints.detect_a6xx_gpu_recovery_failure(failure_message)
# Simulate the passage of time within the watch period
frozen_time.tick(1)
failure_message["dt"] = datetime.now().isoformat()
def test_detect_a6xx_gpu_recovery_success(frozen_time):
log_follower = LogFollower()
lava_log_hints = LAVALogHints(log_follower=log_follower)
failure_message = {
"dt": datetime.now().isoformat(),
"msg": A6XX_GPU_RECOVERY_FAILURE_MESSAGE[0],
"lvl": "feedback",
}
# Simulate sending a tolerable number of failure messages
for _ in range(A6XX_GPU_RECOVERY_FAILURE_MAX_COUNT - 1):
lava_log_hints.detect_a6xx_gpu_recovery_failure(failure_message)
frozen_time.tick(1)
failure_message["dt"] = datetime.now().isoformat()
# Simulate the passage of time outside of the watch period
frozen_time.tick(60 * A6XX_GPU_RECOVERY_WATCH_PERIOD_MIN + 1)
failure_message = {
"dt": datetime.now().isoformat(),
"msg": A6XX_GPU_RECOVERY_FAILURE_MESSAGE[1],
"lvl": "feedback",
}
with does_not_raise():
lava_log_hints.detect_a6xx_gpu_recovery_failure(failure_message)
assert lava_log_hints.a6xx_gpu_first_fail_time is None, "a6xx_gpu_first_fail_time is not None"
assert lava_log_hints.a6xx_gpu_recovery_fail_counter == 0, "a6xx_gpu_recovery_fail_counter is not 0"


@@ -0,0 +1,87 @@
#!/usr/bin/env bash
# shellcheck disable=SC2086 # we want word splitting
set -ex
if [[ -z "$VK_DRIVER" ]]; then
exit 1
fi
# Useful debug output, you rarely know what envirnoment you'll be
# running in within container-land, this can be a landmark.
ls -l
INSTALL=$(realpath -s "$PWD"/install)
RESULTS=$(realpath -s "$PWD"/results)
# Set up the driver environment.
# Modifiying here directly LD_LIBRARY_PATH may cause problems when
# using a command wrapper. Hence, we will just set it when running the
# command.
export __LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$INSTALL/lib/"
# Sanity check to ensure that our environment is sufficient to make our tests
# run against the Mesa built by CI, rather than any installed distro version.
MESA_VERSION=$(sed 's/\./\\./g' "$INSTALL/VERSION")
# Force the stdout and stderr streams to be unbuffered in python.
export PYTHONUNBUFFERED=1
# Set the Vulkan driver to use.
export VK_ICD_FILENAMES="$INSTALL/share/vulkan/icd.d/${VK_DRIVER}_icd.x86_64.json"
if [ "${VK_DRIVER}" = "radeon" ]; then
# Disable vsync
export MESA_VK_WSI_PRESENT_MODE=mailbox
export vblank_mode=0
fi
# Set environment for Wine.
export WINEDEBUG="-all"
export WINEPREFIX="/dxvk-wine64"
export WINEESYNC=1
# Wait for amdgpu to be fully loaded
sleep 1
# Avoid having to perform nasty command pre-processing to insert the
# wine executable in front of the test executables. Instead, use the
# kernel's binfmt support to automatically use Wine as an interpreter
# when asked to load PE executables.
# TODO: Have boot2container mount this filesystem for all jobs?
mount -t binfmt_misc none /proc/sys/fs/binfmt_misc
echo ':DOSWin:M::MZ::/usr/bin/wine64:' > /proc/sys/fs/binfmt_misc/register
# Set environment for DXVK.
export DXVK_LOG_LEVEL="info"
export DXVK_LOG="$RESULTS/dxvk"
[ -d "$DXVK_LOG" ] || mkdir -pv "$DXVK_LOG"
export DXVK_STATE_CACHE=0
# Set environment for replaying traces.
export PATH="/apitrace-msvc-win64/bin:/gfxreconstruct/build/bin:$PATH"
SANITY_MESA_VERSION_CMD="vulkaninfo"
# Set up the Window System Interface (WSI)
# TODO: Can we get away with GBM?
if [ "${TEST_START_XORG:-0}" -eq 1 ]; then
"$INSTALL"/common/start-x.sh "$INSTALL"
export DISPLAY=:0
fi
wine64 --version
SANITY_MESA_VERSION_CMD="$SANITY_MESA_VERSION_CMD | tee /tmp/version.txt | grep \"Mesa $MESA_VERSION\(\s\|$\)\""
RUN_CMD="export LD_LIBRARY_PATH=$__LD_LIBRARY_PATH; $SANITY_MESA_VERSION_CMD"
set +e
if ! eval $RUN_CMD;
then
printf "%s\n" "Found $(cat /tmp/version.txt), expected $MESA_VERSION"
fi
set -e
# Just to be sure...
chmod +x ./valvetraces-run.sh
./valvetraces-run.sh


@@ -1,4 +1,5 @@
#!/usr/bin/env bash #!/usr/bin/env bash
# shellcheck disable=SC2035 # FIXME glob
set -ex set -ex
@@ -10,9 +11,6 @@ INSTALL=$(realpath -s "$PWD"/install)
RESULTS=$(realpath -s "$PWD"/results) RESULTS=$(realpath -s "$PWD"/results)
# Make sure the results folder exists
mkdir -p "$RESULTS"
# Set up the driver environment. # Set up the driver environment.
# Modifiying here directly LD_LIBRARY_PATH may cause problems when # Modifiying here directly LD_LIBRARY_PATH may cause problems when
# using a command wrapper. Hence, we will just set it when running the # using a command wrapper. Hence, we will just set it when running the
@@ -20,54 +18,71 @@ mkdir -p "$RESULTS"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$INSTALL/lib/:/vkd3d-proton-tests/x64/" export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$INSTALL/lib/:/vkd3d-proton-tests/x64/"
# Sanity check to ensure that our environment is sufficient to make our tests
# run against the Mesa built by CI, rather than any installed distro version.
MESA_VERSION=$(sed 's/\./\\./g' "$INSTALL/VERSION")
# Set the Vulkan driver to use. # Set the Vulkan driver to use.
ARCH=$(uname -m) export VK_ICD_FILENAMES="$INSTALL/share/vulkan/icd.d/${VK_DRIVER}_icd.x86_64.json"
export VK_DRIVER_FILES="$INSTALL/share/vulkan/icd.d/${VK_DRIVER}_icd.$ARCH.json"
# Set environment for Wine. # Set environment for Wine.
export WINEDEBUG="-all" export WINEDEBUG="-all"
export WINEPREFIX="/vkd3d-proton-wine64" export WINEPREFIX="/vkd3d-proton-wine64"
export WINEESYNC=1 export WINEESYNC=1
# Sanity check to ensure that our environment is sufficient to make our tests # wrapper to supress +x to avoid spamming the log
# run against the Mesa built by CI, rather than any installed distro version. quiet() {
MESA_VERSION=$(cat "$INSTALL/VERSION") set +x
if ! vulkaninfo | grep driverInfo | tee /tmp/version.txt | grep -F "Mesa $MESA_VERSION"; then "$@"
set -x
}
set +e
if ! vulkaninfo | tee /tmp/version.txt | grep "\"Mesa $MESA_VERSION\(\s\|$\)\"";
then
printf "%s\n" "Found $(cat /tmp/version.txt), expected $MESA_VERSION" printf "%s\n" "Found $(cat /tmp/version.txt), expected $MESA_VERSION"
exit 1 fi
set -e
if [ -d "$RESULTS" ]; then
cd "$RESULTS" && rm -rf ..?* .[!.]* * && cd -
else
mkdir "$RESULTS"
fi fi
printf "%s\n" "Running vkd3d-proton testsuite..." quiet printf "%s\n" "Running vkd3d-proton testsuite..."
if ! /vkd3d-proton-tests/x64/bin/d3d12 > "$RESULTS/vkd3d-proton-log.txt"; then set +e
if ! /vkd3d-proton-tests/x64/bin/d3d12 > "$RESULTS/vkd3d-proton.log";
then
# Check if the executable finished (ie. no segfault). # Check if the executable finished (ie. no segfault).
if ! grep "tests executed" "$RESULTS/vkd3d-proton-log.txt" > /dev/null; then if ! grep "tests executed" "$RESULTS/vkd3d-proton.log" > /dev/null; then
error "Failed, see ${ARTIFACTS_BASE_URL}/results/vkd3d-proton-log.txt" error printf "%s\n" "Failed, see vkd3d-proton.log!"
exit 1 exit 1
fi fi
# Collect all the failures # Collect all the failures
VKD3D_PROTON_RESULTS="${VKD3D_PROTON_RESULTS:-vkd3d-proton-results}"
RESULTSFILE="$RESULTS/$VKD3D_PROTON_RESULTS.txt" RESULTSFILE="$RESULTS/$VKD3D_PROTON_RESULTS.txt"
mkdir -p .gitlab-ci/vkd3d-proton mkdir -p .gitlab-ci/vkd3d-proton
if ! grep "Test failed" "$RESULTS"/vkd3d-proton-log.txt > "$RESULTSFILE"; then grep "Test failed" "$RESULTS"/vkd3d-proton.log > "$RESULTSFILE"
error "Failed to get the list of failing tests, see ${ARTIFACTS_BASE_URL}/results/vkd3d-proton-log.txt"
exit 1
fi
# Gather the list expected failures # Gather the list expected failures
if [ -f "$INSTALL/$VKD3D_PROTON_RESULTS-vkd3d.txt" ]; then if [ -f "$INSTALL/$VKD3D_PROTON_RESULTS.txt" ]; then
cp "$INSTALL/$VKD3D_PROTON_RESULTS-vkd3d.txt" \ cp "$INSTALL/$VKD3D_PROTON_RESULTS.txt" \
".gitlab-ci/vkd3d-proton/$VKD3D_PROTON_RESULTS.txt.baseline" ".gitlab-ci/vkd3d-proton/$VKD3D_PROTON_RESULTS.txt.baseline"
else else
printf "%s\n" "$VKD3D_PROTON_RESULTS-vkd3d.txt not found, assuming a \"no failures\" baseline."
touch ".gitlab-ci/vkd3d-proton/$VKD3D_PROTON_RESULTS.txt.baseline" touch ".gitlab-ci/vkd3d-proton/$VKD3D_PROTON_RESULTS.txt.baseline"
fi fi
# Make sure that the failures found in this run match the current expectation # Make sure that the failures found in this run match the current expectation
if ! diff --color=always -u ".gitlab-ci/vkd3d-proton/$VKD3D_PROTON_RESULTS.txt.baseline" "$RESULTSFILE"; then if ! diff -q ".gitlab-ci/vkd3d-proton/$VKD3D_PROTON_RESULTS.txt.baseline" "$RESULTSFILE"; then
error "Changes found, see ${ARTIFACTS_BASE_URL}/results/vkd3d-proton-log.txt" error printf "%s\n" "Changes found, see vkd3d-proton.log!"
quiet diff --color=always -u ".gitlab-ci/vkd3d-proton/$VKD3D_PROTON_RESULTS.txt.baseline" "$RESULTSFILE"
exit 1 exit 1
fi fi
fi fi
printf "%s\n" "vkd3d-proton execution: SUCCESS"
exit 0 exit 0
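
Both versions of the vkd3d-proton runner grep the failing tests out of the log and diff them against a checked-in baseline. One direction of that comparison, flagging failures that are not in the baseline, as a small Python sketch (the function and the example file names are placeholders, not part of the runner):

def compare_failures(log_path: str, baseline_path: str) -> list[str]:
    """Return failures present in the log but missing from the baseline."""
    def failed_tests(path: str) -> set[str]:
        with open(path, "r", errors="replace") as f:
            return {line.strip() for line in f if "Test failed" in line}

    return sorted(failed_tests(log_path) - failed_tests(baseline_path))

# Example:
# regressions = compare_failures("results/vkd3d-proton.log", "vkd3d-proton-results.txt.baseline")
# if regressions:
#     raise SystemExit("\n".join(regressions))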


@@ -7,10 +7,6 @@ COPY mesa_deps_vulkan_sdk.ps1 C:\
 RUN C:\mesa_deps_vulkan_sdk.ps1

 COPY mesa_init_msvc.ps1 C:\
-
-COPY mesa_deps_libva.ps1 C:\
-RUN C:\mesa_deps_libva.ps1
-
 COPY mesa_deps_build.ps1 C:\
 RUN C:\mesa_deps_build.ps1


@@ -14,9 +14,6 @@ RUN C:\mesa_deps_rust.ps1
 COPY mesa_init_msvc.ps1 C:\

-COPY mesa_deps_libva.ps1 C:\
-RUN C:\mesa_deps_libva.ps1
-
 COPY mesa_deps_test_piglit.ps1 C:\
 RUN C:\mesa_deps_test_piglit.ps1
 COPY mesa_deps_test_deqp.ps1 c:\


@@ -1,4 +1,4 @@
-# VK_DRIVER_FILES environment variable is not used when running with
+# VK_ICD_FILENAMES environment variable is not used when running with
 # elevated privileges. Add a key to the registry instead.
 $hkey_path = "HKLM:\SOFTWARE\Khronos\Vulkan\Drivers\"
 $hkey_name = Join-Path -Path $pwd -ChildPath "_install\share\vulkan\icd.d\dzn_icd.x86_64.json"


@@ -84,6 +84,4 @@ Copy-Item ".\.gitlab-ci\windows\spirv2dxil_run.ps1" -Destination $installdir
 Copy-Item ".\.gitlab-ci\windows\deqp_runner_run.ps1" -Destination $installdir

-Copy-Item ".\.gitlab-ci\windows\vainfo_run.ps1" -Destination $installdir
-
 Get-ChildItem -Recurse -Filter "ci" | Get-ChildItem -Include "*.txt","*.toml" | Copy-Item -Destination $installdir


@@ -12,7 +12,7 @@ $depsInstallPath="C:\mesa-deps"
Get-Date Get-Date
Write-Host "Cloning DirectX-Headers" Write-Host "Cloning DirectX-Headers"
git clone -b v1.613.1 --depth=1 https://github.com/microsoft/DirectX-Headers deps/DirectX-Headers git clone -b v1.611.0 --depth=1 https://github.com/microsoft/DirectX-Headers deps/DirectX-Headers
if (!$?) { if (!$?) {
Write-Host "Failed to clone DirectX-Headers repository" Write-Host "Failed to clone DirectX-Headers repository"
Exit 1 Exit 1
@@ -32,17 +32,16 @@ if (!$buildstatus) {
Get-Date Get-Date
Write-Host "Cloning zlib" Write-Host "Cloning zlib"
git clone -b v1.3.1 --depth=1 https://github.com/madler/zlib deps/zlib git clone -b v1.2.13 --depth=1 https://github.com/madler/zlib deps/zlib
if (!$?) { if (!$?) {
Write-Host "Failed to clone zlib repository" Write-Host "Failed to clone zlib repository"
Exit 1 Exit 1
} }
Write-Host "Downloading zlib meson build files" Write-Host "Downloading zlib meson build files"
Invoke-WebRequest -Uri "https://wrapdb.mesonbuild.com/v2/zlib_1.3.1-1/get_patch" -OutFile deps/zlib.zip Invoke-WebRequest -Uri "https://wrapdb.mesonbuild.com/v2/zlib_1.2.13-1/get_patch" -OutFile deps/zlib.zip
Expand-Archive -Path deps/zlib.zip -Destination deps/zlib Expand-Archive -Path deps/zlib.zip -Destination deps/zlib
# Wrap archive puts build files in a version subdir # Wrap archive puts build files in a version subdir
robocopy deps/zlib/zlib-1.3.1 deps/zlib /E Move-Item deps/zlib/zlib-1.2.13/* deps/zlib
Remove-Item -Recurse -Force -ErrorAction SilentlyContinue -Path deps/zlib/zlib-1.3.1
$zlib_build = New-Item -ItemType Directory -Path ".\deps\zlib" -Name "build" $zlib_build = New-Item -ItemType Directory -Path ".\deps\zlib" -Name "build"
Push-Location -Path $zlib_build.FullName Push-Location -Path $zlib_build.FullName
meson .. --backend=ninja -Dprefix="$depsInstallPath" --default-library=static --buildtype=release -Db_vscrt=mt && ` meson .. --backend=ninja -Dprefix="$depsInstallPath" --default-library=static --buildtype=release -Db_vscrt=mt && `
@@ -55,6 +54,35 @@ if (!$buildstatus) {
Exit 1 Exit 1
} }
Get-Date
Write-Host "Cloning libva"
git clone https://github.com/intel/libva.git deps/libva
if (!$?) {
Write-Host "Failed to clone libva repository"
Exit 1
}
Push-Location -Path ".\deps\libva"
Write-Host "Checking out libva df3c584bb79d1a1e521372d62fa62e8b1c52ce6c"
# libva-win32 is released with libva version 2.17 (see https://github.com/intel/libva/releases/tag/2.17.0)
git checkout 2.17.0
Pop-Location
Write-Host "Building libva"
# libva already has a build dir in their repo, use builddir instead
$libva_build = New-Item -ItemType Directory -Path ".\deps\libva" -Name "builddir"
Push-Location -Path $libva_build.FullName
meson .. -Dprefix="$depsInstallPath"
ninja -j32 install
$buildstatus = $?
Pop-Location
Remove-Item -Recurse -Force -ErrorAction SilentlyContinue -Path $libva_build
if (!$buildstatus) {
Write-Host "Failed to compile libva"
Exit 1
}
Get-Date Get-Date
Write-Host "Cloning LLVM release/15.x" Write-Host "Cloning LLVM release/15.x"
git clone -b release/15.x --depth=1 https://github.com/llvm/llvm-project deps/llvm-project git clone -b release/15.x --depth=1 https://github.com/llvm/llvm-project deps/llvm-project
@@ -96,7 +124,6 @@ cmake ../llvm `
-DLLVM_ENABLE_DIA_SDK=OFF ` -DLLVM_ENABLE_DIA_SDK=OFF `
-DCLANG_BUILD_TOOLS=ON ` -DCLANG_BUILD_TOOLS=ON `
-DLLVM_SPIRV_INCLUDE_TESTS=OFF ` -DLLVM_SPIRV_INCLUDE_TESTS=OFF `
-DLLVM_ENABLE_ZLIB=OFF `
-Wno-dev && ` -Wno-dev && `
ninja -j32 install ninja -j32 install
$buildstatus = $? $buildstatus = $?


@@ -68,7 +68,7 @@ Get-Date
 python -m pip install --upgrade pip --progress-bar off
 Write-Host "Installing python packages at:"
 Get-Date
-pip3 install packaging meson mako "numpy < 2.0" pyyaml --progress-bar off
+pip3 install packaging meson mako numpy --progress-bar off
 if (!$?) {
     Write-Host "Failed to install dependencies from pip"
     Exit 1


@@ -8,7 +8,7 @@ $depsInstallPath="C:\mesa-deps"

 Write-Host "Downloading DirectX 12 Agility SDK at:"
 Get-Date
-Invoke-WebRequest -Uri https://www.nuget.org/api/v2/package/Microsoft.Direct3D.D3D12/1.613.2 -OutFile 'agility.zip'
+Invoke-WebRequest -Uri https://www.nuget.org/api/v2/package/Microsoft.Direct3D.D3D12/1.610.2 -OutFile 'agility.zip'
 Expand-Archive -Path 'agility.zip' -DestinationPath 'C:\agility'
 # Copy Agility SDK into mesa-deps\bin\D3D12
 New-Item -ErrorAction SilentlyContinue -ItemType Directory -Path $depsInstallPath\bin -Name 'D3D12'

@@ -18,7 +18,7 @@ Remove-Item -Recurse 'C:\agility'

 Write-Host "Downloading Updated WARP at:"
 Get-Date
-Invoke-WebRequest -Uri https://www.nuget.org/api/v2/package/Microsoft.Direct3D.WARP/1.0.11 -OutFile 'warp.zip'
+Invoke-WebRequest -Uri https://www.nuget.org/api/v2/package/Microsoft.Direct3D.WARP/1.0.9 -OutFile 'warp.zip'
 Expand-Archive -Path 'warp.zip' -DestinationPath 'C:\warp'
 # Copy WARP into mesa-deps\bin
 Copy-Item 'C:\warp\build\native\amd64\d3d10warp.dll' -Destination $depsInstallPath\bin

@@ -27,7 +27,7 @@ Remove-Item -Recurse 'C:\warp'

 Write-Host "Downloading DirectXShaderCompiler release at:"
 Get-Date
-Invoke-WebRequest -Uri https://github.com/microsoft/DirectXShaderCompiler/releases/download/v1.8.2403/dxc_2024_03_07.zip -OutFile 'DXC.zip'
+Invoke-WebRequest -Uri https://github.com/microsoft/DirectXShaderCompiler/releases/download/v1.7.2207/dxc_2022_07_18.zip -OutFile 'DXC.zip'
 Expand-Archive -Path 'DXC.zip' -DestinationPath 'C:\DXC'
 # No more need to get dxil.dll from the VS install
 Copy-Item 'C:\DXC\bin\x64\*.dll' -Destination 'C:\Windows\System32'


@@ -1,79 +0,0 @@
# Compiling libva/libva-utils deps
$ProgressPreference = "SilentlyContinue"
$MyPath = $MyInvocation.MyCommand.Path | Split-Path -Parent
. "$MyPath\mesa_init_msvc.ps1"
Remove-Item -Recurse -Force -ErrorAction SilentlyContinue "deps" | Out-Null
$depsInstallPath="C:\mesa-deps"
Write-Host "Cloning libva at:"
Get-Date
git clone https://github.com/intel/libva.git deps/libva
if (!$?) {
Write-Host "Failed to clone libva repository"
Exit 1
}
Write-Host "Cloning libva finished at:"
Get-Date
Write-Host "Building libva at:"
Get-Date
Push-Location -Path ".\deps\libva"
Write-Host "Checking out libva..."
git checkout 2.21.0
Pop-Location
# libva already has a build dir in their repo, use builddir instead
$libva_build = New-Item -ItemType Directory -Path ".\deps\libva" -Name "builddir"
Push-Location -Path $libva_build.FullName
meson .. -Dprefix="$depsInstallPath"
ninja -j32 install
$buildstatus = $?
Pop-Location
Remove-Item -Recurse -Force -ErrorAction SilentlyContinue -Path $libva_build
if (!$buildstatus) {
Write-Host "Failed to compile libva"
Exit 1
}
Write-Host "Building libva finished at:"
Get-Date
Write-Host "Cloning libva-utils at:"
Get-Date
git clone https://github.com/intel/libva-utils.git deps/libva-utils
if (!$?) {
Write-Host "Failed to clone libva-utils repository"
Exit 1
}
Write-Host "Cloning libva-utils finished at:"
Get-Date
Write-Host "Building libva-utils at:"
Get-Date
Push-Location -Path ".\deps\libva-utils"
Write-Host "Checking out libva-utils..."
git checkout 2.21.0
Pop-Location
Write-Host "Building libva-utils"
# libva-utils already has a build dir in their repo, use builddir instead
$libva_utils_build = New-Item -ItemType Directory -Path ".\deps\libva-utils" -Name "builddir"
Push-Location -Path $libva_utils_build.FullName
meson .. -Dprefix="$depsInstallPath" --pkg-config-path="$depsInstallPath\lib\pkgconfig;$depsInstallPath\share\pkgconfig"
ninja -j32 install
$buildstatus = $?
Pop-Location
Remove-Item -Recurse -Force -ErrorAction SilentlyContinue -Path $libva_utils_build
if (!$buildstatus) {
Write-Host "Failed to compile libva-utils"
Exit 1
}
Write-Host "Building libva-utils finished at:"
Get-Date


@@ -1,99 +0,0 @@
function Deploy-Dependencies {
param (
[string] $deploy_directory
)
Write-Host "Copying libva runtime and driver at:"
Get-Date
# Copy the VA runtime binaries from the mesa built dependencies so the versions match with the built mesa VA driver binary
$depsInstallPath="C:\mesa-deps"
Copy-Item "$depsInstallPath\bin\vainfo.exe" -Destination "$deploy_directory\vainfo.exe"
Copy-Item "$depsInstallPath\bin\va_win32.dll" -Destination "$deploy_directory\va_win32.dll"
Copy-Item "$depsInstallPath\bin\va.dll" -Destination "$deploy_directory\va.dll"
# Copy Agility SDK into D3D12 subfolder of vainfo
New-Item -ItemType Directory -Force -Path "$deploy_directory\D3D12" | Out-Null
Copy-Item "$depsInstallPath\bin\D3D12\D3D12Core.dll" -Destination "$deploy_directory\D3D12\D3D12Core.dll"
Copy-Item "$depsInstallPath\bin\D3D12\d3d12SDKLayers.dll" -Destination "$deploy_directory\D3D12\d3d12SDKLayers.dll"
# Copy WARP next to vainfo
Copy-Item "$depsInstallPath\bin\d3d10warp.dll" -Destination "$deploy_directory\d3d10warp.dll"
Write-Host "Copying libva runtime and driver finished at:"
Get-Date
}
function Check-VAInfo-Entrypoint {
param (
[string] $vainfo_app_path,
[string] $entrypoint
)
$vainfo_run_cmd = "$vainfo_app_path --display win32 --device 0 2>&1 | Select-String $entrypoint -Quiet"
Write-Host "Running: $vainfo_run_cmd"
$vainfo_ret_code= Invoke-Expression $vainfo_run_cmd
if (-not($vainfo_ret_code)) {
return 0
}
return 1
}
# Set testing environment variables
$successful_run=1
$testing_dir="$PWD\_install\bin" # vaon12_drv_video.dll is placed on this directory by the build
$vainfo_app_path = "$testing_dir\vainfo.exe"
# Deploy vainfo and dependencies
Deploy-Dependencies -deploy_directory $testing_dir
# Set VA runtime environment variables
$env:LIBVA_DRIVER_NAME="vaon12"
$env:LIBVA_DRIVERS_PATH="$testing_dir"
Write-Host "LIBVA_DRIVER_NAME: $env:LIBVA_DRIVER_NAME"
Write-Host "LIBVA_DRIVERS_PATH: $env:LIBVA_DRIVERS_PATH"
# Check video processing entrypoint is supported
# Inbox WARP/D3D12 supports this entrypoint with VA frontend shaders support (e.g no video APIs support required)
$entrypoint = "VAEntrypointVideoProc"
# First run without app verifier
Write-Host "Disabling appverifier for $vainfo_app_path and checking for the presence of $entrypoint supported..."
appverif.exe /disable * -for "$vainfo_app_path"
$result_without_appverifier = Check-VAInfo-Entrypoint -vainfo_app_path $vainfo_app_path -entrypoint $entrypoint
if ($result_without_appverifier -eq 1) {
Write-Host "Process exited successfully."
} else {
$successful_run=0
Write-Error "Process exit not successful for $vainfo_run_cmd. Please see vainfo verbose output below for diagnostics..."
# verbose run to print more info on error (helpful to investigate issues from the CI output)
Invoke-Expression "$vainfo_app_path -a --display win32 --device help"
Invoke-Expression "$vainfo_app_path -a --display win32 --device 0"
}
# Enable appverif and run again
Write-Host "Enabling appverifier for $vainfo_app_path and checking for the presence of $entrypoint supported..."
appverif.exe /logtofile enable
appverif.exe /verify "$vainfo_app_path"
appverif.exe /enable "Leak" -for "$vainfo_app_path"
$verifier_log_path="$testing_dir\vainfo_appverif_log.xml"
$result_with_appverifier = Check-VAInfo-Entrypoint -vainfo_app_path $vainfo_app_path -entrypoint $entrypoint
if ($result_with_appverifier -eq 1) {
Write-Host "Process exited successfully."
appverif.exe /logtofile disable
} else {
Write-Host "Process failed. Please see Application Verifier log contents below."
# Need to wait for appverif to exit before gathering log
Start-Process -Wait -FilePath "appverif.exe" -ArgumentList "-export", "log", "-for", "$vainfo_app_path", "-with", "to=$verifier_log_path"
Get-Content $verifier_log_path
Write-Error "Process exit not successful for $vainfo_run_cmd."
appverif.exe /logtofile disable
$successful_run=0
}
if ($successful_run -ne 1) {
Exit 1
}
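
The removed check above boils down to: run vainfo with the driver environment pointed at the freshly built binaries and look for an entrypoint name in its output. A minimal Python sketch of that same pattern, kept separate from the App Verifier handling (the executable path, environment values and entrypoint string are illustrative assumptions, not taken from this diff):

    # Run vainfo and report whether a given VA entrypoint shows up in its output.
    # Paths and environment values below are placeholders for illustration only.
    import os
    import subprocess

    def check_vainfo_entrypoint(vainfo_path: str, entrypoint: str) -> bool:
        env = dict(os.environ,
                   LIBVA_DRIVER_NAME="vaon12",
                   LIBVA_DRIVERS_PATH=os.path.dirname(vainfo_path))
        result = subprocess.run(
            [vainfo_path, "--display", "win32", "--device", "0"],
            capture_output=True, text=True, env=env, check=False,
        )
        return entrypoint in (result.stdout + result.stderr)

    if __name__ == "__main__":
        ok = check_vainfo_entrypoint(r"C:\mesa\_install\bin\vainfo.exe",
                                     "VAEntrypointVideoProc")
        raise SystemExit(0 if ok else 1)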


@@ -30,7 +30,7 @@ For rendering errors, attach screenshots of the problem and (if possible) of how
- Backtrace (for crashes) - Backtrace (for crashes)
- Output of `dmesg` - Output of `dmesg`
- Hang reports: Follow the steps described [here](https://docs.mesa3d.org/drivers/amd/hang-debugging.html#radv-debug-hang) and attach the files created in `$HOME/radv_dumps_*/`. - Hang reports: Run with `RADV_DEBUG=hang` and attach the files created in `$HOME/radv_dumps_*/`
### Steps to reproduce ### Steps to reproduce


@@ -84,8 +84,6 @@ Ben Widawsky <benjamin.widawsky@intel.com> Ben Widawsky <ben@bwidawsk.net>
Blair Sadewitz <blair.sadewitz@gmail.com> Blair Sadewitz <blair.sadewitz.gmail.com> Blair Sadewitz <blair.sadewitz@gmail.com> Blair Sadewitz <blair.sadewitz.gmail.com>
Bob Beckett <bob.beckett@collabora.com>
Boris Brezillon <boris.brezillon@collabora.com> <boris.brezillon@free-electrons.com> Boris Brezillon <boris.brezillon@collabora.com> <boris.brezillon@free-electrons.com>
Boris Peterbarg <reist@users.sourceforge.net> reist <reist> Boris Peterbarg <reist@users.sourceforge.net> reist <reist>
@@ -159,8 +157,6 @@ Christopher James Halse Rogers <christopher.halse.rogers@canonical.com> Christop
Christopher Li <chrisl@vmware.com> Chris Li <chrisl@vmware.com> Christopher Li <chrisl@vmware.com> Chris Li <chrisl@vmware.com>
Christopher Li <chrisl@vmware.com> Qicheng Christopher Li <chrisl@vmware.com> Christopher Li <chrisl@vmware.com> Qicheng Christopher Li <chrisl@vmware.com>
Christopher Michael <cmichael@igalia.com> <devilhorns@comcast.net>
Claudio Ciccani <klan@directfb.org> <klan@users.sf.net> Claudio Ciccani <klan@directfb.org> <klan@users.sf.net>
Claudio Ciccani <klan@directfb.org> <klan@users.sourceforge.net> Claudio Ciccani <klan@directfb.org> <klan@users.sourceforge.net>
@@ -172,7 +168,7 @@ Colin McDonald <cjmmail10-bz@yahoo.co.uk> <cjmcdonald@qinetiq.com>
Connor Abbott <cwabbott0@gmail.com> <connor.w.abbott@intel.com> Connor Abbott <cwabbott0@gmail.com> <connor.w.abbott@intel.com>
Connor Abbott <cwabbott0@gmail.com> <connor.abbott@intel.com> Connor Abbott <cwabbott0@gmail.com> <connor.abbott@intel.com>
Constantine Shablia <constantine.shablya@collabora.com> Konstantin Kharlamov <Hi-Angel@yandex.ru>
Corbin Simpson <MostAwesomeDude@gmail.com> <mostawesomed...@gmail.com> Corbin Simpson <MostAwesomeDude@gmail.com> <mostawesomed...@gmail.com>
Corbin Simpson <MostAwesomeDude@gmail.com> <mostawesomedude@gmail.com> Corbin Simpson <MostAwesomeDude@gmail.com> <mostawesomedude@gmail.com>
@@ -196,7 +192,6 @@ David Miller <davem@davemloft.net> davem69 <davem69>
David Heidelberg <david@ixit.cz> David Heidelberger <david.heidelberger@ixit.cz> David Heidelberg <david@ixit.cz> David Heidelberger <david.heidelberger@ixit.cz>
David Heidelberg <david@ixit.cz> <d.okias@gmail.com> David Heidelberg <david@ixit.cz> <d.okias@gmail.com>
David Heidelberg <david@ixit.cz> <david.heidelberg@collabora.com>
David Reveman <reveman@chromium.org> <c99drn@cs.umu.se> David Reveman <reveman@chromium.org> <c99drn@cs.umu.se>
@@ -236,7 +231,7 @@ Eric Engestrom <eric@igalia.com> <eric@engestrom.ch>
Eric Engestrom <eric@igalia.com> <eric.engestrom@imgtec.com> Eric Engestrom <eric@igalia.com> <eric.engestrom@imgtec.com>
Eric Engestrom <eric@igalia.com> <eric.engestrom@intel.com> Eric Engestrom <eric@igalia.com> <eric.engestrom@intel.com>
Erik Faye-Lund <erik.faye-lund@collabora.com> <kusmabite@gmail.com> Erik Faye-Lund <kusmabite@gmail.com> <erik.faye-lund@collabora.com>
Eugeni Dodonov <eugeni.dodonov@intel.com> <eugeni@mandriva.com> Eugeni Dodonov <eugeni.dodonov@intel.com> <eugeni@mandriva.com>
@@ -251,12 +246,9 @@ Frank Binns <frank.binns@imgtec.com> <francisbinns@gmail.com>
Frank Henigman <fjhenigman@google.com> <fjhenigman@chromium.org> Frank Henigman <fjhenigman@google.com> <fjhenigman@chromium.org>
Freya Gentz <zegentzy@protonmail.com>
George Sapountzis <gsapountzis@gmail.com> George Sapountzis <gsap7@yahoo.gr> George Sapountzis <gsapountzis@gmail.com> George Sapountzis <gsap7@yahoo.gr>
Gert Wollny <gert.wollny@collabora.com> <gw.fossdev@gmail.com> Gert Wollny <gert.wollny@collabora.com> <gw.fossdev@gmail.com>
Gert Wollny <gert.wollny@collabora.com> <gert.wollny@collabora.co.uk>
Gurchetan Singh <gurchetansingh@chromium.org> Gurchetan Singh <gurchetansingh@chromium.org>
@@ -378,9 +370,6 @@ Karl Schultz <karl.w.schultz@gmail.com> <k.w.schultz@comcast.net>
Karl Schultz <karl.w.schultz@gmail.com> <Karl.W.Schultz@gmail.com> Karl Schultz <karl.w.schultz@gmail.com> <Karl.W.Schultz@gmail.com>
Karl Schultz <karl.w.schultz@gmail.com> <kschultz@freedesktop.org> Karl Schultz <karl.w.schultz@gmail.com> <kschultz@freedesktop.org>
Karmjit Mahil <karmjit.mahil@igalia.com>
Karmjit Mahil <karmjit.mahil@igalia.com> <karmjit.mahil@imgtec.com>
Karol Herbst <kherbst@redhat.com> <git@karolherbst.de> Karol Herbst <kherbst@redhat.com> <git@karolherbst.de>
Karol Herbst <kherbst@redhat.com> <karolherbst@gmail.com> Karol Herbst <kherbst@redhat.com> <karolherbst@gmail.com>
Karol Herbst <kherbst@redhat.com> <nouveau@karolherbst.de> Karol Herbst <kherbst@redhat.com> <nouveau@karolherbst.de>
@@ -395,8 +384,6 @@ Keith Whitwell <keithw@vmware.com> keithw <keithw@keithw-laptop.(none)>
Kevin Rogovin <kevin.rogovin@intel.com> <kevin.rogovin@gmail.com> Kevin Rogovin <kevin.rogovin@intel.com> <kevin.rogovin@gmail.com>
Konstantin Kharlamov <Hi-Angel@yandex.ru>
Kristian Høgsberg <krh@bitplanet.net> <krh@redhat.com> Kristian Høgsberg <krh@bitplanet.net> <krh@redhat.com>
Kristian Høgsberg <krh@bitplanet.net> <krh@hinata.boston.redhat.com> Kristian Høgsberg <krh@bitplanet.net> <krh@hinata.boston.redhat.com>
Kristian Høgsberg <krh@bitplanet.net> <krh@sasori.boston.redhat.com> Kristian Høgsberg <krh@bitplanet.net> <krh@sasori.boston.redhat.com>
@@ -586,16 +573,9 @@ Robert Hooker <sarvatt@ubuntu.com> <robert.hooker@canonical.com>
Rodrigo Vivi <rodrigo.vivi@intel.com> <rodrigo.vivi@gmail.com> Rodrigo Vivi <rodrigo.vivi@intel.com> <rodrigo.vivi@gmail.com>
Rohan Garg <rohan.garg@intel.com> <rohan.garg@collabora.com>
Rohan Garg <rohan.garg@intel.com> <rohan@garg.io>
Roland Scheidegger <sroland@vmware.com> <rscheidegger@gmx.ch> Roland Scheidegger <sroland@vmware.com> <rscheidegger@gmx.ch>
Roland Scheidegger <sroland@vmware.com> <sroland@tungstengraphics.com> Roland Scheidegger <sroland@vmware.com> <sroland@tungstengraphics.com>
Roman Stratiienko <r.stratiienko@gmail.com> <roman.stratiienko@globallogic.com>
Roman Stratiienko <r.stratiienko@gmail.com> <roman.o.stratiienko@globallogic.com>
Roman Stratiienko <r.stratiienko@gmail.com> <roman.stratiienko@nure.ua>
Roy Spliet <rspliet@eclipso.eu> <r.spliet@student.tudelft.nl> Roy Spliet <rspliet@eclipso.eu> <r.spliet@student.tudelft.nl>
Rune Petersen <rune@megahurts.dk> Rune Peterson <rune@megahurts.dk> Rune Petersen <rune@megahurts.dk> Rune Peterson <rune@megahurts.dk>
@@ -672,7 +652,7 @@ Tom Stellard <thomas.stellard@amd.com> <tstellar@redhat.com>
Tomasz Figa <tfiga@chromium.org> <tomasz.figa@gmail.com> Tomasz Figa <tfiga@chromium.org> <tomasz.figa@gmail.com>
Tomeu Vizoso <tomeu@tomeuvizoso.net> <tomeu.vizoso@collabora.com> Tomeu Vizoso <tomeu.vizoso@collabora.com> <tomeu@tomeuvizoso.net>
Topi Pohjolainen <topi.pohjolainen@intel.com> <topi.pohjolainen@gmail.com> Topi Pohjolainen <topi.pohjolainen@intel.com> <topi.pohjolainen@gmail.com>


@@ -22,7 +22,6 @@ issues:
'dzn': 'dozen' 'dzn': 'dozen'
'egl': 'EGL' 'egl': 'EGL'
'etnaviv': 'etnaviv' 'etnaviv': 'etnaviv'
'etnaviv/nn': ['etnaviv', 'teflon']
'freedreno': 'freedreno' 'freedreno': 'freedreno'
'freedreno/ir3': ['freedreno', 'ir3'] 'freedreno/ir3': ['freedreno', 'ir3']
'gallium': 'gallium' 'gallium': 'gallium'
@@ -41,48 +40,47 @@ issues:
'iris': 'iris' 'iris': 'iris'
'isl': 'ISL' 'isl': 'ISL'
'lima': 'lima' 'lima': 'lima'
'lima/ppir': 'lima' 'lima/ppir' : 'lima'
'llvmpipe': 'llvmpipe' 'llvmpipe': 'llvmpipe'
'mesa': 'mesa' 'mesa' : 'mesa'
'meson': 'meson' 'meson' : 'meson'
'microsoft/compiler': 'd3d12' 'microsoft/compiler': 'd3d12'
'nak': 'NAK' 'nak' : 'NAK'
'nine': 'nine' 'nine' : 'nine'
'nir': 'NIR' 'nir': 'NIR'
'nir2dxil': 'd3d12' 'nir2dxil': 'd3d12'
'nouveau': 'nouveau' 'nouveau' : 'nouveau'
'nv30': ['nouveau', 'nv30'] 'nv30' : ['nouveau', 'nv30']
'nv50': ['nouveau', 'nv50'] 'nv50' : ['nouveau', 'nv50']
'nv50/ir': 'nouveau' 'nv50/ir' : 'nouveau'
'nvc0': ['nouveau', 'nvc0'] 'nvc0' : ['nouveau', 'nvc0']
'nvk': 'NVK' 'nvk' : 'NVK'
'panfrost': 'panfrost' 'panfrost' : 'panfrost'
'pan/midgard': 'panfrost' 'pan/midgard' : 'panfrost'
'pvr': 'powervr' 'pvr' : 'powervr'
'r100': 'r100' 'r100' : 'r100'
'r200': 'r200' 'r200' : 'r200'
'r300': 'r300' 'r300' : 'r300'
'r600': 'r600' 'r600' : 'r600'
'radeonsi': 'radeonsi' 'radeonsi' : 'radeonsi'
'radv': 'RADV' 'radv': 'RADV'
'radv/aco': ['RADV', 'ACO'] 'radv/aco': ['RADV', 'ACO']
'rusticl': 'Rusticl' 'rusticl': 'Rusticl'
'softpipe': 'softpipe' 'softpipe' : 'softpipe'
'spirv': 'SPIR-V' 'spirv' : 'SPIR-V'
'swr': 'swr' 'swr' : 'swr'
'swrast': 'swrast' 'swrast' : 'swrast'
'teflon': 'teflon' 'tegra' : 'tegra'
'tegra': 'tegra' 'tgsi' : 'TGSI'
'tgsi': 'TGSI'
'turnip': 'turnip' 'turnip': 'turnip'
'util': 'util' 'util' : 'util'
'v3d': 'v3d' 'v3d': 'v3d'
'vaapi': 'VA-API' 'vaapi' : 'VA-API'
'vc4': 'vc4' 'vc4' : 'vc4'
'vdpau': 'VDPAU' 'vdpau' : 'VDPAU'
'vieux': 'vieux' 'vieux' : 'vieux'
'virgl': 'virgl' 'virgl' : 'virgl'
'vmwgfx': 'vmwgfx' 'vmwgfx' : 'vmwgfx'
'zink': 'zink' 'zink': 'zink'
titles: &titles titles: &titles
@@ -90,21 +88,21 @@ issues:
'bisected': 'bisected' 'bisected': 'bisected'
'coverity': 'coverity' 'coverity': 'coverity'
'deqp': 'deqp' 'deqp': 'deqp'
'feature request': 'feature request' 'feature request': 'feature_request'
'haiku': 'haiku' 'haiku' : 'haiku'
'regression': 'regression' 'regression': 'regression'
'i915_dri.so': 'i915' 'i915_dri.so' : 'i915'
'i965_dri.so': 'i965' 'i965_dri.so' : 'i965'
'iris_dri.so': 'iris' 'iris_dri.so' : 'iris'
'nouveau_dri.so': 'nouveau' 'nouveau_dri.so' : 'nouveau'
'nouveau_vieux_dri.so': 'vieux' 'nouveau_vieux_dri.so' : 'vieux'
'r200_dri.so': 'r200' 'r200_dri.so' : 'r200'
'r300_dri.so': 'r300' 'r300_dri.so' : 'r300'
'r600_dri.so': 'r600' 'r600_dri.so' : 'r600'
'radeonsi_dri.so': 'radeonsi' 'radeonsi_dri.so' : 'radeonsi'
'swrast_dri.so': 'swrast' 'swrast_dri.so' : 'swrast'
'vmwgfx_dri.so': 'vmwgfx' 'vmwgfx_dri.so' : 'vmwgfx'
merge_requests: merge_requests:
topics: topics:
@@ -113,189 +111,169 @@ merge_requests:
*titles *titles
paths: paths:
'^.gitlab/issue_templates/': ['docs'] '^.gitlab/issue_templates/' : ['docs']
'^.gitlab-ci': ['CI'] '^.gitlab-ci' : ['CI']
'^.*/gitlab-ci(-inc)?.yml': ['CI'] '^.*/gitlab-ci.yml' : ['CI']
'^.*/ci/deqp-.*\.toml': ['CI'] '^.*/ci/' : ['CI']
'^.*/ci/.*-(fails|flakes|skips)\.txt': ['CI-Expectations'] '^.gitlab-ci/windows/' : ['Windows']
'^.*/ci/(restricted-)?traces-.*\.yml': ['CI-Expectations', 'trace'] '^bin/__init__.py$' : ['maintainer-scripts']
'^.*/ci/.*-validation-settings\.txt': ['CI-Expectations'] '^bin/gen_release_notes' : ['maintainer-scripts']
'^.*/ci/.*-vkd3d\.txt': ['CI-Expectations'] '^bin/git_sha1_gen.py$' : ['meson', 'android']
'^.gitlab-ci/windows/': ['Windows'] '^bin/install_megadrivers.py$' : ['meson']
'^bin/__init__.py$': ['maintainer-scripts'] '^bin/meson-cmd-extract.py$' : ['meson']
'^bin/gen_release_notes': ['maintainer-scripts'] '^bin/meson.build$' : ['meson']
'^bin/git_sha1_gen.py$': ['meson', 'android'] '^bin/pick-ui' : ['maintainer-scripts']
'^bin/install_megadrivers.py$': ['meson'] '^bin/pick/' : ['maintainer-scripts']
'^bin/meson-cmd-extract.py$': ['meson'] '^bin/post_version' : ['maintainer-scripts']
'^bin/meson.build$': ['meson'] '^bin/symbols-check.py$' : ['meson']
'^bin/pick-ui': ['maintainer-scripts'] '^docs/' : ['docs']
'^bin/pick/': ['maintainer-scripts'] '^include/drm-uapi/i915_drm.h' : ['intel']
'^bin/post_version': ['maintainer-scripts'] '^include/drm-uapi/xe_drm.h' : ['intel']
'^bin/symbols-check.py$': ['meson'] '^include/vulkan/' : ['vulkan']
'^docs/': ['docs'] '^meson_options.txt' : ['meson']
'^include/drm-uapi/i915_drm.h': ['intel'] '^README.rst' : ['docs']
'^include/drm-uapi/xe_drm.h': ['intel'] '^src/amd/addrlib/' : ['AMD common']
'^include/vulkan/': ['vulkan'] '^src/amd/common/' : ['AMD common']
'^meson_options.txt': ['meson'] '^src/amd/compiler/' : ['ACO']
'^README.rst': ['docs'] '^src/amd/llvm/' : ['AMD common']
'^src/amd/addrlib/': ['AMD common'] '^src/amd/registers/' : ['AMD common']
'^src/amd/common/': ['AMD common'] '^src/amd/vulkan/' : ['RADV']
'^src/amd/compiler/': ['ACO'] '^src/asahi/' : ['asahi']
'^src/amd/llvm/': ['AMD common'] '^src/broadcom/drm-shim/v3d' : ['v3d']
'^src/amd/registers/': ['AMD common'] '^src/broadcom/vulkan/' : ['v3dv']
'^src/amd/vulkan/': ['RADV'] '^src/compiler/clc' : ['OpenCL']
'^src/asahi/': ['asahi'] '^src/compiler/glsl' : ['GLSL']
'^src/broadcom/drm-shim/v3d': ['v3d'] '^src/compiler/nir' : ['NIR']
'^src/broadcom/vulkan/': ['v3dv'] '^src/compiler/spirv/' : ['SPIR-V']
'^src/compiler/clc': ['OpenCL'] '^src/egl/' : ['EGL']
'^src/compiler/glsl': ['GLSL'] '^src/egl/drivers/wgl/' : ['wgl']
'^src/compiler/nir': ['NIR'] '^src/etnaviv/' : ['etnaviv']
'^src/compiler/spirv/': ['SPIR-V'] '^src/freedreno/' : ['freedreno']
'^src/egl/': ['EGL'] '^src/freedreno/ir3/' : ['ir3']
'^src/egl/drivers/wgl/': ['wgl'] '^src/freedreno/vulkan/' : ['turnip']
'^src/etnaviv/': ['etnaviv'] '^src/gallium/auxiliary/' : ['gallium']
'^src/freedreno/afuc/': ['freedreno'] '^src/gallium/auxiliary/nir/' : ['NIR']
'^src/freedreno/common/': ['freedreno'] '^src/gallium/auxiliary/nir/.*tgsi.*' : ['TGSI']
'^src/freedreno/computerator/': ['freedreno'] '^src/gallium/auxiliary/tgsi/' : ['TGSI']
'^src/freedreno/decode/': ['freedreno'] '^src/gallium/docs/' : ['gallium']
'^src/freedreno/drm-shim/': ['freedreno'] '^src/gallium/drivers/asahi/' : ['asahi']
'^src/freedreno/drm/': ['freedreno'] '^src/gallium/drivers/crocus/' : ['crocus']
'^src/freedreno/ds/': ['freedreno'] '^src/gallium/drivers/d3d12/' : ['d3d12']
'^src/freedreno/fdl/': ['freedreno'] '^src/gallium/drivers/etnaviv/' : ['etnaviv']
'^src/freedreno/ir2/': ['freedreno'] '^src/gallium/drivers/freedreno/' : ['freedreno']
'^src/freedreno/ir3/': ['ir3'] '^src/gallium/drivers/grate/' : ['tegra']
'^src/freedreno/isa/': ['freedreno'] '^src/gallium/drivers/i915/' : ['i915g']
'^src/freedreno/perfcntrs/': ['freedreno'] '^src/gallium/drivers/iris/' : ['iris']
'^src/freedreno/registers/': ['freedreno'] '^src/gallium/drivers/lima/' : ['lima']
'^src/freedreno/rnn/': ['freedreno'] '^src/gallium/drivers/llvmpipe/' : ['llvmpipe']
'^src/freedreno/vulkan/': ['turnip'] '^src/gallium/drivers/nouveau/' : ['nouveau']
'^src/gallium/auxiliary/': ['gallium'] '^src/gallium/drivers/nouveau/nv30' : ['nv30']
'^src/gallium/auxiliary/nir/': ['NIR'] '^src/gallium/drivers/nouveau/nv50' : ['nv50']
'^src/gallium/auxiliary/nir/.*tgsi.*': ['TGSI'] '^src/gallium/drivers/nouveau/nvc0' : ['nvc0']
'^src/gallium/auxiliary/tgsi/': ['TGSI'] '^src/gallium/drivers/panfrost/' : ['panfrost']
'^src/gallium/docs/': ['gallium'] '^src/gallium/drivers/r300/' : ['r300']
'^src/gallium/drivers/asahi/': ['asahi'] '^src/gallium/drivers/r600/' : ['r600']
'^src/gallium/drivers/crocus/': ['crocus'] '^src/gallium/drivers/radeonsi' : ['radeonsi']
'^src/gallium/drivers/d3d12/': ['d3d12'] '^src/gallium/drivers/softpipe' : ['softpipe']
'^src/gallium/drivers/etnaviv/': ['etnaviv'] '^src/gallium/drivers/svga/' : ['svga']
'^src/gallium/drivers/etnaviv/etnaviv_ml_.*': ['teflon'] '^src/gallium/drivers/swr/' : ['swr']
'^src/gallium/drivers/freedreno/': ['freedreno'] '^src/gallium/drivers/tegra/' : ['tegra']
'^src/gallium/drivers/grate/': ['tegra'] '^src/gallium/drivers/v3d/' : ['v3d']
'^src/gallium/drivers/i915/': ['i915g'] '^src/gallium/drivers/vc4/' : ['vc4']
'^src/gallium/drivers/iris/': ['iris'] '^src/gallium/drivers/virgl/' : ['virgl']
'^src/gallium/drivers/lima/': ['lima'] '^src/gallium/drivers/zink/' : ['zink']
'^src/gallium/drivers/llvmpipe/': ['llvmpipe'] '^src/gallium/frontends/clover/' : ['clover']
'^src/gallium/drivers/nouveau/': ['nouveau'] '^src/gallium/frontends/dri/' : ['gallium']
'^src/gallium/drivers/nouveau/nv30': ['nv30'] '^src/gallium/frontends/glx/' : ['GLX']
'^src/gallium/drivers/nouveau/nv50': ['nv50'] '^src/gallium/frontends/hgl/' : ['haiku']
'^src/gallium/drivers/nouveau/nvc0': ['nvc0'] '^src/gallium/frontends/lavapipe/' : ['lavapipe']
'^src/gallium/drivers/panfrost/': ['panfrost'] '^src/gallium/frontends/nine/' : ['nine']
'^src/gallium/drivers/r300/': ['r300'] '^src/gallium/frontends/omx/' : ['omx']
'^src/gallium/drivers/r600/': ['r600'] '^src/gallium/frontends/osmesa/' : ['osmesa']
'^src/gallium/drivers/radeonsi': ['radeonsi'] '^src/gallium/frontends/rusticl/' : ['Rusticl']
'^src/gallium/drivers/softpipe': ['softpipe'] '^src/gallium/frontends/va/' : ['VA-API']
'^src/gallium/drivers/svga/': ['svga'] '^src/gallium/frontends/vdpau/' : ['VDPAU']
'^src/gallium/drivers/swr/': ['swr'] '^src/gallium/frontends/wgl/' : ['wgl']
'^src/gallium/drivers/tegra/': ['tegra'] # '^src/gallium/frontends/xa/' : ['']
'^src/gallium/drivers/v3d/': ['v3d'] '^src/gallium/include/' : ['gallium']
'^src/gallium/drivers/vc4/': ['vc4'] '^src/gallium/targets/' : ['gallium']
'^src/gallium/drivers/virgl/': ['virgl'] '^src/gallium/targets/opencl/' : ['clover']
'^src/gallium/drivers/zink/': ['zink'] '^src/gallium/targets/osmesa/' : ['osmesa']
'^src/gallium/frontends/clover/': ['clover'] '^src/gallium/targets/rusticl/' : ['Rusticl']
'^src/gallium/frontends/dri/': ['gallium'] '^src/gallium/tests/' : ['gallium']
'^src/gallium/frontends/glx/': ['GLX'] '^src/gallium/tools/' : ['gallium']
'^src/gallium/frontends/hgl/': ['haiku'] # '^src/gallium/winsys/amdgpu/' : ['']
'^src/gallium/frontends/lavapipe/': ['lavapipe'] '^src/gallium/winsys/crocus/' : ['crocus']
'^src/gallium/frontends/nine/': ['nine'] '^src/gallium/winsys/d3d12/' : ['d3d12']
'^src/gallium/frontends/omx/': ['omx'] '^src/gallium/winsys/etnaviv/' : ['etnaviv']
'^src/gallium/frontends/osmesa/': ['osmesa'] '^src/gallium/winsys/freedreno/' : ['freedreno']
'^src/gallium/frontends/rusticl/': ['Rusticl'] '^src/gallium/winsys/grate/' : ['tegra']
'^src/gallium/frontends/teflon/': ['teflon'] '^src/gallium/winsys/i915/' : ['i915g']
'^src/gallium/frontends/va/': ['VA-API'] '^src/gallium/winsys/iris/' : ['iris']
'^src/gallium/frontends/vdpau/': ['VDPAU'] # '^src/gallium/winsys/kmsro/' : ['']
'^src/gallium/frontends/wgl/': ['wgl'] '^src/gallium/winsys/lima/' : ['lima']
# '^src/gallium/frontends/xa/': [''] '^src/gallium/winsys/nouveau/' : ['nouveau']
'^src/gallium/include/': ['gallium'] '^src/gallium/winsys/panfrost/' : ['panfrost']
'^src/gallium/targets/': ['gallium'] # '^src/gallium/winsys/radeon/' : ['radeon']
'^src/gallium/targets/opencl/': ['clover'] '^src/gallium/winsys/svga/' : ['svga']
'^src/gallium/targets/osmesa/': ['osmesa'] # '^src/gallium/winsys/sw/' : ['']
'^src/gallium/targets/rusticl/': ['Rusticl'] '^src/gallium/winsys/sw/gdi/' : ['wgl']
'^src/gallium/tests/': ['gallium'] '^src/gallium/winsys/tegra/' : ['tegra']
'^src/gallium/tools/': ['gallium'] '^src/gallium/winsys/v3d/' : ['v3d']
# '^src/gallium/winsys/amdgpu/': [''] '^src/gallium/winsys/vc4/' : ['vc4']
'^src/gallium/winsys/crocus/': ['crocus'] '^src/gallium/winsys/virgl/' : ['virgl']
'^src/gallium/winsys/d3d12/': ['d3d12'] '^src/gbm/' : ['gbm']
'^src/gallium/winsys/etnaviv/': ['etnaviv'] '^src/glx/' : ['GLX']
'^src/gallium/winsys/freedreno/': ['freedreno'] '^src/imagination/' : ['powervr']
'^src/gallium/winsys/grate/': ['tegra'] '^src/intel/blorp/' : ['blorp']
'^src/gallium/winsys/i915/': ['i915g'] '^src/intel/common/' : ['intel']
'^src/gallium/winsys/iris/': ['iris'] '^src/intel/compiler/' : ['intel-compiler']
# '^src/gallium/winsys/kmsro/': [''] '^src/intel/compiler/brw' : ['intel-brw']
'^src/gallium/winsys/lima/': ['lima'] '^src/intel/compiler/elk' : ['intel-elk']
'^src/gallium/winsys/nouveau/': ['nouveau'] '^src/intel/dev/' : ['intel']
'^src/gallium/winsys/panfrost/': ['panfrost'] '^src/intel/ds/' : ['intel']
# '^src/gallium/winsys/radeon/': ['radeon'] '^src/intel/genxml/' : ['intel']
'^src/gallium/winsys/svga/': ['svga'] '^src/intel/isl/' : ['ISL']
# '^src/gallium/winsys/sw/': [''] '^src/intel/nullhw-layer/' : ['intel']
'^src/gallium/winsys/sw/gdi/': ['wgl'] '^src/intel/perf/' : ['intel']
'^src/gallium/winsys/tegra/': ['tegra'] '^src/intel/tools/' : ['intel-tools']
'^src/gallium/winsys/v3d/': ['v3d'] '^src/intel/vulkan/' : ['ANV']
'^src/gallium/winsys/vc4/': ['vc4'] '^src/intel/vulkan_hasvk/' : ['hasvk']
'^src/gallium/winsys/virgl/': ['virgl'] '^src/loader/' : ['loader']
'^src/gbm/': ['gbm'] '^src/mapi/' : ['mapi']
'^src/glx/': ['GLX'] '^src/mesa/drivers/dri/i915/' : ['i915']
'^src/imagination/': ['powervr'] '^src/mesa/drivers/dri/i965/' : ['i965']
'^src/intel/blorp/': ['blorp'] '^src/mesa/drivers/dri/nouveau/' : ['vieux']
'^src/intel/common/': ['intel'] '^src/mesa/drivers/dri/r200/' : ['r200']
'^src/intel/compiler/': ['intel-compiler'] '^src/mesa/drivers/dri/radeon/' : ['radeon']
'^src/intel/compiler/brw': ['intel-brw'] '^src/mesa/drivers/dri/swrast/' : ['swrast']
'^src/intel/compiler/elk': ['intel-elk'] '^src/mesa/drivers/osmesa' : ['osmesa']
'^src/intel/dev/': ['intel'] '^src/mesa/main/' : ['mesa']
'^src/intel/ds/': ['intel'] '^src/mesa/state_tracker/.*glsl.*' : ['GLSL']
'^src/intel/genxml/': ['intel'] '^src/mesa/state_tracker/.*tgsi.*' : ['TGSI']
'^src/intel/isl/': ['ISL'] '^src/mesa/state_tracker/.*nir.*' : ['NIR']
'^src/intel/nullhw-layer/': ['intel'] '^src/microsoft/clc/' : ['d3d12']
'^src/intel/perf/': ['intel'] '^src/microsoft/compiler/' : ['d3d12']
'^src/intel/tools/': ['intel-tools'] '^src/microsoft/spirv_to_dxil/' : ['dozen']
'^src/intel/vulkan/': ['ANV'] '^src/microsoft/vulkan/' : ['dozen']
'^src/intel/vulkan_hasvk/': ['hasvk'] '^src/nouveau/codegen/' : ['nouveau']
'^src/loader/': ['loader'] '^src/nouveau/compiler/' : ['NAK']
'^src/mapi/': ['mapi'] '^src/nouveau/drm-shim/' : ['nouveau']
'^src/mesa/drivers/dri/i915/': ['i915'] '^src/nouveau/mme/' : ['NVK']
'^src/mesa/drivers/dri/i965/': ['i965'] '^src/nouveau/nil/' : ['NVK']
'^src/mesa/drivers/dri/nouveau/': ['vieux'] '^src/nouveau/nvidia-headers/' : ['NVK']
'^src/mesa/drivers/dri/r200/': ['r200'] '^src/nouveau/vulkan/' : ['NVK']
'^src/mesa/drivers/dri/radeon/': ['radeon'] '^src/nouveau/winsys/' : ['NVK']
'^src/mesa/drivers/dri/swrast/': ['swrast'] '^src/panfrost/' : ['panfrost']
'^src/mesa/drivers/osmesa': ['osmesa'] '^src/virtio/vulkan/' : ['venus']
'^src/mesa/main/': ['mesa'] '^src/virtio/venus-protocol/' : ['venus']
'^src/mesa/state_tracker/.*glsl.*': ['GLSL'] '^src/virtio/ci/' : ['venus']
'^src/mesa/state_tracker/.*tgsi.*': ['TGSI'] '^src/util/' : ['util']
'^src/mesa/state_tracker/.*nir.*': ['NIR'] '^src/util/00-mesa-defaults.conf' : ['drirc']
'^src/microsoft/clc/': ['d3d12'] '^src/vulkan/' : ['vulkan']
'^src/microsoft/compiler/': ['d3d12'] '^VERSION$' : ['maintainer-scripts']
'^src/microsoft/spirv_to_dxil/': ['dozen']
'^src/microsoft/vulkan/': ['dozen']
'^src/nouveau/codegen/': ['nouveau']
'^src/nouveau/compiler/': ['NAK']
'^src/nouveau/drm-shim/': ['nouveau']
'^src/nouveau/drm/': ['nouveau']
'^src/nouveau/headers/': ['NVK']
'^src/nouveau/mme/': ['NVK']
'^src/nouveau/nil/': ['NVK']
'^src/nouveau/vulkan/': ['NVK']
'^src/nouveau/winsys/': ['NVK']
'^src/panfrost/': ['panfrost']
'^src/virtio/vulkan/': ['venus']
'^src/virtio/venus-protocol/': ['venus']
'^src/virtio/ci/': ['venus']
'^src/util/': ['util']
'^src/util/00-mesa-defaults.conf': ['drirc']
'^src/vulkan/': ['vulkan']
'^src/vulkan/wsi/': ['wsi']
'^VERSION$': ['maintainer-scripts']
'Android': ['android'] 'Android' : ['android']
'EGL': ['EGL'] 'EGL' : ['EGL']
'meson.build': ['meson'] 'meson.build' : ['meson']
'wayland': ['wayland'] 'wayland' : ['wayland']
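
The mapping above ties path regexes to label lists; a labeler collects every label whose pattern matches one of the changed paths in a merge request. A small Python sketch of that matching step (the rule set and file list below are made-up examples, not the project's real configuration):

    # Regex-path to label matching, as a labeler config like the one above implies.
    import re

    RULES = {
        r"^src/amd/vulkan/": ["RADV"],
        r"^src/amd/compiler/": ["ACO"],
        r"^\.gitlab-ci": ["CI"],
    }

    def labels_for(changed_paths):
        labels = set()
        for path in changed_paths:
            for pattern, names in RULES.items():
                if re.search(pattern, path):
                    labels.update(names)
        return sorted(labels)

    print(labels_for(["src/amd/vulkan/radv_device.c", ".gitlab-ci/image-tags.yml"]))
    # -> ['CI', 'RADV']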

File diff suppressed because it is too large


@@ -13,8 +13,8 @@ Build & install
--------------- ---------------
You can find more information in our documentation (`docs/install.rst You can find more information in our documentation (`docs/install.rst
<https://docs.mesa3d.org/install.html>`_), but the recommended way is to use <https://mesa3d.org/install.html>`_), but the recommended way is to use
Meson (`docs/meson.rst <https://docs.mesa3d.org/meson.html>`_): Meson (`docs/meson.rst <https://mesa3d.org/meson.html>`_):
.. code-block:: sh .. code-block:: sh
@@ -46,7 +46,7 @@ Bug reports
----------- -----------
If you think something isn't working properly, please file a bug report If you think something isn't working properly, please file a bug report
(`docs/bugs.rst <https://docs.mesa3d.org/bugs.html>`_). (`docs/bugs.rst <https://mesa3d.org/bugs.html>`_).
Contributing Contributing
@@ -54,6 +54,6 @@ Contributing
Contributions are welcome, and step-by-step instructions can be found in our Contributions are welcome, and step-by-step instructions can be found in our
documentation (`docs/submittingpatches.rst documentation (`docs/submittingpatches.rst
<https://docs.mesa3d.org/submittingpatches.html>`_). <https://mesa3d.org/submittingpatches.html>`_).
Note that Mesa uses gitlab for patches submission, review and discussions. Note that Mesa uses gitlab for patches submission, review and discussions.


@@ -1 +1 @@
24.2.7 24.1.0-devel


@@ -27,12 +27,6 @@ LOCAL_PATH := $(call my-dir)
MESA3D_TOP := $(dir $(LOCAL_PATH)) MESA3D_TOP := $(dir $(LOCAL_PATH))
LIBDRM_VERSION = $(shell cat external/libdrm/meson.build | grep -o "\<version\>\s*:\s*'\w*\.\w*\.\w*'" | grep -o "\w*\.\w*\.\w*" | head -1) LIBDRM_VERSION = $(shell cat external/libdrm/meson.build | grep -o "\<version\>\s*:\s*'\w*\.\w*\.\w*'" | grep -o "\w*\.\w*\.\w*" | head -1)
LLVM_VERSION_MAJOR = $(shell \
if [ -f external/llvm-project/cmake/Modules/LLVMVersion.cmake ]; then \
grep 'set.LLVM_VERSION_MAJOR ' external/llvm-project/cmake/Modules/LLVMVersion.cmake | grep -o '[0-9]\+'; \
else \
grep 'set.LLVM_VERSION_MAJOR ' external/llvm-project/llvm/CMakeLists.txt | grep -o '[0-9]\+'; \
fi)
MESA_VK_LIB_SUFFIX_amd := radeon MESA_VK_LIB_SUFFIX_amd := radeon
MESA_VK_LIB_SUFFIX_intel := intel MESA_VK_LIB_SUFFIX_intel := intel
@@ -92,8 +86,8 @@ MESON_GEN_PKGCONFIGS += DirectX-Headers
endif endif
ifneq ($(MESON_GEN_LLVM_STUB),) ifneq ($(MESON_GEN_LLVM_STUB),)
MESON_LLVM_VERSION := $(LLVM_VERSION_MAJOR).0.0 MESON_LLVM_VERSION := 12.0.0
LOCAL_SHARED_LIBRARIES += libLLVM$(LLVM_VERSION_MAJOR) LOCAL_SHARED_LIBRARIES += libLLVM12
endif endif
ifeq ($(shell test $(PLATFORM_SDK_VERSION) -ge 30; echo $$?), 0) ifeq ($(shell test $(PLATFORM_SDK_VERSION) -ge 30; echo $$?), 0)
@@ -163,9 +157,9 @@ endif
endef endef
ifneq ($(strip $(BOARD_MESA3D_GALLIUM_DRIVERS)),) ifneq ($(strip $(BOARD_MESA3D_GALLIUM_DRIVERS)),)
# Module 'libgallium_dri', produces '/vendor/lib{64}/libgallium_dri.so' # Module 'libgallium_dri', produces '/vendor/lib{64}/dri/libgallium_dri.so'
# This module also trigger DRI symlinks creation process # This module also trigger DRI symlinks creation process
$(eval $(call mesa3d-lib,libgallium_dri,,MESA3D_GALLIUM_BIN)) $(eval $(call mesa3d-lib,libgallium_dri,dri,MESA3D_GALLIUM_DRI_BIN))
# Module 'libglapi', produces '/vendor/lib{64}/libglapi.so' # Module 'libglapi', produces '/vendor/lib{64}/libglapi.so'
$(eval $(call mesa3d-lib,libglapi,,MESA3D_LIBGLAPI_BIN)) $(eval $(call mesa3d-lib,libglapi,,MESA3D_LIBGLAPI_BIN))
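
The new LLVM_VERSION_MAJOR logic above greps whichever of the two CMake files exists for its set(LLVM_VERSION_MAJOR <n>) line, preferring LLVMVersion.cmake. The same lookup sketched in Python (only the two relative paths come from the diff; the rest is illustrative):

    # Find the LLVM major version the way the Android.mk hunk above does:
    # prefer cmake/Modules/LLVMVersion.cmake, fall back to llvm/CMakeLists.txt.
    import re
    from pathlib import Path

    def llvm_version_major(llvm_root="external/llvm-project"):
        candidates = [
            Path(llvm_root, "cmake/Modules/LLVMVersion.cmake"),
            Path(llvm_root, "llvm/CMakeLists.txt"),
        ]
        for path in candidates:
            if path.is_file():
                match = re.search(r"set\(LLVM_VERSION_MAJOR\s+(\d+)", path.read_text())
                return int(match.group(1)) if match else None
        return None

    print(llvm_version_major())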


@@ -63,8 +63,8 @@ MESON_OUT_DIR := $($(M_TARGET_PREFIX)TARGET_OUT_INTER
MESON_GEN_DIR := $(MESON_OUT_DIR)_GEN MESON_GEN_DIR := $(MESON_OUT_DIR)_GEN
MESON_GEN_FILES_TARGET := $(MESON_GEN_DIR)/.timestamp MESON_GEN_FILES_TARGET := $(MESON_GEN_DIR)/.timestamp
MESA3D_GALLIUM_DIR := $(MESON_OUT_DIR)/install/usr/local/lib MESA3D_GALLIUM_DRI_DIR := $(MESON_OUT_DIR)/install/usr/local/lib/dri
$(M_TARGET_PREFIX)MESA3D_GALLIUM_BIN := $(MESON_OUT_DIR)/install/usr/local/lib/libgallium_dri.so $(M_TARGET_PREFIX)MESA3D_GALLIUM_DRI_BIN := $(MESON_OUT_DIR)/install/usr/local/lib/libgallium_dri.so
$(M_TARGET_PREFIX)MESA3D_LIBEGL_BIN := $(MESON_OUT_DIR)/install/usr/local/lib/libEGL.so $(M_TARGET_PREFIX)MESA3D_LIBEGL_BIN := $(MESON_OUT_DIR)/install/usr/local/lib/libEGL.so
$(M_TARGET_PREFIX)MESA3D_LIBGLESV1_BIN := $(MESON_OUT_DIR)/install/usr/local/lib/libGLESv1_CM.so $(M_TARGET_PREFIX)MESA3D_LIBGLESV1_BIN := $(MESON_OUT_DIR)/install/usr/local/lib/libGLESv1_CM.so
$(M_TARGET_PREFIX)MESA3D_LIBGLESV2_BIN := $(MESON_OUT_DIR)/install/usr/local/lib/libGLESv2.so $(M_TARGET_PREFIX)MESA3D_LIBGLESV2_BIN := $(MESON_OUT_DIR)/install/usr/local/lib/libGLESv2.so
@@ -73,7 +73,6 @@ $(M_TARGET_PREFIX)MESA3D_LIBGBM_BIN := $(MESON_OUT_DIR)/install/usr/local/l
MESA3D_GLES_BINS := \ MESA3D_GLES_BINS := \
$($(M_TARGET_PREFIX)MESA3D_GALLIUM_BIN) \
$($(M_TARGET_PREFIX)MESA3D_LIBEGL_BIN) \ $($(M_TARGET_PREFIX)MESA3D_LIBEGL_BIN) \
$($(M_TARGET_PREFIX)MESA3D_LIBGLESV1_BIN) \ $($(M_TARGET_PREFIX)MESA3D_LIBGLESV1_BIN) \
$($(M_TARGET_PREFIX)MESA3D_LIBGLESV2_BIN) \ $($(M_TARGET_PREFIX)MESA3D_LIBGLESV2_BIN) \
@@ -285,11 +284,16 @@ endif
$(MESON_BUILD) $(MESON_BUILD)
touch $@ touch $@
MESON_COPY_LIBGALLIUM := \
cp `ls -1 $(MESA3D_GALLIUM_DRI_DIR)/* | head -1` $($(M_TARGET_PREFIX)MESA3D_GALLIUM_DRI_BIN)
$(MESON_OUT_DIR)/install/.install.timestamp: MESON_COPY_LIBGALLIUM:=$(MESON_COPY_LIBGALLIUM)
$(MESON_OUT_DIR)/install/.install.timestamp: MESON_BUILD:=$(MESON_BUILD) $(MESON_OUT_DIR)/install/.install.timestamp: MESON_BUILD:=$(MESON_BUILD)
$(MESON_OUT_DIR)/install/.install.timestamp: $(MESON_OUT_DIR)/.build.timestamp $(MESON_OUT_DIR)/install/.install.timestamp: $(MESON_OUT_DIR)/.build.timestamp
rm -rf $(dir $@) rm -rf $(dir $@)
mkdir -p $(dir $@) mkdir -p $(dir $@)
DESTDIR=$(call relative-to-absolute,$(dir $@)) $(MESON_BUILD) install DESTDIR=$(call relative-to-absolute,$(dir $@)) $(MESON_BUILD) install
$(if $(BOARD_MESA3D_GALLIUM_DRIVERS),$(MESON_COPY_LIBGALLIUM))
touch $@ touch $@
$($(M_TARGET_PREFIX)MESA3D_LIBGBM_BIN) $(MESA3D_GLES_BINS): $(MESON_OUT_DIR)/install/.install.timestamp $($(M_TARGET_PREFIX)MESA3D_LIBGBM_BIN) $(MESA3D_GLES_BINS): $(MESON_OUT_DIR)/install/.install.timestamp
@@ -304,3 +308,14 @@ $(MESON_OUT_DIR)/install/usr/local/lib/libvulkan_$(MESA_VK_LIB_SUFFIX_$1).so: $(
endef endef
$(foreach driver,$(BOARD_MESA3D_VULKAN_DRIVERS), $(eval $(call vulkan_target,$(driver)))) $(foreach driver,$(BOARD_MESA3D_VULKAN_DRIVERS), $(eval $(call vulkan_target,$(driver))))
$($(M_TARGET_PREFIX)TARGET_OUT_VENDOR_SHARED_LIBRARIES)/dri/.symlinks.timestamp: MESA3D_GALLIUM_DRI_DIR:=$(MESA3D_GALLIUM_DRI_DIR)
$($(M_TARGET_PREFIX)TARGET_OUT_VENDOR_SHARED_LIBRARIES)/dri/.symlinks.timestamp: $(MESON_OUT_DIR)/install/.install.timestamp
# Create Symlinks
mkdir -p $(dir $@)
ls -1 $(MESA3D_GALLIUM_DRI_DIR)/ | PATH=/usr/bin:$$PATH xargs -I{} ln -s -f libgallium_dri.so $(dir $@)/{}
touch $@
$($(M_TARGET_PREFIX)MESA3D_GALLIUM_DRI_BIN): $(TARGET_OUT_VENDOR)/$(MESA3D_LIB_DIR)/dri/.symlinks.timestamp
echo "Build $@"
touch $@
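
The removed rules above copied the first file out of the dri/ install directory and then created one symlink per driver name, all pointing at libgallium_dri.so. That symlink step, sketched in Python (directory paths are placeholders, not the real $(MESON_OUT_DIR) locations):

    # Per-driver symlink creation as performed by the removed make rules:
    # every entry in the dri/ install dir becomes a symlink to libgallium_dri.so.
    import os
    from pathlib import Path

    def create_dri_symlinks(dri_install_dir, symlink_dir):
        out = Path(symlink_dir)
        out.mkdir(parents=True, exist_ok=True)
        for entry in sorted(os.listdir(dri_install_dir)):
            link = out / entry
            if link.is_symlink() or link.exists():
                link.unlink()                       # mirror `ln -s -f`
            link.symlink_to("libgallium_dri.so")    # relative target

    # create_dri_symlinks("out/install/usr/local/lib/dri", "vendor/lib64/dri")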


@@ -21,7 +21,7 @@ from concurrent.futures import ThreadPoolExecutor
from functools import partial from functools import partial
from itertools import chain from itertools import chain
from subprocess import check_output, CalledProcessError from subprocess import check_output, CalledProcessError
from typing import Dict, TYPE_CHECKING, Iterable, Literal, Optional, Tuple from typing import TYPE_CHECKING, Iterable, Literal, Optional
import gitlab import gitlab
import gitlab.v4.objects import gitlab.v4.objects
@@ -53,119 +53,90 @@ STATUS_COLORS = {
"success": Fore.GREEN, "success": Fore.GREEN,
"failed": Fore.RED, "failed": Fore.RED,
"canceled": Fore.MAGENTA, "canceled": Fore.MAGENTA,
"canceling": Fore.MAGENTA,
"manual": "", "manual": "",
"pending": "", "pending": "",
"skipped": "", "skipped": "",
} }
COMPLETED_STATUSES = {"success", "failed"} COMPLETED_STATUSES = ["success", "failed"]
RUNNING_STATUSES = {"created", "pending", "running"}
def print_job_status( def print_job_status(job, new_status=False) -> None:
job: gitlab.v4.objects.ProjectPipelineJob,
new_status: bool = False,
job_name_field_pad: int = 0,
) -> None:
"""It prints a nice, colored job status with a link to the job.""" """It prints a nice, colored job status with a link to the job."""
if job.status in {"canceled", "canceling"}: if job.status == "canceled":
return return
if new_status and job.status == "created": if new_status and job.status == "created":
return return
job_name_field_pad = len(job.name) if job_name_field_pad < 1 else job_name_field_pad if job.duration:
duration = job.duration
duration = job_duration(job) elif job.started_at:
duration = time.perf_counter() - time.mktime(job.started_at.timetuple())
print( print(
STATUS_COLORS[job.status] STATUS_COLORS[job.status]
+ "🞋 target job " # U+1F78B Round target + "🞋 job "
+ link2print(job.web_url, job.name, job_name_field_pad) + URL_START
+ (f"has new status: {job.status}" if new_status else f"{job.status}") + f"{job.web_url}\a{job.name}"
+ URL_END
+ (f" has new status: {job.status}" if new_status else f" :: {job.status}")
+ (f" ({pretty_duration(duration)})" if job.started_at else "") + (f" ({pretty_duration(duration)})" if job.started_at else "")
+ Style.RESET_ALL + Style.RESET_ALL
) )
def job_duration(job: gitlab.v4.objects.ProjectPipelineJob) -> float:
"""
Given a job, report the time lapsed in execution.
:param job: Pipeline job
:return: Current time in execution
"""
if job.duration:
return job.duration
elif job.started_at:
return time.perf_counter() - time.mktime(job.started_at.timetuple())
return 0.0
def pretty_wait(sec: int) -> None: def pretty_wait(sec: int) -> None:
"""shows progressbar in dots""" """shows progressbar in dots"""
for val in range(sec, 0, -1): for val in range(sec, 0, -1):
print(f"⏲ {val} seconds", end="\r") # U+23F2 Timer clock print(f"{val} seconds", end="\r")
time.sleep(1) time.sleep(1)
def monitor_pipeline( def monitor_pipeline(
project: gitlab.v4.objects.Project, project,
pipeline: gitlab.v4.objects.ProjectPipeline, pipeline,
target_jobs_regex: re.Pattern, target_jobs_regex: re.Pattern,
dependencies: set[str], dependencies,
force_manual: bool, force_manual: bool,
stress: int, stress: int,
) -> tuple[Optional[int], Optional[int], Dict[str, Dict[int, Tuple[float, str, str]]]]: ) -> tuple[Optional[int], Optional[int]]:
"""Monitors pipeline and delegate canceling jobs""" """Monitors pipeline and delegate canceling jobs"""
statuses: dict[str, str] = defaultdict(str) statuses: dict[str, str] = defaultdict(str)
target_statuses: dict[str, str] = defaultdict(str) target_statuses: dict[str, str] = defaultdict(str)
stress_status_counter: dict[str, dict[str, int]] = defaultdict(lambda: defaultdict(int)) stress_status_counter = defaultdict(lambda: defaultdict(int))
execution_times = defaultdict(lambda: defaultdict(tuple)) target_id = None
target_id: int = -1
name_field_pad: int = len(max(dependencies, key=len))+2
# Pre-populate the stress status counter for already completed target jobs.
if stress:
# When stress test, it is necessary to collect this information before start.
for job in pipeline.jobs.list(all=True, include_retried=True):
if target_jobs_regex.fullmatch(job.name) and job.status in COMPLETED_STATUSES:
stress_status_counter[job.name][job.status] += 1
execution_times[job.name][job.id] = (job_duration(job), job.status, job.web_url)
while True: while True:
deps_failed = [] deps_failed = []
to_cancel = [] to_cancel = []
for job in sorted(pipeline.jobs.list(all=True), key=lambda j: j.name): for job in pipeline.jobs.list(all=True, sort="desc"):
# target jobs # target jobs
if target_jobs_regex.fullmatch(job.name): if target_jobs_regex.fullmatch(job.name):
target_id = job.id target_id = job.id
target_status = job.status
if stress and target_status in COMPLETED_STATUSES: if stress and job.status in ["success", "failed"]:
if ( if (
stress < 0 stress < 0
or sum(stress_status_counter[job.name].values()) < stress or sum(stress_status_counter[job.name].values()) < stress
): ):
stress_status_counter[job.name][target_status] += 1 job = enable_job(project, pipeline, job, "retry", force_manual)
execution_times[job.name][job.id] = (job_duration(job), target_status, job.web_url) stress_status_counter[job.name][job.status] += 1
job = enable_job(project, pipeline, job, "retry", force_manual, name_field_pad)
else: else:
execution_times[job.name][job.id] = (job_duration(job), target_status, job.web_url) job = enable_job(project, pipeline, job, "target", force_manual)
job = enable_job(project, pipeline, job, "target", force_manual, name_field_pad)
print_job_status(job, target_status not in target_statuses[job.name], name_field_pad) print_job_status(job, job.status not in target_statuses[job.name])
target_statuses[job.name] = target_status target_statuses[job.name] = job.status
continue continue
# all other non-target jobs # all jobs
if job.status != statuses[job.name]: if job.status != statuses[job.name]:
print_job_status(job, True, name_field_pad) print_job_status(job, True)
statuses[job.name] = job.status statuses[job.name] = job.status
# run dependencies and cancel the rest # run dependencies and cancel the rest
if job.name in dependencies: if job.name in dependencies:
job = enable_job(project, pipeline, job, "dep", True, name_field_pad) job = enable_job(project, pipeline, job, "dep", True)
if job.status == "failed": if job.status == "failed":
deps_failed.append(job.name) deps_failed.append(job.name)
else: else:
@@ -175,9 +146,9 @@ def monitor_pipeline(
if stress: if stress:
enough = True enough = True
for job_name, status in sorted(stress_status_counter.items()): for job_name, status in stress_status_counter.items():
print( print(
f"* {job_name:{name_field_pad}}succ: {status['success']}; " f"{job_name}\tsucc: {status['success']}; "
f"fail: {status['failed']}; " f"fail: {status['failed']}; "
f"total: {sum(status.values())} of {stress}", f"total: {sum(status.values())} of {stress}",
flush=False, flush=False,
@@ -191,20 +162,20 @@ def monitor_pipeline(
print("---------------------------------", flush=False) print("---------------------------------", flush=False)
if len(target_statuses) == 1 and RUNNING_STATUSES.intersection( if len(target_statuses) == 1 and {"running"}.intersection(
target_statuses.values() target_statuses.values()
): ):
return target_id, None, execution_times return target_id, None
if ( if (
{"failed"}.intersection(target_statuses.values()) {"failed"}.intersection(target_statuses.values())
and not RUNNING_STATUSES.intersection(target_statuses.values()) and not set(["running", "pending"]).intersection(target_statuses.values())
): ):
return None, 1, execution_times return None, 1
if ( if (
{"skipped"}.intersection(target_statuses.values()) {"skipped"}.intersection(target_statuses.values())
and not RUNNING_STATUSES.intersection(target_statuses.values()) and not {"running", "pending"}.intersection(target_statuses.values())
): ):
print( print(
Fore.RED, Fore.RED,
@@ -212,20 +183,20 @@ def monitor_pipeline(
deps_failed, deps_failed,
Fore.RESET, Fore.RESET,
) )
return None, 1, execution_times return None, 1
if {"success", "manual"}.issuperset(target_statuses.values()): if {"success", "manual"}.issuperset(target_statuses.values()):
return None, 0, execution_times return None, 0
pretty_wait(REFRESH_WAIT_JOBS) pretty_wait(REFRESH_WAIT_JOBS)
def get_pipeline_job( def get_pipeline_job(
pipeline: gitlab.v4.objects.ProjectPipeline, pipeline: gitlab.v4.objects.ProjectPipeline,
job_id: int, id: int,
) -> gitlab.v4.objects.ProjectPipelineJob: ) -> gitlab.v4.objects.ProjectPipelineJob:
pipeline_jobs = pipeline.jobs.list(all=True) pipeline_jobs = pipeline.jobs.list(all=True)
return [j for j in pipeline_jobs if j.id == job_id][0] return [j for j in pipeline_jobs if j.id == id][0]
def enable_job( def enable_job(
@@ -234,19 +205,18 @@ def enable_job(
job: gitlab.v4.objects.ProjectPipelineJob, job: gitlab.v4.objects.ProjectPipelineJob,
action_type: Literal["target", "dep", "retry"], action_type: Literal["target", "dep", "retry"],
force_manual: bool, force_manual: bool,
job_name_field_pad: int = 0,
) -> gitlab.v4.objects.ProjectPipelineJob: ) -> gitlab.v4.objects.ProjectPipelineJob:
"""enable job""" """enable job"""
if ( if (
(job.status in COMPLETED_STATUSES and action_type != "retry") (job.status in ["success", "failed"] and action_type != "retry")
or (job.status == "manual" and not force_manual) or (job.status == "manual" and not force_manual)
or job.status in {"skipped"} | RUNNING_STATUSES or job.status in ["skipped", "running", "created", "pending"]
): ):
return job return job
pjob = project.jobs.get(job.id, lazy=True) pjob = project.jobs.get(job.id, lazy=True)
if job.status in {"success", "failed", "canceled", "canceling"}: if job.status in ["success", "failed", "canceled"]:
new_job = pjob.retry() new_job = pjob.retry()
job = get_pipeline_job(pipeline, new_job["id"]) job = get_pipeline_job(pipeline, new_job["id"])
else: else:
@@ -254,34 +224,32 @@ def enable_job(
job = get_pipeline_job(pipeline, pjob.id) job = get_pipeline_job(pipeline, pjob.id)
if action_type == "target": if action_type == "target":
jtype = "🞋 target" # U+1F78B Round target jtype = "🞋 "
elif action_type == "retry": elif action_type == "retry":
jtype = "↻ retrying" # U+21BB Clockwise open circle arrow jtype = "↻"
else: else:
jtype = "↪ dependency" # U+21AA Left Arrow Curving Right jtype = "(dependency)"
job_name_field_pad = len(job.name) if job_name_field_pad < 1 else job_name_field_pad print(Fore.MAGENTA + f"{jtype} job {job.name} manually enabled" + Style.RESET_ALL)
print(Fore.MAGENTA + f"{jtype} job {job.name:{job_name_field_pad}}manually enabled" + Style.RESET_ALL)
return job return job
def cancel_job( def cancel_job(project, job) -> None:
project: gitlab.v4.objects.Project,
job: gitlab.v4.objects.ProjectPipelineJob
) -> None:
"""Cancel GitLab job""" """Cancel GitLab job"""
if job.status not in RUNNING_STATUSES: if job.status in [
"canceled",
"success",
"failed",
"skipped",
]:
return return
pjob = project.jobs.get(job.id, lazy=True) pjob = project.jobs.get(job.id, lazy=True)
pjob.cancel() pjob.cancel()
print(f"🗙 {job.name}", end=" ") # U+1F5D9 Cancellation X print(f" {job.name}", end=" ")
def cancel_jobs( def cancel_jobs(project, to_cancel) -> None:
project: gitlab.v4.objects.Project,
to_cancel: list
) -> None:
"""Cancel unwanted GitLab jobs""" """Cancel unwanted GitLab jobs"""
if not to_cancel: if not to_cancel:
return return
@@ -292,10 +260,7 @@ def cancel_jobs(
print() print()
def print_log( def print_log(project, job_id) -> None:
project: gitlab.v4.objects.Project,
job_id: int
) -> None:
"""Print job log into output""" """Print job log into output"""
printed_lines = 0 printed_lines = 0
while True: while True:
@@ -313,7 +278,7 @@ def print_log(
pretty_wait(REFRESH_WAIT_LOG) pretty_wait(REFRESH_WAIT_LOG)
def parse_args() -> argparse.Namespace: def parse_args() -> None:
"""Parse args""" """Parse args"""
parser = argparse.ArgumentParser( parser = argparse.ArgumentParser(
description="Tool to trigger a subset of container jobs " description="Tool to trigger a subset of container jobs "
@@ -324,8 +289,7 @@ def parse_args() -> argparse.Namespace:
parser.add_argument( parser.add_argument(
"--target", "--target",
metavar="target-job", metavar="target-job",
help="Target job regex. For multiple targets, pass multiple values, " help="Target job regex. For multiple targets, separate with pipe | character",
"eg. `--target foo bar`.",
required=True, required=True,
nargs=argparse.ONE_OR_MORE, nargs=argparse.ONE_OR_MORE,
) )
@@ -344,9 +308,7 @@ def parse_args() -> argparse.Namespace:
"--stress", "--stress",
default=0, default=0,
type=int, type=int,
help="Stresstest job(s). Specify the number of times to rerun the selected jobs, " help="Stresstest job(s). Number or repetitions or -1 for infinite.",
"or use -1 for indefinite. Defaults to 0. If jobs have already been executed, "
"this will ensure the total run count respects the specified number.",
) )
parser.add_argument( parser.add_argument(
"--project", "--project",
@@ -381,14 +343,12 @@ def parse_args() -> argparse.Namespace:
def print_detected_jobs( def print_detected_jobs(
target_dep_dag: "Dag", target_dep_dag: "Dag", dependency_jobs: Iterable[str], target_jobs: Iterable[str]
dependency_jobs: Iterable[str],
target_jobs: Iterable[str],
) -> None: ) -> None:
def print_job_set(color: str, kind: str, job_set: Iterable[str]): def print_job_set(color: str, kind: str, job_set: Iterable[str]):
print( print(
color + f"Running {len(job_set)} {kind} jobs: ", color + f"Running {len(job_set)} {kind} jobs: ",
"\n\t", "\n",
", ".join(sorted(job_set)), ", ".join(sorted(job_set)),
Fore.RESET, Fore.RESET,
"\n", "\n",
@@ -400,12 +360,10 @@ def print_detected_jobs(
print_job_set(Fore.BLUE, "target", target_jobs) print_job_set(Fore.BLUE, "target", target_jobs)
def find_dependencies( def find_dependencies(token: str | None,
token: str | None, target_jobs_regex: re.Pattern,
target_jobs_regex: re.Pattern, project_path: str,
project_path: str, iid: int) -> set[str]:
iid: int
) -> set[str]:
""" """
Find the dependencies of the target jobs in a GitLab pipeline. Find the dependencies of the target jobs in a GitLab pipeline.
@@ -442,45 +400,7 @@ def find_dependencies(
return target_jobs.union(dependency_jobs) return target_jobs.union(dependency_jobs)
def print_monitor_summary( if __name__ == "__main__":
execution_collection: Dict[str, Dict[int, Tuple[float, str, str]]],
t_start: float,
) -> None:
"""Summary of the test execution"""
t_end = time.perf_counter()
spend_minutes = (t_end - t_start) / 60
print(f"⏲ Duration of script execution: {spend_minutes:0.1f} minutes") # U+23F2 Timer clock
if len(execution_collection) == 0:
return
print(f"⏲ Jobs execution times:") # U+23F2 Timer clock
job_names = list(execution_collection.keys())
job_names.sort()
name_field_pad = len(max(job_names, key=len)) + 2
for name in job_names:
job_executions = execution_collection[name]
job_times = ', '.join([__job_duration_record(job_execution)
for job_execution in sorted(job_executions.items())])
print(f"* {name:{name_field_pad}}: ({len(job_executions)}) {job_times}")
def __job_duration_record(dict_item: tuple) -> str:
"""
Format each pair of job and its duration.
:param job_execution: item of execution_collection[name][idn]: Dict[int, Tuple[float, str, str]]
"""
job_id = f"{dict_item[0]}" # dictionary key
job_duration, job_status, job_url = dict_item[1] # dictionary value, the tuple
return (f"{STATUS_COLORS[job_status]}"
f"{link2print(job_url, job_id)}: {pretty_duration(job_duration):>8}"
f"{Style.RESET_ALL}")
def link2print(url: str, text: str, text_pad: int = 0) -> str:
text_pad = len(text) if text_pad < 1 else text_pad
return f"{URL_START}{url}\a{text:{text_pad}}{URL_END}"
def main() -> None:
try: try:
t_start = time.perf_counter() t_start = time.perf_counter()
@@ -542,7 +462,8 @@ def main() -> None:
target = '|'.join(args.target) target = '|'.join(args.target)
target = target.strip() target = target.strip()
print("🞋 target job: " + Fore.BLUE + target + Style.RESET_ALL) # U+1F78B Round target deps = set()
print("🞋 job: " + Fore.BLUE + target + Style.RESET_ALL)
# Implicitly include `parallel:` jobs # Implicitly include `parallel:` jobs
target = f'({target})' + r'( \d+/\d+)?' target = f'({target})' + r'( \d+/\d+)?'
@@ -555,19 +476,17 @@ def main() -> None:
iid=pipe.iid, iid=pipe.iid,
project_path=cur_project project_path=cur_project
) )
target_job_id, ret, exec_t = monitor_pipeline( target_job_id, ret = monitor_pipeline(
cur_project, pipe, target_jobs_regex, deps, args.force_manual, args.stress cur_project, pipe, target_jobs_regex, deps, args.force_manual, args.stress
) )
if target_job_id: if target_job_id:
print_log(cur_project, target_job_id) print_log(cur_project, target_job_id)
print_monitor_summary(exec_t, t_start) t_end = time.perf_counter()
spend_minutes = (t_end - t_start) / 60
print(f"⏲ Duration of script execution: {spend_minutes:0.1f} minutes")
sys.exit(ret) sys.exit(ret)
except KeyboardInterrupt: except KeyboardInterrupt:
sys.exit(1) sys.exit(1)
if __name__ == "__main__":
main()
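
Several hunks above rely on link2print, which wraps a job name in an OSC-8 terminal hyperlink and pads it to a fixed width so the status columns line up. A self-contained sketch of that idea; the escape constants are written out here and assumed to match the URL_START/URL_END the script already uses:

    # OSC-8 terminal hyperlinks with field padding, as used by link2print above.
    # The two escape constants are assumptions (standard OSC-8, BEL-terminated).
    URL_START = "\033]8;;"    # OSC 8 ; ; <url> BEL starts the link
    URL_END = "\033]8;;\a"    # empty url ends the link

    def link2print(url: str, text: str, text_pad: int = 0) -> str:
        text_pad = len(text) if text_pad < 1 else text_pad
        return f"{URL_START}{url}\a{text:{text_pad}}{URL_END}"

    print("🞋 target job "
          + link2print("https://gitlab.freedesktop.org/mesa/mesa/-/jobs/1",
                       "debian-testing", 30)
          + "running")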


@@ -37,25 +37,21 @@ def pretty_duration(seconds):
hours, rem = divmod(seconds, 3600) hours, rem = divmod(seconds, 3600)
minutes, seconds = divmod(rem, 60) minutes, seconds = divmod(rem, 60)
if hours: if hours:
return f"{hours:0.0f}h{minutes:02.0f}m{seconds:02.0f}s" return f"{hours:0.0f}h{minutes:0.0f}m{seconds:0.0f}s"
if minutes: if minutes:
return f"{minutes:0.0f}m{seconds:02.0f}s" return f"{minutes:0.0f}m{seconds:0.0f}s"
return f"{seconds:0.0f}s" return f"{seconds:0.0f}s"
def get_gitlab_pipeline_from_url(gl, pipeline_url) -> tuple: def get_gitlab_pipeline_from_url(gl, pipeline_url):
""" assert pipeline_url.startswith(GITLAB_URL)
Extract the project and pipeline object from the url string url_path = pipeline_url[len(GITLAB_URL) :]
:param gl: Gitlab object url_path_components = url_path.split("/")
:param pipeline_url: string with a url to a pipeline project_name = "/".join(url_path_components[1:3])
:return: ProjectPipeline, Project objects assert url_path_components[3] == "-"
""" assert url_path_components[4] == "pipelines"
pattern = rf"^{re.escape(GITLAB_URL)}/(.*)/-/pipelines/([0-9]+)$" pipeline_id = int(url_path_components[5])
match = re.match(pattern, pipeline_url) cur_project = gl.projects.get(project_name)
if not match:
raise AssertionError(f"url {pipeline_url} doesn't follow the pattern {pattern}")
namespace_with_project, pipeline_id = match.groups()
cur_project = gl.projects.get(namespace_with_project)
pipe = cur_project.pipelines.get(pipeline_id) pipe = cur_project.pipelines.get(pipeline_id)
return pipe, cur_project return pipe, cur_project
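
The rewritten get_gitlab_pipeline_from_url above replaces manual string slicing with a single anchored regex. The parsing step on its own, without the python-gitlab lookups (GITLAB_URL is spelled out here and assumed to equal the script's constant):

    # URL parsing as done by get_gitlab_pipeline_from_url above, minus the API calls.
    import re

    GITLAB_URL = "https://gitlab.freedesktop.org"   # assumed to match the script's constant

    def parse_pipeline_url(pipeline_url):
        """Return (project path, pipeline id) for a .../-/pipelines/<id> URL."""
        pattern = rf"^{re.escape(GITLAB_URL)}/(.*)/-/pipelines/([0-9]+)$"
        match = re.match(pattern, pipeline_url)
        if not match:
            raise AssertionError(f"url {pipeline_url} doesn't follow the pattern {pattern}")
        namespace_with_project, pipeline_id = match.groups()
        return namespace_with_project, int(pipeline_id)

    print(parse_pipeline_url("https://gitlab.freedesktop.org/mesa/mesa/-/pipelines/1000000"))
    # -> ('mesa/mesa', 1000000)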


@@ -331,8 +331,10 @@ def filter_dag(dag: Dag, regex: Pattern) -> Dag:
def print_dag(dag: Dag) -> None: def print_dag(dag: Dag) -> None:
for job, data in sorted(dag.items()): for job, data in dag.items():
print(f"{job}:\n\t{' '.join(data['needs'])}\n") print(f"{job}:")
print(f"\t{' '.join(data['needs'])}")
print()
def fetch_merged_yaml(gl_gql: GitlabGQL, params) -> dict[str, Any]: def fetch_merged_yaml(gl_gql: GitlabGQL, params) -> dict[str, Any]:


@@ -1,175 +0,0 @@
#!/usr/bin/env python3
# Copyright © 2020 - 2024 Collabora Ltd.
# Authors:
# David Heidelberg <david.heidelberg@collabora.com>
# Sergi Blanch Torne <sergi.blanch.torne@collabora.com>
# SPDX-License-Identifier: MIT
"""
Compare the two latest scheduled pipelines and provide information
about the jobs you're interested in.
"""
import argparse
import csv
import re
import requests
import io
from tabulate import tabulate

import gitlab
from colorama import Fore, Style
from gitlab_common import read_token

MARGE_BOT_USER_ID = 9716


def print_failures_csv(id):
    url = 'https://gitlab.freedesktop.org/mesa/mesa/-/jobs/' + str(id) + '/artifacts/raw/results/failures.csv'
    missing: int = 0
    MAX_MISS: int = 20
    try:
        response = requests.get(url)
        response.raise_for_status()

        csv_content = io.StringIO(response.text)
        csv_reader = csv.reader(csv_content)
        data = list(csv_reader)

        for line in data[:]:
            if line[1] == "UnexpectedImprovement(Pass)":
                line[1] = Fore.GREEN + line[1] + Style.RESET_ALL
            elif line[1] == "UnexpectedImprovement(Fail)":
                line[1] = Fore.YELLOW + line[1] + Style.RESET_ALL
            elif line[1] == "Crash" or line[1] == "Fail":
                line[1] = Fore.RED + line[1] + Style.RESET_ALL
            elif line[1] == "Missing":
                if missing > MAX_MISS:
                    data.remove(line)
                    continue
                missing += 1
                line[1] = Fore.YELLOW + line[1] + Style.RESET_ALL
            elif line[1] == "Fail":
                line[1] = Fore.RED + line[1] + Style.RESET_ALL
            else:
                line[1] = Fore.WHITE + line[1] + Style.RESET_ALL

        if missing > MAX_MISS:
            data.append([Fore.RED + f"... more than {MAX_MISS} missing tests, something crashed?", "Missing" + Style.RESET_ALL])

        headers = ["Test ", "Result"]
        print(tabulate(data, headers, tablefmt="plain"))
    except Exception:
        pass


def job_failed_before(old_jobs, job):
    for old_job in old_jobs:
        if job.name == old_job.name:
            return old_job


def parse_args() -> None:
    """Parse args"""
    parser = argparse.ArgumentParser(
        description="Tool to show merge requests assigned to the marge-bot",
    )
    parser.add_argument(
        "--target",
        metavar="target-job",
        help="Target job regex. For multiple targets, pass multiple values, "
        "eg. `--target foo bar`.",
        required=False,
        nargs=argparse.ONE_OR_MORE,
    )
    parser.add_argument(
        "--token",
        metavar="token",
        help="force GitLab token, otherwise it's read from ~/.config/gitlab-token",
    )
    return parser.parse_args()


if __name__ == "__main__":
    args = parse_args()
    token = read_token(args.token)
    gl = gitlab.Gitlab(url="https://gitlab.freedesktop.org", private_token=token)

    project = gl.projects.get("mesa/mesa")

    print(
        "\u001b]8;;https://gitlab.freedesktop.org/mesa/mesa/-/pipelines?page=1&scope=all&source=schedule\u001b\\Scheduled pipelines overview\u001b]8;;\u001b\\"
    )
    pipelines = project.pipelines.list(
        source="schedule", ordered_by="created_at", sort="desc", page=1, per_page=2
    )
    print(
        f"Old pipeline: {pipelines[1].created_at}\t\u001b]8;;{pipelines[1].web_url}\u001b\\{pipelines[1].status}\u001b]8;;\u001b\\\t{pipelines[1].sha}"
    )
    print(
        f"New pipeline: {pipelines[0].created_at}\t\u001b]8;;{pipelines[0].web_url}\u001b\\{pipelines[0].status}\u001b]8;;\u001b\\\t{pipelines[0].sha}"
    )
    print(
        f"\nWebUI visual compare: https://gitlab.freedesktop.org/mesa/mesa/-/compare/{pipelines[1].sha}...{pipelines[0].sha}\n"
    )

    # regex part
    if args.target:
        target = "|".join(args.target)
        target = target.strip()

        print("🞋 jobs: " + Fore.BLUE + target + Style.RESET_ALL)

        target = f"({target})" + r"( \d+/\d+)?"
    else:
        target = ".*"

    target_jobs_regex: re.Pattern = re.compile(target)

    old_failed_jobs = []
    for job in pipelines[1].jobs.list(all=True):
        if (
            job.status != "failed"
            or target_jobs_regex
            and not target_jobs_regex.fullmatch(job.name)
        ):
            continue
        old_failed_jobs.append(job)

    job_failed = False
    for job in pipelines[0].jobs.list(all=True):
        if (
            job.status != "failed"
            or target_jobs_regex
            and not target_jobs_regex.fullmatch(job.name)
        ):
            continue

        job_failed = True

        previously_failed_job = job_failed_before(old_failed_jobs, job)
        if previously_failed_job:
            print(
                Fore.YELLOW
                + f":: \u001b]8;;{job.web_url}\u001b\\{job.name}\u001b]8;;\u001b\\"
                + Fore.MAGENTA
                + f" \u001b]8;;{previously_failed_job.web_url}\u001b\\(previous run)\u001b]8;;\u001b\\"
                + Style.RESET_ALL
            )
        else:
            print(
                Fore.RED
                + f":: \u001b]8;;{job.web_url}\u001b\\{job.name}\u001b]8;;\u001b\\"
                + Style.RESET_ALL
            )
        print_failures_csv(job.id)

    if not job_failed:
        exit(0)

    print("Commits between nightly pipelines:")
    commit = project.commits.get(pipelines[0].sha)
    while True:
        print(
            f"{commit.id} \u001b]8;;{commit.web_url}\u001b\\{commit.title}\u001b]8;;\u001b\\"
        )
        if commit.id == pipelines[1].sha:
            break
        commit = project.commits.get(commit.parent_ids[0])
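One detail of the deleted script worth spelling out: the optional suffix r"( \d+/\d+)?" appended to the user-supplied --target regex is presumably there so a target also matches GitLab's sharded job names of the form "name N/M". A small sketch with made-up job names:

import re

user_target = "zink.*|radv.*"  # e.g. what "|".join(args.target) could produce
target_jobs_regex = re.compile(f"({user_target})" + r"( \d+/\d+)?")

print(bool(target_jobs_regex.fullmatch("zink-anv-tgl")))      # True
print(bool(target_jobs_regex.fullmatch("zink-anv-tgl 2/4")))  # True, sharded job name
print(bool(target_jobs_regex.fullmatch("debian-testing")))    # False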

View File

@@ -1,11 +0,0 @@
#!/usr/bin/env bash
set -eu

this_dir=$(dirname -- "$(readlink -f -- "${BASH_SOURCE[0]}")")
readonly this_dir

exec \
    "$this_dir/../python-venv.sh" \
    "$this_dir/requirements.txt" \
    "$this_dir/nightly_compare.py" "$@"

View File

@@ -1,12 +1,11 @@
-colorama==0.4.*
+colorama==0.4.5
 filecache==0.81
-gql==3.*
-kaleido==0.2.*
-python-dateutil==2.*
-pandas==2.*
-plotly==5.*
-python-gitlab==3.*
-PyYAML==6.*
-ruamel.yaml.clib==0.2.*
-ruamel.yaml==0.17.*
-tabulate==0.9.*
+gql==3.4.0
+kaleido==0.2.1
+python-dateutil==2.8.2
+pandas==2.1.1
+plotly==5.17.0
+python-gitlab==3.5.0
+PyYAML==6.0.1
+ruamel.yaml.clib==0.2.8
+ruamel.yaml==0.17.21

Some files were not shown because too many files have changed in this diff.