Compare commits
258 Commits
mesa-23.0.
...
mesa-22.2.
Author | SHA1 | Date | |
---|---|---|---|
|
4ef98ce435 | ||
|
5c4d90f1aa | ||
|
b47e856216 | ||
|
ab6ce72fd9 | ||
|
2adacf5594 | ||
|
242b498115 | ||
|
20886cf572 | ||
|
91ccbb017e | ||
|
8c36939dd6 | ||
|
a021a38f5e | ||
|
5b6aa0c7ee | ||
|
da97b8a0e1 | ||
|
fbc7e23afd | ||
|
6e574e2c81 | ||
|
4698093aff | ||
|
6ce21ce910 | ||
|
2c56768a2f | ||
|
4de1373cc4 | ||
|
d30ddb0406 | ||
|
f0dc4ee1cd | ||
|
ac210811e8 | ||
|
a861f9a0b2 | ||
|
4813bc413e | ||
|
fc8ba10f73 | ||
|
9478d1be89 | ||
|
842011a4bd | ||
|
26b95ed403 | ||
|
36ac6a48b9 | ||
|
314d56e9b8 | ||
|
4b3daadae0 | ||
|
cbcf1bb4d8 | ||
|
c916ec76ee | ||
|
3ded967693 | ||
|
609949fbcf | ||
|
f0193e09e1 | ||
|
1959ee0183 | ||
|
7d81b290d4 | ||
|
4022487538 | ||
|
9146b229fc | ||
|
dd6e95ad82 | ||
|
ca7c6dffd9 | ||
|
47aff2759f | ||
|
49fdd260a2 | ||
|
0b081167b2 | ||
|
c3cccad441 | ||
|
b5917e15f7 | ||
|
de8ec9b17a | ||
|
9b94b96829 | ||
|
9d0016594b | ||
|
7cd4905eaf | ||
|
7f79b1a010 | ||
|
d4c4412629 | ||
|
46d66bcb6f | ||
|
c559159b71 | ||
|
c9d05409ae | ||
|
726e9bbf60 | ||
|
ee37cceabd | ||
|
086b44101c | ||
|
a00e1b3a63 | ||
|
51cd3f89a1 | ||
|
84ef8f31f6 | ||
|
3632392123 | ||
|
b4c8e01393 | ||
|
02a7e1b33c | ||
|
80c165d155 | ||
|
3b11e41489 | ||
|
6a936dc090 | ||
|
0e9cbac188 | ||
|
064abf28b6 | ||
|
3f3ecdccb1 | ||
|
70dcad3214 | ||
|
8abdd60d1f | ||
|
88a188cc39 | ||
|
ccac10eb5a | ||
|
1ddca52324 | ||
|
b406a3c6ee | ||
|
f08ddee3a5 | ||
|
fc014e713a | ||
|
ad7ec70ca6 | ||
|
29415adbaa | ||
|
b84c8968c7 | ||
|
e201b751fc | ||
|
36a66a3124 | ||
|
7bcd2e6767 | ||
|
ab35b97b78 | ||
|
1a79d5e861 | ||
|
0e5bf558ef | ||
|
f5ae4c35fe | ||
|
c8ba293450 | ||
|
27fb80d64d | ||
|
2e3afb6765 | ||
|
46cf9fdb80 | ||
|
16757df4cb | ||
|
f7a3632d65 | ||
|
2f9757373e | ||
|
78e12c983e | ||
|
a3e250c789 | ||
|
d424ca0ae9 | ||
|
a606746bba | ||
|
066c682647 | ||
|
f3c5c6230f | ||
|
bb48242766 | ||
|
ec1d48938c | ||
|
123ece43a4 | ||
|
f342f7602a | ||
|
4bbf83a4a8 | ||
|
72681ac88e | ||
|
8da364d8fa | ||
|
e8112df5fb | ||
|
90ab3994de | ||
|
8c64dc4873 | ||
|
68f0becf2b | ||
|
3dfae4eec1 | ||
|
38254ea6cd | ||
|
a427499f8c | ||
|
03346d62e7 | ||
|
dd2c6e59ba | ||
|
28902b4746 | ||
|
caaab9f5e3 | ||
|
8dd736aa04 | ||
|
5c6b687e04 | ||
|
537c0efff9 | ||
|
1a57c75620 | ||
|
7e7a085317 | ||
|
08b7aa08e5 | ||
|
96073f7f98 | ||
|
5f562b1e00 | ||
|
6001a465c0 | ||
|
7b68155fa0 | ||
|
f918de9b42 | ||
|
5a267b1cf9 | ||
|
613e43c87b | ||
|
c1e5cfb01f | ||
|
7b2dc8e779 | ||
|
f73dd21d45 | ||
|
c2d094d0c7 | ||
|
9634e7cdf6 | ||
|
e7ed89f1bc | ||
|
ea6aafefd1 | ||
|
41705b8754 | ||
|
1faced9ec7 | ||
|
185b583ae1 | ||
|
16dc6872c3 | ||
|
0766a4ca0d | ||
|
2bb0d73816 | ||
|
705b30837c | ||
|
9a80d2f73b | ||
|
af2892677b | ||
|
cc504c9887 | ||
|
6561217214 | ||
|
7b1412130a | ||
|
40da2cee3d | ||
|
30ef443d23 | ||
|
499a65e88d | ||
|
35025cbb77 | ||
|
5c4028ac36 | ||
|
53cd211cb9 | ||
|
bdcffd60db | ||
|
b712253b53 | ||
|
ecc41f91ad | ||
|
f1a407de47 | ||
|
9998f8e1db | ||
|
26c1926a4a | ||
|
9a43a1f1d1 | ||
|
b3fc8cb419 | ||
|
9f305dd4e6 | ||
|
dbc956920f | ||
|
fa4c949150 | ||
|
3490712ad7 | ||
|
260b7902fe | ||
|
12f1cabeba | ||
|
4e0637a182 | ||
|
8b0343601c | ||
|
bac7da0264 | ||
|
b01498700c | ||
|
1c6c94424b | ||
|
87e006ca01 | ||
|
f88ce98ee6 | ||
|
ffc5316a7c | ||
|
5433fb705b | ||
|
773964fb8b | ||
|
75af03a653 | ||
|
61790c60dd | ||
|
6094318c4d | ||
|
6225807c85 | ||
|
df69376e68 | ||
|
7c6e24f329 | ||
|
c69f749bd8 | ||
|
42cd6b0fa0 | ||
|
0d27e5fd63 | ||
|
a4a7aa5d1a | ||
|
a77322c414 | ||
|
3b82f4eae2 | ||
|
d214aa3889 | ||
|
9b266113fe | ||
|
96df57ad5d | ||
|
9d3c4ea4ec | ||
|
853962d850 | ||
|
c10a10b3ac | ||
|
fe4bc64b9f | ||
|
1a1ded7d78 | ||
|
e2ff62782d | ||
|
df035d2894 | ||
|
23daa993df | ||
|
bc9e9c39ef | ||
|
dacab91f27 | ||
|
32ac1133d0 | ||
|
e99965a073 | ||
|
f330229d98 | ||
|
266fc5f6cc | ||
|
ec9691dbf1 | ||
|
b6973234ad | ||
|
b70516a37a | ||
|
3f18f014e4 | ||
|
17faf33ab7 | ||
|
e35dd22c6d | ||
|
797a781ffe | ||
|
517d22b3f7 | ||
|
f02522adce | ||
|
679049bf4c | ||
|
c9d2f45bf2 | ||
|
2c7c5cc016 | ||
|
fe2f7c06ae | ||
|
515faea62b | ||
|
25f9046ccd | ||
|
661d8de303 | ||
|
de6ee5b782 | ||
|
9b943044ac | ||
|
b1dbdecb27 | ||
|
f8bdbbdd90 | ||
|
38d6ae933d | ||
|
2ce1c12477 | ||
|
2f18e16512 | ||
|
eadc134dd8 | ||
|
e16a613de0 | ||
|
8cd9d2fcc0 | ||
|
167af40dae | ||
|
b525edfce6 | ||
|
aa90b5cd12 | ||
|
9234bdebed | ||
|
fda5f3f630 | ||
|
349576d92f | ||
|
0d7d35c84a | ||
|
1889d87783 | ||
|
50e133465c | ||
|
fdbabb07cf | ||
|
71b113251d | ||
|
0ee8821b83 | ||
|
3eda2a96a8 | ||
|
5c8aaa70e8 | ||
|
081fd3a4f4 | ||
|
08adb7bb9d | ||
|
c702465d56 | ||
|
5e00b2d8a7 | ||
|
46fc1b37b5 | ||
|
5814485a10 | ||
|
16d299e40b | ||
|
f8367fc41e |
@@ -8,7 +8,7 @@ charset = utf-8
|
||||
insert_final_newline = true
|
||||
tab_width = 8
|
||||
|
||||
[*.{c,h,cpp,hpp,cc,hh,y,yy}]
|
||||
[*.{c,h,cpp,hpp,cc,hh}]
|
||||
indent_style = space
|
||||
indent_size = 3
|
||||
max_line_length = 78
|
||||
@@ -35,10 +35,7 @@ trim_trailing_whitespace = false
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
|
||||
[*.ps1]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
[*.rs]
|
||||
indent_style = space
|
||||
indent_size = 4
|
||||
|
39
.github/workflows/ci.yml
vendored
Normal file
39
.github/workflows/ci.yml
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
name: CI
|
||||
on: push
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
CI:
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: Install Dependencies
|
||||
run: |
|
||||
cat > Brewfile <<EOL
|
||||
brew "bison"
|
||||
brew "expat"
|
||||
brew "gettext"
|
||||
brew "libx11"
|
||||
brew "libxcb"
|
||||
brew "libxdamage"
|
||||
brew "libxext"
|
||||
brew "meson"
|
||||
brew "pkg-config"
|
||||
brew "python@3.10"
|
||||
EOL
|
||||
|
||||
brew update
|
||||
brew bundle --verbose
|
||||
- name: Install Mako
|
||||
run: pip3 install --user mako
|
||||
- name: Configure
|
||||
run: meson . build -Dbuild-tests=true -Dosmesa=true
|
||||
- name: Build
|
||||
run: meson compile -C build
|
||||
- name: Test
|
||||
run: meson test -C build --print-errorlogs
|
||||
- name: Install
|
||||
run: meson install -C build
|
60
.github/workflows/macos.yml
vendored
60
.github/workflows/macos.yml
vendored
@@ -1,60 +0,0 @@
|
||||
name: macOS-CI
|
||||
on: push
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
macOS-CI:
|
||||
strategy:
|
||||
matrix:
|
||||
glx_option: ['dri', 'xlib']
|
||||
runs-on: macos-11
|
||||
env:
|
||||
GALLIUM_DUMP_CPU: true
|
||||
MESON_EXEC: /Users/runner/Library/Python/3.11/bin/meson
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: Install Dependencies
|
||||
run: |
|
||||
cat > Brewfile <<EOL
|
||||
brew "bison"
|
||||
brew "expat"
|
||||
brew "gettext"
|
||||
brew "libx11"
|
||||
brew "libxcb"
|
||||
brew "libxdamage"
|
||||
brew "libxext"
|
||||
brew "molten-vk"
|
||||
brew "ninja"
|
||||
brew "pkg-config"
|
||||
brew "python@3.10"
|
||||
EOL
|
||||
|
||||
brew update
|
||||
brew bundle --verbose
|
||||
- name: Install Mako and meson
|
||||
run: pip3 install --user mako meson
|
||||
- name: Configure
|
||||
run: |
|
||||
cat > native_config <<EOL
|
||||
[binaries]
|
||||
llvm-config = '/usr/local/opt/llvm/bin/llvm-config'
|
||||
EOL
|
||||
$MESON_EXEC . build --native-file=native_config -Dmoltenvk-dir=$(brew --prefix molten-vk) -Dbuild-tests=true -Dosmesa=true -Dgallium-drivers=swrast,zink -Dglx=${{ matrix.glx_option }}
|
||||
- name: Build
|
||||
run: $MESON_EXEC compile -C build
|
||||
- name: Test
|
||||
run: $MESON_EXEC test -C build --print-errorlogs
|
||||
- name: Install
|
||||
run: $MESON_EXEC install -C build --destdir $PWD/install
|
||||
- name: 'Upload Artifact'
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: macos-${{ matrix.glx_option }}-result
|
||||
path: |
|
||||
build/meson-logs/
|
||||
install/
|
||||
retention-days: 5
|
1
.gitignore
vendored
1
.gitignore
vendored
@@ -1,4 +1,3 @@
|
||||
.vscode*
|
||||
*.pyc
|
||||
*.pyo
|
||||
*.out
|
||||
|
@@ -1,6 +1,6 @@
|
||||
variables:
|
||||
FDO_UPSTREAM_REPO: mesa/mesa
|
||||
MESA_TEMPLATES_COMMIT: &ci-templates-commit d5aa3941aa03c2f716595116354fb81eb8012acb
|
||||
MESA_TEMPLATES_COMMIT: &ci-templates-commit 290b79e0e78eab67a83766f4e9691be554fc4afd
|
||||
CI_PRE_CLONE_SCRIPT: |-
|
||||
set -o xtrace
|
||||
wget -q -O download-git-cache.sh ${CI_PROJECT_URL}/-/raw/${CI_COMMIT_SHA}/.gitlab-ci/download-git-cache.sh
|
||||
@@ -8,7 +8,7 @@ variables:
|
||||
rm download-git-cache.sh
|
||||
set +o xtrace
|
||||
CI_JOB_JWT_FILE: /minio_jwt
|
||||
MINIO_HOST: s3.freedesktop.org
|
||||
MINIO_HOST: minio-packet.freedesktop.org
|
||||
# per-pipeline artifact storage on MinIO
|
||||
PIPELINE_ARTIFACTS_BASE: ${MINIO_HOST}/artifacts/${CI_PROJECT_PATH}/${CI_PIPELINE_ID}
|
||||
# per-job artifact storage on MinIO
|
||||
@@ -22,9 +22,6 @@ variables:
|
||||
MICROSOFT_FARM: "online"
|
||||
LIMA_FARM: "online"
|
||||
IGALIA_FARM: "online"
|
||||
ANHOLT_FARM: "online"
|
||||
VALVE_FARM: "online"
|
||||
AUSTRIANCODER_FARM: "online" # only etnaviv GPUs
|
||||
|
||||
default:
|
||||
before_script:
|
||||
@@ -49,13 +46,12 @@ default:
|
||||
|
||||
include:
|
||||
- project: 'freedesktop/ci-templates'
|
||||
ref: ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2
|
||||
ref: 34f4ade99434043f88e164933f570301fd18b125
|
||||
file:
|
||||
- '/templates/ci-fairy.yml'
|
||||
- project: 'freedesktop/ci-templates'
|
||||
ref: *ci-templates-commit
|
||||
file:
|
||||
- '/templates/alpine.yml'
|
||||
- '/templates/debian.yml'
|
||||
- '/templates/fedora.yml'
|
||||
- local: '.gitlab-ci/image-tags.yml'
|
||||
@@ -74,6 +70,7 @@ include:
|
||||
- local: 'src/gallium/drivers/lima/ci/gitlab-ci.yml'
|
||||
- local: 'src/gallium/drivers/llvmpipe/ci/gitlab-ci.yml'
|
||||
- local: 'src/gallium/drivers/nouveau/ci/gitlab-ci.yml'
|
||||
- local: 'src/gallium/drivers/radeonsi/ci/gitlab-ci.yml'
|
||||
- local: 'src/gallium/drivers/softpipe/ci/gitlab-ci.yml'
|
||||
- local: 'src/gallium/drivers/virgl/ci/gitlab-ci.yml'
|
||||
- local: 'src/gallium/drivers/zink/ci/gitlab-ci.yml'
|
||||
@@ -81,7 +78,6 @@ include:
|
||||
- local: 'src/intel/ci/gitlab-ci.yml'
|
||||
- local: 'src/microsoft/ci/gitlab-ci.yml'
|
||||
- local: 'src/panfrost/ci/gitlab-ci.yml'
|
||||
- local: 'src/virtio/ci/gitlab-ci.yml'
|
||||
|
||||
stages:
|
||||
- sanity
|
||||
@@ -89,7 +85,6 @@ stages:
|
||||
- git-archive
|
||||
- build-x86_64
|
||||
- build-misc
|
||||
- lint
|
||||
- amd
|
||||
- intel
|
||||
- nouveau
|
||||
@@ -130,24 +125,21 @@ stages:
|
||||
|
||||
|
||||
.docs-base:
|
||||
variables:
|
||||
BUILDER: html
|
||||
extends:
|
||||
- .fdo.ci-fairy
|
||||
- .build-rules
|
||||
artifacts:
|
||||
expose_as: 'Documentation preview'
|
||||
paths:
|
||||
- public/
|
||||
script:
|
||||
- apk --no-cache add graphviz doxygen
|
||||
- pip3 install sphinx===5.1.1 breathe===4.34.0 mako===1.2.3 sphinx_rtd_theme===1.0.0
|
||||
- pip3 install sphinx breathe mako sphinx_rtd_theme
|
||||
- docs/doxygen-wrapper.py --out-dir=docs/doxygen_xml
|
||||
- sphinx-build -W -b $BUILDER docs public
|
||||
- sphinx-build -W -b html docs public
|
||||
|
||||
pages:
|
||||
extends: .docs-base
|
||||
stage: deploy
|
||||
artifacts:
|
||||
paths:
|
||||
- public
|
||||
needs: []
|
||||
rules:
|
||||
- !reference [.no_scheduled_pipelines-rules, rules]
|
||||
@@ -176,24 +168,16 @@ test-docs-mr:
|
||||
- test-docs
|
||||
needs:
|
||||
- sanity
|
||||
artifacts:
|
||||
expose_as: 'Documentation preview'
|
||||
paths:
|
||||
- public/
|
||||
rules:
|
||||
- if: *is-pre-merge
|
||||
changes: *docs-or-ci
|
||||
when: on_success
|
||||
# Other cases default to never
|
||||
|
||||
lincheck-docs:
|
||||
extends: .docs-base
|
||||
# Cancel job if a newer commit is pushed to the same branch
|
||||
interruptible: true
|
||||
stage: deploy
|
||||
needs: []
|
||||
rules:
|
||||
- !reference [.scheduled_pipeline-rules, rules]
|
||||
allow_failure: true
|
||||
variables:
|
||||
BUILDER: linkcheck
|
||||
|
||||
# When to automatically run the CI for build jobs
|
||||
.build-rules:
|
||||
rules:
|
||||
@@ -278,7 +262,9 @@ make git archive:
|
||||
# compress the current folder
|
||||
- tar -cvzf ../$CI_PROJECT_NAME.tar.gz .
|
||||
|
||||
- ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" ../$CI_PROJECT_NAME.tar.gz https://$MINIO_HOST/git-cache/$CI_PROJECT_NAMESPACE/$CI_PROJECT_NAME/$CI_PROJECT_NAME.tar.gz
|
||||
# login with the JWT token file
|
||||
- ci-fairy minio login --token-file "${CI_JOB_JWT_FILE}"
|
||||
- ci-fairy minio cp ../$CI_PROJECT_NAME.tar.gz minio://$MINIO_HOST/git-cache/$CI_PROJECT_NAMESPACE/$CI_PROJECT_NAME/$CI_PROJECT_NAME.tar.gz
|
||||
|
||||
|
||||
# Sanity checks of MR settings and commit logs
|
||||
|
@@ -3,8 +3,9 @@ version: 1
|
||||
# Rules to match for a machine to qualify
|
||||
target:
|
||||
{% if tags %}
|
||||
{% set b2ctags = tags.split(',') %}
|
||||
tags:
|
||||
{% for tag in tags %}
|
||||
{% for tag in b2ctags %}
|
||||
- '{{ tag | trim }}'
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
@@ -52,7 +53,7 @@ deployment:
|
||||
b2c.container="-ti --tls-verify=false docker://{{ '{{' }} fdo_proxy_registry }}/mupuf/valve-infra/machine_registration:latest check"
|
||||
b2c.ntp_peer=10.42.0.1 b2c.pipefail b2c.cache_device=auto b2c.poweroff_delay={{ poweroff_delay }}
|
||||
b2c.minio="gateway,{{ '{{' }} minio_url }},{{ '{{' }} job_bucket_access_key }},{{ '{{' }} job_bucket_secret_key }}"
|
||||
b2c.volume="{{ '{{' }} job_bucket }}-results,mirror=gateway/{{ '{{' }} job_bucket }},pull_on=pipeline_start,push_on=changes,overwrite{% for excl in job_volume_exclusions %},exclude={{ excl }}{% endfor %},remove,expiration=pipeline_end,preserve"
|
||||
b2c.volume="{{ '{{' }} job_bucket }}-results,mirror=gateway/{{ '{{' }} job_bucket }},pull_on=pipeline_start,push_on=changes,overwrite{% for excl in job_volume_exclusions %},exclude={{ excl }}{% endfor %},expiration=pipeline_end,preserve"
|
||||
{% for volume in volumes %}
|
||||
b2c.volume={{ volume }}
|
||||
{% endfor %}
|
||||
|
@@ -24,7 +24,6 @@
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
from argparse import ArgumentParser
|
||||
from os import environ, path
|
||||
import json
|
||||
|
||||
|
||||
parser = ArgumentParser()
|
||||
@@ -70,10 +69,7 @@ values['log_level'] = args.log_level
|
||||
values['poweroff_delay'] = args.poweroff_delay
|
||||
values['session_end_regex'] = args.session_end_regex
|
||||
values['session_reboot_regex'] = args.session_reboot_regex
|
||||
try:
|
||||
values['tags'] = json.loads(args.tags)
|
||||
except json.decoder.JSONDecodeError:
|
||||
values['tags'] = args.tags.split(",")
|
||||
values['tags'] = args.tags
|
||||
values['template'] = args.template
|
||||
values['timeout_boot_minutes'] = args.timeout_boot_minutes
|
||||
values['timeout_boot_retries'] = args.timeout_boot_retries
|
||||
@@ -92,12 +88,10 @@ if args.mount_volume is not None:
|
||||
values['working_dir'] = args.working_dir
|
||||
|
||||
assert(len(args.local_container) > 0)
|
||||
|
||||
# Use the gateway's pull-through registry caches to reduce load on fd.o.
|
||||
values['local_container'] = args.local_container
|
||||
for url, replacement in [('registry.freedesktop.org', '{{ fdo_proxy_registry }}'),
|
||||
('harbor.freedesktop.org', '{{ harbor_fdo_registry }}')]:
|
||||
values['local_container'] = values['local_container'].replace(url, replacement)
|
||||
values['local_container'] = args.local_container.replace(
|
||||
# Use the gateway's pull-through registry cache to reduce load on fd.o.
|
||||
'registry.freedesktop.org', '{{ fdo_proxy_registry }}'
|
||||
)
|
||||
|
||||
if 'B2C_KERNEL_CMDLINE_EXTRAS' in environ:
|
||||
values['cmdline_extras'] = environ['B2C_KERNEL_CMDLINE_EXTRAS']
|
||||
|
26
.gitlab-ci/bare-metal/arm64_a630_egl.sh
Executable file
26
.gitlab-ci/bare-metal/arm64_a630_egl.sh
Executable file
@@ -0,0 +1,26 @@
|
||||
#!/bin/sh
|
||||
|
||||
# This test script groups together a bunch of fast dEQP variant runs
|
||||
# to amortize the cost of rebooting the board.
|
||||
|
||||
set -ex
|
||||
|
||||
EXIT=0
|
||||
|
||||
# Run reset tests without parallelism:
|
||||
if ! env \
|
||||
DEQP_RESULTS_DIR=results/reset \
|
||||
FDO_CI_CONCURRENT=1 \
|
||||
DEQP_CASELIST_FILTER='.*reset.*' \
|
||||
/install/deqp-runner.sh; then
|
||||
EXIT=1
|
||||
fi
|
||||
|
||||
# Then run everything else with parallelism:
|
||||
if ! env \
|
||||
DEQP_RESULTS_DIR=results/nonrobustness \
|
||||
DEQP_CASELIST_INV_FILTER='.*reset.*' \
|
||||
/install/deqp-runner.sh; then
|
||||
EXIT=1
|
||||
fi
|
||||
|
@@ -164,16 +164,19 @@ def main():
|
||||
'--test-timeout', type=int, help='Test phase timeout (minutes)', required=True)
|
||||
args = parser.parse_args()
|
||||
|
||||
servo = CrosServoRun(args.cpu, args.ec, args.test_timeout * 60)
|
||||
|
||||
while True:
|
||||
servo = CrosServoRun(args.cpu, args.ec, args.test_timeout * 60)
|
||||
retval = servo.run()
|
||||
|
||||
# power down the CPU on the device
|
||||
servo.ec_write("power off\n")
|
||||
servo.close()
|
||||
|
||||
if retval != 2:
|
||||
sys.exit(retval)
|
||||
break
|
||||
|
||||
# power down the CPU on the device
|
||||
servo.ec_write("power off\n")
|
||||
|
||||
servo.close()
|
||||
|
||||
sys.exit(retval)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
@@ -106,25 +106,20 @@ if echo "$BM_KERNEL $BM_DTB" | grep -q http; then
|
||||
wget $BM_DTB -O dtb
|
||||
|
||||
cat kernel dtb > Image.gz-dtb
|
||||
rm kernel
|
||||
rm kernel dtb
|
||||
else
|
||||
cat $BM_KERNEL $BM_DTB > Image.gz-dtb
|
||||
cp $BM_DTB dtb
|
||||
fi
|
||||
|
||||
export PATH=$BM:$PATH
|
||||
|
||||
mkdir -p artifacts
|
||||
mkbootimg.py \
|
||||
--kernel Image.gz-dtb \
|
||||
--ramdisk rootfs.cpio.gz \
|
||||
--dtb dtb \
|
||||
--cmdline "$BM_CMDLINE" \
|
||||
$BM_MKBOOT_PARAMS \
|
||||
--header_version 2 \
|
||||
-o artifacts/fastboot.img
|
||||
abootimg \
|
||||
--create artifacts/fastboot.img \
|
||||
-k Image.gz-dtb \
|
||||
-r rootfs.cpio.gz \
|
||||
-c cmdline="$BM_CMDLINE"
|
||||
rm Image.gz-dtb
|
||||
|
||||
rm Image.gz-dtb dtb
|
||||
export PATH=$BM:$PATH
|
||||
|
||||
# Start background command for talking to serial if we have one.
|
||||
if [ -n "$BM_SERIAL_SCRIPT" ]; then
|
||||
|
@@ -1,569 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright 2015, The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Creates the boot image."""
|
||||
from argparse import (ArgumentParser, ArgumentTypeError,
|
||||
FileType, RawDescriptionHelpFormatter)
|
||||
from hashlib import sha1
|
||||
from os import fstat
|
||||
from struct import pack
|
||||
import array
|
||||
import collections
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import tempfile
|
||||
# Constant and structure definition is in
|
||||
# system/tools/mkbootimg/include/bootimg/bootimg.h
|
||||
BOOT_MAGIC = 'ANDROID!'
|
||||
BOOT_MAGIC_SIZE = 8
|
||||
BOOT_NAME_SIZE = 16
|
||||
BOOT_ARGS_SIZE = 512
|
||||
BOOT_EXTRA_ARGS_SIZE = 1024
|
||||
BOOT_IMAGE_HEADER_V1_SIZE = 1648
|
||||
BOOT_IMAGE_HEADER_V2_SIZE = 1660
|
||||
BOOT_IMAGE_HEADER_V3_SIZE = 1580
|
||||
BOOT_IMAGE_HEADER_V3_PAGESIZE = 4096
|
||||
BOOT_IMAGE_HEADER_V4_SIZE = 1584
|
||||
BOOT_IMAGE_V4_SIGNATURE_SIZE = 4096
|
||||
VENDOR_BOOT_MAGIC = 'VNDRBOOT'
|
||||
VENDOR_BOOT_MAGIC_SIZE = 8
|
||||
VENDOR_BOOT_NAME_SIZE = BOOT_NAME_SIZE
|
||||
VENDOR_BOOT_ARGS_SIZE = 2048
|
||||
VENDOR_BOOT_IMAGE_HEADER_V3_SIZE = 2112
|
||||
VENDOR_BOOT_IMAGE_HEADER_V4_SIZE = 2128
|
||||
VENDOR_RAMDISK_TYPE_NONE = 0
|
||||
VENDOR_RAMDISK_TYPE_PLATFORM = 1
|
||||
VENDOR_RAMDISK_TYPE_RECOVERY = 2
|
||||
VENDOR_RAMDISK_TYPE_DLKM = 3
|
||||
VENDOR_RAMDISK_NAME_SIZE = 32
|
||||
VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE = 16
|
||||
VENDOR_RAMDISK_TABLE_ENTRY_V4_SIZE = 108
|
||||
# Names with special meaning, mustn't be specified in --ramdisk_name.
|
||||
VENDOR_RAMDISK_NAME_BLOCKLIST = {b'default'}
|
||||
PARSER_ARGUMENT_VENDOR_RAMDISK_FRAGMENT = '--vendor_ramdisk_fragment'
|
||||
def filesize(f):
|
||||
if f is None:
|
||||
return 0
|
||||
try:
|
||||
return fstat(f.fileno()).st_size
|
||||
except OSError:
|
||||
return 0
|
||||
def update_sha(sha, f):
|
||||
if f:
|
||||
sha.update(f.read())
|
||||
f.seek(0)
|
||||
sha.update(pack('I', filesize(f)))
|
||||
else:
|
||||
sha.update(pack('I', 0))
|
||||
def pad_file(f, padding):
|
||||
pad = (padding - (f.tell() & (padding - 1))) & (padding - 1)
|
||||
f.write(pack(str(pad) + 'x'))
|
||||
def get_number_of_pages(image_size, page_size):
|
||||
"""calculates the number of pages required for the image"""
|
||||
return (image_size + page_size - 1) // page_size
|
||||
def get_recovery_dtbo_offset(args):
|
||||
"""calculates the offset of recovery_dtbo image in the boot image"""
|
||||
num_header_pages = 1 # header occupies a page
|
||||
num_kernel_pages = get_number_of_pages(filesize(args.kernel), args.pagesize)
|
||||
num_ramdisk_pages = get_number_of_pages(filesize(args.ramdisk),
|
||||
args.pagesize)
|
||||
num_second_pages = get_number_of_pages(filesize(args.second), args.pagesize)
|
||||
dtbo_offset = args.pagesize * (num_header_pages + num_kernel_pages +
|
||||
num_ramdisk_pages + num_second_pages)
|
||||
return dtbo_offset
|
||||
def write_header_v3_and_above(args):
|
||||
if args.header_version > 3:
|
||||
boot_header_size = BOOT_IMAGE_HEADER_V4_SIZE
|
||||
else:
|
||||
boot_header_size = BOOT_IMAGE_HEADER_V3_SIZE
|
||||
args.output.write(pack(f'{BOOT_MAGIC_SIZE}s', BOOT_MAGIC.encode()))
|
||||
# kernel size in bytes
|
||||
args.output.write(pack('I', filesize(args.kernel)))
|
||||
# ramdisk size in bytes
|
||||
args.output.write(pack('I', filesize(args.ramdisk)))
|
||||
# os version and patch level
|
||||
args.output.write(pack('I', (args.os_version << 11) | args.os_patch_level))
|
||||
args.output.write(pack('I', boot_header_size))
|
||||
# reserved
|
||||
args.output.write(pack('4I', 0, 0, 0, 0))
|
||||
# version of boot image header
|
||||
args.output.write(pack('I', args.header_version))
|
||||
args.output.write(pack(f'{BOOT_ARGS_SIZE + BOOT_EXTRA_ARGS_SIZE}s',
|
||||
args.cmdline))
|
||||
if args.header_version >= 4:
|
||||
# The signature used to verify boot image v4.
|
||||
args.output.write(pack('I', BOOT_IMAGE_V4_SIGNATURE_SIZE))
|
||||
pad_file(args.output, BOOT_IMAGE_HEADER_V3_PAGESIZE)
|
||||
def write_vendor_boot_header(args):
|
||||
if filesize(args.dtb) == 0:
|
||||
raise ValueError('DTB image must not be empty.')
|
||||
if args.header_version > 3:
|
||||
vendor_ramdisk_size = args.vendor_ramdisk_total_size
|
||||
vendor_boot_header_size = VENDOR_BOOT_IMAGE_HEADER_V4_SIZE
|
||||
else:
|
||||
vendor_ramdisk_size = filesize(args.vendor_ramdisk)
|
||||
vendor_boot_header_size = VENDOR_BOOT_IMAGE_HEADER_V3_SIZE
|
||||
args.vendor_boot.write(pack(f'{VENDOR_BOOT_MAGIC_SIZE}s',
|
||||
VENDOR_BOOT_MAGIC.encode()))
|
||||
# version of boot image header
|
||||
args.vendor_boot.write(pack('I', args.header_version))
|
||||
# flash page size
|
||||
args.vendor_boot.write(pack('I', args.pagesize))
|
||||
# kernel physical load address
|
||||
args.vendor_boot.write(pack('I', args.base + args.kernel_offset))
|
||||
# ramdisk physical load address
|
||||
args.vendor_boot.write(pack('I', args.base + args.ramdisk_offset))
|
||||
# ramdisk size in bytes
|
||||
args.vendor_boot.write(pack('I', vendor_ramdisk_size))
|
||||
args.vendor_boot.write(pack(f'{VENDOR_BOOT_ARGS_SIZE}s',
|
||||
args.vendor_cmdline))
|
||||
# kernel tags physical load address
|
||||
args.vendor_boot.write(pack('I', args.base + args.tags_offset))
|
||||
# asciiz product name
|
||||
args.vendor_boot.write(pack(f'{VENDOR_BOOT_NAME_SIZE}s', args.board))
|
||||
# header size in bytes
|
||||
args.vendor_boot.write(pack('I', vendor_boot_header_size))
|
||||
# dtb size in bytes
|
||||
args.vendor_boot.write(pack('I', filesize(args.dtb)))
|
||||
# dtb physical load address
|
||||
args.vendor_boot.write(pack('Q', args.base + args.dtb_offset))
|
||||
if args.header_version > 3:
|
||||
vendor_ramdisk_table_size = (args.vendor_ramdisk_table_entry_num *
|
||||
VENDOR_RAMDISK_TABLE_ENTRY_V4_SIZE)
|
||||
# vendor ramdisk table size in bytes
|
||||
args.vendor_boot.write(pack('I', vendor_ramdisk_table_size))
|
||||
# number of vendor ramdisk table entries
|
||||
args.vendor_boot.write(pack('I', args.vendor_ramdisk_table_entry_num))
|
||||
# vendor ramdisk table entry size in bytes
|
||||
args.vendor_boot.write(pack('I', VENDOR_RAMDISK_TABLE_ENTRY_V4_SIZE))
|
||||
# bootconfig section size in bytes
|
||||
args.vendor_boot.write(pack('I', filesize(args.vendor_bootconfig)))
|
||||
pad_file(args.vendor_boot, args.pagesize)
|
||||
def write_header(args):
|
||||
if args.header_version > 4:
|
||||
raise ValueError(
|
||||
f'Boot header version {args.header_version} not supported')
|
||||
if args.header_version in {3, 4}:
|
||||
return write_header_v3_and_above(args)
|
||||
ramdisk_load_address = ((args.base + args.ramdisk_offset)
|
||||
if filesize(args.ramdisk) > 0 else 0)
|
||||
second_load_address = ((args.base + args.second_offset)
|
||||
if filesize(args.second) > 0 else 0)
|
||||
args.output.write(pack(f'{BOOT_MAGIC_SIZE}s', BOOT_MAGIC.encode()))
|
||||
# kernel size in bytes
|
||||
args.output.write(pack('I', filesize(args.kernel)))
|
||||
# kernel physical load address
|
||||
args.output.write(pack('I', args.base + args.kernel_offset))
|
||||
# ramdisk size in bytes
|
||||
args.output.write(pack('I', filesize(args.ramdisk)))
|
||||
# ramdisk physical load address
|
||||
args.output.write(pack('I', ramdisk_load_address))
|
||||
# second bootloader size in bytes
|
||||
args.output.write(pack('I', filesize(args.second)))
|
||||
# second bootloader physical load address
|
||||
args.output.write(pack('I', second_load_address))
|
||||
# kernel tags physical load address
|
||||
args.output.write(pack('I', args.base + args.tags_offset))
|
||||
# flash page size
|
||||
args.output.write(pack('I', args.pagesize))
|
||||
# version of boot image header
|
||||
args.output.write(pack('I', args.header_version))
|
||||
# os version and patch level
|
||||
args.output.write(pack('I', (args.os_version << 11) | args.os_patch_level))
|
||||
# asciiz product name
|
||||
args.output.write(pack(f'{BOOT_NAME_SIZE}s', args.board))
|
||||
args.output.write(pack(f'{BOOT_ARGS_SIZE}s', args.cmdline))
|
||||
sha = sha1()
|
||||
update_sha(sha, args.kernel)
|
||||
update_sha(sha, args.ramdisk)
|
||||
update_sha(sha, args.second)
|
||||
if args.header_version > 0:
|
||||
update_sha(sha, args.recovery_dtbo)
|
||||
if args.header_version > 1:
|
||||
update_sha(sha, args.dtb)
|
||||
img_id = pack('32s', sha.digest())
|
||||
args.output.write(img_id)
|
||||
args.output.write(pack(f'{BOOT_EXTRA_ARGS_SIZE}s', args.extra_cmdline))
|
||||
if args.header_version > 0:
|
||||
if args.recovery_dtbo:
|
||||
# recovery dtbo size in bytes
|
||||
args.output.write(pack('I', filesize(args.recovery_dtbo)))
|
||||
# recovert dtbo offset in the boot image
|
||||
args.output.write(pack('Q', get_recovery_dtbo_offset(args)))
|
||||
else:
|
||||
# Set to zero if no recovery dtbo
|
||||
args.output.write(pack('I', 0))
|
||||
args.output.write(pack('Q', 0))
|
||||
# Populate boot image header size for header versions 1 and 2.
|
||||
if args.header_version == 1:
|
||||
args.output.write(pack('I', BOOT_IMAGE_HEADER_V1_SIZE))
|
||||
elif args.header_version == 2:
|
||||
args.output.write(pack('I', BOOT_IMAGE_HEADER_V2_SIZE))
|
||||
if args.header_version > 1:
|
||||
if filesize(args.dtb) == 0:
|
||||
raise ValueError('DTB image must not be empty.')
|
||||
# dtb size in bytes
|
||||
args.output.write(pack('I', filesize(args.dtb)))
|
||||
# dtb physical load address
|
||||
args.output.write(pack('Q', args.base + args.dtb_offset))
|
||||
pad_file(args.output, args.pagesize)
|
||||
return img_id
|
||||
class AsciizBytes:
|
||||
"""Parses a string and encodes it as an asciiz bytes object.
|
||||
>>> AsciizBytes(bufsize=4)('foo')
|
||||
b'foo\\x00'
|
||||
>>> AsciizBytes(bufsize=4)('foob')
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
argparse.ArgumentTypeError: Encoded asciiz length exceeded: max 4, got 5
|
||||
"""
|
||||
def __init__(self, bufsize):
|
||||
self.bufsize = bufsize
|
||||
def __call__(self, arg):
|
||||
arg_bytes = arg.encode() + b'\x00'
|
||||
if len(arg_bytes) > self.bufsize:
|
||||
raise ArgumentTypeError(
|
||||
'Encoded asciiz length exceeded: '
|
||||
f'max {self.bufsize}, got {len(arg_bytes)}')
|
||||
return arg_bytes
|
||||
class VendorRamdiskTableBuilder:
|
||||
"""Vendor ramdisk table builder.
|
||||
Attributes:
|
||||
entries: A list of VendorRamdiskTableEntry namedtuple.
|
||||
ramdisk_total_size: Total size in bytes of all ramdisks in the table.
|
||||
"""
|
||||
VendorRamdiskTableEntry = collections.namedtuple( # pylint: disable=invalid-name
|
||||
'VendorRamdiskTableEntry',
|
||||
['ramdisk_path', 'ramdisk_size', 'ramdisk_offset', 'ramdisk_type',
|
||||
'ramdisk_name', 'board_id'])
|
||||
def __init__(self):
|
||||
self.entries = []
|
||||
self.ramdisk_total_size = 0
|
||||
self.ramdisk_names = set()
|
||||
def add_entry(self, ramdisk_path, ramdisk_type, ramdisk_name, board_id):
|
||||
# Strip any trailing null for simple comparison.
|
||||
stripped_ramdisk_name = ramdisk_name.rstrip(b'\x00')
|
||||
if stripped_ramdisk_name in VENDOR_RAMDISK_NAME_BLOCKLIST:
|
||||
raise ValueError(
|
||||
f'Banned vendor ramdisk name: {stripped_ramdisk_name}')
|
||||
if stripped_ramdisk_name in self.ramdisk_names:
|
||||
raise ValueError(
|
||||
f'Duplicated vendor ramdisk name: {stripped_ramdisk_name}')
|
||||
self.ramdisk_names.add(stripped_ramdisk_name)
|
||||
if board_id is None:
|
||||
board_id = array.array(
|
||||
'I', [0] * VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE)
|
||||
else:
|
||||
board_id = array.array('I', board_id)
|
||||
if len(board_id) != VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE:
|
||||
raise ValueError('board_id size must be '
|
||||
f'{VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE}')
|
||||
with open(ramdisk_path, 'rb') as f:
|
||||
ramdisk_size = filesize(f)
|
||||
self.entries.append(self.VendorRamdiskTableEntry(
|
||||
ramdisk_path, ramdisk_size, self.ramdisk_total_size, ramdisk_type,
|
||||
ramdisk_name, board_id))
|
||||
self.ramdisk_total_size += ramdisk_size
|
||||
def write_ramdisks_padded(self, fout, alignment):
|
||||
for entry in self.entries:
|
||||
with open(entry.ramdisk_path, 'rb') as f:
|
||||
fout.write(f.read())
|
||||
pad_file(fout, alignment)
|
||||
def write_entries_padded(self, fout, alignment):
|
||||
for entry in self.entries:
|
||||
fout.write(pack('I', entry.ramdisk_size))
|
||||
fout.write(pack('I', entry.ramdisk_offset))
|
||||
fout.write(pack('I', entry.ramdisk_type))
|
||||
fout.write(pack(f'{VENDOR_RAMDISK_NAME_SIZE}s',
|
||||
entry.ramdisk_name))
|
||||
fout.write(entry.board_id)
|
||||
pad_file(fout, alignment)
|
||||
def write_padded_file(f_out, f_in, padding):
|
||||
if f_in is None:
|
||||
return
|
||||
f_out.write(f_in.read())
|
||||
pad_file(f_out, padding)
|
||||
def parse_int(x):
|
||||
return int(x, 0)
|
||||
def parse_os_version(x):
|
||||
match = re.search(r'^(\d{1,3})(?:\.(\d{1,3})(?:\.(\d{1,3}))?)?', x)
|
||||
if match:
|
||||
a = int(match.group(1))
|
||||
b = c = 0
|
||||
if match.lastindex >= 2:
|
||||
b = int(match.group(2))
|
||||
if match.lastindex == 3:
|
||||
c = int(match.group(3))
|
||||
# 7 bits allocated for each field
|
||||
assert a < 128
|
||||
assert b < 128
|
||||
assert c < 128
|
||||
return (a << 14) | (b << 7) | c
|
||||
return 0
|
||||
def parse_os_patch_level(x):
|
||||
match = re.search(r'^(\d{4})-(\d{2})(?:-(\d{2}))?', x)
|
||||
if match:
|
||||
y = int(match.group(1)) - 2000
|
||||
m = int(match.group(2))
|
||||
# 7 bits allocated for the year, 4 bits for the month
|
||||
assert 0 <= y < 128
|
||||
assert 0 < m <= 12
|
||||
return (y << 4) | m
|
||||
return 0
|
||||
def parse_vendor_ramdisk_type(x):
|
||||
type_dict = {
|
||||
'none': VENDOR_RAMDISK_TYPE_NONE,
|
||||
'platform': VENDOR_RAMDISK_TYPE_PLATFORM,
|
||||
'recovery': VENDOR_RAMDISK_TYPE_RECOVERY,
|
||||
'dlkm': VENDOR_RAMDISK_TYPE_DLKM,
|
||||
}
|
||||
if x.lower() in type_dict:
|
||||
return type_dict[x.lower()]
|
||||
return parse_int(x)
|
||||
def get_vendor_boot_v4_usage():
|
||||
return """vendor boot version 4 arguments:
|
||||
--ramdisk_type {none,platform,recovery,dlkm}
|
||||
specify the type of the ramdisk
|
||||
--ramdisk_name NAME
|
||||
specify the name of the ramdisk
|
||||
--board_id{0..15} NUMBER
|
||||
specify the value of the board_id vector, defaults to 0
|
||||
--vendor_ramdisk_fragment VENDOR_RAMDISK_FILE
|
||||
path to the vendor ramdisk file
|
||||
These options can be specified multiple times, where each vendor ramdisk
|
||||
option group ends with a --vendor_ramdisk_fragment option.
|
||||
Each option group appends an additional ramdisk to the vendor boot image.
|
||||
"""
|
||||
def parse_vendor_ramdisk_args(args, args_list):
|
||||
"""Parses vendor ramdisk specific arguments.
|
||||
Args:
|
||||
args: An argparse.Namespace object. Parsed results are stored into this
|
||||
object.
|
||||
args_list: A list of argument strings to be parsed.
|
||||
Returns:
|
||||
A list argument strings that are not parsed by this method.
|
||||
"""
|
||||
parser = ArgumentParser(add_help=False)
|
||||
parser.add_argument('--ramdisk_type', type=parse_vendor_ramdisk_type,
|
||||
default=VENDOR_RAMDISK_TYPE_NONE)
|
||||
parser.add_argument('--ramdisk_name',
|
||||
type=AsciizBytes(bufsize=VENDOR_RAMDISK_NAME_SIZE),
|
||||
required=True)
|
||||
for i in range(VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE):
|
||||
parser.add_argument(f'--board_id{i}', type=parse_int, default=0)
|
||||
parser.add_argument(PARSER_ARGUMENT_VENDOR_RAMDISK_FRAGMENT, required=True)
|
||||
unknown_args = []
|
||||
vendor_ramdisk_table_builder = VendorRamdiskTableBuilder()
|
||||
if args.vendor_ramdisk is not None:
|
||||
vendor_ramdisk_table_builder.add_entry(
|
||||
args.vendor_ramdisk.name, VENDOR_RAMDISK_TYPE_PLATFORM, b'', None)
|
||||
while PARSER_ARGUMENT_VENDOR_RAMDISK_FRAGMENT in args_list:
|
||||
idx = args_list.index(PARSER_ARGUMENT_VENDOR_RAMDISK_FRAGMENT) + 2
|
||||
vendor_ramdisk_args = args_list[:idx]
|
||||
args_list = args_list[idx:]
|
||||
ramdisk_args, extra_args = parser.parse_known_args(vendor_ramdisk_args)
|
||||
ramdisk_args_dict = vars(ramdisk_args)
|
||||
unknown_args.extend(extra_args)
|
||||
ramdisk_path = ramdisk_args.vendor_ramdisk_fragment
|
||||
ramdisk_type = ramdisk_args.ramdisk_type
|
||||
ramdisk_name = ramdisk_args.ramdisk_name
|
||||
board_id = [ramdisk_args_dict[f'board_id{i}']
|
||||
for i in range(VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE)]
|
||||
vendor_ramdisk_table_builder.add_entry(ramdisk_path, ramdisk_type,
|
||||
ramdisk_name, board_id)
|
||||
if len(args_list) > 0:
|
||||
unknown_args.extend(args_list)
|
||||
args.vendor_ramdisk_total_size = (vendor_ramdisk_table_builder
|
||||
.ramdisk_total_size)
|
||||
args.vendor_ramdisk_table_entry_num = len(vendor_ramdisk_table_builder
|
||||
.entries)
|
||||
args.vendor_ramdisk_table_builder = vendor_ramdisk_table_builder
|
||||
return unknown_args
|
||||
def parse_cmdline():
|
||||
version_parser = ArgumentParser(add_help=False)
|
||||
version_parser.add_argument('--header_version', type=parse_int, default=0)
|
||||
if version_parser.parse_known_args()[0].header_version < 3:
|
||||
# For boot header v0 to v2, the kernel commandline field is split into
|
||||
# two fields, cmdline and extra_cmdline. Both fields are asciiz strings,
|
||||
# so we minus one here to ensure the encoded string plus the
|
||||
# null-terminator can fit in the buffer size.
|
||||
cmdline_size = BOOT_ARGS_SIZE + BOOT_EXTRA_ARGS_SIZE - 1
|
||||
else:
|
||||
cmdline_size = BOOT_ARGS_SIZE + BOOT_EXTRA_ARGS_SIZE
|
||||
parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter,
|
||||
epilog=get_vendor_boot_v4_usage())
|
||||
parser.add_argument('--kernel', type=FileType('rb'),
|
||||
help='path to the kernel')
|
||||
parser.add_argument('--ramdisk', type=FileType('rb'),
|
||||
help='path to the ramdisk')
|
||||
parser.add_argument('--second', type=FileType('rb'),
|
||||
help='path to the second bootloader')
|
||||
parser.add_argument('--dtb', type=FileType('rb'), help='path to the dtb')
|
||||
dtbo_group = parser.add_mutually_exclusive_group()
|
||||
dtbo_group.add_argument('--recovery_dtbo', type=FileType('rb'),
|
||||
help='path to the recovery DTBO')
|
||||
dtbo_group.add_argument('--recovery_acpio', type=FileType('rb'),
|
||||
metavar='RECOVERY_ACPIO', dest='recovery_dtbo',
|
||||
help='path to the recovery ACPIO')
|
||||
parser.add_argument('--cmdline', type=AsciizBytes(bufsize=cmdline_size),
|
||||
default='', help='kernel command line arguments')
|
||||
parser.add_argument('--vendor_cmdline',
|
||||
type=AsciizBytes(bufsize=VENDOR_BOOT_ARGS_SIZE),
|
||||
default='',
|
||||
help='vendor boot kernel command line arguments')
|
||||
parser.add_argument('--base', type=parse_int, default=0x10000000,
|
||||
help='base address')
|
||||
parser.add_argument('--kernel_offset', type=parse_int, default=0x00008000,
|
||||
help='kernel offset')
|
||||
parser.add_argument('--ramdisk_offset', type=parse_int, default=0x01000000,
|
||||
help='ramdisk offset')
|
||||
parser.add_argument('--second_offset', type=parse_int, default=0x00f00000,
|
||||
help='second bootloader offset')
|
||||
parser.add_argument('--dtb_offset', type=parse_int, default=0x01f00000,
|
||||
help='dtb offset')
|
||||
parser.add_argument('--os_version', type=parse_os_version, default=0,
|
||||
help='operating system version')
|
||||
parser.add_argument('--os_patch_level', type=parse_os_patch_level,
|
||||
default=0, help='operating system patch level')
|
||||
parser.add_argument('--tags_offset', type=parse_int, default=0x00000100,
|
||||
help='tags offset')
|
||||
parser.add_argument('--board', type=AsciizBytes(bufsize=BOOT_NAME_SIZE),
|
||||
default='', help='board name')
|
||||
parser.add_argument('--pagesize', type=parse_int,
|
||||
choices=[2**i for i in range(11, 15)], default=2048,
|
||||
help='page size')
|
||||
parser.add_argument('--id', action='store_true',
|
||||
help='print the image ID on standard output')
|
||||
parser.add_argument('--header_version', type=parse_int, default=0,
|
||||
help='boot image header version')
|
||||
parser.add_argument('-o', '--output', type=FileType('wb'),
|
||||
help='output file name')
|
||||
parser.add_argument('--gki_signing_algorithm',
|
||||
help='GKI signing algorithm to use')
|
||||
parser.add_argument('--gki_signing_key',
|
||||
help='path to RSA private key file')
|
||||
parser.add_argument('--gki_signing_signature_args',
|
||||
help='other hash arguments passed to avbtool')
|
||||
parser.add_argument('--gki_signing_avbtool_path',
|
||||
help='path to avbtool for boot signature generation')
|
||||
parser.add_argument('--vendor_boot', type=FileType('wb'),
|
||||
help='vendor boot output file name')
|
||||
parser.add_argument('--vendor_ramdisk', type=FileType('rb'),
|
||||
help='path to the vendor ramdisk')
|
||||
parser.add_argument('--vendor_bootconfig', type=FileType('rb'),
|
||||
help='path to the vendor bootconfig file')
|
||||
args, extra_args = parser.parse_known_args()
|
||||
if args.vendor_boot is not None and args.header_version > 3:
|
||||
extra_args = parse_vendor_ramdisk_args(args, extra_args)
|
||||
if len(extra_args) > 0:
|
||||
raise ValueError(f'Unrecognized arguments: {extra_args}')
|
||||
if args.header_version < 3:
|
||||
args.extra_cmdline = args.cmdline[BOOT_ARGS_SIZE-1:]
|
||||
args.cmdline = args.cmdline[:BOOT_ARGS_SIZE-1] + b'\x00'
|
||||
assert len(args.cmdline) <= BOOT_ARGS_SIZE
|
||||
assert len(args.extra_cmdline) <= BOOT_EXTRA_ARGS_SIZE
|
||||
return args
|
||||
def add_boot_image_signature(args, pagesize):
|
||||
"""Adds the boot image signature.
|
||||
Note that the signature will only be verified in VTS to ensure a
|
||||
generic boot.img is used. It will not be used by the device
|
||||
bootloader at boot time. The bootloader should only verify
|
||||
the boot vbmeta at the end of the boot partition (or in the top-level
|
||||
vbmeta partition) via the Android Verified Boot process, when the
|
||||
device boots.
|
||||
"""
|
||||
args.output.flush() # Flush the buffer for signature calculation.
|
||||
# Appends zeros if the signing key is not specified.
|
||||
if not args.gki_signing_key or not args.gki_signing_algorithm:
|
||||
zeros = b'\x00' * BOOT_IMAGE_V4_SIGNATURE_SIZE
|
||||
args.output.write(zeros)
|
||||
pad_file(args.output, pagesize)
|
||||
return
|
||||
avbtool = 'avbtool' # Used from otatools.zip or Android build env.
|
||||
# We need to specify the path of avbtool in build/core/Makefile.
|
||||
# Because avbtool is not guaranteed to be in $PATH there.
|
||||
if args.gki_signing_avbtool_path:
|
||||
avbtool = args.gki_signing_avbtool_path
|
||||
# Need to specify a value of --partition_size for avbtool to work.
|
||||
# We use 64 MB below, but avbtool will not resize the boot image to
|
||||
# this size because --do_not_append_vbmeta_image is also specified.
|
||||
avbtool_cmd = [
|
||||
avbtool, 'add_hash_footer',
|
||||
'--partition_name', 'boot',
|
||||
'--partition_size', str(64 * 1024 * 1024),
|
||||
'--image', args.output.name,
|
||||
'--algorithm', args.gki_signing_algorithm,
|
||||
'--key', args.gki_signing_key,
|
||||
'--salt', 'd00df00d'] # TODO: use a hash of kernel/ramdisk as the salt.
|
||||
# Additional arguments passed to avbtool.
|
||||
if args.gki_signing_signature_args:
|
||||
avbtool_cmd += args.gki_signing_signature_args.split()
|
||||
# Outputs the signed vbmeta to a separate file, then append to boot.img
|
||||
# as the boot signature.
|
||||
with tempfile.TemporaryDirectory() as temp_out_dir:
|
||||
boot_signature_output = os.path.join(temp_out_dir, 'boot_signature')
|
||||
avbtool_cmd += ['--do_not_append_vbmeta_image',
|
||||
'--output_vbmeta_image', boot_signature_output]
|
||||
subprocess.check_call(avbtool_cmd)
|
||||
with open(boot_signature_output, 'rb') as boot_signature:
|
||||
if filesize(boot_signature) > BOOT_IMAGE_V4_SIGNATURE_SIZE:
|
||||
raise ValueError(
|
||||
f'boot sigature size is > {BOOT_IMAGE_V4_SIGNATURE_SIZE}')
|
||||
write_padded_file(args.output, boot_signature, pagesize)
|
||||
def write_data(args, pagesize):
|
||||
write_padded_file(args.output, args.kernel, pagesize)
|
||||
write_padded_file(args.output, args.ramdisk, pagesize)
|
||||
write_padded_file(args.output, args.second, pagesize)
|
||||
if args.header_version > 0 and args.header_version < 3:
|
||||
write_padded_file(args.output, args.recovery_dtbo, pagesize)
|
||||
if args.header_version == 2:
|
||||
write_padded_file(args.output, args.dtb, pagesize)
|
||||
if args.header_version >= 4:
|
||||
add_boot_image_signature(args, pagesize)
|
||||
def write_vendor_boot_data(args):
|
||||
if args.header_version > 3:
|
||||
builder = args.vendor_ramdisk_table_builder
|
||||
builder.write_ramdisks_padded(args.vendor_boot, args.pagesize)
|
||||
write_padded_file(args.vendor_boot, args.dtb, args.pagesize)
|
||||
builder.write_entries_padded(args.vendor_boot, args.pagesize)
|
||||
write_padded_file(args.vendor_boot, args.vendor_bootconfig,
|
||||
args.pagesize)
|
||||
else:
|
||||
write_padded_file(args.vendor_boot, args.vendor_ramdisk, args.pagesize)
|
||||
write_padded_file(args.vendor_boot, args.dtb, args.pagesize)
|
||||
def main():
|
||||
args = parse_cmdline()
|
||||
if args.vendor_boot is not None:
|
||||
if args.header_version not in {3, 4}:
|
||||
raise ValueError(
|
||||
'--vendor_boot not compatible with given header version')
|
||||
if args.header_version == 3 and args.vendor_ramdisk is None:
|
||||
raise ValueError('--vendor_ramdisk missing or invalid')
|
||||
write_vendor_boot_header(args)
|
||||
write_vendor_boot_data(args)
|
||||
if args.output is not None:
|
||||
if args.second is not None and args.header_version > 2:
|
||||
raise ValueError(
|
||||
'--second not compatible with given header version')
|
||||
img_id = write_header(args)
|
||||
if args.header_version > 2:
|
||||
write_data(args, BOOT_IMAGE_HEADER_V3_PAGESIZE)
|
||||
else:
|
||||
write_data(args, args.pagesize)
|
||||
if args.id and img_id is not None:
|
||||
print('0x' + ''.join(f'{octet:02x}' for octet in img_id))
|
||||
if __name__ == '__main__':
|
||||
main()
|
@@ -115,19 +115,6 @@ LABEL primary
|
||||
APPEND \${cbootargs} $BM_CMDLINE
|
||||
EOF
|
||||
|
||||
# Set up the pxelinux config for Jetson TK1
|
||||
cat <<EOF >/tftp/pxelinux.cfg/default-arm-tegra124-jetson-tk1
|
||||
PROMPT 0
|
||||
TIMEOUT 30
|
||||
DEFAULT primary
|
||||
MENU TITLE jetson TK1 boot options
|
||||
LABEL primary
|
||||
MENU LABEL CI kernel on TFTP
|
||||
LINUX zImage
|
||||
FDT tegra124-jetson-tk1.dtb
|
||||
APPEND \${cbootargs} $BM_CMDLINE
|
||||
EOF
|
||||
|
||||
# Create the rootfs in the NFS directory
|
||||
mkdir -p /nfs/results
|
||||
. $BM/rootfs-setup.sh /nfs
|
||||
|
@@ -74,11 +74,6 @@ class PoERun:
|
||||
self.print_error("nouveau jetson boot bug, retrying.")
|
||||
return 2
|
||||
|
||||
# network fail on tk1
|
||||
if re.search("NETDEV WATCHDOG:.* transmit queue 0 timed out", line):
|
||||
self.print_error("nouveau jetson tk1 network fail, retrying.")
|
||||
return 2
|
||||
|
||||
result = re.search("hwci: mesa: (\S*)", line)
|
||||
if result:
|
||||
if result.group(1) == "pass":
|
||||
|
@@ -1 +0,0 @@
|
||||
../bin/ci
|
@@ -4,6 +4,7 @@
|
||||
# Tomeu Vizoso <tomeu.vizoso@collabora.com>
|
||||
# David Heidelberg <david.heidelberg@collabora.com>
|
||||
#
|
||||
# TODO GraphQL for dependencies
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
@@ -11,20 +12,18 @@ Helper script to restrict running only required CI jobs
|
||||
and show the job(s) logs.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import re
|
||||
from subprocess import check_output
|
||||
import sys
|
||||
import time
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from functools import partial
|
||||
from itertools import chain
|
||||
from typing import Optional
|
||||
from functools import partial
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
import argparse
|
||||
import sys
|
||||
import gitlab
|
||||
|
||||
from colorama import Fore, Style
|
||||
from gitlab_common import get_gitlab_project, read_token, wait_for_pipeline
|
||||
from gitlab_gql import GitlabGQL, create_job_needs_dag, filter_dag, print_dag
|
||||
|
||||
REFRESH_WAIT_LOG = 10
|
||||
REFRESH_WAIT_JOBS = 6
|
||||
@@ -43,9 +42,44 @@ STATUS_COLORS = {
|
||||
"skipped": "",
|
||||
}
|
||||
|
||||
# TODO: This hardcoded list should be replaced by querying the pipeline's
|
||||
# dependency graph to see which jobs the target jobs need
|
||||
DEPENDENCIES = [
|
||||
"debian/x86_build-base",
|
||||
"debian/x86_build",
|
||||
"debian/x86_test-base",
|
||||
"debian/x86_test-gl",
|
||||
"debian/arm_build",
|
||||
"debian/arm_test",
|
||||
"kernel+rootfs_amd64",
|
||||
"kernel+rootfs_arm64",
|
||||
"kernel+rootfs_armhf",
|
||||
"debian-testing",
|
||||
"debian-arm64",
|
||||
]
|
||||
|
||||
COMPLETED_STATUSES = ["success", "failed"]
|
||||
|
||||
|
||||
def get_gitlab_project(glab, name: str):
|
||||
"""Finds a specified gitlab project for given user"""
|
||||
glab.auth()
|
||||
username = glab.user.username
|
||||
return glab.projects.get(f"{username}/mesa")
|
||||
|
||||
|
||||
def wait_for_pipeline(project, sha: str):
|
||||
"""await until pipeline appears in Gitlab"""
|
||||
print("⏲ for the pipeline to appear..", end="")
|
||||
while True:
|
||||
pipelines = project.pipelines.list(sha=sha)
|
||||
if pipelines:
|
||||
print("", flush=True)
|
||||
return pipelines[0]
|
||||
print("", end=".", flush=True)
|
||||
time.sleep(1)
|
||||
|
||||
|
||||
def print_job_status(job) -> None:
|
||||
"""It prints a nice, colored job status with a link to the job."""
|
||||
if job.status == "canceled":
|
||||
@@ -86,18 +120,15 @@ def pretty_wait(sec: int) -> None:
|
||||
|
||||
|
||||
def monitor_pipeline(
|
||||
project,
|
||||
pipeline,
|
||||
target_job: Optional[str],
|
||||
dependencies,
|
||||
force_manual: bool,
|
||||
stress: bool,
|
||||
project, pipeline, target_job: Optional[str], dependencies, force_manual: bool
|
||||
) -> tuple[Optional[int], Optional[int]]:
|
||||
"""Monitors pipeline and delegate canceling jobs"""
|
||||
statuses = {}
|
||||
target_statuses = {}
|
||||
stress_succ = 0
|
||||
stress_fail = 0
|
||||
|
||||
if not dependencies:
|
||||
dependencies = []
|
||||
dependencies.extend(DEPENDENCIES)
|
||||
|
||||
if target_job:
|
||||
target_jobs_regex = re.compile(target_job.strip())
|
||||
@@ -110,13 +141,6 @@ def monitor_pipeline(
|
||||
if force_manual and job.status == "manual":
|
||||
enable_job(project, job, True)
|
||||
|
||||
if stress and job.status in ["success", "failed"]:
|
||||
if job.status == "success":
|
||||
stress_succ += 1
|
||||
if job.status == "failed":
|
||||
stress_fail += 1
|
||||
retry_job(project, job)
|
||||
|
||||
if (job.id not in target_statuses) or (
|
||||
job.status not in target_statuses[job.id]
|
||||
):
|
||||
@@ -148,14 +172,6 @@ def monitor_pipeline(
|
||||
if target_job:
|
||||
cancel_jobs(project, to_cancel)
|
||||
|
||||
if stress:
|
||||
print(
|
||||
"∑ succ: " + str(stress_succ) + "; fail: " + str(stress_fail),
|
||||
flush=False,
|
||||
)
|
||||
pretty_wait(REFRESH_WAIT_JOBS)
|
||||
continue
|
||||
|
||||
print("---------------------------------", flush=False)
|
||||
|
||||
if len(target_statuses) == 1 and {"running"}.intersection(
|
||||
@@ -183,14 +199,6 @@ def enable_job(project, job, target: bool) -> None:
|
||||
print(Fore.MAGENTA + f"{jtype} job {job.name} manually enabled" + Style.RESET_ALL)
|
||||
|
||||
|
||||
def retry_job(project, job) -> None:
|
||||
"""retry job"""
|
||||
pjob = project.jobs.get(job.id, lazy=True)
|
||||
pjob.retry()
|
||||
jtype = "↻"
|
||||
print(Fore.MAGENTA + f"{jtype} job {job.name} manually enabled" + Style.RESET_ALL)
|
||||
|
||||
|
||||
def cancel_job(project, job) -> None:
|
||||
"""Cancel GitLab job"""
|
||||
pjob = project.jobs.get(job.id, lazy=True)
|
||||
@@ -215,7 +223,7 @@ def print_log(project, job_id) -> None:
|
||||
job = project.jobs.get(job_id)
|
||||
|
||||
# GitLab's REST API doesn't offer pagination for logs, so we have to refetch it all
|
||||
lines = job.trace().decode("raw_unicode_escape").splitlines()
|
||||
lines = job.trace().decode("unicode_escape").splitlines()
|
||||
for line in lines[printed_lines:]:
|
||||
print(line)
|
||||
printed_lines = len(lines)
|
||||
@@ -235,8 +243,9 @@ def parse_args() -> None:
|
||||
+ '--target ".*traces" ',
|
||||
)
|
||||
parser.add_argument("--target", metavar="target-job", help="Target job")
|
||||
parser.add_argument("--deps", nargs="+", help="Job dependencies")
|
||||
parser.add_argument(
|
||||
"--rev", metavar="revision", help="repository git revision (default: HEAD)"
|
||||
"--rev", metavar="revision", help="repository git revision", required=True
|
||||
)
|
||||
parser.add_argument(
|
||||
"--token",
|
||||
@@ -246,27 +255,19 @@ def parse_args() -> None:
|
||||
parser.add_argument(
|
||||
"--force-manual", action="store_true", help="Force jobs marked as manual"
|
||||
)
|
||||
parser.add_argument("--stress", action="store_true", help="Stresstest job(s)")
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def find_dependencies(target_job: str, project_path: str, sha: str) -> set[str]:
|
||||
gql_instance = GitlabGQL()
|
||||
dag, _ = create_job_needs_dag(
|
||||
gql_instance, {"projectPath": project_path.path_with_namespace, "sha": sha}
|
||||
def read_token(token_arg: Optional[str]) -> str:
|
||||
"""pick token from args or file"""
|
||||
if token_arg:
|
||||
return token_arg
|
||||
return (
|
||||
open(os.path.expanduser("~/.config/gitlab-token"), encoding="utf-8")
|
||||
.readline()
|
||||
.rstrip()
|
||||
)
|
||||
|
||||
target_dep_dag = filter_dag(dag, target_job)
|
||||
if not target_dep_dag:
|
||||
print(Fore.RED + "The job(s) were not found in the pipeline." + Fore.RESET)
|
||||
sys.exit(1)
|
||||
print(Fore.YELLOW)
|
||||
print("Detected job dependencies:")
|
||||
print()
|
||||
print_dag(target_dep_dag)
|
||||
print(Fore.RESET)
|
||||
return set(chain.from_iterable(target_dep_dag.values()))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
@@ -276,26 +277,18 @@ if __name__ == "__main__":
|
||||
|
||||
token = read_token(args.token)
|
||||
|
||||
gl = gitlab.Gitlab(url="https://gitlab.freedesktop.org",
|
||||
private_token=token,
|
||||
retry_transient_errors=True)
|
||||
gl = gitlab.Gitlab(url="https://gitlab.freedesktop.org", private_token=token)
|
||||
|
||||
cur_project = get_gitlab_project(gl, "mesa")
|
||||
|
||||
REV: str = args.rev
|
||||
if not REV:
|
||||
REV = check_output(['git', 'rev-parse', 'HEAD']).decode('ascii').strip()
|
||||
print(f"Revision: {REV}")
|
||||
pipe = wait_for_pipeline(cur_project, REV)
|
||||
print(f"Revision: {args.rev}")
|
||||
pipe = wait_for_pipeline(cur_project, args.rev)
|
||||
print(f"Pipeline: {pipe.web_url}")
|
||||
deps = set()
|
||||
if args.target:
|
||||
print("🞋 job: " + Fore.BLUE + args.target + Style.RESET_ALL)
|
||||
deps = find_dependencies(
|
||||
target_job=args.target, sha=REV, project_path=cur_project
|
||||
)
|
||||
print(f"Extra dependencies: {args.deps}")
|
||||
target_job_id, ret = monitor_pipeline(
|
||||
cur_project, pipe, args.target, deps, args.force_manual, args.stress
|
||||
cur_project, pipe, args.target, args.deps, args.force_manual
|
||||
)
|
||||
|
||||
if target_job_id:
|
2
.gitlab-ci/bin/requirements.txt
Normal file
2
.gitlab-ci/bin/requirements.txt
Normal file
@@ -0,0 +1,2 @@
|
||||
colorama==0.4.5
|
||||
python-gitlab==3.5.0
|
@@ -78,7 +78,7 @@ debian-testing:
|
||||
-D dri3=enabled
|
||||
-D gallium-va=enabled
|
||||
GALLIUM_DRIVERS: "swrast,virgl,radeonsi,zink,crocus,iris,i915"
|
||||
VULKAN_DRIVERS: "swrast,amd,intel,virtio-experimental"
|
||||
VULKAN_DRIVERS: "swrast,amd,intel"
|
||||
BUILDTYPE: "debugoptimized"
|
||||
EXTRA_OPTION: >
|
||||
-D spirv-to-dxil=true
|
||||
@@ -86,6 +86,7 @@ debian-testing:
|
||||
MINIO_ARTIFACT_NAME: mesa-amd64
|
||||
LLVM_VERSION: "13"
|
||||
script:
|
||||
- .gitlab-ci/lava/lava-pytest.sh
|
||||
- .gitlab-ci/meson/build.sh
|
||||
- .gitlab-ci/prepare-artifacts.sh
|
||||
artifacts:
|
||||
@@ -123,17 +124,19 @@ debian-testing-msan:
|
||||
GALLIUM_DRIVERS: "iris,nouveau,kmsro,r300,r600,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,radeonsi,tegra,d3d12,crocus"
|
||||
VULKAN_DRIVERS: intel,amd,broadcom,virtio-experimental
|
||||
|
||||
.debian-cl-testing:
|
||||
debian-clover-testing:
|
||||
extends:
|
||||
- .meson-build
|
||||
- .ci-deqp-artifacts
|
||||
variables:
|
||||
LLVM_VERSION: "13"
|
||||
UNWIND: "enabled"
|
||||
DRI_LOADERS: >
|
||||
-D glx=disabled
|
||||
-D egl=disabled
|
||||
-D gbm=disabled
|
||||
GALLIUM_ST: >
|
||||
-D gallium-opencl=icd
|
||||
-D opencl-spirv=true
|
||||
GALLIUM_DRIVERS: "swrast"
|
||||
BUILDTYPE: "debugoptimized"
|
||||
EXTRA_OPTION: >
|
||||
@@ -142,23 +145,7 @@ debian-testing-msan:
|
||||
- .gitlab-ci/meson/build.sh
|
||||
- .gitlab-ci/prepare-artifacts.sh
|
||||
|
||||
debian-clover-testing:
|
||||
extends:
|
||||
- .debian-cl-testing
|
||||
variables:
|
||||
GALLIUM_ST: >
|
||||
-D gallium-opencl=icd
|
||||
-D opencl-spirv=true
|
||||
|
||||
debian-rusticl-testing:
|
||||
extends:
|
||||
- .debian-cl-testing
|
||||
variables:
|
||||
GALLIUM_ST: >
|
||||
-D gallium-rusticl=true
|
||||
-D opencl-spirv=true
|
||||
|
||||
debian-build-testing:
|
||||
debian-gallium:
|
||||
extends: .meson-build
|
||||
variables:
|
||||
UNWIND: "enabled"
|
||||
@@ -171,24 +158,19 @@ debian-build-testing:
|
||||
-D dri3=enabled
|
||||
-D gallium-extra-hud=true
|
||||
-D gallium-vdpau=enabled
|
||||
-D gallium-xvmc=enabled
|
||||
-D gallium-omx=bellagio
|
||||
-D gallium-va=enabled
|
||||
-D gallium-xa=enabled
|
||||
-D gallium-nine=true
|
||||
-D gallium-opencl=disabled
|
||||
-D gallium-rusticl=false
|
||||
GALLIUM_DRIVERS: "iris,nouveau,kmsro,r300,r600,freedreno,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,d3d12,asahi,crocus"
|
||||
VULKAN_DRIVERS: swrast
|
||||
EXTRA_OPTION: >
|
||||
-D spirv-to-dxil=true
|
||||
-D osmesa=true
|
||||
-D tools=drm-shim,etnaviv,freedreno,glsl,intel,intel-ui,nir,nouveau,lima,panfrost,asahi
|
||||
-D b_lto=true
|
||||
LLVM_VERSION: 13
|
||||
-D tools=drm-shim,etnaviv,freedreno,glsl,intel,intel-ui,nir,nouveau,xvmc,lima,panfrost,asahi
|
||||
script:
|
||||
- .gitlab-ci/lava/lava-pytest.sh
|
||||
- .gitlab-ci/run-shellcheck.sh
|
||||
- .gitlab-ci/run-yamllint.sh
|
||||
- .gitlab-ci/meson/build.sh
|
||||
- .gitlab-ci/run-shader-db.sh
|
||||
|
||||
@@ -196,7 +178,6 @@ debian-build-testing:
|
||||
debian-release:
|
||||
extends: .meson-build
|
||||
variables:
|
||||
LLVM_VERSION: "13"
|
||||
UNWIND: "enabled"
|
||||
DRI_LOADERS: >
|
||||
-D glx=dri
|
||||
@@ -207,12 +188,12 @@ debian-release:
|
||||
-D dri3=enabled
|
||||
-D gallium-extra-hud=true
|
||||
-D gallium-vdpau=enabled
|
||||
-D gallium-xvmc=disabled
|
||||
-D gallium-omx=disabled
|
||||
-D gallium-va=enabled
|
||||
-D gallium-xa=enabled
|
||||
-D gallium-nine=false
|
||||
-D gallium-opencl=disabled
|
||||
-D gallium-rusticl=false
|
||||
-D llvm=enabled
|
||||
GALLIUM_DRIVERS: "i915,iris,nouveau,kmsro,freedreno,r300,svga,swrast,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,d3d12,crocus"
|
||||
VULKAN_DRIVERS: "amd,imagination-experimental,microsoft-experimental"
|
||||
@@ -226,44 +207,6 @@ debian-release:
|
||||
script:
|
||||
- .gitlab-ci/meson/build.sh
|
||||
|
||||
alpine-build-testing:
|
||||
extends:
|
||||
- .meson-build
|
||||
- .use-alpine/x86_build
|
||||
stage: build-x86_64
|
||||
variables:
|
||||
BUILDTYPE: "release"
|
||||
C_ARGS: >
|
||||
-Wno-error=cpp
|
||||
-Wno-error=array-bounds
|
||||
-Wno-error=stringop-overread
|
||||
DRI_LOADERS: >
|
||||
-D glx=disabled
|
||||
-D gbm=enabled
|
||||
-D egl=enabled
|
||||
-D glvnd=false
|
||||
-D platforms=wayland
|
||||
LLVM_VERSION: ""
|
||||
GALLIUM_DRIVERS: "crocus,etnaviv,freedreno,iris,kmsro,lima,nouveau,panfrost,r300,r600,radeonsi,svga,swrast,tegra,v3d,vc4,virgl,zink"
|
||||
GALLIUM_ST: >
|
||||
-D dri3=enabled
|
||||
-D gallium-extra-hud=true
|
||||
-D gallium-vdpau=disabled
|
||||
-D gallium-omx=disabled
|
||||
-D gallium-va=enabled
|
||||
-D gallium-xa=disabled
|
||||
-D gallium-nine=true
|
||||
-D gallium-rusticl=false
|
||||
-D gles1=disabled
|
||||
-D gles2=enabled
|
||||
-D llvm=enabled
|
||||
-D microsoft-clc=disabled
|
||||
-D shared-llvm=enabled
|
||||
UNWIND: "disabled"
|
||||
VULKAN_DRIVERS: "amd,broadcom,freedreno,intel,imagination-experimental"
|
||||
script:
|
||||
- .gitlab-ci/meson/build.sh
|
||||
|
||||
fedora-release:
|
||||
extends:
|
||||
- .meson-build
|
||||
@@ -273,36 +216,38 @@ fedora-release:
|
||||
C_ARGS: >
|
||||
-Wno-error=array-bounds
|
||||
-Wno-error=stringop-overread
|
||||
-Wno-error=uninitialized
|
||||
CPP_ARGS: >
|
||||
-Wno-error=array-bounds
|
||||
DRI_LOADERS: >
|
||||
-D glx=dri
|
||||
-D gbm=enabled
|
||||
-D egl=enabled
|
||||
-D glvnd=true
|
||||
-D platforms=x11,wayland
|
||||
# intel-clc disabled, we need llvm-spirv-translator 13.0+, Fedora 34 only packages 12.0.
|
||||
EXTRA_OPTION: >
|
||||
-D osmesa=true
|
||||
-D selinux=true
|
||||
-D tools=drm-shim,etnaviv,freedreno,glsl,intel,nir,nouveau,lima,panfrost,imagination
|
||||
-D vulkan-layers=device-select,overlay
|
||||
-D intel-clc=disabled
|
||||
-D intel-clc=enabled
|
||||
-D imagination-srv=true
|
||||
GALLIUM_DRIVERS: "crocus,etnaviv,freedreno,iris,kmsro,lima,nouveau,panfrost,r300,r600,radeonsi,svga,swrast,tegra,v3d,vc4,virgl,zink"
|
||||
GALLIUM_ST: >
|
||||
-D dri3=enabled
|
||||
-D gallium-extra-hud=true
|
||||
-D gallium-vdpau=enabled
|
||||
-D gallium-xvmc=disabled
|
||||
-D gallium-omx=disabled
|
||||
-D gallium-va=enabled
|
||||
-D gallium-xa=enabled
|
||||
-D gallium-nine=false
|
||||
-D gallium-opencl=icd
|
||||
-D gallium-rusticl=false
|
||||
-D gles1=disabled
|
||||
-D gles2=enabled
|
||||
-D llvm=enabled
|
||||
-D microsoft-clc=disabled
|
||||
-D shared-llvm=enabled
|
||||
-D vulkan-device-select-layer=true
|
||||
LLVM_VERSION: ""
|
||||
UNWIND: "disabled"
|
||||
VULKAN_DRIVERS: "amd,broadcom,freedreno,intel,imagination-experimental"
|
||||
@@ -322,6 +267,9 @@ debian-android:
|
||||
-Wno-error=initializer-overrides
|
||||
-Wno-error=missing-braces
|
||||
-Wno-error=sometimes-uninitialized
|
||||
-Wno-error=unused-function
|
||||
CPP_ARGS: >
|
||||
-Wno-error=deprecated-declarations
|
||||
DRI_LOADERS: >
|
||||
-D glx=disabled
|
||||
-D gbm=disabled
|
||||
@@ -335,12 +283,12 @@ debian-android:
|
||||
GALLIUM_ST: >
|
||||
-D dri3=disabled
|
||||
-D gallium-vdpau=disabled
|
||||
-D gallium-xvmc=disabled
|
||||
-D gallium-omx=disabled
|
||||
-D gallium-va=disabled
|
||||
-D gallium-xa=disabled
|
||||
-D gallium-nine=false
|
||||
-D gallium-opencl=disabled
|
||||
-D gallium-rusticl=false
|
||||
LLVM_VERSION: ""
|
||||
PKG_CONFIG_LIBDIR: "/disable/non/android/system/pc/files"
|
||||
script:
|
||||
@@ -362,11 +310,12 @@ debian-android:
|
||||
-D glx=dri
|
||||
-D gbm=enabled
|
||||
-D egl=enabled
|
||||
-D platforms=x11,wayland
|
||||
-D platforms=x11
|
||||
-D osmesa=false
|
||||
GALLIUM_ST: >
|
||||
-D dri3=enabled
|
||||
-D gallium-vdpau=disabled
|
||||
-D gallium-xvmc=disabled
|
||||
-D gallium-omx=disabled
|
||||
-D gallium-va=disabled
|
||||
-D gallium-xa=disabled
|
||||
@@ -395,9 +344,6 @@ debian-armhf:
|
||||
-D llvm=disabled
|
||||
-D valgrind=false
|
||||
MINIO_ARTIFACT_NAME: mesa-armhf
|
||||
# The strip command segfaults, failing to strip the binary and leaving
|
||||
# tempfiles in our artifacts.
|
||||
ARTIFACTS_DEBUG_SYMBOLS: 1
|
||||
script:
|
||||
- .gitlab-ci/meson/build.sh
|
||||
- .gitlab-ci/prepare-artifacts.sh
|
||||
@@ -421,6 +367,8 @@ debian-arm64-asan:
|
||||
extends:
|
||||
- debian-arm64
|
||||
variables:
|
||||
C_ARGS: >
|
||||
-Wno-error=stringop-truncation
|
||||
EXTRA_OPTION: >
|
||||
-D llvm=disabled
|
||||
-D b_sanitize=address
|
||||
@@ -444,64 +392,33 @@ debian-arm64-build-test:
|
||||
debian-clang:
|
||||
extends: .meson-build
|
||||
variables:
|
||||
LLVM_VERSION: "13"
|
||||
UNWIND: "enabled"
|
||||
GALLIUM_DUMP_CPU: "true"
|
||||
C_ARGS: >
|
||||
-Wno-error=constant-conversion
|
||||
-Wno-error=enum-conversion
|
||||
-Wno-error=implicit-const-int-float-conversion
|
||||
-Wno-error=initializer-overrides
|
||||
-Wno-error=sometimes-uninitialized
|
||||
-Wno-error=unused-function
|
||||
CPP_ARGS: >
|
||||
-Wno-error=c99-designator
|
||||
-Wno-error=deprecated-declarations
|
||||
-Wno-error=implicit-const-int-float-conversion
|
||||
-Wno-error=missing-braces
|
||||
-Wno-error=overloaded-virtual
|
||||
-Wno-error=tautological-constant-out-of-range-compare
|
||||
-Wno-error=unused-const-variable
|
||||
-Wno-error=unused-private-field
|
||||
DRI_LOADERS: >
|
||||
-D glx=dri
|
||||
-D gbm=enabled
|
||||
-D egl=enabled
|
||||
-D glvnd=true
|
||||
-D platforms=x11,wayland
|
||||
GALLIUM_ST: >
|
||||
-D dri3=enabled
|
||||
-D gallium-extra-hud=true
|
||||
-D gallium-vdpau=enabled
|
||||
-D gallium-omx=bellagio
|
||||
-D gallium-va=enabled
|
||||
-D gallium-xa=enabled
|
||||
-D gallium-nine=true
|
||||
-D gallium-opencl=icd
|
||||
-D gles1=enabled
|
||||
-D gles2=enabled
|
||||
-D llvm=enabled
|
||||
-D microsoft-clc=enabled
|
||||
-D shared-llvm=enabled
|
||||
-D opencl-spirv=true
|
||||
GALLIUM_DRIVERS: "iris,nouveau,kmsro,r300,r600,freedreno,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,radeonsi,tegra,d3d12,crocus,i915,asahi"
|
||||
VULKAN_DRIVERS: intel,amd,freedreno,broadcom,virtio-experimental,swrast,panfrost,imagination-experimental,microsoft-experimental
|
||||
EXTRA_OPTION:
|
||||
EXTRA_OPTIONS:
|
||||
-D spirv-to-dxil=true
|
||||
-D osmesa=true
|
||||
-D imagination-srv=true
|
||||
-D tools=drm-shim,etnaviv,freedreno,glsl,intel,intel-ui,nir,nouveau,lima,panfrost,asahi,imagination
|
||||
-D vulkan-layers=device-select,overlay
|
||||
-D build-aco-tests=true
|
||||
-D intel-clc=enabled
|
||||
-D imagination-srv=true
|
||||
CC: clang
|
||||
CXX: clang++
|
||||
|
||||
debian-clang-release:
|
||||
extends: debian-clang
|
||||
variables:
|
||||
BUILDTYPE: "release"
|
||||
DRI_LOADERS: >
|
||||
-D glx=xlib
|
||||
-D platforms=x11,wayland
|
||||
|
||||
windows-vs2019:
|
||||
extends:
|
||||
- .build-windows
|
||||
@@ -515,50 +432,33 @@ windows-vs2019:
|
||||
- _build/meson-logs/*.txt
|
||||
- _install/
|
||||
|
||||
.debian-cl:
|
||||
debian-clover:
|
||||
extends: .meson-build
|
||||
variables:
|
||||
LLVM_VERSION: "13"
|
||||
UNWIND: "enabled"
|
||||
DRI_LOADERS: >
|
||||
-D glx=disabled
|
||||
-D egl=disabled
|
||||
-D gbm=disabled
|
||||
EXTRA_OPTION: >
|
||||
-D valgrind=false
|
||||
|
||||
debian-clover:
|
||||
extends: .debian-cl
|
||||
variables:
|
||||
GALLIUM_DRIVERS: "r600,radeonsi,swrast"
|
||||
GALLIUM_DRIVERS: "r600,radeonsi"
|
||||
GALLIUM_ST: >
|
||||
-D dri3=disabled
|
||||
-D gallium-vdpau=disabled
|
||||
-D gallium-xvmc=disabled
|
||||
-D gallium-omx=disabled
|
||||
-D gallium-va=disabled
|
||||
-D gallium-xa=disabled
|
||||
-D gallium-nine=false
|
||||
-D gallium-opencl=icd
|
||||
-D gallium-rusticl=false
|
||||
|
||||
debian-rusticl:
|
||||
extends: .debian-cl
|
||||
variables:
|
||||
GALLIUM_DRIVERS: "iris,swrast"
|
||||
GALLIUM_ST: >
|
||||
-D dri3=disabled
|
||||
-D gallium-vdpau=disabled
|
||||
-D gallium-omx=disabled
|
||||
-D gallium-va=disabled
|
||||
-D gallium-xa=disabled
|
||||
-D gallium-nine=false
|
||||
-D gallium-opencl=disabled
|
||||
-D gallium-rusticl=true
|
||||
EXTRA_OPTION: >
|
||||
-D valgrind=false
|
||||
script:
|
||||
- LLVM_VERSION=9 GALLIUM_DRIVERS=r600,swrast .gitlab-ci/meson/build.sh
|
||||
- .gitlab-ci/meson/build.sh
|
||||
|
||||
debian-vulkan:
|
||||
extends: .meson-build
|
||||
variables:
|
||||
LLVM_VERSION: "13"
|
||||
UNWIND: "disabled"
|
||||
DRI_LOADERS: >
|
||||
-D glx=disabled
|
||||
@@ -569,12 +469,12 @@ debian-vulkan:
|
||||
GALLIUM_ST: >
|
||||
-D dri3=enabled
|
||||
-D gallium-vdpau=disabled
|
||||
-D gallium-xvmc=disabled
|
||||
-D gallium-omx=disabled
|
||||
-D gallium-va=disabled
|
||||
-D gallium-xa=disabled
|
||||
-D gallium-nine=false
|
||||
-D gallium-opencl=disabled
|
||||
-D gallium-rusticl=false
|
||||
-D b_sanitize=undefined
|
||||
-D c_args=-fno-sanitize-recover=all
|
||||
-D cpp_args=-fno-sanitize-recover=all
|
||||
@@ -583,7 +483,7 @@ debian-vulkan:
|
||||
EXTRA_OPTION: >
|
||||
-D vulkan-layers=device-select,overlay
|
||||
-D build-aco-tests=true
|
||||
-D intel-clc=disabled
|
||||
-D intel-clc=enabled
|
||||
-D imagination-srv=true
|
||||
|
||||
debian-i386:
|
||||
@@ -594,7 +494,6 @@ debian-i386:
|
||||
CROSS: i386
|
||||
VULKAN_DRIVERS: intel,amd,swrast,virtio-experimental
|
||||
GALLIUM_DRIVERS: "iris,nouveau,r300,r600,radeonsi,swrast,virgl,zink,crocus"
|
||||
LLVM_VERSION: 13
|
||||
EXTRA_OPTION: >
|
||||
-D vulkan-layers=device-select,overlay
|
||||
|
||||
@@ -608,7 +507,8 @@ debian-s390x:
|
||||
variables:
|
||||
CROSS: s390x
|
||||
GALLIUM_DRIVERS: "swrast,zink"
|
||||
LLVM_VERSION: 13
|
||||
# The lp_test_blend test times out with LLVM 11
|
||||
LLVM_VERSION: 9
|
||||
VULKAN_DRIVERS: "swrast"
|
||||
|
||||
debian-ppc64el:
|
||||
@@ -645,15 +545,11 @@ debian-mingw32-x86_64:
|
||||
VULKAN_DRIVERS: "swrast,amd,microsoft-experimental"
|
||||
GALLIUM_ST: >
|
||||
-D gallium-opencl=icd
|
||||
-D gallium-rusticl=false
|
||||
-D opencl-spirv=true
|
||||
-D microsoft-clc=enabled
|
||||
-D static-libclc=all
|
||||
-D llvm=enabled
|
||||
-D gallium-va=true
|
||||
-D video-codecs=h264dec,h264enc,h265dec,h265enc,vc1dec
|
||||
EXTRA_OPTION: >
|
||||
-D min-windows-version=7
|
||||
-D spirv-to-dxil=true
|
||||
-D gles1=enabled
|
||||
-D gles2=enabled
|
||||
|
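The build jobs above configure Mesa almost entirely through environment variables (DRI_LOADERS, GALLIUM_ST, GALLIUM_DRIVERS, VULKAN_DRIVERS, EXTRA_OPTION, BUILDTYPE) that .gitlab-ci/meson/build.sh turns into a meson invocation. That script is not reproduced in this diff, so the sketch below is only a rough, assumed expansion of those variables, not the actual build.sh logic.

# Illustrative only: assumed expansion of the job variables into meson options.
meson setup _build \
    --buildtype "${BUILDTYPE:-debugoptimized}" \
    -D gallium-drivers="${GALLIUM_DRIVERS}" \
    -D vulkan-drivers="${VULKAN_DRIVERS}" \
    ${DRI_LOADERS} ${GALLIUM_ST} ${EXTRA_OPTION}
ninja -C _build install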
@@ -28,6 +28,8 @@ for var in \
|
||||
CROSVM_GALLIUM_DRIVER \
|
||||
CROSVM_GPU_ARGS \
|
||||
DEQP_BIN_DIR \
|
||||
DEQP_CASELIST_FILTER \
|
||||
DEQP_CASELIST_INV_FILTER \
|
||||
DEQP_CONFIG \
|
||||
DEQP_EXPECTED_RENDERER \
|
||||
DEQP_FRACTION \
|
||||
@@ -60,7 +62,6 @@ for var in \
|
||||
HWCI_FREQ_MAX \
|
||||
HWCI_KERNEL_MODULES \
|
||||
HWCI_KVM \
|
||||
HWCI_START_WESTON \
|
||||
HWCI_START_XORG \
|
||||
HWCI_TEST_SCRIPT \
|
||||
IR3_SHADER_DEBUG \
|
||||
@@ -110,7 +111,6 @@ for var in \
|
||||
SKQP_BACKENDS \
|
||||
TU_DEBUG \
|
||||
VIRGL_HOST_API \
|
||||
WAFFLE_PLATFORM \
|
||||
VK_CPU \
|
||||
VK_DRIVER \
|
||||
VK_ICD_FILENAMES \
|
||||
|
@@ -45,16 +45,6 @@ set -ex
echo -n $HWCI_KERNEL_MODULES | xargs -d, -n1 /usr/sbin/modprobe
}

# Set up ZRAM
HWCI_ZRAM_SIZE=2G
if zramctl --find --size $HWCI_ZRAM_SIZE -a zstd; then
    mkswap /dev/zram0
    swapon /dev/zram0
    echo "zram: $HWCI_ZRAM_SIZE activated"
else
    echo "zram: skipping, not supported"
fi

#
# Load the KVM module specific to the detected CPU virtualization extensions:
# - vmx for Intel VT
@@ -128,7 +118,6 @@ BACKGROUND_PIDS="$! $BACKGROUND_PIDS"
|
||||
if [ -n "$HWCI_START_XORG" ]; then
|
||||
echo "touch /xorg-started; sleep 100000" > /xorg-script
|
||||
env \
|
||||
VK_ICD_FILENAMES=/install/share/vulkan/icd.d/${VK_DRIVER}_icd.`uname -m`.json \
|
||||
xinit /bin/sh /xorg-script -- /usr/bin/Xorg -noreset -s 0 -dpms -logfile /Xorg.0.log &
|
||||
BACKGROUND_PIDS="$! $BACKGROUND_PIDS"
|
||||
|
||||
@@ -142,21 +131,6 @@ if [ -n "$HWCI_START_XORG" ]; then
|
||||
export DISPLAY=:0
|
||||
fi
|
||||
|
||||
if [ -n "$HWCI_START_WESTON" ]; then
|
||||
export XDG_RUNTIME_DIR=/run/user
|
||||
mkdir -p $XDG_RUNTIME_DIR
|
||||
|
||||
# Xwayland to be used when HWCI_START_XORG is not set
|
||||
export DISPLAY=:0
|
||||
mkdir -p /tmp/.X11-unix
|
||||
|
||||
env \
|
||||
VK_ICD_FILENAMES=/install/share/vulkan/icd.d/${VK_DRIVER}_icd.`uname -m`.json \
|
||||
weston -Bheadless-backend.so --use-gl -Swayland-0 --xwayland &
|
||||
export WAYLAND_DISPLAY=wayland-0
|
||||
sleep 1
|
||||
fi
|
||||
|
||||
RESULT=fail
|
||||
set +e
|
||||
sh -c "$HWCI_TEST_SCRIPT"
|
||||
@@ -175,8 +149,9 @@ cleanup
|
||||
|
||||
# upload artifacts
|
||||
if [ -n "$MINIO_RESULTS_UPLOAD" ]; then
|
||||
tar --zstd -cf results.tar.zst results/;
|
||||
ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" results.tar.zst https://"$MINIO_RESULTS_UPLOAD"/results.tar.zst;
|
||||
tar -czf results.tar.gz results/;
|
||||
ci-fairy minio login --token-file "${CI_JOB_JWT_FILE}";
|
||||
ci-fairy minio cp results.tar.gz minio://"$MINIO_RESULTS_UPLOAD"/results.tar.gz;
|
||||
fi
|
||||
|
||||
# We still need to echo the hwci: mesa message, as some scripts rely on it, such
|
||||
|
@@ -1,70 +0,0 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
|
||||
|
||||
EPHEMERAL="
|
||||
autoconf
|
||||
automake
|
||||
bzip2
|
||||
cmake
|
||||
git
|
||||
libtool
|
||||
libepoxy-dev
|
||||
libtbb-dev
|
||||
make
|
||||
openssl-dev
|
||||
unzip
|
||||
wget
|
||||
xz
|
||||
zstd-dev
|
||||
"
|
||||
|
||||
apk add \
|
||||
bison \
|
||||
ccache \
|
||||
clang-dev \
|
||||
flex \
|
||||
gcc \
|
||||
g++ \
|
||||
gettext \
|
||||
glslang \
|
||||
linux-headers \
|
||||
llvm15-dev \
|
||||
meson \
|
||||
expat-dev \
|
||||
elfutils-dev \
|
||||
libselinux-dev \
|
||||
libva-dev \
|
||||
libpciaccess-dev \
|
||||
zlib-dev \
|
||||
python3-dev \
|
||||
py3-mako \
|
||||
py3-ply \
|
||||
vulkan-headers \
|
||||
spirv-tools-dev \
|
||||
util-macros \
|
||||
$EPHEMERAL
|
||||
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
. .gitlab-ci/container/build-wayland.sh
|
||||
|
||||
pushd /usr/local
|
||||
git clone https://gitlab.freedesktop.org/mesa/shader-db.git --depth 1
|
||||
rm -rf shader-db/.git
|
||||
cd shader-db
|
||||
make
|
||||
popd
|
||||
|
||||
|
||||
############### Uninstall the build software
|
||||
|
||||
apk del $EPHEMERAL
|
||||
|
||||
. .gitlab-ci/container/container_post_build.sh
|
@@ -1,12 +1,6 @@
|
||||
CONFIG_LOCALVERSION_AUTO=y
|
||||
CONFIG_DEBUG_KERNEL=y
|
||||
|
||||
CONFIG_CRYPTO_ZSTD=y
|
||||
CONFIG_ZRAM_MEMORY_TRACKING=y
|
||||
CONFIG_ZRAM_WRITEBACK=y
|
||||
CONFIG_ZRAM=y
|
||||
CONFIG_ZSMALLOC_STAT=y
|
||||
|
||||
# abootimg with a 'dummy' rootfs fails with root=/dev/nfs
|
||||
CONFIG_BLK_DEV_INITRD=n
|
||||
|
||||
@@ -61,9 +55,3 @@ CONFIG_USB_NET_DRIVERS=y
|
||||
CONFIG_USB_RTL8152=y
|
||||
CONFIG_USB_NET_AX8817X=y
|
||||
CONFIG_USB_NET_SMSC95XX=y
|
||||
|
||||
# TK1
|
||||
CONFIG_ARM_TEGRA_DEVFREQ=y
|
||||
|
||||
# 32-bit build failure
|
||||
CONFIG_DRM_MSM=n
|
||||
|
@@ -1,12 +1,6 @@
|
||||
CONFIG_LOCALVERSION_AUTO=y
|
||||
CONFIG_DEBUG_KERNEL=y
|
||||
|
||||
CONFIG_CRYPTO_ZSTD=y
|
||||
CONFIG_ZRAM_MEMORY_TRACKING=y
|
||||
CONFIG_ZRAM_WRITEBACK=y
|
||||
CONFIG_ZRAM=y
|
||||
CONFIG_ZSMALLOC_STAT=y
|
||||
|
||||
# abootimg with a 'dummy' rootfs fails with root=/dev/nfs
|
||||
CONFIG_BLK_DEV_INITRD=n
|
||||
|
||||
@@ -22,7 +16,6 @@ CONFIG_DRM_LIMA=y
|
||||
CONFIG_DRM_PANEL_SIMPLE=y
|
||||
CONFIG_DRM_PANEL_EDP=y
|
||||
CONFIG_DRM_MSM=y
|
||||
CONFIG_DRM_ETNAVIV=y
|
||||
CONFIG_DRM_I2C_ADV7511=y
|
||||
CONFIG_PWM_CROS_EC=y
|
||||
CONFIG_BACKLIGHT_PWM=y
|
||||
@@ -66,7 +59,6 @@ CONFIG_POWER_RESET_QCOM_PON=y
|
||||
CONFIG_RTC_DRV_PM8XXX=y
|
||||
CONFIG_INTERCONNECT=y
|
||||
CONFIG_INTERCONNECT_QCOM=y
|
||||
CONFIG_INTERCONNECT_QCOM_MSM8996=y
|
||||
CONFIG_INTERCONNECT_QCOM_SDM845=y
|
||||
CONFIG_INTERCONNECT_QCOM_MSM8916=y
|
||||
CONFIG_INTERCONNECT_QCOM_OSM_L3=y
|
||||
@@ -74,9 +66,6 @@ CONFIG_INTERCONNECT_QCOM_SC7180=y
|
||||
CONFIG_CRYPTO_DEV_QCOM_RNG=y
|
||||
CONFIG_SC_DISPCC_7180=y
|
||||
CONFIG_SC_GPUCC_7180=y
|
||||
CONFIG_QCOM_SPMI_ADC5=y
|
||||
CONFIG_DRM_PARADE_PS8640=y
|
||||
CONFIG_PHY_QCOM_USB_HS=y
|
||||
|
||||
# db410c ethernet
|
||||
CONFIG_USB_RTL8152=y
|
||||
|
@@ -6,34 +6,32 @@ set -o xtrace
|
||||
# Fetch the arm-built rootfs image and unpack it in our x86 container (saves
|
||||
# network transfer, disk usage, and runtime on test jobs)
|
||||
|
||||
# shellcheck disable=SC2154 # arch is assigned in previous scripts
|
||||
if wget -q --method=HEAD "${ARTIFACTS_PREFIX}/${FDO_UPSTREAM_REPO}/${ARTIFACTS_SUFFIX}/${arch}/done"; then
|
||||
ARTIFACTS_URL="${ARTIFACTS_PREFIX}/${FDO_UPSTREAM_REPO}/${ARTIFACTS_SUFFIX}/${arch}"
|
||||
else
|
||||
ARTIFACTS_URL="${ARTIFACTS_PREFIX}/${CI_PROJECT_PATH}/${ARTIFACTS_SUFFIX}/${arch}"
|
||||
fi
|
||||
|
||||
wget "${ARTIFACTS_URL}"/lava-rootfs.tar.zst -O rootfs.tar.zst
|
||||
mkdir -p /rootfs-"$arch"
|
||||
tar -C /rootfs-"$arch" '--exclude=./dev/*' --zstd -xf rootfs.tar.zst
|
||||
rm rootfs.tar.zst
|
||||
wget ${ARTIFACTS_URL}/lava-rootfs.tgz -O rootfs.tgz
|
||||
mkdir -p /rootfs-$arch
|
||||
tar -C /rootfs-$arch '--exclude=./dev/*' -zxf rootfs.tgz
|
||||
rm rootfs.tgz
|
||||
|
||||
if [[ $arch == "arm64" ]]; then
|
||||
mkdir -p /baremetal-files
|
||||
pushd /baremetal-files
|
||||
|
||||
wget "${ARTIFACTS_URL}"/Image
|
||||
wget "${ARTIFACTS_URL}"/Image.gz
|
||||
wget "${ARTIFACTS_URL}"/cheza-kernel
|
||||
wget ${ARTIFACTS_URL}/Image
|
||||
wget ${ARTIFACTS_URL}/Image.gz
|
||||
wget ${ARTIFACTS_URL}/cheza-kernel
|
||||
|
||||
DEVICE_TREES=""
|
||||
DEVICE_TREES="$DEVICE_TREES apq8016-sbc.dtb"
|
||||
DEVICE_TREES="$DEVICE_TREES apq8096-db820c.dtb"
|
||||
DEVICE_TREES="$DEVICE_TREES tegra210-p3450-0000.dtb"
|
||||
DEVICE_TREES="$DEVICE_TREES imx8mq-nitrogen.dtb"
|
||||
|
||||
for DTB in $DEVICE_TREES; do
|
||||
wget "${ARTIFACTS_URL}/$DTB"
|
||||
wget ${ARTIFACTS_URL}/$DTB
|
||||
done
|
||||
|
||||
popd
|
||||
@@ -41,14 +39,12 @@ elif [[ $arch == "armhf" ]]; then
|
||||
mkdir -p /baremetal-files
|
||||
pushd /baremetal-files
|
||||
|
||||
wget "${ARTIFACTS_URL}"/zImage
|
||||
wget ${ARTIFACTS_URL}/zImage
|
||||
|
||||
DEVICE_TREES=""
|
||||
DEVICE_TREES="$DEVICE_TREES imx6q-cubox-i.dtb"
|
||||
DEVICE_TREES="$DEVICE_TREES tegra124-jetson-tk1.dtb"
|
||||
DEVICE_TREES="imx6q-cubox-i.dtb"
|
||||
|
||||
for DTB in $DEVICE_TREES; do
|
||||
wget "${ARTIFACTS_URL}/$DTB"
|
||||
wget ${ARTIFACTS_URL}/$DTB
|
||||
done
|
||||
|
||||
popd
|
||||
|
@@ -1,5 +1,4 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -ex
|
||||
|
||||
|
@@ -1,23 +1,24 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -ex
|
||||
|
||||
git config --global user.email "mesa@example.com"
|
||||
git config --global user.name "Mesa CI"
|
||||
SCRIPT_DIR="$(pwd)"
|
||||
|
||||
CROSVM_VERSION=504899212d626ecf42b1c459e5592891dde5bf91
|
||||
git clone --single-branch -b main --no-checkout https://chromium.googlesource.com/crosvm/crosvm /platform/crosvm
|
||||
CROSVM_VERSION=c7cd0e0114c8363b884ba56d8e12adee718dcc93
|
||||
git clone --single-branch -b main --no-checkout https://chromium.googlesource.com/chromiumos/platform/crosvm /platform/crosvm
|
||||
pushd /platform/crosvm
|
||||
git checkout "$CROSVM_VERSION"
|
||||
git submodule update --init
|
||||
# Apply all crosvm patches for Mesa CI
|
||||
cat "$SCRIPT_DIR"/.gitlab-ci/container/build-crosvm_*.patch |
|
||||
patch -p1
|
||||
|
||||
VIRGLRENDERER_VERSION=3f2685355f71201f22b98c19aa778b43732c8435
|
||||
VIRGLRENDERER_VERSION=dd301caf7e05ec9c09634fb7872067542aad89b7
|
||||
rm -rf third_party/virglrenderer
|
||||
git clone --single-branch -b master --no-checkout https://gitlab.freedesktop.org/virgl/virglrenderer.git third_party/virglrenderer
|
||||
pushd third_party/virglrenderer
|
||||
git checkout "$VIRGLRENDERER_VERSION"
|
||||
meson build/ -Drender-server=true -Drender-server-worker=process -Dvenus-experimental=true $EXTRA_MESON_ARGS
|
||||
meson build/ $EXTRA_MESON_ARGS
|
||||
ninja -C build install
|
||||
popd
|
||||
|
||||
@@ -25,7 +26,6 @@ RUSTFLAGS='-L native=/usr/local/lib' cargo install \
|
||||
bindgen \
|
||||
-j ${FDO_CI_CONCURRENT:-4} \
|
||||
--root /usr/local \
|
||||
--version 0.60.1 \
|
||||
$EXTRA_CARGO_ARGS
|
||||
|
||||
RUSTFLAGS='-L native=/usr/local/lib' cargo install \
|
||||
|
43
.gitlab-ci/container/build-crosvm_no-syslog.patch
Normal file
@@ -0,0 +1,43 @@
|
||||
From 3c57ec558bccc67fd53363c23deea20646be5c47 Mon Sep 17 00:00:00 2001
|
||||
From: Tomeu Vizoso <tomeu.vizoso@collabora.com>
|
||||
Date: Wed, 17 Nov 2021 10:18:04 +0100
|
||||
Subject: [PATCH] Hack syslog out
|
||||
|
||||
It's causing stability problems when running several Crosvm instances in
|
||||
parallel.
|
||||
|
||||
Signed-off-by: Tomeu Vizoso <tomeu.vizoso@collabora.com>
|
||||
---
|
||||
base/src/unix/linux/syslog.rs | 2 +-
|
||||
common/sys_util/src/linux/syslog.rs | 2 +-
|
||||
2 files changed, 2 insertions(+), 2 deletions(-)
|
||||
|
||||
diff --git a/base/src/unix/linux/syslog.rs b/base/src/unix/linux/syslog.rs
|
||||
index 05972a3a..f0db3781 100644
|
||||
--- a/base/src/unix/linux/syslog.rs
|
||||
+++ b/base/src/unix/linux/syslog.rs
|
||||
@@ -35,7 +35,7 @@ pub struct PlatformSyslog {
|
||||
impl Syslog for PlatformSyslog {
|
||||
fn new() -> Result<Self, Error> {
|
||||
Ok(Self {
|
||||
- socket: Some(openlog_and_get_socket()?),
|
||||
+ socket: None,
|
||||
})
|
||||
}
|
||||
|
||||
diff --git a/common/sys_util/src/linux/syslog.rs b/common/sys_util/src/linux/syslog.rs
|
||||
index 05972a3a..f0db3781 100644
|
||||
--- a/common/sys_util/src/linux/syslog.rs
|
||||
+++ b/common/sys_util/src/linux/syslog.rs
|
||||
@@ -35,7 +35,7 @@ pub struct PlatformSyslog {
|
||||
impl Syslog for PlatformSyslog {
|
||||
fn new() -> Result<Self, Error> {
|
||||
Ok(Self {
|
||||
- socket: Some(openlog_and_get_socket()?),
|
||||
+ socket: None,
|
||||
})
|
||||
}
|
||||
|
||||
--
|
||||
2.25.1
|
||||
|
@@ -1,5 +1,4 @@
|
||||
#!/bin/sh
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -ex
|
||||
|
||||
@@ -16,16 +15,10 @@ if [ -n "${DEQP_RUNNER_GIT_TAG}${DEQP_RUNNER_GIT_REV}" ]; then
|
||||
DEQP_RUNNER_CARGO_ARGS="${DEQP_RUNNER_CARGO_ARGS} ${EXTRA_CARGO_ARGS}"
|
||||
else
|
||||
# Install from package registry
|
||||
DEQP_RUNNER_CARGO_ARGS="--version 0.16.0 ${EXTRA_CARGO_ARGS} -- deqp-runner"
|
||||
DEQP_RUNNER_CARGO_ARGS="--version 0.13.1 ${EXTRA_CARGO_ARGS} -- deqp-runner"
|
||||
fi
|
||||
|
||||
cargo install --locked \
|
||||
-j ${FDO_CI_CONCURRENT:-4} \
|
||||
--root /usr/local \
|
||||
${DEQP_RUNNER_CARGO_ARGS}
|
||||
|
||||
# remove unused test runners to shrink images for the Mesa CI build (not kernel,
|
||||
# which chooses its own deqp branch)
|
||||
if [ -z "${DEQP_RUNNER_GIT_TAG}${DEQP_RUNNER_GIT_REV}" ]; then
|
||||
rm -f /usr/local/bin/igt-runner
|
||||
fi
|
||||
|
@@ -1,5 +1,4 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -ex
|
||||
|
||||
@@ -12,13 +11,6 @@ git clone \
|
||||
/VK-GL-CTS
|
||||
pushd /VK-GL-CTS
|
||||
|
||||
# Apply a patch to update zlib link to an available version.
|
||||
# vulkan-cts-1.3.3.0 uses zlib 1.2.12 which was removed from zlib server due to
|
||||
# a CVE. See https://zlib.net/
|
||||
# FIXME: Remove this patch when uprev to 1.3.4.0+
|
||||
wget -O- https://github.com/KhronosGroup/VK-GL-CTS/commit/6bb2e7d64261bedb503947b1b251b1eeeb49be73.patch |
|
||||
git am -
|
||||
|
||||
# --insecure is due to SSL cert failures hitting sourceforge for zlib and
|
||||
# libpng (sigh). The archives get their checksums checked anyway, and git
|
||||
# always goes through ssh or https.
|
||||
@@ -40,12 +32,6 @@ cmake -S /VK-GL-CTS -B . -G Ninja \
|
||||
ninja modules/egl/deqp-egl
|
||||
cp /deqp/modules/egl/deqp-egl /deqp/modules/egl/deqp-egl-x11
|
||||
|
||||
cmake -S /VK-GL-CTS -B . -G Ninja \
|
||||
-DDEQP_TARGET=wayland \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
$EXTRA_CMAKE_ARGS
|
||||
ninja modules/egl/deqp-egl
|
||||
cp /deqp/modules/egl/deqp-egl /deqp/modules/egl/deqp-egl-wayland
|
||||
|
||||
cmake -S /VK-GL-CTS -B . -G Ninja \
|
||||
-DDEQP_TARGET=${DEQP_TARGET:-x11_glx} \
|
||||
@@ -74,9 +60,6 @@ cp \
|
||||
cp \
|
||||
/deqp/external/openglcts/modules/gl_cts/data/mustpass/gl/khronos_mustpass/4.6.1.x/*-master.txt \
|
||||
/deqp/mustpass/.
|
||||
cp \
|
||||
/deqp/external/openglcts/modules/gl_cts/data/mustpass/gl/khronos_mustpass_single/4.6.1.x/*-single.txt \
|
||||
/deqp/mustpass/.
|
||||
|
||||
# Save *some* executor utils, but otherwise strip things down
# to reduce deqp build size:
|
||||
@@ -94,11 +77,10 @@ rm -rf /deqp/external/openglcts/modules/cts-runner
|
||||
rm -rf /deqp/modules/internal
|
||||
rm -rf /deqp/execserver
|
||||
rm -rf /deqp/framework
|
||||
# shellcheck disable=SC2038,SC2185 # TODO: rewrite find
|
||||
find -iname '*cmake*' -o -name '*ninja*' -o -name '*.o' -o -name '*.a' | xargs rm -rf
|
||||
${STRIP_CMD:-strip} external/vulkancts/modules/vulkan/deqp-vk
|
||||
${STRIP_CMD:-strip} external/openglcts/modules/glcts
|
||||
${STRIP_CMD:-strip} modules/*/deqp-*
|
||||
du -sh ./*
|
||||
du -sh *
|
||||
rm -rf /VK-GL-CTS
|
||||
popd
|
||||
|
@@ -1,5 +1,4 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -ex
|
||||
|
||||
@@ -12,15 +11,12 @@ pushd kernel
|
||||
# debian (they'll get blown away by the rm of the kernel dir at the end).
|
||||
mkdir -p ld-links
|
||||
for i in /usr/bin/*-ld /usr/bin/ld; do
|
||||
i=$(basename $i)
|
||||
i=`basename $i`
|
||||
ln -sf /usr/bin/$i.bfd ld-links/$i
|
||||
done
|
||||
export PATH=`pwd`/ld-links:$PATH
|
||||
|
||||
NEWPATH=$(pwd)/ld-links
|
||||
export PATH=$NEWPATH:$PATH
|
||||
|
||||
KERNEL_FILENAME=$(basename $KERNEL_URL)
|
||||
export LOCALVERSION="$KERNEL_FILENAME"
|
||||
export LOCALVERSION="`basename $KERNEL_URL`"
|
||||
./scripts/kconfig/merge_config.sh ${DEFCONFIG} ../.gitlab-ci/container/${KERNEL_ARCH}.config
|
||||
make ${KERNEL_IMAGE_NAME}
|
||||
for image in ${KERNEL_IMAGE_NAME}; do
|
||||
@@ -32,8 +28,10 @@ if [[ -n ${DEVICE_TREES} ]]; then
|
||||
cp ${DEVICE_TREES} /lava-files/.
|
||||
fi
|
||||
|
||||
make modules
|
||||
INSTALL_MOD_PATH=/lava-files/rootfs-${DEBIAN_ARCH}/ make modules_install
|
||||
if [[ ${DEBIAN_ARCH} = "amd64" || ${DEBIAN_ARCH} = "arm64" ]]; then
|
||||
make modules
|
||||
INSTALL_MOD_PATH=/lava-files/rootfs-${DEBIAN_ARCH}/ make modules_install
|
||||
fi
|
||||
|
||||
if [[ ${DEBIAN_ARCH} = "arm64" ]]; then
|
||||
make Image.lzma
|
||||
|
@@ -26,5 +26,5 @@ mkdir -p /usr/lib/clc
|
||||
ln -s /usr/share/clc/spirv64-mesa3d-.spv /usr/lib/clc/
|
||||
ln -s /usr/share/clc/spirv-mesa3d-.spv /usr/lib/clc/
|
||||
|
||||
du -sh ./*
|
||||
du -sh *
|
||||
rm -rf /libclc /llvm-project
|
||||
|
@@ -1,14 +1,14 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -ex
|
||||
|
||||
export LIBDRM_VERSION=libdrm-2.4.110
|
||||
|
||||
wget https://dri.freedesktop.org/libdrm/"$LIBDRM_VERSION".tar.xz
|
||||
tar -xvf "$LIBDRM_VERSION".tar.xz && rm "$LIBDRM_VERSION".tar.xz
|
||||
cd "$LIBDRM_VERSION"
|
||||
wget https://dri.freedesktop.org/libdrm/$LIBDRM_VERSION.tar.xz
|
||||
tar -xvf $LIBDRM_VERSION.tar.xz && rm $LIBDRM_VERSION.tar.xz
|
||||
cd $LIBDRM_VERSION
|
||||
meson build -D vc4=false -D freedreno=false -D etnaviv=false $EXTRA_MESON_ARGS
|
||||
ninja -C build install
|
||||
cd ..
|
||||
rm -rf "$LIBDRM_VERSION"
|
||||
rm -rf $LIBDRM_VERSION
|
||||
|
||||
|
@@ -1,19 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
wget https://github.com/KhronosGroup/SPIRV-LLVM-Translator/archive/refs/tags/v13.0.0.tar.gz
|
||||
tar -xvf v13.0.0.tar.gz && rm v13.0.0.tar.gz
|
||||
|
||||
mkdir SPIRV-LLVM-Translator-13.0.0/build
|
||||
pushd SPIRV-LLVM-Translator-13.0.0/build
|
||||
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr
|
||||
ninja
|
||||
ninja install
|
||||
# For some reason llvm-spirv is not installed by default
|
||||
ninja llvm-spirv
|
||||
cp tools/llvm-spirv/llvm-spirv /usr/bin/
|
||||
popd
|
||||
|
||||
du -sh SPIRV-LLVM-Translator-13.0.0
|
||||
rm -rf SPIRV-LLVM-Translator-13.0.0
|
@@ -1,13 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -ex
|
||||
|
||||
MOLD_VERSION="1.9.0"
|
||||
|
||||
git clone -b v"$MOLD_VERSION" --single-branch --depth 1 https://github.com/rui314/mold.git
|
||||
pushd mold
|
||||
cmake -DCMAKE_BUILD_TYPE=Release -D BUILD_TESTING=OFF -D MOLD_LTO=ON
|
||||
cmake --build . --parallel
|
||||
cmake --install .
|
||||
popd
|
||||
rm -rf mold
|
@@ -1,19 +1,16 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -ex
|
||||
|
||||
git clone https://gitlab.freedesktop.org/mesa/piglit.git --single-branch --no-checkout /piglit
|
||||
pushd /piglit
|
||||
git checkout 1cd716180cfb6ef0c1fc54702460ef49e5115791
|
||||
git checkout b2c9d8f56b45d79f804f4cb5ac62520f0edd8988
|
||||
patch -p1 <$OLDPWD/.gitlab-ci/piglit/disable-vs_in.diff
|
||||
cmake -S . -B . -G Ninja -DCMAKE_BUILD_TYPE=Release $PIGLIT_OPTS $EXTRA_CMAKE_ARGS
|
||||
ninja $PIGLIT_BUILD_TARGETS
|
||||
# shellcheck disable=SC2038,SC2185 # TODO: rewrite find
|
||||
find -name .git -o -name '*ninja*' -o -iname '*cmake*' -o -name '*.[chao]' | xargs rm -rf
|
||||
rm -rf target_api
|
||||
if [ "$PIGLIT_BUILD_TARGETS" = "piglit_replayer" ]; then
|
||||
# shellcheck disable=SC2038,SC2185 # TODO: rewrite find
|
||||
if [ "x$PIGLIT_BUILD_TARGETS" = "xpiglit_replayer" ]; then
|
||||
find ! -regex "^\.$" \
|
||||
! -regex "^\.\/piglit.*" \
|
||||
! -regex "^\.\/framework.*" \
|
||||
|
@@ -8,24 +8,17 @@ set -ex
|
||||
# cargo (and rustup) wants to store stuff in $HOME/.cargo, and binaries in
|
||||
# $HOME/.cargo/bin. Make bin a link to a public bin directory so the commands
|
||||
# are just available to all build jobs.
|
||||
mkdir -p "$HOME"/.cargo
|
||||
ln -s /usr/local/bin "$HOME"/.cargo/bin
|
||||
|
||||
# Rusticl requires at least Rust 1.59.0
#
# Also, pick a specific snapshot from rustup so the compiler doesn't drift on
# us.
|
||||
RUST_VERSION=1.59.0-2022-02-24
|
||||
mkdir -p $HOME/.cargo
|
||||
ln -s /usr/local/bin $HOME/.cargo/bin
|
||||
|
||||
# For rust in Mesa, we use rustup to install. This lets us pick an arbitrary
|
||||
# version of the compiler, rather than whatever the container's Debian comes
|
||||
# with.
|
||||
wget https://sh.rustup.rs -O - | sh -s -- \
|
||||
--default-toolchain $RUST_VERSION \
|
||||
--profile minimal \
|
||||
-y
|
||||
|
||||
rustup component add rustfmt
|
||||
#
|
||||
# Pick the rust compiler (1.48) available in Debian stable, and pick a specific
|
||||
# snapshot from rustup so the compiler doesn't drift on us.
|
||||
wget https://sh.rustup.rs -O - | \
|
||||
sh -s -- -y --default-toolchain 1.49.0-2020-12-31
|
||||
|
||||
# Set up a config script for cross compiling -- cargo needs your system cc for
|
||||
# linking in cross builds, but doesn't know what you want to use for system cc.
|
||||
|
@@ -55,9 +55,9 @@ BASE_ARGS_GN_FILE="${SCRIPT_DIR}/build-skqp_base.gn"
|
||||
SKQP_ARCH=${SKQP_ARCH:-x64}
|
||||
SKIA_DIR=${SKIA_DIR:-$(mktemp -d)}
|
||||
SKQP_OUT_DIR=${SKIA_DIR}/out/${SKQP_ARCH}
|
||||
SKQP_INSTALL_DIR=${SKQP_INSTALL_DIR:-/skqp}
|
||||
SKQP_INSTALL_DIR=/skqp
|
||||
SKQP_ASSETS_DIR="${SKQP_INSTALL_DIR}/assets"
|
||||
SKQP_BINARIES=(skqp list_gpu_unit_tests list_gms)
|
||||
SKQP_BINARIES=(skqp)
|
||||
|
||||
download_skia_source
|
||||
|
||||
|
@@ -1,18 +0,0 @@
|
||||
Nima-Cpp is not available anymore inside googlesource, revert to github one
|
||||
Simulates `git revert 49233d2521054037ded7d760427c4a0dc1e11356`
|
||||
|
||||
diff --git a/DEPS b/DEPS
|
||||
index 7e0b941..c88b064 100644
|
||||
--- a/DEPS
|
||||
+++ b/DEPS
|
||||
@@ -33,8 +33,8 @@ deps = {
|
||||
#"third_party/externals/v8" : "https://chromium.googlesource.com/v8/v8.git@5f1ae66d5634e43563b2d25ea652dfb94c31a3b4",
|
||||
"third_party/externals/wuffs" : "https://skia.googlesource.com/external/github.com/google/wuffs.git@fda3c4c9863d9f9fcec58ae66508c4621fc71ea5",
|
||||
"third_party/externals/zlib" : "https://chromium.googlesource.com/chromium/src/third_party/zlib@47af7c547f8551bd25424e56354a2ae1e9062859",
|
||||
- "third_party/externals/Nima-Cpp" : "https://skia.googlesource.com/external/github.com/2d-inc/Nima-Cpp.git@4bd02269d7d1d2e650950411325eafa15defb084",
|
||||
- "third_party/externals/Nima-Math-Cpp" : "https://skia.googlesource.com/external/github.com/2d-inc/Nima-Math-Cpp.git@e0c12772093fa8860f55358274515b86885f0108",
|
||||
+ "third_party/externals/Nima-Cpp" : "https://github.com/2d-inc/Nima-Cpp.git@4bd02269d7d1d2e650950411325eafa15defb084",
|
||||
+ "third_party/externals/Nima-Math-Cpp" : "https://github.com/2d-inc/Nima-Math-Cpp.git@e0c12772093fa8860f55358274515b86885f0108",
|
||||
|
||||
"../src": {
|
||||
"url": "https://chromium.googlesource.com/chromium/src.git@ccf3465732e5d5363f0e44a8fac54550f62dd1d0",
|
@@ -1,5 +1,4 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -ex
|
||||
|
||||
|
@@ -2,7 +2,7 @@
|
||||
|
||||
set -ex
|
||||
|
||||
VKD3D_PROTON_COMMIT="804751ee1cb108a2ec59e182ce0c052bafef268e"
|
||||
VKD3D_PROTON_COMMIT="5b73139f182d86cd58a757e4b5f0d4cfad96d319"
|
||||
|
||||
VKD3D_PROTON_DST_DIR="/vkd3d-proton-tests"
|
||||
VKD3D_PROTON_SRC_DIR="/vkd3d-proton-src"
|
||||
|
@@ -1,5 +1,4 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -ex
|
||||
|
||||
@@ -9,7 +8,7 @@ export WAYLAND_PROTOCOLS_VERSION="1.24"
|
||||
git clone https://gitlab.freedesktop.org/wayland/wayland
|
||||
cd wayland
|
||||
git checkout "$LIBWAYLAND_VERSION"
|
||||
meson -Ddocumentation=false -Ddtd_validation=false -Dlibraries=true _build $EXTRA_MESON_ARGS
|
||||
meson -Ddocumentation=false -Ddtd_validation=false -Dlibraries=true _build
|
||||
ninja -C _build install
|
||||
cd ..
|
||||
rm -rf wayland
|
||||
@@ -17,7 +16,7 @@ rm -rf wayland
|
||||
git clone https://gitlab.freedesktop.org/wayland/wayland-protocols
|
||||
cd wayland-protocols
|
||||
git checkout "$WAYLAND_PROTOCOLS_VERSION"
|
||||
meson _build $EXTRA_MESON_ARGS
|
||||
meson _build
|
||||
ninja -C _build install
|
||||
cd ..
|
||||
rm -rf wayland-protocols
|
||||
|
@@ -2,8 +2,6 @@
|
||||
|
||||
if test -f /etc/debian_version; then
|
||||
CCACHE_PATH=/usr/lib/ccache
|
||||
elif test -f /etc/alpine-release; then
|
||||
CCACHE_PATH=/usr/lib/ccache/bin
|
||||
else
|
||||
CCACHE_PATH=/usr/lib64/ccache
|
||||
fi
|
||||
@@ -19,8 +17,7 @@ export PATH=$CCACHE_PATH:$PATH
|
||||
export CC="${CCACHE_PATH}/gcc"
|
||||
export CXX="${CCACHE_PATH}/g++"
|
||||
|
||||
# When not using the mold linker (e.g. unsupported architecture), force
|
||||
# linkers to gold, since it's so much faster for building. We can't use
|
||||
# Force linkers to gold, since it's so much faster for building. We can't use
|
||||
# lld because we're on old debian and it's buggy. ming fails meson builds
|
||||
# with it with "meson.build:21:0: ERROR: Unable to determine dynamic linker"
|
||||
find /usr/bin -name \*-ld -o -name ld | \
|
||||
@@ -30,11 +27,8 @@ find /usr/bin -name \*-ld -o -name ld | \
|
||||
ccache --show-stats
|
||||
|
||||
# Make a wrapper script for ninja to always include the -j flags
|
||||
{
|
||||
echo '#!/bin/sh -x'
|
||||
# shellcheck disable=SC2016
|
||||
echo '/usr/bin/ninja -j${FDO_CI_CONCURRENT:-4} "$@"'
|
||||
} > /usr/local/bin/ninja
|
||||
echo '#!/bin/sh -x' > /usr/local/bin/ninja
|
||||
echo '/usr/bin/ninja -j${FDO_CI_CONCURRENT:-4} "$@"' >> /usr/local/bin/ninja
|
||||
chmod +x /usr/local/bin/ninja
|
||||
|
||||
# Set MAKEFLAGS so that all make invocations in container builds include the
|
||||
@@ -45,6 +39,4 @@ export MAKEFLAGS="-j${FDO_CI_CONCURRENT:-4}"
|
||||
echo -e "retry_connrefused = on\n" \
|
||||
"read_timeout = 300\n" \
|
||||
"tries = 4\n" \
|
||||
"retry_on_host_error = on\n" \
|
||||
"retry_on_http_error = 429,500,502,503,504\n" \
|
||||
"wait_retry = 32" >> /etc/wgetrc
|
||||
|
@@ -13,7 +13,7 @@ arch2=${5:-$2}
|
||||
# and allowing it in code generation means we get unwind symbols that break
|
||||
# the libEGL and driver symbol tests.
|
||||
|
||||
cat > "$cross_file" <<EOF
|
||||
cat >$cross_file <<EOF
|
||||
[binaries]
|
||||
ar = '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/$arch-ar'
|
||||
c = ['ccache', '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch2}29-clang', '-fno-exceptions', '-fno-unwind-tables', '-fno-asynchronous-unwind-tables']
|
||||
|
@@ -1,5 +1,4 @@
|
||||
#!/bin/sh
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
# Makes a .pc file in the Android NDK for meson to find its libraries.
|
||||
|
||||
|
@@ -2,18 +2,18 @@
|
||||
|
||||
arch=$1
|
||||
cross_file="/cross_file-$arch.txt"
|
||||
meson env2mfile --cross --debarch "$arch" -o "$cross_file"
|
||||
|
||||
/usr/share/meson/debcrossgen --arch $arch -o "$cross_file"
|
||||
# Explicitly set ccache path for cross compilers
|
||||
sed -i "s|/usr/bin/\([^-]*\)-linux-gnu\([^-]*\)-g|/usr/lib/ccache/\\1-linux-gnu\\2-g|g" "$cross_file"
|
||||
|
||||
if [ "$arch" = "i386" ]; then
|
||||
# Work around a bug in debcrossgen that should be fixed in the next release
|
||||
sed -i "s|cpu_family = 'i686'|cpu_family = 'x86'|g" "$cross_file"
|
||||
fi
|
||||
# Rely on qemu-user being configured in binfmt_misc on the host
|
||||
# shellcheck disable=SC1003 # how this sed doesn't seems to work for me locally
|
||||
sed -i -e '/\[properties\]/a\' -e "needs_exe_wrapper = False" "$cross_file"
|
||||
|
||||
# Add a line for rustc, which meson env2mfile is missing.
|
||||
cc=$(sed -n "s|^c\s*=\s*\[?'\(.*\)'\]?|\1|p" < "$cross_file")
|
||||
|
||||
# Add a line for rustc, which debcrossgen is missing.
|
||||
cc=`sed -n 's|c = .\(.*\).|\1|p' < $cross_file`
|
||||
if [[ "$arch" = "arm64" ]]; then
|
||||
rust_target=aarch64-unknown-linux-gnu
|
||||
elif [[ "$arch" = "armhf" ]]; then
|
||||
@@ -27,8 +27,6 @@ elif [[ "$arch" = "s390x" ]]; then
|
||||
else
|
||||
echo "Needs rustc target mapping"
|
||||
fi
|
||||
|
||||
# shellcheck disable=SC1003 # how this sed doesn't seems to work for me locally
|
||||
sed -i -e '/\[binaries\]/a\' -e "rust = ['rustc', '--target=$rust_target', '-C', 'linker=$cc']" "$cross_file"
|
||||
|
||||
# Set up cmake cross compile toolchain file for dEQP builds
|
||||
@@ -36,18 +34,18 @@ toolchain_file="/toolchain-$arch.cmake"
|
||||
if [[ "$arch" = "arm64" ]]; then
|
||||
GCC_ARCH="aarch64-linux-gnu"
|
||||
DE_CPU="DE_CPU_ARM_64"
|
||||
CMAKE_ARCH=arm
|
||||
elif [[ "$arch" = "armhf" ]]; then
|
||||
GCC_ARCH="arm-linux-gnueabihf"
|
||||
DE_CPU="DE_CPU_ARM"
|
||||
CMAKE_ARCH=arm
|
||||
fi
|
||||
|
||||
if [[ -n "$GCC_ARCH" ]]; then
|
||||
{
|
||||
echo "set(CMAKE_SYSTEM_NAME Linux)";
|
||||
echo "set(CMAKE_SYSTEM_PROCESSOR arm)";
|
||||
echo "set(CMAKE_C_COMPILER /usr/lib/ccache/$GCC_ARCH-gcc)";
|
||||
echo "set(CMAKE_CXX_COMPILER /usr/lib/ccache/$GCC_ARCH-g++)";
|
||||
echo "set(ENV{PKG_CONFIG} \"/usr/bin/$GCC_ARCH-pkg-config\")";
|
||||
echo "set(DE_CPU $DE_CPU)";
|
||||
} > "$toolchain_file"
|
||||
echo "set(CMAKE_SYSTEM_NAME Linux)" > "$toolchain_file"
|
||||
echo "set(CMAKE_SYSTEM_PROCESSOR arm)" >> "$toolchain_file"
|
||||
echo "set(CMAKE_C_COMPILER /usr/lib/ccache/$GCC_ARCH-gcc)" >> "$toolchain_file"
|
||||
echo "set(CMAKE_CXX_COMPILER /usr/lib/ccache/$GCC_ARCH-g++)" >> "$toolchain_file"
|
||||
echo "set(ENV{PKG_CONFIG} \"/usr/bin/$GCC_ARCH-pkg-config\")" >> "$toolchain_file"
|
||||
echo "set(DE_CPU $DE_CPU)" >> "$toolchain_file"
|
||||
fi
|
||||
|
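The fragment above writes /toolchain-$arch.cmake so that cross dEQP builds pick up the right compilers and pkg-config. The command below is a hedged example of consuming such a toolchain file; it is illustrative only and not taken from this diff.

# Illustrative only: pass the generated toolchain file to a cross CMake build.
cmake -S /VK-GL-CTS -B /deqp-build -G Ninja \
    -DCMAKE_TOOLCHAIN_FILE=/toolchain-arm64.cmake \
    -DCMAKE_BUILD_TYPE=Release
ninja -C /deqp-build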
@@ -1,7 +1,4 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2140 # ugly array, remove later
|
||||
# shellcheck disable=SC2288 # ugly array, remove later
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -ex
|
||||
|
||||
@@ -18,10 +15,6 @@ elif [ $DEBIAN_ARCH = amd64 ]; then
|
||||
apt-get -y install --no-install-recommends wget gnupg2 software-properties-common
|
||||
apt-key add /llvm-snapshot.gpg.key
|
||||
add-apt-repository "deb https://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-13 main"
|
||||
# Debian bullseye has older wine 5.0, we want >= 7.0 for traces.
|
||||
apt-key add /winehq.gpg.key
|
||||
apt-add-repository https://dl.winehq.org/wine-builds/debian/
|
||||
|
||||
|
||||
ARCH_PACKAGES="firmware-amd-graphics
|
||||
inetutils-syslogd
|
||||
@@ -41,10 +34,6 @@ elif [ $DEBIAN_ARCH = amd64 ]; then
|
||||
spirv-tools
|
||||
sysvinit-core
|
||||
"
|
||||
|
||||
elif [ $DEBIAN_ARCH = armhf ]; then
|
||||
ARCH_PACKAGES="firmware-misc-nonfree
|
||||
"
|
||||
fi
|
||||
|
||||
INSTALL_CI_FAIRY_PACKAGES="git
|
||||
@@ -63,7 +52,6 @@ apt-get -y install --no-install-recommends \
|
||||
ca-certificates \
|
||||
firmware-realtek \
|
||||
initramfs-tools \
|
||||
jq \
|
||||
libasan6 \
|
||||
libexpat1 \
|
||||
libpng16-16 \
|
||||
@@ -102,33 +90,13 @@ apt-get -y install --no-install-recommends \
|
||||
sntp \
|
||||
strace \
|
||||
waffle-utils \
|
||||
weston \
|
||||
wget \
|
||||
xinit \
|
||||
xserver-xorg-core \
|
||||
xwayland \
|
||||
zstd
|
||||
|
||||
|
||||
if [ "$DEBIAN_ARCH" = "amd64" ]; then
|
||||
# workaround wine needing 32-bit
|
||||
# https://bugs.winehq.org/show_bug.cgi?id=53393
|
||||
apt-get install -y --no-remove wine-stable-amd64 # a requirement for wine-stable
|
||||
WINE_PKG="wine-stable"
|
||||
WINE_PKG_DROP="wine-stable-i386"
|
||||
apt download "${WINE_PKG}"
|
||||
dpkg --ignore-depends="${WINE_PKG_DROP}" -i "${WINE_PKG}"*.deb
|
||||
rm "${WINE_PKG}"*.deb
|
||||
sed -i "/${WINE_PKG_DROP}/d" /var/lib/dpkg/status
|
||||
apt-get install -y --no-remove winehq-stable # symlinks-only, depends on wine-stable
|
||||
fi
|
||||
xserver-xorg-core
|
||||
|
||||
# Needed for ci-fairy, this revision is able to upload files to
|
||||
# MinIO and doesn't depend on git
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2
|
||||
|
||||
# Needed for manipulation with traces yaml files.
|
||||
pip3 install yq
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@34f4ade99434043f88e164933f570301fd18b125
|
||||
|
||||
apt-get purge -y \
|
||||
$INSTALL_CI_FAIRY_PACKAGES
|
||||
@@ -257,7 +225,7 @@ rm -rf etc/dpkg
|
||||
# Drop directories not part of ostree
|
||||
# Note that /var needs to exist as ostree bind mounts the deployment /var over
|
||||
# it
|
||||
rm -rf var/* srv share
|
||||
rm -rf var/* opt srv share
|
||||
|
||||
# ca-certificates are in /etc drop the source
|
||||
rm -rf usr/share/ca-certificates
|
||||
|
@@ -1,5 +1,4 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
@@ -18,7 +17,6 @@ apt-get install -y --no-remove \
|
||||
crossbuild-essential-$arch \
|
||||
libelf-dev:$arch \
|
||||
libexpat1-dev:$arch \
|
||||
libffi-dev:$arch \
|
||||
libpciaccess-dev:$arch \
|
||||
libstdc++6:$arch \
|
||||
libvulkan-dev:$arch \
|
||||
@@ -36,13 +34,11 @@ apt-get install -y --no-remove \
|
||||
libxrandr-dev:$arch \
|
||||
libxshmfence-dev:$arch \
|
||||
libxxf86vm-dev:$arch \
|
||||
libwayland-dev:$arch \
|
||||
wget
|
||||
|
||||
if [[ $arch != "armhf" ]]; then
|
||||
# See the list of available architectures in https://apt.llvm.org/bullseye/dists/llvm-toolchain-bullseye-13/main/
|
||||
if [[ $arch == "s390x" ]] || [[ $arch == "i386" ]] || [[ $arch == "arm64" ]]; then
|
||||
LLVM=13
|
||||
if [[ $arch == "s390x" ]]; then
|
||||
LLVM=9
|
||||
else
|
||||
LLVM=11
|
||||
fi
|
||||
@@ -50,8 +46,9 @@ if [[ $arch != "armhf" ]]; then
|
||||
# llvm-*-tools:$arch conflicts with python3:amd64. Install dependencies only
|
||||
# with apt-get, then force-install llvm-*-{dev,tools}:$arch with dpkg to get
|
||||
# around this.
|
||||
apt-get install -y --no-remove --no-install-recommends \
|
||||
apt-get install -y --no-remove \
|
||||
libclang-cpp${LLVM}:$arch \
|
||||
libffi-dev:$arch \
|
||||
libgcc-s1:$arch \
|
||||
libtinfo-dev:$arch \
|
||||
libz3-dev:$arch \
|
||||
@@ -69,8 +66,6 @@ fi
|
||||
EXTRA_MESON_ARGS="--cross-file=/cross_file-${arch}.txt -D libdir=lib/$(dpkg-architecture -A $arch -qDEB_TARGET_MULTIARCH)"
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
. .gitlab-ci/container/build-wayland.sh
|
||||
|
||||
apt-get purge -y \
|
||||
$STABLE_EPHEMERAL
|
||||
|
||||
|
@@ -1,5 +1,4 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -ex
|
||||
|
||||
@@ -20,7 +19,7 @@ rm $ndk.zip
|
||||
# duplicate files. Turn them into hardlinks to save on container space.
|
||||
rdfind -makehardlinks true -makeresultsfile false /android-ndk-r21d/
|
||||
# Drop some large tools we won't use in this build.
|
||||
find /android-ndk-r21d/ -type f | grep -E -i "clang-check|clang-tidy|lldb" | xargs rm -f
|
||||
find /android-ndk-r21d/ -type f | egrep -i "clang-check|clang-tidy|lldb" | xargs rm -f
|
||||
|
||||
sh .gitlab-ci/container/create-android-ndk-pc.sh /$ndk zlib.pc "" "-lz" "1.2.3"
|
||||
|
||||
|
@@ -1,5 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
arch=arm64 . .gitlab-ci/container/debian/arm_test.sh
|
@@ -1,5 +1,4 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
@@ -9,15 +8,9 @@ sed -i -e 's/http:\/\/deb/https:\/\/deb/g' /etc/apt/sources.list
|
||||
echo 'deb https://deb.debian.org/debian buster main' >/etc/apt/sources.list.d/buster.list
|
||||
apt-get update
|
||||
|
||||
# Ephemeral packages (installed for this script and removed again at
|
||||
# the end)
|
||||
STABLE_EPHEMERAL=" \
|
||||
libssl-dev \
|
||||
"
|
||||
|
||||
apt-get -y install \
|
||||
${EXTRA_LOCAL_PACKAGES} \
|
||||
${STABLE_EPHEMERAL} \
|
||||
abootimg \
|
||||
autoconf \
|
||||
automake \
|
||||
bc \
|
||||
@@ -50,9 +43,8 @@ apt-get -y install \
|
||||
libxrandr-dev \
|
||||
libxshmfence-dev \
|
||||
libxxf86vm-dev \
|
||||
libwayland-dev \
|
||||
llvm-11-dev \
|
||||
ninja-build \
|
||||
meson \
|
||||
pkg-config \
|
||||
python3-mako \
|
||||
python3-pil \
|
||||
@@ -62,31 +54,21 @@ apt-get -y install \
|
||||
u-boot-tools \
|
||||
wget \
|
||||
xz-utils \
|
||||
zlib1g-dev \
|
||||
zstd
|
||||
zlib1g-dev
|
||||
|
||||
# Not available anymore in bullseye
|
||||
apt-get install -y --no-remove -t buster \
|
||||
android-sdk-ext4-utils
|
||||
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2
|
||||
|
||||
# We need at least 0.61.4 for proper Rust; 0.62 for modern meson env2mfile
|
||||
pip3 install meson==0.63.3
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@34f4ade99434043f88e164933f570301fd18b125
|
||||
|
||||
arch=armhf
|
||||
. .gitlab-ci/container/cross_build.sh
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
|
||||
. .gitlab-ci/container/build-mold.sh
|
||||
|
||||
# dependencies where we want a specific version
|
||||
EXTRA_MESON_ARGS=
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
. .gitlab-ci/container/build-wayland.sh
|
||||
|
||||
apt-get purge -y $STABLE_EPHEMERAL
|
||||
|
||||
. .gitlab-ci/container/container_post_build.sh
|
||||
|
@@ -1,5 +1,4 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2154 # arch is assigned in previous scripts
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
@@ -10,6 +9,7 @@ sed -i -e 's/http:\/\/deb/https:\/\/deb/g' /etc/apt/sources.list
|
||||
apt-get update
|
||||
|
||||
apt-get install -y --no-remove \
|
||||
abootimg \
|
||||
cpio \
|
||||
fastboot \
|
||||
netcat \
|
||||
@@ -19,29 +19,21 @@ apt-get install -y --no-remove \
|
||||
python3-serial \
|
||||
rsync \
|
||||
snmp \
|
||||
wget \
|
||||
zstd
|
||||
wget
|
||||
|
||||
# setup SNMPv2 SMI MIB
|
||||
wget https://raw.githubusercontent.com/net-snmp/net-snmp/master/mibs/SNMPv2-SMI.txt \
|
||||
-O /usr/share/snmp/mibs/SNMPv2-SMI.txt
|
||||
|
||||
. .gitlab-ci/container/baremetal_build.sh
|
||||
arch=arm64 . .gitlab-ci/container/baremetal_build.sh
|
||||
arch=armhf . .gitlab-ci/container/baremetal_build.sh
|
||||
|
||||
if [[ "$arch" == "arm64" ]]; then
|
||||
# This firmware file from Debian bullseye causes hangs
|
||||
wget https://git.kernel.org/pub/scm/linux/kernel/git/firmware/linux-firmware.git/plain/qcom/a530_pfp.fw?id=d5f9eea5a251d43412b07f5295d03e97b89ac4a5 \
|
||||
-O /rootfs-arm64/lib/firmware/qcom/a530_pfp.fw
|
||||
fi
|
||||
# This firmware file from Debian bullseye causes hangs
|
||||
wget https://git.kernel.org/pub/scm/linux/kernel/git/firmware/linux-firmware.git/plain/qcom/a530_pfp.fw?id=d5f9eea5a251d43412b07f5295d03e97b89ac4a5 \
|
||||
-O /rootfs-arm64/lib/firmware/qcom/a530_pfp.fw
|
||||
|
||||
mkdir -p /baremetal-files/jetson-nano/boot/
|
||||
ln -s \
|
||||
/baremetal-files/Image \
|
||||
/baremetal-files/tegra210-p3450-0000.dtb \
|
||||
/baremetal-files/jetson-nano/boot/
|
||||
|
||||
mkdir -p /baremetal-files/jetson-tk1/boot/
|
||||
ln -s \
|
||||
/baremetal-files/zImage \
|
||||
/baremetal-files/tegra124-jetson-tk1.dtb \
|
||||
/baremetal-files/jetson-tk1/boot/
|
||||
|
@@ -1,5 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
arch=armhf . .gitlab-ci/container/debian/arm_test.sh
|
@@ -1,16 +1,5 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
arch=s390x
|
||||
|
||||
# Ephemeral packages (installed for this script and removed again at the end)
|
||||
STABLE_EPHEMERAL="libssl-dev"
|
||||
|
||||
apt-get -y install "$STABLE_EPHEMERAL"
|
||||
|
||||
. .gitlab-ci/container/build-mold.sh
|
||||
|
||||
apt-get purge -y "$STABLE_EPHEMERAL"
|
||||
|
||||
. .gitlab-ci/container/cross_build.sh
|
||||
|
@@ -5,9 +5,12 @@ set -o xtrace
|
||||
|
||||
# Installing wine, need this for testing mingw or nine
|
||||
|
||||
# We need multiarch for Wine
|
||||
dpkg --add-architecture i386
|
||||
apt-get update
|
||||
apt-get install -y --no-remove \
|
||||
wine \
|
||||
wine32 \
|
||||
wine64 \
|
||||
xvfb
|
||||
|
||||
|
@@ -1,5 +1,4 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
@@ -28,7 +27,6 @@ apt-get install -y --no-remove \
|
||||
bison \
|
||||
ccache \
|
||||
dpkg-cross \
|
||||
findutils \
|
||||
flex \
|
||||
g++ \
|
||||
cmake \
|
||||
@@ -38,12 +36,15 @@ apt-get install -y --no-remove \
|
||||
kmod \
|
||||
libclang-13-dev \
|
||||
libclang-11-dev \
|
||||
libclang-9-dev \
|
||||
libclc-dev \
|
||||
libelf-dev \
|
||||
libepoxy-dev \
|
||||
libexpat1-dev \
|
||||
libgtk-3-dev \
|
||||
libllvm13 \
|
||||
libllvm11 \
|
||||
libllvm9 \
|
||||
libomxil-bellagio-dev \
|
||||
libpciaccess-dev \
|
||||
libunwind-dev \
|
||||
@@ -57,13 +58,13 @@ apt-get install -y --no-remove \
|
||||
libxrandr-dev \
|
||||
libxrender-dev \
|
||||
libxshmfence-dev \
|
||||
libxvmc-dev \
|
||||
libxxf86vm-dev \
|
||||
make \
|
||||
ninja-build \
|
||||
meson \
|
||||
pkg-config \
|
||||
python3-mako \
|
||||
python3-pil \
|
||||
python3-ply \
|
||||
python3-requests \
|
||||
qemu-user \
|
||||
valgrind \
|
||||
@@ -72,16 +73,10 @@ apt-get install -y --no-remove \
|
||||
x11proto-gl-dev \
|
||||
x11proto-randr-dev \
|
||||
xz-utils \
|
||||
zlib1g-dev \
|
||||
zstd
|
||||
zlib1g-dev
|
||||
|
||||
# Needed for ci-fairy, this revision is able to upload files to MinIO
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2
|
||||
|
||||
# We need at least 0.61.4 for proper Rust; 0.62 for modern meson env2mfile
|
||||
pip3 install meson==0.63.3
|
||||
|
||||
. .gitlab-ci/container/build-rust.sh
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@34f4ade99434043f88e164933f570301fd18b125
|
||||
|
||||
. .gitlab-ci/container/debian/x86_build-base-wine.sh
|
||||
|
||||
|
@@ -1,7 +1,4 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -e
|
||||
|
||||
# Pull packages from msys2 repository that can be directly used.
|
||||
# We can use https://packages.msys2.org/ to retrieve the newest package
|
||||
|
@@ -1,18 +1,11 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -e
|
||||
|
||||
# Building libdrm (libva dependency)
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
wd=$PWD
|
||||
CMAKE_TOOLCHAIN_MINGW_PATH=$wd/.gitlab-ci/container/debian/x86_mingw-toolchain.cmake
|
||||
mkdir -p ~/tmp
|
||||
pushd ~/tmp
|
||||
|
||||
# Building DirectX-Headers
|
||||
git clone https://github.com/microsoft/DirectX-Headers -b v1.606.4 --depth 1
|
||||
git clone https://github.com/microsoft/DirectX-Headers -b v1.606.3 --depth 1
|
||||
mkdir -p DirectX-Headers/build
|
||||
pushd DirectX-Headers/build
|
||||
meson .. \
|
||||
@@ -24,24 +17,6 @@ meson .. \
|
||||
ninja install
|
||||
popd
|
||||
|
||||
# Building libva
|
||||
git clone https://github.com/intel/libva
|
||||
pushd libva/
|
||||
# libva-win32 is released with libva version 2.17 (see https://github.com/intel/libva/releases/tag/2.17.0)
|
||||
git checkout 2.17.0
|
||||
popd
|
||||
# libva already has a build dir in their repo, use builddir instead
|
||||
mkdir -p libva/builddir
|
||||
pushd libva/builddir
|
||||
meson .. \
|
||||
--backend=ninja \
|
||||
--buildtype=release \
|
||||
-Dprefix=/usr/x86_64-w64-mingw32/ \
|
||||
--cross-file=$wd/.gitlab-ci/x86_64-w64-mingw32
|
||||
|
||||
ninja install
|
||||
popd
|
||||
|
||||
export VULKAN_SDK_VERSION=1.3.211.0
|
||||
|
||||
# Building SPIRV Tools
|
||||
|
@@ -1,5 +1,4 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
@@ -13,7 +12,6 @@ STABLE_EPHEMERAL=" \
|
||||
autotools-dev \
|
||||
bzip2 \
|
||||
libtool \
|
||||
libssl-dev \
|
||||
python3-pip \
|
||||
"
|
||||
|
||||
@@ -29,6 +27,7 @@ apt-get install -y --no-remove \
|
||||
libclang-cpp11-dev \
|
||||
libgbm-dev \
|
||||
libglvnd-dev \
|
||||
libllvmspirvlib-dev \
|
||||
liblua5.3-dev \
|
||||
libxcb-dri2-0-dev \
|
||||
libxcb-dri3-dev \
|
||||
@@ -42,16 +41,14 @@ apt-get install -y --no-remove \
|
||||
libxml2-dev \
|
||||
llvm-13-dev \
|
||||
llvm-11-dev \
|
||||
llvm-9-dev \
|
||||
ocl-icd-opencl-dev \
|
||||
python3-freezegun \
|
||||
python3-pytest \
|
||||
procps \
|
||||
spirv-tools \
|
||||
shellcheck \
|
||||
strace \
|
||||
time \
|
||||
yamllint \
|
||||
zstd
|
||||
time
|
||||
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
@@ -61,17 +58,11 @@ export XORG_RELEASES=https://xorg.freedesktop.org/releases/individu
|
||||
|
||||
export XORGMACROS_VERSION=util-macros-1.19.0
|
||||
|
||||
. .gitlab-ci/container/build-mold.sh
|
||||
|
||||
wget $XORG_RELEASES/util/$XORGMACROS_VERSION.tar.bz2
|
||||
tar -xvf $XORGMACROS_VERSION.tar.bz2 && rm $XORGMACROS_VERSION.tar.bz2
|
||||
cd $XORGMACROS_VERSION; ./configure; make install; cd ..
|
||||
rm -rf $XORGMACROS_VERSION
|
||||
|
||||
. .gitlab-ci/container/build-llvm-spirv.sh
|
||||
|
||||
. .gitlab-ci/container/build-libclc.sh
|
||||
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
. .gitlab-ci/container/build-wayland.sh
|
||||
@@ -83,7 +74,7 @@ cd shader-db
|
||||
make
|
||||
popd
|
||||
|
||||
git clone https://github.com/microsoft/DirectX-Headers -b v1.606.4 --depth 1
|
||||
git clone https://github.com/microsoft/DirectX-Headers -b v1.606.3 --depth 1
|
||||
mkdir -p DirectX-Headers/build
|
||||
pushd DirectX-Headers/build
|
||||
meson .. --backend=ninja --buildtype=release -Dbuild-test=false
|
||||
@@ -92,13 +83,7 @@ ninja install
|
||||
popd
|
||||
rm -rf DirectX-Headers
|
||||
|
||||
pip3 install lavacli==1.5.2
|
||||
|
||||
# install bindgen
|
||||
RUSTFLAGS='-L native=/usr/local/lib' cargo install \
|
||||
bindgen --version 0.59.2 \
|
||||
-j ${FDO_CI_CONCURRENT:-4} \
|
||||
--root /usr/local
|
||||
pip3 install git+https://git.lavasoftware.org/lava/lavacli@3db3ddc45e5358908bc6a17448059ea2340492b7
|
||||
|
||||
############### Uninstall the build software
|
||||
|
||||
|
@@ -1,5 +1,4 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
@@ -13,43 +12,11 @@ sed -i -e 's/http:\/\/deb/https:\/\/deb/g' /etc/apt/sources.list
|
||||
# Ephemeral packages (installed for this script and removed again at
|
||||
# the end)
|
||||
STABLE_EPHEMERAL=" \
|
||||
autoconf \
|
||||
automake \
|
||||
bc \
|
||||
bison \
|
||||
bzip2 \
|
||||
ccache \
|
||||
cmake \
|
||||
clang-11 \
|
||||
flex \
|
||||
glslang-tools \
|
||||
g++ \
|
||||
libasound2-dev \
|
||||
libcap-dev \
|
||||
libclang-cpp11-dev \
|
||||
libegl-dev \
|
||||
libelf-dev \
|
||||
libepoxy-dev \
|
||||
libgbm-dev \
|
||||
libpciaccess-dev \
|
||||
libvulkan-dev \
|
||||
libwayland-dev \
|
||||
libx11-xcb-dev \
|
||||
libxext-dev \
|
||||
llvm-13-dev \
|
||||
llvm-11-dev \
|
||||
make \
|
||||
meson \
|
||||
patch \
|
||||
pkg-config \
|
||||
protobuf-compiler \
|
||||
cargo \
|
||||
python3-dev \
|
||||
python3-pip \
|
||||
python3-setuptools \
|
||||
python3-wheel \
|
||||
spirv-tools \
|
||||
wayland-protocols \
|
||||
xz-utils \
|
||||
"
|
||||
|
||||
# Add llvm 13 to the build image
|
||||
@@ -59,19 +26,14 @@ add-apt-repository "deb https://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-1
|
||||
apt-get update
|
||||
apt-get dist-upgrade -y
|
||||
|
||||
apt-get install -y \
|
||||
sysvinit-core
|
||||
|
||||
apt-get install -y --no-remove \
|
||||
git \
|
||||
git-lfs \
|
||||
inetutils-syslogd \
|
||||
iptables \
|
||||
jq \
|
||||
libasan6 \
|
||||
libexpat1 \
|
||||
libllvm13 \
|
||||
libllvm11 \
|
||||
libllvm9 \
|
||||
liblz4-1 \
|
||||
libpng16-16 \
|
||||
libpython3.9 \
|
||||
@@ -91,69 +53,22 @@ apt-get install -y --no-remove \
|
||||
python3-requests \
|
||||
python3-six \
|
||||
python3-yaml \
|
||||
socat \
|
||||
vulkan-tools \
|
||||
waffle-utils \
|
||||
wget \
|
||||
xauth \
|
||||
xvfb \
|
||||
zlib1g \
|
||||
zstd
|
||||
zlib1g
|
||||
|
||||
apt-get install -y --no-install-recommends \
|
||||
$STABLE_EPHEMERAL
|
||||
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
|
||||
############### Build kernel
|
||||
|
||||
export DEFCONFIG="arch/x86/configs/x86_64_defconfig"
|
||||
export KERNEL_IMAGE_NAME=bzImage
|
||||
export KERNEL_ARCH=x86_64
|
||||
export DEBIAN_ARCH=amd64
|
||||
|
||||
mkdir -p /lava-files/
|
||||
. .gitlab-ci/container/build-kernel.sh
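build-kernel.sh itself is not part of this diff; driven by the variables exported above it presumably boils down to a defconfig build along these lines. This is a hedged sketch under that assumption, not the actual script.

    # Assumed shape of a defconfig kernel build using the exported variables;
    # the real build-kernel.sh may differ in details.
    set -e
    cd /kernel                                             # assumed kernel checkout
    make ARCH=x86_64 x86_64_defconfig                      # from $DEFCONFIG
    make ARCH=x86_64 -j"${FDO_CI_CONCURRENT:-4}" bzImage   # $KERNEL_IMAGE_NAME
    cp arch/x86/boot/bzImage /lava-files/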
|
||||
|
||||
# Needed for ci-fairy, this revision is able to upload files to MinIO
|
||||
# and doesn't depend on git
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2
|
||||
|
||||
# Needed for manipulation with traces yaml files.
|
||||
pip3 install yq
|
||||
|
||||
# Needed for crosvm compilation.
|
||||
update-alternatives --install /usr/bin/clang clang /usr/bin/clang-11 100
|
||||
|
||||
############### Build LLVM-SPIRV translator
|
||||
|
||||
. .gitlab-ci/container/build-llvm-spirv.sh
|
||||
|
||||
############### Build libclc
|
||||
|
||||
. .gitlab-ci/container/build-libclc.sh
|
||||
|
||||
############### Build libdrm
|
||||
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
############### Build Wayland
|
||||
|
||||
. .gitlab-ci/container/build-wayland.sh
|
||||
|
||||
############### Build Crosvm
|
||||
|
||||
. .gitlab-ci/container/build-rust.sh
|
||||
. .gitlab-ci/container/build-crosvm.sh
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@34f4ade99434043f88e164933f570301fd18b125
|
||||
|
||||
############### Build dEQP runner
|
||||
. .gitlab-ci/container/build-deqp-runner.sh
|
||||
|
||||
rm -rf /root/.cargo
|
||||
rm -rf /root/.rustup
|
||||
|
||||
ccache --show-stats
|
||||
rm -rf ~/.cargo
|
||||
|
||||
apt-get purge -y $STABLE_EPHEMERAL
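The ephemeral-package dance used throughout these container scripts always has the same shape: install the build-only dependencies, run the builds, then purge them so they do not bloat the final image. A condensed sketch of the pattern; the package list is illustrative.

    #!/bin/bash
    # Minimal sketch of the STABLE_EPHEMERAL pattern used above.
    # shellcheck disable=SC2086 # we want word splitting
    set -e

    STABLE_EPHEMERAL=" \
        cmake \
        g++ \
        meson \
        "

    apt-get install -y --no-install-recommends $STABLE_EPHEMERAL

    # ... build everything that needs the toolchain here ...

    apt-get purge -y $STABLE_EPHEMERAL
    apt-get autoremove -y --purge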
|
||||
|
||||
|
@@ -1,5 +1,4 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
@@ -8,30 +7,41 @@ export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Ephemeral packages (installed for this script and removed again at the end)
|
||||
STABLE_EPHEMERAL=" \
|
||||
autoconf \
|
||||
automake \
|
||||
bc \
|
||||
bison \
|
||||
bzip2 \
|
||||
ccache \
|
||||
clang-13 \
|
||||
clang-11 \
|
||||
cmake \
|
||||
flex \
|
||||
g++ \
|
||||
glslang-tools \
|
||||
libasound2-dev \
|
||||
libcap-dev \
|
||||
libclang-cpp13-dev \
|
||||
libclang-cpp11-dev \
|
||||
libelf-dev \
|
||||
libexpat1-dev \
|
||||
libfdt-dev \
|
||||
libgbm-dev \
|
||||
libgles2-mesa-dev \
|
||||
libllvmspirvlib-dev \
|
||||
libpciaccess-dev \
|
||||
libpng-dev \
|
||||
libudev-dev \
|
||||
libvulkan-dev \
|
||||
libwaffle-dev \
|
||||
libwayland-dev \
|
||||
libx11-xcb-dev \
|
||||
libxcb-dri2-0-dev \
|
||||
libxext-dev \
|
||||
libxkbcommon-dev \
|
||||
libxrender-dev \
|
||||
llvm-13-dev \
|
||||
llvm-11-dev \
|
||||
llvm-spirv \
|
||||
make \
|
||||
meson \
|
||||
ocl-icd-opencl-dev \
|
||||
@@ -53,19 +63,51 @@ apt-get install -y --no-remove \
|
||||
libclang-cpp11 \
|
||||
libcap2 \
|
||||
libegl1 \
|
||||
libepoxy0 \
|
||||
libepoxy-dev \
|
||||
libfdt1 \
|
||||
libllvmspirvlib11 \
|
||||
libxcb-shm0 \
|
||||
ocl-icd-libopencl1 \
|
||||
python3-lxml \
|
||||
python3-renderdoc \
|
||||
python3-simplejson \
|
||||
socat \
|
||||
spirv-tools \
|
||||
weston
|
||||
sysvinit-core \
|
||||
wget
|
||||
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
|
||||
############### Build libdrm
|
||||
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
############### Build Wayland
|
||||
|
||||
. .gitlab-ci/container/build-wayland.sh
|
||||
|
||||
############### Build Crosvm
|
||||
|
||||
. .gitlab-ci/container/build-rust.sh
|
||||
. .gitlab-ci/container/build-crosvm.sh
|
||||
rm -rf /root/.cargo
|
||||
rm -rf /root/.rustup
|
||||
|
||||
############### Build kernel
|
||||
|
||||
export DEFCONFIG="arch/x86/configs/x86_64_defconfig"
|
||||
export KERNEL_IMAGE_NAME=bzImage
|
||||
export KERNEL_ARCH=x86_64
|
||||
export DEBIAN_ARCH=amd64
|
||||
|
||||
mkdir -p /lava-files/
|
||||
. .gitlab-ci/container/build-kernel.sh
|
||||
|
||||
############### Build libclc
|
||||
|
||||
. .gitlab-ci/container/build-libclc.sh
|
||||
|
||||
############### Build piglit
|
||||
|
||||
PIGLIT_OPTS="-DPIGLIT_BUILD_CL_TESTS=ON -DPIGLIT_BUILD_DMA_BUF_TESTS=ON" . .gitlab-ci/container/build-piglit.sh
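build-piglit.sh is not shown in this diff; the PIGLIT_OPTS set above is presumably forwarded to piglit's CMake configure step, roughly as in this hedged sketch (paths and flags other than $PIGLIT_OPTS are assumptions).

    # Sketch only: how PIGLIT_OPTS plausibly reaches CMake inside build-piglit.sh.
    # shellcheck disable=SC2086 # we want word splitting on $PIGLIT_OPTS
    set -e
    git clone --depth 1 https://gitlab.freedesktop.org/mesa/piglit.git /piglit
    cmake -S /piglit -B /piglit/build -G Ninja \
        -DCMAKE_BUILD_TYPE=Release \
        $PIGLIT_OPTS
    ninja -C /piglit/build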
|
||||
|
@@ -1,7 +1,6 @@
|
||||
#!/bin/bash
|
||||
# The relative paths in this file only become valid at runtime.
|
||||
# shellcheck disable=SC1091
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
@@ -45,52 +44,102 @@ STABLE_EPHEMERAL=" \
|
||||
python3-setuptools \
|
||||
python3-wheel \
|
||||
software-properties-common \
|
||||
wget \
|
||||
wine64-tools \
|
||||
xz-utils \
|
||||
"
|
||||
|
||||
apt-get install -y --no-remove --no-install-recommends \
|
||||
apt-get install -y --no-remove \
|
||||
$STABLE_EPHEMERAL \
|
||||
libepoxy0 \
|
||||
libxcb-shm0 \
|
||||
pciutils \
|
||||
python3-lxml \
|
||||
python3-simplejson \
|
||||
wget \
|
||||
xinit \
|
||||
xserver-xorg-video-amdgpu \
|
||||
xserver-xorg-video-ati
|
||||
|
||||
# We need multiarch for Wine
|
||||
dpkg --add-architecture i386
|
||||
|
||||
# Install a more recent version of Wine than exists in Debian.
|
||||
apt-key add .gitlab-ci/container/debian/winehq.gpg.key
|
||||
apt-add-repository https://dl.winehq.org/wine-builds/debian/
|
||||
apt-get update -q
|
||||
apt update -qyy
|
||||
|
||||
# Needed for Valve's tracing jobs to collect information about the graphics
|
||||
# hardware on the test devices.
|
||||
pip3 install gfxinfo-mupuf==0.0.9
|
||||
|
||||
# workaround wine needing 32-bit
|
||||
# https://bugs.winehq.org/show_bug.cgi?id=53393
|
||||
apt-get install -y --no-remove wine-stable-amd64 # a requirement for wine-stable
|
||||
WINE_PKG="wine-stable"
|
||||
WINE_PKG_DROP="wine-stable-i386"
|
||||
apt-get download "${WINE_PKG}"
|
||||
dpkg --ignore-depends="${WINE_PKG_DROP}" -i "${WINE_PKG}"*.deb
|
||||
rm "${WINE_PKG}"*.deb
|
||||
sed -i "/${WINE_PKG_DROP}/d" /var/lib/dpkg/status
|
||||
apt-get install -y --no-remove winehq-stable # symlinks-only, depends on wine-stable
|
||||
apt install -y --no-remove --install-recommends winehq-stable
|
||||
|
||||
function setup_wine() {
|
||||
export WINEDEBUG="-all"
|
||||
export WINEPREFIX="$1"
|
||||
|
||||
# We don't want crash dialogs
|
||||
cat >crashdialog.reg <<EOF
|
||||
Windows Registry Editor Version 5.00
|
||||
|
||||
[HKEY_CURRENT_USER\Software\Wine\WineDbg]
|
||||
"ShowCrashDialog"=dword:00000000
|
||||
|
||||
EOF
|
||||
|
||||
# Set the wine prefix and disable the crash dialog
|
||||
wine regedit crashdialog.reg
|
||||
rm crashdialog.reg
|
||||
|
||||
# An immediate wine command may fail with: "${WINEPREFIX}: Not a
# valid wine prefix." That error is spurious: it comes from checking
# for the existence of the system.reg file before Wine has created
# it. Giving the prefix a bit more time to be created solves the
# problem.
|
||||
while ! test -f "${WINEPREFIX}/system.reg"; do sleep 1; done
|
||||
}
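The wait loop above spins forever if the prefix never materialises. If a timeout is wanted, a bounded variant could look like the following; this is an illustrative alternative, not what the CI does.

    # Bounded wait: give Wine up to 60 seconds to create system.reg,
    # then fail loudly instead of hanging the job.
    for _ in $(seq 1 60); do
        test -f "${WINEPREFIX}/system.reg" && break
        sleep 1
    done
    test -f "${WINEPREFIX}/system.reg" || { echo "wine prefix was never created" >&2; exit 1; }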
|
||||
|
||||
############### Install DXVK
|
||||
|
||||
. .gitlab-ci/container/setup-wine.sh "/dxvk-wine64"
|
||||
. .gitlab-ci/container/install-wine-dxvk.sh
|
||||
dxvk_install_release() {
|
||||
local DXVK_VERSION=${1:-"1.10.1"}
|
||||
|
||||
wget "https://github.com/doitsujin/dxvk/releases/download/v${DXVK_VERSION}/dxvk-${DXVK_VERSION}.tar.gz"
|
||||
tar xzpf dxvk-"${DXVK_VERSION}".tar.gz
|
||||
"dxvk-${DXVK_VERSION}"/setup_dxvk.sh install
|
||||
rm -rf "dxvk-${DXVK_VERSION}"
|
||||
rm dxvk-"${DXVK_VERSION}".tar.gz
|
||||
}
|
||||
|
||||
# Install from a Github PR number
|
||||
dxvk_install_pr() {
|
||||
local __prnum=$1
|
||||
|
||||
# NOTE: Clone the entire history of the repo so as not to think
|
||||
# harder about cloning just enough for 'git describe' to work. 'git
|
||||
# describe' is used by the dxvk build system to generate a
|
||||
# dxvk_version Meson variable, which is nice-to-have.
|
||||
git clone https://github.com/doitsujin/dxvk
|
||||
pushd dxvk
|
||||
git fetch origin pull/"$__prnum"/head:pr
|
||||
git checkout pr
|
||||
./package-release.sh pr ../dxvk-build --no-package
|
||||
popd
|
||||
pushd ./dxvk-build/dxvk-pr
|
||||
./setup_dxvk.sh install
|
||||
popd
|
||||
rm -rf ./dxvk-build ./dxvk
|
||||
}
|
||||
|
||||
# Sets up the WINEPREFIX for the DXVK installation commands below.
|
||||
setup_wine "/dxvk-wine64"
|
||||
dxvk_install_release "1.10.1"
|
||||
#dxvk_install_pr 2359
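setup_dxvk.sh works by copying the DXVK DLLs into the prefix and registering native DLL overrides. One way to sanity-check the result is sketched below; this check is an assumption about what the installer leaves behind, not something the script above performs.

    # Hedged sanity check of a DXVK install inside the prefix set up above.
    export WINEPREFIX=/dxvk-wine64
    wine reg query 'HKEY_CURRENT_USER\Software\Wine\DllOverrides'   # should list d3d11, dxgi, ...
    ls "${WINEPREFIX}/drive_c/windows/system32/d3d11.dll"           # DXVK's d3d11 should be present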
|
||||
|
||||
############### Install apitrace binaries for wine
|
||||
|
||||
. .gitlab-ci/container/install-wine-apitrace.sh
|
||||
# Add the apitrace path to the registry
|
||||
wine64 \
|
||||
wine \
|
||||
reg add "HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Session Manager\Environment" \
|
||||
/v Path \
|
||||
/t REG_EXPAND_SZ \
|
||||
@@ -101,6 +150,14 @@ wine64 \
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
|
||||
############### Build libdrm
|
||||
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
############### Build Wayland
|
||||
|
||||
. .gitlab-ci/container/build-wayland.sh
|
||||
|
||||
############### Build parallel-deqp-runner's hang-detection tool
|
||||
|
||||
. .gitlab-ci/container/build-hang-detection.sh
|
||||
@@ -127,7 +184,7 @@ PIGLIT_BUILD_TARGETS="piglit_replayer" . .gitlab-ci/container/build-piglit.sh
|
||||
|
||||
############### Build VKD3D-Proton
|
||||
|
||||
. .gitlab-ci/container/setup-wine.sh "/vkd3d-proton-wine64"
|
||||
setup_wine "/vkd3d-proton-wine64"
|
||||
|
||||
. .gitlab-ci/container/build-vkd3d-proton.sh
|
||||
|
||||
|
@@ -1,5 +1,4 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
@@ -9,12 +8,10 @@ EPHEMERAL="
|
||||
autoconf
|
||||
automake
|
||||
bzip2
|
||||
cmake
|
||||
git
|
||||
libtool
|
||||
pkgconfig(epoxy)
|
||||
pkgconfig(gbm)
|
||||
pkgconfig(openssl)
|
||||
unzip
|
||||
wget
|
||||
xz
|
||||
@@ -31,6 +28,7 @@ dnf install -y --setopt=install_weak_deps=False \
|
||||
glslang \
|
||||
kernel-headers \
|
||||
llvm-devel \
|
||||
clang-devel \
|
||||
meson \
|
||||
"pkgconfig(dri2proto)" \
|
||||
"pkgconfig(expat)" \
|
||||
@@ -64,7 +62,8 @@ dnf install -y --setopt=install_weak_deps=False \
|
||||
python-unversioned-command \
|
||||
python3-devel \
|
||||
python3-mako \
|
||||
python3-ply \
|
||||
python3-devel \
|
||||
python3-mako \
|
||||
vulkan-headers \
|
||||
spirv-tools-devel \
|
||||
spirv-llvm-translator-devel \
|
||||
@@ -84,8 +83,6 @@ tar -xvf $XORGMACROS_VERSION.tar.bz2 && rm $XORGMACROS_VERSION.tar.bz2
|
||||
cd $XORGMACROS_VERSION; ./configure; make install; cd ..
|
||||
rm -rf $XORGMACROS_VERSION
|
||||
|
||||
. .gitlab-ci/container/build-mold.sh
|
||||
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
. .gitlab-ci/container/build-wayland.sh
|
||||
|
@@ -53,7 +53,7 @@
|
||||
variables:
|
||||
FDO_DISTRIBUTION_VERSION: bullseye-slim
|
||||
FDO_REPO_SUFFIX: $CI_JOB_NAME
|
||||
FDO_DISTRIBUTION_EXEC: 'bash .gitlab-ci/container/${CI_JOB_NAME}.sh'
|
||||
FDO_DISTRIBUTION_EXEC: 'env "WINEPATH=${WINEPATH}" FDO_CI_CONCURRENT=${FDO_CI_CONCURRENT} bash .gitlab-ci/container/${CI_JOB_NAME}.sh'
|
||||
# no need to pull the whole repo to build the container image
|
||||
GIT_STRATEGY: none
|
||||
|
||||
@@ -189,7 +189,6 @@ debian/android_build:
|
||||
debian/x86_test-base:
|
||||
extends: debian/x86_build-base
|
||||
variables:
|
||||
KERNEL_URL: &kernel-rootfs-url "https://gitlab.freedesktop.org/gfx-ci/linux/-/archive/v6.0-for-mesa-ci-93bd820c433b/linux-v6.0-for-mesa-ci-93bd820c433b.tar.bz2"
|
||||
MESA_IMAGE_TAG: &debian-x86_test-base ${DEBIAN_BASE_TAG}
|
||||
|
||||
.use-debian/x86_test-base:
|
||||
@@ -206,6 +205,8 @@ debian/x86_test-base:
|
||||
debian/x86_test-gl:
|
||||
extends: .use-debian/x86_test-base
|
||||
variables:
|
||||
FDO_DISTRIBUTION_EXEC: 'env KERNEL_URL=${KERNEL_URL} FDO_CI_CONCURRENT=${FDO_CI_CONCURRENT} bash .gitlab-ci/container/${CI_JOB_NAME}.sh'
|
||||
KERNEL_URL: &kernel-rootfs-url "https://gitlab.freedesktop.org/gfx-ci/linux/-/archive/v5.17-for-mesa-ci-b78f7870d97b/linux-v5.17-for-mesa-ci-b78f7870d97b.tar.bz2"
|
||||
MESA_IMAGE_TAG: &debian-x86_test-gl ${DEBIAN_X86_TEST_GL_TAG}
|
||||
|
||||
.use-debian/x86_test-gl:
|
||||
@@ -213,7 +214,7 @@ debian/x86_test-gl:
|
||||
- .set-image-base-tag
|
||||
variables:
|
||||
MESA_BASE_TAG: *debian-x86_test-base
|
||||
MESA_IMAGE_PATH: ${DEBIAN_X86_TEST_IMAGE_GL_PATH}
|
||||
MESA_IMAGE_PATH: ${DEBIAN_X86_TEST_IMAGE_PATH}
|
||||
MESA_IMAGE_TAG: *debian-x86_test-gl
|
||||
needs:
|
||||
- debian/x86_test-gl
|
||||
@@ -229,7 +230,7 @@ debian/x86_test-vk:
|
||||
- .set-image-base-tag
|
||||
variables:
|
||||
MESA_BASE_TAG: *debian-x86_test-base
|
||||
MESA_IMAGE_PATH: ${DEBIAN_X86_TEST_IMAGE_VK_PATH}
|
||||
MESA_IMAGE_PATH: "debian/x86_test-vk"
|
||||
MESA_IMAGE_TAG: *debian-x86_test-vk
|
||||
needs:
|
||||
- debian/x86_test-vk
|
||||
@@ -255,24 +256,6 @@ debian/arm_build:
|
||||
- debian/arm_build
|
||||
|
||||
|
||||
# Alpine based x86 build image
|
||||
alpine/x86_build:
|
||||
extends:
|
||||
- .fdo.container-build@alpine
|
||||
- .container
|
||||
variables:
|
||||
FDO_DISTRIBUTION_VERSION: "3.16"
|
||||
MESA_IMAGE_TAG: &alpine-x86_build ${ALPINE_X86_BUILD_TAG}
|
||||
|
||||
.use-alpine/x86_build:
|
||||
extends:
|
||||
- .set-image
|
||||
variables:
|
||||
MESA_IMAGE_PATH: "alpine/x86_build"
|
||||
MESA_IMAGE_TAG: *alpine-x86_build
|
||||
needs:
|
||||
- alpine/x86_build
|
||||
|
||||
# Fedora 34 based x86 build image
|
||||
fedora/x86_build:
|
||||
extends:
|
||||
@@ -340,56 +323,29 @@ kernel+rootfs_armhf:
|
||||
MESA_ROOTFS_TAG: *kernel-rootfs
|
||||
|
||||
# x86 image with ARM64 & armhf kernel & rootfs for baremetal testing
|
||||
.debian/arm_test:
|
||||
debian/arm_test:
|
||||
extends:
|
||||
- .fdo.container-build@debian
|
||||
- .container
|
||||
# Don't want the .container rules
|
||||
- .build-rules
|
||||
variables:
|
||||
FDO_DISTRIBUTION_TAG: "${MESA_IMAGE_TAG}--${MESA_ROOTFS_TAG}--${MESA_TEMPLATES_COMMIT}"
|
||||
ARTIFACTS_PREFIX: "https://${MINIO_HOST}/mesa-lava"
|
||||
ARTIFACTS_SUFFIX: "${MESA_ROOTFS_TAG}--${MESA_ARTIFACTS_TAG}--${MESA_TEMPLATES_COMMIT}"
|
||||
MESA_ARTIFACTS_TAG: *debian-arm_build
|
||||
MESA_ROOTFS_TAG: *kernel-rootfs
|
||||
|
||||
debian/armhf_test:
|
||||
extends:
|
||||
- .debian/arm_test
|
||||
needs:
|
||||
- kernel+rootfs_armhf
|
||||
variables:
|
||||
MESA_IMAGE_TAG: &debian-armhf_test ${DEBIAN_BASE_TAG}
|
||||
|
||||
debian/arm64_test:
|
||||
extends:
|
||||
- .debian/arm_test
|
||||
needs:
|
||||
- kernel+rootfs_arm64
|
||||
- kernel+rootfs_armhf
|
||||
variables:
|
||||
MESA_IMAGE_TAG: &debian-arm64_test ${DEBIAN_BASE_TAG}
|
||||
|
||||
.use-debian/arm_test:
|
||||
variables:
|
||||
FDO_DISTRIBUTION_EXEC: 'env ARTIFACTS_PREFIX=https://${MINIO_HOST}/mesa-lava ARTIFACTS_SUFFIX=${MESA_ROOTFS_TAG}--${MESA_ARM_BUILD_TAG}--${MESA_TEMPLATES_COMMIT} CI_PROJECT_PATH=${CI_PROJECT_PATH} FDO_CI_CONCURRENT=${FDO_CI_CONCURRENT} FDO_UPSTREAM_REPO=${FDO_UPSTREAM_REPO} bash .gitlab-ci/container/${CI_JOB_NAME}.sh'
|
||||
FDO_DISTRIBUTION_TAG: "${MESA_IMAGE_TAG}--${MESA_ROOTFS_TAG}--${MESA_ARM_BUILD_TAG}--${MESA_TEMPLATES_COMMIT}"
|
||||
MESA_ARM_BUILD_TAG: *debian-arm_build
|
||||
MESA_IMAGE_TAG: &debian-arm_test ${DEBIAN_BASE_TAG}
|
||||
MESA_ROOTFS_TAG: *kernel-rootfs
|
||||
|
||||
.use-debian/armhf_test:
|
||||
image: "$CI_REGISTRY_IMAGE/${MESA_IMAGE_PATH}:${MESA_IMAGE_TAG}--${MESA_ROOTFS_TAG}--${MESA_TEMPLATES_COMMIT}"
|
||||
extends:
|
||||
- .use-debian/arm_test
|
||||
.use-debian/arm_test:
|
||||
image: "$CI_REGISTRY_IMAGE/${MESA_IMAGE_PATH}:${MESA_IMAGE_TAG}--${MESA_ROOTFS_TAG}--${MESA_ARM_BUILD_TAG}--${MESA_TEMPLATES_COMMIT}"
|
||||
variables:
|
||||
MESA_IMAGE_PATH: "debian/armhf_test"
|
||||
MESA_IMAGE_TAG: *debian-armhf_test
|
||||
needs:
|
||||
- debian/arm_test
|
||||
|
||||
.use-debian/arm64_test:
|
||||
image: "$CI_REGISTRY_IMAGE/${MESA_IMAGE_PATH}:${MESA_IMAGE_TAG}--${MESA_ROOTFS_TAG}--${MESA_TEMPLATES_COMMIT}"
|
||||
extends:
|
||||
- .use-debian/arm_test
|
||||
variables:
|
||||
MESA_IMAGE_PATH: "debian/arm64_test"
|
||||
MESA_IMAGE_TAG: *debian-arm64_test
|
||||
MESA_ARM_BUILD_TAG: *debian-arm_build
|
||||
MESA_IMAGE_PATH: "debian/arm_test"
|
||||
MESA_IMAGE_TAG: *debian-arm_test
|
||||
MESA_ROOTFS_TAG: *kernel-rootfs
|
||||
needs:
|
||||
- debian/arm_test
|
||||
|
||||
@@ -453,7 +409,7 @@ windows_build_vs2019:
|
||||
- !reference [.build-rules, rules]
|
||||
variables:
|
||||
MESA_IMAGE_PATH: &windows_build_image_path ${WINDOWS_X64_BUILD_PATH}
|
||||
MESA_IMAGE_TAG: &windows_build_image_tag ${MESA_BASE_IMAGE_TAG}--${WINDOWS_X64_BUILD_TAG}
|
||||
MESA_IMAGE_TAG: &windows_build_image_tag ${WINDOWS_X64_BUILD_TAG}
|
||||
DOCKERFILE: Dockerfile_build
|
||||
MESA_BASE_IMAGE_PATH: *windows_vs_image_path
|
||||
MESA_BASE_IMAGE_TAG: *windows_vs_image_tag
|
||||
@@ -473,7 +429,7 @@ windows_test_vs2019:
|
||||
- !reference [.build-rules, rules]
|
||||
variables:
|
||||
MESA_IMAGE_PATH: &windows_test_image_path ${WINDOWS_X64_TEST_PATH}
|
||||
MESA_IMAGE_TAG: &windows_test_image_tag ${MESA_BASE_IMAGE_TAG}--${WINDOWS_X64_TEST_TAG}
|
||||
MESA_IMAGE_TAG: &windows_test_image_tag ${WINDOWS_X64_BUILD_TAG}--${WINDOWS_X64_TEST_TAG}
|
||||
DOCKERFILE: Dockerfile_test
|
||||
MESA_BASE_IMAGE_PATH: *windows_vs_image_path
|
||||
MESA_BASE_IMAGE_TAG: *windows_vs_image_tag
|
||||
@@ -489,7 +445,6 @@ windows_test_vs2019:
|
||||
variables:
|
||||
MESA_IMAGE_PATH: *windows_build_image_path
|
||||
MESA_IMAGE_TAG: *windows_build_image_tag
|
||||
MESA_BASE_IMAGE_TAG: *windows_vs_image_tag
|
||||
needs:
|
||||
- windows_build_vs2019
|
||||
|
||||
@@ -501,4 +456,3 @@ windows_test_vs2019:
|
||||
variables:
|
||||
MESA_IMAGE_PATH: *windows_test_image_path
|
||||
MESA_IMAGE_TAG: *windows_test_image_tag
|
||||
MESA_BASE_IMAGE_TAG: *windows_vs_image_tag
|
||||
|
@@ -1,39 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
dxvk_install_release() {
|
||||
local DXVK_VERSION=${1:-"1.10.3"}
|
||||
|
||||
wget "https://github.com/doitsujin/dxvk/releases/download/v${DXVK_VERSION}/dxvk-${DXVK_VERSION}.tar.gz"
|
||||
tar xzpf dxvk-"${DXVK_VERSION}".tar.gz
|
||||
# https://github.com/doitsujin/dxvk/issues/2921
|
||||
sed -i 's/wine="wine"/wine="wine32"/' "dxvk-${DXVK_VERSION}"/setup_dxvk.sh
|
||||
"dxvk-${DXVK_VERSION}"/setup_dxvk.sh install
|
||||
rm -rf "dxvk-${DXVK_VERSION}"
|
||||
rm dxvk-"${DXVK_VERSION}".tar.gz
|
||||
}
|
||||
|
||||
# Install from a Github PR number
|
||||
dxvk_install_pr() {
|
||||
local __prnum=$1
|
||||
|
||||
# NOTE: Clone the entire history of the repo so as not to think
|
||||
# harder about cloning just enough for 'git describe' to work. 'git
|
||||
# describe' is used by the dxvk build system to generate a
|
||||
# dxvk_version Meson variable, which is nice-to-have.
|
||||
git clone https://github.com/doitsujin/dxvk
|
||||
pushd dxvk
|
||||
git fetch origin pull/"$__prnum"/head:pr
|
||||
git checkout pr
|
||||
./package-release.sh pr ../dxvk-build --no-package
|
||||
popd
|
||||
pushd ./dxvk-build/dxvk-pr
|
||||
./setup_dxvk.sh install
|
||||
popd
|
||||
rm -rf ./dxvk-build ./dxvk
|
||||
}
|
||||
|
||||
dxvk_install_release "1.10.1"
|
||||
#dxvk_install_pr 2359
|
||||
|
@@ -1,7 +1,4 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC1091 # The relative paths in this file only become valid at runtime.
|
||||
# shellcheck disable=SC2034 # Variables are used in scripts called from here
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
@@ -40,8 +37,6 @@ if [[ "$DEBIAN_ARCH" = "arm64" ]]; then
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/mediatek/mt8183-kukui-jacuzzi-juniper-sku16.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/nvidia/tegra210-p3450-0000.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/qcom/sc7180-trogdor-lazor-limozeen-nots-r5.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/qcom/sc7180-trogdor-kingoftown-r1.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/freescale/imx8mq-nitrogen.dtb"
|
||||
KERNEL_IMAGE_NAME="Image"
|
||||
|
||||
elif [[ "$DEBIAN_ARCH" = "armhf" ]]; then
|
||||
@@ -52,7 +47,6 @@ elif [[ "$DEBIAN_ARCH" = "armhf" ]]; then
|
||||
DEVICE_TREES="arch/arm/boot/dts/rk3288-veyron-jaq.dtb"
|
||||
DEVICE_TREES+=" arch/arm/boot/dts/sun8i-h3-libretech-all-h3-cc.dtb"
|
||||
DEVICE_TREES+=" arch/arm/boot/dts/imx6q-cubox-i.dtb"
|
||||
DEVICE_TREES+=" arch/arm/boot/dts/tegra124-jetson-tk1.dtb"
|
||||
KERNEL_IMAGE_NAME="zImage"
|
||||
. .gitlab-ci/container/create-cross-file.sh armhf
|
||||
else
|
||||
@@ -62,7 +56,7 @@ else
|
||||
DEFCONFIG="arch/x86/configs/x86_64_defconfig"
|
||||
DEVICE_TREES=""
|
||||
KERNEL_IMAGE_NAME="bzImage"
|
||||
ARCH_PACKAGES="libasound2-dev libcap-dev libfdt-dev libva-dev wayland-protocols p7zip"
|
||||
ARCH_PACKAGES="libasound2-dev libcap-dev libfdt-dev libva-dev wayland-protocols"
|
||||
fi
|
||||
|
||||
# Determine if we're in a cross build.
|
||||
@@ -110,18 +104,15 @@ apt-get install -y --no-remove \
|
||||
libx11-xcb-dev \
|
||||
libxcb-dri2-0-dev \
|
||||
libxkbcommon-dev \
|
||||
libwayland-dev \
|
||||
ninja-build \
|
||||
patch \
|
||||
protobuf-compiler \
|
||||
python-is-python3 \
|
||||
python3-distutils \
|
||||
python3-mako \
|
||||
python3-numpy \
|
||||
python3-serial \
|
||||
unzip \
|
||||
wget \
|
||||
zstd
|
||||
wget
|
||||
|
||||
|
||||
if [[ "$DEBIAN_ARCH" = "armhf" ]]; then
|
||||
@@ -139,20 +130,6 @@ if [[ "$DEBIAN_ARCH" = "armhf" ]]; then
|
||||
libxkbcommon-dev:armhf
|
||||
fi
|
||||
|
||||
mkdir -p "/lava-files/rootfs-${DEBIAN_ARCH}"
|
||||
|
||||
############### Setting up
|
||||
if [ "$DEBIAN_ARCH" = "amd64" ]; then
|
||||
. .gitlab-ci/container/setup-wine.sh "/dxvk-wine64"
|
||||
. .gitlab-ci/container/install-wine-dxvk.sh
|
||||
mv /dxvk-wine64 "/lava-files/rootfs-${DEBIAN_ARCH}/"
|
||||
fi
|
||||
|
||||
############### Installing
|
||||
. .gitlab-ci/container/install-wine-apitrace.sh
|
||||
mkdir -p "/lava-files/rootfs-${DEBIAN_ARCH}/apitrace-msvc-win64"
|
||||
mv /apitrace-msvc-win64/bin "/lava-files/rootfs-${DEBIAN_ARCH}/apitrace-msvc-win64"
|
||||
rm -rf /apitrace-msvc-win64
|
||||
|
||||
############### Building
|
||||
STRIP_CMD="${GCC_ARCH}-strip"
|
||||
@@ -200,8 +177,6 @@ if [[ ${DEBIAN_ARCH} = "amd64" ]]; then
|
||||
. .gitlab-ci/container/build-crosvm.sh
|
||||
mv /usr/local/bin/crosvm /lava-files/rootfs-${DEBIAN_ARCH}/usr/bin/
|
||||
mv /usr/local/lib/$GCC_ARCH/libvirglrenderer.* /lava-files/rootfs-${DEBIAN_ARCH}/usr/lib/$GCC_ARCH/
|
||||
mkdir -p /lava-files/rootfs-${DEBIAN_ARCH}/usr/local/libexec/
|
||||
mv /usr/local/libexec/virgl* /lava-files/rootfs-${DEBIAN_ARCH}/usr/local/libexec/
|
||||
fi
|
||||
|
||||
############### Build libdrm
|
||||
@@ -240,19 +215,16 @@ set -e
|
||||
|
||||
cp .gitlab-ci/container/create-rootfs.sh /lava-files/rootfs-${DEBIAN_ARCH}/.
|
||||
cp .gitlab-ci/container/debian/llvm-snapshot.gpg.key /lava-files/rootfs-${DEBIAN_ARCH}/.
|
||||
cp .gitlab-ci/container/debian/winehq.gpg.key /lava-files/rootfs-${DEBIAN_ARCH}/.
|
||||
chroot /lava-files/rootfs-${DEBIAN_ARCH} sh /create-rootfs.sh
|
||||
rm /lava-files/rootfs-${DEBIAN_ARCH}/{llvm-snapshot,winehq}.gpg.key
|
||||
rm /lava-files/rootfs-${DEBIAN_ARCH}/llvm-snapshot.gpg.key
|
||||
rm /lava-files/rootfs-${DEBIAN_ARCH}/create-rootfs.sh
|
||||
cp /etc/wgetrc /lava-files/rootfs-${DEBIAN_ARCH}/etc/.
|
||||
|
||||
|
||||
############### Install the built libdrm
|
||||
# Dependencies pulled during the creation of the rootfs may overwrite
|
||||
# the built libdrm. Hence, we add it after the rootfs has been already
|
||||
# created.
|
||||
find /libdrm/ -name lib\*\.so\* \
|
||||
-exec cp -t /lava-files/rootfs-${DEBIAN_ARCH}/usr/lib/$GCC_ARCH/. {} \;
|
||||
find /libdrm/ -name lib\*\.so\* | xargs cp -t /lava-files/rootfs-${DEBIAN_ARCH}/usr/lib/$GCC_ARCH/.
|
||||
mkdir -p /lava-files/rootfs-${DEBIAN_ARCH}/libdrm/
|
||||
cp -Rp /libdrm/share /lava-files/rootfs-${DEBIAN_ARCH}/libdrm/share
|
||||
rm -rf /libdrm
|
||||
@@ -266,13 +238,14 @@ fi
|
||||
|
||||
du -ah /lava-files/rootfs-${DEBIAN_ARCH} | sort -h | tail -100
|
||||
pushd /lava-files/rootfs-${DEBIAN_ARCH}
|
||||
tar --zstd -cf /lava-files/lava-rootfs.tar.zst .
|
||||
tar czf /lava-files/lava-rootfs.tgz .
|
||||
popd
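The only change here is the rootfs compression format: the newer branch packs with zstd, the older one with gzip. For reference, the matching pack/unpack pairs look like this; the directory names are illustrative.

    # Pack and unpack a directory with the two formats used above.
    tar --zstd -cf lava-rootfs.tar.zst -C rootfs .   # newer branch (zstd)
    tar --zstd -xf lava-rootfs.tar.zst -C /target

    tar -czf lava-rootfs.tgz -C rootfs .             # older branch (gzip)
    tar -xzf lava-rootfs.tgz -C /target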
|
||||
|
||||
. .gitlab-ci/container/container_post_build.sh
|
||||
|
||||
############### Upload the files!
|
||||
FILES_TO_UPLOAD="lava-rootfs.tar.zst \
|
||||
ci-fairy minio login --token-file "${CI_JOB_JWT_FILE}"
|
||||
FILES_TO_UPLOAD="lava-rootfs.tgz \
|
||||
$KERNEL_IMAGE_NAME"
|
||||
|
||||
if [[ -n $DEVICE_TREES ]]; then
|
||||
@@ -280,9 +253,9 @@ if [[ -n $DEVICE_TREES ]]; then
|
||||
fi
|
||||
|
||||
for f in $FILES_TO_UPLOAD; do
|
||||
ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" /lava-files/$f \
|
||||
https://${MINIO_PATH}/$f
|
||||
ci-fairy minio cp /lava-files/$f \
|
||||
minio://${MINIO_PATH}/$f
|
||||
done
|
||||
|
||||
touch /lava-files/done
|
||||
ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" /lava-files/done https://${MINIO_PATH}/done
|
||||
ci-fairy minio cp /lava-files/done minio://${MINIO_PATH}/done
|
||||
|
@@ -1,24 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
export WINEPREFIX="$1"
|
||||
export WINEDEBUG="-all"
|
||||
|
||||
# We don't want crash dialogs
|
||||
cat >crashdialog.reg <<EOF
|
||||
Windows Registry Editor Version 5.00
|
||||
|
||||
[HKEY_CURRENT_USER\Software\Wine\WineDbg]
|
||||
"ShowCrashDialog"=dword:00000000
|
||||
|
||||
EOF
|
||||
|
||||
# Set the wine prefix and disable the crash dialog
|
||||
wine64 regedit crashdialog.reg
|
||||
rm crashdialog.reg
|
||||
|
||||
# An immediate wine command may fail with: "${WINEPREFIX}: Not a
# valid wine prefix." That error is spurious: it comes from checking
# for the existence of the system.reg file before Wine has created
# it. Giving the prefix a bit more time to be created solves the
# problem.
|
||||
while ! test -f "${WINEPREFIX}/system.reg"; do sleep 1; done
|
@@ -1,12 +1,6 @@
|
||||
CONFIG_LOCALVERSION_AUTO=y
|
||||
CONFIG_DEBUG_KERNEL=y
|
||||
|
||||
CONFIG_CRYPTO_ZSTD=y
|
||||
CONFIG_ZRAM_MEMORY_TRACKING=y
|
||||
CONFIG_ZRAM_WRITEBACK=y
|
||||
CONFIG_ZRAM=y
|
||||
CONFIG_ZSMALLOC_STAT=y
|
||||
|
||||
CONFIG_PWM=y
|
||||
CONFIG_PM_DEVFREQ=y
|
||||
CONFIG_OF=y
|
||||
|
@@ -54,10 +54,9 @@ VM_SOCKET=crosvm-${THREAD}.sock
|
||||
# was terminated due to timeouts. This "vm stop" may fail if the crosvm died
|
||||
# without cleaning itself up.
|
||||
if [ -e $VM_SOCKET ]; then
|
||||
crosvm stop $VM_SOCKET || true
|
||||
crosvm stop $VM_SOCKET || rm -rf $VM_SOCKET
|
||||
# Wait for socats from that invocation to drain
|
||||
sleep 5
|
||||
rm -rf $VM_SOCKET || true
|
||||
fi
|
||||
|
||||
set_vsock_context || { echo "Could not generate crosvm vsock CID" >&2; exit 1; }
|
||||
@@ -94,12 +93,10 @@ set +e -x
|
||||
NIR_DEBUG="novalidate" \
|
||||
LIBGL_ALWAYS_SOFTWARE=${CROSVM_LIBGL_ALWAYS_SOFTWARE} \
|
||||
GALLIUM_DRIVER=${CROSVM_GALLIUM_DRIVER} \
|
||||
VK_ICD_FILENAMES=$CI_PROJECT_DIR/install/share/vulkan/icd.d/${CROSVM_VK_DRIVER}_icd.x86_64.json \
|
||||
crosvm --no-syslog run \
|
||||
--gpu "${CROSVM_GPU_ARGS}" --gpu-render-server "path=/usr/local/libexec/virgl_render_server" \
|
||||
-m "${CROSVM_MEMORY:-4096}" -c 2 --disable-sandbox \
|
||||
crosvm run \
|
||||
--gpu "${CROSVM_GPU_ARGS}" -m 4096 -c 2 --disable-sandbox \
|
||||
--shared-dir /:my_root:type=fs:writeback=true:timeout=60:cache=always \
|
||||
--host-ip "192.168.30.1" --netmask "255.255.255.0" --mac "AA:BB:CC:00:00:12" \
|
||||
--host_ip "192.168.30.1" --netmask "255.255.255.0" --mac "AA:BB:CC:00:00:12" \
|
||||
-s $VM_SOCKET \
|
||||
--cid ${VSOCK_CID} -p "${CROSVM_KERN_ARGS}" \
|
||||
/lava-files/${KERNEL_IMAGE_NAME:-bzImage} > ${VM_TEMP_DIR}/crosvm 2>&1
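The `--shared-dir /:my_root:type=fs:...` option exports the host's root filesystem into the guest over virtio-fs under the tag `my_root`. Inside the guest it would typically be mounted with something like the line below; this is a hedged illustration, the actual guest init lives elsewhere in the CI.

    # Inside the guest: mount the host root exported by crosvm's --shared-dir.
    mount -t virtiofs my_root /mnt/host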
|
||||
|
@@ -18,7 +18,6 @@ INSTALL=`pwd`/install
|
||||
export LD_LIBRARY_PATH=`pwd`/install/lib/
|
||||
export EGL_PLATFORM=surfaceless
|
||||
export VK_ICD_FILENAMES=`pwd`/install/share/vulkan/icd.d/"$VK_DRIVER"_icd.${VK_CPU:-`uname -m`}.json
|
||||
export OCL_ICD_VENDORS=`pwd`/install/etc/OpenCL/vendors/
|
||||
|
||||
RESULTS=`pwd`/${DEQP_RESULTS_DIR:-results}
|
||||
mkdir -p $RESULTS
|
||||
@@ -86,6 +85,14 @@ if [ -z "$DEQP_SUITE" ]; then
|
||||
sed -ni $CI_NODE_INDEX~$CI_NODE_TOTAL"p" /tmp/case-list.txt
|
||||
fi
|
||||
|
||||
if [ -n "$DEQP_CASELIST_FILTER" ]; then
|
||||
sed -ni "/$DEQP_CASELIST_FILTER/p" /tmp/case-list.txt
|
||||
fi
|
||||
|
||||
if [ -n "$DEQP_CASELIST_INV_FILTER" ]; then
|
||||
sed -ni "/$DEQP_CASELIST_INV_FILTER/!p" /tmp/case-list.txt
|
||||
fi
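Both filters above are plain `sed -n` passes over the generated case list: `/$FILTER/p` keeps matching lines, `/$FILTER/!p` keeps the non-matching ones, and the GNU `first~step` address used elsewhere in these scripts shards the list across parallel jobs. A small self-contained demonstration with made-up case names:

    # Demonstrate the three sed idioms applied to /tmp/case-list.txt above.
    printf '%s\n' dEQP-GLES2.info.version dEQP-GLES31.functional.copy dEQP-VK.api.smoke > case-list.txt

    sed -n '/GLES/p'  case-list.txt   # keep only GLES cases (DEQP_CASELIST_FILTER)
    sed -n '/GLES/!p' case-list.txt   # drop GLES cases (DEQP_CASELIST_INV_FILTER)
    sed -n '1~2p'     case-list.txt   # GNU sed: every 2nd line from line 1 (CI_NODE_INDEX~CI_NODE_TOTAL)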
|
||||
|
||||
if [ ! -s /tmp/case-list.txt ]; then
|
||||
echo "Caselist generation failed"
|
||||
exit 1
|
||||
|
@@ -1,31 +1,27 @@
|
||||
variables:
|
||||
DEBIAN_X86_BUILD_BASE_IMAGE: "debian/x86_build-base"
|
||||
DEBIAN_BASE_TAG: "2023-01-10-robust-wget"
|
||||
DEBIAN_BASE_TAG: "2022-07-01-bb-llvm13"
|
||||
|
||||
DEBIAN_X86_BUILD_IMAGE_PATH: "debian/x86_build"
|
||||
DEBIAN_BUILD_TAG: "2023-01-09-lavacli"
|
||||
DEBIAN_BUILD_TAG: "2022-07-14-directx-headers"
|
||||
|
||||
DEBIAN_X86_BUILD_MINGW_IMAGE_PATH: "debian/x86_build-mingw"
|
||||
DEBIAN_BUILD_MINGW_TAG: "2023-01-03-ci-libva-2.17"
|
||||
DEBIAN_BUILD_MINGW_TAG: "2022-07-14-directx-headers"
|
||||
|
||||
DEBIAN_X86_TEST_BASE_IMAGE: "debian/x86_test-base"
|
||||
|
||||
DEBIAN_X86_TEST_IMAGE_GL_PATH: "debian/x86_test-gl"
|
||||
DEBIAN_X86_TEST_IMAGE_VK_PATH: "debian/x86_test-vk"
|
||||
DEBIAN_X86_TEST_IMAGE_PATH: ${DEBIAN_X86_TEST_IMAGE_GL_PATH}
|
||||
DEBIAN_X86_TEST_IMAGE_PATH: "debian/x86_test-gl"
|
||||
DEBIAN_X86_TEST_GL_TAG: "2022-07-06-virgl-update"
|
||||
DEBIAN_X86_TEST_VK_TAG: "2022-07-18-apitrace-11-1"
|
||||
|
||||
DEBIAN_X86_TEST_GL_TAG: "2023-01-08-weston-xwayland"
|
||||
DEBIAN_X86_TEST_VK_TAG: "2022-12-12-vkd3d-proton-uprev"
|
||||
|
||||
ALPINE_X86_BUILD_TAG: "2023-01-10-robust-wget"
|
||||
FEDORA_X86_BUILD_TAG: "2023-01-10-robust-wget"
|
||||
KERNEL_ROOTFS_TAG: "2023-01-10-virglrenderer"
|
||||
FEDORA_X86_BUILD_TAG: "2022-04-24-spirv-tools-5"
|
||||
KERNEL_ROOTFS_TAG: "2022-07-06-virgl-update"
|
||||
|
||||
WINDOWS_X64_VS_PATH: "windows/x64_vs"
|
||||
WINDOWS_X64_VS_TAG: "2022-10-20-upgrade-zlib"
|
||||
WINDOWS_X64_VS_TAG: "2022-06-15-vs-winsdk"
|
||||
|
||||
WINDOWS_X64_BUILD_PATH: "windows/x64_build"
|
||||
WINDOWS_X64_BUILD_TAG: "2023-01-03-ci-libva-2.17"
|
||||
WINDOWS_X64_BUILD_TAG: "2022-06-15-vs-winsdk"
|
||||
|
||||
WINDOWS_X64_TEST_PATH: "windows/x64_test"
|
||||
WINDOWS_X64_TEST_TAG: "2023-01-03-piglit-waffle"
|
||||
WINDOWS_X64_TEST_TAG: "2022-06-15-vs-winsdk"
|
||||
|
@@ -12,9 +12,9 @@
|
||||
BASE_SYSTEM_MAINLINE_HOST_PATH: "${BASE_SYSTEM_HOST_PREFIX}/${FDO_UPSTREAM_REPO}/${DISTRIBUTION_TAG}/${ARCH}"
|
||||
BASE_SYSTEM_FORK_HOST_PATH: "${BASE_SYSTEM_HOST_PREFIX}/${CI_PROJECT_PATH}/${DISTRIBUTION_TAG}/${ARCH}"
|
||||
# per-job build artifacts
|
||||
BUILD_PATH: "${PIPELINE_ARTIFACTS_BASE}/${CI_PROJECT_NAME}-${ARCH}.tar.zst"
|
||||
BUILD_PATH: "${PIPELINE_ARTIFACTS_BASE}/${CI_PROJECT_NAME}-${ARCH}.tar.gz"
|
||||
JOB_ROOTFS_OVERLAY_PATH: "${JOB_ARTIFACTS_BASE}/job-rootfs-overlay.tar.gz"
|
||||
JOB_RESULTS_PATH: "${JOB_ARTIFACTS_BASE}/results.tar.zst"
|
||||
JOB_RESULTS_PATH: "${JOB_ARTIFACTS_BASE}/results.tar.gz"
|
||||
MINIO_RESULTS_UPLOAD: "${JOB_ARTIFACTS_BASE}"
|
||||
PIGLIT_NO_WINDOW: 1
|
||||
VISIBILITY_GROUP: "Collabora+fdo"
|
||||
@@ -27,12 +27,10 @@
|
||||
- results/
|
||||
exclude:
|
||||
- results/*.shader_cache
|
||||
reports:
|
||||
junit: results/junit.xml
|
||||
tags:
|
||||
- $RUNNER_TAG
|
||||
after_script:
|
||||
- wget -q "https://${JOB_RESULTS_PATH}" -O- | tar --zstd -x
|
||||
- wget -q "https://${JOB_RESULTS_PATH}" -O- | tar -xz
|
||||
|
||||
.lava-test:armhf:
|
||||
variables:
|
||||
|
@@ -21,12 +21,10 @@ cp artifacts/ci-common/intel-gpu-freq.sh results/job-rootfs-overlay/
|
||||
# Prepare env vars for upload.
|
||||
KERNEL_IMAGE_BASE_URL="https://${BASE_SYSTEM_HOST_PATH}" \
|
||||
artifacts/ci-common/generate-env.sh > results/job-rootfs-overlay/set-job-env-vars.sh
|
||||
echo -e "\e[0Ksection_start:$(date +%s):variables[collapsed=true]\r\e[0KVariables passed through:"
|
||||
cat results/job-rootfs-overlay/set-job-env-vars.sh
|
||||
echo -e "\e[0Ksection_end:$(date +%s):variables\r\e[0K"
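The two `echo -e` lines wrap the variable dump in a collapsed GitLab log section. The same escape sequences can be factored into a small helper, sketched below; the function name is an assumption, not an existing CI helper.

    # Hedged helper around GitLab's section_start/section_end log markers.
    gitlab_section() {   # usage: gitlab_section <id> <header> <command...>
        local id=$1 header=$2; shift 2
        echo -e "\e[0Ksection_start:$(date +%s):${id}[collapsed=true]\r\e[0K${header}"
        "$@"
        echo -e "\e[0Ksection_end:$(date +%s):${id}\r\e[0K"
    }

    gitlab_section variables "Variables passed through:" cat results/job-rootfs-overlay/set-job-env-vars.sh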
|
||||
|
||||
tar zcf job-rootfs-overlay.tar.gz -C results/job-rootfs-overlay/ .
|
||||
ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" job-rootfs-overlay.tar.gz "https://${JOB_ROOTFS_OVERLAY_PATH}"
|
||||
ci-fairy minio login --token-file "${CI_JOB_JWT_FILE}"
|
||||
ci-fairy minio cp job-rootfs-overlay.tar.gz "minio://${JOB_ROOTFS_OVERLAY_PATH}"
|
||||
|
||||
touch results/lava.log
|
||||
tail -f results/lava.log &
|
||||
|
@@ -20,11 +20,11 @@ import traceback
|
||||
import urllib.parse
|
||||
import xmlrpc.client
|
||||
from datetime import datetime, timedelta
|
||||
from io import StringIO
|
||||
from os import getenv
|
||||
from typing import Any, Optional
|
||||
|
||||
import lavacli
|
||||
import yaml
|
||||
from lava.exceptions import (
|
||||
MesaCIException,
|
||||
MesaCIKnownIssueException,
|
||||
@@ -32,9 +32,8 @@ from lava.exceptions import (
|
||||
MesaCIRetryError,
|
||||
MesaCITimeoutError,
|
||||
)
|
||||
from lava.utils import CONSOLE_LOG
|
||||
from lava.utils import DEFAULT_GITLAB_SECTION_TIMEOUTS as GL_SECTION_TIMEOUTS
|
||||
from lava.utils import (
|
||||
CONSOLE_LOG,
|
||||
GitlabSection,
|
||||
LogFollower,
|
||||
LogSectionType,
|
||||
@@ -42,7 +41,7 @@ from lava.utils import (
|
||||
hide_sensitive_data,
|
||||
print_log,
|
||||
)
|
||||
from lavacli.utils import flow_yaml as lava_yaml
|
||||
from lavacli.utils import loader
|
||||
|
||||
# Timeout in seconds to decide if the device from the dispatched LAVA job has
|
||||
# hung or not due to the lack of new log output.
|
||||
@@ -62,7 +61,7 @@ NUMBER_OF_RETRIES_TIMEOUT_DETECTION = int(getenv("LAVA_NUMBER_OF_RETRIES_TIMEOUT
|
||||
NUMBER_OF_ATTEMPTS_LAVA_BOOT = int(getenv("LAVA_NUMBER_OF_ATTEMPTS_LAVA_BOOT", 3))
|
||||
|
||||
|
||||
def generate_lava_yaml_payload(args) -> dict[str, Any]:
|
||||
def generate_lava_yaml(args):
|
||||
# General metadata and permissions, plus also inexplicably kernel arguments
|
||||
values = {
|
||||
'job_name': 'mesa: {}'.format(args.pipeline_info),
|
||||
@@ -74,20 +73,11 @@ def generate_lava_yaml_payload(args) -> dict[str, Any]:
|
||||
},
|
||||
"timeouts": {
|
||||
"job": {"minutes": args.job_timeout},
|
||||
"action": {"minutes": 3},
|
||||
"actions": {
|
||||
"depthcharge-retry": {
|
||||
# Could take between 1 and 1.5 min in slower boots
|
||||
"minutes": 2
|
||||
},
|
||||
"depthcharge-start": {
|
||||
# Should take less than 1 min.
|
||||
"minutes": 1,
|
||||
},
|
||||
"depthcharge-action": {
|
||||
# This timeout englobes the entire depthcharge timing,
|
||||
# including retries
|
||||
"minutes": 2 * NUMBER_OF_ATTEMPTS_LAVA_BOOT,
|
||||
},
|
||||
"minutes": 3 * NUMBER_OF_ATTEMPTS_LAVA_BOOT,
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -105,8 +95,8 @@ def generate_lava_yaml_payload(args) -> dict[str, Any]:
|
||||
'url': '{}/{}'.format(args.kernel_url_prefix, args.kernel_image_name),
|
||||
},
|
||||
'nfsrootfs': {
|
||||
'url': '{}/lava-rootfs.tar.zst'.format(args.rootfs_url_prefix),
|
||||
'compression': 'zstd',
|
||||
'url': '{}/lava-rootfs.tgz'.format(args.rootfs_url_prefix),
|
||||
'compression': 'gz',
|
||||
}
|
||||
}
|
||||
if args.kernel_image_type:
|
||||
@@ -156,13 +146,8 @@ def generate_lava_yaml_payload(args) -> dict[str, Any]:
|
||||
# - fetch and unpack per-job environment from lava-submit.sh
|
||||
# - exec .gitlab-ci/common/init-stage2.sh
|
||||
|
||||
with open(args.first_stage_init, "r") as init_sh:
|
||||
run_steps += [
|
||||
x.rstrip() for x in init_sh if not x.startswith("#") and x.rstrip()
|
||||
]
|
||||
run_steps.append(
|
||||
f"wget -S --progress=dot:giga -O- {args.job_rootfs_overlay_url} | tar -xz -C /",
|
||||
)
|
||||
with open(args.first_stage_init, 'r') as init_sh:
|
||||
run_steps += [ x.rstrip() for x in init_sh if not x.startswith('#') and x.rstrip() ]
|
||||
|
||||
if args.jwt_file:
|
||||
with open(args.jwt_file) as jwt_file:
|
||||
@@ -180,7 +165,8 @@ def generate_lava_yaml_payload(args) -> dict[str, Any]:
|
||||
|
||||
run_steps += [
|
||||
'mkdir -p {}'.format(args.ci_project_dir),
|
||||
'wget -S --progress=dot:giga -O- {} | tar --zstd -x -C {}'.format(args.build_url, args.ci_project_dir),
|
||||
'wget -S --progress=dot:giga -O- {} | tar -xz -C {}'.format(args.build_url, args.ci_project_dir),
|
||||
'wget -S --progress=dot:giga -O- {} | tar -xz -C /'.format(args.job_rootfs_overlay_url),
|
||||
|
||||
# Sleep a bit to give time for bash to dump shell xtrace messages into
|
||||
# console which may cause interleaving with LAVA_SIGNAL_STARTTC in some
|
||||
@@ -198,7 +184,7 @@ def generate_lava_yaml_payload(args) -> dict[str, Any]:
|
||||
{ 'test': test },
|
||||
]
|
||||
|
||||
return values
|
||||
return yaml.dump(values, width=10000000)
|
||||
|
||||
|
||||
def setup_lava_proxy():
|
||||
@@ -285,12 +271,8 @@ class LAVAJob:
|
||||
|
||||
def _load_log_from_data(self, data) -> list[str]:
|
||||
lines = []
|
||||
if isinstance(data, xmlrpc.client.Binary):
|
||||
# We are dealing with xmlrpc.client.Binary
|
||||
# Let's extract the data
|
||||
data = data.data
|
||||
# When there is no new log data, the YAML is empty
|
||||
if loaded_lines := lava_yaml.load(data):
|
||||
if loaded_lines := yaml.load(str(data), Loader=loader(False)):
|
||||
lines = loaded_lines
|
||||
self.last_log_line += len(lines)
|
||||
return lines
|
||||
@@ -355,7 +337,7 @@ def find_exception_from_metadata(metadata, job_id):
|
||||
def find_lava_error(job) -> None:
|
||||
# Look for infrastructure errors and retry if we see them.
|
||||
results_yaml = _call_proxy(job.proxy.results.get_testjob_results_yaml, job.job_id)
|
||||
results = lava_yaml.load(results_yaml)
|
||||
results = yaml.load(results_yaml, Loader=loader(False))
|
||||
for res in results:
|
||||
metadata = res["metadata"]
|
||||
find_exception_from_metadata(metadata, job.job_id)
|
||||
@@ -365,17 +347,16 @@ def find_lava_error(job) -> None:
|
||||
job.status = "fail"
|
||||
|
||||
|
||||
def show_job_data(job, colour=f"{CONSOLE_LOG['BOLD']}{CONSOLE_LOG['FG_GREEN']}"):
|
||||
def show_job_data(job):
|
||||
with GitlabSection(
|
||||
"job_data",
|
||||
"LAVA job info",
|
||||
type=LogSectionType.LAVA_POST_PROCESSING,
|
||||
start_collapsed=True,
|
||||
colour=colour,
|
||||
):
|
||||
show = _call_proxy(job.proxy.scheduler.jobs.show, job.job_id)
|
||||
for field, value in show.items():
|
||||
print(f"{field:<15}: {value}")
|
||||
print("{}\t: {}".format(field, value))
|
||||
|
||||
|
||||
def fetch_logs(job, max_idle_time, log_follower) -> None:
|
||||
@@ -451,6 +432,8 @@ def follow_job_execution(job):
|
||||
while not job.is_finished:
|
||||
fetch_logs(job, max_idle_time, lf)
|
||||
|
||||
show_job_data(job)
|
||||
|
||||
# Mesa Developers expect to have a simple pass/fail job result.
|
||||
# If this does not happen, it probably means a LAVA infrastructure error
|
||||
# happened.
|
||||
@@ -469,7 +452,6 @@ def print_job_final_status(job):
|
||||
f"{CONSOLE_LOG['RESET']}"
|
||||
)
|
||||
|
||||
show_job_data(job, colour=f"{CONSOLE_LOG['BOLD']}{color}")
|
||||
|
||||
def retriable_follow_job(proxy, job_definition) -> LAVAJob:
|
||||
retry_count = NUMBER_OF_RETRIES_TIMEOUT_DETECTION
|
||||
@@ -515,16 +497,7 @@ def treat_mesa_job_name(args):
|
||||
def main(args):
|
||||
proxy = setup_lava_proxy()
|
||||
|
||||
# Overwrite the timeout for the testcases with the value offered by the
|
||||
# user. The testcase running time should be at least 4 times greater than
|
||||
# the other sections (boot and setup), so we can safely ignore them.
|
||||
# If LAVA fails to stop the job at this stage, it will fall back to the
|
||||
# script section timeout with a reasonable delay.
|
||||
GL_SECTION_TIMEOUTS[LogSectionType.TEST_CASE] = timedelta(minutes=args.job_timeout)
|
||||
|
||||
job_definition_stream = StringIO()
|
||||
lava_yaml.dump(generate_lava_yaml_payload(args), job_definition_stream)
|
||||
job_definition = job_definition_stream.getvalue()
|
||||
job_definition = generate_lava_yaml(args)
|
||||
|
||||
if args.dump_yaml:
|
||||
with GitlabSection(
|
||||
|
@@ -3,13 +3,9 @@ from .gitlab_section import GitlabSection
|
||||
from .log_follower import (
|
||||
LogFollower,
|
||||
fatal_err,
|
||||
fix_lava_color_log,
|
||||
fix_lava_gitlab_section_log,
|
||||
hide_sensitive_data,
|
||||
print_log,
|
||||
)
|
||||
from .log_section import (
|
||||
DEFAULT_GITLAB_SECTION_TIMEOUTS,
|
||||
FALLBACK_GITLAB_SECTION_TIMEOUT,
|
||||
LogSection,
|
||||
LogSectionType,
|
||||
)
|
||||
from .log_section import LogSection, LogSectionType
|
||||
|
@@ -49,10 +49,6 @@ class LogFollower:
|
||||
section_is_created == section_has_started
|
||||
), "Can't follow logs beginning from uninitialized GitLab sections."
|
||||
|
||||
# Initialize fix_lava_gitlab_section_log generator
|
||||
self.gl_section_fix_gen = fix_lava_gitlab_section_log()
|
||||
next(self.gl_section_fix_gen)
|
||||
|
||||
@property
|
||||
def phase(self) -> LogSectionType:
|
||||
return (
|
||||
@@ -142,7 +138,7 @@ class LogFollower:
|
||||
# job is progressing
|
||||
is_job_healthy = True
|
||||
self.manage_gl_sections(line)
|
||||
if parsed_line := self.parse_lava_line(line):
|
||||
if parsed_line := parse_lava_line(line):
|
||||
self._buffer.append(parsed_line)
|
||||
|
||||
self.log_hints.detect_failure(new_lines)
|
||||
@@ -154,61 +150,55 @@ class LogFollower:
|
||||
self._buffer = []
|
||||
return buffer
|
||||
|
||||
def parse_lava_line(self, line) -> Optional[str]:
|
||||
prefix = ""
|
||||
suffix = ""
|
||||
|
||||
if line["lvl"] in ["results", "feedback", "debug"]:
|
||||
return
|
||||
elif line["lvl"] in ["warning", "error"]:
|
||||
prefix = CONSOLE_LOG["FG_RED"]
|
||||
suffix = CONSOLE_LOG["RESET"]
|
||||
elif line["lvl"] == "input":
|
||||
prefix = "$ "
|
||||
suffix = ""
|
||||
elif line["lvl"] == "target":
|
||||
# gl_section_fix_gen will output the stored line if it can't find a
|
||||
# match for the first split line
|
||||
# So we can recover it and put it back to the buffer
|
||||
if recovered_first_line := self.gl_section_fix_gen.send(line):
|
||||
self._buffer.append(recovered_first_line)
|
||||
def fix_lava_color_log(line):
|
||||
"""This function is a temporary solution for the color escape codes mangling
|
||||
problem. There is some problem in message passing between the LAVA
|
||||
dispatcher and the device under test (DUT). Here \x1b character is missing
|
||||
before `[:digit::digit:?:digit:?m` ANSI TTY color codes, or the more
|
||||
complicated ones with number values for text format before background and
|
||||
foreground colors.
|
||||
When this problem is fixed on the LAVA side, one should remove this function.
|
||||
"""
|
||||
line["msg"] = re.sub(r"(\[(\d+;){0,2}\d{1,3}m)", "\x1b" + r"\1", line["msg"])
|
||||
|
||||
return f'{prefix}{line["msg"]}{suffix}'
|
||||
|
||||
def fix_lava_gitlab_section_log():
|
||||
def fix_lava_gitlab_section_log(line):
|
||||
"""This function is a temporary solution for the Gitlab section markers
|
||||
splitting problem. Gitlab parses the following lines to define a collapsible
|
||||
mangling problem. Gitlab parses the following lines to define a collapsible
|
||||
gitlab section in their log:
|
||||
- \x1b[0Ksection_start:timestamp:section_id[collapsible=true/false]\r\x1b[0Ksection_header
|
||||
- \x1b[0Ksection_end:timestamp:section_id\r\x1b[0K
|
||||
There is some problem in message passing between the LAVA dispatcher and the
|
||||
device under test (DUT), that replaces \r control characters into \n. When
|
||||
this problem is fixed on the LAVA side, one should remove this function.
|
||||
device under test (DUT), that digests \x1b and \r control characters
|
||||
incorrectly. When this problem is fixed on the LAVA side, one should remove
|
||||
this function.
|
||||
"""
|
||||
while True:
|
||||
line = yield False
|
||||
first_line = None
|
||||
split_line_pattern = re.compile(r"\x1b\[0K(section_\w+):(\d+):([^\s\r]+)$")
|
||||
second_line_pattern = re.compile(r"\x1b\[0K([\S ]+)?")
|
||||
if match := re.match(r"\[0K(section_\w+):(\d+):(\S+)\[0K([\S ]+)?", line["msg"]):
|
||||
marker, timestamp, id_collapsible, header = match.groups()
|
||||
# The above regex serves for both section start and end lines.
|
||||
# When the header is None, it means we are dealing with `section_end` line
|
||||
header = header or ""
|
||||
line["msg"] = f"\x1b[0K{marker}:{timestamp}:{id_collapsible}\r\x1b[0K{header}"
|
||||
|
||||
if not re.search(split_line_pattern, line["msg"]):
|
||||
continue
|
||||
|
||||
first_line = line["msg"]
|
||||
# Delete the current line and hold this log line stream to be able to
|
||||
# possibly merge it with the next line.
|
||||
line["msg"] = ""
|
||||
line = yield False
|
||||
def parse_lava_line(line) -> Optional[str]:
|
||||
prefix = ""
|
||||
suffix = ""
|
||||
|
||||
# This code reached when we detect a possible first split line
|
||||
if re.search(second_line_pattern, line["msg"]):
|
||||
assert first_line
|
||||
line["msg"] = f"{first_line}\r{line['msg']}"
|
||||
else:
|
||||
# The current line doesn't match with the previous one, send back the
|
||||
# latter to give the user the chance to recover it.
|
||||
yield first_line
|
||||
if line["lvl"] in ["results", "feedback", "debug"]:
|
||||
return
|
||||
elif line["lvl"] in ["warning", "error"]:
|
||||
prefix = CONSOLE_LOG["FG_RED"]
|
||||
suffix = CONSOLE_LOG["RESET"]
|
||||
elif line["lvl"] == "input":
|
||||
prefix = "$ "
|
||||
suffix = ""
|
||||
elif line["lvl"] == "target":
|
||||
fix_lava_color_log(line)
|
||||
fix_lava_gitlab_section_log(line)
|
||||
|
||||
return f'{prefix}{line["msg"]}{suffix}'
|
||||
|
||||
|
||||
def print_log(msg):
|
||||
@@ -224,5 +214,5 @@ def fatal_err(msg):
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def hide_sensitive_data(yaml_data: str, hide_tag: str ="HIDEME"):
|
||||
def hide_sensitive_data(yaml_data, hide_tag="HIDEME"):
|
||||
return "".join(line for line in yaml_data.splitlines(True) if hide_tag not in line)
|
||||
|
@@ -2,7 +2,6 @@ import re
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
from enum import Enum, auto
|
||||
from os import getenv
|
||||
from typing import Optional, Pattern, Union
|
||||
|
||||
from lava.utils.gitlab_section import GitlabSection
|
||||
@@ -16,34 +15,24 @@ class LogSectionType(Enum):
|
||||
LAVA_POST_PROCESSING = auto()
|
||||
|
||||
|
||||
# Empirically, successful device boot in LAVA time takes less than 3
|
||||
# minutes.
|
||||
# LAVA itself is configured to attempt thrice to boot the device,
|
||||
# summing up to 9 minutes.
|
||||
# It is better to retry the boot than cancel the job and re-submit to avoid
|
||||
# the enqueue delay.
|
||||
LAVA_BOOT_TIMEOUT = int(getenv("LAVA_BOOT_TIMEOUT", 9))
|
||||
|
||||
# Test suite phase is where the initialization happens.
|
||||
LAVA_TEST_SUITE_TIMEOUT = int(getenv("LAVA_TEST_SUITE_TIMEOUT", 5))
|
||||
|
||||
# Test cases may take a long time, this script has no right to interrupt
|
||||
# them. But if the test case takes almost 1h, it will never succeed due to
|
||||
# Gitlab job timeout.
|
||||
LAVA_TEST_CASE_TIMEOUT = int(getenv("JOB_TIMEOUT", 60))
|
||||
|
||||
# LAVA post processing may refer to a test suite teardown, or the
|
||||
# adjustments to start the next test_case
|
||||
LAVA_POST_PROCESSING_TIMEOUT = int(getenv("LAVA_POST_PROCESSING_TIMEOUT", 5))
|
||||
|
||||
FALLBACK_GITLAB_SECTION_TIMEOUT = timedelta(minutes=10)
|
||||
DEFAULT_GITLAB_SECTION_TIMEOUTS = {
|
||||
LogSectionType.LAVA_BOOT: timedelta(minutes=LAVA_BOOT_TIMEOUT),
|
||||
LogSectionType.TEST_SUITE: timedelta(minutes=LAVA_TEST_SUITE_TIMEOUT),
|
||||
LogSectionType.TEST_CASE: timedelta(minutes=LAVA_TEST_CASE_TIMEOUT),
|
||||
LogSectionType.LAVA_POST_PROCESSING: timedelta(
|
||||
minutes=LAVA_POST_PROCESSING_TIMEOUT
|
||||
),
|
||||
# Empirically, successful device boot in LAVA time takes less than 3
|
||||
# minutes.
|
||||
# LAVA itself is configured to attempt thrice to boot the device,
|
||||
# summing up to 9 minutes.
|
||||
# It is better to retry the boot than cancel the job and re-submit to avoid
|
||||
# the enqueue delay.
|
||||
LogSectionType.LAVA_BOOT: timedelta(minutes=9),
|
||||
# Test suite phase is where the initialization happens.
|
||||
LogSectionType.TEST_SUITE: timedelta(minutes=5),
|
||||
# Test cases may take a long time, this script has no right to interrupt
|
||||
# them. But if the test case takes almost 1h, it will never succeed due to
|
||||
# Gitlab job timeout.
|
||||
LogSectionType.TEST_CASE: timedelta(minutes=60),
|
||||
# LAVA post processing may refer to a test suite teardown, or the
|
||||
# adjustments to start the next test_case
|
||||
LogSectionType.LAVA_POST_PROCESSING: timedelta(minutes=5),
|
||||
}
|
||||
|
||||
|
||||
@@ -65,10 +54,9 @@ class LogSection:
|
||||
if match := re.search(self.regex, lava_log_line["msg"]):
|
||||
section_id = self.section_id.format(*match.groups())
|
||||
section_header = self.section_header.format(*match.groups())
|
||||
timeout = DEFAULT_GITLAB_SECTION_TIMEOUTS[self.section_type]
|
||||
return GitlabSection(
|
||||
id=section_id,
|
||||
header=f"{section_header} - Timeout: {timeout}",
|
||||
header=section_header,
|
||||
type=self.section_type,
|
||||
start_collapsed=self.collapsed,
|
||||
)
|
||||
|
@@ -24,7 +24,7 @@ fi
|
||||
# tests in their meson.build with:
|
||||
#
|
||||
# test(...,
|
||||
# should_fail: meson.get_external_property('xfail', '').contains(t),
|
||||
# should_fail: meson.get_cross_property('xfail', '').contains(t),
|
||||
# )
|
||||
#
|
||||
# where t is the name of the test, and the '' is the string to search when
|
||||
@@ -65,10 +65,9 @@ meson _build --native-file=native.file \
|
||||
-D prefix=`pwd`/install \
|
||||
-D libdir=lib \
|
||||
-D buildtype=${BUILDTYPE:-debug} \
|
||||
-D build-tests=true \
|
||||
-D build-tests=false \
|
||||
-D c_args="$(echo -n $C_ARGS)" \
|
||||
-D cpp_args="$(echo -n $CPP_ARGS)" \
|
||||
-D enable-glcpp-tests=false \
|
||||
-D libunwind=${UNWIND} \
|
||||
${DRI_LOADERS} \
|
||||
${GALLIUM_ST} \
|
||||
@@ -79,15 +78,7 @@ meson _build --native-file=native.file \
|
||||
${EXTRA_OPTION}
|
||||
cd _build
|
||||
meson configure
|
||||
if command -V mold &> /dev/null ; then
|
||||
mold --run ninja
|
||||
else
|
||||
ninja
|
||||
fi
|
||||
ninja
|
||||
LC_ALL=C.UTF-8 meson test --num-processes ${FDO_CI_CONCURRENT:-4} --print-errorlogs ${MESON_TEST_ARGS}
|
||||
if command -V mold &> /dev/null ; then
|
||||
mold --run ninja install
|
||||
else
|
||||
ninja install
|
||||
fi
|
||||
ninja install
|
||||
cd ..
|
||||
|
@@ -1,8 +1,6 @@
|
||||
#!/bin/sh
|
||||
|
||||
if [ "x$STRACEDIR" = "x" ]; then
|
||||
STRACEDIR=meson-logs/strace/$(for i in $@; do basename -z -- $i; echo -n _; done)
|
||||
fi
|
||||
STRACEDIR=meson-logs/strace/$(for i in $@; do basename -z -- $i; echo -n _; done)
|
||||
|
||||
mkdir -p $STRACEDIR
|
||||
|
||||
|
@@ -3,41 +3,11 @@
|
||||
set -ex
|
||||
|
||||
INSTALL=$(realpath -s "$PWD"/install)
|
||||
MINIO_ARGS="--token-file ${CI_JOB_JWT_FILE}"
|
||||
MINIO_ARGS="--credentials=/tmp/.minio_credentials"
|
||||
|
||||
RESULTS=$(realpath -s "$PWD"/results)
|
||||
mkdir -p "$RESULTS"
|
||||
|
||||
if [ "$PIGLIT_REPLAY_SUBCOMMAND" = "profile" ]; then
|
||||
# workaround for older Debian Bullseye libyaml 0.2.2
|
||||
sed -i "/^%YAML 1\.2$/d" "$PIGLIT_REPLAY_DESCRIPTION_FILE"
|
||||
|
||||
yq -i -Y '. | del(.traces[][] | select(.label[0,1,2,3,4,5,6,7,8,9] == "no-perf"))' \
|
||||
"$PIGLIT_REPLAY_DESCRIPTION_FILE" # label positions are a bit hack
|
||||
fi
|
||||
|
||||
# WINE
|
||||
case "$PIGLIT_REPLAY_DEVICE_NAME" in
|
||||
vk-*)
|
||||
export WINEPREFIX="/dxvk-wine64"
|
||||
;;
|
||||
*)
|
||||
export WINEPREFIX="/generic-wine64"
|
||||
;;
|
||||
esac
|
||||
|
||||
PATH="/opt/wine-stable/bin/:$PATH" # WineHQ path
|
||||
|
||||
# Avoid asking about Gecko or Mono installation
export WINEDLLOVERRIDES="mscoree=d;mshtml=d" # quoted so the semicolon is not treated as a command separator
|
||||
|
||||
# Set environment for DXVK.
|
||||
export DXVK_LOG_LEVEL="info"
|
||||
export DXVK_LOG="$RESULTS/dxvk"
|
||||
[ -d "$DXVK_LOG" ] || mkdir -pv "$DXVK_LOG"
|
||||
export DXVK_STATE_CACHE=0
|
||||
|
||||
|
||||
# Set up the driver environment.
|
||||
# Modifying LD_LIBRARY_PATH directly here may cause problems when
# using a command wrapper. Hence, we will just set it when running the
|
||||
@@ -67,10 +37,6 @@ quiet() {
|
||||
# Set environment for apitrace executable.
|
||||
export PATH="/apitrace/build:$PATH"
|
||||
|
||||
export PIGLIT_REPLAY_WINE_BINARY=wine64
|
||||
export PIGLIT_REPLAY_WINE_APITRACE_BINARY="/apitrace-msvc-win64/bin/apitrace.exe"
|
||||
export PIGLIT_REPLAY_WINE_D3DRETRACE_BINARY="/apitrace-msvc-win64/bin/d3dretrace.exe"
|
||||
|
||||
# Our rootfs may not have "less", which apitrace uses during
|
||||
# apitrace dump
|
||||
export PAGER=cat
|
||||
@@ -137,8 +103,8 @@ replay_minio_upload_images() {
|
||||
__DESTINATION_FILE_PATH="$__MINIO_TRACES_PREFIX/${line##*-}"
|
||||
fi
|
||||
|
||||
ci-fairy s3cp $MINIO_ARGS "$RESULTS/$__PREFIX/$line" \
|
||||
"https://${__MINIO_PATH}/${__DESTINATION_FILE_PATH}"
|
||||
ci-fairy minio cp $MINIO_ARGS "$RESULTS/$__PREFIX/$line" \
|
||||
"minio://${__MINIO_PATH}/${__DESTINATION_FILE_PATH}"
|
||||
done
|
||||
}
|
||||
|
||||
@@ -173,6 +139,8 @@ if [ "$RUN_CMD_WRAPPER" ]; then
|
||||
RUN_CMD="set +e; $RUN_CMD_WRAPPER "$(/usr/bin/printf "%q" "$RUN_CMD")"; set -e"
|
||||
fi
|
||||
|
||||
ci-fairy minio login $MINIO_ARGS --token-file "${CI_JOB_JWT_FILE}"
|
||||
|
||||
# The replayer doesn't do any size or checksum verification for the traces in
|
||||
# the replayer db, so if we had to restart the system due to intermittent device
|
||||
# errors (or tried to cache replayer-db between runs, which would be nice to
|
||||
@@ -203,7 +171,7 @@ __PREFIX="trace/$PIGLIT_REPLAY_DEVICE_NAME"
|
||||
__MINIO_PATH="$PIGLIT_REPLAY_ARTIFACTS_BASE_URL"
|
||||
__MINIO_TRACES_PREFIX="traces"
|
||||
|
||||
if [ "$PIGLIT_REPLAY_SUBCOMMAND" != "profile" ]; then
|
||||
if [ "x$PIGLIT_REPLAY_SUBCOMMAND" != "xprofile" ]; then
|
||||
quiet replay_minio_upload_images
|
||||
fi
|
||||
|
||||
|
.gitlab-ci/piglit/run_cl.sh (new executable file, 75 lines)
@@ -0,0 +1,75 @@
#!/bin/bash

set -e
set -o xtrace

VERSION=`head -1 install/VERSION`
ROOTDIR=`pwd`

if [ -d results ]; then
cd results && rm -rf ..?* .[!.]* *
fi
cd /piglit

export OCL_ICD_VENDORS=$ROOTDIR/install/etc/OpenCL/vendors/

set +e
unset DISPLAY
export LD_LIBRARY_PATH=$ROOTDIR/install/lib
clinfo

# If the job is parallel at the gitlab job level, will take the corresponding
# fraction of the caselist.
if [ -n "$CI_NODE_INDEX" ]; then

if [ "$PIGLIT_PROFILES" != "${PIGLIT_PROFILES% *}" ]; then
echo "Can't parallelize piglit with multiple profiles"
exit 1
fi
USE_CASELIST=1
fi

if [ -n "$USE_CASELIST" ]; then
./piglit print-cmd $PIGLIT_TESTS $PIGLIT_PROFILES --format "{name}" > /tmp/case-list.txt

sed -ni $CI_NODE_INDEX~$CI_NODE_TOTAL"p" /tmp/case-list.txt

PIGLIT_TESTS="--test-list /tmp/case-list.txt"
fi
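The `sed -n first~step` addressing above is what splits the case list across parallel GitLab jobs; a standalone sketch with made-up values for CI_NODE_INDEX/CI_NODE_TOTAL:

```bash
#!/usr/bin/env bash
# Pretend this job is node 2 of 3 (values are illustrative only).
CI_NODE_INDEX=2 CI_NODE_TOTAL=3
printf 'case-%d\n' $(seq 1 9) > /tmp/case-list.txt
# GNU sed "first~step": print line 2, then every 3rd line after it.
sed -n "${CI_NODE_INDEX}~${CI_NODE_TOTAL}p" /tmp/case-list.txt
# -> case-2, case-5, case-8; the three nodes cover disjoint thirds of the list.
```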
./piglit run -c -j${FDO_CI_CONCURRENT:-4} $PIGLIT_OPTIONS $PIGLIT_TESTS $PIGLIT_PROFILES $ROOTDIR/results
retVal=$?
if [ $retVal -ne 0 ]; then
echo "Found $(cat /tmp/version.txt), expected $VERSION"
fi
set -e

PIGLIT_RESULTS=${PIGLIT_RESULTS:-$PIGLIT_PROFILES}
mkdir -p .gitlab-ci/piglit
./piglit summary console $ROOTDIR/results \
| tee ".gitlab-ci/piglit/$PIGLIT_RESULTS.txt.orig" \
| head -n -1 \
| grep -v ": pass" \
| sed '/^summary:/Q' \
> .gitlab-ci/piglit/$PIGLIT_RESULTS.txt
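The `sed '/^summary:/Q'` stage in the pipeline above relies on GNU sed's `Q` command, which exits before printing the matching line; a small sketch on fabricated summary output:

```sh
#!/bin/sh
# Fabricated piglit console output: two failures followed by the summary block.
printf 'spec@foo@a: fail\nspec@bar@b: crash\nsummary:\n  pass: 100\n' \
    | sed '/^summary:/Q'
# Only the two result lines are printed; the summary block is cut off.
```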
if [ -n "$USE_CASELIST" ]; then
# Just filter the expected results based on the tests that were actually
# executed, and switch to the version with no summary
cat .gitlab-ci/piglit/$PIGLIT_RESULTS.txt.orig | sed '/^summary:/Q' | rev \
| cut -f2- -d: | rev | sed "s/$/:/g" > /tmp/executed.txt
grep -F -f /tmp/executed.txt $ROOTDIR/install/$PIGLIT_RESULTS.txt \
> .gitlab-ci/piglit/$PIGLIT_RESULTS.txt.baseline || true
else
cp $ROOTDIR/install/$PIGLIT_RESULTS.txt .gitlab-ci/piglit/$PIGLIT_RESULTS.txt.baseline
fi

if diff -q .gitlab-ci/piglit/$PIGLIT_RESULTS.txt{.baseline,}; then
exit 0
fi

./piglit summary html --exclude-details=pass $ROOTDIR/results/summary $ROOTDIR/results

echo Unexpected change in results:
diff -u .gitlab-ci/piglit/$PIGLIT_RESULTS.txt{.baseline,}
exit 1
@@ -10,7 +10,7 @@ rm -rf install/bin install/include

# Strip the drivers in the artifacts to cut 80% of the artifacts size.
if [ -n "$CROSS" ]; then
STRIP=$(sed -n -E "s/strip\s*=\s*\[?'(.*)'\]?/\1/p" "$CROSS_FILE")
STRIP=`sed -n -E "s/strip\s*=\s*'(.*)'/\1/p" "$CROSS_FILE"`
if [ -z "$STRIP" ]; then
echo "Failed to find strip command in cross file"
exit 1
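Both variants of the `STRIP=` line above pull the strip binary name out of the meson cross file with a sed capture group; a sketch against a minimal, made-up cross file:

```sh
#!/bin/sh
# Hypothetical cross file containing the single entry the regex cares about.
cat > /tmp/demo-cross.txt <<'EOF'
[binaries]
strip = ['aarch64-linux-gnu-strip']
EOF
STRIP=$(sed -n -E "s/strip\s*=\s*\[?'(.*)'\]?/\1/p" /tmp/demo-cross.txt)
echo "$STRIP"   # aarch64-linux-gnu-strip
```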
@@ -52,7 +52,8 @@ cp -Rp .gitlab-ci/b2c artifacts/

if [ -n "$MINIO_ARTIFACT_NAME" ]; then
# Pass needed files to the test stage
MINIO_ARTIFACT_NAME="$MINIO_ARTIFACT_NAME.tar.zst"
zstd artifacts/install.tar -o ${MINIO_ARTIFACT_NAME}
ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" ${MINIO_ARTIFACT_NAME} https://${PIPELINE_ARTIFACTS_BASE}/${MINIO_ARTIFACT_NAME}
MINIO_ARTIFACT_NAME="$MINIO_ARTIFACT_NAME.tar.gz"
gzip -c artifacts/install.tar > ${MINIO_ARTIFACT_NAME}
ci-fairy minio login --token-file "${CI_JOB_JWT_FILE}"
ci-fairy minio cp ${MINIO_ARTIFACT_NAME} minio://${PIPELINE_ARTIFACTS_BASE}/${MINIO_ARTIFACT_NAME}
fi
@@ -1,41 +1,27 @@
#!/usr/bin/env bash
set -e
set -v

echo -e "\e[0Ksection_start:$(date +%s):shader-db-prepare[collapsed=true]\r\e[0KPreparing shader-db"
ARTIFACTSDIR=$(pwd)/shader-db
mkdir -p "$ARTIFACTSDIR"
ARTIFACTSDIR=`pwd`/shader-db
mkdir -p $ARTIFACTSDIR
export DRM_SHIM_DEBUG=true

LIBDIR=$(pwd)/install/lib
LIBDIR=`pwd`/install/lib
export LD_LIBRARY_PATH=$LIBDIR

cd /usr/local/shader-db
echo -e "\e[0Ksection_end:$(date +%s):shader-db-prepare\r\e[0K"

for driver in freedreno intel v3d vc4; do
echo -e "\e[0Ksection_start:$(date +%s):shader-db-${driver}[collapsed=true]\r\e[0KRunning shader-db for $driver"
env LD_PRELOAD="$LIBDIR/lib${driver}_noop_drm_shim.so" \
./run -j"${FDO_CI_CONCURRENT:-4}" ./shaders \
> "$ARTIFACTSDIR/${driver}-shader-db.txt"
echo -e "\e[0Ksection_end:$(date +%s):shader-db-${driver}\r\e[0K"
for driver in freedreno intel v3d; do
echo "Running drm-shim for $driver"
env LD_PRELOAD=$LIBDIR/lib${driver}_noop_drm_shim.so \
./run -j${FDO_CI_CONCURRENT:-4} ./shaders \
> $ARTIFACTSDIR/${driver}-shader-db.txt
done

# Run shader-db over a number of supported chipsets for nouveau
for chipset in 40 a3 c0 e4 f0 134 162; do
echo -e "\e[0Ksection_start:$(date +%s):shader-db-nouveau-${chipset}[collapsed=true]\r\e[0KRunning shader-db for nouveau - ${chipset}"
env LD_PRELOAD="$LIBDIR/libnouveau_noop_drm_shim.so" \
echo "Running drm-shim for nouveau - $chipset"
env LD_PRELOAD=$LIBDIR/libnouveau_noop_drm_shim.so \
NOUVEAU_CHIPSET=${chipset} \
./run -j"${FDO_CI_CONCURRENT:-4}" ./shaders \
> "$ARTIFACTSDIR/nouveau-${chipset}-shader-db.txt"
echo -e "\e[0Ksection_end:$(date +%s):shader-db-nouveau-${chipset}\r\e[0K"
done

# Run shader-db for r300 (RV370 and RV515)
for chipset in 0x5460 0x7140; do
echo -e "\e[0Ksection_start:$(date +%s):shader-db-r300-${chipset}[collapsed=true]\r\e[0KRunning shader-db for r300 - ${chipset}"
env LD_PRELOAD="$LIBDIR/libradeon_noop_drm_shim.so" \
RADEON_GPU_ID=${chipset} \
./run -j"${FDO_CI_CONCURRENT:-4}" -o r300 ./shaders \
> "$ARTIFACTSDIR/r300-${chipset}-shader-db.txt"
echo -e "\e[0Ksection_end:$(date +%s):shader-db-r300-${chipset}\r\e[0K"
./run -j${FDO_CI_CONCURRENT:-4} ./shaders \
> $ARTIFACTSDIR/nouveau-${chipset}-shader-db.txt
done
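The `\e[0Ksection_start`/`section_end` echoes added in the newer version of this script are GitLab's collapsible-log markers; a minimal sketch with an arbitrary section name:

```bash
#!/usr/bin/env bash
# "demo" is an arbitrary section name; timestamps are plain epoch seconds.
echo -e "\e[0Ksection_start:$(date +%s):demo[collapsed=true]\r\e[0KRunning the demo step"
echo "... the actual work would run here ..."
echo -e "\e[0Ksection_end:$(date +%s):demo\r\e[0K"
```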
@@ -1,23 +0,0 @@
#!/usr/bin/env bash

CHECKPATH=".gitlab-ci/container" # TODO: expand to cover whole .gitlab-ci/

is_bash() {
[[ $1 == *.sh ]] && return 0
[[ $1 == */bash-completion/* ]] && return 0
[[ $(file -b --mime-type "$1") == text/x-shellscript ]] && return 0
return 1
}

while IFS= read -r -d $'' file; do
if is_bash "$file" ; then
shellcheck -x -W0 -s bash "$file"
rc=$?
if [ "${rc}" -eq 0 ]
then
continue
else
exit 1
fi
fi
done < <(find $CHECKPATH -type f \! -path "./.git/*" -print0)
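The `read -r -d $'' … < <(find … -print0)` construct in the removed shellcheck helper is the usual NUL-delimited way to walk arbitrary file names; a reduced sketch (the directory and the action are placeholders):

```bash
#!/usr/bin/env bash
# Walk every regular file under a hypothetical directory, NUL-delimited so
# names with spaces or newlines survive intact.
CHECKPATH=${CHECKPATH:-.gitlab-ci}
while IFS= read -r -d '' file; do
    echo "would shellcheck: $file"
done < <(find "$CHECKPATH" -type f -print0)
```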
@@ -1,5 +0,0 @@
#!/usr/bin/env bash
set -e

# Run yamllint against all traces files.
find . -name '*traces*yml' -print0 | xargs -0 yamllint -d "{rules: {line-length: {max: 150}}}"
.gitlab-ci/skqp-runner.sh (new executable file, 153 lines)
@@ -0,0 +1,153 @@
#!/bin/sh
#
# Copyright (C) 2022 Collabora Limited
# Author: Guilherme Gallo <guilherme.gallo@collabora.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the next
# paragraph) shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.


copy_tests_files() (
# Copy either unit test or render test files from a specific driver given by
# GPU VERSION variable.
# If there is no test file at the expected location, this function will
# return error_code 1
SKQP_BACKEND="${1}"
SKQP_FILE_PREFIX="${INSTALL}/${GPU_VERSION}-skqp"

if echo "${SKQP_BACKEND}" | grep -qE 'vk|gl(es)?'
then
SKQP_RENDER_TESTS_FILE="${SKQP_FILE_PREFIX}-${SKQP_BACKEND}_rendertests.txt"
[ -f "${SKQP_RENDER_TESTS_FILE}" ] || return 1
cp "${SKQP_RENDER_TESTS_FILE}" "${SKQP_ASSETS_DIR}"/skqp/rendertests.txt
return 0
fi

# The unittests.txt path is hardcoded inside assets directory,
# that is why it needs to be a special case.
if echo "${SKQP_BACKEND}" | grep -qE "unitTest"
then
SKQP_UNIT_TESTS_FILE="${SKQP_FILE_PREFIX}_unittests.txt"
[ -f "${SKQP_UNIT_TESTS_FILE}" ] || return 1
cp "${SKQP_UNIT_TESTS_FILE}" "${SKQP_ASSETS_DIR}"/skqp/unittests.txt
fi
)

test_vk_backend() {
if echo "${SKQP_BACKENDS}" | grep -qE 'vk'
then
if [ -n "$VK_DRIVER" ]; then
return 0
fi

echo "VK_DRIVER environment variable is missing."
VK_DRIVERS=$(ls "$INSTALL"/share/vulkan/icd.d/ | cut -f 1 -d '_')
if [ -n "${VK_DRIVERS}" ]
then
echo "Please set VK_DRIVER to the correct driver from the list:"
echo "${VK_DRIVERS}"
fi
echo "No Vulkan tests will be executed, but it was requested in SKQP_BACKENDS variable. Exiting."
exit 2
fi

# Vulkan environment is not configured, but it was not requested by the job
return 1
}

setup_backends() {
if test_vk_backend
then
export VK_ICD_FILENAMES="$INSTALL"/share/vulkan/icd.d/"$VK_DRIVER"_icd."${VK_CPU:-$(uname -m)}".json
fi
}

set -ex

# Needed so configuration files can contain paths to files in /install
ln -sf "$CI_PROJECT_DIR"/install /install
INSTALL=${PWD}/install

if [ -z "$GPU_VERSION" ]; then
echo 'GPU_VERSION must be set to something like "llvmpipe" or
"freedreno-a630" (it will serve as a component to find the path for files
residing in src/**/ci/*.txt)'
exit 1
fi

LD_LIBRARY_PATH=$INSTALL:$LD_LIBRARY_PATH
setup_backends

SKQP_ASSETS_DIR=/skqp/assets
SKQP_RESULTS_DIR="${SKQP_RESULTS_DIR:-$PWD/results}"

mkdir -p "${SKQP_ASSETS_DIR}"/skqp

SKQP_EXITCODE=0
for SKQP_BACKEND in ${SKQP_BACKENDS}
do
set -e
if ! copy_tests_files "${SKQP_BACKEND}"
then
echo "No override test file found for ${SKQP_BACKEND}. Using the default one."
fi

set +e
SKQP_BACKEND_RESULTS_DIR="${SKQP_RESULTS_DIR}"/"${SKQP_BACKEND}"
mkdir -p "${SKQP_BACKEND_RESULTS_DIR}"
/skqp/skqp "${SKQP_ASSETS_DIR}" "${SKQP_BACKEND_RESULTS_DIR}" "${SKQP_BACKEND}_"
BACKEND_EXITCODE=$?

if [ ! $BACKEND_EXITCODE -eq 0 ]
then
echo "skqp failed on ${SKQP_BACKEND} tests with ${BACKEND_EXITCODE} exit code."
fi

# Propagate error codes to leverage the final job result
SKQP_EXITCODE=$(( SKQP_EXITCODE | BACKEND_EXITCODE ))
done
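The `SKQP_EXITCODE=$(( SKQP_EXITCODE | BACKEND_EXITCODE ))` line is a small but useful pattern: OR-ing the per-backend statuses keeps the job failing if any backend failed while still letting every backend run. A sketch with made-up exit codes:

```sh
#!/bin/sh
TOTAL=0
for code in 0 1 0; do            # pretend three backends returned these codes
    TOTAL=$(( TOTAL | code ))
done
echo "final exit code: $TOTAL"   # 1 -> the job fails, because one backend failed
exit $TOTAL
```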
set +x

# Unit tests produce empty HTML reports, guide the user to check the TXT file.
if echo "${SKQP_BACKENDS}" | grep -qE "unitTest"
then
# Remove the empty HTML report to avoid confusion
rm -f "${SKQP_RESULTS_DIR}"/unitTest/report.html

echo "See skqp unit test results at:"
echo "https://$CI_PROJECT_ROOT_NAMESPACE.pages.freedesktop.org/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/${SKQP_RESULTS_DIR}/unitTest/unit_tests.txt"
fi

REPORT_FILES=$(mktemp)
find "${SKQP_RESULTS_DIR}"/**/report.html -type f > "${REPORT_FILES}"
while read -r REPORT
do
BACKEND_NAME=$(echo "${REPORT}" | sed 's@.*/\([^/]*\)/report.html@\1@')
echo "See skqp ${BACKEND_NAME} render tests report at:"
echo "https://$CI_PROJECT_ROOT_NAMESPACE.pages.freedesktop.org/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/${REPORT}"
done < "${REPORT_FILES}"

# If there is no report available, tell the user that something is wrong.
if [ ! -s "${REPORT_FILES}" ]
then
echo "No skqp report available. Probably some fatal error has occured during the skqp execution."
fi

exit $SKQP_EXITCODE
@@ -14,20 +14,6 @@
- if: *is-scheduled-pipeline
when: never

# Rule for restricted traces jobs to only run for users with access to those
# traces (both https://gitlab.freedesktop.org/gfx-ci/tracie/traces-db-private
# for trace access, and minio bucket access for viewing result images from CI).
#
# This is a compromise, allowing some marked developers to have their MRs
# blocked on regressions to non-redistributable traces, while not blocking
# merges for other devs who would be unable to debug changes to them.
.restricted-rules:
rules:
# If the triggerer has access to the restricted traces and if it is pre-merge
- if: '($GITLAB_USER_LOGIN !~ "/^(robclark|anholt|flto|cwabbott0|Danil|tomeu|okias|gallo)$/") &&
($GITLAB_USER_LOGIN != "marge-bot" || $CI_COMMIT_BRANCH)'
when: never

# Mesa core source file dependencies that may impact any test job
# ---------------------------------------------------------------
.core-rules:

@@ -128,7 +114,7 @@
stage: software-renderer
rules:
- !reference [.no_scheduled_pipelines-rules, rules]
- changes: &llvmpipe_cl_files
- changes:
- .gitlab-ci.yml
- .gitlab-ci/**/*
- meson.build

@@ -144,21 +130,10 @@
- changes:
*llvmpipe_file_list
when: on_success

.llvmpipe-clover-rules:
rules:
- !reference [.llvmpipe-cl-rules, rules]
- changes:
- changes: &clover_file_list
- src/gallium/frontends/clover/**/*
when: on_success

.llvmpipe-rusticl-rules:
rules:
- !reference [.llvmpipe-cl-rules, rules]
- changes:
- src/gallium/frontends/rusticl/**/*
when: on_success

.collabora-farm-rules:
rules:
- if: '$COLLABORA_FARM == "offline" && $RUNNER_TAG =~ /^mesa-ci-x86-64-lava-/'

@@ -169,26 +144,6 @@
- if: '$IGALIA_FARM == "offline"'
when: never

.anholt-farm-rules:
rules:
- if: '$ANHOLT_FARM == "offline"'
when: never

.valve-farm-rules:
rules:
- if: '$VALVE_FARM == "offline"'
when: never
# The Valve CI Farm uses allow lists for projects/users to prevent abuse,
# so only enable automatic testing when running in the context of Mesa to
# prevent failures in forks.
- if: '$CI_PROJECT_NAMESPACE != "mesa"'
when: never

.austriancoder-farm-rules:
rules:
- if: '$AUSTRIANCODER_FARM == "offline"'
when: never

# Skips freedreno jobs if either of the farms we use are offline.
.freedreno-farm-rules:
rules:

@@ -199,7 +154,6 @@
# Rules for changes that impact either freedreno or turnip.
.freedreno-common-rules:
rules:
- !reference [.no_scheduled_pipelines-rules, rules]
- !reference [.freedreno-farm-rules, rules]
- changes: &freedreno_core_file_list
- src/freedreno/ci/**/*

@@ -214,8 +168,8 @@
.freedreno-rules:
stage: freedreno
rules:
- !reference [.freedreno-common-rules, rules]
- !reference [.gl-rules, rules]
- !reference [.freedreno-common-rules, rules]
- changes: &freedreno_gl_file_list
- src/freedreno/ir2/**/*
- src/gallium/drivers/freedreno/**/*

@@ -225,8 +179,8 @@
.turnip-rules:
stage: freedreno
rules:
- !reference [.freedreno-common-rules, rules]
- !reference [.vulkan-rules, rules]
- !reference [.freedreno-common-rules, rules]
- changes:
- src/freedreno/vulkan/**/*
when: on_success

@@ -240,7 +194,10 @@
.freedreno-rules-restricted:
stage: freedreno
rules:
- !reference [.restricted-rules, rules]
# If the triggerer has access to the restricted traces and if it is pre-merge
- if: '($GITLAB_USER_LOGIN !~ "/^(robclark|anholt|flto|cwabbott0|Danil|tomeu|okias)$/") &&
($GITLAB_USER_LOGIN != "marge-bot" || $CI_COMMIT_BRANCH)'
when: never
- !reference [.freedreno-rules, rules]

# Rules for GL driver performance tracking. We want them to run as code is

@@ -277,7 +234,6 @@
.nouveau-rules:
stage: nouveau
rules:
- !reference [.anholt-farm-rules, rules]
- !reference [.gl-rules, rules]
- changes:
- src/nouveau/**/*
@@ -322,21 +278,7 @@
- src/panfrost/vulkan/*
when: on_success
- changes:
- src/panfrost/compiler/**/*
when: on_success

.broadcom-common-rules:
rules:
- changes:
- src/broadcom/meson.build
- src/broadcom/ci/**/*
- src/broadcom/cle/**/*
- src/broadcom/clif/**/*
- src/broadcom/common/**/*
- src/broadcom/compiler/**/*
- src/broadcom/drm-shim/**/*
- src/broadcom/qpu/**/*
- src/broadcom/simulator/**/*
- src/panfrost/bifrost/**/*
when: on_success

.vc4-rules:

@@ -344,8 +286,8 @@
rules:
- !reference [.igalia-farm-rules, rules]
- !reference [.gl-rules, rules]
- !reference [.broadcom-common-rules, rules]
- changes:
- src/broadcom/**/*
- src/gallium/drivers/vc4/**/*
- src/gallium/winsys/kmsro/**/*
- src/gallium/winsys/vc4/**/*

@@ -356,8 +298,8 @@
rules:
- !reference [.igalia-farm-rules, rules]
- !reference [.gl-rules, rules]
- !reference [.broadcom-common-rules, rules]
- changes:
- src/broadcom/**/*
- src/gallium/drivers/v3d/**/*
- src/gallium/winsys/kmsro/**/*
- src/gallium/winsys/v3d/**/*

@@ -387,24 +329,13 @@
.radv-rules:
stage: amd
rules:
- !reference [.collabora-farm-rules, rules]
- !reference [.vulkan-rules, rules]
- changes: &radv_file_list
- src/amd/**/*
- src/vulkan/**/*
when: on_success

.radv-collabora-rules:
stage: amd
rules:
- !reference [.collabora-farm-rules, rules]
- !reference [.radv-rules, rules]

.radv-valve-rules:
stage: amd
rules:
- !reference [.valve-farm-rules, rules]
- !reference [.radv-rules, rules]

.virgl-rules:
stage: layered-backends
rules:

@@ -432,15 +363,6 @@
*virgl_file_list
when: manual

.venus-rules:
stage: layered-backends
rules:
- !reference [.lavapipe-rules, rules]
- changes: &venus_file_list
- src/virtio/**/*
when: on_success
- when: never

.radeonsi-rules:
stage: amd
rules:

@@ -452,24 +374,11 @@
- src/gallium/winsys/amdgpu/**/*
- src/amd/*
- src/amd/addrlib/**/*
- src/amd/ci/*
- src/amd/common/**/*
- src/amd/llvm/**/*
- src/amd/registers/**/*
when: on_success

.radeonsi+radv-rules:
stage: amd
rules:
- !reference [.collabora-farm-rules, rules]
- !reference [.gl-rules, rules]
- changes:
*radeonsi_file_list
when: on_success
- changes:
*radv_file_list
when: on_success

.radeonsi-vaapi-rules:
stage: amd
rules:

@@ -559,27 +468,20 @@
.zink-lvp-rules:
stage: layered-backends
rules:
- !reference [.lavapipe-rules, rules]
- !reference [.zink-common-rules, rules]
- !reference [.lavapipe-rules, rules]

.zink-anv-rules:
stage: layered-backends
rules:
- !reference [.anv-rules, rules]
- !reference [.zink-common-rules, rules]

.zink-anv-rules-restricted:
stage: layered-backends
rules:
- !reference [.restricted-rules, rules]
- !reference [.anv-rules, rules]
- !reference [.zink-common-rules, rules]

.zink-turnip-rules:
stage: layered-backends
rules:
- !reference [.turnip-rules, rules]
- !reference [.zink-common-rules, rules]
- !reference [.turnip-rules, rules]

# Unfortunately YAML doesn't let us concatenate arrays, so we have to do the
# rules duplication manually

@@ -604,8 +506,6 @@
when: on_success
- changes:
- src/microsoft/**/*
- src/gallium/frontends/va/*
- src/gallium/targets/va/*
when: on_success
- changes:
*radv_file_list

@@ -649,7 +549,6 @@
.etnaviv-rules:
stage: etnaviv
rules:
- !reference [.austriancoder-farm-rules, rules]
- !reference [.gl-rules, rules]
- changes:
- src/etnaviv/**/*

@@ -701,12 +600,3 @@
- changes:
*lavapipe_file_list
when: on_success

# Rules for linters
.lint-rustfmt-rules:
rules:
- !reference [.no_scheduled_pipelines-rules, rules]
- !reference [.core-rules, rules]
- changes:
- src/**/*.rs
when: on_success
@@ -17,18 +17,6 @@
paths:
- results/

rustfmt:
# Cancel job if a newer commit is pushed to the same branch
interruptible: true
stage: lint
extends:
- .use-debian/x86_build
- .lint-rustfmt-rules
variables:
GIT_STRATEGY: fetch
script:
- git ls-files */{lib,app}.rs | xargs rustfmt --check

.test-gl:
extends:
- .test

@@ -51,6 +39,7 @@ rustfmt:
- .use-debian/x86_test-gl
needs:
- debian/x86_test-gl
- debian-clover-testing

.vkd3d-proton-test:
artifacts:

@@ -77,21 +66,21 @@ rustfmt:
.piglit-traces-test:
extends:
- .piglit-test
cache:
key: ${CI_JOB_NAME}
paths:
- replayer-db/
artifacts:
when: on_failure
name: "mesa_${CI_JOB_NAME}"
reports:
junit: results/junit.xml
paths:
- results/
exclude:
- results/*.shader_cache
- results/summary/
- results/*.txt
variables:
PIGLIT_REPLAY_EXTRA_ARGS: --keep-image --db-path ${CI_PROJECT_DIR}/replayer-db/ --minio_bucket=mesa-tracie-public --jwt-file=${CI_JOB_JWT_FILE}
PIGLIT_REPLAY_EXTRA_ARGS: --keep-image --db-path ${CI_PROJECT_DIR}/replayer-db/ --minio_host=minio-packet.freedesktop.org --minio_bucket=mesa-tracie-public --role-session-name=${CI_PROJECT_PATH}:${CI_JOB_ID} --jwt-file=${CI_JOB_JWT_FILE}
script:
- echo -e "\e[0Ksection_start:$(date +%s):variables[collapsed=true]\r\e[0KVariables passed through:"
- install/common/generate-env.sh
- echo -e "\e[0Ksection_end:$(date +%s):variables\r\e[0K"
- install/piglit/piglit-traces.sh

.deqp-test:

@@ -109,6 +98,11 @@ rustfmt:
variables:
DEQP_VER: vk

.skqp-test:
variables:
HWCI_START_XORG: 1
HWCI_TEST_SCRIPT: "/install/skqp-runner.sh"

.fossilize-test:
script:
- ./install/fossilize-runner.sh

@@ -130,9 +124,7 @@ rustfmt:
# improve it even more (see https://docs.mesa3d.org/ci/bare-metal.html for
# setup).
- echo -e "\e[0Ksection_start:$(date +%s):artifacts_download[collapsed=true]\r\e[0KDownloading artifacts from minio"
# Note: Build dir (and thus install) may be dirty due to GIT_STRATEGY
- rm -rf install
- wget ${FDO_HTTP_CACHE_URI:-}https://${PIPELINE_ARTIFACTS_BASE}/${MINIO_ARTIFACT_NAME}.tar.zst -S --progress=dot:giga -O- | tar --zstd -x
- wget ${FDO_HTTP_CACHE_URI:-}https://${PIPELINE_ARTIFACTS_BASE}/${MINIO_ARTIFACT_NAME}.tar.gz -S --progress=dot:giga -O- | tar -xz
- echo -e "\e[0Ksection_end:$(date +%s):artifacts_download\r\e[0K"
artifacts:
when: always
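The artifact download step above streams the tarball straight into tar instead of saving it to disk first; a sketch with a placeholder URL (only the pipeline shape matters here):

```sh
#!/bin/sh
# URL is a placeholder; the real job builds it from PIPELINE_ARTIFACTS_BASE
# and MINIO_ARTIFACT_NAME.
URL="https://example.invalid/artifacts/mesa-demo.tar.zst"
wget "$URL" -S --progress=dot:giga -O- | tar --zstd -x
```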
@@ -149,12 +141,12 @@ rustfmt:
.baremetal-test-armhf:
extends:
- .baremetal-test
- .use-debian/armhf_test
- .use-debian/arm_test
variables:
BM_ROOTFS: /rootfs-armhf
MINIO_ARTIFACT_NAME: mesa-armhf
needs:
- debian/armhf_test
- debian/arm_test
- job: debian-armhf
artifacts: false

@@ -162,12 +154,12 @@ rustfmt:
.baremetal-test-arm64:
extends:
- .baremetal-test
- .use-debian/arm64_test
- .use-debian/arm_test
variables:
BM_ROOTFS: /rootfs-arm64
MINIO_ARTIFACT_NAME: mesa-arm64
needs:
- debian/arm64_test
- debian/arm_test
- job: debian-arm64
artifacts: false

@@ -175,12 +167,12 @@ rustfmt:
.baremetal-arm64-asan-test:
extends:
- .baremetal-test
- .use-debian/arm64_test
- .use-debian/arm_test
variables:
DEQP_RUNNER_OPTIONS: "--env LD_PRELOAD=libasan.so.6:/install/lib/libdlclose-skip.so"
MINIO_ARTIFACT_NAME: mesa-arm64-asan
needs:
- debian/arm64_test
- debian/arm_test
- job: debian-arm64-asan
artifacts: false

@@ -189,6 +181,11 @@ rustfmt:
HWCI_TEST_SCRIPT: "/install/deqp-runner.sh"
FDO_CI_CONCURRENT: 0 # Default to number of CPUs

.baremetal-skqp-test:
variables:
HWCI_START_XORG: 1
HWCI_TEST_SCRIPT: "/install/skqp-runner.sh"

# For Valve's bare-metal testing farm jobs.
.b2c-test:
# It would be nice to use ci-templates within Mesa CI for this job's

@@ -201,14 +198,14 @@ rustfmt:
# like FDO_DISTRIBUTION_TAG for *the* image, there is no way to
# depend on more than one image per job. So, the job container is
# built as part of the CI in the boot2container project.
image: registry.freedesktop.org/mupuf/valve-infra/mesa-trigger:2022-12-08.1
image: registry.freedesktop.org/mupuf/valve-infra/mesa-trigger:2022-03-03.2
timeout: 1h 40m
variables:
# No need by default to pull the whole repo
GIT_STRATEGY: none
# boot2container initrd configuration parameters.
B2C_KERNEL_URL: 'https://gitlab.freedesktop.org/mupuf/valve-infra/-/package_files/519/download' # Linux 6.1
B2C_INITRAMFS_URL: 'https://gitlab.freedesktop.org/mupuf/boot2container/-/releases/v0.9.8/downloads/initramfs.linux_amd64.cpio.xz'
B2C_KERNEL_URL: 'https://gitlab.freedesktop.org/mupuf/valve-infra/-/package_files/144/download' # 5.17.1
B2C_INITRAMFS_URL: 'https://gitlab.freedesktop.org/mupuf/boot2container/-/releases/v0.9.6/downloads/initramfs.linux_amd64.cpio.xz'
B2C_JOB_SUCCESS_REGEX: '\[.*\]: Execution is over, pipeline status: 0\r$'
B2C_JOB_WARN_REGEX: '\*ERROR\* ring .* timeout, but soft recovered'
B2C_LOG_LEVEL: 6

@@ -236,11 +233,9 @@ rustfmt:
GENERATE_ENV_SCRIPT: "${CI_COMMON_SCRIPTS}/generate-env.sh"
B2C_JOB_TEMPLATE: "${CI_B2C_ARTIFACTS}/b2c.yml.jinja2.jinja2"
JOB_FOLDER: "job_folder"

before_script:
# We don't want the tarball unpacking of .test, but will take the JWT bits.
- !reference [default, before_script]

- |
set -x

@@ -320,6 +315,7 @@ rustfmt:
env PYTHONUNBUFFERED=1 executorctl \
run -w b2c.yml.jinja2 -j $(slugify "$CI_JOB_NAME") -s ${JOB_FOLDER}

ls -l
# Anything our job places in results/ will be collected by the
# Gitlab coordinator for status presentation. results/junit.xml
# will be parsed by the UI for more detailed explanations of

@@ -330,7 +326,7 @@ rustfmt:
paths:
- ${JOB_FOLDER}/results
reports:
junit: ${JOB_FOLDER}/results/**/junit.xml
junit: ${JOB_FOLDER}/results/junit.xml

.b2c-test-vk:
extends:
@@ -1,22 +1,15 @@
from contextlib import nullcontext as does_not_raise
from datetime import datetime
from io import StringIO
from itertools import cycle
from typing import Any, Callable, Generator, Iterable, Optional, Tuple, Union
from typing import Callable, Generator, Iterable, Optional, Tuple, Union

import yaml
from freezegun import freeze_time
from lava.utils.log_section import (
DEFAULT_GITLAB_SECTION_TIMEOUTS,
FALLBACK_GITLAB_SECTION_TIMEOUT,
LogSectionType,
)
from lavacli.utils import flow_yaml as lava_yaml


def yaml_dump(data: dict[str, Any]) -> str:
stream = StringIO()
lava_yaml.dump(data, stream)
return stream.getvalue()


def section_timeout(section_type: LogSectionType) -> int:

@@ -53,7 +46,7 @@ def jobs_logs_response(

logs = [timed_msg] if msg is None else msg

return finished, yaml_dump(logs)
return finished, yaml.safe_dump(logs)


def section_aware_message_generator(

@@ -298,9 +298,11 @@ def test_parse_job_result_from_log(message, expectation, mock_proxy):
reason="Slow and sketchy test. Needs a LAVA log raw file at /tmp/log.yaml"
)
def test_full_yaml_log(mock_proxy, frozen_time):
import itertools
import random
from datetime import datetime

from lavacli.utils import flow_yaml as lava_yaml
import yaml

def time_travel_from_log_chunk(data_chunk):
if not data_chunk:

@@ -319,28 +321,26 @@ def test_full_yaml_log(mock_proxy, frozen_time):
# the same of from the job submitter execution
with open("/tmp/log.yaml", "r") as f:
first_log = f.readline()
first_log_time = lava_yaml.load(first_log)[0]["dt"]
first_log_time = yaml.safe_load(first_log)[0]["dt"]
frozen_time.move_to(first_log_time)

def load_lines() -> list:
with open("/tmp/log.yaml", "r") as f:
# data = yaml.safe_load(f)
data = f.readlines()
stream = chain(data)
data = yaml.safe_load(f)
chain = itertools.chain(data)
try:
while True:
data_chunk = [next(stream) for _ in range(random.randint(0, 50))]
serial_message = "".join(data_chunk)
data_chunk = [next(chain) for _ in range(random.randint(0, 50))]
# Suppose that the first message timestamp is the same of
# log fetch RPC call
time_travel_from_log_chunk(data_chunk)
yield False, "[]"
yield False, []
# Travel to the same datetime of the last fetched log line
# in the chunk
time_travel_from_log_chunk(data_chunk)
yield False, serial_message
yield False, data_chunk
except StopIteration:
yield True, serial_message
yield True, data_chunk
return

proxy = mock_proxy()
@@ -8,16 +8,18 @@
from datetime import datetime, timedelta

import pytest
import yaml
from lava.exceptions import MesaCIKnownIssueException, MesaCITimeoutError
from lava.utils import (
GitlabSection,
LogFollower,
LogSectionType,
fix_lava_color_log,
fix_lava_gitlab_section_log,
hide_sensitive_data,
)

from ..lava.helpers import create_lava_yaml_msg, does_not_raise, lava_yaml, yaml_dump
from ..lava.helpers import create_lava_yaml_msg, does_not_raise

GITLAB_SECTION_SCENARIOS = {
"start collapsed": (

@@ -156,49 +158,91 @@ SENSITIVE_DATA_SCENARIOS = {
ids=SENSITIVE_DATA_SCENARIOS.keys(),
)
def test_hide_sensitive_data(input, expectation, tag):
yaml_data = yaml_dump(input)
yaml_data = yaml.safe_dump(input)
yaml_result = hide_sensitive_data(yaml_data, tag)
result = lava_yaml.load(yaml_result)
result = yaml.safe_load(yaml_result)

assert result == expectation


GITLAB_SECTION_SPLIT_SCENARIOS = {
"Split section_start at target level": (
"\x1b[0Ksection_start:1668454947:test_post_process[collapsed=true]\r\x1b[0Kpost-processing test results",
(
"\x1b[0Ksection_start:1668454947:test_post_process[collapsed=true]",
"\x1b[0Kpost-processing test results",
COLOR_MANGLED_SCENARIOS = {
"Mangled error message at target level": (
create_lava_yaml_msg(msg="[0m[0m[31mERROR - dEQP error: ", lvl="target"),
"\x1b[0m\x1b[0m\x1b[31mERROR - dEQP error: ",
),
"Mangled pass message at target level": (
create_lava_yaml_msg(
msg="[0mPass: 26718, ExpectedFail: 95, Skip: 25187, Duration: 8:18, Remaining: 13",
lvl="target",
),
"\x1b[0mPass: 26718, ExpectedFail: 95, Skip: 25187, Duration: 8:18, Remaining: 13",
),
"Split section_end at target level": (
"\x1b[0Ksection_end:1666309222:test_post_process\r\x1b[0K",
("\x1b[0Ksection_end:1666309222:test_post_process", "\x1b[0K"),
"Mangled error message with bold formatting at target level": (
create_lava_yaml_msg(msg="[1;31mReview the image changes...", lvl="target"),
"\x1b[1;31mReview the image changes...",
),
"Second line is not split from the first": (
("\x1b[0Ksection_end:1666309222:test_post_process", "Any message"),
("\x1b[0Ksection_end:1666309222:test_post_process", "Any message"),
"Mangled error message with high intensity background at target level": (
create_lava_yaml_msg(msg="[100mReview the image changes...", lvl="target"),
"\x1b[100mReview the image changes...",
),
"Mangled error message with underline+bg color+fg color at target level": (
create_lava_yaml_msg(msg="[4;41;97mReview the image changes...", lvl="target"),
"\x1b[4;41;97mReview the image changes...",
),
"Bad input for color code.": (
create_lava_yaml_msg(
msg="[4;97 This message is missing the `m`.", lvl="target"
),
"[4;97 This message is missing the `m`.",
),
}


@pytest.mark.parametrize(
"expected_message, messages",
GITLAB_SECTION_SPLIT_SCENARIOS.values(),
ids=GITLAB_SECTION_SPLIT_SCENARIOS.keys(),
"message, fixed_message",
COLOR_MANGLED_SCENARIOS.values(),
ids=COLOR_MANGLED_SCENARIOS.keys(),
)
def test_fix_lava_gitlab_section_log(expected_message, messages):
fixed_messages = []
gen = fix_lava_gitlab_section_log()
next(gen)
def test_fix_lava_color_log(message, fixed_message):
fix_lava_color_log(message)

for message in messages:
lava_log = create_lava_yaml_msg(msg=message, lvl="target")
if recovered_line := gen.send(lava_log):
fixed_messages.append((recovered_line, lava_log["msg"]))
fixed_messages.append(lava_log["msg"])
assert message["msg"] == fixed_message

assert expected_message in fixed_messages

GITLAB_SECTION_MANGLED_SCENARIOS = {
"Mangled section_start at target level": (
create_lava_yaml_msg(
msg="[0Ksection_start:1652658415:deqp[collapsed=false][0Kdeqp-runner",
lvl="target",
),
"\x1b[0Ksection_start:1652658415:deqp[collapsed=false]\r\x1b[0Kdeqp-runner",
),
"Mangled section_start at target level with header with spaces": (
create_lava_yaml_msg(
msg="[0Ksection_start:1652658415:deqp[collapsed=false][0Kdeqp runner stats",
lvl="target",
),
"\x1b[0Ksection_start:1652658415:deqp[collapsed=false]\r\x1b[0Kdeqp runner stats",
),
"Mangled section_end at target level": (
create_lava_yaml_msg(
msg="[0Ksection_end:1652658415:test_setup[0K",
lvl="target",
),
"\x1b[0Ksection_end:1652658415:test_setup\r\x1b[0K",
),
}


@pytest.mark.parametrize(
"message, fixed_message",
GITLAB_SECTION_MANGLED_SCENARIOS.values(),
ids=GITLAB_SECTION_MANGLED_SCENARIOS.keys(),
)
def test_fix_lava_gitlab_section_log(message, fixed_message):
fix_lava_gitlab_section_log(message)

assert message["msg"] == fixed_message


WATCHDOG_SCENARIOS = {
@@ -1,26 +0,0 @@
#!/bin/bash

set -eu

function execute_testsuite {
local RESULTS_FOLDER EXEC_DONE_FILE

RESULTS_FOLDER="results/$1"
EXEC_DONE_FILE="$RESULTS_FOLDER/.done"

if [ ! -f "$EXEC_DONE_FILE" ]; then
DEQP_RESULTS_DIR="$RESULTS_FOLDER" PIGLIT_RESULTS_DIR="$RESULTS_FOLDER" $2
touch "$EXEC_DONE_FILE"
else
echo "--> Skipped, as it already was executed"
fi
}

echo -e "\n# GL CTS testing"
DEQP_VER=gl46 execute_testsuite gl ./install/deqp-runner.sh

echo -e "\n# GLES CTS testing"
DEQP_SUITE=zink-radv execute_testsuite gles ./install/deqp-runner.sh

echo -e "\n# Piglit testing"
execute_testsuite piglit ./install/piglit/piglit-runner.sh
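The removed Valve helper above uses a `.done` marker file so that a rebooted or retried machine skips test suites that already finished; a minimal sketch of the same idea with a placeholder command:

```sh
#!/bin/sh
run_once() {
    marker="results/$1/.done"
    mkdir -p "results/$1"
    if [ ! -f "$marker" ]; then
        sh -c "$2" && touch "$marker"   # the original touches it unconditionally under set -eu
    else
        echo "--> Skipped, as it already was executed"
    fi
}
run_once demo 'echo running the demo suite'
run_once demo 'echo running the demo suite'   # second invocation is skipped
```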
@@ -48,7 +48,7 @@ sleep 1
# when asked to load PE executables.
# TODO: Have boot2container mount this filesystem for all jobs?
mount -t binfmt_misc none /proc/sys/fs/binfmt_misc
echo ':DOSWin:M::MZ::/usr/bin/wine64:' > /proc/sys/fs/binfmt_misc/register
echo ':DOSWin:M::MZ::/usr/bin/wine:' > /proc/sys/fs/binfmt_misc/register

# Set environment for DXVK.
export DXVK_LOG_LEVEL="info"
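For reference, the single line written to `binfmt_misc/register` above packs several colon-separated fields; an annotated copy (no behaviour change, just comments):

```sh
# :name  :type:offset:magic:mask:interpreter  :flags
#  DOSWin  M   (none)  MZ  (none) /usr/bin/wine (none)
# "M" = match by magic bytes; "MZ" is the DOS/PE header, so Windows
# executables get handed to the Wine loader when executed directly.
echo ':DOSWin:M::MZ::/usr/bin/wine:' > /proc/sys/fs/binfmt_misc/register
```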
@@ -68,7 +68,7 @@ if [ ${TEST_START_XORG:-0} -eq 1 ]; then
export DISPLAY=:0
fi

wine64 --version
wine --version

SANITY_MESA_VERSION_CMD="$SANITY_MESA_VERSION_CMD | tee /tmp/version.txt | grep \"Mesa $MESA_VERSION\(\s\|$\)\""
@@ -1,3 +1,10 @@
$dxil_dll = cmd.exe /C "C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 -no_logo && where dxil.dll" 2>&1
if ($dxil_dll -notmatch "dxil.dll$") {
Write-Output "Couldn't get path to dxil.dll"
exit 1
}
$env:Path = "$(Split-Path $dxil_dll);$env:Path"

# VK_ICD_FILENAMES environment variable is not used when running with
# elevated privileges. Add a key to the registry instead.
$hkey_path = "HKLM:\SOFTWARE\Khronos\Vulkan\Drivers\"
@@ -38,8 +38,8 @@ Push-Location $builddir

meson `
--default-library=shared `
-Dzlib:default_library=static `
--buildtype=release `
--wrap-mode=nodownload `
-Db_ndebug=false `
-Db_vscrt=mt `
--cmake-prefix-path="$depsInstallPath" `

@@ -49,22 +49,18 @@ meson `
-Dshared-llvm=disabled `
-Dvulkan-drivers="swrast,amd,microsoft-experimental" `
-Dgallium-drivers="swrast,d3d12,zink" `
-Dgallium-va=true `
-Dvideo-codecs="h264dec,h264enc,h265dec,h265enc,vc1dec" `
-Dshared-glapi=enabled `
-Dgles1=enabled `
-Dgles2=enabled `
-Dgallium-opencl=icd `
-Dgallium-rusticl=false `
-Dopencl-spirv=true `
-Dmicrosoft-clc=enabled `
-Dstatic-libclc=all `
-Dspirv-to-dxil=true `
-Dbuild-tests=true `
-Dwerror=true `
-Dwarning_level=2 `
-Dzlib:warning_level=1 `
-Dlibelf:warning_level=1 `
$sourcedir && `
meson install && `
meson install --skip-subprojects && `
meson test --num-processes 32 --print-errorlogs

$buildstatus = $?

@@ -83,5 +79,6 @@ Copy-Item ".\.gitlab-ci\windows\spirv2dxil_check.ps1" -Destination $installdir
Copy-Item ".\.gitlab-ci\windows\spirv2dxil_run.ps1" -Destination $installdir

Copy-Item ".\.gitlab-ci\windows\deqp_runner_run.ps1" -Destination $installdir
Copy-Item ".\src\microsoft\ci\deqp-dozen.toml" -Destination $installdir

Get-ChildItem -Recurse -Filter "ci" | Get-ChildItem -Include "*.txt","*.toml" | Copy-Item -Destination $installdir
Get-ChildItem -Recurse -Filter "ci" | Get-ChildItem -Filter "*.txt" | Copy-Item -Destination $installdir
@@ -8,81 +8,6 @@ $MyPath = $MyInvocation.MyCommand.Path | Split-Path -Parent

Remove-Item -Recurse -Force -ErrorAction SilentlyContinue "deps" | Out-Null

$depsInstallPath="C:\mesa-deps"

Get-Date
Write-Host "Cloning DirectX-Headers"
git clone -b v1.606.4 --depth=1 https://github.com/microsoft/DirectX-Headers deps/DirectX-Headers
if (!$?) {
Write-Host "Failed to clone DirectX-Headers repository"
Exit 1
}
Write-Host "Building DirectX-Headers"
$dxheaders_build = New-Item -ItemType Directory -Path ".\deps\DirectX-Headers" -Name "build"
Push-Location -Path $dxheaders_build.FullName
meson .. --backend=ninja -Dprefix="$depsInstallPath" --buildtype=release -Db_vscrt=mt && `
ninja -j32 install
$buildstatus = $?
Pop-Location
Remove-Item -Recurse -Force -ErrorAction SilentlyContinue -Path $dxheaders_build
if (!$buildstatus) {
Write-Host "Failed to compile DirectX-Headers"
Exit 1
}

Get-Date
Write-Host "Cloning zlib"
git clone -b v1.2.13 --depth=1 https://github.com/madler/zlib deps/zlib
if (!$?) {
Write-Host "Failed to clone zlib repository"
Exit 1
}
Write-Host "Downloading zlib meson build files"
Invoke-WebRequest -Uri "https://wrapdb.mesonbuild.com/v2/zlib_1.2.13-1/get_patch" -OutFile deps/zlib.zip
Expand-Archive -Path deps/zlib.zip -Destination deps/zlib
# Wrap archive puts build files in a version subdir
Move-Item deps/zlib/zlib-1.2.13/* deps/zlib
$zlib_build = New-Item -ItemType Directory -Path ".\deps\zlib" -Name "build"
Push-Location -Path $zlib_build.FullName
meson .. --backend=ninja -Dprefix="$depsInstallPath" --default-library=static --buildtype=release -Db_vscrt=mt && `
ninja -j32 install
$buildstatus = $?
Pop-Location
Remove-Item -Recurse -Force -ErrorAction SilentlyContinue -Path $zlib_build
if (!$buildstatus) {
Write-Host "Failed to compile zlib"
Exit 1
}


Get-Date
Write-Host "Cloning libva"
git clone https://github.com/intel/libva.git deps/libva
if (!$?) {
Write-Host "Failed to clone libva repository"
Exit 1
}

Push-Location -Path ".\deps\libva"
Write-Host "Checking out libva df3c584bb79d1a1e521372d62fa62e8b1c52ce6c"
# libva-win32 is released with libva version 2.17 (see https://github.com/intel/libva/releases/tag/2.17.0)
git checkout 2.17.0
Pop-Location

Write-Host "Building libva"
# libva already has a build dir in their repo, use builddir instead
$libva_build = New-Item -ItemType Directory -Path ".\deps\libva" -Name "builddir"
Push-Location -Path $libva_build.FullName
meson .. -Dprefix="$depsInstallPath"
ninja -j32 install
$buildstatus = $?
Pop-Location
Remove-Item -Recurse -Force -ErrorAction SilentlyContinue -Path $libva_build
if (!$buildstatus) {
Write-Host "Failed to compile libva"
Exit 1
}

Get-Date
Write-Host "Cloning LLVM release/12.x"
git clone -b release/12.x --depth=1 https://github.com/llvm/llvm-project deps/llvm-project

@@ -105,6 +30,8 @@ Push-Location deps/llvm-project/llvm/projects/SPIRV-LLVM-Translator
git checkout 5b641633b3bcc3251a52260eee11db13a79d7258
Pop-Location

$depsInstallPath="C:\mesa-deps"

Get-Date
# slightly convoluted syntax but avoids the CWD being under the PS filesystem meta-path
$llvm_build = New-Item -ItemType Directory -ErrorAction SilentlyContinue -Force -Path ".\deps\llvm-project" -Name "build"
@@ -71,25 +71,3 @@ if (!$?) {
Exit 1
}
Remove-Item C:\vulkan-runtime.exe -Force

Get-Date
Write-Host "Installing graphics tools (DirectX debug layer)"
Set-Service -Name wuauserv -StartupType Manual
if (!$?) {
Write-Host "Failed to enable Windows Update"
Exit 1
}

For ($i = 0; $i -lt 5; $i++) {
Dism /online /quiet /add-capability /capabilityname:Tools.Graphics.DirectX~~~~0.0.1.0
$graphics_tools_installed = $?
if ($graphics_tools_installed) {
Break
}
}

if (!$graphics_tools_installed) {
Write-Host "Failed to install graphics tools"
Get-Content C:\Windows\Logs\DISM\dism.log
Exit 1
}
@@ -1,36 +1,33 @@
Get-Date
Write-Host "Cloning Waffle"
Write-Host "Downloading Freeglut"

$freeglut_zip = 'freeglut-MSVC.zip'
$freeglut_url = "https://www.transmissionzero.co.uk/files/software/development/GLUT/$freeglut_zip"

For ($i = 0; $i -lt 5; $i++) {
Invoke-WebRequest -Uri $freeglut_url -OutFile $freeglut_zip
$freeglut_downloaded = $?
if ($freeglut_downloaded) {
Break
}
}

if (!$freeglut_downloaded) {
Write-Host "Failed to download Freeglut"
Exit 1
}

Get-Date
Write-Host "Installing Freeglut"
Expand-Archive $freeglut_zip -DestinationPath C:\
if (!$?) {
Write-Host "Failed to install Freeglut"
Exit 1
}

$MyPath = $MyInvocation.MyCommand.Path | Split-Path -Parent
. "$MyPath\mesa_vs_init.ps1"

git clone --no-progress --single-branch --no-checkout https://gitlab.freedesktop.org/mesa/waffle.git 'C:\src\waffle'
if (!$?) {
Write-Host "Failed to clone Waffle repository"
Exit 1
}

Push-Location -Path C:\src\waffle
git checkout 950a1f35a718bc2a8e1dda75845e52651bb331a7
Pop-Location

Get-Date
$waffle_build = New-Item -ItemType Directory -Path "C:\src\waffle" -Name "build"
Push-Location -Path $waffle_build.FullName
Write-Host "Compiling Waffle"
meson setup `
--buildtype=release `
--default-library=static `
--prefix="C:\Waffle" && `
ninja -j32 install
$buildstatus = $?
Pop-Location
Remove-Item -Recurse -Path $waffle_build
if (!$buildstatus) {
Write-Host "Failed to compile or install Waffle"
Exit 1
}

Get-Date
Write-Host "Downloading glext.h"
New-Item -ItemType Directory -Path ".\glext" -Name "GL"

@@ -39,31 +36,39 @@ Invoke-WebRequest -Uri 'https://www.khronos.org/registry/OpenGL/api/GL/glext.h'

Get-Date
Write-Host "Cloning Piglit"
git clone --no-progress --single-branch --no-checkout https://gitlab.freedesktop.org/mesa/piglit.git 'C:\piglit'
git clone --no-progress --single-branch --no-checkout https://gitlab.freedesktop.org/mesa/piglit.git 'C:\src\piglit'
if (!$?) {
Write-Host "Failed to clone Piglit repository"
Exit 1
}
Push-Location -Path C:\piglit
git checkout b41accc83689966f91217fc5b57dbe06202b8c8c
Push-Location -Path C:\src\piglit
git checkout f7f2a6c2275cae023a27b6cc81be3dda8c99492d
Pop-Location

Get-Date
$piglit_build = New-Item -ItemType Directory -Path "C:\src\piglit" -Name "build"
Push-Location -Path $piglit_build.FullName
Write-Host "Compiling Piglit"
cmake -S . -B . `
cmake .. `
-GNinja `
-DCMAKE_BUILD_TYPE=Release `
-DPIGLIT_USE_WAFFLE=ON `
-DWaffle_INCLUDE_DIRS=C:\Waffle\include\waffle-1 `
-DWaffle_LDFLAGS=C:\Waffle\lib\libwaffle-1.a `
-DCMAKE_INSTALL_PREFIX="C:\Piglit" `
-DGLUT_INCLUDE_DIR=C:\freeglut\include `
-DGLUT_glut_LIBRARY_RELEASE=C:\freeglut\lib\x64\freeglut.lib `
-DGLEXT_INCLUDE_DIR=.\glext && `
ninja -j32
$buildstatus = $?
ninja -j32 install | Out-Null
$installstatus = $?
Pop-Location
if (!$buildstatus) {
Write-Host "Failed to compile Piglit"
Remove-Item -Recurse -Path $piglit_build
if (!$buildstatus -Or !$installstatus) {
Write-Host "Failed to compile or install Piglit"
Exit 1
}

Copy-Item -Path C:\freeglut\bin\x64\freeglut.dll -Destination C:\Piglit\lib\piglit\bin\freeglut.dll

Get-Date
Write-Host "Cloning spirv-samples"
git clone --no-progress --single-branch --no-checkout https://github.com/dneto0/spirv-samples.git C:\spirv-samples\

@@ -74,7 +79,7 @@ Pop-Location
Get-Date
Write-Host "Cloning Vulkan and GL Conformance Tests"
$deqp_source = "C:\src\VK-GL-CTS\"
git clone --no-progress --single-branch https://github.com/KhronosGroup/VK-GL-CTS.git -b vulkan-cts-1.3.4 $deqp_source
git clone --no-progress --single-branch https://github.com/lfrb/VK-GL-CTS.git -b windows-flush $deqp_source
if (!$?) {
Write-Host "Failed to clone deqp repository"
Exit 1

@@ -99,8 +104,8 @@ cmake -S $($deqp_source) `
ninja -j32
$buildstatus = $?
Pop-Location
if (!$buildstatus) {
Write-Host "Failed to compile deqp"
if (!$buildstatus -Or !$installstatus) {
Write-Host "Failed to compile or install deqp"
Exit 1
}

@@ -110,10 +115,10 @@ Copy-Item -Path "$($deqp_source)\doc\testlog-stylesheet\testlog.xsl" -Destinatio

# Copy Vulkan must-pass list
$deqp_mustpass = New-Item -ItemType Directory -Path $deqp_build -Name "mustpass"
$root_mustpass = Join-Path -Path $deqp_source -ChildPath "external\vulkancts\mustpass\main"
$root_mustpass = Join-Path -Path $deqp_source -ChildPath "external\vulkancts\mustpass\master"
$files = Get-Content "$($root_mustpass)\vk-default.txt"
foreach($file in $files) {
Get-Content "$($root_mustpass)\$($file)" | Add-Content -Path "$($deqp_mustpass)\vk-main.txt"
Get-Content "$($root_mustpass)\$($file)" | Add-Content -Path "$($deqp_mustpass)\vk-master.txt"
}
Remove-Item -Force -Recurse $deqp_source

@@ -130,50 +135,5 @@ Write-Host "Installing deqp-runner"
$env:Path += ";$($env:USERPROFILE)\.cargo\bin"
cargo install --git https://gitlab.freedesktop.org/anholt/deqp-runner.git

Get-Date
Write-Host "Downloading DirectX 12 Agility SDK"
Invoke-WebRequest -Uri https://www.nuget.org/api/v2/package/Microsoft.Direct3D.D3D12/1.706.3-preview -OutFile 'agility.zip'
Expand-Archive -Path 'agility.zip' -DestinationPath 'C:\agility'
Remove-Item 'agility.zip'

$piglit_bin = 'C:\Piglit\bin'
$vk_cts_bin = "$deqp_build\external\vulkancts\modules\vulkan"

# Copy Agility SDK into subfolder of piglit and Vulkan CTS
$agility_dest = New-Item -ItemType Directory -Path $piglit_bin -Name 'D3D12'
Copy-Item 'C:\agility\build\native\bin\x64\*.dll' -Destination $agility_dest
$agility_dest = New-Item -ItemType Directory -Path $vk_cts_bin -Name 'D3D12'
Copy-Item 'C:\agility\build\native\bin\x64\*.dll' -Destination $agility_dest
Remove-Item -Recurse 'C:\agility'

Get-Date
Write-Host "Downloading Updated WARP"
Invoke-WebRequest -Uri https://www.nuget.org/api/v2/package/Microsoft.Direct3D.WARP/1.0.2 -OutFile 'warp.zip'
Expand-Archive -Path 'warp.zip' -DestinationPath 'C:\warp'
Remove-Item 'warp.zip'

# Copy WARP next to piglit and Vulkan CTS
Copy-Item 'C:\warp\build\native\amd64\d3d10warp.dll' -Destination $piglit_bin
Copy-Item 'C:\warp\build\native\amd64\d3d10warp.dll' -Destination $vk_cts_bin
Remove-Item -Recurse 'C:\warp'

Get-Date
Write-Host "Downloading DirectXShaderCompiler release"
Invoke-WebRequest -Uri https://github.com/microsoft/DirectXShaderCompiler/releases/download/v1.7.2207/dxc_2022_07_18.zip -OutFile 'DXC.zip'
Expand-Archive -Path 'DXC.zip' -DestinationPath 'C:\DXC'
# No more need to get dxil.dll from the VS install
Copy-Item 'C:\DXC\bin\x64\*.dll' -Destination 'C:\Windows\System32'

Get-Date
Write-Host "Enabling developer mode"
# Create AppModelUnlock if it doesn't exist, required for enabling Developer Mode
$RegistryKeyPath = "HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\AppModelUnlock"
if (-not(Test-Path -Path $RegistryKeyPath)) {
New-Item -Path $RegistryKeyPath -ItemType Directory -Force
}

# Add registry value to enable Developer Mode
New-ItemProperty -Path $RegistryKeyPath -Name AllowDevelopmentWithoutDevLicense -PropertyType DWORD -Value 1 -Force

Get-Date
Write-Host "Complete"
@@ -27,7 +27,6 @@ Start-Process -NoNewWindow -Wait -FilePath C:\vs_buildtools.exe `
"--add", "Microsoft.VisualStudio.Component.VC.ATL", `
"--add", "Microsoft.VisualStudio.Component.VC.ATLMFC", `
"--add", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", `
"--add", "Microsoft.VisualStudio.Component.VC.Llvm.Clang", `
"--add", "Microsoft.VisualStudio.Component.Graphics.Tools", `
"--add", "Microsoft.VisualStudio.Component.Windows10SDK.20348"
@@ -1,13 +1,28 @@
$env:PIGLIT_NO_FAST_SKIP = 1

Copy-Item -Path _install\bin\opengl32.dll -Destination C:\Piglit\bin\opengl32.dll
Copy-Item -Path _install\bin\libgallium_wgl.dll -Destination C:\Piglit\bin\libgallium_wgl.dll
Copy-Item -Path _install\bin\libglapi.dll -Destination C:\Piglit\bin\libglapi.dll
Copy-Item -Path _install\bin\opengl32.dll -Destination C:\Piglit\lib\piglit\bin\opengl32.dll
Copy-Item -Path _install\bin\libgallium_wgl.dll -Destination C:\Piglit\lib\piglit\bin\libgallium_wgl.dll
Copy-Item -Path _install\bin\libglapi.dll -Destination C:\Piglit\lib\piglit\bin\libglapi.dll

deqp-runner suite --output .\logs --suite "_install/$env:PIGLIT_SUITE" `
--skips "_install/$env:PIGLIT_SKIPS" `
--baseline "_install/$env:PIGLIT_BASELINE" `
--flakes "_install/$env:PIGLIT_FLAKES"
if (!$?) {
# Run this using VsDevCmd.bat to ensure DXIL.dll is in %PATH%
cmd.exe /C "C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && py -3 C:\Piglit\bin\piglit.py run `"$env:PIGLIT_PROFILE`" $env:PIGLIT_OPTIONS $env:PIGLIT_TESTS .\results"

py -3 C:\Piglit\bin\piglit.py summary console .\results | Select -SkipLast 1 | Select-String -NotMatch -Pattern ': pass' | Set-Content -Path .\result.txt

$reference = Get-Content ".\_install\$env:PIGLIT_RESULTS.txt"
$result = Get-Content .\result.txt
if (-Not ($reference -And $result)) {
Exit 1
}

$diff = Compare-Object -ReferenceObject $reference -DifferenceObject $result
if (-Not $diff) {
Exit 0
}

py -3 C:\Piglit\bin\piglit.py summary html --exclude-details=pass .\summary .\results

Write-Host "Unexpected change in results:"
Write-Output $diff | Format-Table -Property SideIndicator,InputObject -Wrap

Exit 1
@@ -1,3 +1,11 @@
# Ensure that dxil.dll in on the %PATH%
$dxil_dll = cmd.exe /C "C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 -no_logo && where dxil.dll" 2>&1
if ($dxil_dll -notmatch "dxil.dll$") {
Write-Output "Couldn't get path to dxil.dll"
exit 1
}
$env:Path = "$(Split-Path $dxil_dll);$env:Path"

$exec_mode_to_stage = @{ Fragment = "fragment"; Vertex = "vertex"; GLCompute = "compute" }

$spvasm_files = (Get-ChildItem C:\spirv-samples\spvasm\*.spvasm) | Sort-Object Name
@@ -1,2 +0,0 @@
schema: 'schema.graphql'
documents: 'src/**/*.{graphql,js,ts,jsx,tsx}'
Some files were not shown because too many files have changed in this diff.