Compare commits
main ... mesa-20.2.

228 commits (abbreviated SHA1):
663d464366 f1b6e50108 a2f1b6d268 01285ded24 81a826636f 0996ea1175 76b2f293c6 98fa336d34
c3eaad7e76 74e7c86189 6d31ec1543 cbe737040d 3ec090f1ac 30fe359cbb b1779e92e1 38b65b603d
5fbb4771a7 424203520f 6ca0035ffc 2ca018d0dd 02f652a58b 063bbeddc8 41d6216ddb 22dd8299fe
e41bd211e9 27a7ceefc4 486827826b dd665f2762 6394d94536 2b496c9bd2 39e1613c32 9eab6d9958
5e3732f6ce 6195f7b703 8b1f207720 c8e3af5686 1be49cd0ba 409c87a297 ae25477194 b9b6ccc913
cb115979db 0a67c03128 dc559b6177 85721f885d 992e1d053b abc8c94f38 0aa0b2d495 ce8184e3a3
6667e2de46 da0ed80410 3a09792bfc ae817c0f0f 102fed30b9 fc5ea646ab 068640e23b f6f1995898
efd20aa5b2 66a14f9c41 b617a52308 fe13f9b8f5 64ff6b112e 1a906eea25 3850f6b578 3b68d713f5
04cb446d4a fc3e3d7ba4 c45536f929 ee1d4d5ee3 e98a278dcd 4c6cc7277f 4866958ece 78764a41c2
d2208acffe 0f535785bb 928e13d8fd 7958051f43 937405eda6 3e325d431c 996971c946 dae04016ed
0bc75626a0 d5d434c599 8b9213e0b7 0cf9af563b 99f96537db 9e13702af7 a88482e316 7b91ba4d58
e02d81072c 82973aa14e 8225f619ba b69312343b 61121108ad 8335d497e5 cbad87a206 b89a76b7ce
525a5b763d 3d1a71aa2b b6e4106024 fd1dbd904c 1629fe89a6 7a63155052 22b4120de4 4e57b4680d
83dda7b35f 7518930a99 57d65d2f76 d37550f8e7 43bd915ff5 db2fa8208c fe8d18e752 8f4094bb1c
372f8f5c53 3d0f9e3dc3 64039dffc4 367be430b5 eefc95d601 fb53f1937a 93659e4ed3 a020620c25
948a2fbec5 17837b6637 9a66f08cfe 90ccdf4270 ed64b3d8e9 a024de19ac 643ae5ea24 01c1323180
bbc9b56a67 51c029f77a 9699e42a3c 75462d48ec 42f6206a84 f63e1edefb b445b12c0d 10571677e5
d4056d25d1 bd1d4bba42 33653e6dde c1e2e33752 d27e1776e1 83df231acd 786e6a2af8 dee7acec82
db0a93de12 dae7b5156f c2a441c6a2 831473e56e b3510789c2 9a15390174 83c32b9d90 b4c5c7cbf5
f41a82869f e26c08622b 3e0565c262 e834ac4863 f7a4d89b19 d74b2bf797 99540aa4fa fc698d8ce7
99d5727d2d 748a7e1a44 7c47874d45 9ea9fbf6cb bb2e75ec7d b6a1444ed1 39cec65c22 9b09d4ca95
17f9aa36b6 801e3a72bc f79e81a38b 522e62ac5b 8cdfbd3a07 81b1dcf83a 2891ddb996 a9024558ba
750bb69ba4 43b0e51f43 9aa25a340c cb55aca266 ed256fd804 66fc0ad7d6 9adbc862f7 a19dd637e8
a65c597897 55ef5d5a22 a4dc1baf77 11edf2e357 6d14d91a80 7dc972d4b6 e60a1d5bc0 b454e44579
ee63146f37 6b6a38a8be 2d2ba264e9 d584839b9b 6e5874416d 0ba14f96fc 8acf2493a9 e196f50d76
6b823dfc01 c612c06cf1 16da4a483c 5b25e9de0c c09c88fbac f3c3a1ceff 9f5c75a90f 53f5f43627
7665280b28 a6ccd24636 3f0a10b7da d4d36010a8 1edc9549d2 ee4ccf0031 9c719ad7c9 3bf0368f9e
b0b55fa939 3183610228 6eadb68e98 81df3a4a4c d0b0165808 9a5b5cdf9c 1ed360d24b 761c979ae0
20663891c1 c2f85ade7d cb341c7f86 0b8f4381b1

66  .appveyor/appveyor_msvc.bat  Normal file
@@ -0,0 +1,66 @@
goto %1

:install
rem Check pip
python --version
python -m pip install --upgrade pip
python -m pip --version
if "%buildsystem%" == "scons" (
  rem Install Mako
  python -m pip install Mako==1.1.3
  rem Install pywin32 extensions, needed by SCons
  python -m pip install pypiwin32
  rem Install python wheels, necessary to install SCons via pip
  python -m pip install wheel
  rem Install SCons
  python -m pip install scons==3.1.2
  call scons --version
) else (
  python -m pip install Mako meson
  meson --version

  rem Install pkg-config, which meson requires even on windows
  cinst -y pkgconfiglite
)

rem Install flex/bison
set WINFLEXBISON_ARCHIVE=win_flex_bison-%WINFLEXBISON_VERSION%.zip
if not exist "%WINFLEXBISON_ARCHIVE%" appveyor DownloadFile "https://github.com/lexxmark/winflexbison/releases/download/v%WINFLEXBISON_VERSION%/%WINFLEXBISON_ARCHIVE%"
7z x -y -owinflexbison\ "%WINFLEXBISON_ARCHIVE%" > nul
set Path=%CD%\winflexbison;%Path%
win_flex --version
win_bison --version
rem Download and extract LLVM
if not exist "%LLVM_ARCHIVE%" appveyor DownloadFile "https://people.freedesktop.org/~jrfonseca/llvm/%LLVM_ARCHIVE%"
7z x -y "%LLVM_ARCHIVE%" > nul
if "%buildsystem%" == "scons" (
  mkdir llvm\bin
  set LLVM=%CD%\llvm
) else (
  move llvm subprojects\
  copy .appveyor\llvm-wrap.meson subprojects\llvm\meson.build
)
goto :eof

:build_script
if "%buildsystem%" == "scons" (
  call scons -j%NUMBER_OF_PROCESSORS% MSVC_VERSION=14.2 machine=x86 llvm=1
) else (
  call "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\Common7\Tools\VsDevCmd.bat" -arch=x86
  rem We use default-library as static to affect any wraps (such as expat and zlib)
  rem it would be better if we could set subprojects buildtype independently,
  rem but I haven't written that patch yet :)
  call meson builddir --backend=vs2017 --default-library=static -Dbuild-tests=true -Db_vscrt=mtd --buildtype=release -Dllvm=true -Dgallium-drivers=swrast -Dosmesa=gallium
  pushd builddir
  call msbuild mesa.sln /m
  popd
)
goto :eof

:test_script
if "%buildsystem%" == "scons" (
  call scons -j%NUMBER_OF_PROCESSORS% MSVC_VERSION=14.2 machine=x86 llvm=1 check
) else (
  call meson test -C builddir
)
goto :eof

36  .appveyor/llvm-wrap.meson  Normal file
@@ -0,0 +1,36 @@
# A meson.build file for binary wrapping the LLVM used in the appvyeor CI
project('llvm', ['cpp'])

cpp = meson.get_compiler('cpp')

_deps = []
_search = join_paths(meson.current_source_dir(), 'lib')
foreach d : ['LLVMAnalysis', 'LLVMAsmParser', 'LLVMAsmPrinter',
             'LLVMBinaryFormat', 'LLVMBitReader', 'LLVMBitWriter',
             'LLVMCodeGen', 'LLVMCore', 'LLVMCoroutines', 'LLVMCoverage',
             'LLVMDebugInfoCodeView', 'LLVMDebugInfoDWARF',
             'LLVMDebugInfoMSF', 'LLVMDebugInfoPDB', 'LLVMDemangle',
             'LLVMDlltoolDriver', 'LLVMExecutionEngine', 'LLVMGlobalISel',
             'LLVMInstCombine', 'LLVMInstrumentation', 'LLVMInterpreter',
             'LLVMipo', 'LLVMIRReader', 'LLVMLibDriver', 'LLVMLineEditor',
             'LLVMLinker', 'LLVMLTO', 'LLVMMCDisassembler', 'LLVMMCJIT',
             'LLVMMC', 'LLVMMCParser', 'LLVMMIRParser', 'LLVMObjCARCOpts',
             'LLVMObject', 'LLVMObjectYAML', 'LLVMOption', 'LLVMOrcJIT',
             'LLVMPasses', 'LLVMProfileData', 'LLVMRuntimeDyld',
             'LLVMScalarOpts', 'LLVMSelectionDAG', 'LLVMSupport',
             'LLVMSymbolize', 'LLVMTableGen', 'LLVMTarget',
             'LLVMTransformUtils', 'LLVMVectorize', 'LLVMX86AsmParser',
             'LLVMX86AsmPrinter', 'LLVMX86CodeGen', 'LLVMX86Desc',
             'LLVMX86Disassembler', 'LLVMX86Info', 'LLVMX86Utils',
             'LLVMXRay']
  _deps += cpp.find_library(d, dirs : _search)
endforeach

dep_llvm = declare_dependency(
  include_directories : include_directories('include'),
  dependencies : _deps,
  version : '5.0.1',
)

has_rtti = false
irbuilder_h = files('include/llvm/IR/IRBuilder.h')

@@ -1,2 +0,0 @@
# Vendored code
src/amd/vulkan/radix_sort/*

@@ -1,10 +0,0 @@
# The following files are opted into `ninja clang-format` and
# enforcement in the CI.

src/gallium/drivers/i915
src/gallium/drivers/r300/compiler/*
src/gallium/targets/teflon/**/*
src/amd/vulkan/**/*
src/amd/compiler/**/*
src/egl/**/*
src/etnaviv/isa/**/*

@@ -8,7 +8,7 @@ charset = utf-8
insert_final_newline = true
tab_width = 8

[*.{c,h,cpp,hpp,cc,hh,y,yy}]
[*.{c,h,cpp,hpp,cc,hh}]
indent_style = space
indent_size = 3
max_line_length = 78
@@ -16,29 +16,29 @@ max_line_length = 78
[{Makefile*,*.mk}]
indent_style = tab

[*.py]
[{*.py,SCons*}]
indent_style = space
indent_size = 4

[*.pl]
indent_style = space
indent_size = 4

[*.m4]
indent_style = space
indent_size = 2

[*.yml]
indent_style = space
indent_size = 2

[*.rst]
[*.html]
indent_style = space
indent_size = 3
indent_size = 2

[*.patch]
trim_trailing_whitespace = false

[{meson.build,meson.options}]
[{meson.build,meson_options.txt}]
indent_style = space
indent_size = 2

[*.ps1]
indent_style = space
indent_size = 2

[*.rs]
indent_style = space
indent_size = 4

@@ -1,79 +0,0 @@
# List of commits to ignore when using `git blame`.
# Enable with:
#   git config blame.ignoreRevsFile .git-blame-ignore-revs
#
# Per git-blame(1):
#   Ignore revisions listed in the file, one unabbreviated object name
#   per line, in git-blame. Whitespace and comments beginning with # are
#   ignored.
#
# Please keep these in chronological order :)
#
# You can add a new commit with the following command:
#   git log -1 --pretty=format:'%n# %s%n%H%n' >> .git-blame-ignore-revs $COMMIT

# pvr: Fix clang-format error.
0ad5b0a74ef73f5fcbe1406ad9d57fe5dc00a5b1

# panfrost: Fix up some formatting for clang-format
a4705afe63412498d13ded73cba969c66be67907

# asahi: clang-format the world again
26c51bb8d8a33098b1990425a391f56ffba5728c

# perfetto: Add a .clang-format for the directory.
da78d5d729b1800136dd713b68492cb339993f4a

# panfrost/winsys: Clang-format
c90f036516a5376002be6550a917e8bad6a8a3b8

# panfrost: Re-run clang-format
4ccf174009af6732cbffa5d8ebb4687da7517505

# panvk: Clang-format
c7bf3b69ebc8f2252dbf724a4de638e6bb2ac402

# pan/mdg: Fix icky formatting
133af0d6c945d3aaca8989edd15283a2b7dcc6c7

# mapi: clang-format _glapi_add_dispatch()
30332529663268a6406e910848e906e725e6fda7

# radv: reformat according to its .clang-format
8b319c6db8bd93603b18bd783eb75225fcfd51b7

# aco: reformat according to its .clang-format
6b21653ab4d3a67e711fe10e3d403128b6d26eb2

# egl: re-format using clang-format
2f670d89db038d5a29f6b72732fd7ad63dfaf4c6

# panfrost: clang-format the tree
0afd691f29683f6e9dde60f79eca094373521806

# aco: Format.
1e2639026fec7069806449f9ba2a124ce4eb5569

# radv: Format.
59c501ca353f8ec9d2717c98af2bfa1a1dbf4d75

# pvr: clang-format fixes
953c04ebd39c52d457301bdd8ac803949001da2d

# freedreno: Re-indent
2d439343ea1aee146d4ce32800992cd389bd505d

# ir3: Reformat source with clang-format
177138d8cb0b4f6a42ef0a1f8593e14d79f17c54

# ir3: reformat after refactoring in previous commit
8ae5b27ee0331a739d14b42e67586784d6840388

# ir3: don't use deprecated NIR_PASS_V anymore
2fedc82c0cc9d3fb2e54707b57941b79553b640c

# ir3: reformat after previous commit
7210054db8cfb445a8ccdeacfdcfecccf44fa266

# freedreno/a6xx: The great register renaming
7fd99c88b9cd5c0c8c1cb3e92383acac5cb8220b
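
The comments in that file already describe the workflow; a minimal shell sketch of it, with the commit id as a placeholder, is:

# Tell git-blame to skip the formatting-only commits listed in the file
git config blame.ignoreRevsFile .git-blame-ignore-revs
# Append a new formatting commit (COMMIT is a hypothetical full SHA)
COMMIT=0123456789abcdef0123456789abcdef01234567
git log -1 --pretty=format:'%n# %s%n%H%n' "$COMMIT" >> .git-blame-ignore-revs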

7  .gitattributes  vendored
@@ -1,7 +0,0 @@
*.csv eol=crlf
* text=auto
*.jpg binary
*.png binary
*.gif binary
*.ico binary
*.cl gitlab-language=c

60  .github/workflows/macos.yml  vendored
@@ -1,60 +0,0 @@
name: macOS-CI
on: push

permissions:
  contents: read

jobs:
  macOS-CI:
    strategy:
      matrix:
        glx_option: ['dri', 'xlib']
    runs-on: macos-11
    env:
      GALLIUM_DUMP_CPU: true
      MESON_EXEC: /Users/runner/Library/Python/3.11/bin/meson
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Install Dependencies
        run: |
          cat > Brewfile <<EOL
          brew "bison"
          brew "expat"
          brew "gettext"
          brew "libx11"
          brew "libxcb"
          brew "libxdamage"
          brew "libxext"
          brew "molten-vk"
          brew "ninja"
          brew "pkg-config"
          brew "python@3.10"
          EOL

          brew update
          brew bundle --verbose
      - name: Install Mako and meson
        run: pip3 install --user mako meson
      - name: Configure
        run: |
          cat > native_config <<EOL
          [binaries]
          llvm-config = '/usr/local/opt/llvm/bin/llvm-config'
          EOL
          $MESON_EXEC . build --native-file=native_config -Dmoltenvk-dir=$(brew --prefix molten-vk) -Dbuild-tests=true -Dgallium-drivers=swrast,zink -Dglx=${{ matrix.glx_option }}
      - name: Build
        run: $MESON_EXEC compile -C build
      - name: Test
        run: $MESON_EXEC test -C build --print-errorlogs
      - name: Install
        run: $MESON_EXEC install -C build --destdir $PWD/install
      - name: 'Upload Artifact'
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: macos-${{ matrix.glx_option }}-result
          path: |
            build/meson-logs/
            install/
          retention-days: 5

5  .gitignore  vendored
@@ -1,7 +1,4 @@
.cache
.vscode*
*.pyc
*.pyo
*.out
/build
.venv/
build

1737  .gitlab-ci.yml
File diff suppressed because it is too large.

@@ -1,33 +0,0 @@
[flake8]
exclude = .venv*,

# PEP 8 Style Guide limits line length to 79 characters
max-line-length = 159

ignore =
    # continuation line under-indented for hanging indent
    E121
    # continuation line over-indented for hanging indent
    E126,
    # continuation line under-indented for visual indent
    E128,
    # whitespace before ':'
    E203,
    # missing whitespace around arithmetic operator
    E226,
    # missing whitespace after ','
    E231,
    # expected 2 blank lines, found 1
    E302,
    # too many blank lines
    E303,
    # imported but unused
    F401,
    # f-string is missing placeholders
    F541,
    # local variable assigned to but never used
    F841,
    # line break before binary operator
    W503,
    # line break after binary operator
    W504,

@@ -1,115 +0,0 @@
# Note: skips lists for CI are just a list of lines that, when
# non-zero-length and not starting with '#', will regex match to
# delete lines from the test list. Be careful.

# This test checks the driver's reported conformance version against the
# version of the CTS we're running. This check fails every few months
# and everyone has to go and bump the number in every driver.
# Running this check only makes sense while preparing a conformance
# submission, so skip it in the regular CI.
dEQP-VK.api.driver_properties.conformance_version

# Exclude this test which might fail when a new extension is implemented.
dEQP-VK.info.device_extensions

# These are tremendously slow (pushing toward a minute), and aren't
# reliable to be run in parallel with other tests due to CPU-side timing.
dEQP-GLES[0-9]*.functional.flush_finish.*

# piglit: WGL is Windows-only
wgl@.*

# These are sensitive to CPU timing, and would need to be run in isolation
# on the system rather than in parallel with other tests.
glx@glx_arb_sync_control@timing.*

# This test is not built with waffle, while we do build tests with waffle
spec@!opengl 1.1@windowoverlap

# These tests all read from the front buffer after a swap. Given that we
# run piglit tests in parallel in Mesa CI, and don't have a compositor
# running, the frontbuffer reads may end up with undefined results from
# windows overlapping us.
#
# Piglit does mark these tests as not to be run in parallel, but deqp-runner
# doesn't respect that. We need to extend deqp-runner to allow some tests to be
# marked as single-threaded and run after the rayon loop if we want to support
# them.
#
# Note that "glx-" tests don't appear in x11-skips.txt because they can be
# run even if PIGLIT_PLATFORM=gbm (for example)
glx@glx-copy-sub-buffer.*

# A majority of the tests introduced in CTS 1.3.7.0 are experiencing failures and flakes.
# Disable these tests until someone with a more deeper understanding of EGL examines them.
#
# Note: on sc8280xp/a690 I get identical results (same passes and fails)
# between freedreno, zink, and llvmpipe, so I believe this is either a
# deqp bug or egl/wayland bug, rather than driver issue.
#
# With llvmpipe, the failing tests have the error message:
#
#   "Illegal sampler view creation without bind flag"
#
# which might be a hint. (But some passing tests also have the same
# error message.)
#
# more context from David Heidelberg on IRC: the deqp commit where these
# started failing is: https://github.com/KhronosGroup/VK-GL-CTS/commit/79b25659bcbced0cfc2c3fe318951c585f682abe
# prior to that they were skipping.
wayland-dEQP-EGL.functional.color_clears.single_context.gles1.other
wayland-dEQP-EGL.functional.color_clears.single_context.gles2.other
wayland-dEQP-EGL.functional.color_clears.single_context.gles3.other
wayland-dEQP-EGL.functional.color_clears.multi_context.gles1.other
wayland-dEQP-EGL.functional.color_clears.multi_context.gles2.other
wayland-dEQP-EGL.functional.color_clears.multi_context.gles3.other
wayland-dEQP-EGL.functional.color_clears.multi_context.gles1_gles2.other
wayland-dEQP-EGL.functional.color_clears.multi_context.gles1_gles2_gles3.other
wayland-dEQP-EGL.functional.color_clears.multi_thread.gles1.other
wayland-dEQP-EGL.functional.color_clears.multi_thread.gles2.other
wayland-dEQP-EGL.functional.color_clears.multi_thread.gles3.other
wayland-dEQP-EGL.functional.color_clears.multi_thread.gles1_gles2.other
wayland-dEQP-EGL.functional.color_clears.multi_thread.gles1_gles2_gles3.other

# Seems to be the same is as wayland-dEQP-EGL.functional.color_clears.*
wayland-dEQP-EGL.functional.render.single_context.gles2.other
wayland-dEQP-EGL.functional.render.single_context.gles3.other
wayland-dEQP-EGL.functional.render.multi_context.gles2.other
wayland-dEQP-EGL.functional.render.multi_context.gles3.other
wayland-dEQP-EGL.functional.render.multi_context.gles2_gles3.other
wayland-dEQP-EGL.functional.render.multi_thread.gles2.other
wayland-dEQP-EGL.functional.render.multi_thread.gles3.other
wayland-dEQP-EGL.functional.render.multi_thread.gles2_gles3.other

# These test the loader more than the implementation and are broken because the
# Vulkan loader in Debian is too old
dEQP-VK.api.get_device_proc_addr.non_enabled
dEQP-VK.api.version_check.unavailable_entry_points

# These tests are flaking too much recently on almost all drivers, so better skip them until the cause is identified
spec@arb_program_interface_query@arb_program_interface_query-getprogramresourceindex
spec@arb_program_interface_query@arb_program_interface_query-getprogramresourceindex@'vs_input2[1][0]' on GL_PROGRAM_INPUT

# These tests attempt to read from the front buffer after a swap. They are skipped
# on both X11 and gbm, but for different reasons:
#
# On X11: Given that we run piglit tests in parallel in Mesa CI, and don't have a
# compositor running, the frontbuffer reads may end up with undefined results from
# windows overlapping us.
# Piglit does mark these tests as not to be run in parallel, but deqp-runner
# doesn't respect that. We need to extend deqp-runner to allow some tests to be
# marked as single-threaded and run after the rayon loop if we want to support
# them.
# Other front-buffer access tests like fbo-sys-blit, fbo-sys-sub-blit, or
# fcc-front-buffer-distraction don't appear here, because the DRI3 fake-front
# handling should be holding the pixels drawn by the test even if we happen to fail
# GL's window system pixel occlusion test.
# Note that glx skips don't appear here, they're in all-skips.txt (in case someone
# sets PIGLIT_PLATFORM=gbm to mostly use gbm, but still has an X server running).
#
# On gbm: gbm does not support reading the front buffer after a swapbuffers, and
# that's intentional. Don't bother running these tests when PIGLIT_PLATFORM=gbm.
# Note that this doesn't include tests like fbo-sys-blit, which draw/read front
# but don't swap.
spec@!opengl 1.0@gl-1.0-swapbuffers-behavior
spec@!opengl 1.1@read-front
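
As the header comment says, each non-comment line is a regex that deletes matching tests from the run list. A minimal sketch of how such a file is typically fed to deqp-runner (paths are illustrative; the same flags appear in the Android runner script later in this diff) is:

# Drop every test matching a line in all-skips.txt before running the suite
deqp-runner suite \
  --suite deqp-suite.toml \
  --output results/ \
  --skips all-skips.txt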

@@ -1,74 +0,0 @@
#!/usr/bin/env bash
# shellcheck disable=SC2086 # we want word splitting
# shellcheck disable=SC1091 # paths only become valid at runtime

. "${SCRIPTS_DIR}/setup-test-env.sh"

ci_tag_test_time_check "ANDROID_CTS_TAG"

export PATH=/android-tools/build-tools:/android-cts/jdk/bin/:$PATH
export JAVA_HOME=/android-cts/jdk

# Wait for the appops service to show up
while [ "$($ADB shell dumpsys -l | grep appops)" = "" ] ; do sleep 1; done

SKIP_FILE="$INSTALL/${GPU_VERSION}-android-cts-skips.txt"

EXCLUDE_FILTERS=""
if [ -e "$SKIP_FILE" ]; then
  EXCLUDE_FILTERS="$(grep -v -E "(^#|^[[:space:]]*$)" "$SKIP_FILE" | sed -e 's/\s*$//g' -e 's/.*/--exclude-filter "\0" /g')"
fi

INCLUDE_FILE="$INSTALL/${GPU_VERSION}-android-cts-include.txt"

if [ ! -e "$INCLUDE_FILE" ]; then
  set +x
  echo "ERROR: No include file (${GPU_VERSION}-android-cts-include.txt) found."
  echo "This means that we are running the all available CTS modules."
  echo "But the time to run it might be too long, please provide an include file instead."
  exit 1
fi

INCLUDE_FILTERS="$(grep -v -E "(^#|^[[:space:]]*$)" "$INCLUDE_FILE" | sed -e 's/\s*$//g' -e 's/.*/--include-filter "\0" /g')"

if [ -n "${ANDROID_CTS_PREPARE_COMMAND:-}" ]; then
  eval "$ANDROID_CTS_PREPARE_COMMAND"
fi

uncollapsed_section_switch android_cts_test "Android CTS: testing"

set +e
eval "/android-cts/tools/cts-tradefed" run commandAndExit cts-dev \
  $INCLUDE_FILTERS \
  $EXCLUDE_FILTERS

SUMMARY_FILE=/android-cts/results/latest/invocation_summary.txt

# Parse a line like `x/y modules completed` to check that all modules completed
COMPLETED_MODULES=$(sed -n -e '/modules completed/s/^\([0-9]\+\)\/\([0-9]\+\) .*$/\1/p' "$SUMMARY_FILE")
AVAILABLE_MODULES=$(sed -n -e '/modules completed/s/^\([0-9]\+\)\/\([0-9]\+\) .*$/\2/p' "$SUMMARY_FILE")
[ "$COMPLETED_MODULES" = "$AVAILABLE_MODULES" ]
# shellcheck disable=SC2319 # False-positive see https://github.com/koalaman/shellcheck/issues/2937#issuecomment-2660891195
MODULES_FAILED=$?

# Parse a line like `FAILED : x` to check that no tests failed
[ "$(grep "^FAILED" "$SUMMARY_FILE" | tr -d ' ' | cut -d ':' -f 2)" = "0" ]
# shellcheck disable=SC2319 # False-positive see https://github.com/koalaman/shellcheck/issues/2937#issuecomment-2660891195
TESTS_FAILED=$?

[ "$MODULES_FAILED" = "0" ] && [ "$TESTS_FAILED" = "0" ]

# shellcheck disable=SC2034 # EXIT_CODE is used by the script that sources this one
EXIT_CODE=$?
set -e

mkdir "${RESULTS_DIR}/android-cts"
cp -r "/android-cts/results/latest/" "${RESULTS_DIR}/android-cts/results"
cp -r "/android-cts/logs/latest/" "${RESULTS_DIR}/android-cts/logs"

if [ -n "${ARTIFACTS_BASE_URL:-}" ]; then
  echo "============================================"
  echo "Review the Android CTS test results at: ${ARTIFACTS_BASE_URL}/results/android-cts/results/test_result.html"
fi

section_end android_cts_test

@@ -1,110 +0,0 @@
#!/usr/bin/env bash
# shellcheck disable=SC2086 # we want word splitting
# shellcheck disable=SC1091 # paths only become valid at runtime

. "${SCRIPTS_DIR}/setup-test-env.sh"

# deqp

$ADB shell mkdir -p /data/deqp
$ADB push /deqp-gles/modules/egl/deqp-egl-android /data/deqp
$ADB push /deqp-gles/mustpass/egl-main.txt.zst /data/deqp
$ADB push /deqp-gles/modules/gles2/deqp-gles2 /data/deqp
$ADB push /deqp-gles/mustpass/gles2-main.txt.zst /data/deqp
$ADB push /deqp-vk/external/vulkancts/modules/vulkan/* /data/deqp
$ADB push /deqp-vk/mustpass/vk-main.txt.zst /data/deqp
$ADB push /deqp-tools/* /data/deqp
$ADB push /deqp-runner/deqp-runner /data/deqp

$ADB push "$INSTALL/all-skips.txt" /data/deqp
$ADB push "$INSTALL/android-skips.txt" /data/deqp
$ADB push "$INSTALL/angle-skips.txt" /data/deqp
if [ -e "$INSTALL/$GPU_VERSION-flakes.txt" ]; then
  $ADB push "$INSTALL/$GPU_VERSION-flakes.txt" /data/deqp
fi
if [ -e "$INSTALL/$GPU_VERSION-fails.txt" ]; then
  $ADB push "$INSTALL/$GPU_VERSION-fails.txt" /data/deqp
fi
if [ -e "$INSTALL/$GPU_VERSION-skips.txt" ]; then
  $ADB push "$INSTALL/$GPU_VERSION-skips.txt" /data/deqp
fi
$ADB push "$INSTALL/deqp-$DEQP_SUITE.toml" /data/deqp

BASELINE=""
if [ -e "$INSTALL/$GPU_VERSION-fails.txt" ]; then
  BASELINE="--baseline /data/deqp/$GPU_VERSION-fails.txt"
fi

# Default to an empty known flakes file if it doesn't exist.
$ADB shell "touch /data/deqp/$GPU_VERSION-flakes.txt"

DEQP_SKIPS=""
if [ -e "$INSTALL/$GPU_VERSION-skips.txt" ]; then
  DEQP_SKIPS="$DEQP_SKIPS /data/deqp/$GPU_VERSION-skips.txt"
fi

if [ -n "${ANGLE_TAG:-}" ]; then
  DEQP_SKIPS="$DEQP_SKIPS /data/deqp/angle-skips.txt"
fi

AOSP_RESULTS=/data/deqp/results
uncollapsed_section_switch cuttlefish_test "cuttlefish: testing"

# Print the detailed version with the list of backports and local patches
{ set +x; } 2>/dev/null
for api in vk-main vk gl gles; do
  deqp_version_log=/deqp-$api/deqp-$api-version
  if [ -r "$deqp_version_log" ]; then
    cat "$deqp_version_log"
  fi
done
set -x

set +e
$ADB shell "mkdir ${AOSP_RESULTS}; cd ${AOSP_RESULTS}/..; \
  XDG_CACHE_HOME=/data/local/tmp \
  ./deqp-runner \
    suite \
    --suite /data/deqp/deqp-$DEQP_SUITE.toml \
    --output $AOSP_RESULTS \
    --skips /data/deqp/all-skips.txt $DEQP_SKIPS \
    --flakes /data/deqp/$GPU_VERSION-flakes.txt \
    --testlog-to-xml /data/deqp/testlog-to-xml \
    --shader-cache-dir /data/local/tmp \
    --fraction-start ${CI_NODE_INDEX:-1} \
    --fraction $(( CI_NODE_TOTAL * ${DEQP_FRACTION:-1})) \
    --jobs ${FDO_CI_CONCURRENT:-4} \
    $BASELINE \
    ${DEQP_RUNNER_MAX_FAILS:+--max-fails \"$DEQP_RUNNER_MAX_FAILS\"} \
    "

# shellcheck disable=SC2034 # EXIT_CODE is used by the script that sources this one
EXIT_CODE=$?
set -e
section_switch cuttlefish_results "cuttlefish: gathering the results"

$ADB pull "$AOSP_RESULTS/." "$RESULTS_DIR"

# Remove all but the first 50 individual XML files uploaded as artifacts, to
# save fd.o space when you break everything.
find $RESULTS_DIR -name \*.xml | \
  sort -n |
  sed -n '1,+49!p' | \
  xargs rm -f

# If any QPA XMLs are there, then include the XSL/CSS in our artifacts.
find $RESULTS_DIR -name \*.xml \
  -exec cp /deqp-tools/testlog.css /deqp-tools/testlog.xsl "$RESULTS_DIR/" ";" \
  -quit

$ADB shell "cd ${AOSP_RESULTS}/..; \
  ./deqp-runner junit \
    --testsuite dEQP \
    --results $AOSP_RESULTS/failures.csv \
    --output $AOSP_RESULTS/junit.xml \
    --limit 50 \
    --template \"See $ARTIFACTS_BASE_URL/results/{{testcase}}.xml\""

$ADB pull "$AOSP_RESULTS/junit.xml" "$RESULTS_DIR"

section_end cuttlefish_results

@@ -1,150 +0,0 @@
#!/usr/bin/env bash
# shellcheck disable=SC2086 # we want word splitting
# shellcheck disable=SC1091 # paths only become valid at runtime

set -uex

# Set default ADB command if not set already

: "${ADB:=adb}"

$ADB wait-for-device root
sleep 1

# overlay

REMOUNT_PATHS="/vendor"
if [ "$ANDROID_VERSION" -ge 15 ]; then
  REMOUNT_PATHS="$REMOUNT_PATHS /system"
fi

OV_TMPFS="/data/overlay-remount"
$ADB shell mkdir -p "$OV_TMPFS"
$ADB shell mount -t tmpfs none "$OV_TMPFS"

for path in $REMOUNT_PATHS; do
  $ADB shell mkdir -p "${OV_TMPFS}${path}-upper"
  $ADB shell mkdir -p "${OV_TMPFS}${path}-work"

  opts="lowerdir=${path},upperdir=${OV_TMPFS}${path}-upper,workdir=${OV_TMPFS}${path}-work"
  $ADB shell mount -t overlay -o "$opts" none ${path}
done

$ADB shell setenforce 0

$ADB push /android-tools/eglinfo /data
$ADB push /android-tools/vulkaninfo /data

get_gles_runtime_renderer() {
  while [ "$($ADB shell XDG_CACHE_HOME=/data/local/tmp /data/eglinfo | grep 'OpenGL ES profile renderer':)" = "" ] ; do sleep 1; done
  $ADB shell XDG_CACHE_HOME=/data/local/tmp /data/eglinfo | grep 'OpenGL ES profile renderer' | head -1
}

get_gles_runtime_version() {
  while [ "$($ADB shell XDG_CACHE_HOME=/data/local/tmp /data/eglinfo | grep 'OpenGL ES profile version:')" = "" ] ; do sleep 1; done
  $ADB shell XDG_CACHE_HOME=/data/local/tmp /data/eglinfo | grep 'OpenGL ES profile version:' | head -1
}

get_vk_runtime_device_name() {
  $ADB shell XDG_CACHE_HOME=/data/local/tmp /data/vulkaninfo | grep deviceName | head -1
}

get_vk_runtime_version() {
  $ADB shell XDG_CACHE_HOME=/data/local/tmp /data/vulkaninfo | grep driverInfo | head -1
}

# Check what GLES & VK implementation is used before uploading the new libraries
get_gles_runtime_renderer
get_gles_runtime_version
get_vk_runtime_device_name
get_vk_runtime_version

# replace libraries

$ADB shell rm -f /vendor/lib64/libgallium_dri.so*
$ADB shell rm -f /vendor/lib64/egl/libEGL_mesa.so*
$ADB shell rm -f /vendor/lib64/egl/libGLESv1_CM_mesa.so*
$ADB shell rm -f /vendor/lib64/egl/libGLESv2_mesa.so*

$ADB push "$INSTALL/lib/libgallium_dri.so" /vendor/lib64/libgallium_dri.so
$ADB push "$INSTALL/lib/libEGL.so" /vendor/lib64/egl/libEGL_mesa.so
$ADB push "$INSTALL/lib/libGLESv1_CM.so" /vendor/lib64/egl/libGLESv1_CM_mesa.so
$ADB push "$INSTALL/lib/libGLESv2.so" /vendor/lib64/egl/libGLESv2_mesa.so

$ADB shell rm -f /vendor/lib64/hw/vulkan.lvp.so*
$ADB shell rm -f /vendor/lib64/hw/vulkan.virtio.so*
$ADB shell rm -f /vendor/lib64/hw/vulkan.intel.so*

$ADB push "$INSTALL/lib/libvulkan_lvp.so" /vendor/lib64/hw/vulkan.lvp.so
$ADB push "$INSTALL/lib/libvulkan_virtio.so" /vendor/lib64/hw/vulkan.virtio.so
$ADB push "$INSTALL/lib/libvulkan_intel.so" /vendor/lib64/hw/vulkan.intel.so

$ADB shell rm -f /vendor/lib64/egl/libEGL_emulation.so*
$ADB shell rm -f /vendor/lib64/egl/libGLESv1_CM_emulation.so*
$ADB shell rm -f /vendor/lib64/egl/libGLESv2_emulation.so*

if [ -n "${ANGLE_TAG:-}" ]; then
  ANGLE_DEST_PATH=/vendor/lib64/egl
  if [ "$ANDROID_VERSION" -ge 15 ]; then
    ANGLE_DEST_PATH=/system/lib64
  fi

  $ADB shell rm -f "$ANGLE_DEST_PATH/libEGL_angle.so"*
  $ADB shell rm -f "$ANGLE_DEST_PATH/libGLESv1_CM_angle.so"*
  $ADB shell rm -f "$ANGLE_DEST_PATH/libGLESv2_angle.so"*

  $ADB push /angle/libEGL_angle.so "$ANGLE_DEST_PATH/libEGL_angle.so"
  $ADB push /angle/libGLESv1_CM_angle.so "$ANGLE_DEST_PATH/libGLESv1_CM_angle.so"
  $ADB push /angle/libGLESv2_angle.so "$ANGLE_DEST_PATH/libGLESv2_angle.so"
fi

# Check what GLES & VK implementation is used after uploading the new libraries
MESA_BUILD_VERSION=$(cat "$INSTALL/VERSION")
get_gles_runtime_renderer
GLES_RUNTIME_VERSION="$(get_gles_runtime_version)"
get_vk_runtime_device_name
VK_RUNTIME_VERSION="$(get_vk_runtime_version)"

if [ -n "${ANGLE_TAG:-}" ]; then
  # Note: we are injecting the ANGLE libs too, so we need to check if the
  # new ANGLE libs are being used.
  ANGLE_HASH=$(head -c 12 /angle/version)
  if ! printf "%s" "$GLES_RUNTIME_VERSION" | grep --quiet "${ANGLE_HASH}"; then
    echo "Fatal: Android is loading a wrong version of the ANGLE libs: ${ANGLE_HASH}" 1>&2
    exit 1
  fi
fi

if ! printf "%s" "$VK_RUNTIME_VERSION" | grep -Fq -- "${MESA_BUILD_VERSION}"; then
  echo "Fatal: Android is loading a wrong version of the Mesa3D Vulkan libs: ${VK_RUNTIME_VERSION}" 1>&2
  exit 1
fi

get_surfaceflinger_pid() {
  while [ "$($ADB shell dumpsys -l | grep 'SurfaceFlinger$')" = "" ] ; do sleep 1; done
  $ADB shell ps -A | grep -i surfaceflinger | tr -s ' ' | cut -d ' ' -f 2
}

OLD_SF_PID=$(get_surfaceflinger_pid)

# restart Android shell, so that services use the new libraries
$ADB shell stop
$ADB shell start

# Check that SurfaceFlinger restarted, to ensure that new libraries have been picked up
NEW_SF_PID=$(get_surfaceflinger_pid)

if [ "$OLD_SF_PID" == "$NEW_SF_PID" ]; then
  echo "Fatal: check that SurfaceFlinger restarted" 1>&2
  exit 1
fi

if [ -n "${ANDROID_CTS_TAG:-}" ]; then
  # The script sets EXIT_CODE
  . "$(dirname "$0")/android-cts-runner.sh"
else
  # The script sets EXIT_CODE
  . "$(dirname "$0")/android-deqp-runner.sh"
fi

exit $EXIT_CODE

@@ -1,11 +0,0 @@
# Skip these tests when running fractional dEQP batches, as the AHB tests are expected
# to be handled separately in a non-fractional run within the deqp-runner suite.
dEQP-VK.api.external.memory.android_hardware_buffer.*

# Skip all WSI tests: the DEQP_ANDROID_EXE build used can't create native windows, as
# only APKs support window creation on Android.
dEQP-VK.image.swapchain_mutable.*
dEQP-VK.wsi.*

# These tests cause hangs and need to be skipped for now.
dEQP-VK.synchronization*

@@ -1,7 +0,0 @@
# Unlike zink which does support it, ANGLE relies on a waiver to not implement
# capturing individual array elements (see waivers.xml and gles3-waivers.txt in the CTS)
dEQP-GLES3.functional.transform_feedback.array_element.*
dEQP-GLES3.functional.transform_feedback.random.*
dEQP-GLES31.functional.program_interface_query.transform_feedback_varying.*_array_element
dEQP-GLES31.functional.program_interface_query.transform_feedback_varying.type.*.array.*
KHR-GLES31.core.program_interface_query.transform-feedback-types

54  .gitlab-ci/arm.config  Normal file
@@ -0,0 +1,54 @@
CONFIG_LOCALVERSION="ccu"

CONFIG_DEBUG_KERNEL=y

CONFIG_DEVFREQ_GOV_PERFORMANCE=y
CONFIG_DEVFREQ_GOV_POWERSAVE=y
CONFIG_DEVFREQ_GOV_USERSPACE=y
CONFIG_DEVFREQ_GOV_PASSIVE=y
CONFIG_DEVFREQ_GOV_SIMPLE_ONDEMAND=y

CONFIG_DRM=y
CONFIG_DRM_ROCKCHIP=y
CONFIG_DRM_PANFROST=y
CONFIG_DRM_LIMA=y
CONFIG_DRM_PANEL_SIMPLE=y
CONFIG_PWM_CROS_EC=y
CONFIG_BACKLIGHT_PWM=y

CONFIG_ROCKCHIP_CDN_DP=n

CONFIG_SPI_ROCKCHIP=y
CONFIG_PWM_ROCKCHIP=y
CONFIG_PHY_ROCKCHIP_DP=y
CONFIG_DWMAC_ROCKCHIP=y

CONFIG_MFD_RK808=y
CONFIG_REGULATOR_RK808=y
CONFIG_RTC_DRV_RK808=y
CONFIG_COMMON_CLK_RK808=y

CONFIG_REGULATOR_FAN53555=y
CONFIG_REGULATOR=y

CONFIG_REGULATOR_VCTRL=y

CONFIG_KASAN=n
CONFIG_KASAN_INLINE=n
CONFIG_STACKTRACE=n

CONFIG_TMPFS=y

CONFIG_PROVE_LOCKING=n
CONFIG_DEBUG_LOCKDEP=n
CONFIG_SOFTLOCKUP_DETECTOR=n
CONFIG_BOOTPARAM_SOFTLOCKUP_PANIC=n

CONFIG_FW_LOADER_COMPRESS=y

CONFIG_USB_USBNET=y
CONFIG_NETDEVICES=y
CONFIG_USB_NET_DRIVERS=y
CONFIG_USB_RTL8152=y
CONFIG_USB_NET_AX8817X=y
CONFIG_USB_NET_SMSC95XX=y

124  .gitlab-ci/arm64.config  Normal file
@@ -0,0 +1,124 @@
CONFIG_LOCALVERSION="ccu"

CONFIG_DEBUG_KERNEL=y

CONFIG_DEVFREQ_GOV_PERFORMANCE=y
CONFIG_DEVFREQ_GOV_POWERSAVE=y
CONFIG_DEVFREQ_GOV_USERSPACE=y
CONFIG_DEVFREQ_GOV_PASSIVE=y

CONFIG_DRM=y
CONFIG_DRM_ROCKCHIP=y
CONFIG_DRM_PANFROST=y
CONFIG_DRM_LIMA=y
CONFIG_DRM_PANEL_SIMPLE=y
CONFIG_DRM_MSM=y
CONFIG_DRM_I2C_ADV7511=y
CONFIG_DRM_I2C_ADV7533=y
CONFIG_PWM_CROS_EC=y
CONFIG_BACKLIGHT_PWM=y

CONFIG_ROCKCHIP_CDN_DP=n

CONFIG_SPI_ROCKCHIP=y
CONFIG_PWM_ROCKCHIP=y
CONFIG_PHY_ROCKCHIP_DP=y
CONFIG_DWMAC_ROCKCHIP=y
CONFIG_STMMAC_ETH=y
CONFIG_TYPEC_FUSB302=y
CONFIG_TYPEC=y
CONFIG_TYPEC_TCPM=y

# Cheza platform bits
CONFIG_QCOM_RPMHPD=y
CONFIG_SDM_GPUCC_845=y
CONFIG_SDM_VIDEOCC_845=y
CONFIG_SDM_DISPCC_845=y
CONFIG_SDM_LPASSCC_845=y
CONFIG_SDM_CAMCC_845=y
CONFIG_RESET_QCOM_PDC=y
CONFIG_DRM_TI_SN65DSI86=y
CONFIG_I2C_QCOM_GENI=y
CONFIG_SPI_QCOM_GENI=y
CONFIG_PHY_QCOM_QUSB2=y
CONFIG_PHY_QCOM_QMP=y
CONFIG_QCOM_LLCC=y
CONFIG_QCOM_SPMI_TEMP_ALARM=y
CONFIG_POWER_RESET_QCOM_PON=y
CONFIG_RTC_DRV_PM8XXX=y
CONFIG_INTERCONNECT=y
CONFIG_INTERCONNECT_QCOM_SDM845=y
CONFIG_QCOM_WDT=y

# db410c ethernet
CONFIG_USB_RTL8152=y

CONFIG_ARCH_ALPINE=n
CONFIG_ARCH_BCM2835=n
CONFIG_ARCH_BCM_IPROC=n
CONFIG_ARCH_BERLIN=n
CONFIG_ARCH_BRCMSTB=n
CONFIG_ARCH_EXYNOS=n
CONFIG_ARCH_K3=n
CONFIG_ARCH_LAYERSCAPE=n
CONFIG_ARCH_LG1K=n
CONFIG_ARCH_HISI=n
CONFIG_ARCH_MEDIATEK=n
CONFIG_ARCH_MVEBU=n
CONFIG_ARCH_SEATTLE=n
CONFIG_ARCH_SYNQUACER=n
CONFIG_ARCH_RENESAS=n
CONFIG_ARCH_R8A774A1=n
CONFIG_ARCH_R8A774C0=n
CONFIG_ARCH_R8A7795=n
CONFIG_ARCH_R8A7796=n
CONFIG_ARCH_R8A77965=n
CONFIG_ARCH_R8A77970=n
CONFIG_ARCH_R8A77980=n
CONFIG_ARCH_R8A77990=n
CONFIG_ARCH_R8A77995=n
CONFIG_ARCH_STRATIX10=n
CONFIG_ARCH_TEGRA=n
CONFIG_ARCH_SPRD=n
CONFIG_ARCH_THUNDER=n
CONFIG_ARCH_THUNDER2=n
CONFIG_ARCH_UNIPHIER=n
CONFIG_ARCH_VEXPRESS=n
CONFIG_ARCH_XGENE=n
CONFIG_ARCH_ZX=n
CONFIG_ARCH_ZYNQMP=n

# Strip out some stuff we don't need for graphics testing, to reduce
# the build.
CONFIG_CAN=n
CONFIG_WIRELESS=n
CONFIG_RFKILL=n
CONFIG_WLAN=n

CONFIG_REGULATOR_FAN53555=y
CONFIG_REGULATOR=y

CONFIG_REGULATOR_VCTRL=y

CONFIG_KASAN=n
CONFIG_KASAN_INLINE=n
CONFIG_STACKTRACE=n

CONFIG_TMPFS=y

CONFIG_PROVE_LOCKING=n
CONFIG_DEBUG_LOCKDEP=n
CONFIG_SOFTLOCKUP_DETECTOR=y
CONFIG_BOOTPARAM_SOFTLOCKUP_PANIC=y

CONFIG_DETECT_HUNG_TASK=y

CONFIG_FW_LOADER_COMPRESS=y
CONFIG_FW_LOADER_USER_HELPER=n

CONFIG_USB_USBNET=y
CONFIG_NETDEVICES=y
CONFIG_USB_NET_DRIVERS=y
CONFIG_USB_RTL8152=y
CONFIG_USB_NET_AX8817X=y
CONFIG_USB_NET_SMSC95XX=y
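
These two files are Kconfig fragments rather than complete kernel configs. The diff does not show how they are consumed, but one plausible sketch (an assumption, using the kernel tree's stock merge_config.sh helper and illustrative paths) is to fold a fragment on top of a defconfig:

# In a kernel source tree: merge the CI fragment over the arm64 defconfig
make ARCH=arm64 defconfig
scripts/kconfig/merge_config.sh -m .config path/to/arm64.config
make ARCH=arm64 olddefconfig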

@@ -1,15 +0,0 @@
#!/bin/sh

# Init entrypoint for bare-metal devices; calls common init code.

# First stage: very basic setup to bring up network and /dev etc
/init-stage1.sh

export CURRENT_SECTION=dut_boot

# Second stage: run jobs
test $? -eq 0 && /init-stage2.sh

# Wait until the job would have timed out anyway, so we don't spew a "init
# exited" panic.
sleep 6000

14  .gitlab-ci/bare-metal/capture-devcoredump.sh  Executable file
@@ -0,0 +1,14 @@
#!/bin/sh

while true; do
  devcds=`find /sys/devices/virtual/devcoredump/ -name data`
  for i in $devcds; do
    echo "Found a devcoredump at $i."
    if cp $i /results/first.devcore; then
      echo 1 > $i
      echo "Saved to the job artifacts at /first.devcore"
      exit 0
    fi
  done
  sleep 10
done

105  .gitlab-ci/bare-metal/cros-servo.sh  Executable file
@@ -0,0 +1,105 @@
#!/bin/bash

# Boot script for Chrome OS devices attached to a servo debug connector, using
# NFS and TFTP to boot.

# We're run from the root of the repo, make a helper var for our paths
BM=$CI_PROJECT_DIR/install/bare-metal

# Runner config checks
if [ -z "$BM_SERIAL" ]; then
  echo "Must set BM_SERIAL in your gitlab-runner config.toml [[runners]] environment"
  echo "This is the CPU serial device."
  exit 1
fi

if [ -z "$BM_SERIAL_EC" ]; then
  echo "Must set BM_SERIAL in your gitlab-runner config.toml [[runners]] environment"
  echo "This is the EC serial device for controlling board power"
  exit 1
fi

if [ ! -d /nfs ]; then
  echo "NFS rootfs directory needs to be mounted at /nfs by the gitlab runner"
  exit 1
fi

if [ ! -d /tftp ]; then
  echo "TFTP directory for this board needs to be mounted at /tftp by the gitlab runner"
  exit 1
fi

# job config checks
if [ -z "$BM_KERNEL" ]; then
  echo "Must set BM_KERNEL to your board's kernel FIT image"
  exit 1
fi

if [ -z "$BM_ROOTFS" ]; then
  echo "Must set BM_ROOTFS to your board's rootfs directory in the job's variables"
  exit 1
fi

if [ -z "$BM_CMDLINE" ]; then
  echo "Must set BM_CMDLINE to your board's kernel command line arguments"
  exit 1
fi

set -ex

# Clear out any previous run's artifacts.
rm -rf results/
mkdir -p results
find artifacts/ -name serial\*.txt | xargs rm -f

# Create the rootfs in the NFS directory. rm to make sure it's in a pristine
# state, since it's volume-mounted on the host.
rsync -a --delete $BM_ROOTFS/ /nfs/
mkdir -p /nfs/results
. $BM/rootfs-setup.sh /nfs

# Set up the TFTP kernel/cmdline. When we support more than one board with
# this method, we'll need to do some check on the runner name or something.
rm -rf /tftp/*
cp $BM_KERNEL /tftp/vmlinuz
echo "$BM_CMDLINE" > /tftp/cmdline

# Start watching serials, and power up the device.
$BM/serial-buffer.py $BM_SERIAL_EC | tee serial-ec-output.txt | sed -u 's|^|SERIAL-EC> |g' &
$BM/serial-buffer.py $BM_SERIAL | tee serial-output.txt | sed -u 's|^|SERIAL-CPU> |g' &
while [ ! -e serial-output.txt ]; do
  sleep 1
done
# Flush any partial commands in the EC's prompt, then ask for a reboot.
$BM/write-serial.py $BM_SERIAL_EC ""
$BM/write-serial.py $BM_SERIAL_EC reboot

# This is emitted right when the bootloader pauses to check for input. Emit a
# ^N character to request network boot, because we don't have a
# direct-to-netboot firmware on cheza.
$BM/expect-output.sh serial-output.txt -f "load_archive: loading locale_en.bin"
$BM/write-serial.py $BM_SERIAL `printf '\016'`

# Wait for the device to complete the deqp run
$BM/expect-output.sh serial-output.txt \
  -f "bare-metal result" \
  -e "---. end Kernel panic" \
  -e "POWER_GOOD not seen in time"

# power down the CPU on the device
$BM/write-serial.py $BM_SERIAL_EC 'power off'

set -ex

# Bring artifacts back from the NFS dir to the build dir where gitlab-runner
# will look for them. Note that results/ may already exist, so be careful
# with cp.
mkdir -p results
cp -Rp /nfs/results/. results/

set +e
if grep -q "bare-metal result: pass" serial-output.txt; then
  exit 0
else
  exit 1
fi

30  .gitlab-ci/bare-metal/expect-output.sh  Executable file
@@ -0,0 +1,30 @@
#!/bin/bash

set -e

STRINGS=$(mktemp)
ERRORS=$(mktemp)

trap "rm $STRINGS; rm $ERRORS;" EXIT

FILE=$1
shift 1

while getopts "f:e:" opt; do
  case $opt in
    f) echo "$OPTARG" >> $STRINGS;;
    e) echo "$OPTARG" >> $STRINGS ; echo "$OPTARG" >> $ERRORS;;
  esac
done
shift $((OPTIND -1))

echo "Waiting for $FILE to say one of following strings"
cat $STRINGS

while ! egrep -wf $STRINGS $FILE; do
  sleep 2
done

if egrep -wf $ERRORS $FILE; then
  exit 1
fi
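
Usage sketch (log file name illustrative): the script blocks until the log contains any string given with -f or -e, and exits non-zero if a string given with -e is the one that matched. The boot scripts above and below this file call it the same way:

# Wait for either a pass marker or a kernel panic in the serial log
.gitlab-ci/bare-metal/expect-output.sh serial-output.txt \
  -f "bare-metal result" \
  -e "---. end Kernel panic"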

127  .gitlab-ci/bare-metal/fastboot.sh  Executable file
@@ -0,0 +1,127 @@
#!/bin/bash

BM=$CI_PROJECT_DIR/install/bare-metal

if [ -z "$BM_SERIAL" -a -z "$BM_SERIAL_SCRIPT" ]; then
  echo "Must set BM_SERIAL OR BM_SERIAL_SCRIPT in your gitlab-runner config.toml [[runners]] environment"
  echo "BM_SERIAL:"
  echo "  This is the serial device to talk to for waiting for fastboot to be ready and logging from the kernel."
  echo "BM_SERIAL_SCRIPT:"
  echo "  This is a shell script to talk to for waiting for fastboot to be ready and logging from the kernel."
  exit 1
fi

if [ -z "$BM_POWERUP" ]; then
  echo "Must set BM_POWERUP in your gitlab-runner config.toml [[runners]] environment"
  echo "This is a shell script that should reset the device and begin its boot sequence"
  echo "such that it pauses at fastboot."
  exit 1
fi

if [ -z "$BM_POWERDOWN" ]; then
  echo "Must set BM_POWERDOWN in your gitlab-runner config.toml [[runners]] environment"
  echo "This is a shell script that should power off the device."
  exit 1
fi

if [ -z "$BM_FASTBOOT_SERIAL" ]; then
  echo "Must set BM_FASTBOOT_SERIAL in your gitlab-runner config.toml [[runners]] environment"
  echo "This must be the a stable-across-resets fastboot serial number."
  exit 1
fi

if [ -z "$BM_KERNEL" ]; then
  echo "Must set BM_KERNEL to your board's kernel vmlinuz or Image.gz in the job's variables:"
  exit 1
fi

if [ -z "$BM_DTB" ]; then
  echo "Must set BM_DTB to your board's DTB file in the job's variables:"
  exit 1
fi

if [ -z "$BM_ROOTFS" ]; then
  echo "Must set BM_ROOTFS to your board's rootfs directory in the job's variables:"
  exit 1
fi

if [ -z "$BM_WEBDAV_IP" -o -z "$BM_WEBDAV_PORT" ]; then
  echo "BM_WEBDAV_IP and/or BM_WEBDAV_PORT is not set - no results will be uploaded from DUT!"
  WEBDAV_CMDLINE=""
else
  WEBDAV_CMDLINE="webdav=http://$BM_WEBDAV_IP:$BM_WEBDAV_PORT"
fi

set -ex

# Clear out any previous run's artifacts.
rm -rf results/
mkdir -p results
find artifacts/ -name serial\*.txt | xargs rm -f

# Create the rootfs in a temp dir
rsync -a --delete $BM_ROOTFS/ rootfs/
. $BM/rootfs-setup.sh rootfs

# Finally, pack it up into a cpio rootfs. Skip the vulkan CTS since none of
# these devices use it and it would take up space in the initrd.
pushd rootfs
find -H | \
  egrep -v "external/(openglcts|vulkancts|amber|glslang|spirv-tools)" |
  egrep -v "traces-db|apitrace|renderdoc|python" | \
  cpio -H newc -o | \
  xz --check=crc32 -T4 - > $CI_PROJECT_DIR/rootfs.cpio.gz
popd

cat $BM_KERNEL $BM_DTB > Image.gz-dtb

abootimg \
  --create artifacts/fastboot.img \
  -k Image.gz-dtb \
  -r rootfs.cpio.gz \
  -c cmdline="$BM_CMDLINE $WEBDAV_CMDLINE"
rm Image.gz-dtb

# Start nginx to get results from DUT
if [ -n "$WEBDAV_CMDLINE" ]; then
  ln -s `pwd`/results /results
  sed -i s/80/$BM_WEBDAV_PORT/g /etc/nginx/sites-enabled/default
  sed -i s/www-data/root/g /etc/nginx/nginx.conf
  nginx
fi

# Start watching serial, and power up the device.
if [ -n "$BM_SERIAL" ]; then
  $BM/serial-buffer.py $BM_SERIAL | tee artifacts/serial-output.txt &
else
  PATH=$BM:$PATH $BM_SERIAL_SCRIPT | tee artifacts/serial-output.txt &
fi

while [ ! -e artifacts/serial-output.txt ]; do
  sleep 1
done
PATH=$BM:$PATH $BM_POWERUP

# Once fastboot is ready, boot our image.
$BM/expect-output.sh artifacts/serial-output.txt \
  -f "fastboot: processing commands" \
  -f "Listening for fastboot command on" \
  -e "data abort"

fastboot boot -s $BM_FASTBOOT_SERIAL artifacts/fastboot.img

# Wait for the device to complete the deqp run
$BM/expect-output.sh artifacts/serial-output.txt \
  -f "bare-metal result" \
  -e "---. end Kernel panic"

# power down the device
PATH=$BM:$PATH $BM_POWERDOWN

set +e
if grep -q "bare-metal result: pass" artifacts/serial-output.txt; then
  exit 0
else
  exit 1
fi

@@ -1,129 +0,0 @@
.baremetal-test:
  extends:
    - .test
  # Cancel job if a newer commit is pushed to the same branch
  interruptible: true
  before_script:
    - !reference [.download_s3, before_script]
  variables:
    BM_ROOTFS: /rootfs-${DEBIAN_ARCH}
  artifacts:
    when: always
    name: "${CI_PROJECT_NAME}_${CI_JOB_NAME_SLUG}"
    paths:
      - results/
      - serial*.txt
    exclude:
      - results/*.shader_cache
    reports:
      junit: results/junit.xml

# ARM testing of bare-metal boards attached to an x86 gitlab-runner system
.baremetal-test-arm32-gl:
  extends:
    - .baremetal-test
    - .use-debian/baremetal_arm32_test-gl
  variables:
    DEBIAN_ARCH: armhf
    S3_ARTIFACT_NAME: mesa-arm32-default-debugoptimized
  needs:
    - job: debian/baremetal_arm32_test-gl
      optional: true
    - job: debian-arm32
      artifacts: false
    - !reference [.required-for-hardware-jobs, needs]

# ARM64 testing of bare-metal boards attached to an x86 gitlab-runner system
.baremetal-test-arm64-gl:
  extends:
    - .baremetal-test
    - .use-debian/baremetal_arm64_test-gl
  variables:
    DEBIAN_ARCH: arm64
    S3_ARTIFACT_NAME: mesa-arm64-default-debugoptimized
  needs:
    - job: debian/baremetal_arm64_test-gl
      optional: true
    - job: debian-arm64
      artifacts: false
    - !reference [.required-for-hardware-jobs, needs]

# ARM64 testing of bare-metal boards attached to an x86 gitlab-runner system
.baremetal-test-arm64-vk:
  extends:
    - .baremetal-test
    - .use-debian/baremetal_arm64_test-vk
  variables:
    DEBIAN_ARCH: arm64
    S3_ARTIFACT_NAME: mesa-arm64-default-debugoptimized
  needs:
    - job: debian/baremetal_arm64_test-vk
      optional: true
    - job: debian-arm64
      artifacts: false
    - !reference [.required-for-hardware-jobs, needs]

# ARM32/64 testing of bare-metal boards attached to an x86 gitlab-runner system, using an asan mesa build
.baremetal-arm32-asan-test-gl:
  variables:
    S3_ARTIFACT_NAME: mesa-arm32-asan-debugoptimized
    DEQP_FORCE_ASAN: 1
  needs:
    - job: debian/baremetal_arm32_test-gl
      optional: true
    - job: debian-arm32-asan
      artifacts: false
    - !reference [.required-for-hardware-jobs, needs]

.baremetal-arm64-asan-test-gl:
  variables:
    S3_ARTIFACT_NAME: mesa-arm64-asan-debugoptimized
    DEQP_FORCE_ASAN: 1
  needs:
    - job: debian/baremetal_arm64_test-gl
      optional: true
    - job: debian-arm64-asan
      artifacts: false
    - !reference [.required-for-hardware-jobs, needs]

.baremetal-arm64-asan-test-vk:
  variables:
    S3_ARTIFACT_NAME: mesa-arm64-asan-debugoptimized
    DEQP_FORCE_ASAN: 1
  needs:
    - job: debian/baremetal_arm64_test-vk
      optional: true
    - job: debian-arm64-asan
      artifacts: false
    - !reference [.required-for-hardware-jobs, needs]

.baremetal-arm64-ubsan-test-gl:
  extends:
    - .baremetal-test
    - .use-debian/baremetal_arm64_test-gl
  variables:
    S3_ARTIFACT_NAME: mesa-arm64-ubsan-debugoptimized
  needs:
    - job: debian/baremetal_arm64_test-gl
      optional: true
    - job: debian-arm64-ubsan
      artifacts: false
    - !reference [.required-for-hardware-jobs, needs]

.baremetal-arm64-ubsan-test-vk:
  extends:
    - .baremetal-test
    - .use-debian/baremetal_arm64_test-vk
  variables:
    S3_ARTIFACT_NAME: mesa-arm64-ubsan-debugoptimized
  needs:
    - job: debian/baremetal_arm64_test-vk
      optional: true
    - job: debian-arm64-ubsan
      artifacts: false
    - !reference [.required-for-hardware-jobs, needs]

.baremetal-deqp-test:
  variables:
    HWCI_TEST_SCRIPT: "/install/deqp-runner.sh"
    FDO_CI_CONCURRENT: 0  # Default to number of CPUs

.gitlab-ci/bare-metal/google-power-down.sh (Executable file)
@@ -0,0 +1,10 @@
#!/bin/bash

relay=$1

if [ -z "$relay" ]; then
  echo "Must supply a relay arg"
  exit 1
fi

$CI_PROJECT_DIR/install/bare-metal/google-power-relay.py off $relay

.gitlab-ci/bare-metal/google-power-relay.py (Executable file)
@@ -0,0 +1,19 @@
#!/usr/bin/python3

import sys
import serial

mode = sys.argv[1]
relay = sys.argv[2]

# our relays are "off" means "board is powered".
mode_swap = {
    "on" : "off",
    "off" : "on",
}
mode = mode_swap[mode]

ser = serial.Serial('/dev/ttyACM0', 115200, timeout=2)
command = "relay {} {}\n\r".format(mode, relay)
ser.write(command.encode())
ser.close()

.gitlab-ci/bare-metal/google-power-up.sh (Executable file)
@@ -0,0 +1,12 @@
#!/bin/bash

relay=$1

if [ -z "$relay" ]; then
  echo "Must supply a relay arg"
  exit 1
fi

$CI_PROJECT_DIR/install/bare-metal/google-power-relay.py off $relay
sleep 5
$CI_PROJECT_DIR/install/bare-metal/google-power-up.sh on $relay
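
For illustration, a hedged sketch of how a runner's power hooks would drive these helpers; the relay number 3 is a made-up example:

  # power-cycle the board behind relay 3 before the job starts...
  $CI_PROJECT_DIR/install/bare-metal/google-power-up.sh 3
  # ...and cut its power once the job is done
  $CI_PROJECT_DIR/install/bare-metal/google-power-down.sh 3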

.gitlab-ci/bare-metal/init.sh (Executable file)
@@ -0,0 +1,49 @@
#!/bin/sh

set -ex

mount -t proc none /proc
mount -t sysfs none /sys
mount -t devtmpfs none /dev || echo possibly already mounted
mkdir -p /dev/pts
mount -t devpts devpts /dev/pts
mount -t tmpfs tmpfs /tmp

. /set-job-env-vars.sh

# Store Mesa's disk cache under /tmp, rather than sending it out over NFS.
export XDG_CACHE_HOME=/tmp

echo "nameserver 8.8.8.8" > /etc/resolv.conf

# Not all DUTs have network
sntp -sS pool.ntp.org || true

# Overwrite traces.yml file with the baremetal version
cp /install/traces-baremetal.yml /install/traces.yml

# Start a little daemon to capture the first devcoredump we encounter. (They
# expire after 5 minutes, so we poll for them).
./capture-devcoredump.sh &

if sh $BARE_METAL_TEST_SCRIPT; then
  OK=1
else
  OK=0
fi

# upload artifacts via webdav
WEBDAV=$(cat /proc/cmdline | tr " " "\n" | grep webdav | cut -d '=' -f 2 || true)
if [ -n "$WEBDAV" ]; then
  find /results -type f -exec curl -T {} $WEBDAV/{} \;
fi

if [ $OK -eq 1 ]; then
  echo "bare-metal result: pass"
else
  echo "bare-metal result: fail"
fi

# Wait until the job would have timed out anyway, so we don't spew a "init
# exited" panic.
sleep 6000
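
A minimal sketch of the kernel command-line parameter the webdav upload above expects; the host address is hypothetical, and the URL must not contain an '=' character because the value is recovered with cut -d '=' -f 2:

  webdav=http://10.42.0.1    # init.sh then PUTs every file under /results to this URL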

.gitlab-ci/bare-metal/nginx-default-site (Normal file)
@@ -0,0 +1,20 @@
server {
  listen 80 default_server;
  listen [::]:80 default_server;

  server_name _;

  location / {
    dav_methods PUT;
    dav_ext_methods PROPFIND OPTIONS;
    dav_access user:rw group:rw all:r;

    client_body_temp_path /tmp;
    client_max_body_size 0;
    create_full_put_path on;

    root /results;

    autoindex on;
  }
}
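
As a cross-check against the init script above, a hedged example of the kind of PUT this nginx DAV config accepts; the runner host address is hypothetical:

  curl -T /results/serial-output.txt http://10.42.0.1/results/serial-output.txt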

@@ -1,16 +0,0 @@
#!/bin/bash

if [ -z "$BM_POE_INTERFACE" ]; then
  echo "Must supply the PoE Interface to power up"
  exit 1
fi

if [ -z "$BM_POE_ADDRESS" ]; then
  echo "Must supply the PoE Switch host"
  exit 1
fi

SNMP_KEY="SNMPv2-SMI::mib-2.105.1.1.1.3.1.$((${BM_POE_BASE:-0} + BM_POE_INTERFACE))"
SNMP_OFF="i 2"

flock /var/run/poe.lock -c "snmpset -v2c -r 3 -t 30 -cmesaci $BM_POE_ADDRESS $SNMP_KEY $SNMP_OFF"
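
For clarity, a hedged expansion of the snmpset invocation above, assuming BM_POE_INTERFACE=5, BM_POE_BASE unset, and a hypothetical switch address:

  # interface offset: ${BM_POE_BASE:-0} + BM_POE_INTERFACE = 0 + 5 = 5
  snmpset -v2c -r 3 -t 30 -cmesaci 10.42.0.2 SNMPv2-SMI::mib-2.105.1.1.1.3.1.5 i 2   # "i 2" = port power off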

@@ -1,19 +0,0 @@
#!/bin/bash

if [ -z "$BM_POE_INTERFACE" ]; then
  echo "Must supply the PoE Interface to power up"
  exit 1
fi

if [ -z "$BM_POE_ADDRESS" ]; then
  echo "Must supply the PoE Switch host"
  exit 1
fi

SNMP_KEY="SNMPv2-SMI::mib-2.105.1.1.1.3.1.$((${BM_POE_BASE:-0} + BM_POE_INTERFACE))"
SNMP_ON="i 1"
SNMP_OFF="i 2"

flock /var/run/poe.lock -c "snmpset -v2c -r 3 -t 30 -cmesaci $BM_POE_ADDRESS $SNMP_KEY $SNMP_OFF"
sleep 3s
flock /var/run/poe.lock -c "snmpset -v2c -r 3 -t 30 -cmesaci $BM_POE_ADDRESS $SNMP_KEY $SNMP_ON"
@@ -1,191 +0,0 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC1091
|
||||
# shellcheck disable=SC2034
|
||||
# shellcheck disable=SC2059
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
. "$SCRIPTS_DIR"/setup-test-env.sh
|
||||
|
||||
# Boot script for devices attached to a PoE switch, using NFS for the root
|
||||
# filesystem.
|
||||
|
||||
# We're run from the root of the repo, make a helper var for our paths
|
||||
BM=$CI_PROJECT_DIR/install/bare-metal
|
||||
CI_COMMON=$CI_PROJECT_DIR/install/common
|
||||
CI_INSTALL=$CI_PROJECT_DIR/install
|
||||
|
||||
# Runner config checks
|
||||
if [ -z "$BM_SERIAL" ]; then
|
||||
echo "Must set BM_SERIAL in your gitlab-runner config.toml [[runners]] environment"
|
||||
echo "This is the serial port to listen the device."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$BM_POE_ADDRESS" ]; then
|
||||
echo "Must set BM_POE_ADDRESS in your gitlab-runner config.toml [[runners]] environment"
|
||||
echo "This is the PoE switch address to connect for powering up/down devices."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$BM_POE_INTERFACE" ]; then
|
||||
echo "Must set BM_POE_INTERFACE in your gitlab-runner config.toml [[runners]] environment"
|
||||
echo "This is the PoE switch interface where the device is connected."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$BM_POWERUP" ]; then
|
||||
echo "Must set BM_POWERUP in your gitlab-runner config.toml [[runners]] environment"
|
||||
echo "This is a shell script that should power up the device and begin its boot sequence."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$BM_POWERDOWN" ]; then
|
||||
echo "Must set BM_POWERDOWN in your gitlab-runner config.toml [[runners]] environment"
|
||||
echo "This is a shell script that should power off the device."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -d /nfs ]; then
|
||||
echo "NFS rootfs directory needs to be mounted at /nfs by the gitlab runner"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -d /tftp ]; then
|
||||
echo "TFTP directory for this board needs to be mounted at /tftp by the gitlab runner"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# job config checks
|
||||
if [ -z "$BM_ROOTFS" ]; then
|
||||
echo "Must set BM_ROOTFS to your board's rootfs directory in the job's variables"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$BM_BOOTFS" ] && { [ -z "$BM_KERNEL" ] || [ -z "$BM_DTB" ]; } ; then
|
||||
echo "Must set /boot files for the TFTP boot in the job's variables or set kernel and dtb"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$BM_CMDLINE" ]; then
|
||||
echo "Must set BM_CMDLINE to your board's kernel command line arguments"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
section_start prepare_rootfs "Preparing rootfs components"
|
||||
|
||||
set -ex
|
||||
|
||||
date +'%F %T'
|
||||
|
||||
# Clear out any previous run's artifacts.
|
||||
rm -rf results/
|
||||
mkdir -p results
|
||||
|
||||
# Create the rootfs in the NFS directory. rm to make sure it's in a pristine
|
||||
# state, since it's volume-mounted on the host.
|
||||
rsync -a --delete $BM_ROOTFS/ /nfs/
|
||||
|
||||
date +'%F %T'
|
||||
|
||||
# If BM_BOOTFS is an URL, download it
|
||||
if echo $BM_BOOTFS | grep -q http; then
|
||||
curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \
|
||||
"${FDO_HTTP_CACHE_URI:-}$BM_BOOTFS" -o /tmp/bootfs.tar
|
||||
BM_BOOTFS=/tmp/bootfs.tar
|
||||
fi
|
||||
|
||||
date +'%F %T'
|
||||
|
||||
# If BM_BOOTFS is a file, assume it is a tarball and uncompress it
|
||||
if [ -f "${BM_BOOTFS}" ]; then
|
||||
mkdir -p /tmp/bootfs
|
||||
tar xf $BM_BOOTFS -C /tmp/bootfs
|
||||
BM_BOOTFS=/tmp/bootfs
|
||||
fi
|
||||
|
||||
date +'%F %T'
|
||||
|
||||
# Install kernel modules (it could be either in /lib/modules or
|
||||
# /usr/lib/modules, but we want to install in the latter)
|
||||
if [ -n "${BM_BOOTFS}" ]; then
|
||||
[ -d $BM_BOOTFS/usr/lib/modules ] && rsync -a $BM_BOOTFS/usr/lib/modules/ /nfs/usr/lib/modules/
|
||||
[ -d $BM_BOOTFS/lib/modules ] && rsync -a $BM_BOOTFS/lib/modules/ /nfs/lib/modules/
|
||||
else
|
||||
echo "No modules!"
|
||||
fi
|
||||
|
||||
|
||||
date +'%F %T'
|
||||
|
||||
# Install kernel image + bootloader files
|
||||
if [ -z "$BM_BOOTFS" ]; then
|
||||
mv "${BM_KERNEL}" "${BM_DTB}.dtb" /tftp/
|
||||
else # BM_BOOTFS
|
||||
rsync -aL --delete $BM_BOOTFS/boot/ /tftp/
|
||||
fi
|
||||
|
||||
date +'%F %T'
|
||||
|
||||
# Create the rootfs in the NFS directory
|
||||
. $BM/rootfs-setup.sh /nfs
|
||||
|
||||
date +'%F %T'
|
||||
|
||||
echo "$BM_CMDLINE" > /tftp/cmdline.txt
|
||||
|
||||
# Add some options in config.txt, if defined
|
||||
if [ -n "$BM_BOOTCONFIG" ]; then
|
||||
printf "$BM_BOOTCONFIG" >> /tftp/config.txt
|
||||
fi
|
||||
|
||||
section_end prepare_rootfs
|
||||
|
||||
set +e
|
||||
STRUCTURED_LOG_FILE=results/job_detail.json
|
||||
python3 $CI_INSTALL/custom_logger.py ${STRUCTURED_LOG_FILE} --update dut_job_type "${DEVICE_TYPE}"
|
||||
python3 $CI_INSTALL/custom_logger.py ${STRUCTURED_LOG_FILE} --update farm "${FARM}"
|
||||
ATTEMPTS=3
|
||||
first_attempt=True
|
||||
while [ $((ATTEMPTS--)) -gt 0 ]; do
|
||||
section_start dut_boot "Booting hardware device ..."
|
||||
python3 $CI_INSTALL/custom_logger.py ${STRUCTURED_LOG_FILE} --create-dut-job dut_name "${CI_RUNNER_DESCRIPTION}"
|
||||
# Update subtime time to CI_JOB_STARTED_AT only for the first run
|
||||
if [ "$first_attempt" = "True" ]; then
|
||||
python3 $CI_INSTALL/custom_logger.py ${STRUCTURED_LOG_FILE} --update-dut-time submit "${CI_JOB_STARTED_AT}"
|
||||
else
|
||||
python3 $CI_INSTALL/custom_logger.py ${STRUCTURED_LOG_FILE} --update-dut-time submit
|
||||
fi
|
||||
python3 $BM/poe_run.py \
|
||||
--dev="$BM_SERIAL" \
|
||||
--powerup="$BM_POWERUP" \
|
||||
--powerdown="$BM_POWERDOWN" \
|
||||
--boot-timeout-seconds ${BOOT_PHASE_TIMEOUT_SECONDS:-300} \
|
||||
--test-timeout-minutes ${TEST_PHASE_TIMEOUT_MINUTES:-$((CI_JOB_TIMEOUT/60 - ${TEST_SETUP_AND_UPLOAD_MARGIN_MINUTES:-5}))}
|
||||
ret=$?
|
||||
|
||||
if [ $ret -eq 2 ]; then
|
||||
python3 $CI_INSTALL/custom_logger.py ${STRUCTURED_LOG_FILE} --close-dut-job
|
||||
first_attempt=False
|
||||
error "Device failed to boot; will retry"
|
||||
else
|
||||
# We're no longer in dut_boot by this point
|
||||
unset CURRENT_SECTION
|
||||
ATTEMPTS=0
|
||||
fi
|
||||
done
|
||||
|
||||
section_start dut_cleanup "Cleaning up after job"
|
||||
python3 $CI_INSTALL/custom_logger.py ${STRUCTURED_LOG_FILE} --close-dut-job
|
||||
python3 $CI_INSTALL/custom_logger.py ${STRUCTURED_LOG_FILE} --close
|
||||
set -e
|
||||
|
||||
date +'%F %T'
|
||||
|
||||
# Bring artifacts back from the NFS dir to the build dir where gitlab-runner
|
||||
# will look for them.
|
||||
cp -Rp /nfs/results/. results/
|
||||
|
||||
date +'%F %T'
|
||||
section_end dut_cleanup
|
||||
|
||||
exit $ret
|
@@ -1,133 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright © 2020 Igalia, S.L.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a
|
||||
# copy of this software and associated documentation files (the "Software"),
|
||||
# to deal in the Software without restriction, including without limitation
|
||||
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
||||
# and/or sell copies of the Software, and to permit persons to whom the
|
||||
# Software is furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice (including the next
|
||||
# paragraph) shall be included in all copies or substantial portions of the
|
||||
# Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
||||
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import threading
|
||||
|
||||
from custom_logger import CustomLogger
|
||||
from serial_buffer import SerialBuffer
|
||||
|
||||
class PoERun:
|
||||
def __init__(self, args, boot_timeout, test_timeout, logger):
|
||||
self.powerup = args.powerup
|
||||
self.powerdown = args.powerdown
|
||||
self.ser = SerialBuffer(
|
||||
args.dev, "results/serial-output.txt", ": ")
|
||||
self.boot_timeout = boot_timeout
|
||||
self.test_timeout = test_timeout
|
||||
self.logger = logger
|
||||
|
||||
def print_error(self, message):
|
||||
RED = '\033[0;31m'
|
||||
NO_COLOR = '\033[0m'
|
||||
print(RED + message + NO_COLOR)
|
||||
self.logger.update_status_fail(message)
|
||||
|
||||
def logged_system(self, cmd):
|
||||
print("Running '{}'".format(cmd))
|
||||
return os.system(cmd)
|
||||
|
||||
def run(self):
|
||||
if self.logged_system(self.powerup) != 0:
|
||||
self.logger.update_status_fail("powerup failed")
|
||||
return 1
|
||||
|
||||
boot_detected = False
|
||||
self.logger.create_job_phase("boot")
|
||||
for line in self.ser.lines(timeout=self.boot_timeout, phase="bootloader"):
|
||||
if re.search("Booting Linux", line):
|
||||
boot_detected = True
|
||||
break
|
||||
|
||||
if not boot_detected:
|
||||
self.print_error(
|
||||
"Something wrong; couldn't detect the boot start up sequence")
|
||||
return 2
|
||||
|
||||
self.logger.create_job_phase("test")
|
||||
for line in self.ser.lines(timeout=self.test_timeout, phase="test"):
|
||||
if re.search("---. end Kernel panic", line):
|
||||
self.logger.update_status_fail("kernel panic")
|
||||
return 1
|
||||
|
||||
# Binning memory problems
|
||||
if re.search("binner overflow mem", line):
|
||||
self.print_error("Memory overflow in the binner; GPU hang")
|
||||
return 1
|
||||
|
||||
if re.search("nouveau 57000000.gpu: bus: MMIO read of 00000000 FAULT at 137000", line):
|
||||
self.print_error("nouveau jetson boot bug, abandoning run.")
|
||||
return 1
|
||||
|
||||
# network fail on tk1
|
||||
if re.search("NETDEV WATCHDOG:.* transmit queue 0 timed out", line):
|
||||
self.print_error("nouveau jetson tk1 network fail, abandoning run.")
|
||||
return 1
|
||||
|
||||
result = re.search(r"hwci: mesa: exit_code: (\d+)", line)
|
||||
if result:
|
||||
exit_code = int(result.group(1))
|
||||
|
||||
if exit_code == 0:
|
||||
self.logger.update_dut_job("status", "pass")
|
||||
else:
|
||||
self.logger.update_status_fail("test fail")
|
||||
|
||||
self.logger.update_dut_job("exit_code", exit_code)
|
||||
return exit_code
|
||||
|
||||
self.print_error(
|
||||
"Reached the end of the CPU serial log without finding a result")
|
||||
return 1
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--dev', type=str,
|
||||
help='Serial device to monitor', required=True)
|
||||
parser.add_argument('--powerup', type=str,
|
||||
help='shell command for rebooting', required=True)
|
||||
parser.add_argument('--powerdown', type=str,
|
||||
help='shell command for powering off', required=True)
|
||||
parser.add_argument(
|
||||
'--boot-timeout-seconds', type=int, help='Boot phase timeout (seconds)', required=True)
|
||||
parser.add_argument(
|
||||
'--test-timeout-minutes', type=int, help='Test phase timeout (minutes)', required=True)
|
||||
args = parser.parse_args()
|
||||
|
||||
logger = CustomLogger("results/job_detail.json")
|
||||
logger.update_dut_time("start", None)
|
||||
poe = PoERun(args, args.boot_timeout_seconds, args.test_timeout_minutes * 60, logger)
|
||||
retval = poe.run()
|
||||
|
||||
poe.logged_system(args.powerdown)
|
||||
logger.update_dut_time("end", None)
|
||||
|
||||
sys.exit(retval)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
@@ -1,34 +1,63 @@
|
||||
#!/usr/bin/env bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
#!/bin/bash
|
||||
|
||||
rootfs_dst=$1
|
||||
|
||||
mkdir -p $rootfs_dst/results
|
||||
|
||||
# Set up the init script that brings up the system.
|
||||
cp $BM/bm-init.sh $rootfs_dst/init
|
||||
cp $CI_COMMON/init*.sh $rootfs_dst/
|
||||
cp $BM/init.sh $rootfs_dst/init
|
||||
|
||||
date +'%F %T'
|
||||
|
||||
# Make JWT token available as file in the bare-metal storage to enable access
|
||||
# to MinIO
|
||||
cp "${S3_JWT_FILE}" "${rootfs_dst}${S3_JWT_FILE}"
|
||||
|
||||
date +'%F %T'
|
||||
|
||||
cp "$SCRIPTS_DIR/setup-test-env.sh" "$rootfs_dst/"
|
||||
cp $BM/capture-devcoredump.sh $rootfs_dst/
|
||||
|
||||
set +x
|
||||
|
||||
# Pass through relevant env vars from the gitlab job to the baremetal init script
|
||||
touch $rootfs_dst/set-job-env-vars.sh
|
||||
chmod +x $rootfs_dst/set-job-env-vars.sh
|
||||
for var in \
|
||||
BARE_METAL_TEST_SCRIPT \
|
||||
CI_COMMIT_BRANCH \
|
||||
CI_COMMIT_TITLE \
|
||||
CI_JOB_JWT \
|
||||
CI_JOB_ID \
|
||||
CI_JOB_URL \
|
||||
CI_MERGE_REQUEST_SOURCE_BRANCH_NAME \
|
||||
CI_MERGE_REQUEST_TITLE \
|
||||
CI_NODE_INDEX \
|
||||
CI_NODE_TOTAL \
|
||||
CI_PIPELINE_ID \
|
||||
CI_PROJECT_PATH \
|
||||
CI_RUNNER_DESCRIPTION \
|
||||
DEQP_CASELIST_FILTER \
|
||||
DEQP_EXPECTED_FAILS \
|
||||
DEQP_EXPECTED_RENDERER \
|
||||
DEQP_NO_SAVE_RESULTS \
|
||||
DEQP_PARALLEL \
|
||||
DEQP_RUN_SUFFIX \
|
||||
DEQP_SKIPS \
|
||||
DEQP_VER \
|
||||
DEVICE_NAME \
|
||||
FD_MESA_DEBUG \
|
||||
FLAKES_CHANNEL \
|
||||
IR3_SHADER_DEBUG \
|
||||
MESA_GL_VERSION_OVERRIDE \
|
||||
MESA_GLSL_VERSION_OVERRIDE \
|
||||
MESA_GLES_VERSION_OVERRIDE \
|
||||
NIR_VALIDATE \
|
||||
TRACIE_NO_UNIT_TESTS \
|
||||
TRACIE_UPLOAD_TO_MINIO \
|
||||
TU_DEBUG \
|
||||
VK_DRIVER \
|
||||
; do
|
||||
val=`echo ${!var} | sed 's|"||g'`
|
||||
if [ -n "$val" ]; then
|
||||
echo "export $var=\"${val}\"" >> $rootfs_dst/set-job-env-vars.sh
|
||||
fi
|
||||
done
|
||||
echo "Variables passed through:"
|
||||
filter_env_vars | tee $rootfs_dst/set-job-env-vars.sh
|
||||
|
||||
cat $rootfs_dst/set-job-env-vars.sh
|
||||
set -x
|
||||
|
||||
# Add the Mesa drivers we built, and make a consistent symlink to them.
|
||||
mkdir -p $rootfs_dst/$CI_PROJECT_DIR
|
||||
rsync -aH --delete $CI_PROJECT_DIR/install/ $rootfs_dst/$CI_PROJECT_DIR/install/
|
||||
|
||||
date +'%F %T'
|
||||
tar -C $rootfs_dst/$CI_PROJECT_DIR/ -xf $CI_PROJECT_DIR/artifacts/install.tar
|
||||
ln -sf $CI_PROJECT_DIR/install $rootfs_dst/install
|
||||
|

.gitlab-ci/bare-metal/serial-buffer.py (Executable file)
@@ -0,0 +1,46 @@
#!/usr/bin/python3

# Copyright © 2020 Google LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the next
# paragraph) shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.

# Tiny script to read bytes from serial, and write the output to stdout, with a
# buffer in between so we don't lose serial output from its buffer.
#
# We don't use 'cu' because it requires stdin to be hooked up and I never
# managed to make that work without getting blocked somewhere. We don't use
# 'conserver' because it's non-free.

import sys
import serial
import select
import os
import posix

dev = sys.argv[1]

ser = serial.Serial(dev, 115200, timeout=10)

while True:
    bytes = ser.read()
    sys.stdout.buffer.write(bytes)
    sys.stdout.flush()

ser.close()
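
A hedged usage sketch (the device path and output file are assumptions): the buffer script is typically left running in the background with its stdout captured to a results file:

  ./serial-buffer.py /dev/ttyUSB0 > results/serial-output.txt &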
@@ -1,186 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright © 2020 Google LLC
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a
|
||||
# copy of this software and associated documentation files (the "Software"),
|
||||
# to deal in the Software without restriction, including without limitation
|
||||
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
||||
# and/or sell copies of the Software, and to permit persons to whom the
|
||||
# Software is furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice (including the next
|
||||
# paragraph) shall be included in all copies or substantial portions of the
|
||||
# Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
||||
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
|
||||
import argparse
|
||||
from datetime import datetime, UTC
|
||||
import queue
|
||||
import serial
|
||||
import threading
|
||||
import time
|
||||
|
||||
|
||||
class SerialBuffer:
|
||||
def __init__(self, dev, filename, prefix, timeout=None, line_queue=None):
|
||||
self.filename = filename
|
||||
self.dev = dev
|
||||
|
||||
if dev:
|
||||
self.f = open(filename, "wb+")
|
||||
self.serial = serial.Serial(dev, 115200, timeout=timeout)
|
||||
else:
|
||||
self.f = open(filename, "rb")
|
||||
self.serial = None
|
||||
|
||||
self.byte_queue = queue.Queue()
|
||||
# allow multiple SerialBuffers to share a line queue so you can merge
|
||||
# servo's CPU and EC streams into one thing to watch the boot/test
|
||||
# progress on.
|
||||
if line_queue:
|
||||
self.line_queue = line_queue
|
||||
else:
|
||||
self.line_queue = queue.Queue()
|
||||
self.prefix = prefix
|
||||
self.timeout = timeout
|
||||
self.sentinel = object()
|
||||
self.closing = False
|
||||
|
||||
if self.dev:
|
||||
self.read_thread = threading.Thread(
|
||||
target=self.serial_read_thread_loop, daemon=True)
|
||||
else:
|
||||
self.read_thread = threading.Thread(
|
||||
target=self.serial_file_read_thread_loop, daemon=True)
|
||||
self.read_thread.start()
|
||||
|
||||
self.lines_thread = threading.Thread(
|
||||
target=self.serial_lines_thread_loop, daemon=True)
|
||||
self.lines_thread.start()
|
||||
|
||||
def close(self):
|
||||
self.closing = True
|
||||
if self.serial:
|
||||
self.serial.cancel_read()
|
||||
self.read_thread.join()
|
||||
self.lines_thread.join()
|
||||
if self.serial:
|
||||
self.serial.close()
|
||||
|
||||
# Thread that just reads the bytes from the serial device to try to keep from
|
||||
# buffer overflowing it. If nothing is received in 1 minute, it finalizes.
|
||||
def serial_read_thread_loop(self):
|
||||
greet = "Serial thread reading from %s\n" % self.dev
|
||||
self.byte_queue.put(greet.encode())
|
||||
|
||||
while not self.closing:
|
||||
try:
|
||||
b = self.serial.read()
|
||||
if len(b) == 0:
|
||||
break
|
||||
self.byte_queue.put(b)
|
||||
except Exception as err:
|
||||
print(self.prefix + str(err))
|
||||
break
|
||||
self.byte_queue.put(self.sentinel)
|
||||
|
||||
# Thread that just reads the bytes from the file of serial output that some
|
||||
# other process is appending to.
|
||||
def serial_file_read_thread_loop(self):
|
||||
greet = "Serial thread reading from %s\n" % self.filename
|
||||
self.byte_queue.put(greet.encode())
|
||||
|
||||
while not self.closing:
|
||||
line = self.f.readline()
|
||||
if line:
|
||||
self.byte_queue.put(line)
|
||||
else:
|
||||
time.sleep(0.1)
|
||||
self.byte_queue.put(self.sentinel)
|
||||
|
||||
# Thread that processes the stream of bytes to 1) log to stdout, 2) log to
|
||||
# file, 3) add to the queue of lines to be read by program logic
|
||||
|
||||
def serial_lines_thread_loop(self):
|
||||
line = bytearray()
|
||||
while True:
|
||||
bytes = self.byte_queue.get(block=True)
|
||||
|
||||
if bytes == self.sentinel:
|
||||
self.read_thread.join()
|
||||
self.line_queue.put(self.sentinel)
|
||||
break
|
||||
|
||||
# Write our data to the output file if we're the ones reading from
|
||||
# the serial device
|
||||
if self.dev:
|
||||
self.f.write(bytes)
|
||||
self.f.flush()
|
||||
|
||||
for b in bytes:
|
||||
line.append(b)
|
||||
if b == b'\n'[0]:
|
||||
line = line.decode(errors="replace")
|
||||
|
||||
ts = datetime.now(tz=UTC)
|
||||
ts_str = f"{ts.hour:02}:{ts.minute:02}:{ts.second:02}.{int(ts.microsecond / 1000):03}"
|
||||
print("{endc}{time}{prefix}{line}".format(
|
||||
time=ts_str, prefix=self.prefix, line=line, endc='\033[0m'), flush=True, end='')
|
||||
|
||||
self.line_queue.put(line)
|
||||
line = bytearray()
|
||||
|
||||
def lines(self, timeout=None, phase=None):
|
||||
start_time = time.monotonic()
|
||||
while True:
|
||||
read_timeout = None
|
||||
if timeout:
|
||||
read_timeout = timeout - (time.monotonic() - start_time)
|
||||
if read_timeout <= 0:
|
||||
print("read timeout waiting for serial during {}".format(phase))
|
||||
self.close()
|
||||
break
|
||||
|
||||
try:
|
||||
line = self.line_queue.get(timeout=read_timeout)
|
||||
except queue.Empty:
|
||||
print("read timeout waiting for serial during {}".format(phase))
|
||||
self.close()
|
||||
break
|
||||
|
||||
if line == self.sentinel:
|
||||
print("End of serial output")
|
||||
self.lines_thread.join()
|
||||
break
|
||||
|
||||
yield line
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser()
|
||||
|
||||
parser.add_argument('--dev', type=str, help='Serial device')
|
||||
parser.add_argument('--file', type=str,
|
||||
help='Filename for serial output', required=True)
|
||||
parser.add_argument('--prefix', type=str,
|
||||
help='Prefix for logging serial to stdout', nargs='?')
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
ser = SerialBuffer(args.dev, args.file, args.prefix or "")
|
||||
for line in ser.lines():
|
||||
# We're just using this as a logger, so eat the produced lines and drop
|
||||
# them
|
||||
pass
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|

.gitlab-ci/bare-metal/write-serial.py (Executable file)
@@ -0,0 +1,11 @@
#!/usr/bin/python3

import sys
import serial

dev = sys.argv[1]
command = sys.argv[2] + '\n'

ser = serial.Serial(dev, 115200, timeout=5)
ser.write(command.encode())
ser.close()

@@ -1 +0,0 @@
../bin/ci
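
And a hedged companion example for write-serial.py; the device path and the command string are assumptions:

  ./write-serial.py /dev/ttyUSB0 reboot    # sends "reboot" plus a newline to the DUT console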
33
.gitlab-ci/build-apitrace.sh
Normal file
33
.gitlab-ci/build-apitrace.sh
Normal file
@@ -0,0 +1,33 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
# Need an unreleased version of Waffle for surfaceless support in apitrace
|
||||
# Replace this build with the Debian package once that's possible
|
||||
|
||||
WAFFLE_VERSION="e3c995d9a2693b687501715b6550619922346089"
|
||||
git clone https://gitlab.freedesktop.org/mesa/waffle.git --single-branch --no-checkout /waffle
|
||||
pushd /waffle
|
||||
git checkout "$WAFFLE_VERSION"
|
||||
cmake -B_build -DCMAKE_INSTALL_LIBDIR=lib -DCMAKE_BUILD_TYPE=Release $EXTRA_CMAKE_ARGS .
|
||||
make -C _build install
|
||||
mkdir -p build/lib build/bin
|
||||
cp _build/lib/libwaffle-1.so build/lib/libwaffle-1.so.0
|
||||
cp _build/bin/wflinfo build/bin/wflinfo
|
||||
${STRIP_CMD:-strip} build/lib/* build/bin/*
|
||||
find . -not -path './build' -not -path './build/*' -delete
|
||||
popd
|
||||
|
||||
APITRACE_VERSION="9.0"
|
||||
|
||||
git clone https://github.com/apitrace/apitrace.git --single-branch --no-checkout /apitrace
|
||||
pushd /apitrace
|
||||
git checkout "$APITRACE_VERSION"
|
||||
cmake -G Ninja -B_build -H. -DCMAKE_BUILD_TYPE=Release -DENABLE_GUI=False -DENABLE_WAFFLE=on -DWaffle_DIR=/usr/local/lib/cmake/Waffle/ $EXTRA_CMAKE_ARGS
|
||||
ninja -C _build
|
||||
mkdir build
|
||||
cp _build/apitrace build
|
||||
cp _build/eglretrace build
|
||||
${STRIP_CMD:-strip} build/*
|
||||
find . -not -path './build' -not -path './build/*' -delete
|
||||
popd
|

.gitlab-ci/build-cts-runner.sh (Normal file)
@@ -0,0 +1,10 @@
#!/bin/bash

set -ex

git clone https://gitlab.freedesktop.org/mesa/parallel-deqp-runner.git --depth 1 -b mesa-ci-2020-06-15 /parallel-deqp-runner
pushd /parallel-deqp-runner
meson build/ $EXTRA_MESON_ARGS
ninja -C build install
popd
rm -rf /parallel-deqp-runner
68
.gitlab-ci/build-deqp-gl.sh
Normal file
68
.gitlab-ci/build-deqp-gl.sh
Normal file
@@ -0,0 +1,68 @@
|
||||
#!/bin/bash
|
||||
|
||||
git config --global user.email "mesa@example.com"
|
||||
git config --global user.name "Mesa CI"
|
||||
git clone \
|
||||
--depth 1 \
|
||||
https://github.com/KhronosGroup/VK-GL-CTS.git \
|
||||
-b opengl-es-cts-3.2.6.1 \
|
||||
/VK-GL-CTS
|
||||
pushd /VK-GL-CTS
|
||||
|
||||
# surfaceless links against libkms and such despite not using it.
|
||||
sed -i '/gbm/d' targets/surfaceless/surfaceless.cmake
|
||||
sed -i '/libkms/d' targets/surfaceless/surfaceless.cmake
|
||||
sed -i '/libgbm/d' targets/surfaceless/surfaceless.cmake
|
||||
|
||||
# --insecure is due to SSL cert failures hitting sourceforge for zlib and
|
||||
# libpng (sigh). The archives get their checksums checked anyway, and git
|
||||
# always goes through ssh or https.
|
||||
python3 external/fetch_sources.py --insecure
|
||||
|
||||
mkdir -p /deqp
|
||||
|
||||
# Save the testlog stylesheets:
|
||||
cp doc/testlog-stylesheet/testlog.{css,xsl} /deqp
|
||||
popd
|
||||
|
||||
pushd /deqp
|
||||
cmake -G Ninja \
|
||||
-DDEQP_TARGET=surfaceless \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
$EXTRA_CMAKE_ARGS \
|
||||
/VK-GL-CTS
|
||||
ninja
|
||||
|
||||
# Copy out the mustpass lists we want from a bunch of other junk.
|
||||
mkdir /deqp/mustpass
|
||||
for gles in gles2 gles3 gles31; do
|
||||
cp \
|
||||
/deqp/external/openglcts/modules/gl_cts/data/mustpass/gles/aosp_mustpass/3.2.6.x/$gles-master.txt \
|
||||
/deqp/mustpass/$gles-master.txt
|
||||
done
|
||||
cp \
|
||||
/deqp/external/openglcts/modules/gl_cts/data/mustpass/gl/khronos_mustpass/4.6.1.x/*-master.txt \
|
||||
/deqp/mustpass/.
|
||||
|
||||
|
||||
|
||||
# Save *some* executor utils, but otherwise strip things down
|
||||
# to reduct deqp build size:
|
||||
mkdir /deqp/executor.save
|
||||
cp /deqp/executor/testlog-to-* /deqp/executor.save
|
||||
rm -rf /deqp/executor
|
||||
mv /deqp/executor.save /deqp/executor
|
||||
|
||||
ls /deqp/external | grep -v openglcts | xargs rm -rf
|
||||
rm -rf /deqp/modules/internal
|
||||
rm -rf /deqp/execserver
|
||||
rm -rf /deqp/modules/egl
|
||||
rm -rf /deqp/framework
|
||||
rm -rf /deqp/external/openglcts/modules/gl_cts/data/mustpass
|
||||
rm -rf /deqp/external/openglcts/modules/cts-runner
|
||||
rm -rf /deqp/external/vulkancts/modules/vulkan/vk-build-programs
|
||||
find -iname '*cmake*' -o -name '*ninja*' -o -name '*.o' -o -name '*.a' | xargs rm -rf
|
||||
${STRIP_CMD:-strip} modules/*/deqp-* external/openglcts/modules/glcts
|
||||
du -sh *
|
||||
rm -rf /VK-GL-CTS
|
||||
popd
|
60
.gitlab-ci/build-deqp-vk.sh
Normal file
60
.gitlab-ci/build-deqp-vk.sh
Normal file
@@ -0,0 +1,60 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
git config --global user.email "mesa@example.com"
|
||||
git config --global user.name "Mesa CI"
|
||||
git clone \
|
||||
https://github.com/KhronosGroup/VK-GL-CTS.git \
|
||||
-b vulkan-cts-1.2.3.0 \
|
||||
--depth 1 \
|
||||
/VK-GL-CTS
|
||||
pushd /VK-GL-CTS
|
||||
|
||||
# --insecure is due to SSL cert failures hitting sourceforge for zlib and
|
||||
# libpng (sigh). The archives get their checksums checked anyway, and git
|
||||
# always goes through ssh or https.
|
||||
python3 external/fetch_sources.py --insecure
|
||||
|
||||
mkdir -p /deqp
|
||||
|
||||
# Save the testlog stylesheets:
|
||||
cp doc/testlog-stylesheet/testlog.{css,xsl} /deqp
|
||||
popd
|
||||
|
||||
pushd /deqp
|
||||
cmake -G Ninja \
|
||||
-DDEQP_TARGET=${DEQP_TARGET:-x11_glx} \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
$EXTRA_CMAKE_ARGS \
|
||||
/VK-GL-CTS
|
||||
ninja
|
||||
|
||||
# Copy out the mustpass lists we want.
|
||||
mkdir /deqp/mustpass
|
||||
cp /VK-GL-CTS/external/vulkancts/mustpass/master/vk-default.txt \
|
||||
/deqp/mustpass/vk-master.txt
|
||||
|
||||
for gles in gles2 gles3 gles31; do
|
||||
cp \
|
||||
/deqp/external/openglcts/modules/gl_cts/data/mustpass/gles/aosp_mustpass/3.2.6.x/$gles-master.txt \
|
||||
/deqp/mustpass/$gles-master.txt
|
||||
done
|
||||
|
||||
# Save *some* executor utils, but otherwise strip things down
|
||||
# to reduct deqp build size:
|
||||
mkdir /deqp/executor.save
|
||||
cp /deqp/executor/testlog-to-* /deqp/executor.save
|
||||
rm -rf /deqp/executor
|
||||
mv /deqp/executor.save /deqp/executor
|
||||
|
||||
rm -rf /deqp/modules/internal
|
||||
rm -rf /deqp/execserver
|
||||
rm -rf /deqp/modules/egl
|
||||
rm -rf /deqp/framework
|
||||
find -iname '*cmake*' -o -name '*ninja*' -o -name '*.o' -o -name '*.a' | xargs rm -rf
|
||||
${STRIP_CMD:-strip} external/vulkancts/modules/vulkan/deqp-vk
|
||||
${STRIP_CMD:-strip} modules/*/deqp-*
|
||||
du -sh *
|
||||
rm -rf /VK-GL-CTS
|
||||
popd
|
14
.gitlab-ci/build-fossilize.sh
Normal file
14
.gitlab-ci/build-fossilize.sh
Normal file
@@ -0,0 +1,14 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
git clone https://github.com/ValveSoftware/Fossilize.git
|
||||
cd Fossilize
|
||||
git checkout 6b5b570008c9ab5269e341f04c811fe49a1bb72c
|
||||
git submodule update --init
|
||||
mkdir build
|
||||
cd build
|
||||
cmake .. -DCMAKE_BUILD_TYPE=Release -G Ninja
|
||||
ninja -C . install
|
||||
cd ../..
|
||||
rm -rf Fossilize
|
21
.gitlab-ci/build-gfxreconstruct.sh
Normal file
21
.gitlab-ci/build-gfxreconstruct.sh
Normal file
@@ -0,0 +1,21 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
GFXRECONSTRUCT_VERSION=57c588c04af631d1d6d381a48e2b9283f9d9d528
|
||||
|
||||
# Using the "dev" branch by now because it solves a crash and will allow us to
|
||||
# use the gfxreconstruct-info tool
|
||||
git clone https://github.com/LunarG/gfxreconstruct.git --single-branch -b dev --no-checkout /gfxreconstruct
|
||||
pushd /gfxreconstruct
|
||||
git checkout "$GFXRECONSTRUCT_VERSION"
|
||||
git submodule update --init
|
||||
git submodule update
|
||||
cmake -G Ninja -B_build -H. -DCMAKE_BUILD_TYPE=Release
|
||||
ninja -C _build gfxrecon-replay gfxrecon-info
|
||||
mkdir -p build/bin
|
||||
install _build/tools/replay/gfxrecon-replay build/bin
|
||||
install _build/tools/info/gfxrecon-info build/bin
|
||||
strip build/bin/*
|
||||
find . -not -path './build' -not -path './build/*' -delete
|
||||
popd
|

.gitlab-ci/build-libdrm.sh (Normal file)
@@ -0,0 +1,14 @@
#!/bin/bash

set -ex

export LIBDRM_VERSION=libdrm-2.4.102

wget https://dri.freedesktop.org/libdrm/$LIBDRM_VERSION.tar.xz
tar -xvf $LIBDRM_VERSION.tar.xz && rm $LIBDRM_VERSION.tar.xz
cd $LIBDRM_VERSION
meson build -D vc4=true -D freedreno=true -D etnaviv=true $EXTRA_MESON_ARGS
ninja -C build install
cd ..
rm -rf $LIBDRM_VERSION

.gitlab-ci/build-piglit.sh (Normal file)
@@ -0,0 +1,13 @@
#!/bin/bash

set -ex

git clone https://gitlab.freedesktop.org/mesa/piglit.git --single-branch --no-checkout /piglit
pushd /piglit
git checkout 404862743cf8a7b37a4e3a93b4ba1858d59cd4ab
patch -p1 <$OLDPWD/.gitlab-ci/piglit/disable-vs_in.diff
cmake -G Ninja -DCMAKE_BUILD_TYPE=Release
ninja
find -name .git -o -name '*ninja*' -o -iname '*cmake*' -o -name '*.[chao]' | xargs rm -rf
rm -rf target_api
popd
17
.gitlab-ci/build-renderdoc.sh
Normal file
17
.gitlab-ci/build-renderdoc.sh
Normal file
@@ -0,0 +1,17 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
RENDERDOC_VERSION=da02e88201dc3b64316fc33ce6ff69cc729689aa
|
||||
|
||||
git clone https://github.com/baldurk/renderdoc.git --single-branch --no-checkout /renderdoc
|
||||
pushd /renderdoc
|
||||
git checkout "$RENDERDOC_VERSION"
|
||||
cmake -G Ninja -B_build -H. -DENABLE_QRENDERDOC=false -DCMAKE_BUILD_TYPE=Release $EXTRA_CMAKE_ARGS
|
||||
ninja -C _build
|
||||
mkdir -p build/lib
|
||||
${STRIP_CMD:-strip} _build/lib/*.so
|
||||
cp _build/lib/renderdoc.so build/lib
|
||||
cp _build/lib/librenderdoc.so build/lib
|
||||
find . -not -path './build' -not -path './build/*' -delete
|
||||
popd
|
20
.gitlab-ci/build-virglrenderer.sh
Normal file
20
.gitlab-ci/build-virglrenderer.sh
Normal file
@@ -0,0 +1,20 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
mkdir -p /epoxy
|
||||
pushd /epoxy
|
||||
wget -qO- https://github.com/anholt/libepoxy/releases/download/1.5.4/libepoxy-1.5.4.tar.xz | tar -xJ --strip-components=1
|
||||
meson build/ $EXTRA_MESON_ARGS
|
||||
ninja -C build install
|
||||
popd
|
||||
rm -rf /epoxy
|
||||
|
||||
VIRGLRENDERER_VERSION=43148d1115a12219a0560a538c9872d07c28c558
|
||||
git clone https://gitlab.freedesktop.org/virgl/virglrenderer.git --single-branch --no-checkout /virglrenderer
|
||||
pushd /virglrenderer
|
||||
git checkout "$VIRGLRENDERER_VERSION"
|
||||
meson build/ $EXTRA_MESON_ARGS
|
||||
ninja -C build install
|
||||
popd
|
||||
rm -rf /virglrenderer
|
29
.gitlab-ci/build-vulkantools.sh
Normal file
29
.gitlab-ci/build-vulkantools.sh
Normal file
@@ -0,0 +1,29 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
VULKANTOOLS_VERSION=1862c6a47b64cd09156205d7f7e6b3bfcea76390
|
||||
|
||||
git clone https://github.com/LunarG/VulkanTools.git --single-branch --no-checkout /VulkanTools
|
||||
pushd /VulkanTools
|
||||
git checkout "$VULKANTOOLS_VERSION"
|
||||
./update_external_sources.sh
|
||||
mkdir _build
|
||||
./scripts/update_deps.py --dir=_build --config=release --generator=Ninja
|
||||
cmake -G Ninja -B_build -H. \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DCMAKE_INSTALL_PREFIX=/VulkanTools/build \
|
||||
-DBUILD_TESTS=OFF \
|
||||
-DBUILD_VLF=OFF \
|
||||
-DBUILD_VKTRACE=OFF \
|
||||
-DBUILD_VIA=OFF \
|
||||
-DBUILD_VKTRACE_REPLAY=OFF \
|
||||
-C_build/helper.cmake
|
||||
ninja -C _build VkLayer_screenshot VkLayer_screenshot-staging-json
|
||||
mkdir -p build/etc/vulkan/explicit_layer.d
|
||||
mkdir build/lib
|
||||
install _build/layersvt/staging-json/VkLayer_screenshot.json build/etc/vulkan/explicit_layer.d
|
||||
install _build/layersvt/libVkLayer_screenshot.so build/lib
|
||||
strip build/lib/*
|
||||
find . -not -path './build' -not -path './build/*' -delete
|
||||
popd
|
@@ -1,95 +0,0 @@
|
||||
.meson-build-for-tests:
|
||||
extends:
|
||||
- .build-linux
|
||||
stage: build-for-tests
|
||||
script:
|
||||
- &meson-build timeout --verbose ${BUILD_JOB_TIMEOUT_OVERRIDE:-$BUILD_JOB_TIMEOUT} bash --login .gitlab-ci/meson/build.sh
|
||||
- .gitlab-ci/prepare-artifacts.sh
|
||||
|
||||
.meson-build-only:
|
||||
extends:
|
||||
- .meson-build-for-tests
|
||||
- .build-only-delayed-rules
|
||||
stage: build-only
|
||||
script:
|
||||
- *meson-build
|
||||
|
||||
# Shared between windows and Linux
|
||||
.build-common:
|
||||
extends: .build-rules
|
||||
# Cancel job if a newer commit is pushed to the same branch
|
||||
interruptible: true
|
||||
variables:
|
||||
# Build jobs are typically taking between 5-12 minutes, depending on how
|
||||
# much they build and how many new Rust compilers we have to build twice.
|
||||
# Allow 25 minutes as a reasonable margin: beyond this point, something
|
||||
# has gone badly wrong, and we should try again to see if we can get
|
||||
# something from it.
|
||||
#
|
||||
# Some jobs not in the critical path use a higher timeout, particularly
|
||||
# when building with ASan or UBSan.
|
||||
BUILD_JOB_TIMEOUT: 12m
|
||||
RUN_MESON_TESTS: "true"
|
||||
timeout: 16m
|
||||
# We don't want to download any previous job's artifacts
|
||||
dependencies: []
|
||||
artifacts:
|
||||
name: "${CI_PROJECT_NAME}_${CI_JOB_NAME_SLUG}"
|
||||
when: always
|
||||
paths:
|
||||
- _build/meson-logs/*.txt
|
||||
- _build/meson-logs/strace
|
||||
- _build/.ninja_log
|
||||
- artifacts
|
||||
|
||||
.build-run-long:
|
||||
variables:
|
||||
BUILD_JOB_TIMEOUT: 18m
|
||||
timeout: 25m
|
||||
|
||||
|
||||
# Just Linux
|
||||
.build-linux:
|
||||
extends: .build-common
|
||||
variables:
|
||||
C_ARGS: >
|
||||
-Wno-error=deprecated-declarations
|
||||
CCACHE_COMPILERCHECK: "content"
|
||||
CCACHE_COMPRESS: "true"
|
||||
CCACHE_DIR: /cache/mesa/ccache
|
||||
# Use ccache transparently, and print stats before/after
|
||||
before_script:
|
||||
- !reference [default, before_script]
|
||||
- |
|
||||
export PATH="/usr/lib/ccache:$PATH"
|
||||
export CCACHE_BASEDIR="$PWD"
|
||||
if test -x /usr/bin/ccache; then
|
||||
section_start ccache_before "ccache stats before build"
|
||||
ccache --show-stats
|
||||
section_end ccache_before
|
||||
fi
|
||||
after_script:
|
||||
- if test -x /usr/bin/ccache; then ccache --show-stats | grep "Hits:"; fi
|
||||
- !reference [default, after_script]
|
||||
|
||||
.build-windows:
|
||||
extends:
|
||||
- .build-common
|
||||
- .windows-docker-tags
|
||||
cache:
|
||||
key: ${CI_JOB_NAME}
|
||||
paths:
|
||||
- subprojects/packagecache
|
||||
|
||||
.ci-deqp-artifacts:
|
||||
artifacts:
|
||||
name: "${CI_PROJECT_NAME}_${CI_JOB_NAME_SLUG}"
|
||||
when: always
|
||||
untracked: false
|
||||
paths:
|
||||
# Watch out! Artifacts are relative to the build dir.
|
||||
# https://gitlab.com/gitlab-org/gitlab-ce/commit/8788fb925706cad594adf6917a6c5f6587dd1521
|
||||
- artifacts
|
||||
- _build/meson-logs/*.txt
|
||||
- _build/meson-logs/strace
|
||||
- _build/.ninja_log
|
@@ -1,792 +0,0 @@
|
||||
include:
|
||||
- local: '.gitlab-ci/build/gitlab-ci-inc.yml'
|
||||
|
||||
# Git archive
|
||||
|
||||
make-git-archive:
|
||||
extends:
|
||||
- .fdo.ci-fairy
|
||||
stage: git-archive
|
||||
rules:
|
||||
- !reference [.scheduled_pipeline-rules, rules]
|
||||
script:
|
||||
# Compactify the .git directory
|
||||
- git gc --aggressive
|
||||
# Download & cache the perfetto subproject as well.
|
||||
- rm -rf subprojects/perfetto ; mkdir -p subprojects/perfetto && curl --fail https://android.googlesource.com/platform/external/perfetto/+archive/$(grep 'revision =' subprojects/perfetto.wrap | cut -d ' ' -f3).tar.gz | tar zxf - -C subprojects/perfetto
|
||||
# compress the current folder
|
||||
- tar -cvzf ../$CI_PROJECT_NAME.tar.gz .
|
||||
|
||||
- ci-fairy s3cp --token-file "${S3_JWT_FILE}" ../$CI_PROJECT_NAME.tar.gz https://$S3_HOST/git-cache/$CI_PROJECT_NAMESPACE/$CI_PROJECT_NAME/$CI_PROJECT_NAME.tar.gz
|
||||
|
||||
debian-x86_64:
|
||||
extends:
|
||||
- .meson-build-for-tests
|
||||
- .use-debian/x86_64_build
|
||||
- .build-run-long # but it really shouldn't! tracked in mesa#12544
|
||||
- .ci-deqp-artifacts
|
||||
variables:
|
||||
UNWIND: "enabled"
|
||||
DRI_LOADERS: >
|
||||
-D egl=enabled
|
||||
-D gbm=enabled
|
||||
-D glvnd=disabled
|
||||
-D glx=dri
|
||||
-D platforms=x11,wayland
|
||||
GALLIUM_ST: >
|
||||
-D gallium-rusticl=true
|
||||
-D gallium-va=enabled
|
||||
GALLIUM_DRIVERS: "llvmpipe,softpipe,virgl,radeonsi,zink,iris,svga"
|
||||
VULKAN_DRIVERS: "swrast,amd,intel,virtio"
|
||||
BUILDTYPE: "debugoptimized"
|
||||
EXTRA_OPTION: >
|
||||
-D intel-elk=false
|
||||
-D spirv-to-dxil=true
|
||||
-D tools=drm-shim
|
||||
-D valgrind=disabled
|
||||
S3_ARTIFACT_NAME: mesa-x86_64-default-${BUILDTYPE}
|
||||
RUN_MESON_TESTS: "false" # debian-build-x86_64 already runs these
|
||||
artifacts:
|
||||
reports:
|
||||
junit: artifacts/ci_scripts_report.xml
|
||||
|
||||
debian-x86_64-asan:
|
||||
extends:
|
||||
- debian-x86_64
|
||||
- .meson-build-for-tests
|
||||
- .build-run-long
|
||||
variables:
|
||||
VULKAN_DRIVERS: "swrast"
|
||||
GALLIUM_DRIVERS: "llvmpipe,softpipe"
|
||||
C_ARGS: >
|
||||
-Wno-error=stringop-truncation
|
||||
-Wno-error=deprecated-declarations
|
||||
EXTRA_OPTION: >
|
||||
-D b_sanitize=address
|
||||
-D gallium-va=false
|
||||
-D gallium-rusticl=false
|
||||
-D mesa-clc=system
|
||||
-D tools=dlclose-skip
|
||||
-D valgrind=disabled
|
||||
S3_ARTIFACT_NAME: mesa-x86_64-asan-${BUILDTYPE}
|
||||
ARTIFACTS_DEBUG_SYMBOLS: 1
|
||||
RUN_MESON_TESTS: "false" # just too slow
|
||||
# Do a host build for mesa-clc (asan complains not being loaded as
|
||||
# the first library)
|
||||
HOST_BUILD_OPTIONS: >
|
||||
-D build-tests=false
|
||||
-D enable-glcpp-tests=false
|
||||
-D gallium-rusticl=false
|
||||
-D gallium-drivers=
|
||||
-D glx=disabled
|
||||
-D install-mesa-clc=true
|
||||
-D mesa-clc=enabled
|
||||
-D platforms=
|
||||
-D video-codecs=
|
||||
-D vulkan-drivers=
|
||||
|
||||
debian-x86_64-msan:
|
||||
# https://github.com/google/sanitizers/wiki/MemorySanitizerLibcxxHowTo
|
||||
# msan cannot fully work until it's used together with msan libc
|
||||
extends:
|
||||
- debian-clang
|
||||
- .meson-build-only
|
||||
- .build-run-long
|
||||
variables:
|
||||
# l_undef is incompatible with msan
|
||||
EXTRA_OPTION:
|
||||
-D b_sanitize=memory
|
||||
-D b_lundef=false
|
||||
-D mesa-clc=system
|
||||
-D precomp-compiler=system
|
||||
S3_ARTIFACT_NAME: ""
|
||||
ARTIFACTS_DEBUG_SYMBOLS: 1
|
||||
# Don't run all the tests yet:
|
||||
# GLSL has some issues in sexpression reading.
|
||||
# gtest has issues in its test initialization.
|
||||
MESON_TEST_ARGS: "--suite glcpp --suite format"
|
||||
GALLIUM_DRIVERS: "freedreno,iris,nouveau,r300,r600,llvmpipe,softpipe,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,radeonsi,tegra,d3d12,crocus"
|
||||
VULKAN_DRIVERS: intel,amd,broadcom,virtio
|
||||
C_ARGS: >
|
||||
-Wno-error=constant-conversion
|
||||
-Wno-error=enum-conversion
|
||||
-Wno-error=initializer-overrides
|
||||
-Wno-error=sometimes-uninitialized
|
||||
-Werror=misleading-indentation
|
||||
-Wno-error=deprecated-declarations
|
||||
CPP_ARGS: >
|
||||
-Wno-error=c99-designator
|
||||
-Wno-error=overloaded-virtual
|
||||
-Wno-error=tautological-constant-out-of-range-compare
|
||||
-Wno-error=unused-private-field
|
||||
-Wno-error=vla-cxx-extension
|
||||
-Wno-error=deprecated-declarations
|
||||
RUN_MESON_TESTS: "false" # just too slow
|
||||
# Do a host build for mesa-clc and precomp-compiler (msan complains about uninitialized
|
||||
# values in the LLVM libs)
|
||||
HOST_BUILD_OPTIONS: >
|
||||
-D build-tests=false
|
||||
-D enable-glcpp-tests=false
|
||||
-D gallium-drivers=
|
||||
-D vulkan-drivers=
|
||||
-D video-codecs=
|
||||
-D glx=disabled
|
||||
-D platforms=
|
||||
-D mesa-clc=enabled
|
||||
-D install-mesa-clc=true
|
||||
-D precomp-compiler=enabled
|
||||
-D install-precomp-compiler=true
|
||||
-D tools=panfrost
|
||||
|
||||
debian-x86_64-ubsan:
|
||||
extends:
|
||||
- debian-x86_64
|
||||
- .meson-build-for-tests
|
||||
- .build-run-long
|
||||
variables:
|
||||
C_ARGS: >
|
||||
-Wno-error=stringop-overflow
|
||||
-Wno-error=stringop-truncation
|
||||
-Wno-error=deprecated-declarations
|
||||
CPP_ARGS: >
|
||||
-Wno-error=array-bounds
|
||||
GALLIUM_DRIVERS: "llvmpipe,softpipe"
|
||||
VULKAN_DRIVERS: "swrast"
|
||||
EXTRA_OPTION: >
|
||||
-D b_sanitize=undefined
|
||||
-D mesa-clc=system
|
||||
-D gallium-rusticl=false
|
||||
-D gallium-va=false
|
||||
S3_ARTIFACT_NAME: ""
|
||||
ARTIFACTS_DEBUG_SYMBOLS: 1
|
||||
RUN_MESON_TESTS: "false" # just too slow
|
||||
HOST_BUILD_OPTIONS: >
|
||||
-D build-tests=false
|
||||
-D enable-glcpp-tests=false
|
||||
-D gallium-rusticl=false
|
||||
-D gallium-drivers=
|
||||
-D vulkan-drivers=
|
||||
-D video-codecs=
|
||||
-D glx=disabled
|
||||
-D platforms=
|
||||
-D mesa-clc=enabled
|
||||
-D install-mesa-clc=true
|
||||
|
||||
debian-build-x86_64:
|
||||
extends:
|
||||
- .meson-build-for-tests
|
||||
- .use-debian/x86_64_build
|
||||
variables:
|
||||
UNWIND: "enabled"
|
||||
C_ARGS: >
|
||||
-Wno-error=deprecated-declarations
|
||||
CPP_ARGS: >
|
||||
-Wno-error=deprecated-declarations
|
||||
DRI_LOADERS: >
|
||||
-D glx=dri
|
||||
-D gbm=enabled
|
||||
-D egl=enabled
|
||||
-D glvnd=disabled
|
||||
-D platforms=x11,wayland
|
||||
GALLIUM_ST: >
|
||||
-D gallium-extra-hud=true
|
||||
-D gallium-vdpau=enabled
|
||||
-D gallium-va=enabled
|
||||
-D gallium-rusticl=false
|
||||
-D legacy-wayland=bind-wayland-display
|
||||
GALLIUM_DRIVERS: "i915,iris,nouveau,r300,r600,freedreno,llvmpipe,softpipe,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,d3d12,asahi,crocus"
|
||||
VULKAN_DRIVERS: "intel_hasvk,imagination-experimental,microsoft-experimental,nouveau,swrast"
|
||||
BUILDTYPE: "debugoptimized"
|
||||
EXTRA_OPTION: >
|
||||
-D spirv-to-dxil=true
|
||||
-D tools=drm-shim,etnaviv,freedreno,glsl,intel,intel-ui,nir,nouveau,lima,panfrost,asahi
|
||||
-D perfetto=true
|
||||
S3_ARTIFACT_NAME: debian-build-x86_64
|
||||
|
||||
# Test a release build with -Werror so new warnings don't sneak in.
|
||||
debian-release:
|
||||
extends:
|
||||
- .meson-build-only
|
||||
- .use-debian/x86_64_build
|
||||
variables:
|
||||
UNWIND: "enabled"
|
||||
C_ARGS: >
|
||||
-Wno-error=stringop-overread
|
||||
-Wno-error=deprecated-declarations
|
||||
CPP_ARGS: >
|
||||
-Wno-error=deprecated-declarations
|
||||
DRI_LOADERS: >
|
||||
-D glx=dri
|
||||
-D gbm=enabled
|
||||
-D egl=enabled
|
||||
-D glvnd=disabled
|
||||
-D platforms=x11,wayland
|
||||
GALLIUM_ST: >
|
||||
-D gallium-extra-hud=true
|
||||
-D gallium-vdpau=enabled
|
||||
-D gallium-va=enabled
|
||||
-D gallium-rusticl=false
|
||||
-D llvm=enabled
|
||||
GALLIUM_DRIVERS: "i915,iris,nouveau,r300,freedreno,llvmpipe,softpipe,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,d3d12,asahi,crocus"
|
||||
VULKAN_DRIVERS: "swrast,intel_hasvk,imagination-experimental,microsoft-experimental"
|
||||
EXTRA_OPTION: >
|
||||
-D spirv-to-dxil=true
|
||||
-D tools=all
|
||||
-D mesa-clc=enabled
|
||||
-D precomp-compiler=enabled
|
||||
-D intel-rt=enabled
|
||||
-D imagination-srv=true
|
||||
BUILDTYPE: "release"
|
||||
S3_ARTIFACT_NAME: "mesa-x86_64-default-${BUILDTYPE}"
|
||||
script:
|
||||
- !reference [.meson-build-only, script]
|
||||
- 'if [ -n "$MESA_CI_PERFORMANCE_ENABLED" ]; then .gitlab-ci/prepare-artifacts.sh; fi'
|
||||
|
||||
alpine-build-testing:
|
||||
extends:
|
||||
- .meson-build-only
|
||||
- .use-alpine/x86_64_build
|
||||
variables:
|
||||
BUILDTYPE: "release"
|
||||
C_ARGS: >
|
||||
-Wno-error=cpp
|
||||
-Wno-error=array-bounds
|
||||
-Wno-error=stringop-overflow
|
||||
-Wno-error=stringop-overread
|
||||
-Wno-error=deprecated-declarations
|
||||
DRI_LOADERS: >
|
||||
-D glx=disabled
|
||||
-D gbm=enabled
|
||||
-D egl=enabled
|
||||
-D glvnd=disabled
|
||||
-D platforms=wayland
|
||||
GALLIUM_DRIVERS: "crocus,etnaviv,freedreno,iris,lima,nouveau,panfrost,r300,r600,radeonsi,svga,llvmpipe,softpipe,tegra,v3d,vc4,virgl,zink"
|
||||
GALLIUM_ST: >
|
||||
-D gallium-extra-hud=true
|
||||
-D gallium-vdpau=disabled
|
||||
-D gallium-va=enabled
|
||||
-D gallium-rusticl=false
|
||||
-D gles1=disabled
|
||||
-D gles2=enabled
|
||||
-D llvm=enabled
|
||||
-D llvm-orcjit=true
|
||||
-D microsoft-clc=disabled
|
||||
-D shared-llvm=enabled
|
||||
UNWIND: "disabled"
|
||||
VULKAN_DRIVERS: "amd,broadcom,freedreno,intel,imagination-experimental"
|
||||
|
||||
fedora-release:
|
||||
extends:
|
||||
- .meson-build-only
|
||||
- .use-fedora/x86_64_build
|
||||
- .build-run-long
|
||||
# LTO builds can be really very slow, and we have no way to specify different
|
||||
# timeouts for pre-merge and nightly jobs
|
||||
timeout: 1h
|
||||
variables:
|
||||
BUILDTYPE: "release"
|
||||
# array-bounds are pure non-LTO gcc buggy warning
|
||||
# maybe-uninitialized is misfiring in nir_lower_gs_intrinsics.c, and
|
||||
# a "maybe" warning should never be an error anyway.
|
||||
C_ARGS: >
|
||||
-Wno-error=stringop-overflow
|
||||
-Wno-error=stringop-overread
|
||||
-Wno-error=array-bounds
|
||||
-Wno-error=maybe-uninitialized
|
||||
-Wno-error=deprecated-declarations
|
||||
CPP_ARGS: >
|
||||
-Wno-error=dangling-reference
|
||||
-Wno-error=overloaded-virtual
|
||||
-Wno-error=deprecated-declarations
|
||||
DRI_LOADERS: >
|
||||
-D glx=dri
|
||||
-D gbm=enabled
|
||||
-D egl=enabled
|
||||
-D glvnd=enabled
|
||||
-D platforms=x11,wayland
|
||||
EXTRA_OPTION: >
|
||||
-D tools=drm-shim,etnaviv,freedreno,glsl,intel,nir,nouveau,lima,panfrost,imagination
|
||||
-D vulkan-layers=device-select,overlay
|
||||
-D intel-rt=enabled
|
||||
-D imagination-srv=true
|
||||
-D teflon=true
|
||||
GALLIUM_DRIVERS: "crocus,etnaviv,freedreno,i915,iris,lima,nouveau,panfrost,r300,r600,radeonsi,svga,llvmpipe,softpipe,tegra,v3d,vc4,virgl,zink"
|
||||
GALLIUM_ST: >
|
||||
-D gallium-extra-hud=true
|
||||
-D gallium-vdpau=enabled
|
||||
-D gallium-va=enabled
|
||||
-D gallium-rusticl=true
|
||||
-D gles1=disabled
|
||||
-D gles2=enabled
|
||||
-D llvm=enabled
|
||||
-D microsoft-clc=disabled
|
||||
-D shared-llvm=enabled
|
||||
UNWIND: "disabled"
|
||||
VULKAN_DRIVERS: "amd,asahi,broadcom,freedreno,imagination-experimental,intel,intel_hasvk"
|
||||
|
||||
debian-android:
|
||||
extends:
|
||||
- .android-variables
|
||||
- .meson-cross
|
||||
- .use-debian/android_build
|
||||
- .ci-deqp-artifacts
|
||||
- .meson-build-for-tests
|
||||
variables:
|
||||
BUILDTYPE: debug
|
||||
UNWIND: "disabled"
|
||||
C_ARGS: >
|
||||
-Wno-error=asm-operand-widths
|
||||
-Wno-error=constant-conversion
|
||||
-Wno-error=enum-conversion
|
||||
-Wno-error=initializer-overrides
|
||||
-Wno-error=sometimes-uninitialized
|
||||
-Wno-error=deprecated-declarations
|
||||
CPP_ARGS: >
|
||||
-Wno-error=c99-designator
|
||||
-Wno-error=unused-variable
|
||||
-Wno-error=unused-but-set-variable
|
||||
-Wno-error=self-assign
|
||||
-Wno-error=deprecated-declarations
|
||||
DRI_LOADERS: >
|
||||
-D glx=disabled
|
||||
-D gbm=disabled
|
||||
-D egl=enabled
|
||||
-D glvnd=disabled
|
||||
-D platforms=android
|
||||
FORCE_FALLBACK_FOR: llvm
|
||||
EXTRA_OPTION: >
|
||||
-D amd-use-llvm=false
|
||||
-D android-stub=true
|
||||
-D platform-sdk-version=${ANDROID_SDK_VERSION}
|
||||
-D cpp_rtti=false
|
||||
-D valgrind=disabled
|
||||
-D android-libbacktrace=disabled
|
||||
-D mesa-clc=system
|
||||
-D precomp-compiler=system
|
||||
GALLIUM_ST: >
|
||||
-D gallium-vdpau=disabled
|
||||
-D gallium-va=disabled
|
||||
-D gallium-rusticl=false
|
||||
PKG_CONFIG_LIBDIR: "/disable/non/android/system/pc/files"
|
||||
HOST_BUILD_OPTIONS: >
|
||||
-D build-tests=false
|
||||
-D enable-glcpp-tests=false
|
||||
-D gallium-drivers=
|
||||
-D vulkan-drivers=
|
||||
-D video-codecs=
|
||||
-D glx=disabled
|
||||
-D platforms=
|
||||
-D mesa-clc=enabled
|
||||
-D install-mesa-clc=true
|
||||
-D precomp-compiler=enabled
|
||||
-D install-precomp-compiler=true
|
||||
-D tools=panfrost
|
||||
S3_ARTIFACT_NAME: mesa-x86_64-android-${BUILDTYPE}
|
||||
script:
|
||||
# x86_64 build:
|
||||
- export CROSS=x86_64-linux-android
|
||||
- export GALLIUM_DRIVERS=iris,radeonsi,softpipe,virgl,zink
|
||||
- export VULKAN_DRIVERS=amd,intel,swrast,virtio
|
||||
- .gitlab-ci/create-llvm-meson-wrap-file.sh
|
||||
- !reference [.meson-build-for-tests, script]
|
||||
# remove all the files created by the previous build before the next build
|
||||
- git clean -dxf .
|
||||
# aarch64 build:
|
||||
# build-only, to catch compilation regressions
|
||||
# without calling .gitlab-ci/prepare-artifacts.sh so that the
|
||||
# artifacts are not shipped in mesa-x86_64-android-${BUILDTYPE}
|
||||
- export CROSS=aarch64-linux-android
|
||||
- export GALLIUM_DRIVERS=etnaviv,freedreno,lima,panfrost,vc4,v3d
|
||||
- export VULKAN_DRIVERS=freedreno,broadcom,virtio
|
||||
- !reference [.meson-build-only, script]
|
||||
|
||||
.meson-cross:
|
||||
extends:
|
||||
- .meson-build-only
|
||||
- .use-debian/x86_64_build
|
||||
variables:
|
||||
UNWIND: "disabled"
|
||||
DRI_LOADERS: >
|
||||
-D glx=dri
|
||||
-D gbm=enabled
|
||||
-D egl=enabled
|
||||
-D platforms=x11,wayland
|
||||
GALLIUM_ST: >
|
||||
-D gallium-vdpau=disabled
|
||||
-D gallium-va=disabled
|
||||
|
||||
.meson-arm:
|
||||
extends:
|
||||
- .meson-cross
|
||||
- .use-debian/arm64_build
|
||||
variables:
|
||||
VULKAN_DRIVERS: "asahi,broadcom,freedreno"
|
||||
GALLIUM_DRIVERS: "etnaviv,freedreno,lima,nouveau,panfrost,llvmpipe,softpipe,tegra,v3d,vc4,zink"
|
||||
BUILDTYPE: "debugoptimized"
|
||||
|
||||
debian-arm32:
|
||||
extends:
|
||||
- .meson-arm
|
||||
- .ci-deqp-artifacts
|
||||
- .meson-build-for-tests
|
||||
variables:
|
||||
CROSS: armhf
|
||||
DRI_LOADERS:
|
||||
-D glvnd=disabled
|
||||
# asahi & llvmpipe are dropped from the .meson-arm driver lists because llvm is disabled here
|
||||
VULKAN_DRIVERS: "broadcom,freedreno"
|
||||
GALLIUM_DRIVERS: "etnaviv,freedreno,lima,nouveau,panfrost,softpipe,tegra,v3d,vc4,zink"
|
||||
EXTRA_OPTION: >
|
||||
-D llvm=disabled
|
||||
-D valgrind=disabled
|
||||
-D gallium-rusticl=false
|
||||
-D mesa-clc=system
|
||||
-D precomp-compiler=system
|
||||
HOST_BUILD_OPTIONS: >
|
||||
-D build-tests=false
|
||||
-D enable-glcpp-tests=false
|
||||
-D gallium-drivers=
|
||||
-D vulkan-drivers=
|
||||
-D video-codecs=
|
||||
-D glx=disabled
|
||||
-D platforms=
|
||||
-D mesa-clc=enabled
|
||||
-D install-mesa-clc=true
|
||||
-D precomp-compiler=enabled
|
||||
-D install-precomp-compiler=true
|
||||
-D tools=panfrost
|
||||
S3_ARTIFACT_NAME: mesa-arm32-default-${BUILDTYPE}
|
||||
# The strip command segfaults, failing to strip the binary and leaving
|
||||
# tempfiles in our artifacts.
|
||||
ARTIFACTS_DEBUG_SYMBOLS: 1
|
||||
|
||||
debian-arm32-asan:
|
||||
extends:
|
||||
- debian-arm32
|
||||
- .meson-build-for-tests
|
||||
- .build-run-long
|
||||
variables:
|
||||
GALLIUM_DRIVERS: "etnaviv"
|
||||
VULKAN_DRIVERS: ""
|
||||
DRI_LOADERS:
|
||||
-D glvnd=disabled
|
||||
EXTRA_OPTION: >
|
||||
-D llvm=disabled
|
||||
-D b_sanitize=address
|
||||
-D valgrind=disabled
|
||||
-D tools=dlclose-skip
|
||||
-D gallium-rusticl=false
|
||||
ARTIFACTS_DEBUG_SYMBOLS: 1
|
||||
RUN_MESON_TESTS: "false" # just too slow
|
||||
S3_ARTIFACT_NAME: mesa-arm32-asan-${BUILDTYPE}
|
||||
|
||||
debian-arm64:
|
||||
extends:
|
||||
- .meson-arm
|
||||
- .ci-deqp-artifacts
|
||||
- .meson-build-for-tests
|
||||
variables:
|
||||
C_ARGS: >
|
||||
-Wno-error=array-bounds
|
||||
-Wno-error=stringop-truncation
|
||||
-Wno-error=deprecated-declarations
|
||||
GALLIUM_DRIVERS: "etnaviv,freedreno,lima,panfrost,v3d,vc4,zink"
|
||||
VULKAN_DRIVERS: "broadcom,freedreno,panfrost"
|
||||
DRI_LOADERS:
|
||||
-D glvnd=disabled
|
||||
EXTRA_OPTION: >
|
||||
-D valgrind=disabled
|
||||
-D imagination-srv=true
|
||||
-D freedreno-kmds=msm,virtio
|
||||
-D teflon=true
|
||||
GALLIUM_ST:
|
||||
-D gallium-rusticl=true
|
||||
RUN_MESON_TESTS: "false" # run by debian-arm64-build-testing
|
||||
S3_ARTIFACT_NAME: mesa-arm64-default-${BUILDTYPE}
|
||||
|
||||
debian-arm64-asan:
|
||||
extends:
|
||||
- debian-arm64
|
||||
- .meson-build-for-tests
|
||||
- .build-run-long
|
||||
variables:
|
||||
VULKAN_DRIVERS: "broadcom,freedreno"
|
||||
GALLIUM_DRIVERS: "freedreno,vc4,v3d"
|
||||
C_ARGS: >
|
||||
-Wno-error=deprecated-declarations
|
||||
DRI_LOADERS:
|
||||
-D glvnd=disabled
|
||||
EXTRA_OPTION: >
|
||||
-D b_sanitize=address
|
||||
-D valgrind=disabled
|
||||
-D tools=dlclose-skip
|
||||
-D gallium-rusticl=false
|
||||
ARTIFACTS_DEBUG_SYMBOLS: 1
|
||||
RUN_MESON_TESTS: "false" # just too slow
|
||||
S3_ARTIFACT_NAME: mesa-arm64-asan-${BUILDTYPE}
|
||||
|
||||
debian-arm64-ubsan:
|
||||
extends:
|
||||
- debian-arm64
|
||||
- .meson-build-for-tests
|
||||
- .build-run-long
|
||||
variables:
|
||||
VULKAN_DRIVERS: "broadcom"
|
||||
GALLIUM_DRIVERS: "v3d,vc4"
|
||||
C_ARGS: >
|
||||
-Wno-error=array-bounds
|
||||
-Wno-error=stringop-overflow
|
||||
-Wno-error=stringop-truncation
|
||||
-Wno-error=deprecated-declarations
|
||||
CPP_ARGS: >
|
||||
-Wno-error=array-bounds
|
||||
-fno-var-tracking-assignments
|
||||
DRI_LOADERS:
|
||||
-D glvnd=disabled
|
||||
EXTRA_OPTION: >
|
||||
-D b_sanitize=undefined
|
||||
-D gallium-rusticl=false
|
||||
ARTIFACTS_DEBUG_SYMBOLS: 1
|
||||
RUN_MESON_TESTS: "false" # just too slow
|
||||
S3_ARTIFACT_NAME: mesa-arm64-ubsan-${BUILDTYPE}
|
||||
|
||||
debian-arm64-build-test:
|
||||
extends:
|
||||
- .meson-arm
|
||||
- .ci-deqp-artifacts
|
||||
- .meson-build-only
|
||||
variables:
|
||||
VULKAN_DRIVERS: "amd,asahi,imagination-experimental,nouveau"
|
||||
DRI_LOADERS:
|
||||
-D glvnd=disabled
|
||||
EXTRA_OPTION: >
|
||||
-D tools=panfrost,imagination
|
||||
-D perfetto=true
|
||||
|
||||
debian-arm64-release:
|
||||
extends:
|
||||
- debian-arm64
|
||||
- .meson-build-only
|
||||
variables:
|
||||
BUILDTYPE: release
|
||||
S3_ARTIFACT_NAME: mesa-arm64-default-${BUILDTYPE}
|
||||
C_ARGS: >
|
||||
-Wno-error=array-bounds
|
||||
-Wno-error=stringop-overread
|
||||
-Wno-error=deprecated-declarations
|
||||
script:
|
||||
- !reference [.meson-build-only, script]
|
||||
- 'if [ -n "$MESA_CI_PERFORMANCE_ENABLED" ]; then .gitlab-ci/prepare-artifacts.sh; fi'
|
||||
|
||||
debian-no-libdrm:
|
||||
extends:
|
||||
- .meson-arm
|
||||
- .meson-build-only
|
||||
variables:
|
||||
VULKAN_DRIVERS: freedreno
|
||||
GALLIUM_DRIVERS: "zink,llvmpipe"
|
||||
BUILDTYPE: release
|
||||
C_ARGS: >
|
||||
-Wno-error=stringop-overread
|
||||
-Wno-error=deprecated-declarations
|
||||
EXTRA_OPTION: >
|
||||
-D freedreno-kmds=kgsl
|
||||
-D glx=disabled
|
||||
-D gbm=disabled
|
||||
-D egl=disabled
|
||||
-D perfetto=true
|
||||
|
||||
debian-clang:
|
||||
extends:
|
||||
- .meson-build-only
|
||||
- .use-debian/x86_64_build
|
||||
variables:
|
||||
BUILDTYPE: debug
|
||||
UNWIND: "enabled"
|
||||
C_ARGS: >
|
||||
-Wno-error=constant-conversion
|
||||
-Wno-error=enum-conversion
|
||||
-Wno-error=initializer-overrides
|
||||
-Wno-error=sometimes-uninitialized
|
||||
-Werror=misleading-indentation
|
||||
-Wno-error=deprecated-declarations
|
||||
CPP_ARGS: >
|
||||
-Wno-error=c99-designator
|
||||
-Wno-error=overloaded-virtual
|
||||
-Wno-error=tautological-constant-out-of-range-compare
|
||||
-Wno-error=unused-private-field
|
||||
-Wno-error=vla-cxx-extension
|
||||
-Wno-error=deprecated-declarations
|
||||
DRI_LOADERS: >
|
||||
-D glx=dri
|
||||
-D gbm=enabled
|
||||
-D egl=enabled
|
||||
-D glvnd=enabled
|
||||
-D platforms=x11,wayland
|
||||
GALLIUM_ST: >
|
||||
-D gallium-extra-hud=true
|
||||
-D gallium-vdpau=enabled
|
||||
-D gallium-va=enabled
|
||||
-D gles1=enabled
|
||||
-D gles2=enabled
|
||||
-D llvm=enabled
|
||||
-D microsoft-clc=disabled
|
||||
-D shared-llvm=enabled
|
||||
GALLIUM_DRIVERS: "iris,nouveau,r300,r600,freedreno,llvmpipe,softpipe,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,radeonsi,tegra,d3d12,crocus,i915,asahi"
|
||||
VULKAN_DRIVERS: intel,amd,freedreno,broadcom,virtio,swrast,panfrost,imagination-experimental,microsoft-experimental,nouveau
|
||||
EXTRA_OPTION:
|
||||
-D spirv-to-dxil=true
|
||||
-D imagination-srv=true
|
||||
-D tools=drm-shim,etnaviv,freedreno,glsl,intel,intel-ui,nir,nouveau,lima,panfrost,asahi,imagination
|
||||
-D vulkan-layers=device-select,overlay
|
||||
-D build-radv-tests=true
|
||||
-D build-aco-tests=true
|
||||
-D mesa-clc=enabled
|
||||
-D precomp-compiler=enabled
|
||||
-D intel-rt=enabled
|
||||
-D imagination-srv=true
|
||||
-D teflon=true
|
||||
CC: clang-${LLVM_VERSION}
|
||||
CXX: clang++-${LLVM_VERSION}
|
||||
|
||||
debian-clang-release:
|
||||
extends:
|
||||
- debian-clang
|
||||
- .meson-build-only
|
||||
- .build-run-long
|
||||
variables:
|
||||
BUILDTYPE: "release"
|
||||
C_ARGS: >
|
||||
-Wno-error=constant-conversion
|
||||
-Wno-error=enum-conversion
|
||||
-Wno-error=initializer-overrides
|
||||
-Wno-error=sometimes-uninitialized
|
||||
-Wno-error=deprecated-declarations
|
||||
CPP_ARGS: >
|
||||
-Wno-error=c99-designator
|
||||
-Wno-error=overloaded-virtual
|
||||
-Wno-error=tautological-constant-out-of-range-compare
|
||||
-Wno-error=unused-private-field
|
||||
-Wno-error=vla-cxx-extension
|
||||
-Wno-error=deprecated-declarations
|
||||
DRI_LOADERS: >
|
||||
-D glx=xlib
|
||||
-D platforms=x11,wayland
|
||||
GALLIUM_ST: >
|
||||
-D gallium-extra-hud=true
|
||||
-D gallium-vdpau=enabled
|
||||
-D gallium-va=enabled
|
||||
-D gles1=disabled
|
||||
-D gles2=disabled
|
||||
-D llvm=enabled
|
||||
-D microsoft-clc=disabled
|
||||
-D shared-llvm=enabled
|
||||
|
||||
windows-msvc:
|
||||
extends:
|
||||
- .build-windows
|
||||
- .use-windows_build_msvc
|
||||
- .windows-build-rules
|
||||
stage: build-for-tests
|
||||
script:
|
||||
- pwsh -ExecutionPolicy RemoteSigned .\.gitlab-ci\windows\mesa_build.ps1
|
||||
artifacts:
|
||||
paths:
|
||||
- _build/meson-logs/*.txt
|
||||
- _install/
|
||||
|
||||
debian-vulkan:
|
||||
extends:
|
||||
- .meson-build-only
|
||||
- .use-debian/x86_64_build
|
||||
variables:
|
||||
BUILDTYPE: debug
|
||||
UNWIND: "disabled"
|
||||
DRI_LOADERS: >
|
||||
-D glx=disabled
|
||||
-D gbm=disabled
|
||||
-D egl=disabled
|
||||
-D opengl=false
|
||||
-D gles1=disabled
|
||||
-D gles2=disabled
|
||||
-D glvnd=disabled
|
||||
-D platforms=x11,wayland
|
||||
GALLIUM_ST: >
|
||||
-D gallium-vdpau=disabled
|
||||
-D gallium-va=disabled
|
||||
-D gallium-rusticl=false
|
||||
-D b_sanitize=undefined
|
||||
-D c_args=-fno-sanitize-recover=all
|
||||
-D cpp_args=-fno-sanitize-recover=all
|
||||
UBSAN_OPTIONS: "print_stacktrace=1"
|
||||
VULKAN_DRIVERS: amd,asahi,broadcom,freedreno,intel,intel_hasvk,panfrost,virtio,imagination-experimental,microsoft-experimental,nouveau
|
||||
EXTRA_OPTION: >
|
||||
-D vulkan-layers=device-select,overlay
|
||||
-D build-radv-tests=true
|
||||
-D build-aco-tests=true
|
||||
-D intel-rt=disabled
|
||||
-D imagination-srv=true
|
||||
|
||||
debian-x86_32:
|
||||
extends:
|
||||
- .meson-cross
|
||||
- .use-debian/x86_32_build
|
||||
- .meson-build-only
|
||||
- .build-run-long # it's not clear why this runs long, but it also doesn't matter much
|
||||
variables:
|
||||
BUILDTYPE: debug
|
||||
CROSS: i386
|
||||
VULKAN_DRIVERS: intel,amd,swrast,virtio,panfrost
|
||||
GALLIUM_DRIVERS: "iris,nouveau,r300,r600,radeonsi,llvmpipe,softpipe,virgl,zink,crocus,d3d12,panfrost"
|
||||
DRI_LOADERS:
|
||||
-D glvnd=disabled
|
||||
EXTRA_OPTION: >
|
||||
-D vulkan-layers=device-select,overlay
|
||||
-D mesa-clc=system
|
||||
CPP_ARGS: >
|
||||
-Wno-error=deprecated-declarations
|
||||
C_LINK_ARGS: >
|
||||
-Wl,--no-warn-rwx-segments
|
||||
CPP_LINK_ARGS: >
|
||||
-Wl,--no-warn-rwx-segments
|
||||
HOST_BUILD_OPTIONS: >
|
||||
-D build-tests=false
|
||||
-D enable-glcpp-tests=false
|
||||
-D gallium-drivers=
|
||||
-D vulkan-drivers=
|
||||
-D video-codecs=
|
||||
-D glx=disabled
|
||||
-D platforms=
|
||||
-D mesa-clc=enabled
|
||||
-D install-mesa-clc=true
|
||||
|
||||
# While s390 is dead, s390x is very much alive, and one of the last major
|
||||
# big-endian platforms, so it provides useful coverage.
|
||||
# In case of issues with this job, contact @ajax
|
||||
debian-s390x:
|
||||
extends:
|
||||
- .meson-cross
|
||||
- .use-debian/s390x_build
|
||||
- .meson-build-only
|
||||
tags:
|
||||
- $FDO_RUNNER_JOB_PRIORITY_TAG_X86_64_KVM
|
||||
variables:
|
||||
BUILDTYPE: debug
|
||||
CROSS: s390x
|
||||
GALLIUM_DRIVERS: "llvmpipe,virgl,zink"
|
||||
VULKAN_DRIVERS: "swrast,virtio"
|
||||
DRI_LOADERS:
|
||||
-D glvnd=disabled
|
||||
|
||||
debian-ppc64el:
|
||||
extends:
|
||||
- .meson-cross
|
||||
- .use-debian/ppc64el_build
|
||||
- .meson-build-only
|
||||
variables:
|
||||
BUILDTYPE: debug
|
||||
CROSS: ppc64el
|
||||
GALLIUM_DRIVERS: "nouveau,llvmpipe,softpipe,virgl,zink"
|
||||
VULKAN_DRIVERS: "swrast"
|
||||
DRI_LOADERS:
|
||||
-D glvnd=disabled
|
@@ -1,268 +0,0 @@
|
||||
# For CI-tron based testing farm jobs.
|
||||
.ci-tron-test:
|
||||
extends:
|
||||
- .ci-tron-b2c-job-v1
|
||||
variables:
|
||||
GIT_STRATEGY: none
|
||||
B2C_VERSION: v0.9.15.1 # Linux 6.13.7
|
||||
|
||||
SCRIPTS_DIR: install
|
||||
|
||||
CI_TRON_PATTERN__JOB_SUCCESS__REGEX: 'hwci: mesa: exit_code: 0\r$'
|
||||
CI_TRON_PATTERN__SESSION_END__REGEX: '^.*It''s now safe to turn off your computer\r$'
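# Clarifying note (not part of the original file): the success pattern above
# matches the "hwci: mesa: exit_code: 0" line that
# $SCRIPTS_DIR/common/init-stage2.sh prints at the end of a run.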
|
||||
|
||||
CI_TRON_TIMEOUT__FIRST_CONSOLE_ACTIVITY__MINUTES: 2
|
||||
CI_TRON_TIMEOUT__FIRST_CONSOLE_ACTIVITY__RETRIES: 3
|
||||
CI_TRON_TIMEOUT__CONSOLE_ACTIVITY__MINUTES: 5
|
||||
|
||||
CI_TRON__B2C_ARTIFACT_EXCLUSION: "*.shader_cache,install/*,*/install/*,*/vkd3d-proton.cache*,vkd3d-proton.cache*,*.qpa"
|
||||
CI_TRON_HTTP_ARTIFACT__INSTALL__PATH: "/install.tar.zst"
|
||||
CI_TRON_HTTP_ARTIFACT__INSTALL__URL: "https://$PIPELINE_ARTIFACTS_BASE/$S3_ARTIFACT_NAME.tar.zst"
|
||||
|
||||
CI_TRON__B2C_MACHINE_REGISTRATION_CMD: "setup --tags $CI_TRON_DUT_SETUP_TAGS"
|
||||
CI_TRON__B2C_IMAGE_UNDER_TEST: $MESA_IMAGE
|
||||
CI_TRON__B2C_EXEC_CMD: "curl --silent --fail-with-body {{ job.http.url }}$CI_TRON_HTTP_ARTIFACT__INSTALL__PATH | tar --zstd --extract && $SCRIPTS_DIR/common/init-stage2.sh"
|
||||
|
||||
# Assume by default this is running deqp, as that's almost always true
|
||||
HWCI_TEST_SCRIPT: install/deqp-runner.sh
|
||||
|
||||
# Keep the job script in the artifacts
|
||||
CI_TRON_JOB_SCRIPT_PATH: results/job_script.sh
|
||||
needs:
|
||||
- !reference [.required-for-hardware-jobs, needs]
|
||||
tags:
|
||||
- farm:$RUNNER_FARM_LOCATION
|
||||
- $CI_TRON_DUT_SETUP_TAGS
|
||||
|
||||
# Override the default before_script, as it is not compatible with the CI-tron environment. We just keep the clearing
|
||||
# of the JWT token for security reasons
|
||||
before_script:
|
||||
- |
|
||||
set -eu
|
||||
|
||||
eval "$S3_JWT_FILE_SCRIPT"
|
||||
|
||||
for var in CI_TRON_DUT_SETUP_TAGS; do
|
||||
if [[ -z "$(eval echo \${$var:-})" ]]; then
|
||||
echo "The required variable '$var' is missing"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
# Open a section that will be closed by b2c
|
||||
echo -e "\n\e[0Ksection_start:`date +%s`:b2c_kernel_boot[collapsed=true]\r\e[0K\e[0;36m[$(cut -d ' ' -f1 /proc/uptime)]: Submitting the CI-tron job and booting the DUT\e[0m\n"
|
||||
|
||||
# Anything our job places in results/ will be collected by the
|
||||
# Gitlab coordinator for status presentation. results/junit.xml
|
||||
# will be parsed by the UI for more detailed explanations of
|
||||
# test execution.
|
||||
artifacts:
|
||||
when: always
|
||||
name: "${CI_PROJECT_NAME}_${CI_JOB_NAME_SLUG}"
|
||||
paths:
|
||||
- results
|
||||
reports:
|
||||
junit: results/**/junit.xml
|
||||
|
||||
.ci-tron-x86_64-test:
|
||||
extends:
|
||||
- .ci-tron-test
|
||||
variables:
|
||||
CI_TRON_INITRAMFS__B2C__URL: 'https://gitlab.freedesktop.org/gfx-ci/boot2container/-/releases/$B2C_VERSION/downloads/initramfs.linux_amd64.cpio.xz'
|
||||
CI_TRON_KERNEL__URL: 'https://gitlab.freedesktop.org/gfx-ci/boot2container/-/releases/$B2C_VERSION/downloads/linux-x86_64'
|
||||
|
||||
# Set the following variables if you need AMD, Intel, or NVIDIA support
|
||||
# CI_TRON_INITRAMFS__DEPMOD__URL: "https://gitlab.freedesktop.org/gfx-ci/boot2container/-/releases/$B2C_VERSION/downloads/linux-x86_64.depmod.cpio.xz"
|
||||
# CI_TRON_INITRAMFS__GPU__URL: "https://gitlab.freedesktop.org/gfx-ci/boot2container/-/releases/$B2C_VERSION/downloads/linux-x86_64.gpu.cpio"
|
||||
# CI_TRON_INITRAMFS__GPU__FORMAT__0__ARCHIVE__KEEP__0__PATH: "(lib/(modules|firmware/amdgpu)/.*)"
|
||||
|
||||
S3_ARTIFACT_NAME: "mesa-x86_64-default-debugoptimized"
|
||||
|
||||
.ci-tron-x86_64-test-vk:
|
||||
extends:
|
||||
- .use-debian/x86_64_test-vk
|
||||
- .ci-tron-x86_64-test
|
||||
needs:
|
||||
- job: debian/x86_64_test-vk
|
||||
artifacts: false
|
||||
optional: true
|
||||
- job: debian-x86_64
|
||||
artifacts: false
|
||||
- !reference [.ci-tron-test, needs]
|
||||
|
||||
.ci-tron-x86_64-test-vk-manual:
|
||||
extends:
|
||||
- .use-debian/x86_64_test-vk
|
||||
- .ci-tron-x86_64-test
|
||||
variables:
|
||||
S3_ARTIFACT_NAME: "debian-build-x86_64"
|
||||
needs:
|
||||
- job: debian/x86_64_test-vk
|
||||
artifacts: false
|
||||
optional: true
|
||||
- job: debian-build-x86_64
|
||||
artifacts: false
|
||||
- !reference [.ci-tron-test, needs]
|
||||
|
||||
.ci-tron-x86_64-test-gl:
|
||||
extends:
|
||||
- .use-debian/x86_64_test-gl
|
||||
- .ci-tron-x86_64-test
|
||||
needs:
|
||||
- job: debian/x86_64_test-gl
|
||||
artifacts: false
|
||||
optional: true
|
||||
- job: debian-x86_64
|
||||
artifacts: false
|
||||
- !reference [.ci-tron-test, needs]
|
||||
|
||||
.ci-tron-x86_64-test-gl-manual:
|
||||
extends:
|
||||
- .use-debian/x86_64_test-gl
|
||||
- .ci-tron-x86_64-test
|
||||
variables:
|
||||
S3_ARTIFACT_NAME: "debian-build-x86_64"
|
||||
needs:
|
||||
- job: debian/x86_64_test-gl
|
||||
artifacts: false
|
||||
optional: true
|
||||
- job: debian-build-x86_64
|
||||
artifacts: false
|
||||
- !reference [.ci-tron-test, needs]
|
||||
|
||||
.ci-tron-arm64-test:
|
||||
extends:
|
||||
- .ci-tron-test
|
||||
variables:
|
||||
CI_TRON_INITRAMFS__B2C__URL: 'https://gitlab.freedesktop.org/gfx-ci/boot2container/-/releases/$B2C_VERSION/downloads/initramfs.linux_arm64.cpio.xz'
|
||||
CI_TRON_KERNEL__URL: 'https://gitlab.freedesktop.org/gfx-ci/boot2container/-/releases/$B2C_VERSION/downloads/linux-arm64'
|
||||
S3_ARTIFACT_NAME: "mesa-arm64-default-debugoptimized"
|
||||
|
||||
.ci-tron-arm64-test-vk:
|
||||
extends:
|
||||
- .use-debian/arm64_test-vk
|
||||
- .ci-tron-arm64-test
|
||||
needs:
|
||||
- job: debian/arm64_test-vk
|
||||
artifacts: false
|
||||
optional: true
|
||||
- job: debian-arm64
|
||||
artifacts: false
|
||||
- !reference [.ci-tron-test, needs]
|
||||
|
||||
.ci-tron-arm64-test-asan-vk:
|
||||
extends:
|
||||
- .use-debian/arm64_test-vk
|
||||
- .ci-tron-arm64-test
|
||||
variables:
|
||||
S3_ARTIFACT_NAME: "mesa-arm64-asan-debugoptimized"
|
||||
DEQP_FORCE_ASAN: 1
|
||||
needs:
|
||||
- job: debian/arm64_test-vk
|
||||
artifacts: false
|
||||
optional: true
|
||||
- job: debian-arm64-asan
|
||||
artifacts: false
|
||||
- !reference [.ci-tron-test, needs]
|
||||
|
||||
.ci-tron-arm64-test-ubsan-vk:
|
||||
extends:
|
||||
- .use-debian/arm64_test-vk
|
||||
- .ci-tron-arm64-test
|
||||
variables:
|
||||
S3_ARTIFACT_NAME: "mesa-arm64-ubsan-debugoptimized"
|
||||
needs:
|
||||
- job: debian/arm64_test-vk
|
||||
artifacts: false
|
||||
optional: true
|
||||
- job: debian-arm64-ubsan
|
||||
artifacts: false
|
||||
- !reference [.ci-tron-test, needs]
|
||||
|
||||
.ci-tron-arm64-test-gl:
|
||||
extends:
|
||||
- .use-debian/arm64_test-gl
|
||||
- .ci-tron-arm64-test
|
||||
needs:
|
||||
- job: debian/arm64_test-gl
|
||||
artifacts: false
|
||||
optional: true
|
||||
- job: debian-arm64
|
||||
artifacts: false
|
||||
- !reference [.ci-tron-test, needs]
|
||||
|
||||
.ci-tron-arm64-test-asan-gl:
|
||||
extends:
|
||||
- .use-debian/arm64_test-gl
|
||||
- .ci-tron-arm64-test
|
||||
variables:
|
||||
S3_ARTIFACT_NAME: "mesa-arm64-asan-debugoptimized"
|
||||
DEQP_FORCE_ASAN: 1
|
||||
needs:
|
||||
- job: debian/arm64_test-gl
|
||||
artifacts: false
|
||||
optional: true
|
||||
- job: debian-arm64-asan
|
||||
artifacts: false
|
||||
- !reference [.ci-tron-test, needs]
|
||||
|
||||
.ci-tron-arm64-test-ubsan-gl:
|
||||
extends:
|
||||
- .use-debian/arm64_test-gl
|
||||
- .ci-tron-arm64-test
|
||||
variables:
|
||||
S3_ARTIFACT_NAME: "mesa-arm64-ubsan-debugoptimized"
|
||||
needs:
|
||||
- job: debian/arm64_test-gl
|
||||
artifacts: false
|
||||
optional: true
|
||||
- job: debian-arm64-ubsan
|
||||
artifacts: false
|
||||
- !reference [.ci-tron-test, needs]
|
||||
|
||||
.ci-tron-arm32-test:
|
||||
extends:
|
||||
- .ci-tron-test
|
||||
variables:
|
||||
CI_TRON_INITRAMFS__B2C__URL: 'https://gitlab.freedesktop.org/gfx-ci/boot2container/-/releases/$B2C_VERSION/downloads/initramfs.linux_arm.cpio.xz'
|
||||
CI_TRON_KERNEL__URL: 'https://gitlab.freedesktop.org/gfx-ci/boot2container/-/releases/$B2C_VERSION/downloads/linux-arm'
|
||||
S3_ARTIFACT_NAME: "mesa-arm32-default-debugoptimized"
|
||||
|
||||
.ci-tron-arm32-test-vk:
|
||||
extends:
|
||||
- .use-debian/arm32_test-vk
|
||||
- .ci-tron-arm32-test
|
||||
needs:
|
||||
- job: debian/arm32_test-vk
|
||||
artifacts: false
|
||||
optional: true
|
||||
- job: debian-arm32
|
||||
artifacts: false
|
||||
- !reference [.ci-tron-test, needs]
|
||||
|
||||
.ci-tron-arm32-test-gl:
|
||||
extends:
|
||||
- .use-debian/arm32_test-gl
|
||||
- .ci-tron-arm32-test
|
||||
needs:
|
||||
- job: debian/arm32_test-gl
|
||||
artifacts: false
|
||||
optional: true
|
||||
- job: debian-arm32
|
||||
artifacts: false
|
||||
- !reference [.ci-tron-test, needs]
|
||||
|
||||
.ci-tron-arm32-test-asan-gl:
|
||||
extends:
|
||||
- .use-debian/arm32_test-gl
|
||||
- .ci-tron-arm32-test
|
||||
variables:
|
||||
S3_ARTIFACT_NAME: "mesa-arm32-asan-debugoptimized"
|
||||
DEQP_FORCE_ASAN: 1
|
||||
needs:
|
||||
- job: debian/arm32_test-gl
|
||||
artifacts: false
|
||||
optional: true
|
||||
- job: debian-arm32-asan
|
||||
artifacts: false
|
||||
- !reference [.ci-tron-test, needs]
|
@@ -1,35 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# shellcheck disable=SC2035
|
||||
# shellcheck disable=SC2061
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
while true; do
|
||||
devcds=$(find /sys/devices/virtual/devcoredump/ -name data 2>/dev/null)
|
||||
for i in $devcds; do
|
||||
echo "Found a devcoredump at $i."
|
||||
if cp $i $RESULTS_DIR/first.devcore; then
|
||||
echo 1 > $i
|
||||
echo "Saved to the job artifacts at /first.devcore"
|
||||
exit 0
|
||||
fi
|
||||
done
|
||||
i915_error_states=$(find /sys/devices/ -path */drm/card*/error)
|
||||
for i in $i915_error_states; do
|
||||
tmpfile=$(mktemp)
|
||||
cp "$i" "$tmpfile"
|
||||
filesize=$(stat --printf="%s" "$tmpfile")
|
||||
# Does the file contain just "No error state collected"? (that string plus a newline is 25 bytes)
|
||||
if [ "$filesize" = 25 ]; then
|
||||
rm "$tmpfile"
|
||||
else
|
||||
echo "Found an i915 error state at $i size=$filesize."
|
||||
if cp "$tmpfile" $RESULTS_DIR/first.i915_error_state; then
|
||||
rm "$tmpfile"
|
||||
echo 1 > "$i"
|
||||
echo "Saved to the job artifacts at /first.i915_error_state"
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
done
|
||||
sleep 10
|
||||
done
|
@@ -1,34 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# Very early init, used to make sure devices and network are set up and
|
||||
# reachable.
|
||||
|
||||
# When changing this file, you need to bump the following
|
||||
# .gitlab-ci/image-tags.yml tags:
|
||||
# ALPINE_X86_64_LAVA_TRIGGER_TAG
|
||||
|
||||
set -ex
|
||||
|
||||
cd /
|
||||
|
||||
findmnt --mountpoint /proc || mount -t proc none /proc
|
||||
findmnt --mountpoint /sys || mount -t sysfs none /sys
|
||||
mount -t debugfs none /sys/kernel/debug
|
||||
findmnt --mountpoint /dev || mount -t devtmpfs none /dev
|
||||
mkdir -p /dev/pts
|
||||
mount -t devpts devpts /dev/pts
|
||||
mkdir /dev/shm
|
||||
mount -t tmpfs -o noexec,nodev,nosuid tmpfs /dev/shm
|
||||
mount -t tmpfs tmpfs /tmp
|
||||
|
||||
echo "nameserver 8.8.8.8" > /etc/resolv.conf
|
||||
[ -z "$NFS_SERVER_IP" ] || echo "$NFS_SERVER_IP caching-proxy" >> /etc/hosts
|
||||
|
||||
# Set the time so we can validate certificates before we fetch anything;
|
||||
# however as not all DUTs have network, make this non-fatal.
|
||||
for _ in 1 2 3; do sntp -sS pool.ntp.org && break || sleep 2; done || true
|
||||
|
||||
# Create a symlink from /dev/fd to /proc/self/fd if /dev/fd is missing.
|
||||
if [ ! -e /dev/fd ]; then
|
||||
ln -s /proc/self/fd /dev/fd
|
||||
fi
|
@@ -1,240 +0,0 @@
|
||||
#!/bin/bash
|
||||
# shellcheck disable=SC1090
|
||||
# shellcheck disable=SC1091
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
# shellcheck disable=SC2155
|
||||
|
||||
# Second-stage init, used to set up devices and our job environment before
|
||||
# running tests.
|
||||
|
||||
shopt -s extglob
|
||||
|
||||
# Make sure to kill this script and all of its child processes on exit, since
# any console output may interfere with LAVA signal handling, which is based
# on the console log.
|
||||
cleanup() {
|
||||
if [ "$BACKGROUND_PIDS" = "" ]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
set +x
|
||||
echo "Killing all child processes"
|
||||
for pid in $BACKGROUND_PIDS
|
||||
do
|
||||
kill "$pid" 2>/dev/null || true
|
||||
done
|
||||
|
||||
# Sleep just a little to give enough time for subprocesses to be gracefully
|
||||
# killed. Then apply a SIGKILL if necessary.
|
||||
sleep 5
|
||||
for pid in $BACKGROUND_PIDS
|
||||
do
|
||||
kill -9 "$pid" 2>/dev/null || true
|
||||
done
|
||||
|
||||
BACKGROUND_PIDS=
|
||||
set -x
|
||||
}
|
||||
trap cleanup INT TERM EXIT
|
||||
|
||||
# Space separated values with the PIDS of the processes started in the
|
||||
# background by this script
|
||||
BACKGROUND_PIDS=
|
||||
|
||||
|
||||
for path in '/dut-env-vars.sh' '/set-job-env-vars.sh' './set-job-env-vars.sh'; do
|
||||
[ -f "$path" ] && source "$path"
|
||||
done
|
||||
. "$SCRIPTS_DIR"/setup-test-env.sh
|
||||
|
||||
# Flush out anything which might be stuck in a serial buffer
|
||||
echo
|
||||
echo
|
||||
echo
|
||||
|
||||
section_switch init_stage2 "Pre-testing hardware setup"
|
||||
|
||||
set -ex
|
||||
|
||||
# Set up any devices required by the jobs
|
||||
[ -z "$HWCI_KERNEL_MODULES" ] || {
|
||||
echo -n $HWCI_KERNEL_MODULES | xargs -d, -n1 /usr/sbin/modprobe
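# Clarifying comment (not in the original): HWCI_KERNEL_MODULES is a
# comma-separated list, and xargs -d, -n1 runs modprobe once per module.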
|
||||
}
|
||||
|
||||
# Set up ZRAM
|
||||
HWCI_ZRAM_SIZE=2G
|
||||
if /sbin/zramctl --find --size $HWCI_ZRAM_SIZE -a zstd; then
|
||||
mkswap /dev/zram0
|
||||
swapon /dev/zram0
|
||||
echo "zram: $HWCI_ZRAM_SIZE activated"
|
||||
else
|
||||
echo "zram: skipping, not supported"
|
||||
fi
|
||||
|
||||
#
|
||||
# Load the KVM module specific to the detected CPU virtualization extensions:
|
||||
# - vmx for Intel VT
|
||||
# - svm for AMD-V
|
||||
#
|
||||
if [ -n "$HWCI_ENABLE_X86_KVM" ]; then
|
||||
unset KVM_KERNEL_MODULE
|
||||
{
|
||||
grep -qs '\bvmx\b' /proc/cpuinfo && KVM_KERNEL_MODULE=kvm_intel
|
||||
} || {
|
||||
grep -qs '\bsvm\b' /proc/cpuinfo && KVM_KERNEL_MODULE=kvm_amd
|
||||
}
|
||||
|
||||
{
|
||||
[ -z "${KVM_KERNEL_MODULE}" ] && \
|
||||
echo "WARNING: Failed to detect CPU virtualization extensions"
|
||||
} || \
|
||||
modprobe ${KVM_KERNEL_MODULE}
|
||||
fi
|
||||
|
||||
# Fix prefix confusion: the build installs to $CI_PROJECT_DIR, but we expect
|
||||
# it in /install
|
||||
ln -sf $CI_PROJECT_DIR/install /install
|
||||
export LD_LIBRARY_PATH=/install/lib
|
||||
export LIBGL_DRIVERS_PATH=/install/lib/dri
|
||||
|
||||
# https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/22495#note_1876691
|
||||
# The navi21 boards seem to have trouble with ld.so.cache, so try explicitly
|
||||
# telling it to look in /usr/local/lib.
|
||||
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib
|
||||
|
||||
# The Broadcom devices need /usr/local/bin unconditionally added to the path
|
||||
export PATH=/usr/local/bin:$PATH
|
||||
|
||||
# Store Mesa's disk cache under /tmp, rather than sending it out over NFS.
|
||||
export XDG_CACHE_HOME=/tmp
|
||||
|
||||
# Make sure Python can find all our imports
|
||||
export PYTHONPATH=$(python3 -c "import sys;print(\":\".join(sys.path))")
|
||||
|
||||
# If we need to specify a driver, it means several drivers could pick up this gpu;
|
||||
# ensure that the other driver can't accidentally be used
|
||||
if [ -n "$MESA_LOADER_DRIVER_OVERRIDE" ]; then
|
||||
rm /install/lib/dri/!($MESA_LOADER_DRIVER_OVERRIDE)_dri.so
|
||||
fi
|
||||
ls -1 /install/lib/dri/*_dri.so || true
|
||||
|
||||
if [ "$HWCI_FREQ_MAX" = "true" ]; then
|
||||
# Ensure initialization of the DRM device (needed by MSM)
|
||||
head -0 /dev/dri/renderD128
|
||||
|
||||
# Disable GPU frequency scaling
|
||||
DEVFREQ_GOVERNOR=$(find /sys/devices -name governor | grep gpu || true)
|
||||
test -z "$DEVFREQ_GOVERNOR" || echo performance > $DEVFREQ_GOVERNOR || true
|
||||
|
||||
# Disable CPU frequency scaling
|
||||
echo performance | tee -a /sys/devices/system/cpu/cpufreq/policy*/scaling_governor || true
|
||||
|
||||
# Disable GPU runtime power management
|
||||
GPU_AUTOSUSPEND=$(find /sys/devices -name autosuspend_delay_ms | grep gpu | head -1)
|
||||
test -z "$GPU_AUTOSUSPEND" || echo -1 > $GPU_AUTOSUSPEND || true
|
||||
# Lock Intel GPU frequency to 70% of the maximum allowed by hardware
|
||||
# and enable throttling detection & reporting.
|
||||
# Additionally, set the upper limit for CPU scaling frequency to 65% of the
|
||||
# maximum permitted, as an additional measure to mitigate thermal throttling.
|
||||
/install/common/intel-gpu-freq.sh -s 70% --cpu-set-max 65% -g all -d
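# Clarifying note (not part of the original script): per intel-gpu-freq.sh's
# usage text, "-s 70%" sets both min and max GPU frequency to 70% of the
# hardware maximum, "--cpu-set-max 65%" caps the CPU scaling frequency at 65%,
# "-g all" prints all frequency info, and "-d" starts the throttling detector.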
|
||||
fi
|
||||
|
||||
# Start a little daemon to capture sysfs records and produce a JSON file
|
||||
KDL_PATH=/install/common/kdl.sh
|
||||
if [ -x "$KDL_PATH" ]; then
|
||||
echo "launch kdl.sh!"
|
||||
$KDL_PATH &
|
||||
BACKGROUND_PIDS="$! $BACKGROUND_PIDS"
|
||||
else
|
||||
echo "kdl.sh not found!"
|
||||
fi
|
||||
|
||||
# Increase freedreno hangcheck timer because it's right at the edge of the
|
||||
# spilling tests timing out (and some traces, too)
|
||||
if [ -n "$FREEDRENO_HANGCHECK_MS" ]; then
|
||||
echo $FREEDRENO_HANGCHECK_MS | tee -a /sys/kernel/debug/dri/128/hangcheck_period_ms
|
||||
fi
|
||||
|
||||
# Start a little daemon to capture the first devcoredump we encounter. (They
|
||||
# expire after 5 minutes, so we poll for them).
|
||||
CAPTURE_DEVCOREDUMP=/install/common/capture-devcoredump.sh
|
||||
if [ -x "$CAPTURE_DEVCOREDUMP" ]; then
|
||||
$CAPTURE_DEVCOREDUMP &
|
||||
BACKGROUND_PIDS="$! $BACKGROUND_PIDS"
|
||||
fi
|
||||
|
||||
ARCH=$(uname -m)
|
||||
export VK_DRIVER_FILES="/install/share/vulkan/icd.d/${VK_DRIVER}_icd.$ARCH.json"
|
||||
|
||||
# If we want Xorg to be running for the test, then we start it up before the
|
||||
# HWCI_TEST_SCRIPT because we need to use xinit to start X (otherwise
|
||||
# without using -displayfd you can race with Xorg's startup), but xinit will eat
|
||||
# your client's return code
|
||||
if [ -n "$HWCI_START_XORG" ]; then
|
||||
echo "touch /xorg-started; sleep 100000" > /xorg-script
|
||||
env \
|
||||
xinit /bin/sh /xorg-script -- /usr/bin/Xorg -noreset -s 0 -dpms -logfile "$RESULTS_DIR/Xorg.0.log" &
|
||||
BACKGROUND_PIDS="$! $BACKGROUND_PIDS"
|
||||
|
||||
# Wait for xorg to be ready for connections.
|
||||
for _ in 1 2 3 4 5; do
|
||||
if [ -e /xorg-started ]; then
|
||||
break
|
||||
fi
|
||||
sleep 5
|
||||
done
|
||||
export DISPLAY=:0
|
||||
fi
|
||||
|
||||
if [ -n "$HWCI_START_WESTON" ]; then
|
||||
WESTON_X11_SOCK="/tmp/.X11-unix/X0"
|
||||
if [ -n "$HWCI_START_XORG" ]; then
|
||||
echo "Please consider dropping HWCI_START_XORG and instead using Weston XWayland for testing."
|
||||
WESTON_X11_SOCK="/tmp/.X11-unix/X1"
|
||||
fi
|
||||
export WAYLAND_DISPLAY=wayland-0
|
||||
|
||||
# The display server is Weston Xwayland when HWCI_START_XORG is not set, or Xorg when it is.
|
||||
export DISPLAY=:0
|
||||
mkdir -p /tmp/.X11-unix
|
||||
|
||||
env weston --config="/install/common/weston.ini" -Swayland-0 --use-gl &
|
||||
BACKGROUND_PIDS="$! $BACKGROUND_PIDS"
|
||||
|
||||
while [ ! -S "$WESTON_X11_SOCK" ]; do sleep 1; done
|
||||
fi
|
||||
|
||||
set +x
|
||||
|
||||
section_end init_stage2
|
||||
|
||||
echo "Running ${HWCI_TEST_SCRIPT} ${HWCI_TEST_ARGS} ..."
|
||||
|
||||
set +e
|
||||
$HWCI_TEST_SCRIPT ${HWCI_TEST_ARGS:-}; EXIT_CODE=$?
|
||||
set -e
|
||||
|
||||
section_start post_test_cleanup "Cleaning up after testing, uploading results"
|
||||
set -x
|
||||
|
||||
# Make sure that capture-devcoredump is done before we start trying to tar up
|
||||
# artifacts -- if it's writing while tar is reading, tar will throw an error and
|
||||
# kill the job.
|
||||
cleanup
|
||||
|
||||
# upload artifacts (lava jobs)
|
||||
if [ -n "$S3_RESULTS_UPLOAD" ]; then
|
||||
tar --zstd -cf results.tar.zst results/;
|
||||
ci-fairy s3cp --token-file "${S3_JWT_FILE}" results.tar.zst https://"$S3_RESULTS_UPLOAD"/results.tar.zst
|
||||
fi
|
||||
|
||||
set +x
|
||||
section_end post_test_cleanup
|
||||
|
||||
# Print the final result; both bare-metal and LAVA look for this string to get
|
||||
# the result of our run, so try really hard to get it out rather than losing
|
||||
# the run. The device gets shut down right at this point, and a630 seems to
|
||||
# enjoy corrupting the last line of serial output before shutdown.
|
||||
for _ in $(seq 0 3); do echo "hwci: mesa: exit_code: $EXIT_CODE"; sleep 1; echo; done
|
||||
|
||||
exit $EXIT_CODE
|
@@ -1,820 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# shellcheck disable=SC2013
|
||||
# shellcheck disable=SC2015
|
||||
# shellcheck disable=SC2034
|
||||
# shellcheck disable=SC2046
|
||||
# shellcheck disable=SC2059
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
# shellcheck disable=SC2154
|
||||
# shellcheck disable=SC2155
|
||||
# shellcheck disable=SC2162
|
||||
# shellcheck disable=SC2229
|
||||
#
|
||||
# This is a utility script to manage Intel GPU frequencies.
|
||||
# It can be used for debugging performance problems or trying to obtain a stable
|
||||
# frequency while benchmarking.
|
||||
#
|
||||
# Note the Intel i915 GPU driver allows changing the minimum, maximum and boost
# frequencies in steps of 50 MHz via:
|
||||
#
|
||||
# /sys/class/drm/card<n>/<freq_info>
|
||||
#
|
||||
# Where <n> is the DRM card index and <freq_info> one of the following:
|
||||
#
|
||||
# - gt_max_freq_mhz (enforced maximum freq)
|
||||
# - gt_min_freq_mhz (enforced minimum freq)
|
||||
# - gt_boost_freq_mhz (enforced boost freq)
|
||||
#
|
||||
# The hardware capabilities can be accessed via:
|
||||
#
|
||||
# - gt_RP0_freq_mhz (supported maximum freq)
|
||||
# - gt_RPn_freq_mhz (supported minimum freq)
|
||||
# - gt_RP1_freq_mhz (most efficient freq)
|
||||
#
|
||||
# The current frequency can be read from:
|
||||
# - gt_act_freq_mhz (the actual GPU freq)
|
||||
# - gt_cur_freq_mhz (the last requested freq)
|
||||
#
|
||||
# Intel later switched to per-tile sysfs interfaces, which is what the Xe DRM
# driver exclusively uses, and the capabilities are now located under the
# following directory for the first tile:
|
||||
#
|
||||
# /sys/class/drm/card<n>/device/tile0/gt0/freq0/<freq_info>
|
||||
#
|
||||
# Where <n> is the DRM card index and <freq_info> one of the following:
|
||||
#
|
||||
# - max_freq (enforced maximum freq)
|
||||
# - min_freq (enforced minimum freq)
|
||||
#
|
||||
# The hardware capabilities can be accessed via:
|
||||
#
|
||||
# - rp0_freq (supported maximum freq)
|
||||
# - rpn_freq (supported minimum freq)
|
||||
# - rpe_freq (most efficient freq)
|
||||
#
|
||||
# The current frequency can be read from:
|
||||
# - act_freq (the actual GPU freq)
|
||||
# - cur_freq (the last requested freq)
|
||||
#
|
||||
# Also note that in addition to GPU management, the script offers the
# possibility to adjust CPU operating frequencies. However, this is currently
# limited to just setting the maximum scaling frequency as a percentage of the
# maximum frequency allowed by the hardware.
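# Example for clarity (not part of the original header; card index 0 is an
# assumption): the current GPU frequency could be read with e.g.
#   cat /sys/class/drm/card0/gt_act_freq_mhz                   (i915)
#   cat /sys/class/drm/card0/device/tile0/gt0/freq0/act_freq   (Xe)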
|
||||
#
|
||||
# Copyright (C) 2022 Collabora Ltd.
|
||||
# Author: Cristian Ciocaltea <cristian.ciocaltea@collabora.com>
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
#
|
||||
# Constants
|
||||
#
|
||||
|
||||
# Check if any /sys/class/drm/cardX/device/tile0 directory exists to detect Xe
|
||||
USE_XE=0
|
||||
for i in $(seq 0 15); do
|
||||
if [ -d "/sys/class/drm/card$i/device/tile0" ]; then
|
||||
USE_XE=1
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
# GPU
|
||||
if [ "$USE_XE" -eq 1 ]; then
|
||||
DRM_FREQ_SYSFS_PATTERN="/sys/class/drm/card%d/device/tile0/gt0/freq0/%s_freq"
|
||||
ENF_FREQ_INFO="max min"
|
||||
CAP_FREQ_INFO="rp0 rpn rpe"
|
||||
else
|
||||
DRM_FREQ_SYSFS_PATTERN="/sys/class/drm/card%d/gt_%s_freq_mhz"
|
||||
ENF_FREQ_INFO="max min boost"
|
||||
CAP_FREQ_INFO="RP0 RPn RP1"
|
||||
fi
|
||||
ACT_FREQ_INFO="act cur"
|
||||
THROTT_DETECT_SLEEP_SEC=2
|
||||
THROTT_DETECT_PID_FILE_PATH=/tmp/thrott-detect.pid
|
||||
|
||||
# CPU
|
||||
CPU_SYSFS_PREFIX=/sys/devices/system/cpu
|
||||
CPU_PSTATE_SYSFS_PATTERN="${CPU_SYSFS_PREFIX}/intel_pstate/%s"
|
||||
CPU_FREQ_SYSFS_PATTERN="${CPU_SYSFS_PREFIX}/cpu%s/cpufreq/%s_freq"
|
||||
CAP_CPU_FREQ_INFO="cpuinfo_max cpuinfo_min"
|
||||
ENF_CPU_FREQ_INFO="scaling_max scaling_min"
|
||||
ACT_CPU_FREQ_INFO="scaling_cur"
|
||||
|
||||
#
|
||||
# Global variables.
|
||||
#
|
||||
unset INTEL_DRM_CARD_INDEX
|
||||
unset GET_ACT_FREQ GET_ENF_FREQ GET_CAP_FREQ
|
||||
unset SET_MIN_FREQ SET_MAX_FREQ
|
||||
unset MONITOR_FREQ
|
||||
unset CPU_SET_MAX_FREQ
|
||||
unset DETECT_THROTT
|
||||
unset DRY_RUN
|
||||
|
||||
#
|
||||
# Simple printf based stderr logger.
|
||||
#
|
||||
log() {
|
||||
local msg_type=$1
|
||||
|
||||
shift
|
||||
printf "%s: %s: " "${msg_type}" "${0##*/}" >&2
|
||||
printf "$@" >&2
|
||||
printf "\n" >&2
|
||||
}
|
||||
|
||||
#
|
||||
# Helper to print sysfs path for the given card index and freq info.
|
||||
#
|
||||
# arg1: Frequency info sysfs name, one of *_FREQ_INFO constants above
|
||||
# arg2: Video card index, defaults to INTEL_DRM_CARD_INDEX
|
||||
#
|
||||
print_freq_sysfs_path() {
|
||||
printf ${DRM_FREQ_SYSFS_PATTERN} "${2:-${INTEL_DRM_CARD_INDEX}}" "$1"
|
||||
}
|
||||
|
||||
#
|
||||
# Helper to set INTEL_DRM_CARD_INDEX for the first identified Intel video card.
|
||||
#
|
||||
identify_intel_gpu() {
|
||||
local i=0 vendor path
|
||||
|
||||
while [ ${i} -lt 16 ]; do
|
||||
[ -c "/dev/dri/card$i" ] || {
|
||||
i=$((i + 1))
|
||||
continue
|
||||
}
|
||||
|
||||
path=$(print_freq_sysfs_path "" ${i})
|
||||
if [ "$USE_XE" -eq 1 ]; then
|
||||
path=${path%/*/*/*/*/*}/device/vendor
|
||||
else
|
||||
path=${path%/*}/device/vendor
|
||||
fi
|
||||
|
||||
[ -r "${path}" ] && read vendor < "${path}" && \
|
||||
[ "${vendor}" = "0x8086" ] && INTEL_DRM_CARD_INDEX=$i && return 0
|
||||
|
||||
i=$((i + 1))
|
||||
done
|
||||
|
||||
return 1
|
||||
}
|
||||
|
||||
#
|
||||
# Read the specified freq info from sysfs.
|
||||
#
|
||||
# arg1: Flag (y/n) to also enable printing the freq info.
|
||||
# arg2...: Frequency info sysfs name(s), see *_FREQ_INFO constants above
|
||||
# return: Global variable(s) FREQ_${arg} containing the requested information
|
||||
#
|
||||
read_freq_info() {
|
||||
local var val info path print=0 ret=0
|
||||
|
||||
[ "$1" = "y" ] && print=1
|
||||
shift
|
||||
|
||||
while [ $# -gt 0 ]; do
|
||||
info=$1
|
||||
shift
|
||||
var=FREQ_${info}
|
||||
path=$(print_freq_sysfs_path "${info}")
|
||||
|
||||
[ -r ${path} ] && read ${var} < ${path} || {
|
||||
log ERROR "Failed to read freq info from: %s" "${path}"
|
||||
ret=1
|
||||
continue
|
||||
}
|
||||
|
||||
[ -n "${var}" ] || {
|
||||
log ERROR "Got empty freq info from: %s" "${path}"
|
||||
ret=1
|
||||
continue
|
||||
}
|
||||
|
||||
[ ${print} -eq 1 ] && {
|
||||
eval val=\$${var}
|
||||
printf "%6s: %4s MHz\n" "${info}" "${val}"
|
||||
}
|
||||
done
|
||||
|
||||
return ${ret}
|
||||
}
|
||||
|
||||
#
|
||||
# Display requested info.
|
||||
#
|
||||
print_freq_info() {
|
||||
local req_freq
|
||||
|
||||
[ -n "${GET_CAP_FREQ}" ] && {
|
||||
printf "* Hardware capabilities\n"
|
||||
read_freq_info y ${CAP_FREQ_INFO}
|
||||
printf "\n"
|
||||
}
|
||||
|
||||
[ -n "${GET_ENF_FREQ}" ] && {
|
||||
printf "* Enforcements\n"
|
||||
read_freq_info y ${ENF_FREQ_INFO}
|
||||
printf "\n"
|
||||
}
|
||||
|
||||
[ -n "${GET_ACT_FREQ}" ] && {
|
||||
printf "* Actual\n"
|
||||
read_freq_info y ${ACT_FREQ_INFO}
|
||||
printf "\n"
|
||||
}
|
||||
}
|
||||
|
||||
#
|
||||
# Helper to print frequency value as requested by user via '-s, --set' option.
|
||||
# arg1: user requested freq value
|
||||
#
|
||||
compute_freq_set() {
|
||||
local val
|
||||
|
||||
case "$1" in
|
||||
+)
|
||||
val=$(eval "echo \${FREQ_$(echo $CAP_FREQ_INFO | cut -d' ' -f1)}") # FREQ_rp0 or FREQ_RP0
|
||||
;;
|
||||
-)
|
||||
val=$(eval "echo \${FREQ_$(echo $CAP_FREQ_INFO | cut -d' ' -f2)}") # FREQ_rpn or FREQ_RPn
|
||||
;;
|
||||
*%)
|
||||
val=$((${1%?} * $(eval "echo \${FREQ_$(echo $CAP_FREQ_INFO | cut -d' ' -f1)}") / 100))
|
||||
# Adjust freq to comply with 50 MHz increments
|
||||
val=$((val / 50 * 50))
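# Worked example for clarity (hypothetical numbers, not from the original):
# with a hardware max of 1300 MHz, a request of "73%" yields
# 73 * 1300 / 100 = 949, which the integer division rounds down to 900 MHz.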
|
||||
;;
|
||||
*[!0-9]*)
|
||||
log ERROR "Cannot set freq to invalid value: %s" "$1"
|
||||
return 1
|
||||
;;
|
||||
"")
|
||||
log ERROR "Cannot set freq to unspecified value"
|
||||
return 1
|
||||
;;
|
||||
*)
|
||||
# Adjust freq to comply with 50 MHz increments
|
||||
val=$(($1 / 50 * 50))
|
||||
;;
|
||||
esac
|
||||
|
||||
printf "%s" "${val}"
|
||||
}
|
||||
|
||||
#
|
||||
# Helper for set_freq().
|
||||
#
|
||||
set_freq_max() {
|
||||
log INFO "Setting GPU max freq to %s MHz" "${SET_MAX_FREQ}"
|
||||
|
||||
read_freq_info n min || return $?
|
||||
|
||||
# FREQ_rp0 or FREQ_RP0
|
||||
[ ${SET_MAX_FREQ} -gt $(eval "echo \${FREQ_$(echo $CAP_FREQ_INFO | cut -d' ' -f1)}") ] && {
|
||||
log ERROR "Cannot set GPU max freq (%s) to be greater than hw max freq (%s)" \
|
||||
"${SET_MAX_FREQ}" "$(eval "echo \${FREQ_$(echo $CAP_FREQ_INFO | cut -d' ' -f1)}")"
|
||||
return 1
|
||||
}
|
||||
|
||||
# FREQ_rpn or FREQ_RPn
|
||||
[ ${SET_MAX_FREQ} -lt $(eval "echo \${FREQ_$(echo $CAP_FREQ_INFO | cut -d' ' -f2)}") ] && {
|
||||
log ERROR "Cannot set GPU max freq (%s) to be less than hw min freq (%s)" \
|
||||
"${SET_MIN_FREQ}" "$(eval "echo \${FREQ_$(echo $CAP_FREQ_INFO | cut -d' ' -f2)}")"
|
||||
return 1
|
||||
}
|
||||
|
||||
[ ${SET_MAX_FREQ} -lt ${FREQ_min} ] && {
|
||||
log ERROR "Cannot set GPU max freq (%s) to be less than min freq (%s)" \
|
||||
"${SET_MAX_FREQ}" "${FREQ_min}"
|
||||
return 1
|
||||
}
|
||||
|
||||
[ -z "${DRY_RUN}" ] || return 0
|
||||
|
||||
# Write to max freq path
|
||||
if ! printf "%s" ${SET_MAX_FREQ} | tee $(print_freq_sysfs_path max) > /dev/null;
|
||||
then
|
||||
log ERROR "Failed to set GPU max frequency"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Only write to boost if the sysfs file exists, as it's removed in Xe
|
||||
if [ -e "$(print_freq_sysfs_path boost)" ]; then
|
||||
if ! printf "%s" ${SET_MAX_FREQ} | tee $(print_freq_sysfs_path boost) > /dev/null;
|
||||
then
|
||||
log ERROR "Failed to set GPU boost frequency"
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
#
|
||||
# Helper for set_freq().
|
||||
#
|
||||
set_freq_min() {
|
||||
log INFO "Setting GPU min freq to %s MHz" "${SET_MIN_FREQ}"
|
||||
|
||||
read_freq_info n max || return $?
|
||||
|
||||
[ ${SET_MIN_FREQ} -gt ${FREQ_max} ] && {
|
||||
log ERROR "Cannot set GPU min freq (%s) to be greater than max freq (%s)" \
|
||||
"${SET_MIN_FREQ}" "${FREQ_max}"
|
||||
return 1
|
||||
}
|
||||
|
||||
[ ${SET_MIN_FREQ} -lt $(eval "echo \${FREQ_$(echo $CAP_FREQ_INFO | cut -d' ' -f2)}") ] && {
|
||||
log ERROR "Cannot set GPU min freq (%s) to be less than hw min freq (%s)" \
|
||||
"${SET_MIN_FREQ}" "$(eval "echo \${FREQ_$(echo $CAP_FREQ_INFO | cut -d' ' -f2)}")"
|
||||
return 1
|
||||
}
|
||||
|
||||
[ -z "${DRY_RUN}" ] || return 0
|
||||
|
||||
if ! printf "%s" ${SET_MIN_FREQ} > $(print_freq_sysfs_path min);
|
||||
then
|
||||
log ERROR "Failed to set GPU min frequency"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
#
|
||||
# Set min or max or both GPU frequencies to the user indicated values.
|
||||
#
|
||||
set_freq() {
|
||||
# Get hw max & min frequencies
|
||||
read_freq_info n $(echo $CAP_FREQ_INFO | cut -d' ' -f1,2) || return $? # RP0 RPn
|
||||
|
||||
[ -z "${SET_MAX_FREQ}" ] || {
|
||||
SET_MAX_FREQ=$(compute_freq_set "${SET_MAX_FREQ}")
|
||||
[ -z "${SET_MAX_FREQ}" ] && return 1
|
||||
}
|
||||
|
||||
[ -z "${SET_MIN_FREQ}" ] || {
|
||||
SET_MIN_FREQ=$(compute_freq_set "${SET_MIN_FREQ}")
|
||||
[ -z "${SET_MIN_FREQ}" ] && return 1
|
||||
}
|
||||
|
||||
#
|
||||
# Ensure correct operation order, to avoid setting min freq
|
||||
# to a value which is larger than max freq.
|
||||
#
|
||||
# E.g.:
|
||||
# crt_min=crt_max=600; new_min=new_max=700
|
||||
# > operation order: max=700; min=700
|
||||
#
|
||||
# crt_min=crt_max=600; new_min=new_max=500
|
||||
# > operation order: min=500; max=500
|
||||
#
|
||||
if [ -n "${SET_MAX_FREQ}" ] && [ -n "${SET_MIN_FREQ}" ]; then
|
||||
[ ${SET_MAX_FREQ} -lt ${SET_MIN_FREQ} ] && {
|
||||
log ERROR "Cannot set GPU max freq to be less than min freq"
|
||||
return 1
|
||||
}
|
||||
|
||||
read_freq_info n min || return $?
|
||||
|
||||
if [ ${SET_MAX_FREQ} -lt ${FREQ_min} ]; then
|
||||
set_freq_min || return $?
|
||||
set_freq_max
|
||||
else
|
||||
set_freq_max || return $?
|
||||
set_freq_min
|
||||
fi
|
||||
elif [ -n "${SET_MAX_FREQ}" ]; then
|
||||
set_freq_max
|
||||
elif [ -n "${SET_MIN_FREQ}" ]; then
|
||||
set_freq_min
|
||||
else
|
||||
log "Unexpected call to set_freq()"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
#
|
||||
# Helper for detect_throttling().
|
||||
#
|
||||
get_thrott_detect_pid() {
|
||||
[ -e ${THROTT_DETECT_PID_FILE_PATH} ] || return 0
|
||||
|
||||
local pid
|
||||
read pid < ${THROTT_DETECT_PID_FILE_PATH} || {
|
||||
log ERROR "Failed to read pid from: %s" "${THROTT_DETECT_PID_FILE_PATH}"
|
||||
return 1
|
||||
}
|
||||
|
||||
local proc_path=/proc/${pid:-invalid}/cmdline
|
||||
[ -r ${proc_path} ] && grep -qs "${0##*/}" ${proc_path} && {
|
||||
printf "%s" "${pid}"
|
||||
return 0
|
||||
}
|
||||
|
||||
# Remove orphaned PID file
|
||||
rm -rf ${THROTT_DETECT_PID_FILE_PATH}
|
||||
return 1
|
||||
}
|
||||
|
||||
#
|
||||
# Control detection and reporting of GPU throttling events.
|
||||
# arg1: start - run throttle detector in background
|
||||
# stop - stop throttle detector process, if any
|
||||
# status - verify if throttle detector is running
|
||||
#
|
||||
detect_throttling() {
|
||||
local pid
|
||||
pid=$(get_thrott_detect_pid)
|
||||
|
||||
case "$1" in
|
||||
status)
|
||||
printf "Throttling detector is "
|
||||
[ -z "${pid}" ] && printf "not running\n" && return 0
|
||||
printf "running (pid=%s)\n" ${pid}
|
||||
;;
|
||||
|
||||
stop)
|
||||
[ -z "${pid}" ] && return 0
|
||||
|
||||
log INFO "Stopping throttling detector (pid=%s)" "${pid}"
|
||||
kill ${pid}; sleep 1; kill -0 ${pid} 2>/dev/null && kill -9 ${pid}
|
||||
rm -rf ${THROTT_DETECT_PID_FILE_PATH}
|
||||
;;
|
||||
|
||||
start)
|
||||
[ -n "${pid}" ] && {
|
||||
log WARN "Throttling detector is already running (pid=%s)" ${pid}
|
||||
return 0
|
||||
}
|
||||
|
||||
(
|
||||
read_freq_info n $(echo $CAP_FREQ_INFO | cut -d' ' -f2) || return $? # RPn
|
||||
|
||||
while true; do
|
||||
sleep ${THROTT_DETECT_SLEEP_SEC}
|
||||
read_freq_info n act min cur || exit $?
|
||||
|
||||
#
|
||||
# The throttling seems to occur when act freq goes below min.
|
||||
# However, it's necessary to exclude the idle states, where
|
||||
# act freq normally reaches rpn and cur goes below min.
|
||||
#
|
||||
[ ${FREQ_act} -lt ${FREQ_min} ] && \
|
||||
[ ${FREQ_act} -gt $(eval "echo \${FREQ_$(echo $CAP_FREQ_INFO | cut -d' ' -f2)}") ] && \
|
||||
[ ${FREQ_cur} -ge ${FREQ_min} ] && \
|
||||
printf "GPU throttling detected: act=%s min=%s cur=%s rpn=%s\n" \
|
||||
${FREQ_act} ${FREQ_min} ${FREQ_cur} $(eval "echo \${FREQ_$(echo $CAP_FREQ_INFO | cut -d' ' -f2)}")
|
||||
done
|
||||
) &
|
||||
|
||||
pid=$!
|
||||
log INFO "Started GPU throttling detector (pid=%s)" ${pid}
|
||||
|
||||
printf "%s\n" ${pid} > ${THROTT_DETECT_PID_FILE_PATH} || \
|
||||
log WARN "Failed to write throttle detector PID file"
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
#
|
||||
# Retrieve the list of online CPUs.
|
||||
#
|
||||
get_online_cpus() {
|
||||
local path cpu_index
|
||||
|
||||
printf "0"
|
||||
for path in $(grep 1 ${CPU_SYSFS_PREFIX}/cpu*/online); do
|
||||
cpu_index=${path##*/cpu}
|
||||
printf " %s" ${cpu_index%%/*}
|
||||
done
|
||||
}
|
||||
|
||||
#
|
||||
# Helper to print sysfs path for the given CPU index and freq info.
|
||||
#
|
||||
# arg1: Frequency info sysfs name, one of *_CPU_FREQ_INFO constants above
|
||||
# arg2: CPU index
|
||||
#
|
||||
print_cpu_freq_sysfs_path() {
|
||||
printf ${CPU_FREQ_SYSFS_PATTERN} "$2" "$1"
|
||||
}
|
||||
|
||||
#
|
||||
# Read the specified CPU freq info from sysfs.
|
||||
#
|
||||
# arg1: CPU index
|
||||
# arg2: Flag (y/n) to also enable printing the freq info.
|
||||
# arg3...: Frequency info sysfs name(s), see *_CPU_FREQ_INFO constants above
|
||||
# return: Global variable(s) CPU_FREQ_${arg} containing the requested information
|
||||
#
|
||||
read_cpu_freq_info() {
|
||||
local var val info path cpu_index print=0 ret=0
|
||||
|
||||
cpu_index=$1
|
||||
[ "$2" = "y" ] && print=1
|
||||
shift 2
|
||||
|
||||
while [ $# -gt 0 ]; do
|
||||
info=$1
|
||||
shift
|
||||
var=CPU_FREQ_${info}
|
||||
path=$(print_cpu_freq_sysfs_path "${info}" ${cpu_index})
|
||||
|
||||
[ -r ${path} ] && read ${var} < ${path} || {
|
||||
log ERROR "Failed to read CPU freq info from: %s" "${path}"
|
||||
ret=1
|
||||
continue
|
||||
}
|
||||
|
||||
[ -n "${var}" ] || {
|
||||
log ERROR "Got empty CPU freq info from: %s" "${path}"
|
||||
ret=1
|
||||
continue
|
||||
}
|
||||
|
||||
[ ${print} -eq 1 ] && {
|
||||
eval val=\$${var}
|
||||
printf "%6s: %4s Hz\n" "${info}" "${val}"
|
||||
}
|
||||
done
|
||||
|
||||
return ${ret}
|
||||
}
|
||||
|
||||
#
|
||||
# Helper to print freq. value as requested by user via '--cpu-set-max' option.
|
||||
# arg1: user requested freq value
|
||||
#
|
||||
compute_cpu_freq_set() {
|
||||
local val
|
||||
|
||||
case "$1" in
|
||||
+)
|
||||
val=${CPU_FREQ_cpuinfo_max}
|
||||
;;
|
||||
-)
|
||||
val=${CPU_FREQ_cpuinfo_min}
|
||||
;;
|
||||
*%)
|
||||
val=$((${1%?} * CPU_FREQ_cpuinfo_max / 100))
|
||||
;;
|
||||
*[!0-9]*)
|
||||
log ERROR "Cannot set CPU freq to invalid value: %s" "$1"
|
||||
return 1
|
||||
;;
|
||||
"")
|
||||
log ERROR "Cannot set CPU freq to unspecified value"
|
||||
return 1
|
||||
;;
|
||||
*)
|
||||
log ERROR "Cannot set CPU freq to custom value; use +, -, or % instead"
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
|
||||
printf "%s" "${val}"
|
||||
}
|
||||
|
||||
#
|
||||
# Adjust CPU max scaling frequency.
|
||||
#
|
||||
set_cpu_freq_max() {
|
||||
local target_freq res=0
|
||||
case "${CPU_SET_MAX_FREQ}" in
|
||||
+)
|
||||
target_freq=100
|
||||
;;
|
||||
-)
|
||||
target_freq=1
|
||||
;;
|
||||
*%)
|
||||
target_freq=${CPU_SET_MAX_FREQ%?}
|
||||
;;
|
||||
*)
|
||||
log ERROR "Invalid CPU freq"
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
|
||||
local pstate_info=$(printf "${CPU_PSTATE_SYSFS_PATTERN}" max_perf_pct)
|
||||
[ -e "${pstate_info}" ] && {
|
||||
log INFO "Setting intel_pstate max perf to %s" "${target_freq}%"
|
||||
if ! printf "%s" "${target_freq}" > "${pstate_info}";
|
||||
then
|
||||
log ERROR "Failed to set intel_pstate max perf"
|
||||
res=1
|
||||
fi
|
||||
}
|
||||
|
||||
local cpu_index
|
||||
for cpu_index in $(get_online_cpus); do
|
||||
read_cpu_freq_info ${cpu_index} n ${CAP_CPU_FREQ_INFO} || { res=$?; continue; }
|
||||
|
||||
target_freq=$(compute_cpu_freq_set "${CPU_SET_MAX_FREQ}")
|
||||
tf_res=$?
|
||||
[ -z "${target_freq}" ] && { res=$tf_res; continue; }
|
||||
|
||||
log INFO "Setting CPU%s max scaling freq to %s Hz" ${cpu_index} "${target_freq}"
|
||||
[ -n "${DRY_RUN}" ] && continue
|
||||
|
||||
if ! printf "%s" ${target_freq} > $(print_cpu_freq_sysfs_path scaling_max ${cpu_index});
|
||||
then
|
||||
res=1
|
||||
log ERROR "Failed to set CPU%s max scaling frequency" ${cpu_index}
|
||||
fi
|
||||
done
|
||||
|
||||
return ${res}
|
||||
}
|
||||
|
||||
#
|
||||
# Show help message.
|
||||
#
|
||||
print_usage() {
|
||||
cat <<EOF
|
||||
Usage: ${0##*/} [OPTION]...
|
||||
|
||||
A script to manage Intel GPU frequencies. Can be used for debugging performance
|
||||
problems or trying to obtain a stable frequency while benchmarking.
|
||||
|
||||
Note Intel GPUs only accept specific frequencies, usually multiples of 50 MHz.
|
||||
|
||||
Options:
|
||||
-g, --get [act|enf|cap|all]
|
||||
Get frequency information: active (default), enforced,
|
||||
hardware capabilities or all of them.
|
||||
|
||||
-s, --set [{min|max}=]{FREQUENCY[%]|+|-}
|
||||
Set min or max frequency to the given value (MHz).
|
||||
Append '%' to interpret FREQUENCY as % of hw max.
|
||||
Use '+' or '-' to set frequency to hardware max or min.
|
||||
Omit min/max prefix to set both frequencies.
|
||||
|
||||
-r, --reset Reset frequencies to hardware defaults.
|
||||
|
||||
-m, --monitor [act|enf|cap|all]
|
||||
Monitor the indicated frequencies via 'watch' utility.
|
||||
See '-g, --get' option for more details.
|
||||
|
||||
-d|--detect-thrott [start|stop|status]
|
||||
Start (default operation) the throttling detector
|
||||
as a background process. Use 'stop' or 'status' to
|
||||
terminate the detector process or verify its status.
|
||||
|
||||
--cpu-set-max {FREQUENCY%|+|-}
|
||||
Set CPU max scaling frequency as % of hw max.
|
||||
Use '+' or '-' to set frequency to hardware max or min.
|
||||
|
||||
--dry-run See what the script will do without applying any
|
||||
frequency changes.
|
||||
|
||||
-h, --help Display this help text and exit.
|
||||
EOF
|
||||
}
|
||||
|
||||
#
|
||||
# Parse user input for '-g, --get' option.
|
||||
# Returns 0 if a value has been provided, otherwise 1.
|
||||
#
|
||||
parse_option_get() {
|
||||
local ret=0
|
||||
|
||||
case "$1" in
|
||||
act) GET_ACT_FREQ=1;;
|
||||
enf) GET_ENF_FREQ=1;;
|
||||
cap) GET_CAP_FREQ=1;;
|
||||
all) GET_ACT_FREQ=1; GET_ENF_FREQ=1; GET_CAP_FREQ=1;;
|
||||
-*|"")
|
||||
# No value provided, using default.
|
||||
GET_ACT_FREQ=1
|
||||
ret=1
|
||||
;;
|
||||
*)
|
||||
print_usage
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
return ${ret}
|
||||
}
|
||||
|
||||
#
|
||||
# Validate user input for '-s, --set' option.
|
||||
# arg1: input value to be validated
|
||||
# arg2: optional flag indicating input is restricted to %
|
||||
#
|
||||
validate_option_set() {
|
||||
case "$1" in
|
||||
+|-|[0-9]%|[0-9][0-9]%)
|
||||
return 0
|
||||
;;
|
||||
*[!0-9]*|"")
|
||||
print_usage
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
[ -z "$2" ] || { print_usage; exit 1; }
|
||||
}
|
||||
|
||||
#
|
||||
# Parse script arguments.
|
||||
#
|
||||
[ $# -eq 0 ] && { print_usage; exit 1; }
|
||||
|
||||
while [ $# -gt 0 ]; do
|
||||
case "$1" in
|
||||
-g|--get)
|
||||
parse_option_get "$2" && shift
|
||||
;;
|
||||
|
||||
-s|--set)
|
||||
shift
|
||||
case "$1" in
|
||||
min=*)
|
||||
SET_MIN_FREQ=${1#min=}
|
||||
validate_option_set "${SET_MIN_FREQ}"
|
||||
;;
|
||||
max=*)
|
||||
SET_MAX_FREQ=${1#max=}
|
||||
validate_option_set "${SET_MAX_FREQ}"
|
||||
;;
|
||||
*)
|
||||
SET_MIN_FREQ=$1
|
||||
validate_option_set "${SET_MIN_FREQ}"
|
||||
SET_MAX_FREQ=${SET_MIN_FREQ}
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
|
||||
-r|--reset)
|
||||
RESET_FREQ=1
|
||||
SET_MIN_FREQ="-"
|
||||
SET_MAX_FREQ="+"
|
||||
;;
|
||||
|
||||
-m|--monitor)
|
||||
MONITOR_FREQ=act
|
||||
parse_option_get "$2" && MONITOR_FREQ=$2 && shift
|
||||
;;
|
||||
|
||||
-d|--detect-thrott)
|
||||
DETECT_THROTT=start
|
||||
case "$2" in
|
||||
start|stop|status)
|
||||
DETECT_THROTT=$2
|
||||
shift
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
|
||||
--cpu-set-max)
|
||||
shift
|
||||
CPU_SET_MAX_FREQ=$1
|
||||
validate_option_set "${CPU_SET_MAX_FREQ}" restricted
|
||||
;;
|
||||
|
||||
--dry-run)
|
||||
DRY_RUN=1
|
||||
;;
|
||||
|
||||
-h|--help)
|
||||
print_usage
|
||||
exit 0
|
||||
;;
|
||||
|
||||
*)
|
||||
print_usage
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
shift
|
||||
done
|
||||
|
||||
#
|
||||
# Main
|
||||
#
|
||||
RET=0
|
||||
|
||||
identify_intel_gpu || {
|
||||
log INFO "No Intel GPU detected"
|
||||
exit 0
|
||||
}
|
||||
|
||||
[ -n "${SET_MIN_FREQ}${SET_MAX_FREQ}" ] && { set_freq || RET=$?; }
|
||||
print_freq_info
|
||||
|
||||
[ -n "${DETECT_THROTT}" ] && detect_throttling ${DETECT_THROTT}
|
||||
|
||||
[ -n "${CPU_SET_MAX_FREQ}" ] && { set_cpu_freq_max || RET=$?; }
|
||||
|
||||
[ -n "${MONITOR_FREQ}" ] && {
|
||||
log INFO "Entering frequency monitoring mode"
|
||||
sleep 2
|
||||
exec watch -d -n 1 "$0" -g "${MONITOR_FREQ}"
|
||||
}
|
||||
|
||||
exit ${RET}
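#
# (Added usage sketch, not part of the original script; the script path is
# whatever name this helper is installed under in the CI image:)
#   <script> -g all         # print actual, enforced and capped GPU frequencies
#   <script> -s 50%         # set GPU min and max frequency to 50% of hw max
#   <script> -s max=+       # raise only the max frequency to the hardware max
#   <script> -d start       # start the throttling detector in the background
#   <script> --dry-run -r   # show what a reset would do without applying it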
|
@@ -1,18 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# shellcheck disable=SC1091 # the path is created in build-kdl and
|
||||
# here we only check that it exists
|
||||
# shellcheck disable=SC2086 # we want the arguments to be expanded
|
||||
|
||||
if ! [ -f /ci-kdl/bin/activate ]; then
|
||||
echo -e "ci-kdl not installed; not monitoring temperature"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
KDL_ARGS="
|
||||
--output-file=${RESULTS_DIR}/kdl.json
|
||||
--log-level=WARNING
|
||||
--num-samples=-1
|
||||
"
|
||||
|
||||
source /ci-kdl/bin/activate
|
||||
exec /ci-kdl/bin/ci-kdl ${KDL_ARGS}
|
@@ -1,7 +0,0 @@
|
||||
[core]
|
||||
backend=headless-backend.so
|
||||
xwayland=true
|
||||
idle-time=0
|
||||
|
||||
[xwayland]
|
||||
path=/usr/local/bin/Xwayland
|
@@ -1,7 +0,0 @@
|
||||
variables:
|
||||
CONDITIONAL_BUILD_ANDROID_CTS_TAG: b018634d732f438027ec58c0383615e7
|
||||
CONDITIONAL_BUILD_ANGLE_TAG: f62910e55be46e37cc867d037e4a8121
|
||||
CONDITIONAL_BUILD_CROSVM_TAG: 0f59350b1052bdbb28b65a832b494377
|
||||
CONDITIONAL_BUILD_FLUSTER_TAG: 3bc3afd7468e106afcbfd569a85f34f9
|
||||
CONDITIONAL_BUILD_PIGLIT_TAG: 827b708ab7309721395ea28cec512968
|
||||
CONDITIONAL_BUILD_VKD3D_PROTON_TAG: 82cadf35246e64a8228bf759c9c19e5b
|
@@ -1,70 +0,0 @@
|
||||
# Build the CI Alpine docker images.
|
||||
#
|
||||
# MESA_IMAGE_TAG is the tag of the docker image used by later stage jobs. If the
|
||||
# image doesn't exist yet, the container stage job generates it.
|
||||
#
|
||||
# In order to generate a new image, one should generally change the tag.
|
||||
# While removing the image from the registry would also work, that's not
|
||||
# recommended except for ephemeral images during development: Replacing
|
||||
# an image after a significant amount of time might pull in newer
|
||||
# versions of gcc/clang or other packages, which might break the build
|
||||
# with older commits using the same tag.
|
||||
#
|
||||
# After merging a change resulting in generating a new image to the
|
||||
# main repository, it's recommended to remove the image from the source
|
||||
# repository's container registry, so that the image from the main
|
||||
# repository's registry will be used there as well.
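
# (Illustrative sketch, not part of the original file: the image tags referred
# to above live in .gitlab-ci/image-tags.yml as pipeline variables, so
# "changing the tag" usually means bumping a value roughly like the one below;
# the exact value format is an assumption here.)
#
#   variables:
#     ALPINE_X86_64_BUILD_TAG: "20250101-new-llvm"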
|
||||
|
||||
# Alpine based x86_64 build image
|
||||
.alpine/x86_64_build-base:
|
||||
extends:
|
||||
- .fdo.container-build@alpine
|
||||
- .container
|
||||
variables:
|
||||
FDO_DISTRIBUTION_VERSION: "3.21"
|
||||
FDO_BASE_IMAGE: alpine:$FDO_DISTRIBUTION_VERSION # since cbuild ignores it
|
||||
|
||||
# Alpine based x86_64 build image
|
||||
alpine/x86_64_build:
|
||||
extends:
|
||||
- .alpine/x86_64_build-base
|
||||
variables:
|
||||
MESA_IMAGE_TAG: &alpine-x86_64_build ${ALPINE_X86_64_BUILD_TAG}
|
||||
LLVM_VERSION: &alpine-llvm_version 19
|
||||
rules:
|
||||
- !reference [.container, rules]
|
||||
# Note: the next three lines must remain in that order, so that the rules
|
||||
# in `linkcheck-docs` catch nightly pipelines before the rules in `deploy-docs`
|
||||
# exclude them.
|
||||
- !reference [linkcheck-docs, rules]
|
||||
- !reference [deploy-docs, rules]
|
||||
- !reference [test-docs, rules]
|
||||
|
||||
.use-alpine/x86_64_build:
|
||||
tags:
|
||||
- $FDO_RUNNER_JOB_PRIORITY_TAG_X86_64
|
||||
extends:
|
||||
- .set-image
|
||||
variables:
|
||||
MESA_IMAGE_PATH: "alpine/x86_64_build"
|
||||
MESA_IMAGE_TAG: *alpine-x86_64_build
|
||||
LLVM_VERSION: *alpine-llvm_version
|
||||
needs:
|
||||
- job: sanity
|
||||
optional: true
|
||||
- job: alpine/x86_64_build
|
||||
optional: true
|
||||
|
||||
# Alpine based x86_64 image for LAVA SSH dockerized client
|
||||
alpine/x86_64_lava_ssh_client:
|
||||
extends:
|
||||
- .alpine/x86_64_build-base
|
||||
variables:
|
||||
MESA_IMAGE_TAG: &alpine-x86_64_lava_ssh_client ${ALPINE_X86_64_LAVA_SSH_TAG}
|
||||
|
||||
# Alpine based x86_64 image to run LAVA jobs
|
||||
alpine/x86_64_lava-trigger:
|
||||
extends:
|
||||
- .alpine/x86_64_build-base
|
||||
variables:
|
||||
MESA_IMAGE_TAG: &alpine-x86_64_lava_trigger ${ALPINE_X86_64_LAVA_TRIGGER_TAG}
|
@@ -1,83 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# shellcheck disable=SC1091
|
||||
|
||||
# When changing this file, you need to bump the following
|
||||
# .gitlab-ci/image-tags.yml tags:
|
||||
# ALPINE_X86_64_BUILD_TAG
|
||||
|
||||
set -e
|
||||
|
||||
. .gitlab-ci/setup-test-env.sh
|
||||
|
||||
set -o xtrace
|
||||
|
||||
EPHEMERAL=(
|
||||
)
|
||||
|
||||
|
||||
DEPS=(
|
||||
bash
|
||||
bison
|
||||
ccache
|
||||
"clang${LLVM_VERSION}-dev"
|
||||
clang-dev
|
||||
cmake
|
||||
coreutils
|
||||
curl
|
||||
elfutils-dev
|
||||
expat-dev
|
||||
flex
|
||||
g++
|
||||
gcc
|
||||
gettext
|
||||
git
|
||||
glslang
|
||||
graphviz
|
||||
libclc-dev
|
||||
libdrm-dev
|
||||
libpciaccess-dev
|
||||
libva-dev
|
||||
linux-headers
|
||||
"llvm${LLVM_VERSION}-dev"
|
||||
"llvm${LLVM_VERSION}-static"
|
||||
mold
|
||||
musl-dev
|
||||
py3-clang
|
||||
py3-cparser
|
||||
py3-mako
|
||||
py3-packaging
|
||||
py3-pip
|
||||
py3-ply
|
||||
py3-yaml
|
||||
python3-dev
|
||||
samurai
|
||||
spirv-llvm-translator-dev
|
||||
spirv-tools-dev
|
||||
util-macros
|
||||
vulkan-headers
|
||||
zlib-dev
|
||||
)
|
||||
|
||||
apk --no-cache add "${DEPS[@]}" "${EPHEMERAL[@]}"
|
||||
|
||||
pip3 install --break-system-packages sphinx===8.2.3 hawkmoth===0.19.0
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
|
||||
. .gitlab-ci/container/install-meson.sh
|
||||
|
||||
EXTRA_MESON_ARGS='--prefix=/usr' \
|
||||
. .gitlab-ci/container/build-wayland.sh
|
||||
|
||||
############### Uninstall the build software
|
||||
|
||||
# too many vendor binaries, just keep the ones we need
|
||||
find /usr/share/clc \
|
||||
\( -type f -o -type l \) \
|
||||
! -name 'spirv-mesa3d-.spv' \
|
||||
! -name 'spirv64-mesa3d-.spv' \
|
||||
-delete
|
||||
|
||||
apk del "${EPHEMERAL[@]}"
|
||||
|
||||
. .gitlab-ci/container/container_post_build.sh
|
@@ -1,50 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# This is a ci-templates build script to generate a container for triggering LAVA jobs.
|
||||
|
||||
# When changing this file, you need to bump the following
|
||||
# .gitlab-ci/image-tags.yml tags:
|
||||
# ALPINE_X86_64_LAVA_TRIGGER_TAG
|
||||
|
||||
# shellcheck disable=SC1091
|
||||
set -e
|
||||
|
||||
. .gitlab-ci/setup-test-env.sh
|
||||
|
||||
set -o xtrace
|
||||
|
||||
uncollapsed_section_start alpine_setup "Base Alpine system setup"
|
||||
|
||||
# Ephemeral packages (installed for this script and removed again at the end)
|
||||
EPHEMERAL=(
|
||||
git
|
||||
py3-pip
|
||||
)
|
||||
|
||||
# We only need these very basic packages to run the LAVA jobs
|
||||
DEPS=(
|
||||
curl
|
||||
python3
|
||||
tar
|
||||
zstd
|
||||
)
|
||||
|
||||
apk --no-cache add "${DEPS[@]}" "${EPHEMERAL[@]}"
|
||||
|
||||
pip3 install --break-system-packages -r bin/ci/requirements-lava.txt
|
||||
|
||||
cp -Rp .gitlab-ci/lava /
|
||||
cp -Rp .gitlab-ci/bin/*_logger.py /lava
|
||||
cp -Rp .gitlab-ci/common/init-stage1.sh /lava
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
|
||||
############### Uninstall the build software
|
||||
|
||||
uncollapsed_section_switch alpine_cleanup "Cleaning up base Alpine system"
|
||||
|
||||
apk del "${EPHEMERAL[@]}"
|
||||
|
||||
. .gitlab-ci/container/container_post_build.sh
|
||||
|
||||
section_end alpine_cleanup
|
@@ -1,32 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# This is a ci-templates build script to generate a container for LAVA SSH client.
|
||||
|
||||
# shellcheck disable=SC1091
|
||||
set -e
|
||||
|
||||
. .gitlab-ci/setup-test-env.sh
|
||||
|
||||
set -o xtrace
|
||||
|
||||
EPHEMERAL=(
|
||||
)
|
||||
|
||||
# We only need these very basic packages to run the tests.
|
||||
DEPS=(
|
||||
openssh-client # for ssh
|
||||
iputils # for ping
|
||||
bash
|
||||
curl
|
||||
)
|
||||
|
||||
|
||||
apk --no-cache add "${DEPS[@]}" "${EPHEMERAL[@]}"
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
|
||||
############### Uninstall the build software
|
||||
|
||||
apk del "${EPHEMERAL[@]}"
|
||||
|
||||
. .gitlab-ci/container/container_post_build.sh
|
5
.gitlab-ci/container/arm64_test.sh
Normal file
@@ -0,0 +1,5 @@
|
||||
#!/bin/bash
|
||||
|
||||
arch=arm64
|
||||
|
||||
. .gitlab-ci/container/baremetal_build.sh
|
55
.gitlab-ci/container/arm_build.sh
Normal file
@@ -0,0 +1,55 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
|
||||
apt-get -y install ca-certificates
|
||||
sed -i -e 's/http:\/\/deb/https:\/\/deb/g' /etc/apt/sources.list
|
||||
echo 'deb https://deb.debian.org/debian buster-backports main' >/etc/apt/sources.list.d/backports.list
|
||||
apt-get update
|
||||
apt-get -y install \
|
||||
abootimg \
|
||||
android-sdk-ext4-utils \
|
||||
autoconf \
|
||||
automake \
|
||||
bc \
|
||||
bison \
|
||||
ccache \
|
||||
cmake \
|
||||
debootstrap \
|
||||
fastboot \
|
||||
flex \
|
||||
g++ \
|
||||
git \
|
||||
lavacli \
|
||||
libdrm-dev \
|
||||
libelf-dev \
|
||||
libexpat1-dev \
|
||||
llvm-8-dev \
|
||||
pkg-config \
|
||||
python \
|
||||
python3-mako \
|
||||
python3-pil \
|
||||
python3-requests \
|
||||
python3-pip \
|
||||
python3-setuptools \
|
||||
unzip \
|
||||
wget \
|
||||
xz-utils \
|
||||
zlib1g-dev
|
||||
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@6f5af7e5574509726c79109e3c147cee95e81366
|
||||
|
||||
apt install -y --no-remove -t buster-backports \
|
||||
meson
|
||||
|
||||
arch=armhf
|
||||
. .gitlab-ci/container/cross_build.sh
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
|
||||
# dependencies where we want a specific version
|
||||
EXTRA_MESON_ARGS=
|
||||
. .gitlab-ci/build-libdrm.sh
|
||||
|
||||
. .gitlab-ci/container/container_post_build.sh
|
45
.gitlab-ci/container/arm_test-base.sh
Normal file
@@ -0,0 +1,45 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
|
||||
############### Install packages for building
|
||||
apt-get install -y ca-certificates
|
||||
sed -i -e 's/http:\/\/deb/https:\/\/deb/g' /etc/apt/sources.list
|
||||
echo 'deb https://deb.debian.org/debian buster-backports main' >/etc/apt/sources.list.d/backports.list
|
||||
apt-get update
|
||||
|
||||
apt-get install -y --no-remove \
|
||||
abootimg \
|
||||
android-sdk-ext4-utils \
|
||||
bc \
|
||||
bison \
|
||||
bzip2 \
|
||||
ccache \
|
||||
cmake \
|
||||
cpio \
|
||||
g++ \
|
||||
debootstrap \
|
||||
fastboot \
|
||||
flex \
|
||||
git \
|
||||
netcat \
|
||||
nginx-full \
|
||||
python3-distutils \
|
||||
python3-minimal \
|
||||
python3-serial \
|
||||
python3.7 \
|
||||
pkg-config \
|
||||
procps \
|
||||
rsync \
|
||||
u-boot-tools \
|
||||
unzip
|
||||
|
||||
apt install -t buster-backports -y --no-remove \
|
||||
meson
|
||||
|
||||
# setup nginx
|
||||
sed -i '/gzip_/ s/#\ //g' /etc/nginx/nginx.conf
|
||||
cp .gitlab-ci/bare-metal/nginx-default-site /etc/nginx/sites-enabled/default
|
||||
|
||||
. .gitlab-ci/container/container_post_build.sh
|
@@ -1,33 +1,60 @@
|
||||
#!/usr/bin/env bash
|
||||
# shellcheck disable=SC2154 # arch is assigned in previous scripts
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
|
||||
# Fetch the arm-built rootfs image and unpack it in our x86_64 container (saves
|
||||
# network transfer, disk usage, and runtime on test jobs)
|
||||
ROOTFS=/lava-files/rootfs-${arch}
|
||||
|
||||
S3_PATH="https://${S3_HOST}/${S3_KERNEL_BUCKET}"
|
||||
dpkg --add-architecture $arch
|
||||
apt-get update
|
||||
|
||||
if curl -L --retry 3 -f --retry-delay 10 -s --head "${S3_PATH}/${FDO_UPSTREAM_REPO}/${LAVA_DISTRIBUTION_TAG}/lava-rootfs.tar.zst"; then
|
||||
ARTIFACTS_URL="${S3_PATH}/${FDO_UPSTREAM_REPO}/${LAVA_DISTRIBUTION_TAG}"
|
||||
else
|
||||
ARTIFACTS_URL="${S3_PATH}/${CI_PROJECT_PATH}/${LAVA_DISTRIBUTION_TAG}"
|
||||
fi
|
||||
# Cross-build test deps
|
||||
BAREMETAL_EPHEMERAL=" \
|
||||
autoconf \
|
||||
automake \
|
||||
crossbuild-essential-$arch \
|
||||
git-lfs \
|
||||
libdrm-dev:$arch \
|
||||
libboost-dev:$arch \
|
||||
libegl1-mesa-dev:$arch \
|
||||
libelf-dev:$arch \
|
||||
libexpat1-dev:$arch \
|
||||
libffi-dev:$arch \
|
||||
libgbm-dev:$arch \
|
||||
libgles2-mesa-dev:$arch \
|
||||
libpciaccess-dev:$arch \
|
||||
libpcre3-dev:$arch \
|
||||
libpng-dev:$arch \
|
||||
libpython3-dev:$arch \
|
||||
libstdc++6:$arch \
|
||||
libtinfo-dev:$arch \
|
||||
libegl1-mesa-dev:$arch \
|
||||
libvulkan-dev:$arch \
|
||||
libxcb-keysyms1-dev:$arch \
|
||||
libpython3-dev:$arch \
|
||||
python3-dev \
|
||||
qt5-default \
|
||||
qt5-qmake \
|
||||
qtbase5-dev:$arch \
|
||||
"
|
||||
|
||||
curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \
|
||||
"${ARTIFACTS_URL}"/lava-rootfs.tar.zst -o rootfs.tar.zst
|
||||
mkdir -p /rootfs-"$arch"
|
||||
tar -C /rootfs-"$arch" '--exclude=./dev/*' --zstd -xf rootfs.tar.zst
|
||||
rm rootfs.tar.zst
|
||||
apt-get install -y --no-remove $BAREMETAL_EPHEMERAL
|
||||
|
||||
if [[ $arch == "arm64" ]]; then
|
||||
mkdir -p /baremetal-files
|
||||
pushd /baremetal-files
|
||||
mkdir /var/cache/apt/archives/$arch
|
||||
|
||||
curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \
|
||||
-O "${KERNEL_IMAGE_BASE}"/arm64/Image
|
||||
curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \
|
||||
-O "${KERNEL_IMAGE_BASE}"/arm64/Image.gz
|
||||
popd
|
||||
fi
|
||||
############### Create cross-files
|
||||
|
||||
. .gitlab-ci/create-cross-file.sh $arch
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
|
||||
############### Create rootfs
|
||||
KERNEL_URL=https://gitlab.freedesktop.org/drm/msm/-/archive/drm-msm-fixes-2020-06-25/msm-drm-msm-fixes-2020-06-25.tar.gz
|
||||
|
||||
DEBIAN_ARCH=$arch INCLUDE_VK_CTS=1 . .gitlab-ci/container/lava_build.sh
|
||||
|
||||
ccache --show-stats
|
||||
|
||||
. .gitlab-ci/container/container_post_build.sh
|
||||
|
||||
apt-get purge -y $BAREMETAL_EPHEMERAL
|
||||
|
@@ -1,67 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# When changing this file, you need to bump the following
|
||||
# .gitlab-ci/image-tags.yml tags:
|
||||
# DEBIAN_TEST_ANDROID_TAG
|
||||
|
||||
# This script runs in a container to:
|
||||
# 1. Download the Android CTS (Compatibility Test Suite)
|
||||
# 2. Filter out unneeded test modules
|
||||
# 3. Compress and upload the stripped version to S3
|
||||
# Note: The 'build-' prefix in the filename is only to make it compatible
|
||||
# with the bin/ci/update_tag.py script.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
section_start android-cts "Downloading Android CTS"
|
||||
|
||||
# xtrace is getting lost with the section switching
|
||||
set -x
|
||||
|
||||
# Do a very early check to make sure the tag is correct without the need of
|
||||
# setting up the environment variables locally
|
||||
ci_tag_build_time_check "ANDROID_CTS_TAG"
|
||||
|
||||
# List of all CTS modules we might want to run in CI
|
||||
# This should be the union of all modules required by our CI jobs
|
||||
# Specific modules to run are selected via the ${GPU_VERSION}-android-cts-include.txt files
|
||||
ANDROID_CTS_MODULES=(
|
||||
"CtsDeqpTestCases"
|
||||
"CtsGraphicsTestCases"
|
||||
"CtsNativeHardwareTestCases"
|
||||
"CtsSkQPTestCases"
|
||||
)
|
||||
|
||||
ANDROID_CTS_VERSION="${ANDROID_VERSION}_r1"
|
||||
ANDROID_CTS_DEVICE_ARCH="x86"
|
||||
|
||||
# Download the stripped CTS from S3, because the CTS download from Google can take 20 minutes
|
||||
CTS_FILENAME="android-cts-${ANDROID_CTS_VERSION}-linux_x86-${ANDROID_CTS_DEVICE_ARCH}"
|
||||
ARTIFACT_PATH="${DATA_STORAGE_PATH}/android-cts/${ANDROID_CTS_TAG}.tar.zst"
|
||||
|
||||
if FOUND_ARTIFACT_URL="$(find_s3_project_artifact "${ARTIFACT_PATH}")"; then
|
||||
echo "Found Android CTS at: ${FOUND_ARTIFACT_URL}"
|
||||
curl-with-retry "${FOUND_ARTIFACT_URL}" | tar --zstd -x -C /
|
||||
else
|
||||
echo "No cached CTS found, downloading from Google and uploading to S3..."
|
||||
curl-with-retry --remote-name "https://dl.google.com/dl/android/cts/${CTS_FILENAME}.zip"
|
||||
|
||||
# Disable zipbomb detection, because the CTS zip file is too big
|
||||
# At least locally, it is detected as a zipbomb
|
||||
UNZIP_DISABLE_ZIPBOMB_DETECTION=true \
|
||||
unzip -q -d / "${CTS_FILENAME}.zip"
|
||||
rm "${CTS_FILENAME}.zip"
|
||||
|
||||
# Keep only the interesting tests to save space
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
ANDROID_CTS_MODULES_KEEP_EXPRESSION=$(printf "%s|" "${ANDROID_CTS_MODULES[@]}" | sed -e 's/|$//g')
|
||||
find /android-cts/testcases/ -mindepth 1 -type d | grep -v -E "$ANDROID_CTS_MODULES_KEEP_EXPRESSION" | xargs rm -rf
|
||||
|
||||
# Using zstd compressed tarball instead of zip, the compression ratio is almost the same, but
|
||||
# the extraction is faster, also LAVA overlays don't support zip compression.
|
||||
tar --zstd -cf "${CTS_FILENAME}.tar.zst" /android-cts
|
||||
ci-fairy s3cp --token-file "${S3_JWT_FILE}" "${CTS_FILENAME}.tar.zst" \
|
||||
"https://${S3_BASE_PATH}/${CI_PROJECT_PATH}/${ARTIFACT_PATH}"
|
||||
fi
|
||||
|
||||
section_end android-cts
|
@@ -1,121 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# When changing this file, you need to bump the following
|
||||
# .gitlab-ci/image-tags.yml and .gitlab-ci/container/gitlab-ci.yml tags:
|
||||
# DEBIAN_BUILD_TAG
|
||||
# ANDROID_LLVM_ARTIFACT_NAME
|
||||
|
||||
set -exu
|
||||
|
||||
# If CI vars are not set, assign an empty value; this prevents -u from failing
|
||||
: "${CI:=}"
|
||||
: "${CI_PROJECT_PATH:=}"
|
||||
|
||||
# Early check for required env variables, relies on `set -u`
|
||||
: "$ANDROID_NDK_VERSION"
|
||||
: "$ANDROID_SDK_VERSION"
|
||||
: "$ANDROID_LLVM_VERSION"
|
||||
: "$ANDROID_LLVM_ARTIFACT_NAME"
|
||||
: "$S3_JWT_FILE"
|
||||
: "$S3_HOST"
|
||||
: "$S3_ANDROID_BUCKET"
|
||||
|
||||
# When running in CI, check that the auth file used later on is non-empty
|
||||
if [ -n "$CI" ] && [ ! -s "${S3_JWT_FILE}" ]; then
|
||||
echo "Error: ${S3_JWT_FILE} is empty." 1>&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if curl -s -o /dev/null -I -L -f --retry 4 --retry-delay 15 "https://${S3_HOST}/${S3_ANDROID_BUCKET}/${CI_PROJECT_PATH}/${ANDROID_LLVM_ARTIFACT_NAME}.tar.zst"; then
|
||||
echo "Artifact ${ANDROID_LLVM_ARTIFACT_NAME}.tar.zst already exists, skip re-building."
|
||||
|
||||
# Download prebuilt LLVM libraries for Android when they have not changed,
|
||||
# to save some time
|
||||
curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \
|
||||
-o "/${ANDROID_LLVM_ARTIFACT_NAME}.tar.zst" "https://${S3_HOST}/${S3_ANDROID_BUCKET}/${CI_PROJECT_PATH}/${ANDROID_LLVM_ARTIFACT_NAME}.tar.zst"
|
||||
tar -C / --zstd -xf "/${ANDROID_LLVM_ARTIFACT_NAME}.tar.zst"
|
||||
rm "/${ANDROID_LLVM_ARTIFACT_NAME}.tar.zst"
|
||||
|
||||
exit
|
||||
fi
|
||||
|
||||
# Ephemeral packages (installed for this script and removed again at the end)
|
||||
EPHEMERAL=(
|
||||
unzip
|
||||
)
|
||||
|
||||
apt-get update
|
||||
apt-get install -y --no-install-recommends --no-remove "${EPHEMERAL[@]}"
|
||||
|
||||
ANDROID_NDK="android-ndk-${ANDROID_NDK_VERSION}"
|
||||
ANDROID_NDK_ROOT="/${ANDROID_NDK}"
|
||||
if [ ! -d "$ANDROID_NDK_ROOT" ];
|
||||
then
|
||||
curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \
|
||||
-o "${ANDROID_NDK}.zip" \
|
||||
"https://dl.google.com/android/repository/${ANDROID_NDK}-linux.zip"
|
||||
unzip -d / "${ANDROID_NDK}.zip" "$ANDROID_NDK/source.properties" "$ANDROID_NDK/build/cmake/*" "$ANDROID_NDK/toolchains/llvm/*"
|
||||
rm "${ANDROID_NDK}.zip"
|
||||
fi
|
||||
|
||||
if [ ! -d "/llvm-project" ];
|
||||
then
|
||||
mkdir "/llvm-project"
|
||||
pushd "/llvm-project"
|
||||
git init
|
||||
git remote add origin https://github.com/llvm/llvm-project.git
|
||||
git fetch --depth 1 origin "$ANDROID_LLVM_VERSION"
|
||||
git checkout FETCH_HEAD
|
||||
popd
|
||||
fi
|
||||
|
||||
pushd "/llvm-project"
|
||||
|
||||
# Checkout again the intended version, just in case of a pre-existing full clone
|
||||
git checkout "$ANDROID_LLVM_VERSION" || true
|
||||
|
||||
LLVM_INSTALL_PREFIX="/${ANDROID_LLVM_ARTIFACT_NAME}"
|
||||
|
||||
rm -rf build/
|
||||
cmake -GNinja -S llvm -B build/ \
|
||||
-DCMAKE_TOOLCHAIN_FILE="${ANDROID_NDK_ROOT}/build/cmake/android.toolchain.cmake" \
|
||||
-DANDROID_ABI=x86_64 \
|
||||
-DANDROID_PLATFORM="android-${ANDROID_SDK_VERSION}" \
|
||||
-DANDROID_NDK="${ANDROID_NDK_ROOT}" \
|
||||
-DCMAKE_ANDROID_ARCH_ABI=x86_64 \
|
||||
-DCMAKE_ANDROID_NDK="${ANDROID_NDK_ROOT}" \
|
||||
-DCMAKE_BUILD_TYPE=MinSizeRel \
|
||||
-DCMAKE_SYSTEM_NAME=Android \
|
||||
-DCMAKE_SYSTEM_VERSION="${ANDROID_SDK_VERSION}" \
|
||||
-DCMAKE_INSTALL_PREFIX="${LLVM_INSTALL_PREFIX}" \
|
||||
-DCMAKE_CXX_FLAGS="-march=x86-64 --target=x86_64-linux-android${ANDROID_SDK_VERSION} -fno-rtti" \
|
||||
-DLLVM_HOST_TRIPLE="x86_64-linux-android${ANDROID_SDK_VERSION}" \
|
||||
-DLLVM_TARGETS_TO_BUILD=X86 \
|
||||
-DLLVM_BUILD_LLVM_DYLIB=OFF \
|
||||
-DLLVM_BUILD_TESTS=OFF \
|
||||
-DLLVM_BUILD_EXAMPLES=OFF \
|
||||
-DLLVM_BUILD_DOCS=OFF \
|
||||
-DLLVM_BUILD_TOOLS=OFF \
|
||||
-DLLVM_ENABLE_RTTI=OFF \
|
||||
-DLLVM_BUILD_INSTRUMENTED_COVERAGE=OFF \
|
||||
-DLLVM_NATIVE_TOOL_DIR="${ANDROID_NDK_ROOT}/toolchains/llvm/prebuilt/linux-x86_64/bin" \
|
||||
-DLLVM_ENABLE_PIC=False \
|
||||
-DLLVM_OPTIMIZED_TABLEGEN=ON
|
||||
|
||||
ninja "-j${FDO_CI_CONCURRENT:-4}" -C build/ install
|
||||
|
||||
popd
|
||||
|
||||
rm -rf /llvm-project
|
||||
|
||||
tar --zstd -cf "${ANDROID_LLVM_ARTIFACT_NAME}.tar.zst" "$LLVM_INSTALL_PREFIX"
|
||||
|
||||
# If run in CI upload the tar.zst archive to S3 to avoid rebuilding it if the
|
||||
# version does not change, and delete it.
|
||||
# The file is not deleted for non-CI because it can be useful in local runs.
|
||||
if [ -n "$CI" ]; then
|
||||
ci-fairy s3cp --token-file "${S3_JWT_FILE}" "${ANDROID_LLVM_ARTIFACT_NAME}.tar.zst" "https://${S3_HOST}/${S3_ANDROID_BUCKET}/${CI_PROJECT_PATH}/${ANDROID_LLVM_ARTIFACT_NAME}.tar.zst"
|
||||
rm "${ANDROID_LLVM_ARTIFACT_NAME}.tar.zst"
|
||||
fi
|
||||
|
||||
apt-get purge -y "${EPHEMERAL[@]}"
|
@@ -1,171 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# When changing this file, you need to bump the following
|
||||
# .gitlab-ci/image-tags.yml tags:
|
||||
# DEBIAN_TEST_ANDROID_TAG
|
||||
# DEBIAN_TEST_GL_TAG
|
||||
|
||||
set -uex
|
||||
|
||||
section_start angle "Building ANGLE"
|
||||
|
||||
# Do a very early check to make sure the tag is correct without the need of
|
||||
# setting up the environment variables locally
|
||||
ci_tag_build_time_check "ANGLE_TAG"
|
||||
|
||||
ANGLE_REV="c39f4a5c553cbee39af8f866aa82a9ffa4f02f5b"
|
||||
DEPOT_REV="5982a1aeb33dc36382ed8c62eddf52a6135e7dd3"
|
||||
|
||||
# Set ANGLE_ARCH based on DEBIAN_ARCH if it hasn't been explicitly defined
|
||||
if [[ -z "${ANGLE_ARCH:-}" ]]; then
|
||||
case "$DEBIAN_ARCH" in
|
||||
amd64) ANGLE_ARCH=x64;;
|
||||
arm64) ANGLE_ARCH=arm64;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# DEPOT tools
|
||||
mkdir /depot-tools
|
||||
pushd /depot-tools
|
||||
git init
|
||||
git remote add origin https://chromium.googlesource.com/chromium/tools/depot_tools.git
|
||||
git fetch --depth 1 origin "$DEPOT_REV"
|
||||
git checkout FETCH_HEAD
|
||||
export PATH=/depot-tools:$PATH
|
||||
export DEPOT_TOOLS_UPDATE=0
|
||||
popd
|
||||
|
||||
mkdir /angle-build
|
||||
mkdir /angle
|
||||
pushd /angle-build
|
||||
git init
|
||||
git remote add origin https://chromium.googlesource.com/angle/angle.git
|
||||
git fetch --depth 1 origin "$ANGLE_REV"
|
||||
git checkout FETCH_HEAD
|
||||
|
||||
echo "$ANGLE_REV" > /angle/version
|
||||
|
||||
GCLIENT_CUSTOM_VARS=()
|
||||
GCLIENT_CUSTOM_VARS+=('--custom-var=angle_enable_cl=False')
|
||||
GCLIENT_CUSTOM_VARS+=('--custom-var=angle_enable_cl_testing=False')
|
||||
GCLIENT_CUSTOM_VARS+=('--custom-var=angle_enable_vulkan_validation_layers=False')
|
||||
GCLIENT_CUSTOM_VARS+=('--custom-var=angle_enable_wgpu=False')
|
||||
GCLIENT_CUSTOM_VARS+=('--custom-var=build_angle_deqp_tests=False')
|
||||
GCLIENT_CUSTOM_VARS+=('--custom-var=build_angle_perftests=False')
|
||||
if [[ "$ANGLE_TARGET" == "android" ]]; then
|
||||
GCLIENT_CUSTOM_VARS+=('--custom-var=checkout_android=True')
|
||||
fi
|
||||
|
||||
# source preparation
|
||||
gclient config --name REPLACE-WITH-A-DOT --unmanaged \
|
||||
"${GCLIENT_CUSTOM_VARS[@]}" \
|
||||
https://chromium.googlesource.com/angle/angle.git
|
||||
sed -e 's/REPLACE-WITH-A-DOT/./;' -i .gclient
|
||||
sed -e 's|"custom_deps" : {|"custom_deps" : {\
|
||||
"third_party/clspv/src": None,\
|
||||
"third_party/dawn": None,\
|
||||
"third_party/glmark2/src": None,\
|
||||
"third_party/libjpeg_turbo": None,\
|
||||
"third_party/llvm/src": None,\
|
||||
"third_party/OpenCL-CTS/src": None,\
|
||||
"third_party/SwiftShader": None,\
|
||||
"third_party/VK-GL-CTS/src": None,\
|
||||
"third_party/vulkan-validation-layers/src": None,|' -i .gclient
|
||||
gclient sync --no-history -j"${FDO_CI_CONCURRENT:-4}"
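
# (Added note, not in the original file: the sed edits above first point the
# generated .gclient checkout at the current directory, then pin the listed
# third_party dependencies to None so that `gclient sync` skips fetching them.)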
|
||||
|
||||
mkdir -p out/Release
|
||||
cat > out/Release/args.gn <<EOF
|
||||
angle_assert_always_on=false
|
||||
angle_build_all=false
|
||||
angle_build_tests=false
|
||||
angle_enable_cl=false
|
||||
angle_enable_cl_testing=false
|
||||
angle_enable_gl=false
|
||||
angle_enable_gl_desktop_backend=false
|
||||
angle_enable_null=false
|
||||
angle_enable_swiftshader=false
|
||||
angle_enable_trace=false
|
||||
angle_enable_wgpu=false
|
||||
angle_enable_vulkan=true
|
||||
angle_enable_vulkan_api_dump_layer=false
|
||||
angle_enable_vulkan_validation_layers=false
|
||||
angle_has_frame_capture=false
|
||||
angle_has_histograms=false
|
||||
angle_has_rapidjson=false
|
||||
angle_use_custom_libvulkan=false
|
||||
build_angle_deqp_tests=false
|
||||
dcheck_always_on=true
|
||||
enable_expensive_dchecks=false
|
||||
is_component_build=false
|
||||
is_debug=false
|
||||
target_cpu="${ANGLE_ARCH}"
|
||||
target_os="${ANGLE_TARGET}"
|
||||
treat_warnings_as_errors=false
|
||||
EOF
|
||||
|
||||
case "$ANGLE_TARGET" in
|
||||
linux) cat >> out/Release/args.gn <<EOF
|
||||
angle_egl_extension="so.1"
|
||||
angle_glesv2_extension="so.2"
|
||||
use_custom_libcxx=false
|
||||
custom_toolchain="//build/toolchain/linux/unbundle:default"
|
||||
host_toolchain="//build/toolchain/linux/unbundle:default"
|
||||
EOF
|
||||
;;
|
||||
android) cat >> out/Release/args.gn <<EOF
|
||||
android_ndk_version="${ANDROID_NDK_VERSION}"
|
||||
android64_ndk_api_level=${ANDROID_SDK_VERSION}
|
||||
android32_ndk_api_level=${ANDROID_SDK_VERSION}
|
||||
use_custom_libcxx=true
|
||||
EOF
|
||||
;;
|
||||
*) echo "Unexpected ANGLE_TARGET value: $ANGLE_TARGET"; exit 1;;
|
||||
esac
|
||||
|
||||
if [[ "$DEBIAN_ARCH" = "arm64" ]]; then
|
||||
# We need to get an AArch64 sysroot - because ANGLE isn't great friends with
|
||||
# system dependencies - but use the default system toolchain, because the
|
||||
# 'arm64' toolchain you get from Google infrastructure is a cross-compiler
|
||||
# from x86-64
|
||||
build/linux/sysroot_scripts/install-sysroot.py --arch=arm64
|
||||
fi
|
||||
|
||||
(
|
||||
# The 'unbundled' toolchain configuration requires clang, and it also needs to
|
||||
# be configured via environment variables.
|
||||
export CC="clang-${LLVM_VERSION}"
|
||||
export HOST_CC="$CC"
|
||||
export CFLAGS="-Wno-unknown-warning-option"
|
||||
export HOST_CFLAGS="$CFLAGS"
|
||||
export CXX="clang++-${LLVM_VERSION}"
|
||||
export HOST_CXX="$CXX"
|
||||
export CXXFLAGS="-Wno-unknown-warning-option"
|
||||
export HOST_CXXFLAGS="$CXXFLAGS"
|
||||
export AR="ar"
|
||||
export HOST_AR="$AR"
|
||||
export NM="nm"
|
||||
export HOST_NM="$NM"
|
||||
export LDFLAGS="-fuse-ld=lld-${LLVM_VERSION} -lpthread -ldl"
|
||||
export HOST_LDFLAGS="$LDFLAGS"
|
||||
|
||||
gn gen out/Release
|
||||
# depot_tools overrides ninja with a version that doesn't work. We want
|
||||
# ninja with FDO_CI_CONCURRENT anyway.
|
||||
/usr/local/bin/ninja -C out/Release/ libEGL libGLESv1_CM libGLESv2
|
||||
)
|
||||
|
||||
rm -f out/Release/libvulkan.so* out/Release/*.so*.TOC
|
||||
cp out/Release/lib*.so* /angle/
|
||||
|
||||
if [[ "$ANGLE_TARGET" == "linux" ]]; then
|
||||
ln -s libEGL.so.1 /angle/libEGL.so
|
||||
ln -s libGLESv2.so.2 /angle/libGLESv2.so
|
||||
fi
|
||||
|
||||
rm -rf out
|
||||
|
||||
popd
|
||||
rm -rf /depot-tools
|
||||
rm -rf /angle-build
|
||||
|
||||
section_end angle
|
@@ -1,27 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
# When changing this file, you need to bump the following
|
||||
# .gitlab-ci/image-tags.yml tags:
|
||||
# DEBIAN_BASE_TAG
|
||||
|
||||
set -uex
|
||||
|
||||
uncollapsed_section_start apitrace "Building apitrace"
|
||||
|
||||
APITRACE_VERSION="b6102d10960c9f43b1b473903fc67937dd19fb98"
|
||||
|
||||
git clone https://github.com/apitrace/apitrace.git --single-branch --no-checkout /apitrace
|
||||
pushd /apitrace
|
||||
git checkout "$APITRACE_VERSION"
|
||||
git submodule update --init --depth 1 --recursive
|
||||
cmake -S . -B _build -G Ninja -DCMAKE_BUILD_TYPE=Release -DENABLE_GUI=False -DENABLE_WAFFLE=on ${EXTRA_CMAKE_ARGS:-}
|
||||
cmake --build _build --parallel --target apitrace eglretrace
|
||||
mkdir build
|
||||
cp _build/apitrace build
|
||||
cp _build/eglretrace build
|
||||
${STRIP_CMD:-strip} build/*
|
||||
find . -not -path './build' -not -path './build/*' -delete
|
||||
popd
|
||||
|
||||
section_end apitrace
|
@@ -1,28 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
# When changing this file, you need to bump the following
|
||||
# .gitlab-ci/image-tags.yml tags:
|
||||
# DEBIAN_BUILD_TAG
|
||||
# FEDORA_X86_64_BUILD_TAG
|
||||
|
||||
uncollapsed_section_start bindgen "Building bindgen"
|
||||
|
||||
BINDGEN_VER=0.71.1
|
||||
CBINDGEN_VER=0.26.0
|
||||
|
||||
# bindgen
|
||||
RUSTFLAGS='-L native=/usr/local/lib' cargo install \
|
||||
bindgen-cli --version ${BINDGEN_VER} \
|
||||
--locked \
|
||||
-j ${FDO_CI_CONCURRENT:-4} \
|
||||
--root /usr/local
|
||||
|
||||
# cbindgen
|
||||
RUSTFLAGS='-L native=/usr/local/lib' cargo install \
|
||||
cbindgen --version ${CBINDGEN_VER} \
|
||||
--locked \
|
||||
-j ${FDO_CI_CONCURRENT:-4} \
|
||||
--root /usr/local
|
||||
|
||||
section_end bindgen
|
@@ -1,56 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
# When changing this file, you need to bump the following
|
||||
# .gitlab-ci/image-tags.yml tags:
|
||||
# DEBIAN_BASE_TAG
|
||||
|
||||
# Do a very early check to make sure the tag is correct without the need of
|
||||
# setting up the environment variables locally
|
||||
ci_tag_build_time_check "CROSVM_TAG"
|
||||
|
||||
set -uex
|
||||
|
||||
section_start crosvm "Building crosvm"
|
||||
|
||||
git config --global user.email "mesa@example.com"
|
||||
git config --global user.name "Mesa CI"
|
||||
|
||||
CROSVM_VERSION=4a6b4316155742fbfa1be7087c2ee578cfee884d
|
||||
git clone --single-branch -b main --no-checkout https://chromium.googlesource.com/crosvm/crosvm /platform/crosvm
|
||||
pushd /platform/crosvm
|
||||
git checkout "$CROSVM_VERSION"
|
||||
git submodule update --init
|
||||
|
||||
VIRGLRENDERER_VERSION=06d43ce974b664f9dc521b706a0ad7f91dbf2866
|
||||
rm -rf third_party/virglrenderer
|
||||
git clone --single-branch -b main --no-checkout https://gitlab.freedesktop.org/virgl/virglrenderer.git third_party/virglrenderer
|
||||
pushd third_party/virglrenderer
|
||||
git checkout "$VIRGLRENDERER_VERSION"
|
||||
meson setup build/ -D libdir=lib -D render-server-worker=process -D venus=true ${EXTRA_MESON_ARGS:-}
|
||||
meson install -C build
|
||||
popd
|
||||
|
||||
rm rust-toolchain
|
||||
|
||||
RUSTFLAGS='-L native=/usr/local/lib' cargo install \
|
||||
bindgen-cli \
|
||||
--locked \
|
||||
-j ${FDO_CI_CONCURRENT:-4} \
|
||||
--root /usr/local \
|
||||
--version 0.71.1 \
|
||||
${EXTRA_CARGO_ARGS:-}
|
||||
|
||||
CROSVM_USE_SYSTEM_MINIGBM=1 CROSVM_USE_SYSTEM_VIRGLRENDERER=1 RUSTFLAGS='-L native=/usr/local/lib' cargo install \
|
||||
-j ${FDO_CI_CONCURRENT:-4} \
|
||||
--locked \
|
||||
--features 'default-no-sandbox gpu x virgl_renderer' \
|
||||
--path . \
|
||||
--root /usr/local \
|
||||
${EXTRA_CARGO_ARGS:-}
|
||||
|
||||
popd
|
||||
|
||||
rm -rf /platform/crosvm
|
||||
|
||||
section_end crosvm
|
@@ -1,99 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
# When changing this file, you need to bump the following
|
||||
# .gitlab-ci/image-tags.yml tags:
|
||||
# DEBIAN_TEST_ANDROID_TAG
|
||||
# DEBIAN_BASE_TAG
|
||||
|
||||
set -uex
|
||||
|
||||
section_start deqp-runner "Building deqp-runner"
|
||||
|
||||
DEQP_RUNNER_VERSION=0.20.3
|
||||
|
||||
commits_to_backport=(
|
||||
)
|
||||
|
||||
patch_files=(
|
||||
)
|
||||
|
||||
DEQP_RUNNER_GIT_URL="${DEQP_RUNNER_GIT_URL:-https://gitlab.freedesktop.org/mesa/deqp-runner.git}"
|
||||
|
||||
if [ -n "${DEQP_RUNNER_GIT_TAG:-}" ]; then
|
||||
DEQP_RUNNER_GIT_CHECKOUT="$DEQP_RUNNER_GIT_TAG"
|
||||
elif [ -n "${DEQP_RUNNER_GIT_REV:-}" ]; then
|
||||
DEQP_RUNNER_GIT_CHECKOUT="$DEQP_RUNNER_GIT_REV"
|
||||
else
|
||||
DEQP_RUNNER_GIT_CHECKOUT="v$DEQP_RUNNER_VERSION"
|
||||
fi
|
||||
|
||||
BASE_PWD=$PWD
|
||||
|
||||
mkdir -p /deqp-runner
|
||||
pushd /deqp-runner
|
||||
mkdir deqp-runner-git
|
||||
pushd deqp-runner-git
|
||||
git init
|
||||
git remote add origin "$DEQP_RUNNER_GIT_URL"
|
||||
git fetch --depth 1 origin "$DEQP_RUNNER_GIT_CHECKOUT"
|
||||
git checkout FETCH_HEAD
|
||||
|
||||
for commit in "${commits_to_backport[@]}"
|
||||
do
|
||||
PATCH_URL="https://gitlab.freedesktop.org/mesa/deqp-runner/-/commit/$commit.patch"
|
||||
echo "Backport deqp-runner commit $commit from $PATCH_URL"
|
||||
curl -L --retry 4 -f --retry-all-errors --retry-delay 60 $PATCH_URL | git am
|
||||
done
|
||||
|
||||
for patch in "${patch_files[@]}"
|
||||
do
|
||||
echo "Apply patch to deqp-runner from $patch"
|
||||
git am "$BASE_PWD/.gitlab-ci/container/patches/$patch"
|
||||
done
|
||||
|
||||
if [ -z "${RUST_TARGET:-}" ]; then
|
||||
RUST_TARGET=""
|
||||
fi
|
||||
|
||||
if [[ "$RUST_TARGET" != *-android ]]; then
|
||||
# When CC (/usr/lib/ccache/gcc) variable is set, the rust compiler uses
|
||||
# this variable when cross-compiling arm32 and the build fails for zsys-sys.
|
||||
# So unset the CC variable when cross-compiling for arm32.
|
||||
SAVEDCC=${CC:-}
|
||||
if [ "$RUST_TARGET" = "armv7-unknown-linux-gnueabihf" ]; then
|
||||
unset CC
|
||||
fi
|
||||
cargo install --locked \
|
||||
-j ${FDO_CI_CONCURRENT:-4} \
|
||||
--root /usr/local \
|
||||
${EXTRA_CARGO_ARGS:-} \
|
||||
--path .
|
||||
CC=$SAVEDCC
|
||||
else
|
||||
cargo install --locked \
|
||||
-j ${FDO_CI_CONCURRENT:-4} \
|
||||
--root /usr/local --version 2.10.0 \
|
||||
cargo-ndk
|
||||
|
||||
rustup target add $RUST_TARGET
|
||||
RUSTFLAGS='-C target-feature=+crt-static' cargo ndk --target $RUST_TARGET build --release
|
||||
|
||||
mv target/$RUST_TARGET/release/deqp-runner /deqp-runner
|
||||
|
||||
cargo uninstall --locked \
|
||||
--root /usr/local \
|
||||
cargo-ndk
|
||||
fi
|
||||
|
||||
popd
|
||||
rm -rf deqp-runner-git
|
||||
popd
|
||||
|
||||
# remove unused test runners to shrink images for the Mesa CI build (not kernel,
|
||||
# which chooses its own deqp branch)
|
||||
if [ -z "${DEQP_RUNNER_GIT_TAG:-}${DEQP_RUNNER_GIT_REV:-}" ]; then
|
||||
rm -f /usr/local/bin/igt-runner
|
||||
fi
|
||||
|
||||
section_end deqp-runner
|
@@ -1,314 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
# When changing this file, you need to bump the following
|
||||
# .gitlab-ci/image-tags.yml tags:
|
||||
# DEBIAN_TEST_ANDROID_TAG
|
||||
# DEBIAN_TEST_GL_TAG
|
||||
# DEBIAN_TEST_VK_TAG
|
||||
|
||||
set -ue -o pipefail
|
||||
|
||||
# shellcheck disable=SC2153
|
||||
deqp_api=${DEQP_API,,}
|
||||
|
||||
section_start deqp-$deqp_api "Building dEQP $DEQP_API"
|
||||
|
||||
set -x
|
||||
|
||||
# See `deqp_build_targets` below for which release is used to produce which
|
||||
# binary. Unless this comment has bitrotten:
|
||||
# - the commit from the main branch produces the deqp tools and `deqp-vk`,
|
||||
# - the VK release produces `deqp-vk`,
|
||||
# - the GL release produces `glcts`, and
|
||||
# - the GLES release produces `deqp-gles*` and `deqp-egl`
|
||||
|
||||
DEQP_MAIN_COMMIT=9cc8e038994c32534b3d2c4ba88c1dc49ef53228
|
||||
DEQP_VK_VERSION=1.4.1.1
|
||||
DEQP_GL_VERSION=4.6.6.0
|
||||
DEQP_GLES_VERSION=3.2.12.0
|
||||
|
||||
# Patches to VulkanCTS may come from commits in their repo (listed in
|
||||
# cts_commits_to_backport) or patch files stored in our repo (in the patch
|
||||
# directory `$OLDPWD/.gitlab-ci/container/patches/` listed in cts_patch_files).
|
||||
# Both list variables would have comments explaining the reasons behind the
|
||||
# patches.
|
||||
|
||||
# shellcheck disable=SC2034
|
||||
main_cts_commits_to_backport=(
|
||||
# If you find yourself wanting to add something in here, consider whether
|
||||
# bumping DEQP_MAIN_COMMIT is not a better solution :)
|
||||
)
|
||||
|
||||
# shellcheck disable=SC2034
|
||||
main_cts_patch_files=(
|
||||
)
|
||||
|
||||
# shellcheck disable=SC2034
|
||||
vk_cts_commits_to_backport=(
|
||||
# Stop querying device address from unbound buffers
|
||||
046343f46f7d39d53b47842d7fd8ed3279528046
|
||||
)
|
||||
|
||||
# shellcheck disable=SC2034
|
||||
vk_cts_patch_files=(
|
||||
)
|
||||
|
||||
# shellcheck disable=SC2034
|
||||
gl_cts_commits_to_backport=(
|
||||
# Add testing for GL_PRIMITIVES_SUBMITTED_ARB query.
|
||||
e075ce73ddc5973aa46a5236c715bb281c9501fa
|
||||
)
|
||||
|
||||
# shellcheck disable=SC2034
|
||||
gl_cts_patch_files=(
|
||||
build-deqp-gl_Build-Don-t-build-Vulkan-utilities-for-GL-builds.patch
|
||||
build-deqp-gl_Revert-Add-missing-context-deletion.patch
|
||||
build-deqp-gl_Revert-Fix-issues-with-GLX-reset-notification-strate.patch
|
||||
build-deqp-gl_Revert-Fix-spurious-failures-when-using-a-config-wit.patch
|
||||
)
|
||||
|
||||
# shellcheck disable=SC2034
|
||||
# GLES builds also EGL
|
||||
gles_cts_commits_to_backport=(
|
||||
)
|
||||
|
||||
# shellcheck disable=SC2034
|
||||
gles_cts_patch_files=(
|
||||
build-deqp-gl_Build-Don-t-build-Vulkan-utilities-for-GL-builds.patch
|
||||
build-deqp-gl_Revert-Add-missing-context-deletion.patch
|
||||
build-deqp-gl_Revert-Fix-issues-with-GLX-reset-notification-strate.patch
|
||||
build-deqp-gl_Revert-Fix-spurious-failures-when-using-a-config-wit.patch
|
||||
)
|
||||
|
||||
|
||||
### Careful editing anything below this line
|
||||
|
||||
|
||||
git config --global user.email "mesa@example.com"
|
||||
git config --global user.name "Mesa CI"
|
||||
|
||||
# shellcheck disable=SC2153
|
||||
case "${DEQP_API}" in
|
||||
tools) DEQP_VERSION="$DEQP_MAIN_COMMIT";;
|
||||
*-main) DEQP_VERSION="$DEQP_MAIN_COMMIT";;
|
||||
VK) DEQP_VERSION="vulkan-cts-$DEQP_VK_VERSION";;
|
||||
GL) DEQP_VERSION="opengl-cts-$DEQP_GL_VERSION";;
|
||||
GLES) DEQP_VERSION="opengl-es-cts-$DEQP_GLES_VERSION";;
|
||||
*) echo "Unexpected DEQP_API value: $DEQP_API"; exit 1;;
|
||||
esac
|
||||
|
||||
mkdir -p /VK-GL-CTS
|
||||
pushd /VK-GL-CTS
|
||||
[ -e .git ] || {
|
||||
git init
|
||||
git remote add origin https://github.com/KhronosGroup/VK-GL-CTS.git
|
||||
}
|
||||
git fetch --depth 1 origin "$DEQP_VERSION"
|
||||
git checkout FETCH_HEAD
|
||||
DEQP_COMMIT=$(git rev-parse FETCH_HEAD)
|
||||
|
||||
if [ "$DEQP_VERSION" = "$DEQP_MAIN_COMMIT" ]; then
|
||||
merge_base="$(curl-with-retry -s https://api.github.com/repos/KhronosGroup/VK-GL-CTS/compare/main...$DEQP_MAIN_COMMIT | jq -r .merge_base_commit.sha)"
|
||||
if [[ "$merge_base" != "$DEQP_MAIN_COMMIT" ]]; then
|
||||
echo "VK-GL-CTS commit $DEQP_MAIN_COMMIT is not a commit from the main branch."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
mkdir -p /deqp-$deqp_api
|
||||
|
||||
if [ "$DEQP_VERSION" = "$DEQP_MAIN_COMMIT" ]; then
|
||||
prefix="main"
|
||||
else
|
||||
prefix="$deqp_api"
|
||||
fi
|
||||
|
||||
cts_commits_to_backport="${prefix}_cts_commits_to_backport[@]"
|
||||
for commit in "${!cts_commits_to_backport}"
|
||||
do
|
||||
PATCH_URL="https://github.com/KhronosGroup/VK-GL-CTS/commit/$commit.patch"
|
||||
echo "Apply patch to ${DEQP_API} CTS from $PATCH_URL"
|
||||
curl-with-retry $PATCH_URL | GIT_COMMITTER_DATE=$(LC_TIME=C date -d@0) git am -
|
||||
done
|
||||
|
||||
cts_patch_files="${prefix}_cts_patch_files[@]"
|
||||
for patch in "${!cts_patch_files}"
|
||||
do
|
||||
echo "Apply patch to ${DEQP_API} CTS from $patch"
|
||||
GIT_COMMITTER_DATE=$(LC_TIME=C date -d@0) git am < $OLDPWD/.gitlab-ci/container/patches/$patch
|
||||
done
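
# (Added note, not in the original script: the two loops above rely on bash
# indirect expansion -- the variable holds the *name* of the per-API array and
# "${!name}" expands to that array's contents. A minimal sketch:)
#   example_list=(aaa bbb)
#   ref="example_list[@]"
#   for item in "${!ref}"; do echo "$item"; done   # prints aaa, then bbb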
|
||||
|
||||
{
|
||||
if [ "$DEQP_VERSION" = "$DEQP_MAIN_COMMIT" ]; then
|
||||
commit_desc=$(git show --no-patch --format='commit %h on %ci' --abbrev=10 "$DEQP_COMMIT")
|
||||
echo "dEQP $DEQP_API at $commit_desc"
|
||||
else
|
||||
echo "dEQP $DEQP_API version $DEQP_VERSION"
|
||||
fi
|
||||
if [ "$(git rev-parse HEAD)" != "$DEQP_COMMIT" ]; then
|
||||
echo "The following local patches are applied on top:"
|
||||
git log --reverse --oneline "$DEQP_COMMIT".. --format='- %s'
|
||||
fi
|
||||
} > /deqp-$deqp_api/deqp-$deqp_api-version
|
||||
|
||||
# --insecure is due to SSL cert failures hitting sourceforge for zlib and
|
||||
# libpng (sigh). The archives get their checksums checked anyway, and git
|
||||
# always goes through ssh or https.
|
||||
python3 external/fetch_sources.py --insecure
|
||||
|
||||
case "${DEQP_API}" in
|
||||
VK-main)
|
||||
# Video tests rely on external files
|
||||
python3 external/fetch_video_decode_samples.py
|
||||
python3 external/fetch_video_encode_samples.py
|
||||
;;
|
||||
esac
|
||||
|
||||
if [[ "$DEQP_API" = tools ]]; then
|
||||
# Save the testlog stylesheets:
|
||||
cp doc/testlog-stylesheet/testlog.{css,xsl} /deqp-$deqp_api
|
||||
fi
|
||||
|
||||
popd
|
||||
|
||||
deqp_build_targets=()
|
||||
case "${DEQP_API}" in
|
||||
VK|VK-main)
|
||||
deqp_build_targets+=(deqp-vk)
|
||||
;;
|
||||
GL)
|
||||
deqp_build_targets+=(glcts)
|
||||
;;
|
||||
GLES)
|
||||
deqp_build_targets+=(deqp-gles{2,3,31})
|
||||
deqp_build_targets+=(glcts) # needed for gles*-khr tests
|
||||
# deqp-egl also comes from this build, but it is handled separately below.
|
||||
;;
|
||||
tools)
|
||||
deqp_build_targets+=(testlog-to-xml)
|
||||
deqp_build_targets+=(testlog-to-csv)
|
||||
deqp_build_targets+=(testlog-to-junit)
|
||||
;;
|
||||
esac
|
||||
|
||||
OLD_IFS="$IFS"
|
||||
IFS=";"
|
||||
CMAKE_SBT="${deqp_build_targets[*]}"
|
||||
IFS="$OLD_IFS"
|
||||
|
||||
pushd /deqp-$deqp_api
|
||||
|
||||
if [ "${DEQP_API}" = 'GLES' ]; then
|
||||
if [ "${DEQP_TARGET}" = 'android' ]; then
|
||||
cmake -S /VK-GL-CTS -B . -G Ninja \
|
||||
-DDEQP_TARGET=android \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DSELECTED_BUILD_TARGETS="deqp-egl" \
|
||||
${EXTRA_CMAKE_ARGS:-}
|
||||
ninja modules/egl/deqp-egl
|
||||
mv modules/egl/deqp-egl{,-android}
|
||||
else
|
||||
# When including EGL/X11 testing, do that build first and save off its
|
||||
# deqp-egl binary.
|
||||
cmake -S /VK-GL-CTS -B . -G Ninja \
|
||||
-DDEQP_TARGET=x11_egl_glx \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DSELECTED_BUILD_TARGETS="deqp-egl" \
|
||||
${EXTRA_CMAKE_ARGS:-}
|
||||
ninja modules/egl/deqp-egl
|
||||
mv modules/egl/deqp-egl{,-x11}
|
||||
|
||||
cmake -S /VK-GL-CTS -B . -G Ninja \
|
||||
-DDEQP_TARGET=wayland \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DSELECTED_BUILD_TARGETS="deqp-egl" \
|
||||
${EXTRA_CMAKE_ARGS:-}
|
||||
ninja modules/egl/deqp-egl
|
||||
mv modules/egl/deqp-egl{,-wayland}
|
||||
fi
|
||||
fi
|
||||
|
||||
cmake -S /VK-GL-CTS -B . -G Ninja \
|
||||
-DDEQP_TARGET=${DEQP_TARGET} \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DSELECTED_BUILD_TARGETS="${CMAKE_SBT}" \
|
||||
${EXTRA_CMAKE_ARGS:-}
|
||||
|
||||
# Make sure `default` doesn't silently stop detecting one of the platforms we care about
|
||||
if [ "${DEQP_TARGET}" = 'default' ]; then
|
||||
grep -q DEQP_SUPPORT_WAYLAND=1 build.ninja
|
||||
grep -q DEQP_SUPPORT_X11=1 build.ninja
|
||||
grep -q DEQP_SUPPORT_XCB=1 build.ninja
|
||||
fi
|
||||
|
||||
ninja "${deqp_build_targets[@]}"
|
||||
|
||||
if [ "$DEQP_API" != tools ]; then
|
||||
# Copy out the mustpass lists we want.
|
||||
mkdir -p mustpass
|
||||
|
||||
if [ "${DEQP_API}" = 'VK' ] || [ "${DEQP_API}" = 'VK-main' ]; then
|
||||
for mustpass in $(< /VK-GL-CTS/external/vulkancts/mustpass/main/vk-default.txt) ; do
|
||||
cat /VK-GL-CTS/external/vulkancts/mustpass/main/$mustpass \
|
||||
>> mustpass/vk-main.txt
|
||||
done
|
||||
fi
|
||||
|
||||
if [ "${DEQP_API}" = 'GL' ]; then
|
||||
cp \
|
||||
/VK-GL-CTS/external/openglcts/data/gl_cts/data/mustpass/gl/khronos_mustpass/main/*-main.txt \
|
||||
mustpass/
|
||||
cp \
|
||||
/VK-GL-CTS/external/openglcts/data/gl_cts/data/mustpass/gl/khronos_mustpass_single/main/*-single.txt \
|
||||
mustpass/
|
||||
fi
|
||||
|
||||
if [ "${DEQP_API}" = 'GLES' ]; then
|
||||
cp \
|
||||
/VK-GL-CTS/external/openglcts/data/gl_cts/data/mustpass/gles/aosp_mustpass/main/*.txt \
|
||||
mustpass/
|
||||
cp \
|
||||
/VK-GL-CTS/external/openglcts/data/gl_cts/data/mustpass/egl/aosp_mustpass/main/egl-main.txt \
|
||||
mustpass/
|
||||
cp \
|
||||
/VK-GL-CTS/external/openglcts/data/gl_cts/data/mustpass/gles/khronos_mustpass/main/*-main.txt \
|
||||
mustpass/
|
||||
fi
|
||||
|
||||
# Compress the caselists, since Vulkan's in particular are gigantic; higher
|
||||
# compression levels provide no real measurable benefit.
|
||||
zstd -f -1 --rm mustpass/*.txt
|
||||
fi
|
||||
|
||||
if [ "$DEQP_API" = tools ]; then
|
||||
# Save *some* executor utils, but otherwise strip things down
|
||||
# to reduce deqp build size:
|
||||
mv executor/testlog-to-* .
|
||||
rm -rf executor
|
||||
fi
|
||||
|
||||
# Remove other mustpass files, since we saved off the ones we wanted to convenient locations above.
|
||||
rm -rf assets/**/mustpass/
|
||||
rm -rf external/**/mustpass/
|
||||
rm -rf external/vulkancts/modules/vulkan/vk-main*
|
||||
rm -rf external/vulkancts/modules/vulkan/vk-default
|
||||
|
||||
rm -rf external/openglcts/modules/cts-runner
|
||||
rm -rf modules/internal
|
||||
rm -rf execserver
|
||||
rm -rf framework
|
||||
find . -depth \( -iname '*cmake*' -o -name '*ninja*' -o -name '*.o' -o -name '*.a' \) -exec rm -rf {} \;
|
||||
if [ "${DEQP_API}" = 'VK' ] || [ "${DEQP_API}" = 'VK-main' ]; then
|
||||
${STRIP_CMD:-strip} external/vulkancts/modules/vulkan/deqp-vk
|
||||
fi
|
||||
if [ "${DEQP_API}" = 'GL' ] || [ "${DEQP_API}" = 'GLES' ]; then
|
||||
${STRIP_CMD:-strip} external/openglcts/modules/glcts
|
||||
fi
|
||||
if [ "${DEQP_API}" = 'GLES' ]; then
|
||||
${STRIP_CMD:-strip} modules/*/deqp-*
|
||||
fi
|
||||
du -sh ./*
|
||||
popd
|
||||
|
||||
section_end deqp-$deqp_api
|
@@ -1,19 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
# When changing this file, you need to bump the following
|
||||
# .gitlab-ci/image-tags.yml tags:
|
||||
# DEBIAN_BUILD_TAG
|
||||
|
||||
set -uex
|
||||
|
||||
uncollapsed_section_start directx-headers "Building directx-headers"
|
||||
|
||||
git clone https://github.com/microsoft/DirectX-Headers -b v1.614.1 --depth 1
|
||||
pushd DirectX-Headers
|
||||
meson setup build --backend=ninja --buildtype=release -Dbuild-test=false ${EXTRA_MESON_ARGS:-}
|
||||
meson install -C build
|
||||
popd
|
||||
rm -rf DirectX-Headers
|
||||
|
||||
section_end directx-headers
|
@@ -1,52 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# shellcheck disable=SC1091 # The relative paths in this file only become valid at runtime.
|
||||
# shellcheck disable=SC2034 # Variables are used in scripts called from here
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
# When changing this file, you need to bump the following
|
||||
# .gitlab-ci/image-tags.yml tags:
|
||||
# DEBIAN_TEST_VIDEO_TAG
|
||||
|
||||
# Install fluster in /fluster.
|
||||
|
||||
set -uex
|
||||
|
||||
section_start fluster "Installing Fluster"
|
||||
|
||||
# Do a very early check to make sure the tag is correct without the need of
|
||||
# setting up the environment variables locally
|
||||
ci_tag_build_time_check "FLUSTER_TAG"
|
||||
|
||||
FLUSTER_REVISION="e997402978f62428fffc8e5a4a709690d9ca9bc5"
|
||||
|
||||
git clone https://github.com/fluendo/fluster.git --single-branch --no-checkout
|
||||
|
||||
pushd fluster || exit
|
||||
git checkout "${FLUSTER_REVISION}"
|
||||
popd || exit
|
||||
|
||||
ARTIFACT_PATH="${DATA_STORAGE_PATH}/fluster/${FLUSTER_TAG}/vectors.tar.zst"
|
||||
|
||||
if FOUND_ARTIFACT_URL="$(find_s3_project_artifact "${ARTIFACT_PATH}")"; then
|
||||
echo "Found fluster vectors at: ${FOUND_ARTIFACT_URL}"
|
||||
mv fluster/ /
|
||||
curl-with-retry "${FOUND_ARTIFACT_URL}" | tar --zstd -x -C /
|
||||
else
|
||||
echo "No cached vectors found, rebuilding..."
|
||||
# Download the necessary vectors: H264, H265 and VP9
|
||||
# When updating FLUSTER_REVISION, make sure to update the vectors if necessary or
|
||||
# fluster-runner will report Missing results.
|
||||
fluster/fluster.py download -j ${FDO_CI_CONCURRENT:-4} \
|
||||
JVT-AVC_V1 JVT-FR-EXT JVT-MVC JVT-SVC_V1 \
|
||||
JCT-VC-3D-HEVC JCT-VC-HEVC_V1 JCT-VC-MV-HEVC JCT-VC-RExt JCT-VC-SCC JCT-VC-SHVC \
|
||||
VP9-TEST-VECTORS-HIGH VP9-TEST-VECTORS
|
||||
|
||||
# Build fluster vectors archive and upload it
|
||||
tar --zstd -cf "vectors.tar.zst" fluster/resources/
|
||||
ci-fairy s3cp --token-file "${S3_JWT_FILE}" "vectors.tar.zst" \
|
||||
"https://${S3_BASE_PATH}/${CI_PROJECT_PATH}/${ARTIFACT_PATH}"
|
||||
mv fluster/ /
|
||||
fi
|
||||
|
||||
section_end fluster
|
@@ -1,22 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# When changing this file, you need to bump the following
|
||||
# .gitlab-ci/image-tags.yml tags:
|
||||
# DEBIAN_TEST_VK_TAG
|
||||
|
||||
set -ex
|
||||
|
||||
uncollapsed_section_start fossilize "Building fossilize"
|
||||
|
||||
git clone https://github.com/ValveSoftware/Fossilize.git
|
||||
cd Fossilize
|
||||
git checkout b43ee42bbd5631ea21fe9a2dee4190d5d875c327
|
||||
git submodule update --init
|
||||
mkdir build
|
||||
cd build
|
||||
cmake -S .. -B . -G Ninja -DCMAKE_BUILD_TYPE=Release
|
||||
ninja -C . install
|
||||
cd ../..
|
||||
rm -rf Fossilize
|
||||
|
||||
section_end fossilize
|
@@ -1,23 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -ex
|
||||
|
||||
uncollapsed_section_start gfxreconstruct "Building gfxreconstruct"
|
||||
|
||||
GFXRECONSTRUCT_VERSION=761837794a1e57f918a85af7000b12e531b178ae
|
||||
|
||||
git clone https://github.com/LunarG/gfxreconstruct.git \
|
||||
--single-branch \
|
||||
-b master \
|
||||
--no-checkout \
|
||||
/gfxreconstruct
|
||||
pushd /gfxreconstruct
|
||||
git checkout "$GFXRECONSTRUCT_VERSION"
|
||||
git submodule update --init
|
||||
git submodule update
|
||||
cmake -S . -B _build -G Ninja -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX:PATH=/gfxreconstruct/build -DBUILD_WERROR=OFF
|
||||
cmake --build _build --parallel --target tools/{replay,info}/install/strip
|
||||
find . -not -path './build' -not -path './build/*' -delete
|
||||
popd
|
||||
|
||||
section_end gfxreconstruct
|
@@ -1,32 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# shellcheck disable=SC1091 # the path is created by the script
|
||||
|
||||
set -ex
|
||||
|
||||
uncollapsed_section_start kdl "Building kdl"
|
||||
|
||||
KDL_REVISION="cbbe5fd54505fd03ee34f35bfd16794f0c30074f"
|
||||
KDL_CHECKOUT_DIR="/tmp/ci-kdl.git"
|
||||
|
||||
mkdir -p ${KDL_CHECKOUT_DIR}
|
||||
pushd ${KDL_CHECKOUT_DIR}
|
||||
git init
|
||||
git remote add origin https://gitlab.freedesktop.org/gfx-ci/ci-kdl.git
|
||||
git fetch --depth 1 origin ${KDL_REVISION}
|
||||
git checkout FETCH_HEAD
|
||||
popd
|
||||
|
||||
# Run venv in a subshell, so we don't accidentally leak the venv state into
|
||||
# calling scripts
|
||||
(
|
||||
python3 -m venv /ci-kdl
|
||||
source /ci-kdl/bin/activate &&
|
||||
pushd ${KDL_CHECKOUT_DIR} &&
|
||||
pip install -r requirements.txt &&
|
||||
pip install . &&
|
||||
popd
|
||||
)
|
||||
|
||||
rm -rf ${KDL_CHECKOUT_DIR}
|
||||
|
||||
section_end kdl
|
@@ -1,35 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -uex
|
||||
|
||||
uncollapsed_section_start libclc "Building libclc"
|
||||
|
||||
export LLVM_CONFIG="llvm-config-${LLVM_VERSION:?"llvm unset!"}"
|
||||
LLVM_TAG="llvmorg-15.0.7"
|
||||
|
||||
$LLVM_CONFIG --version
|
||||
|
||||
git config --global user.email "mesa@example.com"
|
||||
git config --global user.name "Mesa CI"
|
||||
git clone \
|
||||
https://github.com/llvm/llvm-project \
|
||||
--depth 1 \
|
||||
-b "${LLVM_TAG}" \
|
||||
/llvm-project
|
||||
|
||||
mkdir /libclc
|
||||
pushd /libclc
|
||||
cmake -S /llvm-project/libclc -B . -G Ninja -DLLVM_CONFIG="$LLVM_CONFIG" -DLIBCLC_TARGETS_TO_BUILD="spirv-mesa3d-;spirv64-mesa3d-" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr -DLLVM_SPIRV=/usr/bin/llvm-spirv
|
||||
ninja
|
||||
ninja install
|
||||
popd
|
||||
|
||||
# workaround for cmake vs. debian packaging.
|
||||
mkdir -p /usr/lib/clc
|
||||
ln -s /usr/share/clc/spirv64-mesa3d-.spv /usr/lib/clc/
|
||||
ln -s /usr/share/clc/spirv-mesa3d-.spv /usr/lib/clc/
|
||||
|
||||
du -sh ./*
|
||||
rm -rf /libclc /llvm-project
|
||||
|
||||
section_end libclc
|
@@ -1,21 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# Script used for Android and Fedora builds (Debian builds get their libdrm version
|
||||
# from https://gitlab.freedesktop.org/gfx-ci/ci-deb-repo - see PKG_REPO_REV)
|
||||
# shellcheck disable=SC2086 # we want word splitting
|
||||
|
||||
set -uex
|
||||
|
||||
uncollapsed_section_start libdrm "Building libdrm"
|
||||
|
||||
export LIBDRM_VERSION=libdrm-2.4.122
|
||||
|
||||
curl -L -O --retry 4 -f --retry-all-errors --retry-delay 60 \
|
||||
https://dri.freedesktop.org/libdrm/"$LIBDRM_VERSION".tar.xz
|
||||
tar -xvf "$LIBDRM_VERSION".tar.xz && rm "$LIBDRM_VERSION".tar.xz
|
||||
cd "$LIBDRM_VERSION"
|
||||
meson setup build -D vc4=disabled -D freedreno=disabled -D etnaviv=disabled ${EXTRA_MESON_ARGS:-}
|
||||
meson install -C build
|
||||
cd ..
|
||||
rm -rf "$LIBDRM_VERSION"
|
||||
|
||||
section_end libdrm
|
@@ -1,30 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -ex
|
||||
|
||||
uncollapsed_section_start llvm-spirv "Building LLVM-SPIRV-Translator"
|
||||
|
||||
if [ "${LLVM_VERSION:?llvm version not set}" -ge 18 ]; then
|
||||
VER="${LLVM_VERSION}.1.0"
|
||||
else
|
||||
VER="${LLVM_VERSION}.0.0"
|
||||
fi
|
||||
|
||||
curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \
|
||||
-O "https://github.com/KhronosGroup/SPIRV-LLVM-Translator/archive/refs/tags/v${VER}.tar.gz"
|
||||
tar -xvf "v${VER}.tar.gz" && rm "v${VER}.tar.gz"
|
||||
|
||||
mkdir "SPIRV-LLVM-Translator-${VER}/build"
|
||||
pushd "SPIRV-LLVM-Translator-${VER}/build"
|
||||
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr
|
||||
ninja
|
||||
ninja install
|
||||
# For some reason llvm-spirv is not installed by default
|
||||
ninja llvm-spirv
|
||||
cp tools/llvm-spirv/llvm-spirv /usr/bin/
|
||||
popd
|
||||
|
||||
du -sh "SPIRV-LLVM-Translator-${VER}"
|
||||
rm -rf "SPIRV-LLVM-Translator-${VER}"
|
||||
|
||||
section_end llvm-spirv
|
@@ -1,31 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -ex
|
||||
|
||||
# When changing this file, you need to bump the following
|
||||
# .gitlab-ci/image-tags.yml tags:
|
||||
# ALPINE_X86_64_BUILD_TAG
|
||||
# DEBIAN_BASE_TAG
|
||||
# DEBIAN_BUILD_TAG
|
||||
# FEDORA_X86_64_BUILD_TAG
|
||||
|
||||
uncollapsed_section_start mold "Building mold"
|
||||
|
||||
MOLD_VERSION="2.32.0"
|
||||
|
||||
git clone -b v"$MOLD_VERSION" --single-branch --depth 1 https://github.com/rui314/mold.git
|
||||
pushd mold
|
||||
|
||||
cmake -DCMAKE_BUILD_TYPE=Release -D BUILD_TESTING=OFF -D MOLD_LTO=ON
|
||||
cmake --build . --parallel "${FDO_CI_CONCURRENT:-4}"
|
||||
cmake --install . --strip
|
||||
|
||||
# Always use mold from now on
|
||||
find /usr/bin \( -name '*-ld' -o -name 'ld' \) \
|
||||
-exec ln -sf /usr/local/bin/ld.mold {} \; \
|
||||
-exec ls -l {} +
|
||||
|
||||
popd
|
||||
rm -rf mold
|
||||
|
||||
section_end mold
@@ -1,41 +0,0 @@
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting
set -uex

section_start piglit "Building piglit"

# When changing this file, you need to bump the following
# .gitlab-ci/image-tags.yml tags:
# DEBIAN_TEST_GL_TAG
# DEBIAN_TEST_VK_TAG

# Do a very early check to make sure the tag is correct, without needing to
# set up the environment variables locally
ci_tag_build_time_check "PIGLIT_TAG"

REV="a0a27e528f643dfeb785350a1213bfff09681950"

git clone https://gitlab.freedesktop.org/mesa/piglit.git --single-branch --no-checkout /piglit
pushd /piglit
git checkout "$REV"
patch -p1 <$OLDPWD/.gitlab-ci/piglit/disable-vs_in.diff
cmake -S . -B . -G Ninja -DCMAKE_BUILD_TYPE=Release $PIGLIT_OPTS ${EXTRA_CMAKE_ARGS:-}
ninja ${PIGLIT_BUILD_TARGETS:-}
find . -depth \( -name .git -o -name '*ninja*' -o -iname '*cmake*' -o -name '*.[chao]' \) \
    ! -name 'include_test.h' -exec rm -rf {} \;
rm -rf target_api
if [ "${PIGLIT_BUILD_TARGETS:-}" = "piglit_replayer" ]; then
  find . -depth \
      ! -regex "^\.$" \
      ! -regex "^\.\/piglit.*" \
      ! -regex "^\.\/framework.*" \
      ! -regex "^\.\/bin$" \
      ! -regex "^\.\/bin\/replayer\.py" \
      ! -regex "^\.\/templates.*" \
      ! -regex "^\.\/tests$" \
      ! -regex "^\.\/tests\/replay\.py" \
      -exec rm -rf {} \; 2>/dev/null
fi
popd

section_end piglit
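For reference, derived from the exclusion regexes above (not an exhaustive listing): when only the replayer target is built, the prune deletes everything under /piglit except roughly the following.

# ./piglit*           the piglit entry points
# ./framework/...     the python framework
# ./bin/replayer.py   the replayer itself (other files in ./bin are removed)
# ./templates/...     result templates
# ./tests/replay.py   the replay profile (other profiles are removed)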
@@ -1,38 +0,0 @@
#!/bin/bash

# Note that this script is not actually "building" rust, but build- is the
# convention for the shared helpers for putting stuff in our containers.

set -ex

section_start rust "Building Rust toolchain"

# Pick a specific snapshot from rustup so the compiler doesn't drift on us.
RUST_VERSION=1.81.0-2024-09-05

# For rust in Mesa, we use rustup to install. This lets us pick an arbitrary
# version of the compiler, rather than whatever the container's Debian comes
# with.
curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \
    --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- \
    --default-toolchain $RUST_VERSION \
    --profile minimal \
    -y

# Make rustup tools available in the PATH environment variable
# shellcheck disable=SC1091
. "$HOME/.cargo/env"

rustup component add clippy rustfmt

# Set up a config script for cross compiling -- cargo needs your system cc for
# linking in cross builds, but doesn't know what you want to use for system cc.
cat > "$HOME/.cargo/config" <<EOF
[target.armv7-unknown-linux-gnueabihf]
linker = "arm-linux-gnueabihf-gcc"

[target.aarch64-unknown-linux-gnu]
linker = "aarch64-linux-gnu-gcc"
EOF

section_end rust
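An illustrative follow-up, not part of the script: the config above only tells cargo which linker to use per target; an actual cross build would also need the target's standard library installed, roughly like this.

rustup target add aarch64-unknown-linux-gnu
cargo build --target aarch64-unknown-linux-gnu   # links with aarch64-linux-gnu-gcc per the config above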
@@ -1,18 +0,0 @@
#!/usr/bin/env bash

# When changing this file, you need to bump the following
# .gitlab-ci/image-tags.yml tags:
# DEBIAN_BUILD_TAG

set -ex

uncollapsed_section_start shader-db "Building shader-db"

pushd /usr/local
git clone https://gitlab.freedesktop.org/mesa/shader-db.git --depth 1
rm -rf shader-db/.git
cd shader-db
make
popd

section_end shader-db
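A sketch of typical shader-db usage once it is in the container (the output paths are illustrative and not taken from the CI jobs):

pushd /usr/local/shader-db
./run shaders > /tmp/before.txt                # compile the shader corpus and record stats
./report.py /tmp/before.txt /tmp/after.txt     # compare two runs, e.g. before/after a compiler change
popd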
@@ -1,104 +0,0 @@
#!/usr/bin/env bash
# SPDX-License-Identifier: MIT
#
# Copyright © 2022 Collabora Limited
# Author: Guilherme Gallo <guilherme.gallo@collabora.com>
#
# When changing this file, you need to bump the following
# .gitlab-ci/image-tags.yml tags:
# DEBIAN_TEST_GL_TAG

set -uex

uncollapsed_section_start skqp "Building SkQP"

SKQP_BRANCH=android-cts-12.1_r5

SCRIPT_DIR="$(pwd)/.gitlab-ci/container"
SKQP_PATCH_DIR="${SCRIPT_DIR}/patches"
BASE_ARGS_GN_FILE="${SCRIPT_DIR}/build-skqp_base.gn"

case "$DEBIAN_ARCH" in
  amd64)
    SKQP_ARCH=x64
    ;;
  armhf)
    SKQP_ARCH=arm
    ;;
  arm64)
    SKQP_ARCH=arm64
    ;;
esac

SKIA_DIR=${SKIA_DIR:-$(mktemp -d)}
SKQP_OUT_DIR=${SKIA_DIR}/out/${SKQP_ARCH}
SKQP_INSTALL_DIR=${SKQP_INSTALL_DIR:-/skqp}
SKQP_ASSETS_DIR="${SKQP_INSTALL_DIR}/assets"
SKQP_BINARIES=(skqp list_gpu_unit_tests list_gms)

create_gn_args() {
  # gn can be configured to cross-compile skia and its tools.
  # Setting target_cpu is important to guarantee the build targets the
  # intended machine.
  cp "${BASE_ARGS_GN_FILE}" "${SKQP_OUT_DIR}"/args.gn
  echo "target_cpu = \"${SKQP_ARCH}\"" >> "${SKQP_OUT_DIR}"/args.gn
}
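# (Editor's note, illustrative only - not in the original script.)
# For an arm64 container the generated args.gn is simply the contents of
# build-skqp_base.gn with one line appended at the end:
#   target_cpu = "arm64"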
download_skia_source() {
  if [ -z ${SKIA_DIR+x} ]
  then
    return 1
  fi

  # Skia cloned from https://android.googlesource.com/platform/external/skqp
  # has all needed assets tracked on git-fs
  SKQP_REPO=https://android.googlesource.com/platform/external/skqp

  git clone --branch "${SKQP_BRANCH}" --depth 1 "${SKQP_REPO}" "${SKIA_DIR}"
}

download_skia_source

pushd "${SKIA_DIR}"

# Apply all skqp patches for Mesa CI
cat "${SKQP_PATCH_DIR}"/build-skqp_*.patch |
    patch -p1

# Hack for skqp: expose the versioned LLVM binaries as plain clang/clang++ so
# the skqp build can find them.
pushd /usr/bin/
ln -s "../lib/llvm-${LLVM_VERSION}/bin/clang" clang
ln -s "../lib/llvm-${LLVM_VERSION}/bin/clang++" clang++
popd

# Fetch the build tools needed to build skia/skqp.
# This clones the dependency repositories at the commit SHAs listed in ${SKIA_DIR}/DEPS.
python3 tools/git-sync-deps

mkdir -p "${SKQP_OUT_DIR}"
mkdir -p "${SKQP_INSTALL_DIR}"

create_gn_args

# Build and install skqp binaries
bin/gn gen "${SKQP_OUT_DIR}"

for BINARY in "${SKQP_BINARIES[@]}"
do
  /usr/bin/ninja -C "${SKQP_OUT_DIR}" "${BINARY}"
  # Strip binary, since gn is not stripping it even when `is_debug == false`
  ${STRIP_CMD:-strip} "${SKQP_OUT_DIR}/${BINARY}"
  install -m 0755 "${SKQP_OUT_DIR}/${BINARY}" "${SKQP_INSTALL_DIR}"
done

# Move assets to the target directory, which will reside in rootfs.
mv platform_tools/android/apps/skqp/src/main/assets/ "${SKQP_ASSETS_DIR}"

popd
rm -Rf "${SKIA_DIR}"

set +ex

section_end skqp
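One last illustrative note, not from the scripts: for a cross build the caller is expected to point ${STRIP_CMD} at the target's binutils strip, since the host strip cannot handle foreign-architecture binaries. A hypothetical invocation, with the script name assumed from the build-skqp_* naming used above and an arbitrary LLVM version:

DEBIAN_ARCH=arm64 LLVM_VERSION=15 STRIP_CMD=aarch64-linux-gnu-strip \
    .gitlab-ci/container/build-skqp.sh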
Some files were not shown because too many files have changed in this diff.