Compare commits


1 Commit

Author: Dylan Baker
SHA1: f8367fc41e
Message: VERSION: bump for 22.2.0-rc1
Date: 2022-08-03 11:11:03 -07:00
2902 changed files with 186954 additions and 331133 deletions


@@ -35,10 +35,7 @@ trim_trailing_whitespace = false
indent_style = space
indent_size = 2
[*.ps1]
indent_style = space
indent_size = 2
[*.rs]
indent_style = space
indent_size = 4

.github/workflows/ci.yml (vendored, new file, 39 lines)

@@ -0,0 +1,39 @@
name: CI
on: push
permissions:
contents: read
jobs:
CI:
runs-on: macos-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Install Dependencies
run: |
cat > Brewfile <<EOL
brew "bison"
brew "expat"
brew "gettext"
brew "libx11"
brew "libxcb"
brew "libxdamage"
brew "libxext"
brew "meson"
brew "pkg-config"
brew "python@3.10"
EOL
brew update
brew bundle --verbose
- name: Install Mako
run: pip3 install --user mako
- name: Configure
run: meson . build -Dbuild-tests=true -Dosmesa=true
- name: Build
run: meson compile -C build
- name: Test
run: meson test -C build --print-errorlogs
- name: Install
run: meson install -C build


@@ -1,59 +0,0 @@
name: macOS-CI
on: push
permissions:
contents: read
jobs:
macOS-CI:
strategy:
matrix:
glx_option: ['dri', 'xlib']
runs-on: macos-11
env:
GALLIUM_DUMP_CPU: true
MESON_EXEC: /Users/runner/Library/Python/3.11/bin/meson
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Install Dependencies
run: |
cat > Brewfile <<EOL
brew "bison"
brew "expat"
brew "gettext"
brew "libx11"
brew "libxcb"
brew "libxdamage"
brew "libxext"
brew "ninja"
brew "pkg-config"
brew "python@3.10"
EOL
brew update
brew bundle --verbose
- name: Install Mako and meson
run: pip3 install --user mako meson
- name: Configure
run: |
cat > native_config <<EOL
[binaries]
llvm-config = '/usr/local/opt/llvm/bin/llvm-config'
EOL
$MESON_EXEC . build --native-file=native_config -Dbuild-tests=true -Dosmesa=true -Dgallium-drivers=swrast -Dglx=${{ matrix.glx_option }}
- name: Build
run: $MESON_EXEC compile -C build
- name: Test
run: $MESON_EXEC test -C build --print-errorlogs
- name: Install
run: $MESON_EXEC install -C build --destdir $PWD/install
- name: 'Upload Artifact'
if: always()
uses: actions/upload-artifact@v3
with:
name: macos-${{ matrix.glx_option }}-result
path: |
build/meson-logs/
install/
retention-days: 5


@@ -1,6 +1,6 @@
variables:
FDO_UPSTREAM_REPO: mesa/mesa
MESA_TEMPLATES_COMMIT: &ci-templates-commit d5aa3941aa03c2f716595116354fb81eb8012acb
MESA_TEMPLATES_COMMIT: &ci-templates-commit 290b79e0e78eab67a83766f4e9691be554fc4afd
CI_PRE_CLONE_SCRIPT: |-
set -o xtrace
wget -q -O download-git-cache.sh ${CI_PROJECT_URL}/-/raw/${CI_COMMIT_SHA}/.gitlab-ci/download-git-cache.sh
@@ -8,7 +8,7 @@ variables:
rm download-git-cache.sh
set +o xtrace
CI_JOB_JWT_FILE: /minio_jwt
MINIO_HOST: s3.freedesktop.org
MINIO_HOST: minio-packet.freedesktop.org
# per-pipeline artifact storage on MinIO
PIPELINE_ARTIFACTS_BASE: ${MINIO_HOST}/artifacts/${CI_PROJECT_PATH}/${CI_PIPELINE_ID}
# per-job artifact storage on MinIO
@@ -22,7 +22,6 @@ variables:
MICROSOFT_FARM: "online"
LIMA_FARM: "online"
IGALIA_FARM: "online"
ANHOLT_FARM: "online"
default:
before_script:
@@ -47,7 +46,7 @@ default:
include:
- project: 'freedesktop/ci-templates'
ref: ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2
ref: 34f4ade99434043f88e164933f570301fd18b125
file:
- '/templates/ci-fairy.yml'
- project: 'freedesktop/ci-templates'
@@ -71,6 +70,7 @@ include:
- local: 'src/gallium/drivers/lima/ci/gitlab-ci.yml'
- local: 'src/gallium/drivers/llvmpipe/ci/gitlab-ci.yml'
- local: 'src/gallium/drivers/nouveau/ci/gitlab-ci.yml'
- local: 'src/gallium/drivers/radeonsi/ci/gitlab-ci.yml'
- local: 'src/gallium/drivers/softpipe/ci/gitlab-ci.yml'
- local: 'src/gallium/drivers/virgl/ci/gitlab-ci.yml'
- local: 'src/gallium/drivers/zink/ci/gitlab-ci.yml'
@@ -78,7 +78,6 @@ include:
- local: 'src/intel/ci/gitlab-ci.yml'
- local: 'src/microsoft/ci/gitlab-ci.yml'
- local: 'src/panfrost/ci/gitlab-ci.yml'
- local: 'src/virtio/ci/gitlab-ci.yml'
stages:
- sanity
@@ -86,7 +85,6 @@ stages:
- git-archive
- build-x86_64
- build-misc
- lint
- amd
- intel
- nouveau
@@ -132,7 +130,7 @@ stages:
- .build-rules
script:
- apk --no-cache add graphviz doxygen
- pip3 install sphinx===5.1.1 breathe===4.34.0 mako===1.2.3 sphinx_rtd_theme===1.0.0
- pip3 install sphinx breathe mako sphinx_rtd_theme
- docs/doxygen-wrapper.py --out-dir=docs/doxygen_xml
- sphinx-build -W -b html docs public
@@ -264,7 +262,9 @@ make git archive:
# compress the current folder
- tar -cvzf ../$CI_PROJECT_NAME.tar.gz .
- ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" ../$CI_PROJECT_NAME.tar.gz https://$MINIO_HOST/git-cache/$CI_PROJECT_NAMESPACE/$CI_PROJECT_NAME/$CI_PROJECT_NAME.tar.gz
# login with the JWT token file
- ci-fairy minio login --token-file "${CI_JOB_JWT_FILE}"
- ci-fairy minio cp ../$CI_PROJECT_NAME.tar.gz minio://$MINIO_HOST/git-cache/$CI_PROJECT_NAMESPACE/$CI_PROJECT_NAME/$CI_PROJECT_NAME.tar.gz
# Sanity checks of MR settings and commit logs


@@ -3,8 +3,9 @@ version: 1
# Rules to match for a machine to qualify
target:
{% if tags %}
{% set b2ctags = tags.split(',') %}
tags:
{% for tag in tags %}
{% for tag in b2ctags %}
- '{{ tag | trim }}'
{% endfor %}
{% endif %}


@@ -24,7 +24,6 @@
from jinja2 import Environment, FileSystemLoader
from argparse import ArgumentParser
from os import environ, path
import json
parser = ArgumentParser()
@@ -70,10 +69,7 @@ values['log_level'] = args.log_level
values['poweroff_delay'] = args.poweroff_delay
values['session_end_regex'] = args.session_end_regex
values['session_reboot_regex'] = args.session_reboot_regex
try:
values['tags'] = json.loads(args.tags)
except json.decoder.JSONDecodeError:
values['tags'] = args.tags.split(",")
values['tags'] = args.tags
values['template'] = args.template
values['timeout_boot_minutes'] = args.timeout_boot_minutes
values['timeout_boot_retries'] = args.timeout_boot_retries
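Taken together, this hunk and the template hunk above move tag parsing out of the Python generator and into the Jinja2 template: the generator now forwards args.tags as the raw comma-separated string, and b2c.yaml.jinja2 splits it with tags.split(','). A minimal sketch of that flow, with made-up tag values and assuming only the jinja2 package:

from jinja2 import Template

# The generator passes the raw comma-separated string through unchanged.
tags = "usbdev,amdgpu"

# The template now does the splitting itself, as in b2c.yaml.jinja2.
template = Template(
    "{% set b2ctags = tags.split(',') %}"
    "tags:\n"
    "{% for tag in b2ctags %}- '{{ tag | trim }}'\n{% endfor %}"
)
print(template.render(tags=tags))
# Prints:
# tags:
# - 'usbdev'
# - 'amdgpu'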


@@ -164,16 +164,19 @@ def main():
'--test-timeout', type=int, help='Test phase timeout (minutes)', required=True)
args = parser.parse_args()
servo = CrosServoRun(args.cpu, args.ec, args.test_timeout * 60)
while True:
servo = CrosServoRun(args.cpu, args.ec, args.test_timeout * 60)
retval = servo.run()
# power down the CPU on the device
servo.ec_write("power off\n")
servo.close()
if retval != 2:
sys.exit(retval)
break
# power down the CPU on the device
servo.ec_write("power off\n")
servo.close()
sys.exit(retval)
if __name__ == '__main__':
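The hunk above wraps the servo session in a retry loop: the scripts use a return value of 2 as a sentinel for a transient boot failure (see the matching "return 2" retry paths in the poe script later in this diff), so the runner powers the device off, tears the session down, and starts a fresh one until any other status comes back. A generic sketch of the pattern, with a hypothetical make_session factory standing in for CrosServoRun:

def run_with_retries(make_session):
    while True:
        session = make_session()           # fresh connection per attempt
        retval = session.run()
        session.ec_write("power off\n")    # always power the DUT back down
        session.close()
        if retval != 2:                    # 2 means "transient failure, retry"
            return retval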


@@ -106,25 +106,20 @@ if echo "$BM_KERNEL $BM_DTB" | grep -q http; then
wget $BM_DTB -O dtb
cat kernel dtb > Image.gz-dtb
rm kernel
rm kernel dtb
else
cat $BM_KERNEL $BM_DTB > Image.gz-dtb
cp $BM_DTB dtb
fi
export PATH=$BM:$PATH
mkdir -p artifacts
mkbootimg.py \
--kernel Image.gz-dtb \
--ramdisk rootfs.cpio.gz \
--dtb dtb \
--cmdline "$BM_CMDLINE" \
$BM_MKBOOT_PARAMS \
--header_version 2 \
-o artifacts/fastboot.img
abootimg \
--create artifacts/fastboot.img \
-k Image.gz-dtb \
-r rootfs.cpio.gz \
-c cmdline="$BM_CMDLINE"
rm Image.gz-dtb
rm Image.gz-dtb dtb
export PATH=$BM:$PATH
# Start background command for talking to serial if we have one.
if [ -n "$BM_SERIAL_SCRIPT" ]; then


@@ -1,569 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2015, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Creates the boot image."""
from argparse import (ArgumentParser, ArgumentTypeError,
FileType, RawDescriptionHelpFormatter)
from hashlib import sha1
from os import fstat
from struct import pack
import array
import collections
import os
import re
import subprocess
import tempfile
# Constant and structure definition is in
# system/tools/mkbootimg/include/bootimg/bootimg.h
BOOT_MAGIC = 'ANDROID!'
BOOT_MAGIC_SIZE = 8
BOOT_NAME_SIZE = 16
BOOT_ARGS_SIZE = 512
BOOT_EXTRA_ARGS_SIZE = 1024
BOOT_IMAGE_HEADER_V1_SIZE = 1648
BOOT_IMAGE_HEADER_V2_SIZE = 1660
BOOT_IMAGE_HEADER_V3_SIZE = 1580
BOOT_IMAGE_HEADER_V3_PAGESIZE = 4096
BOOT_IMAGE_HEADER_V4_SIZE = 1584
BOOT_IMAGE_V4_SIGNATURE_SIZE = 4096
VENDOR_BOOT_MAGIC = 'VNDRBOOT'
VENDOR_BOOT_MAGIC_SIZE = 8
VENDOR_BOOT_NAME_SIZE = BOOT_NAME_SIZE
VENDOR_BOOT_ARGS_SIZE = 2048
VENDOR_BOOT_IMAGE_HEADER_V3_SIZE = 2112
VENDOR_BOOT_IMAGE_HEADER_V4_SIZE = 2128
VENDOR_RAMDISK_TYPE_NONE = 0
VENDOR_RAMDISK_TYPE_PLATFORM = 1
VENDOR_RAMDISK_TYPE_RECOVERY = 2
VENDOR_RAMDISK_TYPE_DLKM = 3
VENDOR_RAMDISK_NAME_SIZE = 32
VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE = 16
VENDOR_RAMDISK_TABLE_ENTRY_V4_SIZE = 108
# Names with special meaning, mustn't be specified in --ramdisk_name.
VENDOR_RAMDISK_NAME_BLOCKLIST = {b'default'}
PARSER_ARGUMENT_VENDOR_RAMDISK_FRAGMENT = '--vendor_ramdisk_fragment'
def filesize(f):
if f is None:
return 0
try:
return fstat(f.fileno()).st_size
except OSError:
return 0
def update_sha(sha, f):
if f:
sha.update(f.read())
f.seek(0)
sha.update(pack('I', filesize(f)))
else:
sha.update(pack('I', 0))
def pad_file(f, padding):
pad = (padding - (f.tell() & (padding - 1))) & (padding - 1)
f.write(pack(str(pad) + 'x'))
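# Example: with f.tell() == 5000 and padding == 4096, pad is
# (4096 - (5000 & 4095)) & 4095 == 3192, so 3192 zero bytes are written to
# reach the next page boundary (this relies on padding being a power of two).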
def get_number_of_pages(image_size, page_size):
"""calculates the number of pages required for the image"""
return (image_size + page_size - 1) // page_size
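# Example: a 10000-byte image with 4096-byte pages needs
# (10000 + 4095) // 4096 == 3 pages (ceiling division).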
def get_recovery_dtbo_offset(args):
"""calculates the offset of recovery_dtbo image in the boot image"""
num_header_pages = 1 # header occupies a page
num_kernel_pages = get_number_of_pages(filesize(args.kernel), args.pagesize)
num_ramdisk_pages = get_number_of_pages(filesize(args.ramdisk),
args.pagesize)
num_second_pages = get_number_of_pages(filesize(args.second), args.pagesize)
dtbo_offset = args.pagesize * (num_header_pages + num_kernel_pages +
num_ramdisk_pages + num_second_pages)
return dtbo_offset
def write_header_v3_and_above(args):
if args.header_version > 3:
boot_header_size = BOOT_IMAGE_HEADER_V4_SIZE
else:
boot_header_size = BOOT_IMAGE_HEADER_V3_SIZE
args.output.write(pack(f'{BOOT_MAGIC_SIZE}s', BOOT_MAGIC.encode()))
# kernel size in bytes
args.output.write(pack('I', filesize(args.kernel)))
# ramdisk size in bytes
args.output.write(pack('I', filesize(args.ramdisk)))
# os version and patch level
args.output.write(pack('I', (args.os_version << 11) | args.os_patch_level))
args.output.write(pack('I', boot_header_size))
# reserved
args.output.write(pack('4I', 0, 0, 0, 0))
# version of boot image header
args.output.write(pack('I', args.header_version))
args.output.write(pack(f'{BOOT_ARGS_SIZE + BOOT_EXTRA_ARGS_SIZE}s',
args.cmdline))
if args.header_version >= 4:
# The signature used to verify boot image v4.
args.output.write(pack('I', BOOT_IMAGE_V4_SIGNATURE_SIZE))
pad_file(args.output, BOOT_IMAGE_HEADER_V3_PAGESIZE)
def write_vendor_boot_header(args):
if filesize(args.dtb) == 0:
raise ValueError('DTB image must not be empty.')
if args.header_version > 3:
vendor_ramdisk_size = args.vendor_ramdisk_total_size
vendor_boot_header_size = VENDOR_BOOT_IMAGE_HEADER_V4_SIZE
else:
vendor_ramdisk_size = filesize(args.vendor_ramdisk)
vendor_boot_header_size = VENDOR_BOOT_IMAGE_HEADER_V3_SIZE
args.vendor_boot.write(pack(f'{VENDOR_BOOT_MAGIC_SIZE}s',
VENDOR_BOOT_MAGIC.encode()))
# version of boot image header
args.vendor_boot.write(pack('I', args.header_version))
# flash page size
args.vendor_boot.write(pack('I', args.pagesize))
# kernel physical load address
args.vendor_boot.write(pack('I', args.base + args.kernel_offset))
# ramdisk physical load address
args.vendor_boot.write(pack('I', args.base + args.ramdisk_offset))
# ramdisk size in bytes
args.vendor_boot.write(pack('I', vendor_ramdisk_size))
args.vendor_boot.write(pack(f'{VENDOR_BOOT_ARGS_SIZE}s',
args.vendor_cmdline))
# kernel tags physical load address
args.vendor_boot.write(pack('I', args.base + args.tags_offset))
# asciiz product name
args.vendor_boot.write(pack(f'{VENDOR_BOOT_NAME_SIZE}s', args.board))
# header size in bytes
args.vendor_boot.write(pack('I', vendor_boot_header_size))
# dtb size in bytes
args.vendor_boot.write(pack('I', filesize(args.dtb)))
# dtb physical load address
args.vendor_boot.write(pack('Q', args.base + args.dtb_offset))
if args.header_version > 3:
vendor_ramdisk_table_size = (args.vendor_ramdisk_table_entry_num *
VENDOR_RAMDISK_TABLE_ENTRY_V4_SIZE)
# vendor ramdisk table size in bytes
args.vendor_boot.write(pack('I', vendor_ramdisk_table_size))
# number of vendor ramdisk table entries
args.vendor_boot.write(pack('I', args.vendor_ramdisk_table_entry_num))
# vendor ramdisk table entry size in bytes
args.vendor_boot.write(pack('I', VENDOR_RAMDISK_TABLE_ENTRY_V4_SIZE))
# bootconfig section size in bytes
args.vendor_boot.write(pack('I', filesize(args.vendor_bootconfig)))
pad_file(args.vendor_boot, args.pagesize)
def write_header(args):
if args.header_version > 4:
raise ValueError(
f'Boot header version {args.header_version} not supported')
if args.header_version in {3, 4}:
return write_header_v3_and_above(args)
ramdisk_load_address = ((args.base + args.ramdisk_offset)
if filesize(args.ramdisk) > 0 else 0)
second_load_address = ((args.base + args.second_offset)
if filesize(args.second) > 0 else 0)
args.output.write(pack(f'{BOOT_MAGIC_SIZE}s', BOOT_MAGIC.encode()))
# kernel size in bytes
args.output.write(pack('I', filesize(args.kernel)))
# kernel physical load address
args.output.write(pack('I', args.base + args.kernel_offset))
# ramdisk size in bytes
args.output.write(pack('I', filesize(args.ramdisk)))
# ramdisk physical load address
args.output.write(pack('I', ramdisk_load_address))
# second bootloader size in bytes
args.output.write(pack('I', filesize(args.second)))
# second bootloader physical load address
args.output.write(pack('I', second_load_address))
# kernel tags physical load address
args.output.write(pack('I', args.base + args.tags_offset))
# flash page size
args.output.write(pack('I', args.pagesize))
# version of boot image header
args.output.write(pack('I', args.header_version))
# os version and patch level
args.output.write(pack('I', (args.os_version << 11) | args.os_patch_level))
# asciiz product name
args.output.write(pack(f'{BOOT_NAME_SIZE}s', args.board))
args.output.write(pack(f'{BOOT_ARGS_SIZE}s', args.cmdline))
sha = sha1()
update_sha(sha, args.kernel)
update_sha(sha, args.ramdisk)
update_sha(sha, args.second)
if args.header_version > 0:
update_sha(sha, args.recovery_dtbo)
if args.header_version > 1:
update_sha(sha, args.dtb)
img_id = pack('32s', sha.digest())
args.output.write(img_id)
args.output.write(pack(f'{BOOT_EXTRA_ARGS_SIZE}s', args.extra_cmdline))
if args.header_version > 0:
if args.recovery_dtbo:
# recovery dtbo size in bytes
args.output.write(pack('I', filesize(args.recovery_dtbo)))
# recovery dtbo offset in the boot image
args.output.write(pack('Q', get_recovery_dtbo_offset(args)))
else:
# Set to zero if no recovery dtbo
args.output.write(pack('I', 0))
args.output.write(pack('Q', 0))
# Populate boot image header size for header versions 1 and 2.
if args.header_version == 1:
args.output.write(pack('I', BOOT_IMAGE_HEADER_V1_SIZE))
elif args.header_version == 2:
args.output.write(pack('I', BOOT_IMAGE_HEADER_V2_SIZE))
if args.header_version > 1:
if filesize(args.dtb) == 0:
raise ValueError('DTB image must not be empty.')
# dtb size in bytes
args.output.write(pack('I', filesize(args.dtb)))
# dtb physical load address
args.output.write(pack('Q', args.base + args.dtb_offset))
pad_file(args.output, args.pagesize)
return img_id
class AsciizBytes:
"""Parses a string and encodes it as an asciiz bytes object.
>>> AsciizBytes(bufsize=4)('foo')
b'foo\\x00'
>>> AsciizBytes(bufsize=4)('foob')
Traceback (most recent call last):
...
argparse.ArgumentTypeError: Encoded asciiz length exceeded: max 4, got 5
"""
def __init__(self, bufsize):
self.bufsize = bufsize
def __call__(self, arg):
arg_bytes = arg.encode() + b'\x00'
if len(arg_bytes) > self.bufsize:
raise ArgumentTypeError(
'Encoded asciiz length exceeded: '
f'max {self.bufsize}, got {len(arg_bytes)}')
return arg_bytes
class VendorRamdiskTableBuilder:
"""Vendor ramdisk table builder.
Attributes:
entries: A list of VendorRamdiskTableEntry namedtuple.
ramdisk_total_size: Total size in bytes of all ramdisks in the table.
"""
VendorRamdiskTableEntry = collections.namedtuple( # pylint: disable=invalid-name
'VendorRamdiskTableEntry',
['ramdisk_path', 'ramdisk_size', 'ramdisk_offset', 'ramdisk_type',
'ramdisk_name', 'board_id'])
def __init__(self):
self.entries = []
self.ramdisk_total_size = 0
self.ramdisk_names = set()
def add_entry(self, ramdisk_path, ramdisk_type, ramdisk_name, board_id):
# Strip any trailing null for simple comparison.
stripped_ramdisk_name = ramdisk_name.rstrip(b'\x00')
if stripped_ramdisk_name in VENDOR_RAMDISK_NAME_BLOCKLIST:
raise ValueError(
f'Banned vendor ramdisk name: {stripped_ramdisk_name}')
if stripped_ramdisk_name in self.ramdisk_names:
raise ValueError(
f'Duplicated vendor ramdisk name: {stripped_ramdisk_name}')
self.ramdisk_names.add(stripped_ramdisk_name)
if board_id is None:
board_id = array.array(
'I', [0] * VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE)
else:
board_id = array.array('I', board_id)
if len(board_id) != VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE:
raise ValueError('board_id size must be '
f'{VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE}')
with open(ramdisk_path, 'rb') as f:
ramdisk_size = filesize(f)
self.entries.append(self.VendorRamdiskTableEntry(
ramdisk_path, ramdisk_size, self.ramdisk_total_size, ramdisk_type,
ramdisk_name, board_id))
self.ramdisk_total_size += ramdisk_size
def write_ramdisks_padded(self, fout, alignment):
for entry in self.entries:
with open(entry.ramdisk_path, 'rb') as f:
fout.write(f.read())
pad_file(fout, alignment)
def write_entries_padded(self, fout, alignment):
for entry in self.entries:
fout.write(pack('I', entry.ramdisk_size))
fout.write(pack('I', entry.ramdisk_offset))
fout.write(pack('I', entry.ramdisk_type))
fout.write(pack(f'{VENDOR_RAMDISK_NAME_SIZE}s',
entry.ramdisk_name))
fout.write(entry.board_id)
pad_file(fout, alignment)
def write_padded_file(f_out, f_in, padding):
if f_in is None:
return
f_out.write(f_in.read())
pad_file(f_out, padding)
def parse_int(x):
return int(x, 0)
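# Example: parse_int('0x1000') == 4096; base 0 lets int() infer the base
# from a 0x/0o/0b prefix.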
def parse_os_version(x):
match = re.search(r'^(\d{1,3})(?:\.(\d{1,3})(?:\.(\d{1,3}))?)?', x)
if match:
a = int(match.group(1))
b = c = 0
if match.lastindex >= 2:
b = int(match.group(2))
if match.lastindex == 3:
c = int(match.group(3))
# 7 bits allocated for each field
assert a < 128
assert b < 128
assert c < 128
return (a << 14) | (b << 7) | c
return 0
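# Example: parse_os_version('12.0.0') packs a=12, b=0, c=0 as
# (12 << 14) | (0 << 7) | 0 == 196608.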
def parse_os_patch_level(x):
match = re.search(r'^(\d{4})-(\d{2})(?:-(\d{2}))?', x)
if match:
y = int(match.group(1)) - 2000
m = int(match.group(2))
# 7 bits allocated for the year, 4 bits for the month
assert 0 <= y < 128
assert 0 < m <= 12
return (y << 4) | m
return 0
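# Example: parse_os_patch_level('2022-08') packs y=22, m=8 as
# (22 << 4) | 8 == 360.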
def parse_vendor_ramdisk_type(x):
type_dict = {
'none': VENDOR_RAMDISK_TYPE_NONE,
'platform': VENDOR_RAMDISK_TYPE_PLATFORM,
'recovery': VENDOR_RAMDISK_TYPE_RECOVERY,
'dlkm': VENDOR_RAMDISK_TYPE_DLKM,
}
if x.lower() in type_dict:
return type_dict[x.lower()]
return parse_int(x)
def get_vendor_boot_v4_usage():
return """vendor boot version 4 arguments:
--ramdisk_type {none,platform,recovery,dlkm}
specify the type of the ramdisk
--ramdisk_name NAME
specify the name of the ramdisk
--board_id{0..15} NUMBER
specify the value of the board_id vector, defaults to 0
--vendor_ramdisk_fragment VENDOR_RAMDISK_FILE
path to the vendor ramdisk file
These options can be specified multiple times, where each vendor ramdisk
option group ends with a --vendor_ramdisk_fragment option.
Each option group appends an additional ramdisk to the vendor boot image.
"""
def parse_vendor_ramdisk_args(args, args_list):
"""Parses vendor ramdisk specific arguments.
Args:
args: An argparse.Namespace object. Parsed results are stored into this
object.
args_list: A list of argument strings to be parsed.
Returns:
A list of argument strings that are not parsed by this method.
"""
parser = ArgumentParser(add_help=False)
parser.add_argument('--ramdisk_type', type=parse_vendor_ramdisk_type,
default=VENDOR_RAMDISK_TYPE_NONE)
parser.add_argument('--ramdisk_name',
type=AsciizBytes(bufsize=VENDOR_RAMDISK_NAME_SIZE),
required=True)
for i in range(VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE):
parser.add_argument(f'--board_id{i}', type=parse_int, default=0)
parser.add_argument(PARSER_ARGUMENT_VENDOR_RAMDISK_FRAGMENT, required=True)
unknown_args = []
vendor_ramdisk_table_builder = VendorRamdiskTableBuilder()
if args.vendor_ramdisk is not None:
vendor_ramdisk_table_builder.add_entry(
args.vendor_ramdisk.name, VENDOR_RAMDISK_TYPE_PLATFORM, b'', None)
while PARSER_ARGUMENT_VENDOR_RAMDISK_FRAGMENT in args_list:
idx = args_list.index(PARSER_ARGUMENT_VENDOR_RAMDISK_FRAGMENT) + 2
vendor_ramdisk_args = args_list[:idx]
args_list = args_list[idx:]
ramdisk_args, extra_args = parser.parse_known_args(vendor_ramdisk_args)
ramdisk_args_dict = vars(ramdisk_args)
unknown_args.extend(extra_args)
ramdisk_path = ramdisk_args.vendor_ramdisk_fragment
ramdisk_type = ramdisk_args.ramdisk_type
ramdisk_name = ramdisk_args.ramdisk_name
board_id = [ramdisk_args_dict[f'board_id{i}']
for i in range(VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE)]
vendor_ramdisk_table_builder.add_entry(ramdisk_path, ramdisk_type,
ramdisk_name, board_id)
if len(args_list) > 0:
unknown_args.extend(args_list)
args.vendor_ramdisk_total_size = (vendor_ramdisk_table_builder
.ramdisk_total_size)
args.vendor_ramdisk_table_entry_num = len(vendor_ramdisk_table_builder
.entries)
args.vendor_ramdisk_table_builder = vendor_ramdisk_table_builder
return unknown_args
def parse_cmdline():
version_parser = ArgumentParser(add_help=False)
version_parser.add_argument('--header_version', type=parse_int, default=0)
if version_parser.parse_known_args()[0].header_version < 3:
# For boot header v0 to v2, the kernel commandline field is split into
# two fields, cmdline and extra_cmdline. Both fields are asciiz strings,
# so we subtract one here to ensure the encoded string plus the
# null-terminator can fit in the buffer size.
cmdline_size = BOOT_ARGS_SIZE + BOOT_EXTRA_ARGS_SIZE - 1
else:
cmdline_size = BOOT_ARGS_SIZE + BOOT_EXTRA_ARGS_SIZE
parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter,
epilog=get_vendor_boot_v4_usage())
parser.add_argument('--kernel', type=FileType('rb'),
help='path to the kernel')
parser.add_argument('--ramdisk', type=FileType('rb'),
help='path to the ramdisk')
parser.add_argument('--second', type=FileType('rb'),
help='path to the second bootloader')
parser.add_argument('--dtb', type=FileType('rb'), help='path to the dtb')
dtbo_group = parser.add_mutually_exclusive_group()
dtbo_group.add_argument('--recovery_dtbo', type=FileType('rb'),
help='path to the recovery DTBO')
dtbo_group.add_argument('--recovery_acpio', type=FileType('rb'),
metavar='RECOVERY_ACPIO', dest='recovery_dtbo',
help='path to the recovery ACPIO')
parser.add_argument('--cmdline', type=AsciizBytes(bufsize=cmdline_size),
default='', help='kernel command line arguments')
parser.add_argument('--vendor_cmdline',
type=AsciizBytes(bufsize=VENDOR_BOOT_ARGS_SIZE),
default='',
help='vendor boot kernel command line arguments')
parser.add_argument('--base', type=parse_int, default=0x10000000,
help='base address')
parser.add_argument('--kernel_offset', type=parse_int, default=0x00008000,
help='kernel offset')
parser.add_argument('--ramdisk_offset', type=parse_int, default=0x01000000,
help='ramdisk offset')
parser.add_argument('--second_offset', type=parse_int, default=0x00f00000,
help='second bootloader offset')
parser.add_argument('--dtb_offset', type=parse_int, default=0x01f00000,
help='dtb offset')
parser.add_argument('--os_version', type=parse_os_version, default=0,
help='operating system version')
parser.add_argument('--os_patch_level', type=parse_os_patch_level,
default=0, help='operating system patch level')
parser.add_argument('--tags_offset', type=parse_int, default=0x00000100,
help='tags offset')
parser.add_argument('--board', type=AsciizBytes(bufsize=BOOT_NAME_SIZE),
default='', help='board name')
parser.add_argument('--pagesize', type=parse_int,
choices=[2**i for i in range(11, 15)], default=2048,
help='page size')
parser.add_argument('--id', action='store_true',
help='print the image ID on standard output')
parser.add_argument('--header_version', type=parse_int, default=0,
help='boot image header version')
parser.add_argument('-o', '--output', type=FileType('wb'),
help='output file name')
parser.add_argument('--gki_signing_algorithm',
help='GKI signing algorithm to use')
parser.add_argument('--gki_signing_key',
help='path to RSA private key file')
parser.add_argument('--gki_signing_signature_args',
help='other hash arguments passed to avbtool')
parser.add_argument('--gki_signing_avbtool_path',
help='path to avbtool for boot signature generation')
parser.add_argument('--vendor_boot', type=FileType('wb'),
help='vendor boot output file name')
parser.add_argument('--vendor_ramdisk', type=FileType('rb'),
help='path to the vendor ramdisk')
parser.add_argument('--vendor_bootconfig', type=FileType('rb'),
help='path to the vendor bootconfig file')
args, extra_args = parser.parse_known_args()
if args.vendor_boot is not None and args.header_version > 3:
extra_args = parse_vendor_ramdisk_args(args, extra_args)
if len(extra_args) > 0:
raise ValueError(f'Unrecognized arguments: {extra_args}')
if args.header_version < 3:
args.extra_cmdline = args.cmdline[BOOT_ARGS_SIZE-1:]
args.cmdline = args.cmdline[:BOOT_ARGS_SIZE-1] + b'\x00'
assert len(args.cmdline) <= BOOT_ARGS_SIZE
assert len(args.extra_cmdline) <= BOOT_EXTRA_ARGS_SIZE
return args
def add_boot_image_signature(args, pagesize):
"""Adds the boot image signature.
Note that the signature will only be verified in VTS to ensure a
generic boot.img is used. It will not be used by the device
bootloader at boot time. The bootloader should only verify
the boot vbmeta at the end of the boot partition (or in the top-level
vbmeta partition) via the Android Verified Boot process, when the
device boots.
"""
args.output.flush() # Flush the buffer for signature calculation.
# Appends zeros if the signing key is not specified.
if not args.gki_signing_key or not args.gki_signing_algorithm:
zeros = b'\x00' * BOOT_IMAGE_V4_SIGNATURE_SIZE
args.output.write(zeros)
pad_file(args.output, pagesize)
return
avbtool = 'avbtool' # Used from otatools.zip or Android build env.
# We need to specify the path of avbtool in build/core/Makefile.
# Because avbtool is not guaranteed to be in $PATH there.
if args.gki_signing_avbtool_path:
avbtool = args.gki_signing_avbtool_path
# Need to specify a value of --partition_size for avbtool to work.
# We use 64 MB below, but avbtool will not resize the boot image to
# this size because --do_not_append_vbmeta_image is also specified.
avbtool_cmd = [
avbtool, 'add_hash_footer',
'--partition_name', 'boot',
'--partition_size', str(64 * 1024 * 1024),
'--image', args.output.name,
'--algorithm', args.gki_signing_algorithm,
'--key', args.gki_signing_key,
'--salt', 'd00df00d'] # TODO: use a hash of kernel/ramdisk as the salt.
# Additional arguments passed to avbtool.
if args.gki_signing_signature_args:
avbtool_cmd += args.gki_signing_signature_args.split()
# Outputs the signed vbmeta to a separate file, then append to boot.img
# as the boot signature.
with tempfile.TemporaryDirectory() as temp_out_dir:
boot_signature_output = os.path.join(temp_out_dir, 'boot_signature')
avbtool_cmd += ['--do_not_append_vbmeta_image',
'--output_vbmeta_image', boot_signature_output]
subprocess.check_call(avbtool_cmd)
with open(boot_signature_output, 'rb') as boot_signature:
if filesize(boot_signature) > BOOT_IMAGE_V4_SIGNATURE_SIZE:
raise ValueError(
f'boot signature size is > {BOOT_IMAGE_V4_SIGNATURE_SIZE}')
write_padded_file(args.output, boot_signature, pagesize)
def write_data(args, pagesize):
write_padded_file(args.output, args.kernel, pagesize)
write_padded_file(args.output, args.ramdisk, pagesize)
write_padded_file(args.output, args.second, pagesize)
if args.header_version > 0 and args.header_version < 3:
write_padded_file(args.output, args.recovery_dtbo, pagesize)
if args.header_version == 2:
write_padded_file(args.output, args.dtb, pagesize)
if args.header_version >= 4:
add_boot_image_signature(args, pagesize)
def write_vendor_boot_data(args):
if args.header_version > 3:
builder = args.vendor_ramdisk_table_builder
builder.write_ramdisks_padded(args.vendor_boot, args.pagesize)
write_padded_file(args.vendor_boot, args.dtb, args.pagesize)
builder.write_entries_padded(args.vendor_boot, args.pagesize)
write_padded_file(args.vendor_boot, args.vendor_bootconfig,
args.pagesize)
else:
write_padded_file(args.vendor_boot, args.vendor_ramdisk, args.pagesize)
write_padded_file(args.vendor_boot, args.dtb, args.pagesize)
def main():
args = parse_cmdline()
if args.vendor_boot is not None:
if args.header_version not in {3, 4}:
raise ValueError(
'--vendor_boot not compatible with given header version')
if args.header_version == 3 and args.vendor_ramdisk is None:
raise ValueError('--vendor_ramdisk missing or invalid')
write_vendor_boot_header(args)
write_vendor_boot_data(args)
if args.output is not None:
if args.second is not None and args.header_version > 2:
raise ValueError(
'--second not compatible with given header version')
img_id = write_header(args)
if args.header_version > 2:
write_data(args, BOOT_IMAGE_HEADER_V3_PAGESIZE)
else:
write_data(args, args.pagesize)
if args.id and img_id is not None:
print('0x' + ''.join(f'{octet:02x}' for octet in img_id))
if __name__ == '__main__':
main()


@@ -115,19 +115,6 @@ LABEL primary
APPEND \${cbootargs} $BM_CMDLINE
EOF
# Set up the pxelinux config for Jetson TK1
cat <<EOF >/tftp/pxelinux.cfg/default-arm-tegra124-jetson-tk1
PROMPT 0
TIMEOUT 30
DEFAULT primary
MENU TITLE jetson TK1 boot options
LABEL primary
MENU LABEL CI kernel on TFTP
LINUX zImage
FDT tegra124-jetson-tk1.dtb
APPEND \${cbootargs} $BM_CMDLINE
EOF
# Create the rootfs in the NFS directory
mkdir -p /nfs/results
. $BM/rootfs-setup.sh /nfs


@@ -74,11 +74,6 @@ class PoERun:
self.print_error("nouveau jetson boot bug, retrying.")
return 2
# network fail on tk1
if re.search("NETDEV WATCHDOG:.* transmit queue 0 timed out", line):
self.print_error("nouveau jetson tk1 network fail, retrying.")
return 2
result = re.search("hwci: mesa: (\S*)", line)
if result:
if result.group(1) == "pass":


@@ -1,2 +0,0 @@
schema.graphql
gitlab_gql.py.cache.db


@@ -4,6 +4,7 @@
# Tomeu Vizoso <tomeu.vizoso@collabora.com>
# David Heidelberg <david.heidelberg@collabora.com>
#
# TODO GraphQL for dependencies
# SPDX-License-Identifier: MIT
"""
@@ -11,19 +12,18 @@ Helper script to restrict running only required CI jobs
and show the job(s) logs.
"""
import argparse
import re
import sys
import time
from concurrent.futures import ThreadPoolExecutor
from functools import partial
from itertools import chain
from typing import Optional
from functools import partial
from concurrent.futures import ThreadPoolExecutor
import os
import re
import time
import argparse
import sys
import gitlab
from colorama import Fore, Style
from gitlab_common import get_gitlab_project, read_token, wait_for_pipeline
from gitlab_gql import GitlabGQL, create_job_needs_dag, filter_dag, print_dag
REFRESH_WAIT_LOG = 10
REFRESH_WAIT_JOBS = 6
@@ -42,9 +42,44 @@ STATUS_COLORS = {
"skipped": "",
}
# TODO: This hardcoded list should be replaced by querying the pipeline's
# dependency graph to see which jobs the target jobs need
DEPENDENCIES = [
"debian/x86_build-base",
"debian/x86_build",
"debian/x86_test-base",
"debian/x86_test-gl",
"debian/arm_build",
"debian/arm_test",
"kernel+rootfs_amd64",
"kernel+rootfs_arm64",
"kernel+rootfs_armhf",
"debian-testing",
"debian-arm64",
]
COMPLETED_STATUSES = ["success", "failed"]
def get_gitlab_project(glab, name: str):
"""Finds a specified gitlab project for given user"""
glab.auth()
username = glab.user.username
return glab.projects.get(f"{username}/mesa")
def wait_for_pipeline(project, sha: str):
"""await until pipeline appears in Gitlab"""
print("⏲ for the pipeline to appear..", end="")
while True:
pipelines = project.pipelines.list(sha=sha)
if pipelines:
print("", flush=True)
return pipelines[0]
print("", end=".", flush=True)
time.sleep(1)
def print_job_status(job) -> None:
"""It prints a nice, colored job status with a link to the job."""
if job.status == "canceled":
@@ -85,18 +120,15 @@ def pretty_wait(sec: int) -> None:
def monitor_pipeline(
project,
pipeline,
target_job: Optional[str],
dependencies,
force_manual: bool,
stress: bool,
project, pipeline, target_job: Optional[str], dependencies, force_manual: bool
) -> tuple[Optional[int], Optional[int]]:
"""Monitors pipeline and delegate canceling jobs"""
statuses = {}
target_statuses = {}
stress_succ = 0
stress_fail = 0
if not dependencies:
dependencies = []
dependencies.extend(DEPENDENCIES)
if target_job:
target_jobs_regex = re.compile(target_job.strip())
@@ -109,13 +141,6 @@ def monitor_pipeline(
if force_manual and job.status == "manual":
enable_job(project, job, True)
if stress and job.status in ["success", "failed"]:
if job.status == "success":
stress_succ += 1
if job.status == "failed":
stress_fail += 1
retry_job(project, job)
if (job.id not in target_statuses) or (
job.status not in target_statuses[job.id]
):
@@ -147,14 +172,6 @@ def monitor_pipeline(
if target_job:
cancel_jobs(project, to_cancel)
if stress:
print(
"∑ succ: " + str(stress_succ) + "; fail: " + str(stress_fail),
flush=False,
)
pretty_wait(REFRESH_WAIT_JOBS)
continue
print("---------------------------------", flush=False)
if len(target_statuses) == 1 and {"running"}.intersection(
@@ -182,14 +199,6 @@ def enable_job(project, job, target: bool) -> None:
print(Fore.MAGENTA + f"{jtype} job {job.name} manually enabled" + Style.RESET_ALL)
def retry_job(project, job) -> None:
"""retry job"""
pjob = project.jobs.get(job.id, lazy=True)
pjob.retry()
jtype = ""
print(Fore.MAGENTA + f"{jtype} job {job.name} retried" + Style.RESET_ALL)
def cancel_job(project, job) -> None:
"""Cancel GitLab job"""
pjob = project.jobs.get(job.id, lazy=True)
@@ -234,6 +243,7 @@ def parse_args() -> None:
+ '--target ".*traces" ',
)
parser.add_argument("--target", metavar="target-job", help="Target job")
parser.add_argument("--deps", nargs="+", help="Job dependencies")
parser.add_argument(
"--rev", metavar="revision", help="repository git revision", required=True
)
@@ -245,24 +255,19 @@ def parse_args() -> None:
parser.add_argument(
"--force-manual", action="store_true", help="Force jobs marked as manual"
)
parser.add_argument("--stress", action="store_true", help="Stresstest job(s)")
return parser.parse_args()
def find_dependencies(target_job: str, project_path: str, sha: str) -> set[str]:
gql_instance = GitlabGQL()
dag, _ = create_job_needs_dag(
gql_instance, {"projectPath": project_path.path_with_namespace, "sha": sha}
def read_token(token_arg: Optional[str]) -> str:
"""pick token from args or file"""
if token_arg:
return token_arg
return (
open(os.path.expanduser("~/.config/gitlab-token"), encoding="utf-8")
.readline()
.rstrip()
)
target_dep_dag = filter_dag(dag, target_job)
print(Fore.YELLOW)
print("Detected job dependencies:")
print()
print_dag(target_dep_dag)
print(Fore.RESET)
return set(chain.from_iterable(target_dep_dag.values()))
if __name__ == "__main__":
try:
@@ -279,14 +284,11 @@ if __name__ == "__main__":
print(f"Revision: {args.rev}")
pipe = wait_for_pipeline(cur_project, args.rev)
print(f"Pipeline: {pipe.web_url}")
deps = set()
if args.target:
print("🞋 job: " + Fore.BLUE + args.target + Style.RESET_ALL)
deps = find_dependencies(
target_job=args.target, sha=args.rev, project_path=cur_project
)
print(f"Extra dependencies: {args.deps}")
target_job_id, ret = monitor_pipeline(
cur_project, pipe, args.target, deps, args.force_manual, args.stress
cur_project, pipe, args.target, args.deps, args.force_manual
)
if target_job_id:


@@ -1,11 +0,0 @@
#!/bin/sh
# Helper script to download the schema GraphQL from Gitlab to enable IDEs to
# assist the developer to edit gql files
SOURCE_DIR=$(dirname "$(realpath "$0")")
(
cd $SOURCE_DIR || exit 1
gql-cli https://gitlab.freedesktop.org/api/graphql --print-schema > schema.graphql
)


@@ -1,42 +0,0 @@
#!/usr/bin/env python3
# Copyright © 2020 - 2022 Collabora Ltd.
# Authors:
# Tomeu Vizoso <tomeu.vizoso@collabora.com>
# David Heidelberg <david.heidelberg@collabora.com>
#
# SPDX-License-Identifier: MIT
'''Shared functions between the scripts.'''
import os
import time
from typing import Optional
def get_gitlab_project(glab, name: str):
"""Finds a specified gitlab project for given user"""
glab.auth()
username = glab.user.username
return glab.projects.get(f"{username}/mesa")
def read_token(token_arg: Optional[str]) -> str:
"""pick token from args or file"""
if token_arg:
return token_arg
return (
open(os.path.expanduser("~/.config/gitlab-token"), encoding="utf-8")
.readline()
.rstrip()
)
def wait_for_pipeline(project, sha: str):
"""await until pipeline appears in Gitlab"""
print("⏲ for the pipeline to appear..", end="")
while True:
pipelines = project.pipelines.list(sha=sha)
if pipelines:
print("", flush=True)
return pipelines[0]
print("", end=".", flush=True)
time.sleep(1)


@@ -1,303 +0,0 @@
#!/usr/bin/env python3
import re
from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser, Namespace
from dataclasses import dataclass, field
from os import getenv
from pathlib import Path
from typing import Any, Iterable, Optional, Pattern, Union
import yaml
from filecache import DAY, filecache
from gql import Client, gql
from gql.transport.aiohttp import AIOHTTPTransport
from graphql import DocumentNode
Dag = dict[str, list[str]]
TOKEN_DIR = Path(getenv("XDG_CONFIG_HOME") or Path.home() / ".config")
def get_token_from_default_dir() -> str:
try:
token_file = TOKEN_DIR / "gitlab-token"
return token_file.resolve(strict=True)  # strict=True raises FileNotFoundError if missing
except FileNotFoundError as ex:
print(
f"Could not find {token_file}, please provide a token file as an argument"
)
raise ex
def get_project_root_dir():
root_path = Path(__file__).parent.parent.parent.resolve()
gitlab_file = root_path / ".gitlab-ci.yml"
assert gitlab_file.exists()
return root_path
@dataclass
class GitlabGQL:
_transport: Any = field(init=False)
client: Client = field(init=False)
url: str = "https://gitlab.freedesktop.org/api/graphql"
token: Optional[str] = None
def __post_init__(self):
self._setup_gitlab_gql_client()
def _setup_gitlab_gql_client(self) -> Client:
# Select your transport with a defined url endpoint
headers = {}
if self.token:
headers["Authorization"] = f"Bearer {self.token}"
self._transport = AIOHTTPTransport(url=self.url, headers=headers)
# Create a GraphQL client using the defined transport
self.client = Client(
transport=self._transport, fetch_schema_from_transport=True
)
@filecache(DAY)
def query(
self, gql_file: Union[Path, str], params: dict[str, Any]
) -> dict[str, Any]:
# Provide a GraphQL query
source_path = Path(__file__).parent
pipeline_query_file = source_path / gql_file
query: DocumentNode
with open(pipeline_query_file, "r") as f:
pipeline_query = f.read()
query = gql(pipeline_query)
# Execute the query on the transport
return self.client.execute(query, variable_values=params)
def invalidate_query_cache(self):
self.query._db.clear()
def create_job_needs_dag(
gl_gql: GitlabGQL, params
) -> tuple[Dag, dict[str, dict[str, Any]]]:
result = gl_gql.query("pipeline_details.gql", params)
dag = {}
jobs = {}
pipeline = result["project"]["pipeline"]
if not pipeline:
raise RuntimeError(f"Could not find any pipelines for {params}")
for stage in pipeline["stages"]["nodes"]:
for stage_job in stage["groups"]["nodes"]:
for job in stage_job["jobs"]["nodes"]:
needs = job.pop("needs")["nodes"]
jobs[job["name"]] = job
dag[job["name"]] = {node["name"] for node in needs}
for job, needs in dag.items():
needs: set
partial = True
while partial:
next_depth = {n for dn in needs for n in dag[dn]}
partial = not needs.issuperset(next_depth)
needs = needs.union(next_depth)
dag[job] = needs
return dag, jobs
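# Example: with direct needs {"a": set(), "b": {"a"}, "c": {"b"}}, the
# fixed-point loop above expands "c" to {"a", "b"}, so every job ends up
# mapped to the full set of its transitive needs.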
def filter_dag(dag: Dag, regex: Pattern) -> Dag:
return {job: needs for job, needs in dag.items() if re.match(regex, job)}
def print_dag(dag: Dag) -> None:
for job, needs in dag.items():
print(f"{job}:")
print(f"\t{' '.join(needs)}")
print()
def fetch_merged_yaml(gl_gql: GitlabGQL, params) -> dict[str, Any]:
gitlab_yml_file = get_project_root_dir() / ".gitlab-ci.yml"
content = Path(gitlab_yml_file).read_text().strip()
params["content"] = content
raw_response = gl_gql.query("job_details.gql", params)
if merged_yaml := raw_response["ciConfig"]["mergedYaml"]:
return yaml.safe_load(merged_yaml)
gl_gql.invalidate_query_cache()
raise ValueError(
"""
Could not fetch any content for merged YAML,
please verify if the git SHA exists in remote.
Maybe you forgot to `git push`? """
)
def recursive_fill(job, relationship_field, target_data, acc_data: dict, merged_yaml):
if relatives := job.get(relationship_field):
if isinstance(relatives, str):
relatives = [relatives]
for relative in relatives:
parent_job = merged_yaml[relative]
acc_data = recursive_fill(parent_job, relationship_field, target_data, acc_data, merged_yaml)
acc_data |= job.get(target_data, {})
return acc_data
def get_variables(job, merged_yaml, project_path, sha) -> dict[str, str]:
p = get_project_root_dir() / ".gitlab-ci" / "image-tags.yml"
image_tags = yaml.safe_load(p.read_text())
variables = image_tags["variables"]
variables |= merged_yaml["variables"]
variables |= job["variables"]
variables["CI_PROJECT_PATH"] = project_path
variables["CI_PROJECT_NAME"] = project_path.split("/")[1]
variables["CI_REGISTRY_IMAGE"] = "registry.freedesktop.org/${CI_PROJECT_PATH}"
variables["CI_COMMIT_SHA"] = sha
while recurse_among_variables_space(variables):
pass
return variables
# Based on: https://stackoverflow.com/a/2158532/1079223
def flatten(xs):
for x in xs:
if isinstance(x, Iterable) and not isinstance(x, (str, bytes)):
yield from flatten(x)
else:
yield x
def get_full_script(job) -> list[str]:
script = []
for script_part in ("before_script", "script", "after_script"):
script.append(f"# {script_part}")
lines = flatten(job.get(script_part, []))
script.extend(lines)
script.append("")
return script
def recurse_among_variables_space(var_graph) -> bool:
updated = False
for var, value in var_graph.items():
value = str(value)
dep_vars = []
if match := re.findall(r"(\$[{]?[\w\d_]*[}]?)", value):
all_dep_vars = [v.lstrip("${").rstrip("}") for v in match]
# print(value, match, all_dep_vars)
dep_vars = [v for v in all_dep_vars if v in var_graph]
for dep_var in dep_vars:
dep_value = str(var_graph[dep_var])
new_value = var_graph[var]
new_value = new_value.replace(f"${{{dep_var}}}", dep_value)
new_value = new_value.replace(f"${dep_var}", dep_value)
var_graph[var] = new_value
updated |= dep_value != new_value
return updated
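# Example: one pass over {"A": "x", "B": "${A}/y"} rewrites "B" to "x/y"
# and returns True; the get_variables() caller loops until a pass changes
# nothing.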
def get_job_final_definiton(job_name, merged_yaml, project_path, sha):
job = merged_yaml[job_name]
variables = get_variables(job, merged_yaml, project_path, sha)
print("# --------- variables ---------------")
for var, value in sorted(variables.items()):
print(f"export {var}={value!r}")
# TODO: Recurse into needs to get full script
# TODO: maybe create a extra yaml file to avoid too much rework
script = get_full_script(job)
print()
print()
print("# --------- full script ---------------")
print("\n".join(script))
if image := variables.get("MESA_IMAGE"):
print()
print()
print("# --------- container image ---------------")
print(image)
def parse_args() -> Namespace:
parser = ArgumentParser(
formatter_class=ArgumentDefaultsHelpFormatter,
description="CLI and library with utility functions to debug jobs via Gitlab GraphQL",
epilog=f"""Example:
{Path(__file__).name} --rev $(git rev-parse HEAD) --print-job-dag""",
)
parser.add_argument("-pp", "--project-path", type=str, default="mesa/mesa")
parser.add_argument("--sha", "--rev", type=str, required=True)
parser.add_argument(
"--regex",
type=str,
required=False,
help="Regex pattern for the job name to be considered",
)
parser.add_argument("--print-dag", action="store_true", help="Print job needs DAG")
parser.add_argument(
"--print-merged-yaml",
action="store_true",
help="Print the resulting YAML for the specific SHA",
)
parser.add_argument(
"--print-job-manifest", type=str, help="Print the resulting job data"
)
parser.add_argument(
"--gitlab-token-file",
type=str,
default=get_token_from_default_dir(),
help="force GitLab token, otherwise it's read from $XDG_CONFIG_HOME/gitlab-token",
)
args = parser.parse_args()
args.gitlab_token = Path(args.gitlab_token_file).read_text()
return args
def main():
args = parse_args()
gl_gql = GitlabGQL(token=args.gitlab_token)
if args.print_dag:
dag, jobs = create_job_needs_dag(
gl_gql, {"projectPath": args.project_path, "sha": args.sha}
)
if args.regex:
dag = filter_dag(dag, re.compile(args.regex))
print_dag(dag)
if args.print_merged_yaml:
print(
fetch_merged_yaml(
gl_gql, {"projectPath": args.project_path, "sha": args.sha}
)
)
if args.print_job_manifest:
merged_yaml = fetch_merged_yaml(
gl_gql, {"projectPath": args.project_path, "sha": args.sha}
)
get_job_final_definiton(
args.print_job_manifest, merged_yaml, args.project_path, args.sha
)
if __name__ == "__main__":
main()


@@ -1,7 +0,0 @@
query getCiConfigData($projectPath: ID!, $sha: String, $content: String!) {
ciConfig(projectPath: $projectPath, sha: $sha, content: $content) {
errors
mergedYaml
__typename
}
}


@@ -1,86 +0,0 @@
fragment LinkedPipelineData on Pipeline {
id
iid
path
cancelable
retryable
userPermissions {
updatePipeline
}
status: detailedStatus {
id
group
label
icon
}
sourceJob {
id
name
}
project {
id
name
fullPath
}
}
query getPipelineDetails($projectPath: ID!, $sha: String!) {
project(fullPath: $projectPath) {
id
pipeline(sha: $sha) {
id
iid
complete
downstream {
nodes {
...LinkedPipelineData
}
}
upstream {
...LinkedPipelineData
}
stages {
nodes {
id
name
status: detailedStatus {
id
action {
id
icon
path
title
}
}
groups {
nodes {
id
status: detailedStatus {
id
label
group
icon
}
name
size
jobs {
nodes {
id
name
kind
scheduledAt
needs {
nodes {
id
name
}
}
}
}
}
}
}
}
}
}
}


@@ -1,8 +1,2 @@
aiohttp==3.8.1
colorama==0.4.5
filecache==0.81
gql==3.4.0
python-gitlab==3.5.0
PyYAML==6.0
ruamel.yaml.clib==0.2.6
ruamel.yaml==0.17.21


@@ -1,140 +0,0 @@
#!/usr/bin/env python3
# Copyright © 2022 Collabora Ltd.
# Authors:
# David Heidelberg <david.heidelberg@collabora.com>
#
# SPDX-License-Identifier: MIT
"""
Helper script to update traces checksums
"""
import argparse
import bz2
import glob
import re
import json
import sys
from ruamel.yaml import YAML
import gitlab
from gitlab_common import get_gitlab_project, read_token, wait_for_pipeline
DESCRIPTION_FILE = "export PIGLIT_REPLAY_DESCRIPTION_FILE='.*/install/(.*)'$"
DEVICE_NAME = "export PIGLIT_REPLAY_DEVICE_NAME='(.*)'$"
def gather_results(
project,
pipeline,
) -> None:
"""Gather results"""
target_jobs_regex = re.compile(".*-traces([:].*)?$")
for job in pipeline.jobs.list(all=True, sort="desc"):
if target_jobs_regex.match(job.name) and job.status == "failed":
cur_job = project.jobs.get(job.id)
# get variables
print(f"👁 Looking through logs for the device variable and traces.yml file in {job.name}...")
log = cur_job.trace().decode("unicode_escape").splitlines()
filename: str = ''
dev_name: str = ''
for logline in log:
desc_file = re.search(DESCRIPTION_FILE, logline)
device_name = re.search(DEVICE_NAME, logline)
if desc_file:
filename = desc_file.group(1)
if device_name:
dev_name = device_name.group(1)
if not filename or not dev_name:
print("! Couldn't find device name or YML file in the logs!")
return
print(f"👁 Found {dev_name} and file {filename}")
# find filename in Mesa source
traces_file = glob.glob('./**/' + filename, recursive=True)
# write into it
with open(traces_file[0], 'r', encoding='utf-8') as target_file:
yaml = YAML()
yaml.compact(seq_seq=False, seq_map=False)
yaml.version = 1,2
yaml.width = 2048 # do not break the text fields
yaml.default_flow_style = None
target = yaml.load(target_file)
# parse artifact
results_json_bz2 = cur_job.artifact(path="results/results.json.bz2", streamed=False)
results_json = bz2.decompress(results_json_bz2).decode("utf-8")
results = json.loads(results_json)
for _, value in results["tests"].items():
if (
not value['images'] or
not value['images'][0] or
"image_desc" not in value['images'][0]
):
continue
trace: str = value['images'][0]['image_desc']
checksum: str = value['images'][0]['checksum_render']
if not checksum:
print(f"Trace {trace} checksum is missing! Abort.")
continue
if checksum == "error":
print(f"Trace {trace} crashed")
continue
if target['traces'][trace][dev_name].get('checksum') == checksum:
continue
if "label" in target['traces'][trace][dev_name]:
print(f'{trace}: {dev_name}: has label: {target["traces"][trace][dev_name]["label"]}, is it still right?')
target['traces'][trace][dev_name]['checksum'] = checksum
with open(traces_file[0], 'w', encoding='utf-8') as target_file:
yaml.dump(target, target_file)
def parse_args() -> None:
"""Parse args"""
parser = argparse.ArgumentParser(
description="Tool to generate patch from checksums ",
epilog="Example: update_traces_checksum.py --rev $(git rev-parse HEAD) "
)
parser.add_argument(
"--rev", metavar="revision", help="repository git revision", required=True
)
parser.add_argument(
"--token",
metavar="token",
help="force GitLab token, otherwise it's read from ~/.config/gitlab-token",
)
return parser.parse_args()
if __name__ == "__main__":
try:
args = parse_args()
token = read_token(args.token)
gl = gitlab.Gitlab(url="https://gitlab.freedesktop.org", private_token=token)
cur_project = get_gitlab_project(gl, "mesa")
print(f"Revision: {args.rev}")
pipe = wait_for_pipeline(cur_project, args.rev)
print(f"Pipeline: {pipe.web_url}")
gather_results(cur_project, pipe)
sys.exit()
except KeyboardInterrupt:
sys.exit(1)


@@ -78,7 +78,7 @@ debian-testing:
-D dri3=enabled
-D gallium-va=enabled
GALLIUM_DRIVERS: "swrast,virgl,radeonsi,zink,crocus,iris,i915"
VULKAN_DRIVERS: "swrast,amd,intel,virtio-experimental"
VULKAN_DRIVERS: "swrast,amd,intel"
BUILDTYPE: "debugoptimized"
EXTRA_OPTION: >
-D spirv-to-dxil=true
@@ -86,6 +86,7 @@ debian-testing:
MINIO_ARTIFACT_NAME: mesa-amd64
LLVM_VERSION: "13"
script:
- .gitlab-ci/lava/lava-pytest.sh
- .gitlab-ci/meson/build.sh
- .gitlab-ci/prepare-artifacts.sh
artifacts:
@@ -123,17 +124,19 @@ debian-testing-msan:
GALLIUM_DRIVERS: "iris,nouveau,kmsro,r300,r600,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,radeonsi,tegra,d3d12,crocus"
VULKAN_DRIVERS: intel,amd,broadcom,virtio-experimental
.debian-cl-testing:
debian-clover-testing:
extends:
- .meson-build
- .ci-deqp-artifacts
variables:
LLVM_VERSION: "13"
UNWIND: "enabled"
DRI_LOADERS: >
-D glx=disabled
-D egl=disabled
-D gbm=disabled
GALLIUM_ST: >
-D gallium-opencl=icd
-D opencl-spirv=true
GALLIUM_DRIVERS: "swrast"
BUILDTYPE: "debugoptimized"
EXTRA_OPTION: >
@@ -142,23 +145,7 @@ debian-testing-msan:
- .gitlab-ci/meson/build.sh
- .gitlab-ci/prepare-artifacts.sh
debian-clover-testing:
extends:
- .debian-cl-testing
variables:
GALLIUM_ST: >
-D gallium-opencl=icd
-D opencl-spirv=true
debian-rusticl-testing:
extends:
- .debian-cl-testing
variables:
GALLIUM_ST: >
-D gallium-rusticl=true
-D opencl-spirv=true
debian-build-testing:
debian-gallium:
extends: .meson-build
variables:
UNWIND: "enabled"
@@ -171,22 +158,19 @@ debian-build-testing:
-D dri3=enabled
-D gallium-extra-hud=true
-D gallium-vdpau=enabled
-D gallium-xvmc=enabled
-D gallium-omx=bellagio
-D gallium-va=enabled
-D gallium-xa=enabled
-D gallium-nine=true
-D gallium-opencl=disabled
-D gallium-rusticl=false
GALLIUM_DRIVERS: "iris,nouveau,kmsro,r300,r600,freedreno,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,d3d12,asahi,crocus"
VULKAN_DRIVERS: swrast
EXTRA_OPTION: >
-D spirv-to-dxil=true
-D osmesa=true
-D tools=drm-shim,etnaviv,freedreno,glsl,intel,intel-ui,nir,nouveau,lima,panfrost,asahi
-D tools=drm-shim,etnaviv,freedreno,glsl,intel,intel-ui,nir,nouveau,xvmc,lima,panfrost,asahi
script:
- .gitlab-ci/lava/lava-pytest.sh
- .gitlab-ci/run-shellcheck.sh
- .gitlab-ci/run-yamllint.sh
- .gitlab-ci/meson/build.sh
- .gitlab-ci/run-shader-db.sh
@@ -194,7 +178,6 @@ debian-build-testing:
debian-release:
extends: .meson-build
variables:
LLVM_VERSION: "13"
UNWIND: "enabled"
DRI_LOADERS: >
-D glx=dri
@@ -205,12 +188,12 @@ debian-release:
-D dri3=enabled
-D gallium-extra-hud=true
-D gallium-vdpau=enabled
-D gallium-xvmc=disabled
-D gallium-omx=disabled
-D gallium-va=enabled
-D gallium-xa=enabled
-D gallium-nine=false
-D gallium-opencl=disabled
-D gallium-rusticl=false
-D llvm=enabled
GALLIUM_DRIVERS: "i915,iris,nouveau,kmsro,freedreno,r300,svga,swrast,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,d3d12,crocus"
VULKAN_DRIVERS: "amd,imagination-experimental,microsoft-experimental"
@@ -242,30 +225,29 @@ fedora-release:
-D egl=enabled
-D glvnd=true
-D platforms=x11,wayland
# intel-clc disabled, we need llvm-spirv-translator 13.0+, Fedora 34 only packages 12.0.
EXTRA_OPTION: >
-D osmesa=true
-D selinux=true
-D tools=drm-shim,etnaviv,freedreno,glsl,intel,nir,nouveau,lima,panfrost,imagination
-D vulkan-layers=device-select,overlay
-D intel-clc=disabled
-D intel-clc=enabled
-D imagination-srv=true
GALLIUM_DRIVERS: "crocus,etnaviv,freedreno,iris,kmsro,lima,nouveau,panfrost,r300,r600,radeonsi,svga,swrast,tegra,v3d,vc4,virgl,zink"
GALLIUM_ST: >
-D dri3=enabled
-D gallium-extra-hud=true
-D gallium-vdpau=enabled
-D gallium-xvmc=disabled
-D gallium-omx=disabled
-D gallium-va=enabled
-D gallium-xa=enabled
-D gallium-nine=false
-D gallium-opencl=icd
-D gallium-rusticl=false
-D gles1=disabled
-D gles2=enabled
-D llvm=enabled
-D microsoft-clc=disabled
-D shared-llvm=enabled
-D vulkan-device-select-layer=true
LLVM_VERSION: ""
UNWIND: "disabled"
VULKAN_DRIVERS: "amd,broadcom,freedreno,intel,imagination-experimental"
@@ -301,12 +283,12 @@ debian-android:
GALLIUM_ST: >
-D dri3=disabled
-D gallium-vdpau=disabled
-D gallium-xvmc=disabled
-D gallium-omx=disabled
-D gallium-va=disabled
-D gallium-xa=disabled
-D gallium-nine=false
-D gallium-opencl=disabled
-D gallium-rusticl=false
LLVM_VERSION: ""
PKG_CONFIG_LIBDIR: "/disable/non/android/system/pc/files"
script:
@@ -333,6 +315,7 @@ debian-android:
GALLIUM_ST: >
-D dri3=enabled
-D gallium-vdpau=disabled
-D gallium-xvmc=disabled
-D gallium-omx=disabled
-D gallium-va=disabled
-D gallium-xa=disabled
@@ -384,6 +367,8 @@ debian-arm64-asan:
extends:
- debian-arm64
variables:
C_ARGS: >
-Wno-error=stringop-truncation
EXTRA_OPTION: >
-D llvm=disabled
-D b_sanitize=address
@@ -407,65 +392,33 @@ debian-arm64-build-test:
debian-clang:
extends: .meson-build
variables:
LLVM_VERSION: "13"
UNWIND: "enabled"
GALLIUM_DUMP_CPU: "true"
C_ARGS: >
-Wno-error=constant-conversion
-Wno-error=enum-conversion
-Wno-error=implicit-const-int-float-conversion
-Wno-error=initializer-overrides
-Wno-error=sometimes-uninitialized
-Wno-error=unused-function
CPP_ARGS: >
-Wno-error=c99-designator
-Wno-error=deprecated-declarations
-Wno-error=implicit-const-int-float-conversion
-Wno-error=missing-braces
-Wno-error=overloaded-virtual
-Wno-error=tautological-constant-out-of-range-compare
-Wno-error=unused-const-variable
-Wno-error=unused-private-field
DRI_LOADERS: >
-D glx=dri
-D gbm=enabled
-D egl=enabled
-D glvnd=true
-D platforms=x11,wayland
GALLIUM_ST: >
-D dri3=enabled
-D gallium-extra-hud=true
-D gallium-vdpau=enabled
-D gallium-omx=bellagio
-D gallium-va=enabled
-D gallium-xa=enabled
-D gallium-nine=true
-D gallium-opencl=icd
-D gles1=enabled
-D gles2=enabled
-D llvm=enabled
-D microsoft-clc=enabled
-D shared-llvm=enabled
-D opencl-spirv=true
GALLIUM_DRIVERS: "iris,nouveau,kmsro,r300,r600,freedreno,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,radeonsi,tegra,d3d12,crocus,i915,asahi"
VULKAN_DRIVERS: intel,amd,freedreno,broadcom,virtio-experimental,swrast,panfrost,imagination-experimental,microsoft-experimental
EXTRA_OPTION:
EXTRA_OPTIONS:
-D spirv-to-dxil=true
-D osmesa=true
-D imagination-srv=true
-D tools=drm-shim,etnaviv,freedreno,glsl,intel,intel-ui,nir,nouveau,lima,panfrost,asahi,imagination
-D vulkan-layers=device-select,overlay
-D build-aco-tests=true
-D intel-clc=enabled
-D imagination-srv=true
CC: clang
CXX: clang++
debian-clang-release:
extends: debian-clang
variables:
BUILDTYPE: "release"
DRI_LOADERS: >
-D glx=xlib
-D platforms=x11,wayland
windows-vs2019:
extends:
- .build-windows
@@ -479,50 +432,33 @@ windows-vs2019:
- _build/meson-logs/*.txt
- _install/
.debian-cl:
debian-clover:
extends: .meson-build
variables:
LLVM_VERSION: "13"
UNWIND: "enabled"
DRI_LOADERS: >
-D glx=disabled
-D egl=disabled
-D gbm=disabled
EXTRA_OPTION: >
-D valgrind=false
debian-clover:
extends: .debian-cl
variables:
GALLIUM_DRIVERS: "r600,radeonsi,swrast"
GALLIUM_DRIVERS: "r600,radeonsi"
GALLIUM_ST: >
-D dri3=disabled
-D gallium-vdpau=disabled
-D gallium-xvmc=disabled
-D gallium-omx=disabled
-D gallium-va=disabled
-D gallium-xa=disabled
-D gallium-nine=false
-D gallium-opencl=icd
-D gallium-rusticl=false
debian-rusticl:
extends: .debian-cl
variables:
GALLIUM_DRIVERS: "iris,swrast"
GALLIUM_ST: >
-D dri3=disabled
-D gallium-vdpau=disabled
-D gallium-omx=disabled
-D gallium-va=disabled
-D gallium-xa=disabled
-D gallium-nine=false
-D gallium-opencl=disabled
-D gallium-rusticl=true
EXTRA_OPTION: >
-D valgrind=false
script:
- LLVM_VERSION=9 GALLIUM_DRIVERS=r600,swrast .gitlab-ci/meson/build.sh
- .gitlab-ci/meson/build.sh
debian-vulkan:
extends: .meson-build
variables:
LLVM_VERSION: "13"
UNWIND: "disabled"
DRI_LOADERS: >
-D glx=disabled
@@ -533,12 +469,12 @@ debian-vulkan:
GALLIUM_ST: >
-D dri3=enabled
-D gallium-vdpau=disabled
-D gallium-xvmc=disabled
-D gallium-omx=disabled
-D gallium-va=disabled
-D gallium-xa=disabled
-D gallium-nine=false
-D gallium-opencl=disabled
-D gallium-rusticl=false
-D b_sanitize=undefined
-D c_args=-fno-sanitize-recover=all
-D cpp_args=-fno-sanitize-recover=all
@@ -547,7 +483,7 @@ debian-vulkan:
EXTRA_OPTION: >
-D vulkan-layers=device-select,overlay
-D build-aco-tests=true
-D intel-clc=disabled
-D intel-clc=enabled
-D imagination-srv=true
debian-i386:
@@ -558,7 +494,6 @@ debian-i386:
CROSS: i386
VULKAN_DRIVERS: intel,amd,swrast,virtio-experimental
GALLIUM_DRIVERS: "iris,nouveau,r300,r600,radeonsi,swrast,virgl,zink,crocus"
LLVM_VERSION: 13
EXTRA_OPTION: >
-D vulkan-layers=device-select,overlay
@@ -572,7 +507,8 @@ debian-s390x:
variables:
CROSS: s390x
GALLIUM_DRIVERS: "swrast,zink"
LLVM_VERSION: 13
# The lp_test_blend test times out with LLVM 11
LLVM_VERSION: 9
VULKAN_DRIVERS: "swrast"
debian-ppc64el:
@@ -609,15 +545,11 @@ debian-mingw32-x86_64:
VULKAN_DRIVERS: "swrast,amd,microsoft-experimental"
GALLIUM_ST: >
-D gallium-opencl=icd
-D gallium-rusticl=false
-D opencl-spirv=true
-D microsoft-clc=enabled
-D static-libclc=all
-D llvm=enabled
-D gallium-va=true
-D video-codecs=h264dec,h264enc,h265dec,h265enc,vc1dec
EXTRA_OPTION: >
-D min-windows-version=7
-D spirv-to-dxil=true
-D gles1=enabled
-D gles2=enabled
View File
@@ -111,7 +111,6 @@ for var in \
SKQP_BACKENDS \
TU_DEBUG \
VIRGL_HOST_API \
WAFFLE_PLATFORM \
VK_CPU \
VK_DRIVER \
VK_ICD_FILENAMES \
View File
@@ -149,8 +149,9 @@ cleanup
# upload artifacts
if [ -n "$MINIO_RESULTS_UPLOAD" ]; then
tar --zstd -cf results.tar.zst results/;
ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" results.tar.zst https://"$MINIO_RESULTS_UPLOAD"/results.tar.zst;
tar -czf results.tar.gz results/;
ci-fairy minio login --token-file "${CI_JOB_JWT_FILE}";
ci-fairy minio cp results.tar.gz minio://"$MINIO_RESULTS_UPLOAD"/results.tar.gz;
fi
# We still need to echo the hwci: mesa message, as some scripts rely on it, such
View File
@@ -55,9 +55,3 @@ CONFIG_USB_NET_DRIVERS=y
CONFIG_USB_RTL8152=y
CONFIG_USB_NET_AX8817X=y
CONFIG_USB_NET_SMSC95XX=y
# TK1
CONFIG_ARM_TEGRA_DEVFREQ=y
# 32-bit build failure
CONFIG_DRM_MSM=n
View File
@@ -16,7 +16,6 @@ CONFIG_DRM_LIMA=y
CONFIG_DRM_PANEL_SIMPLE=y
CONFIG_DRM_PANEL_EDP=y
CONFIG_DRM_MSM=y
CONFIG_DRM_ETNAVIV=y
CONFIG_DRM_I2C_ADV7511=y
CONFIG_PWM_CROS_EC=y
CONFIG_BACKLIGHT_PWM=y
View File
@@ -6,34 +6,32 @@ set -o xtrace
# Fetch the arm-built rootfs image and unpack it in our x86 container (saves
# network transfer, disk usage, and runtime on test jobs)
# shellcheck disable=SC2154 # arch is assigned in previous scripts
if wget -q --method=HEAD "${ARTIFACTS_PREFIX}/${FDO_UPSTREAM_REPO}/${ARTIFACTS_SUFFIX}/${arch}/done"; then
ARTIFACTS_URL="${ARTIFACTS_PREFIX}/${FDO_UPSTREAM_REPO}/${ARTIFACTS_SUFFIX}/${arch}"
else
ARTIFACTS_URL="${ARTIFACTS_PREFIX}/${CI_PROJECT_PATH}/${ARTIFACTS_SUFFIX}/${arch}"
fi
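The HEAD probe above implements an upstream-first fallback: artifacts are reused from the upstream repo when it has published a "done" marker for this tag, and only otherwise fetched from the fork's own path. The check can be reproduced by hand, assuming the same URL layout:

  # placeholder values, for illustration only
  arch=arm64
  if wget -q --method=HEAD "${ARTIFACTS_PREFIX}/${FDO_UPSTREAM_REPO}/${ARTIFACTS_SUFFIX}/${arch}/done"; then
    echo "upstream artifacts available"
  fi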
wget "${ARTIFACTS_URL}"/lava-rootfs.tar.zst -O rootfs.tar.zst
mkdir -p /rootfs-"$arch"
tar -C /rootfs-"$arch" '--exclude=./dev/*' --zstd -xf rootfs.tar.zst
rm rootfs.tar.zst
wget ${ARTIFACTS_URL}/lava-rootfs.tgz -O rootfs.tgz
mkdir -p /rootfs-$arch
tar -C /rootfs-$arch '--exclude=./dev/*' -zxf rootfs.tgz
rm rootfs.tgz
if [[ $arch == "arm64" ]]; then
mkdir -p /baremetal-files
pushd /baremetal-files
wget "${ARTIFACTS_URL}"/Image
wget "${ARTIFACTS_URL}"/Image.gz
wget "${ARTIFACTS_URL}"/cheza-kernel
wget ${ARTIFACTS_URL}/Image
wget ${ARTIFACTS_URL}/Image.gz
wget ${ARTIFACTS_URL}/cheza-kernel
DEVICE_TREES=""
DEVICE_TREES="$DEVICE_TREES apq8016-sbc.dtb"
DEVICE_TREES="$DEVICE_TREES apq8096-db820c.dtb"
DEVICE_TREES="$DEVICE_TREES tegra210-p3450-0000.dtb"
DEVICE_TREES="$DEVICE_TREES imx8mq-nitrogen.dtb"
for DTB in $DEVICE_TREES; do
wget "${ARTIFACTS_URL}/$DTB"
wget ${ARTIFACTS_URL}/$DTB
done
popd
@@ -41,14 +39,12 @@ elif [[ $arch == "armhf" ]]; then
mkdir -p /baremetal-files
pushd /baremetal-files
wget "${ARTIFACTS_URL}"/zImage
wget ${ARTIFACTS_URL}/zImage
DEVICE_TREES=""
DEVICE_TREES="$DEVICE_TREES imx6q-cubox-i.dtb"
DEVICE_TREES="$DEVICE_TREES tegra124-jetson-tk1.dtb"
DEVICE_TREES="imx6q-cubox-i.dtb"
for DTB in $DEVICE_TREES; do
wget "${ARTIFACTS_URL}/$DTB"
wget ${ARTIFACTS_URL}/$DTB
done
popd
View File
@@ -1,5 +1,4 @@
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting
set -ex
View File
@@ -1,23 +1,24 @@
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting
set -ex
git config --global user.email "mesa@example.com"
git config --global user.name "Mesa CI"
SCRIPT_DIR="$(pwd)"
CROSVM_VERSION=acd262cb42111c53b580a67355e795775545cced
git clone --single-branch -b main --no-checkout https://chromium.googlesource.com/crosvm/crosvm /platform/crosvm
CROSVM_VERSION=c7cd0e0114c8363b884ba56d8e12adee718dcc93
git clone --single-branch -b main --no-checkout https://chromium.googlesource.com/chromiumos/platform/crosvm /platform/crosvm
pushd /platform/crosvm
git checkout "$CROSVM_VERSION"
git submodule update --init
# Apply all crosvm patches for Mesa CI
cat "$SCRIPT_DIR"/.gitlab-ci/container/build-crosvm_*.patch |
patch -p1
VIRGLRENDERER_VERSION=3c5a9bbb7464e0e91e446991055300f4f989f6a9
VIRGLRENDERER_VERSION=dd301caf7e05ec9c09634fb7872067542aad89b7
rm -rf third_party/virglrenderer
git clone --single-branch -b master --no-checkout https://gitlab.freedesktop.org/virgl/virglrenderer.git third_party/virglrenderer
pushd third_party/virglrenderer
git checkout "$VIRGLRENDERER_VERSION"
meson build/ -Drender-server=true -Drender-server-worker=process -Dvenus-experimental=true $EXTRA_MESON_ARGS
meson build/ $EXTRA_MESON_ARGS
ninja -C build install
popd
@@ -25,7 +26,6 @@ RUSTFLAGS='-L native=/usr/local/lib' cargo install \
bindgen \
-j ${FDO_CI_CONCURRENT:-4} \
--root /usr/local \
--version 0.60.1 \
$EXTRA_CARGO_ARGS
RUSTFLAGS='-L native=/usr/local/lib' cargo install \
View File
@@ -0,0 +1,43 @@
From 3c57ec558bccc67fd53363c23deea20646be5c47 Mon Sep 17 00:00:00 2001
From: Tomeu Vizoso <tomeu.vizoso@collabora.com>
Date: Wed, 17 Nov 2021 10:18:04 +0100
Subject: [PATCH] Hack syslog out
It's causing stability problems when running several Crosvm instances in
parallel.
Signed-off-by: Tomeu Vizoso <tomeu.vizoso@collabora.com>
---
base/src/unix/linux/syslog.rs | 2 +-
common/sys_util/src/linux/syslog.rs | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/base/src/unix/linux/syslog.rs b/base/src/unix/linux/syslog.rs
index 05972a3a..f0db3781 100644
--- a/base/src/unix/linux/syslog.rs
+++ b/base/src/unix/linux/syslog.rs
@@ -35,7 +35,7 @@ pub struct PlatformSyslog {
impl Syslog for PlatformSyslog {
fn new() -> Result<Self, Error> {
Ok(Self {
- socket: Some(openlog_and_get_socket()?),
+ socket: None,
})
}
diff --git a/common/sys_util/src/linux/syslog.rs b/common/sys_util/src/linux/syslog.rs
index 05972a3a..f0db3781 100644
--- a/common/sys_util/src/linux/syslog.rs
+++ b/common/sys_util/src/linux/syslog.rs
@@ -35,7 +35,7 @@ pub struct PlatformSyslog {
impl Syslog for PlatformSyslog {
fn new() -> Result<Self, Error> {
Ok(Self {
- socket: Some(openlog_and_get_socket()?),
+ socket: None,
})
}
--
2.25.1
View File
@@ -1,5 +1,4 @@
#!/bin/sh
# shellcheck disable=SC2086 # we want word splitting
set -ex
@@ -16,16 +15,10 @@ if [ -n "${DEQP_RUNNER_GIT_TAG}${DEQP_RUNNER_GIT_REV}" ]; then
DEQP_RUNNER_CARGO_ARGS="${DEQP_RUNNER_CARGO_ARGS} ${EXTRA_CARGO_ARGS}"
else
# Install from package registry
DEQP_RUNNER_CARGO_ARGS="--version 0.15.0 ${EXTRA_CARGO_ARGS} -- deqp-runner"
DEQP_RUNNER_CARGO_ARGS="--version 0.13.1 ${EXTRA_CARGO_ARGS} -- deqp-runner"
fi
cargo install --locked \
-j ${FDO_CI_CONCURRENT:-4} \
--root /usr/local \
${DEQP_RUNNER_CARGO_ARGS}
# remove unused test runners to shrink images for the Mesa CI build (not kernel,
# which chooses its own deqp branch)
if [ -z "${DEQP_RUNNER_GIT_TAG}${DEQP_RUNNER_GIT_REV}" ]; then
rm -f /usr/local/bin/igt-runner
fi
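Either install path pins deqp-runner to a known version; a quick sanity check after the install, assuming the usual clap-provided version flag:

  # confirm the pinned binary landed in /usr/local/bin
  deqp-runner --version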
View File
@@ -1,5 +1,4 @@
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting
set -ex
@@ -12,13 +11,6 @@ git clone \
/VK-GL-CTS
pushd /VK-GL-CTS
# Apply a patch to update zlib link to an available version.
# vulkan-cts-1.3.3.0 uses zlib 1.2.12, which was removed from the zlib server
# due to a CVE. See https://zlib.net/
# FIXME: Remove this patch when uprevving to 1.3.4.0+
wget -O- https://github.com/KhronosGroup/VK-GL-CTS/commit/6bb2e7d64261bedb503947b1b251b1eeeb49be73.patch |
git am -
# --insecure is due to SSL cert failures hitting sourceforge for zlib and
# libpng (sigh). The archives get their checksums checked anyway, and git
# always goes through ssh or https.
@@ -68,9 +60,6 @@ cp \
cp \
/deqp/external/openglcts/modules/gl_cts/data/mustpass/gl/khronos_mustpass/4.6.1.x/*-master.txt \
/deqp/mustpass/.
cp \
/deqp/external/openglcts/modules/gl_cts/data/mustpass/gl/khronos_mustpass_single/4.6.1.x/*-single.txt \
/deqp/mustpass/.
# Save *some* executor utils, but otherwise strip things down
# to reduce the deqp build size:
@@ -88,11 +77,10 @@ rm -rf /deqp/external/openglcts/modules/cts-runner
rm -rf /deqp/modules/internal
rm -rf /deqp/execserver
rm -rf /deqp/framework
# shellcheck disable=SC2038,SC2185 # TODO: rewrite find
find -iname '*cmake*' -o -name '*ninja*' -o -name '*.o' -o -name '*.a' | xargs rm -rf
${STRIP_CMD:-strip} external/vulkancts/modules/vulkan/deqp-vk
${STRIP_CMD:-strip} external/openglcts/modules/glcts
${STRIP_CMD:-strip} modules/*/deqp-*
du -sh ./*
du -sh *
rm -rf /VK-GL-CTS
popd
View File
@@ -1,5 +1,4 @@
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting
set -ex
@@ -12,15 +11,12 @@ pushd kernel
# debian (they'll get blown away by the rm of the kernel dir at the end).
mkdir -p ld-links
for i in /usr/bin/*-ld /usr/bin/ld; do
i=$(basename $i)
i=`basename $i`
ln -sf /usr/bin/$i.bfd ld-links/$i
done
export PATH=`pwd`/ld-links:$PATH
NEWPATH=$(pwd)/ld-links
export PATH=$NEWPATH:$PATH
KERNEL_FILENAME=$(basename $KERNEL_URL)
export LOCALVERSION="$KERNEL_FILENAME"
export LOCALVERSION="`basename $KERNEL_URL`"
./scripts/kconfig/merge_config.sh ${DEFCONFIG} ../.gitlab-ci/container/${KERNEL_ARCH}.config
make ${KERNEL_IMAGE_NAME}
for image in ${KERNEL_IMAGE_NAME}; do
@@ -32,8 +28,10 @@ if [[ -n ${DEVICE_TREES} ]]; then
cp ${DEVICE_TREES} /lava-files/.
fi
make modules
INSTALL_MOD_PATH=/lava-files/rootfs-${DEBIAN_ARCH}/ make modules_install
if [[ ${DEBIAN_ARCH} = "amd64" || ${DEBIAN_ARCH} = "arm64" ]]; then
make modules
INSTALL_MOD_PATH=/lava-files/rootfs-${DEBIAN_ARCH}/ make modules_install
fi
if [[ ${DEBIAN_ARCH} = "arm64" ]]; then
make Image.lzma
View File
@@ -26,5 +26,5 @@ mkdir -p /usr/lib/clc
ln -s /usr/share/clc/spirv64-mesa3d-.spv /usr/lib/clc/
ln -s /usr/share/clc/spirv-mesa3d-.spv /usr/lib/clc/
du -sh ./*
du -sh *
rm -rf /libclc /llvm-project
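The "du -sh ./*" spelling (as opposed to a bare "*") is deliberate: with a bare glob, a file whose name starts with a dash would be parsed as an option. A quick illustration:

  touch -- -x    # a legal, if hostile, filename
  du -sh *       # breaks: "-x" is taken as a flag
  du -sh ./*     # safe: expands to ./-x, unambiguously a path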
View File
@@ -1,14 +1,14 @@
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting
set -ex
export LIBDRM_VERSION=libdrm-2.4.110
wget https://dri.freedesktop.org/libdrm/"$LIBDRM_VERSION".tar.xz
tar -xvf "$LIBDRM_VERSION".tar.xz && rm "$LIBDRM_VERSION".tar.xz
cd "$LIBDRM_VERSION"
wget https://dri.freedesktop.org/libdrm/$LIBDRM_VERSION.tar.xz
tar -xvf $LIBDRM_VERSION.tar.xz && rm $LIBDRM_VERSION.tar.xz
cd $LIBDRM_VERSION
meson build -D vc4=false -D freedreno=false -D etnaviv=false $EXTRA_MESON_ARGS
ninja -C build install
cd ..
rm -rf "$LIBDRM_VERSION"
rm -rf $LIBDRM_VERSION
View File
@@ -1,19 +0,0 @@
#!/bin/bash
set -ex
wget https://github.com/KhronosGroup/SPIRV-LLVM-Translator/archive/refs/tags/v13.0.0.tar.gz
tar -xvf v13.0.0.tar.gz && rm v13.0.0.tar.gz
mkdir SPIRV-LLVM-Translator-13.0.0/build
pushd SPIRV-LLVM-Translator-13.0.0/build
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr
ninja
ninja install
# For some reason llvm-spirv is not installed by default
ninja llvm-spirv
cp tools/llvm-spirv/llvm-spirv /usr/bin/
popd
du -sh SPIRV-LLVM-Translator-13.0.0
rm -rf SPIRV-LLVM-Translator-13.0.0
View File
@@ -1,12 +0,0 @@
#!/bin/bash
set -ex
MOLD_VERSION="1.6.0"
git clone -b v"$MOLD_VERSION" --single-branch --depth 1 https://github.com/rui314/mold.git
cd mold
make
make install
cd ..
rm -rf mold
View File
@@ -1,24 +1,16 @@
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting
set -ex
git clone https://gitlab.freedesktop.org/mesa/piglit.git --single-branch --no-checkout /piglit
pushd /piglit
git checkout 591c91865012de4224bea551eac5d2274acf06ad
# TODO: Remove the following patch once the piglit checkout has moved past
# 1cd716180cfb6ef0c1fc54702460ef49e5115791
git apply $OLDPWD/.gitlab-ci/piglit/build-piglit_backport-s3-migration.diff
git checkout b2c9d8f56b45d79f804f4cb5ac62520f0edd8988
patch -p1 <$OLDPWD/.gitlab-ci/piglit/disable-vs_in.diff
cmake -S . -B . -G Ninja -DCMAKE_BUILD_TYPE=Release $PIGLIT_OPTS $EXTRA_CMAKE_ARGS
ninja $PIGLIT_BUILD_TARGETS
# shellcheck disable=SC2038,SC2185 # TODO: rewrite find
find -name .git -o -name '*ninja*' -o -iname '*cmake*' -o -name '*.[chao]' | xargs rm -rf
rm -rf target_api
if [ "$PIGLIT_BUILD_TARGETS" = "piglit_replayer" ]; then
# shellcheck disable=SC2038,SC2185 # TODO: rewrite find
if [ "x$PIGLIT_BUILD_TARGETS" = "xpiglit_replayer" ]; then
find ! -regex "^\.$" \
! -regex "^\.\/piglit.*" \
! -regex "^\.\/framework.*" \
View File
@@ -8,24 +8,17 @@ set -ex
# cargo (and rustup) wants to store stuff in $HOME/.cargo, and binaries in
# $HOME/.cargo/bin. Make bin a link to a public bin directory so the commands
# are just available to all build jobs.
mkdir -p "$HOME"/.cargo
ln -s /usr/local/bin "$HOME"/.cargo/bin
# Rusticl requires at least Rust 1.59.0
#
# Also, pick a specific snapshot from rustup so the compiler doesn't drift on
# us.
RUST_VERSION=1.59.0-2022-02-24
mkdir -p $HOME/.cargo
ln -s /usr/local/bin $HOME/.cargo/bin
# For rust in Mesa, we use rustup to install. This lets us pick an arbitrary
# version of the compiler, rather than whatever the container's Debian comes
# with.
wget https://sh.rustup.rs -O - | sh -s -- \
--default-toolchain $RUST_VERSION \
--profile minimal \
-y
rustup component add rustfmt
#
# Pick the rust compiler (1.48) available in Debian stable, and pick a specific
# snapshot from rustup so the compiler doesn't drift on us.
wget https://sh.rustup.rs -O - | \
sh -s -- -y --default-toolchain 1.49.0-2020-12-31
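Both variants pin the toolchain to a dated rustup snapshot so the compiler cannot drift between container rebuilds; after the install, the pin can be confirmed with:

  # sketch: verify the pinned toolchain is active
  rustc --version
  rustup show active-toolchain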
# Set up a config script for cross compiling -- cargo needs your system cc for
# linking in cross builds, but doesn't know what you want to use for system cc.
View File
@@ -55,9 +55,9 @@ BASE_ARGS_GN_FILE="${SCRIPT_DIR}/build-skqp_base.gn"
SKQP_ARCH=${SKQP_ARCH:-x64}
SKIA_DIR=${SKIA_DIR:-$(mktemp -d)}
SKQP_OUT_DIR=${SKIA_DIR}/out/${SKQP_ARCH}
SKQP_INSTALL_DIR=${SKQP_INSTALL_DIR:-/skqp}
SKQP_INSTALL_DIR=/skqp
SKQP_ASSETS_DIR="${SKQP_INSTALL_DIR}/assets"
SKQP_BINARIES=(skqp list_gpu_unit_tests list_gms)
SKQP_BINARIES=(skqp)
download_skia_source
View File
@@ -1,18 +0,0 @@
Nima-Cpp is no longer available inside googlesource; revert to the github one
Simulates `git revert 49233d2521054037ded7d760427c4a0dc1e11356`
diff --git a/DEPS b/DEPS
index 7e0b941..c88b064 100644
--- a/DEPS
+++ b/DEPS
@@ -33,8 +33,8 @@ deps = {
#"third_party/externals/v8" : "https://chromium.googlesource.com/v8/v8.git@5f1ae66d5634e43563b2d25ea652dfb94c31a3b4",
"third_party/externals/wuffs" : "https://skia.googlesource.com/external/github.com/google/wuffs.git@fda3c4c9863d9f9fcec58ae66508c4621fc71ea5",
"third_party/externals/zlib" : "https://chromium.googlesource.com/chromium/src/third_party/zlib@47af7c547f8551bd25424e56354a2ae1e9062859",
- "third_party/externals/Nima-Cpp" : "https://skia.googlesource.com/external/github.com/2d-inc/Nima-Cpp.git@4bd02269d7d1d2e650950411325eafa15defb084",
- "third_party/externals/Nima-Math-Cpp" : "https://skia.googlesource.com/external/github.com/2d-inc/Nima-Math-Cpp.git@e0c12772093fa8860f55358274515b86885f0108",
+ "third_party/externals/Nima-Cpp" : "https://github.com/2d-inc/Nima-Cpp.git@4bd02269d7d1d2e650950411325eafa15defb084",
+ "third_party/externals/Nima-Math-Cpp" : "https://github.com/2d-inc/Nima-Math-Cpp.git@e0c12772093fa8860f55358274515b86885f0108",
"../src": {
"url": "https://chromium.googlesource.com/chromium/src.git@ccf3465732e5d5363f0e44a8fac54550f62dd1d0",
View File
@@ -1,5 +1,4 @@
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting
set -ex
View File
@@ -17,8 +17,7 @@ export PATH=$CCACHE_PATH:$PATH
export CC="${CCACHE_PATH}/gcc"
export CXX="${CCACHE_PATH}/g++"
# When not using the mold linker (e.g. unsupported architecture), force
# linkers to gold, since it's so much faster for building. We can't use
# Force linkers to gold, since it's so much faster for building. We can't use
# lld because we're on old debian and it's buggy. mingw fails meson builds
# with it with "meson.build:21:0: ERROR: Unable to determine dynamic linker"
find /usr/bin -name \*-ld -o -name ld | \
@@ -28,11 +27,8 @@ find /usr/bin -name \*-ld -o -name ld | \
ccache --show-stats
# Make a wrapper script for ninja to always include the -j flags
{
echo '#!/bin/sh -x'
# shellcheck disable=SC2016
echo '/usr/bin/ninja -j${FDO_CI_CONCURRENT:-4} "$@"'
} > /usr/local/bin/ninja
echo '#!/bin/sh -x' > /usr/local/bin/ninja
echo '/usr/bin/ninja -j${FDO_CI_CONCURRENT:-4} "$@"' >> /usr/local/bin/ninja
chmod +x /usr/local/bin/ninja
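With this wrapper first in PATH, every bare ninja invocation inherits a parallelism default from the environment, for example:

  # the wrapper expands this to: /usr/bin/ninja -j8 -C _build
  FDO_CI_CONCURRENT=8 ninja -C _build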
# Set MAKEFLAGS so that all make invocations in container builds include the
View File
@@ -13,7 +13,7 @@ arch2=${5:-$2}
# and allowing it in code generation means we get unwind symbols that break
# the libEGL and driver symbol tests.
cat > "$cross_file" <<EOF
cat >$cross_file <<EOF
[binaries]
ar = '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/$arch-ar'
c = ['ccache', '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch2}29-clang', '-fno-exceptions', '-fno-unwind-tables', '-fno-asynchronous-unwind-tables']
View File
@@ -1,5 +1,4 @@
#!/bin/sh
# shellcheck disable=SC2086 # we want word splitting
# Makes a .pc file in the Android NDK for meson to find its libraries.
View File
@@ -2,7 +2,7 @@
arch=$1
cross_file="/cross_file-$arch.txt"
/usr/share/meson/debcrossgen --arch "$arch" -o "$cross_file"
/usr/share/meson/debcrossgen --arch $arch -o "$cross_file"
# Explicitly set ccache path for cross compilers
sed -i "s|/usr/bin/\([^-]*\)-linux-gnu\([^-]*\)-g|/usr/lib/ccache/\\1-linux-gnu\\2-g|g" "$cross_file"
if [ "$arch" = "i386" ]; then
@@ -10,11 +10,10 @@ if [ "$arch" = "i386" ]; then
sed -i "s|cpu_family = 'i686'|cpu_family = 'x86'|g" "$cross_file"
fi
# Rely on qemu-user being configured in binfmt_misc on the host
# shellcheck disable=SC1003 # somehow this sed doesn't seem to work for me locally
sed -i -e '/\[properties\]/a\' -e "needs_exe_wrapper = False" "$cross_file"
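The split -e '/\[properties\]/a\' -e "..." form is the portable spelling of sed's append command when the appended text lives in a separate expression; the same trick in isolation:

  # append "bar = 1" after the first line matching [foo]
  sed -i -e '/\[foo\]/a\' -e 'bar = 1' config.ini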
# Add a line for rustc, which debcrossgen is missing.
cc=$(sed -n 's|c = .\(.*\).|\1|p' < "$cross_file")
cc=`sed -n 's|c = .\(.*\).|\1|p' < $cross_file`
if [[ "$arch" = "arm64" ]]; then
rust_target=aarch64-unknown-linux-gnu
elif [[ "$arch" = "armhf" ]]; then
@@ -28,7 +27,6 @@ elif [[ "$arch" = "s390x" ]]; then
else
echo "Needs rustc target mapping"
fi
# shellcheck disable=SC1003 # somehow this sed doesn't seem to work for me locally
sed -i -e '/\[binaries\]/a\' -e "rust = ['rustc', '--target=$rust_target', '-C', 'linker=$cc']" "$cross_file"
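For arch=arm64 the appended entry ends up looking roughly like this (a sketch; the linker value is whatever debcrossgen wrote for the c entry):

  grep '^rust' /cross_file-arm64.txt
  # rust = ['rustc', '--target=aarch64-unknown-linux-gnu', '-C', 'linker=aarch64-linux-gnu-gcc']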
# Set up cmake cross compile toolchain file for dEQP builds
@@ -36,18 +34,18 @@ toolchain_file="/toolchain-$arch.cmake"
if [[ "$arch" = "arm64" ]]; then
GCC_ARCH="aarch64-linux-gnu"
DE_CPU="DE_CPU_ARM_64"
CMAKE_ARCH=arm
elif [[ "$arch" = "armhf" ]]; then
GCC_ARCH="arm-linux-gnueabihf"
DE_CPU="DE_CPU_ARM"
CMAKE_ARCH=arm
fi
if [[ -n "$GCC_ARCH" ]]; then
{
echo "set(CMAKE_SYSTEM_NAME Linux)";
echo "set(CMAKE_SYSTEM_PROCESSOR arm)";
echo "set(CMAKE_C_COMPILER /usr/lib/ccache/$GCC_ARCH-gcc)";
echo "set(CMAKE_CXX_COMPILER /usr/lib/ccache/$GCC_ARCH-g++)";
echo "set(ENV{PKG_CONFIG} \"/usr/bin/$GCC_ARCH-pkg-config\")";
echo "set(DE_CPU $DE_CPU)";
} > "$toolchain_file"
echo "set(CMAKE_SYSTEM_NAME Linux)" > "$toolchain_file"
echo "set(CMAKE_SYSTEM_PROCESSOR arm)" >> "$toolchain_file"
echo "set(CMAKE_C_COMPILER /usr/lib/ccache/$GCC_ARCH-gcc)" >> "$toolchain_file"
echo "set(CMAKE_CXX_COMPILER /usr/lib/ccache/$GCC_ARCH-g++)" >> "$toolchain_file"
echo "set(ENV{PKG_CONFIG} \"/usr/bin/$GCC_ARCH-pkg-config\")" >> "$toolchain_file"
echo "set(DE_CPU $DE_CPU)" >> "$toolchain_file"
fi
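The brace-group form funnels all the echoes through a single redirection instead of reopening the toolchain file for every ">>"; the same idiom in miniature:

  { echo "line 1"; echo "line 2"; } > out.txt   # one open, one file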
View File
@@ -1,7 +1,4 @@
#!/bin/bash
# shellcheck disable=SC2140 # ugly array, remove later
# shellcheck disable=SC2288 # ugly array, remove later
# shellcheck disable=SC2086 # we want word splitting
set -ex
@@ -18,10 +15,6 @@ elif [ $DEBIAN_ARCH = amd64 ]; then
apt-get -y install --no-install-recommends wget gnupg2 software-properties-common
apt-key add /llvm-snapshot.gpg.key
add-apt-repository "deb https://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-13 main"
# Debian bullseye has older wine 5.0, we want >= 7.0 for traces.
apt-key add /winehq.gpg.key
apt-add-repository https://dl.winehq.org/wine-builds/debian/
ARCH_PACKAGES="firmware-amd-graphics
inetutils-syslogd
@@ -41,10 +34,6 @@ elif [ $DEBIAN_ARCH = amd64 ]; then
spirv-tools
sysvinit-core
"
elif [ $DEBIAN_ARCH = armhf ]; then
ARCH_PACKAGES="firmware-misc-nonfree
"
fi
INSTALL_CI_FAIRY_PACKAGES="git
@@ -63,7 +52,6 @@ apt-get -y install --no-install-recommends \
ca-certificates \
firmware-realtek \
initramfs-tools \
jq \
libasan6 \
libexpat1 \
libpng16-16 \
@@ -104,29 +92,11 @@ apt-get -y install --no-install-recommends \
waffle-utils \
wget \
xinit \
xserver-xorg-core \
zstd
if [ "$DEBIAN_ARCH" = "amd64" ]; then
# workaround wine needing 32-bit
# https://bugs.winehq.org/show_bug.cgi?id=53393
apt-get install -y --no-remove wine-stable-amd64 # a requirement for wine-stable
WINE_PKG="wine-stable"
WINE_PKG_DROP="wine-stable-i386"
apt download "${WINE_PKG}"
dpkg --ignore-depends="${WINE_PKG_DROP}" -i "${WINE_PKG}"*.deb
rm "${WINE_PKG}"*.deb
sed -i "/${WINE_PKG_DROP}/d" /var/lib/dpkg/status
apt-get install -y --no-remove winehq-stable # symlinks-only, depends on wine-stable
fi
xserver-xorg-core
# Needed for ci-fairy, this revision is able to upload files to
# MinIO and doesn't depend on git
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2
# Needed for manipulation with traces yaml files.
pip3 install yq
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@34f4ade99434043f88e164933f570301fd18b125
apt-get purge -y \
$INSTALL_CI_FAIRY_PACKAGES
@@ -255,7 +225,7 @@ rm -rf etc/dpkg
# Drop directories not part of ostree
# Note that /var needs to exist as ostree bind mounts the deployment /var over
# it
rm -rf var/* srv share
rm -rf var/* opt srv share
# ca-certificates are in /etc drop the source
rm -rf usr/share/ca-certificates
View File
@@ -1,5 +1,4 @@
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting
set -e
set -o xtrace
@@ -38,9 +37,8 @@ apt-get install -y --no-remove \
wget
if [[ $arch != "armhf" ]]; then
# See the list of available architectures in https://apt.llvm.org/bullseye/dists/llvm-toolchain-bullseye-13/main/
if [[ $arch == "s390x" ]] || [[ $arch == "i386" ]] || [[ $arch == "arm64" ]]; then
LLVM=13
if [[ $arch == "s390x" ]]; then
LLVM=9
else
LLVM=11
fi
@@ -48,7 +46,7 @@ if [[ $arch != "armhf" ]]; then
# llvm-*-tools:$arch conflicts with python3:amd64. Install dependencies only
# with apt-get, then force-install llvm-*-{dev,tools}:$arch with dpkg to get
# around this.
apt-get install -y --no-remove --no-install-recommends \
apt-get install -y --no-remove \
libclang-cpp${LLVM}:$arch \
libffi-dev:$arch \
libgcc-s1:$arch \
View File
@@ -1,5 +1,4 @@
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting
set -ex
@@ -20,7 +19,7 @@ rm $ndk.zip
# duplicate files. Turn them into hardlinks to save on container space.
rdfind -makehardlinks true -makeresultsfile false /android-ndk-r21d/
# Drop some large tools we won't use in this build.
find /android-ndk-r21d/ -type f | grep -E -i "clang-check|clang-tidy|lldb" | xargs rm -f
find /android-ndk-r21d/ -type f | egrep -i "clang-check|clang-tidy|lldb" | xargs rm -f
sh .gitlab-ci/container/create-android-ndk-pc.sh /$ndk zlib.pc "" "-lz" "1.2.3"
View File
@@ -1,5 +1,4 @@
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting
set -e
set -o xtrace
@@ -9,15 +8,9 @@ sed -i -e 's/http:\/\/deb/https:\/\/deb/g' /etc/apt/sources.list
echo 'deb https://deb.debian.org/debian buster main' >/etc/apt/sources.list.d/buster.list
apt-get update
# Ephemeral packages (installed for this script and removed again at
# the end)
STABLE_EPHEMERAL=" \
libssl-dev \
"
apt-get -y install \
${EXTRA_LOCAL_PACKAGES} \
${STABLE_EPHEMERAL} \
abootimg \
autoconf \
automake \
bc \
@@ -61,26 +54,21 @@ apt-get -y install \
u-boot-tools \
wget \
xz-utils \
zlib1g-dev \
zstd
zlib1g-dev
# Not available anymore in bullseye
apt-get install -y --no-remove -t buster \
android-sdk-ext4-utils
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@34f4ade99434043f88e164933f570301fd18b125
arch=armhf
. .gitlab-ci/container/cross_build.sh
. .gitlab-ci/container/container_pre_build.sh
. .gitlab-ci/container/build-mold.sh
# dependencies where we want a specific version
EXTRA_MESON_ARGS=
. .gitlab-ci/container/build-libdrm.sh
apt-get purge -y $STABLE_EPHEMERAL
. .gitlab-ci/container/container_post_build.sh
View File
@@ -9,6 +9,7 @@ sed -i -e 's/http:\/\/deb/https:\/\/deb/g' /etc/apt/sources.list
apt-get update
apt-get install -y --no-remove \
abootimg \
cpio \
fastboot \
netcat \
@@ -18,8 +19,7 @@ apt-get install -y --no-remove \
python3-serial \
rsync \
snmp \
wget \
zstd
wget
# setup SNMPv2 SMI MIB
wget https://raw.githubusercontent.com/net-snmp/net-snmp/master/mibs/SNMPv2-SMI.txt \
@@ -37,9 +37,3 @@ ln -s \
/baremetal-files/Image \
/baremetal-files/tegra210-p3450-0000.dtb \
/baremetal-files/jetson-nano/boot/
mkdir -p /baremetal-files/jetson-tk1/boot/
ln -s \
/baremetal-files/zImage \
/baremetal-files/tegra124-jetson-tk1.dtb \
/baremetal-files/jetson-tk1/boot/
View File
@@ -1,16 +1,5 @@
#!/bin/bash
set -e
arch=s390x
# Ephemeral packages (installed for this script and removed again at the end)
STABLE_EPHEMERAL="libssl-dev"
apt-get -y install "$STABLE_EPHEMERAL"
. .gitlab-ci/container/build-mold.sh
apt-get purge -y "$STABLE_EPHEMERAL"
. .gitlab-ci/container/cross_build.sh
View File
@@ -5,9 +5,12 @@ set -o xtrace
# Installing wine, need this for testing mingw or nine
# We need multiarch for Wine
dpkg --add-architecture i386
apt-get update
apt-get install -y --no-remove \
wine \
wine32 \
wine64 \
xvfb
View File
@@ -1,5 +1,4 @@
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting
set -e
set -o xtrace
@@ -28,7 +27,6 @@ apt-get install -y --no-remove \
bison \
ccache \
dpkg-cross \
findutils \
flex \
g++ \
cmake \
@@ -38,12 +36,15 @@ apt-get install -y --no-remove \
kmod \
libclang-13-dev \
libclang-11-dev \
libclang-9-dev \
libclc-dev \
libelf-dev \
libepoxy-dev \
libexpat1-dev \
libgtk-3-dev \
libllvm13 \
libllvm11 \
libllvm9 \
libomxil-bellagio-dev \
libpciaccess-dev \
libunwind-dev \
@@ -57,13 +58,13 @@ apt-get install -y --no-remove \
libxrandr-dev \
libxrender-dev \
libxshmfence-dev \
libxvmc-dev \
libxxf86vm-dev \
make \
meson \
pkg-config \
python3-mako \
python3-pil \
python3-ply \
python3-requests \
qemu-user \
valgrind \
@@ -72,16 +73,10 @@ apt-get install -y --no-remove \
x11proto-gl-dev \
x11proto-randr-dev \
xz-utils \
zlib1g-dev \
zstd
zlib1g-dev
# Needed for ci-fairy, this revision is able to upload files to MinIO
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2
# We need at least 0.61.4 for proper Rust
pip3 install meson==0.61.5
. .gitlab-ci/container/build-rust.sh
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@34f4ade99434043f88e164933f570301fd18b125
. .gitlab-ci/container/debian/x86_build-base-wine.sh
View File
@@ -1,7 +1,4 @@
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting
set -e
# Pull packages from msys2 repository that can be directly used.
# We can use https://packages.msys2.org/ to retrieve the newest package
View File
@@ -1,18 +1,11 @@
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting
set -e
# Building libdrm (libva dependency)
. .gitlab-ci/container/build-libdrm.sh
wd=$PWD
CMAKE_TOOLCHAIN_MINGW_PATH=$wd/.gitlab-ci/container/debian/x86_mingw-toolchain.cmake
mkdir -p ~/tmp
pushd ~/tmp
# Building DirectX-Headers
git clone https://github.com/microsoft/DirectX-Headers -b v1.606.4 --depth 1
git clone https://github.com/microsoft/DirectX-Headers -b v1.606.3 --depth 1
mkdir -p DirectX-Headers/build
pushd DirectX-Headers/build
meson .. \
@@ -24,25 +17,6 @@ meson .. \
ninja install
popd
# Building libva
git clone https://github.com/intel/libva
pushd libva/
# Checking out commit hash with libva-win32 support
# This feature will be released with libva version 2.17
git checkout 2579eb0f77897dc01a02c1e43defc63c40fd2988
popd
# libva already has a build dir in their repo, use builddir instead
mkdir -p libva/builddir
pushd libva/builddir
meson .. \
--backend=ninja \
--buildtype=release \
-Dprefix=/usr/x86_64-w64-mingw32/ \
--cross-file=$wd/.gitlab-ci/x86_64-w64-mingw32
ninja install
popd
export VULKAN_SDK_VERSION=1.3.211.0
# Building SPIRV Tools
View File
@@ -1,5 +1,4 @@
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting
set -e
set -o xtrace
@@ -13,7 +12,6 @@ STABLE_EPHEMERAL=" \
autotools-dev \
bzip2 \
libtool \
libssl-dev \
python3-pip \
"
@@ -29,6 +27,7 @@ apt-get install -y --no-remove \
libclang-cpp11-dev \
libgbm-dev \
libglvnd-dev \
libllvmspirvlib-dev \
liblua5.3-dev \
libxcb-dri2-0-dev \
libxcb-dri3-dev \
@@ -42,16 +41,14 @@ apt-get install -y --no-remove \
libxml2-dev \
llvm-13-dev \
llvm-11-dev \
llvm-9-dev \
ocl-icd-opencl-dev \
python3-freezegun \
python3-pytest \
procps \
spirv-tools \
shellcheck \
strace \
time \
yamllint \
zstd
time
. .gitlab-ci/container/container_pre_build.sh
@@ -61,17 +58,11 @@ export XORG_RELEASES=https://xorg.freedesktop.org/releases/individu
export XORGMACROS_VERSION=util-macros-1.19.0
. .gitlab-ci/container/build-mold.sh
wget $XORG_RELEASES/util/$XORGMACROS_VERSION.tar.bz2
tar -xvf $XORGMACROS_VERSION.tar.bz2 && rm $XORGMACROS_VERSION.tar.bz2
cd $XORGMACROS_VERSION; ./configure; make install; cd ..
rm -rf $XORGMACROS_VERSION
. .gitlab-ci/container/build-llvm-spirv.sh
. .gitlab-ci/container/build-libclc.sh
. .gitlab-ci/container/build-libdrm.sh
. .gitlab-ci/container/build-wayland.sh
@@ -83,7 +74,7 @@ cd shader-db
make
popd
git clone https://github.com/microsoft/DirectX-Headers -b v1.606.4 --depth 1
git clone https://github.com/microsoft/DirectX-Headers -b v1.606.3 --depth 1
mkdir -p DirectX-Headers/build
pushd DirectX-Headers/build
meson .. --backend=ninja --buildtype=release -Dbuild-test=false
@@ -94,12 +85,6 @@ rm -rf DirectX-Headers
pip3 install git+https://git.lavasoftware.org/lava/lavacli@3db3ddc45e5358908bc6a17448059ea2340492b7
# install bindgen
RUSTFLAGS='-L native=/usr/local/lib' cargo install \
bindgen --version 0.59.2 \
-j ${FDO_CI_CONCURRENT:-4} \
--root /usr/local
############### Uninstall the build software
apt-get purge -y \
View File
@@ -1,5 +1,4 @@
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting
set -e
set -o xtrace
@@ -13,43 +12,11 @@ sed -i -e 's/http:\/\/deb/https:\/\/deb/g' /etc/apt/sources.list
# Ephemeral packages (installed for this script and removed again at
# the end)
STABLE_EPHEMERAL=" \
autoconf \
automake \
bc \
bison \
bzip2 \
ccache \
cmake \
clang-11 \
flex \
glslang-tools \
g++ \
libasound2-dev \
libcap-dev \
libclang-cpp11-dev \
libegl-dev \
libelf-dev \
libepoxy-dev \
libgbm-dev \
libpciaccess-dev \
libvulkan-dev \
libwayland-dev \
libx11-xcb-dev \
libxext-dev \
llvm-13-dev \
llvm-11-dev \
make \
meson \
patch \
pkg-config \
protobuf-compiler \
cargo \
python3-dev \
python3-pip \
python3-setuptools \
python3-wheel \
spirv-tools \
wayland-protocols \
xz-utils \
"
# Add llvm 13 to the build image
@@ -59,19 +26,14 @@ add-apt-repository "deb https://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-1
apt-get update
apt-get dist-upgrade -y
apt-get install -y \
sysvinit-core
apt-get install -y --no-remove \
git \
git-lfs \
inetutils-syslogd \
iptables \
jq \
libasan6 \
libexpat1 \
libllvm13 \
libllvm11 \
libllvm9 \
liblz4-1 \
libpng16-16 \
libpython3.9 \
@@ -91,69 +53,22 @@ apt-get install -y --no-remove \
python3-requests \
python3-six \
python3-yaml \
socat \
vulkan-tools \
waffle-utils \
wget \
xauth \
xvfb \
zlib1g \
zstd
zlib1g
apt-get install -y --no-install-recommends \
$STABLE_EPHEMERAL
. .gitlab-ci/container/container_pre_build.sh
############### Build kernel
export DEFCONFIG="arch/x86/configs/x86_64_defconfig"
export KERNEL_IMAGE_NAME=bzImage
export KERNEL_ARCH=x86_64
export DEBIAN_ARCH=amd64
mkdir -p /lava-files/
. .gitlab-ci/container/build-kernel.sh
# Needed for ci-fairy, this revision is able to upload files to MinIO
# and doesn't depend on git
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2
# Needed for manipulation with traces yaml files.
pip3 install yq
# Needed for crosvm compilation.
update-alternatives --install /usr/bin/clang clang /usr/bin/clang-11 100
############### Build LLVM-SPIRV translator
. .gitlab-ci/container/build-llvm-spirv.sh
############### Build libclc
. .gitlab-ci/container/build-libclc.sh
############### Build libdrm
. .gitlab-ci/container/build-libdrm.sh
############### Build Wayland
. .gitlab-ci/container/build-wayland.sh
############### Build Crosvm
. .gitlab-ci/container/build-rust.sh
. .gitlab-ci/container/build-crosvm.sh
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@34f4ade99434043f88e164933f570301fd18b125
############### Build dEQP runner
. .gitlab-ci/container/build-deqp-runner.sh
rm -rf /root/.cargo
rm -rf /root/.rustup
ccache --show-stats
rm -rf ~/.cargo
apt-get purge -y $STABLE_EPHEMERAL
View File
@@ -1,5 +1,4 @@
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting
set -e
set -o xtrace
@@ -8,18 +7,28 @@ export DEBIAN_FRONTEND=noninteractive
# Ephemeral packages (installed for this script and removed again at the end)
STABLE_EPHEMERAL=" \
autoconf \
automake \
bc \
bison \
bzip2 \
ccache \
clang-13 \
clang-11 \
cmake \
flex \
g++ \
glslang-tools \
libasound2-dev \
libcap-dev \
libclang-cpp13-dev \
libclang-cpp11-dev \
libelf-dev \
libexpat1-dev \
libfdt-dev \
libgbm-dev \
libgles2-mesa-dev \
libllvmspirvlib-dev \
libpciaccess-dev \
libpng-dev \
libudev-dev \
@@ -27,10 +36,12 @@ STABLE_EPHEMERAL=" \
libwaffle-dev \
libx11-xcb-dev \
libxcb-dri2-0-dev \
libxext-dev \
libxkbcommon-dev \
libxrender-dev \
llvm-13-dev \
llvm-11-dev \
llvm-spirv \
make \
meson \
ocl-icd-opencl-dev \
@@ -52,18 +63,51 @@ apt-get install -y --no-remove \
libclang-cpp11 \
libcap2 \
libegl1 \
libepoxy0 \
libepoxy-dev \
libfdt1 \
libllvmspirvlib11 \
libxcb-shm0 \
ocl-icd-libopencl1 \
python3-lxml \
python3-renderdoc \
python3-simplejson \
spirv-tools
socat \
spirv-tools \
sysvinit-core \
wget
. .gitlab-ci/container/container_pre_build.sh
############### Build libdrm
. .gitlab-ci/container/build-libdrm.sh
############### Build Wayland
. .gitlab-ci/container/build-wayland.sh
############### Build Crosvm
. .gitlab-ci/container/build-rust.sh
. .gitlab-ci/container/build-crosvm.sh
rm -rf /root/.cargo
rm -rf /root/.rustup
############### Build kernel
export DEFCONFIG="arch/x86/configs/x86_64_defconfig"
export KERNEL_IMAGE_NAME=bzImage
export KERNEL_ARCH=x86_64
export DEBIAN_ARCH=amd64
mkdir -p /lava-files/
. .gitlab-ci/container/build-kernel.sh
############### Build libclc
. .gitlab-ci/container/build-libclc.sh
############### Build piglit
PIGLIT_OPTS="-DPIGLIT_BUILD_CL_TESTS=ON -DPIGLIT_BUILD_DMA_BUF_TESTS=ON" . .gitlab-ci/container/build-piglit.sh
View File
@@ -1,7 +1,6 @@
#!/bin/bash
# The relative paths in this file only become valid at runtime.
# shellcheck disable=SC1091
# shellcheck disable=SC2086 # we want word splitting
set -e
set -o xtrace
@@ -50,9 +49,8 @@ STABLE_EPHEMERAL=" \
xz-utils \
"
apt-get install -y --no-remove --no-install-recommends \
apt-get install -y --no-remove \
$STABLE_EPHEMERAL \
libepoxy0 \
libxcb-shm0 \
pciutils \
python3-lxml \
@@ -61,36 +59,87 @@ apt-get install -y --no-remove --no-install-recommends \
xserver-xorg-video-amdgpu \
xserver-xorg-video-ati
# We need multiarch for Wine
dpkg --add-architecture i386
# Install a more recent version of Wine than exists in Debian.
apt-key add .gitlab-ci/container/debian/winehq.gpg.key
apt-add-repository https://dl.winehq.org/wine-builds/debian/
apt-get update -q
apt update -qyy
# Needed for Valve's tracing jobs to collect information about the graphics
# hardware on the test devices.
pip3 install gfxinfo-mupuf==0.0.9
# workaround wine needing 32-bit
# https://bugs.winehq.org/show_bug.cgi?id=53393
apt-get install -y --no-remove wine-stable-amd64 # a requirement for wine-stable
WINE_PKG="wine-stable"
WINE_PKG_DROP="wine-stable-i386"
apt-get download "${WINE_PKG}"
dpkg --ignore-depends="${WINE_PKG_DROP}" -i "${WINE_PKG}"*.deb
rm "${WINE_PKG}"*.deb
sed -i "/${WINE_PKG_DROP}/d" /var/lib/dpkg/status
apt-get install -y --no-remove winehq-stable # symlinks-only, depends on wine-stable
apt install -y --no-remove --install-recommends winehq-stable
function setup_wine() {
export WINEDEBUG="-all"
export WINEPREFIX="$1"
# We don't want crash dialogs
cat >crashdialog.reg <<EOF
Windows Registry Editor Version 5.00
[HKEY_CURRENT_USER\Software\Wine\WineDbg]
"ShowCrashDialog"=dword:00000000
EOF
# Set the wine prefix and disable the crash dialog
wine regedit crashdialog.reg
rm crashdialog.reg
# An immediate wine command may fail with: "${WINEPREFIX}: Not a
# valid wine prefix." and that is just spat out because the check for the
# existence of the system.reg file fails. Just giving it a bit more time
# to be created solves the problem ...
while ! test -f "${WINEPREFIX}/system.reg"; do sleep 1; done
}
############### Install DXVK
. .gitlab-ci/container/setup-wine.sh "/dxvk-wine64"
. .gitlab-ci/container/install-wine-dxvk.sh
dxvk_install_release() {
local DXVK_VERSION=${1:-"1.10.1"}
wget "https://github.com/doitsujin/dxvk/releases/download/v${DXVK_VERSION}/dxvk-${DXVK_VERSION}.tar.gz"
tar xzpf dxvk-"${DXVK_VERSION}".tar.gz
"dxvk-${DXVK_VERSION}"/setup_dxvk.sh install
rm -rf "dxvk-${DXVK_VERSION}"
rm dxvk-"${DXVK_VERSION}".tar.gz
}
# Install from a Github PR number
dxvk_install_pr() {
local __prnum=$1
# NOTE: Clone the entire history of the repo so as not to think
# harder about cloning just enough for 'git describe' to work. 'git
# describe' is used by the dxvk build system to generate a
# dxvk_version Meson variable, which is nice-to-have.
git clone https://github.com/doitsujin/dxvk
pushd dxvk
git fetch origin pull/"$__prnum"/head:pr
git checkout pr
./package-release.sh pr ../dxvk-build --no-package
popd
pushd ./dxvk-build/dxvk-pr
./setup_dxvk.sh install
popd
rm -rf ./dxvk-build ./dxvk
}
# Sets up the WINEPREFIX for the DXVK installation commands below.
setup_wine "/dxvk-wine64"
dxvk_install_release "1.10.1"
#dxvk_install_pr 2359
############### Install apitrace binaries for wine
. .gitlab-ci/container/install-wine-apitrace.sh
# Add the apitrace path to the registry
wine64 \
wine \
reg add "HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Session Manager\Environment" \
/v Path \
/t REG_EXPAND_SZ \
@@ -101,6 +150,14 @@ wine64 \
. .gitlab-ci/container/container_pre_build.sh
############### Build libdrm
. .gitlab-ci/container/build-libdrm.sh
############### Build Wayland
. .gitlab-ci/container/build-wayland.sh
############### Build parallel-deqp-runner's hang-detection tool
. .gitlab-ci/container/build-hang-detection.sh
@@ -127,7 +184,7 @@ PIGLIT_BUILD_TARGETS="piglit_replayer" . .gitlab-ci/container/build-piglit.sh
############### Build VKD3D-Proton
. .gitlab-ci/container/setup-wine.sh "/vkd3d-proton-wine64"
setup_wine "/vkd3d-proton-wine64"
. .gitlab-ci/container/build-vkd3d-proton.sh
View File
@@ -1,5 +1,4 @@
#!/bin/bash
# shellcheck disable=SC2086 # we want word splitting
set -e
set -o xtrace
@@ -9,12 +8,10 @@ EPHEMERAL="
autoconf
automake
bzip2
cmake
git
libtool
pkgconfig(epoxy)
pkgconfig(gbm)
pkgconfig(openssl)
unzip
wget
xz
@@ -67,7 +64,6 @@ dnf install -y --setopt=install_weak_deps=False \
python3-mako \
python3-devel \
python3-mako \
python3-ply \
vulkan-headers \
spirv-tools-devel \
spirv-llvm-translator-devel \
@@ -87,8 +83,6 @@ tar -xvf $XORGMACROS_VERSION.tar.bz2 && rm $XORGMACROS_VERSION.tar.bz2
cd $XORGMACROS_VERSION; ./configure; make install; cd ..
rm -rf $XORGMACROS_VERSION
. .gitlab-ci/container/build-mold.sh
. .gitlab-ci/container/build-libdrm.sh
. .gitlab-ci/container/build-wayland.sh
View File
@@ -53,7 +53,7 @@
variables:
FDO_DISTRIBUTION_VERSION: bullseye-slim
FDO_REPO_SUFFIX: $CI_JOB_NAME
FDO_DISTRIBUTION_EXEC: 'bash .gitlab-ci/container/${CI_JOB_NAME}.sh'
FDO_DISTRIBUTION_EXEC: 'env "WINEPATH=${WINEPATH}" FDO_CI_CONCURRENT=${FDO_CI_CONCURRENT} bash .gitlab-ci/container/${CI_JOB_NAME}.sh'
# no need to pull the whole repo to build the container image
GIT_STRATEGY: none
@@ -189,7 +189,6 @@ debian/android_build:
debian/x86_test-base:
extends: debian/x86_build-base
variables:
KERNEL_URL: &kernel-rootfs-url "https://gitlab.freedesktop.org/gfx-ci/linux/-/archive/v5.19-for-mesa-ci-d4efddaec194/linux-v5.17-for-mesa-ci-b78f7870d97b.tar.bz2"
MESA_IMAGE_TAG: &debian-x86_test-base ${DEBIAN_BASE_TAG}
.use-debian/x86_test-base:
@@ -206,6 +205,8 @@ debian/x86_test-base:
debian/x86_test-gl:
extends: .use-debian/x86_test-base
variables:
FDO_DISTRIBUTION_EXEC: 'env KERNEL_URL=${KERNEL_URL} FDO_CI_CONCURRENT=${FDO_CI_CONCURRENT} bash .gitlab-ci/container/${CI_JOB_NAME}.sh'
KERNEL_URL: &kernel-rootfs-url "https://gitlab.freedesktop.org/gfx-ci/linux/-/archive/v5.17-for-mesa-ci-b78f7870d97b/linux-v5.17-for-mesa-ci-b78f7870d97b.tar.bz2"
MESA_IMAGE_TAG: &debian-x86_test-gl ${DEBIAN_X86_TEST_GL_TAG}
.use-debian/x86_test-gl:
@@ -332,9 +333,8 @@ debian/arm_test:
- kernel+rootfs_arm64
- kernel+rootfs_armhf
variables:
FDO_DISTRIBUTION_EXEC: 'env ARTIFACTS_PREFIX=https://${MINIO_HOST}/mesa-lava ARTIFACTS_SUFFIX=${MESA_ROOTFS_TAG}--${MESA_ARM_BUILD_TAG}--${MESA_TEMPLATES_COMMIT} CI_PROJECT_PATH=${CI_PROJECT_PATH} FDO_CI_CONCURRENT=${FDO_CI_CONCURRENT} FDO_UPSTREAM_REPO=${FDO_UPSTREAM_REPO} bash .gitlab-ci/container/${CI_JOB_NAME}.sh'
FDO_DISTRIBUTION_TAG: "${MESA_IMAGE_TAG}--${MESA_ROOTFS_TAG}--${MESA_ARM_BUILD_TAG}--${MESA_TEMPLATES_COMMIT}"
ARTIFACTS_PREFIX: "https://${MINIO_HOST}/mesa-lava"
ARTIFACTS_SUFFIX: "${MESA_ROOTFS_TAG}--${MESA_ARM_BUILD_TAG}--${MESA_TEMPLATES_COMMIT}"
MESA_ARM_BUILD_TAG: *debian-arm_build
MESA_IMAGE_TAG: &debian-arm_test ${DEBIAN_BASE_TAG}
MESA_ROOTFS_TAG: *kernel-rootfs
@@ -409,7 +409,7 @@ windows_build_vs2019:
- !reference [.build-rules, rules]
variables:
MESA_IMAGE_PATH: &windows_build_image_path ${WINDOWS_X64_BUILD_PATH}
MESA_IMAGE_TAG: &windows_build_image_tag ${MESA_BASE_IMAGE_TAG}--${WINDOWS_X64_BUILD_TAG}
MESA_IMAGE_TAG: &windows_build_image_tag ${WINDOWS_X64_BUILD_TAG}
DOCKERFILE: Dockerfile_build
MESA_BASE_IMAGE_PATH: *windows_vs_image_path
MESA_BASE_IMAGE_TAG: *windows_vs_image_tag
@@ -429,7 +429,7 @@ windows_test_vs2019:
- !reference [.build-rules, rules]
variables:
MESA_IMAGE_PATH: &windows_test_image_path ${WINDOWS_X64_TEST_PATH}
MESA_IMAGE_TAG: &windows_test_image_tag ${MESA_BASE_IMAGE_TAG}--${WINDOWS_X64_TEST_TAG}
MESA_IMAGE_TAG: &windows_test_image_tag ${WINDOWS_X64_BUILD_TAG}--${WINDOWS_X64_TEST_TAG}
DOCKERFILE: Dockerfile_test
MESA_BASE_IMAGE_PATH: *windows_vs_image_path
MESA_BASE_IMAGE_TAG: *windows_vs_image_tag
@@ -445,7 +445,6 @@ windows_test_vs2019:
variables:
MESA_IMAGE_PATH: *windows_build_image_path
MESA_IMAGE_TAG: *windows_build_image_tag
MESA_BASE_IMAGE_TAG: *windows_vs_image_tag
needs:
- windows_build_vs2019
@@ -457,4 +456,3 @@ windows_test_vs2019:
variables:
MESA_IMAGE_PATH: *windows_test_image_path
MESA_IMAGE_TAG: *windows_test_image_tag
MESA_BASE_IMAGE_TAG: *windows_vs_image_tag
View File
@@ -1,39 +0,0 @@
#!/bin/bash
set -e
dxvk_install_release() {
local DXVK_VERSION=${1:-"1.10.3"}
wget "https://github.com/doitsujin/dxvk/releases/download/v${DXVK_VERSION}/dxvk-${DXVK_VERSION}.tar.gz"
tar xzpf dxvk-"${DXVK_VERSION}".tar.gz
# https://github.com/doitsujin/dxvk/issues/2921
sed -i 's/wine="wine"/wine="wine32"/' "dxvk-${DXVK_VERSION}"/setup_dxvk.sh
"dxvk-${DXVK_VERSION}"/setup_dxvk.sh install
rm -rf "dxvk-${DXVK_VERSION}"
rm dxvk-"${DXVK_VERSION}".tar.gz
}
# Install from a Github PR number
dxvk_install_pr() {
local __prnum=$1
# NOTE: Clone the entire history of the repo so as not to think
# harder about cloning just enough for 'git describe' to work. 'git
# describe' is used by the dxvk build system to generate a
# dxvk_version Meson variable, which is nice-to-have.
git clone https://github.com/doitsujin/dxvk
pushd dxvk
git fetch origin pull/"$__prnum"/head:pr
git checkout pr
./package-release.sh pr ../dxvk-build --no-package
popd
pushd ./dxvk-build/dxvk-pr
./setup_dxvk.sh install
popd
rm -rf ./dxvk-build ./dxvk
}
dxvk_install_release "1.10.1"
#dxvk_install_pr 2359
View File
@@ -1,7 +1,4 @@
#!/bin/bash
# shellcheck disable=SC1091 # The relative paths in this file only become valid at runtime.
# shellcheck disable=SC2034 # Variables are used in scripts called from here
# shellcheck disable=SC2086 # we want word splitting
set -e
set -o xtrace
@@ -40,7 +37,6 @@ if [[ "$DEBIAN_ARCH" = "arm64" ]]; then
DEVICE_TREES+=" arch/arm64/boot/dts/mediatek/mt8183-kukui-jacuzzi-juniper-sku16.dtb"
DEVICE_TREES+=" arch/arm64/boot/dts/nvidia/tegra210-p3450-0000.dtb"
DEVICE_TREES+=" arch/arm64/boot/dts/qcom/sc7180-trogdor-lazor-limozeen-nots-r5.dtb"
DEVICE_TREES+=" arch/arm64/boot/dts/freescale/imx8mq-nitrogen.dtb"
KERNEL_IMAGE_NAME="Image"
elif [[ "$DEBIAN_ARCH" = "armhf" ]]; then
@@ -51,7 +47,6 @@ elif [[ "$DEBIAN_ARCH" = "armhf" ]]; then
DEVICE_TREES="arch/arm/boot/dts/rk3288-veyron-jaq.dtb"
DEVICE_TREES+=" arch/arm/boot/dts/sun8i-h3-libretech-all-h3-cc.dtb"
DEVICE_TREES+=" arch/arm/boot/dts/imx6q-cubox-i.dtb"
DEVICE_TREES+=" arch/arm/boot/dts/tegra124-jetson-tk1.dtb"
KERNEL_IMAGE_NAME="zImage"
. .gitlab-ci/container/create-cross-file.sh armhf
else
@@ -61,7 +56,7 @@ else
DEFCONFIG="arch/x86/configs/x86_64_defconfig"
DEVICE_TREES=""
KERNEL_IMAGE_NAME="bzImage"
ARCH_PACKAGES="libasound2-dev libcap-dev libfdt-dev libva-dev wayland-protocols p7zip"
ARCH_PACKAGES="libasound2-dev libcap-dev libfdt-dev libva-dev wayland-protocols"
fi
# Determine if we're in a cross build.
@@ -111,15 +106,13 @@ apt-get install -y --no-remove \
libxkbcommon-dev \
ninja-build \
patch \
protobuf-compiler \
python-is-python3 \
python3-distutils \
python3-mako \
python3-numpy \
python3-serial \
unzip \
wget \
zstd
wget
if [[ "$DEBIAN_ARCH" = "armhf" ]]; then
@@ -137,20 +130,6 @@ if [[ "$DEBIAN_ARCH" = "armhf" ]]; then
libxkbcommon-dev:armhf
fi
mkdir -p "/lava-files/rootfs-${DEBIAN_ARCH}"
############### Setting up
if [ "$DEBIAN_ARCH" = "amd64" ]; then
. .gitlab-ci/container/setup-wine.sh "/dxvk-wine64"
. .gitlab-ci/container/install-wine-dxvk.sh
mv /dxvk-wine64 "/lava-files/rootfs-${DEBIAN_ARCH}/"
fi
############### Installing
. .gitlab-ci/container/install-wine-apitrace.sh
mkdir -p "/lava-files/rootfs-${DEBIAN_ARCH}/apitrace-msvc-win64"
mv /apitrace-msvc-win64/bin "/lava-files/rootfs-${DEBIAN_ARCH}/apitrace-msvc-win64"
rm -rf /apitrace-msvc-win64
############### Building
STRIP_CMD="${GCC_ARCH}-strip"
@@ -236,9 +215,8 @@ set -e
cp .gitlab-ci/container/create-rootfs.sh /lava-files/rootfs-${DEBIAN_ARCH}/.
cp .gitlab-ci/container/debian/llvm-snapshot.gpg.key /lava-files/rootfs-${DEBIAN_ARCH}/.
cp .gitlab-ci/container/debian/winehq.gpg.key /lava-files/rootfs-${DEBIAN_ARCH}/.
chroot /lava-files/rootfs-${DEBIAN_ARCH} sh /create-rootfs.sh
rm /lava-files/rootfs-${DEBIAN_ARCH}/{llvm-snapshot,winehq}.gpg.key
rm /lava-files/rootfs-${DEBIAN_ARCH}/llvm-snapshot.gpg.key
rm /lava-files/rootfs-${DEBIAN_ARCH}/create-rootfs.sh
@@ -246,8 +224,7 @@ rm /lava-files/rootfs-${DEBIAN_ARCH}/create-rootfs.sh
# Dependencies pulled during the creation of the rootfs may overwrite
# the built libdrm. Hence, we add it after the rootfs has been already
# created.
find /libdrm/ -name lib\*\.so\* \
-exec cp -t /lava-files/rootfs-${DEBIAN_ARCH}/usr/lib/$GCC_ARCH/. {} \;
find /libdrm/ -name lib\*\.so\* | xargs cp -t /lava-files/rootfs-${DEBIAN_ARCH}/usr/lib/$GCC_ARCH/.
mkdir -p /lava-files/rootfs-${DEBIAN_ARCH}/libdrm/
cp -Rp /libdrm/share /lava-files/rootfs-${DEBIAN_ARCH}/libdrm/share
rm -rf /libdrm
@@ -261,13 +238,14 @@ fi
du -ah /lava-files/rootfs-${DEBIAN_ARCH} | sort -h | tail -100
pushd /lava-files/rootfs-${DEBIAN_ARCH}
tar --zstd -cf /lava-files/lava-rootfs.tar.zst .
tar czf /lava-files/lava-rootfs.tgz .
popd
. .gitlab-ci/container/container_post_build.sh
############### Upload the files!
FILES_TO_UPLOAD="lava-rootfs.tar.zst \
ci-fairy minio login --token-file "${CI_JOB_JWT_FILE}"
FILES_TO_UPLOAD="lava-rootfs.tgz \
$KERNEL_IMAGE_NAME"
if [[ -n $DEVICE_TREES ]]; then
@@ -275,9 +253,9 @@ if [[ -n $DEVICE_TREES ]]; then
fi
for f in $FILES_TO_UPLOAD; do
ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" /lava-files/$f \
https://${MINIO_PATH}/$f
ci-fairy minio cp /lava-files/$f \
minio://${MINIO_PATH}/$f
done
touch /lava-files/done
ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" /lava-files/done https://${MINIO_PATH}/done
ci-fairy minio cp /lava-files/done minio://${MINIO_PATH}/done
View File
@@ -1,24 +0,0 @@
#!/bin/bash
export WINEPREFIX="$1"
export WINEDEBUG="-all"
# We don't want crash dialogs
cat >crashdialog.reg <<EOF
Windows Registry Editor Version 5.00
[HKEY_CURRENT_USER\Software\Wine\WineDbg]
"ShowCrashDialog"=dword:00000000
EOF
# Set the wine prefix and disable the crash dialog
wine64 regedit crashdialog.reg
rm crashdialog.reg
# An immediate wine command may fail with: "${WINEPREFIX}: Not a
# valid wine prefix." and that is just spat out because the check for the
# existence of the system.reg file fails. Just giving it a bit more time
# to be created solves the problem ...
while ! test -f "${WINEPREFIX}/system.reg"; do sleep 1; done
View File
@@ -54,10 +54,9 @@ VM_SOCKET=crosvm-${THREAD}.sock
# was terminated due to timeouts. This "vm stop" may fail if the crosvm died
# without cleaning itself up.
if [ -e $VM_SOCKET ]; then
crosvm stop $VM_SOCKET || true
crosvm stop $VM_SOCKET || rm -rf $VM_SOCKET
# Wait for socats from that invocation to drain
sleep 5
rm -rf $VM_SOCKET || true
fi
set_vsock_context || { echo "Could not generate crosvm vsock CID" >&2; exit 1; }
@@ -94,11 +93,10 @@ set +e -x
NIR_DEBUG="novalidate" \
LIBGL_ALWAYS_SOFTWARE=${CROSVM_LIBGL_ALWAYS_SOFTWARE} \
GALLIUM_DRIVER=${CROSVM_GALLIUM_DRIVER} \
VK_ICD_FILENAMES=$CI_PROJECT_DIR/install/share/vulkan/icd.d/${CROSVM_VK_DRIVER}_icd.x86_64.json \
crosvm --no-syslog run \
--gpu "${CROSVM_GPU_ARGS}" -m "${CROSVM_MEMORY:-4096}" -c 2 --disable-sandbox \
crosvm run \
--gpu "${CROSVM_GPU_ARGS}" -m 4096 -c 2 --disable-sandbox \
--shared-dir /:my_root:type=fs:writeback=true:timeout=60:cache=always \
--host-ip "192.168.30.1" --netmask "255.255.255.0" --mac "AA:BB:CC:00:00:12" \
--host_ip "192.168.30.1" --netmask "255.255.255.0" --mac "AA:BB:CC:00:00:12" \
-s $VM_SOCKET \
--cid ${VSOCK_CID} -p "${CROSVM_KERN_ARGS}" \
/lava-files/${KERNEL_IMAGE_NAME:-bzImage} > ${VM_TEMP_DIR}/crosvm 2>&1
View File
@@ -1,27 +1,27 @@
variables:
DEBIAN_X86_BUILD_BASE_IMAGE: "debian/x86_build-base"
DEBIAN_BASE_TAG: "2022-11-15-ci-fairy"
DEBIAN_BASE_TAG: "2022-07-01-bb-llvm13"
DEBIAN_X86_BUILD_IMAGE_PATH: "debian/x86_build"
DEBIAN_BUILD_TAG: "2022-11-15-ci-fairy"
DEBIAN_BUILD_TAG: "2022-07-14-directx-headers"
DEBIAN_X86_BUILD_MINGW_IMAGE_PATH: "debian/x86_build-mingw"
DEBIAN_BUILD_MINGW_TAG: "2022-10-18-dx-headers-va"
DEBIAN_BUILD_MINGW_TAG: "2022-07-14-directx-headers"
DEBIAN_X86_TEST_BASE_IMAGE: "debian/x86_test-base"
DEBIAN_X86_TEST_IMAGE_PATH: "debian/x86_test-gl"
DEBIAN_X86_TEST_GL_TAG: "2022-11-15-ci-fairy"
DEBIAN_X86_TEST_VK_TAG: "2022-11-15-ci-fairy"
DEBIAN_X86_TEST_GL_TAG: "2022-07-06-virgl-update"
DEBIAN_X86_TEST_VK_TAG: "2022-07-18-apitrace-11-1"
FEDORA_X86_BUILD_TAG: "2022-09-22-python3-ply-2"
KERNEL_ROOTFS_TAG: "2022-11-03-piglit_mesa-22.3"
FEDORA_X86_BUILD_TAG: "2022-04-24-spirv-tools-5"
KERNEL_ROOTFS_TAG: "2022-07-06-virgl-update"
WINDOWS_X64_VS_PATH: "windows/x64_vs"
WINDOWS_X64_VS_TAG: "2022-10-20-upgrade-zlib"
WINDOWS_X64_VS_TAG: "2022-06-15-vs-winsdk"
WINDOWS_X64_BUILD_PATH: "windows/x64_build"
WINDOWS_X64_BUILD_TAG: "2022-10-18-wrap-nodownload-va"
WINDOWS_X64_BUILD_TAG: "2022-06-15-vs-winsdk"
WINDOWS_X64_TEST_PATH: "windows/x64_test"
WINDOWS_X64_TEST_TAG: "2022-08-17-bump"
WINDOWS_X64_TEST_TAG: "2022-06-15-vs-winsdk"
View File
@@ -12,9 +12,9 @@
BASE_SYSTEM_MAINLINE_HOST_PATH: "${BASE_SYSTEM_HOST_PREFIX}/${FDO_UPSTREAM_REPO}/${DISTRIBUTION_TAG}/${ARCH}"
BASE_SYSTEM_FORK_HOST_PATH: "${BASE_SYSTEM_HOST_PREFIX}/${CI_PROJECT_PATH}/${DISTRIBUTION_TAG}/${ARCH}"
# per-job build artifacts
BUILD_PATH: "${PIPELINE_ARTIFACTS_BASE}/${CI_PROJECT_NAME}-${ARCH}.tar.zst"
BUILD_PATH: "${PIPELINE_ARTIFACTS_BASE}/${CI_PROJECT_NAME}-${ARCH}.tar.gz"
JOB_ROOTFS_OVERLAY_PATH: "${JOB_ARTIFACTS_BASE}/job-rootfs-overlay.tar.gz"
JOB_RESULTS_PATH: "${JOB_ARTIFACTS_BASE}/results.tar.zst"
JOB_RESULTS_PATH: "${JOB_ARTIFACTS_BASE}/results.tar.gz"
MINIO_RESULTS_UPLOAD: "${JOB_ARTIFACTS_BASE}"
PIGLIT_NO_WINDOW: 1
VISIBILITY_GROUP: "Collabora+fdo"
@@ -27,12 +27,10 @@
- results/
exclude:
- results/*.shader_cache
reports:
junit: results/junit.xml
tags:
- $RUNNER_TAG
after_script:
- wget -q "https://${JOB_RESULTS_PATH}" -O- | tar --zstd -x
- wget -q "https://${JOB_RESULTS_PATH}" -O- | tar -xz
.lava-test:armhf:
variables:
View File
@@ -21,12 +21,10 @@ cp artifacts/ci-common/intel-gpu-freq.sh results/job-rootfs-overlay/
# Prepare env vars for upload.
KERNEL_IMAGE_BASE_URL="https://${BASE_SYSTEM_HOST_PATH}" \
artifacts/ci-common/generate-env.sh > results/job-rootfs-overlay/set-job-env-vars.sh
echo -e "\e[0Ksection_start:$(date +%s):variables[collapsed=true]\r\e[0KVariables passed through:"
cat results/job-rootfs-overlay/set-job-env-vars.sh
echo -e "\e[0Ksection_end:$(date +%s):variables\r\e[0K"
tar zcf job-rootfs-overlay.tar.gz -C results/job-rootfs-overlay/ .
ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" job-rootfs-overlay.tar.gz "https://${JOB_ROOTFS_OVERLAY_PATH}"
ci-fairy minio login --token-file "${CI_JOB_JWT_FILE}"
ci-fairy minio cp job-rootfs-overlay.tar.gz "minio://${JOB_ROOTFS_OVERLAY_PATH}"
touch results/lava.log
tail -f results/lava.log &
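The escape sequences above are GitLab's collapsible log-section markers; the general pattern, usable from any job script, is:

  # generic collapsed section in a GitLab job log
  echo -e "\e[0Ksection_start:$(date +%s):my_id[collapsed=true]\r\e[0KSection header"
  some_command
  echo -e "\e[0Ksection_end:$(date +%s):my_id\r\e[0K"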
View File
@@ -32,9 +32,8 @@ from lava.exceptions import (
MesaCIRetryError,
MesaCITimeoutError,
)
from lava.utils import CONSOLE_LOG
from lava.utils import DEFAULT_GITLAB_SECTION_TIMEOUTS as GL_SECTION_TIMEOUTS
from lava.utils import (
CONSOLE_LOG,
GitlabSection,
LogFollower,
LogSectionType,
@@ -96,8 +95,8 @@ def generate_lava_yaml(args):
'url': '{}/{}'.format(args.kernel_url_prefix, args.kernel_image_name),
},
'nfsrootfs': {
'url': '{}/lava-rootfs.tar.zst'.format(args.rootfs_url_prefix),
'compression': 'zstd',
'url': '{}/lava-rootfs.tgz'.format(args.rootfs_url_prefix),
'compression': 'gz',
}
}
if args.kernel_image_type:
@@ -166,7 +165,7 @@ def generate_lava_yaml(args):
run_steps += [
'mkdir -p {}'.format(args.ci_project_dir),
'wget -S --progress=dot:giga -O- {} | tar --zstd -x -C {}'.format(args.build_url, args.ci_project_dir),
'wget -S --progress=dot:giga -O- {} | tar -xz -C {}'.format(args.build_url, args.ci_project_dir),
'wget -S --progress=dot:giga -O- {} | tar -xz -C /'.format(args.job_rootfs_overlay_url),
# Sleep a bit to give time for bash to dump shell xtrace messages into
@@ -498,13 +497,6 @@ def treat_mesa_job_name(args):
def main(args):
proxy = setup_lava_proxy()
# Overwrite the timeout for the testcases with the value offered by the
# user. The testcase running time should be at least 4 times greater than
# the other sections (boot and setup), so we can safely ignore them.
# If LAVA fails to stop the job at this stage, it will fall back to the
# script section timeout with a reasonable delay.
GL_SECTION_TIMEOUTS[LogSectionType.TEST_CASE] = timedelta(minutes=args.job_timeout)
job_definition = generate_lava_yaml(args)
if args.dump_yaml:

View File

@@ -8,9 +8,4 @@ from .log_follower import (
hide_sensitive_data,
print_log,
)
from .log_section import (
DEFAULT_GITLAB_SECTION_TIMEOUTS,
FALLBACK_GITLAB_SECTION_TIMEOUT,
LogSection,
LogSectionType,
)
from .log_section import LogSection, LogSectionType

View File

@@ -2,7 +2,6 @@ import re
from dataclasses import dataclass
from datetime import timedelta
from enum import Enum, auto
from os import getenv
from typing import Optional, Pattern, Union
from lava.utils.gitlab_section import GitlabSection
@@ -16,34 +15,24 @@ class LogSectionType(Enum):
LAVA_POST_PROCESSING = auto()
# Empirically, successful device boot in LAVA time takes less than 3
# minutes.
# LAVA itself is configured to attempt thrice to boot the device,
# summing up to 9 minutes.
# It is better to retry the boot than cancel the job and re-submit to avoid
# the enqueue delay.
LAVA_BOOT_TIMEOUT = int(getenv("LAVA_BOOT_TIMEOUT", 9))
# Test suite phase is where the initialization happens.
LAVA_TEST_SUITE_TIMEOUT = int(getenv("LAVA_TEST_SUITE_TIMEOUT", 5))
# Test cases may take a long time, this script has no right to interrupt
# them. But if the test case takes almost 1h, it will never succeed due to
# Gitlab job timeout.
LAVA_TEST_CASE_TIMEOUT = int(getenv("JOB_TIMEOUT", 60))
# LAVA post processing may refer to a test suite teardown, or the
# adjustments to start the next test_case
LAVA_POST_PROCESSING_TIMEOUT = int(getenv("LAVA_POST_PROCESSING_TIMEOUT", 5))
FALLBACK_GITLAB_SECTION_TIMEOUT = timedelta(minutes=10)
DEFAULT_GITLAB_SECTION_TIMEOUTS = {
LogSectionType.LAVA_BOOT: timedelta(minutes=LAVA_BOOT_TIMEOUT),
LogSectionType.TEST_SUITE: timedelta(minutes=LAVA_TEST_SUITE_TIMEOUT),
LogSectionType.TEST_CASE: timedelta(minutes=LAVA_TEST_CASE_TIMEOUT),
LogSectionType.LAVA_POST_PROCESSING: timedelta(
minutes=LAVA_POST_PROCESSING_TIMEOUT
),
# Empirically, successful device boot in LAVA time takes less than 3
# minutes.
# LAVA itself is configured to attempt thrice to boot the device,
# summing up to 9 minutes.
# It is better to retry the boot than cancel the job and re-submit to avoid
# the enqueue delay.
LogSectionType.LAVA_BOOT: timedelta(minutes=9),
# Test suite phase is where the initialization happens.
LogSectionType.TEST_SUITE: timedelta(minutes=5),
# Test cases may take a long time, this script has no right to interrupt
# them. But if the test case takes almost 1h, it will never succeed due to
# Gitlab job timeout.
LogSectionType.TEST_CASE: timedelta(minutes=60),
# LAVA post processing may refer to a test suite teardown, or the
# adjustments to start the next test_case
LogSectionType.LAVA_POST_PROCESSING: timedelta(minutes=5),
}
@@ -65,10 +54,9 @@ class LogSection:
if match := re.search(self.regex, lava_log_line["msg"]):
section_id = self.section_id.format(*match.groups())
section_header = self.section_header.format(*match.groups())
timeout = DEFAULT_GITLAB_SECTION_TIMEOUTS[self.section_type]
return GitlabSection(
id=section_id,
header=f"{section_header} - Timeout: {timeout}",
header=section_header,
type=self.section_type,
start_collapsed=self.collapsed,
)
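
The getenv-based defaults removed above exist so a pipeline can tune each log-section timeout without editing the script. A hedged sketch, assuming the job submitter reads these variables when it starts (values in minutes, chosen for illustration):

    # Hedged sketch: per-job overrides for the LAVA section timeouts.
    export LAVA_BOOT_TIMEOUT=12        # default 9: three boot attempts of ~3 min
    export LAVA_TEST_SUITE_TIMEOUT=10  # default 5
    export JOB_TIMEOUT=90              # caps LogSectionType.TEST_CASE (default 60)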

View File

@@ -65,10 +65,9 @@ meson _build --native-file=native.file \
-D prefix=`pwd`/install \
-D libdir=lib \
-D buildtype=${BUILDTYPE:-debug} \
-D build-tests=true \
-D build-tests=false \
-D c_args="$(echo -n $C_ARGS)" \
-D cpp_args="$(echo -n $CPP_ARGS)" \
-D enable-glcpp-tests=false \
-D libunwind=${UNWIND} \
${DRI_LOADERS} \
${GALLIUM_ST} \
@@ -79,15 +78,7 @@ meson _build --native-file=native.file \
${EXTRA_OPTION}
cd _build
meson configure
if command -V mold &> /dev/null ; then
mold --run ninja
else
ninja
fi
ninja
LC_ALL=C.UTF-8 meson test --num-processes ${FDO_CI_CONCURRENT:-4} --print-errorlogs ${MESON_TEST_ARGS}
if command -V mold &> /dev/null ; then
mold --run ninja install
else
ninja install
fi
ninja install
cd ..
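
The removed mold branch above is a generic opt-in pattern: link with a faster linker when it is installed, fall back to plain ninja otherwise. A self-contained sketch of the same idea:

    # Hedged sketch: run ninja under the mold linker only when available.
    if command -V mold >/dev/null 2>&1; then
        mold --run ninja -C _build
    else
        ninja -C _build
    fi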

View File

@@ -1,8 +1,6 @@
#!/bin/sh
if [ "x$STRACEDIR" = "x" ]; then
STRACEDIR=meson-logs/strace/$(for i in $@; do basename -z -- $i; echo -n _; done)
fi
STRACEDIR=meson-logs/strace/$(for i in $@; do basename -z -- $i; echo -n _; done)
mkdir -p $STRACEDIR

View File

@@ -1,89 +0,0 @@
diff --git a/framework/replay/download_utils.py b/framework/replay/download_utils.py
index 36322b000..5c3fe140d 100644
--- a/framework/replay/download_utils.py
+++ b/framework/replay/download_utils.py
@@ -27,20 +27,20 @@ import base64
import hashlib
import hmac
import xml.etree.ElementTree as ET
-
-from typing import Dict
from email.utils import formatdate
from os import path
from time import time
+from typing import Dict
+from urllib.parse import urlparse
+
import requests
from requests.adapters import HTTPAdapter, Retry
-from framework.replay.local_file_adapter import LocalFileAdapter
from requests.utils import requote_uri
from framework import core, exceptions
+from framework.replay.local_file_adapter import LocalFileAdapter
from framework.replay.options import OPTIONS
-
__all__ = ['ensure_file']
minio_credentials = None
@@ -90,7 +90,7 @@ def get_minio_credentials(url):
minio_credentials['SessionToken'])
-def get_authorization_headers(url, resource):
+def get_minio_authorization_headers(url, resource):
minio_key, minio_secret, minio_token = get_minio_credentials(url)
date = formatdate(timeval=None, localtime=False, usegmt=True)
@@ -107,6 +107,17 @@ def get_authorization_headers(url, resource):
return headers
+def get_jwt_authorization_headers(url, resource):
+ date = formatdate(timeval=None, localtime=False, usegmt=True)
+ jwt = OPTIONS.download['jwt']
+ host = urlparse(url).netloc
+
+ headers = {'Host': host,
+ 'Date': date,
+ 'Authorization': 'Bearer %s' % (jwt)}
+ return headers
+
+
def download(url: str, file_path: str, headers: Dict[str, str], attempts: int = 2) -> None:
"""Downloads a URL content into a file
@@ -178,7 +189,9 @@ def ensure_file(file_path):
assert OPTIONS.download['minio_bucket']
assert OPTIONS.download['role_session_name']
assert OPTIONS.download['jwt']
- headers = get_authorization_headers(url, file_path)
+ headers = get_minio_authorization_headers(url, file_path)
+ elif OPTIONS.download['jwt']:
+ headers = get_jwt_authorization_headers(url, file_path)
else:
headers = None
diff --git a/unittests/framework/replay/test_download_utils.py b/unittests/framework/replay/test_download_utils.py
index 1e78b26e7..749c5d835 100644
--- a/unittests/framework/replay/test_download_utils.py
+++ b/unittests/framework/replay/test_download_utils.py
@@ -195,3 +195,17 @@ class TestDownloadUtils(object):
get_request = requests_mock.request_history[1]
assert(get_request.method == 'GET')
assert(requests_mock.request_history[1].headers['Authorization'].startswith('AWS Key'))
+
+ def test_jwt_authorization(self, requests_mock):
+ """download_utils.ensure_file: Check we send the authentication headers to the server"""
+ # reset minio_host from previous tests
+ OPTIONS.download['minio_host'] = ''
+ OPTIONS.download['jwt'] = 'jwt'
+
+ assert not self.trace_file.check()
+ download_utils.ensure_file(self.trace_path)
+ TestDownloadUtils.check_same_file(self.trace_file, "remote")
+
+ get_request = requests_mock.request_history[0]
+ assert(get_request.method == 'GET')
+ assert(requests_mock.request_history[0].headers['Authorization'].startswith('Bearer'))
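
The get_jwt_authorization_headers helper in the patch above sends a plain Bearer token, unlike the signed-MinIO path. A hedged shell equivalent of the request it builds (host and object path are placeholders):

    # Hedged sketch: the Bearer-authenticated GET the JWT code path performs.
    JWT="$(cat "${CI_JOB_JWT_FILE}")"
    curl --fail -H "Authorization: Bearer ${JWT}" \
         -o trace.rdc "https://host.example/mesa-tracie-public/some/trace.rdc"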

View File

@@ -3,41 +3,11 @@
set -ex
INSTALL=$(realpath -s "$PWD"/install)
MINIO_ARGS="--token-file ${CI_JOB_JWT_FILE}"
MINIO_ARGS="--credentials=/tmp/.minio_credentials"
RESULTS=$(realpath -s "$PWD"/results)
mkdir -p "$RESULTS"
if [ "$PIGLIT_REPLAY_SUBCOMMAND" = "profile" ]; then
# workaround for older Debian Bullseye libyaml 0.2.2
sed -i "/^%YAML 1\.2$/d" "$PIGLIT_REPLAY_DESCRIPTION_FILE"
yq -i -Y '. | del(.traces[][] | select(.label[0,1,2,3,4,5,6,7,8,9] == "no-perf"))' \
"$PIGLIT_REPLAY_DESCRIPTION_FILE" # label positions are a bit hack
fi
# WINE
case "$PIGLIT_REPLAY_DEVICE_NAME" in
vk-*)
export WINEPREFIX="/dxvk-wine64"
;;
*)
export WINEPREFIX="/generic-wine64"
;;
esac
PATH="/opt/wine-stable/bin/:$PATH" # WineHQ path
# Avoid asking about Gecko or Mono installation
export WINEDLLOVERRIDES="mscoree=d;mshtml=d"
# Set environment for DXVK.
export DXVK_LOG_LEVEL="info"
export DXVK_LOG="$RESULTS/dxvk"
[ -d "$DXVK_LOG" ] || mkdir -pv "$DXVK_LOG"
export DXVK_STATE_CACHE=0
# Set up the driver environment.
# Modifying LD_LIBRARY_PATH directly here may cause problems when
# using a command wrapper. Hence, we will just set it when running the
@@ -67,10 +37,6 @@ quiet() {
# Set environment for apitrace executable.
export PATH="/apitrace/build:$PATH"
export PIGLIT_REPLAY_WINE_BINARY=wine64
export PIGLIT_REPLAY_WINE_APITRACE_BINARY="/apitrace-msvc-win64/bin/apitrace.exe"
export PIGLIT_REPLAY_WINE_D3DRETRACE_BINARY="/apitrace-msvc-win64/bin/d3dretrace.exe"
# Our rootfs may not have "less", which apitrace uses during
# apitrace dump
export PAGER=cat
@@ -137,8 +103,8 @@ replay_minio_upload_images() {
__DESTINATION_FILE_PATH="$__MINIO_TRACES_PREFIX/${line##*-}"
fi
ci-fairy s3cp $MINIO_ARGS "$RESULTS/$__PREFIX/$line" \
"https://${__MINIO_PATH}/${__DESTINATION_FILE_PATH}"
ci-fairy minio cp $MINIO_ARGS "$RESULTS/$__PREFIX/$line" \
"minio://${__MINIO_PATH}/${__DESTINATION_FILE_PATH}"
done
}
@@ -173,6 +139,8 @@ if [ "$RUN_CMD_WRAPPER" ]; then
RUN_CMD="set +e; $RUN_CMD_WRAPPER "$(/usr/bin/printf "%q" "$RUN_CMD")"; set -e"
fi
ci-fairy minio login $MINIO_ARGS --token-file "${CI_JOB_JWT_FILE}"
# The replayer doesn't do any size or checksum verification for the traces in
# the replayer db, so if we had to restart the system due to intermittent device
# errors (or tried to cache replayer-db between runs, which would be nice to
@@ -203,7 +171,7 @@ __PREFIX="trace/$PIGLIT_REPLAY_DEVICE_NAME"
__MINIO_PATH="$PIGLIT_REPLAY_ARTIFACTS_BASE_URL"
__MINIO_TRACES_PREFIX="traces"
if [ "$PIGLIT_REPLAY_SUBCOMMAND" != "profile" ]; then
if [ "x$PIGLIT_REPLAY_SUBCOMMAND" != "xprofile" ]; then
quiet replay_minio_upload_images
fi

View File

@@ -52,7 +52,8 @@ cp -Rp .gitlab-ci/b2c artifacts/
if [ -n "$MINIO_ARTIFACT_NAME" ]; then
# Pass needed files to the test stage
MINIO_ARTIFACT_NAME="$MINIO_ARTIFACT_NAME.tar.zst"
zstd artifacts/install.tar -o ${MINIO_ARTIFACT_NAME}
ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" ${MINIO_ARTIFACT_NAME} https://${PIPELINE_ARTIFACTS_BASE}/${MINIO_ARTIFACT_NAME}
MINIO_ARTIFACT_NAME="$MINIO_ARTIFACT_NAME.tar.gz"
gzip -c artifacts/install.tar > ${MINIO_ARTIFACT_NAME}
ci-fairy minio login --token-file "${CI_JOB_JWT_FILE}"
ci-fairy minio cp ${MINIO_ARTIFACT_NAME} minio://${PIPELINE_ARTIFACTS_BASE}/${MINIO_ARTIFACT_NAME}
fi
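
Both variants above share the same shape: compress install.tar, upload it to the per-pipeline bucket, and stream-extract it on the test machine. A hedged sketch of the zstd pair (the URL is a placeholder):

    # Hedged sketch: zstd-compress an existing tar, later stream-extract it.
    zstd artifacts/install.tar -o install.tar.zst
    wget -q -O- "https://host.example/artifacts/install.tar.zst" | tar --zstd -x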

View File

@@ -1,23 +0,0 @@
#!/usr/bin/env bash
CHECKPATH=".gitlab-ci/container" # TODO: expand to cover whole .gitlab-ci/
is_bash() {
[[ $1 == *.sh ]] && return 0
[[ $1 == */bash-completion/* ]] && return 0
[[ $(file -b --mime-type "$1") == text/x-shellscript ]] && return 0
return 1
}
while IFS= read -r -d $'' file; do
if is_bash "$file" ; then
shellcheck -x -W0 -s bash "$file"
rc=$?
if [ "${rc}" -eq 0 ]
then
continue
else
exit 1
fi
fi
done < <(find $CHECKPATH -type f \! -path "./.git/*" -print0)
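
The deleted run-shellcheck loop above reduces to one shellcheck call per candidate file; -x follows source'd files and -s bash pins the dialect. A hedged single-file equivalent (the path is illustrative):

    # Hedged sketch: the per-file check the loop above performs.
    shellcheck -x -W0 -s bash .gitlab-ci/container/some-script.sh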

View File

@@ -1,5 +0,0 @@
#!/usr/bin/env bash
set -e
# Run yamllint against all traces files.
find . -name '*traces*yml' -print0 | xargs -0 yamllint -d "{rules: {line-length: {max: 150}}}"

View File

@@ -1,4 +1,4 @@
#!/bin/bash
#!/bin/sh
#
# Copyright (C) 2022 Collabora Limited
# Author: Guilherme Gallo <guilherme.gallo@collabora.com>
@@ -22,165 +22,6 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Args:
# $1: section id
# $2: section header
gitlab_section_start() {
echo -e "\e[0Ksection_start:$(date +%s):$1[collapsed=${GL_COLLAPSED:-false}]\r\e[0K\e[32;1m$2\e[0m"
}
# Args:
# $1: section id
gitlab_section_end() {
echo -e "\e[0Ksection_end:$(date +%s):$1\r\e[0K"
}
# sponge allows piping to files that are being used as input.
# E.g.: sort file.txt | sponge file.txt
# In order to avoid installing moreutils just to have the sponge binary, we can
# use a bash function for it
# Source https://unix.stackexchange.com/a/561346/310927
sponge () (
set +x
append=false
while getopts 'a' opt; do
case $opt in
a) append=true ;;
*) echo error; exit 1
esac
done
shift "$(( OPTIND - 1 ))"
outfile=$1
tmpfile=$(mktemp "$(dirname "$outfile")/tmp-sponge.XXXXXXXX") &&
cat >"$tmpfile" &&
if "$append"; then
cat "$tmpfile" >>"$outfile"
else
if [ -f "$outfile" ]; then
chmod --reference="$outfile" "$tmpfile"
fi
if [ -f "$outfile" ]; then
mv "$tmpfile" "$outfile"
elif [ -n "$outfile" ] && [ ! -e "$outfile" ]; then
cat "$tmpfile" >"$outfile"
else
cat "$tmpfile"
fi
fi &&
rm -f "$tmpfile"
)
remove_comments_from_files() (
INPUT_FILES="$*"
for INPUT_FILE in ${INPUT_FILES}
do
[ -f "${INPUT_FILE}" ] || continue
sed -i '/#/d' "${INPUT_FILE}"
sed -i '/^\s*$/d' "${INPUT_FILE}"
done
)
subtract_test_lists() (
MINUEND=$1
sort "${MINUEND}" | sponge "${MINUEND}"
shift
for SUBTRAHEND in "$@"
do
sort "${SUBTRAHEND}" | sponge "${SUBTRAHEND}"
join -v 1 "${MINUEND}" "${SUBTRAHEND}" |
sponge "${MINUEND}"
done
)
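
subtract_test_lists relies on join -v 1 over files it first sorts in place, which amounts to set subtraction on test names. A tiny worked example using the helpers defined above (file names and cases are illustrative):

    # Hedged sketch: remove known-crashing cases from a generated caselist.
    printf 'case-a\ncase-b\ncase-c\n' > all.txt
    printf 'case-b\n' > crashes.txt
    subtract_test_lists all.txt crashes.txt
    cat all.txt   # prints case-a and case-c; case-b was subtracted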
merge_rendertests_files() {
BASE_FILE=$1
shift
FILES="$*"
# shellcheck disable=SC2086
cat $FILES "$BASE_FILE" |
sort --unique --stable --field-separator=, --key=1,1 |
sponge "$BASE_FILE"
}
assure_files() (
for CASELIST_FILE in $*
do
>&2 echo "Looking for ${CASELIST_FILE}..."
[ -f ${CASELIST_FILE} ] || (
>&2 echo "Not found. Creating empty."
touch ${CASELIST_FILE}
)
done
)
# Generate rendertests from scratch, customizing with fails/flakes/crashes files
generate_rendertests() (
set -e
GENERATED_FILE=$(mktemp)
TESTS_FILE_PREFIX="${SKQP_FILE_PREFIX}-${SKQP_BACKEND}_rendertests"
FLAKES_FILE="${TESTS_FILE_PREFIX}-flakes.txt"
FAILS_FILE="${TESTS_FILE_PREFIX}-fails.txt"
CRASHES_FILE="${TESTS_FILE_PREFIX}-crashes.txt"
RENDER_TESTS_FILE="${TESTS_FILE_PREFIX}.txt"
# Default to an empty known flakes file if it doesn't exist.
assure_files ${FLAKES_FILE} ${FAILS_FILE} ${CRASHES_FILE}
# skqp does not support comments in rendertests.txt file
remove_comments_from_files "${FLAKES_FILE}" "${FAILS_FILE}" "${CRASHES_FILE}"
# create an exhaustive rendertest list
"${SKQP_BIN_DIR}"/list_gms | sort > "$GENERATED_FILE"
# Remove undesirable tests from the list
subtract_test_lists "${GENERATED_FILE}" "${CRASHES_FILE}" "${FLAKES_FILE}"
# Add ",0" to each test to set the expected diff sum to zero
sed -i 's/$/,0/g' "$GENERATED_FILE"
merge_rendertests_files "$GENERATED_FILE" "${FAILS_FILE}"
mv "${GENERATED_FILE}" "${RENDER_TESTS_FILE}"
echo "${RENDER_TESTS_FILE}"
)
generate_unittests() (
set -e
GENERATED_FILE=$(mktemp)
TESTS_FILE_PREFIX="${SKQP_FILE_PREFIX}_unittests"
FLAKES_FILE="${TESTS_FILE_PREFIX}-flakes.txt"
FAILS_FILE="${TESTS_FILE_PREFIX}-fails.txt"
CRASHES_FILE="${TESTS_FILE_PREFIX}-crashes.txt"
UNIT_TESTS_FILE="${TESTS_FILE_PREFIX}.txt"
# Default to an empty known flakes file if it doesn't exist.
assure_files ${FLAKES_FILE} ${FAILS_FILE} ${CRASHES_FILE}
# Remove unitTest_ prefix
for UT_FILE in "${FAILS_FILE}" "${CRASHES_FILE}" "${FLAKES_FILE}"; do
sed -i 's/^unitTest_//g' "${UT_FILE}"
done
# create an exhaustive unittests list
"${SKQP_BIN_DIR}"/list_gpu_unit_tests > "${GENERATED_FILE}"
# Remove undesirable tests from the list
subtract_test_lists "${GENERATED_FILE}" "${CRASHES_FILE}" "${FLAKES_FILE}" "${FAILS_FILE}"
remove_comments_from_files "${GENERATED_FILE}"
mv "${GENERATED_FILE}" "${UNIT_TESTS_FILE}"
echo "${UNIT_TESTS_FILE}"
)
run_all_tests() {
rm -f "${SKQP_ASSETS_DIR}"/skqp/*.txt
}
copy_tests_files() (
# Copy either unit test or render test files from a specific driver given by
@@ -192,11 +33,9 @@ copy_tests_files() (
if echo "${SKQP_BACKEND}" | grep -qE 'vk|gl(es)?'
then
echo "Generating rendertests.txt file"
GENERATED_RENDERTESTS=$(generate_rendertests)
cp "${GENERATED_RENDERTESTS}" "${SKQP_ASSETS_DIR}"/skqp/rendertests.txt
mkdir -p "${SKQP_RESULTS_DIR}/${SKQP_BACKEND}"
cp "${GENERATED_RENDERTESTS}" "${SKQP_RESULTS_DIR}/${SKQP_BACKEND}/generated_rendertests.txt"
SKQP_RENDER_TESTS_FILE="${SKQP_FILE_PREFIX}-${SKQP_BACKEND}_rendertests.txt"
[ -f "${SKQP_RENDER_TESTS_FILE}" ] || return 1
cp "${SKQP_RENDER_TESTS_FILE}" "${SKQP_ASSETS_DIR}"/skqp/rendertests.txt
return 0
fi
@@ -204,37 +43,20 @@ copy_tests_files() (
# that is why it needs to be a special case.
if echo "${SKQP_BACKEND}" | grep -qE "unitTest"
then
echo "Generating unittests.txt file"
GENERATED_UNITTESTS=$(generate_unittests)
cp "${GENERATED_UNITTESTS}" "${SKQP_ASSETS_DIR}"/skqp/unittests.txt
mkdir -p "${SKQP_RESULTS_DIR}/${SKQP_BACKEND}"
cp "${GENERATED_UNITTESTS}" "${SKQP_RESULTS_DIR}/${SKQP_BACKEND}/generated_unittests.txt"
SKQP_UNIT_TESTS_FILE="${SKQP_FILE_PREFIX}_unittests.txt"
[ -f "${SKQP_UNIT_TESTS_FILE}" ] || return 1
cp "${SKQP_UNIT_TESTS_FILE}" "${SKQP_ASSETS_DIR}"/skqp/unittests.txt
fi
)
resolve_tests_files() {
if [ -n "${RUN_ALL_TESTS}" ]
then
run_all_tests
return
fi
SKQP_BACKEND=${1}
if ! copy_tests_files "${SKQP_BACKEND}"
then
echo "No override test file found for ${SKQP_BACKEND}. Using the default one."
fi
}
test_vk_backend() {
if echo "${SKQP_BACKENDS:?}" | grep -qE 'vk'
if echo "${SKQP_BACKENDS}" | grep -qE 'vk'
then
if [ -n "$VK_DRIVER" ]; then
return 0
fi
echo "VK_DRIVER environment variable is missing."
# shellcheck disable=SC2012
VK_DRIVERS=$(ls "$INSTALL"/share/vulkan/icd.d/ | cut -f 1 -d '_')
if [ -n "${VK_DRIVERS}" ]
then
@@ -256,74 +78,11 @@ setup_backends() {
fi
}
show_reports() (
set +xe
# Unit tests produce empty HTML reports, guide the user to check the TXT file.
if echo "${SKQP_BACKENDS}" | grep -qE "unitTest"
then
# Remove the empty HTML report to avoid confusion
rm -f "${SKQP_RESULTS_DIR}"/unitTest/report.html
echo "See skqp unit test results at:"
echo "https://$CI_PROJECT_ROOT_NAMESPACE.pages.freedesktop.org/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts${SKQP_RESULTS_DIR}/unitTest/unit_tests.txt"
fi
REPORT_FILES=$(mktemp)
find "${SKQP_RESULTS_DIR}"/**/report.html -type f > "${REPORT_FILES}"
while read -r REPORT
do
# shellcheck disable=SC2001
BACKEND_NAME=$(echo "${REPORT}" | sed 's@.*/\([^/]*\)/report.html@\1@')
echo "See skqp ${BACKEND_NAME} render tests report at:"
echo "https://$CI_PROJECT_ROOT_NAMESPACE.pages.freedesktop.org/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts${REPORT}"
done < "${REPORT_FILES}"
# If there is no report available, tell the user that something is wrong.
if [ ! -s "${REPORT_FILES}" ]
then
echo "No skqp report available. Probably some fatal error has occured during the skqp execution."
fi
)
usage() {
cat <<EOF
Usage: $(basename "$0") [-a]
Arguments:
-a: Run all unit tests and render tests, useful when introducing a new driver to skqp.
EOF
}
parse_args() {
while getopts ':ah' opt; do
case "$opt" in
a)
echo "Running all skqp tests"
export RUN_ALL_TESTS=1
shift
;;
h)
usage
exit 0
;;
?)
echo "Invalid command option."
usage
exit 1
;;
esac
done
}
set -e
parse_args "${@}"
set -ex
# Needed so configuration files can contain paths to files in /install
INSTALL="$CI_PROJECT_DIR"/install
ln -sf "$CI_PROJECT_DIR"/install /install
INSTALL=${PWD}/install
if [ -z "$GPU_VERSION" ]; then
echo 'GPU_VERSION must be set to something like "llvmpipe" or
@@ -335,37 +94,60 @@ fi
LD_LIBRARY_PATH=$INSTALL:$LD_LIBRARY_PATH
setup_backends
SKQP_BIN_DIR=${SKQP_BIN_DIR:-/skqp}
SKQP_ASSETS_DIR="${SKQP_BIN_DIR}"/assets
SKQP_RESULTS_DIR="${SKQP_RESULTS_DIR:-${PWD}/results}"
SKQP_ASSETS_DIR=/skqp/assets
SKQP_RESULTS_DIR="${SKQP_RESULTS_DIR:-$PWD/results}"
mkdir -p "${SKQP_ASSETS_DIR}"/skqp
# Show the reports on exit, even when a test crashes
trap show_reports INT TERM EXIT
SKQP_EXITCODE=0
for SKQP_BACKEND in ${SKQP_BACKENDS}
do
resolve_tests_files "${SKQP_BACKEND}"
set -e
if ! copy_tests_files "${SKQP_BACKEND}"
then
echo "No override test file found for ${SKQP_BACKEND}. Using the default one."
fi
set +e
SKQP_BACKEND_RESULTS_DIR="${SKQP_RESULTS_DIR}"/"${SKQP_BACKEND}"
mkdir -p "${SKQP_BACKEND_RESULTS_DIR}"
BACKEND_EXITCODE=0
GL_COLLAPSED=true gitlab_section_start "skqp_${SKQP_BACKEND}" "skqp logs for ${SKQP_BACKEND}"
"${SKQP_BIN_DIR}"/skqp "${SKQP_ASSETS_DIR}" "${SKQP_BACKEND_RESULTS_DIR}" "${SKQP_BACKEND}_" ||
BACKEND_EXITCODE=$?
gitlab_section_end "skqp_${SKQP_BACKEND}"
/skqp/skqp "${SKQP_ASSETS_DIR}" "${SKQP_BACKEND_RESULTS_DIR}" "${SKQP_BACKEND}_"
BACKEND_EXITCODE=$?
if [ ! $BACKEND_EXITCODE -eq 0 ]
then
echo "skqp failed on ${SKQP_BACKEND} tests with exit code: ${BACKEND_EXITCODE}."
else
echo "skqp succeeded on ${SKQP_BACKEND}."
echo "skqp failed on ${SKQP_BACKEND} tests with ${BACKEND_EXITCODE} exit code."
fi
# Propagate error codes to leverage the final job result
SKQP_EXITCODE=$(( SKQP_EXITCODE | BACKEND_EXITCODE ))
done
set +x
# Unit tests produce empty HTML reports, guide the user to check the TXT file.
if echo "${SKQP_BACKENDS}" | grep -qE "unitTest"
then
# Remove the empty HTML report to avoid confusion
rm -f "${SKQP_RESULTS_DIR}"/unitTest/report.html
echo "See skqp unit test results at:"
echo "https://$CI_PROJECT_ROOT_NAMESPACE.pages.freedesktop.org/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/${SKQP_RESULTS_DIR}/unitTest/unit_tests.txt"
fi
REPORT_FILES=$(mktemp)
find "${SKQP_RESULTS_DIR}"/**/report.html -type f > "${REPORT_FILES}"
while read -r REPORT
do
BACKEND_NAME=$(echo "${REPORT}" | sed 's@.*/\([^/]*\)/report.html@\1@')
echo "See skqp ${BACKEND_NAME} render tests report at:"
echo "https://$CI_PROJECT_ROOT_NAMESPACE.pages.freedesktop.org/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/${REPORT}"
done < "${REPORT_FILES}"
# If there is no report available, tell the user that something is wrong.
if [ ! -s "${REPORT_FILES}" ]
then
echo "No skqp report available. Probably some fatal error has occured during the skqp execution."
fi
exit $SKQP_EXITCODE
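
The bitwise OR into SKQP_EXITCODE above is what lets every backend run while still failing the job if any one of them failed. The same accumulate-then-exit pattern in isolation (run_backend is a hypothetical stand-in):

    # Hedged sketch: keep looping over steps, remember any failure for the end.
    rc=0
    for backend in gl vk unitTest; do
        run_backend "$backend" || rc=$(( rc | $? ))
    done
    exit $rc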

View File

@@ -114,7 +114,7 @@
stage: software-renderer
rules:
- !reference [.no_scheduled_pipelines-rules, rules]
- changes: &llvmpipe_cl_files
- changes:
- .gitlab-ci.yml
- .gitlab-ci/**/*
- meson.build
@@ -130,21 +130,10 @@
- changes:
*llvmpipe_file_list
when: on_success
.llvmpipe-clover-rules:
rules:
- !reference [.llvmpipe-cl-rules, rules]
- changes:
- changes: &clover_file_list
- src/gallium/frontends/clover/**/*
when: on_success
.llvmpipe-rusticl-rules:
rules:
- !reference [.llvmpipe-cl-rules, rules]
- changes:
- src/gallium/frontends/rusticl/**/*
when: on_success
.collabora-farm-rules:
rules:
- if: '$COLLABORA_FARM == "offline" && $RUNNER_TAG =~ /^mesa-ci-x86-64-lava-/'
@@ -155,11 +144,6 @@
- if: '$IGALIA_FARM == "offline"'
when: never
.anholt-farm-rules:
rules:
- if: '$ANHOLT_FARM == "offline"'
when: never
# Skips freedreno jobs if either of the farms we use are offline.
.freedreno-farm-rules:
rules:
@@ -184,8 +168,8 @@
.freedreno-rules:
stage: freedreno
rules:
- !reference [.freedreno-common-rules, rules]
- !reference [.gl-rules, rules]
- !reference [.freedreno-common-rules, rules]
- changes: &freedreno_gl_file_list
- src/freedreno/ir2/**/*
- src/gallium/drivers/freedreno/**/*
@@ -195,8 +179,8 @@
.turnip-rules:
stage: freedreno
rules:
- !reference [.freedreno-common-rules, rules]
- !reference [.vulkan-rules, rules]
- !reference [.freedreno-common-rules, rules]
- changes:
- src/freedreno/vulkan/**/*
when: on_success
@@ -211,7 +195,7 @@
stage: freedreno
rules:
# If the triggerer has access to the restricted traces and if it is pre-merge
- if: '($GITLAB_USER_LOGIN !~ "/^(robclark|anholt|flto|cwabbott0|Danil|tomeu|okias|gallo)$/") &&
- if: '($GITLAB_USER_LOGIN !~ "/^(robclark|anholt|flto|cwabbott0|Danil|tomeu|okias)$/") &&
($GITLAB_USER_LOGIN != "marge-bot" || $CI_COMMIT_BRANCH)'
when: never
- !reference [.freedreno-rules, rules]
@@ -250,7 +234,6 @@
.nouveau-rules:
stage: nouveau
rules:
- !reference [.anholt-farm-rules, rules]
- !reference [.gl-rules, rules]
- changes:
- src/nouveau/**/*
@@ -298,27 +281,13 @@
- src/panfrost/bifrost/**/*
when: on_success
.broadcom-common-rules:
rules:
- changes:
- src/broadcom/meson.build
- src/broadcom/ci/**/*
- src/broadcom/cle/**/*
- src/broadcom/clif/**/*
- src/broadcom/common/**/*
- src/broadcom/compiler/**/*
- src/broadcom/drm-shim/**/*
- src/broadcom/qpu/**/*
- src/broadcom/simulator/**/*
when: on_success
.vc4-rules:
stage: broadcom
rules:
- !reference [.igalia-farm-rules, rules]
- !reference [.gl-rules, rules]
- !reference [.broadcom-common-rules, rules]
- changes:
- src/broadcom/**/*
- src/gallium/drivers/vc4/**/*
- src/gallium/winsys/kmsro/**/*
- src/gallium/winsys/vc4/**/*
@@ -329,8 +298,8 @@
rules:
- !reference [.igalia-farm-rules, rules]
- !reference [.gl-rules, rules]
- !reference [.broadcom-common-rules, rules]
- changes:
- src/broadcom/**/*
- src/gallium/drivers/v3d/**/*
- src/gallium/winsys/kmsro/**/*
- src/gallium/winsys/v3d/**/*
@@ -394,15 +363,6 @@
*virgl_file_list
when: manual
.venus-rules:
stage: layered-backends
rules:
- !reference [.lavapipe-rules, rules]
- changes: &venus_file_list
- src/virtio/**/*
when: on_success
- when: never
.radeonsi-rules:
stage: amd
rules:
@@ -414,24 +374,11 @@
- src/gallium/winsys/amdgpu/**/*
- src/amd/*
- src/amd/addrlib/**/*
- src/amd/ci/*
- src/amd/common/**/*
- src/amd/llvm/**/*
- src/amd/registers/**/*
when: on_success
.radeonsi+radv-rules:
stage: amd
rules:
- !reference [.collabora-farm-rules, rules]
- !reference [.gl-rules, rules]
- changes:
*radeonsi_file_list
when: on_success
- changes:
*radv_file_list
when: on_success
.radeonsi-vaapi-rules:
stage: amd
rules:
@@ -521,20 +468,20 @@
.zink-lvp-rules:
stage: layered-backends
rules:
- !reference [.lavapipe-rules, rules]
- !reference [.zink-common-rules, rules]
- !reference [.lavapipe-rules, rules]
.zink-anv-rules:
stage: layered-backends
rules:
- !reference [.anv-rules, rules]
- !reference [.zink-common-rules, rules]
- !reference [.anv-rules, rules]
.zink-turnip-rules:
stage: layered-backends
rules:
- !reference [.turnip-rules, rules]
- !reference [.zink-common-rules, rules]
- !reference [.turnip-rules, rules]
# Unfortunately YAML doesn't let us concatenate arrays, so we have to do the
# rules duplication manually
@@ -559,8 +506,6 @@
when: on_success
- changes:
- src/microsoft/**/*
- src/gallium/frontends/va/*
- src/gallium/targets/va/*
when: on_success
- changes:
*radv_file_list
@@ -655,12 +600,3 @@
- changes:
*lavapipe_file_list
when: on_success
# Rules for linters
.lint-rustfmt-rules:
rules:
- !reference [.no_scheduled_pipelines-rules, rules]
- !reference [.core-rules, rules]
- changes:
- src/**/*.rs
when: on_success

View File

@@ -17,18 +17,6 @@
paths:
- results/
rustfmt:
# Cancel job if a newer commit is pushed to the same branch
interruptible: true
stage: lint
extends:
- .use-debian/x86_build
- .lint-rustfmt-rules
variables:
GIT_STRATEGY: fetch
script:
- git ls-files */{lib,app}.rs | xargs rustfmt --check
.test-gl:
extends:
- .test
@@ -51,6 +39,7 @@ rustfmt:
- .use-debian/x86_test-gl
needs:
- debian/x86_test-gl
- debian-clover-testing
.vkd3d-proton-test:
artifacts:
@@ -77,21 +66,21 @@ rustfmt:
.piglit-traces-test:
extends:
- .piglit-test
cache:
key: ${CI_JOB_NAME}
paths:
- replayer-db/
artifacts:
when: on_failure
name: "mesa_${CI_JOB_NAME}"
reports:
junit: results/junit.xml
paths:
- results/
exclude:
- results/*.shader_cache
- results/summary/
- results/*.txt
variables:
PIGLIT_REPLAY_EXTRA_ARGS: --keep-image --db-path ${CI_PROJECT_DIR}/replayer-db/ --minio_bucket=mesa-tracie-public --jwt-file=${CI_JOB_JWT_FILE}
PIGLIT_REPLAY_EXTRA_ARGS: --keep-image --db-path ${CI_PROJECT_DIR}/replayer-db/ --minio_host=minio-packet.freedesktop.org --minio_bucket=mesa-tracie-public --role-session-name=${CI_PROJECT_PATH}:${CI_JOB_ID} --jwt-file=${CI_JOB_JWT_FILE}
script:
- echo -e "\e[0Ksection_start:$(date +%s):variables[collapsed=true]\r\e[0KVariables passed through:"
- install/common/generate-env.sh
- echo -e "\e[0Ksection_end:$(date +%s):variables\r\e[0K"
- install/piglit/piglit-traces.sh
.deqp-test:
@@ -135,7 +124,7 @@ rustfmt:
# improve it even more (see https://docs.mesa3d.org/ci/bare-metal.html for
# setup).
- echo -e "\e[0Ksection_start:$(date +%s):artifacts_download[collapsed=true]\r\e[0KDownloading artifacts from minio"
- wget ${FDO_HTTP_CACHE_URI:-}https://${PIPELINE_ARTIFACTS_BASE}/${MINIO_ARTIFACT_NAME}.tar.zst -S --progress=dot:giga -O- | tar --zstd -x
- wget ${FDO_HTTP_CACHE_URI:-}https://${PIPELINE_ARTIFACTS_BASE}/${MINIO_ARTIFACT_NAME}.tar.gz -S --progress=dot:giga -O- | tar -xz
- echo -e "\e[0Ksection_end:$(date +%s):artifacts_download\r\e[0K"
artifacts:
when: always
@@ -192,6 +181,11 @@ rustfmt:
HWCI_TEST_SCRIPT: "/install/deqp-runner.sh"
FDO_CI_CONCURRENT: 0 # Default to number of CPUs
.baremetal-skqp-test:
variables:
HWCI_START_XORG: 1
HWCI_TEST_SCRIPT: "/install/skqp-runner.sh"
# For Valve's bare-metal testing farm jobs.
.b2c-test:
# It would be nice to use ci-templates within Mesa CI for this job's
@@ -211,7 +205,7 @@ rustfmt:
GIT_STRATEGY: none
# boot2container initrd configuration parameters.
B2C_KERNEL_URL: 'https://gitlab.freedesktop.org/mupuf/valve-infra/-/package_files/144/download' # 5.17.1
B2C_INITRAMFS_URL: 'https://gitlab.freedesktop.org/mupuf/boot2container/-/releases/v0.9.8/downloads/initramfs.linux_amd64.cpio.xz'
B2C_INITRAMFS_URL: 'https://gitlab.freedesktop.org/mupuf/boot2container/-/releases/v0.9.6/downloads/initramfs.linux_amd64.cpio.xz'
B2C_JOB_SUCCESS_REGEX: '\[.*\]: Execution is over, pipeline status: 0\r$'
B2C_JOB_WARN_REGEX: '\*ERROR\* ring .* timeout, but soft recovered'
B2C_LOG_LEVEL: 6

View File

@@ -48,7 +48,7 @@ sleep 1
# when asked to load PE executables.
# TODO: Have boot2container mount this filesystem for all jobs?
mount -t binfmt_misc none /proc/sys/fs/binfmt_misc
echo ':DOSWin:M::MZ::/usr/bin/wine64:' > /proc/sys/fs/binfmt_misc/register
echo ':DOSWin:M::MZ::/usr/bin/wine:' > /proc/sys/fs/binfmt_misc/register
# Set environment for DXVK.
export DXVK_LOG_LEVEL="info"
@@ -68,7 +68,7 @@ if [ ${TEST_START_XORG:-0} -eq 1 ]; then
export DISPLAY=:0
fi
wine64 --version
wine --version
SANITY_MESA_VERSION_CMD="$SANITY_MESA_VERSION_CMD | tee /tmp/version.txt | grep \"Mesa $MESA_VERSION\(\s\|$\)\""
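
The binfmt_misc line above registers Wine as the interpreter for anything whose file starts with the PE magic "MZ"; the register string fields are :name:type:offset:magic:mask:interpreter:flags. A hedged sketch for inspecting and undoing that registration:

    # Hedged sketch: inspect, then unregister, the DOSWin handler set up above.
    cat /proc/sys/fs/binfmt_misc/DOSWin        # shows interpreter, magic, flags
    echo -1 > /proc/sys/fs/binfmt_misc/DOSWin  # removes this handler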

View File

@@ -38,8 +38,8 @@ Push-Location $builddir
meson `
--default-library=shared `
-Dzlib:default_library=static `
--buildtype=release `
--wrap-mode=nodownload `
-Db_ndebug=false `
-Db_vscrt=mt `
--cmake-prefix-path="$depsInstallPath" `
@@ -49,22 +49,18 @@ meson `
-Dshared-llvm=disabled `
-Dvulkan-drivers="swrast,amd,microsoft-experimental" `
-Dgallium-drivers="swrast,d3d12,zink" `
-Dgallium-va=true `
-Dvideo-codecs="h264dec,h264enc,h265dec,h265enc,vc1dec" `
-Dshared-glapi=enabled `
-Dgles1=enabled `
-Dgles2=enabled `
-Dgallium-opencl=icd `
-Dgallium-rusticl=false `
-Dopencl-spirv=true `
-Dmicrosoft-clc=enabled `
-Dstatic-libclc=all `
-Dspirv-to-dxil=true `
-Dbuild-tests=true `
-Dwerror=true `
-Dwarning_level=2 `
-Dzlib:warning_level=1 `
-Dlibelf:warning_level=1 `
$sourcedir && `
meson install && `
meson install --skip-subprojects && `
meson test --num-processes 32 --print-errorlogs
$buildstatus = $?

View File

@@ -8,82 +8,6 @@ $MyPath = $MyInvocation.MyCommand.Path | Split-Path -Parent
Remove-Item -Recurse -Force -ErrorAction SilentlyContinue "deps" | Out-Null
$depsInstallPath="C:\mesa-deps"
Get-Date
Write-Host "Cloning DirectX-Headers"
git clone -b v1.606.4 --depth=1 https://github.com/microsoft/DirectX-Headers deps/DirectX-Headers
if (!$?) {
Write-Host "Failed to clone DirectX-Headers repository"
Exit 1
}
Write-Host "Building DirectX-Headers"
$dxheaders_build = New-Item -ItemType Directory -Path ".\deps\DirectX-Headers" -Name "build"
Push-Location -Path $dxheaders_build.FullName
meson .. --backend=ninja -Dprefix="$depsInstallPath" --buildtype=release -Db_vscrt=mt && `
ninja -j32 install
$buildstatus = $?
Pop-Location
Remove-Item -Recurse -Force -ErrorAction SilentlyContinue -Path $dxheaders_build
if (!$buildstatus) {
Write-Host "Failed to compile DirectX-Headers"
Exit 1
}
Get-Date
Write-Host "Cloning zlib"
git clone -b v1.2.13 --depth=1 https://github.com/madler/zlib deps/zlib
if (!$?) {
Write-Host "Failed to clone zlib repository"
Exit 1
}
Write-Host "Downloading zlib meson build files"
Invoke-WebRequest -Uri "https://wrapdb.mesonbuild.com/v2/zlib_1.2.13-1/get_patch" -OutFile deps/zlib.zip
Expand-Archive -Path deps/zlib.zip -Destination deps/zlib
# Wrap archive puts build files in a version subdir
Move-Item deps/zlib/zlib-1.2.13/* deps/zlib
$zlib_build = New-Item -ItemType Directory -Path ".\deps\zlib" -Name "build"
Push-Location -Path $zlib_build.FullName
meson .. --backend=ninja -Dprefix="$depsInstallPath" --default-library=static --buildtype=release -Db_vscrt=mt && `
ninja -j32 install
$buildstatus = $?
Pop-Location
Remove-Item -Recurse -Force -ErrorAction SilentlyContinue -Path $zlib_build
if (!$buildstatus) {
Write-Host "Failed to compile zlib"
Exit 1
}
Get-Date
Write-Host "Cloning libva"
git clone https://github.com/intel/libva.git deps/libva
if (!$?) {
Write-Host "Failed to clone libva repository"
Exit 1
}
Push-Location -Path ".\deps\libva"
Write-Host "Checking out libva commit 2579eb0f77897dc01a02c1e43defc63c40fd2988"
# Checking out commit hash with libva-win32 support
# This feature will be released with libva version 2.17
git checkout 2579eb0f77897dc01a02c1e43defc63c40fd2988
Pop-Location
Write-Host "Building libva"
# libva already has a build dir in their repo, use builddir instead
$libva_build = New-Item -ItemType Directory -Path ".\deps\libva" -Name "builddir"
Push-Location -Path $libva_build.FullName
meson .. -Dprefix="$depsInstallPath"
ninja -j32 install
$buildstatus = $?
Pop-Location
Remove-Item -Recurse -Force -ErrorAction SilentlyContinue -Path $libva_build
if (!$buildstatus) {
Write-Host "Failed to compile libva"
Exit 1
}
Get-Date
Write-Host "Cloning LLVM release/12.x"
git clone -b release/12.x --depth=1 https://github.com/llvm/llvm-project deps/llvm-project
@@ -106,6 +30,8 @@ Push-Location deps/llvm-project/llvm/projects/SPIRV-LLVM-Translator
git checkout 5b641633b3bcc3251a52260eee11db13a79d7258
Pop-Location
$depsInstallPath="C:\mesa-deps"
Get-Date
# slightly convoluted syntax but avoids the CWD being under the PS filesystem meta-path
$llvm_build = New-Item -ItemType Directory -ErrorAction SilentlyContinue -Force -Path ".\deps\llvm-project" -Name "build"

View File

@@ -71,25 +71,3 @@ if (!$?) {
Exit 1
}
Remove-Item C:\vulkan-runtime.exe -Force
Get-Date
Write-Host "Installing graphics tools (DirectX debug layer)"
Set-Service -Name wuauserv -StartupType Manual
if (!$?) {
Write-Host "Failed to enable Windows Update"
Exit 1
}
For ($i = 0; $i -lt 5; $i++) {
Dism /online /quiet /add-capability /capabilityname:Tools.Graphics.DirectX~~~~0.0.1.0
$graphics_tools_installed = $?
if ($graphics_tools_installed) {
Break
}
}
if (!$graphics_tools_installed) {
Write-Host "Failed to install graphics tools"
Get-Content C:\Windows\Logs\DISM\dism.log
Exit 1
}

View File

@@ -79,7 +79,7 @@ Pop-Location
Get-Date
Write-Host "Cloning Vulkan and GL Conformance Tests"
$deqp_source = "C:\src\VK-GL-CTS\"
git clone --no-progress --single-branch https://github.com/KhronosGroup/VK-GL-CTS.git -b vulkan-cts-1.3.4 $deqp_source
git clone --no-progress --single-branch https://github.com/lfrb/VK-GL-CTS.git -b windows-flush $deqp_source
if (!$?) {
Write-Host "Failed to clone deqp repository"
Exit 1
@@ -115,10 +115,10 @@ Copy-Item -Path "$($deqp_source)\doc\testlog-stylesheet\testlog.xsl" -Destinatio
# Copy Vulkan must-pass list
$deqp_mustpass = New-Item -ItemType Directory -Path $deqp_build -Name "mustpass"
$root_mustpass = Join-Path -Path $deqp_source -ChildPath "external\vulkancts\mustpass\main"
$root_mustpass = Join-Path -Path $deqp_source -ChildPath "external\vulkancts\mustpass\master"
$files = Get-Content "$($root_mustpass)\vk-default.txt"
foreach($file in $files) {
Get-Content "$($root_mustpass)\$($file)" | Add-Content -Path "$($deqp_mustpass)\vk-main.txt"
Get-Content "$($root_mustpass)\$($file)" | Add-Content -Path "$($deqp_mustpass)\vk-master.txt"
}
Remove-Item -Force -Recurse $deqp_source

View File

@@ -27,7 +27,6 @@ Start-Process -NoNewWindow -Wait -FilePath C:\vs_buildtools.exe `
"--add", "Microsoft.VisualStudio.Component.VC.ATL", `
"--add", "Microsoft.VisualStudio.Component.VC.ATLMFC", `
"--add", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", `
"--add", "Microsoft.VisualStudio.Component.VC.Llvm.Clang", `
"--add", "Microsoft.VisualStudio.Component.Graphics.Tools", `
"--add", "Microsoft.VisualStudio.Component.Windows10SDK.20348"

View File

@@ -1,2 +0,0 @@
schema: 'schema.graphql'
documents: 'src/**/*.{graphql,js,ts,jsx,tsx}'

View File

@@ -161,7 +161,7 @@ Colin McDonald <cjmmail10-bz@yahoo.co.uk> <cjmcdonald@qinetiq.com>
Connor Abbott <cwabbott0@gmail.com> <connor.w.abbott@intel.com>
Connor Abbott <cwabbott0@gmail.com> <connor.abbott@intel.com>
Konstantin Kharlamov <Hi-Angel@yandex.ru>
Constantine Kharlamov <Hi-Angel@yandex.ru>
Corbin Simpson <MostAwesomeDude@gmail.com> <mostawesomed...@gmail.com>
Corbin Simpson <MostAwesomeDude@gmail.com> <mostawesomedude@gmail.com>

File diff suppressed because it is too large

View File

@@ -123,8 +123,8 @@ meson.build @dbaker @eric
/src/gallium/drivers/freedreno/ @robclark
# Imagination
/include/drm-uapi/pvr_drm.h @CreativeCylon @frankbinns
/src/imagination/ @CreativeCylon @frankbinns
/include/drm-uapi/pvr_drm.h @CreativeCylon @frankbinns @rajnesh-kanwal
/src/imagination/ @CreativeCylon @frankbinns @rajnesh-kanwal
/src/imagination/rogue/ @simon-perretta-img
# Intel

View File

@@ -1 +1 @@
22.3.1
22.2.0-rc1

View File

@@ -30,7 +30,6 @@ LIBDRM_VERSION = $(shell cat external/libdrm/meson.build | grep -o "\<version\>\
MESA_VK_LIB_SUFFIX_amd := radeon
MESA_VK_LIB_SUFFIX_intel := intel
MESA_VK_LIB_SUFFIX_intel_hasvk := intel_hasvk
MESA_VK_LIB_SUFFIX_freedreno := freedreno
MESA_VK_LIB_SUFFIX_broadcom := broadcom
MESA_VK_LIB_SUFFIX_panfrost := panfrost
@@ -101,12 +100,6 @@ endif
__MY_SHARED_LIBRARIES := $(LOCAL_SHARED_LIBRARIES)
ifeq ($(shell test $(PLATFORM_SDK_VERSION) -ge 30; echo $$?), 0)
MESA_LIBGBM_NAME := libgbm_mesa
else
MESA_LIBGBM_NAME := libgbm
endif
ifeq ($(TARGET_IS_64_BIT),true)
LOCAL_MULTILIB := 64
else
@@ -177,7 +170,7 @@ $(foreach driver,$(BOARD_MESA3D_VULKAN_DRIVERS), \
ifneq ($(filter true, $(BOARD_MESA3D_BUILD_LIBGBM)),)
# Modules 'libgbm', produces '/vendor/lib{64}/libgbm.so'
$(eval $(call mesa3d-lib,$(MESA_LIBGBM_NAME),.so.1,,MESA3D_LIBGBM_BIN,$(MESA3D_TOP)/src/gbm/main))
$(eval $(call mesa3d-lib,libgbm,.so.1,,MESA3D_LIBGBM_BIN,$(MESA3D_TOP)/src/gbm/main))
endif
#-------------------------------------------------------------------------------

View File

@@ -69,7 +69,7 @@ $(M_TARGET_PREFIX)MESA3D_LIBEGL_BIN := $(MESON_OUT_DIR)/install/usr/local/l
$(M_TARGET_PREFIX)MESA3D_LIBGLESV1_BIN := $(MESON_OUT_DIR)/install/usr/local/lib/libGLESv1_CM.so.1.1.0
$(M_TARGET_PREFIX)MESA3D_LIBGLESV2_BIN := $(MESON_OUT_DIR)/install/usr/local/lib/libGLESv2.so.2.0.0
$(M_TARGET_PREFIX)MESA3D_LIBGLAPI_BIN := $(MESON_OUT_DIR)/install/usr/local/lib/libglapi.so.0.0.0
$(M_TARGET_PREFIX)MESA3D_LIBGBM_BIN := $(MESON_OUT_DIR)/install/usr/local/lib/$(MESA_LIBGBM_NAME).so.1.0.0
$(M_TARGET_PREFIX)MESA3D_LIBGBM_BIN := $(MESON_OUT_DIR)/install/usr/local/lib/libgbm.so.1.0.0
MESA3D_GLES_BINS := \
@@ -85,12 +85,12 @@ MESON_GEN_NINJA := \
-Ddri-search-path=/vendor/$(MESA3D_LIB_DIR)/dri \
-Dplatforms=android \
-Dplatform-sdk-version=$(PLATFORM_SDK_VERSION) \
-Ddri-drivers= \
-Dgallium-drivers=$(subst $(space),$(comma),$(BOARD_MESA3D_GALLIUM_DRIVERS)) \
-Dvulkan-drivers=$(subst $(space),$(comma),$(subst radeon,amd,$(BOARD_MESA3D_VULKAN_DRIVERS))) \
-Dgbm=enabled \
-Degl=enabled \
-Dcpp_rtti=false \
-Dlmsensors=disabled \
MESON_BUILD := PATH=/usr/bin:/bin:/sbin:$$PATH ninja -C $(MESON_OUT_DIR)/build
@@ -148,7 +148,6 @@ $(MESON_GEN_FILES_TARGET): PRIVATE_TARGET_CRTEND_SO_O := $(my_target_crtend_so_o
##
define m-lld-flags
-Wl,-e,main \
-nostdlib -Wl,--gc-sections \
$(PRIVATE_TARGET_CRTBEGIN_SO_O) \
$(PRIVATE_ALL_OBJECTS) \
@@ -169,14 +168,13 @@ define m-lld-flags
endef
define m-lld-flags-cleaned
$(patsubst -Wl$(comma)--build-id=%,, \
$(subst prebuilts/,$(AOSP_ABSOLUTE_PATH)/prebuilts/, \
$(subst $(OUT_DIR)/,$(call relative-to-absolute,$(OUT_DIR))/, \
$(subst -Wl$(comma)--fatal-warnings,, \
$(subst -Wl$(comma)--no-undefined-version,, \
$(subst -Wl$(comma)--gc-sections,, \
$(patsubst %dummy.o,, \
$(m-lld-flags))))))))
$(m-lld-flags)))))))
endef
define m-cpp-flags
@@ -202,7 +200,7 @@ define m-c-flags
endef
define filter-c-flags
$(filter-out -std=gnu++17 -std=gnu++14 -std=gnu99 -fno-rtti, \
$(filter-out -std=gnu++17 -std=gnu99 -fno-rtti, \
$(patsubst -W%,, $1))
endef

View File

@@ -62,7 +62,7 @@ def branch_has_backport_of_commit(upstream: str, branch: str, commit: str) -> st
or an empty string if it hasn't
"""
out = subprocess.check_output(['git', 'log', '--format=%H',
upstream + '..' + upstream + '/' + branch,
branch + '-branchpoint..' + upstream + '/' + branch,
'--grep', 'cherry picked from commit ' + commit],
stderr=subprocess.DEVNULL)
return out.decode().strip()
@@ -89,7 +89,7 @@ def validate_branch(branch: str) -> str:
out = subprocess.check_output(['git', 'remote', '--verbose'],
stderr=subprocess.DEVNULL)
remotes = out.decode().splitlines()
upstream, _ = branch.split('/', 1)
(upstream, _) = branch.split('/')
valid_remote = False
for line in remotes:
if line.startswith(upstream + '\t'):
@@ -125,7 +125,7 @@ if __name__ == "__main__":
help='colorize output (default: true if stdout is a terminal)')
args = parser.parse_args()
upstream, branch = args.branch.split('/', 1)
(upstream, branch) = args.branch.split('/')
if branch_has_commit(upstream, branch, args.commit):
print_(args, True, 'Commit ' + args.commit + ' is in branch ' + branch)
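
branch_has_backport_of_commit is a thin wrapper over git log with --grep on the cherry-pick trailer. A hedged one-liner reproducing the query by hand, assuming the upstream remote is named origin and using a commit hash that appears in the tests below:

    # Hedged sketch: list backports of a commit on the 22.1 stable branch.
    git log --format=%H 22.1-branchpoint..origin/22.1 \
        --grep 'cherry picked from commit dd2bd68fa69124c86cd008b256d06f44fab8e6cd'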

View File

@@ -46,7 +46,6 @@ def test_canonicalize_commit(commit: str, expected: bool) -> None:
'commit, expected',
[
(get_upstream() + '/20.1', True),
(get_upstream() + '/staging/20.1', True),
(get_upstream() + '/main', True),
('20.1', False),
('main', False),
@@ -74,7 +73,6 @@ def test_validate_branch(commit: str, expected: bool) -> None:
('20.1-branchpoint', True),
('20.1', False),
(get_upstream() + '/20.1', True),
(get_upstream() + '/staging/20.1', True),
('e58a10af640ba58b6001f5c5ad750b782547da76', True),
('d043d24654c851f0be57dbbf48274b5373dea42b', True),
('dd2bd68fa69124c86cd008b256d06f44fab8e6cd', True),
@@ -93,7 +91,6 @@ def test_is_commit_valid(commit: str, expected: bool) -> None:
('20.1', 'main', False),
('20.1', 'e58a10af640ba58b6001f5c5ad750b782547da76', True),
('20.1', 'd043d24654c851f0be57dbbf48274b5373dea42b', True),
('staging/20.1', 'd043d24654c851f0be57dbbf48274b5373dea42b', True),
('20.1', 'dd2bd68fa69124c86cd008b256d06f44fab8e6cd', False),
('main', 'dd2bd68fa69124c86cd008b256d06f44fab8e6cd', True),
('20.0', 'd043d24654c851f0be57dbbf48274b5373dea42b', False),
@@ -107,7 +104,6 @@ def test_branch_has_commit(branch: str, commit: str, expected: bool) -> None:
'branch, commit, expected',
[
('20.1', 'dd2bd68fa69124c86cd008b256d06f44fab8e6cd', 'd043d24654c851f0be57dbbf48274b5373dea42b'),
('staging/20.1', 'dd2bd68fa69124c86cd008b256d06f44fab8e6cd', 'd043d24654c851f0be57dbbf48274b5373dea42b'),
('20.1', '20.1-branchpoint', ''),
('20.1', '20.0', ''),
('20.1', '20.2', ''),

View File

@@ -40,7 +40,7 @@ import docutils.utils
import docutils.parsers.rst.states as states
CURRENT_GL_VERSION = '4.6'
CURRENT_VK_VERSION = '1.3'
CURRENT_VK_VERSION = '1.2'
TEMPLATE = Template(textwrap.dedent("""\
${header}

View File

@@ -35,7 +35,7 @@ For gcc/x86,gcc/arm
`stw_get_device` => `stw_get_device`
"""
def gen_vs_module_def(in_file: str, out_file: str, compiler_abi: str, compiler_id: str, cpu_family: str) -> None:
def gen_vs_module_def(in_file: str, out_file: str, compiler_id: str, cpu_family: str) -> None:
out_file_lines = ['EXPORTS']
with open(in_file, 'r', encoding='utf-8') as f:
lines = f.readlines()
@@ -57,7 +57,7 @@ def gen_vs_module_def(in_file: str, out_file: str, compiler_abi: str, compiler_i
continue
order_info = '' if len(def_infos) <= 1 else def_infos[1]
if def_infos[0] != name_infos[0] and \
(compiler_abi == 'gcc' and compiler_id != 'clang') and (cpu_family not in {'x86_64', 'aarch64'}):
(compiler_id == 'gcc') and (cpu_family not in {'x86_64', 'aarch64'}):
if order_info:
out_file_lines.append('\t' + def_infos[0] + ' ' + order_info + ' == ' + name_infos[0])
else:
@@ -71,29 +71,28 @@ def gen_vs_module_def(in_file: str, out_file: str, compiler_abi: str, compiler_i
out_file_content = '\n'.join(out_file_lines) + '\n'
f.write(out_file_content.encode('utf-8'))
'''
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/libgl-gdi/opengl32.def.in --out_file src/gallium/targets/libgl-gdi/opengl32.def --compiler_abi gcc --cpu_family x86_64
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/libgl-gdi/opengl32.def.in --out_file src/gallium/targets/libgl-gdi/opengl32.mingw.def --compiler_abi gcc --cpu_family x86
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/libgl-gdi/opengl32.def.in --out_file src/gallium/targets/libgl-gdi/opengl32.def --compiler_id gcc --cpu_family x86_64
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/libgl-gdi/opengl32.def.in --out_file src/gallium/targets/libgl-gdi/opengl32.mingw.def --compiler_id gcc --cpu_family x86
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/osmesa/osmesa.def.in --out_file src/gallium/targets/osmesa/osmesa.def --compiler_abi gcc --cpu_family x86_64
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/osmesa/osmesa.def.in --out_file src/gallium/targets/osmesa/osmesa.mingw.def --compiler_abi gcc --cpu_family x86
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/osmesa/osmesa.def.in --out_file src/gallium/targets/osmesa/osmesa.def --compiler_id gcc --cpu_family x86_64
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/osmesa/osmesa.def.in --out_file src/gallium/targets/osmesa/osmesa.mingw.def --compiler_id gcc --cpu_family x86
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/wgl/gallium_wgl.def.in --out_file src/gallium/targets/wgl/gallium_wgl.def --compiler_abi gcc --cpu_family x86_64
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/wgl/gallium_wgl.def.in --out_file src/gallium/targets/wgl/gallium_wgl.mingw.def --compiler_abi gcc --cpu_family x86
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/wgl/gallium_wgl.def.in --out_file src/gallium/targets/wgl/gallium_wgl.def --compiler_id gcc --cpu_family x86_64
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/wgl/gallium_wgl.def.in --out_file src/gallium/targets/wgl/gallium_wgl.mingw.def --compiler_id gcc --cpu_family x86
python ./bin/gen_vs_module_defs.py --in_file src/egl/main/egl.def.in --out_file src/egl/main/egl.def --compiler_abi gcc --cpu_family x86_64
python ./bin/gen_vs_module_defs.py --in_file src/egl/main/egl.def.in --out_file src/egl/main/egl.mingw.def --compiler_abi gcc --cpu_family x86
python ./bin/gen_vs_module_defs.py --in_file src/egl/main/egl.def.in --out_file src/egl/main/egl.def --compiler_id gcc --cpu_family x86_64
python ./bin/gen_vs_module_defs.py --in_file src/egl/main/egl.def.in --out_file src/egl/main/egl.mingw.def --compiler_id gcc --cpu_family x86
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/lavapipe/vulkan_lvp.def.in --out_file src/gallium/targets/lavapipe/vulkan_lvp.def --compiler_abi gcc --cpu_family x86_64
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/lavapipe/vulkan_lvp.def.in --out_file src/gallium/targets/lavapipe/vulkan_lvp.mingw.def --compiler_abi gcc --cpu_family x86
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/lavapipe/vulkan_lvp.def.in --out_file src/gallium/targets/lavapipe/vulkan_lvp.def --compiler_id gcc --cpu_family x86_64
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/lavapipe/vulkan_lvp.def.in --out_file src/gallium/targets/lavapipe/vulkan_lvp.mingw.def --compiler_id gcc --cpu_family x86
'''
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=gen_help)
parser.add_argument('--in_file', help='input template module definition file')
parser.add_argument('--out_file', help='output module definition file')
parser.add_argument('--compiler_abi', help='compiler abi')
parser.add_argument('--compiler_id', help='compiler id')
parser.add_argument('--cpu_family', help='cpu family')
args = parser.parse_args()
# print(args)
gen_vs_module_def(args.in_file, args.out_file, args.compiler_abi, args.compiler_id, args.cpu_family)
gen_vs_module_def(args.in_file, args.out_file, args.compiler_id, args.cpu_family)

View File

@@ -59,7 +59,8 @@ SOURCES = [
Source('include/EGL/egl.h', 'https://github.com/KhronosGroup/EGL-Registry/raw/main/api/EGL/egl.h'),
Source('include/EGL/eglplatform.h', 'https://github.com/KhronosGroup/EGL-Registry/raw/main/api/EGL/eglplatform.h'),
Source('include/EGL/eglext.h', 'https://github.com/KhronosGroup/EGL-Registry/raw/main/api/EGL/eglext.h'),
Source('include/EGL/eglext_angle.h', 'https://chromium.googlesource.com/angle/angle/+/refs/heads/main/include/EGL/eglext_angle.h?format=TEXT'),
Source('include/EGL/eglextchromium.h', 'https://chromium.googlesource.com/chromium/src/+/refs/heads/master/ui/gl/EGL/eglextchromium.h?format=TEXT'),
Source('include/EGL/eglext_angle.h', 'https://chromium.googlesource.com/angle/angle/+/refs/heads/master/include/EGL/eglext_angle.h?format=TEXT'),
Source('include/EGL/eglmesaext.h', None),
],
},

View File

@@ -20,11 +20,5 @@
git_sha1_gen_py = files('git_sha1_gen.py')
gen_vs_module_defs_py = files('gen_vs_module_defs.py')
gen_vs_module_defs_normal_command = [
prog_python, gen_vs_module_defs_py,
'--in_file', '@INPUT@', '--out_file', '@OUTPUT@',
'--compiler_abi', cc.get_argument_syntax(),
'--compiler_id', cc.get_id(), '--cpu_family', host_machine.cpu_family()
]
symbols_check = find_program('symbols-check.py')
install_megadrivers_py = find_program('install_megadrivers.py')

Some files were not shown because too many files have changed in this diff