Compare commits
161 Commits
mesa-22.0.
...
mesa-21.1.
Author | SHA1 | Date | |
---|---|---|---|
|
19ed21fba9 | ||
|
df37cbfab6 | ||
|
3707ffe7bc | ||
|
8f8ce535ef | ||
|
31836d7f11 | ||
|
b0f5108c84 | ||
|
010f9027f2 | ||
|
4255942654 | ||
|
ccef1f34ae | ||
|
42b5f56583 | ||
|
5f48cd7041 | ||
|
37c88be5ea | ||
|
7c2ee1d626 | ||
|
be5cc7cdd9 | ||
|
d6c5525526 | ||
|
c269aa387f | ||
|
3f78ccb7ba | ||
|
f53f73b8df | ||
|
dd20621ac3 | ||
|
4c0b3b5d9e | ||
|
baaf7cfb82 | ||
|
f5ce440e1b | ||
|
563c41a44a | ||
|
e633a4a3a8 | ||
|
4496db61e9 | ||
|
df0202feaf | ||
|
2c8eba7188 | ||
|
5ddac7c5e7 | ||
|
a3237b0dd3 | ||
|
d0fa20ba17 | ||
|
5440acb8fc | ||
|
0d5e73d205 | ||
|
6b1197e728 | ||
|
d501b039cc | ||
|
5eb41d49a6 | ||
|
fb4d89a85e | ||
|
dff02e24c4 | ||
|
7ba683d490 | ||
|
c575dae37a | ||
|
734c940bb4 | ||
|
a840d01c57 | ||
|
0a1b6f7cc4 | ||
|
269e8b5ff5 | ||
|
891eeea222 | ||
|
e8d30fa9ec | ||
|
5edae4604e | ||
|
5dfe95bb25 | ||
|
00f2f212b1 | ||
|
26dcb7f863 | ||
|
f999b51070 | ||
|
3521f76806 | ||
|
975ab701d7 | ||
|
695def8e07 | ||
|
2e6cf74e63 | ||
|
a0614e1135 | ||
|
71a46b9a9d | ||
|
cebc8191a8 | ||
|
db1b074110 | ||
|
b74e7421b3 | ||
|
3234ef8538 | ||
|
95994bc0b1 | ||
|
246c6ec7ab | ||
|
a693bc1b99 | ||
|
5b34892630 | ||
|
7ebbd4c654 | ||
|
4d28c2b086 | ||
|
a24924bb60 | ||
|
7a8e3ca3be | ||
|
315df0a7d7 | ||
|
ac7b6c9dc8 | ||
|
ecba845c0d | ||
|
ce04ac4e54 | ||
|
e430e991db | ||
|
ec959d85e9 | ||
|
a205d583c4 | ||
|
38b948b5dc | ||
|
e45f8434e3 | ||
|
d9711344bd | ||
|
0e3547af95 | ||
|
368b5e4561 | ||
|
09d505a3ab | ||
|
c40e1896b2 | ||
|
80be401ef8 | ||
|
9988f53d51 | ||
|
687a8bf579 | ||
|
5f748f1e11 | ||
|
d5869ccc75 | ||
|
c0e63f4abe | ||
|
a8ee300289 | ||
|
3c35559eaf | ||
|
0c5b2aa429 | ||
|
7ab251c71a | ||
|
a87b2fcfbc | ||
|
70e8b11f37 | ||
|
ef12d4ebb4 | ||
|
8a598d76ec | ||
|
1d0fc4ef05 | ||
|
35e318f2fa | ||
|
4f8cf816ac | ||
|
c835bfc52a | ||
|
3ed9152fd1 | ||
|
2c21bc476f | ||
|
de8c1ab62a | ||
|
87fe4028c3 | ||
|
46b9602c75 | ||
|
87b50c6f26 | ||
|
0ead3bc280 | ||
|
70687cdbe6 | ||
|
6a5ad328d1 | ||
|
5a9a89ae07 | ||
|
53710c4f9f | ||
|
77f6ec705a | ||
|
e43d870ac5 | ||
|
3bf3eb13cb | ||
|
7359a3cc1a | ||
|
ee3c6aa21a | ||
|
e5ecf4b534 | ||
|
09db311555 | ||
|
6e361edbbc | ||
|
0514031a0f | ||
|
af0926bac0 | ||
|
9d518272a6 | ||
|
54908b2307 | ||
|
5ecd135d03 | ||
|
e624fba3b4 | ||
|
4a44e69ab1 | ||
|
4a5f6581a6 | ||
|
7e8c906197 | ||
|
3a04f9ec5e | ||
|
742188d95a | ||
|
8636a1220c | ||
|
743b02f939 | ||
|
b0ac2df044 | ||
|
c8935e0399 | ||
|
269da9f426 | ||
|
a51a51b408 | ||
|
efcedfc99e | ||
|
61660ee4e0 | ||
|
7e32a89902 | ||
|
8e39721254 | ||
|
f6e8543ff7 | ||
|
d7e9722382 | ||
|
c94c36ece0 | ||
|
48f8188985 | ||
|
8a52fed6ba | ||
|
50b926566c | ||
|
b571fef9e2 | ||
|
d90e5fe88a | ||
|
da74e86cb7 | ||
|
31ada2b7cf | ||
|
e80952634b | ||
|
e3ef541f95 | ||
|
09151387c5 | ||
|
6b385bf6ad | ||
|
170e8ea824 | ||
|
b6a1e3a393 | ||
|
979ec228f2 | ||
|
3e337c947f | ||
|
8d31d16517 | ||
|
6d71157dc6 | ||
|
764cb454b0 |
@@ -36,10 +36,6 @@ indent_size = 2
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
[*.rst]
|
||||
indent_style = space
|
||||
indent_size = 3
|
||||
|
||||
[*.patch]
|
||||
trim_trailing_whitespace = false
|
||||
|
||||
|
6
.gitattributes
vendored
6
.gitattributes
vendored
@@ -1,6 +0,0 @@
|
||||
*.csv eol=crlf
|
||||
* text=auto
|
||||
*.jpg binary
|
||||
*.png binary
|
||||
*.gif binary
|
||||
*.ico binary
|
709
.gitlab-ci.yml
709
.gitlab-ci.yml
File diff suppressed because it is too large
Load Diff
@@ -1,20 +0,0 @@
|
||||
# Note: skips lists for CI are just a list of lines that, when
|
||||
# non-zero-length and not starting with '#', will regex match to
|
||||
# delete lines from the test list. Be careful.
|
||||
|
||||
# These are tremendously slow (pushing toward a minute), and aren't
|
||||
# reliable to be run in parallel with other tests due to CPU-side timing.
|
||||
dEQP-GLES[0-9]*.functional.flush_finish.*
|
||||
|
||||
# https://gitlab.freedesktop.org/mesa/mesa/-/issues/4575
|
||||
dEQP-VK.wsi.display.get_display_plane_capabilities
|
||||
|
||||
# piglit: WGL is Windows-only
|
||||
wgl@.*
|
||||
|
||||
# These are sensitive to CPU timing, and would need to be run in isolation
|
||||
# on the system rather than in parallel with other tests.
|
||||
glx@glx_arb_sync_control@timing.*
|
||||
|
||||
# This test is not built with waffle, while we do build tests with waffle
|
||||
spec@!opengl 1.1@windowoverlap
|
@@ -1,26 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# This test script groups together a bunch of fast dEQP variant runs
|
||||
# to amortize the cost of rebooting the board.
|
||||
|
||||
set -ex
|
||||
|
||||
EXIT=0
|
||||
|
||||
# Run reset tests without parallelism:
|
||||
if ! env \
|
||||
DEQP_RESULTS_DIR=results/reset \
|
||||
FDO_CI_CONCURRENT=1 \
|
||||
DEQP_CASELIST_FILTER='.*reset.*' \
|
||||
/install/deqp-runner.sh; then
|
||||
EXIT=1
|
||||
fi
|
||||
|
||||
# Then run everything else with parallelism:
|
||||
if ! env \
|
||||
DEQP_RESULTS_DIR=results/nonrobustness \
|
||||
DEQP_CASELIST_INV_FILTER='.*reset.*' \
|
||||
/install/deqp-runner.sh; then
|
||||
EXIT=1
|
||||
fi
|
||||
|
81
.gitlab-ci/bare-metal/arm64_a630_gles_others.sh
Executable file
81
.gitlab-ci/bare-metal/arm64_a630_gles_others.sh
Executable file
@@ -0,0 +1,81 @@
|
||||
#!/bin/sh
|
||||
|
||||
# This test script groups together a bunch of fast dEQP variant runs
|
||||
# to amortize the cost of rebooting the board.
|
||||
|
||||
set -ex
|
||||
|
||||
EXIT=0
|
||||
|
||||
# Test rendering with the gmem path forced when possible (~1 minute)
|
||||
if ! env \
|
||||
DEQP_RESULTS_DIR=results/gmem \
|
||||
DEQP_VER=gles31 \
|
||||
DEQP_FRACTION=5 \
|
||||
FD_MESA_DEBUG=nobypass \
|
||||
/install/deqp-runner.sh; then
|
||||
EXIT=1
|
||||
fi
|
||||
|
||||
# Test rendering with the bypass path forced when possible (~1 minute)
|
||||
if ! env \
|
||||
DEQP_RESULTS_DIR=results/bypass \
|
||||
DEQP_VER=gles31 \
|
||||
DEQP_FRACTION=5 \
|
||||
FD_MESA_DEBUG=nogmem \
|
||||
GPU_VERSION=freedreno-a630-bypass \
|
||||
/install/deqp-runner.sh; then
|
||||
EXIT=1
|
||||
fi
|
||||
|
||||
# Test rendering with the UBO-to-constants optimization disabled (~1 minute)
|
||||
if ! env \
|
||||
DEQP_RESULTS_DIR=results/nouboopt \
|
||||
DEQP_VER=gles31 \
|
||||
IR3_SHADER_DEBUG=nouboopt \
|
||||
DEQP_CASELIST_FILTER="functional.*ubo" \
|
||||
/install/deqp-runner.sh; then
|
||||
EXIT=1
|
||||
fi
|
||||
|
||||
# gles3-565nozs mustpass (~20s)
|
||||
if ! env \
|
||||
DEQP_RESULTS_DIR=results/gles3-565nozs \
|
||||
DEQP_VER=gles3 \
|
||||
DEQP_CONFIG="rgb565d0s0ms0" \
|
||||
DEQP_VARIANT="565-no-depth-no-stencil" \
|
||||
/install/deqp-runner.sh; then
|
||||
EXIT=1
|
||||
fi
|
||||
|
||||
# gles31-565nozs mustpass (~1s)
|
||||
if ! env \
|
||||
DEQP_RESULTS_DIR=results/gles31-565nozs \
|
||||
DEQP_VER=gles31 \
|
||||
DEQP_CONFIG="rgb565d0s0ms0" \
|
||||
DEQP_VARIANT="565-no-depth-no-stencil" \
|
||||
/install/deqp-runner.sh; then
|
||||
EXIT=1
|
||||
fi
|
||||
|
||||
# gles3-multisample mustpass -- disabled pending https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/1859
|
||||
# if ! env \
|
||||
# DEQP_RESULTS_DIR=results/gles3-multisample \
|
||||
# DEQP_VER=gles3 \
|
||||
# DEQP_CONFIG="rgba8888d24s8ms4" \
|
||||
# DEQP_VARIANT="multisample" \
|
||||
# /install/deqp-runner.sh; then
|
||||
# EXIT=1
|
||||
# fi
|
||||
|
||||
# gles31-multisample mustpass -- disabled pending https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/1859
|
||||
# if ! env \
|
||||
# DEQP_RESULTS_DIR=results/gles31-multisample \
|
||||
# DEQP_VER=gles31 \
|
||||
# DEQP_CONFIG="rgba8888d24s8ms4" \
|
||||
# DEQP_VARIANT="multisample" \
|
||||
# /install/deqp-runner.sh; then
|
||||
# EXIT=1
|
||||
# fi
|
||||
|
||||
exit $EXIT
|
@@ -1,13 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# Init entrypoint for bare-metal devices; calls common init code.
|
||||
|
||||
# First stage: very basic setup to bring up network and /dev etc
|
||||
/init-stage1.sh
|
||||
|
||||
# Second stage: run jobs
|
||||
test $? -eq 0 && /init-stage2.sh
|
||||
|
||||
# Wait until the job would have timed out anyway, so we don't spew a "init
|
||||
# exited" panic.
|
||||
sleep 6000
|
@@ -5,7 +5,6 @@
|
||||
|
||||
# We're run from the root of the repo, make a helper var for our paths
|
||||
BM=$CI_PROJECT_DIR/install/bare-metal
|
||||
CI_COMMON=$CI_PROJECT_DIR/install/common
|
||||
|
||||
# Runner config checks
|
||||
if [ -z "$BM_SERIAL" ]; then
|
||||
|
@@ -50,18 +50,12 @@ class CrosServoRun:
|
||||
target=self.iter_feed_queue, daemon=True, args=(self.cpu_ser.lines(),))
|
||||
self.iter_feed_cpu.start()
|
||||
|
||||
def close(self):
|
||||
self.ec_ser.close()
|
||||
self.cpu_ser.close()
|
||||
self.iter_feed_ec.join()
|
||||
self.iter_feed_cpu.join()
|
||||
|
||||
# Feed lines from our serial queues into the merged queue, marking when our
|
||||
# input is done.
|
||||
def iter_feed_queue(self, it):
|
||||
for i in it:
|
||||
self.serial_queue.put(i)
|
||||
self.serial_queue.put(self.sentinel)
|
||||
self.serial_queue.put(sentinel)
|
||||
|
||||
# Return the next line from the queue, counting how many threads have
|
||||
# terminated and joining when done
|
||||
@@ -132,12 +126,6 @@ class CrosServoRun:
|
||||
self.print_error("Detected cheza power management bus error, restarting run...")
|
||||
return 2
|
||||
|
||||
# If the network device dies, it's probably not graphics's fault, just try again.
|
||||
if re.search("NETDEV WATCHDOG", line):
|
||||
self.print_error(
|
||||
"Detected network device failure, restarting run...")
|
||||
return 2
|
||||
|
||||
# These HFI response errors started appearing with the introduction
|
||||
# of piglit runs. CosmicPenguin says:
|
||||
#
|
||||
@@ -151,12 +139,7 @@ class CrosServoRun:
|
||||
self.print_error("Detected cheza power management bus error, restarting run...")
|
||||
return 2
|
||||
|
||||
if re.search("coreboot.*bootblock starting", line):
|
||||
self.print_error(
|
||||
"Detected spontaneous reboot, restarting run...")
|
||||
return 2
|
||||
|
||||
result = re.search("hwci: mesa: (\S*)", line)
|
||||
result = re.search("bare-metal result: (\S*)", line)
|
||||
if result:
|
||||
if result.group(1) == "pass":
|
||||
return 0
|
||||
@@ -185,8 +168,6 @@ def main():
|
||||
# power down the CPU on the device
|
||||
servo.ec_write("power off\n")
|
||||
|
||||
servo.close()
|
||||
|
||||
sys.exit(retval)
|
||||
|
||||
|
||||
|
@@ -1,10 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
relay=$1
|
||||
|
||||
if [ -z "$relay" ]; then
|
||||
echo "Must supply a relay arg"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
$CI_PROJECT_DIR/install/bare-metal/eth008-power-relay.py $ETH_HOST $ETH_PORT off $relay
|
@@ -1,28 +0,0 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import sys
|
||||
import socket
|
||||
|
||||
host = sys.argv[1]
|
||||
port = sys.argv[2]
|
||||
mode = sys.argv[3]
|
||||
relay = sys.argv[4]
|
||||
msg = None
|
||||
|
||||
if mode == "on":
|
||||
msg = b'\x20'
|
||||
else:
|
||||
msg = b'\x21'
|
||||
|
||||
msg += int(relay).to_bytes(1, 'big')
|
||||
msg += b'\x00'
|
||||
|
||||
c = socket.create_connection((host, int(port)))
|
||||
c.sendall(msg)
|
||||
|
||||
data = c.recv(1)
|
||||
c.close()
|
||||
|
||||
if data[0] == b'\x01':
|
||||
print('Command failed')
|
||||
sys.exit(1)
|
@@ -1,12 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
relay=$1
|
||||
|
||||
if [ -z "$relay" ]; then
|
||||
echo "Must supply a relay arg"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
$CI_PROJECT_DIR/install/bare-metal/eth008-power-relay.py $ETH_HOST $ETH_PORT off $relay
|
||||
sleep 5
|
||||
$CI_PROJECT_DIR/install/bare-metal/eth008-power-relay.py $ETH_HOST $ETH_PORT on $relay
|
@@ -1,7 +1,6 @@
|
||||
#!/bin/bash
|
||||
|
||||
BM=$CI_PROJECT_DIR/install/bare-metal
|
||||
CI_COMMON=$CI_PROJECT_DIR/install/common
|
||||
|
||||
if [ -z "$BM_SERIAL" -a -z "$BM_SERIAL_SCRIPT" ]; then
|
||||
echo "Must set BM_SERIAL OR BM_SERIAL_SCRIPT in your gitlab-runner config.toml [[runners]] environment"
|
||||
@@ -50,6 +49,15 @@ if echo $BM_CMDLINE | grep -q "root=/dev/nfs"; then
|
||||
BM_FASTBOOT_NFSROOT=1
|
||||
fi
|
||||
|
||||
if [ -z "$BM_FASTBOOT_NFSROOT" ]; then
|
||||
if [ -z "$BM_WEBDAV_IP" -o -z "$BM_WEBDAV_PORT" ]; then
|
||||
echo "BM_WEBDAV_IP and/or BM_WEBDAV_PORT is not set - no results will be uploaded from DUT!"
|
||||
WEBDAV_CMDLINE=""
|
||||
else
|
||||
WEBDAV_CMDLINE="webdav=http://$BM_WEBDAV_IP:$BM_WEBDAV_PORT"
|
||||
fi
|
||||
fi
|
||||
|
||||
set -ex
|
||||
|
||||
# Clear out any previous run's artifacts.
|
||||
@@ -89,6 +97,14 @@ else
|
||||
cpio -H newc -o | \
|
||||
xz --check=crc32 -T4 - > $CI_PROJECT_DIR/rootfs.cpio.gz
|
||||
popd
|
||||
|
||||
# Start nginx to get results from DUT
|
||||
if [ -n "$WEBDAV_CMDLINE" ]; then
|
||||
ln -s `pwd`/results /results
|
||||
sed -i s/80/$BM_WEBDAV_PORT/g /etc/nginx/sites-enabled/default
|
||||
sed -i s/www-data/root/g /etc/nginx/nginx.conf
|
||||
nginx
|
||||
fi
|
||||
fi
|
||||
|
||||
# Make the combined kernel image and dtb for passing to fastboot. For normal
|
||||
@@ -116,7 +132,7 @@ abootimg \
|
||||
--create artifacts/fastboot.img \
|
||||
-k Image.gz-dtb \
|
||||
-r rootfs.cpio.gz \
|
||||
-c cmdline="$BM_CMDLINE"
|
||||
-c cmdline="$BM_CMDLINE $WEBDAV_CMDLINE"
|
||||
rm Image.gz-dtb
|
||||
|
||||
export PATH=$BM:$PATH
|
||||
|
@@ -31,14 +31,9 @@ import threading
|
||||
class FastbootRun:
|
||||
def __init__(self, args):
|
||||
self.powerup = args.powerup
|
||||
# We would like something like a 1 minute timeout, but the piglit traces
|
||||
# jobs stall out for long periods of time.
|
||||
self.ser = SerialBuffer(args.dev, "results/serial-output.txt", "R SERIAL> ", timeout=600)
|
||||
self.ser = SerialBuffer(args.dev, "results/serial-output.txt", "R SERIAL> ")
|
||||
self.fastboot="fastboot boot -s {ser} artifacts/fastboot.img".format(ser=args.fbserial)
|
||||
|
||||
def close(self):
|
||||
self.ser.close()
|
||||
|
||||
def print_error(self, message):
|
||||
RED = '\033[0;31m'
|
||||
NO_COLOR = '\033[0m'
|
||||
@@ -60,23 +55,16 @@ class FastbootRun:
|
||||
break
|
||||
|
||||
if re.search("data abort", line):
|
||||
self.print_error("Detected crash during boot, restarting run...")
|
||||
return 2
|
||||
return 1
|
||||
|
||||
if not fastboot_ready:
|
||||
self.print_error("Failed to get to fastboot prompt, restarting run...")
|
||||
return 2
|
||||
self.print_error("Failed to get to fastboot prompt")
|
||||
return 1
|
||||
|
||||
if self.logged_system(self.fastboot) != 0:
|
||||
return 1
|
||||
|
||||
print_more_lines = -1
|
||||
for line in self.ser.lines():
|
||||
if print_more_lines == 0:
|
||||
return 2
|
||||
if print_more_lines > 0:
|
||||
print_more_lines -= 1
|
||||
|
||||
if re.search("---. end Kernel panic", line):
|
||||
return 1
|
||||
|
||||
@@ -92,33 +80,15 @@ class FastbootRun:
|
||||
"Detected kernel soft lockup, restarting run...")
|
||||
return 2
|
||||
|
||||
# If the network device dies, it's probably not graphics's fault, just try again.
|
||||
if re.search("NETDEV WATCHDOG", line):
|
||||
self.print_error(
|
||||
"Detected network device failure, restarting run...")
|
||||
return 2
|
||||
|
||||
# A3xx recovery doesn't quite work. Sometimes the GPU will get
|
||||
# wedged and recovery will fail (because power can't be reset?)
|
||||
# This assumes that the jobs are sufficiently well-tested that GPU
|
||||
# hangs aren't always triggered, so just try again. But print some
|
||||
# more lines first so that we get better information on the cause
|
||||
# of the hang. Once a hang happens, it's pretty chatty.
|
||||
if "[drm:adreno_recover] *ERROR* gpu hw init failed: -22" in line:
|
||||
self.print_error(
|
||||
"Detected GPU hang, restarting run...")
|
||||
if print_more_lines == -1:
|
||||
print_more_lines = 30
|
||||
|
||||
result = re.search("hwci: mesa: (\S*)", line)
|
||||
result = re.search("bare-metal result: (\S*)", line)
|
||||
if result:
|
||||
if result.group(1) == "pass":
|
||||
return 0
|
||||
else:
|
||||
return 1
|
||||
|
||||
self.print_error("Reached the end of the CPU serial log without finding a result, restarting run...")
|
||||
return 2
|
||||
self.print_error("Reached the end of the CPU serial log without finding a result")
|
||||
return 1
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser()
|
||||
@@ -132,12 +102,9 @@ def main():
|
||||
|
||||
while True:
|
||||
retval = fastboot.run()
|
||||
fastboot.close()
|
||||
if retval != 2:
|
||||
break
|
||||
|
||||
fastboot = FastbootRun(args)
|
||||
|
||||
fastboot.logged_system(args.powerdown)
|
||||
|
||||
sys.exit(retval)
|
||||
|
69
.gitlab-ci/bare-metal/init.sh
Executable file
69
.gitlab-ci/bare-metal/init.sh
Executable file
@@ -0,0 +1,69 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -ex
|
||||
|
||||
mount -t proc none /proc
|
||||
mount -t sysfs none /sys
|
||||
mount -t devtmpfs none /dev || echo possibly already mounted
|
||||
mkdir -p /dev/pts
|
||||
mount -t devpts devpts /dev/pts
|
||||
mount -t tmpfs tmpfs /tmp
|
||||
|
||||
. /set-job-env-vars.sh
|
||||
|
||||
[ -z "$BM_KERNEL_MODULES" ] || echo -n $BM_KERNEL_MODULES | xargs -d, -n1 /usr/sbin/modprobe
|
||||
|
||||
# Store Mesa's disk cache under /tmp, rather than sending it out over NFS.
|
||||
export XDG_CACHE_HOME=/tmp
|
||||
|
||||
echo "nameserver 8.8.8.8" > /etc/resolv.conf
|
||||
|
||||
# Not all DUTs have network
|
||||
sntp -sS pool.ntp.org || true
|
||||
|
||||
# Start a little daemon to capture the first devcoredump we encounter. (They
|
||||
# expire after 5 minutes, so we poll for them).
|
||||
./capture-devcoredump.sh &
|
||||
|
||||
# If we want Xorg to be running for the test, then we start it up before the
|
||||
# BARE_METAL_TEST_SCRIPT because we need to use xinit to start X (otherwise
|
||||
# without using -displayfd you can race with Xorg's startup), but xinit will eat
|
||||
# your client's return code
|
||||
if [ -n "$BM_START_XORG" ]; then
|
||||
echo "touch /xorg-started; sleep 100000" > /xorg-script
|
||||
env \
|
||||
LD_LIBRARY_PATH=/install/lib/ \
|
||||
LIBGL_DRIVERS_PATH=/install/lib/dri/ \
|
||||
xinit /bin/sh /xorg-script -- /usr/bin/Xorg -noreset -dpms -logfile /Xorg.0.log &
|
||||
|
||||
# Wait for xorg to be ready for connections.
|
||||
for i in 1 2 3 4 5; do
|
||||
if [ -e /xorg-started ]; then
|
||||
break
|
||||
fi
|
||||
sleep 5
|
||||
done
|
||||
export DISPLAY=:0
|
||||
fi
|
||||
|
||||
if sh $BARE_METAL_TEST_SCRIPT; then
|
||||
OK=1
|
||||
else
|
||||
OK=0
|
||||
fi
|
||||
|
||||
# upload artifacts via webdav
|
||||
WEBDAV=$(cat /proc/cmdline | tr " " "\n" | grep webdav | cut -d '=' -f 2 || true)
|
||||
if [ -n "$WEBDAV" ]; then
|
||||
find /results -type f -exec curl -T {} $WEBDAV/{} \;
|
||||
fi
|
||||
|
||||
if [ $OK -eq 1 ]; then
|
||||
echo "bare-metal result: pass"
|
||||
else
|
||||
echo "bare-metal result: fail"
|
||||
fi
|
||||
|
||||
# Wait until the job would have timed out anyway, so we don't spew a "init
|
||||
# exited" panic.
|
||||
sleep 6000
|
20
.gitlab-ci/bare-metal/nginx-default-site
Normal file
20
.gitlab-ci/bare-metal/nginx-default-site
Normal file
@@ -0,0 +1,20 @@
|
||||
server {
|
||||
listen 80 default_server;
|
||||
listen [::]:80 default_server;
|
||||
|
||||
server_name _;
|
||||
|
||||
location / {
|
||||
dav_methods PUT;
|
||||
dav_ext_methods PROPFIND OPTIONS;
|
||||
dav_access user:rw group:rw all:r;
|
||||
|
||||
client_body_temp_path /tmp;
|
||||
client_max_body_size 0;
|
||||
create_full_put_path on;
|
||||
|
||||
root /results;
|
||||
|
||||
autoindex on;
|
||||
}
|
||||
}
|
@@ -5,7 +5,6 @@
|
||||
|
||||
# We're run from the root of the repo, make a helper var for our paths
|
||||
BM=$CI_PROJECT_DIR/install/bare-metal
|
||||
CI_COMMON=$CI_PROJECT_DIR/install/common
|
||||
|
||||
# Runner config checks
|
||||
if [ -z "$BM_SERIAL" ]; then
|
||||
|
@@ -66,7 +66,7 @@ class PoERun:
|
||||
self.print_error("Memory overflow in the binner; GPU hang")
|
||||
return 1
|
||||
|
||||
result = re.search("hwci: mesa: (\S*)", line)
|
||||
result = re.search("bare-metal result: (\S*)", line)
|
||||
if result:
|
||||
if result.group(1) == "pass":
|
||||
return 0
|
||||
|
@@ -5,25 +5,88 @@ rootfs_dst=$1
|
||||
mkdir -p $rootfs_dst/results
|
||||
|
||||
# Set up the init script that brings up the system.
|
||||
cp $BM/bm-init.sh $rootfs_dst/init
|
||||
cp $CI_COMMON/init*.sh $rootfs_dst/
|
||||
cp $BM/init.sh $rootfs_dst/init
|
||||
|
||||
# Make JWT token available as file in the bare-metal storage to enable access
|
||||
# to MinIO
|
||||
cp "${CI_JOB_JWT_FILE}" "${rootfs_dst}${CI_JOB_JWT_FILE}"
|
||||
|
||||
cp $CI_COMMON/capture-devcoredump.sh $rootfs_dst/
|
||||
cp $BM/capture-devcoredump.sh $rootfs_dst/
|
||||
|
||||
set +x
|
||||
|
||||
# Pass through relevant env vars from the gitlab job to the baremetal init script
|
||||
"$CI_COMMON"/generate-env.sh > $rootfs_dst/set-job-env-vars.sh
|
||||
touch $rootfs_dst/set-job-env-vars.sh
|
||||
chmod +x $rootfs_dst/set-job-env-vars.sh
|
||||
for var in \
|
||||
BARE_METAL_TEST_SCRIPT \
|
||||
BM_KERNEL_MODULES \
|
||||
BM_START_XORG \
|
||||
CI_COMMIT_BRANCH \
|
||||
CI_COMMIT_TITLE \
|
||||
CI_JOB_ID \
|
||||
CI_JOB_JWT \
|
||||
CI_JOB_URL \
|
||||
CI_MERGE_REQUEST_SOURCE_BRANCH_NAME \
|
||||
CI_MERGE_REQUEST_TITLE \
|
||||
CI_NODE_INDEX \
|
||||
CI_NODE_TOTAL \
|
||||
CI_PAGES_DOMAIN \
|
||||
CI_PIPELINE_ID \
|
||||
CI_PROJECT_NAME \
|
||||
CI_PROJECT_PATH \
|
||||
CI_PROJECT_ROOT_NAMESPACE \
|
||||
CI_RUNNER_DESCRIPTION \
|
||||
CI_SERVER_URL \
|
||||
DEQP_CASELIST_FILTER \
|
||||
DEQP_CONFIG \
|
||||
DEQP_EXPECTED_RENDERER \
|
||||
DEQP_FRACTION \
|
||||
DEQP_HEIGHT \
|
||||
DEQP_NO_SAVE_RESULTS \
|
||||
DEQP_PARALLEL \
|
||||
DEQP_RESULTS_DIR \
|
||||
DEQP_RUNNER_OPTIONS \
|
||||
DEQP_VARIANT \
|
||||
DEQP_VER \
|
||||
DEQP_WIDTH \
|
||||
DEVICE_NAME \
|
||||
DRIVER_NAME \
|
||||
EGL_PLATFORM \
|
||||
FDO_CI_CONCURRENT \
|
||||
FDO_UPSTREAM_REPO \
|
||||
FD_MESA_DEBUG \
|
||||
FLAKES_CHANNEL \
|
||||
GPU_VERSION \
|
||||
IR3_SHADER_DEBUG \
|
||||
MESA_GL_VERSION_OVERRIDE \
|
||||
MESA_GLSL_VERSION_OVERRIDE \
|
||||
MESA_GLES_VERSION_OVERRIDE \
|
||||
MINIO_HOST \
|
||||
NIR_VALIDATE \
|
||||
PIGLIT_FRACTION \
|
||||
PIGLIT_HTML_SUMMARY \
|
||||
PIGLIT_JUNIT_RESULTS \
|
||||
PIGLIT_OPTIONS \
|
||||
PIGLIT_PLATFORM \
|
||||
PIGLIT_PROFILES \
|
||||
PIGLIT_REPLAY_ARTIFACTS_BASE_URL \
|
||||
PIGLIT_REPLAY_DESCRIPTION_FILE \
|
||||
PIGLIT_REPLAY_DEVICE_NAME \
|
||||
PIGLIT_REPLAY_EXTRA_ARGS \
|
||||
PIGLIT_REPLAY_REFERENCE_IMAGES_BASE_URL \
|
||||
PIGLIT_REPLAY_UPLOAD_TO_MINIO \
|
||||
PIGLIT_RESULTS \
|
||||
PIGLIT_TESTS \
|
||||
TEST_LD_PRELOAD \
|
||||
TU_DEBUG \
|
||||
VK_CPU \
|
||||
VK_DRIVER \
|
||||
; do
|
||||
if [ -n "${!var+x}" ]; then
|
||||
echo "export $var=${!var@Q}" >> $rootfs_dst/set-job-env-vars.sh
|
||||
fi
|
||||
done
|
||||
echo "Variables passed through:"
|
||||
cat $rootfs_dst/set-job-env-vars.sh
|
||||
|
||||
set -x
|
||||
|
||||
# Add the Mesa drivers we built, and make a consistent symlink to them.
|
||||
mkdir -p $rootfs_dst/$CI_PROJECT_DIR
|
||||
rsync -aH --delete $CI_PROJECT_DIR/install/ $rootfs_dst/$CI_PROJECT_DIR/install/
|
||||
ln -sf $CI_PROJECT_DIR/install $rootfs_dst/install
|
||||
|
@@ -28,6 +28,7 @@ import serial
|
||||
import threading
|
||||
import time
|
||||
|
||||
|
||||
class SerialBuffer:
|
||||
def __init__(self, dev, filename, prefix, timeout = None):
|
||||
self.filename = filename
|
||||
@@ -35,17 +36,15 @@ class SerialBuffer:
|
||||
|
||||
if dev:
|
||||
self.f = open(filename, "wb+")
|
||||
self.serial = serial.Serial(dev, 115200, timeout=timeout)
|
||||
self.serial = serial.Serial(dev, 115200, timeout=timeout if timeout else 10)
|
||||
else:
|
||||
self.f = open(filename, "rb")
|
||||
self.serial = None
|
||||
|
||||
self.byte_queue = queue.Queue()
|
||||
self.line_queue = queue.Queue()
|
||||
self.prefix = prefix
|
||||
self.timeout = timeout
|
||||
self.sentinel = object()
|
||||
self.closing = False
|
||||
|
||||
if self.dev:
|
||||
self.read_thread = threading.Thread(
|
||||
@@ -59,31 +58,24 @@ class SerialBuffer:
|
||||
target=self.serial_lines_thread_loop, daemon=True)
|
||||
self.lines_thread.start()
|
||||
|
||||
def close(self):
|
||||
self.closing = True
|
||||
if self.serial:
|
||||
self.serial.cancel_read()
|
||||
self.read_thread.join()
|
||||
self.lines_thread.join()
|
||||
if self.serial:
|
||||
self.serial.close()
|
||||
|
||||
# Thread that just reads the bytes from the serial device to try to keep from
|
||||
# buffer overflowing it. If nothing is received in 1 minute, it finalizes.
|
||||
def serial_read_thread_loop(self):
|
||||
greet = "Serial thread reading from %s\n" % self.dev
|
||||
self.byte_queue.put(greet.encode())
|
||||
|
||||
while not self.closing:
|
||||
while True:
|
||||
try:
|
||||
b = self.serial.read()
|
||||
if len(b) == 0:
|
||||
if len(b) > 0:
|
||||
self.byte_queue.put(b)
|
||||
elif self.timeout:
|
||||
self.byte_queue.put(self.sentinel)
|
||||
break
|
||||
self.byte_queue.put(b)
|
||||
except Exception as err:
|
||||
print(self.prefix + str(err))
|
||||
self.byte_queue.put(self.sentinel)
|
||||
break
|
||||
self.byte_queue.put(self.sentinel)
|
||||
|
||||
# Thread that just reads the bytes from the file of serial output that some
|
||||
# other process is appending to.
|
||||
@@ -91,13 +83,12 @@ class SerialBuffer:
|
||||
greet = "Serial thread reading from %s\n" % self.filename
|
||||
self.byte_queue.put(greet.encode())
|
||||
|
||||
while not self.closing:
|
||||
while True:
|
||||
line = self.f.readline()
|
||||
if line:
|
||||
self.byte_queue.put(line)
|
||||
else:
|
||||
time.sleep(0.1)
|
||||
self.byte_queue.put(self.sentinel)
|
||||
|
||||
# Thread that processes the stream of bytes to 1) log to stdout, 2) log to
|
||||
# file, 3) add to the queue of lines to be read by program logic
|
||||
|
@@ -1,41 +0,0 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
# Copyright © 2020 Christian Gmeiner
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a
|
||||
# copy of this software and associated documentation files (the "Software"),
|
||||
# to deal in the Software without restriction, including without limitation
|
||||
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
||||
# and/or sell copies of the Software, and to permit persons to whom the
|
||||
# Software is furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice (including the next
|
||||
# paragraph) shall be included in all copies or substantial portions of the
|
||||
# Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
||||
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
#
|
||||
# Tiny script to read bytes from telnet, and write the output to stdout, with a
|
||||
# buffer in between so we don't lose serial output from its buffer.
|
||||
#
|
||||
|
||||
import sys
|
||||
import telnetlib
|
||||
|
||||
host=sys.argv[1]
|
||||
port=sys.argv[2]
|
||||
|
||||
tn = telnetlib.Telnet(host, port, 1000000)
|
||||
|
||||
while True:
|
||||
bytes = tn.read_some()
|
||||
sys.stdout.buffer.write(bytes)
|
||||
sys.stdout.flush()
|
||||
|
||||
tn.close()
|
@@ -1,111 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
for var in \
|
||||
ASAN_OPTIONS \
|
||||
BASE_SYSTEM_FORK_HOST_PREFIX \
|
||||
BASE_SYSTEM_MAINLINE_HOST_PREFIX \
|
||||
CI_COMMIT_BRANCH \
|
||||
CI_COMMIT_REF_NAME \
|
||||
CI_COMMIT_TITLE \
|
||||
CI_JOB_ID \
|
||||
CI_JOB_JWT_FILE \
|
||||
CI_JOB_NAME \
|
||||
CI_JOB_URL \
|
||||
CI_MERGE_REQUEST_SOURCE_BRANCH_NAME \
|
||||
CI_MERGE_REQUEST_TITLE \
|
||||
CI_NODE_INDEX \
|
||||
CI_NODE_TOTAL \
|
||||
CI_PAGES_DOMAIN \
|
||||
CI_PIPELINE_ID \
|
||||
CI_PIPELINE_URL \
|
||||
CI_PROJECT_DIR \
|
||||
CI_PROJECT_NAME \
|
||||
CI_PROJECT_PATH \
|
||||
CI_PROJECT_ROOT_NAMESPACE \
|
||||
CI_RUNNER_DESCRIPTION \
|
||||
CI_SERVER_URL \
|
||||
CROSVM_GALLIUM_DRIVER \
|
||||
CROSVM_GPU_ARGS \
|
||||
CROSVM_TEST_SCRIPT \
|
||||
DEQP_BIN_DIR \
|
||||
DEQP_CASELIST_FILTER \
|
||||
DEQP_CASELIST_INV_FILTER \
|
||||
DEQP_CONFIG \
|
||||
DEQP_EXPECTED_RENDERER \
|
||||
DEQP_FRACTION \
|
||||
DEQP_HEIGHT \
|
||||
DEQP_RESULTS_DIR \
|
||||
DEQP_RUNNER_OPTIONS \
|
||||
DEQP_SUITE \
|
||||
DEQP_TEMP_DIR \
|
||||
DEQP_VARIANT \
|
||||
DEQP_VER \
|
||||
DEQP_WIDTH \
|
||||
DEVICE_NAME \
|
||||
DRIVER_NAME \
|
||||
EGL_PLATFORM \
|
||||
ETNA_MESA_DEBUG \
|
||||
FDO_CI_CONCURRENT \
|
||||
FDO_UPSTREAM_REPO \
|
||||
FD_MESA_DEBUG \
|
||||
FLAKES_CHANNEL \
|
||||
GALLIUM_DRIVER \
|
||||
GALLIVM_PERF \
|
||||
GPU_VERSION \
|
||||
GTEST \
|
||||
GTEST_FAILS \
|
||||
GTEST_FRACTION \
|
||||
GTEST_RESULTS_DIR \
|
||||
GTEST_RUNNER_OPTIONS \
|
||||
GTEST_SKIPS \
|
||||
HWCI_FREQ_MAX \
|
||||
HWCI_KERNEL_MODULES \
|
||||
HWCI_START_XORG \
|
||||
HWCI_TEST_SCRIPT \
|
||||
IR3_SHADER_DEBUG \
|
||||
JOB_ARTIFACTS_BASE \
|
||||
JOB_RESULTS_PATH \
|
||||
JOB_ROOTFS_OVERLAY_PATH \
|
||||
LD_LIBRARY_PATH \
|
||||
LP_NUM_THREADS \
|
||||
MESA_BASE_TAG \
|
||||
MESA_BUILD_PATH \
|
||||
MESA_DEBUG \
|
||||
MESA_GLES_VERSION_OVERRIDE \
|
||||
MESA_GLSL_VERSION_OVERRIDE \
|
||||
MESA_GL_VERSION_OVERRIDE \
|
||||
MESA_IMAGE \
|
||||
MESA_IMAGE_PATH \
|
||||
MESA_IMAGE_TAG \
|
||||
MESA_TEMPLATES_COMMIT \
|
||||
MESA_VK_IGNORE_CONFORMANCE_WARNING \
|
||||
MINIO_HOST \
|
||||
NIR_DEBUG \
|
||||
PAN_I_WANT_A_BROKEN_VULKAN_DRIVER \
|
||||
PAN_MESA_DEBUG \
|
||||
PIGLIT_FRACTION \
|
||||
PIGLIT_NO_WINDOW \
|
||||
PIGLIT_OPTIONS \
|
||||
PIGLIT_PLATFORM \
|
||||
PIGLIT_PROFILES \
|
||||
PIGLIT_REPLAY_ARTIFACTS_BASE_URL \
|
||||
PIGLIT_REPLAY_DESCRIPTION_FILE \
|
||||
PIGLIT_REPLAY_DEVICE_NAME \
|
||||
PIGLIT_REPLAY_EXTRA_ARGS \
|
||||
PIGLIT_REPLAY_REFERENCE_IMAGES_BASE \
|
||||
PIGLIT_REPLAY_SUBCOMMAND \
|
||||
PIGLIT_RESULTS \
|
||||
PIGLIT_TESTS \
|
||||
PIPELINE_ARTIFACTS_BASE \
|
||||
SKQP_ASSETS_DIR \
|
||||
SKQP_BACKENDS \
|
||||
TU_DEBUG \
|
||||
VIRGL_HOST_API \
|
||||
VK_CPU \
|
||||
VK_DRIVER \
|
||||
VK_ICD_FILENAMES \
|
||||
; do
|
||||
if [ -n "${!var+x}" ]; then
|
||||
echo "export $var=${!var@Q}"
|
||||
fi
|
||||
done
|
@@ -1,22 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# Very early init, used to make sure devices and network are set up and
|
||||
# reachable.
|
||||
|
||||
set -ex
|
||||
|
||||
cd /
|
||||
|
||||
mount -t proc none /proc
|
||||
mount -t sysfs none /sys
|
||||
mount -t devtmpfs none /dev || echo possibly already mounted
|
||||
mkdir -p /dev/pts
|
||||
mount -t devpts devpts /dev/pts
|
||||
mount -t tmpfs tmpfs /tmp
|
||||
|
||||
echo "nameserver 8.8.8.8" > /etc/resolv.conf
|
||||
[ -z "$NFS_SERVER_IP" ] || echo "$NFS_SERVER_IP caching-proxy" >> /etc/hosts
|
||||
|
||||
# Set the time so we can validate certificates before we fetch anything;
|
||||
# however as not all DUTs have network, make this non-fatal.
|
||||
for i in 1 2 3; do sntp -sS pool.ntp.org && break || sleep 2; done || true
|
@@ -1,78 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# Second-stage init, used to set up devices and our job environment before
|
||||
# running tests.
|
||||
|
||||
. /set-job-env-vars.sh
|
||||
|
||||
set -ex
|
||||
|
||||
# Set up any devices required by the jobs
|
||||
[ -z "$HWCI_KERNEL_MODULES" ] || (echo -n $HWCI_KERNEL_MODULES | xargs -d, -n1 /usr/sbin/modprobe)
|
||||
|
||||
# Fix prefix confusion: the build installs to $CI_PROJECT_DIR, but we expect
|
||||
# it in /install
|
||||
ln -sf $CI_PROJECT_DIR/install /install
|
||||
export LD_LIBRARY_PATH=/install/lib
|
||||
export LIBGL_DRIVERS_PATH=/install/lib/dri
|
||||
|
||||
# Store Mesa's disk cache under /tmp, rather than sending it out over NFS.
|
||||
export XDG_CACHE_HOME=/tmp
|
||||
|
||||
# Make sure Python can find all our imports
|
||||
export PYTHONPATH=$(python3 -c "import sys;print(\":\".join(sys.path))")
|
||||
|
||||
if [ "$HWCI_FREQ_MAX" = "true" ]; then
|
||||
# Ensure initialization of the DRM device (needed by MSM)
|
||||
head -0 /dev/dri/renderD128
|
||||
|
||||
# Disable GPU frequency scaling
|
||||
DEVFREQ_GOVERNOR=`find /sys/devices -name governor | grep gpu || true`
|
||||
test -z "$DEVFREQ_GOVERNOR" || echo performance > $DEVFREQ_GOVERNOR || true
|
||||
|
||||
# Disable CPU frequency scaling
|
||||
echo performance | tee -a /sys/devices/system/cpu/cpufreq/policy*/scaling_governor || true
|
||||
|
||||
# Disable GPU runtime power management
|
||||
GPU_AUTOSUSPEND=`find /sys/devices -name autosuspend_delay_ms | grep gpu | head -1`
|
||||
test -z "$GPU_AUTOSUSPEND" || echo -1 > $GPU_AUTOSUSPEND || true
|
||||
fi
|
||||
|
||||
# Start a little daemon to capture the first devcoredump we encounter. (They
|
||||
# expire after 5 minutes, so we poll for them).
|
||||
./capture-devcoredump.sh &
|
||||
|
||||
# If we want Xorg to be running for the test, then we start it up before the
|
||||
# HWCI_TEST_SCRIPT because we need to use xinit to start X (otherwise
|
||||
# without using -displayfd you can race with Xorg's startup), but xinit will eat
|
||||
# your client's return code
|
||||
if [ -n "$HWCI_START_XORG" ]; then
|
||||
echo "touch /xorg-started; sleep 100000" > /xorg-script
|
||||
env \
|
||||
xinit /bin/sh /xorg-script -- /usr/bin/Xorg -noreset -s 0 -dpms -logfile /Xorg.0.log &
|
||||
|
||||
# Wait for xorg to be ready for connections.
|
||||
for i in 1 2 3 4 5; do
|
||||
if [ -e /xorg-started ]; then
|
||||
break
|
||||
fi
|
||||
sleep 5
|
||||
done
|
||||
export DISPLAY=:0
|
||||
fi
|
||||
|
||||
RESULT=fail
|
||||
if sh $HWCI_TEST_SCRIPT; then
|
||||
RESULT=pass
|
||||
rm -rf results/trace/$PIGLIT_REPLAY_DEVICE_NAME
|
||||
fi
|
||||
|
||||
# upload artifacts
|
||||
MINIO=$(cat /proc/cmdline | tr ' ' '\n' | grep minio_results | cut -d '=' -f 2 || true)
|
||||
if [ -n "$MINIO" ]; then
|
||||
tar -czf results.tar.gz results/;
|
||||
ci-fairy minio login --token-file "${CI_JOB_JWT_FILE}";
|
||||
ci-fairy minio cp results.tar.gz minio://"$MINIO"/results.tar.gz;
|
||||
fi
|
||||
|
||||
echo "hwci: mesa: $RESULT"
|
@@ -1,21 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -ex
|
||||
|
||||
_XORG_SCRIPT="/xorg-script"
|
||||
_FLAG_FILE="/xorg-started"
|
||||
|
||||
echo "touch ${_FLAG_FILE}; sleep 100000" > "${_XORG_SCRIPT}"
|
||||
if [ "x$1" != "x" ]; then
|
||||
export LD_LIBRARY_PATH="${1}/lib"
|
||||
export LIBGL_DRIVERS_PATH="${1}/lib/dri"
|
||||
fi
|
||||
xinit /bin/sh "${_XORG_SCRIPT}" -- /usr/bin/Xorg vt45 -noreset -s 0 -dpms -logfile /Xorg.0.log &
|
||||
|
||||
# Wait for xorg to be ready for connections.
|
||||
for i in 1 2 3 4 5; do
|
||||
if [ -e "${_FLAG_FILE}" ]; then
|
||||
break
|
||||
fi
|
||||
sleep 5
|
||||
done
|
60
.gitlab-ci/container/android_build.sh
Normal file
60
.gitlab-ci/container/android_build.sh
Normal file
@@ -0,0 +1,60 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
EPHEMERAL="\
|
||||
rdfind \
|
||||
unzip \
|
||||
"
|
||||
|
||||
apt-get install -y --no-remove $EPHEMERAL
|
||||
|
||||
# Fetch the NDK and extract just the toolchain we want.
|
||||
ndk=android-ndk-r21d
|
||||
wget -O $ndk.zip https://dl.google.com/android/repository/$ndk-linux-x86_64.zip
|
||||
unzip -d / $ndk.zip "$ndk/toolchains/llvm/*"
|
||||
rm $ndk.zip
|
||||
# Since it was packed as a zip file, symlinks/hardlinks got turned into
|
||||
# duplicate files. Turn them into hardlinks to save on container space.
|
||||
rdfind -makehardlinks true -makeresultsfile false /android-ndk-r21d/
|
||||
# Drop some large tools we won't use in this build.
|
||||
find /android-ndk-r21d/ -type f | egrep -i "clang-check|clang-tidy|lldb" | xargs rm -f
|
||||
|
||||
sh .gitlab-ci/container/create-android-ndk-pc.sh /$ndk zlib.pc "" "-lz" "1.2.3"
|
||||
|
||||
sh .gitlab-ci/container/create-android-cross-file.sh /$ndk x86_64-linux-android x86_64 x86_64
|
||||
sh .gitlab-ci/container/create-android-cross-file.sh /$ndk i686-linux-android x86 x86
|
||||
sh .gitlab-ci/container/create-android-cross-file.sh /$ndk aarch64-linux-android arm armv8
|
||||
sh .gitlab-ci/container/create-android-cross-file.sh /$ndk arm-linux-androideabi arm armv7hl armv7a-linux-androideabi
|
||||
|
||||
# Not using build-libdrm.sh because we don't want its cleanup after building
|
||||
# each arch. Fetch and extract now.
|
||||
export LIBDRM_VERSION=libdrm-2.4.102
|
||||
wget https://dri.freedesktop.org/libdrm/$LIBDRM_VERSION.tar.xz
|
||||
tar -xf $LIBDRM_VERSION.tar.xz && rm $LIBDRM_VERSION.tar.xz
|
||||
|
||||
for arch in \
|
||||
x86_64-linux-android \
|
||||
i686-linux-android \
|
||||
aarch64-linux-android \
|
||||
arm-linux-androideabi ; do
|
||||
|
||||
cd $LIBDRM_VERSION
|
||||
rm -rf build-$arch
|
||||
meson build-$arch \
|
||||
--cross-file=/cross_file-$arch.txt \
|
||||
--libdir=lib/$arch \
|
||||
-Dlibkms=false \
|
||||
-Dnouveau=false \
|
||||
-Dvc4=false \
|
||||
-Detnaviv=false \
|
||||
-Dfreedreno=false \
|
||||
-Dintel=false \
|
||||
-Dcairo-tests=false
|
||||
ninja -C build-$arch install
|
||||
cd ..
|
||||
done
|
||||
|
||||
rm -rf $LIBDRM_VERSION
|
||||
|
||||
apt-get purge -y $EPHEMERAL
|
@@ -1,8 +1,6 @@
|
||||
CONFIG_LOCALVERSION_AUTO=y
|
||||
CONFIG_DEBUG_KERNEL=y
|
||||
CONFIG_LOCALVERSION="ccu"
|
||||
|
||||
# abootimg with a 'dummy' rootfs fails with root=/dev/nfs
|
||||
CONFIG_BLK_DEV_INITRD=n
|
||||
CONFIG_DEBUG_KERNEL=y
|
||||
|
||||
CONFIG_DEVFREQ_GOV_PERFORMANCE=y
|
||||
CONFIG_DEVFREQ_GOV_POWERSAVE=y
|
||||
@@ -11,7 +9,6 @@ CONFIG_DEVFREQ_GOV_PASSIVE=y
|
||||
CONFIG_DEVFREQ_GOV_SIMPLE_ONDEMAND=y
|
||||
|
||||
CONFIG_DRM=y
|
||||
CONFIG_DRM_ETNAVIV=y
|
||||
CONFIG_DRM_ROCKCHIP=y
|
||||
CONFIG_DRM_PANFROST=y
|
||||
CONFIG_DRM_LIMA=y
|
||||
|
@@ -1,8 +1,6 @@
|
||||
CONFIG_LOCALVERSION_AUTO=y
|
||||
CONFIG_DEBUG_KERNEL=y
|
||||
CONFIG_LOCALVERSION="ccu"
|
||||
|
||||
# abootimg with a 'dummy' rootfs fails with root=/dev/nfs
|
||||
CONFIG_BLK_DEV_INITRD=n
|
||||
CONFIG_DEBUG_KERNEL=y
|
||||
|
||||
CONFIG_DEVFREQ_GOV_PERFORMANCE=y
|
||||
CONFIG_DEVFREQ_GOV_POWERSAVE=y
|
||||
@@ -14,9 +12,9 @@ CONFIG_DRM_ROCKCHIP=y
|
||||
CONFIG_DRM_PANFROST=y
|
||||
CONFIG_DRM_LIMA=y
|
||||
CONFIG_DRM_PANEL_SIMPLE=y
|
||||
CONFIG_DRM_PANEL_EDP=y
|
||||
CONFIG_DRM_MSM=y
|
||||
CONFIG_DRM_I2C_ADV7511=y
|
||||
CONFIG_DRM_I2C_ADV7533=y
|
||||
CONFIG_PWM_CROS_EC=y
|
||||
CONFIG_BACKLIGHT_PWM=y
|
||||
|
||||
@@ -32,11 +30,6 @@ CONFIG_TYPEC=y
|
||||
CONFIG_TYPEC_TCPM=y
|
||||
|
||||
# MSM platform bits
|
||||
|
||||
# For CONFIG_QCOM_LMH
|
||||
CONFIG_OF=y
|
||||
|
||||
CONFIG_QCOM_COMMAND_DB=y
|
||||
CONFIG_QCOM_RPMHPD=y
|
||||
CONFIG_QCOM_RPMPD=y
|
||||
CONFIG_SDM_GPUCC_845=y
|
||||
@@ -50,11 +43,9 @@ CONFIG_I2C_QCOM_GENI=y
|
||||
CONFIG_SPI_QCOM_GENI=y
|
||||
CONFIG_PHY_QCOM_QUSB2=y
|
||||
CONFIG_PHY_QCOM_QMP=y
|
||||
CONFIG_QCOM_CLK_APCC_MSM8996=y
|
||||
CONFIG_QCOM_LLCC=y
|
||||
CONFIG_QCOM_LMH=y
|
||||
CONFIG_QCOM_SPMI_TEMP_ALARM=y
|
||||
CONFIG_QCOM_WDT=y
|
||||
CONFIG_QCOM_CLK_APCC_MSM8996=y
|
||||
CONFIG_POWER_RESET_QCOM_PON=y
|
||||
CONFIG_RTC_DRV_PM8XXX=y
|
||||
CONFIG_INTERCONNECT=y
|
||||
@@ -63,9 +54,8 @@ CONFIG_INTERCONNECT_QCOM_SDM845=y
|
||||
CONFIG_INTERCONNECT_QCOM_MSM8916=y
|
||||
CONFIG_INTERCONNECT_QCOM_OSM_L3=y
|
||||
CONFIG_INTERCONNECT_QCOM_SC7180=y
|
||||
CONFIG_QCOM_WDT=y
|
||||
CONFIG_CRYPTO_DEV_QCOM_RNG=y
|
||||
CONFIG_SC_DISPCC_7180=y
|
||||
CONFIG_SC_GPUCC_7180=y
|
||||
|
||||
# db410c ethernet
|
||||
CONFIG_USB_RTL8152=y
|
||||
@@ -82,6 +72,7 @@ CONFIG_ARCH_K3=n
|
||||
CONFIG_ARCH_LAYERSCAPE=n
|
||||
CONFIG_ARCH_LG1K=n
|
||||
CONFIG_ARCH_HISI=n
|
||||
CONFIG_ARCH_MEDIATEK=n
|
||||
CONFIG_ARCH_MVEBU=n
|
||||
CONFIG_ARCH_SEATTLE=n
|
||||
CONFIG_ARCH_SYNQUACER=n
|
||||
@@ -144,16 +135,3 @@ CONFIG_USB_NET_SMSC95XX=y
|
||||
# For amlogic
|
||||
CONFIG_MESON_GXL_PHY=y
|
||||
CONFIG_MDIO_BUS_MUX_MESON_G12A=y
|
||||
CONFIG_DRM_MESON=y
|
||||
|
||||
# For Mediatek
|
||||
CONFIG_DRM_MEDIATEK=y
|
||||
CONFIG_PWM_MEDIATEK=y
|
||||
CONFIG_DRM_MEDIATEK_HDMI=y
|
||||
CONFIG_GNSS=y
|
||||
CONFIG_GNSS_MTK_SERIAL=y
|
||||
CONFIG_HW_RANDOM=y
|
||||
CONFIG_HW_RANDOM_MTK=y
|
||||
CONFIG_MTK_DEVAPC=y
|
||||
CONFIG_PWM_MTK_DISP=y
|
||||
CONFIG_MTK_CMDQ=y
|
||||
|
@@ -8,6 +8,11 @@ sed -i -e 's/http:\/\/deb/https:\/\/deb/g' /etc/apt/sources.list
|
||||
echo 'deb https://deb.debian.org/debian buster main' >/etc/apt/sources.list.d/buster.list
|
||||
apt-get update
|
||||
|
||||
EPHEMERAL="
|
||||
python3-pytest-runner
|
||||
python3-wheel
|
||||
"
|
||||
|
||||
apt-get -y install \
|
||||
abootimg \
|
||||
autoconf \
|
||||
@@ -43,21 +48,33 @@ apt-get -y install \
|
||||
llvm-11-dev \
|
||||
meson \
|
||||
pkg-config \
|
||||
python-is-python3 \
|
||||
python3-aiohttp \
|
||||
python3-jinja2 \
|
||||
python3-mako \
|
||||
python3-pil \
|
||||
python3-pip \
|
||||
python3-requests \
|
||||
python3-setuptools \
|
||||
python3-yaml \
|
||||
python3-zmq \
|
||||
u-boot-tools \
|
||||
unzip \
|
||||
wget \
|
||||
xz-utils \
|
||||
zlib1g-dev
|
||||
zlib1g-dev \
|
||||
$EPHEMERAL
|
||||
|
||||
# Update lavacli to v1.1+
|
||||
pip3 install git+https://git.lavasoftware.org/lava/lavacli@3db3ddc45e5358908bc6a17448059ea2340492b7
|
||||
|
||||
# Not available anymore in bullseye
|
||||
apt-get install -y --no-remove -t buster \
|
||||
android-sdk-ext4-utils
|
||||
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@34f4ade99434043f88e164933f570301fd18b125
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@6f5af7e5574509726c79109e3c147cee95e81366
|
||||
|
||||
apt-get purge -y $EPHEMERAL
|
||||
|
||||
arch=armhf
|
||||
. .gitlab-ci/container/cross_build.sh
|
@@ -13,14 +13,21 @@ apt-get install -y --no-remove \
|
||||
cpio \
|
||||
fastboot \
|
||||
netcat \
|
||||
nginx-full \
|
||||
procps \
|
||||
python-is-python3 \
|
||||
python3-distutils \
|
||||
python3-minimal \
|
||||
python3-serial \
|
||||
rsync \
|
||||
snmp \
|
||||
unzip \
|
||||
wget
|
||||
|
||||
# setup nginx
|
||||
sed -i '/gzip_/ s/#\ //g' /etc/nginx/nginx.conf
|
||||
cp .gitlab-ci/bare-metal/nginx-default-site /etc/nginx/sites-enabled/default
|
||||
|
||||
# setup SNMPv2 SMI MIB
|
||||
wget https://raw.githubusercontent.com/net-snmp/net-snmp/master/mibs/SNMPv2-SMI.txt \
|
||||
-O /usr/share/snmp/mibs/SNMPv2-SMI.txt
|
@@ -31,18 +31,5 @@ if [[ $arch == "arm64" ]]; then
|
||||
wget ${ARTIFACTS_URL}/$DTB
|
||||
done
|
||||
|
||||
popd
|
||||
elif [[ $arch == "armhf" ]]; then
|
||||
mkdir -p /baremetal-files
|
||||
pushd /baremetal-files
|
||||
|
||||
wget ${ARTIFACTS_URL}/zImage
|
||||
|
||||
DEVICE_TREES="imx6q-cubox-i.dtb"
|
||||
|
||||
for DTB in $DEVICE_TREES; do
|
||||
wget ${ARTIFACTS_URL}/$DTB
|
||||
done
|
||||
|
||||
popd
|
||||
fi
|
||||
|
@@ -1,18 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
APITRACE_VERSION="170424754bb46002ba706e16ee5404b61988d74a"
|
||||
|
||||
git clone https://github.com/apitrace/apitrace.git --single-branch --no-checkout /apitrace
|
||||
pushd /apitrace
|
||||
git checkout "$APITRACE_VERSION"
|
||||
git submodule update --init --depth 1 --recursive
|
||||
cmake -S . -B _build -G Ninja -DCMAKE_BUILD_TYPE=Release -DENABLE_GUI=False -DENABLE_WAFFLE=on $EXTRA_CMAKE_ARGS
|
||||
ninja -C _build
|
||||
mkdir build
|
||||
cp _build/apitrace build
|
||||
cp _build/eglretrace build
|
||||
${STRIP_CMD:-strip} build/*
|
||||
find . -not -path './build' -not -path './build/*' -delete
|
||||
popd
|
@@ -1,34 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
CROSVM_VERSION=d2b6a64dd31c92a284a905c0f2483d0b222b1220
|
||||
git clone --single-branch -b for-mesa-ci --no-checkout https://gitlab.freedesktop.org/tomeu/crosvm.git /platform/crosvm
|
||||
pushd /platform/crosvm
|
||||
git checkout "$CROSVM_VERSION"
|
||||
git submodule update --init
|
||||
|
||||
VIRGLRENDERER_VERSION=2a5fb800c6b0ce15ad37c2c698635e3e2d27b37c
|
||||
pushd third_party/virglrenderer
|
||||
git checkout "$VIRGLRENDERER_VERSION"
|
||||
meson build/ $EXTRA_MESON_ARGS
|
||||
ninja -C build install
|
||||
popd
|
||||
|
||||
RUSTFLAGS='-L native=/usr/local/lib' cargo install \
|
||||
bindgen \
|
||||
-j ${FDO_CI_CONCURRENT:-4} \
|
||||
--root /usr/local \
|
||||
$EXTRA_CARGO_ARGS
|
||||
|
||||
RUSTFLAGS='-L native=/usr/local/lib' cargo install \
|
||||
-j ${FDO_CI_CONCURRENT:-4} \
|
||||
--locked \
|
||||
--features 'default-no-sandbox gpu x virgl_renderer virgl_renderer_next' \
|
||||
--path . \
|
||||
--root /usr/local \
|
||||
$EXTRA_CARGO_ARGS
|
||||
|
||||
popd
|
||||
|
||||
rm -rf $PLATFORM2_ROOT $AOSP_EXTERNAL_ROOT/minijail $THIRD_PARTY_ROOT/adhd $THIRD_PARTY_ROOT/rust-vmm /platform/crosvm
|
@@ -1,22 +1,9 @@
|
||||
#!/bin/sh
|
||||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
if [ -n "${DEQP_RUNNER_GIT_TAG}${DEQP_RUNNER_GIT_REV}" ]; then
|
||||
# Build and install from source
|
||||
EXTRA_CARGO_ARGS="--git ${DEQP_RUNNER_GIT_URL:-https://gitlab.freedesktop.org/anholt/deqp-runner.git} ${EXTRA_CARGO_ARGS}"
|
||||
|
||||
if [ -n "${DEQP_RUNNER_GIT_TAG}" ]; then
|
||||
EXTRA_CARGO_ARGS="--tag ${DEQP_RUNNER_GIT_TAG} ${EXTRA_CARGO_ARGS}"
|
||||
else
|
||||
EXTRA_CARGO_ARGS="--rev ${DEQP_RUNNER_GIT_REV} ${EXTRA_CARGO_ARGS}"
|
||||
fi
|
||||
else
|
||||
# Install from package registry
|
||||
EXTRA_CARGO_ARGS="--version 0.11.0 ${EXTRA_CARGO_ARGS} -- deqp-runner"
|
||||
fi
|
||||
|
||||
cargo install --locked \
|
||||
-j ${FDO_CI_CONCURRENT:-4} \
|
||||
--root /usr/local \
|
||||
${EXTRA_CARGO_ARGS}
|
||||
cargo install --locked deqp-runner \
|
||||
-j ${FDO_CI_CONCURRENT:-4} \
|
||||
--version 0.6.5 \
|
||||
--root /usr/local \
|
||||
$EXTRA_CARGO_ARGS
|
||||
|
@@ -6,7 +6,7 @@ git config --global user.email "mesa@example.com"
|
||||
git config --global user.name "Mesa CI"
|
||||
git clone \
|
||||
https://github.com/KhronosGroup/VK-GL-CTS.git \
|
||||
-b vulkan-cts-1.2.7.2 \
|
||||
-b vulkan-cts-1.2.6.0 \
|
||||
--depth 1 \
|
||||
/VK-GL-CTS
|
||||
pushd /VK-GL-CTS
|
||||
@@ -23,40 +23,20 @@ cp doc/testlog-stylesheet/testlog.{css,xsl} /deqp
|
||||
popd
|
||||
|
||||
pushd /deqp
|
||||
# When including EGL/X11 testing, do that build first and save off its
|
||||
# deqp-egl binary.
|
||||
cmake -S /VK-GL-CTS -B . -G Ninja \
|
||||
-DDEQP_TARGET=x11_egl_glx \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
$EXTRA_CMAKE_ARGS
|
||||
ninja modules/egl/deqp-egl
|
||||
cp /deqp/modules/egl/deqp-egl /deqp/modules/egl/deqp-egl-x11
|
||||
|
||||
|
||||
cmake -S /VK-GL-CTS -B . -G Ninja \
|
||||
-DDEQP_TARGET=${DEQP_TARGET:-x11_glx} \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
$EXTRA_CMAKE_ARGS
|
||||
ninja
|
||||
|
||||
mv /deqp/modules/egl/deqp-egl-x11 /deqp/modules/egl/deqp-egl
|
||||
|
||||
# Copy out the mustpass lists we want.
|
||||
mkdir /deqp/mustpass
|
||||
for mustpass in $(< /VK-GL-CTS/external/vulkancts/mustpass/master/vk-default.txt) ; do
|
||||
cat /VK-GL-CTS/external/vulkancts/mustpass/master/$mustpass \
|
||||
>> /deqp/mustpass/vk-master.txt
|
||||
done
|
||||
cp /VK-GL-CTS/external/vulkancts/mustpass/master/vk-default.txt \
|
||||
/deqp/mustpass/vk-master.txt
|
||||
|
||||
cp \
|
||||
/deqp/external/openglcts/modules/gl_cts/data/mustpass/gles/aosp_mustpass/3.2.6.x/*.txt \
|
||||
/deqp/mustpass/.
|
||||
cp \
|
||||
/deqp/external/openglcts/modules/gl_cts/data/mustpass/egl/aosp_mustpass/3.2.6.x/egl-master.txt \
|
||||
/deqp/mustpass/.
|
||||
cp \
|
||||
/deqp/external/openglcts/modules/gl_cts/data/mustpass/gles/khronos_mustpass/3.2.6.x/*-master.txt \
|
||||
/deqp/mustpass/.
|
||||
cp \
|
||||
/deqp/external/openglcts/modules/gl_cts/data/mustpass/gl/khronos_mustpass/4.6.1.x/*-master.txt \
|
||||
/deqp/mustpass/.
|
||||
@@ -68,14 +48,11 @@ cp /deqp/executor/testlog-to-* /deqp/executor.save
|
||||
rm -rf /deqp/executor
|
||||
mv /deqp/executor.save /deqp/executor
|
||||
|
||||
# Remove other mustpass files, since we saved off the ones we wanted to conventient locations above.
|
||||
rm -rf /deqp/external/openglcts/modules/gl_cts/data/mustpass
|
||||
rm -rf /deqp/external/vulkancts/modules/vulkan/vk-master*
|
||||
rm -rf /deqp/external/vulkancts/modules/vulkan/vk-default
|
||||
|
||||
rm -rf /deqp/external/openglcts/modules/cts-runner
|
||||
rm -rf /deqp/modules/internal
|
||||
rm -rf /deqp/execserver
|
||||
rm -rf /deqp/modules/egl
|
||||
rm -rf /deqp/framework
|
||||
find -iname '*cmake*' -o -name '*ninja*' -o -name '*.o' -o -name '*.a' | xargs rm -rf
|
||||
${STRIP_CMD:-strip} external/vulkancts/modules/vulkan/deqp-vk
|
||||
|
@@ -1,16 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
PARALLEL_DEQP_RUNNER_VERSION=6596b71cf37a7efb4d54acd48c770ed2d4ad6b7e
|
||||
|
||||
git clone https://gitlab.freedesktop.org/mesa/parallel-deqp-runner --single-branch -b master --no-checkout /parallel-deqp-runner
|
||||
pushd /parallel-deqp-runner
|
||||
git checkout "$PARALLEL_DEQP_RUNNER_VERSION"
|
||||
meson . _build
|
||||
ninja -C _build hang-detection
|
||||
mkdir -p build/bin
|
||||
install _build/hang-detection build/bin
|
||||
strip build/bin/*
|
||||
find . -not -path './build' -not -path './build/*' -delete
|
||||
popd
|
@@ -1,51 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
mkdir -p kernel
|
||||
wget -qO- ${KERNEL_URL} | tar -xj --strip-components=1 -C kernel
|
||||
pushd kernel
|
||||
|
||||
# The kernel doesn't like the gold linker (or the old lld in our debians).
|
||||
# Sneak in some override symlinks during kernel build until we can update
|
||||
# debian (they'll get blown away by the rm of the kernel dir at the end).
|
||||
mkdir -p ld-links
|
||||
for i in /usr/bin/*-ld /usr/bin/ld; do
|
||||
i=`basename $i`
|
||||
ln -sf /usr/bin/$i.bfd ld-links/$i
|
||||
done
|
||||
export PATH=`pwd`/ld-links:$PATH
|
||||
|
||||
export LOCALVERSION="`basename $KERNEL_URL`"
|
||||
./scripts/kconfig/merge_config.sh ${DEFCONFIG} ../.gitlab-ci/container/${KERNEL_ARCH}.config
|
||||
make ${KERNEL_IMAGE_NAME}
|
||||
for image in ${KERNEL_IMAGE_NAME}; do
|
||||
cp arch/${KERNEL_ARCH}/boot/${image} /lava-files/.
|
||||
done
|
||||
|
||||
if [[ -n ${DEVICE_TREES} ]]; then
|
||||
make dtbs
|
||||
cp ${DEVICE_TREES} /lava-files/.
|
||||
fi
|
||||
|
||||
if [[ ${DEBIAN_ARCH} = "amd64" ]]; then
|
||||
make modules
|
||||
INSTALL_MOD_PATH=/lava-files/rootfs-${DEBIAN_ARCH}/ make modules_install
|
||||
fi
|
||||
|
||||
if [[ ${DEBIAN_ARCH} = "arm64" ]]; then
|
||||
make Image.lzma
|
||||
mkimage \
|
||||
-f auto \
|
||||
-A arm \
|
||||
-O linux \
|
||||
-d arch/arm64/boot/Image.lzma \
|
||||
-C lzma\
|
||||
-b arch/arm64/boot/dts/qcom/sdm845-cheza-r3.dtb \
|
||||
/lava-files/cheza-kernel
|
||||
KERNEL_IMAGE_NAME+=" cheza-kernel"
|
||||
fi
|
||||
|
||||
popd
|
||||
rm -rf kernel
|
||||
|
@@ -2,7 +2,7 @@
|
||||
|
||||
set -ex
|
||||
|
||||
export LIBDRM_VERSION=libdrm-2.4.109
|
||||
export LIBDRM_VERSION=libdrm-2.4.105
|
||||
|
||||
wget https://dri.freedesktop.org/libdrm/$LIBDRM_VERSION.tar.xz
|
||||
tar -xvf $LIBDRM_VERSION.tar.xz && rm $LIBDRM_VERSION.tar.xz
|
||||
|
@@ -2,9 +2,13 @@
|
||||
|
||||
set -ex
|
||||
|
||||
if [ -n "$INCLUDE_OPENCL_TESTS" ]; then
|
||||
PIGLIT_OPTS="-DPIGLIT_BUILD_CL_TESTS=ON"
|
||||
fi
|
||||
|
||||
git clone https://gitlab.freedesktop.org/mesa/piglit.git --single-branch --no-checkout /piglit
|
||||
pushd /piglit
|
||||
git checkout af1785f31f65622d9b1ca1c08c75cf140bc7ed22
|
||||
git checkout 6a4be9e9946df310d9402f995f371c7deb8c27ba
|
||||
patch -p1 <$OLDPWD/.gitlab-ci/piglit/disable-vs_in.diff
|
||||
cmake -S . -B . -G Ninja -DCMAKE_BUILD_TYPE=Release $PIGLIT_OPTS $EXTRA_CMAKE_ARGS
|
||||
ninja $PIGLIT_BUILD_TARGETS
|
||||
|
@@ -15,10 +15,10 @@ ln -s /usr/local/bin $HOME/.cargo/bin
|
||||
# version of the compiler, rather than whatever the container's Debian comes
|
||||
# with.
|
||||
#
|
||||
# Pick the rust compiler (1.48) available in Debian stable, and pick a specific
|
||||
# Pick the rust compiler (1.41) available in Debian stable, and pick a specific
|
||||
# snapshot from rustup so the compiler doesn't drift on us.
|
||||
wget https://sh.rustup.rs -O - | \
|
||||
sh -s -- -y --default-toolchain 1.49.0-2020-12-31
|
||||
sh -s -- -y --default-toolchain 1.41.1-2020-02-27
|
||||
|
||||
# Set up a config script for cross compiling -- cargo needs your system cc for
|
||||
# linking in cross builds, but doesn't know what you want to use for system cc.
|
||||
|
@@ -1,82 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
create_gn_args() {
|
||||
cp "${BASE_ARGS_GN_FILE}" "${SKQP_OUT_DIR}"/args.gn
|
||||
echo "target_cpu = \"${SKQP_ARCH}\"" >> "${SKQP_OUT_DIR}"/args.gn
|
||||
}
|
||||
|
||||
download_skqp_models() (
|
||||
# The download_model.py script needs a checksum file to know what models
|
||||
# version to download.
|
||||
|
||||
# This is the most recent commit available in the skia repository with a
|
||||
# valid files.checksum
|
||||
SKIA_LAST_SKQP_CUT_COMMIT_SHA=ccf5f0d75b6a6b54756f2c62d57e3730eed8aa45
|
||||
git fetch origin "${SKIA_LAST_SKQP_CUT_COMMIT_SHA}:refs/remotes/origin/${SKIA_LAST_SKQP_CUT_COMMIT_SHA}"
|
||||
git checkout "${SKIA_LAST_SKQP_CUT_COMMIT_SHA}" -- \
|
||||
platform_tools/android/apps/skqp/src/main/assets/files.checksum
|
||||
|
||||
# The following patch transforms download_model.py from python2 to python3.
|
||||
git apply "${DOWNLOAD_MODEL_PATCH_FILE}"
|
||||
python3 tools/skqp/download_model.py
|
||||
|
||||
# Copy resources from skia to skqp directory
|
||||
python3 tools/skqp/setup_resources
|
||||
)
|
||||
|
||||
set -ex
|
||||
|
||||
SCRIPT_DIR=$(realpath "$(dirname "$0")")
|
||||
FETCH_GN_PATCH_FILE="${SCRIPT_DIR}/build-skqp_fetch-gn.patch"
|
||||
BASE_ARGS_GN_FILE="${SCRIPT_DIR}/build-skqp_base.gn"
|
||||
DOWNLOAD_MODEL_PATCH_FILE="${SCRIPT_DIR}/build-skqp_download_model.patch"
|
||||
|
||||
SKQP_ARCH=${SKQP_ARCH:-x64}
|
||||
SKIA_DIR=${SKIA_DIR:-$(mktemp -d)}
|
||||
SKQP_DIR=${SKQP_DIR:-$(mktemp -d)}
|
||||
SKQP_OUT_DIR=${SKIA_DIR}/out/${SKQP_ARCH}
|
||||
SKQP_INSTALL_DIR=/skqp
|
||||
SKQP_ASSETS_DIR="${SKQP_INSTALL_DIR}/assets"
|
||||
# Build list_gpu_unit_tests to update the unittests.txt file properly to the
|
||||
# target hardware.
|
||||
SKQP_BINARIES=(skqp list_gpu_unit_tests)
|
||||
|
||||
# Using a recent release version to mitigate instability during test phase
|
||||
SKIA_COMMIT_SHA="canvaskit/0.32.0"
|
||||
|
||||
git clone 'https://skia.googlesource.com/skia/' \
|
||||
--single-branch \
|
||||
-b "${SKIA_COMMIT_SHA}" \
|
||||
"${SKIA_DIR}"
|
||||
|
||||
pushd "${SKIA_DIR}"
|
||||
|
||||
git apply "${FETCH_GN_PATCH_FILE}"
|
||||
# Fetch some needed build tools needed to build skia/skqp
|
||||
# Basically, it clones repositories with commits SHAs from
|
||||
# ${SKIA_DIR}/DEPS directory
|
||||
python3 tools/git-sync-deps
|
||||
|
||||
mkdir -p "${SKQP_OUT_DIR}"
|
||||
mkdir -p "${SKQP_INSTALL_DIR}"
|
||||
|
||||
create_gn_args
|
||||
|
||||
# Build and install skqp binaries
|
||||
bin/gn gen "${SKQP_OUT_DIR}"
|
||||
|
||||
for BINARY in "${SKQP_BINARIES[@]}"
|
||||
do
|
||||
/usr/bin/ninja -C "${SKQP_OUT_DIR}" "${BINARY}"
|
||||
install -m 0755 "${SKQP_OUT_DIR}/${BINARY}" "${SKQP_INSTALL_DIR}"
|
||||
done
|
||||
|
||||
# Acquire assets and move them to the target directory.
|
||||
download_skqp_models
|
||||
mv platform_tools/android/apps/skqp/src/main/assets/ "${SKQP_ASSETS_DIR}"
|
||||
|
||||
popd
|
||||
rm -Rf "${SKQP_DIR}"
|
||||
rm -Rf "${SKIA_DIR}"
|
||||
|
||||
set +ex
|
@@ -1,25 +0,0 @@
|
||||
cc = "gcc"
|
||||
cxx = "g++"
|
||||
|
||||
extra_cflags = [ "-DSK_ENABLE_DUMP_GPU", "-DSK_BUILD_FOR_SKQP" ]
|
||||
extra_cflags_cc = [ "-static", "-Wno-error", "-Wno-macro-redefined", "-Wno-suggest-destructor-override", "-Wno-suggest-override" ]
|
||||
|
||||
is_debug = false
|
||||
|
||||
skia_enable_fontmgr_android = false
|
||||
skia_enable_fontmgr_empty = true
|
||||
skia_enable_pdf = false
|
||||
skia_enable_skottie = false
|
||||
|
||||
skia_skqp_global_error_tolerance = 8
|
||||
skia_tools_require_resources = true
|
||||
|
||||
skia_use_dng_sdk = false
|
||||
skia_use_expat = true
|
||||
skia_use_icu = false
|
||||
skia_use_libheif = false
|
||||
skia_use_lua = false
|
||||
skia_use_piex = false
|
||||
skia_use_vulkan = true
|
||||
|
||||
target_os = "linux"
|
@@ -1,22 +0,0 @@
diff --git a/tools/skqp/download_model.py b/tools/skqp/download_model.py
index fb0020e481..a5d8a03754 100755
--- a/tools/skqp/download_model.py
+++ b/tools/skqp/download_model.py
@@ -10,7 +10,7 @@ import os
 import shutil
 import sys
 import tempfile
-import urllib2
+import urllib.request, urllib.error, urllib.parse

 def checksum(path):
     if not os.path.exists(path):
@@ -33,7 +33,7 @@ def download(md5, path):
         pass # ignore race condition
     url = 'https://storage.googleapis.com/skia-skqp-assets/' + md5
     with open(path, 'wb') as o:
-        shutil.copyfileobj(urllib2.urlopen(url), o)
+        shutil.copyfileobj(urllib.request.urlopen(url), o)

 def tmp(prefix):
     fd, path = tempfile.mkstemp(prefix=prefix)
@@ -1,13 +0,0 @@
diff --git a/bin/fetch-gn b/bin/fetch-gn
index b4bb14c630..59c4591a30 100755
--- a/bin/fetch-gn
+++ b/bin/fetch-gn
@@ -23,7 +23,7 @@ os.chdir(os.path.join(os.path.dirname(__file__), os.pardir))
 gnzip = os.path.join(tempfile.mkdtemp(), 'gn.zip')
 with open(gnzip, 'wb') as f:
   OS  = {'darwin': 'mac', 'linux': 'linux', 'linux2': 'linux', 'win32': 'windows'}[sys.platform]
-  cpu = {'amd64': 'amd64', 'arm64': 'arm64', 'x86_64': 'amd64'}[platform.machine().lower()]
+  cpu = {'amd64': 'amd64', 'arm64': 'arm64', 'x86_64': 'amd64', 'aarch64': 'arm64'}[platform.machine().lower()]

   rev = 'd62642c920e6a0d1756316d225a90fd6faa9e21e'
   url = 'https://chrome-infra-packages.appspot.com/dl/gn/gn/{}-{}/+/git_revision:{}'.format(
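The one-line change above is needed because platform.machine() reports "aarch64" on arm64 Linux hosts while the prebuilt gn packages are published under "arm64". A shell equivalent of that normalisation, purely for illustration (the variable name is made up here):

    # Sketch only: map the kernel's machine name onto the CPU names the gn packages use.
    case "$(uname -m)" in
        x86_64|amd64)  GN_CPU=amd64 ;;
        aarch64|arm64) GN_CPU=arm64 ;;
        *) echo "unhandled machine: $(uname -m)" >&2; exit 1 ;;
    esac
    echo "would fetch the linux-${GN_CPU} gn package"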
@@ -1,17 +0,0 @@
#!/bin/bash

set -ex

git config --global user.email "mesa@example.com"
git config --global user.name "Mesa CI"
git clone \
    https://github.com/intel/libva-utils.git \
    -b 2.13.0 \
    --depth 1 \
    /va-utils

pushd /va-utils
meson build -D tests=true -Dprefix=/va $EXTRA_MESON_ARGS
ninja -C build install
popd
rm -rf /va-utils
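The installed tools land under /va and are later copied into the test rootfs. A quick smoke test of the result could look like the following; the render node path is an assumption and depends on the machine:

    # Illustration only: ask the VA-API stack to enumerate its supported profiles/entrypoints.
    export PATH=/va/bin:$PATH
    vainfo --display drm --device /dev/dri/renderD128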
.gitlab-ci/container/build-virglrenderer.sh (new file)
@@ -0,0 +1,20 @@
#!/bin/bash

set -ex

mkdir -p /epoxy
pushd /epoxy
wget -qO- https://github.com/anholt/libepoxy/releases/download/1.5.4/libepoxy-1.5.4.tar.xz | tar -xJ --strip-components=1
meson build/ $EXTRA_MESON_ARGS
ninja -C build install
popd
rm -rf /epoxy

VIRGLRENDERER_VERSION=43148d1115a12219a0560a538c9872d07c28c558
git clone https://gitlab.freedesktop.org/virgl/virglrenderer.git --single-branch --no-checkout /virglrenderer
pushd /virglrenderer
git checkout "$VIRGLRENDERER_VERSION"
meson build/ $EXTRA_MESON_ARGS
ninja -C build install
popd
rm -rf /virglrenderer
@@ -1,43 +0,0 @@
#!/bin/bash

set -ex

VKD3D_PROTON_VERSION="2.3.1"
VKD3D_PROTON_COMMIT="3ed3526332f53d7d35cf1b685fa8096b01f26ff0"

VKD3D_PROTON_DST_DIR="/vkd3d-proton-tests"
VKD3D_PROTON_SRC_DIR="/vkd3d-proton-src"
VKD3D_PROTON_BUILD_DIR="/vkd3d-proton-$VKD3D_PROTON_VERSION"

function build_arch {
    local arch="$1"
    shift

    meson "$@" \
        -Denable_tests=true \
        --buildtype release \
        --prefix "$VKD3D_PROTON_BUILD_DIR" \
        --strip \
        --bindir "x${arch}" \
        --libdir "x${arch}" \
        "$VKD3D_PROTON_BUILD_DIR/build.${arch}"

    ninja -C "$VKD3D_PROTON_BUILD_DIR/build.${arch}" install

    install -D -m755 -t "${VKD3D_PROTON_DST_DIR}/x${arch}/bin" "$VKD3D_PROTON_BUILD_DIR/build.${arch}/tests/"*.exe
}

git clone https://github.com/HansKristian-Work/vkd3d-proton.git --single-branch -b "v$VKD3D_PROTON_VERSION" --no-checkout "$VKD3D_PROTON_SRC_DIR"
pushd "$VKD3D_PROTON_SRC_DIR"
git checkout "$VKD3D_PROTON_COMMIT"
git submodule update --init --recursive
git submodule update --recursive
build_arch 64 --cross-file build-win64.txt
build_arch 86 --cross-file build-win32.txt
cp "setup_vkd3d_proton.sh" "$VKD3D_PROTON_BUILD_DIR/setup_vkd3d_proton.sh"
chmod +x "$VKD3D_PROTON_BUILD_DIR/setup_vkd3d_proton.sh"
popd

"$VKD3D_PROTON_BUILD_DIR"/setup_vkd3d_proton.sh install
rm -rf "$VKD3D_PROTON_BUILD_DIR"
rm -rf "$VKD3D_PROTON_SRC_DIR"
@@ -1,22 +0,0 @@
#!/bin/bash

set -ex

export LIBWAYLAND_VERSION="1.18.0"
export WAYLAND_PROTOCOLS_VERSION="1.24"

git clone https://gitlab.freedesktop.org/wayland/wayland
cd wayland
git checkout "$LIBWAYLAND_VERSION"
meson -Ddocumentation=false -Ddtd_validation=false -Dlibraries=true _build
ninja -C _build install
cd ..
rm -rf wayland

git clone https://gitlab.freedesktop.org/wayland/wayland-protocols
cd wayland-protocols
git checkout "$WAYLAND_PROTOCOLS_VERSION"
meson _build
ninja -C _build install
cd ..
rm -rf wayland-protocols
@@ -1,8 +1,6 @@
#!/bin/sh

if test -f /etc/debian_version; then
    apt-get autoremove -y --purge
fi
apt-get autoremove -y --purge

# Clean up any build cache for rust.
rm -rf /.cargo
@@ -1,21 +1,15 @@
#!/bin/sh

if test -f /etc/debian_version; then
    CCACHE_PATH=/usr/lib/ccache
else
    CCACHE_PATH=/usr/lib64/ccache
fi

# Common setup among container builds before we get to building code.

export CCACHE_COMPILERCHECK=content
export CCACHE_COMPRESS=true
export CCACHE_DIR=/cache/mesa/ccache
export PATH=$CCACHE_PATH:$PATH
export PATH=/usr/lib/ccache:$PATH

# CMake ignores $PATH, so we have to force CC/CXX to the ccache versions.
export CC="${CCACHE_PATH}/gcc"
export CXX="${CCACHE_PATH}/g++"
export CC="/usr/lib/ccache/gcc"
export CXX="/usr/lib/ccache/g++"

# Force linkers to gold, since it's so much faster for building. We can't use
# lld because we're on old debian and it's buggy. mingw fails meson builds
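Both mechanisms above matter: make- and meson-driven builds pick the ccache wrappers up through PATH, while a CMake configure run records whatever CC/CXX point at. A minimal check, assuming the exports above are in effect and some CMake project sits in the current directory:

    # Illustration only: see which compiler each mechanism resolves to.
    command -v gcc        # PATH lookup should land on the ccache wrapper
    echo "$CC" "$CXX"     # what CMake will record at configure time
    cmake -S . -B _build  # caches CC/CXX in the build directory
    ccache --show-stats   # hit/miss counters move on a repeated build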
@@ -17,7 +17,7 @@ cat >$cross_file <<EOF
[binaries]
ar = '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/$arch-ar'
c = ['ccache', '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch2}29-clang', '-fno-exceptions', '-fno-unwind-tables', '-fno-asynchronous-unwind-tables']
cpp = ['ccache', '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch2}29-clang++', '-fno-exceptions', '-fno-unwind-tables', '-fno-asynchronous-unwind-tables']
cpp = ['ccache', '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch2}29-clang++', '-fno-exceptions', '-fno-unwind-tables', '-fno-asynchronous-unwind-tables', '-static-libstdc++']
c_ld = 'lld'
cpp_ld = 'lld'
strip = '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/$arch-strip'
@@ -3,17 +3,11 @@
|
||||
set -ex
|
||||
|
||||
if [ $DEBIAN_ARCH = arm64 ]; then
|
||||
ARCH_PACKAGES="firmware-qcom-media
|
||||
libfontconfig1
|
||||
libgl1
|
||||
libglu1-mesa
|
||||
"
|
||||
ARCH_PACKAGES="firmware-qcom-media"
|
||||
elif [ $DEBIAN_ARCH = amd64 ]; then
|
||||
ARCH_PACKAGES="firmware-amd-graphics
|
||||
libelf1
|
||||
libllvm11
|
||||
libva2
|
||||
libva-drm2
|
||||
"
|
||||
fi
|
||||
|
||||
@@ -27,7 +21,9 @@ INSTALL_CI_FAIRY_PACKAGES="git
|
||||
apt-get -y install --no-install-recommends \
|
||||
$ARCH_PACKAGES \
|
||||
$INSTALL_CI_FAIRY_PACKAGES \
|
||||
apitrace \
|
||||
ca-certificates \
|
||||
curl \
|
||||
firmware-realtek \
|
||||
initramfs-tools \
|
||||
libasan6 \
|
||||
@@ -75,7 +71,7 @@ apt-get -y install --no-install-recommends \
|
||||
|
||||
# Needed for ci-fairy, this revision is able to upload files to
|
||||
# MinIO and doesn't depend on git
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@34f4ade99434043f88e164933f570301fd18b125
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@0f1abc24c043e63894085a6bd12f14263e8b29eb
|
||||
|
||||
apt-get purge -y \
|
||||
$INSTALL_CI_FAIRY_PACKAGES
|
||||
@@ -150,8 +146,6 @@ rm -rf usr/sbin/update-usbids
|
||||
rm -rf var/lib/usbutils/usb.ids
|
||||
rm -rf usr/share/misc/usb.ids
|
||||
|
||||
rm -rf /root/.pip
|
||||
|
||||
#######################################################################
|
||||
# Crush into a minimal production image to be deployed via some type of image
|
||||
# updating system.
|
||||
|
@@ -1,106 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
EPHEMERAL="\
|
||||
autoconf \
|
||||
rdfind \
|
||||
unzip \
|
||||
"
|
||||
|
||||
apt-get install -y --no-remove $EPHEMERAL
|
||||
|
||||
# Fetch the NDK and extract just the toolchain we want.
|
||||
ndk=android-ndk-r21d
|
||||
wget -O $ndk.zip https://dl.google.com/android/repository/$ndk-linux-x86_64.zip
|
||||
unzip -d / $ndk.zip "$ndk/toolchains/llvm/*"
|
||||
rm $ndk.zip
|
||||
# Since it was packed as a zip file, symlinks/hardlinks got turned into
|
||||
# duplicate files. Turn them into hardlinks to save on container space.
|
||||
rdfind -makehardlinks true -makeresultsfile false /android-ndk-r21d/
|
||||
# Drop some large tools we won't use in this build.
|
||||
find /android-ndk-r21d/ -type f | egrep -i "clang-check|clang-tidy|lldb" | xargs rm -f
|
||||
|
||||
sh .gitlab-ci/container/create-android-ndk-pc.sh /$ndk zlib.pc "" "-lz" "1.2.3"
|
||||
|
||||
sh .gitlab-ci/container/create-android-cross-file.sh /$ndk x86_64-linux-android x86_64 x86_64
|
||||
sh .gitlab-ci/container/create-android-cross-file.sh /$ndk i686-linux-android x86 x86
|
||||
sh .gitlab-ci/container/create-android-cross-file.sh /$ndk aarch64-linux-android arm armv8
|
||||
sh .gitlab-ci/container/create-android-cross-file.sh /$ndk arm-linux-androideabi arm armv7hl armv7a-linux-androideabi
|
||||
|
||||
# Not using build-libdrm.sh because we don't want its cleanup after building
|
||||
# each arch. Fetch and extract now.
|
||||
export LIBDRM_VERSION=libdrm-2.4.109
|
||||
wget https://dri.freedesktop.org/libdrm/$LIBDRM_VERSION.tar.xz
|
||||
tar -xf $LIBDRM_VERSION.tar.xz && rm $LIBDRM_VERSION.tar.xz
|
||||
|
||||
for arch in \
|
||||
x86_64-linux-android \
|
||||
i686-linux-android \
|
||||
aarch64-linux-android \
|
||||
arm-linux-androideabi ; do
|
||||
|
||||
cd $LIBDRM_VERSION
|
||||
rm -rf build-$arch
|
||||
meson build-$arch \
|
||||
--cross-file=/cross_file-$arch.txt \
|
||||
--libdir=lib/$arch \
|
||||
-Dlibkms=false \
|
||||
-Dnouveau=false \
|
||||
-Dvc4=false \
|
||||
-Detnaviv=false \
|
||||
-Dfreedreno=false \
|
||||
-Dintel=false \
|
||||
-Dcairo-tests=false \
|
||||
-Dvalgrind=false
|
||||
ninja -C build-$arch install
|
||||
cd ..
|
||||
done
|
||||
|
||||
rm -rf $LIBDRM_VERSION
|
||||
|
||||
export LIBELF_VERSION=libelf-0.8.13
|
||||
wget https://fossies.org/linux/misc/old/$LIBELF_VERSION.tar.gz
|
||||
|
||||
# Not 100% sure who runs the mirror above so be extra careful
|
||||
if ! echo "4136d7b4c04df68b686570afa26988ac ${LIBELF_VERSION}.tar.gz" | md5sum -c -; then
|
||||
echo "Checksum failed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
tar -xf ${LIBELF_VERSION}.tar.gz
|
||||
cd $LIBELF_VERSION
|
||||
|
||||
# Work around a bug in the original configure not enabling __LIBELF64.
|
||||
autoreconf
|
||||
|
||||
for arch in \
|
||||
x86_64-linux-android \
|
||||
i686-linux-android \
|
||||
aarch64-linux-android \
|
||||
arm-linux-androideabi ; do
|
||||
|
||||
ccarch=${arch}
|
||||
if [ "${arch}" == 'arm-linux-androideabi' ]
|
||||
then
|
||||
ccarch=armv7a-linux-androideabi
|
||||
fi
|
||||
|
||||
export CC=/android-ndk-r21d/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch}-ar
|
||||
export CC=/android-ndk-r21d/toolchains/llvm/prebuilt/linux-x86_64/bin/${ccarch}29-clang
|
||||
export CXX=/android-ndk-r21d/toolchains/llvm/prebuilt/linux-x86_64/bin/${ccarch}29-clang++
|
||||
export LD=/android-ndk-r21d/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch}-ld
|
||||
export RANLIB=/android-ndk-r21d/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch}-ranlib
|
||||
|
||||
# The configure script doesn't know about Android, but it doesn't
# really seem to use the host anyway.
./configure --host=x86_64-linux-gnu --disable-nls --disable-shared \
|
||||
--libdir=/usr/local/lib/${arch}
|
||||
make install
|
||||
make distclean
|
||||
done
|
||||
|
||||
cd ..
|
||||
rm -rf $LIBELF_VERSION
|
||||
|
||||
apt-get purge -y $EPHEMERAL
|
@@ -1,107 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Ephemeral packages (installed for this script and removed again at the end)
|
||||
STABLE_EPHEMERAL=" \
|
||||
autoconf \
|
||||
automake \
|
||||
autotools-dev \
|
||||
bzip2 \
|
||||
libtool \
|
||||
python3-pip \
|
||||
"
|
||||
|
||||
# We need multiarch for Wine
|
||||
dpkg --add-architecture i386
|
||||
apt-get update
|
||||
|
||||
apt-get install -y --no-remove \
|
||||
$STABLE_EPHEMERAL \
|
||||
check \
|
||||
clang \
|
||||
cmake \
|
||||
libasan6 \
|
||||
libarchive-dev \
|
||||
libclang-cpp11-dev \
|
||||
libgbm-dev \
|
||||
libglvnd-dev \
|
||||
libllvmspirvlib-dev \
|
||||
liblua5.3-dev \
|
||||
libxcb-dri2-0-dev \
|
||||
libxcb-dri3-dev \
|
||||
libxcb-glx0-dev \
|
||||
libxcb-present-dev \
|
||||
libxcb-randr0-dev \
|
||||
libxcb-shm0-dev \
|
||||
libxcb-sync-dev \
|
||||
libxcb-xfixes0-dev \
|
||||
libxcb1-dev \
|
||||
libxml2-dev \
|
||||
llvm-11-dev \
|
||||
llvm-9-dev \
|
||||
ocl-icd-opencl-dev \
|
||||
procps \
|
||||
spirv-tools \
|
||||
strace \
|
||||
time \
|
||||
wine \
|
||||
wine32
|
||||
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
|
||||
|
||||
# Debian's pkg-config wrappers for mingw are broken, and there's no sign that
|
||||
# they're going to be fixed, so we'll just have to fix it ourselves
|
||||
# https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=930492
|
||||
cat >/usr/local/bin/x86_64-w64-mingw32-pkg-config <<EOF
|
||||
#!/bin/sh
|
||||
|
||||
PKG_CONFIG_LIBDIR=/usr/x86_64-w64-mingw32/lib/pkgconfig pkg-config \$@
|
||||
EOF
|
||||
chmod +x /usr/local/bin/x86_64-w64-mingw32-pkg-config
|
||||
|
||||
|
||||
# dependencies where we want a specific version
|
||||
export XORG_RELEASES=https://xorg.freedesktop.org/releases/individual
|
||||
|
||||
export XORGMACROS_VERSION=util-macros-1.19.0
|
||||
|
||||
wget $XORG_RELEASES/util/$XORGMACROS_VERSION.tar.bz2
|
||||
tar -xvf $XORGMACROS_VERSION.tar.bz2 && rm $XORGMACROS_VERSION.tar.bz2
|
||||
cd $XORGMACROS_VERSION; ./configure; make install; cd ..
|
||||
rm -rf $XORGMACROS_VERSION
|
||||
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
. .gitlab-ci/container/build-wayland.sh
|
||||
|
||||
pushd /usr/local
|
||||
git clone https://gitlab.freedesktop.org/mesa/shader-db.git --depth 1
|
||||
rm -rf shader-db/.git
|
||||
cd shader-db
|
||||
make
|
||||
popd
|
||||
|
||||
git clone https://github.com/microsoft/DirectX-Headers -b v1.0.1 --depth 1
|
||||
pushd DirectX-Headers
|
||||
mkdir build
|
||||
cd build
|
||||
meson .. --backend=ninja --buildtype=release -Dbuild-test=false
|
||||
ninja
|
||||
ninja install
|
||||
popd
|
||||
rm -rf DirectX-Headers
|
||||
|
||||
pip3 install git+https://git.lavasoftware.org/lava/lavacli@3db3ddc45e5358908bc6a17448059ea2340492b7
|
||||
|
||||
############### Uninstall the build software
|
||||
|
||||
apt-get purge -y \
|
||||
$STABLE_EPHEMERAL
|
||||
|
||||
. .gitlab-ci/container/container_post_build.sh
|
@@ -1,122 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Ephemeral packages (installed for this script and removed again at the end)
|
||||
STABLE_EPHEMERAL=" \
|
||||
autoconf \
|
||||
automake \
|
||||
bc \
|
||||
bison \
|
||||
bzip2 \
|
||||
ccache \
|
||||
clang-11 \
|
||||
cmake \
|
||||
flex \
|
||||
g++ \
|
||||
glslang-tools \
|
||||
libasound2-dev \
|
||||
libcap-dev \
|
||||
libclang-cpp11-dev \
|
||||
libelf-dev \
|
||||
libexpat1-dev \
|
||||
libfdt-dev \
|
||||
libgbm-dev \
|
||||
libgles2-mesa-dev \
|
||||
libllvmspirvlib-dev \
|
||||
libpciaccess-dev \
|
||||
libpng-dev \
|
||||
libudev-dev \
|
||||
libvulkan-dev \
|
||||
libwaffle-dev \
|
||||
libx11-xcb-dev \
|
||||
libxcb-dri2-0-dev \
|
||||
libxext-dev \
|
||||
libxkbcommon-dev \
|
||||
libxrender-dev \
|
||||
llvm-11-dev \
|
||||
llvm-spirv \
|
||||
make \
|
||||
meson \
|
||||
ocl-icd-opencl-dev \
|
||||
patch \
|
||||
pkg-config \
|
||||
python3-distutils \
|
||||
xz-utils \
|
||||
"
|
||||
|
||||
apt-get install -y --no-remove \
|
||||
$STABLE_EPHEMERAL \
|
||||
clinfo \
|
||||
iptables \
|
||||
libclang-common-11-dev \
|
||||
libclang-cpp11 \
|
||||
libcap2 \
|
||||
libegl1 \
|
||||
libepoxy-dev \
|
||||
libfdt1 \
|
||||
libllvmspirvlib11 \
|
||||
libxcb-shm0 \
|
||||
ocl-icd-libopencl1 \
|
||||
python3-lxml \
|
||||
python3-renderdoc \
|
||||
python3-simplejson \
|
||||
spirv-tools \
|
||||
sysvinit-core \
|
||||
wget
|
||||
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
|
||||
############### Build libdrm
|
||||
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
############### Build Wayland
|
||||
|
||||
. .gitlab-ci/container/build-wayland.sh
|
||||
|
||||
############### Build Crosvm
|
||||
|
||||
. .gitlab-ci/container/build-rust.sh
|
||||
. .gitlab-ci/container/build-crosvm.sh
|
||||
rm -rf /root/.cargo
|
||||
rm -rf /root/.rustup
|
||||
|
||||
############### Build kernel
|
||||
|
||||
export DEFCONFIG="arch/x86/configs/x86_64_defconfig"
|
||||
export KERNEL_IMAGE_NAME=bzImage
|
||||
export KERNEL_ARCH=x86_64
|
||||
export DEBIAN_ARCH=amd64
|
||||
|
||||
mkdir -p /lava-files/
|
||||
. .gitlab-ci/container/build-kernel.sh
|
||||
|
||||
############### Build libclc
|
||||
|
||||
. .gitlab-ci/container/build-libclc.sh
|
||||
|
||||
############### Build piglit
|
||||
|
||||
PIGLIT_OPTS="-DPIGLIT_BUILD_CL_TESTS=ON -DPIGLIT_BUILD_DMA_BUF_TESTS=ON" . .gitlab-ci/container/build-piglit.sh
|
||||
|
||||
############### Build dEQP GL
|
||||
|
||||
DEQP_TARGET=surfaceless . .gitlab-ci/container/build-deqp.sh
|
||||
|
||||
############### Build apitrace
|
||||
|
||||
. .gitlab-ci/container/build-apitrace.sh
|
||||
|
||||
############### Uninstall the build software
|
||||
|
||||
ccache --show-stats
|
||||
|
||||
apt-get purge -y \
|
||||
$STABLE_EPHEMERAL
|
||||
|
||||
apt-get autoremove -y --purge
|
@@ -1,164 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Ephemeral packages (installed for this script and removed again at the end)
|
||||
STABLE_EPHEMERAL=" \
|
||||
ccache \
|
||||
cmake \
|
||||
g++ \
|
||||
g++-mingw-w64-i686-posix \
|
||||
g++-mingw-w64-x86-64-posix \
|
||||
glslang-tools \
|
||||
libexpat1-dev \
|
||||
libgbm-dev \
|
||||
libgles2-mesa-dev \
|
||||
liblz4-dev \
|
||||
libpciaccess-dev \
|
||||
libudev-dev \
|
||||
libvulkan-dev \
|
||||
libwaffle-dev \
|
||||
libx11-xcb-dev \
|
||||
libxcb-ewmh-dev \
|
||||
libxcb-keysyms1-dev \
|
||||
libxkbcommon-dev \
|
||||
libxrandr-dev \
|
||||
libxrender-dev \
|
||||
libzstd-dev \
|
||||
meson \
|
||||
mingw-w64-i686-dev \
|
||||
mingw-w64-tools \
|
||||
mingw-w64-x86-64-dev \
|
||||
p7zip \
|
||||
patch \
|
||||
pkg-config \
|
||||
python3-distutils \
|
||||
wget \
|
||||
xz-utils \
|
||||
"
|
||||
|
||||
apt-get install -y --no-remove \
|
||||
$STABLE_EPHEMERAL \
|
||||
libxcb-shm0 \
|
||||
python3-lxml \
|
||||
python3-simplejson \
|
||||
xinit \
|
||||
xserver-xorg-video-amdgpu \
|
||||
xserver-xorg-video-ati
|
||||
|
||||
# We need multiarch for Wine
|
||||
dpkg --add-architecture i386
|
||||
|
||||
apt-get update
|
||||
|
||||
apt-get install -y --no-remove \
|
||||
wine \
|
||||
wine32 \
|
||||
wine64
|
||||
|
||||
function setup_wine() {
|
||||
export WINEDEBUG="-all"
|
||||
export WINEPREFIX="$1"
|
||||
|
||||
# We don't want crash dialogs
|
||||
cat >crashdialog.reg <<EOF
|
||||
Windows Registry Editor Version 5.00
|
||||
|
||||
[HKEY_CURRENT_USER\Software\Wine\WineDbg]
|
||||
"ShowCrashDialog"=dword:00000000
|
||||
|
||||
EOF
|
||||
|
||||
# Set the wine prefix and disable the crash dialog
|
||||
wine regedit crashdialog.reg
|
||||
rm crashdialog.reg
|
||||
|
||||
# An immediate wine command may fail with: "${WINEPREFIX}: Not a
|
||||
# valid wine prefix." and that is just spit out because of checking
# the existence of the system.reg file, which fails. Just giving
|
||||
# it a bit more of time for it to be created solves the problem
|
||||
# ...
|
||||
while ! test -f "${WINEPREFIX}/system.reg"; do sleep 1; done
|
||||
}
|
||||
|
||||
############### Install DXVK
|
||||
|
||||
DXVK_VERSION="1.8.1"
|
||||
|
||||
setup_wine "/dxvk-wine64"
|
||||
|
||||
wget "https://github.com/doitsujin/dxvk/releases/download/v${DXVK_VERSION}/dxvk-${DXVK_VERSION}.tar.gz"
|
||||
tar xzpf dxvk-"${DXVK_VERSION}".tar.gz
|
||||
dxvk-"${DXVK_VERSION}"/setup_dxvk.sh install
|
||||
rm -rf dxvk-"${DXVK_VERSION}"
|
||||
rm dxvk-"${DXVK_VERSION}".tar.gz
|
||||
|
||||
############### Install Windows' apitrace binaries
|
||||
|
||||
APITRACE_VERSION="10.0"
|
||||
APITRACE_VERSION_DATE=""
|
||||
|
||||
wget "https://github.com/apitrace/apitrace/releases/download/${APITRACE_VERSION}/apitrace-${APITRACE_VERSION}${APITRACE_VERSION_DATE}-win64.7z"
|
||||
7zr x "apitrace-${APITRACE_VERSION}${APITRACE_VERSION_DATE}-win64.7z" \
|
||||
"apitrace-${APITRACE_VERSION}${APITRACE_VERSION_DATE}-win64/bin/apitrace.exe" \
|
||||
"apitrace-${APITRACE_VERSION}${APITRACE_VERSION_DATE}-win64/bin/d3dretrace.exe"
|
||||
mv "apitrace-${APITRACE_VERSION}${APITRACE_VERSION_DATE}-win64" /apitrace-msvc-win64
|
||||
rm "apitrace-${APITRACE_VERSION}${APITRACE_VERSION_DATE}-win64.7z"
|
||||
|
||||
# Add the apitrace path to the registry
|
||||
wine \
|
||||
reg add "HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Session Manager\Environment" \
|
||||
/v Path \
|
||||
/t REG_EXPAND_SZ \
|
||||
/d "C:\windows\system32;C:\windows;C:\windows\system32\wbem;Z:\apitrace-msvc-win64\bin" \
|
||||
/f
|
||||
|
||||
############### Building ...
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
|
||||
############### Build libdrm
|
||||
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
############### Build Wayland
|
||||
|
||||
. .gitlab-ci/container/build-wayland.sh
|
||||
|
||||
############### Build parallel-deqp-runner's hang-detection tool
|
||||
|
||||
. .gitlab-ci/container/build-hang-detection.sh
|
||||
|
||||
############### Build piglit
|
||||
|
||||
PIGLIT_BUILD_TARGETS="piglit_replayer" . .gitlab-ci/container/build-piglit.sh
|
||||
|
||||
############### Build Fossilize
|
||||
|
||||
. .gitlab-ci/container/build-fossilize.sh
|
||||
|
||||
############### Build dEQP VK
|
||||
|
||||
. .gitlab-ci/container/build-deqp.sh
|
||||
|
||||
############### Build gfxreconstruct
|
||||
|
||||
. .gitlab-ci/container/build-gfxreconstruct.sh
|
||||
|
||||
############### Build VKD3D-Proton
|
||||
|
||||
setup_wine "/vkd3d-proton-wine64"
|
||||
|
||||
. .gitlab-ci/container/build-vkd3d-proton.sh
|
||||
|
||||
############### Uninstall the build software
|
||||
|
||||
ccache --show-stats
|
||||
|
||||
apt-get purge -y \
|
||||
$STABLE_EPHEMERAL
|
||||
|
||||
apt-get autoremove -y --purge
|
@@ -1,98 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
|
||||
|
||||
EPHEMERAL="
|
||||
autoconf
|
||||
automake
|
||||
bzip2
|
||||
git
|
||||
libtool
|
||||
pkgconfig(epoxy)
|
||||
pkgconfig(gbm)
|
||||
unzip
|
||||
wget
|
||||
xz
|
||||
"
|
||||
|
||||
dnf install -y --setopt=install_weak_deps=False \
|
||||
bison \
|
||||
ccache \
|
||||
clang-devel \
|
||||
flex \
|
||||
gcc \
|
||||
gcc-c++ \
|
||||
gettext \
|
||||
kernel-headers \
|
||||
llvm-devel \
|
||||
meson \
|
||||
"pkgconfig(dri2proto)" \
|
||||
"pkgconfig(expat)" \
|
||||
"pkgconfig(glproto)" \
|
||||
"pkgconfig(libclc)" \
|
||||
"pkgconfig(libelf)" \
|
||||
"pkgconfig(libglvnd)" \
|
||||
"pkgconfig(libomxil-bellagio)" \
|
||||
"pkgconfig(libselinux)" \
|
||||
"pkgconfig(libva)" \
|
||||
"pkgconfig(pciaccess)" \
|
||||
"pkgconfig(vdpau)" \
|
||||
"pkgconfig(vulkan)" \
|
||||
"pkgconfig(x11)" \
|
||||
"pkgconfig(x11-xcb)" \
|
||||
"pkgconfig(xcb)" \
|
||||
"pkgconfig(xcb-dri2)" \
|
||||
"pkgconfig(xcb-dri3)" \
|
||||
"pkgconfig(xcb-glx)" \
|
||||
"pkgconfig(xcb-present)" \
|
||||
"pkgconfig(xcb-randr)" \
|
||||
"pkgconfig(xcb-sync)" \
|
||||
"pkgconfig(xcb-xfixes)" \
|
||||
"pkgconfig(xdamage)" \
|
||||
"pkgconfig(xext)" \
|
||||
"pkgconfig(xfixes)" \
|
||||
"pkgconfig(xrandr)" \
|
||||
"pkgconfig(xshmfence)" \
|
||||
"pkgconfig(xxf86vm)" \
|
||||
"pkgconfig(zlib)" \
|
||||
python-unversioned-command \
|
||||
python3-devel \
|
||||
python3-mako \
|
||||
python3-devel \
|
||||
python3-mako \
|
||||
vulkan-headers \
|
||||
$EPHEMERAL
|
||||
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
|
||||
|
||||
# dependencies where we want a specific version
|
||||
export XORG_RELEASES=https://xorg.freedesktop.org/releases/individual
|
||||
|
||||
export XORGMACROS_VERSION=util-macros-1.19.0
|
||||
|
||||
wget $XORG_RELEASES/util/$XORGMACROS_VERSION.tar.bz2
|
||||
tar -xvf $XORGMACROS_VERSION.tar.bz2 && rm $XORGMACROS_VERSION.tar.bz2
|
||||
cd $XORGMACROS_VERSION; ./configure; make install; cd ..
|
||||
rm -rf $XORGMACROS_VERSION
|
||||
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
. .gitlab-ci/container/build-wayland.sh
|
||||
|
||||
pushd /usr/local
|
||||
git clone https://gitlab.freedesktop.org/mesa/shader-db.git --depth 1
|
||||
rm -rf shader-db/.git
|
||||
cd shader-db
|
||||
make
|
||||
popd
|
||||
|
||||
|
||||
############### Uninstall the build software
|
||||
|
||||
dnf remove -y $EPHEMERAL
|
||||
|
||||
. .gitlab-ci/container/container_post_build.sh
|
@@ -7,7 +7,7 @@ export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
check_minio()
|
||||
{
|
||||
MINIO_PATH="${MINIO_HOST}/mesa-lava/$1/${DISTRIBUTION_TAG}/${DEBIAN_ARCH}"
|
||||
MINIO_PATH="${MINIO_HOST}/mesa-lava/$1/${MINIO_SUFFIX}/${DISTRIBUTION_TAG}/${DEBIAN_ARCH}"
|
||||
if wget -q --method=HEAD "https://${MINIO_PATH}/done"; then
|
||||
exit
|
||||
fi
|
||||
@@ -27,22 +27,18 @@ if [[ "$DEBIAN_ARCH" = "arm64" ]]; then
|
||||
KERNEL_ARCH="arm64"
|
||||
DEFCONFIG="arch/arm64/configs/defconfig"
|
||||
DEVICE_TREES="arch/arm64/boot/dts/rockchip/rk3399-gru-kevin.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/amlogic/meson-gxl-s805x-libretech-ac.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/amlogic/meson-gxl-s905x-libretech-cc.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/allwinner/sun50i-h6-pine-h64.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/amlogic/meson-gxm-khadas-vim2.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/qcom/apq8016-sbc.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/qcom/apq8096-db820c.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/amlogic/meson-g12b-a311d-khadas-vim3.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/mediatek/mt8183-kukui-jacuzzi-juniper-sku16.dtb"
|
||||
DEVICE_TREES+=" arch/arm64/boot/dts/qcom/sc7180-trogdor-lazor-limozeen-nots.dtb"
|
||||
KERNEL_IMAGE_NAME="Image"
|
||||
elif [[ "$DEBIAN_ARCH" = "armhf" ]]; then
|
||||
GCC_ARCH="arm-linux-gnueabihf"
|
||||
KERNEL_ARCH="arm"
|
||||
DEFCONFIG="arch/arm/configs/multi_v7_defconfig"
|
||||
DEVICE_TREES="arch/arm/boot/dts/rk3288-veyron-jaq.dtb"
|
||||
DEVICE_TREES+=" arch/arm/boot/dts/sun8i-h3-libretech-all-h3-cc.dtb"
|
||||
DEVICE_TREES+=" arch/arm/boot/dts/imx6q-cubox-i.dtb"
|
||||
DEVICE_TREES="arch/arm/boot/dts/rk3288-veyron-jaq.dtb arch/arm/boot/dts/sun8i-h3-libretech-all-h3-cc.dtb"
|
||||
KERNEL_IMAGE_NAME="zImage"
|
||||
. .gitlab-ci/container/create-cross-file.sh armhf
|
||||
else
|
||||
@@ -51,7 +47,6 @@ else
|
||||
DEFCONFIG="arch/x86/configs/x86_64_defconfig"
|
||||
DEVICE_TREES=""
|
||||
KERNEL_IMAGE_NAME="bzImage"
|
||||
ARCH_PACKAGES="libva-dev"
|
||||
fi
|
||||
|
||||
# Determine if we're in a cross build.
|
||||
@@ -73,38 +68,26 @@ fi
|
||||
|
||||
apt-get update
|
||||
apt-get install -y --no-remove \
|
||||
${ARCH_PACKAGES} \
|
||||
automake \
|
||||
bc \
|
||||
cmake \
|
||||
debootstrap \
|
||||
git \
|
||||
glslang-tools \
|
||||
libdrm-dev \
|
||||
libegl1-mesa-dev \
|
||||
libfontconfig-dev \
|
||||
libgbm-dev \
|
||||
libgl-dev \
|
||||
libgles2-mesa-dev \
|
||||
libglu1-mesa-dev \
|
||||
libglx-dev \
|
||||
libpng-dev \
|
||||
libssl-dev \
|
||||
libudev-dev \
|
||||
libvulkan-dev \
|
||||
libwaffle-dev \
|
||||
libwayland-dev \
|
||||
libx11-xcb-dev \
|
||||
libxcb-dri2-0-dev \
|
||||
libxkbcommon-dev \
|
||||
ninja-build \
|
||||
patch \
|
||||
python-is-python3 \
|
||||
python3-distutils \
|
||||
python3-mako \
|
||||
python3-numpy \
|
||||
python3-serial \
|
||||
unzip \
|
||||
wget
|
||||
|
||||
|
||||
@@ -114,7 +97,6 @@ if [[ "$DEBIAN_ARCH" = "armhf" ]]; then
|
||||
libelf-dev:armhf \
|
||||
libgbm-dev:armhf \
|
||||
libgles2-mesa-dev:armhf \
|
||||
libpng-dev:armhf \
|
||||
libudev-dev:armhf \
|
||||
libvulkan-dev:armhf \
|
||||
libwaffle-dev:armhf \
|
||||
@@ -129,17 +111,11 @@ STRIP_CMD="${GCC_ARCH}-strip"
|
||||
mkdir -p /lava-files/rootfs-${DEBIAN_ARCH}
|
||||
|
||||
|
||||
############### Build apitrace
|
||||
. .gitlab-ci/container/build-apitrace.sh
|
||||
mkdir -p /lava-files/rootfs-${DEBIAN_ARCH}/apitrace
|
||||
mv /apitrace/build /lava-files/rootfs-${DEBIAN_ARCH}/apitrace
|
||||
rm -rf /apitrace
|
||||
|
||||
|
||||
############### Build dEQP runner
|
||||
. .gitlab-ci/container/build-deqp-runner.sh
|
||||
mkdir -p /lava-files/rootfs-${DEBIAN_ARCH}/usr/bin
|
||||
mv /usr/local/bin/*-runner /lava-files/rootfs-${DEBIAN_ARCH}/usr/bin/.
|
||||
mv /usr/local/bin/deqp-runner /lava-files/rootfs-${DEBIAN_ARCH}/usr/bin/.
|
||||
mv /usr/local/bin/piglit-runner /lava-files/rootfs-${DEBIAN_ARCH}/usr/bin/.
|
||||
|
||||
|
||||
############### Build dEQP
|
||||
@@ -148,33 +124,70 @@ DEQP_TARGET=surfaceless . .gitlab-ci/container/build-deqp.sh
|
||||
mv /deqp /lava-files/rootfs-${DEBIAN_ARCH}/.
|
||||
|
||||
|
||||
############### Build SKQP
|
||||
if [[ "$DEBIAN_ARCH" = "arm64" ]]; then
|
||||
SKQP_ARCH="arm64" . .gitlab-ci/container/build-skqp.sh
|
||||
mv /skqp /lava-files/rootfs-${DEBIAN_ARCH}/.
|
||||
fi
|
||||
|
||||
|
||||
############### Build piglit
|
||||
PIGLIT_OPTS="-DPIGLIT_BUILD_DMA_BUF_TESTS=ON" . .gitlab-ci/container/build-piglit.sh
|
||||
. .gitlab-ci/container/build-piglit.sh
|
||||
mv /piglit /lava-files/rootfs-${DEBIAN_ARCH}/.
|
||||
|
||||
############### Build libva tests
|
||||
if [[ "$DEBIAN_ARCH" = "amd64" ]]; then
|
||||
. .gitlab-ci/container/build-va-tools.sh
|
||||
mv /va/bin/* /lava-files/rootfs-${DEBIAN_ARCH}/usr/bin/
|
||||
fi
|
||||
|
||||
############### Build libdrm
|
||||
EXTRA_MESON_ARGS+=" -D prefix=/libdrm"
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
############### Build kernel
|
||||
. .gitlab-ci/container/build-kernel.sh
|
||||
|
||||
############### Cross-build kernel
|
||||
mkdir -p kernel
|
||||
wget -qO- ${KERNEL_URL} | tar -xz --strip-components=1 -C kernel
|
||||
pushd kernel
|
||||
|
||||
# The kernel doesn't like the gold linker (or the old lld in our debians).
|
||||
# Sneak in some override symlinks during kernel build until we can update
|
||||
# debian (they'll get blown away by the rm of the kernel dir at the end).
|
||||
mkdir -p ld-links
|
||||
for i in /usr/bin/*-ld /usr/bin/ld; do
|
||||
i=`basename $i`
|
||||
ln -sf /usr/bin/$i.bfd ld-links/$i
|
||||
done
|
||||
export PATH=`pwd`/ld-links:$PATH
|
||||
|
||||
if [ -n "$INSTALL_KERNEL_MODULES" ]; then
|
||||
# Disable all modules in defconfig, so we only build the ones we want
|
||||
sed -i 's/=m/=n/g' ${DEFCONFIG}
|
||||
fi
|
||||
|
||||
./scripts/kconfig/merge_config.sh ${DEFCONFIG} ../.gitlab-ci/container/${KERNEL_ARCH}.config
|
||||
make ${KERNEL_IMAGE_NAME}
|
||||
for image in ${KERNEL_IMAGE_NAME}; do
|
||||
cp arch/${KERNEL_ARCH}/boot/${image} /lava-files/.
|
||||
done
|
||||
|
||||
if [[ -n ${DEVICE_TREES} ]]; then
|
||||
make dtbs
|
||||
cp ${DEVICE_TREES} /lava-files/.
|
||||
fi
|
||||
|
||||
if [ -n "$INSTALL_KERNEL_MODULES" ]; then
|
||||
make modules
|
||||
INSTALL_MOD_PATH=/lava-files/rootfs-${DEBIAN_ARCH}/ make modules_install
|
||||
fi
|
||||
|
||||
if [[ ${DEBIAN_ARCH} = "arm64" ]] && [[ ${MINIO_SUFFIX} = "baremetal" ]]; then
|
||||
make Image.lzma
|
||||
mkimage \
|
||||
-f auto \
|
||||
-A arm \
|
||||
-O linux \
|
||||
-d arch/arm64/boot/Image.lzma \
|
||||
-C lzma\
|
||||
-b arch/arm64/boot/dts/qcom/sdm845-cheza-r3.dtb \
|
||||
/lava-files/cheza-kernel
|
||||
KERNEL_IMAGE_NAME+=" cheza-kernel"
|
||||
fi
|
||||
|
||||
popd
|
||||
rm -rf kernel
|
||||
|
||||
############### Delete rust, since the tests won't be compiling anything.
|
||||
rm -rf /root/.cargo
|
||||
rm -rf /root/.rustup
|
||||
|
||||
############### Create rootfs
|
||||
set +e
|
||||
@@ -201,12 +214,10 @@ rm /lava-files/rootfs-${DEBIAN_ARCH}/create-rootfs.sh
|
||||
# created.
|
||||
mkdir -p /lava-files/rootfs-${DEBIAN_ARCH}/usr/lib/$GCC_ARCH
|
||||
find /libdrm/ -name lib\*\.so\* | xargs cp -t /lava-files/rootfs-${DEBIAN_ARCH}/usr/lib/$GCC_ARCH/.
|
||||
mkdir -p /lava-files/rootfs-${DEBIAN_ARCH}/libdrm/
|
||||
cp -Rp /libdrm/share /lava-files/rootfs-${DEBIAN_ARCH}/libdrm/share
|
||||
rm -rf /libdrm
|
||||
|
||||
|
||||
if [ ${DEBIAN_ARCH} = arm64 ]; then
|
||||
if [ ${DEBIAN_ARCH} = arm64 ] && [ ${MINIO_SUFFIX} = baremetal ]; then
|
||||
# Make a gzipped copy of the Image for db410c.
|
||||
gzip -k /lava-files/Image
|
||||
KERNEL_IMAGE_NAME+=" Image.gz"
|
||||
@@ -220,7 +231,7 @@ popd
|
||||
. .gitlab-ci/container/container_post_build.sh
|
||||
|
||||
############### Upload the files!
|
||||
ci-fairy minio login --token-file "${CI_JOB_JWT_FILE}"
|
||||
ci-fairy minio login $CI_JOB_JWT
|
||||
FILES_TO_UPLOAD="lava-rootfs.tgz \
|
||||
$KERNEL_IMAGE_NAME"
|
||||
|
||||
|
@@ -1,14 +1,7 @@
|
||||
CONFIG_LOCALVERSION_AUTO=y
|
||||
CONFIG_LOCALVERSION="ccu"
|
||||
|
||||
CONFIG_DEBUG_KERNEL=y
|
||||
|
||||
CONFIG_PWM=y
|
||||
CONFIG_PM_DEVFREQ=y
|
||||
CONFIG_OF=y
|
||||
CONFIG_CROS_EC=y
|
||||
|
||||
# abootimg with a 'dummy' rootfs fails with root=/dev/nfs
|
||||
CONFIG_BLK_DEV_INITRD=n
|
||||
|
||||
CONFIG_DEVFREQ_GOV_PERFORMANCE=y
|
||||
CONFIG_DEVFREQ_GOV_POWERSAVE=y
|
||||
CONFIG_DEVFREQ_GOV_USERSPACE=y
|
||||
@@ -62,8 +55,7 @@ CONFIG_X86_AMD_FREQ_SENSITIVITY=y
|
||||
CONFIG_PINCTRL=y
|
||||
CONFIG_PINCTRL_AMD=y
|
||||
CONFIG_DRM_AMDGPU=m
|
||||
CONFIG_DRM_AMDGPU_SI=y
|
||||
CONFIG_DRM_AMDGPU_USERPTR=y
|
||||
CONFIG_DRM_AMDGPU_SI=m
|
||||
CONFIG_DRM_AMD_ACP=n
|
||||
CONFIG_ACPI_WMI=y
|
||||
CONFIG_MXM_WMI=y
|
||||
@@ -75,26 +67,3 @@ CONFIG_CHROME_PLATFORMS=y
|
||||
|
||||
#options for Intel devices
|
||||
CONFIG_MFD_INTEL_LPSS_PCI=y
|
||||
|
||||
#options for KVM guests
|
||||
CONFIG_FUSE_FS=y
|
||||
CONFIG_HYPERVISOR_GUEST=y
|
||||
CONFIG_KVM=y
|
||||
CONFIG_KVM_GUEST=y
|
||||
CONFIG_VIRT_DRIVERS=y
|
||||
CONFIG_VIRTIO_FS=y
|
||||
CONFIG_DRM_VIRTIO_GPU=y
|
||||
CONFIG_SERIAL_8250_CONSOLE=y
|
||||
CONFIG_VIRTIO_NET=y
|
||||
CONFIG_VIRTIO_CONSOLE=y
|
||||
CONFIG_PARAVIRT=y
|
||||
CONFIG_VIRTIO_BLK=y
|
||||
CONFIG_VIRTUALIZATION=y
|
||||
CONFIG_VIRTIO=y
|
||||
CONFIG_VIRTIO_PCI=y
|
||||
CONFIG_VIRTIO_MMIO=y
|
||||
CONFIG_VIRTIO_MMIO_CMDLINE_DEVICES=y
|
||||
CONFIG_CRYPTO_DEV_VIRTIO=y
|
||||
CONFIG_HW_RANDOM_VIRTIO=y
|
||||
CONFIG_BLK_MQ_VIRTIO=y
|
||||
CONFIG_TUN=y
|
||||
|
@@ -14,6 +14,7 @@ sed -i -e 's/http:\/\/deb/https:\/\/deb/g' /etc/apt/sources.list
|
||||
STABLE_EPHEMERAL=" \
|
||||
python3-pip \
|
||||
python3-setuptools \
|
||||
unzip \
|
||||
"
|
||||
|
||||
apt-get update
|
||||
@@ -28,7 +29,6 @@ apt-get install -y --no-remove \
|
||||
g++-mingw-w64-x86-64 \
|
||||
gcc \
|
||||
git \
|
||||
glslang-tools \
|
||||
kmod \
|
||||
libclang-11-dev \
|
||||
libclang-9-dev \
|
||||
@@ -58,11 +58,13 @@ apt-get install -y --no-remove \
|
||||
make \
|
||||
meson \
|
||||
pkg-config \
|
||||
python-is-python3 \
|
||||
python3-mako \
|
||||
python3-pil \
|
||||
python3-requests \
|
||||
qemu-user \
|
||||
valgrind \
|
||||
wayland-protocols \
|
||||
wget \
|
||||
wine64 \
|
||||
x11proto-dri2-dev \
|
||||
@@ -72,7 +74,14 @@ apt-get install -y --no-remove \
|
||||
zlib1g-dev
|
||||
|
||||
# Needed for ci-fairy, this revision is able to upload files to MinIO
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@34f4ade99434043f88e164933f570301fd18b125
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@6f5af7e5574509726c79109e3c147cee95e81366
|
||||
|
||||
# for the vulkan overlay layer and ACO tests
|
||||
wget https://github.com/KhronosGroup/glslang/releases/download/SDK-candidate-26-Jul-2020/glslang-master-linux-Release.zip
|
||||
unzip glslang-master-linux-Release.zip bin/glslangValidator
|
||||
install -m755 bin/glslangValidator /usr/local/bin/
|
||||
rm bin/glslangValidator glslang-master-linux-Release.zip
|
||||
|
||||
|
||||
############### Uninstall ephemeral packages
|
||||
|
.gitlab-ci/container/x86_build.sh (new file)
@@ -0,0 +1,110 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Ephemeral packages (installed for this script and removed again at the end)
|
||||
STABLE_EPHEMERAL=" \
|
||||
autoconf \
|
||||
automake \
|
||||
autotools-dev \
|
||||
bzip2 \
|
||||
cmake \
|
||||
libgbm-dev \
|
||||
libtool \
|
||||
unzip \
|
||||
"
|
||||
|
||||
# We need multiarch for Wine
|
||||
dpkg --add-architecture i386
|
||||
apt-get update
|
||||
|
||||
apt-get install -y --no-remove \
|
||||
$STABLE_EPHEMERAL \
|
||||
clang \
|
||||
libasan6 \
|
||||
libarchive-dev \
|
||||
libclang-cpp11-dev \
|
||||
libglvnd-dev \
|
||||
libllvmspirvlib-dev \
|
||||
liblua5.3-dev \
|
||||
libxcb-dri2-0-dev \
|
||||
libxcb-dri3-dev \
|
||||
libxcb-glx0-dev \
|
||||
libxcb-present-dev \
|
||||
libxcb-randr0-dev \
|
||||
libxcb-shm0-dev \
|
||||
libxcb-sync-dev \
|
||||
libxcb-xfixes0-dev \
|
||||
libxcb1-dev \
|
||||
libxml2-dev \
|
||||
llvm-11-dev \
|
||||
llvm-9-dev \
|
||||
ocl-icd-opencl-dev \
|
||||
procps \
|
||||
spirv-tools \
|
||||
strace \
|
||||
time \
|
||||
wine \
|
||||
wine32
|
||||
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
|
||||
|
||||
# Debian's pkg-config wrappers for mingw are broken, and there's no sign that
|
||||
# they're going to be fixed, so we'll just have to fix it ourselves
|
||||
# https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=930492
|
||||
cat >/usr/local/bin/x86_64-w64-mingw32-pkg-config <<EOF
|
||||
#!/bin/sh
|
||||
|
||||
PKG_CONFIG_LIBDIR=/usr/x86_64-w64-mingw32/lib/pkgconfig pkg-config \$@
|
||||
EOF
|
||||
chmod +x /usr/local/bin/x86_64-w64-mingw32-pkg-config
|
||||
|
||||
|
||||
# dependencies where we want a specific version
|
||||
export XORG_RELEASES=https://xorg.freedesktop.org/releases/individual
|
||||
export WAYLAND_RELEASES=https://wayland.freedesktop.org/releases
|
||||
|
||||
export XORGMACROS_VERSION=util-macros-1.19.0
|
||||
export LIBWAYLAND_VERSION=wayland-1.18.0
|
||||
|
||||
wget $XORG_RELEASES/util/$XORGMACROS_VERSION.tar.bz2
|
||||
tar -xvf $XORGMACROS_VERSION.tar.bz2 && rm $XORGMACROS_VERSION.tar.bz2
|
||||
cd $XORGMACROS_VERSION; ./configure; make install; cd ..
|
||||
rm -rf $XORGMACROS_VERSION
|
||||
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
wget $WAYLAND_RELEASES/$LIBWAYLAND_VERSION.tar.xz
|
||||
tar -xvf $LIBWAYLAND_VERSION.tar.xz && rm $LIBWAYLAND_VERSION.tar.xz
|
||||
cd $LIBWAYLAND_VERSION; ./configure --enable-libraries --without-host-scanner --disable-documentation --disable-dtd-validation; make install; cd ..
|
||||
rm -rf $LIBWAYLAND_VERSION
|
||||
|
||||
|
||||
pushd /usr/local
|
||||
git clone https://gitlab.freedesktop.org/mesa/shader-db.git --depth 1
|
||||
rm -rf shader-db/.git
|
||||
cd shader-db
|
||||
make
|
||||
popd
|
||||
|
||||
git clone https://github.com/microsoft/DirectX-Headers -b v1.0.1 --depth 1
|
||||
pushd DirectX-Headers
|
||||
mkdir build
|
||||
cd build
|
||||
meson .. --backend=ninja --buildtype=release -Dbuild-test=false
|
||||
ninja
|
||||
ninja install
|
||||
popd
|
||||
rm -rf DirectX-Headers
|
||||
|
||||
############### Uninstall the build software
|
||||
|
||||
apt-get purge -y \
|
||||
$STABLE_EPHEMERAL
|
||||
|
||||
. .gitlab-ci/container/container_post_build.sh
|
@@ -41,6 +41,7 @@ apt-get install -y --no-remove \
|
||||
libxkbcommon0 \
|
||||
libxrandr2 \
|
||||
libxrender1 \
|
||||
python-is-python3 \
|
||||
python3-mako \
|
||||
python3-numpy \
|
||||
python3-packaging \
|
||||
@@ -59,7 +60,7 @@ apt-get install -y --no-install-recommends \
|
||||
|
||||
# Needed for ci-fairy, this revision is able to upload files to MinIO
|
||||
# and doesn't depend on git
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@34f4ade99434043f88e164933f570301fd18b125
|
||||
pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates@0f1abc24c043e63894085a6bd12f14263e8b29eb
|
||||
|
||||
############### Build dEQP runner
|
||||
. .gitlab-ci/container/build-deqp-runner.sh
|
.gitlab-ci/container/x86_test-gl.sh (new file)
@@ -0,0 +1,87 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Ephemeral packages (installed for this script and removed again at the end)
|
||||
STABLE_EPHEMERAL=" \
|
||||
autoconf \
|
||||
automake \
|
||||
ccache \
|
||||
clang-11 \
|
||||
cmake \
|
||||
g++ \
|
||||
libclang-cpp11-dev \
|
||||
libgbm-dev \
|
||||
libgles2-mesa-dev \
|
||||
libllvmspirvlib-dev \
|
||||
libpciaccess-dev \
|
||||
libudev-dev \
|
||||
libvulkan-dev \
|
||||
libwaffle-dev \
|
||||
libwayland-dev \
|
||||
libx11-xcb-dev \
|
||||
libxkbcommon-dev \
|
||||
libxrender-dev \
|
||||
llvm-11-dev \
|
||||
llvm-spirv \
|
||||
make \
|
||||
meson \
|
||||
ocl-icd-opencl-dev \
|
||||
patch \
|
||||
pkg-config \
|
||||
python3-distutils \
|
||||
wget \
|
||||
xz-utils \
|
||||
"
|
||||
|
||||
apt-get install -y --no-remove \
|
||||
$STABLE_EPHEMERAL \
|
||||
apitrace \
|
||||
clinfo \
|
||||
libclang-common-11-dev \
|
||||
libclang-cpp11 \
|
||||
libegl1 \
|
||||
libllvmspirvlib11 \
|
||||
libxcb-shm0 \
|
||||
ocl-icd-libopencl1 \
|
||||
python3-lxml \
|
||||
python3-renderdoc \
|
||||
python3-simplejson \
|
||||
spirv-tools
|
||||
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
|
||||
|
||||
############### Build libdrm
|
||||
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
############### Build libclc
|
||||
|
||||
. .gitlab-ci/container/build-libclc.sh
|
||||
|
||||
############### Build virglrenderer
|
||||
|
||||
. .gitlab-ci/container/build-virglrenderer.sh
|
||||
|
||||
############### Build piglit
|
||||
|
||||
INCLUDE_OPENCL_TESTS=1 . .gitlab-ci/container/build-piglit.sh
|
||||
|
||||
############### Build dEQP GL
|
||||
|
||||
DEQP_TARGET=surfaceless . .gitlab-ci/container/build-deqp.sh
|
||||
|
||||
|
||||
############### Uninstall the build software
|
||||
|
||||
ccache --show-stats
|
||||
|
||||
apt-get purge -y \
|
||||
$STABLE_EPHEMERAL
|
||||
|
||||
apt-get autoremove -y --purge
|
.gitlab-ci/container/x86_test-vk.sh (new file)
@@ -0,0 +1,140 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
set -o xtrace
|
||||
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Ephemeral packages (installed for this script and removed again at the end)
|
||||
STABLE_EPHEMERAL=" \
|
||||
ccache \
|
||||
cmake \
|
||||
g++ \
|
||||
libgbm-dev \
|
||||
libgles2-mesa-dev \
|
||||
liblz4-dev \
|
||||
libpciaccess-dev \
|
||||
libudev-dev \
|
||||
libvulkan-dev \
|
||||
libwaffle-dev \
|
||||
libwayland-dev \
|
||||
libx11-xcb-dev \
|
||||
libxcb-ewmh-dev \
|
||||
libxcb-keysyms1-dev \
|
||||
libxkbcommon-dev \
|
||||
libxrandr-dev \
|
||||
libxrender-dev \
|
||||
libzstd-dev \
|
||||
meson \
|
||||
p7zip \
|
||||
patch \
|
||||
pkg-config \
|
||||
python3-distutils \
|
||||
wget \
|
||||
xz-utils \
|
||||
"
|
||||
|
||||
apt-get install -y --no-remove \
|
||||
$STABLE_EPHEMERAL \
|
||||
libxcb-shm0 \
|
||||
python3-lxml \
|
||||
python3-simplejson
|
||||
|
||||
# We need multiarch for Wine
|
||||
dpkg --add-architecture i386
|
||||
|
||||
apt-get update
|
||||
|
||||
apt-get install -y --no-remove \
|
||||
wine \
|
||||
wine32 \
|
||||
wine64
|
||||
|
||||
|
||||
############### Set up Wine env variables
|
||||
|
||||
export WINEDEBUG="-all"
|
||||
export WINEPREFIX="/dxvk-wine64"
|
||||
|
||||
############### Install DXVK
|
||||
|
||||
DXVK_VERSION="1.6"
|
||||
|
||||
# We don't want crash dialogs
|
||||
cat >crashdialog.reg <<EOF
|
||||
Windows Registry Editor Version 5.00
|
||||
|
||||
[HKEY_CURRENT_USER\Software\Wine\WineDbg]
|
||||
"ShowCrashDialog"=dword:00000000
|
||||
|
||||
EOF
|
||||
|
||||
# Set the wine prefix and disable the crash dialog
|
||||
wine regedit crashdialog.reg
|
||||
rm crashdialog.reg
|
||||
|
||||
# DXVK's setup often fails with:
|
||||
# "${WINEPREFIX}: Not a valid wine prefix."
|
||||
# and that is just spit out because of checking the existence of the
|
||||
# system.reg file, which fails.
|
||||
# Just giving it a bit more of time for it to be created solves the
|
||||
# problem ...
|
||||
while ! test -f "${WINEPREFIX}/system.reg"; do sleep 1; done
|
||||
|
||||
wget "https://github.com/doitsujin/dxvk/releases/download/v${DXVK_VERSION}/dxvk-${DXVK_VERSION}.tar.gz"
|
||||
tar xzpf dxvk-"${DXVK_VERSION}".tar.gz
|
||||
dxvk-"${DXVK_VERSION}"/setup_dxvk.sh install
|
||||
rm -rf dxvk-"${DXVK_VERSION}"
|
||||
rm dxvk-"${DXVK_VERSION}".tar.gz
|
||||
|
||||
############### Install Windows' apitrace binaries
|
||||
|
||||
APITRACE_VERSION="9.0"
|
||||
APITRACE_VERSION_DATE="20191126"
|
||||
|
||||
wget "https://github.com/apitrace/apitrace/releases/download/${APITRACE_VERSION}/apitrace-${APITRACE_VERSION}.${APITRACE_VERSION_DATE}-win64.7z"
|
||||
7zr x "apitrace-${APITRACE_VERSION}.${APITRACE_VERSION_DATE}-win64.7z" \
|
||||
"apitrace-${APITRACE_VERSION}.${APITRACE_VERSION_DATE}-win64/bin/apitrace.exe" \
|
||||
"apitrace-${APITRACE_VERSION}.${APITRACE_VERSION_DATE}-win64/bin/d3dretrace.exe"
|
||||
mv "apitrace-${APITRACE_VERSION}.${APITRACE_VERSION_DATE}-win64" /apitrace-msvc-win64
|
||||
rm "apitrace-${APITRACE_VERSION}.${APITRACE_VERSION_DATE}-win64.7z"
|
||||
|
||||
# Add the apitrace path to the registry
|
||||
wine \
|
||||
reg add "HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Session Manager\Environment" \
|
||||
/v Path \
|
||||
/t REG_EXPAND_SZ \
|
||||
/d "C:\windows\system32;C:\windows;C:\windows\system32\wbem;Z:\apitrace-msvc-win64\bin" \
|
||||
/f
|
||||
|
||||
############### Building ...
|
||||
|
||||
. .gitlab-ci/container/container_pre_build.sh
|
||||
|
||||
############### Build piglit
|
||||
|
||||
PIGLIT_BUILD_TARGETS="piglit_replayer" . .gitlab-ci/container/build-piglit.sh
|
||||
|
||||
############### Build Fossilize
|
||||
|
||||
. .gitlab-ci/container/build-fossilize.sh
|
||||
|
||||
############### Build dEQP VK
|
||||
. .gitlab-ci/container/build-deqp.sh
|
||||
|
||||
############### Build gfxreconstruct
|
||||
|
||||
. .gitlab-ci/container/build-gfxreconstruct.sh
|
||||
|
||||
############### Build libdrm
|
||||
|
||||
. .gitlab-ci/container/build-libdrm.sh
|
||||
|
||||
############### Uninstall the build software
|
||||
|
||||
ccache --show-stats
|
||||
|
||||
apt-get purge -y \
|
||||
$STABLE_EPHEMERAL
|
||||
|
||||
apt-get autoremove -y --purge
|
.gitlab-ci/cross-xfail-i386 (new file)
@@ -0,0 +1 @@
u_format_test
@@ -1 +1,3 @@
lp_test_arit
lp_test_format
u_format_test
@@ -1,38 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -e
|
||||
|
||||
export DEQP_TEMP_DIR="$1"
|
||||
|
||||
mount -t proc none /proc
|
||||
mount -t sysfs none /sys
|
||||
mkdir -p /dev/pts
|
||||
mount -t devpts devpts /dev/pts
|
||||
mount -t tmpfs tmpfs /tmp
|
||||
|
||||
. $DEQP_TEMP_DIR/crosvm-env.sh
|
||||
|
||||
# .gitlab-ci.yml script variable is using relative paths to install directory,
|
||||
# so change to that dir before running `crosvm-script`
|
||||
cd "${CI_PROJECT_DIR}"
|
||||
|
||||
# The exception is the dEQP binary, since it needs to run from the directory
|
||||
# it's in
|
||||
if [ -d "${DEQP_BIN_DIR}" ]
|
||||
then
|
||||
cd "${DEQP_BIN_DIR}"
|
||||
fi
|
||||
|
||||
dmesg --level crit,err,warn -w >> $DEQP_TEMP_DIR/stderr &
|
||||
|
||||
set +e
|
||||
stdbuf -oL sh $DEQP_TEMP_DIR/crosvm-script.sh 2>> $DEQP_TEMP_DIR/stderr >> $DEQP_TEMP_DIR/stdout
|
||||
echo $? > $DEQP_TEMP_DIR/exit_code
|
||||
set -e
|
||||
|
||||
sync
|
||||
sleep 1
|
||||
|
||||
poweroff -d -n -f || true
|
||||
|
||||
sleep 1 # Just in case init would exit before the kernel shuts down the VM
|
@@ -1,58 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -ex
|
||||
|
||||
# This script can be called concurrently, pass arguments and env in a
|
||||
# per-instance tmp dir
|
||||
DEQP_TEMP_DIR=$(mktemp -d /tmp.XXXXXXXXXX)
|
||||
export DEQP_TEMP_DIR
|
||||
|
||||
# The dEQP binary needs to run from the directory it's in
|
||||
if [ -n "${1##*.sh}" ] && [ -z "${1##*"deqp"*}" ]; then
|
||||
DEQP_BIN_DIR=$(dirname "$1")
|
||||
export DEQP_BIN_DIR
|
||||
fi
|
||||
|
||||
# Securely pass the current variables to the crosvm environment
|
||||
CI_COMMON="$CI_PROJECT_DIR"/install/common
|
||||
echo "Variables passed through:"
|
||||
"${CI_COMMON}"/generate-env.sh | tee ${DEQP_TEMP_DIR}/crosvm-env.sh
|
||||
|
||||
CROSVM_KERNEL_ARGS="quiet console=null root=my_root rw rootfstype=virtiofs init=$CI_PROJECT_DIR/install/crosvm-init.sh ip=192.168.30.2::192.168.30.1:255.255.255.0:crosvm:eth0 -- $DEQP_TEMP_DIR"
|
||||
|
||||
# Set the crosvm-script as the arguments of the current script.
|
||||
echo "$@" > $DEQP_TEMP_DIR/crosvm-script.sh
|
||||
|
||||
unset DISPLAY
|
||||
unset XDG_RUNTIME_DIR
|
||||
|
||||
/usr/sbin/iptables-legacy -w -t nat -A POSTROUTING -o eth0 -j MASQUERADE
|
||||
echo 1 > /proc/sys/net/ipv4/ip_forward
|
||||
|
||||
# Send output from guest to host
|
||||
touch $DEQP_TEMP_DIR/stderr $DEQP_TEMP_DIR/stdout
|
||||
tail -f $DEQP_TEMP_DIR/stderr >> /dev/stderr &
|
||||
ERR_TAIL_PID=$!
|
||||
tail -f $DEQP_TEMP_DIR/stdout >> /dev/stdout &
|
||||
OUT_TAIL_PID=$!
|
||||
|
||||
trap "exit \$exit_code" INT TERM
|
||||
trap "exit_code=\$?; kill $ERR_TAIL_PID $OUT_TAIL_PID; rm -rf $DEQP_TEMP_DIR" EXIT
|
||||
|
||||
# We aren't testing LLVMPipe here, so we don't need to validate NIR on the host
|
||||
NIR_DEBUG="novalidate" LIBGL_ALWAYS_SOFTWARE="true" GALLIUM_DRIVER="$CROSVM_GALLIUM_DRIVER" crosvm run \
|
||||
--gpu "$CROSVM_GPU_ARGS" \
|
||||
-m 4096 \
|
||||
-c 2 \
|
||||
--disable-sandbox \
|
||||
--shared-dir /:my_root:type=fs:writeback=true:timeout=60:cache=always \
|
||||
--host_ip=192.168.30.1 --netmask=255.255.255.0 --mac "AA:BB:CC:00:00:12" \
|
||||
-p "$CROSVM_KERNEL_ARGS" \
|
||||
/lava-files/bzImage > $DEQP_TEMP_DIR/crosvm 2>&1
|
||||
|
||||
RET=$(cat $DEQP_TEMP_DIR/exit_code || true)
|
||||
|
||||
# Got no exit code from the script, show crosvm output to help with debugging
|
||||
[ -n "$RET" ] || cat $DEQP_TEMP_DIR/crosvm || true
|
||||
|
||||
exit ${RET:-1}
|
.gitlab-ci/deqp-default-skips.txt (new file)
@@ -0,0 +1,10 @@
# Note: skips lists for CI are just a list of lines that, when
# non-zero-length and not starting with '#', will regex match to
# delete lines from the test list. Be careful.

# Skip the perf/stress tests to keep runtime manageable
dEQP-GLES[0-9]*.performance.*
dEQP-GLES[0-9]*.stress.*

# These are really slow on tiling architectures (including llvmpipe).
dEQP-GLES[0-9]*.functional.flush_finish.*
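As the note says, every non-empty, non-comment line is treated as a regular expression that deletes matching tests from the case list. A stand-alone approximation of that filtering, with placeholder file names, would be:

    # Sketch only: drop every case that matches a pattern from the skips file.
    grep -v '^#' deqp-default-skips.txt | grep -v '^$' > /tmp/skip-patterns.txt
    grep -v -E -f /tmp/skip-patterns.txt /tmp/case-list.txt > /tmp/filtered-case-list.txt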
@@ -2,11 +2,30 @@
|
||||
|
||||
set -ex
|
||||
|
||||
# Needed so configuration files can contain paths to files in /install
|
||||
ln -sf $CI_PROJECT_DIR/install /install
|
||||
DEQP_WIDTH=${DEQP_WIDTH:-256}
|
||||
DEQP_HEIGHT=${DEQP_HEIGHT:-256}
|
||||
DEQP_CONFIG=${DEQP_CONFIG:-rgba8888d24s8ms0}
|
||||
DEQP_VARIANT=${DEQP_VARIANT:-master}
|
||||
|
||||
DEQP_OPTIONS="$DEQP_OPTIONS --deqp-surface-width=$DEQP_WIDTH --deqp-surface-height=$DEQP_HEIGHT"
|
||||
DEQP_OPTIONS="$DEQP_OPTIONS --deqp-surface-type=pbuffer"
|
||||
DEQP_OPTIONS="$DEQP_OPTIONS --deqp-gl-config-name=$DEQP_CONFIG"
|
||||
DEQP_OPTIONS="$DEQP_OPTIONS --deqp-visibility=hidden"
|
||||
|
||||
if [ -z "$DEQP_VER" ]; then
|
||||
echo 'DEQP_VER must be set to something like "gles2", "gles31" or "vk" for the test run'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$DEQP_VER" = "vk" ]; then
|
||||
if [ -z "$VK_DRIVER" ]; then
|
||||
echo 'VK_DRIVER must be set to something like "radeon" or "intel" for the test run'
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$GPU_VERSION" ]; then
|
||||
echo 'GPU_VERSION must be set to something like "llvmpipe" or "freedreno-a630" (the name used in .gitlab-ci/gpu-version-*.txt)'
|
||||
echo 'GPU_VERSION must be set to something like "llvmpipe" or "freedreno-a630" (the name used in .gitlab-ci/deqp-gpu-version-*.txt)'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
@@ -17,103 +36,175 @@ export LD_LIBRARY_PATH=`pwd`/install/lib/
|
||||
export EGL_PLATFORM=surfaceless
|
||||
export VK_ICD_FILENAMES=`pwd`/install/share/vulkan/icd.d/"$VK_DRIVER"_icd.${VK_CPU:-`uname -m`}.json
|
||||
|
||||
# the runner was failing to look for libkms in /usr/local/lib for some reason
|
||||
# I never figured out.
|
||||
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib
|
||||
|
||||
RESULTS=`pwd`/${DEQP_RESULTS_DIR:-results}
|
||||
mkdir -p $RESULTS
|
||||
|
||||
HANG_DETECTION_CMD=""
|
||||
|
||||
if [ -z "$DEQP_SUITE" ]; then
|
||||
if [ -z "$DEQP_VER" ]; then
|
||||
echo 'DEQP_SUITE must be set to the name of your deqp-gpu_version.toml, or DEQP_VER must be set to something like "gles2", "gles31-khr" or "vk" for the test run'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
DEQP_WIDTH=${DEQP_WIDTH:-256}
|
||||
DEQP_HEIGHT=${DEQP_HEIGHT:-256}
|
||||
DEQP_CONFIG=${DEQP_CONFIG:-rgba8888d24s8ms0}
|
||||
DEQP_VARIANT=${DEQP_VARIANT:-master}
|
||||
|
||||
DEQP_OPTIONS="$DEQP_OPTIONS --deqp-surface-width=$DEQP_WIDTH --deqp-surface-height=$DEQP_HEIGHT"
|
||||
DEQP_OPTIONS="$DEQP_OPTIONS --deqp-surface-type=${DEQP_SURFACE_TYPE:-pbuffer}"
|
||||
DEQP_OPTIONS="$DEQP_OPTIONS --deqp-gl-config-name=$DEQP_CONFIG"
|
||||
DEQP_OPTIONS="$DEQP_OPTIONS --deqp-visibility=hidden"
|
||||
|
||||
if [ "$DEQP_VER" = "vk" -a -z "$VK_DRIVER" ]; then
|
||||
echo 'VK_DRIVER must be set to something like "radeon" or "intel" for the test run'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Generate test case list file.
|
||||
if [ "$DEQP_VER" = "vk" ]; then
|
||||
MUSTPASS=/deqp/mustpass/vk-$DEQP_VARIANT.txt
|
||||
DEQP=/deqp/external/vulkancts/modules/vulkan/deqp-vk
|
||||
HANG_DETECTION_CMD="/parallel-deqp-runner/build/bin/hang-detection"
|
||||
elif [ "$DEQP_VER" = "gles2" -o "$DEQP_VER" = "gles3" -o "$DEQP_VER" = "gles31" -o "$DEQP_VER" = "egl" ]; then
|
||||
MUSTPASS=/deqp/mustpass/$DEQP_VER-$DEQP_VARIANT.txt
|
||||
DEQP=/deqp/modules/$DEQP_VER/deqp-$DEQP_VER
|
||||
elif [ "$DEQP_VER" = "gles2-khr" -o "$DEQP_VER" = "gles3-khr" -o "$DEQP_VER" = "gles31-khr" -o "$DEQP_VER" = "gles32-khr" ]; then
|
||||
MUSTPASS=/deqp/mustpass/$DEQP_VER-$DEQP_VARIANT.txt
|
||||
DEQP=/deqp/external/openglcts/modules/glcts
|
||||
else
|
||||
MUSTPASS=/deqp/mustpass/$DEQP_VER-$DEQP_VARIANT.txt
|
||||
DEQP=/deqp/external/openglcts/modules/glcts
|
||||
fi
|
||||
|
||||
cp $MUSTPASS /tmp/case-list.txt
|
||||
|
||||
# If the caselist is too long to run in a reasonable amount of time, let the job
|
||||
# specify what fraction (1/n) of the caselist we should run. Note: N~M is a gnu
|
||||
# sed extension to match every nth line (first line is #1).
|
||||
if [ -n "$DEQP_FRACTION" ]; then
|
||||
sed -ni 1~$DEQP_FRACTION"p" /tmp/case-list.txt
|
||||
fi
|
||||
|
||||
# If the job is parallel at the gitlab job level, take the corresponding fraction
|
||||
# of the caselist.
|
||||
if [ -n "$CI_NODE_INDEX" ]; then
|
||||
sed -ni $CI_NODE_INDEX~$CI_NODE_TOTAL"p" /tmp/case-list.txt
|
||||
fi
|
||||
|
||||
if [ -n "$DEQP_CASELIST_FILTER" ]; then
|
||||
sed -ni "/$DEQP_CASELIST_FILTER/p" /tmp/case-list.txt
|
||||
fi
|
||||
|
||||
if [ -n "$DEQP_CASELIST_INV_FILTER" ]; then
|
||||
sed -ni "/$DEQP_CASELIST_INV_FILTER/!p" /tmp/case-list.txt
|
||||
fi
|
||||
|
||||
if [ ! -s /tmp/case-list.txt ]; then
|
||||
echo "Caselist generation failed"
|
||||
exit 1
|
||||
fi
|
||||
# Generate test case list file.
|
||||
if [ "$DEQP_VER" = "vk" ]; then
|
||||
cp /deqp/mustpass/vk-$DEQP_VARIANT.txt /tmp/case-list.txt
|
||||
DEQP=/deqp/external/vulkancts/modules/vulkan/deqp-vk
|
||||
elif [ "$DEQP_VER" = "gles2" -o "$DEQP_VER" = "gles3" -o "$DEQP_VER" = "gles31" ]; then
|
||||
cp /deqp/mustpass/$DEQP_VER-$DEQP_VARIANT.txt /tmp/case-list.txt
|
||||
DEQP=/deqp/modules/$DEQP_VER/deqp-$DEQP_VER
|
||||
SUITE=dEQP
|
||||
else
|
||||
cp /deqp/mustpass/$DEQP_VER-$DEQP_VARIANT.txt /tmp/case-list.txt
|
||||
DEQP=/deqp/external/openglcts/modules/glcts
|
||||
SUITE=KHR
|
||||
fi
|
||||
|
||||
if [ -e "$INSTALL/$GPU_VERSION-fails.txt" ]; then
|
||||
DEQP_RUNNER_OPTIONS="$DEQP_RUNNER_OPTIONS --baseline $INSTALL/$GPU_VERSION-fails.txt"
|
||||
# If the caselist is too long to run in a reasonable amount of time, let the job
|
||||
# specify what fraction (1/n) of the caselist we should run. Note: N~M is a gnu
|
||||
# sed extension to match every nth line (first line is #1).
|
||||
if [ -n "$DEQP_FRACTION" ]; then
|
||||
sed -ni 1~$DEQP_FRACTION"p" /tmp/case-list.txt
|
||||
fi
|
||||
|
||||
# Default to an empty known flakes file if it doesn't exist.
|
||||
touch $INSTALL/$GPU_VERSION-flakes.txt
|
||||
|
||||
|
||||
if [ -n "$VK_DRIVER" ] && [ -e "$INSTALL/$VK_DRIVER-skips.txt" ]; then
|
||||
DEQP_SKIPS="$DEQP_SKIPS $INSTALL/$VK_DRIVER-skips.txt"
|
||||
# If the job is parallel at the gitlab job level, take the corresponding fraction
|
||||
# of the caselist.
|
||||
if [ -n "$CI_NODE_INDEX" ]; then
|
||||
sed -ni $CI_NODE_INDEX~$CI_NODE_TOTAL"p" /tmp/case-list.txt
|
||||
fi
|
||||
|
||||
if [ -n "$GALLIUM_DRIVER" ] && [ -e "$INSTALL/$GALLIUM_DRIVER-skips.txt" ]; then
|
||||
DEQP_SKIPS="$DEQP_SKIPS $INSTALL/$GALLIUM_DRIVER-skips.txt"
|
||||
if [ -n "$DEQP_CASELIST_FILTER" ]; then
|
||||
sed -ni "/$DEQP_CASELIST_FILTER/p" /tmp/case-list.txt
|
||||
fi
|
||||
|
||||
if [ -n "$DRIVER_NAME" ] && [ -e "$INSTALL/$DRIVER_NAME-skips.txt" ]; then
|
||||
DEQP_SKIPS="$DEQP_SKIPS $INSTALL/$DRIVER_NAME-skips.txt"
|
||||
if [ ! -s /tmp/case-list.txt ]; then
|
||||
echo "Caselist generation failed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -e "$INSTALL/$GPU_VERSION-skips.txt" ]; then
|
||||
DEQP_SKIPS="$DEQP_SKIPS $INSTALL/$GPU_VERSION-skips.txt"
|
||||
if [ -e "$INSTALL/deqp-$GPU_VERSION-fails.txt" ]; then
|
||||
DEQP_RUNNER_OPTIONS="$DEQP_RUNNER_OPTIONS --baseline $INSTALL/deqp-$GPU_VERSION-fails.txt"
|
||||
fi
|
||||
|
||||
if [ -e "$INSTALL/deqp-$GPU_VERSION-flakes.txt" ]; then
|
||||
DEQP_RUNNER_OPTIONS="$DEQP_RUNNER_OPTIONS --flakes $INSTALL/deqp-$GPU_VERSION-flakes.txt"
|
||||
fi
|
||||
|
||||
if [ -e "$INSTALL/deqp-$GPU_VERSION-skips.txt" ]; then
|
||||
DEQP_RUNNER_OPTIONS="$DEQP_RUNNER_OPTIONS --skips $INSTALL/deqp-$GPU_VERSION-skips.txt"
|
||||
else
|
||||
DEQP_RUNNER_OPTIONS="$DEQP_RUNNER_OPTIONS --skips $INSTALL/deqp-default-skips.txt"
|
||||
fi
|
||||
|
||||
set +e
|
||||
|
||||
if [ -n "$DEQP_PARALLEL" ]; then
|
||||
JOB="--jobs $DEQP_PARALLEL"
|
||||
elif [ -n "$FDO_CI_CONCURRENT" ]; then
|
||||
JOB="--jobs $FDO_CI_CONCURRENT"
|
||||
else
|
||||
JOB="--jobs 4"
|
||||
fi
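# Editor's note (not part of the original script): job-count precedence is
# DEQP_PARALLEL, then the runner-provided FDO_CI_CONCURRENT, then a fixed 4.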
|
||||
|
||||
# If this CI lab lacks artifacts support, print the whole list of failures/flakes.
|
||||
if [ -n "$DEQP_NO_SAVE_RESULTS" ]; then
|
||||
SUMMARY_LIMIT="--summary-limit 0"
|
||||
fi
|
||||
|
||||
run_cts() {
|
||||
deqp=$1
|
||||
caselist=$2
|
||||
output=$3
|
||||
deqp-runner \
|
||||
run \
|
||||
--deqp $deqp \
|
||||
--output $RESULTS \
|
||||
--caselist $caselist \
|
||||
--testlog-to-xml /deqp/executor/testlog-to-xml \
|
||||
$JOB \
|
||||
$SUMMARY_LIMIT \
|
||||
$DEQP_RUNNER_OPTIONS \
|
||||
-- \
|
||||
$DEQP_OPTIONS
|
||||
}
|
||||
|
||||
report_flakes() {
|
||||
flakes=`grep ",Flake" $1 | sed 's|,Flake.*||g'`
|
||||
if [ -z "$flakes" ]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
if [ -z "$FLAKES_CHANNEL" ]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
# The nick needs to be something unique so that multiple runners
|
||||
# connecting at the same time don't race for one nick and get blocked.
|
||||
# freenode has a 16-char limit on nicks (9 is the IETF standard, but
|
||||
# various servers extend that). So, trim off the common prefixes of the
|
||||
# runner name, and append the job ID so that software runners with more
|
||||
# than one concurrent job (think swrast) don't collide. For freedreno,
|
||||
# that gives us a nick as long as db410c-N-JJJJJJJJ, and it'll be a while
|
||||
# before we make it to 9-digit jobs (we're at 7 so far).
|
||||
runner=`echo $CI_RUNNER_DESCRIPTION | sed 's|mesa-||' | sed 's|google-freedreno-||g'`
|
||||
bot="$runner-$CI_JOB_ID"
|
||||
channel="$FLAKES_CHANNEL"
|
||||
(
|
||||
echo NICK $bot
|
||||
echo USER $bot unused unused :Gitlab CI Notifier
|
||||
sleep 10
|
||||
echo "JOIN $channel"
|
||||
sleep 1
|
||||
desc="Flakes detected in job: $CI_JOB_URL on $CI_RUNNER_DESCRIPTION"
|
||||
if [ -n "$CI_MERGE_REQUEST_SOURCE_BRANCH_NAME" ]; then
|
||||
desc="$desc on branch $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME ($CI_MERGE_REQUEST_TITLE)"
|
||||
elif [ -n "$CI_COMMIT_BRANCH" ]; then
|
||||
desc="$desc on branch $CI_COMMIT_BRANCH ($CI_COMMIT_TITLE)"
|
||||
fi
|
||||
echo "PRIVMSG $channel :$desc"
|
||||
for flake in $flakes; do
|
||||
echo "PRIVMSG $channel :$flake"
|
||||
done
|
||||
echo "PRIVMSG $channel :See $CI_JOB_URL/artifacts/browse/results/"
|
||||
echo "QUIT"
|
||||
) | nc irc.freenode.net 6667 > /dev/null
|
||||
|
||||
}
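# Editor's illustration (hypothetical values, not part of the original
# script): report_flakes expects a deqp-runner results.csv whose flaky
# entries look like "dEQP-GLES2.functional.foo.bar,Flake", and the nick
# trimming above would turn a runner named "google-freedreno-db410c-3"
# with CI_JOB_ID=1234567 into the IRC nick "db410c-3-1234567".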
|
||||
|
||||
parse_renderer() {
|
||||
RENDERER=`grep -A1 TestCaseResult.\*info.renderer $RESULTS/deqp-info.qpa | grep '<Text' | sed 's|.*<Text>||g' | sed 's|</Text>||g'`
|
||||
VERSION=`grep -A1 TestCaseResult.\*info.version $RESULTS/deqp-info.qpa | grep '<Text' | sed 's|.*<Text>||g' | sed 's|</Text>||g'`
|
||||
echo "Renderer: $RENDERER"
|
||||
echo "Version: $VERSION "
|
||||
|
||||
if ! echo $RENDERER | grep -q $DEQP_EXPECTED_RENDERER; then
|
||||
echo "Expected GL_RENDERER $DEQP_EXPECTED_RENDERER"
|
||||
exit 1
|
||||
fi
|
||||
}
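# Editor's illustration (schematic, not verbatim dEQP output): the greps in
# parse_renderer pull the renderer/version strings out of qpa log fragments
# shaped roughly like:
#   <TestCaseResult CasePath="dEQP-GLES2.info.renderer" ...>
#     <Text>FD630</Text>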
|
||||
|
||||
check_renderer() {
|
||||
echo "Capturing renderer info for GLES driver sanity checks"
|
||||
# If you're having trouble loading your driver, uncommenting this may help
|
||||
# debug.
|
||||
# export EGL_LOG_LEVEL=debug
|
||||
VERSION=`echo $DEQP_VER | tr '[a-z]' '[A-Z]'`
|
||||
export LD_PRELOAD=$TEST_LD_PRELOAD
|
||||
$DEQP $DEQP_OPTIONS --deqp-case=$SUITE-$VERSION.info.\* --deqp-log-filename=$RESULTS/deqp-info.qpa
|
||||
export LD_PRELOAD=
|
||||
parse_renderer
|
||||
}
|
||||
|
||||
check_vk_device_name() {
|
||||
echo "Capturing device info for VK driver sanity checks"
|
||||
export LD_PRELOAD=$TEST_LD_PRELOAD
|
||||
$DEQP $DEQP_OPTIONS --deqp-case=dEQP-VK.info.device --deqp-log-filename=$RESULTS/deqp-info.qpa
|
||||
export LD_PRELOAD=
|
||||
DEVICENAME=`grep deviceName $RESULTS/deqp-info.qpa | sed 's|deviceName: ||g'`
|
||||
echo "deviceName: $DEVICENAME"
|
||||
if [ -n "$DEQP_EXPECTED_RENDERER" -a "x$DEVICENAME" != "x$DEQP_EXPECTED_RENDERER" ]; then
|
||||
echo "Expected deviceName $DEQP_EXPECTED_RENDERER"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
report_load() {
|
||||
echo "System load: $(cut -d' ' -f1-3 < /proc/loadavg)"
|
||||
echo "# of CPU cores: $(cat /proc/cpuinfo | grep processor | wc -l)"
|
||||
@@ -136,47 +227,27 @@ if [ "$GALLIUM_DRIVER" = "virpipe" ]; then
|
||||
fi
|
||||
|
||||
GALLIUM_DRIVER=llvmpipe \
|
||||
GALLIVM_PERF="nopt,no_filter_hacks" \
|
||||
virgl_test_server $VTEST_ARGS >$RESULTS/vtest-log.txt 2>&1 &
|
||||
|
||||
sleep 1
|
||||
fi
|
||||
|
||||
if [ -z "$DEQP_SUITE" ]; then
|
||||
if [ -n "$DEQP_EXPECTED_RENDERER" ]; then
|
||||
export DEQP_RUNNER_OPTIONS="$DEQP_RUNNER_OPTIONS --renderer-check "$DEQP_EXPECTED_RENDERER""
|
||||
fi
|
||||
if [ $DEQP_VER != vk -a $DEQP_VER != egl ]; then
|
||||
export DEQP_RUNNER_OPTIONS="$DEQP_RUNNER_OPTIONS --version-check `cat $INSTALL/VERSION | sed 's/[() ]/./g'`"
|
||||
fi
|
||||
|
||||
deqp-runner \
|
||||
run \
|
||||
--deqp $DEQP \
|
||||
--output $RESULTS \
|
||||
--caselist /tmp/case-list.txt \
|
||||
--skips $INSTALL/all-skips.txt $DEQP_SKIPS \
|
||||
--flakes $INSTALL/$GPU_VERSION-flakes.txt \
|
||||
--testlog-to-xml /deqp/executor/testlog-to-xml \
|
||||
--jobs ${FDO_CI_CONCURRENT:-4} \
|
||||
$DEQP_RUNNER_OPTIONS \
|
||||
-- \
|
||||
$DEQP_OPTIONS
|
||||
if [ $DEQP_VER = vk ]; then
|
||||
quiet check_vk_device_name
|
||||
else
|
||||
deqp-runner \
|
||||
suite \
|
||||
--suite $INSTALL/deqp-$DEQP_SUITE.toml \
|
||||
--output $RESULTS \
|
||||
--skips $INSTALL/all-skips.txt $DEQP_SKIPS \
|
||||
--flakes $INSTALL/$GPU_VERSION-flakes.txt \
|
||||
--testlog-to-xml /deqp/executor/testlog-to-xml \
|
||||
--fraction-start $CI_NODE_INDEX \
|
||||
--fraction $CI_NODE_TOTAL \
|
||||
--jobs ${FDO_CI_CONCURRENT:-4} \
|
||||
$DEQP_RUNNER_OPTIONS
|
||||
quiet check_renderer
|
||||
fi
|
||||
|
||||
RESULTS_CSV=$RESULTS/results.csv
|
||||
FAILURES_CSV=$RESULTS/failures.csv
|
||||
|
||||
export LD_PRELOAD=$TEST_LD_PRELOAD
|
||||
|
||||
run_cts $DEQP /tmp/case-list.txt $RESULTS_CSV
|
||||
DEQP_EXITCODE=$?
|
||||
|
||||
export LD_PRELOAD=
|
||||
quiet report_load
|
||||
|
||||
# Remove all but the first 50 individual XML files uploaded as artifacts, to
|
||||
@@ -192,25 +263,13 @@ find $RESULTS -name \*.xml \
|
||||
-quit
|
||||
|
||||
deqp-runner junit \
|
||||
--testsuite dEQP \
|
||||
--testsuite $DEQP_VER \
|
||||
--results $RESULTS/failures.csv \
|
||||
--output $RESULTS/junit.xml \
|
||||
--limit 50 \
|
||||
--template "See https://$CI_PROJECT_ROOT_NAMESPACE.pages.freedesktop.org/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/results/{{testcase}}.xml"
|
||||
|
||||
# Report the flakes to the IRC channel for monitoring (if configured):
|
||||
if [ -n "$FLAKES_CHANNEL" ]; then
|
||||
python3 $INSTALL/report-flakes.py \
|
||||
--host irc.oftc.net \
|
||||
--port 6667 \
|
||||
--results $RESULTS/results.csv \
|
||||
--known-flakes $INSTALL/$GPU_VERSION-flakes.txt \
|
||||
--channel "$FLAKES_CHANNEL" \
|
||||
--runner "$CI_RUNNER_DESCRIPTION" \
|
||||
--job "$CI_JOB_ID" \
|
||||
--url "$CI_JOB_URL" \
|
||||
--branch "${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME:-$CI_COMMIT_BRANCH}" \
|
||||
--branch-title "${CI_MERGE_REQUEST_TITLE:-$CI_COMMIT_TITLE}"
|
||||
fi
|
||||
quiet report_flakes $RESULTS_CSV
|
||||
|
||||
exit $DEQP_EXITCODE
|
||||
|
49
.gitlab-ci/generate_lava.py
Executable file
@@ -0,0 +1,49 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
import argparse
|
||||
import os
|
||||
import datetime
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("--template")
|
||||
parser.add_argument("--pipeline-info")
|
||||
parser.add_argument("--base-artifacts-url")
|
||||
parser.add_argument("--mesa-url")
|
||||
parser.add_argument("--device-type")
|
||||
parser.add_argument("--dtb", nargs='?', default="")
|
||||
parser.add_argument("--kernel-image-name")
|
||||
parser.add_argument("--kernel-image-type", nargs='?', default="")
|
||||
parser.add_argument("--gpu-version")
|
||||
parser.add_argument("--boot-method")
|
||||
parser.add_argument("--lava-tags", nargs='?', default="")
|
||||
parser.add_argument("--env-vars", nargs='?', default="")
|
||||
parser.add_argument("--deqp-version")
|
||||
parser.add_argument("--ci-node-index")
|
||||
parser.add_argument("--ci-node-total")
|
||||
parser.add_argument("--job-type")
|
||||
args = parser.parse_args()
|
||||
|
||||
env = Environment(loader = FileSystemLoader(os.path.dirname(args.template)), trim_blocks=True, lstrip_blocks=True)
|
||||
template = env.get_template(os.path.basename(args.template))
|
||||
|
||||
env_vars = "%s CI_NODE_INDEX=%s CI_NODE_TOTAL=%s" % (args.env_vars, args.ci_node_index, args.ci_node_total)
|
||||
|
||||
values = {}
|
||||
values['pipeline_info'] = args.pipeline_info
|
||||
values['base_artifacts_url'] = args.base_artifacts_url
|
||||
values['mesa_url'] = args.mesa_url
|
||||
values['device_type'] = args.device_type
|
||||
values['dtb'] = args.dtb
|
||||
values['kernel_image_name'] = args.kernel_image_name
|
||||
values['kernel_image_type'] = args.kernel_image_type
|
||||
values['gpu_version'] = args.gpu_version
|
||||
values['boot_method'] = args.boot_method
|
||||
values['tags'] = args.lava_tags
|
||||
values['env_vars'] = env_vars
|
||||
values['deqp_version'] = args.deqp_version
|
||||
|
||||
f = open(os.path.splitext(os.path.basename(args.template))[0], "w")
|
||||
f.write(template.render(values))
|
||||
f.close()
|
||||
|
@@ -1,70 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -ex
|
||||
|
||||
INSTALL=`pwd`/install
|
||||
|
||||
# Set up the driver environment.
|
||||
export LD_LIBRARY_PATH=`pwd`/install/lib/
|
||||
export LIBVA_DRIVERS_PATH=`pwd`/install/lib/dri/
|
||||
# libva spams driver open info by default, and that happens per testcase.
|
||||
export LIBVA_MESSAGING_LEVEL=1
|
||||
|
||||
if [ -e "$INSTALL/$GPU_VERSION-fails.txt" ]; then
|
||||
GTEST_RUNNER_OPTIONS="$GTEST_RUNNER_OPTIONS --baseline $INSTALL/$GPU_VERSION-fails.txt"
|
||||
fi
|
||||
|
||||
# Default to an empty known flakes file if it doesn't exist.
|
||||
touch $INSTALL/$GPU_VERSION-flakes.txt
|
||||
|
||||
if [ -n "$GALLIUM_DRIVER" ] && [ -e "$INSTALL/$GALLIUM_DRIVER-skips.txt" ]; then
|
||||
GTEST_SKIPS="$GTEST_SKIPS --skips $INSTALL/$GALLIUM_DRIVER-skips.txt"
|
||||
fi
|
||||
|
||||
if [ -n "$DRIVER_NAME" ] && [ -e "$INSTALL/$DRIVER_NAME-skips.txt" ]; then
|
||||
GTEST_SKIPS="$GTEST_SKIPS --skips $INSTALL/$DRIVER_NAME-skips.txt"
|
||||
fi
|
||||
|
||||
if [ -e "$INSTALL/$GPU_VERSION-skips.txt" ]; then
|
||||
GTEST_SKIPS="$GTEST_SKIPS --skips $INSTALL/$GPU_VERSION-skips.txt"
|
||||
fi
|
||||
|
||||
set +e
|
||||
|
||||
gtest-runner \
|
||||
run \
|
||||
--gtest $GTEST \
|
||||
--output ${GTEST_RESULTS_DIR:-results} \
|
||||
--jobs ${FDO_CI_CONCURRENT:-4} \
|
||||
$GTEST_SKIPS \
|
||||
--flakes $INSTALL/$GPU_VERSION-flakes.txt \
|
||||
--fraction-start ${CI_NODE_INDEX:-1} \
|
||||
--fraction $((${CI_NODE_TOTAL:-1} * ${GTEST_FRACTION:-1})) \
|
||||
--env "LD_PRELOAD=$TEST_LD_PRELOAD" \
|
||||
$GTEST_RUNNER_OPTIONS
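# Editor's note (hypothetical numbers, not part of the original script): the
# combined fraction above multiplies the CI parallelism by GTEST_FRACTION, so
# e.g. CI_NODE_TOTAL=4 and GTEST_FRACTION=2 makes each node run every 8th
# test, starting at its own CI_NODE_INDEX.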
|
||||
|
||||
GTEST_EXITCODE=$?
|
||||
|
||||
deqp-runner junit \
|
||||
--testsuite gtest \
|
||||
--results $RESULTS/failures.csv \
|
||||
--output $RESULTS/junit.xml \
|
||||
--limit 50 \
|
||||
--template "See https://$CI_PROJECT_ROOT_NAMESPACE.pages.freedesktop.org/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/results/{{testcase}}.xml"
|
||||
|
||||
# Report the flakes to the IRC channel for monitoring (if configured):
|
||||
if [ -n "$FLAKES_CHANNEL" ]; then
|
||||
python3 $INSTALL/report-flakes.py \
|
||||
--host irc.oftc.net \
|
||||
--port 6667 \
|
||||
--results $RESULTS/results.csv \
|
||||
--known-flakes $INSTALL/$GPU_VERSION-flakes.txt \
|
||||
--channel "$FLAKES_CHANNEL" \
|
||||
--runner "$CI_RUNNER_DESCRIPTION" \
|
||||
--job "$CI_JOB_ID" \
|
||||
--url "$CI_JOB_URL" \
|
||||
--branch "${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME:-$CI_COMMIT_BRANCH}" \
|
||||
--branch-title "${CI_MERGE_REQUEST_TITLE:-$CI_COMMIT_TITLE}"
|
||||
fi
|
||||
|
||||
exit $GTEST_EXITCODE
|
@@ -1,21 +0,0 @@
|
||||
variables:
|
||||
DEBIAN_X86_BUILD_BASE_IMAGE: "debian/x86_build-base"
|
||||
DEBIAN_BASE_TAG: "2022-01-02-deqp-runner"
|
||||
|
||||
DEBIAN_X86_BUILD_IMAGE_PATH: "debian/x86_build"
|
||||
DEBIAN_BUILD_TAG: "2021-12-31-keep-cmake"
|
||||
|
||||
DEBIAN_X86_TEST_BASE_IMAGE: "debian/x86_test-base"
|
||||
|
||||
DEBIAN_X86_TEST_IMAGE_PATH: "debian/x86_test-gl"
|
||||
DEBIAN_X86_TEST_GL_TAG: "2022-01-18-kernel"
|
||||
DEBIAN_X86_TEST_VK_TAG: "2022-01-02-deqp-runner"
|
||||
|
||||
FEDORA_X86_BUILD_TAG: "2021-12-31-refactor"
|
||||
KERNEL_ROOTFS_TAG: "2022-01-18-kernel"
|
||||
|
||||
WINDOWS_X64_BUILD_PATH: "windows/x64_build"
|
||||
WINDOWS_X64_BUILD_TAG: "2022-20-02-base_split"
|
||||
|
||||
WINDOWS_X64_TEST_PATH: "windows/x64_test"
|
||||
WINDOWS_X64_TEST_TAG: "2022-20-02-base_split"
|
178
.gitlab-ci/lava-gitlab-ci.yml
Normal file
@@ -0,0 +1,178 @@
|
||||
.kernel+rootfs:
|
||||
extends:
|
||||
- .ci-run-policy
|
||||
stage: container-2
|
||||
variables:
|
||||
GIT_STRATEGY: fetch
|
||||
DISTRIBUTION_TAG: &distribution-tag-arm "${MESA_ROOTFS_TAG}--${MESA_IMAGE_TAG}--${MESA_TEMPLATES_COMMIT}"
|
||||
script:
|
||||
- .gitlab-ci/container/lava_build.sh
|
||||
|
||||
.kernel+rootfs-lava:
|
||||
extends:
|
||||
- .kernel+rootfs
|
||||
variables:
|
||||
KERNEL_URL: "https://gitlab.freedesktop.org/tomeu/linux/-/archive/v5.10-rc2-for-mesa-ci/linux-v5.10-rc2-for-mesa-ci.tar.gz"
|
||||
INSTALL_KERNEL_MODULES: 1
|
||||
MESA_ROOTFS_TAG: &lava-rootfs "2021-04-14-librenderdoc"
|
||||
MINIO_SUFFIX: "lava"
|
||||
PIGLIT_BUILD_TARGETS: "piglit_replayer"
|
||||
|
||||
kernel+rootfs_amd64:
|
||||
extends:
|
||||
- .use-x86_build-base
|
||||
- .kernel+rootfs-lava
|
||||
image: "$FDO_BASE_IMAGE"
|
||||
variables:
|
||||
DEBIAN_ARCH: "amd64"
|
||||
DISTRIBUTION_TAG: &distribution-tag-amd64 "${MESA_ROOTFS_TAG}--${MESA_BASE_TAG}--${MESA_TEMPLATES_COMMIT}"
|
||||
|
||||
kernel+rootfs_arm64:
|
||||
extends:
|
||||
- .use-arm_build
|
||||
- .kernel+rootfs-lava
|
||||
tags:
|
||||
- aarch64
|
||||
variables:
|
||||
DEBIAN_ARCH: "arm64"
|
||||
|
||||
kernel+rootfs_armhf:
|
||||
extends:
|
||||
- kernel+rootfs_arm64
|
||||
variables:
|
||||
DEBIAN_ARCH: "armhf"
|
||||
|
||||
.lava-test:
|
||||
extends:
|
||||
- .ci-run-policy
|
||||
# Cancel job if a newer commit is pushed to the same branch
|
||||
interruptible: true
|
||||
variables:
|
||||
DISTRIBUTION_TAG: *distribution-tag-arm
|
||||
GIT_STRATEGY: none # testing doesn't build anything from source
|
||||
ENV_VARS: "DEQP_PARALLEL=6"
|
||||
FIXED_ENV_VARS: "CI_PIPELINE_ID=${CI_PIPELINE_ID} CI_JOB_ID=${CI_JOB_ID} CI_PAGES_DOMAIN=${CI_PAGES_DOMAIN} CI_PROJECT_NAME=${CI_PROJECT_NAME} CI_PROJECT_PATH=${CI_PROJECT_PATH} CI_PROJECT_ROOT_NAMESPACE=${CI_PROJECT_ROOT_NAMESPACE} CI_JOB_JWT=${CI_JOB_JWT} CI_SERVER_URL=${CI_SERVER_URL} DRIVER_NAME=${DRIVER_NAME} FDO_UPSTREAM_REPO=${FDO_UPSTREAM_REPO} PIGLIT_NO_WINDOW=1 PIGLIT_REPLAY_UPLOAD_TO_MINIO=1 MINIO_HOST=${MINIO_HOST} LAVA_TEST_SCRIPT=${LAVA_TEST_SCRIPT} TEST_SUITE=${TEST_SUITE}"
|
||||
DEQP_VERSION: gles2
|
||||
ARTIFACTS_PREFIX: "https://${MINIO_HOST}/mesa-lava/"
|
||||
MESA_ROOTFS_TAG: *lava-rootfs
|
||||
MESA_URL: "https://${MINIO_HOST}/artifacts/${CI_PROJECT_PATH}/${CI_PIPELINE_ID}/mesa-${ARCH}.tar.gz"
|
||||
script:
|
||||
# Try to use the kernel and rootfs built in mainline first, to save cycles
|
||||
- >
|
||||
if wget -q --method=HEAD "${ARTIFACTS_PREFIX}/${FDO_UPSTREAM_REPO}/lava/${DISTRIBUTION_TAG}/${ARCH}/done"; then
|
||||
ARTIFACTS_URL="${ARTIFACTS_PREFIX}/${FDO_UPSTREAM_REPO}/lava/${DISTRIBUTION_TAG}/${ARCH}"
|
||||
else
|
||||
ARTIFACTS_URL="${ARTIFACTS_PREFIX}/${CI_PROJECT_PATH}/lava/${DISTRIBUTION_TAG}/${ARCH}"
|
||||
fi
|
||||
- >
|
||||
artifacts/generate_lava.py \
|
||||
--template artifacts/lava.yml.jinja2 \
|
||||
--pipeline-info "$CI_PIPELINE_URL on $CI_COMMIT_REF_NAME ${CI_NODE_INDEX}/${CI_NODE_TOTAL}" \
|
||||
--base-artifacts-url ${ARTIFACTS_URL} \
|
||||
--mesa-url ${MESA_URL} \
|
||||
--device-type ${DEVICE_TYPE} \
|
||||
--dtb ${DTB} \
|
||||
--env-vars "${ENV_VARS} ${FIXED_ENV_VARS}" \
|
||||
--deqp-version ${DEQP_VERSION} \
|
||||
--kernel-image-name ${KERNEL_IMAGE_NAME} \
|
||||
--kernel-image-type "${KERNEL_IMAGE_TYPE}" \
|
||||
--gpu-version ${GPU_VERSION} \
|
||||
--boot-method ${BOOT_METHOD} \
|
||||
--lava-tags "${LAVA_TAGS}" \
|
||||
--ci-node-index "${CI_NODE_INDEX}" \
|
||||
--ci-node-total "${CI_NODE_TOTAL}"
|
||||
- lava_job_id=`lavacli jobs submit lava.yml` || lavacli jobs submit lava.yml
|
||||
- echo $lava_job_id
|
||||
- rm -rf artifacts/*
|
||||
- cp lava.yml artifacts/.
|
||||
- lavacli jobs logs $lava_job_id | tee artifacts/lava-$lava_job_id.log
|
||||
- lavacli jobs show $lava_job_id
|
||||
- result=`lavacli results $lava_job_id 0_mesa mesa | head -1`
|
||||
- echo $result
|
||||
- '[[ "$result" == "pass" ]]'
|
||||
artifacts:
|
||||
name: "mesa_${CI_JOB_NAME}"
|
||||
when: always
|
||||
paths:
|
||||
- artifacts/
|
||||
|
||||
.lava-test:armhf:
|
||||
variables:
|
||||
ARCH: armhf
|
||||
KERNEL_IMAGE_NAME: zImage
|
||||
KERNEL_IMAGE_TYPE: "type:\ zimage"
|
||||
BOOT_METHOD: u-boot
|
||||
TEST_SUITE: "deqp"
|
||||
LAVA_TEST_SCRIPT: "/install/deqp-runner.sh"
|
||||
extends:
|
||||
- .lava-test
|
||||
- .use-arm_build
|
||||
needs:
|
||||
- kernel+rootfs_armhf
|
||||
- meson-armhf
|
||||
|
||||
.lava-test:arm64:
|
||||
variables:
|
||||
ARCH: arm64
|
||||
KERNEL_IMAGE_NAME: Image
|
||||
KERNEL_IMAGE_TYPE: "type:\ image"
|
||||
BOOT_METHOD: u-boot
|
||||
TEST_SUITE: "deqp"
|
||||
LAVA_TEST_SCRIPT: "/install/deqp-runner.sh"
|
||||
extends:
|
||||
- .lava-test
|
||||
- .use-arm_build
|
||||
dependencies:
|
||||
- meson-arm64
|
||||
needs:
|
||||
- kernel+rootfs_arm64
|
||||
- meson-arm64
|
||||
|
||||
.lava-test:amd64:
|
||||
variables:
|
||||
ARCH: amd64
|
||||
DISTRIBUTION_TAG: *distribution-tag-amd64
|
||||
KERNEL_IMAGE_NAME: bzImage
|
||||
KERNEL_IMAGE_TYPE: "type:\ zimage"
|
||||
BOOT_METHOD: u-boot
|
||||
TEST_SUITE: "deqp"
|
||||
LAVA_TEST_SCRIPT: "/install/deqp-runner.sh"
|
||||
extends:
|
||||
- .use-x86_build-base # for same $MESA_BASE_TAG as in kernel+rootfs_amd64
|
||||
- .use-arm_build # ARM because it must match the architecture of the runner
|
||||
- .lava-test
|
||||
needs:
|
||||
- kernel+rootfs_amd64
|
||||
- arm_build # ARM because it must match the architecture of the runner
|
||||
- meson-testing
|
||||
|
||||
.lava-traces-base:
|
||||
after_script:
|
||||
- mkdir -p artifacts
|
||||
- wget -O "artifacts/junit.xml" "https://minio-packet.freedesktop.org/artifacts/${CI_PROJECT_PATH}/${CI_PIPELINE_ID}/${CI_JOB_ID}/traces/junit.xml"
|
||||
artifacts:
|
||||
reports:
|
||||
junit: artifacts/junit.xml
|
||||
|
||||
.lava-piglit:
|
||||
variables:
|
||||
TEST_SUITE: "piglit"
|
||||
LAVA_TEST_SCRIPT: "/install/piglit/run.sh"
|
||||
|
||||
.lava-piglit-traces:amd64:
|
||||
extends:
|
||||
- .lava-test:amd64
|
||||
- .lava-piglit
|
||||
- .lava-traces-base
|
||||
|
||||
.lava-piglit-traces:armhf:
|
||||
extends:
|
||||
- .lava-test:armhf
|
||||
- .lava-piglit
|
||||
- .lava-traces-base
|
||||
|
||||
.lava-piglit-traces:arm64:
|
||||
extends:
|
||||
- .lava-test:arm64
|
||||
- .lava-piglit
|
||||
- .lava-traces-base
|
120
.gitlab-ci/lava.yml.jinja2
Normal file
@@ -0,0 +1,120 @@
|
||||
job_name: mesa-{{ test_suite }}-{{ deqp_version }}-{{ gpu_version }} {{ pipeline_info }}
|
||||
device_type: {{ device_type }}
|
||||
context:
|
||||
extra_nfsroot_args: " init=/init rootwait"
|
||||
timeouts:
|
||||
job:
|
||||
minutes: 30
|
||||
priority: 75
|
||||
visibility:
|
||||
group:
|
||||
- "Collabora+fdo"
|
||||
{% if tags %}
|
||||
{% set lavatags = tags.split(',') %}
|
||||
tags:
|
||||
{% for tag in lavatags %}
|
||||
- {{ tag }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
actions:
|
||||
- deploy:
|
||||
timeout:
|
||||
minutes: 10
|
||||
to: tftp
|
||||
kernel:
|
||||
url: {{ base_artifacts_url }}/{{ kernel_image_name }}
|
||||
{% if kernel_image_type %}
|
||||
{{ kernel_image_type }}
|
||||
{% endif %}
|
||||
nfsrootfs:
|
||||
url: {{ base_artifacts_url }}/lava-rootfs.tgz
|
||||
compression: gz
|
||||
{% if dtb %}
|
||||
dtb:
|
||||
url: {{ base_artifacts_url }}/{{ dtb }}.dtb
|
||||
{% endif %}
|
||||
os: oe
|
||||
- boot:
|
||||
timeout:
|
||||
minutes: 25
|
||||
method: {{ boot_method }}
|
||||
{% if boot_method == "fastboot" %}
|
||||
{#
|
||||
For fastboot, LAVA doesn't know how to unpack the rootfs/apply overlay/repack,
|
||||
so we transfer the overlay over the network after boot.
|
||||
#}
|
||||
transfer_overlay:
|
||||
download_command: wget -S --progress=dot:giga
|
||||
unpack_command: tar -C / -xzf
|
||||
{% else %}
|
||||
commands: nfs
|
||||
{% endif %}
|
||||
prompts:
|
||||
- 'lava-shell:'
|
||||
- test:
|
||||
timeout:
|
||||
minutes: 30
|
||||
failure_retry: 1
|
||||
definitions:
|
||||
- repository:
|
||||
metadata:
|
||||
format: Lava-Test Test Definition 1.0
|
||||
name: mesa
|
||||
description: "Mesa test plan"
|
||||
os:
|
||||
- oe
|
||||
scope:
|
||||
- functional
|
||||
run:
|
||||
steps:
|
||||
- mount -t proc none /proc
|
||||
- mount -t sysfs none /sys
|
||||
- mount -t devtmpfs none /dev || echo possibly already mounted
|
||||
- mkdir -p /dev/pts
|
||||
- mount -t devpts devpts /dev/pts
|
||||
- mkdir -p /dev/shm
|
||||
- mount -t tmpfs tmpfs /dev/shm
|
||||
- echo "nameserver 8.8.8.8" > /etc/resolv.conf
|
||||
- for i in 1 2 3; do sntp -sS pool.ntp.org && break || sleep 2; done
|
||||
|
||||
- modprobe amdgpu || true
|
||||
|
||||
- DEVFREQ_GOVERNOR=`find /sys/devices -name governor | grep gpu || true`
|
||||
- echo performance > $DEVFREQ_GOVERNOR || true
|
||||
|
||||
- GPU_AUTOSUSPEND=`find /sys/devices -name autosuspend_delay_ms | grep gpu | head -1`
|
||||
- echo -1 > $GPU_AUTOSUSPEND || true
|
||||
|
||||
{% if env_vars %}
|
||||
- export {{ env_vars }}
|
||||
{% endif %}
|
||||
|
||||
# runner script assumes some stuff is in pwd
|
||||
- cd /
|
||||
|
||||
- wget -S --progress=dot:giga -O- {{ mesa_url }} | tar -xz
|
||||
|
||||
- export DEQP_NO_SAVE_RESULTS=1
|
||||
- export GPU_VERSION={{ gpu_version }}
|
||||
- export DEQP_VER={{ deqp_version }}
|
||||
|
||||
- export PYTHONPATH=$(python3 -c "import sys;print(\":\".join(sys.path))")
|
||||
- export PIGLIT_REPLAY_EXTRA_ARGS="--keep-image"
|
||||
- export PIGLIT_REPLAY_REFERENCE_IMAGES_BASE_URL="/mesa-tracie-results/${CI_PROJECT_PATH}"
|
||||
- export PIGLIT_REPLAY_ARTIFACTS_BASE_URL="/artifacts/${CI_PROJECT_PATH}/${CI_PIPELINE_ID}/${CI_JOB_ID}"
|
||||
- export PIGLIT_REPLAY_DESCRIPTION_FILE="/install/traces-${DRIVER_NAME}.yml"
|
||||
- export PIGLIT_REPLAY_DEVICE_NAME=gl-{{ gpu_version }}
|
||||
- export PIGLIT_RESULTS={{ gpu_version }}-${PIGLIT_PROFILES}
|
||||
|
||||
- export LIBGL_DRIVERS_PATH=`pwd`/install/lib/dri
|
||||
|
||||
- "if sh $LAVA_TEST_SCRIPT; then
|
||||
echo 'mesa: pass';
|
||||
else
|
||||
echo 'mesa: fail';
|
||||
fi"
|
||||
parse:
|
||||
pattern: '(?P<test_case_id>\S*):\s+(?P<result>(pass|fail))'
|
||||
from: inline
|
||||
name: mesa
|
||||
path: inline/mesa.yaml
|
@@ -1,120 +0,0 @@
|
||||
.lava-test:
|
||||
extends:
|
||||
- .ci-run-policy
|
||||
# Cancel job if a newer commit is pushed to the same branch
|
||||
interruptible: true
|
||||
variables:
|
||||
GIT_STRATEGY: none # testing doesn't build anything from source
|
||||
FDO_CI_CONCURRENT: 6 # should be replaced by per-machine definitions
|
||||
DEQP_VER: gles2
|
||||
# proxy used to cache data locally
|
||||
FDO_HTTP_CACHE_URI: "http://caching-proxy/cache/?uri="
|
||||
# base system generated by the container build job, shared between many pipelines
|
||||
BASE_SYSTEM_HOST_PREFIX: "${MINIO_HOST}/mesa-lava"
|
||||
BASE_SYSTEM_MAINLINE_HOST_PATH: "${BASE_SYSTEM_HOST_PREFIX}/${FDO_UPSTREAM_REPO}/${DISTRIBUTION_TAG}/${ARCH}"
|
||||
BASE_SYSTEM_FORK_HOST_PATH: "${BASE_SYSTEM_HOST_PREFIX}/${CI_PROJECT_PATH}/${DISTRIBUTION_TAG}/${ARCH}"
|
||||
# per-job build artifacts
|
||||
MESA_BUILD_PATH: "${PIPELINE_ARTIFACTS_BASE}/mesa-${ARCH}.tar.gz"
|
||||
JOB_ROOTFS_OVERLAY_PATH: "${JOB_ARTIFACTS_BASE}/job-rootfs-overlay.tar.gz"
|
||||
JOB_RESULTS_PATH: "${JOB_ARTIFACTS_BASE}/results.tar.gz"
|
||||
PIGLIT_NO_WINDOW: 1
|
||||
VISIBILITY_GROUP: "Collabora+fdo"
|
||||
script:
|
||||
- ./artifacts/lava/lava-submit.sh
|
||||
artifacts:
|
||||
name: "mesa_${CI_JOB_NAME}"
|
||||
when: always
|
||||
paths:
|
||||
- results/
|
||||
exclude:
|
||||
- results/*.shader_cache
|
||||
after_script:
|
||||
- wget -q "https://${JOB_RESULTS_PATH}" -O- | tar -xz
|
||||
|
||||
.lava-test:armhf:
|
||||
variables:
|
||||
ARCH: armhf
|
||||
KERNEL_IMAGE_NAME: zImage
|
||||
KERNEL_IMAGE_TYPE: "zimage"
|
||||
BOOT_METHOD: u-boot
|
||||
HWCI_TEST_SCRIPT: "/install/deqp-runner.sh"
|
||||
extends:
|
||||
- .use-debian/arm_build # for same $MESA_ARTIFACTS_TAG as in kernel+rootfs_armhf
|
||||
- .use-debian/x86_build
|
||||
- .lava-test
|
||||
- .use-kernel+rootfs-arm
|
||||
needs:
|
||||
- kernel+rootfs_armhf
|
||||
- debian/x86_build
|
||||
- debian-armhf
|
||||
|
||||
.lava-test:arm64:
|
||||
variables:
|
||||
ARCH: arm64
|
||||
KERNEL_IMAGE_NAME: Image
|
||||
KERNEL_IMAGE_TYPE: "image"
|
||||
BOOT_METHOD: u-boot
|
||||
HWCI_TEST_SCRIPT: "/install/deqp-runner.sh"
|
||||
extends:
|
||||
- .use-debian/arm_build # for same $MESA_ARTIFACTS_TAG as in kernel+rootfs_arm64
|
||||
- .use-debian/x86_build
|
||||
- .lava-test
|
||||
- .use-kernel+rootfs-arm
|
||||
dependencies:
|
||||
- debian-arm64
|
||||
needs:
|
||||
- kernel+rootfs_arm64
|
||||
- debian/x86_build
|
||||
- debian-arm64
|
||||
|
||||
.lava-test:amd64:
|
||||
variables:
|
||||
ARCH: amd64
|
||||
KERNEL_IMAGE_NAME: bzImage
|
||||
KERNEL_IMAGE_TYPE: "zimage"
|
||||
BOOT_METHOD: u-boot
|
||||
HWCI_TEST_SCRIPT: "/install/deqp-runner.sh"
|
||||
extends:
|
||||
- .use-debian/x86_build-base # for same $MESA_ARTIFACTS_BASE_TAG as in kernel+rootfs_amd64
|
||||
- .use-debian/x86_build
|
||||
- .lava-test
|
||||
- .use-kernel+rootfs-amd64
|
||||
needs:
|
||||
- kernel+rootfs_amd64
|
||||
- debian-testing
|
||||
|
||||
.lava-traces-base:
|
||||
variables:
|
||||
HWCI_TEST_SCRIPT: "/install/piglit/piglit-traces.sh"
|
||||
artifacts:
|
||||
reports:
|
||||
junit: results/junit.xml
|
||||
|
||||
.lava-piglit:
|
||||
variables:
|
||||
PIGLIT_REPLAY_DEVICE_NAME: "gl-${GPU_VERSION}"
|
||||
PIGLIT_RESULTS: "${GPU_VERSION}-${PIGLIT_PROFILES}"
|
||||
HWCI_TEST_SCRIPT: "/install/piglit/piglit-runner.sh"
|
||||
|
||||
.lava-piglit-traces:amd64:
|
||||
extends:
|
||||
- .lava-test:amd64
|
||||
- .lava-piglit
|
||||
- .lava-traces-base
|
||||
|
||||
.lava-piglit-traces:armhf:
|
||||
extends:
|
||||
- .lava-test:armhf
|
||||
- .lava-piglit
|
||||
- .lava-traces-base
|
||||
|
||||
.lava-piglit-traces:arm64:
|
||||
extends:
|
||||
- .lava-test:arm64
|
||||
- .lava-piglit
|
||||
- .lava-traces-base
|
||||
|
||||
.lava-piglit:amd64:
|
||||
extends:
|
||||
- .lava-test:amd64
|
||||
- .lava-piglit
|
@@ -1,44 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
set -x
|
||||
|
||||
# Try to use the kernel and rootfs built in mainline first, so we're more
|
||||
# likely to hit cache
|
||||
if wget -q --method=HEAD "https://${BASE_SYSTEM_MAINLINE_HOST_PATH}/done"; then
|
||||
BASE_SYSTEM_HOST_PATH="${BASE_SYSTEM_MAINLINE_HOST_PATH}"
|
||||
else
|
||||
BASE_SYSTEM_HOST_PATH="${BASE_SYSTEM_FORK_HOST_PATH}"
|
||||
fi
|
||||
|
||||
rm -rf results
|
||||
mkdir -p results/job-rootfs-overlay/
|
||||
|
||||
cp artifacts/ci-common/capture-devcoredump.sh results/job-rootfs-overlay/
|
||||
cp artifacts/ci-common/init-*.sh results/job-rootfs-overlay/
|
||||
artifacts/ci-common/generate-env.sh > results/job-rootfs-overlay/set-job-env-vars.sh
|
||||
|
||||
tar zcf job-rootfs-overlay.tar.gz -C results/job-rootfs-overlay/ .
|
||||
ci-fairy minio login --token-file "${CI_JOB_JWT_FILE}"
|
||||
ci-fairy minio cp job-rootfs-overlay.tar.gz "minio://${JOB_ROOTFS_OVERLAY_PATH}"
|
||||
|
||||
touch results/lava.log
|
||||
tail -f results/lava.log &
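# Editor's note (not part of the original script): the submitter below
# appends to results/lava.log, and this backgrounded tail streams it into
# the GitLab job output as it grows.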
|
||||
artifacts/lava/lava_job_submitter.py \
|
||||
--dump-yaml \
|
||||
--pipeline-info "$CI_JOB_NAME: $CI_PIPELINE_URL on $CI_COMMIT_REF_NAME ${CI_NODE_INDEX}/${CI_NODE_TOTAL}" \
|
||||
--base-system-url-prefix "https://${BASE_SYSTEM_HOST_PATH}" \
|
||||
--mesa-build-url "${FDO_HTTP_CACHE_URI:-}https://${MESA_BUILD_PATH}" \
|
||||
--job-rootfs-overlay-url "${FDO_HTTP_CACHE_URI:-}https://${JOB_ROOTFS_OVERLAY_PATH}" \
|
||||
--job-artifacts-base ${JOB_ARTIFACTS_BASE} \
|
||||
--job-timeout ${JOB_TIMEOUT:-30} \
|
||||
--first-stage-init artifacts/ci-common/init-stage1.sh \
|
||||
--ci-project-dir ${CI_PROJECT_DIR} \
|
||||
--device-type ${DEVICE_TYPE} \
|
||||
--dtb ${DTB} \
|
||||
--jwt-file "${CI_JOB_JWT_FILE}" \
|
||||
--kernel-image-name ${KERNEL_IMAGE_NAME} \
|
||||
--kernel-image-type "${KERNEL_IMAGE_TYPE}" \
|
||||
--boot-method ${BOOT_METHOD} \
|
||||
--visibility-group ${VISIBILITY_GROUP} \
|
||||
--lava-tags "${LAVA_TAGS}" >> results/lava.log
|
@@ -1,373 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright (C) 2020, 2021 Collabora Limited
|
||||
# Author: Gustavo Padovan <gustavo.padovan@collabora.com>
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a
|
||||
# copy of this software and associated documentation files (the "Software"),
|
||||
# to deal in the Software without restriction, including without limitation
|
||||
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
||||
# and/or sell copies of the Software, and to permit persons to whom the
|
||||
# Software is furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice (including the next
|
||||
# paragraph) shall be included in all copies or substantial portions of the
|
||||
# Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
||||
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
"""Send a job to LAVA, track it and collect log back"""
|
||||
|
||||
import argparse
|
||||
import pathlib
|
||||
import sys
|
||||
import time
|
||||
import traceback
|
||||
import urllib.parse
|
||||
import xmlrpc
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import lavacli
|
||||
import yaml
|
||||
from lavacli.utils import loader
|
||||
|
||||
# Timeout in minutes to decide if the device from the dispatched LAVA job has
|
||||
# hung or not due to the lack of new log output.
|
||||
DEVICE_HANGING_TIMEOUT_MIN = 5
|
||||
|
||||
# How many seconds the script should wait before trying a new polling iteration to
|
||||
# check if the dispatched LAVA job is running or waiting in the job queue.
|
||||
WAIT_FOR_DEVICE_POLLING_TIME_SEC = 10
|
||||
|
||||
# How many seconds to wait between log output LAVA RPC calls.
|
||||
LOG_POLLING_TIME_SEC = 5
|
||||
|
||||
# How many retries should be made when a timeout happens.
|
||||
NUMBER_OF_RETRIES_TIMEOUT_DETECTION = 2
|
||||
|
||||
|
||||
def print_log(msg):
|
||||
print("{}: {}".format(datetime.now(), msg))
|
||||
|
||||
def fatal_err(msg):
|
||||
print_log(msg)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def hide_sensitive_data(yaml_data, hide_tag="HIDEME"):
|
||||
out_data = ""
|
||||
|
||||
for line in yaml_data.splitlines(True):
|
||||
if hide_tag in line:
|
||||
continue
|
||||
out_data += line
|
||||
|
||||
return out_data
|
||||
|
||||
|
||||
def generate_lava_yaml(args):
|
||||
# General metadata and permissions, plus (inexplicably) kernel arguments
|
||||
values = {
|
||||
'job_name': 'mesa: {}'.format(args.pipeline_info),
|
||||
'device_type': args.device_type,
|
||||
'visibility': { 'group': [ args.visibility_group ] },
|
||||
'priority': 75,
|
||||
'context': {
|
||||
'extra_nfsroot_args': ' init=/init rootwait minio_results={}'.format(args.job_artifacts_base)
|
||||
},
|
||||
'timeouts': {
|
||||
'job': {
|
||||
'minutes': args.job_timeout
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
if args.lava_tags:
|
||||
values['tags'] = args.lava_tags.split(',')
|
||||
|
||||
# URLs to our kernel rootfs to boot from, both generated by the base
|
||||
# container build
|
||||
deploy = {
|
||||
'timeout': { 'minutes': 10 },
|
||||
'to': 'tftp',
|
||||
'os': 'oe',
|
||||
'kernel': {
|
||||
'url': '{}/{}'.format(args.base_system_url_prefix, args.kernel_image_name),
|
||||
},
|
||||
'nfsrootfs': {
|
||||
'url': '{}/lava-rootfs.tgz'.format(args.base_system_url_prefix),
|
||||
'compression': 'gz',
|
||||
}
|
||||
}
|
||||
if args.kernel_image_type:
|
||||
deploy['kernel']['type'] = args.kernel_image_type
|
||||
if args.dtb:
|
||||
deploy['dtb'] = {
|
||||
'url': '{}/{}.dtb'.format(args.base_system_url_prefix, args.dtb)
|
||||
}
|
||||
|
||||
# always boot over NFS
|
||||
boot = {
|
||||
'timeout': { 'minutes': 25 },
|
||||
'method': args.boot_method,
|
||||
'commands': 'nfs',
|
||||
'prompts': ['lava-shell:'],
|
||||
}
|
||||
|
||||
# skeleton test definition: only declaring each job as a single 'test'
|
||||
# since LAVA's test parsing is not useful to us
|
||||
test = {
|
||||
'timeout': { 'minutes': args.job_timeout },
|
||||
'failure_retry': 1,
|
||||
'definitions': [ {
|
||||
'name': 'mesa',
|
||||
'from': 'inline',
|
||||
'path': 'inline/mesa.yaml',
|
||||
'repository': {
|
||||
'metadata': {
|
||||
'name': 'mesa',
|
||||
'description': 'Mesa test plan',
|
||||
'os': [ 'oe' ],
|
||||
'scope': [ 'functional' ],
|
||||
'format': 'Lava-Test Test Definition 1.0',
|
||||
},
|
||||
'parse': {
|
||||
'pattern': r'hwci: (?P<test_case_id>\S*):\s+(?P<result>(pass|fail))'
|
||||
},
|
||||
'run': {
|
||||
},
|
||||
},
|
||||
} ],
|
||||
}
|
||||
|
||||
# job execution script:
|
||||
# - inline .gitlab-ci/common/init-stage1.sh
|
||||
# - fetch and unpack per-pipeline build artifacts from build job
|
||||
# - fetch and unpack per-job environment from lava-submit.sh
|
||||
# - exec .gitlab-ci/common/init-stage2.sh
|
||||
init_lines = []
|
||||
|
||||
with open(args.first_stage_init, 'r') as init_sh:
|
||||
init_lines += [ x.rstrip() for x in init_sh if not x.startswith('#') and x.rstrip() ]
|
||||
|
||||
with open(args.jwt_file) as jwt_file:
|
||||
init_lines += [
|
||||
"set +x",
|
||||
f'echo -n "{jwt_file.read()}" > "{args.jwt_file}" # HIDEME',
|
||||
"set -x",
|
||||
]
|
||||
|
||||
init_lines += [
|
||||
'mkdir -p {}'.format(args.ci_project_dir),
|
||||
'wget -S --progress=dot:giga -O- {} | tar -xz -C {}'.format(args.mesa_build_url, args.ci_project_dir),
|
||||
'wget -S --progress=dot:giga -O- {} | tar -xz -C /'.format(args.job_rootfs_overlay_url),
|
||||
f'echo "export CI_JOB_JWT_FILE={args.jwt_file}" >> /set-job-env-vars.sh',
|
||||
'exec /init-stage2.sh',
|
||||
]
|
||||
test['definitions'][0]['repository']['run']['steps'] = init_lines
|
||||
|
||||
values['actions'] = [
|
||||
{ 'deploy': deploy },
|
||||
{ 'boot': boot },
|
||||
{ 'test': test },
|
||||
]
|
||||
|
||||
return yaml.dump(values, width=10000000)
|
||||
|
||||
|
||||
def setup_lava_proxy():
|
||||
config = lavacli.load_config("default")
|
||||
uri, usr, tok = (config.get(key) for key in ("uri", "username", "token"))
|
||||
uri_obj = urllib.parse.urlparse(uri)
|
||||
uri_str = "{}://{}:{}@{}{}".format(uri_obj.scheme, usr, tok, uri_obj.netloc, uri_obj.path)
|
||||
transport = lavacli.RequestsTransport(
|
||||
uri_obj.scheme,
|
||||
config.get("proxy"),
|
||||
config.get("timeout", 120.0),
|
||||
config.get("verify_ssl_cert", True),
|
||||
)
|
||||
proxy = xmlrpc.client.ServerProxy(
|
||||
uri_str, allow_none=True, transport=transport)
|
||||
|
||||
print_log("Proxy for {} created.".format(config['uri']))
|
||||
|
||||
return proxy
|
||||
|
||||
|
||||
def _call_proxy(fn, *args):
|
||||
retries = 60
|
||||
for n in range(1, retries + 1):
|
||||
try:
|
||||
return fn(*args)
|
||||
except xmlrpc.client.ProtocolError as err:
|
||||
if n == retries:
|
||||
traceback.print_exc()
|
||||
fatal_err("A protocol error occurred (Err {} {})".format(err.errcode, err.errmsg))
|
||||
else:
|
||||
time.sleep(15)
|
||||
pass
|
||||
except xmlrpc.client.Fault as err:
|
||||
traceback.print_exc()
|
||||
fatal_err("FATAL: Fault: {} (code: {})".format(err.faultString, err.faultCode))
|
||||
|
||||
|
||||
def get_job_results(proxy, job_id, test_suite, test_case):
|
||||
# Look for infrastructure errors and retry if we see them.
|
||||
results_yaml = _call_proxy(proxy.results.get_testjob_results_yaml, job_id)
|
||||
results = yaml.load(results_yaml, Loader=loader(False))
|
||||
for res in results:
|
||||
metadata = res['metadata']
|
||||
if not 'result' in metadata or metadata['result'] != 'fail':
|
||||
continue
|
||||
if 'error_type' in metadata and metadata['error_type'] == "Infrastructure":
|
||||
print_log("LAVA job {} failed with Infrastructure Error. Retry.".format(job_id))
|
||||
return False
|
||||
if 'case' in metadata and metadata['case'] == "validate":
|
||||
print_log("LAVA job {} failed validation (possible download error). Retry.".format(job_id))
|
||||
return False
|
||||
|
||||
results_yaml = _call_proxy(proxy.results.get_testcase_results_yaml, job_id, test_suite, test_case)
|
||||
results = yaml.load(results_yaml, Loader=loader(False))
|
||||
if not results:
|
||||
fatal_err("LAVA: no result for test_suite '{}', test_case '{}'".format(test_suite, test_case))
|
||||
|
||||
print_log("LAVA: result for test_suite '{}', test_case '{}': {}".format(test_suite, test_case, results[0]['result']))
|
||||
if results[0]['result'] != 'pass':
|
||||
fatal_err("FAIL")
|
||||
|
||||
return True
|
||||
|
||||
def wait_until_job_is_started(proxy, job_id):
|
||||
print_log(f"Waiting for job {job_id} to start.")
|
||||
current_state = "Submitted"
|
||||
waiting_states = ["Submitted", "Scheduling", "Scheduled"]
|
||||
while current_state in waiting_states:
|
||||
job_state = _call_proxy(proxy.scheduler.job_state, job_id)
|
||||
current_state = job_state["job_state"]
|
||||
|
||||
time.sleep(WAIT_FOR_DEVICE_POLLING_TIME_SEC)
|
||||
print_log(f"Job {job_id} started.")
|
||||
|
||||
def follow_job_execution(proxy, job_id):
|
||||
line_count = 0
|
||||
finished = False
|
||||
last_time_logs = datetime.now()
|
||||
while not finished:
|
||||
(finished, data) = _call_proxy(proxy.scheduler.jobs.logs, job_id, line_count)
|
||||
logs = yaml.load(str(data), Loader=loader(False))
|
||||
if logs:
|
||||
# Reset the timeout
|
||||
last_time_logs = datetime.now()
|
||||
for line in logs:
|
||||
print("{} {}".format(line["dt"], line["msg"]))
|
||||
|
||||
line_count += len(logs)
|
||||
|
||||
else:
|
||||
time_limit = timedelta(minutes=DEVICE_HANGING_TIMEOUT_MIN)
|
||||
if datetime.now() - last_time_logs > time_limit:
|
||||
print_log("LAVA job {} doesn't advance (machine got hung?). Retry.".format(job_id))
|
||||
return False
|
||||
|
||||
# `proxy.scheduler.jobs.logs` does not block, even when there is no
|
||||
# new log to be fetched. To avoid DoSing the LAVA dispatcher
|
||||
# machine, let's add a sleep to save them some stamina.
|
||||
time.sleep(LOG_POLLING_TIME_SEC)
|
||||
|
||||
return True
|
||||
|
||||
def show_job_data(proxy, job_id):
|
||||
show = _call_proxy(proxy.scheduler.jobs.show, job_id)
|
||||
for field, value in show.items():
|
||||
print("{}\t: {}".format(field, value))
|
||||
|
||||
|
||||
def validate_job(proxy, job_file):
|
||||
try:
|
||||
return _call_proxy(proxy.scheduler.jobs.validate, job_file, True)
|
||||
except:
|
||||
return False
|
||||
|
||||
def submit_job(proxy, job_file):
|
||||
return _call_proxy(proxy.scheduler.jobs.submit, job_file)
|
||||
|
||||
|
||||
def main(args):
|
||||
proxy = setup_lava_proxy()
|
||||
|
||||
yaml_file = generate_lava_yaml(args)
|
||||
|
||||
if args.dump_yaml:
|
||||
print(hide_sensitive_data(generate_lava_yaml(args)))
|
||||
|
||||
if args.validate_only:
|
||||
ret = validate_job(proxy, yaml_file)
|
||||
if not ret:
|
||||
fatal_err("Error in LAVA job definition")
|
||||
print("LAVA job definition validated successfully")
|
||||
return
|
||||
|
||||
retry_count = NUMBER_OF_RETRIES_TIMEOUT_DETECTION
|
||||
|
||||
while retry_count >= 0:
|
||||
job_id = submit_job(proxy, yaml_file)
|
||||
|
||||
print_log("LAVA job id: {}".format(job_id))
|
||||
|
||||
wait_until_job_is_started(proxy, job_id)
|
||||
|
||||
if not follow_job_execution(proxy, job_id):
|
||||
print_log(f"Job {job_id} has timed out. Cancelling it.")
|
||||
# Cancel the job as it is considered unreachable by Mesa CI.
|
||||
proxy.scheduler.jobs.cancel(job_id)
|
||||
|
||||
retry_count -= 1
|
||||
continue
|
||||
|
||||
show_job_data(proxy, job_id)
|
||||
|
||||
if get_job_results(proxy, job_id, "0_mesa", "mesa") == True:
|
||||
break
|
||||
|
||||
def create_parser():
|
||||
parser = argparse.ArgumentParser("LAVA job submitter")
|
||||
|
||||
parser.add_argument("--pipeline-info")
|
||||
parser.add_argument("--base-system-url-prefix")
|
||||
parser.add_argument("--mesa-build-url")
|
||||
parser.add_argument("--job-rootfs-overlay-url")
|
||||
parser.add_argument("--job-artifacts-base")
|
||||
parser.add_argument("--job-timeout", type=int)
|
||||
parser.add_argument("--first-stage-init")
|
||||
parser.add_argument("--ci-project-dir")
|
||||
parser.add_argument("--device-type")
|
||||
parser.add_argument("--dtb", nargs='?', default="")
|
||||
parser.add_argument("--kernel-image-name")
|
||||
parser.add_argument("--kernel-image-type", nargs='?', default="")
|
||||
parser.add_argument("--boot-method")
|
||||
parser.add_argument("--lava-tags", nargs='?', default="")
|
||||
parser.add_argument("--jwt-file", type=pathlib.Path)
|
||||
parser.add_argument("--validate-only", action='store_true')
|
||||
parser.add_argument("--dump-yaml", action='store_true')
|
||||
parser.add_argument("--visibility-group")
|
||||
|
||||
return parser
|
||||
|
||||
if __name__ == "__main__":
|
||||
# given that we proxy from DUT -> LAVA dispatcher -> LAVA primary -> us ->
|
||||
# GitLab runner -> GitLab primary -> user, safe to say we don't need any
|
||||
# more buffering
|
||||
sys.stdout.reconfigure(line_buffering=True)
|
||||
sys.stderr.reconfigure(line_buffering=True)
|
||||
|
||||
parser = create_parser()
|
||||
|
||||
parser.set_defaults(func=main)
|
||||
args = parser.parse_args()
|
||||
args.func(args)
|
@@ -44,7 +44,7 @@ fi
|
||||
case $CI_JOB_NAME in
|
||||
# strace and wine don't seem to mix well
|
||||
# ASAN leak detection is incompatible with strace
|
||||
debian-mingw32-x86_64|*-asan*)
|
||||
meson-mingw32-x86_64|*-asan*)
|
||||
if test -f /usr/bin/time; then
|
||||
MESON_TEST_ARGS+=--wrapper=$PWD/.gitlab-ci/meson/time.sh
|
||||
fi
|
||||
@@ -64,14 +64,12 @@ meson _build --native-file=native.file \
|
||||
-D libdir=lib \
|
||||
-D buildtype=${BUILDTYPE:-debug} \
|
||||
-D build-tests=true \
|
||||
-D c_args="$(echo -n $C_ARGS)" \
|
||||
-D cpp_args="$(echo -n $CPP_ARGS)" \
|
||||
-D libunwind=${UNWIND} \
|
||||
${DRI_LOADERS} \
|
||||
-D dri-drivers=${DRI_DRIVERS:-[]} \
|
||||
${GALLIUM_ST} \
|
||||
-D gallium-drivers=${GALLIUM_DRIVERS:-[]} \
|
||||
-D vulkan-drivers=${VULKAN_DRIVERS:-[]} \
|
||||
-D werror=true \
|
||||
${EXTRA_OPTION}
|
||||
cd _build
|
||||
meson configure
|
||||
|
@@ -3,7 +3,7 @@
|
||||
set -ex
|
||||
|
||||
if [ -z "$GPU_VERSION" ]; then
|
||||
echo 'GPU_VERSION must be set to something like "llvmpipe" or "freedreno-a630" (the name used in your ci/gpu-version-*.txt)'
|
||||
echo 'GPU_VERSION must be set to something like "llvmpipe" or "freedreno-a630" (the name used in your ci/piglit-gpu-version-*.txt)'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
@@ -17,22 +17,6 @@ export VK_ICD_FILENAMES=`pwd`/install/share/vulkan/icd.d/"$VK_DRIVER"_icd.${VK_C
|
||||
RESULTS=`pwd`/${PIGLIT_RESULTS_DIR:-results}
|
||||
mkdir -p $RESULTS
|
||||
|
||||
if [ "$GALLIUM_DRIVER" = "virpipe" ]; then
|
||||
# deqp uses virpipe, while virgl_test_server uses llvmpipe
|
||||
export GALLIUM_DRIVER="$GALLIUM_DRIVER"
|
||||
|
||||
VTEST_ARGS="--use-egl-surfaceless"
|
||||
if [ "$VIRGL_HOST_API" = "GLES" ]; then
|
||||
VTEST_ARGS="$VTEST_ARGS --use-gles"
|
||||
fi
|
||||
|
||||
GALLIUM_DRIVER=llvmpipe \
|
||||
GALLIVM_PERF="nopt" \
|
||||
virgl_test_server $VTEST_ARGS >$RESULTS/vtest-log.txt 2>&1 &
|
||||
|
||||
sleep 1
|
||||
fi
|
||||
|
||||
if [ -n "$PIGLIT_FRACTION" -o -n "$CI_NODE_INDEX" ]; then
|
||||
FRACTION=`expr ${PIGLIT_FRACTION:-1} \* ${CI_NODE_TOTAL:-1}`
|
||||
PIGLIT_RUNNER_OPTIONS="$PIGLIT_RUNNER_OPTIONS --fraction $FRACTION"
|
||||
@@ -44,45 +28,99 @@ if [ -n "$CI_NODE_INDEX" ]; then
|
||||
PIGLIT_RUNNER_OPTIONS="$PIGLIT_RUNNER_OPTIONS --fraction-start ${CI_NODE_INDEX}"
|
||||
fi
|
||||
|
||||
if [ -e "$INSTALL/$GPU_VERSION-fails.txt" ]; then
|
||||
PIGLIT_RUNNER_OPTIONS="$PIGLIT_RUNNER_OPTIONS --baseline $INSTALL/$GPU_VERSION-fails.txt"
|
||||
if [ -e "$INSTALL/piglit-$GPU_VERSION-fails.txt" ]; then
|
||||
PIGLIT_RUNNER_OPTIONS="$PIGLIT_RUNNER_OPTIONS --baseline $INSTALL/piglit-$GPU_VERSION-fails.txt"
|
||||
fi
|
||||
|
||||
# Default to an empty known flakes file if it doesn't exist.
|
||||
touch $INSTALL/$GPU_VERSION-flakes.txt
|
||||
|
||||
if [ -n "$VK_DRIVER" ] && [ -e "$INSTALL/$VK_DRIVER-skips.txt" ]; then
|
||||
PIGLIT_SKIPS="$PIGLIT_SKIPS $INSTALL/$VK_DRIVER-skips.txt"
|
||||
if [ -e "$INSTALL/piglit-$GPU_VERSION-flakes.txt" ]; then
|
||||
PIGLIT_RUNNER_OPTIONS="$PIGLIT_RUNNER_OPTIONS --flakes $INSTALL/piglit-$GPU_VERSION-flakes.txt"
|
||||
fi
|
||||
|
||||
if [ -n "$GALLIUM_DRIVER" ] && [ -e "$INSTALL/$GALLIUM_DRIVER-skips.txt" ]; then
|
||||
PIGLIT_SKIPS="$PIGLIT_SKIPS $INSTALL/$GALLIUM_DRIVER-skips.txt"
|
||||
fi
|
||||
|
||||
if [ -n "$DRIVER_NAME" ] && [ -e "$INSTALL/$DRIVER_NAME-skips.txt" ]; then
|
||||
PIGLIT_SKIPS="$PIGLIT_SKIPS $INSTALL/$DRIVER_NAME-skips.txt"
|
||||
fi
|
||||
|
||||
if [ -e "$INSTALL/$GPU_VERSION-skips.txt" ]; then
|
||||
PIGLIT_SKIPS="$PIGLIT_SKIPS $INSTALL/$GPU_VERSION-skips.txt"
|
||||
if [ -e "$INSTALL/piglit-$GPU_VERSION-skips.txt" ]; then
|
||||
PIGLIT_RUNNER_OPTIONS="$PIGLIT_RUNNER_OPTIONS --skips $INSTALL/piglit-$GPU_VERSION-skips.txt"
|
||||
fi
|
||||
|
||||
set +e
|
||||
|
||||
piglit-runner \
|
||||
run \
|
||||
--piglit-folder /piglit \
|
||||
--output $RESULTS \
|
||||
--jobs ${FDO_CI_CONCURRENT:-4} \
|
||||
--skips $INSTALL/all-skips.txt $PIGLIT_SKIPS \
|
||||
--flakes $INSTALL/$GPU_VERSION-flakes.txt \
|
||||
--profile $PIGLIT_PROFILES \
|
||||
--process-isolation \
|
||||
$PIGLIT_RUNNER_OPTIONS \
|
||||
-v -v
|
||||
if [ -n "$PIGLIT_PARALLEL" ]; then
|
||||
PIGLIT_RUNNER_OPTIONS="$PIGLIT_RUNNER_OPTIONS --jobs $PIGLIT_PARALLEL"
|
||||
elif [ -n "$FDO_CI_CONCURRENT" ]; then
|
||||
PIGLIT_RUNNER_OPTIONS="$PIGLIT_RUNNER_OPTIONS --jobs $FDO_CI_CONCURRENT"
|
||||
else
|
||||
PIGLIT_RUNNER_OPTIONS="$PIGLIT_RUNNER_OPTIONS --jobs 4"
|
||||
fi
|
||||
|
||||
report_flakes() {
|
||||
# Replace spaces in test names with _ so the channel reporting doesn't
# split them across lines, even though that means you can't copy and
# paste from IRC straight into your flakes list.
|
||||
flakes=`grep ",Flake" $1 | sed 's|,Flake.*||g' | sed 's| |_|g'`
|
||||
if [ -z "$flakes" ]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
if [ -z "$FLAKES_CHANNEL" ]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
# The nick needs to be something unique so that multiple runners
|
||||
# connecting at the same time don't race for one nick and get blocked.
|
||||
# freenode has a 16-char limit on nicks (9 is the IETF standard, but
|
||||
# various servers extend that). So, trim off the common prefixes of the
|
||||
# runner name, and append the job ID so that software runners with more
|
||||
# than one concurrent job (think swrast) don't collide. For freedreno,
|
||||
# that gives us a nick as long as db410c-N-JJJJJJJJ, and it'll be a while
|
||||
# before we make it to 9-digit jobs (we're at 7 so far).
|
||||
runner=`echo $CI_RUNNER_DESCRIPTION | sed 's|mesa-||' | sed 's|google-freedreno-||g'`
|
||||
bot="$runner-$CI_JOB_ID"
|
||||
channel="$FLAKES_CHANNEL"
|
||||
(
|
||||
echo NICK $bot
|
||||
echo USER $bot unused unused :Gitlab CI Notifier
|
||||
sleep 10
|
||||
echo "JOIN $channel"
|
||||
sleep 1
|
||||
desc="Flakes detected in job: $CI_JOB_URL on $CI_RUNNER_DESCRIPTION"
|
||||
if [ -n "$CI_MERGE_REQUEST_SOURCE_BRANCH_NAME" ]; then
|
||||
desc="$desc on branch $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME ($CI_MERGE_REQUEST_TITLE)"
|
||||
elif [ -n "$CI_COMMIT_BRANCH" ]; then
|
||||
desc="$desc on branch $CI_COMMIT_BRANCH ($CI_COMMIT_TITLE)"
|
||||
fi
|
||||
echo "PRIVMSG $channel :$desc"
|
||||
for flake in $flakes; do
|
||||
echo "PRIVMSG $channel :$flake"
|
||||
done
|
||||
echo "PRIVMSG $channel :See $CI_JOB_URL/artifacts/browse/results/"
|
||||
echo "QUIT"
|
||||
) | nc irc.freenode.net 6667 > /dev/null
|
||||
|
||||
}
|
||||
|
||||
# wrapper to suppress +x to avoid spamming the log
|
||||
quiet() {
|
||||
set +x
|
||||
"$@"
|
||||
set -x
|
||||
}
|
||||
|
||||
RESULTS_CSV=$RESULTS/results.csv
|
||||
FAILURES_CSV=$RESULTS/failures.csv
|
||||
|
||||
export LD_PRELOAD=$TEST_LD_PRELOAD
|
||||
|
||||
piglit-runner \
|
||||
run \
|
||||
--piglit-folder /piglit \
|
||||
--output $RESULTS \
|
||||
--profile $PIGLIT_PROFILES \
|
||||
--process-isolation \
|
||||
$PIGLIT_RUNNER_OPTIONS \
|
||||
-v -v
|
||||
|
||||
PIGLIT_EXITCODE=$?
|
||||
|
||||
export LD_PRELOAD=
|
||||
|
||||
deqp-runner junit \
|
||||
--testsuite $PIGLIT_PROFILES \
|
||||
--results $RESULTS/failures.csv \
|
||||
@@ -91,18 +129,6 @@ deqp-runner junit \
|
||||
--template "See https://$CI_PROJECT_ROOT_NAMESPACE.pages.freedesktop.org/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/results/{{testcase}}.xml"
|
||||
|
||||
# Report the flakes to the IRC channel for monitoring (if configured):
|
||||
if [ -n "$FLAKES_CHANNEL" ]; then
|
||||
python3 $INSTALL/report-flakes.py \
|
||||
--host irc.oftc.net \
|
||||
--port 6667 \
|
||||
--results $RESULTS/results.csv \
|
||||
--known-flakes $INSTALL/$GPU_VERSION-flakes.txt \
|
||||
--channel "$FLAKES_CHANNEL" \
|
||||
--runner "$CI_RUNNER_DESCRIPTION" \
|
||||
--job "$CI_JOB_ID" \
|
||||
--url "$CI_JOB_URL" \
|
||||
--branch "${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME:-$CI_COMMIT_BRANCH}" \
|
||||
--branch-title "${CI_MERGE_REQUEST_TITLE:-$CI_COMMIT_TITLE}"
|
||||
fi
|
||||
quiet report_flakes $RESULTS_CSV
|
||||
|
||||
exit $PIGLIT_EXITCODE
|
||||
|
@@ -1,234 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -ex
|
||||
|
||||
INSTALL=$(realpath -s "$PWD"/install)
|
||||
MINIO_ARGS="--credentials=/tmp/.minio_credentials"
|
||||
|
||||
RESULTS=$(realpath -s "$PWD"/results)
|
||||
mkdir -p "$RESULTS"
|
||||
|
||||
# Set up the driver environment.
|
||||
# Modifying LD_LIBRARY_PATH directly here may cause problems when
|
||||
# using a command wrapper. Hence, we will just set it when running the
|
||||
# command.
|
||||
export __LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$INSTALL/lib/"
|
||||
|
||||
# Sanity check to ensure that our environment is sufficient to make our tests
|
||||
# run against the Mesa built by CI, rather than any installed distro version.
|
||||
MESA_VERSION=$(head -1 "$INSTALL/VERSION" | sed 's/\./\\./g')
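# Editor's note (not part of the original script): the sed above escapes the
# dots in the version string so the later grep for "Mesa $MESA_VERSION"
# treats them literally rather than as regex wildcards, e.g. 22.0.0 -> 22\.0\.0.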
|
||||
|
||||
print_red() {
|
||||
RED='\033[0;31m'
|
||||
NC='\033[0m' # No Color
|
||||
printf "${RED}"
|
||||
"$@"
|
||||
printf "${NC}"
|
||||
}
|
||||
|
||||
# wrapper to suppress +x to avoid spamming the log
|
||||
quiet() {
|
||||
set +x
|
||||
"$@"
|
||||
set -x
|
||||
}
|
||||
|
||||
if [ "$VK_DRIVER" ]; then
|
||||
|
||||
### VULKAN ###
|
||||
|
||||
# Set the Vulkan driver to use.
|
||||
export VK_ICD_FILENAMES="$INSTALL/share/vulkan/icd.d/${VK_DRIVER}_icd.x86_64.json"
|
||||
|
||||
# Set environment for Wine.
|
||||
export WINEDEBUG="-all"
|
||||
export WINEPREFIX="/dxvk-wine64"
|
||||
export WINEESYNC=1
|
||||
|
||||
# Set environment for DXVK.
|
||||
export DXVK_LOG_LEVEL="none"
|
||||
export DXVK_STATE_CACHE=0
|
||||
|
||||
# Set environment for gfxreconstruct executables.
|
||||
export PATH="/gfxreconstruct/build/bin:$PATH"
|
||||
|
||||
SANITY_MESA_VERSION_CMD="vulkaninfo"
|
||||
|
||||
HANG_DETECTION_CMD="/parallel-deqp-runner/build/bin/hang-detection"
|
||||
|
||||
|
||||
# Set up the Window System Interface (WSI)
|
||||
|
||||
if [ ${TEST_START_XORG:-0} -eq 1 ]; then
|
||||
"$INSTALL"/common/start-x.sh "$INSTALL"
|
||||
export DISPLAY=:0
|
||||
else
|
||||
# Run vulkan against the host's running X server (xvfb doesn't
|
||||
# have DRI3 support).
|
||||
# Set the DISPLAY env variable in each gitlab-runner's
|
||||
# configuration file:
|
||||
# https://docs.gitlab.com/runner/configuration/advanced-configuration.html#the-runners-section
|
||||
quiet printf "%s%s\n" "Running against the hosts' X server. " \
|
||||
"DISPLAY is \"$DISPLAY\"."
|
||||
fi
|
||||
else
|
||||
|
||||
### GL/ES ###
|
||||
|
||||
# Set environment for apitrace executable.
|
||||
export PATH="/apitrace/build:$PATH"
|
||||
|
||||
# Our rootfs may not have "less", which apitrace uses during
|
||||
# apitrace dump
|
||||
export PAGER=cat
|
||||
|
||||
SANITY_MESA_VERSION_CMD="wflinfo"
|
||||
|
||||
HANG_DETECTION_CMD=""
|
||||
|
||||
|
||||
# Set up the platform windowing system.
|
||||
|
||||
if [ "x$EGL_PLATFORM" = "xsurfaceless" ]; then
|
||||
|
||||
# Use the surfaceless EGL platform.
|
||||
export DISPLAY=
|
||||
export WAFFLE_PLATFORM="surfaceless_egl"
|
||||
|
||||
SANITY_MESA_VERSION_CMD="$SANITY_MESA_VERSION_CMD --platform surfaceless_egl --api gles2"
|
||||
|
||||
if [ "x$GALLIUM_DRIVER" = "xvirpipe" ]; then
|
||||
# piglit uses virpipe, while virgl_test_server uses llvmpipe
|
||||
export GALLIUM_DRIVER="$GALLIUM_DRIVER"
|
||||
|
||||
LD_LIBRARY_PATH="$__LD_LIBRARY_PATH" \
|
||||
GALLIUM_DRIVER=llvmpipe \
|
||||
VTEST_USE_EGL_SURFACELESS=1 \
|
||||
VTEST_USE_GLES=1 \
|
||||
virgl_test_server >"$RESULTS"/vtest-log.txt 2>&1 &
|
||||
|
||||
sleep 1
|
||||
fi
|
||||
elif [ "x$PIGLIT_PLATFORM" = "xgbm" ]; then
|
||||
SANITY_MESA_VERSION_CMD="$SANITY_MESA_VERSION_CMD --platform gbm --api gl"
|
||||
elif [ "x$PIGLIT_PLATFORM" = "xmixed_glx_egl" ]; then
|
||||
# It is assumed that you have already brought up your X server before
|
||||
# calling this script.
|
||||
SANITY_MESA_VERSION_CMD="$SANITY_MESA_VERSION_CMD --platform glx --api gl"
|
||||
else
|
||||
SANITY_MESA_VERSION_CMD="$SANITY_MESA_VERSION_CMD --platform glx --api gl --profile core"
|
||||
RUN_CMD_WRAPPER="xvfb-run --server-args=\"-noreset\" sh -c"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$ZINK_USE_LAVAPIPE" ]; then
|
||||
export VK_ICD_FILENAMES="$INSTALL/share/vulkan/icd.d/lvp_icd.x86_64.json"
|
||||
fi
|
||||
|
||||
# If the job is parallel at the gitlab job level, will take the corresponding
|
||||
# fraction of the caselist.
|
||||
if [ -n "$CI_NODE_INDEX" ]; then
|
||||
USE_CASELIST=1
|
||||
fi

replay_minio_upload_images() {
find "$RESULTS/$__PREFIX" -type f -name "*.png" -printf "%P\n" \
| while read -r line; do

__TRACE="${line%-*-*}"
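# (Illustrative aside, assuming image names of the form "<trace>-<tag>-<checksum>.png",
# which is not spelled out here: "${line%-*-*}" drops the last two dash-separated
# fields to recover the trace name, and "${line##*-}" below keeps only the
# trailing "<checksum>.png".)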
if grep -q "^$__PREFIX/$__TRACE: pass$" ".gitlab-ci/piglit/$PIGLIT_RESULTS.txt.orig"; then
if [ "x$CI_PROJECT_PATH" != "x$FDO_UPSTREAM_REPO" ]; then
continue
fi
__MINIO_PATH="$PIGLIT_REPLAY_REFERENCE_IMAGES_BASE"
__DESTINATION_FILE_PATH="${line##*-}"
if wget -q --method=HEAD "https://${__MINIO_PATH}/${__DESTINATION_FILE_PATH}" 2>/dev/null; then
continue
fi
else
__MINIO_PATH="$JOB_ARTIFACTS_BASE"
__DESTINATION_FILE_PATH="$__MINIO_TRACES_PREFIX/${line##*-}"
fi

ci-fairy minio cp $MINIO_ARGS "$RESULTS/$__PREFIX/$line" \
"minio://${__MINIO_PATH}/${__DESTINATION_FILE_PATH}"
done
}

SANITY_MESA_VERSION_CMD="$SANITY_MESA_VERSION_CMD | tee /tmp/version.txt | grep \"Mesa $MESA_VERSION\(\s\|$\)\""

if [ -d results ]; then
cd results && rm -rf ..?* .[!.]* *
fi
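# (Illustrative aside: the three globs above, "..?*", ".[!.]*" and "*", together
# match hidden and non-hidden entries while never matching "." or "..", so the
# results directory is emptied in place without touching its parent.)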
cd /piglit

if [ -n "$USE_CASELIST" ]; then
PIGLIT_TESTS=$(printf "%s" "$PIGLIT_TESTS")
PIGLIT_GENTESTS="./piglit print-cmd $PIGLIT_TESTS replay --format \"{name}\" > /tmp/case-list.txt"
RUN_GENTESTS="export LD_LIBRARY_PATH=$__LD_LIBRARY_PATH; $PIGLIT_GENTESTS"

eval $RUN_GENTESTS

sed -ni $CI_NODE_INDEX~$CI_NODE_TOTAL"p" /tmp/case-list.txt

PIGLIT_TESTS="--test-list /tmp/case-list.txt"
fi
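# For illustration (hypothetical values, not taken from the CI config): GNU
# sed's "first~step" address selects every CI_NODE_TOTAL-th line starting at
# line CI_NODE_INDEX, so each parallel job keeps a disjoint slice of the
# caselist. With CI_NODE_INDEX=2 and CI_NODE_TOTAL=4:
#   printf 'a\nb\nc\nd\ne\nf\ng\nh\n' | sed -n '2~4p'   # prints b and f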
|
||||
|
||||
PIGLIT_OPTIONS=$(printf "%s" "$PIGLIT_OPTIONS")
|
||||
|
||||
PIGLIT_TESTS=$(printf "%s" "$PIGLIT_TESTS")
|
||||
|
||||
PIGLIT_CMD="./piglit run --timeout 300 -j${FDO_CI_CONCURRENT:-4} $PIGLIT_OPTIONS $PIGLIT_TESTS replay "$(/usr/bin/printf "%q" "$RESULTS")
|
||||
|
||||
RUN_CMD="export LD_LIBRARY_PATH=$__LD_LIBRARY_PATH; $SANITY_MESA_VERSION_CMD && $HANG_DETECTION_CMD $PIGLIT_CMD"
|
||||
|
||||
if [ "$RUN_CMD_WRAPPER" ]; then
|
||||
RUN_CMD="set +e; $RUN_CMD_WRAPPER "$(/usr/bin/printf "%q" "$RUN_CMD")"; set -e"
|
||||
fi
|
||||
|
||||
ci-fairy minio login $MINIO_ARGS --token-file "${CI_JOB_JWT_FILE}"
|
||||
|
||||
eval $RUN_CMD
|
||||
|
||||
if [ $? -ne 0 ]; then
|
||||
printf "%s\n" "Found $(cat /tmp/version.txt), expected $MESA_VERSION"
|
||||
fi
|
||||
|
||||
ARTIFACTS_BASE_URL="https://${CI_PROJECT_ROOT_NAMESPACE}.${CI_PAGES_DOMAIN}/-/${CI_PROJECT_NAME}/-/jobs/${CI_JOB_ID}/artifacts"
|
||||
|
||||
./piglit summary aggregate "$RESULTS" -o junit.xml
|
||||
|
||||
PIGLIT_RESULTS="${PIGLIT_RESULTS:-replay}"
|
||||
RESULTSFILE="$RESULTS/$PIGLIT_RESULTS.txt"
|
||||
mkdir -p .gitlab-ci/piglit
|
||||
./piglit summary console "$RESULTS"/results.json.bz2 \
|
||||
| tee ".gitlab-ci/piglit/$PIGLIT_RESULTS.txt.orig" \
|
||||
| head -n -1 | grep -v ": pass" \
|
||||
| sed '/^summary:/Q' \
|
||||
> $RESULTSFILE
|
||||
|
||||
__PREFIX="trace/$PIGLIT_REPLAY_DEVICE_NAME"
|
||||
__MINIO_PATH="$PIGLIT_REPLAY_ARTIFACTS_BASE_URL"
|
||||
__MINIO_TRACES_PREFIX="traces"
|
||||
|
||||
if [ "x$PIGLIT_REPLAY_SUBCOMMAND" != "xprofile" ]; then
|
||||
quiet replay_minio_upload_images
|
||||
fi
|
||||
|
||||
|
||||
if [ ! -s $RESULTSFILE ]; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
./piglit summary html --exclude-details=pass \
|
||||
"$RESULTS"/summary "$RESULTS"/results.json.bz2
|
||||
|
||||
find "$RESULTS"/summary -type f -name "*.html" -print0 \
|
||||
| xargs -0 sed -i 's%<img src="file://'"${RESULTS}"'.*-\([0-9a-f]*\)\.png%<img src="https://'"${JOB_ARTIFACTS_BASE}"'/traces/\1.png%g'
|
||||
find "$RESULTS"/summary -type f -name "*.html" -print0 \
|
||||
| xargs -0 sed -i 's%<img src="file://%<img src="https://'"${PIGLIT_REPLAY_REFERENCE_IMAGES_BASE}"'/%g'
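# For illustration (the path and checksum below are made up): the first sed
# keys on the hex checksum at the end of the local image name, so a reference
# such as
#   <img src="file:///builds/.../results/trace/gl-foo/bar-0-0123abcd.png
# is rewritten to point at the uploaded copy,
#   <img src="https://$JOB_ARTIFACTS_BASE/traces/0123abcd.png
# while the second sed redirects any remaining file:// images to the
# reference-image base URL.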
|
||||
|
||||
quiet print_red echo "Failures in traces:"
|
||||
cat $RESULTSFILE
|
||||
quiet print_red echo "Review the image changes and get the new checksums at: ${ARTIFACTS_BASE_URL}/results/summary/problems.html"
|
||||
exit 1
|
278
.gitlab-ci/piglit/run.sh
Executable file
@@ -0,0 +1,278 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -ex
|
||||
|
||||
INSTALL=$(realpath -s "$PWD"/install)
|
||||
|
||||
RESULTS=$(realpath -s "$PWD"/results)
|
||||
mkdir -p "$RESULTS"
|
||||
|
||||
# Set up the driver environment.
|
||||
# Modifying LD_LIBRARY_PATH directly here may cause problems when
|
||||
# using a command wrapper. Hence, we will just set it when running the
|
||||
# command.
|
||||
export __LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$INSTALL/lib/"
|
||||
|
||||
# Sanity check to ensure that our environment is sufficient to make our tests
|
||||
# run against the Mesa built by CI, rather than any installed distro version.
|
||||
MESA_VERSION=$(cat "$INSTALL/VERSION" | sed 's/\./\\./g')
|
||||
|
||||
if [ "$VK_DRIVER" ]; then
|
||||
|
||||
### VULKAN ###
|
||||
|
||||
# Set the Vulkan driver to use.
|
||||
export VK_ICD_FILENAMES="$INSTALL/share/vulkan/icd.d/${VK_DRIVER}_icd.x86_64.json"
|
||||
|
||||
if [ "x$PIGLIT_PROFILES" = "xreplay" ]; then
|
||||
# Set environment for Wine.
|
||||
export WINEDEBUG="-all"
|
||||
export WINEPREFIX="/dxvk-wine64"
|
||||
export WINEESYNC=1
|
||||
|
||||
# Set environment for DXVK.
|
||||
export DXVK_LOG_LEVEL="none"
|
||||
export DXVK_STATE_CACHE=0
|
||||
|
||||
# Set environment for gfxreconstruct executables.
|
||||
export PATH="/gfxreconstruct/build/bin:$PATH"
|
||||
fi
|
||||
|
||||
SANITY_MESA_VERSION_CMD="vulkaninfo"
|
||||
|
||||
|
||||
# Set up the Window System Interface (WSI)
|
||||
|
||||
# IMPORTANT:
|
||||
#
|
||||
# Nothing to do here.
|
||||
#
|
||||
# Run vulkan against the host's running X server (xvfb doesn't
|
||||
# have DRI3 support).
|
||||
# Set the DISPLAY env variable in each gitlab-runner's
|
||||
# configuration file:
|
||||
# https://docs.gitlab.com/runner/configuration/advanced-configuration.html#the-runners-section
|
||||
else
|
||||
|
||||
### GL/ES ###
|
||||
|
||||
if [ "x$PIGLIT_PROFILES" = "xreplay" ]; then
|
||||
# Set environment for apitrace executable.
|
||||
export PATH="/apitrace/build:$PATH"
|
||||
|
||||
# Our rootfs may not have "less", which apitrace uses during
|
||||
# apitrace dump
|
||||
export PAGER=cat
|
||||
fi
|
||||
|
||||
SANITY_MESA_VERSION_CMD="wflinfo"
|
||||
|
||||
|
||||
# Set up the platform windowing system.
|
||||
|
||||
if [ "x$EGL_PLATFORM" = "xsurfaceless" ]; then
|
||||
|
||||
# Use the surfaceless EGL platform.
|
||||
export DISPLAY=
|
||||
export WAFFLE_PLATFORM="surfaceless_egl"
|
||||
|
||||
SANITY_MESA_VERSION_CMD="$SANITY_MESA_VERSION_CMD --platform surfaceless_egl --api gles2"
|
||||
|
||||
if [ "x$GALLIUM_DRIVER" = "xvirpipe" ]; then
|
||||
# piglit should use virpipe, and virgl_test_server should use llvmpipe
|
||||
export GALLIUM_DRIVER="$GALLIUM_DRIVER"
|
||||
|
||||
LD_LIBRARY_PATH="$__LD_LIBRARY_PATH" \
|
||||
GALLIUM_DRIVER=llvmpipe \
|
||||
GALLIVM_PERF="nopt,no_filter_hacks" \
|
||||
VTEST_USE_EGL_SURFACELESS=1 \
|
||||
VTEST_USE_GLES=1 \
|
||||
virgl_test_server >"$RESULTS"/vtest-log.txt 2>&1 &
|
||||
|
||||
sleep 1
|
||||
fi
|
||||
elif [ "x$PIGLIT_PLATFORM" = "xgbm" ]; then
|
||||
SANITY_MESA_VERSION_CMD="$SANITY_MESA_VERSION_CMD --platform gbm --api gl"
|
||||
elif [ "x$PIGLIT_PLATFORM" = "xmixed_glx_egl" ]; then
|
||||
# It is assumed that you have already brought up your X server before
|
||||
# calling this script.
|
||||
SANITY_MESA_VERSION_CMD="$SANITY_MESA_VERSION_CMD --platform glx --api gl"
|
||||
else
|
||||
SANITY_MESA_VERSION_CMD="$SANITY_MESA_VERSION_CMD --platform glx --api gl --profile core"
|
||||
RUN_CMD_WRAPPER="xvfb-run --server-args=\"-noreset\" sh -c"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$ZINK_USE_LAVAPIPE" ]; then
|
||||
export VK_ICD_FILENAMES="$INSTALL/share/vulkan/icd.d/lvp_icd.x86_64.json"
|
||||
fi
|
||||
|
||||
# If the job is parallel at the gitlab job level, will take the corresponding
|
||||
# fraction of the caselist.
|
||||
if [ -n "$CI_NODE_INDEX" ]; then
|
||||
|
||||
if [ "$PIGLIT_PROFILES" != "${PIGLIT_PROFILES% *}" ]; then
|
||||
FAILURE_MESSAGE=$(printf "%s" "Can't parallelize piglit with multiple profiles")
|
||||
quiet print_red printf "%s\n" "$FAILURE_MESSAGE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
USE_CASELIST=1
|
||||
fi
|
||||
|
||||
print_red() {
|
||||
RED='\033[0;31m'
|
||||
NC='\033[0m' # No Color
|
||||
printf "${RED}"
|
||||
"$@"
|
||||
printf "${NC}"
|
||||
}
|
||||
|
||||
# wrapper to suppress +x to avoid spamming the log
|
||||
quiet() {
|
||||
set +x
|
||||
"$@"
|
||||
set -x
|
||||
}
|
||||
|
||||
replay_minio_upload_images() {
|
||||
find "$RESULTS/$__PREFIX" -type f -name "*.png" -printf "%P\n" \
|
||||
| while read -r line; do
|
||||
|
||||
__TRACE="${line%-*-*}"
|
||||
if grep -q "^$__PREFIX/$__TRACE: pass$" ".gitlab-ci/piglit/$PIGLIT_RESULTS.txt.orig"; then
|
||||
if [ "x$CI_PROJECT_PATH" != "x$FDO_UPSTREAM_REPO" ]; then
|
||||
continue
|
||||
fi
|
||||
__MINIO_PATH="$PIGLIT_REPLAY_REFERENCE_IMAGES_BASE_URL"
|
||||
__DESTINATION_FILE_PATH="${line##*-}"
|
||||
if ci-fairy minio ls "minio://${MINIO_HOST}${__MINIO_PATH}/${__DESTINATION_FILE_PATH}" 2>/dev/null; then
|
||||
continue
|
||||
fi
|
||||
else
|
||||
__MINIO_PATH="$PIGLIT_REPLAY_ARTIFACTS_BASE_URL"
|
||||
__DESTINATION_FILE_PATH="$__MINIO_TRACES_PREFIX/${line##*-}"
|
||||
# Adding to the JUnit the direct link to the diff page in
|
||||
# the dashboard
|
||||
__PIGLIT_TESTCASE_CLASSNAME="piglit\.trace\.$PIGLIT_REPLAY_DEVICE_NAME\.$(dirname $__TRACE | sed 's%/%\\.%g;s@%@\\%@')"
|
||||
__PIGLIT_TESTCASE_NAME="$(basename $__TRACE | sed 's%\.%_%g;s@%@\\%@')"
|
||||
__DASHBOARD_URL="https://tracie.freedesktop.org/dashboard/imagediff/${CI_PROJECT_PATH}/${CI_JOB_ID}/${__TRACE}"
|
||||
__START_TEST_PATTERN='<testcase classname="'"${__PIGLIT_TESTCASE_CLASSNAME}"'" name="'"${__PIGLIT_TESTCASE_NAME}"'" status="fail"'
|
||||
__REPLACE_TEST_PATTERN='</system-out><failure type="fail"/></testcase>'
|
||||
# Replace in the range between __START_TEST_PATTERN and
|
||||
# __REPLACE_TEST_PATTERN leaving __START_TEST_PATTERN out
|
||||
# from the substitution
|
||||
sed '\%'"${__START_TEST_PATTERN}"'%,\%'"${__REPLACE_TEST_PATTERN}"'%{\%'"${__START_TEST_PATTERN}"'%b;s%'"${__REPLACE_TEST_PATTERN}"'%</system-out><failure type="fail">To view the image differences visit: '"${__DASHBOARD_URL}"'</failure></testcase>%}' \
|
||||
-i "$RESULTS"/junit.xml
|
||||
fi
|
||||
|
||||
ci-fairy minio cp "$RESULTS/$__PREFIX/$line" \
|
||||
"minio://${MINIO_HOST}${__MINIO_PATH}/${__DESTINATION_FILE_PATH}"
|
||||
done
|
||||
}
|
||||
|
||||
SANITY_MESA_VERSION_CMD="$SANITY_MESA_VERSION_CMD | tee /tmp/version.txt | grep \"Mesa $MESA_VERSION\(\s\|$\)\""
|
||||
|
||||
rm -rf results
|
||||
cd /piglit
|
||||
|
||||
if [ -n "$USE_CASELIST" ]; then
|
||||
PIGLIT_TESTS=$(printf "%s" "$PIGLIT_TESTS")
|
||||
PIGLIT_GENTESTS="./piglit print-cmd $PIGLIT_TESTS $PIGLIT_PROFILES --format \"{name}\" > /tmp/case-list.txt"
|
||||
RUN_GENTESTS="export LD_LIBRARY_PATH=$__LD_LIBRARY_PATH; $PIGLIT_GENTESTS"
|
||||
|
||||
eval $RUN_GENTESTS
|
||||
|
||||
sed -ni $CI_NODE_INDEX~$CI_NODE_TOTAL"p" /tmp/case-list.txt
|
||||
|
||||
PIGLIT_TESTS="--test-list /tmp/case-list.txt"
|
||||
fi
|
||||
|
||||
PIGLIT_OPTIONS=$(printf "%s" "$PIGLIT_OPTIONS")
|
||||
|
||||
PIGLIT_TESTS=$(printf "%s" "$PIGLIT_TESTS")
|
||||
|
||||
PIGLIT_CMD="./piglit run -j${FDO_CI_CONCURRENT:-4} $PIGLIT_OPTIONS $PIGLIT_TESTS $PIGLIT_PROFILES "$(/usr/bin/printf "%q" "$RESULTS")
|
||||
|
||||
RUN_CMD="export LD_LIBRARY_PATH=$__LD_LIBRARY_PATH; $SANITY_MESA_VERSION_CMD && $PIGLIT_CMD"
|
||||
|
||||
if [ "$RUN_CMD_WRAPPER" ]; then
|
||||
RUN_CMD="set +e; $RUN_CMD_WRAPPER "$(/usr/bin/printf "%q" "$RUN_CMD")"; set -e"
|
||||
fi
|
||||
|
||||
FAILURE_MESSAGE=$(printf "%s" "Unexpected change in results:")
|
||||
|
||||
eval $RUN_CMD
|
||||
|
||||
if [ $? -ne 0 ]; then
|
||||
printf "%s\n" "Found $(cat /tmp/version.txt), expected $MESA_VERSION"
|
||||
fi
|
||||
|
||||
ARTIFACTS_BASE_URL="https://${CI_PROJECT_ROOT_NAMESPACE}.${CI_PAGES_DOMAIN}/-/${CI_PROJECT_NAME}/-/jobs/${CI_JOB_ID}/artifacts"
|
||||
|
||||
if [ ${PIGLIT_JUNIT_RESULTS:-0} -eq 1 ]; then
|
||||
./piglit summary aggregate "$RESULTS" -o junit.xml
|
||||
FAILURE_MESSAGE=$(printf "${FAILURE_MESSAGE}\n%s" "Check the JUnit report for failures at: ${ARTIFACTS_BASE_URL}/results/junit.xml")
|
||||
fi
|
||||
|
||||
PIGLIT_RESULTS="${PIGLIT_RESULTS:-$PIGLIT_PROFILES}"
|
||||
RESULTSFILE="$RESULTS/$PIGLIT_RESULTS.txt"
|
||||
mkdir -p .gitlab-ci/piglit
|
||||
./piglit summary console "$RESULTS"/results.json.bz2 \
|
||||
| tee ".gitlab-ci/piglit/$PIGLIT_RESULTS.txt.orig" \
|
||||
| head -n -1 | grep -v ": pass" \
|
||||
| sed '/^summary:/Q' \
|
||||
> $RESULTSFILE
|
||||
|
||||
if [ "x$PIGLIT_PROFILES" = "xreplay" ] \
|
||||
&& [ ${PIGLIT_REPLAY_UPLOAD_TO_MINIO:-0} -eq 1 ]; then
|
||||
|
||||
ci-fairy minio login $CI_JOB_JWT
|
||||
|
||||
__PREFIX="trace/$PIGLIT_REPLAY_DEVICE_NAME"
|
||||
__MINIO_PATH="$PIGLIT_REPLAY_ARTIFACTS_BASE_URL"
|
||||
__MINIO_TRACES_PREFIX="traces"
|
||||
|
||||
ci-fairy minio cp "$RESULTS"/results.json.bz2 \
|
||||
"minio://${MINIO_HOST}${__MINIO_PATH}/${__MINIO_TRACES_PREFIX}/results.json.bz2"
|
||||
|
||||
quiet replay_minio_upload_images
|
||||
|
||||
ci-fairy minio cp "$RESULTS"/junit.xml \
|
||||
"minio://${MINIO_HOST}${__MINIO_PATH}/${__MINIO_TRACES_PREFIX}/junit.xml"
|
||||
fi
|
||||
|
||||
if [ -n "$USE_CASELIST" ]; then
|
||||
# Just filter the expected results based on the tests that were actually
|
||||
# executed, and switch to the version with no summary
|
||||
cat ".gitlab-ci/piglit/$PIGLIT_RESULTS.txt.orig" | sed '/^summary:/Q' | rev \
|
||||
| cut -f2- -d: | rev | sed "s/$/:/g" > /tmp/executed.txt
|
||||
|
||||
grep -F -f /tmp/executed.txt "$INSTALL/$PIGLIT_RESULTS.txt" \
|
||||
> ".gitlab-ci/piglit/$PIGLIT_RESULTS.txt.baseline" || true
|
||||
else
|
||||
cp "$INSTALL/$PIGLIT_RESULTS.txt" \
|
||||
".gitlab-ci/piglit/$PIGLIT_RESULTS.txt.baseline"
|
||||
fi
|
||||
|
||||
if diff -q ".gitlab-ci/piglit/$PIGLIT_RESULTS.txt.baseline" $RESULTSFILE; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [ ${PIGLIT_HTML_SUMMARY:-1} -eq 1 ]; then
|
||||
./piglit summary html --exclude-details=pass \
|
||||
"$RESULTS"/summary "$RESULTS"/results.json.bz2
|
||||
|
||||
if [ "x$PIGLIT_PROFILES" = "xreplay" ]; then
|
||||
find "$RESULTS"/summary -type f -name "*.html" -print0 \
|
||||
| xargs -0 sed -i 's%<img src="file://'"${RESULTS}"'.*-\([0-9a-f]*\)\.png%<img src="https://'"${MINIO_HOST}${PIGLIT_REPLAY_ARTIFACTS_BASE_URL}"'/traces/\1.png%g'
|
||||
find "$RESULTS"/summary -type f -name "*.html" -print0 \
|
||||
| xargs -0 sed -i 's%<img src="file://%<img src="https://'"${MINIO_HOST}${PIGLIT_REPLAY_REFERENCE_IMAGES_BASE_URL}"'/%g'
|
||||
fi
|
||||
|
||||
FAILURE_MESSAGE=$(printf "${FAILURE_MESSAGE}\n%s" "Check the HTML summary for problems at: ${ARTIFACTS_BASE_URL}/results/summary/problems.html")
|
||||
fi
|
||||
|
||||
quiet print_red printf "%s\n" "$FAILURE_MESSAGE"
|
||||
quiet diff --color=always -u ".gitlab-ci/piglit/$PIGLIT_RESULTS.txt.baseline" $RESULTSFILE
|
||||
exit 1
|
@@ -3,19 +3,16 @@
|
||||
set -e
|
||||
set -o xtrace
|
||||
|
||||
VERSION=`head -1 install/VERSION`
|
||||
ROOTDIR=`pwd`
|
||||
VERSION=`cat install/VERSION`
|
||||
|
||||
if [ -d results ]; then
|
||||
cd results && rm -rf ..?* .[!.]* *
|
||||
fi
|
||||
rm -rf results
|
||||
cd /piglit
|
||||
|
||||
export OCL_ICD_VENDORS=$ROOTDIR/install/etc/OpenCL/vendors/
|
||||
export OCL_ICD_VENDORS=$OLDPWD/install/etc/OpenCL/vendors/
|
||||
|
||||
set +e
|
||||
unset DISPLAY
|
||||
export LD_LIBRARY_PATH=$ROOTDIR/install/lib
|
||||
export LD_LIBRARY_PATH=$OLDPWD/install/lib
|
||||
clinfo
|
||||
|
||||
# If the job is parallel at the gitlab job level, will take the corresponding
|
||||
@@ -37,7 +34,7 @@ if [ -n "$USE_CASELIST" ]; then
|
||||
PIGLIT_TESTS="--test-list /tmp/case-list.txt"
|
||||
fi
|
||||
|
||||
./piglit run -c -j${FDO_CI_CONCURRENT:-4} $PIGLIT_OPTIONS $PIGLIT_TESTS $PIGLIT_PROFILES $ROOTDIR/results
|
||||
./piglit run -c -j${FDO_CI_CONCURRENT:-4} $PIGLIT_OPTIONS $PIGLIT_TESTS $PIGLIT_PROFILES $OLDPWD/results
|
||||
retVal=$?
|
||||
if [ $retVal -ne 0 ]; then
|
||||
echo "Found $(cat /tmp/version.txt), expected $VERSION"
|
||||
@@ -46,7 +43,7 @@ set -e
|
||||
|
||||
PIGLIT_RESULTS=${PIGLIT_RESULTS:-$PIGLIT_PROFILES}
|
||||
mkdir -p .gitlab-ci/piglit
|
||||
./piglit summary console $ROOTDIR/results \
|
||||
./piglit summary console $OLDPWD/results \
|
||||
| tee ".gitlab-ci/piglit/$PIGLIT_RESULTS.txt.orig" \
|
||||
| head -n -1 \
|
||||
| grep -v ": pass" \
|
||||
@@ -58,17 +55,17 @@ if [ -n "$USE_CASELIST" ]; then
|
||||
# executed, and switch to the version with no summary
|
||||
cat .gitlab-ci/piglit/$PIGLIT_RESULTS.txt.orig | sed '/^summary:/Q' | rev \
|
||||
| cut -f2- -d: | rev | sed "s/$/:/g" > /tmp/executed.txt
|
||||
grep -F -f /tmp/executed.txt $ROOTDIR/install/$PIGLIT_RESULTS.txt \
|
||||
grep -F -f /tmp/executed.txt $OLDPWD/install/$PIGLIT_RESULTS.txt \
|
||||
> .gitlab-ci/piglit/$PIGLIT_RESULTS.txt.baseline || true
|
||||
else
|
||||
cp $ROOTDIR/install/$PIGLIT_RESULTS.txt .gitlab-ci/piglit/$PIGLIT_RESULTS.txt.baseline
|
||||
cp $OLDPWD/install/$PIGLIT_RESULTS.txt .gitlab-ci/piglit/$PIGLIT_RESULTS.txt.baseline
|
||||
fi
|
||||
|
||||
if diff -q .gitlab-ci/piglit/$PIGLIT_RESULTS.txt{.baseline,}; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
./piglit summary html --exclude-details=pass $ROOTDIR/results/summary $ROOTDIR/results
|
||||
./piglit summary html --exclude-details=pass $OLDPWD/results/summary $OLDPWD/results
|
||||
|
||||
echo Unexpected change in results:
|
||||
diff -u .gitlab-ci/piglit/$PIGLIT_RESULTS.txt{.baseline,}
|
||||
|
@@ -18,26 +18,21 @@ if [ -n "$CROSS" ]; then
|
||||
else
|
||||
STRIP="strip"
|
||||
fi
|
||||
if [ -z "$ARTIFACTS_DEBUG_SYMBOLS" ]; then
|
||||
if [ -z "$ARTIFACTS_DEBUG_SYMBOLS"]; then
|
||||
find install -name \*.so -exec $STRIP {} \;
|
||||
fi
|
||||
|
||||
# Test runs don't pull down the git tree, so put the dEQP helper
|
||||
# script and associated bits there.
|
||||
echo "$(cat VERSION) (git-$(git rev-parse HEAD | cut -b -10))" > install/VERSION
|
||||
echo "$(cat VERSION) (git-$(git rev-parse HEAD | cut -b -10))" >> install/VERSION
|
||||
cp -Rp .gitlab-ci/bare-metal install/
|
||||
cp -Rp .gitlab-ci/common install/
|
||||
cp -Rp .gitlab-ci/piglit install/
|
||||
cp -Rp .gitlab-ci/fossils.yml install/
|
||||
cp -Rp .gitlab-ci/fossils install/
|
||||
cp -Rp .gitlab-ci/fossilize-runner.sh install/
|
||||
cp -Rp .gitlab-ci/crosvm-init.sh install/
|
||||
cp -Rp .gitlab-ci/*.txt install/
|
||||
cp -Rp .gitlab-ci/report-flakes.py install/
|
||||
cp -Rp .gitlab-ci/vkd3d-proton install/
|
||||
cp -Rp .gitlab-ci/*-runner.sh install/
|
||||
cp -Rp .gitlab-ci/deqp-runner.sh install/
|
||||
cp -Rp .gitlab-ci/deqp-*.txt install/
|
||||
find . -path \*/ci/\*.txt \
|
||||
-o -path \*/ci/\*.toml \
|
||||
-o -path \*/ci/\*traces\*.yml \
|
||||
| xargs -I '{}' cp -p '{}' install/
|
||||
|
||||
@@ -45,13 +40,15 @@ find . -path \*/ci/\*.txt \
|
||||
# packed separately in the zip file.
|
||||
mkdir -p artifacts/
|
||||
tar -cf artifacts/install.tar install
|
||||
cp -Rp .gitlab-ci/common artifacts/ci-common
|
||||
cp -Rp .gitlab-ci/lava artifacts/
|
||||
|
||||
if [ -n "$MINIO_ARTIFACT_NAME" ]; then
|
||||
# Pass needed files to the test stage
|
||||
cp $CI_PROJECT_DIR/.gitlab-ci/generate_lava.py artifacts/.
|
||||
cp $CI_PROJECT_DIR/.gitlab-ci/lava.yml.jinja2 artifacts/.
|
||||
|
||||
MINIO_ARTIFACT_NAME="$MINIO_ARTIFACT_NAME.tar.gz"
|
||||
gzip -c artifacts/install.tar > ${MINIO_ARTIFACT_NAME}
|
||||
ci-fairy minio login --token-file "${CI_JOB_JWT_FILE}"
|
||||
ci-fairy minio cp ${MINIO_ARTIFACT_NAME} minio://${PIPELINE_ARTIFACTS_BASE}/${MINIO_ARTIFACT_NAME}
|
||||
MINIO_PATH=${MINIO_HOST}/artifacts/${CI_PROJECT_PATH}/${CI_PIPELINE_ID}
|
||||
ci-fairy minio login $CI_JOB_JWT
|
||||
ci-fairy minio cp ${MINIO_ARTIFACT_NAME} minio://${MINIO_PATH}/${MINIO_ARTIFACT_NAME}
|
||||
fi
|
||||
|
@@ -1,151 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright © 2021 Google LLC
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a
|
||||
# copy of this software and associated documentation files (the "Software"),
|
||||
# to deal in the Software without restriction, including without limitation
|
||||
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
||||
# and/or sell copies of the Software, and to permit persons to whom the
|
||||
# Software is furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice (including the next
|
||||
# paragraph) shall be included in all copies or substantial portions of the
|
||||
# Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
||||
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
|
||||
import argparse
|
||||
import io
|
||||
import re
|
||||
import socket
|
||||
import time
|
||||
|
||||
|
||||
class Connection:
|
||||
def __init__(self, host, port, verbose):
|
||||
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
self.s.connect((host, port))
|
||||
self.s.setblocking(0)
|
||||
self.verbose = verbose
|
||||
|
||||
def send_line(self, line):
|
||||
if self.verbose:
|
||||
print(f"IRC: sending {line}")
|
||||
self.s.sendall((line + '\n').encode())
|
||||
|
||||
def wait(self, secs):
|
||||
for i in range(secs):
|
||||
if self.verbose:
|
||||
while True:
|
||||
try:
|
||||
data = self.s.recv(1024)
|
||||
except io.BlockingIOError:
|
||||
break
|
||||
if data == "":
|
||||
break
|
||||
for line in data.decode().split('\n'):
|
||||
print(f"IRC: received {line}")
|
||||
time.sleep(1)
|
||||
|
||||
def quit(self):
|
||||
self.send_line("QUIT")
|
||||
self.s.shutdown(socket.SHUT_WR)
|
||||
self.s.close()
|
||||
|
||||
|
||||
def read_flakes(results):
|
||||
flakes = []
|
||||
csv = re.compile("(.*),(.*),(.*)")
|
||||
for line in open(results, 'r').readlines():
|
||||
match = csv.match(line)
|
||||
if match.group(2) == "Flake":
|
||||
flakes.append(match.group(1))
|
||||
return flakes
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--host', type=str,
|
||||
help='IRC server hostname', required=True)
|
||||
parser.add_argument('--port', type=int,
|
||||
help='IRC server port', required=True)
|
||||
parser.add_argument('--results', type=str,
|
||||
help='results.csv file from deqp-runner or piglit-runner', required=True)
|
||||
parser.add_argument('--known-flakes', type=str,
|
||||
help='*-flakes.txt file passed to deqp-runner or piglit-runner', required=True)
|
||||
parser.add_argument('--channel', type=str,
|
||||
help='Known flakes report channel', required=True)
|
||||
parser.add_argument('--url', type=str,
|
||||
help='$CI_JOB_URL', required=True)
|
||||
parser.add_argument('--runner', type=str,
|
||||
help='$CI_RUNNER_DESCRIPTION', required=True)
|
||||
parser.add_argument('--branch', type=str,
|
||||
help='optional branch name')
|
||||
parser.add_argument('--branch-title', type=str,
|
||||
help='optional branch title')
|
||||
parser.add_argument('--job', type=str,
|
||||
help='$CI_JOB_ID', required=True)
|
||||
parser.add_argument('--verbose', "-v", action="store_true",
|
||||
help='log IRC interactions')
|
||||
args = parser.parse_args()
|
||||
|
||||
flakes = read_flakes(args.results)
|
||||
if not flakes:
|
||||
exit(0)
|
||||
|
||||
known_flakes = []
|
||||
for line in open(args.known_flakes).readlines():
|
||||
line = line.strip()
|
||||
if not line or line.startswith("#"):
|
||||
continue
|
||||
known_flakes.append(re.compile(line))
|
||||
|
||||
irc = Connection(args.host, args.port, args.verbose)
|
||||
|
||||
# The nick needs to be something unique so that multiple runners
|
||||
# connecting at the same time don't race for one nick and get blocked.
|
||||
# freenode has a 16-char limit on nicks (9 is the IETF standard, but
|
||||
# various servers extend that). So, trim off the common prefixes of the
|
||||
# runner name, and append the job ID so that software runners with more
|
||||
# than one concurrent job (think swrast) don't collide. For freedreno,
|
||||
# that gives us a nick as long as db410c-N-JJJJJJJJ, and it'll be a while
|
||||
# before we make it to 9-digit jobs (we're at 7 so far).
|
||||
nick = args.runner
|
||||
nick = nick.replace('mesa-', '')
|
||||
nick = nick.replace('google-freedreno-', '')
|
||||
nick += f'-{args.job}'
|
||||
irc.send_line(f"NICK {nick}")
|
||||
irc.send_line(f"USER {nick} unused unused: Gitlab CI Notifier")
|
||||
irc.wait(10)
|
||||
irc.send_line(f"JOIN {args.channel}")
|
||||
irc.wait(1)
|
||||
|
||||
branchinfo = ""
|
||||
if args.branch:
|
||||
branchinfo = f" on branch {args.branch} ({args.branch_title})"
|
||||
irc.send_line(
|
||||
f"PRIVMSG {args.channel} :Flakes detected in job {args.url} on {args.runner}{branchinfo}:")
|
||||
|
||||
for flake in flakes:
|
||||
status = "NEW "
|
||||
for known in known_flakes:
|
||||
if known.match(flake):
|
||||
status = ""
|
||||
break
|
||||
|
||||
irc.send_line(f"PRIVMSG {args.channel} :{status}{flake}")
|
||||
|
||||
irc.send_line(
|
||||
f"PRIVMSG {args.channel} :See {args.url}/artifacts/browse/results/")
|
||||
|
||||
irc.quit()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
@@ -1,85 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
copy_tests_files() (
|
||||
SKQP_BACKEND="${1}"
|
||||
SKQP_FILE_PREFIX="${INSTALL}/${GPU_VERSION}-skqp"
|
||||
|
||||
if echo "${SKQP_BACKEND}" | grep -qE 'gl(es)?'
|
||||
then
|
||||
SKQP_RENDER_TESTS_FILE="${SKQP_FILE_PREFIX}-${SKQP_BACKEND}_rendertests.txt"
|
||||
cp "${SKQP_RENDER_TESTS_FILE}" "${SKQP_ASSETS_DIR}"/skqp/rendertests.txt
|
||||
return 0
|
||||
fi
|
||||
|
||||
# The unittests.txt path is hardcoded inside the assets directory,
|
||||
# that is why it needs to be a special case.
|
||||
if echo "${SKQP_BACKEND}" | grep -qE "unitTest"
|
||||
then
|
||||
cp "${SKQP_FILE_PREFIX}_unittests.txt" "${SKQP_ASSETS_DIR}"/skqp/unittests.txt
|
||||
fi
|
||||
)
|
||||
|
||||
set -ex
|
||||
|
||||
# Needed so configuration files can contain paths to files in /install
|
||||
ln -sf "$CI_PROJECT_DIR"/install /install
|
||||
|
||||
INSTALL=${PWD}/install
|
||||
|
||||
if [ -z "$GPU_VERSION" ]; then
|
||||
echo 'GPU_VERSION must be set to something like "llvmpipe" or "freedreno-a630" (the name used in .gitlab-ci/gpu-version-*.txt)'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
SKQP_ASSETS_DIR=/skqp/assets
|
||||
SKQP_RESULTS_DIR="${SKQP_RESULTS_DIR:-results}"
|
||||
|
||||
mkdir "${SKQP_ASSETS_DIR}"/skqp
|
||||
|
||||
SKQP_EXITCODE=0
|
||||
for SKQP_BACKEND in ${SKQP_BACKENDS}
|
||||
do
|
||||
set -e
|
||||
copy_tests_files "${SKQP_BACKEND}"
|
||||
|
||||
set +e
|
||||
SKQP_BACKEND_RESULTS_DIR="${SKQP_RESULTS_DIR}"/"${SKQP_BACKEND}"
|
||||
mkdir -p "${SKQP_BACKEND_RESULTS_DIR}"
|
||||
/skqp/skqp "${SKQP_ASSETS_DIR}" '' "${SKQP_BACKEND_RESULTS_DIR}" "${SKQP_BACKEND}_"
|
||||
BACKEND_EXITCODE=$?
|
||||
|
||||
if [ ! $BACKEND_EXITCODE -eq 0 ]
|
||||
then
|
||||
echo "skqp failed on ${SKQP_BACKEND} tests with ${BACKEND_EXITCODE} exit code."
|
||||
fi
|
||||
|
||||
# Propagate error codes to leverage the final job result
|
||||
SKQP_EXITCODE=$(( SKQP_EXITCODE | BACKEND_EXITCODE ))
|
||||
done
|
||||
|
||||
set +x
|
||||
|
||||
# Unit tests produce empty HTML reports, so guide the user to check the TXT file.
|
||||
if echo "${SKQP_BACKENDS}" | grep -qE "unitTest"
|
||||
then
|
||||
# Remove the empty HTML report to avoid confusion
|
||||
rm -f "${SKQP_RESULTS_DIR}"/unitTest/report.html
|
||||
|
||||
echo "See skqp unit test results at:"
|
||||
echo "https://$CI_PROJECT_ROOT_NAMESPACE.pages.freedesktop.org/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/${SKQP_RESULTS_DIR}/unitTest/unit_tests.txt"
|
||||
fi
|
||||
|
||||
for REPORT in "${SKQP_RESULTS_DIR}"/**/report.html
|
||||
do
|
||||
BACKEND_NAME=$(echo "${REPORT}" | sed 's@.*/\([^/]*\)/report.html@\1@')
|
||||
echo "See skqp ${BACKEND_NAME} render tests report at:"
|
||||
echo "https://$CI_PROJECT_ROOT_NAMESPACE.pages.freedesktop.org/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/${REPORT}"
|
||||
done
|
||||
|
||||
# If there is no report available, tell the user that something is wrong.
|
||||
if [ ! -f "${REPORT}" ]
|
||||
then
|
||||
echo "No skqp report available. Probably some fatal error has occured during the skqp execution."
|
||||
fi
|
||||
|
||||
exit $SKQP_EXITCODE
|
@@ -18,7 +18,6 @@
|
||||
- .gitlab-ci/**/*
|
||||
- include/**/*
|
||||
- meson.build
|
||||
- .gitattributes
|
||||
- src/*
|
||||
- src/compiler/**/*
|
||||
- src/drm-shim/**/*
|
||||
@@ -31,6 +30,10 @@
|
||||
- src/loader/**/*
|
||||
- src/mapi/**/*
|
||||
- src/mesa/*
|
||||
- src/mesa/drivers/*
|
||||
- src/mesa/drivers/common/**/*
|
||||
- src/mesa/drivers/dri/*
|
||||
- src/mesa/drivers/dri/common/**/*
|
||||
- src/mesa/main/**/*
|
||||
- src/mesa/math/**/*
|
||||
- src/mesa/program/**/*
|
||||
@@ -38,10 +41,11 @@
|
||||
- src/mesa/state_tracker/**/*
|
||||
- src/mesa/swrast/**/*
|
||||
- src/mesa/swrast_setup/**/*
|
||||
- src/mesa/tnl/**/*
|
||||
- src/mesa/tnl_dd/**/*
|
||||
- src/mesa/vbo/**/*
|
||||
- src/mesa/x86/**/*
|
||||
- src/mesa/x86-64/**/*
|
||||
- src/tool/**/*
|
||||
- src/util/**/*
|
||||
|
||||
.vulkan-rules:
|
||||
@@ -62,8 +66,7 @@
|
||||
- src/gallium/auxiliary/**/*
|
||||
- src/gallium/drivers/*
|
||||
- src/gallium/include/**/*
|
||||
- src/gallium/frontends/dri/*
|
||||
- src/gallium/frontends/glx/**/*
|
||||
- src/gallium/state_trackers/**/*
|
||||
- src/gallium/targets/**/*
|
||||
- src/gallium/tests/**/*
|
||||
- src/gallium/winsys/*
|
||||
@@ -78,7 +81,7 @@
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes: &softpipe_file_list
|
||||
- changes:
|
||||
- src/gallium/drivers/softpipe/**/*
|
||||
- src/gallium/winsys/sw/**/*
|
||||
when: on_success
|
||||
@@ -115,9 +118,6 @@
|
||||
- src/gallium/frontends/lavapipe/**/*
|
||||
- src/gallium/winsys/sw/**/*
|
||||
when: on_success
|
||||
- changes:
|
||||
*vulkan_file_list
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
.llvmpipe-cl-rules:
|
||||
@@ -128,7 +128,6 @@
|
||||
- .gitlab-ci.yml
|
||||
- .gitlab-ci/**/*
|
||||
- meson.build
|
||||
- .gitattributes
|
||||
- include/**/*
|
||||
- src/compiler/**/*
|
||||
- src/include/**/*
|
||||
@@ -148,8 +147,6 @@
|
||||
.freedreno-rules:
|
||||
stage: freedreno
|
||||
rules:
|
||||
- if: '$FD_FARM == "offline"'
|
||||
when: never
|
||||
- *ignore_scheduled_pipelines
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
@@ -158,9 +155,6 @@
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*vulkan_file_list
|
||||
when: on_success
|
||||
- changes: &freedreno_file_list
|
||||
# Note: when https://gitlab.com/gitlab-org/gitlab/-/issues/198688
|
||||
# is supported, we can change the src/freedreno/ rule to explicitly
|
||||
# exclude tools
|
||||
@@ -170,58 +164,7 @@
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
# Unfortunately YAML doesn't let us concatenate arrays, so we have to do the
|
||||
# rules duplication manually
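# For illustration (standalone YAML, not part of this file): an anchor can only
# be spliced as a whole node, so reusing two lists yields nesting rather than
# the flat list that gitlab-ci rules need:
#   base: &base [a, b]
#   combined: [*base, c]   # parses as [[a, b], c], not [a, b, c]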
|
||||
.freedreno-rules-restricted:
|
||||
stage: freedreno
|
||||
rules:
|
||||
- if: '$FD_FARM == "offline"'
|
||||
when: never
|
||||
# If the triggerer has access to the restricted traces and if it is pre-merge
|
||||
- if: '($GITLAB_USER_LOGIN !~ "/^(robclark|anholt|flto|cwabbott0|Danil|tomeu)$/") &&
|
||||
($GITLAB_USER_LOGIN != "marge-bot" || $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME != $CI_COMMIT_REF_NAME)'
|
||||
when: never
|
||||
- *ignore_scheduled_pipelines
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*vulkan_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*freedreno_file_list
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
# Unfortunately YAML doesn't let us concatenate arrays, so we have to do the
|
||||
# rules duplication manually
|
||||
.freedreno-rules-performance:
|
||||
stage: freedreno
|
||||
rules:
|
||||
- if: '$FD_FARM == "offline"'
|
||||
when: never
|
||||
- *ignore_scheduled_pipelines
|
||||
# Run only on pre-merge pipelines from Marge
|
||||
- if: '$GITLAB_USER_LOGIN != "marge-bot" || $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME != $CI_COMMIT_REF_NAME'
|
||||
when: never
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: manual
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: manual
|
||||
- changes:
|
||||
*vulkan_file_list
|
||||
when: manual
|
||||
- changes:
|
||||
*freedreno_file_list
|
||||
when: manual
|
||||
- when: never
|
||||
|
||||
.panfrost-midgard-rules:
|
||||
.panfrost-rules:
|
||||
stage: arm
|
||||
rules:
|
||||
- *ignore_scheduled_pipelines
|
||||
@@ -231,46 +174,10 @@
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes: &panfrost_gallium_file_list
|
||||
- changes:
|
||||
- src/gallium/drivers/panfrost/**/*
|
||||
- src/gallium/winsys/panfrost/**/*
|
||||
when: on_success
|
||||
- changes: &panfrost_common_file_list
|
||||
- src/panfrost/ci/*
|
||||
- src/panfrost/include/*
|
||||
- src/panfrost/lib/*
|
||||
- src/panfrost/shared/*
|
||||
- src/panfrost/util/*
|
||||
when: on_success
|
||||
- changes:
|
||||
- src/panfrost/midgard/**/*
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
.panfrost-bifrost-rules:
|
||||
stage: arm
|
||||
rules:
|
||||
- *ignore_scheduled_pipelines
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*panfrost_common_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*panfrost_gallium_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*vulkan_file_list
|
||||
when: on_success
|
||||
- changes: &panfrost_vulkan_file_list
|
||||
- src/panfrost/vulkan/*
|
||||
when: on_success
|
||||
- changes:
|
||||
- src/panfrost/bifrost/**/*
|
||||
- src/panfrost/**/*
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
@@ -317,9 +224,6 @@
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*vulkan_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
- src/broadcom/**/*
|
||||
when: on_success
|
||||
@@ -343,13 +247,12 @@
|
||||
- when: never
|
||||
|
||||
.radv-rules:
|
||||
stage: amd
|
||||
rules:
|
||||
- *ignore_scheduled_pipelines
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: on_success
|
||||
- changes: &radv_file_list
|
||||
- changes:
|
||||
- src/amd/**/*
|
||||
- src/vulkan/**/*
|
||||
when: on_success
|
||||
@@ -368,7 +271,7 @@
|
||||
- changes:
|
||||
*llvmpipe_file_list
|
||||
when: on_success
|
||||
- changes: &virgl_file_list
|
||||
- changes:
|
||||
- src/gallium/drivers/virgl/**/*
|
||||
- src/gallium/winsys/virgl/**/*
|
||||
when: on_success
|
||||
@@ -384,7 +287,7 @@
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes: &radeonsi_file_list
|
||||
- changes:
|
||||
- src/gallium/drivers/radeonsi/**/*
|
||||
- src/gallium/winsys/amdgpu/**/*
|
||||
- src/amd/*
|
||||
@@ -395,59 +298,6 @@
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
.radeonsi-vaapi-rules:
|
||||
stage: amd
|
||||
rules:
|
||||
- *ignore_scheduled_pipelines
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*radeonsi_file_list
|
||||
when: on_success
|
||||
- changes: &radeon_vcn_file_list
|
||||
- src/gallium/frontends/va/**/*
|
||||
- src/gallium/drivers/radeon/**/*
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
.i915g-rules:
|
||||
stage: intel
|
||||
rules:
|
||||
- *ignore_scheduled_pipelines
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
- src/gallium/drivers/i915/**/*
|
||||
- src/gallium/winsys/i915/**/*
|
||||
- src/intel/**/*
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
.crocus-rules:
|
||||
stage: intel
|
||||
rules:
|
||||
- *ignore_scheduled_pipelines
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
- src/gallium/drivers/crocus/**/*
|
||||
- src/gallium/winsys/crocus/**/*
|
||||
- src/intel/**/*
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
.iris-rules:
|
||||
stage: intel
|
||||
rules:
|
||||
@@ -458,7 +308,7 @@
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes: &iris_file_list
|
||||
- changes:
|
||||
- src/gallium/drivers/iris/**/*
|
||||
- src/gallium/winsys/iris/**/*
|
||||
- src/intel/**/*
|
||||
@@ -467,36 +317,47 @@
|
||||
|
||||
# Unfortunately YAML doesn't let us concatenate arrays, so we have to do the
|
||||
# rules duplication manually
|
||||
.iris-rules-performance:
|
||||
stage: intel
|
||||
.windows-build-rules:
|
||||
rules:
|
||||
- *ignore_scheduled_pipelines
|
||||
# Run only on pre-merge pipelines from Marge
|
||||
- if: '$GITLAB_USER_LOGIN != "marge-bot" || $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME != $CI_COMMIT_REF_NAME'
|
||||
when: never
|
||||
- changes:
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: manual
|
||||
when: on_success
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: manual
|
||||
when: on_success
|
||||
- changes:
|
||||
*iris_file_list
|
||||
when: manual
|
||||
- when: never
|
||||
|
||||
.anv-rules:
|
||||
stage: intel
|
||||
rules:
|
||||
- *ignore_scheduled_pipelines
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
*lavapipe_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*vulkan_file_list
|
||||
when: on_success
|
||||
- changes: &d3d12_file_list
|
||||
- src/gallium/drivers/d3d12/**/*
|
||||
- src/microsoft/**/*
|
||||
- src/gallium/frontends/wgl/*
|
||||
- src/gallium/winsys/d3d12/wgl/*
|
||||
- src/gallium/targets/libgl-gdi/*
|
||||
- src/gallium/targets/libgl-d3d12/*
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
.windows-test-rules:
|
||||
rules:
|
||||
- *ignore_scheduled_pipelines
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
- src/intel/**/*
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*lavapipe_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*vulkan_file_list
|
||||
when: on_success
|
||||
- changes: *d3d12_file_list
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
@@ -513,142 +374,7 @@
|
||||
- changes:
|
||||
*lavapipe_file_list
|
||||
when: on_success
|
||||
- changes: &zink_file_list
|
||||
- changes:
|
||||
- src/gallium/drivers/zink/**/*
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
# Unfortunately YAML doesn't let us concatenate arrays, so we have to do the
|
||||
# rules duplication manually
|
||||
.windows-build-rules:
|
||||
rules:
|
||||
- *ignore_scheduled_pipelines
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*softpipe_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*lavapipe_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*vulkan_file_list
|
||||
when: on_success
|
||||
- changes: &d3d12_file_list
|
||||
- src/gallium/drivers/d3d12/**/*
|
||||
- src/microsoft/**/*
|
||||
- src/gallium/frontends/wgl/*
|
||||
- src/gallium/winsys/d3d12/wgl/*
|
||||
- src/gallium/targets/libgl-gdi/*
|
||||
- src/gallium/targets/libgl-d3d12/*
|
||||
when: on_success
|
||||
- changes:
|
||||
*zink_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*radv_file_list
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
.windows-test-rules:
|
||||
rules:
|
||||
- *ignore_scheduled_pipelines
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*lavapipe_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*vulkan_file_list
|
||||
when: on_success
|
||||
- changes: *d3d12_file_list
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
.etnaviv-rules:
|
||||
stage: etnaviv
|
||||
rules:
|
||||
- *ignore_scheduled_pipelines
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
- src/etnaviv/**/*
|
||||
- src/gallium/drivers/etnaviv/**/*
|
||||
- src/gallium/winsys/etnaviv/**/*
|
||||
- src/gallium/auxiliary/renderonly/**/*
|
||||
- src/gallium/winsys/kmsro/**/*
|
||||
- src/gallium/winsys/etnaviv/**/*
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
# Rules for unusual architectures that only build a subset of drivers
|
||||
.ppc64el-rules:
|
||||
rules:
|
||||
- *ignore_scheduled_pipelines
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*softpipe_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*llvmpipe_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*lavapipe_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*radv_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*radeonsi_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*zink_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*virgl_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
- src/gallium/drivers/nouveau/**/*
|
||||
- src/gallium/winsys/nouveau/**/*
|
||||
when: on_success
|
||||
- when: never
|
||||
|
||||
.s390x-rules:
|
||||
rules:
|
||||
- *ignore_scheduled_pipelines
|
||||
- changes:
|
||||
*mesa_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*gallium_core_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*softpipe_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*llvmpipe_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*lavapipe_file_list
|
||||
when: on_success
|
||||
- changes:
|
||||
*zink_file_list
|
||||
when: on_success
|
||||
- when: never
|
||||
|
@@ -1,93 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -ex
|
||||
|
||||
if [ "x$VK_DRIVER" = "x" ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
INSTALL=$(realpath -s "$PWD"/install)
|
||||
|
||||
RESULTS=$(realpath -s "$PWD"/results)
|
||||
|
||||
# Set up the driver environment.
|
||||
# Modifying LD_LIBRARY_PATH directly here may cause problems when
|
||||
# using a command wrapper. Hence, we will just set it when running the
|
||||
# command.
|
||||
export __LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$INSTALL/lib/"
|
||||
|
||||
|
||||
# Sanity check to ensure that our environment is sufficient to make our tests
|
||||
# run against the Mesa built by CI, rather than any installed distro version.
|
||||
MESA_VERSION=$(cat "$INSTALL/VERSION" | sed 's/\./\\./g')
|
||||
|
||||
# Set the Vulkan driver to use.
|
||||
export VK_ICD_FILENAMES="$INSTALL/share/vulkan/icd.d/${VK_DRIVER}_icd.x86_64.json"
|
||||
|
||||
# Set environment for Wine.
|
||||
export WINEDEBUG="-all"
|
||||
export WINEPREFIX="/vkd3d-proton-wine64"
|
||||
export WINEESYNC=1
|
||||
|
||||
print_red() {
|
||||
RED='\033[0;31m'
|
||||
NC='\033[0m' # No Color
|
||||
printf "${RED}"
|
||||
"$@"
|
||||
printf "${NC}"
|
||||
}
|
||||
|
||||
# wrapper to suppress +x to avoid spamming the log
|
||||
quiet() {
|
||||
set +x
|
||||
"$@"
|
||||
set -x
|
||||
}
|
||||
|
||||
SANITY_MESA_VERSION_CMD="vulkaninfo | tee /tmp/version.txt | grep \"Mesa $MESA_VERSION\(\s\|$\)\""
|
||||
|
||||
HANG_DETECTION_CMD="/parallel-deqp-runner/build/bin/hang-detection"
|
||||
|
||||
RUN_CMD="export LD_LIBRARY_PATH=$__LD_LIBRARY_PATH; $SANITY_MESA_VERSION_CMD"
|
||||
|
||||
set +e
|
||||
eval $RUN_CMD
|
||||
|
||||
if [ $? -ne 0 ]; then
|
||||
printf "%s\n" "Found $(cat /tmp/version.txt), expected $MESA_VERSION"
|
||||
fi
|
||||
set -e
|
||||
|
||||
if [ -d "$RESULTS" ]; then
|
||||
cd "$RESULTS" && rm -rf ..?* .[!.]* * && cd -
|
||||
else
|
||||
mkdir "$RESULTS"
|
||||
fi
|
||||
|
||||
VKD3D_PROTON_TESTSUITE_CMD="wine /vkd3d-proton-tests/x64/bin/d3d12.exe >$RESULTS/vkd3d-proton.log 2>&1"
|
||||
|
||||
quiet printf "%s\n" "Running vkd3d-proton testsuite..."
|
||||
RUN_CMD="export LD_LIBRARY_PATH=$__LD_LIBRARY_PATH; $HANG_DETECTION_CMD $VKD3D_PROTON_TESTSUITE_CMD"
|
||||
|
||||
set +e
|
||||
eval $RUN_CMD
|
||||
|
||||
VKD3D_PROTON_RESULTS="vkd3d-proton-${VKD3D_PROTON_RESULTS:-results}"
|
||||
RESULTSFILE="$RESULTS/$VKD3D_PROTON_RESULTS.txt"
|
||||
mkdir -p .gitlab-ci/vkd3d-proton
|
||||
grep "Test failed" "$RESULTS"/vkd3d-proton.log > "$RESULTSFILE"
|
||||
|
||||
if [ -f "$INSTALL/$VKD3D_PROTON_RESULTS.txt" ]; then
|
||||
cp "$INSTALL/$VKD3D_PROTON_RESULTS.txt" \
|
||||
".gitlab-ci/vkd3d-proton/$VKD3D_PROTON_RESULTS.txt.baseline"
|
||||
else
|
||||
touch ".gitlab-ci/vkd3d-proton/$VKD3D_PROTON_RESULTS.txt.baseline"
|
||||
fi
|
||||
|
||||
if diff -q ".gitlab-ci/vkd3d-proton/$VKD3D_PROTON_RESULTS.txt.baseline" "$RESULTSFILE"; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
quiet print_red printf "%s\n" "Changes found, see vkd3d-proton.log!"
|
||||
quiet diff --color=always -u ".gitlab-ci/vkd3d-proton/$VKD3D_PROTON_RESULTS.txt.baseline" "$RESULTSFILE"
|
||||
exit 1
|
13
.gitlab-ci/windows/Dockerfile
Normal file
@@ -0,0 +1,13 @@
|
||||
# escape=`
|
||||
|
||||
FROM mcr.microsoft.com/windows:1809
|
||||
|
||||
# Make sure any failure in PowerShell scripts is fatal
|
||||
SHELL ["powershell", "-ExecutionPolicy", "RemoteSigned", "-Command", "$ErrorActionPreference = 'Stop';"]
|
||||
ENV ErrorActionPreference='Stop'
|
||||
|
||||
COPY mesa_deps_vs2019.ps1 C:\
|
||||
RUN C:\mesa_deps_vs2019.ps1
|
||||
|
||||
COPY mesa_deps.ps1 C:\
|
||||
RUN C:\mesa_deps.ps1
|
@@ -1,13 +0,0 @@
|
||||
# escape=`
|
||||
|
||||
FROM mcr.microsoft.com/windows:1809
|
||||
|
||||
# Make sure any failure in PowerShell scripts is fatal
|
||||
SHELL ["powershell", "-ExecutionPolicy", "RemoteSigned", "-Command", "$ErrorActionPreference = 'Stop';"]
|
||||
ENV ErrorActionPreference='Stop'
|
||||
|
||||
COPY mesa_deps_vs2019.ps1 C:\
|
||||
RUN C:\mesa_deps_vs2019.ps1
|
||||
|
||||
COPY mesa_deps_build.ps1 C:\
|
||||
RUN C:\mesa_deps_build.ps1
|
@@ -1,7 +0,0 @@
|
||||
# escape=`
|
||||
|
||||
ARG base_image
|
||||
FROM ${base_image}
|
||||
|
||||
COPY mesa_deps_test.ps1 C:\
|
||||
RUN C:\mesa_deps_test.ps1
|
@@ -2,14 +2,12 @@
|
||||
Write-Host "Refreshing Windows TLS CA cache"
|
||||
(New-Object System.Net.WebClient).DownloadString("https://github.com") >$null
|
||||
|
||||
$env:PYTHONUTF8=1
|
||||
|
||||
Get-Date
|
||||
Write-Host "Compiling Mesa"
|
||||
$builddir = New-Item -ItemType Directory -Name "_build"
|
||||
$installdir = New-Item -ItemType Directory -Name "_install"
|
||||
Push-Location $builddir.FullName
|
||||
cmd.exe /C "C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && meson --default-library=shared -Dzlib:default_library=static --buildtype=release -Db_ndebug=false -Dc_std=c17 -Dcpp_std=vc++latest -Db_vscrt=mt --cmake-prefix-path=`"C:\llvm-10`" --pkg-config-path=`"C:\llvm-10\lib\pkgconfig;C:\llvm-10\share\pkgconfig;C:\spirv-tools\lib\pkgconfig`" --prefix=`"$installdir`" -Dllvm=enabled -Dshared-llvm=disabled -Dvulkan-drivers=swrast,amd -Dgallium-drivers=swrast,d3d12,zink -Dshared-glapi=enabled -Dgles2=enabled -Dmicrosoft-clc=enabled -Dstatic-libclc=all -Dspirv-to-dxil=true -Dbuild-tests=true -Dwerror=true -Dwarning_level=2 -Dzlib:warning_level=1 -Dlibelf:warning_level=1 && ninja -j32 install && meson test --num-processes 32"
|
||||
cmd.exe /C "C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && meson --default-library=shared -Dzlib:default_library=static --buildtype=release -Db_ndebug=false -Db_vscrt=mt --cmake-prefix-path=`"C:\llvm-10`" --pkg-config-path=`"C:\llvm-10\lib\pkgconfig;C:\llvm-10\share\pkgconfig;C:\spirv-tools\lib\pkgconfig`" --prefix=`"$installdir`" -Dllvm=enabled -Dshared-llvm=disabled -Dvulkan-drivers=swrast -Dgallium-drivers=swrast,d3d12 -Dmicrosoft-clc=enabled -Dstatic-libclc=all -Dbuild-tests=true -Dwerror=true && ninja -j32 install && meson test --num-processes 32"
|
||||
$buildstatus = $?
|
||||
Pop-Location
|
||||
|
||||
@@ -21,8 +19,4 @@ if (!$buildstatus) {
|
||||
}
|
||||
|
||||
Copy-Item ".\.gitlab-ci\windows\piglit_run.ps1" -Destination $installdir
|
||||
|
||||
Copy-Item ".\.gitlab-ci\windows\spirv2dxil_check.ps1" -Destination $installdir
|
||||
Copy-Item ".\.gitlab-ci\windows\spirv2dxil_run.ps1" -Destination $installdir
|
||||
|
||||
Get-ChildItem -Recurse -Filter "ci" | Get-ChildItem -Filter "*.txt" | Copy-Item -Destination $installdir
|
||||
Copy-Item ".\.gitlab-ci\windows\quick_gl.txt" -Destination $installdir
|
||||
|
@@ -6,8 +6,6 @@ $registry_username = $args[1]
|
||||
$registry_password = $args[2]
|
||||
$registry_user_image = $args[3]
|
||||
$registry_central_image = $args[4]
|
||||
$build_dockerfile = $args[5]
|
||||
$registry_base_image = $args[6]
|
||||
|
||||
Set-Location -Path ".\.gitlab-ci\windows"
|
||||
|
||||
@@ -41,7 +39,7 @@ if ($?) {
|
||||
}
|
||||
|
||||
Write-Host "No image found at $registry_user_image or $registry_central_image; rebuilding"
|
||||
docker --config "windows-docker.conf" build --no-cache -t "$registry_user_image" -f "$build_dockerfile" --build-arg base_image="$registry_base_image" .
|
||||
docker --config "windows-docker.conf" build --no-cache -t "$registry_user_image" .
|
||||
if (!$?) {
|
||||
Write-Host "Container build failed"
|
||||
docker --config "windows-docker.conf" logout "$registry_uri"
|
||||
|
194
.gitlab-ci/windows/mesa_deps.ps1
Normal file
@@ -0,0 +1,194 @@
|
||||
# Download new TLS certs from Windows Update
|
||||
Get-Date
|
||||
Write-Host "Updating TLS certificate store"
|
||||
$certdir = (New-Item -ItemType Directory -Name "_tlscerts")
|
||||
certutil -syncwithWU "$certdir"
|
||||
Foreach ($file in (Get-ChildItem -Path "$certdir\*" -Include "*.crt")) {
|
||||
Import-Certificate -FilePath $file -CertStoreLocation Cert:\LocalMachine\Root
|
||||
}
|
||||
Remove-Item -Recurse -Path $certdir
|
||||
|
||||
|
||||
Get-Date
|
||||
Write-Host "Installing Chocolatey"
|
||||
Invoke-Expression ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1'))
|
||||
Import-Module "$env:ProgramData\chocolatey\helpers\chocolateyProfile.psm1"
|
||||
Update-SessionEnvironment
|
||||
Write-Host "Installing Chocolatey packages"
|
||||
|
||||
# Chocolatey tries to download winflexbison from SourceForge, which is not super reliable, and has no retry
|
||||
# loop of its own - so we give it a helping hand here
|
||||
For ($i = 0; $i -lt 5; $i++) {
|
||||
choco install -y python3 --params="/InstallDir:C:\python3"
|
||||
$python_install = $?
|
||||
choco install --allow-empty-checksums -y cmake git git-lfs ninja pkgconfiglite winflexbison --installargs "ADD_CMAKE_TO_PATH=System"
|
||||
$other_install = $?
|
||||
$choco_installed = $other_install -and $python_install
|
||||
if ($choco_installed) {
|
||||
Break
|
||||
}
|
||||
}
|
||||
|
||||
if (!$choco_installed) {
|
||||
Write-Host "Couldn't install dependencies from Chocolatey"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
# Add Chocolatey's native install path
|
||||
Update-SessionEnvironment
|
||||
# Python and CMake add themselves to the system environment path, which doesn't get refreshed
|
||||
# until we start a new shell
|
||||
$env:PATH = "C:\python3;C:\python3\scripts;C:\Program Files\CMake\bin;$env:PATH"
|
||||
|
||||
Start-Process -NoNewWindow -Wait git -ArgumentList 'config --global core.autocrlf false'
|
||||
|
||||
Get-Date
|
||||
Write-Host "Installing Meson, Mako and numpy"
|
||||
pip3 install meson mako numpy
|
||||
if (!$?) {
|
||||
Write-Host "Failed to install dependencies from pip"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
# we want more secure TLS 1.2 for most things, but it breaks SourceForge
|
||||
# downloads so must be done after Chocolatey use
|
||||
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 -bor [Net.SecurityProtocolType]::Tls13;
|
||||
|
||||
Get-Date
|
||||
Write-Host "Cloning LLVM master"
|
||||
git clone -b release/12.x --depth=1 https://github.com/llvm/llvm-project llvm-project
|
||||
if (!$?) {
|
||||
Write-Host "Failed to clone LLVM repository"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
# ideally we want to use a tag here instead of a sha,
|
||||
# but as of today, SPIRV-LLVM-Translator doesn't have
|
||||
# a tag matching LLVM 12.0.0
|
||||
Get-Date
|
||||
Write-Host "Cloning SPIRV-LLVM-Translator"
|
||||
git clone https://github.com/KhronosGroup/SPIRV-LLVM-Translator llvm-project/llvm/projects/SPIRV-LLVM-Translator
|
||||
if (!$?) {
|
||||
Write-Host "Failed to clone SPIRV-LLVM-Translator repository"
|
||||
Exit 1
|
||||
}
|
||||
Push-Location llvm-project/llvm/projects/SPIRV-LLVM-Translator
|
||||
git checkout 5b641633b3bcc3251a52260eee11db13a79d7258
|
||||
Pop-Location
|
||||
|
||||
Get-Date
|
||||
# slightly convoluted syntax but avoids the CWD being under the PS filesystem meta-path
|
||||
$llvm_build = New-Item -ItemType Directory -Path ".\llvm-project" -Name "build"
|
||||
Push-Location -Path $llvm_build.FullName
|
||||
Write-Host "Compiling LLVM and Clang"
|
||||
cmd.exe /C 'C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && cmake ../llvm -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_USE_CRT_RELEASE=MT -DCMAKE_INSTALL_PREFIX="C:\llvm-10" -DLLVM_ENABLE_PROJECTS="clang;lld" -DLLVM_TARGETS_TO_BUILD=X86 -DLLVM_OPTIMIZED_TABLEGEN=TRUE -DLLVM_ENABLE_ASSERTIONS=TRUE -DLLVM_INCLUDE_UTILS=OFF -DLLVM_INCLUDE_RUNTIMES=OFF -DLLVM_INCLUDE_TESTS=OFF -DLLVM_INCLUDE_EXAMPLES=OFF -DLLVM_INCLUDE_GO_TESTS=OFF -DLLVM_INCLUDE_BENCHMARKS=OFF -DLLVM_BUILD_LLVM_C_DYLIB=OFF -DLLVM_ENABLE_DIA_SDK=OFF -DCLANG_BUILD_TOOLS=ON -DLLVM_SPIRV_INCLUDE_TESTS=OFF && ninja -j32 install'
|
||||
$buildstatus = $?
Pop-Location
if (!$buildstatus) {
  Write-Host "Failed to compile LLVM"
  Exit 1
}

Get-Date
$libclc_build = New-Item -ItemType Directory -Path ".\llvm-project" -Name "build-libclc"
Push-Location -Path $libclc_build.FullName
Write-Host "Compiling libclc"
# libclc can only be built with Ninja, because CMake's VS backend doesn't know how to compile new language types
cmd.exe /C 'C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && cmake ../libclc -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_COMPILER="/llvm-10/bin/clang-cl.exe" -DCMAKE_CXX_FLAGS="-m64" -DCMAKE_POLICY_DEFAULT_CMP0091=NEW -DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded -DCMAKE_INSTALL_PREFIX="C:\llvm-10" -DLIBCLC_TARGETS_TO_BUILD="spirv-mesa3d-;spirv64-mesa3d-" && ninja -j32 install'
$buildstatus = $?
Pop-Location
Remove-Item -Recurse -Path $libclc_build
if (!$buildstatus) {
  Write-Host "Failed to compile libclc"
  Exit 1
}
Remove-Item -Recurse -Path $llvm_build

Get-Date
Write-Host "Cloning SPIRV-Tools"
git clone https://github.com/KhronosGroup/SPIRV-Tools
if (!$?) {
  Write-Host "Failed to clone SPIRV-Tools repository"
  Exit 1
}
git clone https://github.com/KhronosGroup/SPIRV-Headers SPIRV-Tools/external/SPIRV-Headers
if (!$?) {
  Write-Host "Failed to clone SPIRV-Headers repository"
  Exit 1
}
Write-Host "Building SPIRV-Tools"
$spv_build = New-Item -ItemType Directory -Path ".\SPIRV-Tools" -Name "build"
Push-Location -Path $spv_build.FullName
# SPIRV-Tools doesn't use multi-threaded MSVCRT, but we need it to
cmd.exe /C 'C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && cmake .. -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_POLICY_DEFAULT_CMP0091=NEW -DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded -DCMAKE_INSTALL_PREFIX="C:\spirv-tools" && ninja -j32 install'
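# CMP0091=NEW enables CMake's MSVC runtime selection, and MultiThreaded picks
# the static /MT CRT so these libraries match the LLVM build above.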
$buildstatus = $?
Pop-Location
Remove-Item -Recurse -Path $spv_build
if (!$buildstatus) {
  Write-Host "Failed to compile SPIRV-Tools"
  Exit 1
}

Get-Date
Write-Host "Downloading Freeglut"

$freeglut_zip = 'freeglut-MSVC.zip'
$freeglut_url = "https://www.transmissionzero.co.uk/files/software/development/GLUT/$freeglut_zip"

For ($i = 0; $i -lt 5; $i++) {
  Invoke-WebRequest -Uri $freeglut_url -OutFile $freeglut_zip
  $freeglut_downloaded = $?
  if ($freeglut_downloaded) {
    Break
  }
}

if (!$freeglut_downloaded) {
  Write-Host "Failed to download Freeglut"
  Exit 1
}

Get-Date
Write-Host "Installing Freeglut"
Expand-Archive $freeglut_zip -DestinationPath C:\
if (!$?) {
  Write-Host "Failed to install Freeglut"
  Exit 1
}

Get-Date
Write-Host "Downloading glext.h"
New-Item -ItemType Directory -Path ".\glext" -Name "GL"
$ProgressPreference = "SilentlyContinue"
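# Suppressing the progress bar avoids Invoke-WebRequest's very slow progress
# rendering in a non-interactive CI session.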
Invoke-WebRequest -Uri 'https://www.khronos.org/registry/OpenGL/api/GL/glext.h' -OutFile '.\glext\GL\glext.h' | Out-Null

Get-Date
Write-Host "Cloning Piglit"
git clone --no-progress --single-branch --no-checkout https://gitlab.freedesktop.org/mesa/piglit.git 'C:\src\piglit'
if (!$?) {
  Write-Host "Failed to clone Piglit repository"
  Exit 1
}
Push-Location -Path C:\src\piglit
git checkout b0bbeb876a506e0ee689dd7e17cee374c8284058
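# The clone above used --no-checkout, so the working tree is only populated
# once this pinned commit is checked out.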
Pop-Location

Get-Date
$piglit_build = New-Item -ItemType Directory -Path "C:\src\piglit" -Name "build"
Push-Location -Path $piglit_build.FullName
Write-Host "Compiling Piglit"
cmd.exe /C 'C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && cmake .. -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX="C:\Piglit" -DGLUT_INCLUDE_DIR=C:\freeglut\include -DGLUT_glut_LIBRARY_RELEASE=C:\freeglut\lib\x64\freeglut.lib -DGLEXT_INCLUDE_DIR=.\glext && ninja -j32'
$buildstatus = $?
ninja -j32 install | Out-Null
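# The install step's output is discarded, but its exit status is still checked
# together with the build status below.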
$installstatus = $?
Pop-Location
Remove-Item -Recurse -Path $piglit_build
if (!$buildstatus -Or !$installstatus) {
  Write-Host "Failed to compile or install Piglit"
  Exit 1
}

Copy-Item -Path C:\freeglut\bin\x64\freeglut.dll -Destination C:\Piglit\lib\piglit\bin\freeglut.dll

Get-Date
Write-Host "Complete"
@@ -1,146 +0,0 @@
# Download new TLS certs from Windows Update
Get-Date
Write-Host "Updating TLS certificate store"
$certdir = (New-Item -ItemType Directory -Name "_tlscerts")
certutil -syncwithWU "$certdir"
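# certutil -syncwithWU pulls the current Microsoft root certificates into
# $certdir; importing them below keeps later HTTPS downloads from failing on a
# stale certificate store in the container image.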
Foreach ($file in (Get-ChildItem -Path "$certdir\*" -Include "*.crt")) {
  Import-Certificate -FilePath $file -CertStoreLocation Cert:\LocalMachine\Root
}
Remove-Item -Recurse -Path $certdir


Get-Date
Write-Host "Installing Chocolatey"
Invoke-Expression ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1'))
Import-Module "$env:ProgramData\chocolatey\helpers\chocolateyProfile.psm1"
Update-SessionEnvironment
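# Update-SessionEnvironment comes from the Chocolatey profile module imported
# above; it refreshes this session's environment variables so choco and the
# tools it installs are found without opening a new shell.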
Write-Host "Installing Chocolatey packages"

# Chocolatey tries to download winflexbison from SourceForge, which is not super reliable, and has no retry
# loop of its own - so we give it a helping hand here
For ($i = 0; $i -lt 5; $i++) {
  choco install -y python3 --params="/InstallDir:C:\python3"
  $python_install = $?
  choco install --allow-empty-checksums -y cmake git git-lfs ninja pkgconfiglite winflexbison vulkan-sdk --installargs "ADD_CMAKE_TO_PATH=System"
  $other_install = $?
  $choco_installed = $other_install -and $python_install
  if ($choco_installed) {
    Break
  }
}

if (!$choco_installed) {
  Write-Host "Couldn't install dependencies from Chocolatey"
  Exit 1
}

# Add Chocolatey's native install path
Update-SessionEnvironment
# Python and CMake add themselves to the system environment path, which doesn't get refreshed
# until we start a new shell
$env:PATH = "C:\python3;C:\python3\scripts;C:\Program Files\CMake\bin;$env:PATH"

Start-Process -NoNewWindow -Wait git -ArgumentList 'config --global core.autocrlf false'
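# The global autocrlf=false setting above keeps later clones in this image
# from converting line endings on checkout.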

Get-Date
Write-Host "Installing Meson, Mako and numpy"
pip3 install meson mako numpy
if (!$?) {
  Write-Host "Failed to install dependencies from pip"
  Exit 1
}

# we want more secure TLS 1.2 for most things, but it breaks SourceForge
# downloads so must be done after Chocolatey use
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 -bor [Net.SecurityProtocolType]::Tls13;

Get-Date
Write-Host "Cloning LLVM release/12.x"
git clone -b release/12.x --depth=1 https://github.com/llvm/llvm-project llvm-project
if (!$?) {
  Write-Host "Failed to clone LLVM repository"
  Exit 1
}

# ideally we want to use a tag here instead of a sha,
# but as of today, SPIRV-LLVM-Translator doesn't have
# a tag matching LLVM 12.0.0
Get-Date
Write-Host "Cloning SPIRV-LLVM-Translator"
git clone https://github.com/KhronosGroup/SPIRV-LLVM-Translator llvm-project/llvm/projects/SPIRV-LLVM-Translator
if (!$?) {
  Write-Host "Failed to clone SPIRV-LLVM-Translator repository"
  Exit 1
}
Push-Location llvm-project/llvm/projects/SPIRV-LLVM-Translator
git checkout 5b641633b3bcc3251a52260eee11db13a79d7258
Pop-Location

Get-Date
# slightly convoluted syntax but avoids the CWD being under the PS filesystem meta-path
$llvm_build = New-Item -ItemType Directory -Path ".\llvm-project" -Name "build"
Push-Location -Path $llvm_build.FullName
Write-Host "Compiling LLVM and Clang"
cmd.exe /C 'C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && cmake ../llvm -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_USE_CRT_RELEASE=MT -DCMAKE_INSTALL_PREFIX="C:\llvm-10" -DLLVM_ENABLE_PROJECTS="clang;lld" -DLLVM_TARGETS_TO_BUILD=AMDGPU;X86 -DLLVM_OPTIMIZED_TABLEGEN=TRUE -DLLVM_ENABLE_ASSERTIONS=TRUE -DLLVM_INCLUDE_UTILS=OFF -DLLVM_INCLUDE_RUNTIMES=OFF -DLLVM_INCLUDE_TESTS=OFF -DLLVM_INCLUDE_EXAMPLES=OFF -DLLVM_INCLUDE_GO_TESTS=OFF -DLLVM_INCLUDE_BENCHMARKS=OFF -DLLVM_BUILD_LLVM_C_DYLIB=OFF -DLLVM_ENABLE_DIA_SDK=OFF -DCLANG_BUILD_TOOLS=ON -DLLVM_SPIRV_INCLUDE_TESTS=OFF && ninja -j32 install'
$buildstatus = $?
Pop-Location
if (!$buildstatus) {
  Write-Host "Failed to compile LLVM"
  Exit 1
}

Get-Date
$libclc_build = New-Item -ItemType Directory -Path ".\llvm-project" -Name "build-libclc"
Push-Location -Path $libclc_build.FullName
Write-Host "Compiling libclc"
# libclc can only be built with Ninja, because CMake's VS backend doesn't know how to compile new language types
cmd.exe /C 'C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && cmake ../libclc -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_FLAGS="-m64" -DCMAKE_POLICY_DEFAULT_CMP0091=NEW -DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded -DCMAKE_INSTALL_PREFIX="C:\llvm-10" -DLIBCLC_TARGETS_TO_BUILD="spirv-mesa3d-;spirv64-mesa3d-" && ninja -j32 install'
$buildstatus = $?
Pop-Location
Remove-Item -Recurse -Path $libclc_build
if (!$buildstatus) {
  Write-Host "Failed to compile libclc"
  Exit 1
}
Remove-Item -Recurse -Path $llvm_build

Get-Date
Write-Host "Cloning SPIRV-Tools"
git clone https://github.com/KhronosGroup/SPIRV-Tools
if (!$?) {
  Write-Host "Failed to clone SPIRV-Tools repository"
  Exit 1
}
git clone https://github.com/KhronosGroup/SPIRV-Headers SPIRV-Tools/external/SPIRV-Headers
if (!$?) {
  Write-Host "Failed to clone SPIRV-Headers repository"
  Exit 1
}
Write-Host "Building SPIRV-Tools"
$spv_build = New-Item -ItemType Directory -Path ".\SPIRV-Tools" -Name "build"
Push-Location -Path $spv_build.FullName
# SPIRV-Tools doesn't use multi-threaded MSVCRT, but we need it to
cmd.exe /C 'C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && cmake .. -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_POLICY_DEFAULT_CMP0091=NEW -DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded -DCMAKE_INSTALL_PREFIX="C:\spirv-tools" && ninja -j32 install'
$buildstatus = $?
Pop-Location
Remove-Item -Recurse -Path $spv_build
if (!$buildstatus) {
  Write-Host "Failed to compile SPIRV-Tools"
  Exit 1
}

# See https://gitlab.freedesktop.org/mesa/mesa/-/issues/3855
# Until that's resolved, we need the vulkan-runtime as a build dependency to be able to run any unit tests on GL
Get-Date
Write-Host "Downloading Vulkan-Runtime"
Invoke-WebRequest -Uri 'https://sdk.lunarg.com/sdk/download/latest/windows/vulkan-runtime.exe' -OutFile 'C:\vulkan-runtime.exe' | Out-Null
Write-Host "Installing Vulkan-Runtime"
Start-Process -NoNewWindow -Wait C:\vulkan-runtime.exe -ArgumentList '/S'
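# '/S' asks the runtime installer to run silently; -Wait blocks until it
# exits so the installer binary can be removed right afterwards.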
if (!$?) {
  Write-Host "Failed to install Vulkan-Runtime"
  Exit 1
}
Remove-Item C:\vulkan-runtime.exe -Force

Get-Date
Write-Host "Complete"
@@ -1,70 +0,0 @@
Get-Date
Write-Host "Downloading Freeglut"

$freeglut_zip = 'freeglut-MSVC.zip'
$freeglut_url = "https://www.transmissionzero.co.uk/files/software/development/GLUT/$freeglut_zip"

For ($i = 0; $i -lt 5; $i++) {
  Invoke-WebRequest -Uri $freeglut_url -OutFile $freeglut_zip
  $freeglut_downloaded = $?
  if ($freeglut_downloaded) {
    Break
  }
}

if (!$freeglut_downloaded) {
  Write-Host "Failed to download Freeglut"
  Exit 1
}

Get-Date
Write-Host "Installing Freeglut"
Expand-Archive $freeglut_zip -DestinationPath C:\
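# Extracting to C:\ is expected to produce C:\freeglut, which the Piglit CMake
# options below point at.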
if (!$?) {
  Write-Host "Failed to install Freeglut"
  Exit 1
}

Get-Date
Write-Host "Downloading glext.h"
New-Item -ItemType Directory -Path ".\glext" -Name "GL"
$ProgressPreference = "SilentlyContinue"
Invoke-WebRequest -Uri 'https://www.khronos.org/registry/OpenGL/api/GL/glext.h' -OutFile '.\glext\GL\glext.h' | Out-Null

Get-Date
Write-Host "Cloning Piglit"
git clone --no-progress --single-branch --no-checkout https://gitlab.freedesktop.org/mesa/piglit.git 'C:\src\piglit'
if (!$?) {
  Write-Host "Failed to clone Piglit repository"
  Exit 1
}
Push-Location -Path C:\src\piglit
git checkout f7f2a6c2275cae023a27b6cc81be3dda8c99492d
Pop-Location

Get-Date
$piglit_build = New-Item -ItemType Directory -Path "C:\src\piglit" -Name "build"
Push-Location -Path $piglit_build.FullName
Write-Host "Compiling Piglit"
cmd.exe /C 'C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && cmake .. -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX="C:\Piglit" -DGLUT_INCLUDE_DIR=C:\freeglut\include -DGLUT_glut_LIBRARY_RELEASE=C:\freeglut\lib\x64\freeglut.lib -DGLEXT_INCLUDE_DIR=.\glext && ninja -j32'
$buildstatus = $?
ninja -j32 install | Out-Null
$installstatus = $?
Pop-Location
Remove-Item -Recurse -Path $piglit_build
if (!$buildstatus -Or !$installstatus) {
  Write-Host "Failed to compile or install Piglit"
  Exit 1
}

Copy-Item -Path C:\freeglut\bin\x64\freeglut.dll -Destination C:\Piglit\lib\piglit\bin\freeglut.dll

Get-Date
Write-Host "Cloning spirv-samples"
git clone --no-progress --single-branch --no-checkout https://github.com/dneto0/spirv-samples.git C:\spirv-samples\
Push-Location -Path C:\spirv-samples\
git checkout 7ac0ad5a7fe0ec884faba1dc2916028d0268eeef
Pop-Location

Get-Date
Write-Host "Complete"
Some files were not shown because too many files have changed in this diff