Compare commits

409 Commits

mesa-23.3. ... mesa_7_0_3

	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 70d227ac62 | ||
|  | 7f2c4f96f5 | ||
|  | 667f0f60fc | ||
|  | 7592b8cc10 | ||
|  | f55b831859 | ||
|  | 217f7f7e5d | ||
|  | 1e83d70b6d | ||
|  | 767dfa5b9c | ||
|  | 7ff5b38126 | ||
|  | e209f5300d | ||
|  | 3c4b50c352 | ||
|  | 4e7c2fcf18 | ||
|  | 22534f94f5 | ||
|  | 1a6928fdbe | ||
|  | 325dbbac47 | ||
|  | 39ac6b0481 | ||
|  | 46cc4854e9 | ||
|  | f93882512e | ||
|  | 0dee2a4f6f | ||
|  | 3cebc35669 | ||
|  | e75a204fb9 | ||
|  | ac06a5c16a | ||
|  | ed758fee0c | ||
|  | a21c61ee8b | ||
|  | 24697da20e | ||
|  | 7120c0089d | ||
|  | 88a436a8f7 | ||
|  | 2fdb5a245d | ||
|  | 8441b53538 | ||
|  | d336df8b73 | ||
|  | 8aaf805b8a | ||
|  | 8161fd2785 | ||
|  | 2f23025dfe | ||
|  | 1867eac230 | ||
|  | 096e35d05f | ||
|  | 8998f52b97 | ||
|  | 0fd38dcc83 | ||
|  | ff63cf8068 | ||
|  | 4716670de9 | ||
|  | 33c5b38034 | ||
|  | 5737d6c565 | ||
|  | 08a7f56c6a | ||
|  | 7916f2b4aa | ||
|  | 193d303ac7 | ||
|  | e70609b7b8 | ||
|  | da476ff02d | ||
|  | 77e3b5d28b | ||
|  | bf97ca448c | ||
|  | c5f8ff8b32 | ||
|  | 50465766d1 | ||
|  | 7d8df58a63 | ||
|  | 762c074012 | ||
|  | 51f2ee3bfb | ||
|  | b5cd34aa21 | ||
|  | ae5c6dcd42 | ||
|  | 97196d0c8c | ||
|  | f6de56b88a | ||
|  | d64ea43b76 | ||
|  | 2deaf93d24 | ||
|  | a107ec8300 | ||
|  | 120a1f9508 | ||
|  | bb84007a57 | ||
|  | 86234e55a6 | ||
|  | 61972077cd | ||
|  | 5a7feb8ea2 | ||
|  | 4b4c131cd0 | ||
|  | 0fd23f01c6 | ||
|  | 557b0d9df7 | ||
|  | 888f4380cf | ||
|  | 3266c5e95a | ||
|  | 834decdaae | ||
|  | 18b2d83173 | ||
|  | 30c65c3c62 | ||
|  | 04fcc4cf1e | ||
|  | 48ae5cf09d | ||
|  | 45cdb6eb45 | ||
|  | b9f3f732aa | ||
|  | 17006ddd6b | ||
|  | b77a354df3 | ||
|  | 71d46beebf | ||
|  | 454e296eb1 | ||
|  | f9b696be2b | ||
|  | c923edbc71 | ||
|  | e20c1d987f | ||
|  | cd354eb10f | ||
|  | e9ac27ee23 | ||
|  | 3f9dc9f5b6 | ||
|  | ba709875ae | ||
|  | 80f8397b35 | ||
|  | b45fa27fa1 | ||
|  | b59480645d | ||
|  | 148cb36979 | ||
|  | c1eb78f7ff | ||
|  | 8aa0fd6b20 | ||
|  | a1b3a908f5 | ||
|  | f9e70d951a | ||
|  | 41ed6be1da | ||
|  | 27de28fc3e | ||
|  | dd2f01c229 | ||
|  | deb5c56c77 | ||
|  | 0107acded0 | ||
|  | 2ac5e08d1d | ||
|  | 1837b8c214 | ||
|  | 03d2bc1774 | ||
|  | 385bddbde0 | ||
|  | c135426267 | ||
|  | 4be0c98120 | ||
|  | e760aebd5a | ||
|  | 1f6e7d9c29 | ||
|  | c6d421af05 | ||
|  | 952df5e493 | ||
|  | 45acb8b058 | ||
|  | e8997c0183 | ||
|  | 169e62f563 | ||
|  | 1b43babfb1 | ||
|  | 43e902f774 | ||
|  | b0a800e249 | ||
|  | 73d5f232ad | ||
|  | f7209541e4 | ||
|  | 397a32dca7 | ||
|  | e0719d7122 | ||
|  | 3f18c0a9f2 | ||
|  | b14be61938 | ||
|  | 440d620308 | ||
|  | 8dd9df0f3e | ||
|  | 6b9534eaf5 | ||
|  | c91d374ad7 | ||
|  | 1cab4160bc | ||
|  | 718d2dfbbd | ||
|  | 0967e1270d | ||
|  | a08c02f1a8 | ||
|  | bdfd9afead | ||
|  | 1c91a590d6 | ||
|  | 65bd7968bf | ||
|  | 6ccd23b87b | ||
|  | 823409b7d0 | ||
|  | c85d31f4a0 | ||
|  | 24af5c44da | ||
|  | 75efacf8eb | ||
|  | 825e810247 | ||
|  | e20723cfc1 | ||
|  | 9f39a67c1d | ||
|  | 2044f3f791 | ||
|  | 30d4e8c422 | ||
|  | 92d552f6a2 | ||
|  | 5adfcbbc4f | ||
|  | 69969b3819 | ||
|  | edfee04fcb | ||
|  | 28c9930888 | ||
|  | e3a35a123e | ||
|  | 412168f2e8 | ||
|  | fc0fa0d636 | ||
|  | 8a40b670c3 | ||
|  | d9fa5cbfc9 | ||
|  | 1c21564292 | ||
|  | 6cc96bf156 | ||
|  | 29afd4bbc4 | ||
|  | 4e3db063ec | ||
|  | 6560744c38 | ||
|  | 572ad87881 | ||
|  | 2ab75d6cfa | ||
|  | 041a8eb5ec | ||
|  | c1c13bdcfa | ||
|  | 403edd34dd | ||
|  | f279e48416 | ||
|  | bf854d8d27 | ||
|  | f334121679 | ||
|  | b88e2be609 | ||
|  | 08229c8bb8 | ||
|  | c984017f71 | ||
|  | 00e7dd8a13 | ||
|  | d11b375b16 | ||
|  | d1afa8146f | ||
|  | 31d4ba8a93 | ||
|  | 1a7640958c | ||
|  | 2e2a5a450b | ||
|  | 574fd63ff0 | ||
|  | 1063d47526 | ||
|  | b59a892d3e | ||
|  | 6e5d5d77df | ||
|  | a6319d18cf | ||
|  | 5103e883fd | ||
|  | 98c4ea4b24 | ||
|  | 29b8cac7d7 | ||
|  | cdb02d43fe | ||
|  | 50cecddc4a | ||
|  | 177f6398e5 | ||
|  | 887bd6a46f | ||
|  | 9af5153410 | ||
|  | 0e2103689e | ||
|  | 2c496d8e46 | ||
|  | 8f0ba02e44 | ||
|  | 524c56e417 | ||
|  | 773f3a266c | ||
|  | 882ada0d22 | ||
|  | 6229005f93 | ||
|  | 6c63b35ef7 | ||
|  | fb85e50e09 | ||
|  | 5e5f908d0c | ||
|  | c09e2143c7 | ||
|  | 74ced1e67f | ||
|  | dd02c1609e | ||
|  | cdf4880a79 | ||
|  | 2ec8e8547d | ||
|  | b7c36d1d33 | ||
|  | 970d18a708 | ||
|  | 9456e7f0ff | ||
|  | a5db24adc2 | ||
|  | 17664e2620 | ||
|  | 43a0eb0253 | ||
|  | bcdaed2c0a | ||
|  | 78c6637db5 | ||
|  | 44214a98fd | ||
|  | 9cc6d2310c | ||
|  | 1a045954be | ||
|  | b3fc9a1585 | ||
|  | 2a86a449ca | ||
|  | 44f032db61 | ||
|  | 5a520729ee | ||
|  | 36fbe78c60 | ||
|  | 029bb17770 | ||
|  | 7958bee6be | ||
|  | 0b9bb21ce4 | ||
|  | cfcc5b794e | ||
|  | 925ff408a6 | ||
|  | f40ca444f5 | ||
|  | 638a5e0382 | ||
|  | 6d777bf055 | ||
|  | ae6d4e8bc0 | ||
|  | f2728724d3 | ||
|  | bc95b34cce | ||
|  | 9eb89616af | ||
|  | f7713a6d81 | ||
|  | 1cec9b727a | ||
|  | 18a4730f5b | ||
|  | 1342aefcdd | ||
|  | 1fd4b1e702 | ||
|  | 32dff44c28 | ||
|  | e7c2ae476a | ||
|  | a66301f83d | ||
|  | a5f7d56221 | ||
|  | 140080c1ec | ||
|  | ef43af6d43 | ||
|  | 949d0532f0 | ||
|  | 96efc76d74 | ||
|  | 6ca0d63692 | ||
|  | 95fc2485e2 | ||
|  | d6ce1e4834 | ||
|  | 2cc2b12fc7 | ||
|  | 2259f81cf7 | ||
|  | 791f12880f | ||
|  | 7aa7373a64 | ||
|  | 2f186a190f | ||
|  | 357893a873 | ||
|  | a17881da5f | ||
|  | 4872ee83a9 | ||
|  | 5ec222a832 | ||
|  | 40438b3440 | ||
|  | e0b80660c8 | ||
|  | a2016301f5 | ||
|  | 7ec6729fb6 | ||
|  | 837a2cf72e | ||
|  | 0adca14536 | ||
|  | 57b0f66c6d | ||
|  | e47e7a9571 | ||
|  | 68ca01560f | ||
|  | 64f1c91fcb | ||
|  | 7d33f94198 | ||
|  | d69a596193 | ||
|  | 8aa4ae638e | ||
|  | 3069f34841 | ||
|  | b3788a0e4f | ||
|  | 582bc029ab | ||
|  | e9e3099433 | ||
|  | 4fc561cc7b | ||
|  | ad3e3a0b99 | ||
|  | bfb8c849cb | ||
|  | 09dd25b5d1 | ||
|  | c48efb1dbe | ||
|  | 5408acb6b7 | ||
|  | 15481160f9 | ||
|  | 00ccff03a5 | ||
|  | 21594921b1 | ||
|  | 42c91eebc9 | ||
|  | 88273e08b4 | ||
|  | 08f7fb45ef | ||
|  | 75ebda3ce6 | ||
|  | 53ff15fcbf | ||
|  | 72adb70ee2 | ||
|  | 960d41fcfe | ||
|  | 0c4e35b070 | ||
|  | c53f1cb813 | ||
|  | f6696cc15e | ||
|  | fef9a41fe4 | ||
|  | e279a0a076 | ||
|  | 88c8aaed96 | ||
|  | 62cbbd7226 | ||
|  | f7cbe7fcee | ||
|  | d8e4ff9079 | ||
|  | 510ff27eda | ||
|  | 5abf055100 | ||
|  | 4bb5721f6b | ||
|  | c41ab70d3a | ||
|  | a05e403bf2 | ||
|  | 2f57e0f71d | ||
|  | 95a2eb9767 | ||
|  | 55175daed7 | ||
|  | ae16a51e95 | ||
|  | 4398d67546 | ||
|  | c93738687c | ||
|  | 799492e606 | ||
|  | f33ff071bb | ||
|  | 99e854743c | ||
|  | 6f79062d91 | ||
|  | 2f682c3995 | ||
|  | 067370e68f | ||
|  | 2aa439a6a4 | ||
|  | b477182dc3 | ||
|  | a8964ca89e | ||
|  | 4f0e92d07c | ||
|  | 577f4e8a5f | ||
|  | 0586d9fe56 | ||
|  | 1f0d9bf05e | ||
|  | 8bcae2a527 | ||
|  | 35ca9aae91 | ||
|  | 6cdd2bf8d7 | ||
|  | ce7d175adb | ||
|  | 1904fd095f | ||
|  | 897d0ac5cc | ||
|  | e262d0182f | ||
|  | aa5b748c10 | ||
|  | 3c182c5bc8 | ||
|  | 1b5677847b | ||
|  | 1be59a9dbe | ||
|  | 55c2178ad4 | ||
|  | 3e3d392e1c | ||
|  | 756a7a4e4b | ||
|  | 121b4ac220 | ||
|  | cdbd5db3af | ||
|  | b2240f6ad4 | ||
|  | 33814a55f8 | ||
|  | a57c5a417b | ||
|  | ff1a28de17 | ||
|  | 772f57f99b | ||
|  | dacee32b8b | ||
|  | fa1a3eb06d | ||
|  | bd4817ca54 | ||
|  | 005eea249c | ||
|  | 9747de8ec6 | ||
|  | 849a9799d0 | ||
|  | e32ae4fe33 | ||
|  | 4595389c4c | ||
|  | e8ccc7cc49 | ||
|  | 673d21047a | ||
|  | fdefc2bbda | ||
|  | 2079df8527 | ||
|  | b53659452c | ||
|  | 46f1d6653e | ||
|  | c1938a60f7 | ||
|  | d65110f352 | ||
|  | 9fa3bbcb5a | ||
|  | 99d62f2922 | ||
|  | 284743cafe | ||
|  | 08d7307b2a | ||
|  | ef6a64abe6 | ||
|  | 18bfa52ef7 | ||
|  | d62be2652c | ||
|  | ea53ff80af | ||
|  | feeca1bcbc | ||
|  | 12e7278c08 | ||
|  | 45b5c44eb9 | ||
|  | e3456c1028 | ||
|  | 327fb38573 | ||
|  | 1d25d9e15f | ||
|  | cc7cee3f48 | ||
|  | d7062710cd | ||
|  | 9bfba734d8 | ||
|  | aaebf2f47e | ||
|  | 23f8d77b38 | ||
|  | 87d22ee0fe | ||
|  | 52e25f63d4 | ||
|  | f3fb67972b | ||
|  | 6400756364 | ||
|  | d59f0314bd | ||
|  | 131baefac3 | ||
|  | a450078b67 | ||
|  | f6b041bf6c | ||
|  | 4f340d181a | ||
|  | b58e38e936 | ||
|  | 8713cb48a8 | ||
|  | 277c5e57ed | ||
|  | b3d62d5af5 | ||
|  | 9c0f0c8d81 | ||
|  | 18a0a2a7ac | ||
|  | 8b99d9e33c | ||
|  | 01e7e153e3 | ||
|  | 0ad4ca24d2 | ||
|  | 724a155552 | ||
|  | d38b74a316 | ||
|  | 54cab4b47b | ||
|  | c093666bc5 | ||
|  | 04972f6761 | ||
|  | 67f8234622 | ||
|  | 201d6dbd9c | ||
|  | fbcac5aa83 | ||
|  | 2b72ab8f8f | ||
|  | 0ea97b9408 | ||
|  | 28683ac7c0 | ||
|  | c72e3e210f | ||
| @@ -1,2 +0,0 @@ | |||||||
| # Vendored code |  | ||||||
| src/amd/vulkan/radix_sort/* |  | ||||||
| @@ -1,9 +0,0 @@ | |||||||
| # The following files are opted into `ninja clang-format` and |  | ||||||
| # enforcement in the CI. |  | ||||||
|  |  | ||||||
| src/**/asahi/**/* |  | ||||||
| src/**/panfrost/**/* |  | ||||||
| src/gallium/drivers/i915 |  | ||||||
| src/amd/vulkan/**/* |  | ||||||
| src/amd/compiler/**/* |  | ||||||
| src/egl/**/* |  | ||||||
| @@ -1,18 +0,0 @@ | |||||||
| ((nil . ((show-trailing-whitespace . t))) |  | ||||||
|  (prog-mode |  | ||||||
|   (indent-tabs-mode . nil) |  | ||||||
|   (tab-width . 8) |  | ||||||
|   (c-basic-offset . 3) |  | ||||||
|   (c-file-style . "stroustrup") |  | ||||||
|   (fill-column . 78) |  | ||||||
|   (eval . (progn |  | ||||||
| 	    (c-set-offset 'case-label '0) |  | ||||||
| 	    (c-set-offset 'innamespace '0) |  | ||||||
| 	    (c-set-offset 'inline-open '0))) |  | ||||||
|   (whitespace-style face indentation) |  | ||||||
|   (whitespace-line-column . 79) |  | ||||||
|   (eval ignore-errors |  | ||||||
|         (require 'whitespace) |  | ||||||
|         (whitespace-mode 1))) |  | ||||||
|  (makefile-mode (indent-tabs-mode . t)) |  | ||||||
|  ) |  | ||||||
| @@ -1,44 +0,0 @@ | |||||||
| # To use this config on you editor, follow the instructions at: |  | ||||||
| # http://editorconfig.org |  | ||||||
|  |  | ||||||
| root = true |  | ||||||
|  |  | ||||||
| [*] |  | ||||||
| charset = utf-8 |  | ||||||
| insert_final_newline = true |  | ||||||
| tab_width = 8 |  | ||||||
|  |  | ||||||
| [*.{c,h,cpp,hpp,cc,hh,y,yy}] |  | ||||||
| indent_style = space |  | ||||||
| indent_size = 3 |  | ||||||
| max_line_length = 78 |  | ||||||
|  |  | ||||||
| [{Makefile*,*.mk}] |  | ||||||
| indent_style = tab |  | ||||||
|  |  | ||||||
| [*.py] |  | ||||||
| indent_style = space |  | ||||||
| indent_size = 4 |  | ||||||
|  |  | ||||||
| [*.yml] |  | ||||||
| indent_style = space |  | ||||||
| indent_size = 2 |  | ||||||
|  |  | ||||||
| [*.rst] |  | ||||||
| indent_style = space |  | ||||||
| indent_size = 3 |  | ||||||
|  |  | ||||||
| [*.patch] |  | ||||||
| trim_trailing_whitespace = false |  | ||||||
|  |  | ||||||
| [{meson.build,meson_options.txt}] |  | ||||||
| indent_style = space |  | ||||||
| indent_size = 2 |  | ||||||
|  |  | ||||||
| [*.ps1] |  | ||||||
| indent_style = space |  | ||||||
| indent_size = 2 |  | ||||||
|  |  | ||||||
| [*.rs] |  | ||||||
| indent_style = space |  | ||||||
| indent_size = 4 |  | ||||||
| @@ -1,67 +0,0 @@ | |||||||
| # List of commits to ignore when using `git blame`. |  | ||||||
| # Enable with: |  | ||||||
| #   git config blame.ignoreRevsFile .git-blame-ignore-revs |  | ||||||
| # |  | ||||||
| # Per git-blame(1): |  | ||||||
| #   Ignore revisions listed in the file, one unabbreviated object name |  | ||||||
| #   per line, in git-blame. Whitespace and comments beginning with # are |  | ||||||
| #   ignored. |  | ||||||
| # |  | ||||||
| # Please keep these in chronological order :) |  | ||||||
| # |  | ||||||
| # You can add a new commit with the following command: |  | ||||||
| #   git log -1 --pretty=format:'%n# %s%n%H%n' >> .git-blame-ignore-revs $COMMIT |  | ||||||
|  |  | ||||||
| # pvr: Fix clang-format error. |  | ||||||
| 0ad5b0a74ef73f5fcbe1406ad9d57fe5dc00a5b1 |  | ||||||
|  |  | ||||||
| # panfrost: Fix up some formatting for clang-format |  | ||||||
| a4705afe63412498d13ded73cba969c66be67907 |  | ||||||
|  |  | ||||||
| # asahi: clang-format the world again |  | ||||||
| 26c51bb8d8a33098b1990425a391f56ffba5728c |  | ||||||
|  |  | ||||||
| # perfetto: Add a .clang-format for the directory. |  | ||||||
| da78d5d729b1800136dd713b68492cb339993f4a |  | ||||||
|  |  | ||||||
| # panfrost/winsys: Clang-format |  | ||||||
| c90f036516a5376002be6550a917e8bad6a8a3b8 |  | ||||||
|  |  | ||||||
| # panfrost: Re-run clang-format |  | ||||||
| 4ccf174009af6732cbffa5d8ebb4687da7517505 |  | ||||||
|  |  | ||||||
| # panvk: Clang-format |  | ||||||
| c7bf3b69ebc8f2252dbf724a4de638e6bb2ac402 |  | ||||||
|  |  | ||||||
| # pan/mdg: Fix icky formatting |  | ||||||
| 133af0d6c945d3aaca8989edd15283a2b7dcc6c7 |  | ||||||
|  |  | ||||||
| # mapi: clang-format _glapi_add_dispatch() |  | ||||||
| 30332529663268a6406e910848e906e725e6fda7 |  | ||||||
|  |  | ||||||
| # radv: reformat according to its .clang-format |  | ||||||
| 8b319c6db8bd93603b18bd783eb75225fcfd51b7 |  | ||||||
|  |  | ||||||
| # aco: reformat according to its .clang-format |  | ||||||
| 6b21653ab4d3a67e711fe10e3d403128b6d26eb2 |  | ||||||
|  |  | ||||||
| # egl: re-format using clang-format |  | ||||||
| 2f670d89db038d5a29f6b72732fd7ad63dfaf4c6 |  | ||||||
|  |  | ||||||
| # panfrost: clang-format the tree |  | ||||||
| 0afd691f29683f6e9dde60f79eca094373521806 |  | ||||||
|  |  | ||||||
| # aco: Format. |  | ||||||
| 1e2639026fec7069806449f9ba2a124ce4eb5569 |  | ||||||
|  |  | ||||||
| # radv: Format. |  | ||||||
| 59c501ca353f8ec9d2717c98af2bfa1a1dbf4d75 |  | ||||||
|  |  | ||||||
| # pvr: clang-format fixes |  | ||||||
| 953c04ebd39c52d457301bdd8ac803949001da2d |  | ||||||
|  |  | ||||||
| # freedreno: Re-indent |  | ||||||
| 2d439343ea1aee146d4ce32800992cd389bd505d |  | ||||||
|  |  | ||||||
| # ir3: Reformat source with clang-format |  | ||||||
| 177138d8cb0b4f6a42ef0a1f8593e14d79f17c54 |  | ||||||
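
The removed `.git-blame-ignore-revs` documents its own workflow in its header comments: enable it once with `git config blame.ignoreRevsFile`, then append formatting-only commits in a `# subject` plus unabbreviated-hash layout. A minimal sketch of those two steps driven from Python follows; the commit hash is a placeholder, not a real Mesa revision.

```python
import subprocess

# Enable the ignore list for this clone, as the file's header suggests.
subprocess.run(
    ["git", "config", "blame.ignoreRevsFile", ".git-blame-ignore-revs"],
    check=True,
)

# Append one commit in the same "# subject" + unabbreviated-hash layout.
commit = "0000000000000000000000000000000000000000"  # placeholder: use a real hash
entry = subprocess.run(
    ["git", "log", "-1", "--pretty=format:%n# %s%n%H%n", commit],
    check=True, capture_output=True, text=True,
).stdout
with open(".git-blame-ignore-revs", "a", encoding="utf-8") as handle:
    handle.write(entry)
```
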
							
								
								
									
.gitattributes (vendored) · 6 lines changed

							| @@ -1,6 +0,0 @@ | |||||||
| *.csv eol=crlf |  | ||||||
| * text=auto |  | ||||||
| *.jpg binary |  | ||||||
| *.png binary |  | ||||||
| *.gif binary |  | ||||||
| *.ico binary |  | ||||||
							
								
								
									
.github/workflows/macos.yml (vendored) · 60 lines changed

							| @@ -1,60 +0,0 @@ | |||||||
| name: macOS-CI |  | ||||||
| on: push |  | ||||||
|  |  | ||||||
| permissions: |  | ||||||
|   contents: read |  | ||||||
|  |  | ||||||
| jobs: |  | ||||||
|   macOS-CI: |  | ||||||
|     strategy: |  | ||||||
|       matrix: |  | ||||||
|         glx_option: ['dri', 'xlib'] |  | ||||||
|     runs-on: macos-11 |  | ||||||
|     env: |  | ||||||
|       GALLIUM_DUMP_CPU: true |  | ||||||
|       MESON_EXEC: /Users/runner/Library/Python/3.11/bin/meson |  | ||||||
|     steps: |  | ||||||
|     - name: Checkout |  | ||||||
|       uses: actions/checkout@v3 |  | ||||||
|     - name: Install Dependencies |  | ||||||
|       run: | |  | ||||||
|         cat > Brewfile <<EOL |  | ||||||
|         brew "bison" |  | ||||||
|         brew "expat" |  | ||||||
|         brew "gettext" |  | ||||||
|         brew "libx11" |  | ||||||
|         brew "libxcb" |  | ||||||
|         brew "libxdamage" |  | ||||||
|         brew "libxext" |  | ||||||
|         brew "molten-vk" |  | ||||||
|         brew "ninja" |  | ||||||
|         brew "pkg-config" |  | ||||||
|         brew "python@3.10" |  | ||||||
|         EOL |  | ||||||
|  |  | ||||||
|         brew update |  | ||||||
|         brew bundle --verbose |  | ||||||
|     - name: Install Mako and meson |  | ||||||
|       run: pip3 install --user mako meson |  | ||||||
|     - name: Configure |  | ||||||
|       run: | |  | ||||||
|         cat > native_config <<EOL |  | ||||||
|         [binaries] |  | ||||||
|         llvm-config = '/usr/local/opt/llvm/bin/llvm-config' |  | ||||||
|         EOL |  | ||||||
|         $MESON_EXEC . build --native-file=native_config -Dmoltenvk-dir=$(brew --prefix molten-vk) -Dbuild-tests=true -Dosmesa=true -Dgallium-drivers=swrast,zink -Dglx=${{ matrix.glx_option }} |  | ||||||
|     - name: Build |  | ||||||
|       run: $MESON_EXEC compile -C build |  | ||||||
|     - name: Test |  | ||||||
|       run: $MESON_EXEC test -C build --print-errorlogs |  | ||||||
|     - name: Install |  | ||||||
|       run: $MESON_EXEC install -C build --destdir $PWD/install |  | ||||||
|     - name: 'Upload Artifact' |  | ||||||
|       if: always() |  | ||||||
|       uses: actions/upload-artifact@v3 |  | ||||||
|       with: |  | ||||||
|         name: macos-${{ matrix.glx_option }}-result |  | ||||||
|         path: | |  | ||||||
|           build/meson-logs/ |  | ||||||
|           install/ |  | ||||||
|         retention-days: 5 |  | ||||||
							
								
								
									
.gitignore (vendored) · 15 lines changed

							| @@ -1,5 +1,10 @@ | |||||||
| .vscode* | *.a | ||||||
| *.pyc | *.o | ||||||
| *.pyo | *.so | ||||||
| *.out | *.sw[a-z] | ||||||
| /build | *.pc | ||||||
|  | *~ | ||||||
|  | depend | ||||||
|  | depend.bak | ||||||
|  | lib | ||||||
|  | lib64 | ||||||
|   | |||||||
							
								
								
									
.gitlab-ci.yml · 261 lines changed

| @@ -1,261 +0,0 @@ | |||||||
| workflow: |  | ||||||
|   rules: |  | ||||||
|     # do not duplicate pipelines on merge pipelines |  | ||||||
|     - if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS && $CI_PIPELINE_SOURCE == "push" |  | ||||||
|       when: never |  | ||||||
|     # merge pipeline |  | ||||||
|     - if: $GITLAB_USER_LOGIN == "marge-bot" && $CI_COMMIT_BRANCH == null |  | ||||||
|       variables: |  | ||||||
|         MESA_CI_PERFORMANCE_ENABLED: 1 |  | ||||||
|         VALVE_INFRA_VANGOGH_JOB_PRIORITY: ""  # Empty tags are ignored by gitlab |  | ||||||
|     # post-merge pipeline |  | ||||||
|     - if: $GITLAB_USER_LOGIN == "marge-bot" && $CI_COMMIT_BRANCH |  | ||||||
|       variables: |  | ||||||
|         JOB_PRIORITY: 40 |  | ||||||
|         VALVE_INFRA_VANGOGH_JOB_PRIORITY: priority:low |  | ||||||
|     # any other pipeline |  | ||||||
|     - if: $GITLAB_USER_LOGIN != "marge-bot" |  | ||||||
|       variables: |  | ||||||
|         JOB_PRIORITY: 50 |  | ||||||
|         VALVE_INFRA_VANGOGH_JOB_PRIORITY: priority:low |  | ||||||
|     - when: always |  | ||||||
|  |  | ||||||
| variables: |  | ||||||
|   FDO_UPSTREAM_REPO: mesa/mesa |  | ||||||
|   MESA_TEMPLATES_COMMIT: &ci-templates-commit d5aa3941aa03c2f716595116354fb81eb8012acb |  | ||||||
|   CI_PRE_CLONE_SCRIPT: |- |  | ||||||
|           set -o xtrace |  | ||||||
|           wget -q -O download-git-cache.sh ${CI_PROJECT_URL}/-/raw/${CI_COMMIT_SHA}/.gitlab-ci/download-git-cache.sh |  | ||||||
|           bash download-git-cache.sh |  | ||||||
|           rm download-git-cache.sh |  | ||||||
|           set +o xtrace |  | ||||||
|   CI_JOB_JWT_FILE: /minio_jwt |  | ||||||
|   S3_HOST: s3.freedesktop.org |  | ||||||
|   # per-pipeline artifact storage on MinIO |  | ||||||
|   PIPELINE_ARTIFACTS_BASE: ${S3_HOST}/artifacts/${CI_PROJECT_PATH}/${CI_PIPELINE_ID} |  | ||||||
|   # per-job artifact storage on MinIO |  | ||||||
|   JOB_ARTIFACTS_BASE: ${PIPELINE_ARTIFACTS_BASE}/${CI_JOB_ID} |  | ||||||
|   KERNEL_IMAGE_BASE: https://${S3_HOST}/mesa-lava/gfx-ci/linux/${KERNEL_TAG} |  | ||||||
|   # reference images stored for traces |  | ||||||
|   PIGLIT_REPLAY_REFERENCE_IMAGES_BASE: "${S3_HOST}/mesa-tracie-results/$FDO_UPSTREAM_REPO" |  | ||||||
|   # For individual CI farm status see .ci-farms folder |  | ||||||
|   # Disable farm with   `git mv .ci-farms{,-disabled}/$farm_name` |  | ||||||
|   # Re-enable farm with `git mv .ci-farms{-disabled,}/$farm_name` |  | ||||||
|   # NEVER MIX FARM MAINTENANCE WITH ANY OTHER CHANGE IN THE SAME MERGE REQUEST! |  | ||||||
|  |  | ||||||
| default: |  | ||||||
|   before_script: |  | ||||||
|     - > |  | ||||||
|       export SCRIPTS_DIR=$(mktemp -d) && |  | ||||||
|       curl -L -s --retry 4 -f --retry-all-errors --retry-delay 60 -O --output-dir "${SCRIPTS_DIR}" "${CI_PROJECT_URL}/-/raw/${CI_COMMIT_SHA}/.gitlab-ci/setup-test-env.sh" && |  | ||||||
|       . ${SCRIPTS_DIR}/setup-test-env.sh && |  | ||||||
|       echo -n "${CI_JOB_JWT}" > "${CI_JOB_JWT_FILE}" && |  | ||||||
|       unset CI_JOB_JWT  # Unsetting vulnerable env variables |  | ||||||
|  |  | ||||||
|   after_script: |  | ||||||
|     # Work around https://gitlab.com/gitlab-org/gitlab/-/issues/20338 |  | ||||||
|     - find -name '*.log' -exec mv {} {}.txt \; |  | ||||||
|  |  | ||||||
|     - > |  | ||||||
|       set +x |  | ||||||
|  |  | ||||||
|       test -e "${CI_JOB_JWT_FILE}" && |  | ||||||
|       export CI_JOB_JWT="$(<${CI_JOB_JWT_FILE})" && |  | ||||||
|       rm "${CI_JOB_JWT_FILE}" |  | ||||||
|  |  | ||||||
|   # Retry when job fails. Failed jobs can be found in the Mesa CI Daily Reports: |  | ||||||
|   # https://gitlab.freedesktop.org/mesa/mesa/-/issues/?sort=created_date&state=opened&label_name%5B%5D=CI%20daily |  | ||||||
|   retry: |  | ||||||
|     max: 1 |  | ||||||
|     # Ignore runner_unsupported, stale_schedule, archived_failure, or |  | ||||||
|     # unmet_prerequisites |  | ||||||
|     when: |  | ||||||
|       - api_failure |  | ||||||
|       - runner_system_failure |  | ||||||
|       - script_failure |  | ||||||
|       - job_execution_timeout |  | ||||||
|       - scheduler_failure |  | ||||||
|       - data_integrity_failure |  | ||||||
|       - unknown_failure |  | ||||||
|  |  | ||||||
| stages: |  | ||||||
|   - sanity |  | ||||||
|   - container |  | ||||||
|   - git-archive |  | ||||||
|   - build-x86_64 |  | ||||||
|   - build-misc |  | ||||||
|   - lint |  | ||||||
|   - amd |  | ||||||
|   - intel |  | ||||||
|   - nouveau |  | ||||||
|   - arm |  | ||||||
|   - broadcom |  | ||||||
|   - freedreno |  | ||||||
|   - etnaviv |  | ||||||
|   - software-renderer |  | ||||||
|   - layered-backends |  | ||||||
|   - deploy |  | ||||||
|  |  | ||||||
| include: |  | ||||||
|   - project: 'freedesktop/ci-templates' |  | ||||||
|     ref: 16bc29078de5e0a067ff84a1a199a3760d3b3811 |  | ||||||
|     file: |  | ||||||
|       - '/templates/ci-fairy.yml' |  | ||||||
|   - project: 'freedesktop/ci-templates' |  | ||||||
|     ref: *ci-templates-commit |  | ||||||
|     file: |  | ||||||
|       - '/templates/alpine.yml' |  | ||||||
|       - '/templates/debian.yml' |  | ||||||
|       - '/templates/fedora.yml' |  | ||||||
|   - local: '.gitlab-ci/image-tags.yml' |  | ||||||
|   - local: '.gitlab-ci/lava/lava-gitlab-ci.yml' |  | ||||||
|   - local: '.gitlab-ci/container/gitlab-ci.yml' |  | ||||||
|   - local: '.gitlab-ci/build/gitlab-ci.yml' |  | ||||||
|   - local: '.gitlab-ci/test/gitlab-ci.yml' |  | ||||||
|   - local: '.gitlab-ci/farm-rules.yml' |  | ||||||
|   - local: '.gitlab-ci/test-source-dep.yml' |  | ||||||
|   - local: 'docs/gitlab-ci.yml' |  | ||||||
|   - local: 'src/amd/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/broadcom/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/etnaviv/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/freedreno/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/drivers/crocus/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/drivers/d3d12/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/drivers/i915/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/drivers/lima/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/drivers/llvmpipe/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/drivers/nouveau/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/drivers/softpipe/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/drivers/virgl/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/drivers/zink/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/gallium/frontends/lavapipe/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/intel/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/microsoft/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/panfrost/ci/gitlab-ci.yml' |  | ||||||
|   - local: 'src/virtio/ci/gitlab-ci.yml' |  | ||||||
|  |  | ||||||
|  |  | ||||||
| # YAML anchors for rule conditions |  | ||||||
| # -------------------------------- |  | ||||||
| .rules-anchors: |  | ||||||
|   rules: |  | ||||||
|     # Post-merge pipeline |  | ||||||
|     - if: &is-post-merge '$CI_PROJECT_NAMESPACE == "mesa" && $CI_COMMIT_BRANCH' |  | ||||||
|       when: on_success |  | ||||||
|     # Post-merge pipeline, not for Marge Bot |  | ||||||
|     - if: &is-post-merge-not-for-marge '$CI_PROJECT_NAMESPACE == "mesa" && $GITLAB_USER_LOGIN != "marge-bot" && $CI_COMMIT_BRANCH' |  | ||||||
|       when: on_success |  | ||||||
|     # Pre-merge pipeline |  | ||||||
|     - if: &is-pre-merge '$CI_PIPELINE_SOURCE == "merge_request_event"' |  | ||||||
|       when: on_success |  | ||||||
|     # Pre-merge pipeline for Marge Bot |  | ||||||
|     - if: &is-pre-merge-for-marge '$GITLAB_USER_LOGIN == "marge-bot" && $CI_PIPELINE_SOURCE == "merge_request_event"' |  | ||||||
|       when: on_success |  | ||||||
|  |  | ||||||
|  |  | ||||||
| .container+build-rules: |  | ||||||
|   rules: |  | ||||||
|     # Run when re-enabling a disabled farm, but not when disabling it |  | ||||||
|     - !reference [.disable-farm-mr-rules, rules] |  | ||||||
|     # Run pipeline by default in the main project if any CI pipeline |  | ||||||
|     # configuration files were changed, to ensure docker images are up to date |  | ||||||
|     - if: *is-post-merge |  | ||||||
|       changes: &all_paths |  | ||||||
|         - VERSION |  | ||||||
|         - bin/git_sha1_gen.py |  | ||||||
|         - bin/install_megadrivers.py |  | ||||||
|         - bin/symbols-check.py |  | ||||||
|         # GitLab CI |  | ||||||
|         - .gitlab-ci.yml |  | ||||||
|         - .gitlab-ci/**/* |  | ||||||
|         # Meson |  | ||||||
|         - meson* |  | ||||||
|         - build-support/**/* |  | ||||||
|         - subprojects/**/* |  | ||||||
|         # Source code |  | ||||||
|         - include/**/* |  | ||||||
|         - src/**/* |  | ||||||
|       when: on_success |  | ||||||
|     # Run pipeline by default if it was triggered by Marge Bot, is for a |  | ||||||
|     # merge request, and any files affecting the pipeline were changed |  | ||||||
|     - if: *is-pre-merge-for-marge |  | ||||||
|       changes: |  | ||||||
|         *all_paths |  | ||||||
|       when: on_success |  | ||||||
|     # Run pipeline by default in the main project if it was not triggered by |  | ||||||
|     # Marge Bot, and any files affecting the pipeline were changed |  | ||||||
|     - if: *is-post-merge-not-for-marge |  | ||||||
|       changes: |  | ||||||
|         *all_paths |  | ||||||
|       when: on_success |  | ||||||
|     # Just skip everything for MRs which don't actually change anything in the |  | ||||||
|     # build - the same rules as above, but without the file-change rules |  | ||||||
|     - if: *is-pre-merge-for-marge |  | ||||||
|       when: never |  | ||||||
|     - if: *is-post-merge |  | ||||||
|       when: never |  | ||||||
|     # Always allow user branches etc to trigger jobs manually |  | ||||||
|     - when: manual |  | ||||||
|  |  | ||||||
|  |  | ||||||
| .ci-deqp-artifacts: |  | ||||||
|   artifacts: |  | ||||||
|     name: "mesa_${CI_JOB_NAME}" |  | ||||||
|     when: always |  | ||||||
|     untracked: false |  | ||||||
|     paths: |  | ||||||
|       # Watch out!  Artifacts are relative to the build dir. |  | ||||||
|       # https://gitlab.com/gitlab-org/gitlab-ce/commit/8788fb925706cad594adf6917a6c5f6587dd1521 |  | ||||||
|       - artifacts |  | ||||||
|       - _build/meson-logs/*.txt |  | ||||||
|       - _build/meson-logs/strace |  | ||||||
|  |  | ||||||
| # Git archive |  | ||||||
|  |  | ||||||
| make git archive: |  | ||||||
|   extends: |  | ||||||
|     - .fdo.ci-fairy |  | ||||||
|   stage: git-archive |  | ||||||
|   rules: |  | ||||||
|     - !reference [.scheduled_pipeline-rules, rules] |  | ||||||
|   # ensure we are running on packet |  | ||||||
|   tags: |  | ||||||
|     - packet.net |  | ||||||
|   script: |  | ||||||
|     # Compactify the .git directory |  | ||||||
|     - git gc --aggressive |  | ||||||
|     # Download & cache the perfetto subproject as well. |  | ||||||
|     - rm -rf subprojects/perfetto ; mkdir -p subprojects/perfetto && curl https://android.googlesource.com/platform/external/perfetto/+archive/$(grep 'revision =' subprojects/perfetto.wrap | cut -d ' ' -f3).tar.gz | tar zxf - -C subprojects/perfetto |  | ||||||
|     # compress the current folder |  | ||||||
|     - tar -cvzf ../$CI_PROJECT_NAME.tar.gz . |  | ||||||
|  |  | ||||||
|     - ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" ../$CI_PROJECT_NAME.tar.gz https://$S3_HOST/git-cache/$CI_PROJECT_NAMESPACE/$CI_PROJECT_NAME/$CI_PROJECT_NAME.tar.gz |  | ||||||
|  |  | ||||||
|  |  | ||||||
| # Sanity checks of MR settings and commit logs |  | ||||||
| sanity: |  | ||||||
|   extends: |  | ||||||
|     - .fdo.ci-fairy |  | ||||||
|   stage: sanity |  | ||||||
|   rules: |  | ||||||
|     - if: *is-pre-merge |  | ||||||
|       when: on_success |  | ||||||
|     # Other cases default to never |  | ||||||
|   variables: |  | ||||||
|     GIT_STRATEGY: none |  | ||||||
|   script: |  | ||||||
|     # ci-fairy check-commits --junit-xml=check-commits.xml |  | ||||||
|     - ci-fairy check-merge-request --require-allow-collaboration --junit-xml=check-merge-request.xml |  | ||||||
|   artifacts: |  | ||||||
|     when: on_failure |  | ||||||
|     reports: |  | ||||||
|       junit: check-*.xml |  | ||||||
|  |  | ||||||
|  |  | ||||||
| # Jobs that need to pass before spending hardware resources on further testing |  | ||||||
| .required-for-hardware-jobs: |  | ||||||
|   needs: |  | ||||||
|     - job: clang-format |  | ||||||
|       optional: true |  | ||||||
|     - job: rustfmt |  | ||||||
|       optional: true |  | ||||||
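
The `.rules-anchors` block above relies on YAML anchors (`&is-pre-merge`) and aliases (`*is-pre-merge`) so that one rule condition can be reused by several jobs. A small sketch of the mechanism, assuming PyYAML is available; the keys are trimmed-down copies of the config above, not the full pipeline:

```python
import yaml  # assumes PyYAML is installed

doc = """
.rules-anchors:
  rules:
    - if: &is-pre-merge '$CI_PIPELINE_SOURCE == "merge_request_event"'
      when: on_success
sanity:
  rules:
    - if: *is-pre-merge
      when: on_success
"""
data = yaml.safe_load(doc)

# The alias *is-pre-merge expands to the anchored scalar, so both jobs
# end up carrying the same condition string after parsing.
assert data["sanity"]["rules"][0]["if"] == data[".rules-anchors"]["rules"][0]["if"]
print(data["sanity"]["rules"][0]["if"])
```
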
| @@ -1,38 +0,0 @@ | |||||||
| # Note: skips lists for CI are just a list of lines that, when |  | ||||||
| # non-zero-length and not starting with '#', will regex match to |  | ||||||
| # delete lines from the test list.  Be careful. |  | ||||||
|  |  | ||||||
| # This test checks the driver's reported conformance version against the |  | ||||||
| # version of the CTS we're running. This check fails every few months |  | ||||||
| # and everyone has to go and bump the number in every driver. |  | ||||||
| # Running this check only makes sense while preparing a conformance |  | ||||||
| # submission, so skip it in the regular CI. |  | ||||||
| dEQP-VK.api.driver_properties.conformance_version |  | ||||||
|  |  | ||||||
| # These are tremendously slow (pushing toward a minute), and aren't |  | ||||||
| # reliable to be run in parallel with other tests due to CPU-side timing. |  | ||||||
| dEQP-GLES[0-9]*.functional.flush_finish.* |  | ||||||
|  |  | ||||||
| # piglit: WGL is Windows-only |  | ||||||
| wgl@.* |  | ||||||
|  |  | ||||||
| # These are sensitive to CPU timing, and would need to be run in isolation |  | ||||||
| # on the system rather than in parallel with other tests. |  | ||||||
| glx@glx_arb_sync_control@timing.* |  | ||||||
|  |  | ||||||
| # This test is not built with waffle, while we do build tests with waffle |  | ||||||
| spec@!opengl 1.1@windowoverlap |  | ||||||
|  |  | ||||||
| # These tests all read from the front buffer after a swap.  Given that we |  | ||||||
| # run piglit tests in parallel in Mesa CI, and don't have a compositor |  | ||||||
| # running, the frontbuffer reads may end up with undefined results from |  | ||||||
| # windows overlapping us. |  | ||||||
| # |  | ||||||
| # Piglit does mark these tests as not to be run in parallel, but deqp-runner |  | ||||||
| # doesn't respect that.  We need to extend deqp-runner to allow some tests to be |  | ||||||
| # marked as single-threaded and run after the rayon loop if we want to support |  | ||||||
| # them. |  | ||||||
| # |  | ||||||
| # Note that "glx-" tests don't appear in x11-skips.txt because they can be |  | ||||||
| # run even if PIGLIT_PLATFORM=gbm (for example) |  | ||||||
| glx@glx-copy-sub-buffer.* |  | ||||||
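
The comment at the top of this skip list explains the semantics: every non-empty line that does not start with `#` is treated as a regex that deletes matching lines from the generated test list. A hedged sketch of that filtering step in Python (the helper name and sample test names are illustrative, not deqp-runner's actual implementation):

```python
import re

def apply_skips(tests, skip_lines):
    """Drop any test whose name matches one of the skip regexes.

    Mirrors the behaviour described in the comment above: blank lines and
    lines starting with '#' are ignored; everything else is a regex.
    """
    patterns = [
        re.compile(line.strip())
        for line in skip_lines
        if line.strip() and not line.lstrip().startswith("#")
    ]
    return [t for t in tests if not any(p.search(t) for p in patterns)]

# Illustrative use with two entries taken from the list above.
tests = ["wgl@basic", "dEQP-GLES2.functional.flush_finish.wait"]
print(apply_skips(tests, ["wgl@.*", "dEQP-GLES[0-9]*.functional.flush_finish.*"]))
# -> []
```
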
| @@ -1,68 +0,0 @@ | |||||||
| version: 1 |  | ||||||
|  |  | ||||||
| # Rules to match for a machine to qualify |  | ||||||
| target: |  | ||||||
| {% if tags %} |  | ||||||
|   tags: |  | ||||||
| {% for tag in tags %} |  | ||||||
|     - '{{ tag | trim }}' |  | ||||||
| {% endfor %} |  | ||||||
| {% endif %} |  | ||||||
|  |  | ||||||
| timeouts: |  | ||||||
|   first_console_activity:  # This limits the time it can take to receive the first console log |  | ||||||
|     minutes: {{ timeout_first_minutes }} |  | ||||||
|     retries: {{ timeout_first_retries }} |  | ||||||
|   console_activity:        # Reset every time we receive a message from the logs |  | ||||||
|     minutes: {{ timeout_minutes }} |  | ||||||
|     retries: {{ timeout_retries }} |  | ||||||
|   boot_cycle: |  | ||||||
|     minutes: {{ timeout_boot_minutes }} |  | ||||||
|     retries: {{ timeout_boot_retries }} |  | ||||||
|   overall:                 # Maximum time the job can take, not overrideable by the "continue" deployment |  | ||||||
|     minutes: {{ timeout_overall_minutes }} |  | ||||||
|     retries: 0 |  | ||||||
|     # no retries possible here |  | ||||||
|  |  | ||||||
| console_patterns: |  | ||||||
|     session_end: |  | ||||||
|         regex: >- |  | ||||||
|           {{ session_end_regex }} |  | ||||||
| {% if session_reboot_regex %} |  | ||||||
|     session_reboot: |  | ||||||
|         regex: >- |  | ||||||
|           {{ session_reboot_regex }} |  | ||||||
| {% endif %} |  | ||||||
|     job_success: |  | ||||||
|         regex: >- |  | ||||||
|           {{ job_success_regex }} |  | ||||||
|     job_warn: |  | ||||||
|         regex: >- |  | ||||||
|           {{ job_warn_regex }} |  | ||||||
|  |  | ||||||
| # Environment to deploy |  | ||||||
| deployment: |  | ||||||
|   # Initial boot |  | ||||||
|   start: |  | ||||||
|     kernel: |  | ||||||
|       url: '{{ kernel_url }}' |  | ||||||
|       cmdline: > |  | ||||||
|         SALAD.machine_id={{ '{{' }} machine_id }} |  | ||||||
|         console={{ '{{' }} local_tty_device }},115200 earlyprintk=vga,keep |  | ||||||
|         loglevel={{ log_level }} no_hash_pointers |  | ||||||
|         b2c.service="--privileged --tls-verify=false --pid=host docker://{{ '{{' }} fdo_proxy_registry }}/gfx-ci/ci-tron/telegraf:latest" b2c.hostname=dut-{{ '{{' }} machine.full_name }} |  | ||||||
|         b2c.container="-ti --tls-verify=false docker://{{ '{{' }} fdo_proxy_registry }}/gfx-ci/ci-tron/machine-registration:latest check" |  | ||||||
|         b2c.ntp_peer=10.42.0.1 b2c.pipefail b2c.cache_device=auto b2c.poweroff_delay={{ poweroff_delay }} |  | ||||||
|         b2c.minio="gateway,{{ '{{' }} minio_url }},{{ '{{' }} job_bucket_access_key }},{{ '{{' }} job_bucket_secret_key }}" |  | ||||||
|         b2c.volume="{{ '{{' }} job_bucket }}-results,mirror=gateway/{{ '{{' }} job_bucket }},pull_on=pipeline_start,push_on=changes,overwrite{% for excl in job_volume_exclusions %},exclude={{ excl }}{% endfor %},remove,expiration=pipeline_end,preserve" |  | ||||||
| {% for volume in volumes %} |  | ||||||
|         b2c.volume={{ volume }} |  | ||||||
| {% endfor %} |  | ||||||
|         b2c.container="-v {{ '{{' }} job_bucket }}-results:{{ working_dir }} -w {{ working_dir }} {% for mount_volume in mount_volumes %} -v {{ mount_volume }}{% endfor %} --tls-verify=false docker://{{ local_container }} {{ container_cmd }}" |  | ||||||
|         {% if cmdline_extras is defined %} |  | ||||||
|         {{ cmdline_extras }} |  | ||||||
|         {% endif %} |  | ||||||
|  |  | ||||||
|     initramfs: |  | ||||||
|       url: '{{ initramfs_url }}' |  | ||||||
|  |  | ||||||
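
One detail worth calling out in the template above: it is itself rendered by Jinja (see the generator script in the next hunk), so spans like `{{ '{{' }} machine_id }}` exist to emit a literal `{{ machine_id }}` that survives this pass, presumably for a later expansion stage on the CI infrastructure. A tiny sketch of that escaping, assuming Jinja2 is installed:

```python
from jinja2 import Environment

env = Environment(trim_blocks=True, lstrip_blocks=True)
# "{{ '{{' }}" renders to a literal "{{", so the machine_id placeholder
# survives this rendering pass and can be filled in by a later stage.
template = env.from_string("SALAD.machine_id={{ '{{' }} machine_id }}")
print(template.render())  # SALAD.machine_id={{ machine_id }}
```
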
| @@ -1,107 +0,0 @@ | |||||||
| #!/usr/bin/env python3 |  | ||||||
|  |  | ||||||
| # Copyright © 2022 Valve Corporation |  | ||||||
| # |  | ||||||
| # Permission is hereby granted, free of charge, to any person obtaining a |  | ||||||
| # copy of this software and associated documentation files (the "Software"), |  | ||||||
| # to deal in the Software without restriction, including without limitation |  | ||||||
| # the rights to use, copy, modify, merge, publish, distribute, sublicense, |  | ||||||
| # and/or sell copies of the Software, and to permit persons to whom the |  | ||||||
| # Software is furnished to do so, subject to the following conditions: |  | ||||||
| # |  | ||||||
| # The above copyright notice and this permission notice (including the next |  | ||||||
| # paragraph) shall be included in all copies or substantial portions of the |  | ||||||
| # Software. |  | ||||||
| # |  | ||||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |  | ||||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |  | ||||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL |  | ||||||
| # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |  | ||||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |  | ||||||
| # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |  | ||||||
| # IN THE SOFTWARE. |  | ||||||
|  |  | ||||||
| from jinja2 import Environment, FileSystemLoader |  | ||||||
| from argparse import ArgumentParser |  | ||||||
| from os import environ, path |  | ||||||
| import json |  | ||||||
|  |  | ||||||
|  |  | ||||||
| parser = ArgumentParser() |  | ||||||
| parser.add_argument('--ci-job-id') |  | ||||||
| parser.add_argument('--container-cmd') |  | ||||||
| parser.add_argument('--initramfs-url') |  | ||||||
| parser.add_argument('--job-success-regex') |  | ||||||
| parser.add_argument('--job-warn-regex') |  | ||||||
| parser.add_argument('--kernel-url') |  | ||||||
| parser.add_argument('--log-level', type=int) |  | ||||||
| parser.add_argument('--poweroff-delay', type=int) |  | ||||||
| parser.add_argument('--session-end-regex') |  | ||||||
| parser.add_argument('--session-reboot-regex') |  | ||||||
| parser.add_argument('--tags', nargs='?', default='') |  | ||||||
| parser.add_argument('--template', default='b2c.yml.jinja2.jinja2') |  | ||||||
| parser.add_argument('--timeout-boot-minutes', type=int) |  | ||||||
| parser.add_argument('--timeout-boot-retries', type=int) |  | ||||||
| parser.add_argument('--timeout-first-minutes', type=int) |  | ||||||
| parser.add_argument('--timeout-first-retries', type=int) |  | ||||||
| parser.add_argument('--timeout-minutes', type=int) |  | ||||||
| parser.add_argument('--timeout-overall-minutes', type=int) |  | ||||||
| parser.add_argument('--timeout-retries', type=int) |  | ||||||
| parser.add_argument('--job-volume-exclusions', nargs='?', default='') |  | ||||||
| parser.add_argument('--volume', action='append') |  | ||||||
| parser.add_argument('--mount-volume', action='append') |  | ||||||
| parser.add_argument('--local-container', default=environ.get('B2C_LOCAL_CONTAINER', 'alpine:latest')) |  | ||||||
| parser.add_argument('--working-dir') |  | ||||||
| args = parser.parse_args() |  | ||||||
|  |  | ||||||
| env = Environment(loader=FileSystemLoader(path.dirname(args.template)), |  | ||||||
|                   trim_blocks=True, lstrip_blocks=True) |  | ||||||
|  |  | ||||||
| template = env.get_template(path.basename(args.template)) |  | ||||||
|  |  | ||||||
| values = {} |  | ||||||
| values['ci_job_id'] = args.ci_job_id |  | ||||||
| values['container_cmd'] = args.container_cmd |  | ||||||
| values['initramfs_url'] = args.initramfs_url |  | ||||||
| values['job_success_regex'] = args.job_success_regex |  | ||||||
| values['job_warn_regex'] = args.job_warn_regex |  | ||||||
| values['kernel_url'] = args.kernel_url |  | ||||||
| values['log_level'] = args.log_level |  | ||||||
| values['poweroff_delay'] = args.poweroff_delay |  | ||||||
| values['session_end_regex'] = args.session_end_regex |  | ||||||
| values['session_reboot_regex'] = args.session_reboot_regex |  | ||||||
| try: |  | ||||||
|     values['tags'] = json.loads(args.tags) |  | ||||||
| except json.decoder.JSONDecodeError: |  | ||||||
|     values['tags'] = args.tags.split(",") |  | ||||||
| values['template'] = args.template |  | ||||||
| values['timeout_boot_minutes'] = args.timeout_boot_minutes |  | ||||||
| values['timeout_boot_retries'] = args.timeout_boot_retries |  | ||||||
| values['timeout_first_minutes'] = args.timeout_first_minutes |  | ||||||
| values['timeout_first_retries'] = args.timeout_first_retries |  | ||||||
| values['timeout_minutes'] = args.timeout_minutes |  | ||||||
| values['timeout_overall_minutes'] = args.timeout_overall_minutes |  | ||||||
| values['timeout_retries'] = args.timeout_retries |  | ||||||
| if len(args.job_volume_exclusions) > 0: |  | ||||||
|     exclusions = args.job_volume_exclusions.split(",") |  | ||||||
|     values['job_volume_exclusions'] = [excl for excl in exclusions if len(excl) > 0] |  | ||||||
| if args.volume is not None: |  | ||||||
|     values['volumes'] = args.volume |  | ||||||
| if args.mount_volume is not None: |  | ||||||
|     values['mount_volumes'] = args.mount_volume |  | ||||||
| values['working_dir'] = args.working_dir |  | ||||||
|  |  | ||||||
| assert(len(args.local_container) > 0) |  | ||||||
|  |  | ||||||
| # Use the gateway's pull-through registry caches to reduce load on fd.o. |  | ||||||
| values['local_container'] = args.local_container |  | ||||||
| for url, replacement in [('registry.freedesktop.org', '{{ fdo_proxy_registry }}'), |  | ||||||
|                          ('harbor.freedesktop.org', '{{ harbor_fdo_registry }}')]: |  | ||||||
|     values['local_container'] = values['local_container'].replace(url, replacement) |  | ||||||
|  |  | ||||||
| if 'B2C_KERNEL_CMDLINE_EXTRAS' in environ: |  | ||||||
|     values['cmdline_extras'] = environ['B2C_KERNEL_CMDLINE_EXTRAS'] |  | ||||||
|  |  | ||||||
| f = open(path.splitext(path.basename(args.template))[0], "w") |  | ||||||
| f.write(template.render(values)) |  | ||||||
| f.close() |  | ||||||
| @@ -1,2 +0,0 @@ | |||||||
| [*.sh] |  | ||||||
| indent_size = 2 |  | ||||||
| @@ -1,13 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
|  |  | ||||||
| # Init entrypoint for bare-metal devices; calls common init code. |  | ||||||
|  |  | ||||||
| # First stage: very basic setup to bring up network and /dev etc |  | ||||||
| /init-stage1.sh |  | ||||||
|  |  | ||||||
| # Second stage: run jobs |  | ||||||
| test $? -eq 0 && /init-stage2.sh |  | ||||||
|  |  | ||||||
| # Wait until the job would have timed out anyway, so we don't spew a "init |  | ||||||
| # exited" panic. |  | ||||||
| sleep 6000 |  | ||||||
| @@ -1,17 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| if [ -z "$BM_POE_INTERFACE" ]; then |  | ||||||
|     echo "Must supply the PoE Interface to power down" |  | ||||||
|     exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -z "$BM_POE_ADDRESS" ]; then |  | ||||||
|     echo "Must supply the PoE Switch host" |  | ||||||
|     exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| SNMP_KEY="1.3.6.1.4.1.9.9.402.1.2.1.1.1.$BM_POE_INTERFACE" |  | ||||||
| SNMP_OFF="i 4" |  | ||||||
|  |  | ||||||
| snmpset -v2c -r 3 -t 30 -cmesaci "$BM_POE_ADDRESS" "$SNMP_KEY" $SNMP_OFF |  | ||||||
| @@ -1,22 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| if [ -z "$BM_POE_INTERFACE" ]; then |  | ||||||
|     echo "Must supply the PoE Interface to power up" |  | ||||||
|     exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -z "$BM_POE_ADDRESS" ]; then |  | ||||||
|     echo "Must supply the PoE Switch host" |  | ||||||
|     exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| SNMP_KEY="1.3.6.1.4.1.9.9.402.1.2.1.1.1.$BM_POE_INTERFACE" |  | ||||||
| SNMP_ON="i 1" |  | ||||||
| SNMP_OFF="i 4" |  | ||||||
|  |  | ||||||
| snmpset -v2c -r 3 -t 10 -cmesaci "$BM_POE_ADDRESS" "$SNMP_KEY" $SNMP_OFF |  | ||||||
| sleep 3s |  | ||||||
| snmpset -v2c -r 3 -t 10 -cmesaci "$BM_POE_ADDRESS" "$SNMP_KEY" $SNMP_ON |  | ||||||
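
The two PoE scripts above both drive a switch port by writing an integer to a per-interface OID (`1.3.6.1.4.1.9.9.402.1.2.1.1.1.<interface>`), using `4` for off and `1` for on with the `mesaci` community string. A hedged Python wrapper around the same `snmpset` invocation is sketched below; the function names are illustrative and it assumes net-snmp's `snmpset` binary is on the PATH:

```python
import subprocess
import time

OID_PREFIX = "1.3.6.1.4.1.9.9.402.1.2.1.1.1."  # per-interface PoE control, as in the scripts above
POE_ON, POE_OFF = "1", "4"

def set_poe(switch: str, interface: str, state: str) -> None:
    """Mirror the snmpset call used by the power-up/cycle script above."""
    subprocess.run(
        ["snmpset", "-v2c", "-r", "3", "-t", "10", "-cmesaci",
         switch, OID_PREFIX + interface, "i", state],
        check=True,
    )

def power_cycle(switch: str, interface: str) -> None:
    set_poe(switch, interface, POE_OFF)
    time.sleep(3)
    set_poe(switch, interface, POE_ON)
```
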
| @@ -1,105 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC1091 # The relative paths in this file only become valid at runtime. |  | ||||||
| # shellcheck disable=SC2034 |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| # Boot script for Chrome OS devices attached to a servo debug connector, using |  | ||||||
| # NFS and TFTP to boot. |  | ||||||
|  |  | ||||||
| # We're run from the root of the repo, make a helper var for our paths |  | ||||||
| BM=$CI_PROJECT_DIR/install/bare-metal |  | ||||||
| CI_COMMON=$CI_PROJECT_DIR/install/common |  | ||||||
|  |  | ||||||
| # Runner config checks |  | ||||||
| if [ -z "$BM_SERIAL" ]; then |  | ||||||
|   echo "Must set BM_SERIAL in your gitlab-runner config.toml [[runners]] environment" |  | ||||||
|   echo "This is the CPU serial device." |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -z "$BM_SERIAL_EC" ]; then |  | ||||||
|   echo "Must set BM_SERIAL in your gitlab-runner config.toml [[runners]] environment" |  | ||||||
|   echo "This is the EC serial device for controlling board power" |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ ! -d /nfs ]; then |  | ||||||
|   echo "NFS rootfs directory needs to be mounted at /nfs by the gitlab runner" |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ ! -d /tftp ]; then |  | ||||||
|   echo "TFTP directory for this board needs to be mounted at /tftp by the gitlab runner" |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # job config checks |  | ||||||
| if [ -z "$BM_KERNEL" ]; then |  | ||||||
|   echo "Must set BM_KERNEL to your board's kernel FIT image" |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -z "$BM_ROOTFS" ]; then |  | ||||||
|   echo "Must set BM_ROOTFS to your board's rootfs directory in the job's variables" |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -z "$BM_CMDLINE" ]; then |  | ||||||
|   echo "Must set BM_CMDLINE to your board's kernel command line arguments" |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| # Clear out any previous run's artifacts. |  | ||||||
| rm -rf results/ |  | ||||||
| mkdir -p results |  | ||||||
|  |  | ||||||
| # Create the rootfs in the NFS directory.  rm to make sure it's in a pristine |  | ||||||
| # state, since it's volume-mounted on the host. |  | ||||||
| rsync -a --delete $BM_ROOTFS/ /nfs/ |  | ||||||
| mkdir -p /nfs/results |  | ||||||
| . $BM/rootfs-setup.sh /nfs |  | ||||||
|  |  | ||||||
| # Put the kernel/dtb image and the boot command line in the tftp directory for |  | ||||||
| # the board to find.  For normal Mesa development, we build the kernel and |  | ||||||
| # store it in the docker container that this script is running in. |  | ||||||
| # |  | ||||||
| # However, container builds are expensive, so when you're hacking on the |  | ||||||
| # kernel, it's nice to be able to skip the half hour container build and plus |  | ||||||
| # moving that container to the runner.  So, if BM_KERNEL is a URL, fetch it |  | ||||||
| # instead of looking in the container.  Note that the kernel build should be |  | ||||||
| # the output of: |  | ||||||
| # |  | ||||||
| # make Image.lzma |  | ||||||
| # |  | ||||||
| # mkimage \ |  | ||||||
| #  -A arm64 \ |  | ||||||
| #  -f auto \ |  | ||||||
| #  -C lzma \ |  | ||||||
| #  -d arch/arm64/boot/Image.lzma \ |  | ||||||
| #  -b arch/arm64/boot/dts/qcom/sdm845-cheza-r3.dtb \ |  | ||||||
| #  cheza-image.img |  | ||||||
|  |  | ||||||
| rm -rf /tftp/* |  | ||||||
| if echo "$BM_KERNEL" | grep -q http; then |  | ||||||
|   curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|       $BM_KERNEL -o /tftp/vmlinuz |  | ||||||
| else |  | ||||||
|   cp /baremetal-files/"$BM_KERNEL" /tftp/vmlinuz |  | ||||||
| fi |  | ||||||
| echo "$BM_CMDLINE" > /tftp/cmdline |  | ||||||
|  |  | ||||||
| set +e |  | ||||||
| python3 $BM/cros_servo_run.py \ |  | ||||||
|         --cpu $BM_SERIAL \ |  | ||||||
|         --ec $BM_SERIAL_EC \ |  | ||||||
|         --test-timeout ${TEST_PHASE_TIMEOUT:-20} |  | ||||||
| ret=$? |  | ||||||
| set -e |  | ||||||
|  |  | ||||||
| # Bring artifacts back from the NFS dir to the build dir where gitlab-runner |  | ||||||
| # will look for them. |  | ||||||
| cp -Rp /nfs/results/. results/ |  | ||||||
|  |  | ||||||
| exit $ret |  | ||||||
| @@ -1,158 +0,0 @@ | |||||||
| #!/usr/bin/env python3 |  | ||||||
| # |  | ||||||
| # Copyright © 2020 Google LLC |  | ||||||
| # SPDX-License-Identifier: MIT |  | ||||||
|  |  | ||||||
| import argparse |  | ||||||
| import re |  | ||||||
| import sys |  | ||||||
|  |  | ||||||
| from serial_buffer import SerialBuffer |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class CrosServoRun: |  | ||||||
|     def __init__(self, cpu, ec, test_timeout): |  | ||||||
|         self.cpu_ser = SerialBuffer( |  | ||||||
|             cpu, "results/serial.txt", "R SERIAL-CPU> ") |  | ||||||
|         # Merge the EC serial into the cpu_ser's line stream so that we can |  | ||||||
|         # effectively poll on both at the same time and not have to worry about |  | ||||||
|         self.ec_ser = SerialBuffer( |  | ||||||
|             ec, "results/serial-ec.txt", "R SERIAL-EC> ", line_queue=self.cpu_ser.line_queue) |  | ||||||
|         self.test_timeout = test_timeout |  | ||||||
|  |  | ||||||
|     def close(self): |  | ||||||
|         self.ec_ser.close() |  | ||||||
|         self.cpu_ser.close() |  | ||||||
|  |  | ||||||
|     def ec_write(self, s): |  | ||||||
|         print("W SERIAL-EC> %s" % s) |  | ||||||
|         self.ec_ser.serial.write(s.encode()) |  | ||||||
|  |  | ||||||
|     def cpu_write(self, s): |  | ||||||
|         print("W SERIAL-CPU> %s" % s) |  | ||||||
|         self.cpu_ser.serial.write(s.encode()) |  | ||||||
|  |  | ||||||
|     def print_error(self, message): |  | ||||||
|         RED = '\033[0;31m' |  | ||||||
|         NO_COLOR = '\033[0m' |  | ||||||
|         print(RED + message + NO_COLOR) |  | ||||||
|  |  | ||||||
|     def run(self): |  | ||||||
|         # Flush any partial commands in the EC's prompt, then ask for a reboot. |  | ||||||
|         self.ec_write("\n") |  | ||||||
|         self.ec_write("reboot\n") |  | ||||||
|  |  | ||||||
|         bootloader_done = False |  | ||||||
|         tftp_failures = 0 |  | ||||||
|         # This is emitted right when the bootloader pauses to check for input. |  | ||||||
|         # Emit a ^N character to request network boot, because we don't have a |  | ||||||
|         # direct-to-netboot firmware on cheza. |  | ||||||
|         for line in self.cpu_ser.lines(timeout=120, phase="bootloader"): |  | ||||||
|             if re.search("load_archive: loading locale_en.bin", line): |  | ||||||
|                 self.cpu_write("\016") |  | ||||||
|                 bootloader_done = True |  | ||||||
|                 break |  | ||||||
|  |  | ||||||
|             # The Cheza firmware seems to occasionally get stuck looping in |  | ||||||
|             # this error state during TFTP booting, possibly based on amount of |  | ||||||
|             # network traffic around it, but it'll usually recover after a |  | ||||||
|             # reboot. Currently mostly visible on google-freedreno-cheza-14. |  | ||||||
|             if re.search("R8152: Bulk read error 0xffffffbf", line): |  | ||||||
|                 tftp_failures += 1 |  | ||||||
|                 if tftp_failures >= 10: |  | ||||||
|                     self.print_error( |  | ||||||
|                         "Detected intermittent tftp failure, restarting run.") |  | ||||||
|                     return 1 |  | ||||||
|  |  | ||||||
|             # If the board has a netboot firmware and we made it to booting the |  | ||||||
|             # kernel, proceed to processing of the test run. |  | ||||||
|             if re.search("Booting Linux", line): |  | ||||||
|                 bootloader_done = True |  | ||||||
|                 break |  | ||||||
|  |  | ||||||
|             # The Cheza boards have issues with failing to bring up power to |  | ||||||
|             # the system sometimes, possibly dependent on ambient temperature |  | ||||||
|             # in the farm. |  | ||||||
|             if re.search("POWER_GOOD not seen in time", line): |  | ||||||
|                 self.print_error( |  | ||||||
|                     "Detected intermittent poweron failure, abandoning run.") |  | ||||||
|                 return 1 |  | ||||||
|  |  | ||||||
|         if not bootloader_done: |  | ||||||
|             print("Failed to make it through bootloader, abandoning run.") |  | ||||||
|             return 1 |  | ||||||
|  |  | ||||||
|         for line in self.cpu_ser.lines(timeout=self.test_timeout, phase="test"): |  | ||||||
|             if re.search("---. end Kernel panic", line): |  | ||||||
|                 return 1 |  | ||||||
|  |  | ||||||
|             # There are very infrequent bus errors during power management transitions |  | ||||||
|             # on cheza, which we don't expect to be the case on future boards. |  | ||||||
|             if re.search("Kernel panic - not syncing: Asynchronous SError Interrupt", line): |  | ||||||
|                 self.print_error( |  | ||||||
|                     "Detected cheza power management bus error, abandoning run.") |  | ||||||
|                 return 1 |  | ||||||
|  |  | ||||||
|             # If the network device dies, it's probably not graphics's fault, just try again. |  | ||||||
|             if re.search("NETDEV WATCHDOG", line): |  | ||||||
|                 self.print_error( |  | ||||||
|                     "Detected network device failure, abandoning run.") |  | ||||||
|                 return 1 |  | ||||||
|  |  | ||||||
|             # These HFI response errors started appearing with the introduction |  | ||||||
|             # of piglit runs.  CosmicPenguin says: |  | ||||||
|             # |  | ||||||
|             # "message ID 106 isn't a thing, so likely what happened is that we |  | ||||||
|             # got confused when parsing the HFI queue.  If it happened on only |  | ||||||
|             # one run, then memory corruption could be a possible clue" |  | ||||||
|             # |  | ||||||
|             # Given that it seems to trigger randomly near a GPU fault and then |  | ||||||
|             # break many tests after that, just restart the whole run. |  | ||||||
|             if re.search("a6xx_hfi_send_msg.*Unexpected message id .* on the response queue", line): |  | ||||||
|                 self.print_error( |  | ||||||
|                     "Detected cheza power management bus error, abandoning run.") |  | ||||||
|                 return 1 |  | ||||||
|  |  | ||||||
|             if re.search("coreboot.*bootblock starting", line): |  | ||||||
|                 self.print_error( |  | ||||||
|                     "Detected spontaneous reboot, abandoning run.") |  | ||||||
|                 return 1 |  | ||||||
|  |  | ||||||
|             if re.search("arm-smmu 5040000.iommu: TLB sync timed out -- SMMU may be deadlocked", line): |  | ||||||
|                 self.print_error("Detected cheza MMU fail, abandoning run.") |  | ||||||
|                 return 1 |  | ||||||
|  |  | ||||||
|             result = re.search(r"hwci: mesa: (\S*)", line) |  | ||||||
|             if result: |  | ||||||
|                 if result.group(1) == "pass": |  | ||||||
|                     return 0 |  | ||||||
|                 else: |  | ||||||
|                     return 1 |  | ||||||
|  |  | ||||||
|         self.print_error( |  | ||||||
|             "Reached the end of the CPU serial log without finding a result") |  | ||||||
|         return 1 |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def main(): |  | ||||||
|     parser = argparse.ArgumentParser() |  | ||||||
|     parser.add_argument('--cpu', type=str, |  | ||||||
|                         help='CPU Serial device', required=True) |  | ||||||
|     parser.add_argument( |  | ||||||
|         '--ec', type=str, help='EC Serial device', required=True) |  | ||||||
|     parser.add_argument( |  | ||||||
|         '--test-timeout', type=int, help='Test phase timeout (minutes)', required=True) |  | ||||||
|     args = parser.parse_args() |  | ||||||
|  |  | ||||||
|     servo = CrosServoRun(args.cpu, args.ec, args.test_timeout * 60) |  | ||||||
|     retval = servo.run() |  | ||||||
|  |  | ||||||
|     # power down the CPU on the device |  | ||||||
|     servo.ec_write("power off\n") |  | ||||||
|     servo.close() |  | ||||||
|  |  | ||||||
|     sys.exit(retval) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     main() |  | ||||||
| @@ -1,10 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| relay=$1 |  | ||||||
|  |  | ||||||
| if [ -z "$relay" ]; then |  | ||||||
|     echo "Must supply a relay arg" |  | ||||||
|     exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| "$CI_PROJECT_DIR"/install/bare-metal/eth008-power-relay.py "$ETH_HOST" "$ETH_PORT" off "$relay" |  | ||||||
| @@ -1,28 +0,0 @@ | |||||||
| #!/usr/bin/python3 |  | ||||||
|  |  | ||||||
| import sys |  | ||||||
| import socket |  | ||||||
|  |  | ||||||
| host = sys.argv[1] |  | ||||||
| port = sys.argv[2] |  | ||||||
| mode = sys.argv[3] |  | ||||||
| relay = sys.argv[4] |  | ||||||
| msg = None |  | ||||||
|  |  | ||||||
| if mode == "on": |  | ||||||
|     msg = b'\x20' |  | ||||||
| else: |  | ||||||
|     msg = b'\x21' |  | ||||||
|  |  | ||||||
| msg += int(relay).to_bytes(1, 'big') |  | ||||||
| msg += b'\x00' |  | ||||||
|  |  | ||||||
| c = socket.create_connection((host, int(port))) |  | ||||||
| c.sendall(msg) |  | ||||||
|  |  | ||||||
| data = c.recv(1) |  | ||||||
| c.close() |  | ||||||
|  |  | ||||||
| # Indexing a bytes object yields an int in Python 3, so compare against 1. |  | ||||||
| if data[0] == 1: |  | ||||||
|     print('Command failed') |  | ||||||
|     sys.exit(1) |  | ||||||
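|  |  | ||||||
| # Usage sketch (hedged; the wrapper scripts pass the board's address via |  | ||||||
| # $ETH_HOST/$ETH_PORT): |  | ||||||
| #   eth008-power-relay.py "$ETH_HOST" "$ETH_PORT" on 1   # energize relay 1 |  | ||||||
| #   eth008-power-relay.py "$ETH_HOST" "$ETH_PORT" off 1  # de-energize relay 1 |  | ||||||
| # As written, command byte 0x20 switches the given relay on and 0x21 switches |  | ||||||
| # it off; the board replies with a single status byte, which this script |  | ||||||
| # treats as a failure when it is 1. |  | ||||||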
| @@ -1,12 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| relay=$1 |  | ||||||
|  |  | ||||||
| if [ -z "$relay" ]; then |  | ||||||
|     echo "Must supply a relay arg" |  | ||||||
|     exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| "$CI_PROJECT_DIR"/install/bare-metal/eth008-power-relay.py "$ETH_HOST" "$ETH_PORT" off "$relay" |  | ||||||
| sleep 5 |  | ||||||
| "$CI_PROJECT_DIR"/install/bare-metal/eth008-power-relay.py "$ETH_HOST" "$ETH_PORT" on "$relay" |  | ||||||
| @@ -1,31 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
|  |  | ||||||
| STRINGS=$(mktemp) |  | ||||||
| ERRORS=$(mktemp) |  | ||||||
|  |  | ||||||
| trap 'rm $STRINGS; rm $ERRORS;' EXIT |  | ||||||
|  |  | ||||||
| FILE=$1 |  | ||||||
| shift 1 |  | ||||||
|  |  | ||||||
| while getopts "f:e:" opt; do |  | ||||||
|   case $opt in |  | ||||||
|     f) echo "$OPTARG" >> "$STRINGS";; |  | ||||||
|     e) echo "$OPTARG" >> "$STRINGS" ; echo "$OPTARG" >> "$ERRORS";; |  | ||||||
|     *) exit |  | ||||||
|   esac |  | ||||||
| done |  | ||||||
| shift $((OPTIND -1)) |  | ||||||
|  |  | ||||||
| echo "Waiting for $FILE to say one of following strings" |  | ||||||
| cat "$STRINGS" |  | ||||||
|  |  | ||||||
| while ! grep -E -wf "$STRINGS" "$FILE"; do |  | ||||||
|   sleep 2 |  | ||||||
| done |  | ||||||
|  |  | ||||||
| if grep -E -wf "$ERRORS" "$FILE"; then |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
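|  |  | ||||||
| # Usage sketch (the script's file name is not shown in this diff, so the |  | ||||||
| # invocation below is illustrative only): |  | ||||||
| #   <this-script> results/serial-output.txt -f "Booting Linux" -e "Kernel panic" |  | ||||||
| # blocks until the log contains one of the registered strings, then exits |  | ||||||
| # non-zero if any string registered with -e is present. |  | ||||||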
| @@ -1,159 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC1091 # The relative paths in this file only become valid at runtime. |  | ||||||
| # shellcheck disable=SC2034 |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| . "$SCRIPTS_DIR"/setup-test-env.sh |  | ||||||
|  |  | ||||||
| BM=$CI_PROJECT_DIR/install/bare-metal |  | ||||||
| CI_COMMON=$CI_PROJECT_DIR/install/common |  | ||||||
|  |  | ||||||
| if [ -z "$BM_SERIAL" ] && [ -z "$BM_SERIAL_SCRIPT" ]; then |  | ||||||
|   echo "Must set BM_SERIAL OR BM_SERIAL_SCRIPT in your gitlab-runner config.toml [[runners]] environment" |  | ||||||
|   echo "BM_SERIAL:" |  | ||||||
|   echo "  This is the serial device to talk to for waiting for fastboot to be ready and logging from the kernel." |  | ||||||
|   echo "BM_SERIAL_SCRIPT:" |  | ||||||
|   echo "  This is a shell script whose output is the serial console, used for waiting for fastboot to be ready and logging from the kernel." |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -z "$BM_POWERUP" ]; then |  | ||||||
|   echo "Must set BM_POWERUP in your gitlab-runner config.toml [[runners]] environment" |  | ||||||
|   echo "This is a shell script that should reset the device and begin its boot sequence" |  | ||||||
|   echo "such that it pauses at fastboot." |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -z "$BM_POWERDOWN" ]; then |  | ||||||
|   echo "Must set BM_POWERDOWN in your gitlab-runner config.toml [[runners]] environment" |  | ||||||
|   echo "This is a shell script that should power off the device." |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -z "$BM_FASTBOOT_SERIAL" ]; then |  | ||||||
|   echo "Must set BM_FASTBOOT_SERIAL in your gitlab-runner config.toml [[runners]] environment" |  | ||||||
|   echo "This must be a stable-across-resets fastboot serial number." |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
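|  |  | ||||||
| # A hedged sketch of the matching gitlab-runner config.toml entry (device |  | ||||||
| # paths and serial numbers below are placeholders for the runner's hardware): |  | ||||||
| # |  | ||||||
| #   [[runners]] |  | ||||||
| #     environment = [ |  | ||||||
| #       "BM_SERIAL=/dev/ttyUSB0", |  | ||||||
| #       "BM_POWERUP=/path/to/powerup.sh", |  | ||||||
| #       "BM_POWERDOWN=/path/to/powerdown.sh", |  | ||||||
| #       "BM_FASTBOOT_SERIAL=0123456789ABCDEF", |  | ||||||
| #     ] |  | ||||||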
|  |  | ||||||
| if [ -z "$BM_KERNEL" ]; then |  | ||||||
|   echo "Must set BM_KERNEL to your board's kernel vmlinuz or Image.gz in the job's variables:" |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -z "$BM_DTB" ]; then |  | ||||||
|   echo "Must set BM_DTB to your board's DTB file in the job's variables:" |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -z "$BM_ROOTFS" ]; then |  | ||||||
|   echo "Must set BM_ROOTFS to your board's rootfs directory in the job's variables:" |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if echo $BM_CMDLINE | grep -q "root=/dev/nfs"; then |  | ||||||
|   BM_FASTBOOT_NFSROOT=1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| # Clear out any previous run's artifacts. |  | ||||||
| rm -rf results/ |  | ||||||
| mkdir -p results/ |  | ||||||
|  |  | ||||||
| if [ -n "$BM_FASTBOOT_NFSROOT" ]; then |  | ||||||
|   # Create the rootfs in the NFS directory.  rsync --delete makes sure it's in |  | ||||||
|   # a pristine state, since it's volume-mounted on the host. |  | ||||||
|   rsync -a --delete $BM_ROOTFS/ /nfs/ |  | ||||||
|   mkdir -p /nfs/results |  | ||||||
|   . $BM/rootfs-setup.sh /nfs |  | ||||||
|  |  | ||||||
|   # Root on NFS, no need for an initramfs. |  | ||||||
|   rm -f rootfs.cpio.gz |  | ||||||
|   touch rootfs.cpio |  | ||||||
|   gzip rootfs.cpio |  | ||||||
| else |  | ||||||
|   # Create the rootfs in a temp dir |  | ||||||
|   rsync -a --delete $BM_ROOTFS/ rootfs/ |  | ||||||
|   . $BM/rootfs-setup.sh rootfs |  | ||||||
|  |  | ||||||
|   # Finally, pack it up into a cpio rootfs.  Skip the vulkan CTS since none of |  | ||||||
|   # these devices use it and it would take up space in the initrd. |  | ||||||
|  |  | ||||||
|   if [ -n "$PIGLIT_PROFILES" ]; then |  | ||||||
|     EXCLUDE_FILTER="deqp|arb_gpu_shader5|arb_gpu_shader_fp64|arb_gpu_shader_int64|glsl-4.[0123456]0|arb_tessellation_shader" |  | ||||||
|   else |  | ||||||
|     EXCLUDE_FILTER="piglit|python" |  | ||||||
|   fi |  | ||||||
|  |  | ||||||
|   pushd rootfs |  | ||||||
|   find -H . | \ |  | ||||||
|     grep -E -v "external/(openglcts|vulkancts|amber|glslang|spirv-tools)" | |  | ||||||
|     grep -E -v "traces-db|apitrace|renderdoc" | \ |  | ||||||
|     grep -E -v $EXCLUDE_FILTER | \ |  | ||||||
|     cpio -H newc -o | \ |  | ||||||
|     xz --check=crc32 -T4 - > $CI_PROJECT_DIR/rootfs.cpio.gz |  | ||||||
|   popd |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # Make the combined kernel image and dtb for passing to fastboot.  For normal |  | ||||||
| # Mesa development, we build the kernel and store it in the docker container |  | ||||||
| # that this script is running in. |  | ||||||
| # |  | ||||||
| # However, container builds are expensive, so when you're hacking on the |  | ||||||
| # kernel, it's nice to be able to skip the half-hour container build, plus |  | ||||||
| # moving that container to the runner.  So, if BM_KERNEL+BM_DTB are URLs, |  | ||||||
| # fetch them instead of looking in the container. |  | ||||||
| if echo "$BM_KERNEL $BM_DTB" | grep -q http; then |  | ||||||
|   curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|       "$BM_KERNEL" -o kernel |  | ||||||
|   curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|       "$BM_DTB" -o dtb |  | ||||||
|  |  | ||||||
|   cat kernel dtb > Image.gz-dtb |  | ||||||
|   rm kernel |  | ||||||
| else |  | ||||||
|   cat /baremetal-files/"$BM_KERNEL" /baremetal-files/"$BM_DTB".dtb > Image.gz-dtb |  | ||||||
|   cp /baremetal-files/"$BM_DTB".dtb dtb |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| export PATH=$BM:$PATH |  | ||||||
|  |  | ||||||
| mkdir -p artifacts |  | ||||||
| mkbootimg.py \ |  | ||||||
|   --kernel Image.gz-dtb \ |  | ||||||
|   --ramdisk rootfs.cpio.gz \ |  | ||||||
|   --dtb dtb \ |  | ||||||
|   --cmdline "$BM_CMDLINE" \ |  | ||||||
|   $BM_MKBOOT_PARAMS \ |  | ||||||
|   --header_version 2 \ |  | ||||||
|   -o artifacts/fastboot.img |  | ||||||
|  |  | ||||||
| rm Image.gz-dtb dtb |  | ||||||
|  |  | ||||||
| # Start background command for talking to serial if we have one. |  | ||||||
| if [ -n "$BM_SERIAL_SCRIPT" ]; then |  | ||||||
|   $BM_SERIAL_SCRIPT > results/serial-output.txt & |  | ||||||
|  |  | ||||||
|   while [ ! -e results/serial-output.txt ]; do |  | ||||||
|     sleep 1 |  | ||||||
|   done |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| set +e |  | ||||||
| $BM/fastboot_run.py \ |  | ||||||
|   --dev="$BM_SERIAL" \ |  | ||||||
|   --test-timeout ${TEST_PHASE_TIMEOUT:-20} \ |  | ||||||
|   --fbserial="$BM_FASTBOOT_SERIAL" \ |  | ||||||
|   --powerup="$BM_POWERUP" \ |  | ||||||
|   --powerdown="$BM_POWERDOWN" |  | ||||||
| ret=$? |  | ||||||
| set -e |  | ||||||
|  |  | ||||||
| if [ -n "$BM_FASTBOOT_NFSROOT" ]; then |  | ||||||
|   # Bring artifacts back from the NFS dir to the build dir where gitlab-runner |  | ||||||
|   # will look for them. |  | ||||||
|   cp -Rp /nfs/results/. results/ |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| exit $ret |  | ||||||
| @@ -1,159 +0,0 @@ | |||||||
| #!/usr/bin/env python3 |  | ||||||
| # |  | ||||||
| # Copyright © 2020 Google LLC |  | ||||||
| # |  | ||||||
| # Permission is hereby granted, free of charge, to any person obtaining a |  | ||||||
| # copy of this software and associated documentation files (the "Software"), |  | ||||||
| # to deal in the Software without restriction, including without limitation |  | ||||||
| # the rights to use, copy, modify, merge, publish, distribute, sublicense, |  | ||||||
| # and/or sell copies of the Software, and to permit persons to whom the |  | ||||||
| # Software is furnished to do so, subject to the following conditions: |  | ||||||
| # |  | ||||||
| # The above copyright notice and this permission notice (including the next |  | ||||||
| # paragraph) shall be included in all copies or substantial portions of the |  | ||||||
| # Software. |  | ||||||
| # |  | ||||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |  | ||||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |  | ||||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL |  | ||||||
| # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |  | ||||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |  | ||||||
| # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |  | ||||||
| # IN THE SOFTWARE. |  | ||||||
|  |  | ||||||
| import argparse |  | ||||||
| import subprocess |  | ||||||
| import re |  | ||||||
| from serial_buffer import SerialBuffer |  | ||||||
| import sys |  | ||||||
| import threading |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class FastbootRun: |  | ||||||
|     def __init__(self, args, test_timeout): |  | ||||||
|         self.powerup = args.powerup |  | ||||||
|         self.ser = SerialBuffer( |  | ||||||
|             args.dev, "results/serial-output.txt", "R SERIAL> ") |  | ||||||
|         self.fastboot = "fastboot boot -s {ser} artifacts/fastboot.img".format( |  | ||||||
|             ser=args.fbserial) |  | ||||||
|         self.test_timeout = test_timeout |  | ||||||
|  |  | ||||||
|     def close(self): |  | ||||||
|         self.ser.close() |  | ||||||
|  |  | ||||||
|     def print_error(self, message): |  | ||||||
|         RED = '\033[0;31m' |  | ||||||
|         NO_COLOR = '\033[0m' |  | ||||||
|         print(RED + message + NO_COLOR) |  | ||||||
|  |  | ||||||
|     def logged_system(self, cmd, timeout=60): |  | ||||||
|         print("Running '{}'".format(cmd)) |  | ||||||
|         try: |  | ||||||
|             return subprocess.call(cmd, shell=True, timeout=timeout) |  | ||||||
|         except subprocess.TimeoutExpired: |  | ||||||
|             self.print_error("timeout, abandoning run.") |  | ||||||
|             return 1 |  | ||||||
|  |  | ||||||
|     def run(self): |  | ||||||
|         if ret := self.logged_system(self.powerup): |  | ||||||
|             return ret |  | ||||||
|  |  | ||||||
|         fastboot_ready = False |  | ||||||
|         for line in self.ser.lines(timeout=2 * 60, phase="bootloader"): |  | ||||||
|             if re.search("[Ff]astboot: [Pp]rocessing commands", line) or \ |  | ||||||
|                     re.search("Listening for fastboot command on", line): |  | ||||||
|                 fastboot_ready = True |  | ||||||
|                 break |  | ||||||
|  |  | ||||||
|             if re.search("data abort", line): |  | ||||||
|                 self.print_error( |  | ||||||
|                     "Detected crash during boot, abandoning run.") |  | ||||||
|                 return 1 |  | ||||||
|  |  | ||||||
|         if not fastboot_ready: |  | ||||||
|             self.print_error( |  | ||||||
|                 "Failed to get to fastboot prompt, abandoning run.") |  | ||||||
|             return 1 |  | ||||||
|  |  | ||||||
|         if ret := self.logged_system(self.fastboot): |  | ||||||
|             return ret |  | ||||||
|  |  | ||||||
|         print_more_lines = -1 |  | ||||||
|         for line in self.ser.lines(timeout=self.test_timeout, phase="test"): |  | ||||||
|             if print_more_lines == 0: |  | ||||||
|                 return 1 |  | ||||||
|             if print_more_lines > 0: |  | ||||||
|                 print_more_lines -= 1 |  | ||||||
|  |  | ||||||
|             if re.search("---. end Kernel panic", line): |  | ||||||
|                 return 1 |  | ||||||
|  |  | ||||||
|             # The db820c boards intermittently reboot.  Just restart the run |  | ||||||
|             # if we see a reboot after we got past fastboot. |  | ||||||
|             if re.search("PON REASON", line): |  | ||||||
|                 self.print_error( |  | ||||||
|                     "Detected spontaneous reboot, abandoning run.") |  | ||||||
|                 return 1 |  | ||||||
|  |  | ||||||
|             # db820c sometimes wedges around iommu fault recovery |  | ||||||
|             if re.search("watchdog: BUG: soft lockup - CPU.* stuck", line): |  | ||||||
|                 self.print_error( |  | ||||||
|                     "Detected kernel soft lockup, abandoning run.") |  | ||||||
|                 return 1 |  | ||||||
|  |  | ||||||
|             # If the network device dies, it's probably not the graphics driver's fault; just try again. |  | ||||||
|             if re.search("NETDEV WATCHDOG", line): |  | ||||||
|                 self.print_error( |  | ||||||
|                     "Detected network device failure, abandoning run.") |  | ||||||
|                 return 1 |  | ||||||
|  |  | ||||||
|             # A3xx recovery doesn't quite work. Sometimes the GPU will get |  | ||||||
|             # wedged and recovery will fail (because power can't be reset?). |  | ||||||
|             # This assumes that the jobs are sufficiently well-tested that GPU |  | ||||||
|             # hangs aren't always triggered, so just try again. But print some |  | ||||||
|             # more lines first so that we get better information on the cause |  | ||||||
|             # of the hang. Once a hang happens, it's pretty chatty. |  | ||||||
|             if "[drm:adreno_recover] *ERROR* gpu hw init failed: -22" in line: |  | ||||||
|                 self.print_error( |  | ||||||
|                     "Detected GPU hang, abandoning run.") |  | ||||||
|                 if print_more_lines == -1: |  | ||||||
|                     print_more_lines = 30 |  | ||||||
|  |  | ||||||
|             result = re.search(r"hwci: mesa: (\S*)", line) |  | ||||||
|             if result: |  | ||||||
|                 if result.group(1) == "pass": |  | ||||||
|                     return 0 |  | ||||||
|                 else: |  | ||||||
|                     return 1 |  | ||||||
|  |  | ||||||
|         self.print_error( |  | ||||||
|             "Reached the end of the CPU serial log without finding a result, abandoning run.") |  | ||||||
|         return 1 |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def main(): |  | ||||||
|     parser = argparse.ArgumentParser() |  | ||||||
|     parser.add_argument( |  | ||||||
|         '--dev', type=str, help='Serial device (otherwise reading from serial-output.txt)') |  | ||||||
|     parser.add_argument('--powerup', type=str, |  | ||||||
|                         help='shell command for rebooting', required=True) |  | ||||||
|     parser.add_argument('--powerdown', type=str, |  | ||||||
|                         help='shell command for powering off', required=True) |  | ||||||
|     parser.add_argument('--fbserial', type=str, |  | ||||||
|                         help='fastboot serial number of the board', required=True) |  | ||||||
|     parser.add_argument('--test-timeout', type=int, |  | ||||||
|                         help='Test phase timeout (minutes)', required=True) |  | ||||||
|     args = parser.parse_args() |  | ||||||
|  |  | ||||||
|     fastboot = FastbootRun(args, args.test_timeout * 60) |  | ||||||
|  |  | ||||||
|     retval = fastboot.run() |  | ||||||
|     fastboot.close() |  | ||||||
|  |  | ||||||
|     fastboot.logged_system(args.powerdown) |  | ||||||
|  |  | ||||||
|     sys.exit(retval) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     main() |  | ||||||
| @@ -1,10 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| relay=$1 |  | ||||||
|  |  | ||||||
| if [ -z "$relay" ]; then |  | ||||||
|     echo "Must supply a relay arg" |  | ||||||
|     exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| "$CI_PROJECT_DIR"/install/bare-metal/google-power-relay.py off "$relay" |  | ||||||
| @@ -1,19 +0,0 @@ | |||||||
| #!/usr/bin/python3 |  | ||||||
|  |  | ||||||
| import sys |  | ||||||
| import serial |  | ||||||
|  |  | ||||||
| mode = sys.argv[1] |  | ||||||
| relay = sys.argv[2] |  | ||||||
|  |  | ||||||
| # For our relays, "off" means "board is powered". |  | ||||||
| mode_swap = { |  | ||||||
|     "on": "off", |  | ||||||
|     "off": "on", |  | ||||||
| } |  | ||||||
| mode = mode_swap[mode] |  | ||||||
|  |  | ||||||
| ser = serial.Serial('/dev/ttyACM0', 115200, timeout=2) |  | ||||||
| command = "relay {} {}\n\r".format(mode, relay) |  | ||||||
| ser.write(command.encode()) |  | ||||||
| ser.close() |  | ||||||
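|  |  | ||||||
| # Usage sketch: "google-power-relay.py off 2" writes "relay on 2" to the |  | ||||||
| # controller on /dev/ttyACM0, because of the on/off swap above. |  | ||||||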
| @@ -1,12 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| relay=$1 |  | ||||||
|  |  | ||||||
| if [ -z "$relay" ]; then |  | ||||||
|     echo "Must supply a relay arg" |  | ||||||
|     exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| "$CI_PROJECT_DIR"/install/bare-metal/google-power-relay.py off "$relay" |  | ||||||
| sleep 5 |  | ||||||
| "$CI_PROJECT_DIR"/install/bare-metal/google-power-relay.py on "$relay" |  | ||||||
| @@ -1,569 +0,0 @@ | |||||||
| #!/usr/bin/env python3 |  | ||||||
| # |  | ||||||
| # Copyright 2015, The Android Open Source Project |  | ||||||
| # |  | ||||||
| # Licensed under the Apache License, Version 2.0 (the "License"); |  | ||||||
| # you may not use this file except in compliance with the License. |  | ||||||
| # You may obtain a copy of the License at |  | ||||||
| # |  | ||||||
| #     http://www.apache.org/licenses/LICENSE-2.0 |  | ||||||
| # |  | ||||||
| # Unless required by applicable law or agreed to in writing, software |  | ||||||
| # distributed under the License is distributed on an "AS IS" BASIS, |  | ||||||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |  | ||||||
| # See the License for the specific language governing permissions and |  | ||||||
| # limitations under the License. |  | ||||||
| """Creates the boot image.""" |  | ||||||
| from argparse import (ArgumentParser, ArgumentTypeError, |  | ||||||
|                       FileType, RawDescriptionHelpFormatter) |  | ||||||
| from hashlib import sha1 |  | ||||||
| from os import fstat |  | ||||||
| from struct import pack |  | ||||||
| import array |  | ||||||
| import collections |  | ||||||
| import os |  | ||||||
| import re |  | ||||||
| import subprocess |  | ||||||
| import tempfile |  | ||||||
| # Constant and structure definition is in |  | ||||||
| # system/tools/mkbootimg/include/bootimg/bootimg.h |  | ||||||
| BOOT_MAGIC = 'ANDROID!' |  | ||||||
| BOOT_MAGIC_SIZE = 8 |  | ||||||
| BOOT_NAME_SIZE = 16 |  | ||||||
| BOOT_ARGS_SIZE = 512 |  | ||||||
| BOOT_EXTRA_ARGS_SIZE = 1024 |  | ||||||
| BOOT_IMAGE_HEADER_V1_SIZE = 1648 |  | ||||||
| BOOT_IMAGE_HEADER_V2_SIZE = 1660 |  | ||||||
| BOOT_IMAGE_HEADER_V3_SIZE = 1580 |  | ||||||
| BOOT_IMAGE_HEADER_V3_PAGESIZE = 4096 |  | ||||||
| BOOT_IMAGE_HEADER_V4_SIZE = 1584 |  | ||||||
| BOOT_IMAGE_V4_SIGNATURE_SIZE = 4096 |  | ||||||
| VENDOR_BOOT_MAGIC = 'VNDRBOOT' |  | ||||||
| VENDOR_BOOT_MAGIC_SIZE = 8 |  | ||||||
| VENDOR_BOOT_NAME_SIZE = BOOT_NAME_SIZE |  | ||||||
| VENDOR_BOOT_ARGS_SIZE = 2048 |  | ||||||
| VENDOR_BOOT_IMAGE_HEADER_V3_SIZE = 2112 |  | ||||||
| VENDOR_BOOT_IMAGE_HEADER_V4_SIZE = 2128 |  | ||||||
| VENDOR_RAMDISK_TYPE_NONE = 0 |  | ||||||
| VENDOR_RAMDISK_TYPE_PLATFORM = 1 |  | ||||||
| VENDOR_RAMDISK_TYPE_RECOVERY = 2 |  | ||||||
| VENDOR_RAMDISK_TYPE_DLKM = 3 |  | ||||||
| VENDOR_RAMDISK_NAME_SIZE = 32 |  | ||||||
| VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE = 16 |  | ||||||
| VENDOR_RAMDISK_TABLE_ENTRY_V4_SIZE = 108 |  | ||||||
| # Names with special meaning that mustn't be specified in --ramdisk_name. |  | ||||||
| VENDOR_RAMDISK_NAME_BLOCKLIST = {b'default'} |  | ||||||
| PARSER_ARGUMENT_VENDOR_RAMDISK_FRAGMENT = '--vendor_ramdisk_fragment' |  | ||||||
| def filesize(f): |  | ||||||
|     if f is None: |  | ||||||
|         return 0 |  | ||||||
|     try: |  | ||||||
|         return fstat(f.fileno()).st_size |  | ||||||
|     except OSError: |  | ||||||
|         return 0 |  | ||||||
| def update_sha(sha, f): |  | ||||||
|     if f: |  | ||||||
|         sha.update(f.read()) |  | ||||||
|         f.seek(0) |  | ||||||
|         sha.update(pack('I', filesize(f))) |  | ||||||
|     else: |  | ||||||
|         sha.update(pack('I', 0)) |  | ||||||
| def pad_file(f, padding): |  | ||||||
|     pad = (padding - (f.tell() & (padding - 1))) & (padding - 1) |  | ||||||
|     f.write(pack(str(pad) + 'x')) |  | ||||||
| def get_number_of_pages(image_size, page_size): |  | ||||||
|     """calculates the number of pages required for the image""" |  | ||||||
|     return (image_size + page_size - 1) // page_size |  | ||||||
| def get_recovery_dtbo_offset(args): |  | ||||||
|     """calculates the offset of recovery_dtbo image in the boot image""" |  | ||||||
|     num_header_pages = 1 # header occupies a page |  | ||||||
|     num_kernel_pages = get_number_of_pages(filesize(args.kernel), args.pagesize) |  | ||||||
|     num_ramdisk_pages = get_number_of_pages(filesize(args.ramdisk), |  | ||||||
|                                             args.pagesize) |  | ||||||
|     num_second_pages = get_number_of_pages(filesize(args.second), args.pagesize) |  | ||||||
|     dtbo_offset = args.pagesize * (num_header_pages + num_kernel_pages + |  | ||||||
|                                    num_ramdisk_pages + num_second_pages) |  | ||||||
|     return dtbo_offset |  | ||||||
| def write_header_v3_and_above(args): |  | ||||||
|     if args.header_version > 3: |  | ||||||
|         boot_header_size = BOOT_IMAGE_HEADER_V4_SIZE |  | ||||||
|     else: |  | ||||||
|         boot_header_size = BOOT_IMAGE_HEADER_V3_SIZE |  | ||||||
|     args.output.write(pack(f'{BOOT_MAGIC_SIZE}s', BOOT_MAGIC.encode())) |  | ||||||
|     # kernel size in bytes |  | ||||||
|     args.output.write(pack('I', filesize(args.kernel))) |  | ||||||
|     # ramdisk size in bytes |  | ||||||
|     args.output.write(pack('I', filesize(args.ramdisk))) |  | ||||||
|     # os version and patch level |  | ||||||
|     args.output.write(pack('I', (args.os_version << 11) | args.os_patch_level)) |  | ||||||
|     args.output.write(pack('I', boot_header_size)) |  | ||||||
|     # reserved |  | ||||||
|     args.output.write(pack('4I', 0, 0, 0, 0)) |  | ||||||
|     # version of boot image header |  | ||||||
|     args.output.write(pack('I', args.header_version)) |  | ||||||
|     args.output.write(pack(f'{BOOT_ARGS_SIZE + BOOT_EXTRA_ARGS_SIZE}s', |  | ||||||
|                            args.cmdline)) |  | ||||||
|     if args.header_version >= 4: |  | ||||||
|         # The signature used to verify boot image v4. |  | ||||||
|         args.output.write(pack('I', BOOT_IMAGE_V4_SIGNATURE_SIZE)) |  | ||||||
|     pad_file(args.output, BOOT_IMAGE_HEADER_V3_PAGESIZE) |  | ||||||
| def write_vendor_boot_header(args): |  | ||||||
|     if filesize(args.dtb) == 0: |  | ||||||
|         raise ValueError('DTB image must not be empty.') |  | ||||||
|     if args.header_version > 3: |  | ||||||
|         vendor_ramdisk_size = args.vendor_ramdisk_total_size |  | ||||||
|         vendor_boot_header_size = VENDOR_BOOT_IMAGE_HEADER_V4_SIZE |  | ||||||
|     else: |  | ||||||
|         vendor_ramdisk_size = filesize(args.vendor_ramdisk) |  | ||||||
|         vendor_boot_header_size = VENDOR_BOOT_IMAGE_HEADER_V3_SIZE |  | ||||||
|     args.vendor_boot.write(pack(f'{VENDOR_BOOT_MAGIC_SIZE}s', |  | ||||||
|                                 VENDOR_BOOT_MAGIC.encode())) |  | ||||||
|     # version of boot image header |  | ||||||
|     args.vendor_boot.write(pack('I', args.header_version)) |  | ||||||
|     # flash page size |  | ||||||
|     args.vendor_boot.write(pack('I', args.pagesize)) |  | ||||||
|     # kernel physical load address |  | ||||||
|     args.vendor_boot.write(pack('I', args.base + args.kernel_offset)) |  | ||||||
|     # ramdisk physical load address |  | ||||||
|     args.vendor_boot.write(pack('I', args.base + args.ramdisk_offset)) |  | ||||||
|     # ramdisk size in bytes |  | ||||||
|     args.vendor_boot.write(pack('I', vendor_ramdisk_size)) |  | ||||||
|     args.vendor_boot.write(pack(f'{VENDOR_BOOT_ARGS_SIZE}s', |  | ||||||
|                                 args.vendor_cmdline)) |  | ||||||
|     # kernel tags physical load address |  | ||||||
|     args.vendor_boot.write(pack('I', args.base + args.tags_offset)) |  | ||||||
|     # asciiz product name |  | ||||||
|     args.vendor_boot.write(pack(f'{VENDOR_BOOT_NAME_SIZE}s', args.board)) |  | ||||||
|     # header size in bytes |  | ||||||
|     args.vendor_boot.write(pack('I', vendor_boot_header_size)) |  | ||||||
|     # dtb size in bytes |  | ||||||
|     args.vendor_boot.write(pack('I', filesize(args.dtb))) |  | ||||||
|     # dtb physical load address |  | ||||||
|     args.vendor_boot.write(pack('Q', args.base + args.dtb_offset)) |  | ||||||
|     if args.header_version > 3: |  | ||||||
|         vendor_ramdisk_table_size = (args.vendor_ramdisk_table_entry_num * |  | ||||||
|                                      VENDOR_RAMDISK_TABLE_ENTRY_V4_SIZE) |  | ||||||
|         # vendor ramdisk table size in bytes |  | ||||||
|         args.vendor_boot.write(pack('I', vendor_ramdisk_table_size)) |  | ||||||
|         # number of vendor ramdisk table entries |  | ||||||
|         args.vendor_boot.write(pack('I', args.vendor_ramdisk_table_entry_num)) |  | ||||||
|         # vendor ramdisk table entry size in bytes |  | ||||||
|         args.vendor_boot.write(pack('I', VENDOR_RAMDISK_TABLE_ENTRY_V4_SIZE)) |  | ||||||
|         # bootconfig section size in bytes |  | ||||||
|         args.vendor_boot.write(pack('I', filesize(args.vendor_bootconfig))) |  | ||||||
|     pad_file(args.vendor_boot, args.pagesize) |  | ||||||
| def write_header(args): |  | ||||||
|     if args.header_version > 4: |  | ||||||
|         raise ValueError( |  | ||||||
|             f'Boot header version {args.header_version} not supported') |  | ||||||
|     if args.header_version in {3, 4}: |  | ||||||
|         return write_header_v3_and_above(args) |  | ||||||
|     ramdisk_load_address = ((args.base + args.ramdisk_offset) |  | ||||||
|                             if filesize(args.ramdisk) > 0 else 0) |  | ||||||
|     second_load_address = ((args.base + args.second_offset) |  | ||||||
|                            if filesize(args.second) > 0 else 0) |  | ||||||
|     args.output.write(pack(f'{BOOT_MAGIC_SIZE}s', BOOT_MAGIC.encode())) |  | ||||||
|     # kernel size in bytes |  | ||||||
|     args.output.write(pack('I', filesize(args.kernel))) |  | ||||||
|     # kernel physical load address |  | ||||||
|     args.output.write(pack('I', args.base + args.kernel_offset)) |  | ||||||
|     # ramdisk size in bytes |  | ||||||
|     args.output.write(pack('I', filesize(args.ramdisk))) |  | ||||||
|     # ramdisk physical load address |  | ||||||
|     args.output.write(pack('I', ramdisk_load_address)) |  | ||||||
|     # second bootloader size in bytes |  | ||||||
|     args.output.write(pack('I', filesize(args.second))) |  | ||||||
|     # second bootloader physical load address |  | ||||||
|     args.output.write(pack('I', second_load_address)) |  | ||||||
|     # kernel tags physical load address |  | ||||||
|     args.output.write(pack('I', args.base + args.tags_offset)) |  | ||||||
|     # flash page size |  | ||||||
|     args.output.write(pack('I', args.pagesize)) |  | ||||||
|     # version of boot image header |  | ||||||
|     args.output.write(pack('I', args.header_version)) |  | ||||||
|     # os version and patch level |  | ||||||
|     args.output.write(pack('I', (args.os_version << 11) | args.os_patch_level)) |  | ||||||
|     # asciiz product name |  | ||||||
|     args.output.write(pack(f'{BOOT_NAME_SIZE}s', args.board)) |  | ||||||
|     args.output.write(pack(f'{BOOT_ARGS_SIZE}s', args.cmdline)) |  | ||||||
|     sha = sha1() |  | ||||||
|     update_sha(sha, args.kernel) |  | ||||||
|     update_sha(sha, args.ramdisk) |  | ||||||
|     update_sha(sha, args.second) |  | ||||||
|     if args.header_version > 0: |  | ||||||
|         update_sha(sha, args.recovery_dtbo) |  | ||||||
|     if args.header_version > 1: |  | ||||||
|         update_sha(sha, args.dtb) |  | ||||||
|     img_id = pack('32s', sha.digest()) |  | ||||||
|     args.output.write(img_id) |  | ||||||
|     args.output.write(pack(f'{BOOT_EXTRA_ARGS_SIZE}s', args.extra_cmdline)) |  | ||||||
|     if args.header_version > 0: |  | ||||||
|         if args.recovery_dtbo: |  | ||||||
|             # recovery dtbo size in bytes |  | ||||||
|             args.output.write(pack('I', filesize(args.recovery_dtbo))) |  | ||||||
|             # recovery dtbo offset in the boot image |  | ||||||
|             args.output.write(pack('Q', get_recovery_dtbo_offset(args))) |  | ||||||
|         else: |  | ||||||
|             # Set to zero if no recovery dtbo |  | ||||||
|             args.output.write(pack('I', 0)) |  | ||||||
|             args.output.write(pack('Q', 0)) |  | ||||||
|     # Populate boot image header size for header versions 1 and 2. |  | ||||||
|     if args.header_version == 1: |  | ||||||
|         args.output.write(pack('I', BOOT_IMAGE_HEADER_V1_SIZE)) |  | ||||||
|     elif args.header_version == 2: |  | ||||||
|         args.output.write(pack('I', BOOT_IMAGE_HEADER_V2_SIZE)) |  | ||||||
|     if args.header_version > 1: |  | ||||||
|         if filesize(args.dtb) == 0: |  | ||||||
|             raise ValueError('DTB image must not be empty.') |  | ||||||
|         # dtb size in bytes |  | ||||||
|         args.output.write(pack('I', filesize(args.dtb))) |  | ||||||
|         # dtb physical load address |  | ||||||
|         args.output.write(pack('Q', args.base + args.dtb_offset)) |  | ||||||
|     pad_file(args.output, args.pagesize) |  | ||||||
|     return img_id |  | ||||||
| class AsciizBytes: |  | ||||||
|     """Parses a string and encodes it as an asciiz bytes object. |  | ||||||
|     >>> AsciizBytes(bufsize=4)('foo') |  | ||||||
|     b'foo\\x00' |  | ||||||
|     >>> AsciizBytes(bufsize=4)('foob') |  | ||||||
|     Traceback (most recent call last): |  | ||||||
|         ... |  | ||||||
|     argparse.ArgumentTypeError: Encoded asciiz length exceeded: max 4, got 5 |  | ||||||
|     """ |  | ||||||
|     def __init__(self, bufsize): |  | ||||||
|         self.bufsize = bufsize |  | ||||||
|     def __call__(self, arg): |  | ||||||
|         arg_bytes = arg.encode() + b'\x00' |  | ||||||
|         if len(arg_bytes) > self.bufsize: |  | ||||||
|             raise ArgumentTypeError( |  | ||||||
|                 'Encoded asciiz length exceeded: ' |  | ||||||
|                 f'max {self.bufsize}, got {len(arg_bytes)}') |  | ||||||
|         return arg_bytes |  | ||||||
| class VendorRamdiskTableBuilder: |  | ||||||
|     """Vendor ramdisk table builder. |  | ||||||
|     Attributes: |  | ||||||
|         entries: A list of VendorRamdiskTableEntry namedtuple. |  | ||||||
|         ramdisk_total_size: Total size in bytes of all ramdisks in the table. |  | ||||||
|     """ |  | ||||||
|     VendorRamdiskTableEntry = collections.namedtuple(  # pylint: disable=invalid-name |  | ||||||
|         'VendorRamdiskTableEntry', |  | ||||||
|         ['ramdisk_path', 'ramdisk_size', 'ramdisk_offset', 'ramdisk_type', |  | ||||||
|          'ramdisk_name', 'board_id']) |  | ||||||
|     def __init__(self): |  | ||||||
|         self.entries = [] |  | ||||||
|         self.ramdisk_total_size = 0 |  | ||||||
|         self.ramdisk_names = set() |  | ||||||
|     def add_entry(self, ramdisk_path, ramdisk_type, ramdisk_name, board_id): |  | ||||||
|         # Strip any trailing null for simple comparison. |  | ||||||
|         stripped_ramdisk_name = ramdisk_name.rstrip(b'\x00') |  | ||||||
|         if stripped_ramdisk_name in VENDOR_RAMDISK_NAME_BLOCKLIST: |  | ||||||
|             raise ValueError( |  | ||||||
|                 f'Banned vendor ramdisk name: {stripped_ramdisk_name}') |  | ||||||
|         if stripped_ramdisk_name in self.ramdisk_names: |  | ||||||
|             raise ValueError( |  | ||||||
|                 f'Duplicated vendor ramdisk name: {stripped_ramdisk_name}') |  | ||||||
|         self.ramdisk_names.add(stripped_ramdisk_name) |  | ||||||
|         if board_id is None: |  | ||||||
|             board_id = array.array( |  | ||||||
|                 'I', [0] * VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE) |  | ||||||
|         else: |  | ||||||
|             board_id = array.array('I', board_id) |  | ||||||
|         if len(board_id) != VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE: |  | ||||||
|             raise ValueError('board_id size must be ' |  | ||||||
|                              f'{VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE}') |  | ||||||
|         with open(ramdisk_path, 'rb') as f: |  | ||||||
|             ramdisk_size = filesize(f) |  | ||||||
|         self.entries.append(self.VendorRamdiskTableEntry( |  | ||||||
|             ramdisk_path, ramdisk_size, self.ramdisk_total_size, ramdisk_type, |  | ||||||
|             ramdisk_name, board_id)) |  | ||||||
|         self.ramdisk_total_size += ramdisk_size |  | ||||||
|     def write_ramdisks_padded(self, fout, alignment): |  | ||||||
|         for entry in self.entries: |  | ||||||
|             with open(entry.ramdisk_path, 'rb') as f: |  | ||||||
|                 fout.write(f.read()) |  | ||||||
|         pad_file(fout, alignment) |  | ||||||
|     def write_entries_padded(self, fout, alignment): |  | ||||||
|         for entry in self.entries: |  | ||||||
|             fout.write(pack('I', entry.ramdisk_size)) |  | ||||||
|             fout.write(pack('I', entry.ramdisk_offset)) |  | ||||||
|             fout.write(pack('I', entry.ramdisk_type)) |  | ||||||
|             fout.write(pack(f'{VENDOR_RAMDISK_NAME_SIZE}s', |  | ||||||
|                             entry.ramdisk_name)) |  | ||||||
|             fout.write(entry.board_id) |  | ||||||
|         pad_file(fout, alignment) |  | ||||||
| def write_padded_file(f_out, f_in, padding): |  | ||||||
|     if f_in is None: |  | ||||||
|         return |  | ||||||
|     f_out.write(f_in.read()) |  | ||||||
|     pad_file(f_out, padding) |  | ||||||
| def parse_int(x): |  | ||||||
|     return int(x, 0) |  | ||||||
| def parse_os_version(x): |  | ||||||
|     match = re.search(r'^(\d{1,3})(?:\.(\d{1,3})(?:\.(\d{1,3}))?)?', x) |  | ||||||
|     if match: |  | ||||||
|         a = int(match.group(1)) |  | ||||||
|         b = c = 0 |  | ||||||
|         if match.lastindex >= 2: |  | ||||||
|             b = int(match.group(2)) |  | ||||||
|         if match.lastindex == 3: |  | ||||||
|             c = int(match.group(3)) |  | ||||||
|         # 7 bits allocated for each field |  | ||||||
|         assert a < 128 |  | ||||||
|         assert b < 128 |  | ||||||
|         assert c < 128 |  | ||||||
|         return (a << 14) | (b << 7) | c |  | ||||||
|     return 0 |  | ||||||
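| # Worked example: parse_os_version('12.0.0') packs a=12, b=0, c=0, giving |  | ||||||
| # (12 << 14) | (0 << 7) | 0 == 196608. |  | ||||||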
| def parse_os_patch_level(x): |  | ||||||
|     match = re.search(r'^(\d{4})-(\d{2})(?:-(\d{2}))?', x) |  | ||||||
|     if match: |  | ||||||
|         y = int(match.group(1)) - 2000 |  | ||||||
|         m = int(match.group(2)) |  | ||||||
|         # 7 bits allocated for the year, 4 bits for the month |  | ||||||
|         assert 0 <= y < 128 |  | ||||||
|         assert 0 < m <= 12 |  | ||||||
|         return (y << 4) | m |  | ||||||
|     return 0 |  | ||||||
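| # Worked example: parse_os_patch_level('2021-07') packs y=21, m=7, giving |  | ||||||
| # (21 << 4) | 7 == 343. |  | ||||||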
| def parse_vendor_ramdisk_type(x): |  | ||||||
|     type_dict = { |  | ||||||
|         'none': VENDOR_RAMDISK_TYPE_NONE, |  | ||||||
|         'platform': VENDOR_RAMDISK_TYPE_PLATFORM, |  | ||||||
|         'recovery': VENDOR_RAMDISK_TYPE_RECOVERY, |  | ||||||
|         'dlkm': VENDOR_RAMDISK_TYPE_DLKM, |  | ||||||
|     } |  | ||||||
|     if x.lower() in type_dict: |  | ||||||
|         return type_dict[x.lower()] |  | ||||||
|     return parse_int(x) |  | ||||||
| def get_vendor_boot_v4_usage(): |  | ||||||
|     return """vendor boot version 4 arguments: |  | ||||||
|   --ramdisk_type {none,platform,recovery,dlkm} |  | ||||||
|                         specify the type of the ramdisk |  | ||||||
|   --ramdisk_name NAME |  | ||||||
|                         specify the name of the ramdisk |  | ||||||
|   --board_id{0..15} NUMBER |  | ||||||
|                         specify the value of the board_id vector, defaults to 0 |  | ||||||
|   --vendor_ramdisk_fragment VENDOR_RAMDISK_FILE |  | ||||||
|                         path to the vendor ramdisk file |  | ||||||
|   These options can be specified multiple times, where each vendor ramdisk |  | ||||||
|   option group ends with a --vendor_ramdisk_fragment option. |  | ||||||
|   Each option group appends an additional ramdisk to the vendor boot image. |  | ||||||
| """ |  | ||||||
| def parse_vendor_ramdisk_args(args, args_list): |  | ||||||
|     """Parses vendor ramdisk specific arguments. |  | ||||||
|     Args: |  | ||||||
|         args: An argparse.Namespace object. Parsed results are stored into this |  | ||||||
|             object. |  | ||||||
|         args_list: A list of argument strings to be parsed. |  | ||||||
|     Returns: |  | ||||||
|         A list of argument strings that are not parsed by this method. |  | ||||||
|     """ |  | ||||||
|     parser = ArgumentParser(add_help=False) |  | ||||||
|     parser.add_argument('--ramdisk_type', type=parse_vendor_ramdisk_type, |  | ||||||
|                         default=VENDOR_RAMDISK_TYPE_NONE) |  | ||||||
|     parser.add_argument('--ramdisk_name', |  | ||||||
|                         type=AsciizBytes(bufsize=VENDOR_RAMDISK_NAME_SIZE), |  | ||||||
|                         required=True) |  | ||||||
|     for i in range(VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE): |  | ||||||
|         parser.add_argument(f'--board_id{i}', type=parse_int, default=0) |  | ||||||
|     parser.add_argument(PARSER_ARGUMENT_VENDOR_RAMDISK_FRAGMENT, required=True) |  | ||||||
|     unknown_args = [] |  | ||||||
|     vendor_ramdisk_table_builder = VendorRamdiskTableBuilder() |  | ||||||
|     if args.vendor_ramdisk is not None: |  | ||||||
|         vendor_ramdisk_table_builder.add_entry( |  | ||||||
|             args.vendor_ramdisk.name, VENDOR_RAMDISK_TYPE_PLATFORM, b'', None) |  | ||||||
|     while PARSER_ARGUMENT_VENDOR_RAMDISK_FRAGMENT in args_list: |  | ||||||
|         idx = args_list.index(PARSER_ARGUMENT_VENDOR_RAMDISK_FRAGMENT) + 2 |  | ||||||
|         vendor_ramdisk_args = args_list[:idx] |  | ||||||
|         args_list = args_list[idx:] |  | ||||||
|         ramdisk_args, extra_args = parser.parse_known_args(vendor_ramdisk_args) |  | ||||||
|         ramdisk_args_dict = vars(ramdisk_args) |  | ||||||
|         unknown_args.extend(extra_args) |  | ||||||
|         ramdisk_path = ramdisk_args.vendor_ramdisk_fragment |  | ||||||
|         ramdisk_type = ramdisk_args.ramdisk_type |  | ||||||
|         ramdisk_name = ramdisk_args.ramdisk_name |  | ||||||
|         board_id = [ramdisk_args_dict[f'board_id{i}'] |  | ||||||
|                     for i in range(VENDOR_RAMDISK_TABLE_ENTRY_BOARD_ID_SIZE)] |  | ||||||
|         vendor_ramdisk_table_builder.add_entry(ramdisk_path, ramdisk_type, |  | ||||||
|                                                ramdisk_name, board_id) |  | ||||||
|     if len(args_list) > 0: |  | ||||||
|         unknown_args.extend(args_list) |  | ||||||
|     args.vendor_ramdisk_total_size = (vendor_ramdisk_table_builder |  | ||||||
|                                       .ramdisk_total_size) |  | ||||||
|     args.vendor_ramdisk_table_entry_num = len(vendor_ramdisk_table_builder |  | ||||||
|                                               .entries) |  | ||||||
|     args.vendor_ramdisk_table_builder = vendor_ramdisk_table_builder |  | ||||||
|     return unknown_args |  | ||||||
| def parse_cmdline(): |  | ||||||
|     version_parser = ArgumentParser(add_help=False) |  | ||||||
|     version_parser.add_argument('--header_version', type=parse_int, default=0) |  | ||||||
|     if version_parser.parse_known_args()[0].header_version < 3: |  | ||||||
|         # For boot header v0 to v2, the kernel commandline field is split into |  | ||||||
|         # two fields, cmdline and extra_cmdline. Both fields are asciiz strings, |  | ||||||
|         # so we subtract one here to ensure the encoded string plus the |  | ||||||
|         # null-terminator can fit in the buffer. |  | ||||||
|         cmdline_size = BOOT_ARGS_SIZE + BOOT_EXTRA_ARGS_SIZE - 1 |  | ||||||
|     else: |  | ||||||
|         cmdline_size = BOOT_ARGS_SIZE + BOOT_EXTRA_ARGS_SIZE |  | ||||||
|     parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter, |  | ||||||
|                             epilog=get_vendor_boot_v4_usage()) |  | ||||||
|     parser.add_argument('--kernel', type=FileType('rb'), |  | ||||||
|                         help='path to the kernel') |  | ||||||
|     parser.add_argument('--ramdisk', type=FileType('rb'), |  | ||||||
|                         help='path to the ramdisk') |  | ||||||
|     parser.add_argument('--second', type=FileType('rb'), |  | ||||||
|                         help='path to the second bootloader') |  | ||||||
|     parser.add_argument('--dtb', type=FileType('rb'), help='path to the dtb') |  | ||||||
|     dtbo_group = parser.add_mutually_exclusive_group() |  | ||||||
|     dtbo_group.add_argument('--recovery_dtbo', type=FileType('rb'), |  | ||||||
|                             help='path to the recovery DTBO') |  | ||||||
|     dtbo_group.add_argument('--recovery_acpio', type=FileType('rb'), |  | ||||||
|                             metavar='RECOVERY_ACPIO', dest='recovery_dtbo', |  | ||||||
|                             help='path to the recovery ACPIO') |  | ||||||
|     parser.add_argument('--cmdline', type=AsciizBytes(bufsize=cmdline_size), |  | ||||||
|                         default='', help='kernel command line arguments') |  | ||||||
|     parser.add_argument('--vendor_cmdline', |  | ||||||
|                         type=AsciizBytes(bufsize=VENDOR_BOOT_ARGS_SIZE), |  | ||||||
|                         default='', |  | ||||||
|                         help='vendor boot kernel command line arguments') |  | ||||||
|     parser.add_argument('--base', type=parse_int, default=0x10000000, |  | ||||||
|                         help='base address') |  | ||||||
|     parser.add_argument('--kernel_offset', type=parse_int, default=0x00008000, |  | ||||||
|                         help='kernel offset') |  | ||||||
|     parser.add_argument('--ramdisk_offset', type=parse_int, default=0x01000000, |  | ||||||
|                         help='ramdisk offset') |  | ||||||
|     parser.add_argument('--second_offset', type=parse_int, default=0x00f00000, |  | ||||||
|                         help='second bootloader offset') |  | ||||||
|     parser.add_argument('--dtb_offset', type=parse_int, default=0x01f00000, |  | ||||||
|                         help='dtb offset') |  | ||||||
|     parser.add_argument('--os_version', type=parse_os_version, default=0, |  | ||||||
|                         help='operating system version') |  | ||||||
|     parser.add_argument('--os_patch_level', type=parse_os_patch_level, |  | ||||||
|                         default=0, help='operating system patch level') |  | ||||||
|     parser.add_argument('--tags_offset', type=parse_int, default=0x00000100, |  | ||||||
|                         help='tags offset') |  | ||||||
|     parser.add_argument('--board', type=AsciizBytes(bufsize=BOOT_NAME_SIZE), |  | ||||||
|                         default='', help='board name') |  | ||||||
|     parser.add_argument('--pagesize', type=parse_int, |  | ||||||
|                         choices=[2**i for i in range(11, 15)], default=2048, |  | ||||||
|                         help='page size') |  | ||||||
|     parser.add_argument('--id', action='store_true', |  | ||||||
|                         help='print the image ID on standard output') |  | ||||||
|     parser.add_argument('--header_version', type=parse_int, default=0, |  | ||||||
|                         help='boot image header version') |  | ||||||
|     parser.add_argument('-o', '--output', type=FileType('wb'), |  | ||||||
|                         help='output file name') |  | ||||||
|     parser.add_argument('--gki_signing_algorithm', |  | ||||||
|                         help='GKI signing algorithm to use') |  | ||||||
|     parser.add_argument('--gki_signing_key', |  | ||||||
|                         help='path to RSA private key file') |  | ||||||
|     parser.add_argument('--gki_signing_signature_args', |  | ||||||
|                         help='other hash arguments passed to avbtool') |  | ||||||
|     parser.add_argument('--gki_signing_avbtool_path', |  | ||||||
|                         help='path to avbtool for boot signature generation') |  | ||||||
|     parser.add_argument('--vendor_boot', type=FileType('wb'), |  | ||||||
|                         help='vendor boot output file name') |  | ||||||
|     parser.add_argument('--vendor_ramdisk', type=FileType('rb'), |  | ||||||
|                         help='path to the vendor ramdisk') |  | ||||||
|     parser.add_argument('--vendor_bootconfig', type=FileType('rb'), |  | ||||||
|                         help='path to the vendor bootconfig file') |  | ||||||
|     args, extra_args = parser.parse_known_args() |  | ||||||
|     if args.vendor_boot is not None and args.header_version > 3: |  | ||||||
|         extra_args = parse_vendor_ramdisk_args(args, extra_args) |  | ||||||
|     if len(extra_args) > 0: |  | ||||||
|         raise ValueError(f'Unrecognized arguments: {extra_args}') |  | ||||||
|     if args.header_version < 3: |  | ||||||
|         args.extra_cmdline = args.cmdline[BOOT_ARGS_SIZE-1:] |  | ||||||
|         args.cmdline = args.cmdline[:BOOT_ARGS_SIZE-1] + b'\x00' |  | ||||||
|         assert len(args.cmdline) <= BOOT_ARGS_SIZE |  | ||||||
|         assert len(args.extra_cmdline) <= BOOT_EXTRA_ARGS_SIZE |  | ||||||
|     return args |  | ||||||
| def add_boot_image_signature(args, pagesize): |  | ||||||
|     """Adds the boot image signature. |  | ||||||
|     Note that the signature will only be verified in VTS to ensure a |  | ||||||
|     generic boot.img is used. It will not be used by the device |  | ||||||
|     bootloader at boot time. The bootloader should only verify |  | ||||||
|     the boot vbmeta at the end of the boot partition (or in the top-level |  | ||||||
|     vbmeta partition) via the Android Verified Boot process, when the |  | ||||||
|     device boots. |  | ||||||
|     """ |  | ||||||
|     args.output.flush()  # Flush the buffer for signature calculation. |  | ||||||
|     # Appends zeros if the signing key is not specified. |  | ||||||
|     if not args.gki_signing_key or not args.gki_signing_algorithm: |  | ||||||
|         zeros = b'\x00' * BOOT_IMAGE_V4_SIGNATURE_SIZE |  | ||||||
|         args.output.write(zeros) |  | ||||||
|         pad_file(args.output, pagesize) |  | ||||||
|         return |  | ||||||
|     avbtool = 'avbtool'  # Used from otatools.zip or Android build env. |  | ||||||
|     # We need to specify the path of avbtool in build/core/Makefile, |  | ||||||
|     # because avbtool is not guaranteed to be in $PATH there. |  | ||||||
|     if args.gki_signing_avbtool_path: |  | ||||||
|         avbtool = args.gki_signing_avbtool_path |  | ||||||
|     # Need to specify a value of --partition_size for avbtool to work. |  | ||||||
|     # We use 64 MB below, but avbtool will not resize the boot image to |  | ||||||
|     # this size because --do_not_append_vbmeta_image is also specified. |  | ||||||
|     avbtool_cmd = [ |  | ||||||
|         avbtool, 'add_hash_footer', |  | ||||||
|         '--partition_name', 'boot', |  | ||||||
|         '--partition_size', str(64 * 1024 * 1024), |  | ||||||
|         '--image', args.output.name, |  | ||||||
|         '--algorithm', args.gki_signing_algorithm, |  | ||||||
|         '--key', args.gki_signing_key, |  | ||||||
|         '--salt', 'd00df00d']  # TODO: use a hash of kernel/ramdisk as the salt. |  | ||||||
|     # Additional arguments passed to avbtool. |  | ||||||
|     if args.gki_signing_signature_args: |  | ||||||
|         avbtool_cmd += args.gki_signing_signature_args.split() |  | ||||||
|     # Output the signed vbmeta to a separate file, then append it to boot.img |  | ||||||
|     # as the boot signature. |  | ||||||
|     with tempfile.TemporaryDirectory() as temp_out_dir: |  | ||||||
|         boot_signature_output = os.path.join(temp_out_dir, 'boot_signature') |  | ||||||
|         avbtool_cmd += ['--do_not_append_vbmeta_image', |  | ||||||
|                         '--output_vbmeta_image', boot_signature_output] |  | ||||||
|         subprocess.check_call(avbtool_cmd) |  | ||||||
|         with open(boot_signature_output, 'rb') as boot_signature: |  | ||||||
|             if filesize(boot_signature) > BOOT_IMAGE_V4_SIGNATURE_SIZE: |  | ||||||
|                 raise ValueError( |  | ||||||
|                     f'boot signature size is > {BOOT_IMAGE_V4_SIGNATURE_SIZE}') |  | ||||||
|             write_padded_file(args.output, boot_signature, pagesize) |  | ||||||
| def write_data(args, pagesize): |  | ||||||
|     write_padded_file(args.output, args.kernel, pagesize) |  | ||||||
|     write_padded_file(args.output, args.ramdisk, pagesize) |  | ||||||
|     write_padded_file(args.output, args.second, pagesize) |  | ||||||
|     if args.header_version > 0 and args.header_version < 3: |  | ||||||
|         write_padded_file(args.output, args.recovery_dtbo, pagesize) |  | ||||||
|     if args.header_version == 2: |  | ||||||
|         write_padded_file(args.output, args.dtb, pagesize) |  | ||||||
|     if args.header_version >= 4: |  | ||||||
|         add_boot_image_signature(args, pagesize) |  | ||||||
| def write_vendor_boot_data(args): |  | ||||||
|     if args.header_version > 3: |  | ||||||
|         builder = args.vendor_ramdisk_table_builder |  | ||||||
|         builder.write_ramdisks_padded(args.vendor_boot, args.pagesize) |  | ||||||
|         write_padded_file(args.vendor_boot, args.dtb, args.pagesize) |  | ||||||
|         builder.write_entries_padded(args.vendor_boot, args.pagesize) |  | ||||||
|         write_padded_file(args.vendor_boot, args.vendor_bootconfig, |  | ||||||
|             args.pagesize) |  | ||||||
|     else: |  | ||||||
|         write_padded_file(args.vendor_boot, args.vendor_ramdisk, args.pagesize) |  | ||||||
|         write_padded_file(args.vendor_boot, args.dtb, args.pagesize) |  | ||||||
| def main(): |  | ||||||
|     args = parse_cmdline() |  | ||||||
|     if args.vendor_boot is not None: |  | ||||||
|         if args.header_version not in {3, 4}: |  | ||||||
|             raise ValueError( |  | ||||||
|                 '--vendor_boot not compatible with given header version') |  | ||||||
|         if args.header_version == 3 and args.vendor_ramdisk is None: |  | ||||||
|             raise ValueError('--vendor_ramdisk missing or invalid') |  | ||||||
|         write_vendor_boot_header(args) |  | ||||||
|         write_vendor_boot_data(args) |  | ||||||
|     if args.output is not None: |  | ||||||
|         if args.second is not None and args.header_version > 2: |  | ||||||
|             raise ValueError( |  | ||||||
|                 '--second not compatible with given header version') |  | ||||||
|         img_id = write_header(args) |  | ||||||
|         if args.header_version > 2: |  | ||||||
|             write_data(args, BOOT_IMAGE_HEADER_V3_PAGESIZE) |  | ||||||
|         else: |  | ||||||
|             write_data(args, args.pagesize) |  | ||||||
|         if args.id and img_id is not None: |  | ||||||
|             print('0x' + ''.join(f'{octet:02x}' for octet in img_id)) |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     main() |  | ||||||
| @@ -1,16 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| if [ -z "$BM_POE_INTERFACE" ]; then |  | ||||||
|     echo "Must supply the PoE interface to power down" |  | ||||||
|     exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -z "$BM_POE_ADDRESS" ]; then |  | ||||||
|     echo "Must supply the PoE Switch host" |  | ||||||
|     exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| SNMP_KEY="SNMPv2-SMI::mib-2.105.1.1.1.3.1.$((48 + BM_POE_INTERFACE))" |  | ||||||
| SNMP_OFF="i 2" |  | ||||||
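| # SNMP_KEY addresses pethPsePortAdminEnable (POWER-ETHERNET-MIB, RFC 3621); |  | ||||||
| # setting it to integer 2 disables PoE on the port and 1 enables it. The |  | ||||||
| # "48 +" offset mapping BM_POE_INTERFACE to the switch's PoE port index is |  | ||||||
| # assumed to be specific to the switches used in this farm. |  | ||||||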
|  |  | ||||||
| flock /var/run/poe.lock -c "snmpset -v2c -r 3 -t 30 -cmesaci $BM_POE_ADDRESS $SNMP_KEY $SNMP_OFF" |  | ||||||
| @@ -1,19 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| if [ -z "$BM_POE_INTERFACE" ]; then |  | ||||||
|     echo "Must supply the PoE Interface to power up" |  | ||||||
|     exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -z "$BM_POE_ADDRESS" ]; then |  | ||||||
|     echo "Must supply the PoE Switch host" |  | ||||||
|     exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| SNMP_KEY="SNMPv2-SMI::mib-2.105.1.1.1.3.1.$((48 + BM_POE_INTERFACE))" |  | ||||||
| SNMP_ON="i 1" |  | ||||||
| SNMP_OFF="i 2" |  | ||||||
|  |  | ||||||
| flock /var/run/poe.lock -c "snmpset -v2c -r 3 -t 30 -cmesaci $BM_POE_ADDRESS $SNMP_KEY $SNMP_OFF" |  | ||||||
| sleep 3s |  | ||||||
| flock /var/run/poe.lock -c "snmpset -v2c -r 3 -t 30 -cmesaci $BM_POE_ADDRESS $SNMP_KEY $SNMP_ON" |  | ||||||
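A usage sketch for this power-cycle script, with illustrative values for the environment and script name; the read-back at the end is optional and just confirms the port came back on (1 = on, 2 = off):

  # Power-cycle port 3 of the switch at 10.42.0.1 (names/addresses illustrative):
  BM_POE_ADDRESS=10.42.0.1 BM_POE_INTERFACE=3 ./poe-powerup.sh
  # Optionally read the port state back:
  snmpget -v2c -c mesaci 10.42.0.1 SNMPv2-SMI::mib-2.105.1.1.1.3.1.51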
| @@ -1,186 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC1091 |  | ||||||
| # shellcheck disable=SC2034 |  | ||||||
| # shellcheck disable=SC2059 |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| . "$SCRIPTS_DIR"/setup-test-env.sh |  | ||||||
|  |  | ||||||
| # Boot script for devices attached to a PoE switch, using NFS for the root |  | ||||||
| # filesystem. |  | ||||||
|  |  | ||||||
| # We're run from the root of the repo, make a helper var for our paths |  | ||||||
| BM=$CI_PROJECT_DIR/install/bare-metal |  | ||||||
| CI_COMMON=$CI_PROJECT_DIR/install/common |  | ||||||
|  |  | ||||||
| # Runner config checks |  | ||||||
| if [ -z "$BM_SERIAL" ]; then |  | ||||||
|   echo "Must set BM_SERIAL in your gitlab-runner config.toml [[runners]] environment" |  | ||||||
|   echo "This is the serial port to listen the device." |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -z "$BM_POE_ADDRESS" ]; then |  | ||||||
|   echo "Must set BM_POE_ADDRESS in your gitlab-runner config.toml [[runners]] environment" |  | ||||||
|   echo "This is the PoE switch address to connect for powering up/down devices." |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -z "$BM_POE_INTERFACE" ]; then |  | ||||||
|   echo "Must set BM_POE_INTERFACE in your gitlab-runner config.toml [[runners]] environment" |  | ||||||
|   echo "This is the PoE switch interface where the device is connected." |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -z "$BM_POWERUP" ]; then |  | ||||||
|   echo "Must set BM_POWERUP in your gitlab-runner config.toml [[runners]] environment" |  | ||||||
|   echo "This is a shell script that should power up the device and begin its boot sequence." |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -z "$BM_POWERDOWN" ]; then |  | ||||||
|   echo "Must set BM_POWERDOWN in your gitlab-runner config.toml [[runners]] environment" |  | ||||||
|   echo "This is a shell script that should power off the device." |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ ! -d /nfs ]; then |  | ||||||
|   echo "NFS rootfs directory needs to be mounted at /nfs by the gitlab runner" |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ ! -d /tftp ]; then |  | ||||||
|   echo "TFTP directory for this board needs to be mounted at /tftp by the gitlab runner" |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # job config checks |  | ||||||
| if [ -z "$BM_ROOTFS" ]; then |  | ||||||
|   echo "Must set BM_ROOTFS to your board's rootfs directory in the job's variables" |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -z "$BM_BOOTFS" ]; then |  | ||||||
|   echo "Must set /boot files for the TFTP boot in the job's variables" |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -z "$BM_CMDLINE" ]; then |  | ||||||
|   echo "Must set BM_CMDLINE to your board's kernel command line arguments" |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -z "$BM_BOOTCONFIG" ]; then |  | ||||||
|   echo "Must set BM_BOOTCONFIG to your board's required boot configuration arguments" |  | ||||||
|   exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| date +'%F %T' |  | ||||||
|  |  | ||||||
| # Clear out any previous run's artifacts. |  | ||||||
| rm -rf results/ |  | ||||||
| mkdir -p results |  | ||||||
|  |  | ||||||
| # Create the rootfs in the NFS directory.  rsync --delete makes sure it starts |  | ||||||
| # from a pristine state, since it's volume-mounted on the host. |  | ||||||
| rsync -a --delete $BM_ROOTFS/ /nfs/ |  | ||||||
|  |  | ||||||
| date +'%F %T' |  | ||||||
|  |  | ||||||
| # If BM_BOOTFS is a URL, download it |  | ||||||
| if echo $BM_BOOTFS | grep -q http; then |  | ||||||
|   curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|     "${FDO_HTTP_CACHE_URI:-}$BM_BOOTFS" -o /tmp/bootfs.tar |  | ||||||
|   BM_BOOTFS=/tmp/bootfs.tar |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| date +'%F %T' |  | ||||||
|  |  | ||||||
| # If BM_BOOTFS is a file, assume it is a tarball and uncompress it |  | ||||||
| if [ -f $BM_BOOTFS ]; then |  | ||||||
|   mkdir -p /tmp/bootfs |  | ||||||
|   tar xf $BM_BOOTFS -C /tmp/bootfs |  | ||||||
|   BM_BOOTFS=/tmp/bootfs |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| date +'%F %T' |  | ||||||
|  |  | ||||||
| # Install kernel modules (they could be in either /lib/modules or |  | ||||||
| # /usr/lib/modules, so install from whichever of the two exists) |  | ||||||
| [ -d $BM_BOOTFS/usr/lib/modules ] && rsync -a $BM_BOOTFS/usr/lib/modules/ /nfs/usr/lib/modules/ |  | ||||||
| [ -d $BM_BOOTFS/lib/modules ] && rsync -a $BM_BOOTFS/lib/modules/ /nfs/lib/modules/ |  | ||||||
|  |  | ||||||
| date +'%F %T' |  | ||||||
|  |  | ||||||
| # Install kernel image + bootloader files |  | ||||||
| rsync -aL --delete $BM_BOOTFS/boot/ /tftp/ |  | ||||||
|  |  | ||||||
| date +'%F %T' |  | ||||||
|  |  | ||||||
| # Set up the pxelinux config for Jetson Nano |  | ||||||
| mkdir -p /tftp/pxelinux.cfg |  | ||||||
| cat <<EOF >/tftp/pxelinux.cfg/default-arm-tegra210-p3450-0000 |  | ||||||
| PROMPT 0 |  | ||||||
| TIMEOUT 30 |  | ||||||
| DEFAULT primary |  | ||||||
| MENU TITLE jetson nano boot options |  | ||||||
| LABEL primary |  | ||||||
|       MENU LABEL CI kernel on TFTP |  | ||||||
|       LINUX Image |  | ||||||
|       FDT tegra210-p3450-0000.dtb |  | ||||||
|       APPEND \${cbootargs} $BM_CMDLINE |  | ||||||
| EOF |  | ||||||
|  |  | ||||||
| # Set up the pxelinux config for Jetson TK1 |  | ||||||
| cat <<EOF >/tftp/pxelinux.cfg/default-arm-tegra124-jetson-tk1 |  | ||||||
| PROMPT 0 |  | ||||||
| TIMEOUT 30 |  | ||||||
| DEFAULT primary |  | ||||||
| MENU TITLE jetson TK1 boot options |  | ||||||
| LABEL primary |  | ||||||
|       MENU LABEL CI kernel on TFTP |  | ||||||
|       LINUX zImage |  | ||||||
|       FDT tegra124-jetson-tk1.dtb |  | ||||||
|       APPEND \${cbootargs} $BM_CMDLINE |  | ||||||
| EOF |  | ||||||
|  |  | ||||||
| # Create the rootfs in the NFS directory |  | ||||||
| mkdir -p /nfs/results |  | ||||||
| . $BM/rootfs-setup.sh /nfs |  | ||||||
|  |  | ||||||
| date +'%F %T' |  | ||||||
|  |  | ||||||
| echo "$BM_CMDLINE" > /tftp/cmdline.txt |  | ||||||
|  |  | ||||||
| # Append the required boot options to config.txt |  | ||||||
| printf "$BM_BOOTCONFIG" >> /tftp/config.txt |  | ||||||
|  |  | ||||||
| set +e |  | ||||||
| ATTEMPTS=3 |  | ||||||
| while [ $((ATTEMPTS--)) -gt 0 ]; do |  | ||||||
|   python3 $BM/poe_run.py \ |  | ||||||
|           --dev="$BM_SERIAL" \ |  | ||||||
|           --powerup="$BM_POWERUP" \ |  | ||||||
|           --powerdown="$BM_POWERDOWN" \ |  | ||||||
|           --test-timeout ${TEST_PHASE_TIMEOUT:-20} |  | ||||||
|   ret=$? |  | ||||||
|  |  | ||||||
|   if [ $ret -eq 2 ]; then |  | ||||||
|     echo "Did not detect boot sequence, retrying..." |  | ||||||
|   else |  | ||||||
|     ATTEMPTS=0 |  | ||||||
|   fi |  | ||||||
| done |  | ||||||
| set -e |  | ||||||
|  |  | ||||||
| date +'%F %T' |  | ||||||
|  |  | ||||||
| # Bring artifacts back from the NFS dir to the build dir where gitlab-runner |  | ||||||
| # will look for them. |  | ||||||
| cp -Rp /nfs/results/. results/ |  | ||||||
|  |  | ||||||
| date +'%F %T' |  | ||||||
|  |  | ||||||
| exit $ret |  | ||||||
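To make the checks at the top of this script concrete, here is a hedged sketch of the runner- and job-level variables it expects, written as shell exports; every value below is illustrative rather than taken from a real runner:

  # gitlab-runner config.toml [[runners]] environment:
  export BM_SERIAL=/dev/ttyUSB0                            # serial port the DUT logs to
  export BM_POE_ADDRESS=10.42.0.1                          # PoE switch address
  export BM_POE_INTERFACE=3                                # switch port the DUT is wired to
  export BM_POWERUP=install/bare-metal/poe-powerup.sh      # illustrative path
  export BM_POWERDOWN=install/bare-metal/poe-powerdown.sh  # illustrative path
  # job variables:
  export BM_ROOTFS=/rootfs-arm64                           # illustrative
  export BM_BOOTFS=https://example.invalid/bootfs.tar
  export BM_CMDLINE="console=ttyS0,115200 root=/dev/nfs rw nfsroot=10.42.0.1:/nfs,v3 ip=dhcp"
  export BM_BOOTCONFIG="enable_uart=1\n"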
| @@ -1,115 +0,0 @@ | |||||||
| #!/usr/bin/env python3 |  | ||||||
| # |  | ||||||
| # Copyright © 2020 Igalia, S.L. |  | ||||||
| # |  | ||||||
| # Permission is hereby granted, free of charge, to any person obtaining a |  | ||||||
| # copy of this software and associated documentation files (the "Software"), |  | ||||||
| # to deal in the Software without restriction, including without limitation |  | ||||||
| # the rights to use, copy, modify, merge, publish, distribute, sublicense, |  | ||||||
| # and/or sell copies of the Software, and to permit persons to whom the |  | ||||||
| # Software is furnished to do so, subject to the following conditions: |  | ||||||
| # |  | ||||||
| # The above copyright notice and this permission notice (including the next |  | ||||||
| # paragraph) shall be included in all copies or substantial portions of the |  | ||||||
| # Software. |  | ||||||
| # |  | ||||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |  | ||||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |  | ||||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL |  | ||||||
| # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |  | ||||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |  | ||||||
| # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |  | ||||||
| # IN THE SOFTWARE. |  | ||||||
|  |  | ||||||
| import argparse |  | ||||||
| import os |  | ||||||
| import re |  | ||||||
| from serial_buffer import SerialBuffer |  | ||||||
| import sys |  | ||||||
| import threading |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class PoERun: |  | ||||||
|     def __init__(self, args, test_timeout): |  | ||||||
|         self.powerup = args.powerup |  | ||||||
|         self.powerdown = args.powerdown |  | ||||||
|         self.ser = SerialBuffer( |  | ||||||
|             args.dev, "results/serial-output.txt", "") |  | ||||||
|         self.test_timeout = test_timeout |  | ||||||
|  |  | ||||||
|     def print_error(self, message): |  | ||||||
|         RED = '\033[0;31m' |  | ||||||
|         NO_COLOR = '\033[0m' |  | ||||||
|         print(RED + message + NO_COLOR) |  | ||||||
|  |  | ||||||
|     def logged_system(self, cmd): |  | ||||||
|         print("Running '{}'".format(cmd)) |  | ||||||
|         return os.system(cmd) |  | ||||||
|  |  | ||||||
|     def run(self): |  | ||||||
|         if self.logged_system(self.powerup) != 0: |  | ||||||
|             return 1 |  | ||||||
|  |  | ||||||
|         boot_detected = False |  | ||||||
|         for line in self.ser.lines(timeout=5 * 60, phase="bootloader"): |  | ||||||
|             if re.search("Booting Linux", line): |  | ||||||
|                 boot_detected = True |  | ||||||
|                 break |  | ||||||
|  |  | ||||||
|         if not boot_detected: |  | ||||||
|             self.print_error( |  | ||||||
|                 "Something wrong; couldn't detect the boot start up sequence") |  | ||||||
|             return 1 |  | ||||||
|  |  | ||||||
|         for line in self.ser.lines(timeout=self.test_timeout, phase="test"): |  | ||||||
|             if re.search("---. end Kernel panic", line): |  | ||||||
|                 return 1 |  | ||||||
|  |  | ||||||
|             # Binning memory problems |  | ||||||
|             if re.search("binner overflow mem", line): |  | ||||||
|                 self.print_error("Memory overflow in the binner; GPU hang") |  | ||||||
|                 return 1 |  | ||||||
|  |  | ||||||
|             if re.search("nouveau 57000000.gpu: bus: MMIO read of 00000000 FAULT at 137000", line): |  | ||||||
|                 self.print_error("nouveau jetson boot bug, abandoning run.") |  | ||||||
|                 return 1 |  | ||||||
|  |  | ||||||
|             # network fail on tk1 |  | ||||||
|             if re.search("NETDEV WATCHDOG:.* transmit queue 0 timed out", line): |  | ||||||
|                 self.print_error("nouveau jetson tk1 network fail, abandoning run.") |  | ||||||
|                 return 1 |  | ||||||
|  |  | ||||||
|             result = re.search(r"hwci: mesa: (\S*)", line) |  | ||||||
|             if result: |  | ||||||
|                 if result.group(1) == "pass": |  | ||||||
|                     return 0 |  | ||||||
|                 else: |  | ||||||
|                     return 1 |  | ||||||
|  |  | ||||||
|         self.print_error( |  | ||||||
|             "Reached the end of the CPU serial log without finding a result") |  | ||||||
|         return 1 |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def main(): |  | ||||||
|     parser = argparse.ArgumentParser() |  | ||||||
|     parser.add_argument('--dev', type=str, |  | ||||||
|                         help='Serial device to monitor', required=True) |  | ||||||
|     parser.add_argument('--powerup', type=str, |  | ||||||
|                         help='shell command for rebooting', required=True) |  | ||||||
|     parser.add_argument('--powerdown', type=str, |  | ||||||
|                         help='shell command for powering off', required=True) |  | ||||||
|     parser.add_argument( |  | ||||||
|         '--test-timeout', type=int, help='Test phase timeout (minutes)', required=True) |  | ||||||
|     args = parser.parse_args() |  | ||||||
|  |  | ||||||
|     poe = PoERun(args, args.test_timeout * 60) |  | ||||||
|     retval = poe.run() |  | ||||||
|  |  | ||||||
|     poe.logged_system(args.powerdown) |  | ||||||
|  |  | ||||||
|     sys.exit(retval) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     main() |  | ||||||
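The pass/fail decision above hinges on a single serial line: the device-side harness must print `hwci: mesa: pass` for success, and anything else after the prefix is treated as a failure. A minimal sketch of what the DUT-side script would emit, with `run_all_tests` standing in as a placeholder for the real harness:

  # On the device, at the very end of the test run:
  if run_all_tests; then
      echo "hwci: mesa: pass"   # matched by poe_run.py -> job exits 0
  else
      echo "hwci: mesa: fail"   # any non-"pass" token -> job exits 1
  fi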
| @@ -1,37 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| rootfs_dst=$1 |  | ||||||
|  |  | ||||||
| mkdir -p $rootfs_dst/results |  | ||||||
|  |  | ||||||
| # Set up the init script that brings up the system. |  | ||||||
| cp $BM/bm-init.sh $rootfs_dst/init |  | ||||||
| cp $CI_COMMON/init*.sh $rootfs_dst/ |  | ||||||
|  |  | ||||||
| date +'%F %T' |  | ||||||
|  |  | ||||||
| # Make the JWT token available as a file in the bare-metal storage to enable |  | ||||||
| # access to MinIO |  | ||||||
| cp "${CI_JOB_JWT_FILE}" "${rootfs_dst}${CI_JOB_JWT_FILE}" |  | ||||||
|  |  | ||||||
| date +'%F %T' |  | ||||||
|  |  | ||||||
| cp $CI_COMMON/capture-devcoredump.sh $rootfs_dst/ |  | ||||||
| cp $CI_COMMON/intel-gpu-freq.sh $rootfs_dst/ |  | ||||||
| cp $CI_COMMON/kdl.sh $rootfs_dst/ |  | ||||||
| cp "$SCRIPTS_DIR/setup-test-env.sh" "$rootfs_dst/" |  | ||||||
|  |  | ||||||
| set +x |  | ||||||
|  |  | ||||||
| # Pass through relevant env vars from the gitlab job to the baremetal init script |  | ||||||
| echo "Variables passed through:" |  | ||||||
| "$CI_COMMON"/generate-env.sh | tee $rootfs_dst/set-job-env-vars.sh |  | ||||||
|  |  | ||||||
| set -x |  | ||||||
|  |  | ||||||
| # Add the Mesa drivers we built, and make a consistent symlink to them. |  | ||||||
| mkdir -p $rootfs_dst/$CI_PROJECT_DIR |  | ||||||
| rsync -aH --delete $CI_PROJECT_DIR/install/ $rootfs_dst/$CI_PROJECT_DIR/install/ |  | ||||||
|  |  | ||||||
| date +'%F %T' |  | ||||||
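A small sanity check that could be run right after this setup, confirming that the files the boot depends on actually landed in the rootfs; the paths follow directly from the copies above (with the rootfs mounted at /nfs in this pipeline):

  # Verify the init script, the job environment and the Mesa install are in place:
  for f in /nfs/init /nfs/set-job-env-vars.sh "/nfs$CI_PROJECT_DIR/install"; do
      test -e "$f" || echo "missing: $f"
  done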
| @@ -1,185 +0,0 @@ | |||||||
| #!/usr/bin/env python3 |  | ||||||
| # |  | ||||||
| # Copyright © 2020 Google LLC |  | ||||||
| # |  | ||||||
| # Permission is hereby granted, free of charge, to any person obtaining a |  | ||||||
| # copy of this software and associated documentation files (the "Software"), |  | ||||||
| # to deal in the Software without restriction, including without limitation |  | ||||||
| # the rights to use, copy, modify, merge, publish, distribute, sublicense, |  | ||||||
| # and/or sell copies of the Software, and to permit persons to whom the |  | ||||||
| # Software is furnished to do so, subject to the following conditions: |  | ||||||
| # |  | ||||||
| # The above copyright notice and this permission notice (including the next |  | ||||||
| # paragraph) shall be included in all copies or substantial portions of the |  | ||||||
| # Software. |  | ||||||
| # |  | ||||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |  | ||||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |  | ||||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL |  | ||||||
| # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |  | ||||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |  | ||||||
| # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |  | ||||||
| # IN THE SOFTWARE. |  | ||||||
|  |  | ||||||
| import argparse |  | ||||||
| from datetime import datetime, timezone |  | ||||||
| import queue |  | ||||||
| import serial |  | ||||||
| import threading |  | ||||||
| import time |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class SerialBuffer: |  | ||||||
|     def __init__(self, dev, filename, prefix, timeout=None, line_queue=None): |  | ||||||
|         self.filename = filename |  | ||||||
|         self.dev = dev |  | ||||||
|  |  | ||||||
|         if dev: |  | ||||||
|             self.f = open(filename, "wb+") |  | ||||||
|             self.serial = serial.Serial(dev, 115200, timeout=timeout) |  | ||||||
|         else: |  | ||||||
|             self.f = open(filename, "rb") |  | ||||||
|             self.serial = None |  | ||||||
|  |  | ||||||
|         self.byte_queue = queue.Queue() |  | ||||||
|         # allow multiple SerialBuffers to share a line queue, so that servo's |  | ||||||
|         # CPU and EC streams can be merged into a single stream for watching |  | ||||||
|         # boot/test progress. |  | ||||||
|         if line_queue: |  | ||||||
|             self.line_queue = line_queue |  | ||||||
|         else: |  | ||||||
|             self.line_queue = queue.Queue() |  | ||||||
|         self.prefix = prefix |  | ||||||
|         self.timeout = timeout |  | ||||||
|         self.sentinel = object() |  | ||||||
|         self.closing = False |  | ||||||
|  |  | ||||||
|         if self.dev: |  | ||||||
|             self.read_thread = threading.Thread( |  | ||||||
|                 target=self.serial_read_thread_loop, daemon=True) |  | ||||||
|         else: |  | ||||||
|             self.read_thread = threading.Thread( |  | ||||||
|                 target=self.serial_file_read_thread_loop, daemon=True) |  | ||||||
|         self.read_thread.start() |  | ||||||
|  |  | ||||||
|         self.lines_thread = threading.Thread( |  | ||||||
|             target=self.serial_lines_thread_loop, daemon=True) |  | ||||||
|         self.lines_thread.start() |  | ||||||
|  |  | ||||||
|     def close(self): |  | ||||||
|         self.closing = True |  | ||||||
|         if self.serial: |  | ||||||
|             self.serial.cancel_read() |  | ||||||
|         self.read_thread.join() |  | ||||||
|         self.lines_thread.join() |  | ||||||
|         if self.serial: |  | ||||||
|             self.serial.close() |  | ||||||
|  |  | ||||||
|     # Thread that just reads the bytes from the serial device, to keep its |  | ||||||
|     # buffer from overflowing. If a read times out with nothing received, it |  | ||||||
|     # finalizes. |  | ||||||
|     def serial_read_thread_loop(self): |  | ||||||
|         greet = "Serial thread reading from %s\n" % self.dev |  | ||||||
|         self.byte_queue.put(greet.encode()) |  | ||||||
|  |  | ||||||
|         while not self.closing: |  | ||||||
|             try: |  | ||||||
|                 b = self.serial.read() |  | ||||||
|                 if len(b) == 0: |  | ||||||
|                     break |  | ||||||
|                 self.byte_queue.put(b) |  | ||||||
|             except Exception as err: |  | ||||||
|                 print(self.prefix + str(err)) |  | ||||||
|                 break |  | ||||||
|         self.byte_queue.put(self.sentinel) |  | ||||||
|  |  | ||||||
|     # Thread that just reads the bytes from the file of serial output that some |  | ||||||
|     # other process is appending to. |  | ||||||
|     def serial_file_read_thread_loop(self): |  | ||||||
|         greet = "Serial thread reading from %s\n" % self.filename |  | ||||||
|         self.byte_queue.put(greet.encode()) |  | ||||||
|  |  | ||||||
|         while not self.closing: |  | ||||||
|             line = self.f.readline() |  | ||||||
|             if line: |  | ||||||
|                 self.byte_queue.put(line) |  | ||||||
|             else: |  | ||||||
|                 time.sleep(0.1) |  | ||||||
|         self.byte_queue.put(self.sentinel) |  | ||||||
|  |  | ||||||
|     # Thread that processes the stream of bytes to 1) log to stdout, 2) log to |  | ||||||
|     # file, 3) add to the queue of lines to be read by program logic |  | ||||||
|  |  | ||||||
|     def serial_lines_thread_loop(self): |  | ||||||
|         line = bytearray() |  | ||||||
|         while True: |  | ||||||
|             bytes = self.byte_queue.get(block=True) |  | ||||||
|  |  | ||||||
|             if bytes == self.sentinel: |  | ||||||
|                 self.read_thread.join() |  | ||||||
|                 self.line_queue.put(self.sentinel) |  | ||||||
|                 break |  | ||||||
|  |  | ||||||
|             # Write our data to the output file if we're the ones reading from |  | ||||||
|             # the serial device |  | ||||||
|             if self.dev: |  | ||||||
|                 self.f.write(bytes) |  | ||||||
|                 self.f.flush() |  | ||||||
|  |  | ||||||
|             for b in bytes: |  | ||||||
|                 line.append(b) |  | ||||||
|                 if b == b'\n'[0]: |  | ||||||
|                     line = line.decode(errors="replace") |  | ||||||
|  |  | ||||||
|                     time = datetime.now().strftime('%y-%m-%d %H:%M:%S') |  | ||||||
|                     print("{endc}{time} {prefix}{line}".format( |  | ||||||
|                         time=time, prefix=self.prefix, line=line, endc='\033[0m'), flush=True, end='') |  | ||||||
|  |  | ||||||
|                     self.line_queue.put(line) |  | ||||||
|                     line = bytearray() |  | ||||||
|  |  | ||||||
|     def lines(self, timeout=None, phase=None): |  | ||||||
|         start_time = time.monotonic() |  | ||||||
|         while True: |  | ||||||
|             read_timeout = None |  | ||||||
|             if timeout: |  | ||||||
|                 read_timeout = timeout - (time.monotonic() - start_time) |  | ||||||
|                 if read_timeout <= 0: |  | ||||||
|                     print("read timeout waiting for serial during {}".format(phase)) |  | ||||||
|                     self.close() |  | ||||||
|                     break |  | ||||||
|  |  | ||||||
|             try: |  | ||||||
|                 line = self.line_queue.get(timeout=read_timeout) |  | ||||||
|             except queue.Empty: |  | ||||||
|                 print("read timeout waiting for serial during {}".format(phase)) |  | ||||||
|                 self.close() |  | ||||||
|                 break |  | ||||||
|  |  | ||||||
|             if line == self.sentinel: |  | ||||||
|                 print("End of serial output") |  | ||||||
|                 self.lines_thread.join() |  | ||||||
|                 break |  | ||||||
|  |  | ||||||
|             yield line |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def main(): |  | ||||||
|     parser = argparse.ArgumentParser() |  | ||||||
|  |  | ||||||
|     parser.add_argument('--dev', type=str, help='Serial device') |  | ||||||
|     parser.add_argument('--file', type=str, |  | ||||||
|                         help='Filename for serial output', required=True) |  | ||||||
|     parser.add_argument('--prefix', type=str, |  | ||||||
|                         help='Prefix for logging serial to stdout', nargs='?') |  | ||||||
|  |  | ||||||
|     args = parser.parse_args() |  | ||||||
|  |  | ||||||
|     ser = SerialBuffer(args.dev, args.file, args.prefix or "") |  | ||||||
|     for line in ser.lines(): |  | ||||||
|         # We're just using this as a logger, so eat the produced lines and drop |  | ||||||
|         # them |  | ||||||
|         pass |  | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     main() |  | ||||||
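Besides being imported by poe_run.py, the module doubles as a standalone serial logger through its main(); a usage sketch, with the device path and prefix being illustrative:

  # Tee a serial console into a file while echoing timestamped lines to stdout:
  python3 serial_buffer.py --dev /dev/ttyUSB0 \
      --file results/serial-output.txt --prefix "cpu: "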
| @@ -1,41 +0,0 @@ | |||||||
| #!/usr/bin/python3 |  | ||||||
|  |  | ||||||
| # Copyright © 2020 Christian Gmeiner |  | ||||||
| # |  | ||||||
| # Permission is hereby granted, free of charge, to any person obtaining a |  | ||||||
| # copy of this software and associated documentation files (the "Software"), |  | ||||||
| # to deal in the Software without restriction, including without limitation |  | ||||||
| # the rights to use, copy, modify, merge, publish, distribute, sublicense, |  | ||||||
| # and/or sell copies of the Software, and to permit persons to whom the |  | ||||||
| # Software is furnished to do so, subject to the following conditions: |  | ||||||
| # |  | ||||||
| # The above copyright notice and this permission notice (including the next |  | ||||||
| # paragraph) shall be included in all copies or substantial portions of the |  | ||||||
| # Software. |  | ||||||
| # |  | ||||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |  | ||||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |  | ||||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL |  | ||||||
| # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |  | ||||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |  | ||||||
| # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |  | ||||||
| # IN THE SOFTWARE. |  | ||||||
| # |  | ||||||
| # Tiny script that reads bytes from telnet and writes them to stdout, with a |  | ||||||
| # buffer in between so we don't lose serial output when the reader falls behind. |  | ||||||
| # |  | ||||||
|  |  | ||||||
| import sys |  | ||||||
| import telnetlib |  | ||||||
|  |  | ||||||
| host = sys.argv[1] |  | ||||||
| port = sys.argv[2] |  | ||||||
|  |  | ||||||
| tn = telnetlib.Telnet(host, port, 1000000) |  | ||||||
|  |  | ||||||
| while True: |  | ||||||
|     data = tn.read_some() |  | ||||||
|     if not data: |  | ||||||
|         # EOF: the remote side closed the connection. |  | ||||||
|         break |  | ||||||
|     sys.stdout.buffer.write(data) |  | ||||||
|     sys.stdout.flush() |  | ||||||
|  |  | ||||||
| tn.close() |  | ||||||
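Usage is just a host and a port on the command line; a sketch with an illustrative script name and ser2net target:

  # Drain a telnet-exposed serial console into a log file:
  python3 telnet-buffer.py 10.42.0.1 4000 > results/serial-output.txt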
| @@ -1 +0,0 @@ | |||||||
| ../bin/ci |  | ||||||
| @@ -1,7 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
| # shellcheck disable=SC1091 |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
|  |  | ||||||
| _COMPILER=clang++-15 |  | ||||||
| . compiler-wrapper.sh |  | ||||||
| @@ -1,7 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
| # shellcheck disable=SC1091 |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
|  |  | ||||||
| _COMPILER=clang++ |  | ||||||
| . compiler-wrapper.sh |  | ||||||
| @@ -1,7 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
| # shellcheck disable=SC1091 |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
|  |  | ||||||
| _COMPILER=clang-15 |  | ||||||
| . compiler-wrapper.sh |  | ||||||
| @@ -1,7 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
| # shellcheck disable=SC1091 |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
|  |  | ||||||
| _COMPILER=clang |  | ||||||
| . compiler-wrapper.sh |  | ||||||
| @@ -1,7 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
| # shellcheck disable=SC1091 |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
|  |  | ||||||
| _COMPILER=g++ |  | ||||||
| . compiler-wrapper.sh |  | ||||||
| @@ -1,7 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
| # shellcheck disable=SC1091 |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
|  |  | ||||||
| _COMPILER=gcc |  | ||||||
| . compiler-wrapper.sh |  | ||||||
| @@ -1,21 +0,0 @@ | |||||||
| # shellcheck disable=SC1091 |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
| if command -V ccache >/dev/null 2>/dev/null; then |  | ||||||
|   CCACHE=ccache |  | ||||||
| else |  | ||||||
|   CCACHE= |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if echo "$@" | grep -E 'meson-private/tmp[^ /]*/testfile.c' >/dev/null; then |  | ||||||
|     # Invoked for meson feature check |  | ||||||
|     exec $CCACHE $_COMPILER "$@" |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ "$(eval printf "'%s'" "\"\${$(($#-1))}\"")" = "-c" ]; then |  | ||||||
|     # Not invoked for linking |  | ||||||
|     exec $CCACHE $_COMPILER "$@" |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # Compiler invoked by ninja for linking. Add -Werror to turn compiler warnings into errors |  | ||||||
| # with LTO. (meson's werror should arguably do this, but meanwhile we need to) |  | ||||||
| exec $CCACHE $_COMPILER "$@" -Werror |  | ||||||
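Put differently: the wrapper appends -Werror only when the command line is neither a meson feature probe nor a compile step (second-to-last argument `-c`), i.e. only at link time. A quick, self-contained way to see which branch a given command line would take, reusing the same test as the script (the sample arguments are illustrative):

  # Simulate the decision for a typical compile invocation:
  set -- cc -MD -MQ obj/foo.o -o obj/foo.o -c src/foo.c
  if [ "$(eval printf "'%s'" "\"\${$(($#-1))}\"")" = "-c" ]; then
      echo "compile step: passed through unchanged"
  else
      echo "link step: -Werror appended"
  fi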
| @@ -1,728 +0,0 @@ | |||||||
| # Shared between windows and Linux |  | ||||||
| .build-common: |  | ||||||
|   extends: .container+build-rules |  | ||||||
|   # Cancel job if a newer commit is pushed to the same branch |  | ||||||
|   interruptible: true |  | ||||||
|   # Build jobs don't take more than 1-3 minutes. 5-8 min max on a fresh runner |  | ||||||
|   # without a populated ccache. |  | ||||||
|   # These jobs are never slow, either they finish within reasonable time or |  | ||||||
|   # something has gone wrong and the job will never terminate, so we should |  | ||||||
|   # instead timeout so that the retry mechanism can kick in. |  | ||||||
|   # A few exceptions are made; see `timeout:` overrides in the rest of this |  | ||||||
|   # file. |  | ||||||
|   timeout: 30m |  | ||||||
|   artifacts: |  | ||||||
|     name: "mesa_${CI_JOB_NAME}" |  | ||||||
|     when: always |  | ||||||
|     paths: |  | ||||||
|       - _build/meson-logs/*.txt |  | ||||||
|       - _build/meson-logs/strace |  | ||||||
|       - shader-db |  | ||||||
|       - artifacts |  | ||||||
|  |  | ||||||
| # Just Linux |  | ||||||
| .build-linux: |  | ||||||
|   extends: .build-common |  | ||||||
|   variables: |  | ||||||
|     CCACHE_COMPILERCHECK: "content" |  | ||||||
|     CCACHE_COMPRESS: "true" |  | ||||||
|     CCACHE_DIR: /cache/mesa/ccache |  | ||||||
|   # Use ccache transparently, and print stats before/after |  | ||||||
|   before_script: |  | ||||||
|     - !reference [default, before_script] |  | ||||||
|     - | |  | ||||||
|       export PATH="/usr/lib/ccache:$PATH" |  | ||||||
|       export CCACHE_BASEDIR="$PWD" |  | ||||||
|       if test -x /usr/bin/ccache; then |  | ||||||
|         section_start ccache_before "ccache stats before build" |  | ||||||
|         ccache --show-stats |  | ||||||
|         section_end ccache_before |  | ||||||
|       fi |  | ||||||
|   after_script: |  | ||||||
|     - if test -x /usr/bin/ccache; then ccache --show-stats | grep "Hits:"; fi |  | ||||||
|     - !reference [default, after_script] |  | ||||||
|  |  | ||||||
| .build-windows: |  | ||||||
|   extends: |  | ||||||
|     - .build-common |  | ||||||
|     - .windows-docker-tags |  | ||||||
|   cache: |  | ||||||
|     key: ${CI_JOB_NAME} |  | ||||||
|     paths: |  | ||||||
|       - subprojects/packagecache |  | ||||||
|  |  | ||||||
| .meson-build: |  | ||||||
|   extends: |  | ||||||
|     - .build-linux |  | ||||||
|     - .use-debian/x86_64_build |  | ||||||
|   stage: build-x86_64 |  | ||||||
|   variables: |  | ||||||
|     LLVM_VERSION: 15 |  | ||||||
|   script: |  | ||||||
|     - .gitlab-ci/meson/build.sh |  | ||||||
|  |  | ||||||
| .meson-build_mingw: |  | ||||||
|   extends: |  | ||||||
|     - .build-linux |  | ||||||
|     - .use-debian/x86_64_build_mingw |  | ||||||
|     - .use-wine |  | ||||||
|   stage: build-x86_64 |  | ||||||
|   script: |  | ||||||
|     - .gitlab-ci/meson/build.sh |  | ||||||
|  |  | ||||||
| debian-testing: |  | ||||||
|   extends: |  | ||||||
|     - .meson-build |  | ||||||
|     - .ci-deqp-artifacts |  | ||||||
|   variables: |  | ||||||
|     UNWIND: "enabled" |  | ||||||
|     DRI_LOADERS: > |  | ||||||
|       -D glx=dri |  | ||||||
|       -D gbm=enabled |  | ||||||
|       -D egl=enabled |  | ||||||
|       -D platforms=x11,wayland |  | ||||||
|     GALLIUM_ST: > |  | ||||||
|       -D dri3=enabled |  | ||||||
|       -D gallium-va=enabled |  | ||||||
|     GALLIUM_DRIVERS: "swrast,virgl,radeonsi,zink,crocus,iris,i915" |  | ||||||
|     VULKAN_DRIVERS: "swrast,amd,intel,intel_hasvk,virtio,nouveau-experimental" |  | ||||||
|     BUILDTYPE: "debugoptimized" |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -D spirv-to-dxil=true |  | ||||||
|       -D valgrind=disabled |  | ||||||
|       -D perfetto=true |  | ||||||
|       -D tools=drm-shim |  | ||||||
|     S3_ARTIFACT_NAME: mesa-x86_64-default-${BUILDTYPE} |  | ||||||
|     LLVM_VERSION: 15 |  | ||||||
|   script: |  | ||||||
|     - .gitlab-ci/meson/build.sh |  | ||||||
|     - .gitlab-ci/prepare-artifacts.sh |  | ||||||
|   artifacts: |  | ||||||
|     reports: |  | ||||||
|       junit: artifacts/ci_scripts_report.xml |  | ||||||
|  |  | ||||||
| debian-testing-asan: |  | ||||||
|   extends: |  | ||||||
|     - debian-testing |  | ||||||
|   variables: |  | ||||||
|     C_ARGS: > |  | ||||||
|       -Wno-error=stringop-truncation |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -D b_sanitize=address |  | ||||||
|       -D valgrind=disabled |  | ||||||
|       -D tools=dlclose-skip |  | ||||||
|     S3_ARTIFACT_NAME: "" |  | ||||||
|     ARTIFACTS_DEBUG_SYMBOLS: 1 |  | ||||||
|  |  | ||||||
| debian-testing-msan: |  | ||||||
|   # https://github.com/google/sanitizers/wiki/MemorySanitizerLibcxxHowTo |  | ||||||
|   # MSan cannot fully work until it is used together with an MSan-instrumented libc |  | ||||||
|   extends: |  | ||||||
|     - debian-clang |  | ||||||
|   variables: |  | ||||||
|     # l_undef is incompatible with msan |  | ||||||
|     EXTRA_OPTION: |  | ||||||
|       -D b_sanitize=memory |  | ||||||
|       -D b_lundef=false |  | ||||||
|     S3_ARTIFACT_NAME: "" |  | ||||||
|     ARTIFACTS_DEBUG_SYMBOLS: 1 |  | ||||||
|     # Don't run all the tests yet: |  | ||||||
|     # GLSL has some issues in sexpression reading. |  | ||||||
|     # gtest has issues in its test initialization. |  | ||||||
|     MESON_TEST_ARGS: "--suite glcpp --suite format" |  | ||||||
|     GALLIUM_DRIVERS: "freedreno,iris,nouveau,kmsro,r300,r600,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,radeonsi,tegra,d3d12,crocus" |  | ||||||
|     VULKAN_DRIVERS: intel,amd,broadcom,virtio |  | ||||||
|  |  | ||||||
| .debian-cl-testing: |  | ||||||
|   extends: |  | ||||||
|     - .meson-build |  | ||||||
|     - .ci-deqp-artifacts |  | ||||||
|   variables: |  | ||||||
|     LLVM_VERSION: 15 |  | ||||||
|     UNWIND: "enabled" |  | ||||||
|     DRI_LOADERS: > |  | ||||||
|       -D glx=disabled |  | ||||||
|       -D egl=disabled |  | ||||||
|       -D gbm=disabled |  | ||||||
|     GALLIUM_DRIVERS: "swrast" |  | ||||||
|     BUILDTYPE: "debugoptimized" |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -D valgrind=disabled |  | ||||||
|   script: |  | ||||||
|     - .gitlab-ci/meson/build.sh |  | ||||||
|     - .gitlab-ci/prepare-artifacts.sh |  | ||||||
|  |  | ||||||
| debian-rusticl-testing: |  | ||||||
|   extends: |  | ||||||
|     - .debian-cl-testing |  | ||||||
|   variables: |  | ||||||
|     GALLIUM_ST: > |  | ||||||
|       -D gallium-rusticl=true |  | ||||||
|       -D opencl-spirv=true |  | ||||||
|  |  | ||||||
| debian-build-testing: |  | ||||||
|   extends: .meson-build |  | ||||||
|   variables: |  | ||||||
|     BUILDTYPE: debug |  | ||||||
|     UNWIND: "enabled" |  | ||||||
|     DRI_LOADERS: > |  | ||||||
|       -D glx=dri |  | ||||||
|       -D gbm=enabled |  | ||||||
|       -D egl=enabled |  | ||||||
|       -D platforms=x11,wayland |  | ||||||
|     GALLIUM_ST: > |  | ||||||
|       -D dri3=enabled |  | ||||||
|       -D gallium-extra-hud=true |  | ||||||
|       -D gallium-vdpau=enabled |  | ||||||
|       -D gallium-omx=bellagio |  | ||||||
|       -D gallium-va=enabled |  | ||||||
|       -D gallium-xa=enabled |  | ||||||
|       -D gallium-nine=true |  | ||||||
|       -D gallium-opencl=disabled |  | ||||||
|       -D gallium-rusticl=false |  | ||||||
|     GALLIUM_DRIVERS: "iris,nouveau,kmsro,r300,r600,freedreno,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,d3d12,asahi,crocus" |  | ||||||
|     VULKAN_DRIVERS: swrast |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -D spirv-to-dxil=true |  | ||||||
|       -D osmesa=true |  | ||||||
|       -D tools=drm-shim,etnaviv,freedreno,glsl,intel,intel-ui,nir,nouveau,lima,panfrost,asahi |  | ||||||
|       -D b_lto=true |  | ||||||
|     LLVM_VERSION: 15 |  | ||||||
|   script: | |  | ||||||
|     section_start lava-pytest "lava-pytest" |  | ||||||
|     .gitlab-ci/lava/lava-pytest.sh |  | ||||||
|     section_switch shellcheck "shellcheck" |  | ||||||
|     .gitlab-ci/run-shellcheck.sh |  | ||||||
|     section_switch yamllint "yamllint" |  | ||||||
|     .gitlab-ci/run-yamllint.sh |  | ||||||
|     section_switch meson "meson" |  | ||||||
|     .gitlab-ci/meson/build.sh |  | ||||||
|     section_switch shader-db "shader-db" |  | ||||||
|     .gitlab-ci/run-shader-db.sh |  | ||||||
|   timeout: 30m |  | ||||||
|  |  | ||||||
| # Test a release build with -Werror so new warnings don't sneak in. |  | ||||||
| debian-release: |  | ||||||
|   extends: .meson-build |  | ||||||
|   variables: |  | ||||||
|     LLVM_VERSION: 15 |  | ||||||
|     UNWIND: "enabled" |  | ||||||
|     C_ARGS: > |  | ||||||
|       -Wno-error=stringop-overread |  | ||||||
|     DRI_LOADERS: > |  | ||||||
|       -D glx=dri |  | ||||||
|       -D gbm=enabled |  | ||||||
|       -D egl=enabled |  | ||||||
|       -D platforms=x11,wayland |  | ||||||
|     GALLIUM_ST: > |  | ||||||
|       -D dri3=enabled |  | ||||||
|       -D gallium-extra-hud=true |  | ||||||
|       -D gallium-vdpau=enabled |  | ||||||
|       -D gallium-omx=disabled |  | ||||||
|       -D gallium-va=enabled |  | ||||||
|       -D gallium-xa=enabled |  | ||||||
|       -D gallium-nine=false |  | ||||||
|       -D gallium-opencl=disabled |  | ||||||
|       -D gallium-rusticl=false |  | ||||||
|       -D llvm=enabled |  | ||||||
|     GALLIUM_DRIVERS: "i915,iris,nouveau,kmsro,freedreno,r300,svga,swrast,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,d3d12,crocus" |  | ||||||
|     VULKAN_DRIVERS: "amd,imagination-experimental,microsoft-experimental" |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -D spirv-to-dxil=true |  | ||||||
|       -D osmesa=true |  | ||||||
|       -D tools=all |  | ||||||
|       -D intel-clc=enabled |  | ||||||
|       -D imagination-srv=true |  | ||||||
|     BUILDTYPE: "release" |  | ||||||
|     S3_ARTIFACT_NAME: "mesa-x86_64-default-${BUILDTYPE}" |  | ||||||
|   script: |  | ||||||
|     - .gitlab-ci/meson/build.sh |  | ||||||
|     - 'if [ -n "$MESA_CI_PERFORMANCE_ENABLED" ]; then .gitlab-ci/prepare-artifacts.sh; fi' |  | ||||||
|  |  | ||||||
| alpine-build-testing: |  | ||||||
|   extends: |  | ||||||
|     - .meson-build |  | ||||||
|     - .use-alpine/x86_64_build |  | ||||||
|   stage: build-x86_64 |  | ||||||
|   variables: |  | ||||||
|     BUILDTYPE: "release" |  | ||||||
|     C_ARGS: > |  | ||||||
|       -Wno-error=cpp |  | ||||||
|       -Wno-error=array-bounds |  | ||||||
|       -Wno-error=stringop-overread |  | ||||||
|     DRI_LOADERS: > |  | ||||||
|       -D glx=disabled |  | ||||||
|       -D gbm=enabled |  | ||||||
|       -D egl=enabled |  | ||||||
|       -D glvnd=false |  | ||||||
|       -D platforms=wayland |  | ||||||
|     LLVM_VERSION: "" |  | ||||||
|     GALLIUM_DRIVERS: "crocus,etnaviv,freedreno,iris,kmsro,lima,nouveau,panfrost,r300,r600,radeonsi,svga,swrast,tegra,v3d,vc4,virgl,zink" |  | ||||||
|     GALLIUM_ST: > |  | ||||||
|       -D dri3=enabled |  | ||||||
|       -D gallium-extra-hud=true |  | ||||||
|       -D gallium-vdpau=disabled |  | ||||||
|       -D gallium-omx=disabled |  | ||||||
|       -D gallium-va=enabled |  | ||||||
|       -D gallium-xa=disabled |  | ||||||
|       -D gallium-nine=true |  | ||||||
|       -D gallium-rusticl=false |  | ||||||
|       -D gles1=disabled |  | ||||||
|       -D gles2=enabled |  | ||||||
|       -D llvm=enabled |  | ||||||
|       -D microsoft-clc=disabled |  | ||||||
|       -D shared-llvm=enabled |  | ||||||
|     UNWIND: "disabled" |  | ||||||
|     VULKAN_DRIVERS: "amd,broadcom,freedreno,intel,imagination-experimental" |  | ||||||
|   script: |  | ||||||
|     - .gitlab-ci/meson/build.sh |  | ||||||
|  |  | ||||||
| fedora-release: |  | ||||||
|   extends: |  | ||||||
|     - .meson-build |  | ||||||
|     - .use-fedora/x86_64_build |  | ||||||
|   variables: |  | ||||||
|     BUILDTYPE: "release" |  | ||||||
|     C_LINK_ARGS: > |  | ||||||
|       -Wno-error=stringop-overflow |  | ||||||
|       -Wno-error=stringop-overread |  | ||||||
|     CPP_ARGS: > |  | ||||||
|       -Wno-error=dangling-reference |  | ||||||
|       -Wno-error=overloaded-virtual |  | ||||||
|     CPP_LINK_ARGS: > |  | ||||||
|       -Wno-error=stringop-overflow |  | ||||||
|       -Wno-error=stringop-overread |  | ||||||
|     DRI_LOADERS: > |  | ||||||
|       -D glx=dri |  | ||||||
|       -D gbm=enabled |  | ||||||
|       -D egl=enabled |  | ||||||
|       -D glvnd=true |  | ||||||
|       -D platforms=x11,wayland |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -D b_lto=true |  | ||||||
|       -D osmesa=true |  | ||||||
|       -D selinux=true |  | ||||||
|       -D tools=drm-shim,etnaviv,freedreno,glsl,intel,nir,nouveau,lima,panfrost,imagination |  | ||||||
|       -D vulkan-layers=device-select,overlay |  | ||||||
|       -D intel-clc=enabled |  | ||||||
|       -D imagination-srv=true |  | ||||||
|     GALLIUM_DRIVERS: "crocus,etnaviv,freedreno,i915,iris,kmsro,lima,nouveau,panfrost,r300,r600,radeonsi,svga,swrast,tegra,v3d,vc4,virgl,zink" |  | ||||||
|     GALLIUM_ST: > |  | ||||||
|       -D dri3=enabled |  | ||||||
|       -D gallium-extra-hud=true |  | ||||||
|       -D gallium-vdpau=enabled |  | ||||||
|       -D gallium-omx=disabled |  | ||||||
|       -D gallium-va=enabled |  | ||||||
|       -D gallium-xa=enabled |  | ||||||
|       -D gallium-nine=false |  | ||||||
|       -D gallium-opencl=icd |  | ||||||
|       -D gallium-rusticl=true |  | ||||||
|       -D gles1=disabled |  | ||||||
|       -D gles2=enabled |  | ||||||
|       -D llvm=enabled |  | ||||||
|       -D microsoft-clc=disabled |  | ||||||
|       -D shared-llvm=enabled |  | ||||||
|     LLVM_VERSION: "" |  | ||||||
|     UNWIND: "disabled" |  | ||||||
|     VULKAN_DRIVERS: "amd,broadcom,freedreno,imagination-experimental,intel,intel_hasvk" |  | ||||||
|   script: |  | ||||||
|     - .gitlab-ci/meson/build.sh |  | ||||||
|  |  | ||||||
| debian-android: |  | ||||||
|   extends: |  | ||||||
|     - .meson-cross |  | ||||||
|     - .use-debian/android_build |  | ||||||
|     - .ci-deqp-artifacts |  | ||||||
|   variables: |  | ||||||
|     BUILDTYPE: debug |  | ||||||
|     UNWIND: "disabled" |  | ||||||
|     C_ARGS: > |  | ||||||
|       -Wno-error=asm-operand-widths |  | ||||||
|       -Wno-error=constant-conversion |  | ||||||
|       -Wno-error=enum-conversion |  | ||||||
|       -Wno-error=initializer-overrides |  | ||||||
|       -Wno-error=sometimes-uninitialized |  | ||||||
|     CPP_ARGS: > |  | ||||||
|       -Wno-error=c99-designator |  | ||||||
|       -Wno-error=unused-variable |  | ||||||
|       -Wno-error=unused-but-set-variable |  | ||||||
|       -Wno-error=self-assign |  | ||||||
|     DRI_LOADERS: > |  | ||||||
|       -D glx=disabled |  | ||||||
|       -D gbm=disabled |  | ||||||
|       -D egl=enabled |  | ||||||
|       -D platforms=android |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -D android-stub=true |  | ||||||
|       -D llvm=disabled |  | ||||||
|       -D platform-sdk-version=33 |  | ||||||
|       -D valgrind=disabled |  | ||||||
|       -D android-libbacktrace=disabled |  | ||||||
|     GALLIUM_ST: > |  | ||||||
|       -D dri3=disabled |  | ||||||
|       -D gallium-vdpau=disabled |  | ||||||
|       -D gallium-omx=disabled |  | ||||||
|       -D gallium-va=disabled |  | ||||||
|       -D gallium-xa=disabled |  | ||||||
|       -D gallium-nine=false |  | ||||||
|       -D gallium-opencl=disabled |  | ||||||
|       -D gallium-rusticl=false |  | ||||||
|     LLVM_VERSION: "" |  | ||||||
|     PKG_CONFIG_LIBDIR: "/disable/non/android/system/pc/files" |  | ||||||
|     ARTIFACTS_DEBUG_SYMBOLS: 1 |  | ||||||
|     S3_ARTIFACT_NAME: mesa-x86_64-android-${BUILDTYPE} |  | ||||||
|   script: |  | ||||||
|     - CROSS=aarch64-linux-android GALLIUM_DRIVERS=etnaviv,freedreno,lima,panfrost,vc4,v3d VULKAN_DRIVERS=freedreno,broadcom,virtio .gitlab-ci/meson/build.sh |  | ||||||
|     # x86_64 build: |  | ||||||
|     # Can't do Intel because gen_decoder.c currently requires libexpat, which |  | ||||||
|     # is not a dependency that AOSP wants to accept.  Can't do Radeon Gallium |  | ||||||
|     # drivers because they require LLVM, which we don't have an Android build |  | ||||||
|     # of. |  | ||||||
|     - CROSS=x86_64-linux-android GALLIUM_DRIVERS=iris,virgl VULKAN_DRIVERS=amd,intel .gitlab-ci/meson/build.sh |  | ||||||
|     - .gitlab-ci/prepare-artifacts.sh |  | ||||||
|  |  | ||||||
| .meson-cross: |  | ||||||
|   extends: |  | ||||||
|     - .meson-build |  | ||||||
|   stage: build-misc |  | ||||||
|   variables: |  | ||||||
|     UNWIND: "disabled" |  | ||||||
|     DRI_LOADERS: > |  | ||||||
|       -D glx=dri |  | ||||||
|       -D gbm=enabled |  | ||||||
|       -D egl=enabled |  | ||||||
|       -D platforms=x11,wayland |  | ||||||
|       -D osmesa=false |  | ||||||
|     GALLIUM_ST: > |  | ||||||
|       -D dri3=enabled |  | ||||||
|       -D gallium-vdpau=disabled |  | ||||||
|       -D gallium-omx=disabled |  | ||||||
|       -D gallium-va=disabled |  | ||||||
|       -D gallium-xa=disabled |  | ||||||
|       -D gallium-nine=false |  | ||||||
|  |  | ||||||
| .meson-arm: |  | ||||||
|   extends: |  | ||||||
|     - .meson-cross |  | ||||||
|     - .use-debian/arm64_build |  | ||||||
|   needs: |  | ||||||
|     - debian/arm64_build |  | ||||||
|   variables: |  | ||||||
|     VULKAN_DRIVERS: freedreno,broadcom |  | ||||||
|     GALLIUM_DRIVERS: "etnaviv,freedreno,kmsro,lima,nouveau,panfrost,swrast,tegra,v3d,vc4,zink" |  | ||||||
|     BUILDTYPE: "debugoptimized" |  | ||||||
|   tags: |  | ||||||
|     - aarch64 |  | ||||||
|  |  | ||||||
| debian-arm32: |  | ||||||
|   extends: |  | ||||||
|     - .meson-arm |  | ||||||
|     - .ci-deqp-artifacts |  | ||||||
|   variables: |  | ||||||
|     CROSS: armhf |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -D llvm=disabled |  | ||||||
|       -D valgrind=disabled |  | ||||||
|     S3_ARTIFACT_NAME: mesa-arm32-default-${BUILDTYPE} |  | ||||||
|     # The strip command segfaults, failing to strip the binary and leaving |  | ||||||
|     # tempfiles in our artifacts. |  | ||||||
|     ARTIFACTS_DEBUG_SYMBOLS: 1 |  | ||||||
|   script: |  | ||||||
|     - .gitlab-ci/meson/build.sh |  | ||||||
|     - .gitlab-ci/prepare-artifacts.sh |  | ||||||
|  |  | ||||||
| debian-arm32-asan: |  | ||||||
|   extends: |  | ||||||
|     - debian-arm32 |  | ||||||
|   variables: |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -D llvm=disabled |  | ||||||
|       -D b_sanitize=address |  | ||||||
|       -D valgrind=disabled |  | ||||||
|       -D tools=dlclose-skip |  | ||||||
|     ARTIFACTS_DEBUG_SYMBOLS: 1 |  | ||||||
|     S3_ARTIFACT_NAME: mesa-arm32-asan-${BUILDTYPE} |  | ||||||
|     MESON_TEST_ARGS: "--no-suite mesa:compiler --no-suite mesa:util" |  | ||||||
|  |  | ||||||
| debian-arm64: |  | ||||||
|   extends: |  | ||||||
|     - .meson-arm |  | ||||||
|     - .ci-deqp-artifacts |  | ||||||
|   variables: |  | ||||||
|     C_ARGS: > |  | ||||||
|       -Wno-error=array-bounds |  | ||||||
|       -Wno-error=stringop-truncation |  | ||||||
|     VULKAN_DRIVERS: "freedreno,broadcom,panfrost,imagination-experimental" |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -D llvm=disabled |  | ||||||
|       -D valgrind=disabled |  | ||||||
|       -D imagination-srv=true |  | ||||||
|       -D perfetto=true |  | ||||||
|       -D freedreno-kmds=msm,virtio |  | ||||||
|     S3_ARTIFACT_NAME: mesa-arm64-default-${BUILDTYPE} |  | ||||||
|   script: |  | ||||||
|     - .gitlab-ci/meson/build.sh |  | ||||||
|     - .gitlab-ci/prepare-artifacts.sh |  | ||||||
|  |  | ||||||
| debian-arm64-asan: |  | ||||||
|   extends: |  | ||||||
|     - debian-arm64 |  | ||||||
|   variables: |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -D llvm=disabled |  | ||||||
|       -D b_sanitize=address |  | ||||||
|       -D valgrind=disabled |  | ||||||
|       -D tools=dlclose-skip |  | ||||||
|     ARTIFACTS_DEBUG_SYMBOLS: 1 |  | ||||||
|     S3_ARTIFACT_NAME: mesa-arm64-asan-${BUILDTYPE} |  | ||||||
|     MESON_TEST_ARGS: "--no-suite mesa:compiler" |  | ||||||
|  |  | ||||||
| debian-arm64-build-test: |  | ||||||
|   extends: |  | ||||||
|     - .meson-arm |  | ||||||
|     - .ci-deqp-artifacts |  | ||||||
|   variables: |  | ||||||
|     VULKAN_DRIVERS: "amd" |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -Dtools=panfrost,imagination |  | ||||||
|   script: |  | ||||||
|     - .gitlab-ci/meson/build.sh |  | ||||||
|  |  | ||||||
| debian-arm64-release: |  | ||||||
|   extends: |  | ||||||
|     - debian-arm64 |  | ||||||
|   variables: |  | ||||||
|     BUILDTYPE: release |  | ||||||
|     S3_ARTIFACT_NAME: mesa-arm64-default-${BUILDTYPE} |  | ||||||
|     C_ARGS: > |  | ||||||
|       -Wno-error=array-bounds |  | ||||||
|       -Wno-error=stringop-truncation |  | ||||||
|       -Wno-error=stringop-overread |  | ||||||
|   script: |  | ||||||
|     - .gitlab-ci/meson/build.sh |  | ||||||
|     - 'if [ -n "$MESA_CI_PERFORMANCE_ENABLED" ]; then .gitlab-ci/prepare-artifacts.sh; fi' |  | ||||||
|  |  | ||||||
| debian-clang: |  | ||||||
|   extends: .meson-build |  | ||||||
|   variables: |  | ||||||
|     BUILDTYPE: debug |  | ||||||
|     LLVM_VERSION: 15 |  | ||||||
|     UNWIND: "enabled" |  | ||||||
|     GALLIUM_DUMP_CPU: "true" |  | ||||||
|     C_ARGS: > |  | ||||||
|       -Wno-error=constant-conversion |  | ||||||
|       -Wno-error=enum-conversion |  | ||||||
|       -Wno-error=initializer-overrides |  | ||||||
|       -Wno-error=sometimes-uninitialized |  | ||||||
|     CPP_ARGS: > |  | ||||||
|       -Wno-error=c99-designator |  | ||||||
|       -Wno-error=overloaded-virtual |  | ||||||
|       -Wno-error=tautological-constant-out-of-range-compare |  | ||||||
|       -Wno-error=unused-private-field |  | ||||||
|     DRI_LOADERS: > |  | ||||||
|       -D glx=dri |  | ||||||
|       -D gbm=enabled |  | ||||||
|       -D egl=enabled |  | ||||||
|       -D glvnd=true |  | ||||||
|       -D platforms=x11,wayland |  | ||||||
|     GALLIUM_ST: > |  | ||||||
|       -D dri3=enabled |  | ||||||
|       -D gallium-extra-hud=true |  | ||||||
|       -D gallium-vdpau=enabled |  | ||||||
|       -D gallium-omx=bellagio |  | ||||||
|       -D gallium-va=enabled |  | ||||||
|       -D gallium-xa=enabled |  | ||||||
|       -D gallium-nine=true |  | ||||||
|       -D gallium-opencl=icd |  | ||||||
|       -D gles1=enabled |  | ||||||
|       -D gles2=enabled |  | ||||||
|       -D llvm=enabled |  | ||||||
|       -D microsoft-clc=disabled |  | ||||||
|       -D shared-llvm=enabled |  | ||||||
|       -D opencl-spirv=true |  | ||||||
|       -D shared-glapi=enabled |  | ||||||
|     GALLIUM_DRIVERS: "iris,nouveau,kmsro,r300,r600,freedreno,swrast,svga,v3d,vc4,virgl,etnaviv,panfrost,lima,zink,radeonsi,tegra,d3d12,crocus,i915,asahi" |  | ||||||
|     VULKAN_DRIVERS: intel,amd,freedreno,broadcom,virtio,swrast,panfrost,imagination-experimental,microsoft-experimental |  | ||||||
|     EXTRA_OPTION: |  | ||||||
|       -D spirv-to-dxil=true |  | ||||||
|       -D osmesa=true |  | ||||||
|       -D imagination-srv=true |  | ||||||
|       -D tools=drm-shim,etnaviv,freedreno,glsl,intel,intel-ui,nir,nouveau,lima,panfrost,asahi,imagination |  | ||||||
|       -D vulkan-layers=device-select,overlay |  | ||||||
|       -D build-aco-tests=true |  | ||||||
|       -D intel-clc=enabled |  | ||||||
|     CC: clang-${LLVM_VERSION} |  | ||||||
|     CXX: clang++-${LLVM_VERSION} |  | ||||||
|  |  | ||||||
| debian-clang-release: |  | ||||||
|   extends: debian-clang |  | ||||||
|   variables: |  | ||||||
|     BUILDTYPE: "release" |  | ||||||
|     DRI_LOADERS: > |  | ||||||
|       -D glx=xlib |  | ||||||
|       -D platforms=x11,wayland |  | ||||||
|     GALLIUM_ST: > |  | ||||||
|       -D dri3=enabled |  | ||||||
|       -D gallium-extra-hud=true |  | ||||||
|       -D gallium-vdpau=enabled |  | ||||||
|       -D gallium-omx=bellagio |  | ||||||
|       -D gallium-va=enabled |  | ||||||
|       -D gallium-xa=enabled |  | ||||||
|       -D gallium-nine=true |  | ||||||
|       -D gallium-opencl=icd |  | ||||||
|       -D gles1=disabled |  | ||||||
|       -D gles2=disabled |  | ||||||
|       -D llvm=enabled |  | ||||||
|       -D microsoft-clc=disabled |  | ||||||
|       -D shared-llvm=enabled |  | ||||||
|       -D opencl-spirv=true |  | ||||||
|       -D shared-glapi=disabled |  | ||||||
|  |  | ||||||
| windows-vs2019: |  | ||||||
|   extends: |  | ||||||
|     - .build-windows |  | ||||||
|     - .use-windows_build_vs2019 |  | ||||||
|     - .windows-build-rules |  | ||||||
|   stage: build-misc |  | ||||||
|   script: |  | ||||||
|     - pwsh -ExecutionPolicy RemoteSigned .\.gitlab-ci\windows\mesa_build.ps1 |  | ||||||
|   artifacts: |  | ||||||
|     paths: |  | ||||||
|       - _build/meson-logs/*.txt |  | ||||||
|       - _install/ |  | ||||||
|  |  | ||||||
| .debian-cl: |  | ||||||
|   extends: .meson-build |  | ||||||
|   variables: |  | ||||||
|     LLVM_VERSION: 15 |  | ||||||
|     UNWIND: "enabled" |  | ||||||
|     DRI_LOADERS: > |  | ||||||
|       -D glx=disabled |  | ||||||
|       -D egl=disabled |  | ||||||
|       -D gbm=disabled |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -D valgrind=disabled |  | ||||||
|  |  | ||||||
| debian-rusticl: |  | ||||||
|   extends: .debian-cl |  | ||||||
|   variables: |  | ||||||
|     BUILDTYPE: debug |  | ||||||
|     GALLIUM_DRIVERS: "iris,swrast" |  | ||||||
|     GALLIUM_ST: > |  | ||||||
|       -D dri3=disabled |  | ||||||
|       -D gallium-vdpau=disabled |  | ||||||
|       -D gallium-omx=disabled |  | ||||||
|       -D gallium-va=disabled |  | ||||||
|       -D gallium-xa=disabled |  | ||||||
|       -D gallium-nine=false |  | ||||||
|       -D gallium-opencl=disabled |  | ||||||
|       -D gallium-rusticl=true |  | ||||||
|     RUSTC: clippy-driver |  | ||||||
|  |  | ||||||
| debian-vulkan: |  | ||||||
|   extends: .meson-build |  | ||||||
|   variables: |  | ||||||
|     BUILDTYPE: debug |  | ||||||
|     LLVM_VERSION: 15 |  | ||||||
|     UNWIND: "disabled" |  | ||||||
|     DRI_LOADERS: > |  | ||||||
|       -D glx=disabled |  | ||||||
|       -D gbm=disabled |  | ||||||
|       -D egl=disabled |  | ||||||
|       -D platforms=x11,wayland |  | ||||||
|       -D osmesa=false |  | ||||||
|     GALLIUM_ST: > |  | ||||||
|       -D dri3=enabled |  | ||||||
|       -D gallium-vdpau=disabled |  | ||||||
|       -D gallium-omx=disabled |  | ||||||
|       -D gallium-va=disabled |  | ||||||
|       -D gallium-xa=disabled |  | ||||||
|       -D gallium-nine=false |  | ||||||
|       -D gallium-opencl=disabled |  | ||||||
|       -D gallium-rusticl=false |  | ||||||
|       -D b_sanitize=undefined |  | ||||||
|       -D c_args=-fno-sanitize-recover=all |  | ||||||
|       -D cpp_args=-fno-sanitize-recover=all |  | ||||||
|     UBSAN_OPTIONS: "print_stacktrace=1" |  | ||||||
|     VULKAN_DRIVERS: amd,broadcom,freedreno,intel,intel_hasvk,virtio,imagination-experimental,microsoft-experimental |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -D vulkan-layers=device-select,overlay |  | ||||||
|       -D build-aco-tests=true |  | ||||||
|       -D intel-clc=disabled |  | ||||||
|       -D imagination-srv=true |  | ||||||
|  |  | ||||||
| debian-x86_32: |  | ||||||
|   extends: |  | ||||||
|     - .meson-cross |  | ||||||
|     - .use-debian/x86_32_build |  | ||||||
|   variables: |  | ||||||
|     BUILDTYPE: debug |  | ||||||
|     CROSS: i386 |  | ||||||
|     VULKAN_DRIVERS: intel,amd,swrast,virtio |  | ||||||
|     GALLIUM_DRIVERS: "iris,nouveau,r300,r600,radeonsi,swrast,virgl,zink,crocus" |  | ||||||
|     LLVM_VERSION: 15 |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -D vulkan-layers=device-select,overlay |  | ||||||
|  |  | ||||||
| debian-s390x: |  | ||||||
|   extends: |  | ||||||
|     - debian-ppc64el |  | ||||||
|     - .use-debian/s390x_build |  | ||||||
|     - .s390x-rules |  | ||||||
|   tags: |  | ||||||
|     - kvm |  | ||||||
|   variables: |  | ||||||
|     CROSS: s390x |  | ||||||
|     GALLIUM_DRIVERS: "swrast,zink" |  | ||||||
|     LLVM_VERSION: 15 |  | ||||||
|     VULKAN_DRIVERS: "swrast" |  | ||||||
|  |  | ||||||
| debian-ppc64el: |  | ||||||
|   extends: |  | ||||||
|     - .meson-cross |  | ||||||
|     - .use-debian/ppc64el_build |  | ||||||
|     - .ppc64el-rules |  | ||||||
|   variables: |  | ||||||
|     BUILDTYPE: debug |  | ||||||
|     CROSS: ppc64el |  | ||||||
|     GALLIUM_DRIVERS: "nouveau,radeonsi,swrast,virgl,zink" |  | ||||||
|     VULKAN_DRIVERS: "amd,swrast" |  | ||||||
|  |  | ||||||
| # Disabled as it hangs with winedbg on shared runners |  | ||||||
| .debian-mingw32-x86_64: |  | ||||||
|   extends: .meson-build_mingw |  | ||||||
|   stage: build-misc |  | ||||||
|   variables: |  | ||||||
|     UNWIND: "disabled" |  | ||||||
|     C_ARGS: > |  | ||||||
|       -Wno-error=format |  | ||||||
|       -Wno-error=unused-but-set-variable |  | ||||||
|     CPP_ARGS: > |  | ||||||
|       -Wno-error=format |  | ||||||
|       -Wno-error=unused-function |  | ||||||
|       -Wno-error=unused-variable |  | ||||||
|       -Wno-error=sign-compare |  | ||||||
|       -Wno-error=narrowing |  | ||||||
|     GALLIUM_DRIVERS: "swrast,d3d12,zink" |  | ||||||
|     VULKAN_DRIVERS: "swrast,amd,microsoft-experimental" |  | ||||||
|     GALLIUM_ST: > |  | ||||||
|       -D gallium-opencl=icd |  | ||||||
|       -D gallium-rusticl=false |  | ||||||
|       -D opencl-spirv=true |  | ||||||
|       -D microsoft-clc=enabled |  | ||||||
|       -D static-libclc=all |  | ||||||
|       -D opencl-external-clang-headers=disabled |  | ||||||
|       -D llvm=enabled |  | ||||||
|       -D gallium-va=enabled |  | ||||||
|       -D video-codecs=h264dec,h264enc,h265dec,h265enc,vc1dec |  | ||||||
|     EXTRA_OPTION: > |  | ||||||
|       -D min-windows-version=7 |  | ||||||
|       -D spirv-to-dxil=true |  | ||||||
|       -D gles1=enabled |  | ||||||
|       -D gles2=enabled |  | ||||||
|       -D osmesa=true |  | ||||||
|       -D cpp_rtti=true |  | ||||||
|       -D shared-glapi=enabled |  | ||||||
|       -D zlib=enabled |  | ||||||
|       --cross-file=.gitlab-ci/x86_64-w64-mingw32 |  | ||||||
| @@ -1,35 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
| # shellcheck disable=SC2035 |  | ||||||
| # shellcheck disable=SC2061 |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| while true; do |  | ||||||
|   devcds=$(find /sys/devices/virtual/devcoredump/ -name data 2>/dev/null) |  | ||||||
|   for i in $devcds; do |  | ||||||
|     echo "Found a devcoredump at $i." |  | ||||||
|     if cp $i /results/first.devcore; then |  | ||||||
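|       # Writing anything back to the devcoredump 'data' node tells the kernel |  | ||||||
|       # to discard the dump. |  | ||||||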
|       echo 1 > $i |  | ||||||
|       echo "Saved to the job artifacts at /first.devcore" |  | ||||||
|       exit 0 |  | ||||||
|     fi |  | ||||||
|   done |  | ||||||
|   i915_error_states=$(find /sys/devices/ -path */drm/card*/error) |  | ||||||
|   for i in $i915_error_states; do |  | ||||||
|     tmpfile=$(mktemp) |  | ||||||
|     cp "$i" "$tmpfile" |  | ||||||
|     filesize=$(stat --printf="%s" "$tmpfile") |  | ||||||
|     # Does the file contain "No error state collected" ? |  | ||||||
|     if [ "$filesize" = 25 ]; then |  | ||||||
|         rm "$tmpfile" |  | ||||||
|     else |  | ||||||
|         echo "Found an i915 error state at $i size=$filesize." |  | ||||||
|         if cp "$tmpfile" /results/first.i915_error_state; then |  | ||||||
|             rm "$tmpfile" |  | ||||||
|             echo 1 > "$i" |  | ||||||
|             echo "Saved to the job artifacts at /first.i915_error_state" |  | ||||||
|             exit 0 |  | ||||||
|         fi |  | ||||||
|     fi |  | ||||||
|   done |  | ||||||
|   sleep 10 |  | ||||||
| done |  | ||||||
| @@ -1,128 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| for var in \ |  | ||||||
|     ACO_DEBUG \ |  | ||||||
|     ASAN_OPTIONS \ |  | ||||||
|     BASE_SYSTEM_FORK_HOST_PREFIX \ |  | ||||||
|     BASE_SYSTEM_MAINLINE_HOST_PREFIX \ |  | ||||||
|     CI_COMMIT_BRANCH \ |  | ||||||
|     CI_COMMIT_REF_NAME \ |  | ||||||
|     CI_COMMIT_TITLE \ |  | ||||||
|     CI_JOB_ID \ |  | ||||||
|     CI_JOB_JWT_FILE \ |  | ||||||
|     CI_JOB_STARTED_AT \ |  | ||||||
|     CI_JOB_NAME \ |  | ||||||
|     CI_JOB_URL \ |  | ||||||
|     CI_MERGE_REQUEST_SOURCE_BRANCH_NAME \ |  | ||||||
|     CI_MERGE_REQUEST_TITLE \ |  | ||||||
|     CI_NODE_INDEX \ |  | ||||||
|     CI_NODE_TOTAL \ |  | ||||||
|     CI_PAGES_DOMAIN \ |  | ||||||
|     CI_PIPELINE_ID \ |  | ||||||
|     CI_PIPELINE_URL \ |  | ||||||
|     CI_PROJECT_DIR \ |  | ||||||
|     CI_PROJECT_NAME \ |  | ||||||
|     CI_PROJECT_PATH \ |  | ||||||
|     CI_PROJECT_ROOT_NAMESPACE \ |  | ||||||
|     CI_RUNNER_DESCRIPTION \ |  | ||||||
|     CI_SERVER_URL \ |  | ||||||
|     CROSVM_GALLIUM_DRIVER \ |  | ||||||
|     CROSVM_GPU_ARGS \ |  | ||||||
|     CURRENT_SECTION \ |  | ||||||
|     DEQP_BIN_DIR \ |  | ||||||
|     DEQP_CONFIG \ |  | ||||||
|     DEQP_EXPECTED_RENDERER \ |  | ||||||
|     DEQP_FRACTION \ |  | ||||||
|     DEQP_HEIGHT \ |  | ||||||
|     DEQP_RESULTS_DIR \ |  | ||||||
|     DEQP_RUNNER_OPTIONS \ |  | ||||||
|     DEQP_SUITE \ |  | ||||||
|     DEQP_TEMP_DIR \ |  | ||||||
|     DEQP_VARIANT \ |  | ||||||
|     DEQP_VER \ |  | ||||||
|     DEQP_WIDTH \ |  | ||||||
|     DEVICE_NAME \ |  | ||||||
|     DRIVER_NAME \ |  | ||||||
|     EGL_PLATFORM \ |  | ||||||
|     ETNA_MESA_DEBUG \ |  | ||||||
|     FDO_CI_CONCURRENT \ |  | ||||||
|     FDO_UPSTREAM_REPO \ |  | ||||||
|     FD_MESA_DEBUG \ |  | ||||||
|     FLAKES_CHANNEL \ |  | ||||||
|     FREEDRENO_HANGCHECK_MS \ |  | ||||||
|     GALLIUM_DRIVER \ |  | ||||||
|     GALLIVM_PERF \ |  | ||||||
|     GPU_VERSION \ |  | ||||||
|     GTEST \ |  | ||||||
|     GTEST_FAILS \ |  | ||||||
|     GTEST_FRACTION \ |  | ||||||
|     GTEST_RESULTS_DIR \ |  | ||||||
|     GTEST_RUNNER_OPTIONS \ |  | ||||||
|     GTEST_SKIPS \ |  | ||||||
|     HWCI_FREQ_MAX \ |  | ||||||
|     HWCI_KERNEL_MODULES \ |  | ||||||
|     HWCI_KVM \ |  | ||||||
|     HWCI_START_WESTON \ |  | ||||||
|     HWCI_START_XORG \ |  | ||||||
|     HWCI_TEST_SCRIPT \ |  | ||||||
|     IR3_SHADER_DEBUG \ |  | ||||||
|     JOB_ARTIFACTS_BASE \ |  | ||||||
|     JOB_RESULTS_PATH \ |  | ||||||
|     JOB_ROOTFS_OVERLAY_PATH \ |  | ||||||
|     KERNEL_IMAGE_BASE \ |  | ||||||
|     KERNEL_IMAGE_NAME \ |  | ||||||
|     LD_LIBRARY_PATH \ |  | ||||||
|     LP_NUM_THREADS \ |  | ||||||
|     MESA_BASE_TAG \ |  | ||||||
|     MESA_BUILD_PATH \ |  | ||||||
|     MESA_DEBUG \ |  | ||||||
|     MESA_GLES_VERSION_OVERRIDE \ |  | ||||||
|     MESA_GLSL_VERSION_OVERRIDE \ |  | ||||||
|     MESA_GL_VERSION_OVERRIDE \ |  | ||||||
|     MESA_IMAGE \ |  | ||||||
|     MESA_IMAGE_PATH \ |  | ||||||
|     MESA_IMAGE_TAG \ |  | ||||||
|     MESA_LOADER_DRIVER_OVERRIDE \ |  | ||||||
|     MESA_TEMPLATES_COMMIT \ |  | ||||||
|     MESA_VK_IGNORE_CONFORMANCE_WARNING \ |  | ||||||
|     S3_HOST \ |  | ||||||
|     S3_RESULTS_UPLOAD \ |  | ||||||
|     NIR_DEBUG \ |  | ||||||
|     PAN_I_WANT_A_BROKEN_VULKAN_DRIVER \ |  | ||||||
|     PAN_MESA_DEBUG \ |  | ||||||
|     PIGLIT_FRACTION \ |  | ||||||
|     PIGLIT_NO_WINDOW \ |  | ||||||
|     PIGLIT_OPTIONS \ |  | ||||||
|     PIGLIT_PLATFORM \ |  | ||||||
|     PIGLIT_PROFILES \ |  | ||||||
|     PIGLIT_REPLAY_ARTIFACTS_BASE_URL \ |  | ||||||
|     PIGLIT_REPLAY_DESCRIPTION_FILE \ |  | ||||||
|     PIGLIT_REPLAY_DEVICE_NAME \ |  | ||||||
|     PIGLIT_REPLAY_EXTRA_ARGS \ |  | ||||||
|     PIGLIT_REPLAY_LOOP_TIMES \ |  | ||||||
|     PIGLIT_REPLAY_REFERENCE_IMAGES_BASE \ |  | ||||||
|     PIGLIT_REPLAY_SUBCOMMAND \ |  | ||||||
|     PIGLIT_RESULTS \ |  | ||||||
|     PIGLIT_TESTS \ |  | ||||||
|     PIPELINE_ARTIFACTS_BASE \ |  | ||||||
|     RADV_DEBUG \ |  | ||||||
|     RADV_PERFTEST \ |  | ||||||
|     SKQP_ASSETS_DIR \ |  | ||||||
|     SKQP_BACKENDS \ |  | ||||||
|     TU_DEBUG \ |  | ||||||
|     USE_ANGLE \ |  | ||||||
|     VIRGL_HOST_API \ |  | ||||||
|     WAFFLE_PLATFORM \ |  | ||||||
|     VK_CPU \ |  | ||||||
|     VK_DRIVER \ |  | ||||||
|     VK_ICD_FILENAMES \ |  | ||||||
|     VKD3D_PROTON_RESULTS \ |  | ||||||
|     VKD3D_CONFIG \ |  | ||||||
|     ZINK_DESCRIPTORS \ |  | ||||||
|     ZINK_DEBUG \ |  | ||||||
|     LVP_POISON_MEMORY \ |  | ||||||
|     ; do |  | ||||||
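|   # ${!var+x} expands to "x" only when the variable named in $var is set (even |  | ||||||
|   # if empty), and ${!var@Q} quotes its value so the generated export line can |  | ||||||
|   # be re-sourced safely later. |  | ||||||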
|   if [ -n "${!var+x}" ]; then |  | ||||||
|     echo "export $var=${!var@Q}" |  | ||||||
|   fi |  | ||||||
| done |  | ||||||
| @@ -1,25 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
|  |  | ||||||
| # Very early init, used to make sure devices and network are set up and |  | ||||||
| # reachable. |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| cd / |  | ||||||
|  |  | ||||||
| mount -t proc none /proc |  | ||||||
| mount -t sysfs none /sys |  | ||||||
| mount -t debugfs none /sys/kernel/debug |  | ||||||
| mount -t devtmpfs none /dev || echo possibly already mounted |  | ||||||
| mkdir -p /dev/pts |  | ||||||
| mount -t devpts devpts /dev/pts |  | ||||||
| mkdir /dev/shm |  | ||||||
| mount -t tmpfs -o noexec,nodev,nosuid tmpfs /dev/shm |  | ||||||
| mount -t tmpfs tmpfs /tmp |  | ||||||
|  |  | ||||||
| echo "nameserver 8.8.8.8" > /etc/resolv.conf |  | ||||||
| [ -z "$NFS_SERVER_IP" ] || echo "$NFS_SERVER_IP caching-proxy" >> /etc/hosts |  | ||||||
|  |  | ||||||
| # Set the time so we can validate certificates before we fetch anything; |  | ||||||
| # however, as not all DUTs have network access, make this non-fatal. |  | ||||||
| for _ in 1 2 3; do sntp -sS pool.ntp.org && break || sleep 2; done || true |  | ||||||
| @@ -1,226 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC1090 |  | ||||||
| # shellcheck disable=SC1091 |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
| # shellcheck disable=SC2155 |  | ||||||
|  |  | ||||||
| # Second-stage init, used to set up devices and our job environment before |  | ||||||
| # running tests. |  | ||||||
|  |  | ||||||
| # Make sure to kill this script and all of its child processes on exit, since |  | ||||||
| # any stray console output may interfere with LAVA signal handling, which is |  | ||||||
| # based on the log console. |  | ||||||
| cleanup() { |  | ||||||
|   if [ "$BACKGROUND_PIDS" = "" ]; then |  | ||||||
|     return 0 |  | ||||||
|   fi |  | ||||||
|  |  | ||||||
|   set +x |  | ||||||
|   echo "Killing all child processes" |  | ||||||
|   for pid in $BACKGROUND_PIDS |  | ||||||
|   do |  | ||||||
|     kill "$pid" 2>/dev/null || true |  | ||||||
|   done |  | ||||||
|  |  | ||||||
|   # Sleep just a little to give enough time for subprocesses to be gracefully |  | ||||||
|   # killed. Then apply a SIGKILL if necessary. |  | ||||||
|   sleep 5 |  | ||||||
|   for pid in $BACKGROUND_PIDS |  | ||||||
|   do |  | ||||||
|     kill -9 "$pid" 2>/dev/null || true |  | ||||||
|   done |  | ||||||
|  |  | ||||||
|   BACKGROUND_PIDS= |  | ||||||
|   set -x |  | ||||||
| } |  | ||||||
| trap cleanup INT TERM EXIT |  | ||||||
|  |  | ||||||
| # Space-separated list of the PIDs of the processes started in the |  | ||||||
| # background by this script. |  | ||||||
| BACKGROUND_PIDS= |  | ||||||
|  |  | ||||||
|  |  | ||||||
| for path in '/dut-env-vars.sh' '/set-job-env-vars.sh' './set-job-env-vars.sh'; do |  | ||||||
|     [ -f "$path" ] && source "$path" |  | ||||||
| done |  | ||||||
| . "$SCRIPTS_DIR"/setup-test-env.sh |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| # Set up any devices required by the jobs |  | ||||||
| [ -z "$HWCI_KERNEL_MODULES" ] || { |  | ||||||
|     echo -n $HWCI_KERNEL_MODULES | xargs -d, -n1 /usr/sbin/modprobe |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # Set up ZRAM |  | ||||||
| HWCI_ZRAM_SIZE=2G |  | ||||||
| if /sbin/zramctl --find --size $HWCI_ZRAM_SIZE -a zstd; then |  | ||||||
|     mkswap /dev/zram0 |  | ||||||
|     swapon /dev/zram0 |  | ||||||
|     echo "zram: $HWCI_ZRAM_SIZE activated" |  | ||||||
| else |  | ||||||
|     echo "zram: skipping, not supported" |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Load the KVM module specific to the detected CPU virtualization extensions: |  | ||||||
| # - vmx for Intel VT |  | ||||||
| # - svm for AMD-V |  | ||||||
| # |  | ||||||
| # Additionally, download the kernel image to boot the VM via HWCI_TEST_SCRIPT. |  | ||||||
| # |  | ||||||
| if [ "$HWCI_KVM" = "true" ]; then |  | ||||||
|     unset KVM_KERNEL_MODULE |  | ||||||
|     { |  | ||||||
|       grep -qs '\bvmx\b' /proc/cpuinfo && KVM_KERNEL_MODULE=kvm_intel |  | ||||||
|     } || { |  | ||||||
|       grep -qs '\bsvm\b' /proc/cpuinfo && KVM_KERNEL_MODULE=kvm_amd |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     { |  | ||||||
|       [ -z "${KVM_KERNEL_MODULE}" ] && \ |  | ||||||
|       echo "WARNING: Failed to detect CPU virtualization extensions" |  | ||||||
|     } || \ |  | ||||||
|         modprobe ${KVM_KERNEL_MODULE} |  | ||||||
|  |  | ||||||
|     mkdir -p /lava-files |  | ||||||
|     curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
| 	-o "/lava-files/${KERNEL_IMAGE_NAME}" \ |  | ||||||
|         "${KERNEL_IMAGE_BASE}/amd64/${KERNEL_IMAGE_NAME}" |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # Fix prefix confusion: the build installs to $CI_PROJECT_DIR, but we expect |  | ||||||
| # it in /install |  | ||||||
| ln -sf $CI_PROJECT_DIR/install /install |  | ||||||
| export LD_LIBRARY_PATH=/install/lib |  | ||||||
| export LIBGL_DRIVERS_PATH=/install/lib/dri |  | ||||||
|  |  | ||||||
| # https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/22495#note_1876691 |  | ||||||
| # The navi21 boards seem to have trouble with ld.so.cache, so try explicitly |  | ||||||
| # telling it to look in /usr/local/lib. |  | ||||||
| export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib |  | ||||||
|  |  | ||||||
| # Store Mesa's disk cache under /tmp, rather than sending it out over NFS. |  | ||||||
| export XDG_CACHE_HOME=/tmp |  | ||||||
|  |  | ||||||
| # Make sure Python can find all our imports |  | ||||||
| export PYTHONPATH=$(python3 -c "import sys;print(\":\".join(sys.path))") |  | ||||||
|  |  | ||||||
| if [ "$HWCI_FREQ_MAX" = "true" ]; then |  | ||||||
|   # Ensure initialization of the DRM device (needed by MSM) |  | ||||||
|   head -0 /dev/dri/renderD128 |  | ||||||
|  |  | ||||||
|   # Disable GPU frequency scaling |  | ||||||
|   DEVFREQ_GOVERNOR=$(find /sys/devices -name governor | grep gpu || true) |  | ||||||
|   test -z "$DEVFREQ_GOVERNOR" || echo performance > $DEVFREQ_GOVERNOR || true |  | ||||||
|  |  | ||||||
|   # Disable CPU frequency scaling |  | ||||||
|   echo performance | tee -a /sys/devices/system/cpu/cpufreq/policy*/scaling_governor || true |  | ||||||
|  |  | ||||||
|   # Disable GPU runtime power management |  | ||||||
|   GPU_AUTOSUSPEND=$(find /sys/devices -name autosuspend_delay_ms | grep gpu | head -1) |  | ||||||
|   test -z "$GPU_AUTOSUSPEND" || echo -1 > $GPU_AUTOSUSPEND || true |  | ||||||
|   # Lock Intel GPU frequency to 70% of the maximum allowed by hardware |  | ||||||
|   # and enable throttling detection & reporting. |  | ||||||
|   # Additionally, set the upper limit for CPU scaling frequency to 65% of the |  | ||||||
|   # maximum permitted, as an additional measure to mitigate thermal throttling. |  | ||||||
|   /intel-gpu-freq.sh -s 70% --cpu-set-max 65% -g all -d |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # Start a little daemon to capture sysfs records and produce a JSON file |  | ||||||
| if [ -x /kdl.sh ]; then |  | ||||||
|   echo "launch kdl.sh!" |  | ||||||
|   /kdl.sh & |  | ||||||
|   BACKGROUND_PIDS="$! $BACKGROUND_PIDS" |  | ||||||
| else |  | ||||||
|   echo "kdl.sh not found!" |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # Increase the freedreno hangcheck timer because the default is right at the |  | ||||||
| # edge of the spilling tests (and some traces, too) timing out. |  | ||||||
| if [ -n "$FREEDRENO_HANGCHECK_MS" ]; then |  | ||||||
|     echo $FREEDRENO_HANGCHECK_MS | tee -a /sys/kernel/debug/dri/128/hangcheck_period_ms |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # Start a little daemon to capture the first devcoredump we encounter.  (They |  | ||||||
| # expire after 5 minutes, so we poll for them). |  | ||||||
| if [ -x /capture-devcoredump.sh ]; then |  | ||||||
|   /capture-devcoredump.sh & |  | ||||||
|   BACKGROUND_PIDS="$! $BACKGROUND_PIDS" |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # If we want Xorg to be running for the test, then we start it up before the |  | ||||||
| # HWCI_TEST_SCRIPT because we need to use xinit to start X (otherwise |  | ||||||
| # without using -displayfd you can race with Xorg's startup), but xinit will eat |  | ||||||
| # your client's return code |  | ||||||
| if [ -n "$HWCI_START_XORG" ]; then |  | ||||||
|   echo "touch /xorg-started; sleep 100000" > /xorg-script |  | ||||||
|   env \ |  | ||||||
|     VK_ICD_FILENAMES="/install/share/vulkan/icd.d/${VK_DRIVER}_icd.$(uname -m).json" \ |  | ||||||
|     xinit /bin/sh /xorg-script -- /usr/bin/Xorg -noreset -s 0 -dpms -logfile /Xorg.0.log & |  | ||||||
|   BACKGROUND_PIDS="$! $BACKGROUND_PIDS" |  | ||||||
|  |  | ||||||
|   # Wait for xorg to be ready for connections. |  | ||||||
|   for _ in 1 2 3 4 5; do |  | ||||||
|     if [ -e /xorg-started ]; then |  | ||||||
|       break |  | ||||||
|     fi |  | ||||||
|     sleep 5 |  | ||||||
|   done |  | ||||||
|   export DISPLAY=:0 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [ -n "$HWCI_START_WESTON" ]; then |  | ||||||
|   WESTON_X11_SOCK="/tmp/.X11-unix/X0" |  | ||||||
|   if [ -n "$HWCI_START_XORG" ]; then |  | ||||||
|     echo "Please consider dropping HWCI_START_XORG and instead using Weston XWayland for testing." |  | ||||||
|     WESTON_X11_SOCK="/tmp/.X11-unix/X1" |  | ||||||
|   fi |  | ||||||
|   export WAYLAND_DISPLAY=wayland-0 |  | ||||||
|  |  | ||||||
| # The display server is Weston XWayland when HWCI_START_XORG is not set, or Xorg when it is. |  | ||||||
|   export DISPLAY=:0 |  | ||||||
|   mkdir -p /tmp/.X11-unix |  | ||||||
|  |  | ||||||
|   env \ |  | ||||||
|     VK_ICD_FILENAMES="/install/share/vulkan/icd.d/${VK_DRIVER}_icd.$(uname -m).json" \ |  | ||||||
|     weston -Bheadless-backend.so --use-gl -Swayland-0 --xwayland --idle-time=0 & |  | ||||||
|   BACKGROUND_PIDS="$! $BACKGROUND_PIDS" |  | ||||||
|  |  | ||||||
|   while [ ! -S "$WESTON_X11_SOCK" ]; do sleep 1; done |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| set +e |  | ||||||
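| # Run the test script in a child shell with the test environment sourced, |  | ||||||
| # capturing its exit status in EXIT_CODE. |  | ||||||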
| bash -c ". $SCRIPTS_DIR/setup-test-env.sh && $HWCI_TEST_SCRIPT" |  | ||||||
| EXIT_CODE=$? |  | ||||||
| set -e |  | ||||||
|  |  | ||||||
| # Let's make sure the results are always stored in current working directory |  | ||||||
| mv -f ${CI_PROJECT_DIR}/results ./ 2>/dev/null || true |  | ||||||
|  |  | ||||||
| [ ${EXIT_CODE} -ne 0 ] || rm -rf results/trace/"$PIGLIT_REPLAY_DEVICE_NAME" |  | ||||||
|  |  | ||||||
| # Make sure that capture-devcoredump is done before we start trying to tar up |  | ||||||
| # artifacts -- if it's writing while tar is reading, tar will throw an error and |  | ||||||
| # kill the job. |  | ||||||
| cleanup |  | ||||||
|  |  | ||||||
| # upload artifacts |  | ||||||
| if [ -n "$S3_RESULTS_UPLOAD" ]; then |  | ||||||
|   tar --zstd -cf results.tar.zst results/; |  | ||||||
|   ci-fairy s3cp --token-file "${CI_JOB_JWT_FILE}" results.tar.zst https://"$S3_RESULTS_UPLOAD"/results.tar.zst; |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # We still need to echo the hwci: mesa message, as some scripts rely on it, |  | ||||||
| # such as the Python ones inside the bare-metal folder. |  | ||||||
| [ ${EXIT_CODE} -eq 0 ] && RESULT=pass || RESULT=fail |  | ||||||
|  |  | ||||||
| set +x |  | ||||||
|  |  | ||||||
| # Print the final result; both bare-metal and LAVA look for this string to get |  | ||||||
| # the result of our run, so try really hard to get it out rather than losing |  | ||||||
| # the run. The device gets shut down right at this point, and a630 seems to |  | ||||||
| # enjoy corrupting the last line of serial output before shutdown. |  | ||||||
| for _ in $(seq 0 3); do echo "hwci: mesa: $RESULT"; sleep 1; echo; done |  | ||||||
|  |  | ||||||
| exit $EXIT_CODE |  | ||||||
| @@ -1,768 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
| # shellcheck disable=SC2013 |  | ||||||
| # shellcheck disable=SC2015 |  | ||||||
| # shellcheck disable=SC2034 |  | ||||||
| # shellcheck disable=SC2046 |  | ||||||
| # shellcheck disable=SC2059 |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
| # shellcheck disable=SC2154 |  | ||||||
| # shellcheck disable=SC2155 |  | ||||||
| # shellcheck disable=SC2162 |  | ||||||
| # shellcheck disable=SC2229 |  | ||||||
| # |  | ||||||
| # This is a utility script to manage Intel GPU frequencies. |  | ||||||
| # It can be used for debugging performance problems or trying to obtain a stable |  | ||||||
| # frequency while benchmarking. |  | ||||||
| # |  | ||||||
| # Note that the Intel i915 GPU driver allows changing the minimum, maximum and boost |  | ||||||
| # frequencies in steps of 50 MHz via: |  | ||||||
| # |  | ||||||
| # /sys/class/drm/card<n>/<freq_info> |  | ||||||
| # |  | ||||||
| # Where <n> is the DRM card index and <freq_info> one of the following: |  | ||||||
| # |  | ||||||
| # - gt_max_freq_mhz (enforced maximum freq) |  | ||||||
| # - gt_min_freq_mhz (enforced minimum freq) |  | ||||||
| # - gt_boost_freq_mhz (enforced boost freq) |  | ||||||
| # |  | ||||||
| # The hardware capabilities can be accessed via: |  | ||||||
| # |  | ||||||
| # - gt_RP0_freq_mhz (supported maximum freq) |  | ||||||
| # - gt_RPn_freq_mhz (supported minimum freq) |  | ||||||
| # - gt_RP1_freq_mhz (most efficient freq) |  | ||||||
| # |  | ||||||
| # The current frequency can be read from: |  | ||||||
| # - gt_act_freq_mhz (the actual GPU freq) |  | ||||||
| # - gt_cur_freq_mhz (the last requested freq) |  | ||||||
| # |  | ||||||
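| # For example (assuming DRM card index 0), the enforced maximum could be |  | ||||||
| # inspected and then lowered with something like: |  | ||||||
| # |  | ||||||
| #   cat /sys/class/drm/card0/gt_max_freq_mhz |  | ||||||
| #   echo 500 > /sys/class/drm/card0/gt_max_freq_mhz |  | ||||||
| # |  | ||||||
| # where the written value is a multiple of 50 between the supported minimum |  | ||||||
| # and maximum reported above. |  | ||||||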
| # Also note that in addition to GPU management, the script can adjust CPU |  | ||||||
| # operating frequencies. However, this is currently limited to setting the |  | ||||||
| # maximum scaling frequency as a percentage of the maximum frequency allowed |  | ||||||
| # by the hardware. |  | ||||||
| # |  | ||||||
| # Copyright (C) 2022 Collabora Ltd. |  | ||||||
| # Author: Cristian Ciocaltea <cristian.ciocaltea@collabora.com> |  | ||||||
| # |  | ||||||
| # SPDX-License-Identifier: MIT |  | ||||||
| # |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Constants |  | ||||||
| # |  | ||||||
|  |  | ||||||
| # GPU |  | ||||||
| DRM_FREQ_SYSFS_PATTERN="/sys/class/drm/card%d/gt_%s_freq_mhz" |  | ||||||
| ENF_FREQ_INFO="max min boost" |  | ||||||
| CAP_FREQ_INFO="RP0 RPn RP1" |  | ||||||
| ACT_FREQ_INFO="act cur" |  | ||||||
| THROTT_DETECT_SLEEP_SEC=2 |  | ||||||
| THROTT_DETECT_PID_FILE_PATH=/tmp/thrott-detect.pid |  | ||||||
|  |  | ||||||
| # CPU |  | ||||||
| CPU_SYSFS_PREFIX=/sys/devices/system/cpu |  | ||||||
| CPU_PSTATE_SYSFS_PATTERN="${CPU_SYSFS_PREFIX}/intel_pstate/%s" |  | ||||||
| CPU_FREQ_SYSFS_PATTERN="${CPU_SYSFS_PREFIX}/cpu%s/cpufreq/%s_freq" |  | ||||||
| CAP_CPU_FREQ_INFO="cpuinfo_max cpuinfo_min" |  | ||||||
| ENF_CPU_FREQ_INFO="scaling_max scaling_min" |  | ||||||
| ACT_CPU_FREQ_INFO="scaling_cur" |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Global variables. |  | ||||||
| # |  | ||||||
| unset INTEL_DRM_CARD_INDEX |  | ||||||
| unset GET_ACT_FREQ GET_ENF_FREQ GET_CAP_FREQ |  | ||||||
| unset SET_MIN_FREQ SET_MAX_FREQ |  | ||||||
| unset MONITOR_FREQ |  | ||||||
| unset CPU_SET_MAX_FREQ |  | ||||||
| unset DETECT_THROTT |  | ||||||
| unset DRY_RUN |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Simple printf based stderr logger. |  | ||||||
| # |  | ||||||
| log() { |  | ||||||
|     local msg_type=$1 |  | ||||||
|  |  | ||||||
|     shift |  | ||||||
|     printf "%s: %s: " "${msg_type}" "${0##*/}" >&2 |  | ||||||
|     printf "$@" >&2 |  | ||||||
|     printf "\n" >&2 |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Helper to print sysfs path for the given card index and freq info. |  | ||||||
| # |  | ||||||
| # arg1: Frequency info sysfs name, one of *_FREQ_INFO constants above |  | ||||||
| # arg2: Video card index, defaults to INTEL_DRM_CARD_INDEX |  | ||||||
| # |  | ||||||
| print_freq_sysfs_path() { |  | ||||||
|     printf ${DRM_FREQ_SYSFS_PATTERN} "${2:-${INTEL_DRM_CARD_INDEX}}" "$1" |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Helper to set INTEL_DRM_CARD_INDEX for the first identified Intel video card. |  | ||||||
| # |  | ||||||
| identify_intel_gpu() { |  | ||||||
|     local i=0 vendor path |  | ||||||
|  |  | ||||||
|     while [ ${i} -lt 16 ]; do |  | ||||||
|         [ -c "/dev/dri/card$i" ] || { |  | ||||||
|             i=$((i + 1)) |  | ||||||
|             continue |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         path=$(print_freq_sysfs_path "" ${i}) |  | ||||||
|         path=${path%/*}/device/vendor |  | ||||||
|  |  | ||||||
|         [ -r "${path}" ] && read vendor < "${path}" && \ |  | ||||||
|             [ "${vendor}" = "0x8086" ] && INTEL_DRM_CARD_INDEX=$i && return 0 |  | ||||||
|  |  | ||||||
|         i=$((i + 1)) |  | ||||||
|     done |  | ||||||
|  |  | ||||||
|     return 1 |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Read the specified freq info from sysfs. |  | ||||||
| # |  | ||||||
| # arg1: Flag (y/n) to also enable printing the freq info. |  | ||||||
| # arg2...: Frequency info sysfs name(s), see *_FREQ_INFO constants above |  | ||||||
| # return: Global variable(s) FREQ_${arg} containing the requested information |  | ||||||
| # |  | ||||||
| read_freq_info() { |  | ||||||
|     local var val info path print=0 ret=0 |  | ||||||
|  |  | ||||||
|     [ "$1" = "y" ] && print=1 |  | ||||||
|     shift |  | ||||||
|  |  | ||||||
|     while [ $# -gt 0 ]; do |  | ||||||
|         info=$1 |  | ||||||
|         shift |  | ||||||
|         var=FREQ_${info} |  | ||||||
|         path=$(print_freq_sysfs_path "${info}") |  | ||||||
|  |  | ||||||
|         [ -r ${path} ] && read ${var} < ${path} || { |  | ||||||
|             log ERROR "Failed to read freq info from: %s" "${path}" |  | ||||||
|             ret=1 |  | ||||||
|             continue |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         [ -n "${var}" ] || { |  | ||||||
|             log ERROR "Got empty freq info from: %s" "${path}" |  | ||||||
|             ret=1 |  | ||||||
|             continue |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         [ ${print} -eq 1 ] && { |  | ||||||
|             eval val=\$${var} |  | ||||||
|             printf "%6s: %4s MHz\n" "${info}" "${val}" |  | ||||||
|         } |  | ||||||
|     done |  | ||||||
|  |  | ||||||
|     return ${ret} |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Display requested info. |  | ||||||
| # |  | ||||||
| print_freq_info() { |  | ||||||
|     local req_freq |  | ||||||
|  |  | ||||||
|     [ -n "${GET_CAP_FREQ}" ] && { |  | ||||||
|         printf "* Hardware capabilities\n" |  | ||||||
|         read_freq_info y ${CAP_FREQ_INFO} |  | ||||||
|         printf "\n" |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     [ -n "${GET_ENF_FREQ}" ] && { |  | ||||||
|         printf "* Enforcements\n" |  | ||||||
|         read_freq_info y ${ENF_FREQ_INFO} |  | ||||||
|         printf "\n" |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     [ -n "${GET_ACT_FREQ}" ] && { |  | ||||||
|         printf "* Actual\n" |  | ||||||
|         read_freq_info y ${ACT_FREQ_INFO} |  | ||||||
|         printf "\n" |  | ||||||
|     } |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Helper to print frequency value as requested by user via '-s, --set' option. |  | ||||||
| # arg1: user requested freq value |  | ||||||
| # |  | ||||||
| compute_freq_set() { |  | ||||||
|     local val |  | ||||||
|  |  | ||||||
|     case "$1" in |  | ||||||
|     +) |  | ||||||
|         val=${FREQ_RP0} |  | ||||||
|         ;; |  | ||||||
|     -) |  | ||||||
|         val=${FREQ_RPn} |  | ||||||
|         ;; |  | ||||||
|     *%) |  | ||||||
|         val=$((${1%?} * FREQ_RP0 / 100)) |  | ||||||
|         # Adjust freq to comply with 50 MHz increments |  | ||||||
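|         # e.g. "47%" of an RP0 of 1100 MHz gives 517, which rounds down to 500 |  | ||||||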
|         val=$((val / 50 * 50)) |  | ||||||
|         ;; |  | ||||||
|     *[!0-9]*) |  | ||||||
|         log ERROR "Cannot set freq to invalid value: %s" "$1" |  | ||||||
|         return 1 |  | ||||||
|         ;; |  | ||||||
|     "") |  | ||||||
|         log ERROR "Cannot set freq to unspecified value" |  | ||||||
|         return 1 |  | ||||||
|         ;; |  | ||||||
|     *) |  | ||||||
|         # Adjust freq to comply with 50 MHz increments |  | ||||||
|         val=$(($1 / 50 * 50)) |  | ||||||
|         ;; |  | ||||||
|     esac |  | ||||||
|  |  | ||||||
|     printf "%s" "${val}" |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Helper for set_freq(). |  | ||||||
| # |  | ||||||
| set_freq_max() { |  | ||||||
|     log INFO "Setting GPU max freq to %s MHz" "${SET_MAX_FREQ}" |  | ||||||
|  |  | ||||||
|     read_freq_info n min || return $? |  | ||||||
|  |  | ||||||
|     [ ${SET_MAX_FREQ} -gt ${FREQ_RP0} ] && { |  | ||||||
|         log ERROR "Cannot set GPU max freq (%s) to be greater than hw max freq (%s)" \ |  | ||||||
|             "${SET_MAX_FREQ}" "${FREQ_RP0}" |  | ||||||
|         return 1 |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     [ ${SET_MAX_FREQ} -lt ${FREQ_RPn} ] && { |  | ||||||
|         log ERROR "Cannot set GPU max freq (%s) to be less than hw min freq (%s)" \ |  | ||||||
|             "${SET_MIN_FREQ}" "${FREQ_RPn}" |  | ||||||
|         return 1 |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     [ ${SET_MAX_FREQ} -lt ${FREQ_min} ] && { |  | ||||||
|         log ERROR "Cannot set GPU max freq (%s) to be less than min freq (%s)" \ |  | ||||||
|             "${SET_MAX_FREQ}" "${FREQ_min}" |  | ||||||
|         return 1 |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     [ -z "${DRY_RUN}" ] || return 0 |  | ||||||
|  |  | ||||||
|     if ! printf "%s" ${SET_MAX_FREQ} | tee $(print_freq_sysfs_path max) \ |  | ||||||
|         $(print_freq_sysfs_path boost) > /dev/null; |  | ||||||
|     then |  | ||||||
|         log ERROR "Failed to set GPU max frequency" |  | ||||||
|         return 1 |  | ||||||
|     fi |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Helper for set_freq(). |  | ||||||
| # |  | ||||||
| set_freq_min() { |  | ||||||
|     log INFO "Setting GPU min freq to %s MHz" "${SET_MIN_FREQ}" |  | ||||||
|  |  | ||||||
|     read_freq_info n max || return $? |  | ||||||
|  |  | ||||||
|     [ ${SET_MIN_FREQ} -gt ${FREQ_max} ] && { |  | ||||||
|         log ERROR "Cannot set GPU min freq (%s) to be greater than max freq (%s)" \ |  | ||||||
|             "${SET_MIN_FREQ}" "${FREQ_max}" |  | ||||||
|         return 1 |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     [ ${SET_MIN_FREQ} -lt ${FREQ_RPn} ] && { |  | ||||||
|         log ERROR "Cannot set GPU min freq (%s) to be less than hw min freq (%s)" \ |  | ||||||
|             "${SET_MIN_FREQ}" "${FREQ_RPn}" |  | ||||||
|         return 1 |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     [ -z "${DRY_RUN}" ] || return 0 |  | ||||||
|  |  | ||||||
|     if ! printf "%s" ${SET_MIN_FREQ} > $(print_freq_sysfs_path min); |  | ||||||
|     then |  | ||||||
|         log ERROR "Failed to set GPU min frequency" |  | ||||||
|         return 1 |  | ||||||
|     fi |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Set min or max or both GPU frequencies to the user indicated values. |  | ||||||
| # |  | ||||||
| set_freq() { |  | ||||||
|     # Get hw max & min frequencies |  | ||||||
|     read_freq_info n RP0 RPn || return $? |  | ||||||
|  |  | ||||||
|     [ -z "${SET_MAX_FREQ}" ] || { |  | ||||||
|         SET_MAX_FREQ=$(compute_freq_set "${SET_MAX_FREQ}") |  | ||||||
|         [ -z "${SET_MAX_FREQ}" ] && return 1 |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     [ -z "${SET_MIN_FREQ}" ] || { |  | ||||||
|         SET_MIN_FREQ=$(compute_freq_set "${SET_MIN_FREQ}") |  | ||||||
|         [ -z "${SET_MIN_FREQ}" ] && return 1 |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     # |  | ||||||
|     # Ensure correct operation order, to avoid setting min freq |  | ||||||
|     # to a value which is larger than max freq. |  | ||||||
|     # |  | ||||||
|     # E.g.: |  | ||||||
|     #   crt_min=crt_max=600; new_min=new_max=700 |  | ||||||
|     #   > operation order: max=700; min=700 |  | ||||||
|     # |  | ||||||
|     #   crt_min=crt_max=600; new_min=new_max=500 |  | ||||||
|     #   > operation order: min=500; max=500 |  | ||||||
|     # |  | ||||||
|     if [ -n "${SET_MAX_FREQ}" ] && [ -n "${SET_MIN_FREQ}" ]; then |  | ||||||
|         [ ${SET_MAX_FREQ} -lt ${SET_MIN_FREQ} ] && { |  | ||||||
|             log ERROR "Cannot set GPU max freq to be less than min freq" |  | ||||||
|             return 1 |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         read_freq_info n min || return $? |  | ||||||
|  |  | ||||||
|         if [ ${SET_MAX_FREQ} -lt ${FREQ_min} ]; then |  | ||||||
|             set_freq_min || return $? |  | ||||||
|             set_freq_max |  | ||||||
|         else |  | ||||||
|             set_freq_max || return $? |  | ||||||
|             set_freq_min |  | ||||||
|         fi |  | ||||||
|     elif [ -n "${SET_MAX_FREQ}" ]; then |  | ||||||
|         set_freq_max |  | ||||||
|     elif [ -n "${SET_MIN_FREQ}" ]; then |  | ||||||
|         set_freq_min |  | ||||||
|     else |  | ||||||
|         log "Unexpected call to set_freq()" |  | ||||||
|         return 1 |  | ||||||
|     fi |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Helper for detect_throttling(). |  | ||||||
| # |  | ||||||
| get_thrott_detect_pid() { |  | ||||||
|     [ -e ${THROTT_DETECT_PID_FILE_PATH} ] || return 0 |  | ||||||
|  |  | ||||||
|     local pid |  | ||||||
|     read pid < ${THROTT_DETECT_PID_FILE_PATH} || { |  | ||||||
|         log ERROR "Failed to read pid from: %s" "${THROTT_DETECT_PID_FILE_PATH}" |  | ||||||
|         return 1 |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     local proc_path=/proc/${pid:-invalid}/cmdline |  | ||||||
|     [ -r ${proc_path} ] && grep -qs "${0##*/}" ${proc_path} && { |  | ||||||
|         printf "%s" "${pid}" |  | ||||||
|         return 0 |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     # Remove orphaned PID file |  | ||||||
|     rm -rf ${THROTT_DETECT_PID_FILE_PATH} |  | ||||||
|     return 1 |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Control detection and reporting of GPU throttling events. |  | ||||||
| # arg1: start - run throttle detector in background |  | ||||||
| #       stop - stop throttle detector process, if any |  | ||||||
| #       status - verify if throttle detector is running |  | ||||||
| # |  | ||||||
| detect_throttling() { |  | ||||||
|     local pid |  | ||||||
|     pid=$(get_thrott_detect_pid) |  | ||||||
|  |  | ||||||
|     case "$1" in |  | ||||||
|     status) |  | ||||||
|         printf "Throttling detector is " |  | ||||||
|         [ -z "${pid}" ] && printf "not running\n" && return 0 |  | ||||||
|         printf "running (pid=%s)\n" ${pid} |  | ||||||
|         ;; |  | ||||||
|  |  | ||||||
|     stop) |  | ||||||
|         [ -z "${pid}" ] && return 0 |  | ||||||
|  |  | ||||||
|         log INFO "Stopping throttling detector (pid=%s)" "${pid}" |  | ||||||
|         kill ${pid}; sleep 1; kill -0 ${pid} 2>/dev/null && kill -9 ${pid} |  | ||||||
|         rm -rf ${THROTT_DETECT_PID_FILE_PATH} |  | ||||||
|         ;; |  | ||||||
|  |  | ||||||
|     start) |  | ||||||
|         [ -n "${pid}" ] && { |  | ||||||
|             log WARN "Throttling detector is already running (pid=%s)" ${pid} |  | ||||||
|             return 0 |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         ( |  | ||||||
|             read_freq_info n RPn || exit $? |  | ||||||
|  |  | ||||||
|             while true; do |  | ||||||
|                 sleep ${THROTT_DETECT_SLEEP_SEC} |  | ||||||
|                 read_freq_info n act min cur || exit $? |  | ||||||
|  |  | ||||||
|                 # |  | ||||||
|                 # The throttling seems to occur when act freq goes below min. |  | ||||||
|                 # However, it's necessary to exclude the idle states, where |  | ||||||
|                 # act freq normally reaches RPn and cur goes below min. |  | ||||||
|                 # |  | ||||||
|                 [ ${FREQ_act} -lt ${FREQ_min} ] && \ |  | ||||||
|                 [ ${FREQ_act} -gt ${FREQ_RPn} ] && \ |  | ||||||
|                 [ ${FREQ_cur} -ge ${FREQ_min} ] && \ |  | ||||||
|                     printf "GPU throttling detected: act=%s min=%s cur=%s RPn=%s\n" \ |  | ||||||
|                     ${FREQ_act} ${FREQ_min} ${FREQ_cur} ${FREQ_RPn} |  | ||||||
|             done |  | ||||||
|         ) & |  | ||||||
|  |  | ||||||
|         pid=$! |  | ||||||
|         log INFO "Started GPU throttling detector (pid=%s)" ${pid} |  | ||||||
|  |  | ||||||
|         printf "%s\n" ${pid} > ${THROTT_DETECT_PID_FILE_PATH} || \ |  | ||||||
|             log WARN "Failed to write throttle detector PID file" |  | ||||||
|         ;; |  | ||||||
|     esac |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Retrieve the list of online CPUs. |  | ||||||
| # |  | ||||||
| get_online_cpus() { |  | ||||||
|     local path cpu_index |  | ||||||
|  |  | ||||||
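|     # cpu0 usually has no 'online' attribute (it cannot be hot-unplugged on |  | ||||||
|     # most systems), so it is listed unconditionally. |  | ||||||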
|     printf "0" |  | ||||||
|     for path in $(grep 1 ${CPU_SYSFS_PREFIX}/cpu*/online); do |  | ||||||
|         cpu_index=${path##*/cpu} |  | ||||||
|         printf " %s" ${cpu_index%%/*} |  | ||||||
|     done |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Helper to print sysfs path for the given CPU index and freq info. |  | ||||||
| # |  | ||||||
| # arg1: Frequency info sysfs name, one of *_CPU_FREQ_INFO constants above |  | ||||||
| # arg2: CPU index |  | ||||||
| # |  | ||||||
| print_cpu_freq_sysfs_path() { |  | ||||||
|     printf ${CPU_FREQ_SYSFS_PATTERN} "$2" "$1" |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Read the specified CPU freq info from sysfs. |  | ||||||
| # |  | ||||||
| # arg1: CPU index |  | ||||||
| # arg2: Flag (y/n) to also enable printing the freq info. |  | ||||||
| # arg3...: Frequency info sysfs name(s), see *_CPU_FREQ_INFO constants above |  | ||||||
| # return: Global variable(s) CPU_FREQ_${arg} containing the requested information |  | ||||||
| # |  | ||||||
| read_cpu_freq_info() { |  | ||||||
|     local var val info path cpu_index print=0 ret=0 |  | ||||||
|  |  | ||||||
|     cpu_index=$1 |  | ||||||
|     [ "$2" = "y" ] && print=1 |  | ||||||
|     shift 2 |  | ||||||
|  |  | ||||||
|     while [ $# -gt 0 ]; do |  | ||||||
|         info=$1 |  | ||||||
|         shift |  | ||||||
|         var=CPU_FREQ_${info} |  | ||||||
|         path=$(print_cpu_freq_sysfs_path "${info}" ${cpu_index}) |  | ||||||
|  |  | ||||||
|         [ -r ${path} ] && read ${var} < ${path} || { |  | ||||||
|             log ERROR "Failed to read CPU freq info from: %s" "${path}" |  | ||||||
|             ret=1 |  | ||||||
|             continue |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         [ -n "${var}" ] || { |  | ||||||
|             log ERROR "Got empty CPU freq info from: %s" "${path}" |  | ||||||
|             ret=1 |  | ||||||
|             continue |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         [ ${print} -eq 1 ] && { |  | ||||||
|             eval val=\$${var} |  | ||||||
|             printf "%6s: %4s Hz\n" "${info}" "${val}" |  | ||||||
|         } |  | ||||||
|     done |  | ||||||
|  |  | ||||||
|     return ${ret} |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Helper to print freq. value as requested by user via '--cpu-set-max' option. |  | ||||||
| # arg1: user requested freq value |  | ||||||
| # |  | ||||||
| compute_cpu_freq_set() { |  | ||||||
|     local val |  | ||||||
|  |  | ||||||
|     case "$1" in |  | ||||||
|     +) |  | ||||||
|         val=${CPU_FREQ_cpuinfo_max} |  | ||||||
|         ;; |  | ||||||
|     -) |  | ||||||
|         val=${CPU_FREQ_cpuinfo_min} |  | ||||||
|         ;; |  | ||||||
|     *%) |  | ||||||
|         val=$((${1%?} * CPU_FREQ_cpuinfo_max / 100)) |  | ||||||
|         ;; |  | ||||||
|     *[!0-9]*) |  | ||||||
|         log ERROR "Cannot set CPU freq to invalid value: %s" "$1" |  | ||||||
|         return 1 |  | ||||||
|         ;; |  | ||||||
|     "") |  | ||||||
|         log ERROR "Cannot set CPU freq to unspecified value" |  | ||||||
|         return 1 |  | ||||||
|         ;; |  | ||||||
|     *) |  | ||||||
|         log ERROR "Cannot set CPU freq to custom value; use +, -, or % instead" |  | ||||||
|         return 1 |  | ||||||
|         ;; |  | ||||||
|     esac |  | ||||||
|  |  | ||||||
|     printf "%s" "${val}" |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Adjust CPU max scaling frequency. |  | ||||||
| # |  | ||||||
| set_cpu_freq_max() { |  | ||||||
|     local target_freq res=0 |  | ||||||
|     case "${CPU_SET_MAX_FREQ}" in |  | ||||||
|     +) |  | ||||||
|         target_freq=100 |  | ||||||
|         ;; |  | ||||||
|     -) |  | ||||||
|         target_freq=1 |  | ||||||
|         ;; |  | ||||||
|     *%) |  | ||||||
|         target_freq=${CPU_SET_MAX_FREQ%?} |  | ||||||
|         ;; |  | ||||||
|     *) |  | ||||||
|         log ERROR "Invalid CPU freq" |  | ||||||
|         return 1 |  | ||||||
|         ;; |  | ||||||
|     esac |  | ||||||
|  |  | ||||||
|     local pstate_info=$(printf "${CPU_PSTATE_SYSFS_PATTERN}" max_perf_pct) |  | ||||||
|     [ -e "${pstate_info}" ] && { |  | ||||||
|         log INFO "Setting intel_pstate max perf to %s" "${target_freq}%" |  | ||||||
|         if ! printf "%s" "${target_freq}" > "${pstate_info}"; |  | ||||||
| 	then |  | ||||||
|             log ERROR "Failed to set intel_pstate max perf" |  | ||||||
|             res=1 |  | ||||||
| 	fi |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     local cpu_index |  | ||||||
|     for cpu_index in $(get_online_cpus); do |  | ||||||
|         read_cpu_freq_info ${cpu_index} n ${CAP_CPU_FREQ_INFO} || { res=$?; continue; } |  | ||||||
|  |  | ||||||
|         target_freq=$(compute_cpu_freq_set "${CPU_SET_MAX_FREQ}") |  | ||||||
|         [ -z "${target_freq}" ] && { res=$?; continue; } |  | ||||||
|  |  | ||||||
|         log INFO "Setting CPU%s max scaling freq to %s Hz" ${cpu_index} "${target_freq}" |  | ||||||
|         [ -n "${DRY_RUN}" ] && continue |  | ||||||
|  |  | ||||||
|         if ! printf "%s" ${target_freq} > $(print_cpu_freq_sysfs_path scaling_max ${cpu_index}); |  | ||||||
| 	then |  | ||||||
|             res=1 |  | ||||||
|             log ERROR "Failed to set CPU%s max scaling frequency" ${cpu_index} |  | ||||||
| 	fi |  | ||||||
|     done |  | ||||||
|  |  | ||||||
|     return ${res} |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Show help message. |  | ||||||
| # |  | ||||||
| print_usage() { |  | ||||||
|     cat <<EOF |  | ||||||
| Usage: ${0##*/} [OPTION]... |  | ||||||
|  |  | ||||||
| A script to manage Intel GPU frequencies. Can be used for debugging performance |  | ||||||
| problems or trying to obtain a stable frequency while benchmarking. |  | ||||||
|  |  | ||||||
| Note Intel GPUs only accept specific frequencies, usually multiples of 50 MHz. |  | ||||||
|  |  | ||||||
| Options: |  | ||||||
|   -g, --get [act|enf|cap|all] |  | ||||||
|                         Get frequency information: active (default), enforced, |  | ||||||
|                         hardware capabilities or all of them. |  | ||||||
|  |  | ||||||
|   -s, --set [{min|max}=]{FREQUENCY[%]|+|-} |  | ||||||
|                         Set min or max frequency to the given value (MHz). |  | ||||||
|                         Append '%' to interpret FREQUENCY as % of hw max. |  | ||||||
|                         Use '+' or '-' to set frequency to hardware max or min. |  | ||||||
|                         Omit min/max prefix to set both frequencies. |  | ||||||
|  |  | ||||||
|   -r, --reset           Reset frequencies to hardware defaults. |  | ||||||
|  |  | ||||||
|   -m, --monitor [act|enf|cap|all] |  | ||||||
|                         Monitor the indicated frequencies via 'watch' utility. |  | ||||||
|                         See '-g, --get' option for more details. |  | ||||||
|  |  | ||||||
|   -d, --detect-thrott [start|stop|status] |  | ||||||
|                         Start (default operation) the throttling detector |  | ||||||
|                         as a background process. Use 'stop' or 'status' to |  | ||||||
|                         terminate the detector process or verify its status. |  | ||||||
|  |  | ||||||
|   --cpu-set-max [FREQUENCY%|+|-] |  | ||||||
|                         Set the CPU max scaling frequency as a % of the hw max. |  | ||||||
|                         Use '+' or '-' to set frequency to hardware max or min. |  | ||||||
|  |  | ||||||
|   --dry-run             See what the script will do without applying any |  | ||||||
|                         frequency changes. |  | ||||||
|  |  | ||||||
|   -h, --help            Display this help text and exit. |  | ||||||
| EOF |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Parse user input for '-g, --get' option. |  | ||||||
| # Returns 0 if a value has been provided, otherwise 1. |  | ||||||
| # |  | ||||||
| parse_option_get() { |  | ||||||
|     local ret=0 |  | ||||||
|  |  | ||||||
|     case "$1" in |  | ||||||
|     act) GET_ACT_FREQ=1;; |  | ||||||
|     enf) GET_ENF_FREQ=1;; |  | ||||||
|     cap) GET_CAP_FREQ=1;; |  | ||||||
|     all) GET_ACT_FREQ=1; GET_ENF_FREQ=1; GET_CAP_FREQ=1;; |  | ||||||
|     -*|"") |  | ||||||
|         # No value provided, using default. |  | ||||||
|         GET_ACT_FREQ=1 |  | ||||||
|         ret=1 |  | ||||||
|         ;; |  | ||||||
|     *) |  | ||||||
|         print_usage |  | ||||||
|         exit 1 |  | ||||||
|         ;; |  | ||||||
|     esac |  | ||||||
|  |  | ||||||
|     return ${ret} |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Validate user input for '-s, --set' option. |  | ||||||
| # arg1: input value to be validated |  | ||||||
| # arg2: optional flag indicating input is restricted to % |  | ||||||
| # |  | ||||||
| validate_option_set() { |  | ||||||
|     case "$1" in |  | ||||||
|     +|-|[0-9]%|[0-9][0-9]%) |  | ||||||
|         return 0 |  | ||||||
|         ;; |  | ||||||
|     *[!0-9]*|"") |  | ||||||
|         print_usage |  | ||||||
|         exit 1 |  | ||||||
|         ;; |  | ||||||
|     esac |  | ||||||
|  |  | ||||||
|     [ -z "$2" ] || { print_usage; exit 1; } |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Parse script arguments. |  | ||||||
| # |  | ||||||
| [ $# -eq 0 ] && { print_usage; exit 1; } |  | ||||||
|  |  | ||||||
| while [ $# -gt 0 ]; do |  | ||||||
|     case "$1" in |  | ||||||
|     -g|--get) |  | ||||||
|         parse_option_get "$2" && shift |  | ||||||
|         ;; |  | ||||||
|  |  | ||||||
|     -s|--set) |  | ||||||
|         shift |  | ||||||
|         case "$1" in |  | ||||||
|         min=*) |  | ||||||
|             SET_MIN_FREQ=${1#min=} |  | ||||||
|             validate_option_set "${SET_MIN_FREQ}" |  | ||||||
|             ;; |  | ||||||
|         max=*) |  | ||||||
|             SET_MAX_FREQ=${1#max=} |  | ||||||
|             validate_option_set "${SET_MAX_FREQ}" |  | ||||||
|             ;; |  | ||||||
|         *) |  | ||||||
|             SET_MIN_FREQ=$1 |  | ||||||
|             validate_option_set "${SET_MIN_FREQ}" |  | ||||||
|             SET_MAX_FREQ=${SET_MIN_FREQ} |  | ||||||
|             ;; |  | ||||||
|         esac |  | ||||||
|         ;; |  | ||||||
|  |  | ||||||
|     -r|--reset) |  | ||||||
|         RESET_FREQ=1 |  | ||||||
|         SET_MIN_FREQ="-" |  | ||||||
|         SET_MAX_FREQ="+" |  | ||||||
|         ;; |  | ||||||
|  |  | ||||||
|     -m|--monitor) |  | ||||||
|         MONITOR_FREQ=act |  | ||||||
|         parse_option_get "$2" && MONITOR_FREQ=$2 && shift |  | ||||||
|         ;; |  | ||||||
|  |  | ||||||
|     -d|--detect-thrott) |  | ||||||
|         DETECT_THROTT=start |  | ||||||
|         case "$2" in |  | ||||||
|         start|stop|status) |  | ||||||
|             DETECT_THROTT=$2 |  | ||||||
|             shift |  | ||||||
|             ;; |  | ||||||
|         esac |  | ||||||
|         ;; |  | ||||||
|  |  | ||||||
|     --cpu-set-max) |  | ||||||
|         shift |  | ||||||
|         CPU_SET_MAX_FREQ=$1 |  | ||||||
|         validate_option_set "${CPU_SET_MAX_FREQ}" restricted |  | ||||||
|         ;; |  | ||||||
|  |  | ||||||
|     --dry-run) |  | ||||||
|         DRY_RUN=1 |  | ||||||
|         ;; |  | ||||||
|  |  | ||||||
|     -h|--help) |  | ||||||
|         print_usage |  | ||||||
|         exit 0 |  | ||||||
|         ;; |  | ||||||
|  |  | ||||||
|     *) |  | ||||||
|         print_usage |  | ||||||
|         exit 1 |  | ||||||
|         ;; |  | ||||||
|     esac |  | ||||||
|  |  | ||||||
|     shift |  | ||||||
| done |  | ||||||
|  |  | ||||||
| # |  | ||||||
| # Main |  | ||||||
| # |  | ||||||
| RET=0 |  | ||||||
|  |  | ||||||
| identify_intel_gpu || { |  | ||||||
|     log INFO "No Intel GPU detected" |  | ||||||
|     exit 0 |  | ||||||
| } |  | ||||||
|  |  | ||||||
| [ -n "${SET_MIN_FREQ}${SET_MAX_FREQ}" ] && { set_freq || RET=$?; } |  | ||||||
| print_freq_info |  | ||||||
|  |  | ||||||
| [ -n "${DETECT_THROTT}" ] && detect_throttling ${DETECT_THROTT} |  | ||||||
|  |  | ||||||
| [ -n "${CPU_SET_MAX_FREQ}" ] && { set_cpu_freq_max || RET=$?; } |  | ||||||
|  |  | ||||||
| [ -n "${MONITOR_FREQ}" ] && { |  | ||||||
|     log INFO "Entering frequency monitoring mode" |  | ||||||
|     sleep 2 |  | ||||||
|     exec watch -d -n 1 "$0" -g "${MONITOR_FREQ}" |  | ||||||
| } |  | ||||||
|  |  | ||||||
| exit ${RET} |  | ||||||
| @@ -1,24 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
| # shellcheck disable=SC1091  # the path is created in build-kdl and |  | ||||||
| # its existence is checked here |  | ||||||
|  |  | ||||||
| terminate() { |  | ||||||
|   echo "ci-kdl.sh caught SIGTERM signal! propagating to child processes" |  | ||||||
|   for job in $(jobs -p) |  | ||||||
|   do |  | ||||||
|     kill -15 "$job" |  | ||||||
|   done |  | ||||||
| } |  | ||||||
|  |  | ||||||
| trap terminate SIGTERM |  | ||||||
|  |  | ||||||
| if [ -f /ci-kdl.venv/bin/activate ]; then |  | ||||||
|   source /ci-kdl.venv/bin/activate |  | ||||||
|   /ci-kdl.venv/bin/python /ci-kdl.venv/bin/ci-kdl | tee -a /results/kdl.log & |  | ||||||
|   child=$! |  | ||||||
|   wait $child |  | ||||||
|   mv kdl_*.json /results/kdl.json |  | ||||||
| else |  | ||||||
|   echo -e "Not possible to activate ci-kdl virtual environment" |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| @@ -1,21 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| _XORG_SCRIPT="/xorg-script" |  | ||||||
| _FLAG_FILE="/xorg-started" |  | ||||||
|  |  | ||||||
| echo "touch ${_FLAG_FILE}; sleep 100000" > "${_XORG_SCRIPT}" |  | ||||||
| if [ "x$1" != "x" ]; then |  | ||||||
|     export LD_LIBRARY_PATH="${1}/lib" |  | ||||||
|     export LIBGL_DRIVERS_PATH="${1}/lib/dri" |  | ||||||
| fi |  | ||||||
| xinit /bin/sh "${_XORG_SCRIPT}" -- /usr/bin/Xorg vt45 -noreset -s 0 -dpms -logfile /Xorg.0.log & |  | ||||||
|  |  | ||||||
| # Wait for xorg to be ready for connections. |  | ||||||
| for _ in 1 2 3 4 5; do |  | ||||||
|     if [ -e "${_FLAG_FILE}" ]; then |  | ||||||
|         break |  | ||||||
|     fi |  | ||||||
|     sleep 5 |  | ||||||
| done |  | ||||||
| @@ -1,58 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
| # shellcheck disable=SC1091 |  | ||||||
|  |  | ||||||
| # When changing this file, you need to bump the following |  | ||||||
| # .gitlab-ci/image-tags.yml tags: |  | ||||||
| # ALPINE_X86_64_BUILD_TAG |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
| set -o xtrace |  | ||||||
|  |  | ||||||
| EPHEMERAL=( |  | ||||||
| ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| DEPS=( |  | ||||||
|     bash |  | ||||||
|     bison |  | ||||||
|     ccache |  | ||||||
|     cmake |  | ||||||
|     clang-dev |  | ||||||
|     coreutils |  | ||||||
|     curl |  | ||||||
|     flex |  | ||||||
|     gcc |  | ||||||
|     g++ |  | ||||||
|     git |  | ||||||
|     gettext |  | ||||||
|     glslang |  | ||||||
|     linux-headers |  | ||||||
|     llvm16-dev |  | ||||||
|     meson |  | ||||||
|     expat-dev |  | ||||||
|     elfutils-dev |  | ||||||
|     libdrm-dev |  | ||||||
|     libselinux-dev |  | ||||||
|     libva-dev |  | ||||||
|     libpciaccess-dev |  | ||||||
|     zlib-dev |  | ||||||
|     python3-dev |  | ||||||
|     py3-mako |  | ||||||
|     py3-ply |  | ||||||
|     vulkan-headers |  | ||||||
|     spirv-tools-dev |  | ||||||
|     util-macros |  | ||||||
|     wayland-dev |  | ||||||
|     wayland-protocols |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| apk add "${DEPS[@]}" "${EPHEMERAL[@]}" |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/container_pre_build.sh |  | ||||||
|  |  | ||||||
|  |  | ||||||
| ############### Uninstall the build software |  | ||||||
|  |  | ||||||
| apk del "${EPHEMERAL[@]}" |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/container_post_build.sh |  | ||||||
| @@ -1,29 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
|  |  | ||||||
| # This is a ci-templates build script to generate a container for the LAVA SSH client. |  | ||||||
|  |  | ||||||
| # shellcheck disable=SC1091 |  | ||||||
| set -e |  | ||||||
| set -o xtrace |  | ||||||
|  |  | ||||||
| EPHEMERAL=( |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| # We only need these very basic packages to run the tests. |  | ||||||
| DEPS=( |  | ||||||
|     openssh-client  # for ssh |  | ||||||
|     iputils         # for ping |  | ||||||
|     bash |  | ||||||
|     curl |  | ||||||
| ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| apk add "${DEPS[@]}" "${EPHEMERAL[@]}" |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/container_pre_build.sh |  | ||||||
|  |  | ||||||
| ############### Uninstall the build software |  | ||||||
|  |  | ||||||
| apk del "${EPHEMERAL[@]}" |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/container_post_build.sh |  | ||||||
| @@ -1,62 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
| set -o xtrace |  | ||||||
|  |  | ||||||
| # Fetch the arm-built rootfs image and unpack it in our x86_64 container (saves |  | ||||||
| # network transfer, disk usage, and runtime on test jobs) |  | ||||||
|  |  | ||||||
| # shellcheck disable=SC2154 # arch is assigned in previous scripts |  | ||||||
| if curl -X HEAD -s "${ARTIFACTS_PREFIX}/${FDO_UPSTREAM_REPO}/${ARTIFACTS_SUFFIX}/${arch}/done"; then |  | ||||||
|   ARTIFACTS_URL="${ARTIFACTS_PREFIX}/${FDO_UPSTREAM_REPO}/${ARTIFACTS_SUFFIX}/${arch}" |  | ||||||
| else |  | ||||||
|   ARTIFACTS_URL="${ARTIFACTS_PREFIX}/${CI_PROJECT_PATH}/${ARTIFACTS_SUFFIX}/${arch}" |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|     "${ARTIFACTS_URL}"/lava-rootfs.tar.zst -o rootfs.tar.zst |  | ||||||
| mkdir -p /rootfs-"$arch" |  | ||||||
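| # ./dev/* is excluded since device nodes generally cannot be re-created in an |  | ||||||
| # unprivileged container build. |  | ||||||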
| tar -C /rootfs-"$arch" '--exclude=./dev/*' --zstd -xf rootfs.tar.zst |  | ||||||
| rm rootfs.tar.zst |  | ||||||
|  |  | ||||||
| if [[ $arch == "arm64" ]]; then |  | ||||||
|     mkdir -p /baremetal-files |  | ||||||
|     pushd /baremetal-files |  | ||||||
|  |  | ||||||
|     curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
| 	-O "${KERNEL_IMAGE_BASE}"/arm64/Image |  | ||||||
|     curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|         -O "${KERNEL_IMAGE_BASE}"/arm64/Image.gz |  | ||||||
|     curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|         -O "${KERNEL_IMAGE_BASE}"/arm64/cheza-kernel |  | ||||||
|  |  | ||||||
|     DEVICE_TREES="" |  | ||||||
|     DEVICE_TREES="$DEVICE_TREES apq8016-sbc.dtb" |  | ||||||
|     DEVICE_TREES="$DEVICE_TREES apq8096-db820c.dtb" |  | ||||||
|     DEVICE_TREES="$DEVICE_TREES tegra210-p3450-0000.dtb" |  | ||||||
|     DEVICE_TREES="$DEVICE_TREES imx8mq-nitrogen.dtb" |  | ||||||
|  |  | ||||||
|     for DTB in $DEVICE_TREES; do |  | ||||||
| 	curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|             -O "${KERNEL_IMAGE_BASE}/arm64/$DTB" |  | ||||||
|     done |  | ||||||
|  |  | ||||||
|     popd |  | ||||||
| elif [[ $arch == "armhf" ]]; then |  | ||||||
|     mkdir -p /baremetal-files |  | ||||||
|     pushd /baremetal-files |  | ||||||
|  |  | ||||||
|     curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|         -O "${KERNEL_IMAGE_BASE}"/armhf/zImage |  | ||||||
|  |  | ||||||
|     DEVICE_TREES="" |  | ||||||
|     DEVICE_TREES="$DEVICE_TREES imx6q-cubox-i.dtb" |  | ||||||
|     DEVICE_TREES="$DEVICE_TREES tegra124-jetson-tk1.dtb" |  | ||||||
|  |  | ||||||
|     for DTB in $DEVICE_TREES; do |  | ||||||
| 	curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|             -O "${KERNEL_IMAGE_BASE}/armhf/$DTB" |  | ||||||
|     done |  | ||||||
|  |  | ||||||
|     popd |  | ||||||
| fi |  | ||||||
| @@ -1,58 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| ANGLE_REV="0518a3ff4d4e7e5b2ce8203358f719613a31c118" |  | ||||||
|  |  | ||||||
| # DEPOT tools |  | ||||||
| git clone --depth 1 https://chromium.googlesource.com/chromium/tools/depot_tools.git |  | ||||||
| export PATH="$(pwd)/depot_tools:$PATH" |  | ||||||
| export DEPOT_TOOLS_UPDATE=0 |  | ||||||
|  |  | ||||||
| mkdir /angle-build |  | ||||||
| pushd /angle-build |  | ||||||
| git init |  | ||||||
| git remote add origin https://chromium.googlesource.com/angle/angle.git |  | ||||||
| git fetch --depth 1 origin "$ANGLE_REV" |  | ||||||
| git checkout FETCH_HEAD |  | ||||||
|  |  | ||||||
| # source preparation |  | ||||||
| python3 scripts/bootstrap.py |  | ||||||
| mkdir -p build/config |  | ||||||
| gclient sync |  | ||||||
|  |  | ||||||
| sed -i "/catapult/d" testing/BUILD.gn |  | ||||||
|  |  | ||||||
| mkdir -p out/Release |  | ||||||
| echo ' |  | ||||||
| is_debug = false |  | ||||||
| angle_enable_swiftshader = false |  | ||||||
| angle_enable_null = false |  | ||||||
| angle_enable_gl = false |  | ||||||
| angle_enable_vulkan = true |  | ||||||
| angle_has_histograms = false |  | ||||||
| build_angle_trace_perf_tests = false |  | ||||||
| build_angle_deqp_tests = false |  | ||||||
| angle_use_custom_libvulkan = false |  | ||||||
| dcheck_always_on = true |  | ||||||
| ' > out/Release/args.gn |  | ||||||
|  |  | ||||||
| if [[ "$DEBIAN_ARCH" = "arm64" ]]; then |  | ||||||
|   build/linux/sysroot_scripts/install-sysroot.py --arch=arm64 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| gn gen out/Release |  | ||||||
| # depot_tools overrides ninja with a version that doesn't work.  We want |  | ||||||
| # ninja with FDO_CI_CONCURRENT anyway. |  | ||||||
| /usr/local/bin/ninja -C out/Release/ |  | ||||||
|  |  | ||||||
| mkdir /angle |  | ||||||
| cp out/Release/lib*GL*.so /angle/ |  | ||||||
| ln -s libEGL.so /angle/libEGL.so.1 |  | ||||||
| ln -s libGLESv2.so /angle/libGLESv2.so.2 |  | ||||||
|  |  | ||||||
| rm -rf out |  | ||||||
|  |  | ||||||
| popd |  | ||||||
| rm -rf ./depot_tools |  | ||||||
| @@ -1,25 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| # When changing this file, you need to bump the following |  | ||||||
| # .gitlab-ci/image-tags.yml tags: |  | ||||||
| # DEBIAN_X86_64_TEST_GL_TAG |  | ||||||
| # DEBIAN_X86_64_TEST_VK_TAG |  | ||||||
| # KERNEL_ROOTFS_TAG |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| APITRACE_VERSION="0a6506433e1f9f7b69757b4e5730326970c4321a" |  | ||||||
|  |  | ||||||
| git clone https://github.com/apitrace/apitrace.git --single-branch --no-checkout /apitrace |  | ||||||
| pushd /apitrace |  | ||||||
| git checkout "$APITRACE_VERSION" |  | ||||||
| git submodule update --init --depth 1 --recursive |  | ||||||
| cmake -S . -B _build -G Ninja -DCMAKE_BUILD_TYPE=Release -DENABLE_GUI=False -DENABLE_WAFFLE=on $EXTRA_CMAKE_ARGS |  | ||||||
| cmake --build _build --parallel --target apitrace eglretrace |  | ||||||
| mkdir build |  | ||||||
| cp _build/apitrace build |  | ||||||
| cp _build/eglretrace build |  | ||||||
| ${STRIP_CMD:-strip} build/* |  | ||||||
| find . -not -path './build' -not -path './build/*' -delete |  | ||||||
| popd |  | ||||||
| @@ -1,44 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| git config --global user.email "mesa@example.com" |  | ||||||
| git config --global user.name "Mesa CI" |  | ||||||
|  |  | ||||||
| CROSVM_VERSION=e3815e62d675ef436956a992e0ed58b7309c759d |  | ||||||
| git clone --single-branch -b main --no-checkout https://chromium.googlesource.com/crosvm/crosvm /platform/crosvm |  | ||||||
| pushd /platform/crosvm |  | ||||||
| git checkout "$CROSVM_VERSION" |  | ||||||
| git submodule update --init |  | ||||||
|  |  | ||||||
| VIRGLRENDERER_VERSION=10120c0d9ebdc00eae1b5c9f7c98fc0d198ba602 |  | ||||||
| rm -rf third_party/virglrenderer |  | ||||||
| git clone --single-branch -b main --no-checkout https://gitlab.freedesktop.org/virgl/virglrenderer.git third_party/virglrenderer |  | ||||||
| pushd third_party/virglrenderer |  | ||||||
| git checkout "$VIRGLRENDERER_VERSION" |  | ||||||
| meson setup build/ -D libdir=lib -D render-server-worker=process -D venus=true $EXTRA_MESON_ARGS |  | ||||||
| meson install -C build |  | ||||||
| popd |  | ||||||
|  |  | ||||||
| cargo update -p pkg-config@0.3.26 --precise 0.3.27 |  | ||||||
|  |  | ||||||
| RUSTFLAGS='-L native=/usr/local/lib' cargo install \ |  | ||||||
|   bindgen-cli \ |  | ||||||
|   --locked \ |  | ||||||
|   -j ${FDO_CI_CONCURRENT:-4} \ |  | ||||||
|   --root /usr/local \ |  | ||||||
|   --version 0.65.1 \ |  | ||||||
|   $EXTRA_CARGO_ARGS |  | ||||||
|  |  | ||||||
| CROSVM_USE_SYSTEM_VIRGLRENDERER=1 RUSTFLAGS='-L native=/usr/local/lib' cargo install \ |  | ||||||
|   -j ${FDO_CI_CONCURRENT:-4} \ |  | ||||||
|   --locked \ |  | ||||||
|   --features 'default-no-sandbox gpu x virgl_renderer virgl_renderer_next' \ |  | ||||||
|   --path . \ |  | ||||||
|   --root /usr/local \ |  | ||||||
|   $EXTRA_CARGO_ARGS |  | ||||||
|  |  | ||||||
| popd |  | ||||||
|  |  | ||||||
| rm -rf /platform/crosvm |  | ||||||
| @@ -1,56 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| if [ -n "${DEQP_RUNNER_GIT_TAG}${DEQP_RUNNER_GIT_REV}" ]; then |  | ||||||
|     # Build and install from source |  | ||||||
|     DEQP_RUNNER_CARGO_ARGS="--git ${DEQP_RUNNER_GIT_URL:-https://gitlab.freedesktop.org/anholt/deqp-runner.git}" |  | ||||||
|  |  | ||||||
|     if [ -n "${DEQP_RUNNER_GIT_TAG}" ]; then |  | ||||||
|         DEQP_RUNNER_CARGO_ARGS="--tag ${DEQP_RUNNER_GIT_TAG} ${DEQP_RUNNER_CARGO_ARGS}" |  | ||||||
|     else |  | ||||||
|         DEQP_RUNNER_CARGO_ARGS="--rev ${DEQP_RUNNER_GIT_REV} ${DEQP_RUNNER_CARGO_ARGS}" |  | ||||||
|     fi |  | ||||||
|  |  | ||||||
|     DEQP_RUNNER_CARGO_ARGS="${DEQP_RUNNER_CARGO_ARGS} ${EXTRA_CARGO_ARGS}" |  | ||||||
| else |  | ||||||
|     # Install from package registry |  | ||||||
|     DEQP_RUNNER_CARGO_ARGS="--version 0.16.0 ${EXTRA_CARGO_ARGS} -- deqp-runner" |  | ||||||
| fi |  | ||||||
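|  |  | ||||||
| # For illustration only (hypothetical tag): with DEQP_RUNNER_GIT_TAG=v0.16.1 and |  | ||||||
| # FDO_CI_CONCURRENT=4, the non-Android branch below expands to roughly |  | ||||||
| #   cargo install --locked -j 4 --root /usr/local \ |  | ||||||
| #     --tag v0.16.1 --git https://gitlab.freedesktop.org/anholt/deqp-runner.git |  | ||||||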
|  |  | ||||||
| if [ -z "$ANDROID_NDK_HOME" ]; then |  | ||||||
|     cargo install --locked  \ |  | ||||||
|         -j ${FDO_CI_CONCURRENT:-4} \ |  | ||||||
|         --root /usr/local \ |  | ||||||
|         ${DEQP_RUNNER_CARGO_ARGS} |  | ||||||
| else |  | ||||||
|     mkdir -p /deqp-runner |  | ||||||
|     pushd /deqp-runner |  | ||||||
|     git clone --branch v0.16.1 --depth 1 https://gitlab.freedesktop.org/anholt/deqp-runner.git deqp-runner-git |  | ||||||
|     pushd deqp-runner-git |  | ||||||
|  |  | ||||||
|     cargo install --locked  \ |  | ||||||
|         -j ${FDO_CI_CONCURRENT:-4} \ |  | ||||||
|         --root /usr/local --version 2.10.0 \ |  | ||||||
|         cargo-ndk |  | ||||||
|  |  | ||||||
|     rustup target add x86_64-linux-android |  | ||||||
|     RUSTFLAGS='-C target-feature=+crt-static' cargo ndk --target x86_64-linux-android build |  | ||||||
|  |  | ||||||
|     mv target/x86_64-linux-android/debug/deqp-runner /deqp-runner |  | ||||||
|  |  | ||||||
|     cargo uninstall --locked  \ |  | ||||||
|         --root /usr/local \ |  | ||||||
|         cargo-ndk |  | ||||||
|  |  | ||||||
|     popd |  | ||||||
|     rm -rf deqp-runner-git |  | ||||||
|     popd |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # remove unused test runners to shrink images for the Mesa CI build (not kernel, |  | ||||||
| # which chooses its own deqp branch) |  | ||||||
| if [ -z "${DEQP_RUNNER_GIT_TAG}${DEQP_RUNNER_GIT_REV}" ]; then |  | ||||||
|     rm -f /usr/local/bin/igt-runner |  | ||||||
| fi |  | ||||||
| @@ -1,142 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| # When changing this file, you need to bump the following |  | ||||||
| # .gitlab-ci/image-tags.yml tags: |  | ||||||
| # DEBIAN_X86_64_TEST_ANDROID_TAG |  | ||||||
| # DEBIAN_X86_64_TEST_GL_TAG |  | ||||||
| # DEBIAN_X86_64_TEST_VK_TAG |  | ||||||
| # KERNEL_ROOTFS_TAG |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| git config --global user.email "mesa@example.com" |  | ||||||
| git config --global user.name "Mesa CI" |  | ||||||
| git clone \ |  | ||||||
|     https://github.com/KhronosGroup/VK-GL-CTS.git \ |  | ||||||
|     -b vulkan-cts-1.3.7.0 \ |  | ||||||
|     --depth 1 \ |  | ||||||
|     /VK-GL-CTS |  | ||||||
| pushd /VK-GL-CTS |  | ||||||
|  |  | ||||||
| # Patches to VulkanCTS may come from commits in their repo (listed in |  | ||||||
| # cts_commits_to_backport) or patch files stored in our repo (in the patch |  | ||||||
| # directory `$OLDPWD/.gitlab-ci/container/patches/` listed in cts_patch_files). |  | ||||||
| # Both list variables should have comments explaining the reasons behind the |  | ||||||
| # patches. |  | ||||||
|  |  | ||||||
| cts_commits_to_backport=( |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| for commit in "${cts_commits_to_backport[@]}" |  | ||||||
| do |  | ||||||
|   PATCH_URL="https://github.com/KhronosGroup/VK-GL-CTS/commit/$commit.patch" |  | ||||||
|   echo "Apply patch to VK-GL-CTS from $PATCH_URL" |  | ||||||
|   curl -L --retry 4 -f --retry-all-errors --retry-delay 60 $PATCH_URL | \ |  | ||||||
|     git am - |  | ||||||
| done |  | ||||||
|  |  | ||||||
| cts_patch_files=( |  | ||||||
|   # Android specific patches. |  | ||||||
|   build-deqp_Allow-running-on-Android-from-the-command-line.patch |  | ||||||
|   build-deqp_Android-prints-to-stdout-instead-of-logcat.patch |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| for patch in "${cts_patch_files[@]}" |  | ||||||
| do |  | ||||||
|   echo "Apply patch to VK-GL-CTS from $patch" |  | ||||||
|   git am < $OLDPWD/.gitlab-ci/container/patches/$patch |  | ||||||
| done |  | ||||||
|  |  | ||||||
| # --insecure is due to SSL cert failures hitting sourceforge for zlib and |  | ||||||
| # libpng (sigh).  The archives get their checksums checked anyway, and git |  | ||||||
| # always goes through ssh or https. |  | ||||||
| python3 external/fetch_sources.py --insecure |  | ||||||
|  |  | ||||||
| mkdir -p /deqp |  | ||||||
|  |  | ||||||
| # Save the testlog stylesheets: |  | ||||||
| cp doc/testlog-stylesheet/testlog.{css,xsl} /deqp |  | ||||||
| popd |  | ||||||
|  |  | ||||||
| pushd /deqp |  | ||||||
|  |  | ||||||
| if [ "${DEQP_TARGET}" != 'android' ]; then |  | ||||||
|     # When including EGL/X11 testing, do that build first and save off its |  | ||||||
|     # deqp-egl binary. |  | ||||||
|     cmake -S /VK-GL-CTS -B . -G Ninja \ |  | ||||||
|         -DDEQP_TARGET=x11_egl_glx \ |  | ||||||
|         -DCMAKE_BUILD_TYPE=Release \ |  | ||||||
|         $EXTRA_CMAKE_ARGS |  | ||||||
|     ninja modules/egl/deqp-egl |  | ||||||
|     mv /deqp/modules/egl/deqp-egl /deqp/modules/egl/deqp-egl-x11 |  | ||||||
|  |  | ||||||
|     cmake -S /VK-GL-CTS -B . -G Ninja \ |  | ||||||
|         -DDEQP_TARGET=wayland \ |  | ||||||
|         -DCMAKE_BUILD_TYPE=Release \ |  | ||||||
|         $EXTRA_CMAKE_ARGS |  | ||||||
|     ninja modules/egl/deqp-egl |  | ||||||
|     mv /deqp/modules/egl/deqp-egl /deqp/modules/egl/deqp-egl-wayland |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| cmake -S /VK-GL-CTS -B . -G Ninja \ |  | ||||||
|       -DDEQP_TARGET=${DEQP_TARGET:-x11_glx} \ |  | ||||||
|       -DCMAKE_BUILD_TYPE=Release \ |  | ||||||
|       $EXTRA_CMAKE_ARGS |  | ||||||
| mold --run ninja |  | ||||||
|  |  | ||||||
| if [ "${DEQP_TARGET}" = 'android' ]; then |  | ||||||
|     mv /deqp/modules/egl/deqp-egl /deqp/modules/egl/deqp-egl-android |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| git -C /VK-GL-CTS describe --long > /deqp/version |  | ||||||
|  |  | ||||||
| # Copy out the mustpass lists we want. |  | ||||||
| mkdir /deqp/mustpass |  | ||||||
| for mustpass in $(< /VK-GL-CTS/external/vulkancts/mustpass/main/vk-default.txt) ; do |  | ||||||
|     cat /VK-GL-CTS/external/vulkancts/mustpass/main/$mustpass \ |  | ||||||
|         >> /deqp/mustpass/vk-master.txt |  | ||||||
| done |  | ||||||
|  |  | ||||||
| if [ "${DEQP_TARGET}" != 'android' ]; then |  | ||||||
|     cp \ |  | ||||||
|         /deqp/external/openglcts/modules/gl_cts/data/mustpass/gles/aosp_mustpass/3.2.6.x/*.txt \ |  | ||||||
|         /deqp/mustpass/. |  | ||||||
|     cp \ |  | ||||||
|         /deqp/external/openglcts/modules/gl_cts/data/mustpass/egl/aosp_mustpass/3.2.6.x/egl-master.txt \ |  | ||||||
|         /deqp/mustpass/. |  | ||||||
|     cp \ |  | ||||||
|         /deqp/external/openglcts/modules/gl_cts/data/mustpass/gles/khronos_mustpass/3.2.6.x/*-master.txt \ |  | ||||||
|         /deqp/mustpass/. |  | ||||||
|     cp \ |  | ||||||
|         /deqp/external/openglcts/modules/gl_cts/data/mustpass/gl/khronos_mustpass/4.6.1.x/*-master.txt \ |  | ||||||
|         /deqp/mustpass/. |  | ||||||
|     cp \ |  | ||||||
|         /deqp/external/openglcts/modules/gl_cts/data/mustpass/gl/khronos_mustpass_single/4.6.1.x/*-single.txt \ |  | ||||||
|         /deqp/mustpass/. |  | ||||||
|  |  | ||||||
|     # Save *some* executor utils, but otherwise strip things down |  | ||||||
|     # to reduce the deqp build size: |  | ||||||
|     mkdir /deqp/executor.save |  | ||||||
|     cp /deqp/executor/testlog-to-* /deqp/executor.save |  | ||||||
|     rm -rf /deqp/executor |  | ||||||
|     mv /deqp/executor.save /deqp/executor |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # Remove other mustpass files, since we saved off the ones we wanted to convenient locations above. |  | ||||||
| rm -rf /deqp/external/openglcts/modules/gl_cts/data/mustpass |  | ||||||
| rm -rf /deqp/external/vulkancts/modules/vulkan/vk-master* |  | ||||||
| rm -rf /deqp/external/vulkancts/modules/vulkan/vk-default |  | ||||||
|  |  | ||||||
| rm -rf /deqp/external/openglcts/modules/cts-runner |  | ||||||
| rm -rf /deqp/modules/internal |  | ||||||
| rm -rf /deqp/execserver |  | ||||||
| rm -rf /deqp/framework |  | ||||||
| # shellcheck disable=SC2038,SC2185 # TODO: rewrite find |  | ||||||
| find -iname '*cmake*' -o -name '*ninja*' -o -name '*.o' -o -name '*.a' | xargs rm -rf |  | ||||||
| ${STRIP_CMD:-strip} external/vulkancts/modules/vulkan/deqp-vk |  | ||||||
| ${STRIP_CMD:-strip} external/openglcts/modules/glcts |  | ||||||
| ${STRIP_CMD:-strip} modules/*/deqp-* |  | ||||||
| du -sh ./* |  | ||||||
| rm -rf /VK-GL-CTS |  | ||||||
| popd |  | ||||||
| @@ -1,19 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| # When changing this file, you need to bump the following |  | ||||||
| # .gitlab-ci/image-tags.yml tags: |  | ||||||
| # DEBIAN_X86_64_TEST_VK_TAG |  | ||||||
| # KERNEL_ROOTFS_TAG |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| git clone https://github.com/ValveSoftware/Fossilize.git |  | ||||||
| cd Fossilize |  | ||||||
| git checkout b43ee42bbd5631ea21fe9a2dee4190d5d875c327 |  | ||||||
| git submodule update --init |  | ||||||
| mkdir build |  | ||||||
| cd build |  | ||||||
| cmake -S .. -B . -G Ninja -DCMAKE_BUILD_TYPE=Release |  | ||||||
| ninja -C . install |  | ||||||
| cd ../.. |  | ||||||
| rm -rf Fossilize |  | ||||||
| @@ -1,19 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| GFXRECONSTRUCT_VERSION=761837794a1e57f918a85af7000b12e531b178ae |  | ||||||
|  |  | ||||||
| git clone https://github.com/LunarG/gfxreconstruct.git \ |  | ||||||
|     --single-branch \ |  | ||||||
|     -b master \ |  | ||||||
|     --no-checkout \ |  | ||||||
|     /gfxreconstruct |  | ||||||
| pushd /gfxreconstruct |  | ||||||
| git checkout "$GFXRECONSTRUCT_VERSION" |  | ||||||
| git submodule update --init |  | ||||||
| git submodule update |  | ||||||
| cmake -S . -B _build -G Ninja -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX:PATH=/gfxreconstruct/build -DBUILD_WERROR=OFF |  | ||||||
| cmake --build _build --parallel --target tools/{replay,info}/install/strip |  | ||||||
| find . -not -path './build' -not -path './build/*' -delete |  | ||||||
| popd |  | ||||||
| @@ -1,16 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| PARALLEL_DEQP_RUNNER_VERSION=fe557794b5dadd8dbf0eae403296625e03bda18a |  | ||||||
|  |  | ||||||
| git clone https://gitlab.freedesktop.org/mesa/parallel-deqp-runner --single-branch -b master --no-checkout /parallel-deqp-runner |  | ||||||
| pushd /parallel-deqp-runner |  | ||||||
| git checkout "$PARALLEL_DEQP_RUNNER_VERSION" |  | ||||||
| meson . _build |  | ||||||
| ninja -C _build hang-detection |  | ||||||
| mkdir -p build/bin |  | ||||||
| install _build/hang-detection build/bin |  | ||||||
| strip build/bin/* |  | ||||||
| find . -not -path './build' -not -path './build/*' -delete |  | ||||||
| popd |  | ||||||
| @@ -1,23 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
| # shellcheck disable=SC1091  # the path is created by the script |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| KDL_REVISION="5056f71b100a68b72b285c6fc845a66a2ed25985" |  | ||||||
|  |  | ||||||
| git clone \ |  | ||||||
|     https://gitlab.freedesktop.org/gfx-ci/ci-kdl.git \ |  | ||||||
|     --depth 1 \ |  | ||||||
|     ci-kdl.git |  | ||||||
| pushd ci-kdl.git |  | ||||||
| git checkout ${KDL_REVISION} |  | ||||||
| popd |  | ||||||
|  |  | ||||||
| python3 -m venv ci-kdl.venv |  | ||||||
| source ci-kdl.venv/bin/activate |  | ||||||
| pushd ci-kdl.git |  | ||||||
| pip install -r requirements.txt |  | ||||||
| pip install . |  | ||||||
| popd |  | ||||||
|  |  | ||||||
| rm -rf ci-kdl.git |  | ||||||
| @@ -1,31 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
| # shellcheck disable=SC2153 |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| mkdir -p kernel |  | ||||||
| pushd kernel |  | ||||||
|  |  | ||||||
| if [[ ${DEBIAN_ARCH} = "arm64" ]]; then |  | ||||||
|     KERNEL_IMAGE_NAME+=" cheza-kernel" |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| for image in ${KERNEL_IMAGE_NAME}; do |  | ||||||
|     curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|       -o "/lava-files/${image}" "${KERNEL_IMAGE_BASE}/${DEBIAN_ARCH}/${image}" |  | ||||||
| done |  | ||||||
|  |  | ||||||
| for dtb in ${DEVICE_TREES}; do |  | ||||||
|     curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|       -o "/lava-files/${dtb}" "${KERNEL_IMAGE_BASE}/${DEBIAN_ARCH}/${dtb}" |  | ||||||
| done |  | ||||||
|  |  | ||||||
| mkdir -p "/lava-files/rootfs-${DEBIAN_ARCH}" |  | ||||||
| curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|   -O "${KERNEL_IMAGE_BASE}/${DEBIAN_ARCH}/modules.tar.zst" |  | ||||||
| tar --keep-directory-symlink --zstd -xf modules.tar.zst -C "/lava-files/rootfs-${DEBIAN_ARCH}/" |  | ||||||
|  |  | ||||||
| popd |  | ||||||
| rm -rf kernel |  | ||||||
|  |  | ||||||
| @@ -1,31 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| export LLVM_CONFIG="llvm-config-${LLVM_VERSION:?"llvm unset!"}" |  | ||||||
| LLVM_TAG="llvmorg-15.0.7" |  | ||||||
|  |  | ||||||
| $LLVM_CONFIG --version |  | ||||||
|  |  | ||||||
| git config --global user.email "mesa@example.com" |  | ||||||
| git config --global user.name "Mesa CI" |  | ||||||
| git clone \ |  | ||||||
|     https://github.com/llvm/llvm-project \ |  | ||||||
|     --depth 1 \ |  | ||||||
|     -b "${LLVM_TAG}" \ |  | ||||||
|     /llvm-project |  | ||||||
|  |  | ||||||
| mkdir /libclc |  | ||||||
| pushd /libclc |  | ||||||
| cmake -S /llvm-project/libclc -B . -G Ninja -DLLVM_CONFIG="$LLVM_CONFIG" -DLIBCLC_TARGETS_TO_BUILD="spirv-mesa3d-;spirv64-mesa3d-" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr -DLLVM_SPIRV=/usr/bin/llvm-spirv |  | ||||||
| ninja |  | ||||||
| ninja install |  | ||||||
| popd |  | ||||||
|  |  | ||||||
| # Work around CMake vs. Debian packaging. |  | ||||||
| mkdir -p /usr/lib/clc |  | ||||||
| ln -s /usr/share/clc/spirv64-mesa3d-.spv /usr/lib/clc/ |  | ||||||
| ln -s /usr/share/clc/spirv-mesa3d-.spv /usr/lib/clc/ |  | ||||||
|  |  | ||||||
| du -sh ./* |  | ||||||
| rm -rf /libclc /llvm-project |  | ||||||
| @@ -1,15 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| export LIBDRM_VERSION=libdrm-2.4.114 |  | ||||||
|  |  | ||||||
| curl -L -O --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|     https://dri.freedesktop.org/libdrm/"$LIBDRM_VERSION".tar.xz |  | ||||||
| tar -xvf "$LIBDRM_VERSION".tar.xz && rm "$LIBDRM_VERSION".tar.xz |  | ||||||
| cd "$LIBDRM_VERSION" |  | ||||||
| meson setup build -D vc4=disabled -D freedreno=disabled -D etnaviv=disabled $EXTRA_MESON_ARGS |  | ||||||
| meson install -C build |  | ||||||
| cd .. |  | ||||||
| rm -rf "$LIBDRM_VERSION" |  | ||||||
| @@ -1,22 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| VER="${LLVM_VERSION:?llvm not set}.0.0" |  | ||||||
|  |  | ||||||
| curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|     -O "https://github.com/KhronosGroup/SPIRV-LLVM-Translator/archive/refs/tags/v${VER}.tar.gz" |  | ||||||
| tar -xvf "v${VER}.tar.gz" && rm "v${VER}.tar.gz" |  | ||||||
|  |  | ||||||
| mkdir "SPIRV-LLVM-Translator-${VER}/build" |  | ||||||
| pushd "SPIRV-LLVM-Translator-${VER}/build" |  | ||||||
| cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr |  | ||||||
| ninja |  | ||||||
| ninja install |  | ||||||
| # For some reason llvm-spirv is not installed by default |  | ||||||
| ninja llvm-spirv |  | ||||||
| cp tools/llvm-spirv/llvm-spirv /usr/bin/ |  | ||||||
| popd |  | ||||||
|  |  | ||||||
| du -sh "SPIRV-LLVM-Translator-${VER}" |  | ||||||
| rm -rf "SPIRV-LLVM-Translator-${VER}" |  | ||||||
| @@ -1,15 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| MOLD_VERSION="1.11.0" |  | ||||||
|  |  | ||||||
| git clone -b v"$MOLD_VERSION" --single-branch --depth 1 https://github.com/rui314/mold.git |  | ||||||
| pushd mold |  | ||||||
|  |  | ||||||
| cmake -S . -B . -DCMAKE_BUILD_TYPE=Release -D BUILD_TESTING=OFF -D MOLD_LTO=ON |  | ||||||
| cmake --build . --parallel |  | ||||||
| cmake --install . |  | ||||||
|  |  | ||||||
| popd |  | ||||||
| rm -rf mold |  | ||||||
| @@ -1,33 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| # When changing this file, you need to bump the following |  | ||||||
| # .gitlab-ci/image-tags.yml tags: |  | ||||||
| # DEBIAN_X86_64_TEST_GL_TAG |  | ||||||
| # DEBIAN_X86_64_TEST_VK_TAG |  | ||||||
| # KERNEL_ROOTFS_TAG |  | ||||||
|  |  | ||||||
| REV="f7db20b03de6896d013826c0a731bc4417c1a5a0" |  | ||||||
|  |  | ||||||
| git clone https://gitlab.freedesktop.org/mesa/piglit.git --single-branch --no-checkout /piglit |  | ||||||
| pushd /piglit |  | ||||||
| git checkout "$REV" |  | ||||||
| patch -p1 <$OLDPWD/.gitlab-ci/piglit/disable-vs_in.diff |  | ||||||
| cmake -S . -B . -G Ninja -DCMAKE_BUILD_TYPE=Release $PIGLIT_OPTS $EXTRA_CMAKE_ARGS |  | ||||||
| ninja $PIGLIT_BUILD_TARGETS |  | ||||||
| # shellcheck disable=SC2038,SC2185 # TODO: rewrite find |  | ||||||
| find -name .git -o -name '*ninja*' -o -iname '*cmake*' -o -name '*.[chao]' | xargs rm -rf |  | ||||||
| rm -rf target_api |  | ||||||
| if [ "$PIGLIT_BUILD_TARGETS" = "piglit_replayer" ]; then |  | ||||||
|     # shellcheck disable=SC2038,SC2185 # TODO: rewrite find |  | ||||||
|     find ! -regex "^\.$" \ |  | ||||||
|          ! -regex "^\.\/piglit.*" \ |  | ||||||
|          ! -regex "^\.\/framework.*" \ |  | ||||||
|          ! -regex "^\.\/bin$" \ |  | ||||||
|          ! -regex "^\.\/bin\/replayer\.py" \ |  | ||||||
|          ! -regex "^\.\/templates.*" \ |  | ||||||
|          ! -regex "^\.\/tests$" \ |  | ||||||
|          ! -regex "^\.\/tests\/replay\.py" 2>/dev/null | xargs rm -rf |  | ||||||
| fi |  | ||||||
| popd |  | ||||||
| @@ -1,39 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| # Note that this script is not actually "building" rust, but build- is the |  | ||||||
| # convention for the shared helpers for putting stuff in our containers. |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| # cargo (and rustup) wants to store stuff in $HOME/.cargo, and binaries in |  | ||||||
| # $HOME/.cargo/bin.  Make bin a link to a public bin directory so the commands |  | ||||||
| # are just available to all build jobs. |  | ||||||
| mkdir -p "$HOME"/.cargo |  | ||||||
| ln -s /usr/local/bin "$HOME"/.cargo/bin |  | ||||||
|  |  | ||||||
| # Rusticl requires at least Rust 1.66.0 |  | ||||||
| # |  | ||||||
| # Also, pick a specific snapshot from rustup so the compiler doesn't drift on |  | ||||||
| # us. |  | ||||||
| RUST_VERSION=1.66.1-2023-01-10 |  | ||||||
|  |  | ||||||
| # For rust in Mesa, we use rustup to install.  This lets us pick an arbitrary |  | ||||||
| # version of the compiler, rather than whatever the container's Debian comes |  | ||||||
| # with. |  | ||||||
| curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|     --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- \ |  | ||||||
|       --default-toolchain $RUST_VERSION \ |  | ||||||
|       --profile minimal \ |  | ||||||
|       -y |  | ||||||
|  |  | ||||||
| rustup component add clippy rustfmt |  | ||||||
|  |  | ||||||
| # Set up a config script for cross compiling -- cargo needs your system cc for |  | ||||||
| # linking in cross builds, but doesn't know what you want to use for system cc. |  | ||||||
| cat > /root/.cargo/config <<EOF |  | ||||||
| [target.armv7-unknown-linux-gnueabihf] |  | ||||||
| linker = "arm-linux-gnueabihf-gcc" |  | ||||||
|  |  | ||||||
| [target.aarch64-unknown-linux-gnu] |  | ||||||
| linker = "aarch64-linux-gnu-gcc" |  | ||||||
| EOF |  | ||||||
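|  |  | ||||||
| # Illustrative follow-up (not part of the original script): with the linkers |  | ||||||
| # configured above, a later cross build such as |  | ||||||
| #   cargo build --target aarch64-unknown-linux-gnu |  | ||||||
| # should pick up aarch64-linux-gnu-gcc for linking instead of the host cc. |  | ||||||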
| @@ -1,14 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
|  |  | ||||||
| # When changing this file, you need to bump the following |  | ||||||
| # .gitlab-ci/image-tags.yml tags: |  | ||||||
| # DEBIAN_BUILD_TAG |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| pushd /usr/local |  | ||||||
| git clone https://gitlab.freedesktop.org/mesa/shader-db.git --depth 1 |  | ||||||
| rm -rf shader-db/.git |  | ||||||
| cd shader-db |  | ||||||
| make |  | ||||||
| popd |  | ||||||
| @@ -1,89 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
| # SPDX-License-Identifier: MIT |  | ||||||
| # |  | ||||||
| # Copyright © 2022 Collabora Limited |  | ||||||
| # Author: Guilherme Gallo <guilherme.gallo@collabora.com> |  | ||||||
| # |  | ||||||
| # When changing this file, you need to bump the following |  | ||||||
| # .gitlab-ci/image-tags.yml tags: |  | ||||||
| # KERNEL_ROOTFS_TAG |  | ||||||
|  |  | ||||||
| SKQP_BRANCH=android-cts-12.1_r5 |  | ||||||
|  |  | ||||||
| # Hack for skqp: expose the llvm-15 clang binaries as plain clang/clang++ |  | ||||||
| pushd /usr/bin/ |  | ||||||
| ln -s ../lib/llvm-15/bin/clang clang |  | ||||||
| ln -s ../lib/llvm-15/bin/clang++ clang++ |  | ||||||
| popd |  | ||||||
|  |  | ||||||
| create_gn_args() { |  | ||||||
|     # gn can be configured to cross-compile skia and its tools. |  | ||||||
|     # It is important to set target_cpu to guarantee the build targets the |  | ||||||
|     # intended machine. |  | ||||||
|     cp "${BASE_ARGS_GN_FILE}" "${SKQP_OUT_DIR}"/args.gn |  | ||||||
|     echo "target_cpu = \"${SKQP_ARCH}\"" >> "${SKQP_OUT_DIR}"/args.gn |  | ||||||
| } |  | ||||||
|  |  | ||||||
|  |  | ||||||
| download_skia_source() { |  | ||||||
|     if [ -z ${SKIA_DIR+x} ] |  | ||||||
|     then |  | ||||||
|         return 1 |  | ||||||
|     fi |  | ||||||
|  |  | ||||||
|     # Skia cloned from https://android.googlesource.com/platform/external/skqp |  | ||||||
|     # has all needed assets tracked on git-fs |  | ||||||
|     SKQP_REPO=https://android.googlesource.com/platform/external/skqp |  | ||||||
|  |  | ||||||
|     git clone --branch "${SKQP_BRANCH}" --depth 1 "${SKQP_REPO}" "${SKIA_DIR}" |  | ||||||
| } |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| SCRIPT_DIR=$(realpath "$(dirname "$0")") |  | ||||||
| SKQP_PATCH_DIR="${SCRIPT_DIR}/patches" |  | ||||||
| BASE_ARGS_GN_FILE="${SCRIPT_DIR}/build-skqp_base.gn" |  | ||||||
|  |  | ||||||
| SKQP_ARCH=${SKQP_ARCH:-x64} |  | ||||||
| SKIA_DIR=${SKIA_DIR:-$(mktemp -d)} |  | ||||||
| SKQP_OUT_DIR=${SKIA_DIR}/out/${SKQP_ARCH} |  | ||||||
| SKQP_INSTALL_DIR=${SKQP_INSTALL_DIR:-/skqp} |  | ||||||
| SKQP_ASSETS_DIR="${SKQP_INSTALL_DIR}/assets" |  | ||||||
| SKQP_BINARIES=(skqp list_gpu_unit_tests list_gms) |  | ||||||
|  |  | ||||||
| download_skia_source |  | ||||||
|  |  | ||||||
| pushd "${SKIA_DIR}" |  | ||||||
|  |  | ||||||
| # Apply all skqp patches for Mesa CI |  | ||||||
| cat "${SKQP_PATCH_DIR}"/build-skqp_*.patch | |  | ||||||
|     patch -p1 |  | ||||||
|  |  | ||||||
| # Fetch the build tools needed to build skia/skqp. |  | ||||||
| # Basically, it clones repositories at the commit SHAs listed in the |  | ||||||
| # ${SKIA_DIR}/DEPS file. |  | ||||||
| python tools/git-sync-deps |  | ||||||
|  |  | ||||||
| mkdir -p "${SKQP_OUT_DIR}" |  | ||||||
| mkdir -p "${SKQP_INSTALL_DIR}" |  | ||||||
|  |  | ||||||
| create_gn_args |  | ||||||
|  |  | ||||||
| # Build and install skqp binaries |  | ||||||
| bin/gn gen "${SKQP_OUT_DIR}" |  | ||||||
|  |  | ||||||
| for BINARY in "${SKQP_BINARIES[@]}" |  | ||||||
| do |  | ||||||
|     /usr/bin/ninja -C "${SKQP_OUT_DIR}" "${BINARY}" |  | ||||||
|     # Strip binary, since gn is not stripping it even when `is_debug == false` |  | ||||||
|     ${STRIP_CMD:-strip} "${SKQP_OUT_DIR}/${BINARY}" |  | ||||||
|     install -m 0755 "${SKQP_OUT_DIR}/${BINARY}" "${SKQP_INSTALL_DIR}" |  | ||||||
| done |  | ||||||
|  |  | ||||||
| # Move assets to the target directory, which will reside in rootfs. |  | ||||||
| mv platform_tools/android/apps/skqp/src/main/assets/ "${SKQP_ASSETS_DIR}" |  | ||||||
|  |  | ||||||
| popd |  | ||||||
| rm -Rf "${SKIA_DIR}" |  | ||||||
|  |  | ||||||
| set +ex |  | ||||||
| @@ -1,59 +0,0 @@ | |||||||
| cc = "clang" |  | ||||||
| cxx = "clang++" |  | ||||||
|  |  | ||||||
| extra_cflags = [ |  | ||||||
|         "-Wno-error", |  | ||||||
|  |  | ||||||
|         "-DSK_ENABLE_DUMP_GPU", |  | ||||||
|         "-DSK_BUILD_FOR_SKQP" |  | ||||||
|     ] |  | ||||||
| extra_cflags_cc = [ |  | ||||||
|         "-Wno-error", |  | ||||||
|  |  | ||||||
|         # The skqp build produces a lot of compilation warnings; silence most of |  | ||||||
|         # them to remove clutter and to keep the CI job log from exceeding its |  | ||||||
|         # maximum size |  | ||||||
|  |  | ||||||
|         # GCC flags |  | ||||||
|         "-Wno-redundant-move", |  | ||||||
|         "-Wno-suggest-override", |  | ||||||
|         "-Wno-class-memaccess", |  | ||||||
|         "-Wno-deprecated-copy", |  | ||||||
|         "-Wno-uninitialized", |  | ||||||
|  |  | ||||||
|         # Clang flags |  | ||||||
|         "-Wno-macro-redefined", |  | ||||||
|         "-Wno-anon-enum-enum-conversion", |  | ||||||
|         "-Wno-suggest-destructor-override", |  | ||||||
|         "-Wno-return-std-move-in-c++11", |  | ||||||
|         "-Wno-extra-semi-stmt", |  | ||||||
|         "-Wno-reserved-identifier", |  | ||||||
|         "-Wno-bitwise-instead-of-logical", |  | ||||||
|         "-Wno-reserved-identifier", |  | ||||||
|         "-Wno-psabi", |  | ||||||
|         "-Wno-unused-but-set-variable", |  | ||||||
|         "-Wno-sizeof-array-div", |  | ||||||
|         "-Wno-string-concatenation", |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
| cc_wrapper = "ccache" |  | ||||||
|  |  | ||||||
| is_debug = false |  | ||||||
|  |  | ||||||
| skia_enable_fontmgr_android = false |  | ||||||
| skia_enable_fontmgr_empty = true |  | ||||||
| skia_enable_pdf = false |  | ||||||
| skia_enable_skottie = false |  | ||||||
|  |  | ||||||
| skia_skqp_global_error_tolerance = 8 |  | ||||||
| skia_tools_require_resources = true |  | ||||||
|  |  | ||||||
| skia_use_dng_sdk = false |  | ||||||
| skia_use_expat = true |  | ||||||
| skia_use_icu = false |  | ||||||
| skia_use_libheif = false |  | ||||||
| skia_use_lua = false |  | ||||||
| skia_use_piex = false |  | ||||||
| skia_use_vulkan = true |  | ||||||
|  |  | ||||||
| target_os = "linux" |  | ||||||
| @@ -1,25 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
| # When changing this file, you need to bump the following |  | ||||||
| # .gitlab-ci/image-tags.yml tags: |  | ||||||
| # KERNEL_ROOTFS_TAG |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| git config --global user.email "mesa@example.com" |  | ||||||
| git config --global user.name "Mesa CI" |  | ||||||
|  |  | ||||||
| git clone \ |  | ||||||
|     https://github.com/intel/libva-utils.git \ |  | ||||||
|     -b 2.18.1 \ |  | ||||||
|     --depth 1 \ |  | ||||||
|     /va-utils |  | ||||||
|  |  | ||||||
| pushd /va-utils |  | ||||||
| # libva in Debian 11 is too old. TODO: once this PR is merged, refer to the patch. |  | ||||||
| curl -L https://github.com/intel/libva-utils/pull/329.patch | git am |  | ||||||
|  |  | ||||||
| meson setup build -D tests=true -Dprefix=/va $EXTRA_MESON_ARGS |  | ||||||
| meson install -C build |  | ||||||
| popd |  | ||||||
| rm -rf /va-utils |  | ||||||
| @@ -1,43 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| # When changing this file, you need to bump the following |  | ||||||
| # .gitlab-ci/image-tags.yml tags: |  | ||||||
| # DEBIAN_X86_64_TEST_VK_TAG |  | ||||||
| # KERNEL_ROOTFS_TAG |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| VKD3D_PROTON_COMMIT="2ad6cfdeaacdf47e2689e30a8fb5ac8193725f0d" |  | ||||||
|  |  | ||||||
| VKD3D_PROTON_DST_DIR="/vkd3d-proton-tests" |  | ||||||
| VKD3D_PROTON_SRC_DIR="/vkd3d-proton-src" |  | ||||||
| VKD3D_PROTON_BUILD_DIR="/vkd3d-proton-$VKD3D_PROTON_COMMIT" |  | ||||||
|  |  | ||||||
| function build_arch { |  | ||||||
|   local arch="$1" |  | ||||||
|   shift |  | ||||||
|  |  | ||||||
|   meson "$@"                               \ |  | ||||||
|         -Denable_tests=true                \ |  | ||||||
|         --buildtype release                \ |  | ||||||
|         --prefix "$VKD3D_PROTON_DST_DIR"   \ |  | ||||||
|         --strip                            \ |  | ||||||
|         --bindir "x${arch}"                \ |  | ||||||
|         --libdir "x${arch}"                \ |  | ||||||
|         "$VKD3D_PROTON_BUILD_DIR/build.${arch}" |  | ||||||
|  |  | ||||||
|   ninja -C "$VKD3D_PROTON_BUILD_DIR/build.${arch}" install |  | ||||||
|  |  | ||||||
|   install -D -m755 -t "${VKD3D_PROTON_DST_DIR}/x${arch}/bin" "$VKD3D_PROTON_BUILD_DIR/build.${arch}/tests/d3d12" |  | ||||||
| } |  | ||||||
|  |  | ||||||
| git clone https://github.com/HansKristian-Work/vkd3d-proton.git --single-branch -b master --no-checkout "$VKD3D_PROTON_SRC_DIR" |  | ||||||
| pushd "$VKD3D_PROTON_SRC_DIR" |  | ||||||
| git checkout "$VKD3D_PROTON_COMMIT" |  | ||||||
| git submodule update --init --recursive |  | ||||||
| git submodule update --recursive |  | ||||||
| build_arch 64 |  | ||||||
| build_arch 86 |  | ||||||
| popd |  | ||||||
|  |  | ||||||
| rm -rf "$VKD3D_PROTON_BUILD_DIR" |  | ||||||
| rm -rf "$VKD3D_PROTON_SRC_DIR" |  | ||||||
| @@ -1,18 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
|  |  | ||||||
| # When changing this file, you need to bump the following |  | ||||||
| # .gitlab-ci/image-tags.yml tags: |  | ||||||
| # DEBIAN_X86_64_TEST_GL_TAG |  | ||||||
| # KERNEL_ROOTFS_TAG |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| VALIDATION_TAG="v1.3.263" |  | ||||||
|  |  | ||||||
| git clone -b "$VALIDATION_TAG" --single-branch --depth 1 https://github.com/KhronosGroup/Vulkan-ValidationLayers.git |  | ||||||
| pushd Vulkan-ValidationLayers |  | ||||||
| python3 scripts/update_deps.py --dir external --config debug |  | ||||||
| cmake -G Ninja -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr -DBUILD_TESTS=OFF -DBUILD_WERROR=OFF -C external/helper.cmake -S . -B build |  | ||||||
| ninja -C build install |  | ||||||
| popd |  | ||||||
| rm -rf Vulkan-ValidationLayers |  | ||||||
| @@ -1,23 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| export LIBWAYLAND_VERSION="1.21.0" |  | ||||||
| export WAYLAND_PROTOCOLS_VERSION="1.31" |  | ||||||
|  |  | ||||||
| git clone https://gitlab.freedesktop.org/wayland/wayland |  | ||||||
| cd wayland |  | ||||||
| git checkout "$LIBWAYLAND_VERSION" |  | ||||||
| meson setup -Ddocumentation=false -Ddtd_validation=false -Dlibraries=true _build $EXTRA_MESON_ARGS |  | ||||||
| meson install -C _build |  | ||||||
| cd .. |  | ||||||
| rm -rf wayland |  | ||||||
|  |  | ||||||
| git clone https://gitlab.freedesktop.org/wayland/wayland-protocols |  | ||||||
| cd wayland-protocols |  | ||||||
| git checkout "$WAYLAND_PROTOCOLS_VERSION" |  | ||||||
| meson setup _build $EXTRA_MESON_ARGS |  | ||||||
| meson install -C _build |  | ||||||
| cd .. |  | ||||||
| rm -rf wayland-protocols |  | ||||||
| @@ -1,12 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
|  |  | ||||||
| if test -f /etc/debian_version; then |  | ||||||
|     apt-get autoremove -y --purge |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # Clean up any build cache for rust. |  | ||||||
| rm -rf /.cargo |  | ||||||
|  |  | ||||||
| if test -x /usr/bin/ccache; then |  | ||||||
|     ccache --show-stats |  | ||||||
| fi |  | ||||||
| @@ -1,52 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
|  |  | ||||||
| if test -x /usr/bin/ccache; then |  | ||||||
|     if test -f /etc/debian_version; then |  | ||||||
|         CCACHE_PATH=/usr/lib/ccache |  | ||||||
|     elif test -f /etc/alpine-release; then |  | ||||||
|         CCACHE_PATH=/usr/lib/ccache/bin |  | ||||||
|     else |  | ||||||
|         CCACHE_PATH=/usr/lib64/ccache |  | ||||||
|     fi |  | ||||||
|  |  | ||||||
|     # Common setup among container builds before we get to building code. |  | ||||||
|  |  | ||||||
|     export CCACHE_COMPILERCHECK=content |  | ||||||
|     export CCACHE_COMPRESS=true |  | ||||||
|     export CCACHE_DIR=/cache/$CI_PROJECT_NAME/ccache |  | ||||||
|     export PATH=$CCACHE_PATH:$PATH |  | ||||||
|  |  | ||||||
|     # CMake ignores $PATH, so we have to force CC/CXX to the ccache versions. |  | ||||||
|     export CC="${CCACHE_PATH}/gcc" |  | ||||||
|     export CXX="${CCACHE_PATH}/g++" |  | ||||||
|  |  | ||||||
|     ccache --show-stats |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # When not using the mold linker (e.g. unsupported architecture), force |  | ||||||
| # linkers to gold, since it's so much faster for building.  We can't use |  | ||||||
| # lld because we're on old debian and it's buggy.  mingw fails meson builds |  | ||||||
| # with it with "meson.build:21:0: ERROR: Unable to determine dynamic linker" |  | ||||||
| find /usr/bin -name \*-ld -o -name ld | \ |  | ||||||
|     grep -v mingw | \ |  | ||||||
|     xargs -n 1 -I '{}' ln -sf '{}.gold' '{}' |  | ||||||
|  |  | ||||||
| # Make a wrapper script for ninja to always include the -j flags |  | ||||||
| { |  | ||||||
|     echo '#!/bin/sh -x' |  | ||||||
|     # shellcheck disable=SC2016 |  | ||||||
|     echo '/usr/bin/ninja -j${FDO_CI_CONCURRENT:-4} "$@"' |  | ||||||
| } > /usr/local/bin/ninja |  | ||||||
| chmod +x /usr/local/bin/ninja |  | ||||||
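|  |  | ||||||
| # From here on, a plain `ninja` in later build scripts resolves to this wrapper |  | ||||||
| # (assuming /usr/local/bin precedes /usr/bin in $PATH, the usual default), so |  | ||||||
| # e.g. `ninja -C _build` effectively runs |  | ||||||
| #   /usr/bin/ninja -j${FDO_CI_CONCURRENT:-4} -C _build |  | ||||||
| # (illustrative expansion, not part of the original script). |  | ||||||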
|  |  | ||||||
| # Set MAKEFLAGS so that all make invocations in container builds include the |  | ||||||
| # flags (doesn't apply to non-container builds, but we don't run make there) |  | ||||||
| export MAKEFLAGS="-j${FDO_CI_CONCURRENT:-4}" |  | ||||||
|  |  | ||||||
| # Make wget retry when a download fails or times out |  | ||||||
| echo -e "retry_connrefused = on\n" \ |  | ||||||
|         "read_timeout = 300\n" \ |  | ||||||
|         "tries = 4\n" \ |  | ||||||
|         "retry_on_host_error = on\n" \ |  | ||||||
|         "retry_on_http_error = 429,500,502,503,504\n" \ |  | ||||||
|         "wait_retry = 32" >> /etc/wgetrc |  | ||||||
| @@ -1,37 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| ndk=$1 |  | ||||||
| arch=$2 |  | ||||||
| cpu_family=$3 |  | ||||||
| cpu=$4 |  | ||||||
| cross_file="/cross_file-$arch.txt" |  | ||||||
| sdk_version=$5 |  | ||||||
|  |  | ||||||
| # armv7 has the toolchain split between two names. |  | ||||||
| arch2=${6:-$2} |  | ||||||
|  |  | ||||||
| # Note that we disable C++ exceptions, because Mesa doesn't use exceptions, |  | ||||||
| # and allowing it in code generation means we get unwind symbols that break |  | ||||||
| # the libEGL and driver symbol tests. |  | ||||||
|  |  | ||||||
| cat > "$cross_file" <<EOF |  | ||||||
| [binaries] |  | ||||||
| ar = '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-ar' |  | ||||||
| c = ['ccache', '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch2}${sdk_version}-clang', '-fno-exceptions', '-fno-unwind-tables', '-fno-asynchronous-unwind-tables'] |  | ||||||
| cpp = ['ccache', '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch2}${sdk_version}-clang++', '-fno-exceptions', '-fno-unwind-tables', '-fno-asynchronous-unwind-tables', '-static-libstdc++'] |  | ||||||
| c_ld = 'lld' |  | ||||||
| cpp_ld = 'lld' |  | ||||||
| strip = '$ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-strip' |  | ||||||
| pkgconfig = ['/usr/bin/pkgconf'] |  | ||||||
|  |  | ||||||
| [host_machine] |  | ||||||
| system = 'android' |  | ||||||
| cpu_family = '$cpu_family' |  | ||||||
| cpu = '$cpu' |  | ||||||
| endian = 'little' |  | ||||||
|  |  | ||||||
| [properties] |  | ||||||
| needs_exe_wrapper = true |  | ||||||
| pkg_config_libdir = '/usr/local/lib/${arch2}/pkgconfig/:/${ndk}/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/lib/${arch2}/pkgconfig/' |  | ||||||
|  |  | ||||||
| EOF |  | ||||||
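|  |  | ||||||
| # Usage sketch (assumed invocation; mirrors how the cross file is consumed |  | ||||||
| # elsewhere in these container scripts): |  | ||||||
| #   meson setup build-aarch64 --cross-file /cross_file-aarch64-linux-android.txt |  | ||||||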
| @@ -1,40 +0,0 @@ | |||||||
| #!/bin/sh |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| # Makes a .pc file in the Android NDK for meson to find its libraries. |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| ndk="$1" |  | ||||||
| pc="$2" |  | ||||||
| cflags="$3" |  | ||||||
| libs="$4" |  | ||||||
| version="$5" |  | ||||||
| sdk_version="$6" |  | ||||||
|  |  | ||||||
| sysroot=$ndk/toolchains/llvm/prebuilt/linux-x86_64/sysroot |  | ||||||
|  |  | ||||||
| for arch in \ |  | ||||||
|         x86_64-linux-android \ |  | ||||||
|         i686-linux-android \ |  | ||||||
|         aarch64-linux-android \ |  | ||||||
|         arm-linux-androideabi; do |  | ||||||
|     pcdir=$sysroot/usr/lib/$arch/pkgconfig |  | ||||||
|     mkdir -p $pcdir |  | ||||||
|  |  | ||||||
|     cat >$pcdir/$pc <<EOF |  | ||||||
| prefix=$sysroot |  | ||||||
| exec_prefix=$sysroot |  | ||||||
| libdir=$sysroot/usr/lib/$arch/$sdk_version |  | ||||||
| sharedlibdir=$sysroot/usr/lib/$arch |  | ||||||
| includedir=$sysroot/usr/include |  | ||||||
|  |  | ||||||
| Name: zlib |  | ||||||
| Description: zlib compression library |  | ||||||
| Version: $version |  | ||||||
|  |  | ||||||
| Requires: |  | ||||||
| Libs: -L$sysroot/usr/lib/$arch/$sdk_version $libs |  | ||||||
| Cflags: -I$sysroot/usr/include $cflags |  | ||||||
| EOF |  | ||||||
| done |  | ||||||
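|  |  | ||||||
| # Quick sanity check (illustrative only, not part of the original script): |  | ||||||
| #   PKG_CONFIG_LIBDIR=$sysroot/usr/lib/x86_64-linux-android/pkgconfig \ |  | ||||||
| #     pkgconf --modversion "${pc%.pc}" |  | ||||||
| # should now print "$version" for each target arch. |  | ||||||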
| @@ -1,54 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| arch=$1 |  | ||||||
| cross_file="/cross_file-$arch.txt" |  | ||||||
| meson env2mfile --cross --debarch "$arch" -o "$cross_file" |  | ||||||
|  |  | ||||||
| # Explicitly set ccache path for cross compilers |  | ||||||
| sed -i "s|/usr/bin/\([^-]*\)-linux-gnu\([^-]*\)-g|/usr/lib/ccache/\\1-linux-gnu\\2-g|g" "$cross_file" |  | ||||||
|  |  | ||||||
| # Rely on qemu-user being configured in binfmt_misc on the host |  | ||||||
| # shellcheck disable=SC1003 # somehow this sed doesn't seem to work for me locally |  | ||||||
| sed -i -e '/\[properties\]/a\' -e "needs_exe_wrapper = False" "$cross_file" |  | ||||||
|  |  | ||||||
| # Add a line for rustc, which meson env2mfile is missing. |  | ||||||
| cc=$(sed -n "s|^c\s*=\s*\[?'\(.*\)'\]?|\1|p" < "$cross_file") |  | ||||||
|  |  | ||||||
| if [[ "$arch" = "arm64" ]]; then |  | ||||||
|     rust_target=aarch64-unknown-linux-gnu |  | ||||||
| elif [[ "$arch" = "armhf" ]]; then |  | ||||||
|     rust_target=armv7-unknown-linux-gnueabihf |  | ||||||
| elif [[ "$arch" = "i386" ]]; then |  | ||||||
|     rust_target=i686-unknown-linux-gnu |  | ||||||
| elif [[ "$arch" = "ppc64el" ]]; then |  | ||||||
|     rust_target=powerpc64le-unknown-linux-gnu |  | ||||||
| elif [[ "$arch" = "s390x" ]]; then |  | ||||||
|     rust_target=s390x-unknown-linux-gnu |  | ||||||
| else |  | ||||||
|     echo "Needs rustc target mapping" |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| # shellcheck disable=SC1003 # somehow this sed doesn't seem to work for me locally |  | ||||||
| sed -i -e '/\[binaries\]/a\' -e "rust = ['rustc', '--target=$rust_target', '-C', 'linker=$cc']" "$cross_file" |  | ||||||
|  |  | ||||||
| # Set up cmake cross compile toolchain file for dEQP builds |  | ||||||
| toolchain_file="/toolchain-$arch.cmake" |  | ||||||
| if [[ "$arch" = "arm64" ]]; then |  | ||||||
|     GCC_ARCH="aarch64-linux-gnu" |  | ||||||
|     DE_CPU="DE_CPU_ARM_64" |  | ||||||
| elif [[ "$arch" = "armhf" ]]; then |  | ||||||
|     GCC_ARCH="arm-linux-gnueabihf" |  | ||||||
|     DE_CPU="DE_CPU_ARM" |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| if [[ -n "$GCC_ARCH" ]]; then |  | ||||||
|     { |  | ||||||
|         echo "set(CMAKE_SYSTEM_NAME Linux)"; |  | ||||||
|         echo "set(CMAKE_SYSTEM_PROCESSOR arm)"; |  | ||||||
|         echo "set(CMAKE_C_COMPILER /usr/lib/ccache/$GCC_ARCH-gcc)"; |  | ||||||
|         echo "set(CMAKE_CXX_COMPILER /usr/lib/ccache/$GCC_ARCH-g++)"; |  | ||||||
|         echo "set(CMAKE_CXX_FLAGS_INIT \"-Wno-psabi\")";  # makes ABI warnings quiet for ARMv7 |  | ||||||
|         echo "set(ENV{PKG_CONFIG} \"/usr/bin/$GCC_ARCH-pkgconf\")"; |  | ||||||
|         echo "set(DE_CPU $DE_CPU)"; |  | ||||||
|     } > "$toolchain_file" |  | ||||||
| fi |  | ||||||
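|  |  | ||||||
| # Usage sketch (assumed commands; cross_build.sh passes the meson flag this way): |  | ||||||
| #   meson setup _build --cross-file "/cross_file-${arch}.txt" |  | ||||||
| #   cmake -S . -B _build -DCMAKE_TOOLCHAIN_FILE="/toolchain-${arch}.cmake" |  | ||||||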
| @@ -1,86 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
| set -o xtrace |  | ||||||
|  |  | ||||||
| export DEBIAN_FRONTEND=noninteractive |  | ||||||
| export LLVM_VERSION="${LLVM_VERSION:=15}" |  | ||||||
|  |  | ||||||
| # Ephemeral packages (installed for this script and removed again at the end) |  | ||||||
| STABLE_EPHEMERAL=" \ |  | ||||||
|         " |  | ||||||
|  |  | ||||||
| dpkg --add-architecture $arch |  | ||||||
| apt-get update |  | ||||||
|  |  | ||||||
| apt-get install -y --no-remove \ |  | ||||||
|         $EXTRA_LOCAL_PACKAGES \ |  | ||||||
|         $STABLE_EPHEMERAL \ |  | ||||||
|         crossbuild-essential-$arch \ |  | ||||||
|         pkgconf:$arch \ |  | ||||||
|         libasan8:$arch \ |  | ||||||
|         libdrm-dev:$arch \ |  | ||||||
|         libelf-dev:$arch \ |  | ||||||
|         libexpat1-dev:$arch \ |  | ||||||
|         libffi-dev:$arch \ |  | ||||||
|         libpciaccess-dev:$arch \ |  | ||||||
|         libstdc++6:$arch \ |  | ||||||
|         libvulkan-dev:$arch \ |  | ||||||
|         libx11-dev:$arch \ |  | ||||||
|         libx11-xcb-dev:$arch \ |  | ||||||
|         libxcb-dri2-0-dev:$arch \ |  | ||||||
|         libxcb-dri3-dev:$arch \ |  | ||||||
|         libxcb-glx0-dev:$arch \ |  | ||||||
|         libxcb-present-dev:$arch \ |  | ||||||
|         libxcb-randr0-dev:$arch \ |  | ||||||
|         libxcb-shm0-dev:$arch \ |  | ||||||
|         libxcb-xfixes0-dev:$arch \ |  | ||||||
|         libxdamage-dev:$arch \ |  | ||||||
|         libxext-dev:$arch \ |  | ||||||
|         libxrandr-dev:$arch \ |  | ||||||
|         libxshmfence-dev:$arch \ |  | ||||||
|         libxxf86vm-dev:$arch \ |  | ||||||
|         libwayland-dev:$arch |  | ||||||
|  |  | ||||||
| if [[ $arch != "armhf" ]]; then |  | ||||||
|     # We don't need clang-format for the crossbuilds, but the installed amd64 |  | ||||||
|     # package will conflict with libclang. Uninstall clang-format (and its |  | ||||||
|     # problematic dependency) to fix. |  | ||||||
|     apt-get remove -y clang-format-${LLVM_VERSION} libclang-cpp${LLVM_VERSION} |  | ||||||
|  |  | ||||||
|     # llvm-*-tools:$arch conflicts with python3:amd64. Install dependencies only |  | ||||||
|     # with apt-get, then force-install llvm-*-{dev,tools}:$arch with dpkg to get |  | ||||||
|     # around this. |  | ||||||
|     apt-get install -y --no-remove --no-install-recommends \ |  | ||||||
|             libclang-cpp${LLVM_VERSION}:$arch \ |  | ||||||
|             libgcc-s1:$arch \ |  | ||||||
|             libtinfo-dev:$arch \ |  | ||||||
|             libz3-dev:$arch \ |  | ||||||
|             llvm-${LLVM_VERSION}:$arch \ |  | ||||||
|             zlib1g |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/create-cross-file.sh $arch |  | ||||||
|  |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/container_pre_build.sh |  | ||||||
|  |  | ||||||
|  |  | ||||||
| # dependencies where we want a specific version |  | ||||||
| EXTRA_MESON_ARGS="--cross-file=/cross_file-${arch}.txt -D libdir=lib/$(dpkg-architecture -A $arch -qDEB_TARGET_MULTIARCH)" |  | ||||||
| . .gitlab-ci/container/build-libdrm.sh |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/build-wayland.sh |  | ||||||
|  |  | ||||||
| apt-get purge -y \ |  | ||||||
|         $STABLE_EPHEMERAL |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/container_post_build.sh |  | ||||||
|  |  | ||||||
| # This needs to be done after container_post_build.sh, or apt-get breaks in there |  | ||||||
| if [[ $arch != "armhf" ]]; then |  | ||||||
|     apt-get download llvm-${LLVM_VERSION}-{dev,tools}:$arch |  | ||||||
|     dpkg -i --force-depends llvm-${LLVM_VERSION}-*_${arch}.deb |  | ||||||
|     rm llvm-${LLVM_VERSION}-*_${arch}.deb |  | ||||||
| fi |  | ||||||
| @@ -1,109 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -ex |  | ||||||
|  |  | ||||||
| EPHEMERAL="\ |  | ||||||
|          autoconf \ |  | ||||||
|          rdfind \ |  | ||||||
|          unzip \ |  | ||||||
|          " |  | ||||||
|  |  | ||||||
| apt-get install -y --no-remove $EPHEMERAL |  | ||||||
|  |  | ||||||
| # Fetch the NDK and extract just the toolchain we want. |  | ||||||
| ndk=$ANDROID_NDK |  | ||||||
| curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|   -o $ndk.zip https://dl.google.com/android/repository/$ndk-linux.zip |  | ||||||
| unzip -d / $ndk.zip "$ndk/toolchains/llvm/*" |  | ||||||
| rm $ndk.zip |  | ||||||
| # Since it was packed as a zip file, symlinks/hardlinks got turned into |  | ||||||
| # duplicate files.  Turn them into hardlinks to save on container space. |  | ||||||
| rdfind -makehardlinks true -makeresultsfile false /${ndk}/ |  | ||||||
| # Drop some large tools we won't use in this build. |  | ||||||
| find /${ndk}/ -type f | grep -E -i "clang-check|clang-tidy|lldb" | xargs rm -f |  | ||||||
|  |  | ||||||
| sh .gitlab-ci/container/create-android-ndk-pc.sh /$ndk zlib.pc "" "-lz" "1.2.3" $ANDROID_SDK_VERSION |  | ||||||
|  |  | ||||||
| sh .gitlab-ci/container/create-android-cross-file.sh /$ndk x86_64-linux-android x86_64 x86_64 $ANDROID_SDK_VERSION |  | ||||||
| sh .gitlab-ci/container/create-android-cross-file.sh /$ndk i686-linux-android x86 x86 $ANDROID_SDK_VERSION |  | ||||||
| sh .gitlab-ci/container/create-android-cross-file.sh /$ndk aarch64-linux-android aarch64 armv8 $ANDROID_SDK_VERSION |  | ||||||
| sh .gitlab-ci/container/create-android-cross-file.sh /$ndk arm-linux-androideabi arm armv7hl $ANDROID_SDK_VERSION armv7a-linux-androideabi |  | ||||||
|  |  | ||||||
| # Not using build-libdrm.sh because we don't want its cleanup after building |  | ||||||
| # each arch.  Fetch and extract now. |  | ||||||
| export LIBDRM_VERSION=libdrm-2.4.114 |  | ||||||
| curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|   -O https://dri.freedesktop.org/libdrm/$LIBDRM_VERSION.tar.xz |  | ||||||
| tar -xf $LIBDRM_VERSION.tar.xz && rm $LIBDRM_VERSION.tar.xz |  | ||||||
|  |  | ||||||
| for arch in \ |  | ||||||
|         x86_64-linux-android \ |  | ||||||
|         i686-linux-android \ |  | ||||||
|         aarch64-linux-android \ |  | ||||||
|         arm-linux-androideabi ; do |  | ||||||
|  |  | ||||||
|     cd $LIBDRM_VERSION |  | ||||||
|     rm -rf build-$arch |  | ||||||
|     meson setup build-$arch \ |  | ||||||
|           --cross-file=/cross_file-$arch.txt \ |  | ||||||
|           --libdir=lib/$arch \ |  | ||||||
|           -Dnouveau=disabled \ |  | ||||||
|           -Dvc4=disabled \ |  | ||||||
|           -Detnaviv=disabled \ |  | ||||||
|           -Dfreedreno=disabled \ |  | ||||||
|           -Dintel=disabled \ |  | ||||||
|           -Dcairo-tests=disabled \ |  | ||||||
|           -Dvalgrind=disabled |  | ||||||
|     meson install -C build-$arch |  | ||||||
|     cd .. |  | ||||||
| done |  | ||||||
|  |  | ||||||
| rm -rf $LIBDRM_VERSION |  | ||||||
|  |  | ||||||
| export LIBELF_VERSION=libelf-0.8.13 |  | ||||||
| curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|   -O https://fossies.org/linux/misc/old/$LIBELF_VERSION.tar.gz |  | ||||||
|  |  | ||||||
| # Not 100% sure who runs the mirror above so be extra careful |  | ||||||
| if ! echo "4136d7b4c04df68b686570afa26988ac ${LIBELF_VERSION}.tar.gz" | md5sum -c -; then |  | ||||||
|     echo "Checksum failed" |  | ||||||
|     exit 1 |  | ||||||
| fi |  | ||||||
|  |  | ||||||
| tar -xf ${LIBELF_VERSION}.tar.gz |  | ||||||
| cd $LIBELF_VERSION |  | ||||||
|  |  | ||||||
| # Work around a bug in the original configure not enabling __LIBELF64. |  | ||||||
| autoreconf |  | ||||||
|  |  | ||||||
| for arch in \ |  | ||||||
|         x86_64-linux-android \ |  | ||||||
|         i686-linux-android \ |  | ||||||
|         aarch64-linux-android \ |  | ||||||
|         arm-linux-androideabi ; do |  | ||||||
|  |  | ||||||
|     ccarch=${arch} |  | ||||||
|     if [ "${arch}" ==  'arm-linux-androideabi' ] |  | ||||||
|     then |  | ||||||
|        ccarch=armv7a-linux-androideabi |  | ||||||
|     fi |  | ||||||
|  |  | ||||||
|     export AR=/${ndk}/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-ar |  | ||||||
|     export CC=/${ndk}/toolchains/llvm/prebuilt/linux-x86_64/bin/${ccarch}${ANDROID_SDK_VERSION}-clang |  | ||||||
|     export CXX=/${ndk}/toolchains/llvm/prebuilt/linux-x86_64/bin/${ccarch}${ANDROID_SDK_VERSION}-clang++ |  | ||||||
|     export LD=/${ndk}/toolchains/llvm/prebuilt/linux-x86_64/bin/${arch}-ld |  | ||||||
|     export RANLIB=/${ndk}/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-ranlib |  | ||||||
|  |  | ||||||
|     # The configure script doesn't know about Android, but it doesn't really |  | ||||||
|     # seem to use the host anyway. |  | ||||||
|     ./configure --host=x86_64-linux-gnu  --disable-nls --disable-shared \ |  | ||||||
|                 --libdir=/usr/local/lib/${arch} |  | ||||||
|     make install |  | ||||||
|     make distclean |  | ||||||
| done |  | ||||||
|  |  | ||||||
| cd .. |  | ||||||
| rm -rf $LIBELF_VERSION |  | ||||||
|  |  | ||||||
| apt-get purge -y $EPHEMERAL |  | ||||||
| @@ -1,5 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
|  |  | ||||||
| arch=armhf . .gitlab-ci/container/debian/arm_test.sh |  | ||||||
| @@ -1,86 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
| # shellcheck disable=SC2086 # we want word splitting |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
| set -o xtrace |  | ||||||
|  |  | ||||||
| export LLVM_VERSION="${LLVM_VERSION:=15}" |  | ||||||
|  |  | ||||||
| apt-get -y install ca-certificates |  | ||||||
| sed -i -e 's/http:\/\/deb/https:\/\/deb/g' /etc/apt/sources.list.d/* |  | ||||||
| apt-get update |  | ||||||
|  |  | ||||||
| # Ephemeral packages (installed for this script and removed again at |  | ||||||
| # the end) |  | ||||||
| STABLE_EPHEMERAL=" \ |  | ||||||
|         libssl-dev \ |  | ||||||
|         " |  | ||||||
|  |  | ||||||
| apt-get -y install \ |  | ||||||
| 	${STABLE_EPHEMERAL} \ |  | ||||||
| 	apt-utils \ |  | ||||||
| 	android-libext4-utils \ |  | ||||||
| 	autoconf \ |  | ||||||
| 	automake \ |  | ||||||
| 	bc \ |  | ||||||
| 	bison \ |  | ||||||
| 	ccache \ |  | ||||||
| 	cmake \ |  | ||||||
| 	curl \ |  | ||||||
| 	debootstrap \ |  | ||||||
| 	fastboot \ |  | ||||||
| 	flex \ |  | ||||||
| 	g++ \ |  | ||||||
| 	git \ |  | ||||||
| 	glslang-tools \ |  | ||||||
| 	kmod \ |  | ||||||
| 	libasan8 \ |  | ||||||
| 	libdrm-dev \ |  | ||||||
| 	libelf-dev \ |  | ||||||
| 	libexpat1-dev \ |  | ||||||
| 	libvulkan-dev \ |  | ||||||
| 	libx11-dev \ |  | ||||||
| 	libx11-xcb-dev \ |  | ||||||
| 	libxcb-dri2-0-dev \ |  | ||||||
| 	libxcb-dri3-dev \ |  | ||||||
| 	libxcb-glx0-dev \ |  | ||||||
| 	libxcb-present-dev \ |  | ||||||
| 	libxcb-randr0-dev \ |  | ||||||
| 	libxcb-shm0-dev \ |  | ||||||
| 	libxcb-xfixes0-dev \ |  | ||||||
| 	libxdamage-dev \ |  | ||||||
| 	libxext-dev \ |  | ||||||
| 	libxrandr-dev \ |  | ||||||
| 	libxshmfence-dev \ |  | ||||||
| 	libxxf86vm-dev \ |  | ||||||
| 	libwayland-dev \ |  | ||||||
| 	libwayland-egl-backend-dev \ |  | ||||||
| 	llvm-${LLVM_VERSION}-dev \ |  | ||||||
| 	ninja-build \ |  | ||||||
| 	meson \ |  | ||||||
| 	openssh-server \ |  | ||||||
| 	pkgconf \ |  | ||||||
| 	python3-mako \ |  | ||||||
| 	python3-pil \ |  | ||||||
| 	python3-pip \ |  | ||||||
| 	python3-requests \ |  | ||||||
| 	python3-setuptools \ |  | ||||||
| 	u-boot-tools \ |  | ||||||
| 	xz-utils \ |  | ||||||
| 	zlib1g-dev \ |  | ||||||
| 	zstd |  | ||||||
|  |  | ||||||
| pip3 install --break-system-packages git+https://gitlab.freedesktop.org/freedesktop/ci-templates@ffe4d1b10aab7534489f0c4bbc4c5899df17d3f2 |  | ||||||
|  |  | ||||||
| arch=armhf |  | ||||||
| . .gitlab-ci/container/cross_build.sh |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/container_pre_build.sh |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/build-mold.sh |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/build-wayland.sh |  | ||||||
|  |  | ||||||
| apt-get purge -y $STABLE_EPHEMERAL |  | ||||||
|  |  | ||||||
| . .gitlab-ci/container/container_post_build.sh |  | ||||||
| @@ -1,5 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
|  |  | ||||||
| arch=arm64 . .gitlab-ci/container/debian/arm_test.sh |  | ||||||
| @@ -1,47 +0,0 @@ | |||||||
| #!/usr/bin/env bash |  | ||||||
| # shellcheck disable=SC2154 # arch is assigned in previous scripts |  | ||||||
| # When changing this file, you need to bump the following |  | ||||||
| # .gitlab-ci/image-tags.yml tags: |  | ||||||
| # DEBIAN_BASE_TAG |  | ||||||
| # KERNEL_ROOTFS_TAG |  | ||||||
|  |  | ||||||
| set -e |  | ||||||
| set -o xtrace |  | ||||||
|  |  | ||||||
| ############### Install packages for baremetal testing |  | ||||||
| apt-get install -y ca-certificates |  | ||||||
| sed -i -e 's/http:\/\/deb/https:\/\/deb/g' /etc/apt/sources.list.d/* |  | ||||||
| apt-get update |  | ||||||
|  |  | ||||||
| apt-get install -y --no-remove \ |  | ||||||
|         cpio \ |  | ||||||
|         curl \ |  | ||||||
|         fastboot \ |  | ||||||
|         netcat-openbsd \ |  | ||||||
|         openssh-server \ |  | ||||||
|         procps \ |  | ||||||
|         python3-distutils \ |  | ||||||
|         python3-minimal \ |  | ||||||
|         python3-serial \ |  | ||||||
|         rsync \ |  | ||||||
|         snmp \ |  | ||||||
|         zstd |  | ||||||
|  |  | ||||||
| # setup SNMPv2 SMI MIB |  | ||||||
| curl -L --retry 4 -f --retry-all-errors --retry-delay 60 \ |  | ||||||
|     https://raw.githubusercontent.com/net-snmp/net-snmp/master/mibs/SNMPv2-SMI.txt \ |  | ||||||
|     -o /usr/share/snmp/mibs/SNMPv2-SMI.txt |  | ||||||
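The MIB is fetched so the net-snmp tools installed above can resolve SNMPv2-SMI names when test jobs drive SNMP-controlled power switches. Purely as an illustration, with a made-up PDU address, community string, and OID, a power-on request could look like:

# Hypothetical example: set outlet state to 1 (on) on a PDU at 10.42.0.1
snmpset -v2c -c private -r 3 -t 30 10.42.0.1 SNMPv2-SMI::enterprises.1234.1.2.3.1 i 1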
|  |  | ||||||
| . .gitlab-ci/container/baremetal_build.sh |  | ||||||
|  |  | ||||||
| mkdir -p /baremetal-files/jetson-nano/boot/ |  | ||||||
| ln -s \ |  | ||||||
|     /baremetal-files/Image \ |  | ||||||
|     /baremetal-files/tegra210-p3450-0000.dtb \ |  | ||||||
|     /baremetal-files/jetson-nano/boot/ |  | ||||||
|  |  | ||||||
| mkdir -p /baremetal-files/jetson-tk1/boot/ |  | ||||||
| ln -s \ |  | ||||||
|     /baremetal-files/zImage \ |  | ||||||
|     /baremetal-files/tegra124-jetson-tk1.dtb \ |  | ||||||
|     /baremetal-files/jetson-tk1/boot/ |  | ||||||
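A brief illustrative check, not part of the original script, that the per-board boot directories created above contain the expected kernel image and DTB symlinks:

# List the symlinks staged for the Jetson Nano and Jetson TK1 boot directories.
find /baremetal-files/jetson-nano/boot /baremetal-files/jetson-tk1/boot -maxdepth 1 -type l -ls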
Some files were not shown because too many files have changed in this diff.