Diffstat (limited to 'contrib/SDL-3.2.8/build-scripts/build-release.py')
-rwxr-xr-x  contrib/SDL-3.2.8/build-scripts/build-release.py  1556
1 file changed, 1556 insertions, 0 deletions
diff --git a/contrib/SDL-3.2.8/build-scripts/build-release.py b/contrib/SDL-3.2.8/build-scripts/build-release.py
new file mode 100755
index 0000000..f3faa06
--- /dev/null
+++ b/contrib/SDL-3.2.8/build-scripts/build-release.py
@@ -0,0 +1,1556 @@
#!/usr/bin/env python3

"""
This script is shared between SDL2, SDL3, and all satellite libraries.
Don't specialize this script for doing project-specific modifications.
Rather, modify release-info.json.
"""

import argparse
import collections
import dataclasses
from collections.abc import Callable
import contextlib
import datetime
import fnmatch
import glob
import io
import json
import logging
import multiprocessing
import os
from pathlib import Path
import platform
import re
import shlex
import shutil
import subprocess
import sys
import tarfile
import tempfile
import textwrap
import typing
import zipfile


logger = logging.getLogger(__name__)
GIT_HASH_FILENAME = ".git-hash"
REVISION_TXT = "REVISION.txt"

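# Matches imports of the MinGW runtime DLLs (libgcc*, libstdc++/libc++,
# (win)pthread); verify_mingw_library() below treats any such import as an error
# because release binaries must be self-contained.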
RE_ILLEGAL_MINGW_LIBRARIES = re.compile(r"(?:lib)?(?:gcc|(?:std)?c[+][+]|(?:win)?pthread).*", flags=re.I)


def safe_isotime_to_datetime(str_isotime: str) -> datetime.datetime:
    try:
        return datetime.datetime.fromisoformat(str_isotime)
    except ValueError:
        pass
    logger.warning("Invalid iso time: %s", str_isotime)
    if str_isotime[-6:-5] in ("+", "-"):
        # Commits can have isotime with invalid timezone offset (e.g. "2021-07-04T20:01:40+32:00")
        modified_str_isotime = str_isotime[:-6] + "+00:00"
        try:
            return datetime.datetime.fromisoformat(modified_str_isotime)
        except ValueError:
            pass
    raise ValueError(f"Invalid isotime: {str_isotime}")


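# Join archive path components with "/", skipping empty parts. Used instead of
# os.path.join so archive member names stay portable regardless of host OS.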
def arc_join(*parts: str) -> str:
    assert all(p[:1] != "/" and p[-1:] != "/" for p in parts), f"None of {parts} may start or end with '/'"
    return "/".join(p for p in parts if p)


@dataclasses.dataclass(frozen=True)
class VsArchPlatformConfig:
    arch: str
    configuration: str
    platform: str

    def extra_context(self):
        return {
            "ARCH": self.arch,
            "CONFIGURATION": self.configuration,
            "PLATFORM": self.platform,
        }


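# Context manager that temporarily changes the working directory and always
# restores the original one, even if the body raises.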
@contextlib.contextmanager
def chdir(path):
    original_cwd = os.getcwd()
    try:
        os.chdir(path)
        yield
    finally:
        os.chdir(original_cwd)


class Executer:
    def __init__(self, root: Path, dry: bool=False):
        self.root = root
        self.dry = dry

    def run(self, cmd, cwd=None, env=None):
        logger.info("Executing args=%r", cmd)
        sys.stdout.flush()
        if not self.dry:
            subprocess.check_call(cmd, cwd=cwd or self.root, env=env, text=True)

    def check_output(self, cmd, cwd=None, dry_out=None, env=None, text=True):
        logger.info("Executing args=%r", cmd)
        sys.stdout.flush()
        if self.dry:
            return dry_out
        return subprocess.check_output(cmd, cwd=cwd or self.root, env=env, text=text)


class SectionPrinter:
    @contextlib.contextmanager
    def group(self, title: str):
        print(f"{title}:")
        yield


class GitHubSectionPrinter(SectionPrinter):
    def __init__(self):
        super().__init__()
        self.in_group = False

    @contextlib.contextmanager
    def group(self, title: str):
        print(f"::group::{title}")
        assert not self.in_group, "Can enter a group only once"
        self.in_group = True
        yield
        self.in_group = False
        print("::endgroup::")


class VisualStudio:
    def __init__(self, executer: Executer, year: typing.Optional[str]=None):
        self.executer = executer
        self.vsdevcmd = self.find_vsdevcmd(year)
        self.msbuild = self.find_msbuild()

    @property
    def dry(self) -> bool:
        return self.executer.dry

    VS_YEAR_TO_VERSION = {
        "2022": 17,
        "2019": 16,
        "2017": 15,
        "2015": 14,
        "2013": 12,
    }

    def find_vsdevcmd(self, year: typing.Optional[str]=None) -> typing.Optional[Path]:
        vswhere_spec = ["-latest"]
        if year is not None:
            try:
                version = self.VS_YEAR_TO_VERSION[year]
            except KeyError:
                logger.error("Invalid Visual Studio year")
                return None
            vswhere_spec.extend(["-version", f"[{version},{version+1})"])
        vswhere_cmd = ["vswhere"] + vswhere_spec + ["-property", "installationPath"]
        vs_install_path = Path(self.executer.check_output(vswhere_cmd, dry_out="/tmp").strip())
        logger.info("VS install_path = %s", vs_install_path)
        assert vs_install_path.is_dir(), "VS installation path does not exist"
        vsdevcmd_path = vs_install_path / "Common7/Tools/vsdevcmd.bat"
        logger.info("vsdevcmd path = %s", vsdevcmd_path)
        if self.dry:
            vsdevcmd_path.parent.mkdir(parents=True, exist_ok=True)
            vsdevcmd_path.touch(exist_ok=True)
        assert vsdevcmd_path.is_file(), "vsdevcmd.bat batch file does not exist"
        return vsdevcmd_path

    def find_msbuild(self) -> typing.Optional[Path]:
        vswhere_cmd = ["vswhere", "-latest", "-requires", "Microsoft.Component.MSBuild", "-find", r"MSBuild\**\Bin\MSBuild.exe"]
        msbuild_path = Path(self.executer.check_output(vswhere_cmd, dry_out="/tmp/MSBuild.exe").strip())
        logger.info("MSBuild path = %s", msbuild_path)
        if self.dry:
            msbuild_path.parent.mkdir(parents=True, exist_ok=True)
            msbuild_path.touch(exist_ok=True)
        assert msbuild_path.is_file(), "MSBuild.exe does not exist"
        return msbuild_path

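    # vsdevcmd.bat mutates the environment of the cmd.exe session it runs in, so
    # it and MSBuild are chained with "&&" inside one generated batch file rather
    # than invoked as separate processes.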
    def build(self, arch_platform: VsArchPlatformConfig, projects: list[Path]):
        assert projects, "Need at least one project to build"

        vsdev_cmd_str = f"\"{self.vsdevcmd}\" -arch={arch_platform.arch}"
        msbuild_cmd_str = " && ".join([f"\"{self.msbuild}\" \"{project}\" /m /p:BuildInParallel=true /p:Platform={arch_platform.platform} /p:Configuration={arch_platform.configuration}" for project in projects])
        bat_contents = f"{vsdev_cmd_str} && {msbuild_cmd_str}\n"
        bat_path = Path(tempfile.gettempdir()) / "cmd.bat"
        with bat_path.open("w") as f:
            f.write(bat_contents)

        logger.info("Running cmd.exe script (%s): %s", bat_path, bat_contents)
        cmd = ["cmd.exe", "/D", "/E:ON", "/V:OFF", "/S", "/C", f"CALL {str(bat_path)}"]
        self.executer.run(cmd)


class Archiver:
    def __init__(self, zip_path: typing.Optional[Path]=None, tgz_path: typing.Optional[Path]=None, txz_path: typing.Optional[Path]=None):
        self._zip_files = []
        self._tar_files = []
        self._added_files = set()
        if zip_path:
            self._zip_files.append(zipfile.ZipFile(zip_path, "w", compression=zipfile.ZIP_DEFLATED))
        if tgz_path:
            self._tar_files.append(tarfile.open(tgz_path, "w:gz"))
        if txz_path:
            self._tar_files.append(tarfile.open(txz_path, "w:xz"))

    @property
    def added_files(self) -> set[str]:
        return self._added_files

    def add_file_data(self, arcpath: str, data: bytes, mode: int, time: datetime.datetime):
        for zf in self._zip_files:
            file_data_time = (time.year, time.month, time.day, time.hour, time.minute, time.second)
            zip_info = zipfile.ZipInfo(filename=arcpath, date_time=file_data_time)
            zip_info.external_attr = mode << 16
            zip_info.compress_type = zipfile.ZIP_DEFLATED
            zf.writestr(zip_info, data=data)
        for tf in self._tar_files:
            tar_info = tarfile.TarInfo(arcpath)
            tar_info.type = tarfile.REGTYPE
            tar_info.mode = mode
            tar_info.size = len(data)
            tar_info.mtime = int(time.timestamp())
            tf.addfile(tar_info, fileobj=io.BytesIO(data))

        self._added_files.add(arcpath)

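    # The zip format has no first-class symlink support, so for zip archives the
    # caller passes the resolved target files (files_for_zip) and their contents
    # are duplicated under the link's path; tar archives get a real SYMTYPE member.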
    def add_symlink(self, arcpath: str, target: str, time: datetime.datetime, files_for_zip):
        logger.debug("Adding symlink (target=%r) -> %s", target, arcpath)
        for zf in self._zip_files:
            file_data_time = (time.year, time.month, time.day, time.hour, time.minute, time.second)
            for f in files_for_zip:
                zip_info = zipfile.ZipInfo(filename=f["arcpath"], date_time=file_data_time)
                zip_info.external_attr = f["mode"] << 16
                zip_info.compress_type = zipfile.ZIP_DEFLATED
                zf.writestr(zip_info, data=f["data"])
        for tf in self._tar_files:
            tar_info = tarfile.TarInfo(arcpath)
            tar_info.type = tarfile.SYMTYPE
            tar_info.mode = 0o777
            tar_info.mtime = int(time.timestamp())
            tar_info.linkname = target
            tf.addfile(tar_info)

        self._added_files.update(f["arcpath"] for f in files_for_zip)

    def add_git_hash(self, arcdir: str, commit: str, time: datetime.datetime):
        arcpath = arc_join(arcdir, GIT_HASH_FILENAME)
        data = f"{commit}\n".encode()
        self.add_file_data(arcpath=arcpath, data=data, mode=0o100644, time=time)

    def add_file_path(self, arcpath: str, path: Path):
        assert path.is_file(), f"{path} should be a file"
        logger.debug("Adding %s -> %s", path, arcpath)
        for zf in self._zip_files:
            zf.write(path, arcname=arcpath)
        for tf in self._tar_files:
            tf.add(path, arcname=arcpath)

    def add_file_directory(self, arcdirpath: str, dirpath: Path):
        assert dirpath.is_dir()
        if arcdirpath and arcdirpath[-1:] != "/":
            arcdirpath += "/"
        for f in dirpath.iterdir():
            if f.is_file():
                arcpath = f"{arcdirpath}{f.name}"
                logger.debug("Adding %s to %s", f, arcpath)
                self.add_file_path(arcpath=arcpath, path=f)

    def close(self):
        # Archiver is intentionally made invalid after this function.
        # Close the archives explicitly so end-of-archive records are written,
        # rather than relying on garbage collection of the dropped references.
        for zf in self._zip_files:
            zf.close()
        del self._zip_files
        self._zip_files = None
        for tf in self._tar_files:
            tf.close()
        del self._tar_files
        self._tar_files = None

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()


class NodeInArchive:
    def __init__(self, arcpath: str, path: typing.Optional[Path]=None, data: typing.Optional[bytes]=None, mode: typing.Optional[int]=None, symtarget: typing.Optional[str]=None, time: typing.Optional[datetime.datetime]=None, directory: bool=False):
        self.arcpath = arcpath
        self.path = path
        self.data = data
        self.mode = mode
        self.symtarget = symtarget
        self.time = time
        self.directory = directory

    @classmethod
    def from_fs(cls, arcpath: str, path: Path, mode: int=0o100644, time: typing.Optional[datetime.datetime]=None) -> "NodeInArchive":
        if time is None:
            time = datetime.datetime.fromtimestamp(os.stat(path).st_mtime)
        return cls(arcpath=arcpath, path=path, mode=mode, time=time)

    @classmethod
    def from_data(cls, arcpath: str, data: bytes, time: datetime.datetime) -> "NodeInArchive":
        return cls(arcpath=arcpath, data=data, time=time, mode=0o100644)

    @classmethod
    def from_text(cls, arcpath: str, text: str, time: datetime.datetime) -> "NodeInArchive":
        return cls.from_data(arcpath=arcpath, data=text.encode(), time=time)

    @classmethod
    def from_symlink(cls, arcpath: str, symtarget: str) -> "NodeInArchive":
        return cls(arcpath=arcpath, symtarget=symtarget)

    @classmethod
    def from_directory(cls, arcpath: str) -> "NodeInArchive":
        return cls(arcpath=arcpath, directory=True)

    def __repr__(self) -> str:
        return f"<{type(self).__name__}:arcpath={self.arcpath},path='{str(self.path)}',len(data)={len(self.data) if self.data else 'n/a'},directory={self.directory},symtarget={self.symtarget}>"


def configure_file(path: Path, context: dict[str, str]) -> bytes:
    text = path.read_text()
    return configure_text(text, context=context).encode()


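# Substitute @<@KEY@>@ placeholders (this script's own configure_file-style
# markers) with values from `context`; any marker left over afterwards is an error.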
def configure_text(text: str, context: dict[str, str]) -> str:
    original_text = text
    for txt, repl in context.items():
        text = text.replace(f"@<@{txt}@>@", repl)
    success = all(thing not in text for thing in ("@<@", "@>@"))
    if not success:
        raise ValueError(f"Failed to configure {repr(original_text)}")
    return text


def configure_text_list(text_list: list[str], context: dict[str, str]) -> list[str]:
    return [configure_text(text=e, context=context) for e in text_list]


class ArchiveFileTree:
    def __init__(self):
        self._tree: dict[str, NodeInArchive] = {}

    def add_file(self, file: NodeInArchive):
        self._tree[file.arcpath] = file

    def __iter__(self) -> typing.Iterable[NodeInArchive]:
        yield from self._tree.values()

    def __contains__(self, value: str) -> bool:
        return value in self._tree

    def get_latest_mod_time(self) -> datetime.datetime:
        return max(item.time for item in self._tree.values() if item.time)

    def add_to_archiver(self, archive_base: str, archiver: Archiver):
        remaining_symlinks = set()
        added_files = dict()

        def calculate_symlink_target(s: NodeInArchive) -> str:
            dest_dir = os.path.dirname(s.arcpath)
            if dest_dir:
                dest_dir += "/"
            target = dest_dir + s.symtarget
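            # Repeatedly collapse "dir/../" segments until the relative link target
            # is normalized to a plain archive path (re.subn returns the number of
            # substitutions made; 0 means a fixed point was reached).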
            while True:
                new_target, n = re.subn(r"([^/]+/+[.]{2}/)", "", target)
                target = new_target
                if not n:
                    break
            return target

        # Add files in first pass
        for arcpath, node in self._tree.items():
            assert node is not None, f"{arcpath} -> node"
            if node.data is not None:
                archiver.add_file_data(arcpath=arc_join(archive_base, arcpath), data=node.data, time=node.time, mode=node.mode)
                assert node.arcpath is not None, f"{node=}"
                added_files[node.arcpath] = node
            elif node.path is not None:
                archiver.add_file_path(arcpath=arc_join(archive_base, arcpath), path=node.path)
                assert node.arcpath is not None, f"{node=}"
                added_files[node.arcpath] = node
            elif node.symtarget is not None:
                remaining_symlinks.add(node)
            elif node.directory:
                pass
            else:
                raise ValueError(f"Invalid Archive Node: {repr(node)}")

        assert None not in added_files

        # Resolve symlinks in second pass: zipfile does not support symlinks, so add files to zip archive
        while True:
            if not remaining_symlinks:
                break
            symlinks_this_time = set()
            extra_added_files = {}
            for symlink in remaining_symlinks:
                symlink_files_for_zip = {}
                symlink_target_path = calculate_symlink_target(symlink)
                if symlink_target_path in added_files:
                    symlink_files_for_zip[symlink.arcpath] = added_files[symlink_target_path]
                else:
                    symlink_target_path_slash = symlink_target_path + "/"
                    for added_file in added_files:
                        if added_file.startswith(symlink_target_path_slash):
                            path_in_symlink = symlink.arcpath + "/" + added_file.removeprefix(symlink_target_path_slash)
                            symlink_files_for_zip[path_in_symlink] = added_files[added_file]
                if symlink_files_for_zip:
                    symlinks_this_time.add(symlink)
                    extra_added_files.update(symlink_files_for_zip)
                    files_for_zip = [{"arcpath": f"{archive_base}/{sym_path}", "data": sym_info.data, "mode": sym_info.mode} for sym_path, sym_info in symlink_files_for_zip.items()]
                    archiver.add_symlink(arcpath=f"{archive_base}/{symlink.arcpath}", target=symlink.symtarget, time=symlink.time, files_for_zip=files_for_zip)
            # if not symlinks_this_time:
            #     logger.info("files added: %r", set(path for path in added_files.keys()))
            assert symlinks_this_time, f"No targets found for symlinks: {remaining_symlinks}"
            remaining_symlinks.difference_update(symlinks_this_time)
            added_files.update(extra_added_files)

    def add_directory_tree(self, arc_dir: str, path: Path, time: datetime.datetime):
        assert path.is_dir()
        for files_dir, _, filenames in os.walk(path):
            files_dir_path = Path(files_dir)
            rel_files_path = files_dir_path.relative_to(path)
            for filename in filenames:
                self.add_file(NodeInArchive.from_fs(arcpath=arc_join(arc_dir, str(rel_files_path), filename), path=files_dir_path / filename, time=time))

    def _add_files_recursively(self, arc_dir: str, paths: list[Path], time: datetime.datetime):
        logger.debug(f"_add_files_recursively({arc_dir=} {paths=})")
        for path in paths:
            arcpath = arc_join(arc_dir, path.name)
            if path.is_file():
                logger.debug("Adding %s as %s", path, arcpath)
                self.add_file(NodeInArchive.from_fs(arcpath=arcpath, path=path, time=time))
            elif path.is_dir():
                self._add_files_recursively(arc_dir=arc_join(arc_dir, path.name), paths=list(path.iterdir()), time=time)
            else:
                raise ValueError(f"Unsupported file type to add recursively: {path}")

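    # file_mapping maps a destination directory (relative to arc_dir) to a list of
    # glob patterns. Two pattern forms are special: "src:newname" renames a single
    # file, and a "*.in" source is run through configure_file() first.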
    def add_file_mapping(self, arc_dir: str, file_mapping: dict[str, list[str]], file_mapping_root: Path, context: dict[str, str], time: datetime.datetime):
        for meta_rel_destdir, meta_file_globs in file_mapping.items():
            rel_destdir = configure_text(meta_rel_destdir, context=context)
            assert "@" not in rel_destdir, f"archive destination should not contain an @ after configuration ({repr(meta_rel_destdir)}->{repr(rel_destdir)})"
            for meta_file_glob in meta_file_globs:
                file_glob = configure_text(meta_file_glob, context=context)
                assert "@" not in file_glob, f"archive glob should not contain an @ after configuration ({repr(meta_file_glob)}->{repr(file_glob)})"
                if ":" in file_glob:
                    original_path, new_filename = file_glob.rsplit(":", 1)
                    assert ":" not in original_path, f"Too many ':' in {repr(file_glob)}"
                    assert "/" not in new_filename, f"New filename cannot contain a '/' in {repr(file_glob)}"
                    path = file_mapping_root / original_path
                    arcpath = arc_join(arc_dir, rel_destdir, new_filename)
                    if path.suffix == ".in":
                        data = configure_file(path, context=context)
                        logger.debug("Adding processed %s -> %s", path, arcpath)
                        self.add_file(NodeInArchive.from_data(arcpath=arcpath, data=data, time=time))
                    else:
                        logger.debug("Adding %s -> %s", path, arcpath)
                        self.add_file(NodeInArchive.from_fs(arcpath=arcpath, path=path, time=time))
                else:
                    relative_file_paths = glob.glob(file_glob, root_dir=file_mapping_root)
                    assert relative_file_paths, f"Glob '{file_glob}' does not match any file"
                    self._add_files_recursively(arc_dir=arc_join(arc_dir, rel_destdir), paths=[file_mapping_root / p for p in relative_file_paths], time=time)


class SourceCollector:
    # TreeItem = collections.namedtuple("TreeItem", ("path", "mode", "data", "symtarget", "directory", "time"))
    def __init__(self, root: Path, commit: str, filter: typing.Optional[Callable[[str], bool]], executer: Executer):
        self.root = root
        self.commit = commit
        self.filter = filter
        self.executer = executer

    def get_archive_file_tree(self) -> ArchiveFileTree:
        git_archive_args = ["git", "archive", "--format=tar.gz", self.commit, "-o", "/dev/stdout"]
        logger.info("Executing args=%r", git_archive_args)
        contents_tgz = subprocess.check_output(git_archive_args, cwd=self.root, text=False)
        tar_archive = tarfile.open(fileobj=io.BytesIO(contents_tgz), mode="r:gz")
        filenames = tuple(m.name for m in tar_archive if (m.isfile() or m.issym()))

        file_times = self._get_file_times(paths=filenames)
        git_contents = ArchiveFileTree()
        for ti in tar_archive:
            if self.filter and not self.filter(ti.name):
                continue
            data = None
            symtarget = None
            directory = False
            file_time = None
            if ti.isfile():
                contents_file = tar_archive.extractfile(ti.name)
                data = contents_file.read()
                file_time = file_times[ti.name]
            elif ti.issym():
                symtarget = ti.linkname
                file_time = file_times[ti.name]
            elif ti.isdir():
                directory = True
            else:
                raise ValueError(f"{ti.name}: unknown type")
            node = NodeInArchive(arcpath=ti.name, data=data, mode=ti.mode, symtarget=symtarget, time=file_time, directory=directory)
            git_contents.add_file(node)
        return git_contents

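    # Walk "git log --name-status" output once: each "time=<ISO>" line starts a
    # new commit, and the first commit that mentions a path (newest first)
    # supplies that path's modification time.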
    def _get_file_times(self, paths: tuple[str, ...]) -> dict[str, datetime.datetime]:
        dry_out = textwrap.dedent("""\
            time=2024-03-14T15:40:25-07:00

            M\tCMakeLists.txt
        """)
        git_log_out = self.executer.check_output(["git", "log", "--name-status", '--pretty=time=%cI', self.commit], dry_out=dry_out, cwd=self.root).splitlines(keepends=False)
        current_time = None
        set_paths = set(paths)
        path_times: dict[str, datetime.datetime] = {}
        for line in git_log_out:
            if not line:
                continue
            if line.startswith("time="):
                current_time = safe_isotime_to_datetime(line.removeprefix("time="))
                continue
            mod_type, file_paths = line.split(maxsplit=1)
            assert current_time is not None
            for file_path in file_paths.split("\t"):
                if file_path in set_paths and file_path not in path_times:
                    path_times[file_path] = current_time

        # FIXME: find out why some files are not shown in "git log"
        # assert set(path_times.keys()) == set_paths
        if set(path_times.keys()) != set_paths:
            found_times = set(path_times.keys())
            paths_without_times = set_paths.difference(found_times)
            logger.warning("No times found for these paths: %s", paths_without_times)
            max_time = max(time for time in path_times.values())
            for path in paths_without_times:
                path_times[path] = max_time

        return path_times


class AndroidApiVersion:
    def __init__(self, name: str, ints: tuple[int, ...]):
        self.name = name
        self.ints = ints

    def __repr__(self) -> str:
        return f"<{self.name} ({'.'.join(str(v) for v in self.ints)})>"


class Releaser:
    def __init__(self, release_info: dict, commit: str, revision: str, root: Path, dist_path: Path, section_printer: SectionPrinter, executer: Executer, cmake_generator: str, deps_path: Path, overwrite: bool, github: bool, fast: bool):
        self.release_info = release_info
        self.project = release_info["name"]
        self.version = self.extract_sdl_version(root=root, release_info=release_info)
        self.root = root
        self.commit = commit
        self.revision = revision
        self.dist_path = dist_path
        self.section_printer = section_printer
        self.executer = executer
        self.cmake_generator = cmake_generator
        self.cpu_count = multiprocessing.cpu_count()
        self.deps_path = deps_path
        self.overwrite = overwrite
        self.github = github
        self.fast = fast
        self.arc_time = datetime.datetime.now()

        self.artifacts: dict[str, Path] = {}

    def get_context(self, extra_context: typing.Optional[dict[str, str]]=None) -> dict[str, str]:
        ctx = {
            "PROJECT_NAME": self.project,
            "PROJECT_VERSION": self.version,
            "PROJECT_COMMIT": self.commit,
            "PROJECT_REVISION": self.revision,
            "PROJECT_ROOT": str(self.root),
        }
        if extra_context:
            ctx.update(extra_context)
        return ctx

    @property
    def dry(self) -> bool:
        return self.executer.dry

    def prepare(self):
        logger.debug("Creating dist folder")
        self.dist_path.mkdir(parents=True, exist_ok=True)

    @classmethod
    def _path_filter(cls, path: str) -> bool:
        if ".gitmodules" in path:
            return True
        if path.startswith(".git"):
            return False
        return True

    @classmethod
    def _external_repo_path_filter(cls, path: str) -> bool:
        if not cls._path_filter(path):
            return False
        if path.startswith("test/") or path.startswith("tests/"):
            return False
        return True

    def create_source_archives(self) -> None:
        source_collector = SourceCollector(root=self.root, commit=self.commit, executer=self.executer, filter=self._path_filter)
        print(f"Collecting sources of {self.project}...")
        archive_tree: ArchiveFileTree = source_collector.get_archive_file_tree()
        latest_mod_time = archive_tree.get_latest_mod_time()
        archive_tree.add_file(NodeInArchive.from_text(arcpath=REVISION_TXT, text=f"{self.revision}\n", time=latest_mod_time))
        archive_tree.add_file(NodeInArchive.from_text(arcpath=GIT_HASH_FILENAME, text=f"{self.commit}\n", time=latest_mod_time))
        archive_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["source"].get("files", {}), file_mapping_root=self.root, context=self.get_context(), time=latest_mod_time)

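        # Ship autotools outputs with an mtime slightly newer than their inputs so
        # that a `make` in the unpacked tree does not try to regenerate
        # configure/Makefile.in (which would require autotools to be installed).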
        if "Makefile.am" in archive_tree:
            patched_time = latest_mod_time + datetime.timedelta(minutes=1)
            print("Makefile.am detected -> touching aclocal.m4, */Makefile.in, configure")
            for node_data in archive_tree:
                arc_name = os.path.basename(node_data.arcpath)
                arc_name_we, arc_name_ext = os.path.splitext(arc_name)
                if arc_name in ("aclocal.m4", "configure", "Makefile.in"):
                    print(f"Bumping time of {node_data.arcpath}")
                    node_data.time = patched_time

        archive_base = f"{self.project}-{self.version}"
        zip_path = self.dist_path / f"{archive_base}.zip"
        tgz_path = self.dist_path / f"{archive_base}.tar.gz"
        txz_path = self.dist_path / f"{archive_base}.tar.xz"

        logger.info("Creating zip/tgz/txz source archives ...")
        if self.dry:
            zip_path.touch()
            tgz_path.touch()
            txz_path.touch()
        else:
            with Archiver(zip_path=zip_path, tgz_path=tgz_path, txz_path=txz_path) as archiver:
                print(f"Adding source files of {self.project}...")
                archive_tree.add_to_archiver(archive_base=archive_base, archiver=archiver)

                for extra_repo in self.release_info["source"].get("extra-repos", []):
                    extra_repo_root = self.root / extra_repo
                    assert (extra_repo_root / ".git").exists(), f"{extra_repo_root} must be a git repo"
                    extra_repo_commit = self.executer.check_output(["git", "rev-parse", "HEAD"], dry_out=f"gitsha-extra-repo-{extra_repo}", cwd=extra_repo_root).strip()
                    extra_repo_source_collector = SourceCollector(root=extra_repo_root, commit=extra_repo_commit, executer=self.executer, filter=self._external_repo_path_filter)
                    print(f"Collecting sources of {extra_repo} ...")
                    extra_repo_archive_tree = extra_repo_source_collector.get_archive_file_tree()
                    print(f"Adding source files of {extra_repo} ...")
                    extra_repo_archive_tree.add_to_archiver(archive_base=f"{archive_base}/{extra_repo}", archiver=archiver)

            for file in self.release_info["source"]["checks"]:
                assert f"{archive_base}/{file}" in archiver.added_files, f"'{archive_base}/{file}' must exist"

        logger.info("... done")

        self.artifacts["src-zip"] = zip_path
        self.artifacts["src-tar-gz"] = tgz_path
        self.artifacts["src-tar-xz"] = txz_path

        if not self.dry:
            with tgz_path.open("r+b") as f:
                # Zero the embedded timestamp in the gzip'ed tarball: bytes 4-7 of
                # the gzip header are the MTIME field, and zeroing them makes the
                # .tar.gz byte-for-byte reproducible for identical input files.
                f.seek(4, 0)
                f.write(b"\x00\x00\x00\x00")

    def create_dmg(self, configuration: str="Release") -> None:
        dmg_in = self.root / self.release_info["dmg"]["path"]
        xcode_project = self.root / self.release_info["dmg"]["project"]
        assert xcode_project.is_dir(), f"{xcode_project} must be a directory"
        assert (xcode_project / "project.pbxproj").is_file(), f"{xcode_project} must contain project.pbxproj"
        if not self.fast:
            dmg_in.unlink(missing_ok=True)
        build_xcconfig = self.release_info["dmg"].get("build-xcconfig")
        if build_xcconfig:
            shutil.copy(self.root / build_xcconfig, xcode_project.parent / "build.xcconfig")

        xcode_scheme = self.release_info["dmg"].get("scheme")
        xcode_target = self.release_info["dmg"].get("target")
        assert xcode_scheme or xcode_target, "dmg needs scheme or target"
        assert not (xcode_scheme and xcode_target), "dmg cannot have both scheme and target set"
        if xcode_scheme:
            scheme_or_target = "-scheme"
            target_like = xcode_scheme
        else:
            scheme_or_target = "-target"
            target_like = xcode_target
        self.executer.run(["xcodebuild", "ONLY_ACTIVE_ARCH=NO", "-project", xcode_project, scheme_or_target, target_like, "-configuration", configuration])
        if self.dry:
            dmg_in.parent.mkdir(parents=True, exist_ok=True)
            dmg_in.touch()

        assert dmg_in.is_file(), f"{self.project}.dmg was not created by xcodebuild"

        dmg_out = self.dist_path / f"{self.project}-{self.version}.dmg"
        shutil.copy(dmg_in, dmg_out)
        self.artifacts["dmg"] = dmg_out

    @property
    def git_hash_data(self) -> bytes:
        return f"{self.commit}\n".encode()

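    # "objdump -p" on a PE binary lists its imported DLLs as "DLL Name: <x>" lines;
    # a release DLL must not import the MinGW runtime (libgcc/libstdc++/winpthread),
    # since end users cannot be expected to have those installed.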
    def verify_mingw_library(self, triplet: str, path: Path):
        objdump_output = self.executer.check_output([f"{triplet}-objdump", "-p", str(path)])
        libraries = re.findall(r"DLL Name: ([^\n]+)", objdump_output)
        logger.info("%s (%s) libraries: %r", path, triplet, libraries)
        illegal_libraries = list(filter(RE_ILLEGAL_MINGW_LIBRARIES.match, libraries))
        if illegal_libraries:
            logger.error("Detected 'illegal' libraries: %r", illegal_libraries)
            raise Exception(f"{path} links to illegal libraries: {illegal_libraries}")

    def create_mingw_archives(self) -> None:
        build_type = "Release"
        build_parent_dir = self.root / "build-mingw"
        ARCH_TO_GNU_ARCH = {
            # "arm64": "aarch64",
            "x86": "i686",
            "x64": "x86_64",
        }
        ARCH_TO_TRIPLET = {
            # "arm64": "aarch64-w64-mingw32",
            "x86": "i686-w64-mingw32",
            "x64": "x86_64-w64-mingw32",
        }

        new_env = dict(os.environ)

        cmake_prefix_paths = []
        mingw_deps_path = self.deps_path / "mingw-deps"

        if "dependencies" in self.release_info["mingw"]:
            shutil.rmtree(mingw_deps_path, ignore_errors=True)
            mingw_deps_path.mkdir()

            for triplet in ARCH_TO_TRIPLET.values():
                (mingw_deps_path / triplet).mkdir()

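            # The dependency tarballs presumably carry a single "SDL*" top-level
            # directory; strip that first path component on extraction so the files
            # land directly in the per-dependency extract directory.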
            def extract_filter(member: tarfile.TarInfo, path: str, /):
                if member.name.startswith("SDL"):
                    member.name = "/".join(Path(member.name).parts[1:])
                return member

            for dep in self.release_info.get("dependencies", {}):
                extract_path = mingw_deps_path / f"extract-{dep}"
                extract_path.mkdir()
                with chdir(extract_path):
                    tar_path = self.deps_path / glob.glob(self.release_info["mingw"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)[0]
                    logger.info("Extracting %s to %s", tar_path, mingw_deps_path)
                    assert tar_path.suffix in (".gz", ".xz")
                    with tarfile.open(tar_path, mode=f"r:{tar_path.suffix.strip('.')}") as tarf:
                        tarf.extractall(filter=extract_filter)
                    for arch, triplet in ARCH_TO_TRIPLET.items():
                        install_cmd = self.release_info["mingw"]["dependencies"][dep]["install-command"]
                        extra_configure_data = {
                            "ARCH": ARCH_TO_GNU_ARCH[arch],
                            "TRIPLET": triplet,
                            "PREFIX": str(mingw_deps_path / triplet),
                        }
                        install_cmd = configure_text(install_cmd, context=self.get_context(extra_configure_data))
                        self.executer.run(shlex.split(install_cmd), cwd=str(extract_path))

                        dep_binpath = mingw_deps_path / triplet / "bin"
                        assert dep_binpath.is_dir(), f"{dep_binpath} for PATH should exist"
                        dep_pkgconfig = mingw_deps_path / triplet / "lib/pkgconfig"
                        assert dep_pkgconfig.is_dir(), f"{dep_pkgconfig} for PKG_CONFIG_PATH should exist"

                        new_env["PATH"] = os.pathsep.join([str(dep_binpath), new_env["PATH"]])
                        new_env["PKG_CONFIG_PATH"] = str(dep_pkgconfig)
            cmake_prefix_paths.append(mingw_deps_path)

        new_env["CFLAGS"] = f"-O2 -ffile-prefix-map={self.root}=/src/{self.project}"
        new_env["CXXFLAGS"] = f"-O2 -ffile-prefix-map={self.root}=/src/{self.project}"

        assert any(system in self.release_info["mingw"] for system in ("autotools", "cmake"))
        assert not all(system in self.release_info["mingw"] for system in ("autotools", "cmake"))

        mingw_archs = set()
        arc_root = f"{self.project}-{self.version}"
        archive_file_tree = ArchiveFileTree()

        if "autotools" in self.release_info["mingw"]:
            for arch in self.release_info["mingw"]["autotools"]["archs"]:
                triplet = ARCH_TO_TRIPLET[arch]
                new_env["CC"] = f"{triplet}-gcc"
                new_env["CXX"] = f"{triplet}-g++"
                new_env["RC"] = f"{triplet}-windres"

                assert arch not in mingw_archs
                mingw_archs.add(arch)

                build_path = build_parent_dir / f"build-{triplet}"
                install_path = build_parent_dir / f"install-{triplet}"
                shutil.rmtree(install_path, ignore_errors=True)
                build_path.mkdir(parents=True, exist_ok=True)
                context = self.get_context({
                    "ARCH": arch,
                    "DEP_PREFIX": str(mingw_deps_path / triplet),
                })
                extra_args = configure_text_list(text_list=self.release_info["mingw"]["autotools"]["args"], context=context)

                with self.section_printer.group(f"Configuring MinGW {triplet} (autotools)"):
                    assert "@" not in " ".join(extra_args), f"@ should not be present in extra arguments ({extra_args})"
                    self.executer.run([
                        self.root / "configure",
                        f"--prefix={install_path}",
                        f"--includedir=${{prefix}}/include",
                        f"--libdir=${{prefix}}/lib",
                        f"--bindir=${{prefix}}/bin",
                        f"--host={triplet}",
                        f"--build=x86_64-none-linux-gnu",
                        "CFLAGS=-O2",
                        "CXXFLAGS=-O2",
                        "LDFLAGS=-Wl,-s",
                    ] + extra_args, cwd=build_path, env=new_env)
                with self.section_printer.group(f"Build MinGW {triplet} (autotools)"):
                    self.executer.run(["make", f"-j{self.cpu_count}"], cwd=build_path, env=new_env)
                with self.section_printer.group(f"Install MinGW {triplet} (autotools)"):
                    self.executer.run(["make", "install"], cwd=build_path, env=new_env)
                self.verify_mingw_library(triplet=ARCH_TO_TRIPLET[arch], path=install_path / "bin" / f"{self.project}.dll")
                archive_file_tree.add_directory_tree(arc_dir=arc_join(arc_root, triplet), path=install_path, time=self.arc_time)

                print("Recording arch-dependent extra files for MinGW development archive ...")
                extra_context = {
                    "TRIPLET": ARCH_TO_TRIPLET[arch],
                }
                archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["mingw"]["autotools"].get("files", {}), file_mapping_root=self.root, context=self.get_context(extra_context=extra_context), time=self.arc_time)

        if "cmake" in self.release_info["mingw"]:
            assert self.release_info["mingw"]["cmake"]["shared-static"] in ("args", "both")
            for arch in self.release_info["mingw"]["cmake"]["archs"]:
                triplet = ARCH_TO_TRIPLET[arch]
                new_env["CC"] = f"{triplet}-gcc"
                new_env["CXX"] = f"{triplet}-g++"
                new_env["RC"] = f"{triplet}-windres"

                assert arch not in mingw_archs
                mingw_archs.add(arch)

                context = self.get_context({
                    "ARCH": arch,
                    "DEP_PREFIX": str(mingw_deps_path / triplet),
                })
                extra_args = configure_text_list(text_list=self.release_info["mingw"]["cmake"]["args"], context=context)

                build_path = build_parent_dir / f"build-{triplet}"
                install_path = build_parent_dir / f"install-{triplet}"
                shutil.rmtree(install_path, ignore_errors=True)
                build_path.mkdir(parents=True, exist_ok=True)
                if self.release_info["mingw"]["cmake"]["shared-static"] == "args":
                    args_for_shared_static = ([], )
                elif self.release_info["mingw"]["cmake"]["shared-static"] == "both":
                    args_for_shared_static = (["-DBUILD_SHARED_LIBS=ON"], ["-DBUILD_SHARED_LIBS=OFF"])
                for arg_for_shared_static in args_for_shared_static:
                    with self.section_printer.group(f"Configuring MinGW {triplet} (CMake)"):
                        assert "@" not in " ".join(extra_args), f"@ should not be present in extra arguments ({extra_args})"
                        self.executer.run([
                            f"cmake",
                            f"-S", str(self.root), "-B", str(build_path),
                            f"-DCMAKE_BUILD_TYPE={build_type}",
                            f'''-DCMAKE_C_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
                            f'''-DCMAKE_CXX_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
                            f"-DCMAKE_PREFIX_PATH={mingw_deps_path / triplet}",
                            f"-DCMAKE_INSTALL_PREFIX={install_path}",
                            f"-DCMAKE_INSTALL_INCLUDEDIR=include",
                            f"-DCMAKE_INSTALL_LIBDIR=lib",
                            f"-DCMAKE_INSTALL_BINDIR=bin",
                            f"-DCMAKE_INSTALL_DATAROOTDIR=share",
                            f"-DCMAKE_TOOLCHAIN_FILE={self.root}/build-scripts/cmake-toolchain-mingw64-{ARCH_TO_GNU_ARCH[arch]}.cmake",
                            f"-G{self.cmake_generator}",
                        ] + extra_args + ([] if self.fast else ["--fresh"]) + arg_for_shared_static, cwd=build_path, env=new_env)
                    with self.section_printer.group(f"Build MinGW {triplet} (CMake)"):
                        self.executer.run(["cmake", "--build", str(build_path), "--verbose", "--config", build_type], cwd=build_path, env=new_env)
                    with self.section_printer.group(f"Install MinGW {triplet} (CMake)"):
                        self.executer.run(["cmake", "--install", str(build_path)], cwd=build_path, env=new_env)
                self.verify_mingw_library(triplet=ARCH_TO_TRIPLET[arch], path=install_path / "bin" / f"{self.project}.dll")
                archive_file_tree.add_directory_tree(arc_dir=arc_join(arc_root, triplet), path=install_path, time=self.arc_time)

                print("Recording arch-dependent extra files for MinGW development archive ...")
                extra_context = {
                    "TRIPLET": ARCH_TO_TRIPLET[arch],
                }
                archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["mingw"]["cmake"].get("files", {}), file_mapping_root=self.root, context=self.get_context(extra_context=extra_context), time=self.arc_time)
            print("... done")

        print("Recording extra files for MinGW development archive ...")
        archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["mingw"].get("files", {}), file_mapping_root=self.root, context=self.get_context(), time=self.arc_time)
        print("... done")

        print("Creating zip/tgz/txz development archives ...")
        zip_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.zip"
        tgz_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.tar.gz"
        txz_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.tar.xz"

        with Archiver(zip_path=zip_path, tgz_path=tgz_path, txz_path=txz_path) as archiver:
            archive_file_tree.add_to_archiver(archive_base="", archiver=archiver)
            archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time)
        print("... done")

        self.artifacts["mingw-devel-zip"] = zip_path
        self.artifacts["mingw-devel-tar-gz"] = tgz_path
        self.artifacts["mingw-devel-tar-xz"] = txz_path

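    # Pick the lowest installed Android platform SDK that still satisfies
    # android.api-minimum: building against the oldest acceptable API level keeps
    # the artifacts usable on the widest range of devices.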
    def _detect_android_api(self, android_home: str) -> typing.Optional[AndroidApiVersion]:
        platform_dirs = list(Path(p) for p in glob.glob(f"{android_home}/platforms/android-*"))
        re_platform = re.compile("^android-([0-9]+)(?:-ext([0-9]+))?$")
        platform_versions: list[AndroidApiVersion] = []
        for platform_dir in platform_dirs:
            logger.debug("Found Android Platform SDK: %s", platform_dir)
            if not (platform_dir / "android.jar").is_file():
                logger.debug("Skipping SDK, missing android.jar")
                continue
            if m := re_platform.match(platform_dir.name):
                platform_versions.append(AndroidApiVersion(name=platform_dir.name, ints=(int(m.group(1)), int(m.group(2) or 0))))
        platform_versions.sort(key=lambda v: v.ints)
        logger.info("Available platform versions: %s", platform_versions)
        platform_versions = list(filter(lambda v: v.ints >= self._android_api_minimum.ints, platform_versions))
        logger.info("Valid platform versions (>=%s): %s", self._android_api_minimum.ints, platform_versions)
        if not platform_versions:
            return None
        android_api = platform_versions[0]
        logger.info("Selected API version %s", android_api)
        return android_api

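    # Prefab (https://google.github.io/prefab/) is the package format the Android
    # Gradle Plugin understands for native dependencies inside an AAR: prefab.json
    # describes the package, module.json each module, and abi.json each binary.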
    def _get_prefab_json_text(self) -> str:
        return textwrap.dedent(f"""\
            {{
                "schema_version": 2,
                "name": "{self.project}",
                "version": "{self.version}",
                "dependencies": []
            }}
        """)

    def _get_prefab_module_json_text(self, library_name: typing.Optional[str], export_libraries: list[str]) -> str:
        for lib in export_libraries:
            assert isinstance(lib, str), f"{lib} must be a string"
        module_json_dict = {
            "export_libraries": export_libraries,
        }
        if library_name:
            module_json_dict["library_name"] = f"lib{library_name}"
        return json.dumps(module_json_dict, indent=4)

    @property
    def _android_api_minimum(self) -> AndroidApiVersion:
        value = self.release_info["android"]["api-minimum"]
        if isinstance(value, int):
            ints = (value, )
        elif isinstance(value, str):
            ints = tuple(int(v) for v in value.split("."))
        else:
            raise ValueError("Invalid android.api-minimum: must be X or X.Y")
        match len(ints):
            case 1: name = f"android-{ints[0]}"
            case 2: name = f"android-{ints[0]}-ext-{ints[1]}"
            case _: raise ValueError("Invalid android.api-minimum: must be X or X.Y")
        return AndroidApiVersion(name=name, ints=ints)

    @property
    def _android_api_target(self):
        return self.release_info["android"]["api-target"]

    @property
    def _android_ndk_minimum(self):
        return self.release_info["android"]["ndk-minimum"]

    def _get_prefab_abi_json_text(self, abi: str, cpp: bool, shared: bool) -> str:
        abi_json_dict = {
            "abi": abi,
            "api": self._android_api_minimum.ints[0],
            "ndk": self._android_ndk_minimum,
            "stl": "c++_shared" if cpp else "none",
            "static": not shared,
        }
        return json.dumps(abi_json_dict, indent=4)

    def _get_android_manifest_text(self) -> str:
        return textwrap.dedent(f"""\
            <manifest
                xmlns:android="http://schemas.android.com/apk/res/android"
                package="org.libsdl.android.{self.project}" android:versionCode="1"
                android:versionName="1.0">
                <uses-sdk android:minSdkVersion="{self._android_api_minimum.ints[0]}"
                    android:targetSdkVersion="{self._android_api_target}" />
            </manifest>
        """)

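    # Build once per ABI with the NDK's CMake toolchain, collect the per-ABI
    # libraries plus the shared headers/JARs into an .aar, then wrap that .aar
    # together with extra files into the -devel zip that gets published.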
| 982 | def create_android_archives(self, android_api: int, android_home: Path, android_ndk_home: Path) -> None: | ||
| 983 | cmake_toolchain_file = Path(android_ndk_home) / "build/cmake/android.toolchain.cmake" | ||
| 984 | if not cmake_toolchain_file.exists(): | ||
| 985 | logger.error("CMake toolchain file does not exist (%s)", cmake_toolchain_file) | ||
| 986 | raise SystemExit(1) | ||
| 987 | aar_path = self.root / "build-android" / f"{self.project}-{self.version}.aar" | ||
| 988 | android_dist_path = self.dist_path / f"{self.project}-devel-{self.version}-android.zip" | ||
| 989 | android_abis = self.release_info["android"]["abis"] | ||
| 990 | java_jars_added = False | ||
| 991 | module_data_added = False | ||
| 992 | android_deps_path = self.deps_path / "android-deps" | ||
| 993 | shutil.rmtree(android_deps_path, ignore_errors=True) | ||
| 994 | |||
| 995 | for dep, depinfo in self.release_info["android"].get("dependencies", {}).items(): | ||
| 996 | dep_devel_zip = self.deps_path / glob.glob(depinfo["artifact"], root_dir=self.deps_path)[0] | ||
| 997 | |||
| 998 | dep_extract_path = self.deps_path / f"extract/android/{dep}" | ||
| 999 | shutil.rmtree(dep_extract_path, ignore_errors=True) | ||
| 1000 | dep_extract_path.mkdir(parents=True, exist_ok=True) | ||
| 1001 | |||
| 1002 | with self.section_printer.group(f"Extracting Android dependency {dep} ({dep_devel_zip})"): | ||
| 1003 | with zipfile.ZipFile(dep_devel_zip, "r") as zf: | ||
| 1004 | zf.extractall(dep_extract_path) | ||
| 1005 | |||
| 1006 | dep_devel_aar = dep_extract_path / glob.glob("*.aar", root_dir=dep_extract_path)[0] | ||
| 1007 | self.executer.run([sys.executable, str(dep_devel_aar), "-o", str(android_deps_path)]) | ||
| 1008 | |||
| 1009 | for module_name, module_info in self.release_info["android"]["modules"].items(): | ||
| 1010 | assert "type" in module_info and module_info["type"] in ("interface", "library"), f"module {module_name} must have a valid type" | ||
| 1011 | |||
| 1012 | aar_file_tree = ArchiveFileTree() | ||
| 1013 | android_devel_file_tree = ArchiveFileTree() | ||
| 1014 | |||
| 1015 | for android_abi in android_abis: | ||
| 1016 | with self.section_printer.group(f"Building for Android {android_api} {android_abi}"): | ||
| 1017 | build_dir = self.root / "build-android" / f"{android_abi}-build" | ||
| 1018 | install_dir = self.root / "install-android" / f"{android_abi}-install" | ||
| 1019 | shutil.rmtree(install_dir, ignore_errors=True) | ||
| 1020 | assert not install_dir.is_dir(), f"{install_dir} should not exist prior to build" | ||
| 1021 | build_type = "Release" | ||
| 1022 | cmake_args = [ | ||
| 1023 | "cmake", | ||
| 1024 | "-S", str(self.root), | ||
| 1025 | "-B", str(build_dir), | ||
| 1026 | f'''-DCMAKE_C_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''', | ||
| 1027 | f'''-DCMAKE_CXX_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''', | ||
| 1028 | f"-DCMAKE_TOOLCHAIN_FILE={cmake_toolchain_file}", | ||
| 1029 | f"-DCMAKE_PREFIX_PATH={str(android_deps_path)}", | ||
| 1030 | f"-DCMAKE_FIND_ROOT_PATH_MODE_PACKAGE=BOTH", | ||
| 1031 | f"-DANDROID_HOME={android_home}", | ||
| 1032 | f"-DANDROID_PLATFORM={android_api}", | ||
| 1033 | f"-DANDROID_ABI={android_abi}", | ||
| 1034 | "-DCMAKE_POSITION_INDEPENDENT_CODE=ON", | ||
| 1035 | f"-DCMAKE_INSTALL_PREFIX={install_dir}", | ||
| 1036 | "-DCMAKE_INSTALL_INCLUDEDIR=include ", | ||
| 1037 | "-DCMAKE_INSTALL_LIBDIR=lib", | ||
| 1038 | "-DCMAKE_INSTALL_DATAROOTDIR=share", | ||
| 1039 | f"-DCMAKE_BUILD_TYPE={build_type}", | ||
| 1040 | f"-G{self.cmake_generator}", | ||
| 1041 | ] + self.release_info["android"]["cmake"]["args"] + ([] if self.fast else ["--fresh"]) | ||
| 1042 | build_args = [ | ||
| 1043 | "cmake", | ||
| 1044 | "--build", str(build_dir), | ||
| 1045 | "--verbose", | ||
| 1046 | "--config", build_type, | ||
| 1047 | ] | ||
| 1048 | install_args = [ | ||
| 1049 | "cmake", | ||
| 1050 | "--install", str(build_dir), | ||
| 1051 | "--config", build_type, | ||
| 1052 | ] | ||
| 1053 | self.executer.run(cmake_args) | ||
| 1054 | self.executer.run(build_args) | ||
| 1055 | self.executer.run(install_args) | ||
| 1056 | |||
| 1057 | for module_name, module_info in self.release_info["android"]["modules"].items(): | ||
| 1058 | arcdir_prefab_module = f"prefab/modules/{module_name}" | ||
| 1059 | if module_info["type"] == "library": | ||
| 1060 | library = install_dir / module_info["library"] | ||
| 1061 | assert library.suffix in (".so", ".a") | ||
| 1062 | assert library.is_file(), f"CMake should have built library '{library}' for module {module_name}" | ||
| 1063 | arcdir_prefab_libs = f"{arcdir_prefab_module}/libs/android.{android_abi}" | ||
| 1064 | aar_file_tree.add_file(NodeInArchive.from_fs(arcpath=f"{arcdir_prefab_libs}/{library.name}", path=library, time=self.arc_time)) | ||
| 1065 | aar_file_tree.add_file(NodeInArchive.from_text(arcpath=f"{arcdir_prefab_libs}/abi.json", text=self._get_prefab_abi_json_text(abi=android_abi, cpp=False, shared=library.suffix == ".so"), time=self.arc_time)) | ||
| 1066 | |||
| 1067 | if not module_data_added: | ||
| 1068 | library_name = None | ||
| 1069 | if module_info["type"] == "library": | ||
| 1070 | library_name = Path(module_info["library"]).stem.removeprefix("lib") | ||
| 1071 | export_libraries = module_info.get("export-libraries", []) | ||
| 1072 | aar_file_tree.add_file(NodeInArchive.from_text(arcpath=arc_join(arcdir_prefab_module, "module.json"), text=self._get_prefab_module_json_text(library_name=library_name, export_libraries=export_libraries), time=self.arc_time)) | ||
| 1073 | arcdir_prefab_include = f"prefab/modules/{module_name}/include" | ||
| 1074 | if "includes" in module_info: | ||
| 1075 | aar_file_tree.add_file_mapping(arc_dir=arcdir_prefab_include, file_mapping=module_info["includes"], file_mapping_root=install_dir, context=self.get_context(), time=self.arc_time) | ||
| 1076 | else: | ||
| 1077 | aar_file_tree.add_file(NodeInArchive.from_text(arcpath=arc_join(arcdir_prefab_include, ".keep"), text="\n", time=self.arc_time)) | ||
| 1078 | module_data_added = True | ||
| 1079 | |||
| 1080 | if not java_jars_added: | ||
| 1081 | java_jars_added = True | ||
| 1082 | if "jars" in self.release_info["android"]: | ||
| 1083 | classes_jar_path = install_dir / configure_text(text=self.release_info["android"]["jars"]["classes"], context=self.get_context()) | ||
| 1084 | sources_jar_path = install_dir / configure_text(text=self.release_info["android"]["jars"]["sources"], context=self.get_context()) | ||
| 1085 | doc_jar_path = install_dir / configure_text(text=self.release_info["android"]["jars"]["doc"], context=self.get_context()) | ||
| 1086 | assert classes_jar_path.is_file(), f"CMake should have compiled the java sources and archived them into a JAR ({classes_jar_path})" | ||
| 1087 | assert sources_jar_path.is_file(), f"CMake should have archived the java sources into a JAR ({sources_jar_path})" | ||
| 1088 | assert doc_jar_path.is_file(), f"CMake should have archived javadoc into a JAR ({doc_jar_path})" | ||
| 1089 | |||
| 1090 | aar_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes.jar", path=classes_jar_path, time=self.arc_time)) | ||
| 1091 | aar_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes-sources.jar", path=sources_jar_path, time=self.arc_time)) | ||
| 1092 | aar_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes-doc.jar", path=doc_jar_path, time=self.arc_time)) | ||
| 1093 | |||
| 1094 | assert ("jars" in self.release_info["android"] and java_jars_added) or "jars" not in self.release_info["android"], "Must have archived java JAR archives" | ||
| 1095 | |||
| 1096 | aar_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["android"]["aar-files"], file_mapping_root=self.root, context=self.get_context(), time=self.arc_time) | ||
| 1097 | |||
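| | # Top-level AAR metadata: prefab/prefab.json and AndroidManifest.xml are generated from the release info. | ||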
| 1098 | aar_file_tree.add_file(NodeInArchive.from_text(arcpath="prefab/prefab.json", text=self._get_prefab_json_text(), time=self.arc_time)) | ||
| 1099 | aar_file_tree.add_file(NodeInArchive.from_text(arcpath="AndroidManifest.xml", text=self._get_android_manifest_text(), time=self.arc_time)) | ||
| 1100 | |||
| 1101 | with Archiver(zip_path=aar_path) as archiver: | ||
| 1102 | aar_file_tree.add_to_archiver(archive_base="", archiver=archiver) | ||
| 1103 | archiver.add_git_hash(arcdir="", commit=self.commit, time=self.arc_time) | ||
| 1104 | |||
| 1105 | android_devel_file_tree.add_file(NodeInArchive.from_fs(arcpath=aar_path.name, path=aar_path)) | ||
| 1106 | android_devel_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["android"]["files"], file_mapping_root=self.root, context=self.get_context(), time=self.arc_time) | ||
| 1107 | with Archiver(zip_path=android_dist_path) as archiver: | ||
| 1108 | android_devel_file_tree.add_to_archiver(archive_base="", archiver=archiver) | ||
| 1109 | archiver.add_git_hash(arcdir="", commit=self.commit, time=self.arc_time) | ||
| 1110 | |||
| 1111 | self.artifacts["android-aar"] = android_dist_path | ||
| 1112 | |||
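| | # Resolve and download the newest release asset set of every dependency via the GitHub CLI ("gh"). | ||
| | # The embedded jq filter keeps releases whose name starts with the configured prefix and picks the newest by createdAt, e.g. (illustrative values): | ||
| | #   gh -R libsdl-org/SDL_image release list --json name,createdAt,tagName --jq '[.[]|select(.name|startswith("SDL3_image"))]|max_by(.createdAt)' | ||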
| 1113 | def download_dependencies(self): | ||
| 1114 | shutil.rmtree(self.deps_path, ignore_errors=True) | ||
| 1115 | self.deps_path.mkdir(parents=True) | ||
| 1116 | |||
| 1117 | if self.github: | ||
| 1118 | with open(os.environ["GITHUB_OUTPUT"], "a") as f: | ||
| 1119 | f.write(f"dep-path={self.deps_path.absolute()}\n") | ||
| 1120 | |||
| 1121 | for dep, depinfo in self.release_info.get("dependencies", {}).items(): | ||
| 1122 | startswith = depinfo["startswith"] | ||
| 1123 | dep_repo = depinfo["repo"] | ||
| 1124 | # FIXME: dropped "--exclude-pre-releases" | ||
| 1125 | dep_string_data = self.executer.check_output(["gh", "-R", dep_repo, "release", "list", "--exclude-drafts", "--json", "name,createdAt,tagName", "--jq", f'[.[]|select(.name|startswith("{startswith}"))]|max_by(.createdAt)']).strip() | ||
| 1126 | dep_data = json.loads(dep_string_data) | ||
| 1127 | dep_tag = dep_data["tagName"] | ||
| 1128 | dep_version = dep_data["name"] | ||
| 1129 | logger.info("Download dependency %s version %s (tag=%s) ", dep, dep_version, dep_tag) | ||
| 1130 | self.executer.run(["gh", "-R", dep_repo, "release", "download", dep_tag], cwd=self.deps_path) | ||
| 1131 | if self.github: | ||
| 1132 | with open(os.environ["GITHUB_OUTPUT"], "a") as f: | ||
| 1133 | f.write(f"dep-{dep.lower()}-version={dep_version}\n") | ||
| 1134 | |||
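| | # Sanity check: each platform section being packaged must match exactly one downloaded archive per dependency. | ||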
| 1135 | def verify_dependencies(self): | ||
| 1136 | for dep, depinfo in self.release_info.get("dependencies", {}).items(): | ||
| 1137 | if "mingw" in self.release_info: | ||
| 1138 | mingw_matches = glob.glob(self.release_info["mingw"]["dependencies"][dep]["artifact"], root_dir=self.deps_path) | ||
| 1139 | assert len(mingw_matches) == 1, f"Exactly one archive matches mingw {dep} dependency: {mingw_matches}" | ||
| 1140 | if "dmg" in self.release_info: | ||
| 1141 | dmg_matches = glob.glob(self.release_info["dmg"]["dependencies"][dep]["artifact"], root_dir=self.deps_path) | ||
| 1142 | assert len(dmg_matches) == 1, f"Exactly one archive matches dmg {dep} dependency: {dmg_matches}" | ||
| 1143 | if "msvc" in self.release_info: | ||
| 1144 | msvc_matches = glob.glob(self.release_info["msvc"]["dependencies"][dep]["artifact"], root_dir=self.deps_path) | ||
| 1145 | assert len(msvc_matches) == 1, f"Exactly one archive matches msvc {dep} dependency: {msvc_matches}" | ||
| 1146 | if "android" in self.release_info: | ||
| 1147 | android_matches = glob.glob(self.release_info["android"]["dependencies"][dep]["artifact"], root_dir=self.deps_path) | ||
| 1148 | assert len(android_matches) == 1, f"Exactly one archive matches android {dep} dependency: {android_matches}" | ||
| 1149 | |||
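| | # Translate release-info arch names into the platform names MSBuild expects (x86 -> Win32, arm64 -> ARM64). | ||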
| 1150 | @staticmethod | ||
| 1151 | def _arch_to_vs_platform(arch: str, configuration: str="Release") -> VsArchPlatformConfig: | ||
| 1152 | ARCH_TO_VS_PLATFORM = { | ||
| 1153 | "x86": VsArchPlatformConfig(arch="x86", platform="Win32", configuration=configuration), | ||
| 1154 | "x64": VsArchPlatformConfig(arch="x64", platform="x64", configuration=configuration), | ||
| 1155 | "arm64": VsArchPlatformConfig(arch="arm64", platform="ARM64", configuration=configuration), | ||
| 1156 | } | ||
| 1157 | return ARCH_TO_VS_PLATFORM[arch] | ||
| 1158 | |||
| 1159 | def build_msvc(self): | ||
| 1160 | with self.section_printer.group("Find Visual Studio"): | ||
| 1161 | vs = VisualStudio(executer=self.executer) | ||
| 1162 | for arch in self.release_info["msvc"].get("msbuild", {}).get("archs", []): | ||
| 1163 | self._build_msvc_msbuild(arch_platform=self._arch_to_vs_platform(arch=arch), vs=vs) | ||
| 1164 | if "cmake" in self.release_info["msvc"]: | ||
| 1165 | deps_path = self.root / "msvc-deps" | ||
| 1166 | shutil.rmtree(deps_path, ignore_errors=True) | ||
| 1167 | dep_roots = [] | ||
| 1168 | for dep, depinfo in self.release_info["msvc"].get("dependencies", {}).items(): | ||
| 1169 | dep_extract_path = deps_path / f"extract-{dep}" | ||
| 1170 | msvc_zip = self.deps_path / glob.glob(depinfo["artifact"], root_dir=self.deps_path)[0] | ||
| 1171 | with zipfile.ZipFile(msvc_zip, "r") as zf: | ||
| 1172 | zf.extractall(dep_extract_path) | ||
| 1173 | contents_msvc_zip = glob.glob(str(dep_extract_path / "*")) | ||
| 1174 | assert len(contents_msvc_zip) == 1, f"There must be exactly one item in the root directory of the {dep} archive" | ||
| 1175 | dep_roots.append(contents_msvc_zip[0]) | ||
| 1176 | |||
| 1177 | for arch in self.release_info["msvc"].get("cmake", {}).get("archs", []): | ||
| 1178 | self._build_msvc_cmake(arch_platform=self._arch_to_vs_platform(arch=arch), dep_roots=dep_roots) | ||
| 1179 | with self.section_printer.group("Create SDL VC development zip"): | ||
| 1180 | self._build_msvc_devel() | ||
| 1181 | |||
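| | # MSBuild flow: unpack the pre-built dependency binaries into the source tree, wipe stale | ||
| | # outputs, build the project files listed in release-info.json, then zip the results. | ||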
| 1182 | def _build_msvc_msbuild(self, arch_platform: VsArchPlatformConfig, vs: VisualStudio): | ||
| 1183 | platform_context = self.get_context(arch_platform.extra_context()) | ||
| 1184 | for dep, depinfo in self.release_info["msvc"].get("dependencies", {}).items(): | ||
| 1185 | msvc_zip = self.deps_path / glob.glob(depinfo["artifact"], root_dir=self.deps_path)[0] | ||
| 1186 | |||
| 1187 | src_globs = [configure_text(instr["src"], context=platform_context) for instr in depinfo["copy"]] | ||
| 1188 | with zipfile.ZipFile(msvc_zip, "r") as zf: | ||
| 1189 | for member in zf.namelist(): | ||
| 1190 | member_path = "/".join(Path(member).parts[1:]) | ||
| 1191 | for src_i, src_glob in enumerate(src_globs): | ||
| 1192 | if fnmatch.fnmatch(member_path, src_glob): | ||
| 1193 | dst = (self.root / configure_text(depinfo["copy"][src_i]["dst"], context=platform_context)).resolve() / Path(member_path).name | ||
| 1194 | zip_data = zf.read(member) | ||
| 1195 | if dst.exists(): | ||
| 1196 | identical = False | ||
| 1197 | if dst.is_file(): | ||
| 1198 | orig_bytes = dst.read_bytes() | ||
| 1199 | if orig_bytes == zip_data: | ||
| 1200 | identical = True | ||
| 1201 | if not identical: | ||
| 1202 | logger.warning("Extracting dependency %s, will cause %s to be overwritten", dep, dst) | ||
| 1203 | if not self.overwrite: | ||
| 1204 | raise RuntimeError("Run with --overwrite to allow overwriting") | ||
| 1205 | logger.debug("Extracting %s -> %s", member, dst) | ||
| 1206 | |||
| 1207 | dst.parent.mkdir(exist_ok=True, parents=True) | ||
| 1208 | dst.write_bytes(zip_data) | ||
| 1209 | |||
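| | # Files marked "prebuilt" ship as-is; everything else in files-lib/files-devel must be produced by the build and is deleted first (unless --fast). | ||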
| 1210 | prebuilt_paths = set(self.root / full_prebuilt_path for prebuilt_path in self.release_info["msvc"]["msbuild"].get("prebuilt", []) for full_prebuilt_path in glob.glob(configure_text(prebuilt_path, context=platform_context), root_dir=self.root)) | ||
| 1211 | msbuild_paths = set(self.root / configure_text(f, context=platform_context) for file_mapping in (self.release_info["msvc"]["msbuild"]["files-lib"], self.release_info["msvc"]["msbuild"]["files-devel"]) for files_list in file_mapping.values() for f in files_list) | ||
| 1212 | assert prebuilt_paths.issubset(msbuild_paths), "msvc.msbuild.prebuilt must be a subset of (msvc.msbuild.files-lib, msvc.msbuild.files-devel)" | ||
| 1213 | built_paths = msbuild_paths.difference(prebuilt_paths) | ||
| 1214 | logger.info("MSbuild builds these files, to be included in the package: %s", built_paths) | ||
| 1215 | if not self.fast: | ||
| 1216 | for b in built_paths: | ||
| 1217 | b.unlink(missing_ok=True) | ||
| 1218 | |||
| 1219 | rel_projects: list[str] = self.release_info["msvc"]["msbuild"]["projects"] | ||
| 1220 | projects = list(self.root / p for p in rel_projects) | ||
| 1221 | |||
| 1222 | directory_build_props_src_relpath = self.release_info["msvc"]["msbuild"].get("directory-build-props") | ||
| 1223 | for project in projects: | ||
| 1224 | dir_b_props = project.parent / "Directory.Build.props" | ||
| 1225 | dir_b_props.unlink(missing_ok=True) | ||
| 1226 | if directory_build_props_src_relpath: | ||
| 1227 | src = self.root / directory_build_props_src_relpath | ||
| 1228 | logger.debug("Copying %s -> %s", src, dir_b_props) | ||
| 1229 | shutil.copy(src=src, dst=dir_b_props) | ||
| 1230 | |||
| 1231 | with self.section_printer.group(f"Build {arch_platform.arch} VS binary"): | ||
| 1232 | vs.build(arch_platform=arch_platform, projects=projects) | ||
| 1233 | |||
| | # In dry-run mode nothing was actually built, so create empty stand-ins for the expected outputs | ||
| 1234 | if self.dry: | ||
| 1235 | for b in built_paths: | ||
| 1236 | b.parent.mkdir(parents=True, exist_ok=True) | ||
| 1237 | b.touch() | ||
| 1238 | | ||
| 1239 | for b in built_paths: | ||
| 1240 | assert b.is_file(), f"{b} has not been created" | ||
| 1243 | |||
| 1244 | zip_path = self.dist_path / f"{self.project}-{self.version}-win32-{arch_platform.arch}.zip" | ||
| 1245 | zip_path.unlink(missing_ok=True) | ||
| 1246 | |||
| 1247 | logger.info("Collecting files...") | ||
| 1248 | archive_file_tree = ArchiveFileTree() | ||
| 1249 | archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["msbuild"]["files-lib"], file_mapping_root=self.root, context=platform_context, time=self.arc_time) | ||
| 1250 | archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["files-lib"], file_mapping_root=self.root, context=platform_context, time=self.arc_time) | ||
| 1251 | |||
| 1252 | logger.info("Writing to %s", zip_path) | ||
| 1253 | with Archiver(zip_path=zip_path) as archiver: | ||
| 1254 | arc_root = f"" | ||
| 1255 | archive_file_tree.add_to_archiver(archive_base=arc_root, archiver=archiver) | ||
| 1256 | archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time) | ||
| 1257 | self.artifacts[f"VC-{arch_platform.arch}"] = zip_path | ||
| 1258 | |||
| 1259 | for p in built_paths: | ||
| 1260 | assert p.is_file(), f"{p} should exist" | ||
| 1261 | |||
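| | # Per-arch CMake builds use build-vs-<arch>/ as the build tree and build-vs-<arch>/prefix/ as the install prefix. | ||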
| 1262 | def _arch_platform_to_build_path(self, arch_platform: VsArchPlatformConfig) -> Path: | ||
| 1263 | return self.root / f"build-vs-{arch_platform.arch}" | ||
| 1264 | |||
| 1265 | def _arch_platform_to_install_path(self, arch_platform: VsArchPlatformConfig) -> Path: | ||
| 1266 | return self._arch_platform_to_build_path(arch_platform) / "prefix" | ||
| 1267 | |||
| 1268 | def _build_msvc_cmake(self, arch_platform: VsArchPlatformConfig, dep_roots: list[Path]): | ||
| 1269 | build_path = self._arch_platform_to_build_path(arch_platform) | ||
| 1270 | install_path = self._arch_platform_to_install_path(arch_platform) | ||
| 1271 | platform_context = self.get_context(extra_context=arch_platform.extra_context()) | ||
| 1272 | |||
| 1273 | build_type = "Release" | ||
| 1274 | extra_context = { | ||
| 1275 | "ARCH": arch_platform.arch, | ||
| 1276 | "PLATFORM": arch_platform.platform, | ||
| 1277 | } | ||
| 1278 | |||
| 1279 | built_paths = set(install_path / configure_text(f, context=platform_context) for file_mapping in (self.release_info["msvc"]["cmake"]["files-lib"], self.release_info["msvc"]["cmake"]["files-devel"]) for files_list in file_mapping.values() for f in files_list) | ||
| 1280 | logger.info("CMake builds these files, to be included in the package: %s", built_paths) | ||
| 1281 | if not self.fast: | ||
| 1282 | for b in built_paths: | ||
| 1283 | b.unlink(missing_ok=True) | ||
| 1284 | |||
| 1285 | shutil.rmtree(install_path, ignore_errors=True) | ||
| 1286 | build_path.mkdir(parents=True, exist_ok=True) | ||
| 1287 | with self.section_printer.group(f"Configure VC CMake project for {arch_platform.arch}"): | ||
| 1288 | self.executer.run([ | ||
| 1289 | "cmake", "-S", str(self.root), "-B", str(build_path), | ||
| 1290 | "-A", arch_platform.platform, | ||
| 1291 | "-DCMAKE_INSTALL_BINDIR=bin", | ||
| 1292 | "-DCMAKE_INSTALL_DATAROOTDIR=share", | ||
| 1293 | "-DCMAKE_INSTALL_INCLUDEDIR=include", | ||
| 1294 | "-DCMAKE_INSTALL_LIBDIR=lib", | ||
| 1295 | f"-DCMAKE_BUILD_TYPE={build_type}", | ||
| 1296 | f"-DCMAKE_INSTALL_PREFIX={install_path}", | ||
| 1297 | # MSVC debug information format flags are selected by an abstraction | ||
| 1298 | "-DCMAKE_POLICY_DEFAULT_CMP0141=NEW", | ||
| 1299 | # MSVC debug information format | ||
| 1300 | "-DCMAKE_MSVC_DEBUG_INFORMATION_FORMAT=ProgramDatabase", | ||
| 1301 | # Linker flags for executables | ||
| 1302 | "-DCMAKE_EXE_LINKER_FLAGS=-INCREMENTAL:NO -DEBUG -OPT:REF -OPT:ICF", | ||
| 1303 | # Linker flag for shared libraries | ||
| 1304 | "-DCMAKE_SHARED_LINKER_FLAGS=-INCREMENTAL:NO -DEBUG -OPT:REF -OPT:ICF", | ||
| 1305 | # MSVC runtime library flags are selected by an abstraction | ||
| 1306 | "-DCMAKE_POLICY_DEFAULT_CMP0091=NEW", | ||
| 1307 | # Use statically linked runtime (-MT) (ideally, should be "MultiThreaded$<$<CONFIG:Debug>:Debug>") | ||
| 1308 | "-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded", | ||
| 1309 | f"-DCMAKE_PREFIX_PATH={';'.join(str(s) for s in dep_roots)}", | ||
| 1310 | ] + self.release_info["msvc"]["cmake"]["args"] + ([] if self.fast else ["--fresh"])) | ||
| 1311 | |||
| 1312 | with self.section_printer.group(f"Build VC CMake project for {arch_platform.arch}"): | ||
| 1313 | self.executer.run(["cmake", "--build", str(build_path), "--verbose", "--config", build_type]) | ||
| 1314 | with self.section_printer.group(f"Install VC CMake project for {arch_platform.arch}"): | ||
| 1315 | self.executer.run(["cmake", "--install", str(build_path), "--config", build_type]) | ||
| 1316 | |||
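| | # As in the MSBuild path, dry runs fake the expected outputs so packaging can proceed. | ||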
| 1317 | if self.dry: | ||
| 1318 | for b in built_paths: | ||
| 1319 | b.parent.mkdir(parents=True, exist_ok=True) | ||
| 1320 | b.touch() | ||
| 1321 | |||
| 1322 | zip_path = self.dist_path / f"{self.project}-{self.version}-win32-{arch_platform.arch}.zip" | ||
| 1323 | zip_path.unlink(missing_ok=True) | ||
| 1324 | |||
| 1325 | logger.info("Collecting files...") | ||
| 1326 | archive_file_tree = ArchiveFileTree() | ||
| 1327 | archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["cmake"]["files-lib"], file_mapping_root=install_path, context=platform_context, time=self.arc_time) | ||
| 1328 | archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["files-lib"], file_mapping_root=self.root, context=self.get_context(extra_context=extra_context), time=self.arc_time) | ||
| 1329 | |||
| 1330 | logger.info("Creating %s", zip_path) | ||
| 1331 | with Archiver(zip_path=zip_path) as archiver: | ||
| 1332 | arc_root = f"" | ||
| 1333 | archive_file_tree.add_to_archiver(archive_base=arc_root, archiver=archiver) | ||
| 1334 | archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time) | ||
| 1335 | |||
| 1336 | for p in built_paths: | ||
| 1337 | assert p.is_file(), f"{p} should exist" | ||
| 1338 | |||
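| | # The devel zip aggregates the per-arch files-devel mappings under a common <project>-<version>/ root. | ||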
| 1339 | def _build_msvc_devel(self) -> None: | ||
| 1340 | zip_path = self.dist_path / f"{self.project}-devel-{self.version}-VC.zip" | ||
| 1341 | arc_root = f"{self.project}-{self.version}" | ||
| 1342 | |||
| 1343 | def copy_files_devel(ctx): | ||
| 1344 | archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["msvc"]["files-devel"], file_mapping_root=self.root, context=ctx, time=self.arc_time) | ||
| 1345 | | ||
| 1347 | logger.info("Collecting files...") | ||
| 1348 | archive_file_tree = ArchiveFileTree() | ||
| 1349 | if "msbuild" in self.release_info["msvc"]: | ||
| 1350 | for arch in self.release_info["msvc"]["msbuild"]["archs"]: | ||
| 1351 | arch_platform = self._arch_to_vs_platform(arch=arch) | ||
| 1352 | platform_context = self.get_context(arch_platform.extra_context()) | ||
| 1353 | archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["msvc"]["msbuild"]["files-devel"], file_mapping_root=self.root, context=platform_context, time=self.arc_time) | ||
| 1354 | copy_files_devel(ctx=platform_context) | ||
| 1355 | if "cmake" in self.release_info["msvc"]: | ||
| 1356 | for arch in self.release_info["msvc"]["cmake"]["archs"]: | ||
| 1357 | arch_platform = self._arch_to_vs_platform(arch=arch) | ||
| 1358 | platform_context = self.get_context(arch_platform.extra_context()) | ||
| 1359 | archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["msvc"]["cmake"]["files-devel"], file_mapping_root=self._arch_platform_to_install_path(arch_platform), context=platform_context, time=self.arc_time) | ||
| 1360 | copy_files_devel(ctx=platform_context) | ||
| 1361 | |||
| 1362 | with Archiver(zip_path=zip_path) as archiver: | ||
| 1363 | archive_file_tree.add_to_archiver(archive_base="", archiver=archiver) | ||
| 1364 | archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time) | ||
| 1365 | self.artifacts["VC-devel"] = zip_path | ||
| 1366 | |||
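| | # Extract the version from a project header using the regexes in release-info.json; a "version" block might look like (hypothetical values): | ||
| | #   { "file": "include/SDL3/SDL_version.h", "re_major": "^#define SDL_MAJOR_VERSION\\s+([0-9]+)$", ... } | ||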
| 1367 | @classmethod | ||
| 1368 | def extract_sdl_version(cls, root: Path, release_info: dict) -> str: | ||
| 1369 | with open(root / release_info["version"]["file"], "r") as f: | ||
| 1370 | text = f.read() | ||
| 1371 | major = next(re.finditer(release_info["version"]["re_major"], text, flags=re.M)).group(1) | ||
| 1372 | minor = next(re.finditer(release_info["version"]["re_minor"], text, flags=re.M)).group(1) | ||
| 1373 | micro = next(re.finditer(release_info["version"]["re_micro"], text, flags=re.M)).group(1) | ||
| 1374 | return f"{major}.{minor}.{micro}" | ||
| 1375 | |||
| 1376 | |||
| 1377 | def main(argv=None) -> int: | ||
| 1378 | if sys.version_info < (3, 11): | ||
| 1379 | logger.error("This script needs at least Python 3.11") | ||
| 1380 | return 1 | ||
| 1381 | |||
| 1382 | parser = argparse.ArgumentParser(allow_abbrev=False, description="Create SDL release artifacts") | ||
| 1383 | parser.add_argument("--root", metavar="DIR", type=Path, default=Path(__file__).absolute().parents[1], help="Root of project") | ||
| 1384 | parser.add_argument("--release-info", metavar="JSON", dest="path_release_info", type=Path, default=Path(__file__).absolute().parent / "release-info.json", help="Path of release-info.json") | ||
| 1385 | parser.add_argument("--dependency-folder", metavar="FOLDER", dest="deps_path", type=Path, default="deps", help="Directory containing pre-built archives of dependencies (will be removed when downloading archives)") | ||
| 1386 | parser.add_argument("--out", "-o", metavar="DIR", dest="dist_path", type=Path, default="dist", help="Output directory") | ||
| 1387 | parser.add_argument("--github", action="store_true", help="Script is running on a GitHub runner") | ||
| 1388 | parser.add_argument("--commit", default="HEAD", help="Git commit/tag of which a release should be created") | ||
| 1389 | parser.add_argument("--actions", choices=["download", "source", "android", "mingw", "msvc", "dmg"], required=True, nargs="+", dest="actions", help="What to do?") | ||
| 1390 | parser.set_defaults(loglevel=logging.INFO) | ||
| 1391 | parser.add_argument('--vs-year', dest="vs_year", help="Visual Studio year") | ||
| 1392 | parser.add_argument('--android-api', dest="android_api", help="Android API version") | ||
| 1393 | parser.add_argument('--android-home', dest="android_home", default=os.environ.get("ANDROID_HOME"), help="Android Home folder") | ||
| 1394 | parser.add_argument('--android-ndk-home', dest="android_ndk_home", default=os.environ.get("ANDROID_NDK_HOME"), help="Android NDK Home folder") | ||
| 1395 | parser.add_argument('--cmake-generator', dest="cmake_generator", default="Ninja", help="CMake Generator") | ||
| 1396 | parser.add_argument('--debug', action='store_const', const=logging.DEBUG, dest="loglevel", help="Print script debug information") | ||
| 1397 | parser.add_argument('--dry-run', action='store_true', dest="dry", help="Don't execute anything") | ||
| 1398 | parser.add_argument('--force', action='store_true', dest="force", help="Ignore a non-clean git tree") | ||
| 1399 | parser.add_argument('--overwrite', action='store_true', dest="overwrite", help="Allow overwriting existing files when extracting dependencies") | ||
| 1400 | parser.add_argument('--fast', action='store_true', dest="fast", help="Don't do a rebuild") | ||
| 1401 | |||
| 1402 | args = parser.parse_args(argv) | ||
| 1403 | logging.basicConfig(level=args.loglevel, format='[%(levelname)s] %(message)s') | ||
| 1404 | args.deps_path = args.deps_path.absolute() | ||
| 1405 | args.dist_path = args.dist_path.absolute() | ||
| 1406 | args.root = args.root.absolute() | ||
| 1408 | if args.dry: | ||
| 1409 | args.dist_path = args.dist_path / "dry" | ||
| 1410 | |||
| 1411 | if args.github: | ||
| 1412 | section_printer: SectionPrinter = GitHubSectionPrinter() | ||
| 1413 | else: | ||
| 1414 | section_printer = SectionPrinter() | ||
| 1415 | |||
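| | # GitHub Actions passes step outputs through the file named by GITHUB_OUTPUT; point it at a scratch file when --github is used outside a real runner. | ||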
| 1416 | if args.github and "GITHUB_OUTPUT" not in os.environ: | ||
| 1417 | os.environ["GITHUB_OUTPUT"] = "/tmp/github_output.txt" | ||
| 1418 | |||
| 1419 | executer = Executer(root=args.root, dry=args.dry) | ||
| 1420 | |||
| 1421 | root_git_hash_path = args.root / GIT_HASH_FILENAME | ||
| 1422 | root_is_maybe_archive = root_git_hash_path.is_file() | ||
| 1423 | if root_is_maybe_archive: | ||
| 1424 | logger.warning("%s detected: Building from archive", GIT_HASH_FILENAME) | ||
| 1425 | archive_commit = root_git_hash_path.read_text().strip() | ||
| 1426 | if args.commit != archive_commit: | ||
| 1427 | logger.warning("Commit argument is %s, but archive commit is %s. Using %s.", args.commit, archive_commit, archive_commit) | ||
| 1428 | args.commit = archive_commit | ||
| 1429 | revision = (args.root / REVISION_TXT).read_text().strip() | ||
| 1430 | else: | ||
| 1431 | args.commit = executer.check_output(["git", "rev-parse", args.commit], dry_out="e5812a9fd2cda317b503325a702ba3c1c37861d9").strip() | ||
| 1432 | revision = executer.check_output(["git", "describe", "--always", "--tags", "--long", args.commit], dry_out="preview-3.1.3-96-g9512f2144").strip() | ||
| 1433 | logger.info("Using commit %s", args.commit) | ||
| 1434 | |||
| 1435 | try: | ||
| 1436 | with args.path_release_info.open() as f: | ||
| 1437 | release_info = json.load(f) | ||
| 1438 | except FileNotFoundError: | ||
| 1439 | logger.error(f"Could not find {args.path_release_info}") | ||
| | return 1 | ||
| 1440 | |||
| 1441 | releaser = Releaser( | ||
| 1442 | release_info=release_info, | ||
| 1443 | commit=args.commit, | ||
| 1444 | revision=revision, | ||
| 1445 | root=args.root, | ||
| 1446 | dist_path=args.dist_path, | ||
| 1447 | executer=executer, | ||
| 1448 | section_printer=section_printer, | ||
| 1449 | cmake_generator=args.cmake_generator, | ||
| 1450 | deps_path=args.deps_path, | ||
| 1451 | overwrite=args.overwrite, | ||
| 1452 | github=args.github, | ||
| 1453 | fast=args.fast, | ||
| 1454 | ) | ||
| 1455 | |||
| 1456 | if root_is_maybe_archive: | ||
| 1457 | logger.warning("Building from archive. Skipping clean git tree check.") | ||
| 1458 | else: | ||
| 1459 | porcelain_status = executer.check_output(["git", "status", "--ignored", "--porcelain"], dry_out="\n").strip() | ||
| 1460 | if porcelain_status: | ||
| 1461 | print(porcelain_status) | ||
| 1462 | logger.warning("The tree is dirty! Do not publish any generated artifacts!") | ||
| 1463 | if not args.force: | ||
| 1464 | raise Exception("The git repo contains modified and/or non-committed files. Run with --force to ignore.") | ||
| 1465 | |||
| 1466 | if args.fast: | ||
| 1467 | logger.warning("Doing fast build! Do not publish generated artifacts!") | ||
| 1468 | |||
| 1469 | with section_printer.group("Arguments"): | ||
| 1470 | print(f"project = {releaser.project}") | ||
| 1471 | print(f"version = {releaser.version}") | ||
| 1472 | print(f"revision = {revision}") | ||
| 1473 | print(f"commit = {args.commit}") | ||
| 1474 | print(f"out = {args.dist_path}") | ||
| 1475 | print(f"actions = {args.actions}") | ||
| 1476 | print(f"dry = {args.dry}") | ||
| 1477 | print(f"force = {args.force}") | ||
| 1478 | print(f"overwrite = {args.overwrite}") | ||
| 1479 | print(f"cmake_generator = {args.cmake_generator}") | ||
| 1480 | |||
| 1481 | releaser.prepare() | ||
| 1482 | |||
| 1483 | if "download" in args.actions: | ||
| 1484 | releaser.download_dependencies() | ||
| 1485 | |||
| 1486 | if set(args.actions).intersection({"msvc", "mingw", "android"}): | ||
| 1487 | print("Verifying presence of dependencies (run 'download' action to download) ...") | ||
| 1488 | releaser.verify_dependencies() | ||
| 1489 | print("... done") | ||
| 1490 | |||
| 1491 | if "source" in args.actions: | ||
| 1492 | if root_is_maybe_archive: | ||
| 1493 | raise Exception("Cannot build source archive from source archive") | ||
| 1494 | with section_printer.group("Create source archives"): | ||
| 1495 | releaser.create_source_archives() | ||
| 1496 | |||
| 1497 | if "dmg" in args.actions: | ||
| 1498 | if platform.system() != "Darwin" and not args.dry: | ||
| 1499 | parser.error("framework artifact(s) can only be built on Darwin") | ||
| 1500 | |||
| 1501 | releaser.create_dmg() | ||
| 1502 | |||
| 1503 | if "msvc" in args.actions: | ||
| 1504 | if platform.system() != "Windows" and not args.dry: | ||
| 1505 | parser.error("msvc artifact(s) can only be built on Windows") | ||
| 1506 | releaser.build_msvc() | ||
| 1507 | |||
| 1508 | if "mingw" in args.actions: | ||
| 1509 | releaser.create_mingw_archives() | ||
| 1510 | |||
| 1511 | if "android" in args.actions: | ||
| 1512 | if args.android_home is None or not Path(args.android_home).is_dir(): | ||
| 1513 | parser.error("Invalid $ANDROID_HOME or --android-home: must be a directory containing the Android SDK") | ||
| 1514 | if args.android_ndk_home is None or not Path(args.android_ndk_home).is_dir(): | ||
| 1515 | parser.error("Invalid $ANDROID_NDK_HOME or --android_ndk_home: must be a directory containing the Android NDK") | ||
| 1516 | if args.android_api is None: | ||
| 1517 | with section_printer.group("Detect Android APIS"): | ||
| 1518 | args.android_api = releaser._detect_android_api(android_home=args.android_home) | ||
| 1519 | else: | ||
| 1520 | try: | ||
| 1521 | android_api_ints = tuple(int(v) for v in args.android_api.split(".")) | ||
| 1522 | match len(android_api_ints): | ||
| 1523 | case 1: android_api_name = f"android-{android_api_ints[0]}" | ||
| 1524 | case 2: android_api_name = f"android-{android_api_ints[0]}-ext-{android_api_ints[1]}" | ||
| 1525 | case _: raise ValueError | ||
| 1526 | except ValueError: | ||
| | # parser.error() exits here; logging alone would fall through to an undefined android_api_ints below | ||
| 1527 | parser.error("Invalid --android-api, must be an 'X' or 'X.Y' version") | ||
| 1528 | args.android_api = AndroidApiVersion(ints=android_api_ints, name=android_api_name) | ||
| 1529 | if args.android_api is None: | ||
| 1530 | parser.error("Invalid --android-api, and/or could not be detected") | ||
| 1531 | android_api_path = Path(args.android_home) / f"platforms/{args.android_api.name}" | ||
| 1532 | if not android_api_path.is_dir(): | ||
| 1533 | parser.error(f"Android API directory does not exist ({android_api_path})") | ||
| 1534 | with section_printer.group("Android arguments"): | ||
| 1535 | print(f"android_home = {args.android_home}") | ||
| 1536 | print(f"android_ndk_home = {args.android_ndk_home}") | ||
| 1537 | print(f"android_api = {args.android_api}") | ||
| 1538 | releaser.create_android_archives( | ||
| 1539 | android_api=args.android_api.ints[0], | ||
| 1540 | android_home=args.android_home, | ||
| 1541 | android_ndk_home=args.android_ndk_home, | ||
| 1542 | ) | ||
| 1543 | with section_printer.group("Summary"): | ||
| 1544 | print(f"artifacts = {releaser.artifacts}") | ||
| 1545 | |||
| 1546 | if args.github: | ||
| 1547 | with open(os.environ["GITHUB_OUTPUT"], "a") as f: | ||
| 1548 | f.write(f"project={releaser.project}\n") | ||
| 1549 | f.write(f"version={releaser.version}\n") | ||
| 1550 | for k, v in releaser.artifacts.items(): | ||
| 1551 | f.write(f"{k}={v.name}\n") | ||
| 1552 | return 0 | ||
| 1553 | |||
| 1554 | |||
| 1555 | if __name__ == "__main__": | ||
| 1556 | raise SystemExit(main()) | ||
