#!/usr/bin/env python
"""
This script is shared between SDL2, SDL2_image, SDL2_mixer and SDL2_ttf.
Don't specialize this script for doing project-specific modifications.
Rather, modify release-info.json.
"""
  7. import argparse
  8. import collections
  9. from collections.abc import Callable
  10. import contextlib
  11. import datetime
  12. import fnmatch
  13. import glob
  14. import io
  15. import json
  16. import logging
  17. import multiprocessing
  18. import os
  19. from pathlib import Path
  20. import platform
  21. import re
  22. import shutil
  23. import subprocess
  24. import sys
  25. import tarfile
  26. import tempfile
  27. import textwrap
  28. import typing
  29. import zipfile
# Module-wide logger; handlers/levels are configured by the script entry point.
logger = logging.getLogger(__name__)
# Name of the marker file that embeds the built git revision into archives.
GIT_HASH_FILENAME = ".git-hash"
  32. def safe_isotime_to_datetime(str_isotime: str) -> datetime.datetime:
  33. try:
  34. return datetime.datetime.fromisoformat(str_isotime)
  35. except ValueError:
  36. pass
  37. logger.warning("Invalid iso time: %s", str_isotime)
  38. if str_isotime[-6:-5] in ("+", "-"):
  39. # Commits can have isotime with invalid timezone offset (e.g. "2021-07-04T20:01:40+32:00")
  40. modified_str_isotime = str_isotime[:-6] + "+00:00"
  41. try:
  42. return datetime.datetime.fromisoformat(modified_str_isotime)
  43. except ValueError:
  44. pass
  45. raise ValueError(f"Invalid isotime: {str_isotime}")
  46. class VsArchPlatformConfig:
  47. def __init__(self, arch: str, platform: str, configuration: str):
  48. self.arch = arch
  49. self.platform = platform
  50. self.configuration = configuration
  51. def configure(self, s: str) -> str:
  52. return s.replace("@ARCH@", self.arch).replace("@PLATFORM@", self.platform).replace("@CONFIGURATION@", self.configuration)
  53. @contextlib.contextmanager
  54. def chdir(path):
  55. original_cwd = os.getcwd()
  56. try:
  57. os.chdir(path)
  58. yield
  59. finally:
  60. os.chdir(original_cwd)
  61. class Executer:
  62. def __init__(self, root: Path, dry: bool=False):
  63. self.root = root
  64. self.dry = dry
  65. def run(self, cmd, cwd=None, env=None):
  66. logger.info("Executing args=%r", cmd)
  67. sys.stdout.flush()
  68. if not self.dry:
  69. subprocess.run(cmd, check=True, cwd=cwd or self.root, env=env, text=True)
  70. def check_output(self, cmd, cwd=None, dry_out=None, env=None, text=True):
  71. logger.info("Executing args=%r", cmd)
  72. sys.stdout.flush()
  73. if self.dry:
  74. return dry_out
  75. return subprocess.check_output(cmd, cwd=cwd or self.root, env=env, text=text)
  76. class SectionPrinter:
  77. @contextlib.contextmanager
  78. def group(self, title: str):
  79. print(f"{title}:")
  80. yield
  81. class GitHubSectionPrinter(SectionPrinter):
  82. def __init__(self):
  83. super().__init__()
  84. self.in_group = False
  85. @contextlib.contextmanager
  86. def group(self, title: str):
  87. print(f"::group::{title}")
  88. assert not self.in_group, "Can enter a group only once"
  89. self.in_group = True
  90. yield
  91. self.in_group = False
  92. print("::endgroup::")
class VisualStudio:
    """Locates a Visual Studio installation (via vswhere) and drives MSBuild."""

    def __init__(self, executer: Executer, year: typing.Optional[str]=None):
        self.executer = executer
        self.vsdevcmd = self.find_vsdevcmd(year)
        self.msbuild = self.find_msbuild()

    @property
    def dry(self) -> bool:
        # Dry-run state is owned by the executer.
        return self.executer.dry

    # Visual Studio product year -> internal major version number, as
    # accepted by vswhere's "-version" range argument.
    VS_YEAR_TO_VERSION = {
        "2022": 17,
        "2019": 16,
        "2017": 15,
        "2015": 14,
        "2013": 12,
    }

    def find_vsdevcmd(self, year: typing.Optional[str]=None) -> typing.Optional[Path]:
        """Return the path of vsdevcmd.bat, or None for an unknown *year*.

        Without *year*, the latest installed Visual Studio is used.
        """
        vswhere_spec = ["-latest"]
        if year is not None:
            try:
                version = self.VS_YEAR_TO_VERSION[year]
            except KeyError:
                logger.error("Invalid Visual Studio year")
                return None
            # Half-open range "[N,N+1)" pins exactly this major version.
            vswhere_spec.extend(["-version", f"[{version},{version+1})"])
        vswhere_cmd = ["vswhere"] + vswhere_spec + ["-property", "installationPath"]
        vs_install_path = Path(self.executer.check_output(vswhere_cmd, dry_out="/tmp").strip())
        logger.info("VS install_path = %s", vs_install_path)
        assert vs_install_path.is_dir(), "VS installation path does not exist"
        vsdevcmd_path = vs_install_path / "Common7/Tools/vsdevcmd.bat"
        logger.info("vsdevcmd path = %s", vsdevcmd_path)
        if self.dry:
            # Dry runs fabricate the file so the assertion below still holds.
            vsdevcmd_path.parent.mkdir(parents=True, exist_ok=True)
            vsdevcmd_path.touch(exist_ok=True)
        assert vsdevcmd_path.is_file(), "vsdevcmd.bat batch file does not exist"
        return vsdevcmd_path

    def find_msbuild(self) -> typing.Optional[Path]:
        """Return the path of MSBuild.exe from the latest VS installation."""
        vswhere_cmd = ["vswhere", "-latest", "-requires", "Microsoft.Component.MSBuild", "-find", r"MSBuild\**\Bin\MSBuild.exe"]
        msbuild_path = Path(self.executer.check_output(vswhere_cmd, dry_out="/tmp/MSBuild.exe").strip())
        logger.info("MSBuild path = %s", msbuild_path)
        if self.dry:
            # Same dry-run fabrication as in find_vsdevcmd.
            msbuild_path.parent.mkdir(parents=True, exist_ok=True)
            msbuild_path.touch(exist_ok=True)
        assert msbuild_path.is_file(), "MSBuild.exe does not exist"
        return msbuild_path

    def build(self, arch_platform: VsArchPlatformConfig, projects: list[Path]):
        """Build all *projects* with MSBuild inside a vsdevcmd environment."""
        assert projects, "Need at least one project to build"
        vsdev_cmd_str = f"\"{self.vsdevcmd}\" -arch={arch_platform.arch}"
        msbuild_cmd_str = " && ".join([f"\"{self.msbuild}\" \"{project}\" /m /p:BuildInParallel=true /p:Platform={arch_platform.platform} /p:Configuration={arch_platform.configuration}" for project in projects])
        # vsdevcmd.bat mutates the shell environment, so both commands are
        # chained inside a single cmd.exe batch file.
        bat_contents = f"{vsdev_cmd_str} && {msbuild_cmd_str}\n"
        bat_path = Path(tempfile.gettempdir()) / "cmd.bat"
        with bat_path.open("w") as f:
            f.write(bat_contents)
        logger.info("Running cmd.exe script (%s): %s", bat_path, bat_contents)
        cmd = ["cmd.exe", "/D", "/E:ON", "/V:OFF", "/S", "/C", f"CALL {str(bat_path)}"]
        self.executer.run(cmd)
  148. class Archiver:
  149. def __init__(self, zip_path: typing.Optional[Path]=None, tgz_path: typing.Optional[Path]=None, txz_path: typing.Optional[Path]=None):
  150. self._zip_files = []
  151. self._tar_files = []
  152. self._added_files = set()
  153. if zip_path:
  154. self._zip_files.append(zipfile.ZipFile(zip_path, "w", compression=zipfile.ZIP_DEFLATED))
  155. if tgz_path:
  156. self._tar_files.append(tarfile.open(tgz_path, "w:gz"))
  157. if txz_path:
  158. self._tar_files.append(tarfile.open(txz_path, "w:xz"))
  159. @property
  160. def added_files(self) -> set[str]:
  161. return self._added_files
  162. def add_file_data(self, arcpath: str, data: bytes, mode: int, time: datetime.datetime):
  163. for zf in self._zip_files:
  164. file_data_time = (time.year, time.month, time.day, time.hour, time.minute, time.second)
  165. zip_info = zipfile.ZipInfo(filename=arcpath, date_time=file_data_time)
  166. zip_info.external_attr = mode << 16
  167. zip_info.compress_type = zipfile.ZIP_DEFLATED
  168. zf.writestr(zip_info, data=data)
  169. for tf in self._tar_files:
  170. tar_info = tarfile.TarInfo(arcpath)
  171. tar_info.type = tarfile.REGTYPE
  172. tar_info.mode = mode
  173. tar_info.size = len(data)
  174. tar_info.mtime = int(time.timestamp())
  175. tf.addfile(tar_info, fileobj=io.BytesIO(data))
  176. self._added_files.add(arcpath)
  177. def add_symlink(self, arcpath: str, target: str, time: datetime.datetime, files_for_zip):
  178. for zf in self._zip_files:
  179. file_data_time = (time.year, time.month, time.day, time.hour, time.minute, time.second)
  180. for f in files_for_zip:
  181. zip_info = zipfile.ZipInfo(filename=f["arcpath"], date_time=file_data_time)
  182. zip_info.external_attr = f["mode"] << 16
  183. zip_info.compress_type = zipfile.ZIP_DEFLATED
  184. zf.writestr(zip_info, data=f["data"])
  185. for tf in self._tar_files:
  186. tar_info = tarfile.TarInfo(arcpath)
  187. tar_info.type = tarfile.SYMTYPE
  188. tar_info.mode = 0o777
  189. tar_info.mtime = int(time.timestamp())
  190. tar_info.linkname = target
  191. tf.addfile(tar_info)
  192. self._added_files.update(f["arcpath"] for f in files_for_zip)
  193. def add_git_hash(self, commit: str, arcdir: typing.Optional[str]=None, time: typing.Optional[datetime.datetime]=None):
  194. arcpath = GIT_HASH_FILENAME
  195. if arcdir and arcdir[-1:] != "/":
  196. arcpath = f"{arcdir}/{arcpath}"
  197. if not time:
  198. time = datetime.datetime(year=2024, month=4, day=1)
  199. data = f"{commit}\n".encode()
  200. self.add_file_data(arcpath=arcpath, data=data, mode=0o100644, time=time)
  201. def add_file_path(self, arcpath: str, path: Path):
  202. assert path.is_file(), f"{path} should be a file"
  203. for zf in self._zip_files:
  204. zf.write(path, arcname=arcpath)
  205. for tf in self._tar_files:
  206. tf.add(path, arcname=arcpath)
  207. def add_file_directory(self, arcdirpath: str, dirpath: Path):
  208. assert dirpath.is_dir()
  209. if arcdirpath and arcdirpath[-1:] != "/":
  210. arcdirpath += "/"
  211. for f in dirpath.iterdir():
  212. if f.is_file():
  213. arcpath = f"{arcdirpath}{f.name}"
  214. logger.debug("Adding %s to %s", f, arcpath)
  215. self.add_file_path(arcpath=arcpath, path=f)
  216. def close(self):
  217. # Archiver is intentionally made invalid after this function
  218. del self._zip_files
  219. self._zip_files = None
  220. del self._tar_files
  221. self._tar_files = None
  222. def __enter__(self):
  223. return self
  224. def __exit__(self, type, value, traceback):
  225. self.close()
  226. class SourceCollector:
  227. TreeItem = collections.namedtuple("TreeItem", ("path", "mode", "data", "symtarget", "directory", "time"))
  228. def __init__(self, root: Path, commit: str, filter: typing.Optional[Callable[[str], bool]], executer: Executer):
  229. self.root = root
  230. self.commit = commit
  231. self.filter = filter
  232. self.executer = executer
  233. self._git_contents: typing.Optional[dict[str, SourceCollector.TreeItem]] = None
  234. def _get_git_contents(self) -> dict[str, TreeItem]:
  235. contents_tgz = subprocess.check_output(["git", "archive", "--format=tar.gz", self.commit, "-o", "/dev/stdout"], cwd=self.root, text=False)
  236. tar_archive = tarfile.open(fileobj=io.BytesIO(contents_tgz), mode="r:gz")
  237. filenames = tuple(m.name for m in tar_archive if (m.isfile() or m.issym()))
  238. file_times = self._get_file_times(paths=filenames)
  239. git_contents = {}
  240. for ti in tar_archive:
  241. if self.filter and not self.filter(ti.name):
  242. continue
  243. data = None
  244. symtarget = None
  245. directory = False
  246. file_time = None
  247. if ti.isfile():
  248. contents_file = tar_archive.extractfile(ti.name)
  249. data = contents_file.read()
  250. file_time = file_times[ti.name]
  251. elif ti.issym():
  252. symtarget = ti.linkname
  253. file_time = file_times[ti.name]
  254. elif ti.isdir():
  255. directory = True
  256. else:
  257. raise ValueError(f"{ti.name}: unknown type")
  258. git_contents[ti.name] = self.TreeItem(path=ti.name, mode=ti.mode, data=data, symtarget=symtarget, directory=directory, time=file_time)
  259. return git_contents
  260. @property
  261. def git_contents(self) -> dict[str, TreeItem]:
  262. if self._git_contents is None:
  263. self._git_contents = self._get_git_contents()
  264. return self._git_contents
  265. def _get_file_times(self, paths: tuple[str, ...]) -> dict[str, datetime.datetime]:
  266. dry_out = textwrap.dedent("""\
  267. time=2024-03-14T15:40:25-07:00
  268. M\tCMakeLists.txt
  269. """)
  270. git_log_out = self.executer.check_output(["git", "log", "--name-status", '--pretty=time=%cI', self.commit], dry_out=dry_out, cwd=self.root).splitlines(keepends=False)
  271. current_time = None
  272. set_paths = set(paths)
  273. path_times: dict[str, datetime.datetime] = {}
  274. for line in git_log_out:
  275. if not line:
  276. continue
  277. if line.startswith("time="):
  278. current_time = safe_isotime_to_datetime(line.removeprefix("time="))
  279. continue
  280. mod_type, file_paths = line.split(maxsplit=1)
  281. assert current_time is not None
  282. for file_path in file_paths.split("\t"):
  283. if file_path in set_paths and file_path not in path_times:
  284. path_times[file_path] = current_time
  285. # FIXME: find out why some files are not shown in "git log"
  286. # assert set(path_times.keys()) == set_paths
  287. if set(path_times.keys()) != set_paths:
  288. found_times = set(path_times.keys())
  289. paths_without_times = set_paths.difference(found_times)
  290. logger.warning("No times found for these paths: %s", paths_without_times)
  291. max_time = max(time for time in path_times.values())
  292. for path in paths_without_times:
  293. path_times[path] = max_time
  294. return path_times
  295. def add_to_archiver(self, archive_base: str, archiver: Archiver):
  296. remaining_symlinks = set()
  297. added_files = dict()
  298. def calculate_symlink_target(s: SourceCollector.TreeItem) -> str:
  299. dest_dir = os.path.dirname(s.path)
  300. if dest_dir:
  301. dest_dir += "/"
  302. target = dest_dir + s.symtarget
  303. while True:
  304. new_target, n = re.subn(r"([^/]+/+[.]{2}/)", "", target)
  305. print(f"{target=} {new_target=}")
  306. target = new_target
  307. if not n:
  308. break
  309. return target
  310. # Add files in first pass
  311. for git_file in self.git_contents.values():
  312. if git_file.data is not None:
  313. archiver.add_file_data(arcpath=f"{archive_base}/{git_file.path}", data=git_file.data, time=git_file.time, mode=git_file.mode)
  314. added_files[git_file.path] = git_file
  315. elif git_file.symtarget is not None:
  316. remaining_symlinks.add(git_file)
  317. # Resolve symlinks in second pass: zipfile does not support symlinks, so add files to zip archive
  318. while True:
  319. if not remaining_symlinks:
  320. break
  321. symlinks_this_time = set()
  322. extra_added_files = {}
  323. for symlink in remaining_symlinks:
  324. symlink_files_for_zip = {}
  325. symlink_target_path = calculate_symlink_target(symlink)
  326. if symlink_target_path in added_files:
  327. symlink_files_for_zip[symlink.path] = added_files[symlink_target_path]
  328. else:
  329. symlink_target_path_slash = symlink_target_path + "/"
  330. for added_file in added_files:
  331. if added_file.startswith(symlink_target_path_slash):
  332. path_in_symlink = symlink.path + "/" + added_file.removeprefix(symlink_target_path_slash)
  333. symlink_files_for_zip[path_in_symlink] = added_files[added_file]
  334. if symlink_files_for_zip:
  335. symlinks_this_time.add(symlink)
  336. extra_added_files.update(symlink_files_for_zip)
  337. files_for_zip = [{"arcpath": f"{archive_base}/{sym_path}", "data": sym_info.data, "mode": sym_info.mode} for sym_path, sym_info in symlink_files_for_zip.items()]
  338. archiver.add_symlink(arcpath=f"{archive_base}/{symlink.path}", target=symlink.symtarget, time=symlink.time, files_for_zip=files_for_zip)
  339. # if not symlinks_this_time:
  340. # logger.info("files added: %r", set(path for path in added_files.keys()))
  341. assert symlinks_this_time, f"No targets found for symlinks: {remaining_symlinks}"
  342. remaining_symlinks.difference_update(symlinks_this_time)
  343. added_files.update(extra_added_files)
  344. class Releaser:
    def __init__(self, release_info: dict, commit: str, root: Path, dist_path: Path, section_printer: SectionPrinter, executer: Executer, cmake_generator: str, deps_path: Path, overwrite: bool, github: bool, fast: bool):
        """Gather everything needed to produce release artifacts.

        release_info is the parsed release-info.json of the project;
        dist_path receives the generated archives, deps_path holds
        downloaded dependency artifacts.
        """
        self.release_info = release_info
        self.project = release_info["name"]
        self.version = self.extract_sdl_version(root=root, release_info=release_info)
        self.root = root
        self.commit = commit
        self.dist_path = dist_path
        self.section_printer = section_printer
        self.executer = executer
        self.cmake_generator = cmake_generator
        self.cpu_count = multiprocessing.cpu_count()
        self.deps_path = deps_path
        self.overwrite = overwrite
        self.github = github
        self.fast = fast
        # Maps artifact kind (e.g. "src-zip", "dmg") to the produced path.
        self.artifacts: dict[str, Path] = {}
  361. @property
  362. def dry(self) -> bool:
  363. return self.executer.dry
  364. def prepare(self):
  365. logger.debug("Creating dist folder")
  366. self.dist_path.mkdir(parents=True, exist_ok=True)
  367. @classmethod
  368. def _path_filter(cls, path: str) -> bool:
  369. if ".gitmodules" in path:
  370. return True
  371. if path.startswith(".git"):
  372. return False
  373. return True
  374. @classmethod
  375. def _external_repo_path_filter(cls, path: str) -> bool:
  376. if not cls._path_filter(path):
  377. return False
  378. if path.startswith("test/") or path.startswith("tests/"):
  379. return False
  380. return True
  381. def create_source_archives(self) -> None:
  382. archive_base = f"{self.project}-{self.version}"
  383. project_souce_collector = SourceCollector(root=self.root, commit=self.commit, executer=self.executer, filter=self._path_filter)
  384. latest_mod_time = max(item.time for item in project_souce_collector.git_contents.values() if item.time)
  385. zip_path = self.dist_path / f"{archive_base}.zip"
  386. tgz_path = self.dist_path / f"{archive_base}.tar.gz"
  387. txz_path = self.dist_path / f"{archive_base}.tar.xz"
  388. logger.info("Creating zip/tgz/txz source archives ...")
  389. if self.dry:
  390. zip_path.touch()
  391. tgz_path.touch()
  392. txz_path.touch()
  393. else:
  394. with Archiver(zip_path=zip_path, tgz_path=tgz_path, txz_path=txz_path) as archiver:
  395. archiver.add_file_data(arcpath=f"{archive_base}/VERSION.txt", data=f"{self.version}\n".encode(), mode=0o100644, time=latest_mod_time)
  396. archiver.add_file_data(arcpath=f"{archive_base}/{GIT_HASH_FILENAME}", data=f"{self.commit}\n".encode(), mode=0o100644, time=latest_mod_time)
  397. print(f"Adding source files of main project ...")
  398. project_souce_collector.add_to_archiver(archive_base=archive_base, archiver=archiver)
  399. for extra_repo in self.release_info["source"].get("extra-repos", []):
  400. extra_repo_root = self.root / extra_repo
  401. assert (extra_repo_root / ".git").exists(), f"{extra_repo_root} must be a git repo"
  402. extra_repo_commit = self.executer.check_output(["git", "rev-parse", "HEAD"], dry_out=f"gitsha-extra-repo-{extra_repo}", cwd=extra_repo_root).strip()
  403. extra_repo_source_collector = SourceCollector(root=extra_repo_root, commit=extra_repo_commit, executer=self.executer, filter=self._external_repo_path_filter)
  404. print(f"Adding source files of {extra_repo} ...")
  405. extra_repo_source_collector.add_to_archiver(archive_base=f"{archive_base}/{extra_repo}", archiver=archiver)
  406. for file in self.release_info["source"]["checks"]:
  407. assert f"{archive_base}/{file}" in archiver.added_files, f"'{archive_base}/{file}' must exist"
  408. logger.info("... done")
  409. self.artifacts["src-zip"] = zip_path
  410. self.artifacts["src-tar-gz"] = tgz_path
  411. self.artifacts["src-tar-xz"] = txz_path
  412. if not self.dry:
  413. with tgz_path.open("r+b") as f:
  414. # Zero the embedded timestamp in the gzip'ed tarball
  415. f.seek(4, 0)
  416. f.write(b"\x00\x00\x00\x00")
  417. def create_dmg(self, configuration: str="Release") -> None:
  418. dmg_in = self.root / self.release_info["dmg"]["path"]
  419. xcode_project = self.root / self.release_info["dmg"]["project"]
  420. assert xcode_project.is_dir(), f"{xcode_project} must be a directory"
  421. assert (xcode_project / "project.pbxproj").is_file, f"{xcode_project} must contain project.pbxproj"
  422. dmg_in.unlink(missing_ok=True)
  423. build_xcconfig = self.release_info["dmg"].get("build-xcconfig")
  424. if build_xcconfig:
  425. shutil.copy(self.root / build_xcconfig, xcode_project.parent / "build.xcconfig")
  426. xcode_scheme = self.release_info["dmg"].get("scheme")
  427. xcode_target = self.release_info["dmg"].get("target")
  428. assert xcode_scheme or xcode_target, "dmg needs scheme or target"
  429. assert not (xcode_scheme and xcode_target), "dmg cannot have both scheme and target set"
  430. if xcode_scheme:
  431. scheme_or_target = "-scheme"
  432. target_like = xcode_scheme
  433. else:
  434. scheme_or_target = "-target"
  435. target_like = xcode_target
  436. self.executer.run(["xcodebuild", "ONLY_ACTIVE_ARCH=NO", "-project", xcode_project, scheme_or_target, target_like, "-configuration", configuration])
  437. if self.dry:
  438. dmg_in.parent.mkdir(parents=True, exist_ok=True)
  439. dmg_in.touch()
  440. assert dmg_in.is_file(), f"{self.project}.dmg was not created by xcodebuild"
  441. dmg_out = self.dist_path / f"{self.project}-{self.version}.dmg"
  442. shutil.copy(dmg_in, dmg_out)
  443. self.artifacts["dmg"] = dmg_out
  444. @property
  445. def git_hash_data(self) -> bytes:
  446. return f"{self.commit}\n".encode()
  447. def _tar_add_git_hash(self, tar_object: tarfile.TarFile, root: typing.Optional[str]=None, time: typing.Optional[datetime.datetime]=None):
  448. if not time:
  449. time = datetime.datetime(year=2024, month=4, day=1)
  450. path = GIT_HASH_FILENAME
  451. if root:
  452. path = f"{root}/{path}"
  453. tar_info = tarfile.TarInfo(path)
  454. tar_info.mode = 0o100644
  455. tar_info.size = len(self.git_hash_data)
  456. tar_info.mtime = int(time.timestamp())
  457. tar_object.addfile(tar_info, fileobj=io.BytesIO(self.git_hash_data))
  458. def create_mingw_archives(self) -> None:
  459. build_type = "Release"
  460. build_parent_dir = self.root / "build-mingw"
  461. assert "autotools" in self.release_info["mingw"]
  462. assert "cmake" not in self.release_info["mingw"]
  463. mingw_archs = self.release_info["mingw"]["autotools"]["archs"]
  464. ARCH_TO_TRIPLET = {
  465. "x86": "i686-w64-mingw32",
  466. "x64": "x86_64-w64-mingw32",
  467. }
  468. new_env = dict(os.environ)
  469. if "dependencies" in self.release_info["mingw"]:
  470. mingw_deps_path = self.deps_path / "mingw-deps"
  471. shutil.rmtree(mingw_deps_path, ignore_errors=True)
  472. mingw_deps_path.mkdir()
  473. for triplet in ARCH_TO_TRIPLET.values():
  474. (mingw_deps_path / triplet).mkdir()
  475. def extract_filter(member: tarfile.TarInfo, path: str, /):
  476. if member.name.startswith("SDL"):
  477. member.name = "/".join(Path(member.name).parts[1:])
  478. return member
  479. for dep in self.release_info["dependencies"].keys():
  480. extract_dir = mingw_deps_path / f"extract-{dep}"
  481. extract_dir.mkdir()
  482. with chdir(extract_dir):
  483. tar_path = glob.glob(self.release_info["mingw"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)[0]
  484. logger.info("Extracting %s to %s", tar_path, mingw_deps_path)
  485. with tarfile.open(self.deps_path / tar_path, mode="r:gz") as tarf:
  486. tarf.extractall(filter=extract_filter)
  487. for triplet in ARCH_TO_TRIPLET.values():
  488. self.executer.run(["make", f"-j{os.cpu_count()}", "-C", str(extract_dir), "install-package", f"arch={triplet}", f"prefix={str(mingw_deps_path / triplet)}"])
  489. dep_binpath = mingw_deps_path / triplet / "bin"
  490. assert dep_binpath.is_dir(), f"{dep_binpath} for PATH should exist"
  491. dep_pkgconfig = mingw_deps_path / triplet / "lib/pkgconfig"
  492. assert dep_pkgconfig.is_dir(), f"{dep_pkgconfig} for PKG_CONFIG_PATH should exist"
  493. new_env["PATH"] = os.pathsep.join([str(dep_binpath), new_env["PATH"]])
  494. new_env["PKG_CONFIG_PATH"] = str(dep_pkgconfig)
  495. new_env["CFLAGS"] = f"-O2 -ffile-prefix-map={self.root}=/src/{self.project}"
  496. new_env["CXXFLAGS"] = f"-O2 -ffile-prefix-map={self.root}=/src/{self.project}"
  497. arch_install_paths = {}
  498. arch_files = {}
  499. for arch in mingw_archs:
  500. triplet = ARCH_TO_TRIPLET[arch]
  501. new_env["CC"] = f"{triplet}-gcc"
  502. new_env["CXX"] = f"{triplet}-g++"
  503. new_env["RC"] = f"{triplet}-windres"
  504. build_path = build_parent_dir / f"build-{triplet}"
  505. install_path = build_parent_dir / f"install-{triplet}"
  506. arch_install_paths[arch] = install_path
  507. shutil.rmtree(install_path, ignore_errors=True)
  508. build_path.mkdir(parents=True, exist_ok=True)
  509. with self.section_printer.group(f"Configuring MinGW {triplet}"):
  510. extra_args = [arg.replace("@DEP_PREFIX@", str(mingw_deps_path / triplet)) for arg in self.release_info["mingw"]["autotools"]["args"]]
  511. assert "@" not in " ".join(extra_args), f"@ should not be present in extra arguments ({extra_args})"
  512. self.executer.run([
  513. self.root / "configure",
  514. f"--prefix={install_path}",
  515. f"--includedir={install_path}/include",
  516. f"--libdir={install_path}/lib",
  517. f"--bindir={install_path}/bin",
  518. f"--host={triplet}",
  519. f"--build=x86_64-none-linux-gnu",
  520. ] + extra_args, cwd=build_path, env=new_env)
  521. with self.section_printer.group(f"Build MinGW {triplet}"):
  522. self.executer.run(["make", f"-j{self.cpu_count}"], cwd=build_path, env=new_env)
  523. with self.section_printer.group(f"Install MinGW {triplet}"):
  524. self.executer.run(["make", "install"], cwd=build_path, env=new_env)
  525. arch_files[arch] = list(Path(r) / f for r, _, files in os.walk(install_path) for f in files)
  526. print("Collecting files for MinGW development archive ...")
  527. archived_files = {}
  528. arc_root = f"{self.project}-{self.version}"
  529. for arch in mingw_archs:
  530. triplet = ARCH_TO_TRIPLET[arch]
  531. install_path = arch_install_paths[arch]
  532. arcname_parent = f"{arc_root}/{triplet}"
  533. for file in arch_files[arch]:
  534. arcname = os.path.join(arcname_parent, file.relative_to(install_path))
  535. logger.debug("Adding %s as %s", file, arcname)
  536. archived_files[arcname] = file
  537. for meta_destdir, file_globs in self.release_info["mingw"]["files"].items():
  538. assert meta_destdir[0] == "/" and meta_destdir[-1] == "/", f"'{meta_destdir}' must begin and end with '/'"
  539. if "@" in meta_destdir:
  540. destdirs = list(meta_destdir.replace("@TRIPLET@", triplet) for triplet in ARCH_TO_TRIPLET.values())
  541. assert not any("A" in d for d in destdirs)
  542. else:
  543. destdirs = [meta_destdir]
  544. assert isinstance(file_globs, list), f"'{file_globs}' in release_info.json must be a list of globs instead"
  545. for file_glob in file_globs:
  546. file_paths = glob.glob(file_glob, root_dir=self.root)
  547. assert file_paths, f"glob '{file_glob}' does not match any file"
  548. for file_path in file_paths:
  549. file_path = self.root / file_path
  550. for destdir in destdirs:
  551. arcname = f"{arc_root}{destdir}{file_path.name}"
  552. logger.debug("Adding %s as %s", file_path, arcname)
  553. archived_files[arcname] = file_path
  554. print("... done")
  555. print("Creating zip/tgz/txz development archives ...")
  556. zip_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.zip"
  557. tgz_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.tar.gz"
  558. txz_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.tar.xz"
  559. with Archiver(zip_path=zip_path, tgz_path=tgz_path, txz_path=txz_path) as archiver:
  560. for arcpath, path in archived_files.items():
  561. archiver.add_file_path(arcpath=arcpath, path=path)
  562. print("... done")
  563. self.artifacts["mingw-devel-zip"] = zip_path
  564. self.artifacts["mingw-devel-tar-gz"] = tgz_path
  565. self.artifacts["mingw-devel-tar-xz"] = txz_path
  566. def download_dependencies(self):
  567. shutil.rmtree(self.deps_path, ignore_errors=True)
  568. self.deps_path.mkdir(parents=True)
  569. if self.github:
  570. with open(os.environ["GITHUB_OUTPUT"], "a") as f:
  571. f.write(f"dep-path={self.deps_path.absolute()}\n")
  572. for dep, depinfo in self.release_info["dependencies"].items():
  573. startswith = depinfo["startswith"]
  574. dep_repo = depinfo["repo"]
  575. dep_string_data = self.executer.check_output(["gh", "-R", dep_repo, "release", "list", "--exclude-drafts", "--exclude-pre-releases", "--json", "name,createdAt,tagName", "--jq", f'[.[]|select(.name|startswith("{startswith}"))]|max_by(.createdAt)']).strip()
  576. dep_data = json.loads(dep_string_data)
  577. dep_tag = dep_data["tagName"]
  578. dep_version = dep_data["name"]
  579. logger.info("Download dependency %s version %s (tag=%s) ", dep, dep_version, dep_tag)
  580. self.executer.run(["gh", "-R", dep_repo, "release", "download", dep_tag], cwd=self.deps_path)
  581. if self.github:
  582. with open(os.environ["GITHUB_OUTPUT"], "a") as f:
  583. f.write(f"dep-{dep.lower()}-version={dep_version}\n")
  584. def verify_dependencies(self):
  585. for dep, depinfo in self.release_info.get("dependencies", {}).items():
  586. mingw_matches = glob.glob(self.release_info["mingw"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)
  587. assert len(mingw_matches) == 1, f"Exactly one archive matches mingw {dep} dependency: {mingw_matches}"
  588. dmg_matches = glob.glob(self.release_info["dmg"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)
  589. assert len(dmg_matches) == 1, f"Exactly one archive matches dmg {dep} dependency: {dmg_matches}"
  590. msvc_matches = glob.glob(self.release_info["msvc"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)
  591. assert len(msvc_matches) == 1, f"Exactly one archive matches msvc {dep} dependency: {msvc_matches}"
def build_vs(self, arch_platform: VsArchPlatformConfig, vs: VisualStudio):
    """Build the MSVC binaries for one architecture and zip them up.

    Steps, in order:
      1. Extract any configured msvc dependency files from their zip archives
         into the source tree (refusing to overwrite changed files unless
         self.overwrite is set).
      2. Delete the expected output files, run the MSBuild projects, and
         verify every expected output now exists (dry runs just touch them).
      3. Pack the built files plus extra files into a win32 zip and record it
         in self.artifacts.
    """
    msvc_deps_path = self.deps_path / "msvc-deps"
    shutil.rmtree(msvc_deps_path, ignore_errors=True)
    if "dependencies" in self.release_info["msvc"]:
        for dep, depinfo in self.release_info["msvc"]["dependencies"].items():
            # verify_dependencies guarantees exactly one archive matches this glob.
            msvc_zip = self.deps_path / glob.glob(depinfo["artifact"], root_dir=self.deps_path)[0]
            # Source globs with @ARCH@/@PLATFORM@ placeholders resolved for this arch.
            src_globs = [arch_platform.configure(instr["src"]) for instr in depinfo["copy"]]
            with zipfile.ZipFile(msvc_zip, "r") as zf:
                for member in zf.namelist():
                    # Drop the archive's top-level directory before matching.
                    member_path = "/".join(Path(member).parts[1:])
                    for src_i, src_glob in enumerate(src_globs):
                        if fnmatch.fnmatch(member_path, src_glob):
                            dst = (self.root / arch_platform.configure(depinfo["copy"][src_i]["dst"])).resolve() / Path(member_path).name
                            zip_data = zf.read(member)
                            if dst.exists():
                                # Only warn/fail when the file would actually change.
                                identical = False
                                if dst.is_file():
                                    orig_bytes = dst.read_bytes()
                                    if orig_bytes == zip_data:
                                        identical = True
                                if not identical:
                                    logger.warning("Extracting dependency %s, will cause %s to be overwritten", dep, dst)
                                    if not self.overwrite:
                                        raise RuntimeError("Run with --overwrite to allow overwriting")
                            logger.debug("Extracting %s -> %s", member, dst)
                            dst.parent.mkdir(exist_ok=True, parents=True)
                            dst.write_bytes(zip_data)
    # This release flow only supports MSBuild-based msvc builds.
    assert "msbuild" in self.release_info["msvc"]
    assert "cmake" not in self.release_info["msvc"]
    built_paths = [
        self.root / arch_platform.configure(f) for msbuild_files in self.release_info["msvc"]["msbuild"]["files"] for f in msbuild_files["paths"]
    ]
    # Remove stale outputs so a leftover file cannot mask a failed build.
    for b in built_paths:
        b.unlink(missing_ok=True)
    projects = self.release_info["msvc"]["msbuild"]["projects"]
    with self.section_printer.group(f"Build {arch_platform.arch} VS binary"):
        vs.build(arch_platform=arch_platform, projects=projects)
    if self.dry:
        # Dry runs never invoke MSBuild, so fabricate the expected outputs.
        for b in built_paths:
            b.parent.mkdir(parents=True, exist_ok=True)
            b.touch()
    for b in built_paths:
        assert b.is_file(), f"{b} has not been created"
        # NOTE(review): re-creating the parent and touching a file that was just
        # asserted to exist looks redundant — confirm whether this is intentional
        # (e.g. to refresh mtimes) before removing it.
        b.parent.mkdir(parents=True, exist_ok=True)
        b.touch()
    zip_path = self.dist_path / f"{self.project}-{self.version}-win32-{arch_platform.arch}.zip"
    zip_path.unlink(missing_ok=True)
    logger.info("Creating %s", zip_path)
    with Archiver(zip_path=zip_path) as archiver:
        # Built binaries: entries carrying a "lib" destination directory.
        for msbuild_files in self.release_info["msvc"]["msbuild"]["files"]:
            if "lib" in msbuild_files:
                arcdir = arch_platform.configure(msbuild_files["lib"])
                for p in msbuild_files["paths"]:
                    p = arch_platform.configure(p)
                    archiver.add_file_path(path=self.root / p, arcpath=f"{arcdir}/{Path(p).name}")
        # Extra (non-built) files destined for the same zip.
        for extra_files in self.release_info["msvc"]["files"]:
            if "lib" in extra_files:
                arcdir = arch_platform.configure(extra_files["lib"])
                for p in extra_files["paths"]:
                    p = arch_platform.configure(p)
                    archiver.add_file_path(path=self.root / p, arcpath=f"{arcdir}/{Path(p).name}")
        archiver.add_git_hash(commit=self.commit)
    self.artifacts[f"VC-{arch_platform.arch}"] = zip_path
    # Final sanity check: archiving must not have disturbed the build outputs.
    for p in built_paths:
        assert p.is_file(), f"{p} should exist"
  657. def build_vs_devel(self, arch_platforms: list[VsArchPlatformConfig]) -> None:
  658. zip_path = self.dist_path / f"{self.project}-devel-{self.version}-VC.zip"
  659. archive_prefix = f"{self.project}-{self.version}"
  660. with Archiver(zip_path=zip_path) as archiver:
  661. for msbuild_files in self.release_info["msvc"]["msbuild"]["files"]:
  662. if "devel" in msbuild_files:
  663. for meta_glob_path in msbuild_files["paths"]:
  664. if "@" in meta_glob_path or "@" in msbuild_files["devel"]:
  665. for arch_platform in arch_platforms:
  666. glob_path = arch_platform.configure(meta_glob_path)
  667. paths = glob.glob(glob_path, root_dir=self.root)
  668. dst_subdirpath = arch_platform.configure(msbuild_files['devel'])
  669. for path in paths:
  670. path = self.root / path
  671. arcpath = f"{archive_prefix}/{dst_subdirpath}/{Path(path).name}"
  672. archiver.add_file_path(path=path, arcpath=arcpath)
  673. else:
  674. paths = glob.glob(meta_glob_path, root_dir=self.root)
  675. for path in paths:
  676. path = self.root / path
  677. arcpath = f"{archive_prefix}/{msbuild_files['devel']}/{Path(path).name}"
  678. archiver.add_file_path(path=path, arcpath=arcpath)
  679. for extra_files in self.release_info["msvc"]["files"]:
  680. if "devel" in extra_files:
  681. for meta_glob_path in extra_files["paths"]:
  682. if "@" in meta_glob_path or "@" in extra_files["devel"]:
  683. for arch_platform in arch_platforms:
  684. glob_path = arch_platform.configure(meta_glob_path)
  685. paths = glob.glob(glob_path, root_dir=self.root)
  686. dst_subdirpath = arch_platform.configure(extra_files['devel'])
  687. for path in paths:
  688. path = self.root / path
  689. arcpath = f"{archive_prefix}/{dst_subdirpath}/{Path(path).name}"
  690. archiver.add_file_path(path=path, arcpath=arcpath)
  691. else:
  692. paths = glob.glob(meta_glob_path, root_dir=self.root)
  693. for path in paths:
  694. path = self.root / path
  695. arcpath = f"{archive_prefix}/{extra_files['devel']}/{Path(path).name}"
  696. archiver.add_file_path(path=path, arcpath=arcpath)
  697. archiver.add_git_hash(commit=self.commit, arcdir=archive_prefix)
  698. self.artifacts["VC-devel"] = zip_path
  699. @classmethod
  700. def extract_sdl_version(cls, root: Path, release_info: dict) -> str:
  701. with open(root / release_info["version"]["file"], "r") as f:
  702. text = f.read()
  703. major = next(re.finditer(release_info["version"]["re_major"], text, flags=re.M)).group(1)
  704. minor = next(re.finditer(release_info["version"]["re_minor"], text, flags=re.M)).group(1)
  705. micro = next(re.finditer(release_info["version"]["re_micro"], text, flags=re.M)).group(1)
  706. return f"{major}.{minor}.{micro}"
  707. def main(argv=None) -> int:
  708. if sys.version_info < (3, 11):
  709. logger.error("This script needs at least python 3.11")
  710. return 1
  711. parser = argparse.ArgumentParser(allow_abbrev=False, description="Create SDL release artifacts")
  712. parser.add_argument("--root", metavar="DIR", type=Path, default=Path(__file__).absolute().parents[1], help="Root of project")
  713. parser.add_argument("--release-info", metavar="JSON", dest="path_release_info", type=Path, default=Path(__file__).absolute().parent / "release-info.json", help="Path of release-info.json")
  714. parser.add_argument("--dependency-folder", metavar="FOLDER", dest="deps_path", type=Path, default="deps", help="Directory containing pre-built archives of dependencies (will be removed when downloading archives)")
  715. parser.add_argument("--out", "-o", metavar="DIR", dest="dist_path", type=Path, default="dist", help="Output directory")
  716. parser.add_argument("--github", action="store_true", help="Script is running on a GitHub runner")
  717. parser.add_argument("--commit", default="HEAD", help="Git commit/tag of which a release should be created")
  718. parser.add_argument("--actions", choices=["download", "source", "mingw", "msvc", "dmg"], required=True, nargs="+", dest="actions", help="What to do?")
  719. parser.set_defaults(loglevel=logging.INFO)
  720. parser.add_argument('--vs-year', dest="vs_year", help="Visual Studio year")
  721. parser.add_argument('--cmake-generator', dest="cmake_generator", default="Ninja", help="CMake Generator")
  722. parser.add_argument('--debug', action='store_const', const=logging.DEBUG, dest="loglevel", help="Print script debug information")
  723. parser.add_argument('--dry-run', action='store_true', dest="dry", help="Don't execute anything")
  724. parser.add_argument('--force', action='store_true', dest="force", help="Ignore a non-clean git tree")
  725. parser.add_argument('--overwrite', action='store_true', dest="overwrite", help="Allow potentially overwriting other projects")
  726. parser.add_argument('--fast', action='store_true', dest="fast", help="Don't do a rebuild")
  727. args = parser.parse_args(argv)
  728. logging.basicConfig(level=args.loglevel, format='[%(levelname)s] %(message)s')
  729. args.deps_path = args.deps_path.absolute()
  730. args.dist_path = args.dist_path.absolute()
  731. args.root = args.root.absolute()
  732. args.dist_path = args.dist_path.absolute()
  733. if args.dry:
  734. args.dist_path = args.dist_path / "dry"
  735. if args.github:
  736. section_printer: SectionPrinter = GitHubSectionPrinter()
  737. else:
  738. section_printer = SectionPrinter()
  739. if args.github and "GITHUB_OUTPUT" not in os.environ:
  740. os.environ["GITHUB_OUTPUT"] = "/tmp/github_output.txt"
  741. executer = Executer(root=args.root, dry=args.dry)
  742. root_git_hash_path = args.root / GIT_HASH_FILENAME
  743. root_is_maybe_archive = root_git_hash_path.is_file()
  744. if root_is_maybe_archive:
  745. logger.warning("%s detected: Building from archive", GIT_HASH_FILENAME)
  746. archive_commit = root_git_hash_path.read_text().strip()
  747. if args.commit != archive_commit:
  748. logger.warning("Commit argument is %s, but archive commit is %s. Using %s.", args.commit, archive_commit, archive_commit)
  749. args.commit = archive_commit
  750. else:
  751. args.commit = executer.check_output(["git", "rev-parse", args.commit], dry_out="e5812a9fd2cda317b503325a702ba3c1c37861d9").strip()
  752. logger.info("Using commit %s", args.commit)
  753. try:
  754. with args.path_release_info.open() as f:
  755. release_info = json.load(f)
  756. except FileNotFoundError:
  757. logger.error(f"Could not find {args.path_release_info}")
  758. releaser = Releaser(
  759. release_info=release_info,
  760. commit=args.commit,
  761. root=args.root,
  762. dist_path=args.dist_path,
  763. executer=executer,
  764. section_printer=section_printer,
  765. cmake_generator=args.cmake_generator,
  766. deps_path=args.deps_path,
  767. overwrite=args.overwrite,
  768. github=args.github,
  769. fast=args.fast,
  770. )
  771. if root_is_maybe_archive:
  772. logger.warning("Building from archive. Skipping clean git tree check.")
  773. else:
  774. porcelain_status = executer.check_output(["git", "status", "--ignored", "--porcelain"], dry_out="\n").strip()
  775. if porcelain_status:
  776. print(porcelain_status)
  777. logger.warning("The tree is dirty! Do not publish any generated artifacts!")
  778. if not args.force:
  779. raise Exception("The git repo contains modified and/or non-committed files. Run with --force to ignore.")
  780. if args.fast:
  781. logger.warning("Doing fast build! Do not publish generated artifacts!")
  782. with section_printer.group("Arguments"):
  783. print(f"project = {releaser.project}")
  784. print(f"version = {releaser.version}")
  785. print(f"commit = {args.commit}")
  786. print(f"out = {args.dist_path}")
  787. print(f"actions = {args.actions}")
  788. print(f"dry = {args.dry}")
  789. print(f"force = {args.force}")
  790. print(f"overwrite = {args.overwrite}")
  791. print(f"cmake_generator = {args.cmake_generator}")
  792. releaser.prepare()
  793. if "download" in args.actions:
  794. releaser.download_dependencies()
  795. if set(args.actions).intersection({"msvc", "mingw"}):
  796. print("Verifying presence of dependencies (run 'download' action to download) ...")
  797. releaser.verify_dependencies()
  798. print("... done")
  799. if "source" in args.actions:
  800. if root_is_maybe_archive:
  801. raise Exception("Cannot build source archive from source archive")
  802. with section_printer.group("Create source archives"):
  803. releaser.create_source_archives()
  804. if "dmg" in args.actions:
  805. if platform.system() != "Darwin" and not args.dry:
  806. parser.error("framework artifact(s) can only be built on Darwin")
  807. releaser.create_dmg()
  808. if "msvc" in args.actions:
  809. if platform.system() != "Windows" and not args.dry:
  810. parser.error("msvc artifact(s) can only be built on Windows")
  811. with section_printer.group("Find Visual Studio"):
  812. vs = VisualStudio(executer=executer)
  813. arch_platforms = [
  814. VsArchPlatformConfig(arch="x86", platform="Win32", configuration="Release"),
  815. VsArchPlatformConfig(arch="x64", platform="x64", configuration="Release"),
  816. ]
  817. for arch_platform in arch_platforms:
  818. releaser.build_vs(arch_platform=arch_platform, vs=vs)
  819. with section_printer.group("Create SDL VC development zip"):
  820. releaser.build_vs_devel(arch_platforms)
  821. if "mingw" in args.actions:
  822. releaser.create_mingw_archives()
  823. with section_printer.group("Summary"):
  824. print(f"artifacts = {releaser.artifacts}")
  825. if args.github:
  826. with open(os.environ["GITHUB_OUTPUT"], "a") as f:
  827. f.write(f"project={releaser.project}\n")
  828. f.write(f"version={releaser.version}\n")
  829. for k, v in releaser.artifacts.items():
  830. f.write(f"{k}={v.name}\n")
  831. return 0
  832. if __name__ == "__main__":
  833. raise SystemExit(main())