Просмотр исходного кода

Add scripts to create release artifacts for sources/MSVC/MingW/Android

Anonymous Maarten 3 месяца назад
Родитель
Commit
104f73b439

+ 636 - 0
.github/workflows/release.yml

@@ -0,0 +1,636 @@
+name: 'release'
+run-name: 'Create PhysicsFS release artifacts for ${{ inputs.commit }}'
+
+on:
+  workflow_dispatch:
+    inputs:
+      commit:
+        description: 'Commit of PhysicsFS'
+        required: true
+
+jobs:
+
+  src:
+    runs-on: ubuntu-latest
+    outputs:
+      project: ${{ steps.releaser.outputs.project }}
+      version: ${{ steps.releaser.outputs.version }}
+      src-tar-gz: ${{ steps.releaser.outputs.src-tar-gz }}
+      src-tar-xz: ${{ steps.releaser.outputs.src-tar-xz }}
+      src-zip: ${{ steps.releaser.outputs.src-zip }}
+    steps:
+      - name: 'Set up Python'
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.11'
+      - name: 'Fetch build-release.py'
+        uses: actions/checkout@v4
+        with:
+          sparse-checkout: 'build-scripts/build-release.py'
+      - name: 'Set up PhysicsFS sources'
+        uses: actions/checkout@v4
+        with:
+          path: 'physfs'
+          fetch-depth: 0
+      - name: 'Build Source archive'
+        id: releaser
+        shell: bash
+        run: |
+          python build-scripts/build-release.py \
+            --actions source \
+            --commit ${{ inputs.commit }} \
+            --root "${{ github.workspace }}/physfs" \
+            --github \
+            --debug
+      - name: 'Store source archives'
+        uses: actions/upload-artifact@v4
+        with:
+          name: sources
+          path: '${{ github.workspace }}/dist'
+      - name: 'Generate summary'
+        run: |
+          echo "Run the following commands to download all artifacts:" >> $GITHUB_STEP_SUMMARY
+          echo '```' >> $GITHUB_STEP_SUMMARY
+          echo "mkdir -p /tmp/${{ steps.releaser.outputs.project }}-${{ steps.releaser.outputs.version }}" >> $GITHUB_STEP_SUMMARY
+          echo "cd /tmp/${{ steps.releaser.outputs.project }}-${{ steps.releaser.outputs.version }}" >> $GITHUB_STEP_SUMMARY
+          echo "gh run -R ${{ github.repository }} download ${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
+          echo '```' >> $GITHUB_STEP_SUMMARY
+
+  linux-verify:
+    needs: [src]
+    runs-on: ubuntu-latest
+    steps:
+      - name: 'Download source archives'
+        uses: actions/download-artifact@v4
+        with:
+          name: sources
+          path: '${{ github.workspace }}'
+      - name: 'Unzip ${{ needs.src.outputs.src-zip }}'
+        id: zip
+        run: |
+          mkdir /tmp/zipdir
+          cd /tmp/zipdir
+          unzip "${{ github.workspace }}/${{ needs.src.outputs.src-zip }}"
+          echo "path=/tmp/zipdir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT
+      - name: 'Untar ${{ needs.src.outputs.src-tar-gz }}'
+        id: tar
+        run: |
+          mkdir -p /tmp/tardir
+          tar -C /tmp/tardir -v -x -f "${{ github.workspace }}/${{ needs.src.outputs.src-tar-gz }}"
+          echo "path=/tmp/tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT
+      - name: 'Compare contents of ${{ needs.src.outputs.src-zip }} and ${{ needs.src.outputs.src-tar-gz }}'
+        run: |
+          diff -r /tmp/zipdir /tmp/tardir
+      - name: 'Test versioning'
+        shell: bash
+        run: |
+          ${{ steps.tar.outputs.path }}/build-scripts/test-versioning.sh
+      - name: 'CMake (configure + build + tests + examples)'
+        run: |
+          cmake -S ${{ steps.tar.outputs.path }} -B /tmp/build -DPHYSFS_BUILD_TEST=TRUE
+          cmake --build /tmp/build --verbose
+
+#  dmg:
+#    needs: [src]
+#    runs-on: macos-latest
+#    outputs:
+#      dmg: ${{ steps.releaser.outputs.dmg }}
+#    steps:
+#      - name: 'Set up Python'
+#        uses: actions/setup-python@v5
+#        with:
+#          python-version: '3.11'
+#      - name: 'Fetch build-release.py'
+#        uses: actions/checkout@v4
+#        with:
+#          sparse-checkout: 'build-scripts/build-release.py'
+#      - name: 'Download source archives'
+#        uses: actions/download-artifact@v4
+#        with:
+#          name: sources
+#          path: '${{ github.workspace }}'
+#      - name: 'Untar ${{ needs.src.outputs.src-tar-gz }}'
+#        id: tar
+#        run: |
+#          mkdir -p "${{ github.workspace }}/tardir"
+#          tar -C "${{ github.workspace }}/tardir" -v -x -f "${{ github.workspace }}/${{ needs.src.outputs.src-tar-gz }}"
+#          echo "path=${{ github.workspace }}/tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT
+#      - name: 'Build PhysicsFS.dmg'
+#        id: releaser
+#        shell: bash
+#        run: |
+#          python build-scripts/build-release.py \
+#            --actions dmg \
+#            --commit ${{ inputs.commit }} \
+#            --root "${{ steps.tar.outputs.path }}" \
+#            --github \
+#            --debug
+#      - name: 'Store DMG image file'
+#        uses: actions/upload-artifact@v4
+#        with:
+#          name: dmg
+#          path: '${{ github.workspace }}/dist'
+#
+#  dmg-verify:
+#    needs: [dmg, src]
+#    runs-on: macos-latest
+#    steps:
+#      - name: 'Download source archives'
+#        uses: actions/download-artifact@v4
+#        with:
+#          name: sources
+#          path: '${{ github.workspace }}'
+#      - name: 'Download ${{ needs.dmg.outputs.dmg }}'
+#        uses: actions/download-artifact@v4
+#        with:
+#          name: dmg
+#          path: '${{ github.workspace }}'
+#      - name: 'Untar ${{ needs.src.outputs.src-tar-gz }}'
+#        id: src
+#        run: |
+#          mkdir -p /tmp/tardir
+#          tar -C /tmp/tardir -v -x -f "${{ github.workspace }}/${{ needs.src.outputs.src-tar-gz }}"
+#          echo "path=/tmp/tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT
+#      - name: 'Mount ${{ needs.dmg.outputs.dmg }}'
+#        id: mount
+#        run: |
+#          hdiutil attach '${{ github.workspace }}/${{ needs.dmg.outputs.dmg }}'
+#          mount_point="/Volumes/${{ needs.src.outputs.project }}"
+#          if [ ! -d "$mount_point/${{ needs.src.outputs.project }}.xcframework" ]; then
+#            echo "Cannot find ${{ needs.src.outputs.project }}.xcframework!"
+#            exit 1
+#          fi
+#          echo "mount_point=$mount_point">>$GITHUB_OUTPUT
+#      - name: 'CMake (configure + build) Darwin'
+#        run: |
+#          cmake -S "${{ steps.src.outputs.path }}/cmake/test"               \
+#              -DTEST_STATIC=FALSE                                           \
+#              -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}"  \
+#              -DCMAKE_SYSTEM_NAME=Darwin                                    \
+#              -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64"                      \
+#              -DCMAKE_OSX_DEPLOYMENT_TARGET=10.13                           \
+#              -Werror=dev                                                   \
+#              -B build_darwin
+#          cmake --build build_darwin --config Release --verbose
+#
+#          cmake -S "${{ steps.src.outputs.path }}/cmake/test"                                                   \
+#              -DTEST_STATIC=FALSE                                                                               \
+#              -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}/PhysicsFS.xcframework/macos-arm64_x86_64"  \
+#              -DCMAKE_SYSTEM_NAME=Darwin                                                                        \
+#              -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64"                                                          \
+#              -DCMAKE_OSX_DEPLOYMENT_TARGET=10.13                                                               \
+#              -Werror=dev                                                                                       \
+#              -B build_darwin_2
+#          cmake --build build_darwin_2 --config Release --verbose
+#      - name: 'CMake (configure + build) iOS'
+#        run: |
+#          cmake -S "${{ steps.src.outputs.path }}/cmake/test"               \
+#              -DTEST_STATIC=FALSE                                           \
+#              -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}"  \
+#              -DCMAKE_SYSTEM_NAME=iOS                                       \
+#              -DCMAKE_OSX_ARCHITECTURES="arm64"                             \
+#              -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0                            \
+#              -Werror=dev                                                   \
+#              -B build_ios
+#          cmake --build build_ios --config Release --verbose
+#
+#          cmake -S "${{ steps.src.outputs.path }}/cmake/test"                                          \
+#              -DTEST_STATIC=FALSE                                                                      \
+#              -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}/PhysicsFS.xcframework/ios-arm64"  \
+#              -DCMAKE_SYSTEM_NAME=iOS                                                                  \
+#              -DCMAKE_OSX_ARCHITECTURES="arm64"                                                        \
+#              -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0                                                       \
+#              -Werror=dev                                                                              \
+#              -B build_ios2
+#          cmake --build build_ios2 --config Release --verbose
+#      - name: 'CMake (configure + build) tvOS'
+#        run: |
+#          cmake -S "${{ steps.src.outputs.path }}/cmake/test"               \
+#              -DTEST_STATIC=FALSE                                           \
+#              -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}"  \
+#              -DCMAKE_SYSTEM_NAME=tvOS                                      \
+#              -DCMAKE_OSX_ARCHITECTURES="arm64"                             \
+#              -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0                            \
+#              -Werror=dev                                                   \
+#              -B build_tvos
+#          cmake --build build_tvos --config Release --verbose
+#
+#          cmake -S "${{ steps.src.outputs.path }}/cmake/test"                                          \
+#              -DTEST_STATIC=FALSE                                                                      \
+#              -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}/PhysicsFS.xcframework/tvos-arm64" \
+#              -DCMAKE_SYSTEM_NAME=tvOS                                                                 \
+#              -DCMAKE_OSX_ARCHITECTURES="arm64"                                                        \
+#              -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0                                                       \
+#              -Werror=dev                                                                              \
+#              -B build_tvos2
+#          cmake --build build_tvos2 --config Release --verbose
+#      - name: 'CMake (configure + build) iOS simulator'
+#        run: |
+#          sysroot=$(xcodebuild -version -sdk iphonesimulator Path)
+#          echo "sysroot=$sysroot"
+#          cmake -S "${{ steps.src.outputs.path }}/cmake/test"               \
+#              -DTEST_STATIC=FALSE                                           \
+#              -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}"  \
+#              -DCMAKE_SYSTEM_NAME=iOS                                       \
+#              -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64"                      \
+#              -DCMAKE_OSX_SYSROOT="${sysroot}"                              \
+#              -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0                            \
+#              -Werror=dev                                                   \
+#              -B build_ios_simulator
+#          cmake --build build_ios_simulator --config Release --verbose
+#
+#          cmake -S "${{ steps.src.outputs.path }}/cmake/test"                                                          \
+#              -DTEST_STATIC=FALSE                                                                                      \
+#              -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}/PhysicsFS.xcframework/ios-arm64_x86_64-simulator" \
+#              -DCMAKE_SYSTEM_NAME=iOS                                                                                  \
+#              -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64"                                                                 \
+#              -DCMAKE_OSX_SYSROOT="${sysroot}"                                                                         \
+#              -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0                                                                       \
+#              -Werror=dev                                                                                              \
+#              -B build_ios_simulator2
+#          cmake --build build_ios_simulator2 --config Release --verbose
+#      - name: 'CMake (configure + build) tvOS simulator'
+#        run: |
+#          sysroot=$(xcodebuild -version -sdk appletvsimulator Path)
+#          echo "sysroot=$sysroot"
+#          cmake -S "${{ steps.src.outputs.path }}/cmake/test"               \
+#              -DTEST_STATIC=FALSE                                           \
+#              -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}"  \
+#              -DCMAKE_SYSTEM_NAME=tvOS                                      \
+#              -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64"                      \
+#              -DCMAKE_OSX_SYSROOT="${sysroot}"                              \
+#              -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0                            \
+#              -Werror=dev                                                   \
+#              -B build_tvos_simulator
+#          cmake --build build_tvos_simulator --config Release --verbose
+#
+#          cmake -S "${{ steps.src.outputs.path }}/cmake/test"                                                           \
+#              -DTEST_STATIC=FALSE                                                                                       \
+#              -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}/PhysicsFS.xcframework/tvos-arm64_x86_64-simulator" \
+#              -DCMAKE_SYSTEM_NAME=tvOS                                                                                  \
+#              -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64"                                                                  \
+#              -DCMAKE_OSX_SYSROOT="${sysroot}"                                                                          \
+#              -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0                                                                        \
+#              -Werror=dev                                                                                               \
+#              -B build_tvos_simulator2
+#          cmake --build build_tvos_simulator2 --config Release --verbose
+
+  msvc:
+    needs: [src]
+    runs-on: windows-2025
+    outputs:
+      VC-x86: ${{ steps.releaser.outputs.VC-x86 }}
+      VC-x64: ${{ steps.releaser.outputs.VC-x64 }}
+      VC-arm64: ${{ steps.releaser.outputs.VC-arm64 }}
+      VC-devel: ${{ steps.releaser.outputs.VC-devel }}
+    steps:
+      - name: 'Set up Python'
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.11'
+      - name: 'Fetch build-release.py'
+        uses: actions/checkout@v4
+        with:
+          sparse-checkout: 'build-scripts/build-release.py'
+      - name: 'Download source archives'
+        uses: actions/download-artifact@v4
+        with:
+          name: sources
+          path: '${{ github.workspace }}'
+      - name: 'Unzip ${{ needs.src.outputs.src-zip }}'
+        id: zip
+        run: |
+          New-Item C:\temp -ItemType Directory -ErrorAction SilentlyContinue
+          cd C:\temp
+          unzip "${{ github.workspace }}/${{ needs.src.outputs.src-zip }}"
+          echo "path=C:\temp\${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$Env:GITHUB_OUTPUT
+      - name: 'Build MSVC binary archives'
+        id: releaser
+        run: |
+          python build-scripts/build-release.py `
+            --actions msvc `
+            --commit ${{ inputs.commit }} `
+            --root "${{ steps.zip.outputs.path }}" `
+            --github `
+            --debug
+      - name: 'Store MSVC archives'
+        uses: actions/upload-artifact@v4
+        with:
+          name: win32
+          path: '${{ github.workspace }}/dist'
+
+  msvc-verify:
+    needs: [msvc, src]
+    runs-on: windows-latest
+    steps:
+      - name: 'Fetch .github/actions/setup-ninja/action.yml'
+        uses: actions/checkout@v4
+        with:
+          sparse-checkout: '.github/actions/setup-ninja/action.yml'
+      - name: 'Download source archives'
+        uses: actions/download-artifact@v4
+        with:
+          name: sources
+          path: '${{ github.workspace }}'
+      - name: 'Download MSVC binaries'
+        uses: actions/download-artifact@v4
+        with:
+          name: win32
+          path: '${{ github.workspace }}'
+      - name: 'Unzip ${{ needs.src.outputs.src-zip }}'
+        id: src
+        run: |
+          mkdir '${{ github.workspace }}/sources'
+          cd '${{ github.workspace }}/sources'
+          unzip "${{ github.workspace }}/${{ needs.src.outputs.src-zip }}"
+          echo "path=${{ github.workspace }}/sources/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$env:GITHUB_OUTPUT
+      - name: 'Unzip ${{ needs.msvc.outputs.VC-devel }}'
+        id: bin
+        run: |
+          mkdir '${{ github.workspace }}/vc'
+          cd '${{ github.workspace }}/vc'
+          unzip "${{ github.workspace }}/${{ needs.msvc.outputs.VC-devel }}"
+          echo "path=${{ github.workspace }}/vc/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$env:GITHUB_OUTPUT
+      - name: Set up ninja
+        uses: ./.github/actions/setup-ninja
+      - name: 'Configure vcvars x86'
+        uses: ilammy/msvc-dev-cmd@v1
+        with:
+          arch: x64_x86
+      - name: 'CMake (configure + build + tests) x86'
+        run: |
+          cmake -S "${{ steps.src.outputs.path }}/cmake/test"     `
+              -B build_x86                                        `
+              -GNinja                                             `
+              -DCMAKE_BUILD_TYPE=Debug                            `
+              -Werror=dev                                         `
+              -DTEST_STATIC=FALSE                                 `
+              -DTEST_SHARED=TRUE                                  `
+              -DCMAKE_SUPPRESS_REGENERATION=TRUE                  `
+              -DCMAKE_PREFIX_PATH="${{ steps.bin.outputs.path }}"
+          Start-Sleep -Seconds 2
+          cmake --build build_x86 --verbose
+      - name: 'Configure vcvars x64'
+        uses: ilammy/msvc-dev-cmd@v1
+        with:
+          arch: x64
+      - name: 'CMake (configure + build + tests) x64'
+        run: |
+          cmake -S "${{ steps.src.outputs.path }}/cmake/test"     `
+              -B build_x64                                        `
+              -GNinja                                             `
+              -DCMAKE_BUILD_TYPE=Debug                            `
+              -Werror=dev                                         `
+              -DTEST_STATIC=FALSE                                 `
+              -DTEST_SHARED=TRUE                                  `
+              -DCMAKE_SUPPRESS_REGENERATION=TRUE                  `
+              -DCMAKE_PREFIX_PATH="${{ steps.bin.outputs.path }}"
+          Start-Sleep -Seconds 2
+          cmake --build build_x64 --verbose
+      - name: 'Configure vcvars arm64'
+        uses: ilammy/msvc-dev-cmd@v1
+        with:
+          arch: x64_arm64
+      - name: 'CMake (configure + build) arm64'
+        run: |
+          cmake -S "${{ steps.src.outputs.path }}/cmake/test"     `
+              -B build_arm64                                      `
+              -GNinja                                             `
+              -DCMAKE_BUILD_TYPE=Debug                            `
+              -Werror=dev                                         `
+              -DTEST_STATIC=FALSE                                 `
+              -DTEST_SHARED=TRUE                                  `
+              -DCMAKE_SUPPRESS_REGENERATION=TRUE                  `
+              -DCMAKE_PREFIX_PATH="${{ steps.bin.outputs.path }}"
+          Start-Sleep -Seconds 2
+          cmake --build build_arm64 --verbose
+      - name: 'CMake (configure + build) arm64ec'
+        run: |
+          cmake -S "${{ steps.src.outputs.path }}/cmake/test"     `
+              -B build_arm64ec                                    `
+              -GNinja                                             `
+              -DCMAKE_BUILD_TYPE=Debug                            `
+              -Werror=dev                                         `
+              -DTEST_STATIC=FALSE                                 `
+              -DTEST_SHARED=TRUE                                  `
+              -DCMAKE_SUPPRESS_REGENERATION=TRUE                  `
+              -DCMAKE_C_FLAGS="/arm64EC" `
+              -DCMAKE_CXX_FLAGS="/arm64EC" `
+              -DCMAKE_EXE_LINKER_FLAGS="/MACHINE:ARM64EC"         `
+              -DCMAKE_SHARED_LINKER_FLAGS="/MACHINE:ARM64EC"      `
+              -DCMAKE_STATIC_LINKER_FLAGS="/MACHINE:ARM64EC"      `
+              -DCMAKE_PREFIX_PATH="${{ steps.bin.outputs.path }}"
+          Start-Sleep -Seconds 2
+          cmake --build build_arm64ec --verbose
+
+  mingw:
+    needs: [src]
+    runs-on: ubuntu-24.04  # FIXME: current ubuntu-latest ships an outdated mingw, replace with ubuntu-latest once 24.04 becomes the new default
+    outputs:
+      mingw-devel-tar-gz: ${{ steps.releaser.outputs.mingw-devel-tar-gz }}
+      mingw-devel-tar-xz: ${{ steps.releaser.outputs.mingw-devel-tar-xz }}
+    steps:
+      - name: 'Set up Python'
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.11'
+      - name: 'Fetch build-release.py'
+        uses: actions/checkout@v4
+        with:
+          sparse-checkout: 'build-scripts/build-release.py'
+      - name: 'Install Mingw toolchain'
+        run: |
+          sudo apt-get update -y
+          sudo apt-get install -y gcc-mingw-w64 g++-mingw-w64 ninja-build
+      - name: 'Download source archives'
+        uses: actions/download-artifact@v4
+        with:
+          name: sources
+          path: '${{ github.workspace }}'
+      - name: 'Untar ${{ needs.src.outputs.src-tar-gz }}'
+        id: tar
+        run: |
+          mkdir -p /tmp/tardir
+          tar -C /tmp/tardir -v -x -f "${{ github.workspace }}/${{ needs.src.outputs.src-tar-gz }}"
+          echo "path=/tmp/tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT
+      - name: 'Build MinGW binary archives'
+        id: releaser
+        run: |
+          python build-scripts/build-release.py \
+            --actions mingw \
+            --commit ${{ inputs.commit }} \
+            --root "${{ steps.tar.outputs.path }}" \
+            --github \
+            --debug
+      - name: 'Store MinGW archives'
+        uses: actions/upload-artifact@v4
+        with:
+          name: mingw
+          path: '${{ github.workspace }}/dist'
+
+  mingw-verify:
+    needs: [mingw, src]
+    runs-on: ubuntu-latest
+    steps:
+      - name: 'Install Mingw toolchain'
+        run: |
+          sudo apt-get update -y
+          sudo apt-get install -y gcc-mingw-w64 g++-mingw-w64 ninja-build
+      - name: 'Download source archives'
+        uses: actions/download-artifact@v4
+        with:
+          name: sources
+          path: '${{ github.workspace }}'
+      - name: 'Download MinGW binaries'
+        uses: actions/download-artifact@v4
+        with:
+          name: mingw
+          path: '${{ github.workspace }}'
+      - name: 'Untar ${{ needs.src.outputs.src-tar-gz }}'
+        id: src
+        run: |
+          mkdir -p /tmp/tardir
+          tar -C /tmp/tardir -v -x -f "${{ github.workspace }}/${{ needs.src.outputs.src-tar-gz }}"
+          echo "path=/tmp/tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT
+      - name: 'Untar ${{ needs.mingw.outputs.mingw-devel-tar-gz }}'
+        id: bin
+        run: |
+          mkdir -p /tmp/mingw-tardir
+          tar -C /tmp/mingw-tardir -v -x -f "${{ github.workspace }}/${{ needs.mingw.outputs.mingw-devel-tar-gz }}"
+          echo "path=/tmp/mingw-tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT
+      - name: 'CMake (configure + build) i686'
+        run: |
+          cmake -S "${{ steps.src.outputs.path }}/cmake/test"                                                           \
+              -DCMAKE_BUILD_TYPE="Release"                                                                              \
+              -DTEST_STATIC=FALSE                                                                                       \
+              -DCMAKE_PREFIX_PATH="${{ steps.bin.outputs.path }}"                                                       \
+              -DCMAKE_TOOLCHAIN_FILE="${{ steps.src.outputs.path }}/build-scripts/cmake-toolchain-mingw64-i686.cmake"   \
+              -Werror=dev                                                                                               \
+              -B build_x86
+          cmake --build build_x86 --config Release --verbose
+      - name: 'CMake (configure + build) x86_64'
+        run: |
+          cmake -S "${{ steps.src.outputs.path }}/cmake/test"                                                           \
+              -DCMAKE_BUILD_TYPE="Release"                                                                              \
+              -DTEST_STATIC=FALSE                                                                                       \
+              -DCMAKE_PREFIX_PATH="${{ steps.bin.outputs.path }}"                                                       \
+              -DCMAKE_TOOLCHAIN_FILE="${{ steps.src.outputs.path }}/build-scripts/cmake-toolchain-mingw64-x86_64.cmake" \
+              -Werror=dev                                                                                               \
+              -B build_x64
+          cmake --build build_x64 --config Release --verbose
+
+  android:
+    needs: [src]
+    runs-on: ubuntu-latest
+    outputs:
+      android-aar: ${{ steps.releaser.outputs.android-aar }}
+    steps:
+      - name: 'Set up Python'
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.11'
+      - name: 'Fetch build-release.py'
+        uses: actions/checkout@v4
+        with:
+          sparse-checkout: 'build-scripts/build-release.py'
+      - name: 'Setup Android NDK'
+        id: setup-ndk
+        uses: nttld/setup-ndk@v1
+        with:
+          local-cache: false
+          ndk-version: r28c
+      - name: 'Setup Java JDK'
+        uses: actions/setup-java@v4
+        with:
+          distribution: 'temurin'
+          java-version: '11'
+      - name: 'Install ninja'
+        run: |
+          sudo apt-get update -y
+          sudo apt-get install -y ninja-build
+      - name: 'Download source archives'
+        uses: actions/download-artifact@v4
+        with:
+          name: sources
+          path: '${{ github.workspace }}'
+      - name: 'Untar ${{ needs.src.outputs.src-tar-gz }}'
+        id: tar
+        run: |
+          mkdir -p /tmp/tardir
+          tar -C /tmp/tardir -v -x -f "${{ github.workspace }}/${{ needs.src.outputs.src-tar-gz }}"
+          echo "path=/tmp/tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT
+      - name: 'Build Android prefab binary archive(s)'
+        id: releaser
+        run: |
+          python build-scripts/build-release.py \
+            --actions android \
+            --android-api 21 \
+            --android-ndk-home "${{ steps.setup-ndk.outputs.ndk-path }}" \
+            --commit ${{ inputs.commit }} \
+            --root "${{ steps.tar.outputs.path }}" \
+            --github \
+            --debug
+      - name: 'Store Android archive(s)'
+        uses: actions/upload-artifact@v4
+        with:
+          name: android
+          path: '${{ github.workspace }}/dist'
+
+  android-verify:
+    needs: [android, src]
+    runs-on: ubuntu-latest
+    steps:
+      - name: 'Set up Python'
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.11'
+      - uses: actions/setup-java@v4
+        with:
+          distribution: 'temurin'
+          java-version: '17'
+      - name: 'Download source archives'
+        uses: actions/download-artifact@v4
+        with:
+          name: sources
+          path: '${{ github.workspace }}'
+      - name: 'Download Android .aar archive'
+        uses: actions/download-artifact@v4
+        with:
+          name: android
+          path: '${{ github.workspace }}'
+      - name: 'Untar ${{ needs.src.outputs.src-tar-gz }}'
+        id: src
+        run: |
+          mkdir -p /tmp/tardir
+          tar -C /tmp/tardir -v -x -f "${{ github.workspace }}/${{ needs.src.outputs.src-tar-gz }}"
+          echo "path=/tmp/tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT
+      - name: 'Extract Android SDK from AAR'
+        id: sdk
+        run: |
+          cd /tmp
+          unzip "${{ github.workspace }}/${{ needs.android.outputs.android-aar }}"
+          python "${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}.aar" -o /tmp/PhysicsFS3-android
+          echo "prefix=/tmp/PhysicsFS3-android" >>$GITHUB_OUTPUT
+          echo "physicsfs-aar=/tmp/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}.aar" >>$GITHUB_OUTPUT
+      - name: 'Verify alignment of libphysfs.so (arm64-v8a/x86_64)'
+        run: |
+          set -e
+          ${{ steps.src.outputs.path }}/build-scripts/check_elf_alignment.sh ${{ steps.sdk.outputs.prefix }}/lib/arm64-v8a/libphysfs.so
+          ${{ steps.src.outputs.path }}/build-scripts/check_elf_alignment.sh ${{ steps.sdk.outputs.prefix }}/lib/x86_64/libphysfs.so
+      - name: 'CMake (configure + build) x86, x64, arm32, arm64'
+        run: |
+          set -e
+          android_abis="x86 x86_64 armeabi-v7a arm64-v8a"
+          for android_abi in ${android_abis}; do
+            echo "Configuring ${android_abi}..."
+            cmake -S "${{ steps.src.outputs.path }}/cmake/test" \
+              -DTEST_STATIC=FALSE \
+              -DCMAKE_PREFIX_PATH="${{ steps.sdk.outputs.prefix }}" \
+              -DCMAKE_TOOLCHAIN_FILE=${ANDROID_NDK_HOME}/build/cmake/android.toolchain.cmake \
+              -DANDROID_USE_LEGACY_TOOLCHAIN=0 \
+              -DANDROID_ABI=${android_abi} \
+              -DCMAKE_BUILD_TYPE=Release \
+              -B "${android_abi}"
+            echo "Building ${android_abi}..."
+            cmake --build "${android_abi}" --verbose
+          done

+ 1 - 0
.gitignore

@@ -1,3 +1,4 @@
+/dist
 cmake-build
 CMakeFiles/
 CMakeCache.txt

+ 6 - 0
CMakeLists.txt

@@ -285,6 +285,9 @@ if(PHYSFS_INSTALL)
             NAMESPACE PhysFS::
         )
         export(TARGETS physfs-shared NAMESPACE PhysFS:: FILE PhysFS-shared-targets.cmake)
+        if(MSVC)
+            SDL_install_pdb(physfs-shared "${CMAKE_INSTALL_BINDIR}")
+        endif()
     endif()
     if(TARGET physfs-static)
         install(TARGETS physfs-static EXPORT physfs-static-exports
@@ -298,6 +301,9 @@ if(PHYSFS_INSTALL)
             NAMESPACE PhysFS::
         )
         export(TARGETS physfs-static NAMESPACE PhysFS:: FILE PhysFS-static-targets.cmake)
+        if(MSVC)
+            SDL_install_pdb(physfs-static "${CMAKE_INSTALL_LIBDIR}")
+        endif()
     endif()
     if(TARGET test_physfs)
         install(TARGETS test_physfs RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})

+ 1572 - 0
build-scripts/build-release.py

@@ -0,0 +1,1572 @@
+#!/usr/bin/env python3
+
+"""
+This script is shared between SDL2, SDL3, and all satellite libraries.
+Don't specialize this script for doing project-specific modifications.
+Rather, modify release-info.json.
+"""
+
+import argparse
+import collections
+import dataclasses
+from collections.abc import Callable
+import contextlib
+import datetime
+import fnmatch
+import glob
+import io
+import json
+import logging
+import multiprocessing
+import os
+from pathlib import Path
+import platform
+import re
+import shlex
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+import textwrap
+import typing
+import zipfile
+
+
+logger = logging.getLogger(__name__)
+GIT_HASH_FILENAME = ".git-hash"
+REVISION_TXT = "REVISION.txt"
+
+RE_ILLEGAL_MINGW_LIBRARIES = re.compile(r"(?:lib)?(?:gcc|(?:std)?c[+][+]|(?:win)?pthread).*", flags=re.I)
+
+
+def safe_isotime_to_datetime(str_isotime: str) -> datetime.datetime:
+    try:
+        return datetime.datetime.fromisoformat(str_isotime)
+    except ValueError:
+        pass
+    logger.warning("Invalid iso time: %s", str_isotime)
+    if str_isotime[-6:-5] in ("+", "-"):
+        # Commits can have isotime with invalid timezone offset (e.g. "2021-07-04T20:01:40+32:00")
+        modified_str_isotime = str_isotime[:-6] + "+00:00"
+        try:
+            return datetime.datetime.fromisoformat(modified_str_isotime)
+        except ValueError:
+            pass
+    raise ValueError(f"Invalid isotime: {str_isotime}")
+
+
+def arc_join(*parts: list[str]) -> str:
+    assert all(p[:1] != "/" and p[-1:] != "/" for p in parts), f"None of {parts} may start or end with '/'"
+    return "/".join(p for p in parts if p)
+
+
+@dataclasses.dataclass(frozen=True)
+class VsArchPlatformConfig:
+    arch: str
+    configuration: str
+    platform: str
+
+    def extra_context(self):
+        return {
+            "ARCH": self.arch,
+            "CONFIGURATION": self.configuration,
+            "PLATFORM": self.platform,
+        }
+
+
+@contextlib.contextmanager
+def chdir(path):
+    original_cwd = os.getcwd()
+    try:
+        os.chdir(path)
+        yield
+    finally:
+        os.chdir(original_cwd)
+
+
+class Executer:
+    def __init__(self, root: Path, dry: bool=False):
+        self.root = root
+        self.dry = dry
+
+    def run(self, cmd, cwd=None, env=None):
+        logger.info("Executing args=%r", cmd)
+        sys.stdout.flush()
+        if not self.dry:
+            subprocess.check_call(cmd, cwd=cwd or self.root, env=env, text=True)
+
+    def check_output(self, cmd, cwd=None, dry_out=None, env=None, text=True):
+        logger.info("Executing args=%r", cmd)
+        sys.stdout.flush()
+        if self.dry:
+            return dry_out
+        return subprocess.check_output(cmd, cwd=cwd or self.root, env=env, text=text)
+
+
+class SectionPrinter:
+    @contextlib.contextmanager
+    def group(self, title: str):
+        print(f"{title}:")
+        yield
+
+
+class GitHubSectionPrinter(SectionPrinter):
+    def __init__(self):
+        super().__init__()
+        self.in_group = False
+
+    @contextlib.contextmanager
+    def group(self, title: str):
+        print(f"::group::{title}")
+        assert not self.in_group, "Can enter a group only once"
+        self.in_group = True
+        yield
+        self.in_group = False
+        print("::endgroup::")
+
+
+class VisualStudio:
+    def __init__(self, executer: Executer, year: typing.Optional[str]=None):
+        self.executer = executer
+        self.vsdevcmd = self.find_vsdevcmd(year)
+        self.msbuild = self.find_msbuild()
+
+    @property
+    def dry(self) -> bool:
+        return self.executer.dry
+
+    VS_YEAR_TO_VERSION = {
+        "2022": 17,
+        "2019": 16,
+        "2017": 15,
+        "2015": 14,
+        "2013": 12,
+    }
+
+    def find_vsdevcmd(self, year: typing.Optional[str]=None) -> typing.Optional[Path]:
+        vswhere_spec = ["-latest"]
+        if year is not None:
+            try:
+                version = self.VS_YEAR_TO_VERSION[year]
+            except KeyError:
+                logger.error("Invalid Visual Studio year")
+                return None
+            vswhere_spec.extend(["-version", f"[{version},{version+1})"])
+        vswhere_cmd = ["vswhere"] + vswhere_spec + ["-property", "installationPath"]
+        vs_install_path = Path(self.executer.check_output(vswhere_cmd, dry_out="/tmp").strip())
+        logger.info("VS install_path = %s", vs_install_path)
+        assert vs_install_path.is_dir(), "VS installation path does not exist"
+        vsdevcmd_path = vs_install_path / "Common7/Tools/vsdevcmd.bat"
+        logger.info("vsdevcmd path = %s", vsdevcmd_path)
+        if self.dry:
+            vsdevcmd_path.parent.mkdir(parents=True, exist_ok=True)
+            vsdevcmd_path.touch(exist_ok=True)
+        assert vsdevcmd_path.is_file(), "vsdevcmd.bat batch file does not exist"
+        return vsdevcmd_path
+
+    def find_msbuild(self) -> typing.Optional[Path]:
+        vswhere_cmd = ["vswhere", "-latest", "-requires", "Microsoft.Component.MSBuild", "-find", r"MSBuild\**\Bin\MSBuild.exe"]
+        msbuild_path = Path(self.executer.check_output(vswhere_cmd, dry_out="/tmp/MSBuild.exe").strip())
+        logger.info("MSBuild path = %s", msbuild_path)
+        if self.dry:
+            msbuild_path.parent.mkdir(parents=True, exist_ok=True)
+            msbuild_path.touch(exist_ok=True)
+        assert msbuild_path.is_file(), "MSBuild.exe does not exist"
+        return msbuild_path
+
+    def build(self, arch_platform: VsArchPlatformConfig, projects: list[Path]):
+        assert projects, "Need at least one project to build"
+
+        vsdev_cmd_str = f"\"{self.vsdevcmd}\" -arch={arch_platform.arch}"
+        msbuild_cmd_str = " && ".join([f"\"{self.msbuild}\" \"{project}\" /m /p:BuildInParallel=true /p:Platform={arch_platform.platform} /p:Configuration={arch_platform.configuration}" for project in projects])
+        bat_contents = f"{vsdev_cmd_str} && {msbuild_cmd_str}\n"
+        bat_path = Path(tempfile.gettempdir()) / "cmd.bat"
+        with bat_path.open("w") as f:
+            f.write(bat_contents)
+
+        logger.info("Running cmd.exe script (%s): %s", bat_path, bat_contents)
+        cmd = ["cmd.exe", "/D", "/E:ON", "/V:OFF", "/S", "/C", f"CALL {str(bat_path)}"]
+        self.executer.run(cmd)
+
+
+class Archiver:
+    def __init__(self, zip_path: typing.Optional[Path]=None, tgz_path: typing.Optional[Path]=None, txz_path: typing.Optional[Path]=None):
+        self._zip_files = []
+        self._tar_files = []
+        self._added_files = set()
+        if zip_path:
+            self._zip_files.append(zipfile.ZipFile(zip_path, "w", compression=zipfile.ZIP_DEFLATED))
+        if tgz_path:
+            self._tar_files.append(tarfile.open(tgz_path, "w:gz"))
+        if txz_path:
+            self._tar_files.append(tarfile.open(txz_path, "w:xz"))
+
+    @property
+    def added_files(self) -> set[str]:
+        return self._added_files
+
+    def add_file_data(self, arcpath: str, data: bytes, mode: int, time: datetime.datetime):
+        for zf in self._zip_files:
+            file_data_time = (time.year, time.month, time.day, time.hour, time.minute, time.second)
+            zip_info = zipfile.ZipInfo(filename=arcpath, date_time=file_data_time)
+            zip_info.external_attr = mode << 16
+            zip_info.compress_type = zipfile.ZIP_DEFLATED
+            zf.writestr(zip_info, data=data)
+        for tf in self._tar_files:
+            tar_info = tarfile.TarInfo(arcpath)
+            tar_info.type = tarfile.REGTYPE
+            tar_info.mode = mode
+            tar_info.size = len(data)
+            tar_info.mtime = int(time.timestamp())
+            tf.addfile(tar_info, fileobj=io.BytesIO(data))
+
+        self._added_files.add(arcpath)
+
+    def add_symlink(self, arcpath: str, target: str, time: datetime.datetime, files_for_zip):
+        logger.debug("Adding symlink (target=%r) -> %s", target, arcpath)
+        for zf in self._zip_files:
+            file_data_time = (time.year, time.month, time.day, time.hour, time.minute, time.second)
+            for f in files_for_zip:
+                zip_info = zipfile.ZipInfo(filename=f["arcpath"], date_time=file_data_time)
+                zip_info.external_attr = f["mode"] << 16
+                zip_info.compress_type = zipfile.ZIP_DEFLATED
+                zf.writestr(zip_info, data=f["data"])
+        for tf in self._tar_files:
+            tar_info = tarfile.TarInfo(arcpath)
+            tar_info.type = tarfile.SYMTYPE
+            tar_info.mode = 0o777
+            tar_info.mtime = int(time.timestamp())
+            tar_info.linkname = target
+            tf.addfile(tar_info)
+
+        self._added_files.update(f["arcpath"] for f in files_for_zip)
+
+    def add_git_hash(self, arcdir: str, commit: str, time: datetime.datetime):
+        arcpath = arc_join(arcdir, GIT_HASH_FILENAME)
+        data = f"{commit}\n".encode()
+        self.add_file_data(arcpath=arcpath, data=data, mode=0o100644, time=time)
+
+    def add_file_path(self, arcpath: str, path: Path):
+        assert path.is_file(), f"{path} should be a file"
+        logger.debug("Adding %s -> %s", path, arcpath)
+        for zf in self._zip_files:
+            zf.write(path, arcname=arcpath)
+        for tf in self._tar_files:
+            tf.add(path, arcname=arcpath)
+
+    def add_file_directory(self, arcdirpath: str, dirpath: Path):
+        assert dirpath.is_dir()
+        if arcdirpath and arcdirpath[-1:] != "/":
+            arcdirpath += "/"
+        for f in dirpath.iterdir():
+            if f.is_file():
+                arcpath = f"{arcdirpath}{f.name}"
+                logger.debug("Adding %s to %s", f, arcpath)
+                self.add_file_path(arcpath=arcpath, path=f)
+
+    def close(self):
+        # Archiver is intentionally made invalid after this function
+        for zf in self._zip_files:
+            zf.close()
+        for tf in self._tar_files:
+            tf.close()
+        del self._zip_files
+        self._zip_files = None
+        del self._tar_files
+        self._tar_files = None
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, type, value, traceback):
+        try:
+            self.close()
+        except RuntimeError as e:
+            print(e)
+
+
+class NodeInArchive:
+    def __init__(self, arcpath: str, path: typing.Optional[Path]=None, data: typing.Optional[bytes]=None, mode: typing.Optional[int]=None, symtarget: typing.Optional[str]=None, time: typing.Optional[datetime.datetime]=None, directory: bool=False):
+        self.arcpath = arcpath
+        self.path = path
+        self.data = data
+        self.mode = mode
+        self.symtarget = symtarget
+        self.time = time
+        self.directory = directory
+
+    @classmethod
+    def from_fs(cls, arcpath: str, path: Path, mode: int=0o100644, time: typing.Optional[datetime.datetime]=None) -> "NodeInArchive":
+        if time is None:
+            time = datetime.datetime.fromtimestamp(os.stat(path).st_mtime)
+        return cls(arcpath=arcpath, path=path, mode=mode)
+
+    @classmethod
+    def from_data(cls, arcpath: str, data: bytes, time: datetime.datetime) -> "NodeInArchive":
+        return cls(arcpath=arcpath, data=data, time=time, mode=0o100644)
+
+    @classmethod
+    def from_text(cls, arcpath: str, text: str, time: datetime.datetime) -> "NodeInArchive":
+        return cls.from_data(arcpath=arcpath, data=text.encode(), time=time)
+
+    @classmethod
+    def from_symlink(cls, arcpath: str, symtarget: str) -> "NodeInArchive":
+        return cls(arcpath=arcpath, symtarget=symtarget)
+
+    @classmethod
+    def from_directory(cls, arcpath: str) -> "NodeInArchive":
+        return cls(arcpath=arcpath, directory=True)
+
+    def __repr__(self) -> str:
+        return f"<{type(self).__name__}:arcpath={self.arcpath},path='{str(self.path)}',len(data)={len(self.data) if self.data else 'n/a'},directory={self.directory},symtarget={self.symtarget}>"
+
+
+def configure_file(path: Path, context: dict[str, str]) -> bytes:
+    text = path.read_text()
+    return configure_text(text, context=context).encode()
+
+
+def configure_text(text: str, context: dict[str, str]) -> str:
+    original_text = text
+    for txt, repl in context.items():
+        text = text.replace(f"@<@{txt}@>@", repl)
+    success = all(thing not in text for thing in ("@<@", "@>@"))
+    if not success:
+        raise ValueError(f"Failed to configure {repr(original_text)}")
+    return text
+
+
+def configure_text_list(text_list: list[str], context: dict[str, str]) -> list[str]:
+    return [configure_text(text=e, context=context) for e in text_list]
+
+
+class ArchiveFileTree:
+    def __init__(self):
+        self._tree: dict[str, NodeInArchive] = {}
+
+    def add_file(self, file: NodeInArchive):
+        self._tree[file.arcpath] = file
+
+    def __iter__(self) -> typing.Iterable[NodeInArchive]:
+        yield from self._tree.values()
+
+    def __contains__(self, value: str) -> bool:
+        return value in self._tree
+
+    def get_latest_mod_time(self) -> datetime.datetime:
+        return max(item.time for item in self._tree.values() if item.time)
+
+    def add_to_archiver(self, archive_base: str, archiver: Archiver):
+        remaining_symlinks = set()
+        added_files = dict()
+
+        def calculate_symlink_target(s: NodeInArchive) -> str:
+            dest_dir = os.path.dirname(s.arcpath)
+            if dest_dir:
+                dest_dir += "/"
+            target = dest_dir + s.symtarget
+            while True:
+                new_target, n = re.subn(r"([^/]+/+[.]{2}/)", "", target)
+                target = new_target
+                if not n:
+                    break
+            return target
+
+        # Add files in first pass
+        for arcpath, node in self._tree.items():
+            assert node is not None, f"{arcpath} -> node"
+            if node.data is not None:
+                archiver.add_file_data(arcpath=arc_join(archive_base, arcpath), data=node.data, time=node.time, mode=node.mode)
+                assert node.arcpath is not None, f"{node=}"
+                added_files[node.arcpath] = node
+            elif node.path is not None:
+                archiver.add_file_path(arcpath=arc_join(archive_base, arcpath), path=node.path)
+                assert node.arcpath is not None, f"{node=}"
+                added_files[node.arcpath] = node
+            elif node.symtarget is not None:
+                remaining_symlinks.add(node)
+            elif node.directory:
+                pass
+            else:
+                raise ValueError(f"Invalid Archive Node: {repr(node)}")
+
+        assert None not in added_files
+
+        # Resolve symlinks in second pass: zipfile does not support symlinks, so add files to zip archive
+        while True:
+            if not remaining_symlinks:
+                break
+            symlinks_this_time = set()
+            extra_added_files = {}
+            for symlink in remaining_symlinks:
+                symlink_files_for_zip = {}
+                symlink_target_path = calculate_symlink_target(symlink)
+                if symlink_target_path in added_files:
+                    symlink_files_for_zip[symlink.arcpath] = added_files[symlink_target_path]
+                else:
+                    symlink_target_path_slash = symlink_target_path + "/"
+                    for added_file in added_files:
+                        if added_file.startswith(symlink_target_path_slash):
+                            path_in_symlink = symlink.arcpath + "/" + added_file.removeprefix(symlink_target_path_slash)
+                            symlink_files_for_zip[path_in_symlink] = added_files[added_file]
+                if symlink_files_for_zip:
+                    symlinks_this_time.add(symlink)
+                    extra_added_files.update(symlink_files_for_zip)
+                    files_for_zip = [{"arcpath": f"{archive_base}/{sym_path}", "data": sym_info.data, "mode": sym_info.mode} for sym_path, sym_info in symlink_files_for_zip.items()]
+                    archiver.add_symlink(arcpath=f"{archive_base}/{symlink.arcpath}", target=symlink.symtarget, time=symlink.time, files_for_zip=files_for_zip)
+            # if not symlinks_this_time:
+            #     logger.info("files added: %r", set(path for path in added_files.keys()))
+            assert symlinks_this_time, f"No targets found for symlinks: {remaining_symlinks}"
+            remaining_symlinks.difference_update(symlinks_this_time)
+            added_files.update(extra_added_files)
+
+    def add_directory_tree(self, arc_dir: str, path: Path, time: datetime.datetime):
+        assert path.is_dir()
+        for files_dir, _, filenames in os.walk(path):
+            files_dir_path = Path(files_dir)
+            rel_files_path = files_dir_path.relative_to(path)
+            for filename in filenames:
+                self.add_file(NodeInArchive.from_fs(arcpath=arc_join(arc_dir, str(rel_files_path), filename), path=files_dir_path / filename, time=time))
+
+    def _add_files_recursively(self, arc_dir: str, paths: list[Path], time: datetime.datetime):
+        logger.debug(f"_add_files_recursively({arc_dir=} {paths=})")
+        for path in paths:
+            arcpath = arc_join(arc_dir, path.name)
+            if path.is_file():
+                logger.debug("Adding %s as %s", path, arcpath)
+                self.add_file(NodeInArchive.from_fs(arcpath=arcpath, path=path, time=time))
+            elif path.is_dir():
+                self._add_files_recursively(arc_dir=arc_join(arc_dir, path.name), paths=list(path.iterdir()), time=time)
+            else:
+                raise ValueError(f"Unsupported file type to add recursively: {path}")
+
+    def add_file_mapping(self, arc_dir: str, file_mapping: dict[str, list[str]], file_mapping_root: Path, context: dict[str, str], time: datetime.datetime):
+        for meta_rel_destdir, meta_file_globs in file_mapping.items():
+            rel_destdir = configure_text(meta_rel_destdir, context=context)
+            assert "@" not in rel_destdir, f"archive destination should not contain an @ after configuration ({repr(meta_rel_destdir)}->{repr(rel_destdir)})"
+            for meta_file_glob in meta_file_globs:
+                file_glob = configure_text(meta_file_glob, context=context)
+                assert "@" not in file_glob, f"archive glob should not contain an @ after configuration ({repr(meta_file_glob)}->{repr(file_glob)})"
+                if ":" in file_glob:
+                    original_path, new_filename = file_glob.rsplit(":", 1)
+                    assert ":" not in original_path, f"Too many ':' in {repr(file_glob)}"
+                    assert "/" not in new_filename, f"New filename cannot contain a '/' in {repr(file_glob)}"
+                    path = file_mapping_root / original_path
+                    arcpath = arc_join(arc_dir, rel_destdir, new_filename)
+                    if path.suffix == ".in":
+                        data = configure_file(path, context=context)
+                        logger.debug("Adding processed %s -> %s", path, arcpath)
+                        self.add_file(NodeInArchive.from_data(arcpath=arcpath, data=data, time=time))
+                    else:
+                        logger.debug("Adding %s -> %s", path, arcpath)
+                        self.add_file(NodeInArchive.from_fs(arcpath=arcpath, path=path, time=time))
+                else:
+                    relative_file_paths = glob.glob(file_glob, root_dir=file_mapping_root)
+                    assert relative_file_paths, f"Glob '{file_glob}' does not match any file"
+                    self._add_files_recursively(arc_dir=arc_join(arc_dir, rel_destdir), paths=[file_mapping_root / p for p in relative_file_paths], time=time)
+
+
+class SourceCollector:
+    # TreeItem = collections.namedtuple("TreeItem", ("path", "mode", "data", "symtarget", "directory", "time"))
+    def __init__(self, root: Path, commit: str, filter: typing.Optional[Callable[[str], bool]], executer: Executer):
+        self.root = root
+        self.commit = commit
+        self.filter = filter
+        self.executer = executer
+
+    def get_archive_file_tree(self) -> ArchiveFileTree:
+        git_archive_args = ["git", "archive", "--format=tar.gz", self.commit, "-o", "/dev/stdout"]
+        logger.info("Executing args=%r", git_archive_args)
+        contents_tgz = subprocess.check_output(git_archive_args, cwd=self.root, text=False)
+        tar_archive = tarfile.open(fileobj=io.BytesIO(contents_tgz), mode="r:gz")
+        filenames = tuple(m.name for m in tar_archive if (m.isfile() or m.issym()))
+
+        file_times = self._get_file_times(paths=filenames)
+        git_contents = ArchiveFileTree()
+        for ti in tar_archive:
+            if self.filter and not self.filter(ti.name):
+                continue
+            data = None
+            symtarget = None
+            directory = False
+            file_time = None
+            if ti.isfile():
+                contents_file = tar_archive.extractfile(ti.name)
+                data = contents_file.read()
+                file_time = file_times[ti.name]
+            elif ti.issym():
+                symtarget = ti.linkname
+                file_time = file_times[ti.name]
+            elif ti.isdir():
+                directory = True
+            else:
+                raise ValueError(f"{ti.name}: unknown type")
+            node = NodeInArchive(arcpath=ti.name, data=data, mode=ti.mode, symtarget=symtarget, time=file_time, directory=directory)
+            git_contents.add_file(node)
+        return git_contents
+
+    def _get_file_times(self, paths: tuple[str, ...]) -> dict[str, datetime.datetime]:
+        dry_out = textwrap.dedent("""\
+            time=2024-03-14T15:40:25-07:00
+
+            M\tCMakeLists.txt
+        """)
+        git_log_out = self.executer.check_output(["git", "log", "--name-status", '--pretty=time=%cI', self.commit], dry_out=dry_out, cwd=self.root).splitlines(keepends=False)
+        current_time = None
+        set_paths = set(paths)
+        path_times: dict[str, datetime.datetime] = {}
+        for line in git_log_out:
+            if not line:
+                continue
+            if line.startswith("time="):
+                current_time = safe_isotime_to_datetime(line.removeprefix("time="))
+                continue
+            mod_type, file_paths = line.split(maxsplit=1)
+            assert current_time is not None
+            for file_path in file_paths.split("\t"):
+                if file_path in set_paths and file_path not in path_times:
+                    path_times[file_path] = current_time
+
+        # FIXME: find out why some files are not shown in "git log"
+        # assert set(path_times.keys()) == set_paths
+        if set(path_times.keys()) != set_paths:
+            found_times = set(path_times.keys())
+            paths_without_times = set_paths.difference(found_times)
+            logger.warning("No times found for these paths: %s", paths_without_times)
+            max_time = max(time for time in path_times.values())
+            for path in paths_without_times:
+                path_times[path] = max_time
+
+        return path_times
+
+
+class AndroidApiVersion:
+    def __init__(self, name: str, ints: tuple[int, ...]):
+        self.name = name
+        self.ints = ints
+
+    def __repr__(self) -> str:
+        return f"<{self.name} ({'.'.join(str(v) for v in self.ints)})>"
+
+ANDROID_ABI_EXTRA_LINK_OPTIONS = {}
+
+class Releaser:
+    def __init__(self, release_info: dict, commit: str, revision: str, root: Path, dist_path: Path, section_printer: SectionPrinter, executer: Executer, cmake_generator: str, deps_path: Path, overwrite: bool, github: bool, fast: bool):
+        self.release_info = release_info
+        self.project = release_info["name"]
+        self.version = self.extract_sdl_version(root=root, release_info=release_info)
+        self.root = root
+        self.commit = commit
+        self.revision = revision
+        self.dist_path = dist_path
+        self.section_printer = section_printer
+        self.executer = executer
+        self.cmake_generator = cmake_generator
+        self.cpu_count = multiprocessing.cpu_count()
+        self.deps_path = deps_path
+        self.overwrite = overwrite
+        self.github = github
+        self.fast = fast
+        self.arc_time = datetime.datetime.now()
+
+        self.artifacts: dict[str, Path] = {}
+
+    def get_context(self, extra_context: typing.Optional[dict[str, str]]=None) -> dict[str, str]:
+        ctx = {
+            "PROJECT_NAME": self.project,
+            "PROJECT_VERSION": self.version,
+            "PROJECT_COMMIT": self.commit,
+            "PROJECT_REVISION": self.revision,
+            "PROJECT_ROOT": str(self.root),
+        }
+        if extra_context:
+            ctx.update(extra_context)
+        return ctx
+
+    @property
+    def dry(self) -> bool:
+        return self.executer.dry
+
+    def prepare(self):
+        logger.debug("Creating dist folder")
+        self.dist_path.mkdir(parents=True, exist_ok=True)
+
+    @classmethod
+    def _path_filter(cls, path: str) -> bool:
+        if ".gitmodules" in path:
+            return True
+        if path.startswith(".git"):
+            return False
+        return True
+
+    @classmethod
+    def _external_repo_path_filter(cls, path: str) -> bool:
+        if not cls._path_filter(path):
+            return False
+        if path.startswith("test/") or path.startswith("tests/"):
+            return False
+        return True
+
+    def create_source_archives(self) -> None:
+        source_collector = SourceCollector(root=self.root, commit=self.commit, executer=self.executer, filter=self._path_filter)
+        print(f"Collecting sources of {self.project}...")
+        archive_tree: ArchiveFileTree = source_collector.get_archive_file_tree()
+        latest_mod_time = archive_tree.get_latest_mod_time()
+        archive_tree.add_file(NodeInArchive.from_text(arcpath=REVISION_TXT, text=f"{self.revision}\n", time=latest_mod_time))
+        archive_tree.add_file(NodeInArchive.from_text(arcpath=f"{GIT_HASH_FILENAME}", text=f"{self.commit}\n", time=latest_mod_time))
+        archive_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["source"].get("files", {}), file_mapping_root=self.root, context=self.get_context(), time=latest_mod_time)
+
+        if "Makefile.am" in archive_tree:
+            patched_time = latest_mod_time + datetime.timedelta(minutes=1)
+            print(f"Makefile.am detected -> touching aclocal.m4, */Makefile.in, configure")
+            for node_data in archive_tree:
+                arc_name = os.path.basename(node_data.arcpath)
+                arc_name_we, arc_name_ext = os.path.splitext(arc_name)
+                if arc_name in ("aclocal.m4", "configure", "Makefile.in"):
+                    print(f"Bumping time of {node_data.arcpath}")
+                    node_data.time = patched_time
+
+        archive_base = f"{self.project}-{self.version}"
+        zip_path = self.dist_path / f"{archive_base}.zip"
+        tgz_path = self.dist_path / f"{archive_base}.tar.gz"
+        txz_path = self.dist_path / f"{archive_base}.tar.xz"
+
+        logger.info("Creating zip/tgz/txz source archives ...")
+        if self.dry:
+            zip_path.touch()
+            tgz_path.touch()
+            txz_path.touch()
+        else:
+            with Archiver(zip_path=zip_path, tgz_path=tgz_path, txz_path=txz_path) as archiver:
+                print(f"Adding source files of {self.project}...")
+                archive_tree.add_to_archiver(archive_base=archive_base, archiver=archiver)
+
+                for extra_repo in self.release_info["source"].get("extra-repos", []):
+                    extra_repo_root = self.root / extra_repo
+                    assert (extra_repo_root / ".git").exists(), f"{extra_repo_root} must be a git repo"
+                    extra_repo_commit = self.executer.check_output(["git", "rev-parse", "HEAD"], dry_out=f"gitsha-extra-repo-{extra_repo}", cwd=extra_repo_root).strip()
+                    extra_repo_source_collector = SourceCollector(root=extra_repo_root, commit=extra_repo_commit, executer=self.executer, filter=self._external_repo_path_filter)
+                    print(f"Collecting sources of {extra_repo} ...")
+                    extra_repo_archive_tree = extra_repo_source_collector.get_archive_file_tree()
+                    print(f"Adding source files of {extra_repo} ...")
+                    extra_repo_archive_tree.add_to_archiver(archive_base=f"{archive_base}/{extra_repo}", archiver=archiver)
+
+            for file in self.release_info["source"]["checks"]:
+                assert f"{archive_base}/{file}" in archiver.added_files, f"'{archive_base}/{file}' must exist"
+
+        logger.info("... done")
+
+        self.artifacts["src-zip"] = zip_path
+        self.artifacts["src-tar-gz"] = tgz_path
+        self.artifacts["src-tar-xz"] = txz_path
+
+        if not self.dry:
+            with tgz_path.open("r+b") as f:
+                # Zero the embedded timestamp in the gzip'ed tarball
+                f.seek(4, 0)
+                f.write(b"\x00\x00\x00\x00")
+
+    def create_dmg(self, configuration: str="Release") -> None:
+        dmg_in = self.root / self.release_info["dmg"]["path"]
+        xcode_project = self.root / self.release_info["dmg"]["project"]
+        assert xcode_project.is_dir(), f"{xcode_project} must be a directory"
+        assert (xcode_project / "project.pbxproj").is_file(), f"{xcode_project} must contain project.pbxproj"
+        if not self.fast:
+            dmg_in.unlink(missing_ok=True)
+        build_xcconfig = self.release_info["dmg"].get("build-xcconfig")
+        if build_xcconfig:
+            shutil.copy(self.root / build_xcconfig, xcode_project.parent / "build.xcconfig")
+
+        xcode_scheme = self.release_info["dmg"].get("scheme")
+        xcode_target = self.release_info["dmg"].get("target")
+        assert xcode_scheme or xcode_target, "dmg needs scheme or target"
+        assert not (xcode_scheme and xcode_target), "dmg cannot have both scheme and target set"
+        if xcode_scheme:
+            scheme_or_target = "-scheme"
+            target_like = xcode_scheme
+        else:
+            scheme_or_target = "-target"
+            target_like = xcode_target
+        self.executer.run(["xcodebuild", "ONLY_ACTIVE_ARCH=NO", "-project", xcode_project, scheme_or_target, target_like, "-configuration", configuration])
+        if self.dry:
+            dmg_in.parent.mkdir(parents=True, exist_ok=True)
+            dmg_in.touch()
+
+        assert dmg_in.is_file(), f"{self.project}.dmg was not created by xcodebuild"
+
+        dmg_out = self.dist_path / f"{self.project}-{self.version}.dmg"
+        shutil.copy(dmg_in, dmg_out)
+        self.artifacts["dmg"] = dmg_out
+
+    @property
+    def git_hash_data(self) -> bytes:
+        return f"{self.commit}\n".encode()
+
+    def verify_mingw_library(self, triplet: str, path: Path):
+        objdump_output = self.executer.check_output([f"{triplet}-objdump", "-p", str(path)])
+        libraries = re.findall(r"DLL Name: ([^\n]+)", objdump_output)
+        logger.info("%s (%s) libraries: %r", path, triplet, libraries)
+        illegal_libraries = list(filter(RE_ILLEGAL_MINGW_LIBRARIES.match, libraries))
+        logger.error("Detected 'illegal' libraries: %r", illegal_libraries)
+        if illegal_libraries:
+            raise Exception(f"{path} links to illegal libraries: {illegal_libraries}")
+
    def create_mingw_archives(self) -> None:
        """Cross-compile Windows (MinGW) binaries and package the devel archives.

        Builds the project for each configured arch with either autotools or
        CMake (exactly one of the two must be present in release_info["mingw"]),
        installs each build into its own prefix, verifies the produced DLLs,
        and bundles everything into zip/tar.gz/tar.xz development archives.
        """
        build_type = "Release"
        build_parent_dir = self.root / "build-mingw"
        # Maps from the arch names used in release-info to GNU arch names and
        # full target triplets (arm64 entries are present but disabled).
        ARCH_TO_GNU_ARCH = {
            # "arm64": "aarch64",
            "x86": "i686",
            "x64": "x86_64",
        }
        ARCH_TO_TRIPLET = {
            # "arm64": "aarch64-w64-mingw32",
            "x86": "i686-w64-mingw32",
            "x64": "x86_64-w64-mingw32",
        }

        # Copy of the environment that gets augmented per-dependency/per-arch
        # and passed to every configure/build/install step below.
        new_env = dict(os.environ)

        cmake_prefix_paths = []
        mingw_deps_path = self.deps_path / "mingw-deps"

        if "dependencies" in self.release_info["mingw"]:
            # Re-extract and install dependencies from scratch into one prefix
            # per target triplet.
            shutil.rmtree(mingw_deps_path, ignore_errors=True)
            mingw_deps_path.mkdir()

            for triplet in ARCH_TO_TRIPLET.values():
                (mingw_deps_path / triplet).mkdir()

            # Strip the top-level "SDL..." directory from dependency tarballs
            # so their contents land directly in the extract dir.
            def extract_filter(member: tarfile.TarInfo, path: str, /):
                if member.name.startswith("SDL"):
                    member.name = "/".join(Path(member.name).parts[1:])
                return member
            # NOTE(review): iterates the top-level "dependencies" keys but then
            # indexes release_info["mingw"]["dependencies"][dep] — assumes the
            # same keys exist in both mappings; confirm against release-info.
            for dep in self.release_info.get("dependencies", {}):
                extract_path = mingw_deps_path / f"extract-{dep}"
                extract_path.mkdir()
                with chdir(extract_path):
                    tar_path = self.deps_path / glob.glob(self.release_info["mingw"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)[0]
                    logger.info("Extracting %s to %s", tar_path, mingw_deps_path)
                    assert tar_path.suffix in (".gz", ".xz")
                    with tarfile.open(tar_path, mode=f"r:{tar_path.suffix.strip('.')}") as tarf:
                        tarf.extractall(filter=extract_filter)
                    # Run the dependency's configured install command once per
                    # target triplet, with ARCH/TRIPLET/PREFIX substituted in.
                    for arch, triplet in ARCH_TO_TRIPLET.items():
                        install_cmd = self.release_info["mingw"]["dependencies"][dep]["install-command"]
                        extra_configure_data = {
                            "ARCH": ARCH_TO_GNU_ARCH[arch],
                            "TRIPLET": triplet,
                            "PREFIX": str(mingw_deps_path / triplet),
                        }
                        install_cmd = configure_text(install_cmd, context=self.get_context(extra_configure_data))
                        self.executer.run(shlex.split(install_cmd), cwd=str(extract_path))

            # NOTE(review): `triplet` here is the leftover value from the loops
            # above, so only the LAST triplet's bin/ and lib/pkgconfig dirs are
            # checked and added to PATH/PKG_CONFIG_PATH — confirm intended.
            dep_binpath = mingw_deps_path / triplet / "bin"
            assert dep_binpath.is_dir(), f"{dep_binpath} for PATH should exist"
            dep_pkgconfig = mingw_deps_path / triplet / "lib/pkgconfig"
            assert dep_pkgconfig.is_dir(), f"{dep_pkgconfig} for PKG_CONFIG_PATH should exist"

            new_env["PATH"] = os.pathsep.join([str(dep_binpath), new_env["PATH"]])
            new_env["PKG_CONFIG_PATH"] = str(dep_pkgconfig)
            cmake_prefix_paths.append(mingw_deps_path)

        # -ffile-prefix-map makes builds reproducible regardless of checkout path.
        new_env["CFLAGS"] = f"-O2 -ffile-prefix-map={self.root}=/src/{self.project}"
        new_env["CXXFLAGS"] = f"-O2 -ffile-prefix-map={self.root}=/src/{self.project}"

        # Exactly one build system must be configured.
        assert any(system in self.release_info["mingw"] for system in ("autotools", "cmake"))
        assert not all(system in self.release_info["mingw"] for system in ("autotools", "cmake"))

        mingw_archs = set()
        arc_root = f"{self.project}-{self.version}"
        archive_file_tree = ArchiveFileTree()

        if "autotools" in self.release_info["mingw"]:
            for arch in self.release_info["mingw"]["autotools"]["archs"]:
                triplet = ARCH_TO_TRIPLET[arch]
                new_env["CC"] = f"{triplet}-gcc"
                new_env["CXX"] = f"{triplet}-g++"
                new_env["RC"] = f"{triplet}-windres"

                # Each arch may be built by only one build system.
                assert arch not in mingw_archs
                mingw_archs.add(arch)

                build_path = build_parent_dir / f"build-{triplet}"
                install_path = build_parent_dir / f"install-{triplet}"
                shutil.rmtree(install_path, ignore_errors=True)
                build_path.mkdir(parents=True, exist_ok=True)
                context = self.get_context({
                    "ARCH": arch,
                    "DEP_PREFIX": str(mingw_deps_path / triplet),
                })
                extra_args = configure_text_list(text_list=self.release_info["mingw"]["autotools"]["args"], context=context)

                with self.section_printer.group(f"Configuring MinGW {triplet} (autotools)"):
                    # A leftover '@' means a template placeholder wasn't substituted.
                    assert "@" not in " ".join(extra_args), f"@ should not be present in extra arguments ({extra_args})"
                    self.executer.run([
                        self.root / "configure",
                        f"--prefix={install_path}",
                        f"--includedir=${{prefix}}/include",
                        f"--libdir=${{prefix}}/lib",
                        f"--bindir=${{prefix}}/bin",
                        f"--host={triplet}",
                        f"--build=x86_64-none-linux-gnu",
                        "CFLAGS=-O2",
                        "CXXFLAGS=-O2",
                        "LDFLAGS=-Wl,-s",
                    ] + extra_args, cwd=build_path, env=new_env)
                with self.section_printer.group(f"Build MinGW {triplet} (autotools)"):
                    self.executer.run(["make", f"-j{self.cpu_count}"], cwd=build_path, env=new_env)
                with self.section_printer.group(f"Install MinGW {triplet} (autotools)"):
                    self.executer.run(["make", "install"], cwd=build_path, env=new_env)
                # Refuse DLLs that import forbidden runtime libraries.
                self.verify_mingw_library(triplet=ARCH_TO_TRIPLET[arch], path=install_path / "bin" / f"{self.project}.dll")
                archive_file_tree.add_directory_tree(arc_dir=arc_join(arc_root, triplet), path=install_path, time=self.arc_time)

                print("Recording arch-dependent extra files for MinGW development archive ...")
                extra_context = {
                    "TRIPLET": ARCH_TO_TRIPLET[arch],
                }
                archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["mingw"]["autotools"].get("files", {}), file_mapping_root=self.root, context=self.get_context(extra_context=extra_context), time=self.arc_time)

        if "cmake" in self.release_info["mingw"]:
            # "args": shared/static comes from the configured args as-is;
            # "both": configure+build twice, once shared and once static.
            assert self.release_info["mingw"]["cmake"]["shared-static"] in ("args", "both")
            for arch in self.release_info["mingw"]["cmake"]["archs"]:
                triplet = ARCH_TO_TRIPLET[arch]
                new_env["CC"] = f"{triplet}-gcc"
                new_env["CXX"] = f"{triplet}-g++"
                new_env["RC"] = f"{triplet}-windres"

                assert arch not in mingw_archs
                mingw_archs.add(arch)

                context = self.get_context({
                    "ARCH": arch,
                    "DEP_PREFIX": str(mingw_deps_path / triplet),
                })
                extra_args = configure_text_list(text_list=self.release_info["mingw"]["cmake"]["args"], context=context)

                build_path = build_parent_dir / f"build-{triplet}"
                install_path = build_parent_dir / f"install-{triplet}"
                shutil.rmtree(install_path, ignore_errors=True)
                build_path.mkdir(parents=True, exist_ok=True)
                if self.release_info["mingw"]["cmake"]["shared-static"] == "args":
                    args_for_shared_static = ([], )
                elif self.release_info["mingw"]["cmake"]["shared-static"] == "both":
                    args_for_shared_static = (["-DBUILD_SHARED_LIBS=ON"], ["-DBUILD_SHARED_LIBS=OFF"])
                # Both variants install into the same prefix; the second pass
                # adds its artifacts on top of the first.
                for arg_for_shared_static in args_for_shared_static:
                    with self.section_printer.group(f"Configuring MinGW {triplet} (CMake)"):
                        assert "@" not in " ".join(extra_args), f"@ should not be present in extra arguments ({extra_args})"
                        self.executer.run([
                            f"cmake",
                            f"-S", str(self.root), "-B", str(build_path),
                            f"-DCMAKE_BUILD_TYPE={build_type}",
                            f'''-DCMAKE_C_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
                            f'''-DCMAKE_CXX_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
                            f"-DCMAKE_PREFIX_PATH={mingw_deps_path / triplet}",
                            f"-DCMAKE_INSTALL_PREFIX={install_path}",
                            f"-DCMAKE_INSTALL_INCLUDEDIR=include",
                            f"-DCMAKE_INSTALL_LIBDIR=lib",
                            f"-DCMAKE_INSTALL_BINDIR=bin",
                            f"-DCMAKE_INSTALL_DATAROOTDIR=share",
                            f"-DCMAKE_TOOLCHAIN_FILE={self.root}/build-scripts/cmake-toolchain-mingw64-{ARCH_TO_GNU_ARCH[arch]}.cmake",
                            f"-G{self.cmake_generator}",
                        ] + extra_args + ([] if self.fast else ["--fresh"]) + arg_for_shared_static, cwd=build_path, env=new_env)
                    with self.section_printer.group(f"Build MinGW {triplet} (CMake)"):
                        self.executer.run(["cmake", "--build", str(build_path), "--verbose", "--config", build_type], cwd=build_path, env=new_env)
                    with self.section_printer.group(f"Install MinGW {triplet} (CMake)"):
                        self.executer.run(["cmake", "--install", str(build_path)], cwd=build_path, env=new_env)
                self.verify_mingw_library(triplet=ARCH_TO_TRIPLET[arch], path=install_path / "bin" / f"{self.project}.dll")
                archive_file_tree.add_directory_tree(arc_dir=arc_join(arc_root, triplet), path=install_path, time=self.arc_time)

                print("Recording arch-dependent extra files for MinGW development archive ...")
                extra_context = {
                    "TRIPLET": ARCH_TO_TRIPLET[arch],
                }
                archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["mingw"]["cmake"].get("files", {}), file_mapping_root=self.root, context=self.get_context(extra_context=extra_context), time=self.arc_time)
                print("... done")

        # Arch-independent extra files (READMEs, licenses, ...).
        print("Recording extra files for MinGW development archive ...")
        archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["mingw"].get("files", {}), file_mapping_root=self.root, context=self.get_context(), time=self.arc_time)
        print("... done")

        print("Creating zip/tgz/txz development archives ...")
        zip_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.zip"
        tgz_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.tar.gz"
        txz_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.tar.xz"

        with Archiver(zip_path=zip_path, tgz_path=tgz_path, txz_path=txz_path) as archiver:
            try:
                archive_file_tree.add_to_archiver(archive_base="", archiver=archiver)
                archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time)
            except RuntimeError as e:
                print(e)
        print("... done")

        self.artifacts["mingw-devel-zip"] = zip_path
        self.artifacts["mingw-devel-tar-gz"] = tgz_path
        self.artifacts["mingw-devel-tar-xz"] = txz_path
+
+    def _detect_android_api(self, android_home: str) -> typing.Optional[AndroidApiVersion]:
+        platform_dirs = list(Path(p) for p in glob.glob(f"{android_home}/platforms/android-*"))
+        re_platform = re.compile("^android-([0-9]+)(?:-ext([0-9]+))?$")
+        platform_versions: list[AndroidApiVersion] = []
+        for platform_dir in platform_dirs:
+            logger.debug("Found Android Platform SDK: %s", platform_dir)
+            if not (platform_dir / "android.jar").is_file():
+                logger.debug("Skipping SDK, missing android.jar")
+                continue
+            if m:= re_platform.match(platform_dir.name):
+                platform_versions.append(AndroidApiVersion(name=platform_dir.name, ints=(int(m.group(1)), int(m.group(2) or 0))))
+        platform_versions.sort(key=lambda v: v.ints)
+        logger.info("Available platform versions: %s", platform_versions)
+        platform_versions = list(filter(lambda v: v.ints >= self._android_api_minimum.ints, platform_versions))
+        logger.info("Valid platform versions (>=%s): %s", self._android_api_minimum.ints, platform_versions)
+        if not platform_versions:
+            return None
+        android_api = platform_versions[0]
+        logger.info("Selected API version %s", android_api)
+        return android_api
+
+    def _get_prefab_json_text(self) -> str:
+        prefab_name = self.release_info["android"].get("name", self.project)
+        return textwrap.dedent(f"""\
+            {{
+                "schema_version": 2,
+                "name": "{prefab_name}",
+                "version": "{self.version}",
+                "dependencies": []
+            }}
+        """)
+
+    def _get_prefab_module_json_text(self, library_name: typing.Optional[str], export_libraries: list[str]) -> str:
+        for lib in export_libraries:
+            assert isinstance(lib, str), f"{lib} must be a string"
+        module_json_dict = {
+            "export_libraries": export_libraries,
+        }
+        if library_name:
+            module_json_dict["library_name"] = f"lib{library_name}"
+        return json.dumps(module_json_dict, indent=4)
+
+    @property
+    def _android_api_minimum(self) -> AndroidApiVersion:
+        value = self.release_info["android"]["api-minimum"]
+        if isinstance(value, int):
+            ints = (value, )
+        elif isinstance(value, str):
+            ints = tuple(split("."))
+        else:
+            raise ValueError("Invalid android.api-minimum: must be X or X.Y")
+        match len(ints):
+            case 1: name = f"android-{ints[0]}"
+            case 2: name = f"android-{ints[0]}-ext-{ints[1]}"
+            case _: raise ValueError("Invalid android.api-minimum: must be X or X.Y")
+        return AndroidApiVersion(name=name, ints=ints)
+
+    @property
+    def _android_api_target(self):
+        return self.release_info["android"]["api-target"]
+
+    @property
+    def _android_ndk_minimum(self):
+        return self.release_info["android"]["ndk-minimum"]
+
+    def _get_prefab_abi_json_text(self, abi: str, cpp: bool, shared: bool) -> str:
+        abi_json_dict = {
+            "abi": abi,
+            "api": self._android_api_minimum.ints[0],
+            "ndk": self._android_ndk_minimum,
+            "stl": "c++_shared" if cpp else "none",
+            "static": not shared,
+        }
+        return json.dumps(abi_json_dict, indent=4)
+
+    def _get_android_manifest_text(self) -> str:
+        return textwrap.dedent(f"""\
+            <manifest
+                xmlns:android="http://schemas.android.com/apk/res/android"
+                package="org.libsdl.android.{self.project}" android:versionCode="1"
+                android:versionName="1.0">
+                <uses-sdk android:minSdkVersion="{self._android_api_minimum.ints[0]}"
+                          android:targetSdkVersion="{self._android_api_target}" />
+            </manifest>
+        """)
+
+    def create_android_archives(self, android_api: int, android_home: Path, android_ndk_home: Path) -> None:
+        cmake_toolchain_file = Path(android_ndk_home) / "build/cmake/android.toolchain.cmake"
+        if not cmake_toolchain_file.exists():
+            logger.error("CMake toolchain file does not exist (%s)", cmake_toolchain_file)
+            raise SystemExit(1)
+        aar_path = self.root / "build-android" / f"{self.project}-{self.version}.aar"
+        android_dist_path = self.dist_path / f"{self.project}-devel-{self.version}-android.zip"
+        android_abis = self.release_info["android"]["abis"]
+        java_jars_added = False
+        module_data_added = False
+        android_deps_path = self.deps_path / "android-deps"
+        shutil.rmtree(android_deps_path, ignore_errors=True)
+
+        for dep, depinfo in self.release_info["android"].get("dependencies", {}).items():
+            dep_devel_zip = self.deps_path / glob.glob(depinfo["artifact"], root_dir=self.deps_path)[0]
+
+            dep_extract_path = self.deps_path / f"extract/android/{dep}"
+            shutil.rmtree(dep_extract_path, ignore_errors=True)
+            dep_extract_path.mkdir(parents=True, exist_ok=True)
+
+            with self.section_printer.group(f"Extracting Android dependency {dep} ({dep_devel_zip})"):
+                with zipfile.ZipFile(dep_devel_zip, "r") as zf:
+                    zf.extractall(dep_extract_path)
+
+                dep_devel_aar = dep_extract_path / glob.glob("*.aar", root_dir=dep_extract_path)[0]
+                self.executer.run([sys.executable, str(dep_devel_aar), "-o", str(android_deps_path)])
+
+        for module_name, module_info in self.release_info["android"]["modules"].items():
+            assert "type" in module_info and module_info["type"] in ("interface", "library"), f"module {module_name} must have a valid type"
+
+        aar_file_tree = ArchiveFileTree()
+        android_devel_file_tree = ArchiveFileTree()
+
+        for android_abi in android_abis:
+            extra_link_options = ANDROID_ABI_EXTRA_LINK_OPTIONS.get(android_abi, "")
+            with self.section_printer.group(f"Building for Android {android_api} {android_abi}"):
+                build_dir = self.root / "build-android" / f"{android_abi}-build"
+                install_dir = self.root / "install-android" / f"{android_abi}-install"
+                shutil.rmtree(install_dir, ignore_errors=True)
+                assert not install_dir.is_dir(), f"{install_dir} should not exist prior to build"
+                build_type = "Release"
+                cmake_args = [
+                    "cmake",
+                    "-S", str(self.root),
+                    "-B", str(build_dir),
+                    # NDK 21e does not support -ffile-prefix-map
+                    # f'''-DCMAKE_C_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
+                    # f'''-DCMAKE_CXX_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''',
+                    f"-DANDROID_USE_LEGACY_TOOLCHAIN=0",
+                    f"-DCMAKE_EXE_LINKER_FLAGS={extra_link_options}",
+                    f"-DCMAKE_SHARED_LINKER_FLAGS={extra_link_options}",
+                    f"-DCMAKE_TOOLCHAIN_FILE={cmake_toolchain_file}",
+                    f"-DCMAKE_PREFIX_PATH={str(android_deps_path)}",
+                    f"-DCMAKE_FIND_ROOT_PATH_MODE_PACKAGE=BOTH",
+                    f"-DANDROID_HOME={android_home}",
+                    f"-DANDROID_PLATFORM={android_api}",
+                    f"-DANDROID_ABI={android_abi}",
+                    "-DCMAKE_POSITION_INDEPENDENT_CODE=ON",
+                    f"-DCMAKE_INSTALL_PREFIX={install_dir}",
+                    "-DCMAKE_INSTALL_INCLUDEDIR=include ",
+                    "-DCMAKE_INSTALL_LIBDIR=lib",
+                    "-DCMAKE_INSTALL_DATAROOTDIR=share",
+                    f"-DCMAKE_BUILD_TYPE={build_type}",
+                    f"-G{self.cmake_generator}",
+                ] + self.release_info["android"]["cmake"]["args"] + ([] if self.fast else ["--fresh"])
+                build_args = [
+                    "cmake",
+                    "--build", str(build_dir),
+                    "--verbose",
+                    "--config", build_type,
+                ]
+                install_args = [
+                    "cmake",
+                    "--install", str(build_dir),
+                    "--config", build_type,
+                ]
+                self.executer.run(cmake_args)
+                self.executer.run(build_args)
+                self.executer.run(install_args)
+
+                for module_name, module_info in self.release_info["android"]["modules"].items():
+                    arcdir_prefab_module = f"prefab/modules/{module_name}"
+                    if module_info["type"] == "library":
+                        library = install_dir / module_info["library"]
+                        assert library.suffix in (".so", ".a")
+                        assert library.is_file(), f"CMake should have built library '{library}' for module {module_name}"
+                        arcdir_prefab_libs = f"{arcdir_prefab_module}/libs/android.{android_abi}"
+                        aar_file_tree.add_file(NodeInArchive.from_fs(arcpath=f"{arcdir_prefab_libs}/{library.name}", path=library, time=self.arc_time))
+                        aar_file_tree.add_file(NodeInArchive.from_text(arcpath=f"{arcdir_prefab_libs}/abi.json", text=self._get_prefab_abi_json_text(abi=android_abi, cpp=False, shared=library.suffix == ".so"), time=self.arc_time))
+
+                    if not module_data_added:
+                        library_name = None
+                        if module_info["type"] == "library":
+                            library_name = Path(module_info["library"]).stem.removeprefix("lib")
+                        export_libraries = module_info.get("export-libraries", [])
+                        aar_file_tree.add_file(NodeInArchive.from_text(arcpath=arc_join(arcdir_prefab_module, "module.json"), text=self._get_prefab_module_json_text(library_name=library_name, export_libraries=export_libraries), time=self.arc_time))
+                        arcdir_prefab_include = f"prefab/modules/{module_name}/include"
+                        if "includes" in module_info:
+                            aar_file_tree.add_file_mapping(arc_dir=arcdir_prefab_include, file_mapping=module_info["includes"], file_mapping_root=install_dir, context=self.get_context(), time=self.arc_time)
+                        else:
+                            aar_file_tree.add_file(NodeInArchive.from_text(arcpath=arc_join(arcdir_prefab_include, ".keep"), text="\n", time=self.arc_time))
+                module_data_added = True
+
+                if not java_jars_added:
+                    java_jars_added = True
+                    if "jars" in self.release_info["android"]:
+                        classes_jar_path = install_dir / configure_text(text=self.release_info["android"]["jars"]["classes"], context=self.get_context())
+                        sources_jar_path = install_dir / configure_text(text=self.release_info["android"]["jars"]["sources"], context=self.get_context())
+                        doc_jar_path = install_dir / configure_text(text=self.release_info["android"]["jars"]["doc"], context=self.get_context())
+                        assert classes_jar_path.is_file(), f"CMake should have compiled the java sources and archived them into a JAR ({classes_jar_path})"
+                        assert sources_jar_path.is_file(), f"CMake should have archived the java sources into a JAR ({sources_jar_path})"
+                        assert doc_jar_path.is_file(), f"CMake should have archived javadoc into a JAR ({doc_jar_path})"
+
+                        aar_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes.jar", path=classes_jar_path, time=self.arc_time))
+                        aar_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes-sources.jar", path=sources_jar_path, time=self.arc_time))
+                        aar_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes-doc.jar", path=doc_jar_path, time=self.arc_time))
+
+        assert ("jars" in self.release_info["android"] and java_jars_added) or "jars" not in self.release_info["android"], "Must have archived java JAR archives"
+
+        aar_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["android"]["aar-files"], file_mapping_root=self.root, context=self.get_context(), time=self.arc_time)
+
+        aar_file_tree.add_file(NodeInArchive.from_text(arcpath="prefab/prefab.json", text=self._get_prefab_json_text(), time=self.arc_time))
+        aar_file_tree.add_file(NodeInArchive.from_text(arcpath="AndroidManifest.xml", text=self._get_android_manifest_text(), time=self.arc_time))
+
+        with Archiver(zip_path=aar_path) as archiver:
+            aar_file_tree.add_to_archiver(archive_base="", archiver=archiver)
+            archiver.add_git_hash(arcdir="", commit=self.commit, time=self.arc_time)
+
+        android_devel_file_tree.add_file(NodeInArchive.from_fs(arcpath=aar_path.name, path=aar_path))
+        android_devel_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["android"]["files"], file_mapping_root=self.root, context=self.get_context(), time=self.arc_time)
+        with Archiver(zip_path=android_dist_path) as archiver:
+            android_devel_file_tree.add_to_archiver(archive_base="", archiver=archiver)
+            archiver.add_git_hash(arcdir="", commit=self.commit, time=self.arc_time)
+
+        self.artifacts[f"android-aar"] = android_dist_path
+
+    def download_dependencies(self):
+        shutil.rmtree(self.deps_path, ignore_errors=True)
+        self.deps_path.mkdir(parents=True)
+
+        if self.github:
+            with open(os.environ["GITHUB_OUTPUT"], "a") as f:
+                f.write(f"dep-path={self.deps_path.absolute()}\n")
+
+        for dep, depinfo in self.release_info.get("dependencies", {}).items():
+            startswith = depinfo["startswith"]
+            dep_repo = depinfo["repo"]
+            dep_string_data = self.executer.check_output(["gh", "-R", dep_repo, "release", "list", "--exclude-drafts", "--exclude-pre-releases", "--json", "name,createdAt,tagName", "--jq", f'[.[]|select(.name|startswith("{startswith}"))]|max_by(.createdAt)']).strip()
+            dep_data = json.loads(dep_string_data)
+            dep_tag = dep_data["tagName"]
+            dep_version = dep_data["name"]
+            logger.info("Download dependency %s version %s (tag=%s) ", dep, dep_version, dep_tag)
+            self.executer.run(["gh", "-R", dep_repo, "release", "download", dep_tag], cwd=self.deps_path)
+            if self.github:
+                with open(os.environ["GITHUB_OUTPUT"], "a") as f:
+                    f.write(f"dep-{dep.lower()}-version={dep_version}\n")
+
+    def verify_dependencies(self):
+        for dep, depinfo in self.release_info.get("dependencies", {}).items():
+            if "mingw" in self.release_info:
+                mingw_matches = glob.glob(self.release_info["mingw"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)
+                assert len(mingw_matches) == 1, f"Exactly one archive matches mingw {dep} dependency: {mingw_matches}"
+            if "dmg" in self.release_info:
+                dmg_matches = glob.glob(self.release_info["dmg"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)
+                assert len(dmg_matches) == 1, f"Exactly one archive matches dmg {dep} dependency: {dmg_matches}"
+            if "msvc" in self.release_info:
+                msvc_matches = glob.glob(self.release_info["msvc"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)
+                assert len(msvc_matches) == 1, f"Exactly one archive matches msvc {dep} dependency: {msvc_matches}"
+            if "android" in self.release_info:
+                android_matches = glob.glob(self.release_info["android"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)
+                assert len(android_matches) == 1, f"Exactly one archive matches msvc {dep} dependency: {android_matches}"
+
+    @staticmethod
+    def _arch_to_vs_platform(arch: str, configuration: str="Release") -> VsArchPlatformConfig:
+        ARCH_TO_VS_PLATFORM = {
+            "x86": VsArchPlatformConfig(arch="x86", platform="Win32", configuration=configuration),
+            "x64": VsArchPlatformConfig(arch="x64", platform="x64", configuration=configuration),
+            "arm64": VsArchPlatformConfig(arch="arm64", platform="ARM64", configuration=configuration),
+        }
+        return ARCH_TO_VS_PLATFORM[arch]
+
+    def build_msvc(self):
+        with self.section_printer.group("Find Visual Studio"):
+            vs = VisualStudio(executer=self.executer)
+        for arch in self.release_info["msvc"].get("msbuild", {}).get("archs", []):
+            self._build_msvc_msbuild(arch_platform=self._arch_to_vs_platform(arch=arch), vs=vs)
+        if "cmake" in self.release_info["msvc"]:
+            deps_path = self.root / "msvc-deps"
+            shutil.rmtree(deps_path, ignore_errors=True)
+            dep_roots = []
+            for dep, depinfo in self.release_info["msvc"].get("dependencies", {}).items():
+                dep_extract_path = deps_path / f"extract-{dep}"
+                msvc_zip = self.deps_path / glob.glob(depinfo["artifact"], root_dir=self.deps_path)[0]
+                with zipfile.ZipFile(msvc_zip, "r") as zf:
+                    zf.extractall(dep_extract_path)
+                contents_msvc_zip = glob.glob(str(dep_extract_path / "*"))
+                assert len(contents_msvc_zip) == 1, f"There must be exactly one root item in the root directory of {dep}"
+                dep_roots.append(contents_msvc_zip[0])
+
+            for arch in self.release_info["msvc"].get("cmake", {}).get("archs", []):
+                self._build_msvc_cmake(arch_platform=self._arch_to_vs_platform(arch=arch), dep_roots=dep_roots)
+        with self.section_printer.group("Create SDL VC development zip"):
+            self._build_msvc_devel()
+
+    def _build_msvc_msbuild(self, arch_platform: VsArchPlatformConfig, vs: VisualStudio):
+        # Build the per-architecture MSVC binary zip using the MSBuild projects:
+        # 1) extract pre-downloaded dependency archives into the source tree,
+        # 2) rebuild the MSBuild projects, 3) zip the configured file mappings.
+        platform_context = self.get_context(arch_platform.extra_context())
+        for dep, depinfo in self.release_info["msvc"].get("dependencies", {}).items():
+            # [0]: assumes the dependency glob matches a downloaded archive;
+            # raises IndexError otherwise (run the 'download' action first).
+            msvc_zip = self.deps_path / glob.glob(depinfo["artifact"], root_dir=self.deps_path)[0]
+
+            src_globs = [configure_text(instr["src"], context=platform_context) for instr in depinfo["copy"]]
+            with zipfile.ZipFile(msvc_zip, "r") as zf:
+                for member in zf.namelist():
+                    # Drop the archive's top-level directory before glob matching.
+                    member_path = "/".join(Path(member).parts[1:])
+                    for src_i, src_glob in enumerate(src_globs):
+                        if fnmatch.fnmatch(member_path, src_glob):
+                            dst = (self.root / configure_text(depinfo["copy"][src_i]["dst"], context=platform_context)).resolve() / Path(member_path).name
+                            zip_data = zf.read(member)
+                            if dst.exists():
+                                # Overwriting byte-identical content is silently allowed;
+                                # differing content requires --overwrite.
+                                identical = False
+                                if dst.is_file():
+                                    orig_bytes = dst.read_bytes()
+                                    if orig_bytes == zip_data:
+                                        identical = True
+                                if not identical:
+                                    logger.warning("Extracting dependency %s, will cause %s to be overwritten", dep, dst)
+                                    if not self.overwrite:
+                                        raise RuntimeError("Run with --overwrite to allow overwriting")
+                            logger.debug("Extracting %s -> %s", member, dst)
+
+                            dst.parent.mkdir(exist_ok=True, parents=True)
+                            dst.write_bytes(zip_data)
+
+        # Files expected in the zip = prebuilt files + files produced by MSBuild.
+        prebuilt_paths = set(self.root / full_prebuilt_path for prebuilt_path in self.release_info["msvc"]["msbuild"].get("prebuilt", []) for full_prebuilt_path in glob.glob(configure_text(prebuilt_path, context=platform_context), root_dir=self.root))
+        msbuild_paths = set(self.root / configure_text(f, context=platform_context) for file_mapping in (self.release_info["msvc"]["msbuild"]["files-lib"], self.release_info["msvc"]["msbuild"]["files-devel"]) for files_list in file_mapping.values() for f in files_list)
+        assert prebuilt_paths.issubset(msbuild_paths), f"msvc.msbuild.prebuilt must be a subset of (msvc.msbuild.files-lib, msvc.msbuild.files-devel)"
+        built_paths = msbuild_paths.difference(prebuilt_paths)
+        logger.info("MSbuild builds these files, to be included in the package: %s", built_paths)
+        if not self.fast:
+            # Delete stale outputs so a successful build is required to recreate them.
+            for b in built_paths:
+                b.unlink(missing_ok=True)
+
+        rel_projects: list[str] = self.release_info["msvc"]["msbuild"]["projects"]
+        projects = list(self.root / p for p in rel_projects)
+
+        # Optionally drop a Directory.Build.props next to every project,
+        # always removing any pre-existing one first.
+        directory_build_props_src_relpath = self.release_info["msvc"]["msbuild"].get("directory-build-props")
+        for project in projects:
+            dir_b_props = project.parent / "Directory.Build.props"
+            dir_b_props.unlink(missing_ok = True)
+            if directory_build_props_src_relpath:
+                src = self.root / directory_build_props_src_relpath
+                logger.debug("Copying %s -> %s", src, dir_b_props)
+                shutil.copy(src=src, dst=dir_b_props)
+
+        with self.section_printer.group(f"Build {arch_platform.arch} VS binary"):
+            vs.build(arch_platform=arch_platform, projects=projects)
+
+        if self.dry:
+            # Dry runs don't invoke MSBuild for real; fake the expected outputs.
+            for b in built_paths:
+                b.parent.mkdir(parents=True, exist_ok=True)
+                b.touch()
+
+        for b in built_paths:
+            assert b.is_file(), f"{b} has not been created"
+            # NOTE(review): this mkdir/touch mirrors the dry-run branch above; on a
+            # real build it only bumps mtimes of files MSBuild just wrote — confirm intended.
+            b.parent.mkdir(parents=True, exist_ok=True)
+            b.touch()
+
+        zip_path = self.dist_path / f"{self.project}-{self.version}-win32-{arch_platform.arch}.zip"
+        zip_path.unlink(missing_ok=True)
+
+        logger.info("Collecting files...")
+        archive_file_tree = ArchiveFileTree()
+        archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["msbuild"]["files-lib"], file_mapping_root=self.root, context=platform_context, time=self.arc_time)
+        archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["files-lib"], file_mapping_root=self.root, context=platform_context, time=self.arc_time)
+
+        logger.info("Writing to %s", zip_path)
+        with Archiver(zip_path=zip_path) as archiver:
+            arc_root = f""
+            archive_file_tree.add_to_archiver(archive_base=arc_root, archiver=archiver)
+            archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time)
+        self.artifacts[f"VC-{arch_platform.arch}"] = zip_path
+
+        for p in built_paths:
+            assert p.is_file(), f"{p} should exist"
+
+    def _arch_platform_to_build_path(self, arch_platform: VsArchPlatformConfig) -> Path:
+        return self.root / f"build-vs-{arch_platform.arch}"
+
+    def _arch_platform_to_install_path(self, arch_platform: VsArchPlatformConfig) -> Path:
+        return self._arch_platform_to_build_path(arch_platform) / "prefix"
+
+    def _build_msvc_cmake(self, arch_platform: VsArchPlatformConfig, dep_roots: list[Path]):
+        # Build the per-architecture MSVC binary zip with CMake: configure,
+        # build and install into a throw-away prefix, then zip the mapped files.
+        build_path = self._arch_platform_to_build_path(arch_platform)
+        install_path = self._arch_platform_to_install_path(arch_platform)
+        platform_context = self.get_context(extra_context=arch_platform.extra_context())
+
+        build_type = "Release"
+        # Used below when adding msvc.files-lib; presumably duplicates
+        # arch_platform.extra_context() — TODO confirm the two stay in sync.
+        extra_context = {
+            "ARCH": arch_platform.arch,
+            "PLATFORM": arch_platform.platform,
+        }
+
+        built_paths = set(install_path / configure_text(f, context=platform_context) for file_mapping in (self.release_info["msvc"]["cmake"]["files-lib"], self.release_info["msvc"]["cmake"]["files-devel"]) for files_list in file_mapping.values() for f in files_list)
+        logger.info("CMake builds these files, to be included in the package: %s", built_paths)
+        if not self.fast:
+            # Delete stale outputs so a successful build is required to recreate them.
+            for b in built_paths:
+                b.unlink(missing_ok=True)
+
+        shutil.rmtree(install_path, ignore_errors=True)
+        build_path.mkdir(parents=True, exist_ok=True)
+        with self.section_printer.group(f"Configure VC CMake project for {arch_platform.arch}"):
+            self.executer.run([
+                "cmake", "-S", str(self.root), "-B", str(build_path),
+                "-A", arch_platform.platform,
+                "-DCMAKE_INSTALL_BINDIR=bin",
+                "-DCMAKE_INSTALL_DATAROOTDIR=share",
+                "-DCMAKE_INSTALL_INCLUDEDIR=include",
+                "-DCMAKE_INSTALL_LIBDIR=lib",
+                f"-DCMAKE_BUILD_TYPE={build_type}",
+                f"-DCMAKE_INSTALL_PREFIX={install_path}",
+                # MSVC debug information format flags are selected by an abstraction
+                "-DCMAKE_POLICY_DEFAULT_CMP0141=NEW",
+                # MSVC debug information format
+                "-DCMAKE_MSVC_DEBUG_INFORMATION_FORMAT=ProgramDatabase",
+                # Linker flags for executables
+                "-DCMAKE_EXE_LINKER_FLAGS=-INCREMENTAL:NO -DEBUG -OPT:REF -OPT:ICF",
+                # Linker flag for shared libraries
+                "-DCMAKE_SHARED_LINKER_FLAGS=-INCREMENTAL:NO -DEBUG -OPT:REF -OPT:ICF",
+                # MSVC runtime library flags are selected by an abstraction
+                "-DCMAKE_POLICY_DEFAULT_CMP0091=NEW",
+                # Use statically linked runtime (-MT) (ideally, should be "MultiThreaded$<$<CONFIG:Debug>:Debug>")
+                "-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded",
+                f"-DCMAKE_PREFIX_PATH={';'.join(str(s) for s in dep_roots)}",
+            ] + self.release_info["msvc"]["cmake"]["args"] + ([] if self.fast else ["--fresh"]))
+
+        with self.section_printer.group(f"Build VC CMake project for {arch_platform.arch}"):
+            self.executer.run(["cmake", "--build", str(build_path), "--verbose", "--config", build_type])
+        with self.section_printer.group(f"Install VC CMake project for {arch_platform.arch}"):
+            self.executer.run(["cmake", "--install", str(build_path), "--config", build_type])
+
+        if self.dry:
+            # Dry runs don't invoke CMake for real; fake the expected outputs.
+            for b in built_paths:
+                b.parent.mkdir(parents=True, exist_ok=True)
+                b.touch()
+
+        zip_path = self.dist_path / f"{self.project}-{self.version}-win32-{arch_platform.arch}.zip"
+        zip_path.unlink(missing_ok=True)
+
+        logger.info("Collecting files...")
+        archive_file_tree = ArchiveFileTree()
+        archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["cmake"]["files-lib"], file_mapping_root=install_path, context=platform_context, time=self.arc_time)
+        archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["files-lib"], file_mapping_root=self.root, context=self.get_context(extra_context=extra_context), time=self.arc_time)
+
+        logger.info("Creating %s", zip_path)
+        with Archiver(zip_path=zip_path) as archiver:
+            arc_root = f""
+            archive_file_tree.add_to_archiver(archive_base=arc_root, archiver=archiver)
+            archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time)
+
+        for p in built_paths:
+            assert p.is_file(), f"{p} should exist"
+
+    def _build_msvc_devel(self) -> None:
+        # Create the combined multi-arch VC development zip (headers, import
+        # libraries, docs) from whatever msbuild and/or cmake builds produced.
+        zip_path = self.dist_path / f"{self.project}-devel-{self.version}-VC.zip"
+        arc_root = f"{self.project}-{self.version}"
+
+        def copy_files_devel(ctx):
+            # Shared files-devel entries are added once per architecture;
+            # presumably duplicate identical entries are tolerated — TODO confirm.
+            archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["msvc"]["files-devel"], file_mapping_root=self.root, context=ctx, time=self.arc_time)
+
+
+        logger.info("Collecting files...")
+        archive_file_tree = ArchiveFileTree()
+        if "msbuild" in self.release_info["msvc"]:
+            for arch in self.release_info["msvc"]["msbuild"]["archs"]:
+                arch_platform = self._arch_to_vs_platform(arch=arch)
+                platform_context = self.get_context(arch_platform.extra_context())
+                archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["msvc"]["msbuild"]["files-devel"], file_mapping_root=self.root, context=platform_context, time=self.arc_time)
+                copy_files_devel(ctx=platform_context)
+        if "cmake" in self.release_info["msvc"]:
+            for arch in self.release_info["msvc"]["cmake"]["archs"]:
+                arch_platform = self._arch_to_vs_platform(arch=arch)
+                platform_context = self.get_context(arch_platform.extra_context())
+                # cmake outputs live in the per-arch install prefix, not the source root.
+                archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["msvc"]["cmake"]["files-devel"], file_mapping_root=self._arch_platform_to_install_path(arch_platform), context=platform_context, time=self.arc_time)
+                copy_files_devel(ctx=platform_context)
+
+        with Archiver(zip_path=zip_path) as archiver:
+            archive_file_tree.add_to_archiver(archive_base="", archiver=archiver)
+            archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time)
+        self.artifacts["VC-devel"] = zip_path
+
+    @classmethod
+    def extract_sdl_version(cls, root: Path, release_info: dict) -> str:
+        with open(root / release_info["version"]["file"], "r") as f:
+            text = f.read()
+        major = next(re.finditer(release_info["version"]["re_major"], text, flags=re.M)).group(1)
+        minor = next(re.finditer(release_info["version"]["re_minor"], text, flags=re.M)).group(1)
+        micro = next(re.finditer(release_info["version"]["re_micro"], text, flags=re.M)).group(1)
+        return f"{major}.{minor}.{micro}"
+
+
+def main(argv=None) -> int:
+    if sys.version_info < (3, 11):
+        logger.error("This script needs at least python 3.11")
+        return 1
+
+    parser = argparse.ArgumentParser(allow_abbrev=False, description="Create SDL release artifacts")
+    parser.add_argument("--root", metavar="DIR", type=Path, default=Path(__file__).absolute().parents[1], help="Root of project")
+    parser.add_argument("--release-info", metavar="JSON", dest="path_release_info", type=Path, default=Path(__file__).absolute().parent / "release-info.json", help="Path of release-info.json")
+    parser.add_argument("--dependency-folder", metavar="FOLDER", dest="deps_path", type=Path, default="deps", help="Directory containing pre-built archives of dependencies (will be removed when downloading archives)")
+    parser.add_argument("--out", "-o", metavar="DIR", dest="dist_path", type=Path, default="dist", help="Output directory")
+    parser.add_argument("--github", action="store_true", help="Script is running on a GitHub runner")
+    parser.add_argument("--commit", default="HEAD", help="Git commit/tag of which a release should be created")
+    parser.add_argument("--actions", choices=["download", "source", "android", "mingw", "msvc", "dmg"], required=True, nargs="+", dest="actions", help="What to do?")
+    parser.set_defaults(loglevel=logging.INFO)
+    parser.add_argument('--vs-year', dest="vs_year", help="Visual Studio year")
+    parser.add_argument('--android-api', dest="android_api", help="Android API version")
+    parser.add_argument('--android-home', dest="android_home", default=os.environ.get("ANDROID_HOME"), help="Android Home folder")
+    parser.add_argument('--android-ndk-home', dest="android_ndk_home", default=os.environ.get("ANDROID_NDK_HOME"), help="Android NDK Home folder")
+    parser.add_argument('--cmake-generator', dest="cmake_generator", default="Ninja", help="CMake Generator")
+    parser.add_argument('--debug', action='store_const', const=logging.DEBUG, dest="loglevel", help="Print script debug information")
+    parser.add_argument('--dry-run', action='store_true', dest="dry", help="Don't execute anything")
+    parser.add_argument('--force', action='store_true', dest="force", help="Ignore a non-clean git tree")
+    parser.add_argument('--overwrite', action='store_true', dest="overwrite", help="Allow potentially overwriting other projects")
+    parser.add_argument('--fast', action='store_true', dest="fast", help="Don't do a rebuild")
+
+    args = parser.parse_args(argv)
+    logging.basicConfig(level=args.loglevel, format='[%(levelname)s] %(message)s')
+    args.deps_path = args.deps_path.absolute()
+    args.dist_path = args.dist_path.absolute()
+    args.root = args.root.absolute()
+    args.dist_path = args.dist_path.absolute()
+    if args.dry:
+        args.dist_path = args.dist_path / "dry"
+
+    if args.github:
+        section_printer: SectionPrinter = GitHubSectionPrinter()
+    else:
+        section_printer = SectionPrinter()
+
+    if args.github and "GITHUB_OUTPUT" not in os.environ:
+        os.environ["GITHUB_OUTPUT"] = "/tmp/github_output.txt"
+
+    executer = Executer(root=args.root, dry=args.dry)
+
+    root_git_hash_path = args.root / GIT_HASH_FILENAME
+    root_is_maybe_archive = root_git_hash_path.is_file()
+    if root_is_maybe_archive:
+        logger.warning("%s detected: Building from archive", GIT_HASH_FILENAME)
+        archive_commit = root_git_hash_path.read_text().strip()
+        if args.commit != archive_commit:
+            logger.warning("Commit argument is %s, but archive commit is %s. Using %s.", args.commit, archive_commit, archive_commit)
+        args.commit = archive_commit
+        revision = (args.root / REVISION_TXT).read_text().strip()
+    else:
+        args.commit = executer.check_output(["git", "rev-parse", args.commit], dry_out="e5812a9fd2cda317b503325a702ba3c1c37861d9").strip()
+        revision = executer.check_output(["git", "describe", "--always", "--tags", "--long", args.commit], dry_out="preview-3.1.3-96-g9512f2144").strip()
+        logger.info("Using commit %s", args.commit)
+
+    try:
+        with args.path_release_info.open() as f:
+            release_info = json.load(f)
+    except FileNotFoundError:
+        logger.error(f"Could not find {args.path_release_info}")
+
+    releaser = Releaser(
+        release_info=release_info,
+        commit=args.commit,
+        revision=revision,
+        root=args.root,
+        dist_path=args.dist_path,
+        executer=executer,
+        section_printer=section_printer,
+        cmake_generator=args.cmake_generator,
+        deps_path=args.deps_path,
+        overwrite=args.overwrite,
+        github=args.github,
+        fast=args.fast,
+    )
+
+    if root_is_maybe_archive:
+        logger.warning("Building from archive. Skipping clean git tree check.")
+    else:
+        porcelain_status = executer.check_output(["git", "status", "--ignored", "--porcelain"], dry_out="\n").strip()
+        if porcelain_status:
+            print(porcelain_status)
+            logger.warning("The tree is dirty! Do not publish any generated artifacts!")
+            if not args.force:
+                raise Exception("The git repo contains modified and/or non-committed files. Run with --force to ignore.")
+
+    if args.fast:
+        logger.warning("Doing fast build! Do not publish generated artifacts!")
+
+    with section_printer.group("Arguments"):
+        print(f"project          = {releaser.project}")
+        print(f"version          = {releaser.version}")
+        print(f"revision         = {revision}")
+        print(f"commit           = {args.commit}")
+        print(f"out              = {args.dist_path}")
+        print(f"actions          = {args.actions}")
+        print(f"dry              = {args.dry}")
+        print(f"force            = {args.force}")
+        print(f"overwrite        = {args.overwrite}")
+        print(f"cmake_generator  = {args.cmake_generator}")
+
+    releaser.prepare()
+
+    if "download" in args.actions:
+        releaser.download_dependencies()
+
+    if set(args.actions).intersection({"msvc", "mingw", "android"}):
+        print("Verifying presence of dependencies (run 'download' action to download) ...")
+        releaser.verify_dependencies()
+        print("... done")
+
+    if "source" in args.actions:
+        if root_is_maybe_archive:
+            raise Exception("Cannot build source archive from source archive")
+        with section_printer.group("Create source archives"):
+            releaser.create_source_archives()
+
+    if "dmg" in args.actions:
+        if platform.system() != "Darwin" and not args.dry:
+            parser.error("framework artifact(s) can only be built on Darwin")
+
+        releaser.create_dmg()
+
+    if "msvc" in args.actions:
+        if platform.system() != "Windows" and not args.dry:
+            parser.error("msvc artifact(s) can only be built on Windows")
+        releaser.build_msvc()
+
+    if "mingw" in args.actions:
+        releaser.create_mingw_archives()
+
+    if "android" in args.actions:
+        if args.android_home is None or not Path(args.android_home).is_dir():
+            parser.error("Invalid $ANDROID_HOME or --android-home: must be a directory containing the Android SDK")
+        if args.android_ndk_home is None or not Path(args.android_ndk_home).is_dir():
+            parser.error("Invalid $ANDROID_NDK_HOME or --android-ndk-home: must be a directory containing the Android NDK")
+        if args.android_api is None:
+            with section_printer.group("Detect Android APIS"):
+                args.android_api = releaser._detect_android_api(android_home=args.android_home)
+        else:
+            try:
+                android_api_ints = tuple(int(v) for v in args.android_api.split("."))
+                match len(android_api_ints):
+                    case 1: android_api_name = f"android-{android_api_ints[0]}"
+                    case 2: android_api_name = f"android-{android_api_ints[0]}-ext-{android_api_ints[1]}"
+                    case _: raise ValueError
+            except ValueError:
+                logger.error("Invalid --android-api, must be a 'X' or 'X.Y' version")
+            args.android_api = AndroidApiVersion(ints=android_api_ints, name=android_api_name)
+        if args.android_api is None:
+            parser.error("Invalid --android-api, and/or could not be detected")
+        android_api_path = Path(args.android_home) / f"platforms/{args.android_api.name}"
+        if not android_api_path.is_dir():
+            logger.warning(f"Android API directory does not exist ({android_api_path})")
+        with section_printer.group("Android arguments"):
+            print(f"android_home     = {args.android_home}")
+            print(f"android_ndk_home = {args.android_ndk_home}")
+            print(f"android_api      = {args.android_api}")
+        releaser.create_android_archives(
+            android_api=args.android_api.ints[0],
+            android_home=args.android_home,
+            android_ndk_home=args.android_ndk_home,
+        )
+    with section_printer.group("Summary"):
+        print(f"artifacts = {releaser.artifacts}")
+
+    if args.github:
+        with open(os.environ["GITHUB_OUTPUT"], "a") as f:
+            f.write(f"project={releaser.project}\n")
+            f.write(f"version={releaser.version}\n")
+            for k, v in releaser.artifacts.items():
+                f.write(f"{k}={v.name}\n")
+    return 0
+
+
+if __name__ == "__main__":
+    raise SystemExit(main())

+ 127 - 0
build-scripts/check_elf_alignment.sh

@@ -0,0 +1,127 @@
+#!/bin/bash
+progname="${0##*/}"
+progname="${progname%.sh}"
+
+# usage: check_elf_alignment.sh [path to *.so files|path to *.apk]
+
+cleanup_trap() {
+  # EXIT trap: remove the temporary extraction directory, if one was created.
+  # ${tmp} is quoted to be safe with spaces; two [ ] tests replace the
+  # obsolescent "-a" operator.
+  if [ -n "${tmp}" ] && [ -d "${tmp}" ]; then
+    rm -rf "${tmp}"
+  fi
+  exit $1
+}
+
+usage() {
+  # Print a short help text; "ALIGNED" means the ELF LOAD segments use
+  # 16 KB (or larger) page alignment.
+  echo "Host side script to check the ELF alignment of shared libraries."
+  echo "Shared libraries are reported ALIGNED when their ELF regions are"
+  echo "16 KB or 64 KB aligned. Otherwise they are reported as UNALIGNED."
+  echo
+  echo "Usage: ${progname} [input-path|input-APK|input-APEX]"
+}
+
+if [ ${#} -ne 1 ]; then
+  usage
+  exit
+fi
+
+case ${1} in
+  --help | -h | -\?)
+    usage
+    exit
+    ;;
+
+  *)
+    dir="${1}"
+    ;;
+esac
+
+if ! [ -f "${dir}" -o -d "${dir}" ]; then
+  echo "Invalid file: ${dir}" >&2
+  exit 1
+fi
+
+if [[ "${dir}" == *.apk ]]; then
+  trap 'cleanup_trap' EXIT
+
+  echo
+  echo "Recursively analyzing $dir"
+  echo
+
+  if { zipalign --help 2>&1 | grep -q "\-P <pagesize_kb>"; }; then
+    echo "=== APK zip-alignment ==="
+    zipalign -v -c -P 16 4 "${dir}" | egrep 'lib/arm64-v8a|lib/x86_64|Verification'
+    echo "========================="
+  else
+    echo "NOTICE: Zip alignment check requires build-tools version 35.0.0-rc3 or higher."
+    echo "  You can install the latest build-tools by running the below command"
+    echo "  and updating your \$PATH:"
+    echo
+    echo "    sdkmanager \"build-tools;35.0.0-rc3\""
+  fi
+
+  dir_filename=$(basename "${dir}")
+  tmp=$(mktemp -d -t "${dir_filename%.apk}_out_XXXXX")
+  unzip "${dir}" lib/* -d "${tmp}" >/dev/null 2>&1
+  dir="${tmp}"
+fi
+
+if [[ "${dir}" == *.apex ]]; then
+  trap 'cleanup_trap' EXIT
+
+  echo
+  echo "Recursively analyzing $dir"
+  echo
+
+  dir_filename=$(basename "${dir}")
+  tmp=$(mktemp -d -t "${dir_filename%.apex}_out_XXXXX")
+  deapexer extract "${dir}" "${tmp}" || { echo "Failed to deapex." && exit 1; }
+  dir="${tmp}"
+fi
+
+RED="\e[31m"
+GREEN="\e[32m"
+ENDCOLOR="\e[0m"
+
+unaligned_libs=()
+unaligned_critical_libs=()
+
+echo
+echo "=== ELF alignment ==="
+
+matches="$(find "${dir}" -type f)"
+IFS=$'\n'
+for match in $matches; do
+  # We could recursively call this script or rewrite it to though.
+  [[ "${match}" == *".apk" ]] && echo "WARNING: doesn't recursively inspect .apk file: ${match}"
+  [[ "${match}" == *".apex" ]] && echo "WARNING: doesn't recursively inspect .apex file: ${match}"
+
+  [[ $(file "${match}") == *"ELF"* ]] || continue
+
+  # Alignment of the first LOAD segment, e.g. "2**14"; an exponent of 14
+  # (16 KB) or greater counts as ALIGNED.
+  res="$(objdump -p "${match}" | grep LOAD | awk '{ print $NF }' | head -1)"
+  if [[ $res =~ 2\*\*(1[4-9]|[2-9][0-9]|[1-9][0-9]{2,}) ]]; then
+    echo -e "${match}: ${GREEN}ALIGNED${ENDCOLOR} ($res)"
+  else
+    unaligned_libs+=("${match}")
+    # Check if this is a critical architecture (arm64-v8a or x86_64)
+    if [[ "${match}" == *"arm64-v8a"* ]] || [[ "${match}" == *"x86_64"* ]]; then
+      unaligned_critical_libs+=("${match}")
+      echo -e "${match}: ${RED}UNALIGNED${ENDCOLOR} ($res)"
+    else
+      echo -e "${match}: UNALIGNED ($res)"
+    fi
+  fi
+done
+
+if [ ${#unaligned_libs[@]} -gt 0 ]; then
+  echo -e "Found ${#unaligned_libs[@]} unaligned libs (only arm64-v8a/x86_64 libs need to be aligned).${ENDCOLOR}"
+fi
+echo "====================="
+
+# Exit with appropriate code: 1 if critical unaligned libs found, 0 otherwise
+if [ ${#unaligned_critical_libs[@]} -gt 0 ]; then
+  echo -e "${RED}Found ${#unaligned_critical_libs[@]} critical unaligned libs.${ENDCOLOR}"
+  exit 1
+else
+  echo -e "${GREEN}ELF Verification Successful${ENDCOLOR}"
+  exit 0
+fi

+ 18 - 0
build-scripts/cmake-toolchain-mingw64-i686.cmake

@@ -0,0 +1,18 @@
+# CMake toolchain file for cross-compiling to 32-bit Windows with MinGW-w64.
+set(CMAKE_SYSTEM_NAME Windows)
+set(CMAKE_SYSTEM_PROCESSOR x86)
+
+find_program(CMAKE_C_COMPILER NAMES i686-w64-mingw32-gcc)
+find_program(CMAKE_CXX_COMPILER NAMES i686-w64-mingw32-g++)
+find_program(CMAKE_RC_COMPILER NAMES i686-w64-mingw32-windres windres)
+
+# Fail early with a clear message when the cross tools are not installed.
+if(NOT CMAKE_C_COMPILER)
+  message(FATAL_ERROR "Failed to find CMAKE_C_COMPILER.")
+endif()
+
+if(NOT CMAKE_CXX_COMPILER)
+  message(FATAL_ERROR "Failed to find CMAKE_CXX_COMPILER.")
+endif()
+
+if(NOT CMAKE_RC_COMPILER)
+  message(FATAL_ERROR "Failed to find CMAKE_RC_COMPILER.")
+endif()

+ 18 - 0
build-scripts/cmake-toolchain-mingw64-x86_64.cmake

@@ -0,0 +1,18 @@
+# CMake toolchain file for cross-compiling to 64-bit Windows with MinGW-w64.
+set(CMAKE_SYSTEM_NAME Windows)
+set(CMAKE_SYSTEM_PROCESSOR x86_64)
+
+find_program(CMAKE_C_COMPILER NAMES x86_64-w64-mingw32-gcc)
+find_program(CMAKE_CXX_COMPILER NAMES x86_64-w64-mingw32-g++)
+find_program(CMAKE_RC_COMPILER NAMES x86_64-w64-mingw32-windres windres)
+
+# Fail early with a clear message when the cross tools are not installed.
+if(NOT CMAKE_C_COMPILER)
+  message(FATAL_ERROR "Failed to find CMAKE_C_COMPILER.")
+endif()
+
+if(NOT CMAKE_CXX_COMPILER)
+  message(FATAL_ERROR "Failed to find CMAKE_CXX_COMPILER.")
+endif()
+
+if(NOT CMAKE_RC_COMPILER)
+  message(FATAL_ERROR "Failed to find CMAKE_RC_COMPILER.")
+endif()

+ 45 - 0
build-scripts/create-release.py

@@ -0,0 +1,45 @@
+#!/usr/bin/env python3
+
+import argparse
+from pathlib import Path
+import json
+import logging
+import re
+import subprocess
+
+ROOT = Path(__file__).resolve().parents[1]
+
+
+def determine_remote() -> str:
+    text = (ROOT / "build-scripts/release-info.json").read_text()
+    release_info = json.loads(text)
+    if "remote" in release_info:
+        return release_info["remote"]
+    project_with_version = release_info["name"]
+    project, _ = re.subn("([^a-zA-Z_])", "", project_with_version)
+    return f"libsdl-org/{project}"
+
+
+def main():
+    default_remote = determine_remote()
+
+    parser = argparse.ArgumentParser(allow_abbrev=False)
+    parser.add_argument("--ref", required=True, help=f"Name of branch or tag containing release.yml")
+    parser.add_argument("--remote", "-R", default=default_remote, help=f"Remote repo (default={default_remote})")
+    parser.add_argument("--commit", help=f"Input 'commit' of release.yml (default is the hash of the ref)")
+    args = parser.parse_args()
+
+    if args.commit is None:
+        args.commit = subprocess.check_output(["git", "rev-parse", args.ref], cwd=ROOT, text=True).strip()
+
+
+    print(f"Running release.yml workflow:")
+    print(f"  remote = {args.remote}")
+    print(f"     ref = {args.ref}")
+    print(f"  commit = {args.commit}")
+
+    subprocess.check_call(["gh", "-R", args.remote, "workflow", "run", "release.yml", "--ref", args.ref, "-f", f"commit={args.commit}"], cwd=ROOT)
+
+
+if __name__ == "__main__":
+    raise SystemExit(main())

+ 64 - 0
build-scripts/pkg-support/android/INSTALL.md.in

@@ -0,0 +1,64 @@
+
+# Using this package
+
+This package contains @<@PROJECT_NAME@>@ built for the Android platform.
+
+## Gradle integration
+
+For integration with CMake/ndk-build, it uses [prefab](https://google.github.io/prefab/).
+
+Copy the aar archive (@<@PROJECT_NAME@>@-@<@PROJECT_VERSION@>@.aar) to a `app/libs` directory of your project.
+
+In `app/build.gradle` of your Android project, add:
+```
+android {
+    /* ... */
+    buildFeatures {
+        prefab true
+    }
+}
+dependencies {
+    implementation files('libs/@<@PROJECT_NAME@>@-@<@PROJECT_VERSION@>@.aar')
+    /* ... */
+}
+```
+
+If you're using CMake, add the following to your CMakeLists.txt:
+```
+find_package(PhysFS REQUIRED CONFIG)
+target_link_libraries(yourgame PRIVATE PhysFS::PhysFS)
+```
+
+If you use ndk-build, add the following before `include $(BUILD_SHARED_LIBRARY)` to your `Android.mk`:
+```
+LOCAL_SHARED_LIBRARIES := PhysFS
+```
+And add the following at the bottom:
+```
+# https://google.github.io/prefab/build-systems.html
+
+# Add the prefab modules to the import path.
+$(call import-add-path,/out)
+
+# Import @<@PROJECT_NAME@>@ so we can depend on it.
+$(call import-module,prefab/@<@PROJECT_NAME@>@)
+```
+
+---
+
+## Other build systems (advanced)
+
+If you want to build a project without Gradle,
+running the following command will extract the Android archive into a more common directory structure.
+```
+python @<@PROJECT_NAME@>@-@<@PROJECT_VERSION@>@.aar -o android_prefix
+```
+Add `--help` for a list of all available options.
+
+# Documentation
+
+An API reference, tutorials, and additional documentation is available at:
+
+https://wiki.icculus.org/PhysicsFS3/QuickReference
+
+https://icculus.org/physfs/docs/html/

+ 104 - 0
build-scripts/pkg-support/android/aar/__main__.py.in

@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+
+"""
+Create a @<@PROJECT_NAME@>@ SDK prefix from an Android archive
+This file is meant to be placed in the root of an android .aar archive
+
+Example usage:
+```sh
+python @<@PROJECT_NAME@>@-@<@PROJECT_VERSION@>@.aar -o /usr/opt/android-sdks
+cmake -S my-project \
+    -DCMAKE_PREFIX_PATH=/usr/opt/android-sdks \
+    -DCMAKE_TOOLCHAIN_FILE=$ANDROID_NDK_HOME/build/cmake/android.toolchain.cmake \
+    -B build-arm64 -DANDROID_ABI=arm64-v8a \
+    -DCMAKE_BUILD_TYPE=Release
+cmake --build build-arm64
+```
+"""
+import argparse
+import io
+import json
+import os
+import pathlib
+import re
+import stat
+import zipfile
+
+
+AAR_PATH = pathlib.Path(__file__).resolve().parent
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description="Convert a @<@PROJECT_NAME@>@ Android .aar archive into a SDK",
+        allow_abbrev=False,
+    )
+    parser.add_argument("--version", action="version", version="@<@PROJECT_NAME@>@ @<@PROJECT_VERSION@>@")
+    parser.add_argument("-o", dest="output", type=pathlib.Path, required=True, help="Folder where to store the SDK")
+    args = parser.parse_args()
+
+    print(f"Creating a @<@PROJECT_NAME@>@ SDK at {args.output}...")
+
+    prefix = args.output
+    incdir = prefix / "include"
+    libdir = prefix / "lib"
+
+    RE_LIB_MODULE_ARCH = re.compile(r"prefab/modules/(?P<module>[A-Za-z0-9_-]+)/libs/android\.(?P<arch>[a-zA-Z0-9_-]+)/(?P<filename>lib[A-Za-z0-9_]+\.(?:so|a))")
+    RE_INC_MODULE_ARCH = re.compile(r"prefab/modules/(?P<module>[A-Za-z0-9_-]+)/include/(?P<header>[a-zA-Z0-9_./-]+)")
+    RE_LICENSE = re.compile(r"(?:.*/)?(?P<filename>(?:license|copying)(?:\.md|\.txt)?)", flags=re.I)
+    RE_PROGUARD = re.compile(r"(?:.*/)?(?P<filename>proguard.*\.(?:pro|txt))", flags=re.I)
+    RE_CMAKE = re.compile(r"(?:.*/)?(?P<filename>.*\.cmake)", flags=re.I)
+
+    with zipfile.ZipFile(AAR_PATH) as zf:
+        project_description = json.loads(zf.read("description.json"))
+        project_name = project_description["name"]
+        prefab_name = project_description.get("prefab-name", project_name)
+        project_version = project_description["version"]
+        licensedir = prefix / "share/licenses" / prefab_name
+        cmakedir = libdir / "cmake" / prefab_name
+        javadir = prefix / "share/java" / project_name
+        javadocdir = prefix / "share/javadoc" / project_name
+
+        def read_zipfile_and_write(path: pathlib.Path, zippath: str):
+            data = zf.read(zippath)
+            path.parent.mkdir(parents=True, exist_ok=True)
+            path.write_bytes(data)
+
+        for zip_info in zf.infolist():
+            zippath = zip_info.filename
+            if m := RE_LIB_MODULE_ARCH.match(zippath):
+                lib_path = libdir / m["arch"] / m["filename"]
+                read_zipfile_and_write(lib_path, zippath)
+                if m["filename"].endswith(".so"):
+                    os.chmod(lib_path, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
+
+            elif m := RE_INC_MODULE_ARCH.match(zippath):
+                header_path = incdir / m["header"]
+                read_zipfile_and_write(header_path, zippath)
+            elif m:= RE_LICENSE.match(zippath):
+                license_path = licensedir / m["filename"]
+                read_zipfile_and_write(license_path, zippath)
+            elif m:= RE_PROGUARD.match(zippath):
+                proguard_path = javadir / m["filename"]
+                read_zipfile_and_write(proguard_path, zippath)
+            elif m:= RE_CMAKE.match(zippath):
+                cmake_path = cmakedir / m["filename"]
+                read_zipfile_and_write(cmake_path, zippath)
+            elif zippath == "classes.jar":
+                versioned_jar_path = javadir / f"{project_name}-{project_version}.jar"
+                unversioned_jar_path = javadir / f"{project_name}.jar"
+                read_zipfile_and_write(versioned_jar_path, zippath)
+                os.symlink(src=versioned_jar_path.name, dst=unversioned_jar_path)
+            elif zippath == "classes-sources.jar":
+                jarpath = javadir / f"{project_name}-{project_version}-sources.jar"
+                read_zipfile_and_write(jarpath, zippath)
+            elif zippath == "classes-doc.jar":
+                jarpath = javadocdir / f"{project_name}-{project_version}-javadoc.jar"
+                read_zipfile_and_write(jarpath, zippath)
+
+    print("... done")
+    return 0
+
+
+if __name__ == "__main__":
+    raise SystemExit(main())

+ 96 - 0
build-scripts/pkg-support/android/aar/cmake/PhysFSConfig.cmake

@@ -0,0 +1,96 @@
+# PhysicsFS CMake configuration file:
+# This file is meant to be placed in lib/cmake/PhysFS subfolder of a reconstructed Android PhysFS SDK
+
+cmake_minimum_required(VERSION 3.0...4.0)
+
+include(FeatureSummary)
+set_package_properties(PhysicsFS PROPERTIES
+    URL "https://icculus.org/physfs/"
+    DESCRIPTION "Library to provide abstract access to various archives"
+)
+
+# Copied from `configure_package_config_file`
+macro(set_and_check _var _file)
+    set(${_var} "${_file}")
+    if(NOT EXISTS "${_file}")
+        message(FATAL_ERROR "File or directory ${_file} referenced by variable ${_var} does not exist !")
+    endif()
+endmacro()
+
+# Copied from `configure_package_config_file`
+macro(check_required_components _NAME)
+    foreach(comp ${${_NAME}_FIND_COMPONENTS})
+        if(NOT ${_NAME}_${comp}_FOUND)
+            if(${_NAME}_FIND_REQUIRED_${comp})
+                set(${_NAME}_FOUND FALSE)
+            endif()
+        endif()
+    endforeach()
+endmacro()
+
+set(PhysFS_FOUND TRUE)
+
+# Detect the target CPU architecture(s): this defines the SDL_CPU_* variables
+# tested below. The version file also runs the detection, but it is only
+# processed when find_package() is given a version, so a plain
+# find_package(PhysFS) would otherwise never see SDL_CPU_* and always fail.
+include("${CMAKE_CURRENT_LIST_DIR}/sdlcpu.cmake")
+SDL_DetectTargetCPUArchitectures(_detected_archs)
+
+if(SDL_CPU_X86)
+    set(_sdl_arch_subdir "x86")
+elseif(SDL_CPU_X64)
+    set(_sdl_arch_subdir "x86_64")
+elseif(SDL_CPU_ARM32)
+    set(_sdl_arch_subdir "armeabi-v7a")
+elseif(SDL_CPU_ARM64)
+    set(_sdl_arch_subdir "arm64-v8a")
+else()
+    set(PhysFS_FOUND FALSE)
+    return()
+endif()
+
+get_filename_component(_physfs_prefix "${CMAKE_CURRENT_LIST_DIR}/../../.." ABSOLUTE)
+set_and_check(_physfs_prefix          "${_physfs_prefix}")
+set_and_check(_physfs_include_dirs    "${_physfs_prefix}/include")
+
+set_and_check(_physfs_lib             "${_physfs_prefix}/lib/${_sdl_arch_subdir}/libphysfs.so")
+unset(_sdl_arch_subdir)
+unset(_physfs_prefix)
+
+# All targets are created, even when some might not be requested through COMPONENTS.
+# This is done for compatibility with CMake generated PhysFS-target.cmake files.
+
+if(EXISTS "${_physfs_lib}")
+    if(NOT TARGET PhysFS::PhysFS-shared)
+        add_library(PhysFS::PhysFS-shared SHARED IMPORTED)
+        set_target_properties(PhysFS::PhysFS-shared
+            PROPERTIES
+                INTERFACE_INCLUDE_DIRECTORIES "${_physfs_include_dirs}"
+                IMPORTED_LOCATION "${_physfs_lib}"
+        )
+    endif()
+    set(PhysFS_PhysFS-shared_FOUND TRUE)
+else()
+    set(PhysFS_PhysFS-shared_FOUND FALSE)
+endif()
+unset(_physfs_lib)
+unset(_physfs_include_dirs)
+
+# No static library is shipped in the Android archive.
+set(PhysFS_PhysFS-static_FOUND FALSE)
+
+if(PhysFS_PhysFS-shared_FOUND)
+    set(PhysFS_PhysFS_FOUND TRUE)
+endif()
+
+function(_sdl_create_target_alias_compat NEW_TARGET TARGET)
+    if(CMAKE_VERSION VERSION_LESS "3.18")
+        # Aliasing local targets is not supported on CMake < 3.18, so make it global.
+        add_library(${NEW_TARGET} INTERFACE IMPORTED)
+        set_target_properties(${NEW_TARGET} PROPERTIES INTERFACE_LINK_LIBRARIES "${TARGET}")
+    else()
+        add_library(${NEW_TARGET} ALIAS ${TARGET})
+    endif()
+endfunction()
+
+# Make sure PhysFS::PhysFS always exists
+if(NOT TARGET PhysFS::PhysFS)
+    if(TARGET PhysFS::PhysFS-shared)
+        _sdl_create_target_alias_compat(PhysFS::PhysFS PhysFS::PhysFS-shared)
+    endif()
+endif()
+
+check_required_components(PhysFS)

+ 38 - 0
build-scripts/pkg-support/android/aar/cmake/PhysFSConfigVersion.cmake.in

@@ -0,0 +1,38 @@
+# @<@PROJECT_NAME@>@ CMake version configuration file:
+# This file is meant to be placed in a lib/cmake/PhysFS subfolder of a reconstructed Android PhysFS SDK
+# The @<@...@>@ placeholders are substituted when the release package is assembled.
+
+set(PACKAGE_VERSION "@<@PROJECT_VERSION@>@")
+
+if(PACKAGE_FIND_VERSION_RANGE)
+    # Package version must be in the requested version range
+    if ((PACKAGE_FIND_VERSION_RANGE_MIN STREQUAL "INCLUDE" AND PACKAGE_VERSION VERSION_LESS PACKAGE_FIND_VERSION_MIN)
+            OR ((PACKAGE_FIND_VERSION_RANGE_MAX STREQUAL "INCLUDE" AND PACKAGE_VERSION VERSION_GREATER PACKAGE_FIND_VERSION_MAX)
+            OR (PACKAGE_FIND_VERSION_RANGE_MAX STREQUAL "EXCLUDE" AND PACKAGE_VERSION VERSION_GREATER_EQUAL PACKAGE_FIND_VERSION_MAX)))
+        set(PACKAGE_VERSION_COMPATIBLE FALSE)
+    else()
+        set(PACKAGE_VERSION_COMPATIBLE TRUE)
+    endif()
+else()
+    # No range requested: any installed version >= the requested one is compatible.
+    if(PACKAGE_VERSION VERSION_LESS PACKAGE_FIND_VERSION)
+        set(PACKAGE_VERSION_COMPATIBLE FALSE)
+    else()
+        set(PACKAGE_VERSION_COMPATIBLE TRUE)
+        if(PACKAGE_FIND_VERSION STREQUAL PACKAGE_VERSION)
+            set(PACKAGE_VERSION_EXACT TRUE)
+        endif()
+    endif()
+endif()
+
+# if the using project doesn't have CMAKE_SIZEOF_VOID_P set, fail.
+if("${CMAKE_SIZEOF_VOID_P}" STREQUAL "")
+    set(PACKAGE_VERSION_UNSUITABLE TRUE)
+endif()
+
+# Detect the consumer's target architecture(s); sets the SDL_CPU_* variables.
+include("${CMAKE_CURRENT_LIST_DIR}/sdlcpu.cmake")
+SDL_DetectTargetCPUArchitectures(_detected_archs)
+
+# check that the installed version has a compatible architecture as the one which is currently searching:
+if(NOT(SDL_CPU_X86 OR SDL_CPU_X64 OR SDL_CPU_ARM32 OR SDL_CPU_ARM64))
+    set(PACKAGE_VERSION "${PACKAGE_VERSION} (X86,X64,ARM32,ARM64)")
+    set(PACKAGE_VERSION_UNSUITABLE TRUE)
+endif()

+ 6 - 0
build-scripts/pkg-support/android/aar/description.json.in

@@ -0,0 +1,6 @@
+{
+    "name": "@<@PROJECT_NAME@>@",
+    "prefab-name": "PhysFS",
+    "version": "@<@PROJECT_VERSION@>@",
+    "git-hash": "@<@PROJECT_COMMIT@>@"
+}

+ 26 - 0
build-scripts/pkg-support/mingw/INSTALL.md.in

@@ -0,0 +1,26 @@
+
+# Using this package
+
+This package contains @<@PROJECT_NAME@>@ built for the mingw-w64 toolchain.
+
+The files for 32-bit architecture are in i686-w64-mingw32
+The files for 64-bit architecture are in x86_64-w64-mingw32
+
+You can install them to another location; run `make` in this directory to see the available install targets.
+
+To use this package, point your include path at _arch_/include and your library path at _arch_/lib, link with the @<@PROJECT_NAME@>@ library and copy _arch_/bin/@<@PROJECT_NAME@>@.dll next to your executable.
+
+e.g.
+```sh
+gcc -o hello.exe hello.c -Ix86_64-w64-mingw32/include -Lx86_64-w64-mingw32/lib -l@<@PROJECT_NAME@>@
+cp x86_64-w64-mingw32/bin/physfs.dll .
+./hello.exe
+```
+
+# Documentation
+
+An API reference, tutorials, and additional documentation are available at:
+
+https://wiki.icculus.org/PhysicsFS3/QuickReference
+
+https://icculus.org/physfs/docs/html/

+ 39 - 0
build-scripts/pkg-support/mingw/Makefile

@@ -0,0 +1,39 @@
+#
+# Makefile for installing the mingw-w64 version of the PhysicsFS library
+
+DESTDIR = /usr/local
+ARCHITECTURES := i686-w64-mingw32 x86_64-w64-mingw32
+
+default:
+	@echo "Run \"make install-i686\" to install 32-bit"
+	@echo "Run \"make install-x86_64\" to install 64-bit"
+	@echo "Run \"make install-all\" to install both"
+	@echo "Add DESTDIR=/custom/path to change the destination folder"
+
+# Copy bin/include/lib/share of the selected $(ARCH) tree into $(DESTDIR).
+install:
+	@if test -d $(ARCH) && test -d $(DESTDIR); then \
+		(cd $(ARCH) && cp -rv bin include lib share $(DESTDIR)/); \
+	else \
+		echo "*** ERROR: $(ARCH) or $(DESTDIR) does not exist!"; \
+		exit 1; \
+	fi
+
+install-i686:
+	$(MAKE) install ARCH=i686-w64-mingw32
+
+install-x86_64:
+	$(MAKE) install ARCH=x86_64-w64-mingw32
+
+# Install both architectures plus the top-level cmake dispatcher files.
+# Note: the ';' after 'done' is required -- the backslash continuations are
+# collapsed into a single shell line, and 'done else' is a syntax error.
+install-all:
+	@if test -d $(DESTDIR); then \
+		mkdir -p $(DESTDIR)/cmake; \
+		cp -rv cmake/* $(DESTDIR)/cmake; \
+		for arch in $(ARCHITECTURES); do \
+			$(MAKE) install ARCH=$$arch DESTDIR=$(DESTDIR)/$$arch; \
+		done; \
+	else \
+		echo "*** ERROR: $(DESTDIR) does not exist!"; \
+		exit 1; \
+	fi
+
+.PHONY: default install install-i686 install-x86_64 install-all

+ 19 - 0
build-scripts/pkg-support/mingw/cmake/PhysFSConfig.cmake

@@ -0,0 +1,19 @@
+# PhysFS CMake configuration file:
+# This file is meant to be placed in a cmake subfolder of physfs-devel-3.x.y-mingw
+# It forwards to the per-triplet config file matching the consumer's pointer size.
+
+if(CMAKE_SIZEOF_VOID_P EQUAL 4)
+    set(physfs_config_path "${CMAKE_CURRENT_LIST_DIR}/../i686-w64-mingw32/lib/cmake/PhysFS/PhysFSConfig.cmake")
+elseif(CMAKE_SIZEOF_VOID_P EQUAL 8)
+    set(physfs_config_path "${CMAKE_CURRENT_LIST_DIR}/../x86_64-w64-mingw32/lib/cmake/PhysFS/PhysFSConfig.cmake")
+else()
+    # Neither 32-bit nor 64-bit pointers: no matching prebuilt binaries shipped.
+    set(PhysFS_FOUND FALSE)
+    return()
+endif()
+
+if(NOT EXISTS "${physfs_config_path}")
+    message(WARNING "${physfs_config_path} does not exist: MinGW development package is corrupted")
+    set(PhysFS_FOUND FALSE)
+    return()
+endif()
+
+include("${physfs_config_path}")

+ 19 - 0
build-scripts/pkg-support/mingw/cmake/PhysFSConfigVersion.cmake

@@ -0,0 +1,19 @@
+# PhysFS CMake version configuration file:
+# This file is meant to be placed in a cmake subfolder of physfs-devel-3.x.y-mingw
+# It forwards to the per-triplet version file matching the consumer's pointer size.
+
+if(CMAKE_SIZEOF_VOID_P EQUAL 4)
+    set(physfs_config_path "${CMAKE_CURRENT_LIST_DIR}/../i686-w64-mingw32/lib/cmake/PhysFS/PhysFSConfigVersion.cmake")
+elseif(CMAKE_SIZEOF_VOID_P EQUAL 8)
+    set(physfs_config_path "${CMAKE_CURRENT_LIST_DIR}/../x86_64-w64-mingw32/lib/cmake/PhysFS/PhysFSConfigVersion.cmake")
+else()
+    # Neither 32-bit nor 64-bit pointers: no matching prebuilt binaries shipped.
+    set(PACKAGE_VERSION_UNSUITABLE TRUE)
+    return()
+endif()
+
+if(NOT EXISTS "${physfs_config_path}")
+    message(WARNING "${physfs_config_path} does not exist: MinGW development package is corrupted")
+    set(PACKAGE_VERSION_UNSUITABLE TRUE)
+    return()
+endif()
+
+include("${physfs_config_path}")

+ 25 - 0
build-scripts/pkg-support/msvc/INSTALL.md.in

@@ -0,0 +1,25 @@
+
+# Using this package
+
+This package contains @<@PROJECT_NAME@>@ built for Visual Studio.
+
+To use this package, edit your project properties:
+- Add the include directory to "VC++ Directories" -> "Include Directories"
+- Add the lib/_arch_ directory to "VC++ Directories" -> "Library Directories"
+- Add @<@PROJECT_NAME@>@.lib to Linker -> Input -> "Additional Dependencies"
+- Copy lib/_arch_/physfs.dll to your project directory.
+
+# Documentation
+
+An API reference, tutorials, and additional documentation are available at:
+
+https://wiki.icculus.org/PhysicsFS3/QuickReference
+
+https://icculus.org/physfs/docs/html/
+
+## Announcement list
+
+You can sign up for the low traffic announcement list at:
+
+https://icculus.org/mailman/listinfo/physfs
+

+ 13 - 0
build-scripts/pkg-support/msvc/arm64/INSTALL.md.in

@@ -0,0 +1,13 @@
+
+# Using this package
+
+This package contains @<@PROJECT_NAME@>@ built for arm64 Windows.
+
+To use this package, simply replace an existing 64-bit ARM physfs.dll with the one included here.
+
+# Development packages
+
+If you're looking for packages with headers and libraries, you can download one of these:
+-  @<@PROJECT_NAME@>@-devel-@<@PROJECT_VERSION@>@-VC.zip, for development using Visual Studio
+-  @<@PROJECT_NAME@>@-devel-@<@PROJECT_VERSION@>@-mingw.zip, for development using mingw-w64
+

+ 98 - 0
build-scripts/pkg-support/msvc/cmake/PhysFSConfig.cmake.in

@@ -0,0 +1,98 @@
+# @<@PROJECT_NAME@>@ CMake configuration file:
+# This file is meant to be placed in a cmake subfolder of @<@PROJECT_NAME@>@-devel-@<@PROJECT_VERSION@>@-VC.zip
+
+cmake_minimum_required(VERSION 3.0...4.0)
+
+include(FeatureSummary)
+set_package_properties(PhysicsFS PROPERTIES
+    URL "https://icculus.org/physfs/"
+    DESCRIPTION "Library to provide abstract access to various archives"
+)
+
+# Copied from `configure_package_config_file`
+macro(set_and_check _var _file)
+    set(${_var} "${_file}")
+    if(NOT EXISTS "${_file}")
+        message(FATAL_ERROR "File or directory ${_file} referenced by variable ${_var} does not exist !")
+    endif()
+endmacro()
+
+# Copied from `configure_package_config_file`
+macro(check_required_components _NAME)
+    foreach(comp ${${_NAME}_FIND_COMPONENTS})
+        if(NOT ${_NAME}_${comp}_FOUND)
+            if(${_NAME}_FIND_REQUIRED_${comp})
+                set(${_NAME}_FOUND FALSE)
+            endif()
+        endif()
+    endforeach()
+endmacro()
+
+set(PhysFS_FOUND TRUE)
+
+# Detect the target CPU architecture(s): this defines the SDL_CPU_* variables
+# tested below. The version file also runs the detection, but it is only
+# processed when find_package() is given a version, so a plain
+# find_package(PhysFS) would otherwise never see SDL_CPU_* and always fail.
+include("${CMAKE_CURRENT_LIST_DIR}/sdlcpu.cmake")
+SDL_DetectTargetCPUArchitectures(_detected_archs)
+
+if(SDL_CPU_X86)
+    set(_sdl_arch_subdir "x86")
+elseif(SDL_CPU_X64 OR SDL_CPU_ARM64EC)
+    set(_sdl_arch_subdir "x64")
+elseif(SDL_CPU_ARM64)
+    set(_sdl_arch_subdir "arm64")
+else()
+    set(PhysFS_FOUND FALSE)
+    return()
+endif()
+
+get_filename_component(_physfs_prefix "${CMAKE_CURRENT_LIST_DIR}/.." ABSOLUTE)
+set_and_check(_physfs_prefix      "${_physfs_prefix}")
+set(_physfs_include_dirs          "${_physfs_prefix}/include")
+
+set(_physfs_implib      "${_physfs_prefix}/lib/${_sdl_arch_subdir}/physfs.lib")
+set(_physfs_dll         "${_physfs_prefix}/lib/${_sdl_arch_subdir}/physfs.dll")
+
+unset(_sdl_arch_subdir)
+unset(_physfs_prefix)
+
+# All targets are created, even when some might not be requested through COMPONENTS.
+# This is done for compatibility with CMake generated PhysFS-target.cmake files.
+
+if(EXISTS "${_physfs_implib}" AND EXISTS "${_physfs_dll}")
+    if(NOT TARGET PhysFS::PhysFS-shared)
+        add_library(PhysFS::PhysFS-shared SHARED IMPORTED)
+        set_target_properties(PhysFS::PhysFS-shared
+            PROPERTIES
+                INTERFACE_INCLUDE_DIRECTORIES "${_physfs_include_dirs}"
+                IMPORTED_IMPLIB "${_physfs_implib}"
+                IMPORTED_LOCATION "${_physfs_dll}"
+        )
+    endif()
+    set(PhysFS_PhysFS-shared_FOUND TRUE)
+else()
+    set(PhysFS_PhysFS-shared_FOUND FALSE)
+endif()
+unset(_physfs_implib)
+unset(_physfs_dll)
+unset(_physfs_include_dirs)
+
+# No static library is shipped in the MSVC development archive.
+set(PhysFS_PhysFS-static_FOUND FALSE)
+
+if(PhysFS_PhysFS-shared_FOUND OR PhysFS_PhysFS-static_FOUND)
+    set(PhysFS_PhysFS_FOUND TRUE)
+endif()
+
+function(_sdl_create_target_alias_compat NEW_TARGET TARGET)
+    if(CMAKE_VERSION VERSION_LESS "3.18")
+        # Aliasing local targets is not supported on CMake < 3.18, so make it global.
+        add_library(${NEW_TARGET} INTERFACE IMPORTED)
+        set_target_properties(${NEW_TARGET} PROPERTIES INTERFACE_LINK_LIBRARIES "${TARGET}")
+    else()
+        add_library(${NEW_TARGET} ALIAS ${TARGET})
+    endif()
+endfunction()
+
+# Make sure PhysFS::PhysFS always exists
+if(NOT TARGET PhysFS::PhysFS)
+    if(TARGET PhysFS::PhysFS-shared)
+        _sdl_create_target_alias_compat(PhysFS::PhysFS PhysFS::PhysFS-shared)
+    endif()
+endif()
+
+check_required_components(PhysFS)

+ 38 - 0
build-scripts/pkg-support/msvc/cmake/PhysFSConfigVersion.cmake.in

@@ -0,0 +1,38 @@
+# @<@PROJECT_NAME@>@ CMake version configuration file:
+# This file is meant to be placed in a cmake subfolder of @<@PROJECT_NAME@>@-devel-@<@PROJECT_VERSION@>@-VC.zip
+# The @<@...@>@ placeholders are substituted when the release package is assembled.
+
+set(PACKAGE_VERSION "@<@PROJECT_VERSION@>@")
+
+if(PACKAGE_FIND_VERSION_RANGE)
+    # Package version must be in the requested version range
+    if ((PACKAGE_FIND_VERSION_RANGE_MIN STREQUAL "INCLUDE" AND PACKAGE_VERSION VERSION_LESS PACKAGE_FIND_VERSION_MIN)
+        OR ((PACKAGE_FIND_VERSION_RANGE_MAX STREQUAL "INCLUDE" AND PACKAGE_VERSION VERSION_GREATER PACKAGE_FIND_VERSION_MAX)
+        OR (PACKAGE_FIND_VERSION_RANGE_MAX STREQUAL "EXCLUDE" AND PACKAGE_VERSION VERSION_GREATER_EQUAL PACKAGE_FIND_VERSION_MAX)))
+        set(PACKAGE_VERSION_COMPATIBLE FALSE)
+    else()
+        set(PACKAGE_VERSION_COMPATIBLE TRUE)
+    endif()
+else()
+    # No range requested: any installed version >= the requested one is compatible.
+    if(PACKAGE_VERSION VERSION_LESS PACKAGE_FIND_VERSION)
+        set(PACKAGE_VERSION_COMPATIBLE FALSE)
+    else()
+        set(PACKAGE_VERSION_COMPATIBLE TRUE)
+        if(PACKAGE_FIND_VERSION STREQUAL PACKAGE_VERSION)
+            set(PACKAGE_VERSION_EXACT TRUE)
+        endif()
+    endif()
+endif()
+
+# if the using project doesn't have CMAKE_SIZEOF_VOID_P set, fail.
+if("${CMAKE_SIZEOF_VOID_P}" STREQUAL "")
+    set(PACKAGE_VERSION_UNSUITABLE TRUE)
+endif()
+
+# Detect the consumer's target architecture(s); sets the SDL_CPU_* variables.
+include("${CMAKE_CURRENT_LIST_DIR}/sdlcpu.cmake")
+SDL_DetectTargetCPUArchitectures(_detected_archs)
+
+# check that the installed version has a compatible architecture as the one which is currently searching:
+# NOTE(review): the check accepts ARM64EC but the reported list below omits it -- confirm intentional.
+if(NOT(SDL_CPU_X86 OR SDL_CPU_X64 OR SDL_CPU_ARM64 OR SDL_CPU_ARM64EC))
+    set(PACKAGE_VERSION "${PACKAGE_VERSION} (X86,X64,ARM64)")
+    set(PACKAGE_VERSION_UNSUITABLE TRUE)
+endif()

+ 13 - 0
build-scripts/pkg-support/msvc/x64/INSTALL.md.in

@@ -0,0 +1,13 @@
+
+# Using this package
+
+This package contains @<@PROJECT_NAME@>@ built for x64 Windows.
+
+To use this package, simply replace an existing 64-bit physfs.dll with the one included here.
+
+# Development packages
+
+If you're looking for packages with headers and libraries, you can download one of these:
+-  @<@PROJECT_NAME@>@-devel-@<@PROJECT_VERSION@>@-VC.zip, for development using Visual Studio
+-  @<@PROJECT_NAME@>@-devel-@<@PROJECT_VERSION@>@-mingw.zip, for development using mingw-w64
+

+ 13 - 0
build-scripts/pkg-support/msvc/x86/INSTALL.md.in

@@ -0,0 +1,13 @@
+
+# Using this package
+
+This package contains @<@PROJECT_NAME@>@ built for x86 Windows.
+
+To use this package, simply replace an existing 32-bit physfs.dll with the one included here.
+
+# Development packages
+
+If you're looking for packages with headers and libraries, you can download one of these:
+-  @<@PROJECT_NAME@>@-devel-@<@PROJECT_VERSION@>@-VC.zip, for development using Visual Studio
+-  @<@PROJECT_NAME@>@-devel-@<@PROJECT_VERSION@>@-mingw.zip, for development using mingw-w64
+

+ 151 - 0
build-scripts/release-info.json

@@ -0,0 +1,151 @@
+{
+  "name": "physfs",
+  "remote": "icculus/physfs",
+  "version": {
+    "file": "src/physfs.h",
+    "re_major": "^#define PHYSFS_VER_MAJOR\\s+([0-9]+)$",
+    "re_minor": "^#define PHYSFS_VER_MINOR\\s+([0-9]+)$",
+    "re_micro": "^#define PHYSFS_VER_PATCH\\s+([0-9]+)$"
+  },
+  "source": {
+    "checks": [
+      "src/physfs.c",
+      "src/physfs.h",
+      "test/test_physfs.c",
+      "extras/physfssdl3.h"
+    ]
+  },
+  "mingw": {
+    "cmake": {
+      "archs": ["x86", "x64"],
+      "args": [
+        "-DPHYSFS_BUILD_SHARED=ON",
+        "-DPHYSFS_BUILD_STATIC=OFF",
+        "-DPHYSFS_INSTALL=ON",
+        "-DPHYSFS_INSTALL_MAN=OFF",
+        "-DPHYSFS_BUILD_DOCS=OFF",
+        "-DPHYSFS_BUILD_TEST=OFF"
+      ],
+      "shared-static": "args"
+    },
+    "files": {
+      "": [
+        "build-scripts/pkg-support/mingw/INSTALL.md.in:INSTALL.md",
+        "build-scripts/pkg-support/mingw/Makefile",
+        "LICENSE.txt",
+        "README.md"
+      ],
+      "cmake": [
+        "build-scripts/pkg-support/mingw/cmake/PhysFSConfig.cmake",
+        "build-scripts/pkg-support/mingw/cmake/PhysFSConfigVersion.cmake"
+      ]
+    }
+  },
+  "msvc": {
+    "cmake": {
+      "archs": [
+        "x86",
+        "x64",
+        "arm64"
+      ],
+      "args": [
+        "-DPHYSFS_BUILD_SHARED=ON",
+        "-DPHYSFS_BUILD_STATIC=OFF",
+        "-DPHYSFS_INSTAL=ON",
+        "-DPHYSFS_INSTALL_MAN=OFF",
+        "-DPHYSFS_BUILD_DOCS=OFF",
+        "-DPHYSFS_BUILD_TEST=OFF"
+      ],
+      "files-lib": {
+        "": [
+          "bin/physfs.dll"
+        ]
+      },
+      "files-devel": {
+        "lib/@<@ARCH@>@": [
+          "bin/physfs.dll",
+          "bin/physfs.pdb",
+          "lib/physfs.lib"
+        ]
+      }
+    },
+    "files-lib": {
+      "": [
+        "build-scripts/pkg-support/msvc/@<@ARCH@>@/INSTALL.md.in:INSTALL.md",
+        "LICENSE.txt",
+        "README.md"
+      ]
+    },
+    "files-devel": {
+      "": [
+        "build-scripts/pkg-support/msvc/INSTALL.md.in:INSTALL.md",
+        "LICENSE.txt",
+        "README.md"
+      ],
+      "cmake": [
+        "build-scripts/pkg-support/msvc/cmake/PhysFSConfig.cmake.in:PhysFSConfig.cmake",
+        "build-scripts/pkg-support/msvc/cmake/PhysFSConfigVersion.cmake.in:PhysFSConfigVersion.cmake",
+        "cmake/sdlcpu.cmake"
+      ],
+      "include": [
+        "src/physfs.h"
+      ]
+    }
+  },
+  "android": {
+    "cmake": {
+      "args": [
+        "-DPHYSFS_BUILD_SHARED=ON",
+        "-DPHYSFS_BUILD_STATIC=OFF",
+        "-DPHYSFS_INSTAL=ON",
+        "-DPHYSFS_INSTALL_MAN=OFF",
+        "-DPHYSFS_BUILD_DOCS=OFF",
+        "-DPHYSFS_BUILD_TEST=OFF"
+      ]
+    },
+    "name": "PhysFS",
+    "modules": {
+      "PhysFS-shared": {
+        "type": "library",
+        "library": "lib/libphysfs.so",
+        "includes": {
+          ".": ["include/physfs.h"]
+        }
+      },
+      "PhysFS": {
+        "type": "interface",
+        "export-libraries": [":PhysFS-shared"]
+      }
+    },
+    "abis": [
+      "armeabi-v7a",
+      "arm64-v8a",
+      "x86",
+      "x86_64"
+    ],
+    "api-minimum": 21,
+    "api-target": 35,
+    "ndk-minimum": 28,
+    "aar-files": {
+      "": [
+        "build-scripts/pkg-support/android/aar/__main__.py.in:__main__.py",
+        "build-scripts/pkg-support/android/aar/description.json.in:description.json"
+      ],
+      "META-INF": [
+        "LICENSE.txt"
+      ],
+      "cmake": [
+        "cmake/sdlcpu.cmake",
+        "build-scripts/pkg-support/android/aar/cmake/PhysFSConfig.cmake",
+        "build-scripts/pkg-support/android/aar/cmake/PhysFSConfigVersion.cmake.in:PhysFSConfigVersion.cmake"
+      ]
+    },
+    "files": {
+      "": [
+        "build-scripts/pkg-support/android/INSTALL.md.in:INSTALL.md",
+        "LICENSE.txt",
+        "README.md"
+      ]
+    }
+  }
+}

+ 158 - 0
cmake/sdlcpu.cmake

@@ -0,0 +1,158 @@
+# SDL_DetectTargetCPUArchitectures(<out-var>)
+#
+# Detect which CPU architecture(s) the current toolchain targets, cache the
+# result in SDL_CPU_<ARCH> boolean cache variables, and return the list of
+# detected architectures in <out-var>. Used by the PhysFSConfig*/Version files.
+function(SDL_DetectTargetCPUArchitectures DETECTED_ARCHS)
+
+  set(known_archs EMSCRIPTEN ARM32 ARM64 ARM64EC LOONGARCH64 POWERPC32 POWERPC64 RISCV32 RISCV64 X86 X64)
+
+  # On Apple platforms CMAKE_OSX_ARCHITECTURES is authoritative (a universal
+  # build may target several architectures at once), so use it directly.
+  if(APPLE AND CMAKE_OSX_ARCHITECTURES)
+    foreach(known_arch IN LISTS known_archs)
+      set(SDL_CPU_${known_arch} "0" PARENT_SCOPE)
+    endforeach()
+    set(detected_archs)
+    foreach(osx_arch IN LISTS CMAKE_OSX_ARCHITECTURES)
+      if(osx_arch STREQUAL "x86_64")
+        set(SDL_CPU_X64 "1" PARENT_SCOPE)
+        list(APPEND detected_archs "X64")
+      elseif(osx_arch STREQUAL "arm64")
+        set(SDL_CPU_ARM64 "1" PARENT_SCOPE)
+        list(APPEND detected_archs "ARM64")
+      endif()
+    endforeach()
+    set("${DETECTED_ARCHS}" "${detected_archs}" PARENT_SCOPE)
+    return()
+  endif()
+
+  # Fast path: reuse SDL_CPU_* values cached by a previous configure run.
+  set(detected_archs)
+  foreach(known_arch IN LISTS known_archs)
+    if(SDL_CPU_${known_arch})
+      list(APPEND detected_archs "${known_arch}")
+    endif()
+  endforeach()
+
+  if(detected_archs)
+    set("${DETECTED_ARCHS}" "${detected_archs}" PARENT_SCOPE)
+    return()
+  endif()
+
+  # Compiler-predefined macro expressions identifying each architecture.
+  set(arch_check_ARM32 "defined(__arm__) || defined(_M_ARM)")
+  set(arch_check_ARM64 "defined(__aarch64__) || defined(_M_ARM64)")
+  set(arch_check_ARM64EC "defined(_M_ARM64EC)")
+  set(arch_check_EMSCRIPTEN "defined(__EMSCRIPTEN__)")
+  set(arch_check_LOONGARCH64 "defined(__loongarch64)")
+  set(arch_check_POWERPC32 "(defined(__PPC__) || defined(__powerpc__)) && !defined(__powerpc64__)")
+  set(arch_check_POWERPC64 "defined(__PPC64__) || defined(__powerpc64__)")
+  set(arch_check_RISCV32 "defined(__riscv) && defined(__riscv_xlen) && __riscv_xlen == 32")
+  set(arch_check_RISCV64 "defined(__riscv) && defined(__riscv_xlen) && __riscv_xlen == 64")
+  set(arch_check_X86 "defined(__i386__) || defined(__i486__) || defined(__i586__) || defined(__i686__) ||defined( __i386) || defined(_M_IX86)")
+  set(arch_check_X64 "(defined(__amd64__) || defined(__amd64) || defined(__x86_64__) || defined(__x86_64) || defined(_M_X64) || defined(_M_AMD64)) && !defined(_M_ARM64EC)")
+
+  # Generate a source file that bakes "INFO<ARCH=0|1>" marker strings into the
+  # compiled artifact; the markers are scraped back out with file(STRINGS), so
+  # nothing has to be executed (this works when cross-compiling).
+  set(src_vars "")
+  set(src_main "")
+  foreach(known_arch IN LISTS known_archs)
+    set(detected_${known_arch} "0")
+
+    string(APPEND src_vars "
+#if ${arch_check_${known_arch}}
+#define ARCH_${known_arch} \"1\"
+#else
+#define ARCH_${known_arch} \"0\"
+#endif
+const char *arch_${known_arch} = \"INFO<${known_arch}=\" ARCH_${known_arch} \">\";
+")
+    string(APPEND src_main "
+  result += arch_${known_arch}[argc];")
+  endforeach()
+
+  set(src_arch_detect "${src_vars}
+int main(int argc, char *argv[]) {
+  int result = 0;
+  (void)argv;
+${src_main}
+  return result;
+}")
+
+  # Pick the extension matching an already-enabled language, if any.
+  if(CMAKE_C_COMPILER)
+    set(ext ".c")
+  elseif(CMAKE_CXX_COMPILER)
+    set(ext ".cpp")
+  else()
+    enable_language(C)
+    set(ext ".c")
+  endif()
+  set(path_src_arch_detect "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/CMakeTmp/SDL_detect_arch${ext}")
+  file(WRITE "${path_src_arch_detect}" "${src_arch_detect}")
+  set(path_dir_arch_detect "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/CMakeTmp/SDL_detect_arch")
+  set(path_bin_arch_detect "${path_dir_arch_detect}/bin")
+
+  set(detected_archs)
+
+  set(msg "Detecting Target CPU Architecture")
+  message(STATUS "${msg}")
+
+  include(CMakePushCheckState)
+
+  # Build a static library so no linker/entry point is needed (cross-compile safe).
+  set(CMAKE_TRY_COMPILE_TARGET_TYPE "STATIC_LIBRARY")
+
+  cmake_push_check_state(RESET)
+  # NOTE(review): the literal path below duplicates path_dir_arch_detect.
+  try_compile(SDL_CPU_CHECK_ALL
+    "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/CMakeTmp/SDL_detect_arch"
+    SOURCES "${path_src_arch_detect}"
+    COPY_FILE "${path_bin_arch_detect}"
+  )
+  cmake_pop_check_state()
+  if(NOT SDL_CPU_CHECK_ALL)
+    message(STATUS "${msg} - <ERROR>")
+    message(WARNING "Failed to compile source detecting the target CPU architecture")
+  else()
+    # Scrape the INFO<ARCH=0|1> markers out of the compiled artifact.
+    set(re "INFO<([A-Z0-9]+)=([01])>")
+    file(STRINGS "${path_bin_arch_detect}" infos REGEX "${re}")
+
+    foreach(info_arch_01 IN LISTS infos)
+      string(REGEX MATCH "${re}" A "${info_arch_01}")
+      if(NOT "${CMAKE_MATCH_1}" IN_LIST known_archs)
+        message(WARNING "Unknown architecture: \"${CMAKE_MATCH_1}\"")
+        continue()
+      endif()
+      set(arch "${CMAKE_MATCH_1}")
+      set(arch_01 "${CMAKE_MATCH_2}")
+      set(detected_${arch} "${arch_01}")
+    endforeach()
+
+    foreach(known_arch IN LISTS known_archs)
+      if(detected_${known_arch})
+        list(APPEND detected_archs ${known_arch})
+      endif()
+    endforeach()
+  endif()
+
+  if(detected_archs)
+    foreach(known_arch IN LISTS known_archs)
+      set("SDL_CPU_${known_arch}" "${detected_${known_arch}}" CACHE BOOL "Detected architecture ${known_arch}")
+    endforeach()
+    message(STATUS "${msg} - ${detected_archs}")
+  else()
+    # Fallback: probe one architecture at a time with check_c_source_compiles
+    # (stops at the first architecture whose #if check compiles).
+    include(CheckCSourceCompiles)
+    cmake_push_check_state(RESET)
+    foreach(known_arch IN LISTS known_archs)
+      if(NOT detected_archs)
+        set(cache_variable "SDL_CPU_${known_arch}")
+          set(test_src "
+        int main(int argc, char *argv[]) {
+        #if ${arch_check_${known_arch}}
+          return 0;
+        #else
+          choke
+        #endif
+        }
+        ")
+        check_c_source_compiles("${test_src}" "${cache_variable}")
+        if(${cache_variable})
+          set(SDL_CPU_${known_arch} "1" CACHE BOOL "Detected architecture ${known_arch}")
+          set(detected_archs ${known_arch})
+        else()
+          set(SDL_CPU_${known_arch} "0" CACHE BOOL "Detected architecture ${known_arch}")
+        endif()
+      endif()
+    endforeach()
+    cmake_pop_check_state()
+  endif()
+  set("${DETECTED_ARCHS}" "${detected_archs}" PARENT_SCOPE)
+endfunction()