diff --git a/.github/actions/setup-ninja/action.yml b/.github/actions/setup-ninja/action.yml new file mode 100644 index 00000000..690c248c --- /dev/null +++ b/.github/actions/setup-ninja/action.yml @@ -0,0 +1,62 @@ +name: 'Setup ninja' +description: 'Download ninja and add it to the PATH environment variable' +inputs: + version: + description: 'Ninja version' + default: '1.13.1' +runs: + using: 'composite' + steps: + - name: 'Calculate variables' + id: calc + shell: sh + run: | + case "${{ runner.os }}-${{ runner.arch }}" in + "Linux-X86" | "Linux-X64") + archive="ninja-linux.zip" + ;; + "Linux-ARM64") + archive="ninja-linux-aarch64.zip" + ;; + "macOS-X86" | "macOS-X64" | "macOS-ARM64") + archive="ninja-mac.zip" + ;; + "Windows-X86" | "Windows-X64") + archive="ninja-win.zip" + ;; + "Windows-ARM64") + archive="ninja-winarm64.zip" + ;; + *) + echo "Unsupported ${{ runner.os }}-${{ runner.arch }}" + exit 1; + ;; + esac + echo "archive=${archive}" >> ${GITHUB_OUTPUT} + echo "cache-key=${archive}-${{ inputs.version }}-${{ runner.os }}-${{ runner.arch }}" >> ${GITHUB_OUTPUT} + - name: 'Restore cached ${{ steps.calc.outputs.archive }}' + id: cache-restore + uses: actions/cache/restore@v4 + with: + path: '${{ runner.temp }}/${{ steps.calc.outputs.archive }}' + key: ${{ steps.calc.outputs.cache-key }} + - name: 'Download ninja ${{ inputs.version }} for ${{ runner.os }} (${{ runner.arch }})' + if: ${{ (!steps.cache-restore.outputs.cache-hit || steps.cache-restore.outputs.cache-hit == 'false') }} + shell: pwsh + run: | + Invoke-WebRequest "https://github.com/ninja-build/ninja/releases/download/v${{ inputs.version }}/${{ steps.calc.outputs.archive }}" -OutFile "${{ runner.temp }}/${{ steps.calc.outputs.archive }}" + - name: 'Cache ${{ steps.calc.outputs.archive }}' + if: ${{ (!steps.cache-restore.outputs.cache-hit || steps.cache-restore.outputs.cache-hit == 'false') }} + uses: actions/cache/save@v4 + with: + path: '${{ runner.temp }}/${{ steps.calc.outputs.archive }}' + key: ${{ steps.calc.outputs.cache-key }} + - name: 'Extract ninja' + shell: pwsh + run: | + 7z "-o${{ runner.temp }}/ninja-${{ inputs.version }}-${{ runner.arch }}" x "${{ runner.temp }}/${{ steps.calc.outputs.archive }}" + - name: 'Set output variables' + id: final + shell: pwsh + run: | + echo "${{ runner.temp }}/ninja-${{ inputs.version }}-${{ runner.arch }}" >> $env:GITHUB_PATH diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 5f86fae2..80210b2e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -3,28 +3,67 @@ name: Build on: [push, pull_request] jobs: - Build: + desktop: name: ${{ matrix.platform.name }} runs-on: ${{ matrix.platform.os }} + defaults: + run: + shell: sh strategy: + fail-fast: false matrix: platform: # !!! FIXME: figure out an efficient way to get SDL2 on the Windows/Mac bots. 
- - { name: Linux, os: ubuntu-latest, flags: -GNinja } - - { name: MinGW, os: windows-latest, flags: -GNinja -DCMAKE_C_COMPILER=x86_64-w64-mingw32-gcc -DCMAKE_SYSTEM_NAME=Windows } - - { name: Windows, os: windows-latest } - - { name: MacOS, os: macos-latest } + - { name: Linux, os: ubuntu-latest } + - { name: MinGW, os: windows-latest, flags: -DCMAKE_C_COMPILER=x86_64-w64-mingw32-gcc -DCMAKE_SYSTEM_NAME=Windows } + - { name: Windows, os: windows-latest, msvc: true } + - { name: macOS, os: macos-latest } steps: - - name: Setup Linux dependencies - if: runner.os == 'Linux' - run: | - sudo apt-get update - sudo apt-get install ninja-build - - name: Setup MinGW dependencies - if: contains(matrix.platform.name, 'MinGW') - run: choco install ninja - name: Get PhysicsFS sources - uses: actions/checkout@v4 - - name: Configure CMake - run: cmake -B build ${{ matrix.platform.flags }} - - name: Build - run: cmake --build build/ + uses: actions/checkout@v6 + - uses: ilammy/msvc-dev-cmd@v1 + if: ${{ !!matrix.platform.msvc }} + with: + arch: x64 + - name: Set up ninja + uses: ./.github/actions/setup-ninja + - name: Configure (CMake) + run: | + cmake -B build -GNinja \ + -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=prefix_cmake \ + -DPHYSFS_INSTALL_MAN=ON \ + -DPHYSFS_WERROR=ON \ + ${{ matrix.platform.flags }} + - name: Build (CMake) + run: | + cmake --build build/ --verbose + - name: Install (CMake) + run: | + set -eu + cmake --install build/ + echo "PhysFS_ROOT=$(pwd)/prefix_cmake" >> $GITHUB_ENV + - name: Verify CMake configuration files + run: | + cmake -S cmake/test -B cmake_config_build \ + -DCMAKE_BUILD_TYPE=Release \ + -DTEST_SHARED=${{ matrix.platform.shared }} \ + -DTEST_STATIC=${{ matrix.platform.static }} + cmake --build cmake_config_build --verbose + - name: 'Test versioning' + run: | + build-scripts/test-versioning.sh + + os2: + name: OS/2 + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + - uses: open-watcom/setup-watcom@v0 + - name: Build physfs.dll + run: | + cd src + wmake -sn -f Makefile.os2 + - name: distclean + run: | + cd src + wmake -sn -f Makefile.os2 distclean diff --git a/.github/workflows/os2.yml b/.github/workflows/os2.yml deleted file mode 100644 index 1ba67d59..00000000 --- a/.github/workflows/os2.yml +++ /dev/null @@ -1,20 +0,0 @@ -name: Build (OS/2) - -on: [push, pull_request] - -jobs: - os2: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: open-watcom/setup-watcom@v0 - - name: Build physfs.dll - run: | - cd src - wmake -f Makefile.os2 - cd .. - - name: distclean - run: | - cd src - wmake -f Makefile.os2 distclean - cd .. 
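The standalone OS/2 workflow deleted above survives as the `os2` job in main.yml. The release workflow added below runs only via `workflow_dispatch` and takes the commit to package as an input. A minimal sketch of dispatching it from the command line with the GitHub CLI (the repository slug is illustrative, and `gh` must be authenticated):

```sh
# Kick off the release workflow for the current commit (slug is a placeholder).
gh workflow run release.yml -R icculus/physfs -f commit=$(git rev-parse HEAD)
```

build-release.py hands the `commit` input to `git archive`, so a full SHA or any other reachable tree-ish should work.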
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..d049ffd7 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,636 @@ +name: 'release' +run-name: 'Create PhysicsFS release artifacts for ${{ inputs.commit }}' + +on: + workflow_dispatch: + inputs: + commit: + description: 'Commit of PhysicsFS' + required: true + +jobs: + + src: + runs-on: ubuntu-latest + outputs: + project: ${{ steps.releaser.outputs.project }} + version: ${{ steps.releaser.outputs.version }} + src-tar-gz: ${{ steps.releaser.outputs.src-tar-gz }} + src-tar-xz: ${{ steps.releaser.outputs.src-tar-xz }} + src-zip: ${{ steps.releaser.outputs.src-zip }} + steps: + - name: 'Set up Python' + uses: actions/setup-python@v5 + with: + python-version: '3.11' + - name: 'Fetch build-release.py' + uses: actions/checkout@v4 + with: + sparse-checkout: 'build-scripts/build-release.py' + - name: 'Set up PhysicsFS sources' + uses: actions/checkout@v4 + with: + path: 'physfs' + fetch-depth: 0 + - name: 'Build Source archive' + id: releaser + shell: bash + run: | + python build-scripts/build-release.py \ + --actions source \ + --commit ${{ inputs.commit }} \ + --root "${{ github.workspace }}/physfs" \ + --github \ + --debug + - name: 'Store source archives' + uses: actions/upload-artifact@v4 + with: + name: sources + path: '${{ github.workspace }}/dist' + - name: 'Generate summary' + run: | + echo "Run the following commands to download all artifacts:" >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + echo "mkdir -p /tmp/${{ steps.releaser.outputs.project }}-${{ steps.releaser.outputs.version }}" >> $GITHUB_STEP_SUMMARY + echo "cd /tmp/${{ steps.releaser.outputs.project }}-${{ steps.releaser.outputs.version }}" >> $GITHUB_STEP_SUMMARY + echo "gh run download -R ${{ github.repository }} ${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + + linux-verify: + needs: [src] + runs-on: ubuntu-latest + steps: + - name: 'Download source archives' + uses: actions/download-artifact@v4 + with: + name: sources + path: '${{ github.workspace }}' + - name: 'Unzip ${{ needs.src.outputs.src-zip }}' + id: zip + run: | + mkdir /tmp/zipdir + cd /tmp/zipdir + unzip "${{ github.workspace }}/${{ needs.src.outputs.src-zip }}" + echo "path=/tmp/zipdir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT + - name: 'Untar ${{ needs.src.outputs.src-tar-gz }}' + id: tar + run: | + mkdir -p /tmp/tardir + tar -C /tmp/tardir -v -x -f "${{ github.workspace }}/${{ needs.src.outputs.src-tar-gz }}" + echo "path=/tmp/tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT + - name: 'Compare contents of ${{ needs.src.outputs.src-zip }} and ${{ needs.src.outputs.src-tar-gz }}' + run: | + diff -r /tmp/zipdir /tmp/tardir + - name: 'Test versioning' + shell: bash + run: | + ${{ steps.tar.outputs.path }}/build-scripts/test-versioning.sh + - name: 'CMake (configure + build + tests + examples)' + run: | + cmake -S ${{ steps.tar.outputs.path }} -B /tmp/build -DPHYSFS_BUILD_TEST=TRUE + cmake --build /tmp/build --verbose + +# dmg: +# needs: [src] +# runs-on: macos-latest +# outputs: +# dmg: ${{ steps.releaser.outputs.dmg }} +# steps: +# - name: 'Set up Python' +# uses: actions/setup-python@v5 +# with: +# python-version: '3.11' +# - name: 'Fetch build-release.py' +# uses: actions/checkout@v4 +# with: +# sparse-checkout: 'build-scripts/build-release.py' +# - name: 'Download source archives' +# uses: actions/download-artifact@v4
+# with: +# name: sources +# path: '${{ github.workspace }}' +# - name: 'Untar ${{ needs.src.outputs.src-tar-gz }}' +# id: tar +# run: | +# mkdir -p "${{ github.workspace }}/tardir" +# tar -C "${{ github.workspace }}/tardir" -v -x -f "${{ github.workspace }}/${{ needs.src.outputs.src-tar-gz }}" +# echo "path=${{ github.workspace }}/tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT +# - name: 'Build PhysicsFS.dmg' +# id: releaser +# shell: bash +# run: | +# python build-scripts/build-release.py \ +# --actions dmg \ +# --commit ${{ inputs.commit }} \ +# --root "${{ steps.tar.outputs.path }}" \ +# --github \ +# --debug +# - name: 'Store DMG image file' +# uses: actions/upload-artifact@v4 +# with: +# name: dmg +# path: '${{ github.workspace }}/dist' +# +# dmg-verify: +# needs: [dmg, src] +# runs-on: macos-latest +# steps: +# - name: 'Download source archives' +# uses: actions/download-artifact@v4 +# with: +# name: sources +# path: '${{ github.workspace }}' +# - name: 'Download ${{ needs.dmg.outputs.dmg }}' +# uses: actions/download-artifact@v4 +# with: +# name: dmg +# path: '${{ github.workspace }}' +# - name: 'Untar ${{ needs.src.outputs.src-tar-gz }}' +# id: src +# run: | +# mkdir -p /tmp/tardir +# tar -C /tmp/tardir -v -x -f "${{ github.workspace }}/${{ needs.src.outputs.src-tar-gz }}" +# echo "path=/tmp/tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT +# - name: 'Mount ${{ needs.dmg.outputs.dmg }}' +# id: mount +# run: | +# hdiutil attach '${{ github.workspace }}/${{ needs.dmg.outputs.dmg }}' +# mount_point="/Volumes/${{ needs.src.outputs.project }}" +# if [ ! -d "$mount_point/${{ needs.src.outputs.project }}.xcframework" ]; then +# echo "Cannot find ${{ needs.src.outputs.project }}.xcframework!" 
+# exit 1 +# fi +# echo "mount_point=$mount_point">>$GITHUB_OUTPUT +# - name: 'CMake (configure + build) Darwin' +# run: | +# cmake -S "${{ steps.src.outputs.path }}/cmake/test" \ +# -DTEST_STATIC=FALSE \ +# -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}" \ +# -DCMAKE_SYSTEM_NAME=Darwin \ +# -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64" \ +# -DCMAKE_OSX_DEPLOYMENT_TARGET=10.13 \ +# -Werror=dev \ +# -B build_darwin +# cmake --build build_darwin --config Release --verbose +# +# cmake -S "${{ steps.src.outputs.path }}/cmake/test" \ +# -DTEST_STATIC=FALSE \ +# -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}/PhysicsFS.xcframework/macos-arm64_x86_64" \ +# -DCMAKE_SYSTEM_NAME=Darwin \ +# -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64" \ +# -DCMAKE_OSX_DEPLOYMENT_TARGET=10.13 \ +# -Werror=dev \ +# -B build_darwin_2 +# cmake --build build_darwin_2 --config Release --verbose +# - name: 'CMake (configure + build) iOS' +# run: | +# cmake -S "${{ steps.src.outputs.path }}/cmake/test" \ +# -DTEST_STATIC=FALSE \ +# -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}" \ +# -DCMAKE_SYSTEM_NAME=iOS \ +# -DCMAKE_OSX_ARCHITECTURES="arm64" \ +# -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0 \ +# -Werror=dev \ +# -B build_ios +# cmake --build build_ios --config Release --verbose +# +# cmake -S "${{ steps.src.outputs.path }}/cmake/test" \ +# -DTEST_STATIC=FALSE \ +# -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}/PhysicsFS.xcframework/ios-arm64" \ +# -DCMAKE_SYSTEM_NAME=iOS \ +# -DCMAKE_OSX_ARCHITECTURES="arm64" \ +# -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0 \ +# -Werror=dev \ +# -B build_ios2 +# cmake --build build_ios2 --config Release --verbose +# - name: 'CMake (configure + build) tvOS' +# run: | +# cmake -S "${{ steps.src.outputs.path }}/cmake/test" \ +# -DTEST_STATIC=FALSE \ +# -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}" \ +# -DCMAKE_SYSTEM_NAME=tvOS \ +# -DCMAKE_OSX_ARCHITECTURES="arm64" \ +# -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0 \ +# -Werror=dev \ +# -B build_tvos +# cmake --build build_tvos --config Release --verbose +# +# cmake -S "${{ steps.src.outputs.path }}/cmake/test" \ +# -DTEST_STATIC=FALSE \ +# -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}/PhysicsFS.xcframework/tvos-arm64" \ +# -DCMAKE_SYSTEM_NAME=tvOS \ +# -DCMAKE_OSX_ARCHITECTURES="arm64" \ +# -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0 \ +# -Werror=dev \ +# -B build_tvos2 +# cmake --build build_tvos2 --config Release --verbose +# - name: 'CMake (configure + build) iOS simulator' +# run: | +# sysroot=$(xcodebuild -version -sdk iphonesimulator Path) +# echo "sysroot=$sysroot" +# cmake -S "${{ steps.src.outputs.path }}/cmake/test" \ +# -DTEST_STATIC=FALSE \ +# -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}" \ +# -DCMAKE_SYSTEM_NAME=iOS \ +# -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64" \ +# -DCMAKE_OSX_SYSROOT="${sysroot}" \ +# -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0 \ +# -Werror=dev \ +# -B build_ios_simulator +# cmake --build build_ios_simulator --config Release --verbose +# +# cmake -S "${{ steps.src.outputs.path }}/cmake/test" \ +# -DTEST_STATIC=FALSE \ +# -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}/PhysicsFS.xcframework/ios-arm64_x86_64-simulator" \ +# -DCMAKE_SYSTEM_NAME=iOS \ +# -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64" \ +# -DCMAKE_OSX_SYSROOT="${sysroot}" \ +# -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0 \ +# -Werror=dev \ +# -B build_ios_simulator2 +# cmake --build build_ios_simulator2 --config Release --verbose +# - name: 'CMake (configure + build) tvOS simulator' +# run: | +#
sysroot=$(xcodebuild -version -sdk appletvsimulator Path) +# echo "sysroot=$sysroot" +# cmake -S "${{ steps.src.outputs.path }}/cmake/test" \ +# -DTEST_STATIC=FALSE \ +# -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}" \ +# -DCMAKE_SYSTEM_NAME=tvOS \ +# -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64" \ +# -DCMAKE_OSX_SYSROOT="${sysroot}" \ +# -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0 \ +# -Werror=dev \ +# -B build_tvos_simulator +# cmake --build build_tvos_simulator --config Release --verbose +# +# cmake -S "${{ steps.src.outputs.path }}/cmake/test" \ +# -DTEST_STATIC=FALSE \ +# -DCMAKE_PREFIX_PATH="${{ steps.mount.outputs.mount_point }}/PhysicsFS.xcframework/tvos-arm64_x86_64-simulator" \ +# -DCMAKE_SYSTEM_NAME=tvOS \ +# -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64" \ +# -DCMAKE_OSX_SYSROOT="${sysroot}" \ +# -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0 \ +# -Werror=dev \ +# -B build_tvos_simulator2 +# cmake --build build_tvos_simulator2 --config Release --verbose + + msvc: + needs: [src] + runs-on: windows-2025 + outputs: + VC-x86: ${{ steps.releaser.outputs.VC-x86 }} + VC-x64: ${{ steps.releaser.outputs.VC-x64 }} + VC-arm64: ${{ steps.releaser.outputs.VC-arm64 }} + VC-devel: ${{ steps.releaser.outputs.VC-devel }} + steps: + - name: 'Set up Python' + uses: actions/setup-python@v5 + with: + python-version: '3.11' + - name: 'Fetch build-release.py' + uses: actions/checkout@v4 + with: + sparse-checkout: 'build-scripts/build-release.py' + - name: 'Download source archives' + uses: actions/download-artifact@v4 + with: + name: sources + path: '${{ github.workspace }}' + - name: 'Unzip ${{ needs.src.outputs.src-zip }}' + id: zip + run: | + New-Item C:\temp -ItemType Directory -ErrorAction SilentlyContinue + cd C:\temp + unzip "${{ github.workspace }}/${{ needs.src.outputs.src-zip }}" + echo "path=C:\temp\${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$Env:GITHUB_OUTPUT + - name: 'Build MSVC binary archives' + id: releaser + run: | + python build-scripts/build-release.py ` + --actions msvc ` + --commit ${{ inputs.commit }} ` + --root "${{ steps.zip.outputs.path }}" ` + --github ` + --debug + - name: 'Store MSVC archives' + uses: actions/upload-artifact@v4 + with: + name: win32 + path: '${{ github.workspace }}/dist' + + msvc-verify: + needs: [msvc, src] + runs-on: windows-latest + steps: + - name: 'Fetch .github/actions/setup-ninja/action.yml' + uses: actions/checkout@v4 + with: + sparse-checkout: '.github/actions/setup-ninja/action.yml' + - name: 'Download source archives' + uses: actions/download-artifact@v4 + with: + name: sources + path: '${{ github.workspace }}' + - name: 'Download MSVC binaries' + uses: actions/download-artifact@v4 + with: + name: win32 + path: '${{ github.workspace }}' + - name: 'Unzip ${{ needs.src.outputs.src-zip }}' + id: src + run: | + mkdir '${{ github.workspace }}/sources' + cd '${{ github.workspace }}/sources' + unzip "${{ github.workspace }}/${{ needs.src.outputs.src-zip }}" + echo "path=${{ github.workspace }}/sources/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$env:GITHUB_OUTPUT + - name: 'Unzip ${{ needs.msvc.outputs.VC-devel }}' + id: bin + run: | + mkdir '${{ github.workspace }}/vc' + cd '${{ github.workspace }}/vc' + unzip "${{ github.workspace }}/${{ needs.msvc.outputs.VC-devel }}" + echo "path=${{ github.workspace }}/vc/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$env:GITHUB_OUTPUT + - name: Set up ninja + uses: ./.github/actions/setup-ninja + - name: 'Configure vcvars x86' + uses: ilammy/msvc-dev-cmd@v1 
+ with: + arch: x64_x86 + - name: 'CMake (configure + build + tests) x86' + run: | + cmake -S "${{ steps.src.outputs.path }}/cmake/test" ` + -B build_x86 ` + -GNinja ` + -DCMAKE_BUILD_TYPE=Debug ` + -Werror=dev ` + -DTEST_STATIC=FALSE ` + -DTEST_SHARED=TRUE ` + -DCMAKE_SUPPRESS_REGENERATION=TRUE ` + -DCMAKE_PREFIX_PATH="${{ steps.bin.outputs.path }}" + Start-Sleep -Seconds 2 + cmake --build build_x86 --verbose + - name: 'Configure vcvars x64' + uses: ilammy/msvc-dev-cmd@v1 + with: + arch: x64 + - name: 'CMake (configure + build + tests) x64' + run: | + cmake -S "${{ steps.src.outputs.path }}/cmake/test" ` + -B build_x64 ` + -GNinja ` + -DCMAKE_BUILD_TYPE=Debug ` + -Werror=dev ` + -DTEST_STATIC=FALSE ` + -DTEST_SHARED=TRUE ` + -DCMAKE_SUPPRESS_REGENERATION=TRUE ` + -DCMAKE_PREFIX_PATH="${{ steps.bin.outputs.path }}" + Start-Sleep -Seconds 2 + cmake --build build_x64 --verbose + - name: 'Configure vcvars arm64' + uses: ilammy/msvc-dev-cmd@v1 + with: + arch: x64_arm64 + - name: 'CMake (configure + build) arm64' + run: | + cmake -S "${{ steps.src.outputs.path }}/cmake/test" ` + -B build_arm64 ` + -GNinja ` + -DCMAKE_BUILD_TYPE=Debug ` + -Werror=dev ` + -DTEST_STATIC=FALSE ` + -DTEST_SHARED=TRUE ` + -DCMAKE_SUPPRESS_REGENERATION=TRUE ` + -DCMAKE_PREFIX_PATH="${{ steps.bin.outputs.path }}" + Start-Sleep -Seconds 2 + cmake --build build_arm64 --verbose + - name: 'CMake (configure + build) arm64ec' + run: | + cmake -S "${{ steps.src.outputs.path }}/cmake/test" ` + -B build_arm64ec ` + -GNinja ` + -DCMAKE_BUILD_TYPE=Debug ` + -Werror=dev ` + -DTEST_STATIC=FALSE ` + -DTEST_SHARED=TRUE ` + -DCMAKE_SUPPRESS_REGENERATION=TRUE ` + -DCMAKE_C_FLAGS="/arm64EC" ` + -DCMAKE_CXX_FLAGS="/arm64EC" ` + -DCMAKE_EXE_LINKER_FLAGS="/MACHINE:ARM64EC" ` + -DCMAKE_SHARED_LINKER_FLAGS="/MACHINE:ARM64EC" ` + -DCMAKE_STATIC_LINKER_FLAGS="/MACHINE:ARM64EC" ` + -DCMAKE_PREFIX_PATH="${{ steps.bin.outputs.path }}" + Start-Sleep -Seconds 2 + cmake --build build_arm64ec --verbose + + mingw: + needs: [src] + runs-on: ubuntu-24.04 # FIXME: current ubuntu-latest ships an outdated mingw, replace with ubuntu-latest once 24.04 becomes the new default + outputs: + mingw-devel-tar-gz: ${{ steps.releaser.outputs.mingw-devel-tar-gz }} + mingw-devel-tar-xz: ${{ steps.releaser.outputs.mingw-devel-tar-xz }} + steps: + - name: 'Set up Python' + uses: actions/setup-python@v5 + with: + python-version: '3.11' + - name: 'Fetch build-release.py' + uses: actions/checkout@v4 + with: + sparse-checkout: 'build-scripts/build-release.py' + - name: 'Install Mingw toolchain' + run: | + sudo apt-get update -y + sudo apt-get install -y gcc-mingw-w64 g++-mingw-w64 ninja-build + - name: 'Download source archives' + uses: actions/download-artifact@v4 + with: + name: sources + path: '${{ github.workspace }}' + - name: 'Untar ${{ needs.src.outputs.src-tar-gz }}' + id: tar + run: | + mkdir -p /tmp/tardir + tar -C /tmp/tardir -v -x -f "${{ github.workspace }}/${{ needs.src.outputs.src-tar-gz }}" + echo "path=/tmp/tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT + - name: 'Build MinGW binary archives' + id: releaser + run: | + python build-scripts/build-release.py \ + --actions mingw \ + --commit ${{ inputs.commit }} \ + --root "${{ steps.tar.outputs.path }}" \ + --github \ + --debug + - name: 'Store MinGW archives' + uses: actions/upload-artifact@v4 + with: + name: mingw + path: '${{ github.workspace }}/dist' + + mingw-verify: + needs: [mingw, src] + runs-on: ubuntu-latest + steps: + - name: 'Install Mingw toolchain' + 
run: | + sudo apt-get update -y + sudo apt-get install -y gcc-mingw-w64 g++-mingw-w64 ninja-build + - name: 'Download source archives' + uses: actions/download-artifact@v4 + with: + name: sources + path: '${{ github.workspace }}' + - name: 'Download MinGW binaries' + uses: actions/download-artifact@v4 + with: + name: mingw + path: '${{ github.workspace }}' + - name: 'Untar ${{ needs.src.outputs.src-tar-gz }}' + id: src + run: | + mkdir -p /tmp/tardir + tar -C /tmp/tardir -v -x -f "${{ github.workspace }}/${{ needs.src.outputs.src-tar-gz }}" + echo "path=/tmp/tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT + - name: 'Untar ${{ needs.mingw.outputs.mingw-devel-tar-gz }}' + id: bin + run: | + mkdir -p /tmp/mingw-tardir + tar -C /tmp/mingw-tardir -v -x -f "${{ github.workspace }}/${{ needs.mingw.outputs.mingw-devel-tar-gz }}" + echo "path=/tmp/mingw-tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT + - name: 'CMake (configure + build) i686' + run: | + cmake -S "${{ steps.src.outputs.path }}/cmake/test" \ + -DCMAKE_BUILD_TYPE="Release" \ + -DTEST_STATIC=FALSE \ + -DCMAKE_PREFIX_PATH="${{ steps.bin.outputs.path }}" \ + -DCMAKE_TOOLCHAIN_FILE="${{ steps.src.outputs.path }}/build-scripts/cmake-toolchain-mingw64-i686.cmake" \ + -Werror=dev \ + -B build_x86 + cmake --build build_x86 --config Release --verbose + - name: 'CMake (configure + build) x86_64' + run: | + cmake -S "${{ steps.src.outputs.path }}/cmake/test" \ + -DCMAKE_BUILD_TYPE="Release" \ + -DTEST_STATIC=false \ + -DCMAKE_PREFIX_PATH="${{ steps.bin.outputs.path }}" \ + -DCMAKE_TOOLCHAIN_FILE="${{ steps.src.outputs.path }}/build-scripts/cmake-toolchain-mingw64-x86_64.cmake" \ + -Werror=dev \ + -B build_x64 + cmake --build build_x64 --config Release --verbose + + android: + needs: [src] + runs-on: ubuntu-latest + outputs: + android-aar: ${{ steps.releaser.outputs.android-aar }} + steps: + - name: 'Set up Python' + uses: actions/setup-python@v5 + with: + python-version: '3.11' + - name: 'Fetch build-release.py' + uses: actions/checkout@v4 + with: + sparse-checkout: 'build-scripts/build-release.py' + - name: 'Setup Android NDK' + id: setup-ndk + uses: nttld/setup-ndk@v1 + with: + local-cache: false + ndk-version: r28c + - name: 'Setup Java JDK' + uses: actions/setup-java@v4 + with: + distribution: 'temurin' + java-version: '11' + - name: 'Install ninja' + run: | + sudo apt-get update -y + sudo apt-get install -y ninja-build + - name: 'Download source archives' + uses: actions/download-artifact@v4 + with: + name: sources + path: '${{ github.workspace }}' + - name: 'Untar ${{ needs.src.outputs.src-tar-gz }}' + id: tar + run: | + mkdir -p /tmp/tardir + tar -C /tmp/tardir -v -x -f "${{ github.workspace }}/${{ needs.src.outputs.src-tar-gz }}" + echo "path=/tmp/tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT + - name: 'Build Android prefab binary archive(s)' + id: releaser + run: | + python build-scripts/build-release.py \ + --actions android \ + --android-api 21 \ + --android-ndk-home "${{ steps.setup-ndk.outputs.ndk-path }}" \ + --commit ${{ inputs.commit }} \ + --root "${{ steps.tar.outputs.path }}" \ + --github \ + --debug + - name: 'Store Android archive(s)' + uses: actions/upload-artifact@v4 + with: + name: android + path: '${{ github.workspace }}/dist' + + android-verify: + needs: [android, src] + runs-on: ubuntu-latest + steps: + - name: 'Set up Python' + uses: actions/setup-python@v5 + with: + python-version: '3.11' 
+ - uses: actions/setup-java@v4 + with: + distribution: 'temurin' + java-version: '17' + - name: 'Download source archives' + uses: actions/download-artifact@v4 + with: + name: sources + path: '${{ github.workspace }}' + - name: 'Download Android .aar archive' + uses: actions/download-artifact@v4 + with: + name: android + path: '${{ github.workspace }}' + - name: 'Untar ${{ needs.src.outputs.src-tar-gz }}' + id: src + run: | + mkdir -p /tmp/tardir + tar -C /tmp/tardir -v -x -f "${{ github.workspace }}/${{ needs.src.outputs.src-tar-gz }}" + echo "path=/tmp/tardir/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}" >>$GITHUB_OUTPUT + - name: 'Extract Android SDK from AAR' + id: sdk + run: | + cd /tmp + unzip "${{ github.workspace }}/${{ needs.android.outputs.android-aar }}" + python "${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}.aar" -o /tmp/PhysicsFS3-android + echo "prefix=/tmp/PhysicsFS3-android" >>$GITHUB_OUTPUT + echo "physicsfs-aar=/tmp/${{ needs.src.outputs.project }}-${{ needs.src.outputs.version }}.aar" >>$GITHUB_OUTPUT + - name: 'Verify alignment of libphysfs.so (arm64-v8a/x86_64)' + run: | + set -e + ${{ steps.src.outputs.path }}/build-scripts/check_elf_alignment.sh ${{ steps.sdk.outputs.prefix }}/lib/arm64-v8a/libphysfs.so + ${{ steps.src.outputs.path }}/build-scripts/check_elf_alignment.sh ${{ steps.sdk.outputs.prefix }}/lib/x86_64/libphysfs.so + - name: 'CMake (configure + build) x86, x64, arm32, arm64' + run: | + set -e + android_abis="x86 x86_64 armeabi-v7a arm64-v8a" + for android_abi in ${android_abis}; do + echo "Configuring ${android_abi}..." + cmake -S "${{ steps.src.outputs.path }}/cmake/test" \ + -DTEST_STATIC=FALSE \ + -DCMAKE_PREFIX_PATH="${{ steps.sdk.outputs.prefix }}" \ + -DCMAKE_TOOLCHAIN_FILE=${ANDROID_NDK_HOME}/build/cmake/android.toolchain.cmake \ + -DANDROID_USE_LEGACY_TOOLCHAIN=0 \ + -DANDROID_ABI=${android_abi} \ + -DCMAKE_BUILD_TYPE=Release \ + -B "${android_abi}" + echo "Building ${android_abi}..." + cmake --build "${android_abi}" --verbose + done diff --git a/.gitignore b/.gitignore index 709eee09..67eb52cd 100644 --- a/.gitignore +++ b/.gitignore @@ -1,10 +1,14 @@ +/dist cmake-build CMakeFiles/ CMakeCache.txt +build.ninja Makefile test_physfs* libphysfs.* +physfs*.dll install_manifest.txt +PhysFS-*.cmake cmake_install.cmake CMakeDoxy* extras/physfs.pc diff --git a/CMakeLists.txt b/CMakeLists.txt index 2ea14a49..ce2b796c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -15,7 +15,11 @@ cmake_minimum_required(VERSION 3.5...4.0) project(PhysicsFS VERSION ${PHYSFS_VERSION} LANGUAGES C ) +include(CMakeDependentOption) include(GNUInstallDirs) +include("${PROJECT_SOURCE_DIR}/cmake/GetGitRevisionDescription.cmake") +include("${PROJECT_SOURCE_DIR}/cmake/PrivateSdlFunctions.cmake") +include("${PROJECT_SOURCE_DIR}/cmake/sdlmanpages.cmake") # Increment this if/when we break backwards compatibility. 
set(PHYSFS_SOVERSION 1) @@ -23,6 +27,8 @@ set(PHYSFS_SOVERSION 1) set(PHYSFS_M_SRCS) set(PHYSFS_CPP_SRCS) +option(PHYSFS_WERROR "Treat warnings as errors" OFF) + if(WIN32) list(APPEND OPTIONAL_LIBRARY_LIBS advapi32 shell32) endif() @@ -100,6 +106,7 @@ set(PHYSFS_SRCS src/physfs_archiver_iso9660.c src/physfs_archiver_vdf.c src/physfs_archiver_lec3d.c + src/physfs_version.rc ${PHYSFS_CPP_SRCS} ${PHYSFS_M_SRCS} ) @@ -174,18 +181,19 @@ option(PHYSFS_BUILD_STATIC "Build static library" TRUE) if(PHYSFS_BUILD_STATIC) add_library(physfs-static STATIC ${PHYSFS_SRCS}) add_library(PhysFS::PhysFS-static ALIAS physfs-static) - set_target_properties(physfs-static PROPERTIES EXPORT_NAME PhysFS-static) + sdl_add_warning_options(physfs-static WARNING_AS_ERROR ${PHYSFS_WERROR}) + set_property(TARGET physfs-static PROPERTY EXPORT_NAME PhysFS-static) # Don't rename this on Windows, since DLLs will also produce an import # library named "physfs.lib" which would conflict; Unix tend to like the # same library name with a different extension for static libs, but # Windows can just have a separate name. if(NOT MSVC) - set_target_properties(physfs-static PROPERTIES OUTPUT_NAME "physfs") + set_property(TARGET physfs-static PROPERTY OUTPUT_NAME "physfs") endif() if(WINRT) # Ignore LNK4264 warnings; we don't author any WinRT components, just consume them, so we're okay in a static library. - set_target_properties(physfs-static PROPERTIES VS_WINRT_COMPONENT True) - set_target_properties(physfs-static PROPERTIES STATIC_LIBRARY_FLAGS "/ignore:4264") + set_property(TARGET physfs-static PROPERTY VS_WINRT_COMPONENT TRUE) + set_property(TARGET physfs-static PROPERTY STATIC_LIBRARY_FLAGS "/ignore:4264") endif() if(WIN32 OR WINRT OR OS2) # no dll exports from the static library @@ -193,39 +201,49 @@ if(PHYSFS_BUILD_STATIC) endif() target_include_directories(physfs-static PUBLIC "$") target_link_libraries(physfs-static PRIVATE ${OPTIONAL_LIBRARY_LIBS} ${OTHER_LDFLAGS}) - set(PHYSFS_LIB_TARGET PhysFS::PhysFS-static) list(APPEND PHYSFS_INSTALL_TARGETS "physfs-static") endif() option(PHYSFS_BUILD_SHARED "Build shared library" TRUE) if(PHYSFS_BUILD_SHARED) - add_library(physfs SHARED ${PHYSFS_SRCS}) - add_library(PhysFS::PhysFS ALIAS physfs) - set_target_properties(physfs PROPERTIES MACOSX_RPATH 1) - set_target_properties(physfs PROPERTIES VERSION ${PHYSFS_VERSION}) - set_target_properties(physfs PROPERTIES SOVERSION ${PHYSFS_SOVERSION}) - set_target_properties(physfs PROPERTIES EXPORT_NAME PhysFS) - if(WINRT) - set_target_properties(physfs PROPERTIES VS_WINRT_COMPONENT TRUE) + add_library(physfs-shared SHARED ${PHYSFS_SRCS}) + add_library(PhysFS::PhysFS-shared ALIAS physfs-shared) + sdl_add_warning_options(physfs-shared WARNING_AS_ERROR ${PHYSFS_WERROR}) + set_property(TARGET physfs-shared PROPERTY OUTPUT_NAME "physfs") + set_property(TARGET physfs-shared PROPERTY MACOSX_RPATH 1) + set_property(TARGET physfs-shared PROPERTY VERSION ${PHYSFS_VERSION}) + set_property(TARGET physfs-shared PROPERTY SOVERSION ${PHYSFS_SOVERSION}) + set_property(TARGET physfs-shared PROPERTY EXPORT_NAME PhysFS-shared) + if(WIN32) + set_property(TARGET physfs-shared PROPERTY PREFIX "") endif() - if(OS2) # OS/2 does not support a DLL name longer than 8 characters. 
- set_target_properties(physfs PROPERTIES OUTPUT_NAME "physfs") + if(MINGW) + target_link_options(physfs-shared PRIVATE -static-libgcc) endif() - target_include_directories(physfs PUBLIC "$") - target_link_libraries(physfs PRIVATE ${OPTIONAL_LIBRARY_LIBS} ${OTHER_LDFLAGS}) - set(PHYSFS_LIB_TARGET PhysFS::PhysFS) - list(APPEND PHYSFS_INSTALL_TARGETS "physfs") + if(WINRT) + set_property(TARGET physfs-shared PROPERTY VS_WINRT_COMPONENT TRUE) + endif() + target_include_directories(physfs-shared PUBLIC "$") + target_link_libraries(physfs-shared PRIVATE ${OPTIONAL_LIBRARY_LIBS} ${OTHER_LDFLAGS}) + list(APPEND PHYSFS_INSTALL_TARGETS "physfs-shared") endif() if(NOT PHYSFS_BUILD_SHARED AND NOT PHYSFS_BUILD_STATIC) message(FATAL "Both shared and static libraries are disabled!") endif() +if(TARGET physfs-shared) + add_library(PhysFS::PhysFS ALIAS physfs-shared) +elseif(TARGET PhysFS::PhysFS-static) + add_library(PhysFS::PhysFS ALIAS physfs-static) +endif() + option(PHYSFS_BUILD_TEST "Build stdio test program." TRUE) mark_as_advanced(PHYSFS_BUILD_TEST) if(PHYSFS_BUILD_TEST) add_executable(test_physfs test/test_physfs.c) - target_link_libraries(test_physfs PRIVATE ${PHYSFS_LIB_TARGET} ${OTHER_LDFLAGS}) + target_link_libraries(test_physfs PRIVATE PhysFS::PhysFS) + sdl_add_warning_options(test_physfs WARNING_AS_ERROR ${PHYSFS_WERROR}) find_path(READLINE_H readline/readline.h) find_path(HISTORY_H readline/history.h) @@ -238,36 +256,98 @@ if(PHYSFS_BUILD_TEST) target_compile_definitions(test_physfs PRIVATE PHYSFS_HAVE_READLINE=1) endif() list(APPEND PHYSFS_INSTALL_TARGETS test_physfs) + + if(UNIX) + add_executable(physfshttpd extras/physfshttpd.c) + target_link_libraries(physfshttpd PRIVATE PhysFS::PhysFS) + sdl_add_warning_options(physfshttpd WARNING_AS_ERROR ${PHYSFS_WERROR}) + endif() endif() -option(PHYSFS_DISABLE_INSTALL "Disable installing PhysFS" OFF) -if(NOT PHYSFS_DISABLE_INSTALL) +option(PHYSFS_INSTALL "Enable PhysFS installation" ON) +cmake_dependent_option(PHYSFS_INSTALL_MAN "Install man pages for PhysicsFS" OFF "PHYSFS_INSTALL" OFF) +if(PHYSFS_INSTALL) + if(MSVC) + set(PHYSFS_INSTALL_CMAKEDIR "cmake") + else() + set(PHYSFS_INSTALL_CMAKEDIR "${CMAKE_INSTALL_LIBDIR}/cmake/PhysFS") + endif() - install(TARGETS ${PHYSFS_INSTALL_TARGETS} EXPORT PhysFSExport + if(TARGET physfs-shared) + install(TARGETS physfs-shared EXPORT physfs-shared-exports + RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} + LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} + ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} + INCLUDES DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) + install(EXPORT physfs-shared-exports + DESTINATION "${PHYSFS_INSTALL_CMAKEDIR}" + FILE PhysFS-shared-targets.cmake + NAMESPACE PhysFS:: + ) + export(TARGETS physfs-shared NAMESPACE PhysFS:: FILE PhysFS-shared-targets.cmake) + if(MSVC) + SDL_install_pdb(physfs-shared "${CMAKE_INSTALL_BINDIR}") + endif() + endif() + if(TARGET physfs-static) + install(TARGETS physfs-static EXPORT physfs-static-exports RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} INCLUDES DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) + install(EXPORT physfs-static-exports + DESTINATION "${PHYSFS_INSTALL_CMAKEDIR}" + FILE PhysFS-static-targets.cmake + NAMESPACE PhysFS:: + ) + export(TARGETS physfs-static NAMESPACE PhysFS:: FILE PhysFS-static-targets.cmake) + if(MSVC) + SDL_install_pdb(physfs-static "${CMAKE_INSTALL_LIBDIR}") + endif() + endif() + if(TARGET test_physfs) + install(TARGETS test_physfs RUNTIME DESTINATION 
${CMAKE_INSTALL_BINDIR}) + endif() install(FILES src/physfs.h DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) - install(EXPORT PhysFSExport - DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/PhysFS" - FILE PhysFSConfig.cmake - NAMESPACE PhysFS:: + include(CMakePackageConfigHelpers) + configure_package_config_file(cmake/PhysFSConfig.cmake.in PhysFSConfig.cmake + NO_SET_AND_CHECK_MACRO + PATH_VARS CMAKE_INSTALL_PREFIX + INSTALL_DESTINATION "${PHYSFS_INSTALL_CMAKEDIR}" ) + write_basic_package_version_file(PhysFSConfigVersion.cmake + COMPATIBILITY SameMajorVersion + ) + install(FILES + "${CMAKE_CURRENT_BINARY_DIR}/PhysFSConfig.cmake" + "${CMAKE_CURRENT_BINARY_DIR}/PhysFSConfigVersion.cmake" + DESTINATION "${PHYSFS_INSTALL_CMAKEDIR}" + ) + install(FILES LICENSE.txt + DESTINATION "${CMAKE_INSTALL_DATAROOTDIR}/licenses/PhysicsFS${PROJECT_VERSION_MAJOR}") - if(NOT MSVC) - configure_file( - "extras/physfs.pc.in" - "extras/physfs.pc" - @ONLY - ) + configure_file(cmake/cmake_uninstall.cmake.in cmake_uninstall.cmake IMMEDIATE @ONLY) + add_custom_target(uninstall-physfs + COMMAND ${CMAKE_COMMAND} -P "${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake") + if(NOT MSVC) + configure_file(cmake/physfs.pc.in physfs.pc @ONLY) install( - FILES "${CMAKE_CURRENT_BINARY_DIR}/extras/physfs.pc" + FILES "${CMAKE_CURRENT_BINARY_DIR}/physfs.pc" DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig" ) endif() + + if(PHYSFS_INSTALL_MAN) + sdl_get_git_revision_hash(PHYSFS_REVISION) + SDL_generate_manpages( + HEADERS_DIR "${PROJECT_SOURCE_DIR}/src/physfs.h" + SYMBOL "PHYSFS_init" + WIKIHEADERS_PL_PATH "${PROJECT_SOURCE_DIR}/build-scripts/wikiheaders.pl" + REVISION "${PHYSFS_REVISION}" + ) + endif() endif() option(PHYSFS_BUILD_DOCS "Build doxygen based documentation" TRUE) @@ -309,14 +389,6 @@ if(UNIX) WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" COMMENT "Building source tarball '${PHYSFS_TARBALL}'..." ) - - set(PHYSFS_TARGETNAME_UNINSTALL "uninstall" CACHE STRING "Name of 'uninstall' build target") - add_custom_target( - ${PHYSFS_TARGETNAME_UNINSTALL} - "${CMAKE_CURRENT_SOURCE_DIR}/extras/uninstall.sh" - WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}" - COMMENT "Uninstall the project..." - ) endif() diff --git a/build-scripts/build-release.py b/build-scripts/build-release.py new file mode 100755 index 00000000..1403c164 --- /dev/null +++ b/build-scripts/build-release.py @@ -0,0 +1,1572 @@ +#!/usr/bin/env python3 + +""" +This script is shared between SDL2, SDL3, and all satellite libraries. +Don't specialize this script for doing project-specific modifications. +Rather, modify release-info.json. 
+""" + +import argparse +import collections +import dataclasses +from collections.abc import Callable +import contextlib +import datetime +import fnmatch +import glob +import io +import json +import logging +import multiprocessing +import os +from pathlib import Path +import platform +import re +import shlex +import shutil +import subprocess +import sys +import tarfile +import tempfile +import textwrap +import typing +import zipfile + + +logger = logging.getLogger(__name__) +GIT_HASH_FILENAME = ".git-hash" +REVISION_TXT = "REVISION.txt" + +RE_ILLEGAL_MINGW_LIBRARIES = re.compile(r"(?:lib)?(?:gcc|(?:std)?c[+][+]|(?:win)?pthread).*", flags=re.I) + + +def safe_isotime_to_datetime(str_isotime: str) -> datetime.datetime: + try: + return datetime.datetime.fromisoformat(str_isotime) + except ValueError: + pass + logger.warning("Invalid iso time: %s", str_isotime) + if str_isotime[-6:-5] in ("+", "-"): + # Commits can have isotime with invalid timezone offset (e.g. "2021-07-04T20:01:40+32:00") + modified_str_isotime = str_isotime[:-6] + "+00:00" + try: + return datetime.datetime.fromisoformat(modified_str_isotime) + except ValueError: + pass + raise ValueError(f"Invalid isotime: {str_isotime}") + + +def arc_join(*parts: str) -> str: + assert all(p[:1] != "/" and p[-1:] != "/" for p in parts), f"None of {parts} may start or end with '/'" + return "/".join(p for p in parts if p) + + +@dataclasses.dataclass(frozen=True) +class VsArchPlatformConfig: + arch: str + configuration: str + platform: str + + def extra_context(self): + return { + "ARCH": self.arch, + "CONFIGURATION": self.configuration, + "PLATFORM": self.platform, + } + + +@contextlib.contextmanager +def chdir(path): + original_cwd = os.getcwd() + try: + os.chdir(path) + yield + finally: + os.chdir(original_cwd) + + +class Executer: + def __init__(self, root: Path, dry: bool=False): + self.root = root + self.dry = dry + + def run(self, cmd, cwd=None, env=None): + logger.info("Executing args=%r", cmd) + sys.stdout.flush() + if not self.dry: + subprocess.check_call(cmd, cwd=cwd or self.root, env=env, text=True) + + def check_output(self, cmd, cwd=None, dry_out=None, env=None, text=True): + logger.info("Executing args=%r", cmd) + sys.stdout.flush() + if self.dry: + return dry_out + return subprocess.check_output(cmd, cwd=cwd or self.root, env=env, text=text) + + +class SectionPrinter: + @contextlib.contextmanager + def group(self, title: str): + print(f"{title}:") + yield + + +class GitHubSectionPrinter(SectionPrinter): + def __init__(self): + super().__init__() + self.in_group = False + + @contextlib.contextmanager + def group(self, title: str): + print(f"::group::{title}") + assert not self.in_group, "Can enter a group only once" + self.in_group = True + yield + self.in_group = False + print("::endgroup::") + + +class VisualStudio: + def __init__(self, executer: Executer, year: typing.Optional[str]=None): + self.executer = executer + self.vsdevcmd = self.find_vsdevcmd(year) + self.msbuild = self.find_msbuild() + + @property + def dry(self) -> bool: + return self.executer.dry + + VS_YEAR_TO_VERSION = { + "2022": 17, + "2019": 16, + "2017": 15, + "2015": 14, + "2013": 12, + } + + def find_vsdevcmd(self, year: typing.Optional[str]=None) -> typing.Optional[Path]: + vswhere_spec = ["-latest"] + if year is not None: + try: + version = self.VS_YEAR_TO_VERSION[year] + except KeyError: + logger.error("Invalid Visual Studio year") + return None + vswhere_spec.extend(["-version", f"[{version},{version+1})"]) + vswhere_cmd = ["vswhere"] +
vswhere_spec + ["-property", "installationPath"] + vs_install_path = Path(self.executer.check_output(vswhere_cmd, dry_out="/tmp").strip()) + logger.info("VS install_path = %s", vs_install_path) + assert vs_install_path.is_dir(), "VS installation path does not exist" + vsdevcmd_path = vs_install_path / "Common7/Tools/vsdevcmd.bat" + logger.info("vsdevcmd path = %s", vsdevcmd_path) + if self.dry: + vsdevcmd_path.parent.mkdir(parents=True, exist_ok=True) + vsdevcmd_path.touch(exist_ok=True) + assert vsdevcmd_path.is_file(), "vsdevcmd.bat batch file does not exist" + return vsdevcmd_path + + def find_msbuild(self) -> typing.Optional[Path]: + vswhere_cmd = ["vswhere", "-latest", "-requires", "Microsoft.Component.MSBuild", "-find", r"MSBuild\**\Bin\MSBuild.exe"] + msbuild_path = Path(self.executer.check_output(vswhere_cmd, dry_out="/tmp/MSBuild.exe").strip()) + logger.info("MSBuild path = %s", msbuild_path) + if self.dry: + msbuild_path.parent.mkdir(parents=True, exist_ok=True) + msbuild_path.touch(exist_ok=True) + assert msbuild_path.is_file(), "MSBuild.exe does not exist" + return msbuild_path + + def build(self, arch_platform: VsArchPlatformConfig, projects: list[Path]): + assert projects, "Need at least one project to build" + + vsdev_cmd_str = f"\"{self.vsdevcmd}\" -arch={arch_platform.arch}" + msbuild_cmd_str = " && ".join([f"\"{self.msbuild}\" \"{project}\" /m /p:BuildInParallel=true /p:Platform={arch_platform.platform} /p:Configuration={arch_platform.configuration}" for project in projects]) + bat_contents = f"{vsdev_cmd_str} && {msbuild_cmd_str}\n" + bat_path = Path(tempfile.gettempdir()) / "cmd.bat" + with bat_path.open("w") as f: + f.write(bat_contents) + + logger.info("Running cmd.exe script (%s): %s", bat_path, bat_contents) + cmd = ["cmd.exe", "/D", "/E:ON", "/V:OFF", "/S", "/C", f"CALL {str(bat_path)}"] + self.executer.run(cmd) + + +class Archiver: + def __init__(self, zip_path: typing.Optional[Path]=None, tgz_path: typing.Optional[Path]=None, txz_path: typing.Optional[Path]=None): + self._zip_files = [] + self._tar_files = [] + self._added_files = set() + if zip_path: + self._zip_files.append(zipfile.ZipFile(zip_path, "w", compression=zipfile.ZIP_DEFLATED)) + if tgz_path: + self._tar_files.append(tarfile.open(tgz_path, "w:gz")) + if txz_path: + self._tar_files.append(tarfile.open(txz_path, "w:xz")) + + @property + def added_files(self) -> set[str]: + return self._added_files + + def add_file_data(self, arcpath: str, data: bytes, mode: int, time: datetime.datetime): + for zf in self._zip_files: + file_data_time = (time.year, time.month, time.day, time.hour, time.minute, time.second) + zip_info = zipfile.ZipInfo(filename=arcpath, date_time=file_data_time) + zip_info.external_attr = mode << 16 + zip_info.compress_type = zipfile.ZIP_DEFLATED + zf.writestr(zip_info, data=data) + for tf in self._tar_files: + tar_info = tarfile.TarInfo(arcpath) + tar_info.type = tarfile.REGTYPE + tar_info.mode = mode + tar_info.size = len(data) + tar_info.mtime = int(time.timestamp()) + tf.addfile(tar_info, fileobj=io.BytesIO(data)) + + self._added_files.add(arcpath) + + def add_symlink(self, arcpath: str, target: str, time: datetime.datetime, files_for_zip): + logger.debug("Adding symlink (target=%r) -> %s", target, arcpath) + for zf in self._zip_files: + file_data_time = (time.year, time.month, time.day, time.hour, time.minute, time.second) + for f in files_for_zip: + zip_info = zipfile.ZipInfo(filename=f["arcpath"], date_time=file_data_time) + zip_info.external_attr = f["mode"] << 16 + 
zip_info.compress_type = zipfile.ZIP_DEFLATED + zf.writestr(zip_info, data=f["data"]) + for tf in self._tar_files: + tar_info = tarfile.TarInfo(arcpath) + tar_info.type = tarfile.SYMTYPE + tar_info.mode = 0o777 + tar_info.mtime = int(time.timestamp()) + tar_info.linkname = target + tf.addfile(tar_info) + + self._added_files.update(f["arcpath"] for f in files_for_zip) + + def add_git_hash(self, arcdir: str, commit: str, time: datetime.datetime): + arcpath = arc_join(arcdir, GIT_HASH_FILENAME) + data = f"{commit}\n".encode() + self.add_file_data(arcpath=arcpath, data=data, mode=0o100644, time=time) + + def add_file_path(self, arcpath: str, path: Path): + assert path.is_file(), f"{path} should be a file" + logger.debug("Adding %s -> %s", path, arcpath) + for zf in self._zip_files: + zf.write(path, arcname=arcpath) + for tf in self._tar_files: + tf.add(path, arcname=arcpath) + + def add_file_directory(self, arcdirpath: str, dirpath: Path): + assert dirpath.is_dir() + if arcdirpath and arcdirpath[-1:] != "/": + arcdirpath += "/" + for f in dirpath.iterdir(): + if f.is_file(): + arcpath = f"{arcdirpath}{f.name}" + logger.debug("Adding %s to %s", f, arcpath) + self.add_file_path(arcpath=arcpath, path=f) + + def close(self): + # Archiver is intentionally made invalid after this function + for zf in self._zip_files: + zf.close() + for tf in self._tar_files: + tf.close() + del self._zip_files + self._zip_files = None + del self._tar_files + self._tar_files = None + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + try: + self.close() + except RuntimeError as e: + print(e) + + +class NodeInArchive: + def __init__(self, arcpath: str, path: typing.Optional[Path]=None, data: typing.Optional[bytes]=None, mode: typing.Optional[int]=None, symtarget: typing.Optional[str]=None, time: typing.Optional[datetime.datetime]=None, directory: bool=False): + self.arcpath = arcpath + self.path = path + self.data = data + self.mode = mode + self.symtarget = symtarget + self.time = time + self.directory = directory + + @classmethod + def from_fs(cls, arcpath: str, path: Path, mode: int=0o100644, time: typing.Optional[datetime.datetime]=None) -> "NodeInArchive": + if time is None: + time = datetime.datetime.fromtimestamp(os.stat(path).st_mtime) + return cls(arcpath=arcpath, path=path, mode=mode, time=time) + + @classmethod + def from_data(cls, arcpath: str, data: bytes, time: datetime.datetime) -> "NodeInArchive": + return cls(arcpath=arcpath, data=data, time=time, mode=0o100644) + + @classmethod + def from_text(cls, arcpath: str, text: str, time: datetime.datetime) -> "NodeInArchive": + return cls.from_data(arcpath=arcpath, data=text.encode(), time=time) + + @classmethod + def from_symlink(cls, arcpath: str, symtarget: str) -> "NodeInArchive": + return cls(arcpath=arcpath, symtarget=symtarget) + + @classmethod + def from_directory(cls, arcpath: str) -> "NodeInArchive": + return cls(arcpath=arcpath, directory=True) + + def __repr__(self) -> str: + return f"<{type(self).__name__}:arcpath={self.arcpath},path='{str(self.path)}',len(data)={len(self.data) if self.data else 'n/a'},directory={self.directory},symtarget={self.symtarget}>" + + +def configure_file(path: Path, context: dict[str, str]) -> bytes: + text = path.read_text() + return configure_text(text, context=context).encode() + + +def configure_text(text: str, context: dict[str, str]) -> str: + original_text = text + for txt, repl in context.items(): + text = text.replace(f"@<@{txt}@>@", repl) + success = all(thing not in text for thing in
("@<@", "@>@")) + if not success: + raise ValueError(f"Failed to configure {repr(original_text)}") + return text + + +def configure_text_list(text_list: list[str], context: dict[str, str]) -> list[str]: + return [configure_text(text=e, context=context) for e in text_list] + + +class ArchiveFileTree: + def __init__(self): + self._tree: dict[str, NodeInArchive] = {} + + def add_file(self, file: NodeInArchive): + self._tree[file.arcpath] = file + + def __iter__(self) -> typing.Iterable[NodeInArchive]: + yield from self._tree.values() + + def __contains__(self, value: str) -> bool: + return value in self._tree + + def get_latest_mod_time(self) -> datetime.datetime: + return max(item.time for item in self._tree.values() if item.time) + + def add_to_archiver(self, archive_base: str, archiver: Archiver): + remaining_symlinks = set() + added_files = dict() + + def calculate_symlink_target(s: NodeInArchive) -> str: + dest_dir = os.path.dirname(s.arcpath) + if dest_dir: + dest_dir += "/" + target = dest_dir + s.symtarget + while True: + new_target, n = re.subn(r"([^/]+/+[.]{2}/)", "", target) + target = new_target + if not n: + break + return target + + # Add files in first pass + for arcpath, node in self._tree.items(): + assert node is not None, f"{arcpath} -> node" + if node.data is not None: + archiver.add_file_data(arcpath=arc_join(archive_base, arcpath), data=node.data, time=node.time, mode=node.mode) + assert node.arcpath is not None, f"{node=}" + added_files[node.arcpath] = node + elif node.path is not None: + archiver.add_file_path(arcpath=arc_join(archive_base, arcpath), path=node.path) + assert node.arcpath is not None, f"{node=}" + added_files[node.arcpath] = node + elif node.symtarget is not None: + remaining_symlinks.add(node) + elif node.directory: + pass + else: + raise ValueError(f"Invalid Archive Node: {repr(node)}") + + assert None not in added_files + + # Resolve symlinks in second pass: zipfile does not support symlinks, so add files to zip archive + while True: + if not remaining_symlinks: + break + symlinks_this_time = set() + extra_added_files = {} + for symlink in remaining_symlinks: + symlink_files_for_zip = {} + symlink_target_path = calculate_symlink_target(symlink) + if symlink_target_path in added_files: + symlink_files_for_zip[symlink.arcpath] = added_files[symlink_target_path] + else: + symlink_target_path_slash = symlink_target_path + "/" + for added_file in added_files: + if added_file.startswith(symlink_target_path_slash): + path_in_symlink = symlink.arcpath + "/" + added_file.removeprefix(symlink_target_path_slash) + symlink_files_for_zip[path_in_symlink] = added_files[added_file] + if symlink_files_for_zip: + symlinks_this_time.add(symlink) + extra_added_files.update(symlink_files_for_zip) + files_for_zip = [{"arcpath": f"{archive_base}/{sym_path}", "data": sym_info.data, "mode": sym_info.mode} for sym_path, sym_info in symlink_files_for_zip.items()] + archiver.add_symlink(arcpath=f"{archive_base}/{symlink.arcpath}", target=symlink.symtarget, time=symlink.time, files_for_zip=files_for_zip) + # if not symlinks_this_time: + # logger.info("files added: %r", set(path for path in added_files.keys())) + assert symlinks_this_time, f"No targets found for symlinks: {remaining_symlinks}" + remaining_symlinks.difference_update(symlinks_this_time) + added_files.update(extra_added_files) + + def add_directory_tree(self, arc_dir: str, path: Path, time: datetime.datetime): + assert path.is_dir() + for files_dir, _, filenames in os.walk(path): + files_dir_path = Path(files_dir) 
+ rel_files_path = files_dir_path.relative_to(path) + for filename in filenames: + self.add_file(NodeInArchive.from_fs(arcpath=arc_join(arc_dir, str(rel_files_path), filename), path=files_dir_path / filename, time=time)) + + def _add_files_recursively(self, arc_dir: str, paths: list[Path], time: datetime.datetime): + logger.debug(f"_add_files_recursively({arc_dir=} {paths=})") + for path in paths: + arcpath = arc_join(arc_dir, path.name) + if path.is_file(): + logger.debug("Adding %s as %s", path, arcpath) + self.add_file(NodeInArchive.from_fs(arcpath=arcpath, path=path, time=time)) + elif path.is_dir(): + self._add_files_recursively(arc_dir=arc_join(arc_dir, path.name), paths=list(path.iterdir()), time=time) + else: + raise ValueError(f"Unsupported file type to add recursively: {path}") + + def add_file_mapping(self, arc_dir: str, file_mapping: dict[str, list[str]], file_mapping_root: Path, context: dict[str, str], time: datetime.datetime): + for meta_rel_destdir, meta_file_globs in file_mapping.items(): + rel_destdir = configure_text(meta_rel_destdir, context=context) + assert "@" not in rel_destdir, f"archive destination should not contain an @ after configuration ({repr(meta_rel_destdir)}->{repr(rel_destdir)})" + for meta_file_glob in meta_file_globs: + file_glob = configure_text(meta_file_glob, context=context) + assert "@" not in file_glob, f"archive glob should not contain an @ after configuration ({repr(meta_file_glob)}->{repr(file_glob)})" + if ":" in file_glob: + original_path, new_filename = file_glob.rsplit(":", 1) + assert ":" not in original_path, f"Too many ':' in {repr(file_glob)}" + assert "/" not in new_filename, f"New filename cannot contain a '/' in {repr(file_glob)}" + path = file_mapping_root / original_path + arcpath = arc_join(arc_dir, rel_destdir, new_filename) + if path.suffix == ".in": + data = configure_file(path, context=context) + logger.debug("Adding processed %s -> %s", path, arcpath) + self.add_file(NodeInArchive.from_data(arcpath=arcpath, data=data, time=time)) + else: + logger.debug("Adding %s -> %s", path, arcpath) + self.add_file(NodeInArchive.from_fs(arcpath=arcpath, path=path, time=time)) + else: + relative_file_paths = glob.glob(file_glob, root_dir=file_mapping_root) + assert relative_file_paths, f"Glob '{file_glob}' does not match any file" + self._add_files_recursively(arc_dir=arc_join(arc_dir, rel_destdir), paths=[file_mapping_root / p for p in relative_file_paths], time=time) + + +class SourceCollector: + # TreeItem = collections.namedtuple("TreeItem", ("path", "mode", "data", "symtarget", "directory", "time")) + def __init__(self, root: Path, commit: str, filter: typing.Optional[Callable[[str], bool]], executer: Executer): + self.root = root + self.commit = commit + self.filter = filter + self.executer = executer + + def get_archive_file_tree(self) -> ArchiveFileTree: + git_archive_args = ["git", "archive", "--format=tar.gz", self.commit, "-o", "/dev/stdout"] + logger.info("Executing args=%r", git_archive_args) + contents_tgz = subprocess.check_output(git_archive_args, cwd=self.root, text=False) + tar_archive = tarfile.open(fileobj=io.BytesIO(contents_tgz), mode="r:gz") + filenames = tuple(m.name for m in tar_archive if (m.isfile() or m.issym())) + + file_times = self._get_file_times(paths=filenames) + git_contents = ArchiveFileTree() + for ti in tar_archive: + if self.filter and not self.filter(ti.name): + continue + data = None + symtarget = None + directory = False + file_time = None + if ti.isfile(): + contents_file =
tar_archive.extractfile(ti.name) + data = contents_file.read() + file_time = file_times[ti.name] + elif ti.issym(): + symtarget = ti.linkname + file_time = file_times[ti.name] + elif ti.isdir(): + directory = True + else: + raise ValueError(f"{ti.name}: unknown type") + node = NodeInArchive(arcpath=ti.name, data=data, mode=ti.mode, symtarget=symtarget, time=file_time, directory=directory) + git_contents.add_file(node) + return git_contents + + def _get_file_times(self, paths: tuple[str, ...]) -> dict[str, datetime.datetime]: + dry_out = textwrap.dedent("""\ + time=2024-03-14T15:40:25-07:00 + + M\tCMakeLists.txt + """) + git_log_out = self.executer.check_output(["git", "log", "--name-status", '--pretty=time=%cI', self.commit], dry_out=dry_out, cwd=self.root).splitlines(keepends=False) + current_time = None + set_paths = set(paths) + path_times: dict[str, datetime.datetime] = {} + for line in git_log_out: + if not line: + continue + if line.startswith("time="): + current_time = safe_isotime_to_datetime(line.removeprefix("time=")) + continue + mod_type, file_paths = line.split(maxsplit=1) + assert current_time is not None + for file_path in file_paths.split("\t"): + if file_path in set_paths and file_path not in path_times: + path_times[file_path] = current_time + + # FIXME: find out why some files are not shown in "git log" + # assert set(path_times.keys()) == set_paths + if set(path_times.keys()) != set_paths: + found_times = set(path_times.keys()) + paths_without_times = set_paths.difference(found_times) + logger.warning("No times found for these paths: %s", paths_without_times) + max_time = max(time for time in path_times.values()) + for path in paths_without_times: + path_times[path] = max_time + + return path_times + + +class AndroidApiVersion: + def __init__(self, name: str, ints: tuple[int, ...]): + self.name = name + self.ints = ints + + def __repr__(self) -> str: + return f"<{self.name} ({'.'.join(str(v) for v in self.ints)})>" + +ANDROID_ABI_EXTRA_LINK_OPTIONS = {} + +class Releaser: + def __init__(self, release_info: dict, commit: str, revision: str, root: Path, dist_path: Path, section_printer: SectionPrinter, executer: Executer, cmake_generator: str, deps_path: Path, overwrite: bool, github: bool, fast: bool): + self.release_info = release_info + self.project = release_info["name"] + self.version = self.extract_sdl_version(root=root, release_info=release_info) + self.root = root + self.commit = commit + self.revision = revision + self.dist_path = dist_path + self.section_printer = section_printer + self.executer = executer + self.cmake_generator = cmake_generator + self.cpu_count = multiprocessing.cpu_count() + self.deps_path = deps_path + self.overwrite = overwrite + self.github = github + self.fast = fast + self.arc_time = datetime.datetime.now() + + self.artifacts: dict[str, Path] = {} + + def get_context(self, extra_context: typing.Optional[dict[str, str]]=None) -> dict[str, str]: + ctx = { + "PROJECT_NAME": self.project, + "PROJECT_VERSION": self.version, + "PROJECT_COMMIT": self.commit, + "PROJECT_REVISION": self.revision, + "PROJECT_ROOT": str(self.root), + } + if extra_context: + ctx.update(extra_context) + return ctx + + @property + def dry(self) -> bool: + return self.executer.dry + + def prepare(self): + logger.debug("Creating dist folder") + self.dist_path.mkdir(parents=True, exist_ok=True) + + @classmethod + def _path_filter(cls, path: str) -> bool: + if ".gitmodules" in path: + return True + if path.startswith(".git"): + return False + return True + + @classmethod 
+ def _external_repo_path_filter(cls, path: str) -> bool: + if not cls._path_filter(path): + return False + if path.startswith("test/") or path.startswith("tests/"): + return False + return True + + def create_source_archives(self) -> None: + source_collector = SourceCollector(root=self.root, commit=self.commit, executer=self.executer, filter=self._path_filter) + print(f"Collecting sources of {self.project}...") + archive_tree: ArchiveFileTree = source_collector.get_archive_file_tree() + latest_mod_time = archive_tree.get_latest_mod_time() + archive_tree.add_file(NodeInArchive.from_text(arcpath=REVISION_TXT, text=f"{self.revision}\n", time=latest_mod_time)) + archive_tree.add_file(NodeInArchive.from_text(arcpath=f"{GIT_HASH_FILENAME}", text=f"{self.commit}\n", time=latest_mod_time)) + archive_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["source"].get("files", {}), file_mapping_root=self.root, context=self.get_context(), time=latest_mod_time) + + if "Makefile.am" in archive_tree: + patched_time = latest_mod_time + datetime.timedelta(minutes=1) + print(f"Makefile.am detected -> touching aclocal.m4, */Makefile.in, configure") + for node_data in archive_tree: + arc_name = os.path.basename(node_data.arcpath) + arc_name_we, arc_name_ext = os.path.splitext(arc_name) + if arc_name in ("aclocal.m4", "configure", "Makefile.in"): + print(f"Bumping time of {node_data.arcpath}") + node_data.time = patched_time + + archive_base = f"{self.project}-{self.version}" + zip_path = self.dist_path / f"{archive_base}.zip" + tgz_path = self.dist_path / f"{archive_base}.tar.gz" + txz_path = self.dist_path / f"{archive_base}.tar.xz" + + logger.info("Creating zip/tgz/txz source archives ...") + if self.dry: + zip_path.touch() + tgz_path.touch() + txz_path.touch() + else: + with Archiver(zip_path=zip_path, tgz_path=tgz_path, txz_path=txz_path) as archiver: + print(f"Adding source files of {self.project}...") + archive_tree.add_to_archiver(archive_base=archive_base, archiver=archiver) + + for extra_repo in self.release_info["source"].get("extra-repos", []): + extra_repo_root = self.root / extra_repo + assert (extra_repo_root / ".git").exists(), f"{extra_repo_root} must be a git repo" + extra_repo_commit = self.executer.check_output(["git", "rev-parse", "HEAD"], dry_out=f"gitsha-extra-repo-{extra_repo}", cwd=extra_repo_root).strip() + extra_repo_source_collector = SourceCollector(root=extra_repo_root, commit=extra_repo_commit, executer=self.executer, filter=self._external_repo_path_filter) + print(f"Collecting sources of {extra_repo} ...") + extra_repo_archive_tree = extra_repo_source_collector.get_archive_file_tree() + print(f"Adding source files of {extra_repo} ...") + extra_repo_archive_tree.add_to_archiver(archive_base=f"{archive_base}/{extra_repo}", archiver=archiver) + + for file in self.release_info["source"]["checks"]: + assert f"{archive_base}/{file}" in archiver.added_files, f"'{archive_base}/{file}' must exist" + + logger.info("... 
done") + + self.artifacts["src-zip"] = zip_path + self.artifacts["src-tar-gz"] = tgz_path + self.artifacts["src-tar-xz"] = txz_path + + if not self.dry: + with tgz_path.open("r+b") as f: + # Zero the embedded timestamp in the gzip'ed tarball + f.seek(4, 0) + f.write(b"\x00\x00\x00\x00") + + def create_dmg(self, configuration: str="Release") -> None: + dmg_in = self.root / self.release_info["dmg"]["path"] + xcode_project = self.root / self.release_info["dmg"]["project"] + assert xcode_project.is_dir(), f"{xcode_project} must be a directory" + assert (xcode_project / "project.pbxproj").is_file, f"{xcode_project} must contain project.pbxproj" + if not self.fast: + dmg_in.unlink(missing_ok=True) + build_xcconfig = self.release_info["dmg"].get("build-xcconfig") + if build_xcconfig: + shutil.copy(self.root / build_xcconfig, xcode_project.parent / "build.xcconfig") + + xcode_scheme = self.release_info["dmg"].get("scheme") + xcode_target = self.release_info["dmg"].get("target") + assert xcode_scheme or xcode_target, "dmg needs scheme or target" + assert not (xcode_scheme and xcode_target), "dmg cannot have both scheme and target set" + if xcode_scheme: + scheme_or_target = "-scheme" + target_like = xcode_scheme + else: + scheme_or_target = "-target" + target_like = xcode_target + self.executer.run(["xcodebuild", "ONLY_ACTIVE_ARCH=NO", "-project", xcode_project, scheme_or_target, target_like, "-configuration", configuration]) + if self.dry: + dmg_in.parent.mkdir(parents=True, exist_ok=True) + dmg_in.touch() + + assert dmg_in.is_file(), f"{self.project}.dmg was not created by xcodebuild" + + dmg_out = self.dist_path / f"{self.project}-{self.version}.dmg" + shutil.copy(dmg_in, dmg_out) + self.artifacts["dmg"] = dmg_out + + @property + def git_hash_data(self) -> bytes: + return f"{self.commit}\n".encode() + + def verify_mingw_library(self, triplet: str, path: Path): + objdump_output = self.executer.check_output([f"{triplet}-objdump", "-p", str(path)]) + libraries = re.findall(r"DLL Name: ([^\n]+)", objdump_output) + logger.info("%s (%s) libraries: %r", path, triplet, libraries) + illegal_libraries = list(filter(RE_ILLEGAL_MINGW_LIBRARIES.match, libraries)) + logger.error("Detected 'illegal' libraries: %r", illegal_libraries) + if illegal_libraries: + raise Exception(f"{path} links to illegal libraries: {illegal_libraries}") + + def create_mingw_archives(self) -> None: + build_type = "Release" + build_parent_dir = self.root / "build-mingw" + ARCH_TO_GNU_ARCH = { + # "arm64": "aarch64", + "x86": "i686", + "x64": "x86_64", + } + ARCH_TO_TRIPLET = { + # "arm64": "aarch64-w64-mingw32", + "x86": "i686-w64-mingw32", + "x64": "x86_64-w64-mingw32", + } + + new_env = dict(os.environ) + + cmake_prefix_paths = [] + mingw_deps_path = self.deps_path / "mingw-deps" + + if "dependencies" in self.release_info["mingw"]: + shutil.rmtree(mingw_deps_path, ignore_errors=True) + mingw_deps_path.mkdir() + + for triplet in ARCH_TO_TRIPLET.values(): + (mingw_deps_path / triplet).mkdir() + + def extract_filter(member: tarfile.TarInfo, path: str, /): + if member.name.startswith("SDL"): + member.name = "/".join(Path(member.name).parts[1:]) + return member + for dep in self.release_info.get("dependencies", {}): + extract_path = mingw_deps_path / f"extract-{dep}" + extract_path.mkdir() + with chdir(extract_path): + tar_path = self.deps_path / glob.glob(self.release_info["mingw"]["dependencies"][dep]["artifact"], root_dir=self.deps_path)[0] + logger.info("Extracting %s to %s", tar_path, mingw_deps_path) + assert tar_path.suffix in 
(".gz", ".xz") + with tarfile.open(tar_path, mode=f"r:{tar_path.suffix.strip('.')}") as tarf: + tarf.extractall(filter=extract_filter) + for arch, triplet in ARCH_TO_TRIPLET.items(): + install_cmd = self.release_info["mingw"]["dependencies"][dep]["install-command"] + extra_configure_data = { + "ARCH": ARCH_TO_GNU_ARCH[arch], + "TRIPLET": triplet, + "PREFIX": str(mingw_deps_path / triplet), + } + install_cmd = configure_text(install_cmd, context=self.get_context(extra_configure_data)) + self.executer.run(shlex.split(install_cmd), cwd=str(extract_path)) + + dep_binpath = mingw_deps_path / triplet / "bin" + assert dep_binpath.is_dir(), f"{dep_binpath} for PATH should exist" + dep_pkgconfig = mingw_deps_path / triplet / "lib/pkgconfig" + assert dep_pkgconfig.is_dir(), f"{dep_pkgconfig} for PKG_CONFIG_PATH should exist" + + new_env["PATH"] = os.pathsep.join([str(dep_binpath), new_env["PATH"]]) + new_env["PKG_CONFIG_PATH"] = str(dep_pkgconfig) + cmake_prefix_paths.append(mingw_deps_path) + + new_env["CFLAGS"] = f"-O2 -ffile-prefix-map={self.root}=/src/{self.project}" + new_env["CXXFLAGS"] = f"-O2 -ffile-prefix-map={self.root}=/src/{self.project}" + + assert any(system in self.release_info["mingw"] for system in ("autotools", "cmake")) + assert not all(system in self.release_info["mingw"] for system in ("autotools", "cmake")) + + mingw_archs = set() + arc_root = f"{self.project}-{self.version}" + archive_file_tree = ArchiveFileTree() + + if "autotools" in self.release_info["mingw"]: + for arch in self.release_info["mingw"]["autotools"]["archs"]: + triplet = ARCH_TO_TRIPLET[arch] + new_env["CC"] = f"{triplet}-gcc" + new_env["CXX"] = f"{triplet}-g++" + new_env["RC"] = f"{triplet}-windres" + + assert arch not in mingw_archs + mingw_archs.add(arch) + + build_path = build_parent_dir / f"build-{triplet}" + install_path = build_parent_dir / f"install-{triplet}" + shutil.rmtree(install_path, ignore_errors=True) + build_path.mkdir(parents=True, exist_ok=True) + context = self.get_context({ + "ARCH": arch, + "DEP_PREFIX": str(mingw_deps_path / triplet), + }) + extra_args = configure_text_list(text_list=self.release_info["mingw"]["autotools"]["args"], context=context) + + with self.section_printer.group(f"Configuring MinGW {triplet} (autotools)"): + assert "@" not in " ".join(extra_args), f"@ should not be present in extra arguments ({extra_args})" + self.executer.run([ + self.root / "configure", + f"--prefix={install_path}", + f"--includedir=${{prefix}}/include", + f"--libdir=${{prefix}}/lib", + f"--bindir=${{prefix}}/bin", + f"--host={triplet}", + f"--build=x86_64-none-linux-gnu", + "CFLAGS=-O2", + "CXXFLAGS=-O2", + "LDFLAGS=-Wl,-s", + ] + extra_args, cwd=build_path, env=new_env) + with self.section_printer.group(f"Build MinGW {triplet} (autotools)"): + self.executer.run(["make", f"-j{self.cpu_count}"], cwd=build_path, env=new_env) + with self.section_printer.group(f"Install MinGW {triplet} (autotools)"): + self.executer.run(["make", "install"], cwd=build_path, env=new_env) + self.verify_mingw_library(triplet=ARCH_TO_TRIPLET[arch], path=install_path / "bin" / f"{self.project}.dll") + archive_file_tree.add_directory_tree(arc_dir=arc_join(arc_root, triplet), path=install_path, time=self.arc_time) + + print("Recording arch-dependent extra files for MinGW development archive ...") + extra_context = { + "TRIPLET": ARCH_TO_TRIPLET[arch], + } + archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["mingw"]["autotools"].get("files", {}), file_mapping_root=self.root, 
context=self.get_context(extra_context=extra_context), time=self.arc_time) + + if "cmake" in self.release_info["mingw"]: + assert self.release_info["mingw"]["cmake"]["shared-static"] in ("args", "both") + for arch in self.release_info["mingw"]["cmake"]["archs"]: + triplet = ARCH_TO_TRIPLET[arch] + new_env["CC"] = f"{triplet}-gcc" + new_env["CXX"] = f"{triplet}-g++" + new_env["RC"] = f"{triplet}-windres" + + assert arch not in mingw_archs + mingw_archs.add(arch) + + context = self.get_context({ + "ARCH": arch, + "DEP_PREFIX": str(mingw_deps_path / triplet), + }) + extra_args = configure_text_list(text_list=self.release_info["mingw"]["cmake"]["args"], context=context) + + build_path = build_parent_dir / f"build-{triplet}" + install_path = build_parent_dir / f"install-{triplet}" + shutil.rmtree(install_path, ignore_errors=True) + build_path.mkdir(parents=True, exist_ok=True) + if self.release_info["mingw"]["cmake"]["shared-static"] == "args": + args_for_shared_static = ([], ) + elif self.release_info["mingw"]["cmake"]["shared-static"] == "both": + args_for_shared_static = (["-DBUILD_SHARED_LIBS=ON"], ["-DBUILD_SHARED_LIBS=OFF"]) + for arg_for_shared_static in args_for_shared_static: + with self.section_printer.group(f"Configuring MinGW {triplet} (CMake)"): + assert "@" not in " ".join(extra_args), f"@ should not be present in extra arguments ({extra_args})" + self.executer.run([ + f"cmake", + f"-S", str(self.root), "-B", str(build_path), + f"-DCMAKE_BUILD_TYPE={build_type}", + f'''-DCMAKE_C_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''', + f'''-DCMAKE_CXX_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''', + f"-DCMAKE_PREFIX_PATH={mingw_deps_path / triplet}", + f"-DCMAKE_INSTALL_PREFIX={install_path}", + f"-DCMAKE_INSTALL_INCLUDEDIR=include", + f"-DCMAKE_INSTALL_LIBDIR=lib", + f"-DCMAKE_INSTALL_BINDIR=bin", + f"-DCMAKE_INSTALL_DATAROOTDIR=share", + f"-DCMAKE_TOOLCHAIN_FILE={self.root}/build-scripts/cmake-toolchain-mingw64-{ARCH_TO_GNU_ARCH[arch]}.cmake", + f"-G{self.cmake_generator}", + ] + extra_args + ([] if self.fast else ["--fresh"]) + arg_for_shared_static, cwd=build_path, env=new_env) + with self.section_printer.group(f"Build MinGW {triplet} (CMake)"): + self.executer.run(["cmake", "--build", str(build_path), "--verbose", "--config", build_type], cwd=build_path, env=new_env) + with self.section_printer.group(f"Install MinGW {triplet} (CMake)"): + self.executer.run(["cmake", "--install", str(build_path)], cwd=build_path, env=new_env) + self.verify_mingw_library(triplet=ARCH_TO_TRIPLET[arch], path=install_path / "bin" / f"{self.project}.dll") + archive_file_tree.add_directory_tree(arc_dir=arc_join(arc_root, triplet), path=install_path, time=self.arc_time) + + print("Recording arch-dependent extra files for MinGW development archive ...") + extra_context = { + "TRIPLET": ARCH_TO_TRIPLET[arch], + } + archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["mingw"]["cmake"].get("files", {}), file_mapping_root=self.root, context=self.get_context(extra_context=extra_context), time=self.arc_time) + print("... done") + + print("Recording extra files for MinGW development archive ...") + archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["mingw"].get("files", {}), file_mapping_root=self.root, context=self.get_context(), time=self.arc_time) + print("... 
done") + + print("Creating zip/tgz/txz development archives ...") + zip_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.zip" + tgz_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.tar.gz" + txz_path = self.dist_path / f"{self.project}-devel-{self.version}-mingw.tar.xz" + + with Archiver(zip_path=zip_path, tgz_path=tgz_path, txz_path=txz_path) as archiver: + try: + archive_file_tree.add_to_archiver(archive_base="", archiver=archiver) + archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time) + except RuntimeError as e: + print(e) + print("... done") + + self.artifacts["mingw-devel-zip"] = zip_path + self.artifacts["mingw-devel-tar-gz"] = tgz_path + self.artifacts["mingw-devel-tar-xz"] = txz_path + + def _detect_android_api(self, android_home: str) -> typing.Optional[AndroidApiVersion]: + platform_dirs = list(Path(p) for p in glob.glob(f"{android_home}/platforms/android-*")) + re_platform = re.compile("^android-([0-9]+)(?:-ext([0-9]+))?$") + platform_versions: list[AndroidApiVersion] = [] + for platform_dir in platform_dirs: + logger.debug("Found Android Platform SDK: %s", platform_dir) + if not (platform_dir / "android.jar").is_file(): + logger.debug("Skipping SDK, missing android.jar") + continue + if m:= re_platform.match(platform_dir.name): + platform_versions.append(AndroidApiVersion(name=platform_dir.name, ints=(int(m.group(1)), int(m.group(2) or 0)))) + platform_versions.sort(key=lambda v: v.ints) + logger.info("Available platform versions: %s", platform_versions) + platform_versions = list(filter(lambda v: v.ints >= self._android_api_minimum.ints, platform_versions)) + logger.info("Valid platform versions (>=%s): %s", self._android_api_minimum.ints, platform_versions) + if not platform_versions: + return None + android_api = platform_versions[0] + logger.info("Selected API version %s", android_api) + return android_api + + def _get_prefab_json_text(self) -> str: + prefab_name = self.release_info["android"].get("name", self.project) + return textwrap.dedent(f"""\ + {{ + "schema_version": 2, + "name": "{prefab_name}", + "version": "{self.version}", + "dependencies": [] + }} + """) + + def _get_prefab_module_json_text(self, library_name: typing.Optional[str], export_libraries: list[str]) -> str: + for lib in export_libraries: + assert isinstance(lib, str), f"{lib} must be a string" + module_json_dict = { + "export_libraries": export_libraries, + } + if library_name: + module_json_dict["library_name"] = f"lib{library_name}" + return json.dumps(module_json_dict, indent=4) + + @property + def _android_api_minimum(self) -> AndroidApiVersion: + value = self.release_info["android"]["api-minimum"] + if isinstance(value, int): + ints = (value, ) + elif isinstance(value, str): + ints = tuple(split(".")) + else: + raise ValueError("Invalid android.api-minimum: must be X or X.Y") + match len(ints): + case 1: name = f"android-{ints[0]}" + case 2: name = f"android-{ints[0]}-ext-{ints[1]}" + case _: raise ValueError("Invalid android.api-minimum: must be X or X.Y") + return AndroidApiVersion(name=name, ints=ints) + + @property + def _android_api_target(self): + return self.release_info["android"]["api-target"] + + @property + def _android_ndk_minimum(self): + return self.release_info["android"]["ndk-minimum"] + + def _get_prefab_abi_json_text(self, abi: str, cpp: bool, shared: bool) -> str: + abi_json_dict = { + "abi": abi, + "api": self._android_api_minimum.ints[0], + "ndk": self._android_ndk_minimum, + "stl": "c++_shared" if cpp else 
"none", + "static": not shared, + } + return json.dumps(abi_json_dict, indent=4) + + def _get_android_manifest_text(self) -> str: + return textwrap.dedent(f"""\ + + + + """) + + def create_android_archives(self, android_api: int, android_home: Path, android_ndk_home: Path) -> None: + cmake_toolchain_file = Path(android_ndk_home) / "build/cmake/android.toolchain.cmake" + if not cmake_toolchain_file.exists(): + logger.error("CMake toolchain file does not exist (%s)", cmake_toolchain_file) + raise SystemExit(1) + aar_path = self.root / "build-android" / f"{self.project}-{self.version}.aar" + android_dist_path = self.dist_path / f"{self.project}-devel-{self.version}-android.zip" + android_abis = self.release_info["android"]["abis"] + java_jars_added = False + module_data_added = False + android_deps_path = self.deps_path / "android-deps" + shutil.rmtree(android_deps_path, ignore_errors=True) + + for dep, depinfo in self.release_info["android"].get("dependencies", {}).items(): + dep_devel_zip = self.deps_path / glob.glob(depinfo["artifact"], root_dir=self.deps_path)[0] + + dep_extract_path = self.deps_path / f"extract/android/{dep}" + shutil.rmtree(dep_extract_path, ignore_errors=True) + dep_extract_path.mkdir(parents=True, exist_ok=True) + + with self.section_printer.group(f"Extracting Android dependency {dep} ({dep_devel_zip})"): + with zipfile.ZipFile(dep_devel_zip, "r") as zf: + zf.extractall(dep_extract_path) + + dep_devel_aar = dep_extract_path / glob.glob("*.aar", root_dir=dep_extract_path)[0] + self.executer.run([sys.executable, str(dep_devel_aar), "-o", str(android_deps_path)]) + + for module_name, module_info in self.release_info["android"]["modules"].items(): + assert "type" in module_info and module_info["type"] in ("interface", "library"), f"module {module_name} must have a valid type" + + aar_file_tree = ArchiveFileTree() + android_devel_file_tree = ArchiveFileTree() + + for android_abi in android_abis: + extra_link_options = ANDROID_ABI_EXTRA_LINK_OPTIONS.get(android_abi, "") + with self.section_printer.group(f"Building for Android {android_api} {android_abi}"): + build_dir = self.root / "build-android" / f"{android_abi}-build" + install_dir = self.root / "install-android" / f"{android_abi}-install" + shutil.rmtree(install_dir, ignore_errors=True) + assert not install_dir.is_dir(), f"{install_dir} should not exist prior to build" + build_type = "Release" + cmake_args = [ + "cmake", + "-S", str(self.root), + "-B", str(build_dir), + # NDK 21e does not support -ffile-prefix-map + # f'''-DCMAKE_C_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''', + # f'''-DCMAKE_CXX_FLAGS="-ffile-prefix-map={self.root}=/src/{self.project}"''', + f"-DANDROID_USE_LEGACY_TOOLCHAIN=0", + f"-DCMAKE_EXE_LINKER_FLAGS={extra_link_options}", + f"-DCMAKE_SHARED_LINKER_FLAGS={extra_link_options}", + f"-DCMAKE_TOOLCHAIN_FILE={cmake_toolchain_file}", + f"-DCMAKE_PREFIX_PATH={str(android_deps_path)}", + f"-DCMAKE_FIND_ROOT_PATH_MODE_PACKAGE=BOTH", + f"-DANDROID_HOME={android_home}", + f"-DANDROID_PLATFORM={android_api}", + f"-DANDROID_ABI={android_abi}", + "-DCMAKE_POSITION_INDEPENDENT_CODE=ON", + f"-DCMAKE_INSTALL_PREFIX={install_dir}", + "-DCMAKE_INSTALL_INCLUDEDIR=include ", + "-DCMAKE_INSTALL_LIBDIR=lib", + "-DCMAKE_INSTALL_DATAROOTDIR=share", + f"-DCMAKE_BUILD_TYPE={build_type}", + f"-G{self.cmake_generator}", + ] + self.release_info["android"]["cmake"]["args"] + ([] if self.fast else ["--fresh"]) + build_args = [ + "cmake", + "--build", str(build_dir), + "--verbose", + "--config", build_type, + ] 
+ install_args = [ + "cmake", + "--install", str(build_dir), + "--config", build_type, + ] + self.executer.run(cmake_args) + self.executer.run(build_args) + self.executer.run(install_args) + + for module_name, module_info in self.release_info["android"]["modules"].items(): + arcdir_prefab_module = f"prefab/modules/{module_name}" + if module_info["type"] == "library": + library = install_dir / module_info["library"] + assert library.suffix in (".so", ".a") + assert library.is_file(), f"CMake should have built library '{library}' for module {module_name}" + arcdir_prefab_libs = f"{arcdir_prefab_module}/libs/android.{android_abi}" + aar_file_tree.add_file(NodeInArchive.from_fs(arcpath=f"{arcdir_prefab_libs}/{library.name}", path=library, time=self.arc_time)) + aar_file_tree.add_file(NodeInArchive.from_text(arcpath=f"{arcdir_prefab_libs}/abi.json", text=self._get_prefab_abi_json_text(abi=android_abi, cpp=False, shared=library.suffix == ".so"), time=self.arc_time)) + + if not module_data_added: + library_name = None + if module_info["type"] == "library": + library_name = Path(module_info["library"]).stem.removeprefix("lib") + export_libraries = module_info.get("export-libraries", []) + aar_file_tree.add_file(NodeInArchive.from_text(arcpath=arc_join(arcdir_prefab_module, "module.json"), text=self._get_prefab_module_json_text(library_name=library_name, export_libraries=export_libraries), time=self.arc_time)) + arcdir_prefab_include = f"prefab/modules/{module_name}/include" + if "includes" in module_info: + aar_file_tree.add_file_mapping(arc_dir=arcdir_prefab_include, file_mapping=module_info["includes"], file_mapping_root=install_dir, context=self.get_context(), time=self.arc_time) + else: + aar_file_tree.add_file(NodeInArchive.from_text(arcpath=arc_join(arcdir_prefab_include, ".keep"), text="\n", time=self.arc_time)) + module_data_added = True + + if not java_jars_added: + java_jars_added = True + if "jars" in self.release_info["android"]: + classes_jar_path = install_dir / configure_text(text=self.release_info["android"]["jars"]["classes"], context=self.get_context()) + sources_jar_path = install_dir / configure_text(text=self.release_info["android"]["jars"]["sources"], context=self.get_context()) + doc_jar_path = install_dir / configure_text(text=self.release_info["android"]["jars"]["doc"], context=self.get_context()) + assert classes_jar_path.is_file(), f"CMake should have compiled the java sources and archived them into a JAR ({classes_jar_path})" + assert sources_jar_path.is_file(), f"CMake should have archived the java sources into a JAR ({sources_jar_path})" + assert doc_jar_path.is_file(), f"CMake should have archived javadoc into a JAR ({doc_jar_path})" + + aar_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes.jar", path=classes_jar_path, time=self.arc_time)) + aar_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes-sources.jar", path=sources_jar_path, time=self.arc_time)) + aar_file_tree.add_file(NodeInArchive.from_fs(arcpath="classes-doc.jar", path=doc_jar_path, time=self.arc_time)) + + assert ("jars" in self.release_info["android"] and java_jars_added) or "jars" not in self.release_info["android"], "Must have archived java JAR archives" + + aar_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["android"]["aar-files"], file_mapping_root=self.root, context=self.get_context(), time=self.arc_time) + + aar_file_tree.add_file(NodeInArchive.from_text(arcpath="prefab/prefab.json", text=self._get_prefab_json_text(), time=self.arc_time)) + 
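# For illustration, the assembled .aar now roughly contains: AndroidManifest.xml, classes.jar / classes-sources.jar / classes-doc.jar (when "jars" is configured), prefab/prefab.json, and per module: prefab/modules/<name>/module.json plus include/ and libs/android.<abi>/ payloads. +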
aar_file_tree.add_file(NodeInArchive.from_text(arcpath="AndroidManifest.xml", text=self._get_android_manifest_text(), time=self.arc_time)) + + with Archiver(zip_path=aar_path) as archiver: + aar_file_tree.add_to_archiver(archive_base="", archiver=archiver) + archiver.add_git_hash(arcdir="", commit=self.commit, time=self.arc_time) + + android_devel_file_tree.add_file(NodeInArchive.from_fs(arcpath=aar_path.name, path=aar_path)) + android_devel_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["android"]["files"], file_mapping_root=self.root, context=self.get_context(), time=self.arc_time) + with Archiver(zip_path=android_dist_path) as archiver: + android_devel_file_tree.add_to_archiver(archive_base="", archiver=archiver) + archiver.add_git_hash(arcdir="", commit=self.commit, time=self.arc_time) + + self.artifacts[f"android-aar"] = android_dist_path + + def download_dependencies(self): + shutil.rmtree(self.deps_path, ignore_errors=True) + self.deps_path.mkdir(parents=True) + + if self.github: + with open(os.environ["GITHUB_OUTPUT"], "a") as f: + f.write(f"dep-path={self.deps_path.absolute()}\n") + + for dep, depinfo in self.release_info.get("dependencies", {}).items(): + startswith = depinfo["startswith"] + dep_repo = depinfo["repo"] + dep_string_data = self.executer.check_output(["gh", "-R", dep_repo, "release", "list", "--exclude-drafts", "--exclude-pre-releases", "--json", "name,createdAt,tagName", "--jq", f'[.[]|select(.name|startswith("{startswith}"))]|max_by(.createdAt)']).strip() + dep_data = json.loads(dep_string_data) + dep_tag = dep_data["tagName"] + dep_version = dep_data["name"] + logger.info("Download dependency %s version %s (tag=%s) ", dep, dep_version, dep_tag) + self.executer.run(["gh", "-R", dep_repo, "release", "download", dep_tag], cwd=self.deps_path) + if self.github: + with open(os.environ["GITHUB_OUTPUT"], "a") as f: + f.write(f"dep-{dep.lower()}-version={dep_version}\n") + + def verify_dependencies(self): + for dep, depinfo in self.release_info.get("dependencies", {}).items(): + if "mingw" in self.release_info: + mingw_matches = glob.glob(self.release_info["mingw"]["dependencies"][dep]["artifact"], root_dir=self.deps_path) + assert len(mingw_matches) == 1, f"Exactly one archive matches mingw {dep} dependency: {mingw_matches}" + if "dmg" in self.release_info: + dmg_matches = glob.glob(self.release_info["dmg"]["dependencies"][dep]["artifact"], root_dir=self.deps_path) + assert len(dmg_matches) == 1, f"Exactly one archive matches dmg {dep} dependency: {dmg_matches}" + if "msvc" in self.release_info: + msvc_matches = glob.glob(self.release_info["msvc"]["dependencies"][dep]["artifact"], root_dir=self.deps_path) + assert len(msvc_matches) == 1, f"Exactly one archive matches msvc {dep} dependency: {msvc_matches}" + if "android" in self.release_info: + android_matches = glob.glob(self.release_info["android"]["dependencies"][dep]["artifact"], root_dir=self.deps_path) + assert len(android_matches) == 1, f"Exactly one archive matches msvc {dep} dependency: {android_matches}" + + @staticmethod + def _arch_to_vs_platform(arch: str, configuration: str="Release") -> VsArchPlatformConfig: + ARCH_TO_VS_PLATFORM = { + "x86": VsArchPlatformConfig(arch="x86", platform="Win32", configuration=configuration), + "x64": VsArchPlatformConfig(arch="x64", platform="x64", configuration=configuration), + "arm64": VsArchPlatformConfig(arch="arm64", platform="ARM64", configuration=configuration), + } + return ARCH_TO_VS_PLATFORM[arch] + + def build_msvc(self): + with 
self.section_printer.group("Find Visual Studio"): + vs = VisualStudio(executer=self.executer) + for arch in self.release_info["msvc"].get("msbuild", {}).get("archs", []): + self._build_msvc_msbuild(arch_platform=self._arch_to_vs_platform(arch=arch), vs=vs) + if "cmake" in self.release_info["msvc"]: + deps_path = self.root / "msvc-deps" + shutil.rmtree(deps_path, ignore_errors=True) + dep_roots = [] + for dep, depinfo in self.release_info["msvc"].get("dependencies", {}).items(): + dep_extract_path = deps_path / f"extract-{dep}" + msvc_zip = self.deps_path / glob.glob(depinfo["artifact"], root_dir=self.deps_path)[0] + with zipfile.ZipFile(msvc_zip, "r") as zf: + zf.extractall(dep_extract_path) + contents_msvc_zip = glob.glob(str(dep_extract_path / "*")) + assert len(contents_msvc_zip) == 1, f"There must be exactly one root item in the root directory of {dep}" + dep_roots.append(contents_msvc_zip[0]) + + for arch in self.release_info["msvc"].get("cmake", {}).get("archs", []): + self._build_msvc_cmake(arch_platform=self._arch_to_vs_platform(arch=arch), dep_roots=dep_roots) + with self.section_printer.group("Create SDL VC development zip"): + self._build_msvc_devel() + + def _build_msvc_msbuild(self, arch_platform: VsArchPlatformConfig, vs: VisualStudio): + platform_context = self.get_context(arch_platform.extra_context()) + for dep, depinfo in self.release_info["msvc"].get("dependencies", {}).items(): + msvc_zip = self.deps_path / glob.glob(depinfo["artifact"], root_dir=self.deps_path)[0] + + src_globs = [configure_text(instr["src"], context=platform_context) for instr in depinfo["copy"]] + with zipfile.ZipFile(msvc_zip, "r") as zf: + for member in zf.namelist(): + member_path = "/".join(Path(member).parts[1:]) + for src_i, src_glob in enumerate(src_globs): + if fnmatch.fnmatch(member_path, src_glob): + dst = (self.root / configure_text(depinfo["copy"][src_i]["dst"], context=platform_context)).resolve() / Path(member_path).name + zip_data = zf.read(member) + if dst.exists(): + identical = False + if dst.is_file(): + orig_bytes = dst.read_bytes() + if orig_bytes == zip_data: + identical = True + if not identical: + logger.warning("Extracting dependency %s, will cause %s to be overwritten", dep, dst) + if not self.overwrite: + raise RuntimeError("Run with --overwrite to allow overwriting") + logger.debug("Extracting %s -> %s", member, dst) + + dst.parent.mkdir(exist_ok=True, parents=True) + dst.write_bytes(zip_data) + + prebuilt_paths = set(self.root / full_prebuilt_path for prebuilt_path in self.release_info["msvc"]["msbuild"].get("prebuilt", []) for full_prebuilt_path in glob.glob(configure_text(prebuilt_path, context=platform_context), root_dir=self.root)) + msbuild_paths = set(self.root / configure_text(f, context=platform_context) for file_mapping in (self.release_info["msvc"]["msbuild"]["files-lib"], self.release_info["msvc"]["msbuild"]["files-devel"]) for files_list in file_mapping.values() for f in files_list) + assert prebuilt_paths.issubset(msbuild_paths), f"msvc.msbuild.prebuilt must be a subset of (msvc.msbuild.files-lib, msvc.msbuild.files-devel)" + built_paths = msbuild_paths.difference(prebuilt_paths) + logger.info("MSbuild builds these files, to be included in the package: %s", built_paths) + if not self.fast: + for b in built_paths: + b.unlink(missing_ok=True) + + rel_projects: list[str] = self.release_info["msvc"]["msbuild"]["projects"] + projects = list(self.root / p for p in rel_projects) + + directory_build_props_src_relpath = 
self.release_info["msvc"]["msbuild"].get("directory-build-props") + for project in projects: + dir_b_props = project.parent / "Directory.Build.props" + dir_b_props.unlink(missing_ok = True) + if directory_build_props_src_relpath: + src = self.root / directory_build_props_src_relpath + logger.debug("Copying %s -> %s", src, dir_b_props) + shutil.copy(src=src, dst=dir_b_props) + + with self.section_printer.group(f"Build {arch_platform.arch} VS binary"): + vs.build(arch_platform=arch_platform, projects=projects) + + if self.dry: + for b in built_paths: + b.parent.mkdir(parents=True, exist_ok=True) + b.touch() + + for b in built_paths: + assert b.is_file(), f"{b} has not been created" + b.parent.mkdir(parents=True, exist_ok=True) + b.touch() + + zip_path = self.dist_path / f"{self.project}-{self.version}-win32-{arch_platform.arch}.zip" + zip_path.unlink(missing_ok=True) + + logger.info("Collecting files...") + archive_file_tree = ArchiveFileTree() + archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["msbuild"]["files-lib"], file_mapping_root=self.root, context=platform_context, time=self.arc_time) + archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["files-lib"], file_mapping_root=self.root, context=platform_context, time=self.arc_time) + + logger.info("Writing to %s", zip_path) + with Archiver(zip_path=zip_path) as archiver: + arc_root = f"" + archive_file_tree.add_to_archiver(archive_base=arc_root, archiver=archiver) + archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time) + self.artifacts[f"VC-{arch_platform.arch}"] = zip_path + + for p in built_paths: + assert p.is_file(), f"{p} should exist" + + def _arch_platform_to_build_path(self, arch_platform: VsArchPlatformConfig) -> Path: + return self.root / f"build-vs-{arch_platform.arch}" + + def _arch_platform_to_install_path(self, arch_platform: VsArchPlatformConfig) -> Path: + return self._arch_platform_to_build_path(arch_platform) / "prefix" + + def _build_msvc_cmake(self, arch_platform: VsArchPlatformConfig, dep_roots: list[Path]): + build_path = self._arch_platform_to_build_path(arch_platform) + install_path = self._arch_platform_to_install_path(arch_platform) + platform_context = self.get_context(extra_context=arch_platform.extra_context()) + + build_type = "Release" + extra_context = { + "ARCH": arch_platform.arch, + "PLATFORM": arch_platform.platform, + } + + built_paths = set(install_path / configure_text(f, context=platform_context) for file_mapping in (self.release_info["msvc"]["cmake"]["files-lib"], self.release_info["msvc"]["cmake"]["files-devel"]) for files_list in file_mapping.values() for f in files_list) + logger.info("CMake builds these files, to be included in the package: %s", built_paths) + if not self.fast: + for b in built_paths: + b.unlink(missing_ok=True) + + shutil.rmtree(install_path, ignore_errors=True) + build_path.mkdir(parents=True, exist_ok=True) + with self.section_printer.group(f"Configure VC CMake project for {arch_platform.arch}"): + self.executer.run([ + "cmake", "-S", str(self.root), "-B", str(build_path), + "-A", arch_platform.platform, + "-DCMAKE_INSTALL_BINDIR=bin", + "-DCMAKE_INSTALL_DATAROOTDIR=share", + "-DCMAKE_INSTALL_INCLUDEDIR=include", + "-DCMAKE_INSTALL_LIBDIR=lib", + f"-DCMAKE_BUILD_TYPE={build_type}", + f"-DCMAKE_INSTALL_PREFIX={install_path}", + # MSVC debug information format flags are selected by an abstraction + "-DCMAKE_POLICY_DEFAULT_CMP0141=NEW", + # MSVC debug information format + 
"-DCMAKE_MSVC_DEBUG_INFORMATION_FORMAT=ProgramDatabase", + # Linker flags for executables + "-DCMAKE_EXE_LINKER_FLAGS=-INCREMENTAL:NO -DEBUG -OPT:REF -OPT:ICF", + # Linker flag for shared libraries + "-DCMAKE_SHARED_LINKER_FLAGS=-INCREMENTAL:NO -DEBUG -OPT:REF -OPT:ICF", + # MSVC runtime library flags are selected by an abstraction + "-DCMAKE_POLICY_DEFAULT_CMP0091=NEW", + # Use statically linked runtime (-MT) (ideally, should be "MultiThreaded$<$:Debug>") + "-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded", + f"-DCMAKE_PREFIX_PATH={';'.join(str(s) for s in dep_roots)}", + ] + self.release_info["msvc"]["cmake"]["args"] + ([] if self.fast else ["--fresh"])) + + with self.section_printer.group(f"Build VC CMake project for {arch_platform.arch}"): + self.executer.run(["cmake", "--build", str(build_path), "--verbose", "--config", build_type]) + with self.section_printer.group(f"Install VC CMake project for {arch_platform.arch}"): + self.executer.run(["cmake", "--install", str(build_path), "--config", build_type]) + + if self.dry: + for b in built_paths: + b.parent.mkdir(parents=True, exist_ok=True) + b.touch() + + zip_path = self.dist_path / f"{self.project}-{self.version}-win32-{arch_platform.arch}.zip" + zip_path.unlink(missing_ok=True) + + logger.info("Collecting files...") + archive_file_tree = ArchiveFileTree() + archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["cmake"]["files-lib"], file_mapping_root=install_path, context=platform_context, time=self.arc_time) + archive_file_tree.add_file_mapping(arc_dir="", file_mapping=self.release_info["msvc"]["files-lib"], file_mapping_root=self.root, context=self.get_context(extra_context=extra_context), time=self.arc_time) + + logger.info("Creating %s", zip_path) + with Archiver(zip_path=zip_path) as archiver: + arc_root = f"" + archive_file_tree.add_to_archiver(archive_base=arc_root, archiver=archiver) + archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time) + + for p in built_paths: + assert p.is_file(), f"{p} should exist" + + def _build_msvc_devel(self) -> None: + zip_path = self.dist_path / f"{self.project}-devel-{self.version}-VC.zip" + arc_root = f"{self.project}-{self.version}" + + def copy_files_devel(ctx): + archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["msvc"]["files-devel"], file_mapping_root=self.root, context=ctx, time=self.arc_time) + + + logger.info("Collecting files...") + archive_file_tree = ArchiveFileTree() + if "msbuild" in self.release_info["msvc"]: + for arch in self.release_info["msvc"]["msbuild"]["archs"]: + arch_platform = self._arch_to_vs_platform(arch=arch) + platform_context = self.get_context(arch_platform.extra_context()) + archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["msvc"]["msbuild"]["files-devel"], file_mapping_root=self.root, context=platform_context, time=self.arc_time) + copy_files_devel(ctx=platform_context) + if "cmake" in self.release_info["msvc"]: + for arch in self.release_info["msvc"]["cmake"]["archs"]: + arch_platform = self._arch_to_vs_platform(arch=arch) + platform_context = self.get_context(arch_platform.extra_context()) + archive_file_tree.add_file_mapping(arc_dir=arc_root, file_mapping=self.release_info["msvc"]["cmake"]["files-devel"], file_mapping_root=self._arch_platform_to_install_path(arch_platform), context=platform_context, time=self.arc_time) + copy_files_devel(ctx=platform_context) + + with Archiver(zip_path=zip_path) as archiver: + 
archive_file_tree.add_to_archiver(archive_base="", archiver=archiver) + archiver.add_git_hash(arcdir=arc_root, commit=self.commit, time=self.arc_time) + self.artifacts["VC-devel"] = zip_path + + @classmethod + def extract_sdl_version(cls, root: Path, release_info: dict) -> str: + with open(root / release_info["version"]["file"], "r") as f: + text = f.read() + major = next(re.finditer(release_info["version"]["re_major"], text, flags=re.M)).group(1) + minor = next(re.finditer(release_info["version"]["re_minor"], text, flags=re.M)).group(1) + micro = next(re.finditer(release_info["version"]["re_micro"], text, flags=re.M)).group(1) + return f"{major}.{minor}.{micro}" + + +def main(argv=None) -> int: + if sys.version_info < (3, 11): + logger.error("This script needs at least python 3.11") + return 1 + + parser = argparse.ArgumentParser(allow_abbrev=False, description="Create release artifacts") + parser.add_argument("--root", metavar="DIR", type=Path, default=Path(__file__).absolute().parents[1], help="Root of project") + parser.add_argument("--release-info", metavar="JSON", dest="path_release_info", type=Path, default=Path(__file__).absolute().parent / "release-info.json", help="Path of release-info.json") + parser.add_argument("--dependency-folder", metavar="FOLDER", dest="deps_path", type=Path, default="deps", help="Directory containing pre-built archives of dependencies (will be removed when downloading archives)") + parser.add_argument("--out", "-o", metavar="DIR", dest="dist_path", type=Path, default="dist", help="Output directory") + parser.add_argument("--github", action="store_true", help="Script is running on a GitHub runner") + parser.add_argument("--commit", default="HEAD", help="Git commit/tag for which a release should be created") + parser.add_argument("--actions", choices=["download", "source", "android", "mingw", "msvc", "dmg"], required=True, nargs="+", dest="actions", help="What to do?") + parser.set_defaults(loglevel=logging.INFO) + parser.add_argument('--vs-year', dest="vs_year", help="Visual Studio year") + parser.add_argument('--android-api', dest="android_api", help="Android API version") + parser.add_argument('--android-home', dest="android_home", default=os.environ.get("ANDROID_HOME"), help="Android Home folder") + parser.add_argument('--android-ndk-home', dest="android_ndk_home", default=os.environ.get("ANDROID_NDK_HOME"), help="Android NDK Home folder") + parser.add_argument('--cmake-generator', dest="cmake_generator", default="Ninja", help="CMake Generator") + parser.add_argument('--debug', action='store_const', const=logging.DEBUG, dest="loglevel", help="Print script debug information") + parser.add_argument('--dry-run', action='store_true', dest="dry", help="Don't execute anything") + parser.add_argument('--force', action='store_true', dest="force", help="Ignore a non-clean git tree") + parser.add_argument('--overwrite', action='store_true', dest="overwrite", help="Allow potentially overwriting other projects") + parser.add_argument('--fast', action='store_true', dest="fast", help="Don't do a rebuild") + + args = parser.parse_args(argv) + logging.basicConfig(level=args.loglevel, format='[%(levelname)s] %(message)s') + args.deps_path = args.deps_path.absolute() + args.dist_path = args.dist_path.absolute() + args.root = args.root.absolute() + if args.dry: + args.dist_path = args.dist_path / "dry" + + if args.github: + section_printer: SectionPrinter = GitHubSectionPrinter() + else: + section_printer = SectionPrinter()
+ + if args.github and "GITHUB_OUTPUT" not in os.environ: + os.environ["GITHUB_OUTPUT"] = "/tmp/github_output.txt" + + executer = Executer(root=args.root, dry=args.dry) + + root_git_hash_path = args.root / GIT_HASH_FILENAME + root_is_maybe_archive = root_git_hash_path.is_file() + if root_is_maybe_archive: + logger.warning("%s detected: Building from archive", GIT_HASH_FILENAME) + archive_commit = root_git_hash_path.read_text().strip() + if args.commit != archive_commit: + logger.warning("Commit argument is %s, but archive commit is %s. Using %s.", args.commit, archive_commit, archive_commit) + args.commit = archive_commit + revision = (args.root / REVISION_TXT).read_text().strip() + else: + args.commit = executer.check_output(["git", "rev-parse", args.commit], dry_out="e5812a9fd2cda317b503325a702ba3c1c37861d9").strip() + revision = executer.check_output(["git", "describe", "--always", "--tags", "--long", args.commit], dry_out="preview-3.1.3-96-g9512f2144").strip() + logger.info("Using commit %s", args.commit) + + try: + with args.path_release_info.open() as f: + release_info = json.load(f) + except FileNotFoundError: + logger.error(f"Could not find {args.path_release_info}") + return 1 + + releaser = Releaser( + release_info=release_info, + commit=args.commit, + revision=revision, + root=args.root, + dist_path=args.dist_path, + executer=executer, + section_printer=section_printer, + cmake_generator=args.cmake_generator, + deps_path=args.deps_path, + overwrite=args.overwrite, + github=args.github, + fast=args.fast, + ) + + if root_is_maybe_archive: + logger.warning("Building from archive. Skipping clean git tree check.") + else: + porcelain_status = executer.check_output(["git", "status", "--ignored", "--porcelain"], dry_out="\n").strip() + if porcelain_status: + print(porcelain_status) + logger.warning("The tree is dirty! Do not publish any generated artifacts!") + if not args.force: + raise Exception("The git repo contains modified and/or non-committed files. Run with --force to ignore.") + + if args.fast: + logger.warning("Doing fast build! Do not publish generated artifacts!") + + with section_printer.group("Arguments"): + print(f"project = {releaser.project}") + print(f"version = {releaser.version}") + print(f"revision = {revision}") + print(f"commit = {args.commit}") + print(f"out = {args.dist_path}") + print(f"actions = {args.actions}") + print(f"dry = {args.dry}") + print(f"force = {args.force}") + print(f"overwrite = {args.overwrite}") + print(f"cmake_generator = {args.cmake_generator}") + + releaser.prepare() + + if "download" in args.actions: + releaser.download_dependencies() + + if set(args.actions).intersection({"msvc", "mingw", "android"}): + print("Verifying presence of dependencies (run 'download' action to download) ...") + releaser.verify_dependencies() + print("...
done") + + if "source" in args.actions: + if root_is_maybe_archive: + raise Exception("Cannot build source archive from source archive") + with section_printer.group("Create source archives"): + releaser.create_source_archives() + + if "dmg" in args.actions: + if platform.system() != "Darwin" and not args.dry: + parser.error("framework artifact(s) can only be built on Darwin") + + releaser.create_dmg() + + if "msvc" in args.actions: + if platform.system() != "Windows" and not args.dry: + parser.error("msvc artifact(s) can only be built on Windows") + releaser.build_msvc() + + if "mingw" in args.actions: + releaser.create_mingw_archives() + + if "android" in args.actions: + if args.android_home is None or not Path(args.android_home).is_dir(): + parser.error("Invalid $ANDROID_HOME or --android-home: must be a directory containing the Android SDK") + if args.android_ndk_home is None or not Path(args.android_ndk_home).is_dir(): + parser.error("Invalid $ANDROID_NDK_HOME or --android-ndk-home: must be a directory containing the Android NDK") + if args.android_api is None: + with section_printer.group("Detect Android APIS"): + args.android_api = releaser._detect_android_api(android_home=args.android_home) + else: + try: + android_api_ints = tuple(int(v) for v in args.android_api.split(".")) + match len(android_api_ints): + case 1: android_api_name = f"android-{android_api_ints[0]}" + case 2: android_api_name = f"android-{android_api_ints[0]}-ext-{android_api_ints[1]}" + case _: raise ValueError + except ValueError: + logger.error("Invalid --android-api, must be a 'X' or 'X.Y' version") + args.android_api = AndroidApiVersion(ints=android_api_ints, name=android_api_name) + if args.android_api is None: + parser.error("Invalid --android-api, and/or could not be detected") + android_api_path = Path(args.android_home) / f"platforms/{args.android_api.name}" + if not android_api_path.is_dir(): + logger.warning(f"Android API directory does not exist ({android_api_path})") + with section_printer.group("Android arguments"): + print(f"android_home = {args.android_home}") + print(f"android_ndk_home = {args.android_ndk_home}") + print(f"android_api = {args.android_api}") + releaser.create_android_archives( + android_api=args.android_api.ints[0], + android_home=args.android_home, + android_ndk_home=args.android_ndk_home, + ) + with section_printer.group("Summary"): + print(f"artifacts = {releaser.artifacts}") + + if args.github: + with open(os.environ["GITHUB_OUTPUT"], "a") as f: + f.write(f"project={releaser.project}\n") + f.write(f"version={releaser.version}\n") + for k, v in releaser.artifacts.items(): + f.write(f"{k}={v.name}\n") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/extras/buildbot-checker.sh b/build-scripts/buildbot-checker.sh similarity index 100% rename from extras/buildbot-checker.sh rename to build-scripts/buildbot-checker.sh diff --git a/extras/buildbot-emscripten.sh b/build-scripts/buildbot-emscripten.sh similarity index 100% rename from extras/buildbot-emscripten.sh rename to build-scripts/buildbot-emscripten.sh diff --git a/extras/buildbot-os2.sh b/build-scripts/buildbot-os2.sh similarity index 100% rename from extras/buildbot-os2.sh rename to build-scripts/buildbot-os2.sh diff --git a/extras/buildbot-raspberrypi.sh b/build-scripts/buildbot-raspberrypi.sh similarity index 100% rename from extras/buildbot-raspberrypi.sh rename to build-scripts/buildbot-raspberrypi.sh diff --git a/build-scripts/check_elf_alignment.sh b/build-scripts/check_elf_alignment.sh 
new file mode 100755 index 00000000..d3846bca --- /dev/null +++ b/build-scripts/check_elf_alignment.sh @@ -0,0 +1,127 @@ +#!/bin/bash +progname="${0##*/}" +progname="${progname%.sh}" + +# usage: check_elf_alignment.sh [path to *.so files|path to *.apk] + +cleanup_trap() { + if [ -n "${tmp}" -a -d "${tmp}" ]; then + rm -rf ${tmp} + fi + exit $1 +} + +usage() { + echo "Host side script to check the ELF alignment of shared libraries." + echo "Shared libraries are reported ALIGNED when their ELF regions are" + echo "16 KB or 64 KB aligned. Otherwise they are reported as UNALIGNED." + echo + echo "Usage: ${progname} [input-path|input-APK|input-APEX]" +} + +if [ ${#} -ne 1 ]; then + usage + exit +fi + +case ${1} in + --help | -h | -\?) + usage + exit + ;; + + *) + dir="${1}" + ;; +esac + +if ! [ -f "${dir}" -o -d "${dir}" ]; then + echo "Invalid file: ${dir}" >&2 + exit 1 +fi + +if [[ "${dir}" == *.apk ]]; then + trap 'cleanup_trap' EXIT + + echo + echo "Recursively analyzing $dir" + echo + + if { zipalign --help 2>&1 | grep -q "\-P "; }; then + echo "=== APK zip-alignment ===" + zipalign -v -c -P 16 4 "${dir}" | egrep 'lib/arm64-v8a|lib/x86_64|Verification' + echo "=========================" + else + echo "NOTICE: Zip alignment check requires build-tools version 35.0.0-rc3 or higher." + echo " You can install the latest build-tools by running the below command" + echo " and updating your \$PATH:" + echo + echo " sdkmanager \"build-tools;35.0.0-rc3\"" + fi + + dir_filename=$(basename "${dir}") + tmp=$(mktemp -d -t "${dir_filename%.apk}_out_XXXXX") + unzip "${dir}" lib/* -d "${tmp}" >/dev/null 2>&1 + dir="${tmp}" +fi + +if [[ "${dir}" == *.apex ]]; then + trap 'cleanup_trap' EXIT + + echo + echo "Recursively analyzing $dir" + echo + + dir_filename=$(basename "${dir}") + tmp=$(mktemp -d -t "${dir_filename%.apex}_out_XXXXX") + deapexer extract "${dir}" "${tmp}" || { echo "Failed to deapex." && exit 1; } + dir="${tmp}" +fi + +RED="\e[31m" +GREEN="\e[32m" +ENDCOLOR="\e[0m" + +unaligned_libs=() +unaligned_critical_libs=() + +echo +echo "=== ELF alignment ===" + +matches="$(find "${dir}" -type f)" +IFS=$'\n' +for match in $matches; do + # We could recursively call this script or rewrite it to though. 
+ [[ "${match}" == *".apk" ]] && echo "WARNING: doesn't recursively inspect .apk file: ${match}" + [[ "${match}" == *".apex" ]] && echo "WARNING: doesn't recursively inspect .apex file: ${match}" + + [[ $(file "${match}") == *"ELF"* ]] || continue + + res="$(objdump -p "${match}" | grep LOAD | awk '{ print $NF }' | head -1)" + if [[ $res =~ 2\*\*(1[4-9]|[2-9][0-9]|[1-9][0-9]{2,}) ]]; then + echo -e "${match}: ${GREEN}ALIGNED${ENDCOLOR} ($res)" + else + unaligned_libs+=("${match}") + # Check if this is a critical architecture (arm64-v8a or x86_64) + if [[ "${match}" == *"arm64-v8a"* ]] || [[ "${match}" == *"x86_64"* ]]; then + unaligned_critical_libs+=("${match}") + echo -e "${match}: ${RED}UNALIGNED${ENDCOLOR} ($res)" + else + echo -e "${match}: UNALIGNED ($res)" + fi + fi +done + +if [ ${#unaligned_libs[@]} -gt 0 ]; then + echo -e "Found ${#unaligned_libs[@]} unaligned libs (only arm64-v8a/x86_64 libs need to be aligned).${ENDCOLOR}" +fi +echo "=====================" + +# Exit with appropriate code: 1 if critical unaligned libs found, 0 otherwise +if [ ${#unaligned_critical_libs[@]} -gt 0 ]; then + echo -e "${RED}Found ${#unaligned_critical_libs[@]} critical unaligned libs.${ENDCOLOR}" + exit 1 +else + echo -e "${GREEN}ELF Verification Successful${ENDCOLOR}" + exit 0 +fi diff --git a/build-scripts/cmake-toolchain-mingw64-i686.cmake b/build-scripts/cmake-toolchain-mingw64-i686.cmake new file mode 100644 index 00000000..8be7b3a8 --- /dev/null +++ b/build-scripts/cmake-toolchain-mingw64-i686.cmake @@ -0,0 +1,18 @@ +set(CMAKE_SYSTEM_NAME Windows) +set(CMAKE_SYSTEM_PROCESSOR x86) + +find_program(CMAKE_C_COMPILER NAMES i686-w64-mingw32-gcc) +find_program(CMAKE_CXX_COMPILER NAMES i686-w64-mingw32-g++) +find_program(CMAKE_RC_COMPILER NAMES i686-w64-mingw32-windres windres) + +if(NOT CMAKE_C_COMPILER) + message(FATAL_ERROR "Failed to find CMAKE_C_COMPILER.") +endif() + +if(NOT CMAKE_CXX_COMPILER) + message(FATAL_ERROR "Failed to find CMAKE_CXX_COMPILER.") +endif() + +if(NOT CMAKE_RC_COMPILER) + message(FATAL_ERROR "Failed to find CMAKE_RC_COMPILER.") +endif() diff --git a/build-scripts/cmake-toolchain-mingw64-x86_64.cmake b/build-scripts/cmake-toolchain-mingw64-x86_64.cmake new file mode 100644 index 00000000..8bf43669 --- /dev/null +++ b/build-scripts/cmake-toolchain-mingw64-x86_64.cmake @@ -0,0 +1,18 @@ +set(CMAKE_SYSTEM_NAME Windows) +set(CMAKE_SYSTEM_PROCESSOR x86_64) + +find_program(CMAKE_C_COMPILER NAMES x86_64-w64-mingw32-gcc) +find_program(CMAKE_CXX_COMPILER NAMES x86_64-w64-mingw32-g++) +find_program(CMAKE_RC_COMPILER NAMES x86_64-w64-mingw32-windres windres) + +if(NOT CMAKE_C_COMPILER) + message(FATAL_ERROR "Failed to find CMAKE_C_COMPILER.") +endif() + +if(NOT CMAKE_CXX_COMPILER) + message(FATAL_ERROR "Failed to find CMAKE_CXX_COMPILER.") +endif() + +if(NOT CMAKE_RC_COMPILER) + message(FATAL_ERROR "Failed to find CMAKE_RC_COMPILER.") +endif() diff --git a/build-scripts/create-release.py b/build-scripts/create-release.py new file mode 100755 index 00000000..14916fa8 --- /dev/null +++ b/build-scripts/create-release.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python3 + +import argparse +from pathlib import Path +import json +import logging +import re +import subprocess + +ROOT = Path(__file__).resolve().parents[1] + + +def determine_remote() -> str: + text = (ROOT / "build-scripts/release-info.json").read_text() + release_info = json.loads(text) + if "remote" in release_info: + return release_info["remote"] + project_with_version = release_info["name"] + project, _ = re.subn("([^a-zA-Z_])", "", 
project_with_version) + return f"libsdl-org/{project}" + + +def main(): + default_remote = determine_remote() + + parser = argparse.ArgumentParser(allow_abbrev=False) + parser.add_argument("--ref", required=True, help="Name of branch or tag containing release.yml") + parser.add_argument("--remote", "-R", default=default_remote, help=f"Remote repo (default={default_remote})") + parser.add_argument("--commit", help="Input 'commit' of release.yml (default is the hash of the ref)") + args = parser.parse_args() + + if args.commit is None: + args.commit = subprocess.check_output(["git", "rev-parse", args.ref], cwd=ROOT, text=True).strip() + + print("Running release.yml workflow:") + print(f" remote = {args.remote}") + print(f" ref = {args.ref}") + print(f" commit = {args.commit}") + + subprocess.check_call(["gh", "-R", args.remote, "workflow", "run", "release.yml", "--ref", args.ref, "-f", f"commit={args.commit}"], cwd=ROOT) + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/build-scripts/pkg-support/android/INSTALL.md.in b/build-scripts/pkg-support/android/INSTALL.md.in new file mode 100644 index 00000000..42d9841d --- /dev/null +++ b/build-scripts/pkg-support/android/INSTALL.md.in @@ -0,0 +1,64 @@ + +# Using this package + +This package contains @<@PROJECT_NAME@>@ built for the Android platform. + +## Gradle integration + +For integration with CMake/ndk-build, it uses [prefab](https://google.github.io/prefab/). + +Copy the aar archive (@<@PROJECT_NAME@>@-@<@PROJECT_VERSION@>@.aar) to the `app/libs` directory of your project. + +In `app/build.gradle` of your Android project, add: +``` +android { + /* ... */ + buildFeatures { + prefab true + } +} +dependencies { + implementation files('libs/@<@PROJECT_NAME@>@-@<@PROJECT_VERSION@>@.aar') + /* ... */ +} +``` + +If you're using CMake, add the following to your CMakeLists.txt: +``` +find_package(PhysFS REQUIRED CONFIG) +target_link_libraries(yourgame PRIVATE PhysFS::PhysFS) +``` + +If you use ndk-build, add the following before `include $(BUILD_SHARED_LIBRARY)` to your `Android.mk`: +``` +LOCAL_SHARED_LIBRARIES := PhysFS +``` +And add the following at the bottom: +``` +# https://google.github.io/prefab/build-systems.html + +# Add the prefab modules to the import path. +$(call import-add-path,/out) + +# Import @<@PROJECT_NAME@>@ so we can depend on it. +$(call import-module,prefab/@<@PROJECT_NAME@>@) +``` + +--- + +## Other build systems (advanced) + +If you want to build a project without Gradle, running the following command will extract the Android archive into a more common directory structure. +``` +python @<@PROJECT_NAME@>@-@<@PROJECT_VERSION@>@.aar -o android_prefix +``` +Add `--help` for a list of all available options.
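+ + For example (paths and ABI are illustrative), the extracted prefix can then be used with the NDK's CMake toolchain file: + ``` + cmake -S my-project -B build \ + -DCMAKE_PREFIX_PATH=$PWD/android_prefix \ + -DCMAKE_TOOLCHAIN_FILE=$ANDROID_NDK_HOME/build/cmake/android.toolchain.cmake \ + -DANDROID_ABI=arm64-v8a + cmake --build build + ```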
+ + # Documentation + + An API reference, tutorials, and additional documentation are available at: + + https://wiki.icculus.org/PhysicsFS3/QuickReference + + https://icculus.org/physfs/docs/html/ diff --git a/build-scripts/pkg-support/android/aar/__main__.py.in b/build-scripts/pkg-support/android/aar/__main__.py.in new file mode 100755 index 00000000..46c2a1f7 --- /dev/null +++ b/build-scripts/pkg-support/android/aar/__main__.py.in @@ -0,0 +1,104 @@ +#!/usr/bin/env python + +""" +Create a @<@PROJECT_NAME@>@ SDK prefix from an Android archive +This file is meant to be placed in the root of an Android .aar archive + +Example usage: +```sh +python @<@PROJECT_NAME@>@-@<@PROJECT_VERSION@>@.aar -o /usr/opt/android-sdks +cmake -S my-project \ + -DCMAKE_PREFIX_PATH=/usr/opt/android-sdks \ + -DCMAKE_TOOLCHAIN_FILE=$ANDROID_NDK_HOME/build/cmake/android.toolchain.cmake \ + -B build-arm64 -DANDROID_ABI=arm64-v8a \ + -DCMAKE_BUILD_TYPE=Release +cmake --build build-arm64 +``` +""" +import argparse +import json +import os +import pathlib +import re +import stat +import zipfile + + +AAR_PATH = pathlib.Path(__file__).resolve().parent + + +def main(): + parser = argparse.ArgumentParser( + description="Convert a @<@PROJECT_NAME@>@ Android .aar archive into an SDK", + allow_abbrev=False, + ) + parser.add_argument("--version", action="version", version="@<@PROJECT_NAME@>@ @<@PROJECT_VERSION@>@") + parser.add_argument("-o", dest="output", type=pathlib.Path, required=True, help="Folder where to store the SDK") + args = parser.parse_args() + + print(f"Creating a @<@PROJECT_NAME@>@ SDK at {args.output}...") + + prefix = args.output + incdir = prefix / "include" + libdir = prefix / "lib" + + RE_LIB_MODULE_ARCH = re.compile(r"prefab/modules/(?P<module>[A-Za-z0-9_-]+)/libs/android\.(?P<arch>[a-zA-Z0-9_-]+)/(?P<filename>lib[A-Za-z0-9_]+\.(?:so|a))") + RE_INC_MODULE_ARCH = re.compile(r"prefab/modules/(?P<module>[A-Za-z0-9_-]+)/include/(?P<header>
[a-zA-Z0-9_./-]+)") + RE_LICENSE = re.compile(r"(?:.*/)?(?P(?:license|copying)(?:\.md|\.txt)?)", flags=re.I) + RE_PROGUARD = re.compile(r"(?:.*/)?(?Pproguard.*\.(?:pro|txt))", flags=re.I) + RE_CMAKE = re.compile(r"(?:.*/)?(?P.*\.cmake)", flags=re.I) + + with zipfile.ZipFile(AAR_PATH) as zf: + project_description = json.loads(zf.read("description.json")) + project_name = project_description["name"] + prefab_name = project_description.get("prefab-name", project_name) + project_version = project_description["version"] + licensedir = prefix / "share/licenses" / prefab_name + cmakedir = libdir / "cmake" / prefab_name + javadir = prefix / "share/java" / project_name + javadocdir = prefix / "share/javadoc" / project_name + + def read_zipfile_and_write(path: pathlib.Path, zippath: str): + data = zf.read(zippath) + path.parent.mkdir(parents=True, exist_ok=True) + path.write_bytes(data) + + for zip_info in zf.infolist(): + zippath = zip_info.filename + if m := RE_LIB_MODULE_ARCH.match(zippath): + lib_path = libdir / m["arch"] / m["filename"] + read_zipfile_and_write(lib_path, zippath) + if m["filename"].endswith(".so"): + os.chmod(lib_path, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH) + + elif m := RE_INC_MODULE_ARCH.match(zippath): + header_path = incdir / m["header"] + read_zipfile_and_write(header_path, zippath) + elif m:= RE_LICENSE.match(zippath): + license_path = licensedir / m["filename"] + read_zipfile_and_write(license_path, zippath) + elif m:= RE_PROGUARD.match(zippath): + proguard_path = javadir / m["filename"] + read_zipfile_and_write(proguard_path, zippath) + elif m:= RE_CMAKE.match(zippath): + cmake_path = cmakedir / m["filename"] + read_zipfile_and_write(cmake_path, zippath) + elif zippath == "classes.jar": + versioned_jar_path = javadir / f"{project_name}-{project_version}.jar" + unversioned_jar_path = javadir / f"{project_name}.jar" + read_zipfile_and_write(versioned_jar_path, zippath) + os.symlink(src=versioned_jar_path.name, dst=unversioned_jar_path) + elif zippath == "classes-sources.jar": + jarpath = javadir / f"{project_name}-{project_version}-sources.jar" + read_zipfile_and_write(jarpath, zippath) + elif zippath == "classes-doc.jar": + jarpath = javadocdir / f"{project_name}-{project_version}-javadoc.jar" + read_zipfile_and_write(jarpath, zippath) + + print("... 
done") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/build-scripts/pkg-support/android/aar/cmake/PhysFSConfig.cmake b/build-scripts/pkg-support/android/aar/cmake/PhysFSConfig.cmake new file mode 100644 index 00000000..116c51ab --- /dev/null +++ b/build-scripts/pkg-support/android/aar/cmake/PhysFSConfig.cmake @@ -0,0 +1,96 @@ +# PhysicsFS CMake configuration file: +# This file is meant to be placed in lib/cmake/PhysFS subfolder of a reconstructed Android PhysFS SDK + +cmake_minimum_required(VERSION 3.0...4.0) + +include(FeatureSummary) +set_package_properties(PhysicsFS PROPERTIES + URL "https://icculus.org/physfs/" + DESCRIPTION "Library to provide abstract access to various archives" +) + +# Copied from `configure_package_config_file` +macro(set_and_check _var _file) + set(${_var} "${_file}") + if(NOT EXISTS "${_file}") + message(FATAL_ERROR "File or directory ${_file} referenced by variable ${_var} does not exist !") + endif() +endmacro() + +# Copied from `configure_package_config_file` +macro(check_required_components _NAME) + foreach(comp ${${_NAME}_FIND_COMPONENTS}) + if(NOT ${_NAME}_${comp}_FOUND) + if(${_NAME}_FIND_REQUIRED_${comp}) + set(${_NAME}_FOUND FALSE) + endif() + endif() + endforeach() +endmacro() + +set(PhysFS_FOUND TRUE) + +if(SDL_CPU_X86) + set(_sdl_arch_subdir "x86") +elseif(SDL_CPU_X64) + set(_sdl_arch_subdir "x86_64") +elseif(SDL_CPU_ARM32) + set(_sdl_arch_subdir "armeabi-v7a") +elseif(SDL_CPU_ARM64) + set(_sdl_arch_subdir "arm64-v8a") +else() + set(PhysFS_FOUND FALSE) + return() +endif() + +get_filename_component(_physfs_prefix "${CMAKE_CURRENT_LIST_DIR}/../../.." ABSOLUTE) +set_and_check(_physfs_prefix "${_physfs_prefix}") +set_and_check(_physfs_include_dirs "${_physfs_prefix}/include") + +set_and_check(_physfs_lib "${_physfs_prefix}/lib/${_sdl_arch_subdir}/libphysfs.so") +unset(_sdl_arch_subdir) +unset(_physfs_prefix) + +# All targets are created, even when some might not be requested though COMPONENTS. +# This is done for compatibility with CMake generated PhysFS-target.cmake files. + +if(EXISTS "${_physfs_lib}") + if(NOT TARGET PhysFS::PhysFS-shared) + add_library(PhysFS::PhysFS-shared SHARED IMPORTED) + set_target_properties(PhysFS::PhysFS-shared + PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${_physfs_include_dirs}" + IMPORTED_LOCATION "${_physfs_lib}" + ) + endif() + set(PhysFS_PhysFS-shared_FOUND TRUE) +else() + set(PhysFS_PhysFS-shared_FOUND FALSE) +endif() +unset(_physfs_lib) +unset(_physfs_include_dirs) + +set(PhysFS_PhysFS-static_FOUND FALSE) + +if(PhysFS_PhysFS-shared_FOUND) + set(PhysFS_PhysFS_FOUND TRUE) +endif() + +function(_sdl_create_target_alias_compat NEW_TARGET TARGET) + if(CMAKE_VERSION VERSION_LESS "3.18") + # Aliasing local targets is not supported on CMake < 3.18, so make it global. 
+ add_library(${NEW_TARGET} INTERFACE IMPORTED) + set_target_properties(${NEW_TARGET} PROPERTIES INTERFACE_LINK_LIBRARIES "${TARGET}") + else() + add_library(${NEW_TARGET} ALIAS ${TARGET}) + endif() +endfunction() + +# Make sure PhysFS::PhysFS always exists +if(NOT TARGET PhysFS::PhysFS) + if(TARGET PhysFS::PhysFS-shared) + _sdl_create_target_alias_compat(PhysFS::PhysFS PhysFS::PhysFS-shared) + endif() +endif() + +check_required_components(PhysFS) diff --git a/build-scripts/pkg-support/android/aar/cmake/PhysFSConfigVersion.cmake.in b/build-scripts/pkg-support/android/aar/cmake/PhysFSConfigVersion.cmake.in new file mode 100644 index 00000000..d73d64e1 --- /dev/null +++ b/build-scripts/pkg-support/android/aar/cmake/PhysFSConfigVersion.cmake.in @@ -0,0 +1,38 @@ +# @<@PROJECT_NAME@>@ CMake version configuration file: +# This file is meant to be placed in a lib/cmake/PhysFS subfolder of a reconstructed Android PhysFS SDK + +set(PACKAGE_VERSION "@<@PROJECT_VERSION@>@") + +if(PACKAGE_FIND_VERSION_RANGE) + # Package version must be in the requested version range + if ((PACKAGE_FIND_VERSION_RANGE_MIN STREQUAL "INCLUDE" AND PACKAGE_VERSION VERSION_LESS PACKAGE_FIND_VERSION_MIN) + OR ((PACKAGE_FIND_VERSION_RANGE_MAX STREQUAL "INCLUDE" AND PACKAGE_VERSION VERSION_GREATER PACKAGE_FIND_VERSION_MAX) + OR (PACKAGE_FIND_VERSION_RANGE_MAX STREQUAL "EXCLUDE" AND PACKAGE_VERSION VERSION_GREATER_EQUAL PACKAGE_FIND_VERSION_MAX))) + set(PACKAGE_VERSION_COMPATIBLE FALSE) + else() + set(PACKAGE_VERSION_COMPATIBLE TRUE) + endif() +else() + if(PACKAGE_VERSION VERSION_LESS PACKAGE_FIND_VERSION) + set(PACKAGE_VERSION_COMPATIBLE FALSE) + else() + set(PACKAGE_VERSION_COMPATIBLE TRUE) + if(PACKAGE_FIND_VERSION STREQUAL PACKAGE_VERSION) + set(PACKAGE_VERSION_EXACT TRUE) + endif() + endif() +endif() + +# if the using project doesn't have CMAKE_SIZEOF_VOID_P set, fail. +if("${CMAKE_SIZEOF_VOID_P}" STREQUAL "") + set(PACKAGE_VERSION_UNSUITABLE TRUE) +endif() + +include("${CMAKE_CURRENT_LIST_DIR}/sdlcpu.cmake") +SDL_DetectTargetCPUArchitectures(_detected_archs) + +# check that the installed version has a compatible architecture as the one which is currently searching: +if(NOT(SDL_CPU_X86 OR SDL_CPU_X64 OR SDL_CPU_ARM32 OR SDL_CPU_ARM64)) + set(PACKAGE_VERSION "${PACKAGE_VERSION} (X86,X64,ARM32,ARM64)") + set(PACKAGE_VERSION_UNSUITABLE TRUE) +endif() diff --git a/build-scripts/pkg-support/android/aar/description.json.in b/build-scripts/pkg-support/android/aar/description.json.in new file mode 100644 index 00000000..cd175524 --- /dev/null +++ b/build-scripts/pkg-support/android/aar/description.json.in @@ -0,0 +1,6 @@ +{ + "name": "@<@PROJECT_NAME@>@", + "prefab-name": "PhysFS", + "version": "@<@PROJECT_VERSION@>@", + "git-hash": "@<@PROJECT_COMMIT@>@" +} diff --git a/build-scripts/pkg-support/mingw/INSTALL.md.in b/build-scripts/pkg-support/mingw/INSTALL.md.in new file mode 100644 index 00000000..02a6ee93 --- /dev/null +++ b/build-scripts/pkg-support/mingw/INSTALL.md.in @@ -0,0 +1,26 @@ + +# Using this package + +This package contains @<@PROJECT_NAME@>@ built for the mingw-w64 toolchain. + +The files for 32-bit architecture are in i686-w64-mingw32 +The files for 64-bit architecture are in x86_64-w64-mingw32 + +You can install them to another location, just type `make` for help. + +To use this package, point your include path at _arch_/include and your library path at _arch_/lib, link with the @<@PROJECT_NAME@>@ library and copy _arch_/bin/@<@PROJECT_NAME@>@.dll next to your executable. + +e.g. 
+```sh
+gcc -o hello.exe hello.c -Ix86_64-w64-mingw32/include -Lx86_64-w64-mingw32/lib -l@<@PROJECT_NAME@>@
+cp x86_64-w64-mingw32/bin/physfs.dll .
+./hello.exe
+```
+
+# Documentation
+
+An API reference, tutorials, and additional documentation are available at:
+
+https://wiki.icculus.org/PhysicsFS3/QuickReference
+
+https://icculus.org/physfs/docs/html/
diff --git a/build-scripts/pkg-support/mingw/Makefile b/build-scripts/pkg-support/mingw/Makefile
new file mode 100644
index 00000000..9b6cd558
--- /dev/null
+++ b/build-scripts/pkg-support/mingw/Makefile
@@ -0,0 +1,39 @@
+#
+# Makefile for installing the mingw32 version of the PhysFS library
+
+DESTDIR = /usr/local
+ARCHITECTURES := i686-w64-mingw32 x86_64-w64-mingw32
+
+default:
+	@echo "Run \"make install-i686\" to install 32-bit"
+	@echo "Run \"make install-x86_64\" to install 64-bit"
+	@echo "Run \"make install-all\" to install both"
+	@echo "Add DESTDIR=/custom/path to change the destination folder"
+
+install:
+	@if test -d $(ARCH) && test -d $(DESTDIR); then \
+	    (cd $(ARCH) && cp -rv bin include lib share $(DESTDIR)/); \
+	else \
+	    echo "*** ERROR: $(ARCH) or $(DESTDIR) does not exist!"; \
+	    exit 1; \
+	fi
+
+install-i686:
+	$(MAKE) install ARCH=i686-w64-mingw32
+
+install-x86_64:
+	$(MAKE) install ARCH=x86_64-w64-mingw32
+
+install-all:
+	@if test -d $(DESTDIR); then \
+	    mkdir -p $(DESTDIR)/cmake; \
+	    cp -rv cmake/* $(DESTDIR)/cmake; \
+	    for arch in $(ARCHITECTURES); do \
+	        $(MAKE) install ARCH=$$arch DESTDIR=$(DESTDIR)/$$arch; \
+	    done; \
+	else \
+	    echo "*** ERROR: $(DESTDIR) does not exist!"; \
+	    exit 1; \
+	fi
+
+.PHONY: default install install-i686 install-x86_64 install-all
diff --git a/build-scripts/pkg-support/mingw/cmake/PhysFSConfig.cmake b/build-scripts/pkg-support/mingw/cmake/PhysFSConfig.cmake
new file mode 100644
index 00000000..f9d04269
--- /dev/null
+++ b/build-scripts/pkg-support/mingw/cmake/PhysFSConfig.cmake
@@ -0,0 +1,19 @@
+# PhysFS CMake configuration file:
+# This file is meant to be placed in a cmake subfolder of physfs-devel-3.x.y-mingw
+
+if(CMAKE_SIZEOF_VOID_P EQUAL 4)
+    set(physfs_config_path "${CMAKE_CURRENT_LIST_DIR}/../i686-w64-mingw32/lib/cmake/PhysFS/PhysFSConfig.cmake")
+elseif(CMAKE_SIZEOF_VOID_P EQUAL 8)
+    set(physfs_config_path "${CMAKE_CURRENT_LIST_DIR}/../x86_64-w64-mingw32/lib/cmake/PhysFS/PhysFSConfig.cmake")
+else()
+    set(PhysFS_FOUND FALSE)
+    return()
+endif()
+
+if(NOT EXISTS "${physfs_config_path}")
+    message(WARNING "${physfs_config_path} does not exist: MinGW development package is corrupted")
+    set(PhysFS_FOUND FALSE)
+    return()
+endif()
+
+include("${physfs_config_path}")
diff --git a/build-scripts/pkg-support/mingw/cmake/PhysFSConfigVersion.cmake b/build-scripts/pkg-support/mingw/cmake/PhysFSConfigVersion.cmake
new file mode 100644
index 00000000..97a5d45b
--- /dev/null
+++ b/build-scripts/pkg-support/mingw/cmake/PhysFSConfigVersion.cmake
@@ -0,0 +1,19 @@
+# PhysFS CMake version configuration file:
+# This file is meant to be placed in a cmake subfolder of physfs-devel-3.x.y-mingw
+
+if(CMAKE_SIZEOF_VOID_P EQUAL 4)
+    set(physfs_config_path "${CMAKE_CURRENT_LIST_DIR}/../i686-w64-mingw32/lib/cmake/PhysFS/PhysFSConfigVersion.cmake")
+elseif(CMAKE_SIZEOF_VOID_P EQUAL 8)
+    set(physfs_config_path "${CMAKE_CURRENT_LIST_DIR}/../x86_64-w64-mingw32/lib/cmake/PhysFS/PhysFSConfigVersion.cmake")
+else()
+    set(PACKAGE_VERSION_UNSUITABLE TRUE)
+    return()
+endif()
+
+if(NOT EXISTS "${physfs_config_path}")
+    message(WARNING "${physfs_config_path} does not exist: MinGW development package is corrupted")
+ set(PACKAGE_VERSION_UNSUITABLE TRUE) + return() +endif() + +include("${physfs_config_path}") diff --git a/build-scripts/pkg-support/msvc/INSTALL.md.in b/build-scripts/pkg-support/msvc/INSTALL.md.in new file mode 100644 index 00000000..82157b69 --- /dev/null +++ b/build-scripts/pkg-support/msvc/INSTALL.md.in @@ -0,0 +1,25 @@ + +# Using this package + +This package contains @<@PROJECT_NAME@>@ built for Visual Studio. + +To use this package, edit your project properties: +- Add the include directory to "VC++ Directories" -> "Include Directories" +- Add the lib/_arch_ directory to "VC++ Directories" -> "Library Directories" +- Add @<@PROJECT_NAME@>@.lib to Linker -> Input -> "Additional Dependencies" +- Copy lib/_arch_/physfs.dll to your project directory. + +# Documentation + +An API reference, tutorials, and additional documentation is available at: + +https://wiki.icculus.org/PhysicsFS3/QuickReference + +https://icculus.org/physfs/docs/html/ + +## Announcement list + +You can sign up for the low traffic announcement list at: + +https://www.libsdl.org/mailing-list.php + diff --git a/build-scripts/pkg-support/msvc/arm64/INSTALL.md.in b/build-scripts/pkg-support/msvc/arm64/INSTALL.md.in new file mode 100644 index 00000000..956d8948 --- /dev/null +++ b/build-scripts/pkg-support/msvc/arm64/INSTALL.md.in @@ -0,0 +1,13 @@ + +# Using this package + +This package contains @<@PROJECT_NAME@>@ built for arm64 Windows. + +To use this package, simply replace an existing 64-bit ARM physfs.dll with the one included here. + +# Development packages + +If you're looking for packages with headers and libraries, you can download one of these: +- @<@PROJECT_NAME@>@-devel-@<@PROJECT_VERSION@>@-VC.zip, for development using Visual Studio +- @<@PROJECT_NAME@>@-devel-@<@PROJECT_VERSION@>@-mingw.zip, for development using mingw-w64 + diff --git a/build-scripts/pkg-support/msvc/cmake/PhysFSConfig.cmake.in b/build-scripts/pkg-support/msvc/cmake/PhysFSConfig.cmake.in new file mode 100644 index 00000000..77fc3528 --- /dev/null +++ b/build-scripts/pkg-support/msvc/cmake/PhysFSConfig.cmake.in @@ -0,0 +1,98 @@ +# @<@PROJECT_NAME@>@ CMake configuration file: +# This file is meant to be placed in a cmake subfolder of @<@PROJECT_NAME@>@-devel-@<@PROJECT_VERSION@>@-VC.zip + +cmake_minimum_required(VERSION 3.0...4.0) + +include(FeatureSummary) +set_package_properties(PhysicsFS PROPERTIES + URL "https://icculus.org/physfs/" + DESCRIPTION "Library to provide abstract access to various archives" +) + +# Copied from `configure_package_config_file` +macro(set_and_check _var _file) + set(${_var} "${_file}") + if(NOT EXISTS "${_file}") + message(FATAL_ERROR "File or directory ${_file} referenced by variable ${_var} does not exist !") + endif() +endmacro() + +# Copied from `configure_package_config_file` +macro(check_required_components _NAME) + foreach(comp ${${_NAME}_FIND_COMPONENTS}) + if(NOT ${_NAME}_${comp}_FOUND) + if(${_NAME}_FIND_REQUIRED_${comp}) + set(${_NAME}_FOUND FALSE) + endif() + endif() + endforeach() +endmacro() + +set(PhysFS_FOUND TRUE) + +if(SDL_CPU_X86) + set(_sdl_arch_subdir "x86") +elseif(SDL_CPU_X64 OR SDL_CPU_ARM64EC) + set(_sdl_arch_subdir "x64") +elseif(SDL_CPU_ARM64) + set(_sdl_arch_subdir "arm64") +else() + set(PhysFS_FOUND FALSE) + return() +endif() + +get_filename_component(_physfs_prefix "${CMAKE_CURRENT_LIST_DIR}/.." 
ABSOLUTE) +set_and_check(_physfs_prefix "${_physfs_prefix}") +set(_physfs_include_dirs "${_physfs_prefix}/include") + +set(_physfs_implib "${_physfs_prefix}/lib/${_sdl_arch_subdir}/physfs.lib") +set(_physfs_dll "${_physfs_prefix}/lib/${_sdl_arch_subdir}/physfs.dll") + +unset(_sdl_arch_subdir) +unset(_physfs_prefix) + +# All targets are created, even when some might not be requested though COMPONENTS. +# This is done for compatibility with CMake generated PhysFS-target.cmake files. + +if(EXISTS "${_physfs_implib}" AND EXISTS "${_physfs_dll}") + if(NOT TARGET PhysFS::PhysFS-shared) + add_library(PhysFS::PhysFS-shared SHARED IMPORTED) + set_target_properties(PhysFS::PhysFS-shared + PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${_physfs_include_dirs}" + IMPORTED_IMPLIB "${_physfs_implib}" + IMPORTED_LOCATION "${_physfs_dll}" + ) + endif() + set(PhysFS_PhysFS-shared_FOUND TRUE) +else() + set(PhysFS_PhysFS-shared_FOUND FALSE) +endif() +unset(_physfs_implib) +unset(_physfs_dll) +unset(_physfs_include_dirs) + +set(PhysFS_PhysFS-static_FOUND FALSE) + +if(PhysFS_PhysFS-shared_FOUND OR PhysFS_PhysFS-static_FOUND) + set(PhysFS_PhysFS_FOUND TRUE) +endif() + +function(_sdl_create_target_alias_compat NEW_TARGET TARGET) + if(CMAKE_VERSION VERSION_LESS "3.18") + # Aliasing local targets is not supported on CMake < 3.18, so make it global. + add_library(${NEW_TARGET} INTERFACE IMPORTED) + set_target_properties(${NEW_TARGET} PROPERTIES INTERFACE_LINK_LIBRARIES "${TARGET}") + else() + add_library(${NEW_TARGET} ALIAS ${TARGET}) + endif() +endfunction() + +# Make sure PhysFS::PhysFS always exists +if(NOT TARGET PhysFS::PhysFS) + if(TARGET PhysFS::PhysFS-shared) + _sdl_create_target_alias_compat(PhysFS::PhysFS PhysFS::PhysFS-shared) + endif() +endif() + +check_required_components(PhysFS) diff --git a/build-scripts/pkg-support/msvc/cmake/PhysFSConfigVersion.cmake.in b/build-scripts/pkg-support/msvc/cmake/PhysFSConfigVersion.cmake.in new file mode 100644 index 00000000..82b6af0a --- /dev/null +++ b/build-scripts/pkg-support/msvc/cmake/PhysFSConfigVersion.cmake.in @@ -0,0 +1,38 @@ +# @<@PROJECT_NAME@>@ CMake version configuration file: +# This file is meant to be placed in a cmake subfolder of @<@PROJECT_NAME@>@-devel-@<@PROJECT_VERSION@>@-VC.zip + +set(PACKAGE_VERSION "@<@PROJECT_VERSION@>@") + +if(PACKAGE_FIND_VERSION_RANGE) + # Package version must be in the requested version range + if ((PACKAGE_FIND_VERSION_RANGE_MIN STREQUAL "INCLUDE" AND PACKAGE_VERSION VERSION_LESS PACKAGE_FIND_VERSION_MIN) + OR ((PACKAGE_FIND_VERSION_RANGE_MAX STREQUAL "INCLUDE" AND PACKAGE_VERSION VERSION_GREATER PACKAGE_FIND_VERSION_MAX) + OR (PACKAGE_FIND_VERSION_RANGE_MAX STREQUAL "EXCLUDE" AND PACKAGE_VERSION VERSION_GREATER_EQUAL PACKAGE_FIND_VERSION_MAX))) + set(PACKAGE_VERSION_COMPATIBLE FALSE) + else() + set(PACKAGE_VERSION_COMPATIBLE TRUE) + endif() +else() + if(PACKAGE_VERSION VERSION_LESS PACKAGE_FIND_VERSION) + set(PACKAGE_VERSION_COMPATIBLE FALSE) + else() + set(PACKAGE_VERSION_COMPATIBLE TRUE) + if(PACKAGE_FIND_VERSION STREQUAL PACKAGE_VERSION) + set(PACKAGE_VERSION_EXACT TRUE) + endif() + endif() +endif() + +# if the using project doesn't have CMAKE_SIZEOF_VOID_P set, fail. 
+if("${CMAKE_SIZEOF_VOID_P}" STREQUAL "") + set(PACKAGE_VERSION_UNSUITABLE TRUE) +endif() + +include("${CMAKE_CURRENT_LIST_DIR}/sdlcpu.cmake") +SDL_DetectTargetCPUArchitectures(_detected_archs) + +# check that the installed version has a compatible architecture as the one which is currently searching: +if(NOT(SDL_CPU_X86 OR SDL_CPU_X64 OR SDL_CPU_ARM64 OR SDL_CPU_ARM64EC)) + set(PACKAGE_VERSION "${PACKAGE_VERSION} (X86,X64,ARM64)") + set(PACKAGE_VERSION_UNSUITABLE TRUE) +endif() diff --git a/build-scripts/pkg-support/msvc/x64/INSTALL.md.in b/build-scripts/pkg-support/msvc/x64/INSTALL.md.in new file mode 100644 index 00000000..6d3d3868 --- /dev/null +++ b/build-scripts/pkg-support/msvc/x64/INSTALL.md.in @@ -0,0 +1,13 @@ + +# Using this package + +This package contains @<@PROJECT_NAME@>@ built for x64 Windows. + +To use this package, simply replace an existing 64-bit physfs.dll with the one included here. + +# Development packages + +If you're looking for packages with headers and libraries, you can download one of these: +- @<@PROJECT_NAME@>@-devel-@<@PROJECT_VERSION@>@-VC.zip, for development using Visual Studio +- @<@PROJECT_NAME@>@-devel-@<@PROJECT_VERSION@>@-mingw.zip, for development using mingw-w64 + diff --git a/build-scripts/pkg-support/msvc/x86/INSTALL.md.in b/build-scripts/pkg-support/msvc/x86/INSTALL.md.in new file mode 100644 index 00000000..ed3fb410 --- /dev/null +++ b/build-scripts/pkg-support/msvc/x86/INSTALL.md.in @@ -0,0 +1,13 @@ + +# Using this package + +This package contains @<@PROJECT_NAME@>@ built for x86 Windows. + +To use this package, simply replace an existing 32-bit physfs.dll with the one included here. + +# Development packages + +If you're looking for packages with headers and libraries, you can download one of these: +- @<@PROJECT_NAME@>@-devel-@<@PROJECT_VERSION@>@-VC.zip, for development using Visual Studio +- @<@PROJECT_NAME@>@-devel-@<@PROJECT_VERSION@>@-mingw.zip, for development using mingw-w64 + diff --git a/build-scripts/release-info.json b/build-scripts/release-info.json new file mode 100644 index 00000000..6eb835cc --- /dev/null +++ b/build-scripts/release-info.json @@ -0,0 +1,151 @@ +{ + "name": "physfs", + "remote": "icculus/physfs", + "version": { + "file": "src/physfs.h", + "re_major": "^#define PHYSFS_VER_MAJOR\\s+([0-9]+)$", + "re_minor": "^#define PHYSFS_VER_MINOR\\s+([0-9]+)$", + "re_micro": "^#define PHYSFS_VER_PATCH\\s+([0-9]+)$" + }, + "source": { + "checks": [ + "src/physfs.c", + "src/physfs.h", + "test/test_physfs.c", + "extras/physfssdl3.h" + ] + }, + "mingw": { + "cmake": { + "archs": ["x86", "x64"], + "args": [ + "-DPHYSFS_BUILD_SHARED=ON", + "-DPHYSFS_BUILD_STATIC=OFF", + "-DPHYSFS_INSTALL=ON", + "-DPHYSFS_INSTALL_MAN=OFF", + "-DPHYSFS_BUILD_DOCS=OFF", + "-DPHYSFS_BUILD_TEST=OFF" + ], + "shared-static": "args" + }, + "files": { + "": [ + "build-scripts/pkg-support/mingw/INSTALL.md.in:INSTALL.md", + "build-scripts/pkg-support/mingw/Makefile", + "LICENSE.txt", + "README.md" + ], + "cmake": [ + "build-scripts/pkg-support/mingw/cmake/PhysFSConfig.cmake", + "build-scripts/pkg-support/mingw/cmake/PhysFSConfigVersion.cmake" + ] + } + }, + "msvc": { + "cmake": { + "archs": [ + "x86", + "x64", + "arm64" + ], + "args": [ + "-DPHYSFS_BUILD_SHARED=ON", + "-DPHYSFS_BUILD_STATIC=OFF", + "-DPHYSFS_INSTAL=ON", + "-DPHYSFS_INSTALL_MAN=OFF", + "-DPHYSFS_BUILD_DOCS=OFF", + "-DPHYSFS_BUILD_TEST=OFF" + ], + "files-lib": { + "": [ + "bin/physfs.dll" + ] + }, + "files-devel": { + "lib/@<@ARCH@>@": [ + "bin/physfs.dll", + "bin/physfs.pdb", + 
"lib/physfs.lib" + ] + } + }, + "files-lib": { + "": [ + "build-scripts/pkg-support/msvc/@<@ARCH@>@/INSTALL.md.in:INSTALL.md", + "LICENSE.txt", + "README.md" + ] + }, + "files-devel": { + "": [ + "build-scripts/pkg-support/msvc/INSTALL.md.in:INSTALL.md", + "LICENSE.txt", + "README.md" + ], + "cmake": [ + "build-scripts/pkg-support/msvc/cmake/PhysFSConfig.cmake.in:PhysFSConfig.cmake", + "build-scripts/pkg-support/msvc/cmake/PhysFSConfigVersion.cmake.in:PhysFSConfigVersion.cmake", + "cmake/sdlcpu.cmake" + ], + "include": [ + "src/physfs.h" + ] + } + }, + "android": { + "cmake": { + "args": [ + "-DPHYSFS_BUILD_SHARED=ON", + "-DPHYSFS_BUILD_STATIC=OFF", + "-DPHYSFS_INSTAL=ON", + "-DPHYSFS_INSTALL_MAN=OFF", + "-DPHYSFS_BUILD_DOCS=OFF", + "-DPHYSFS_BUILD_TEST=OFF" + ] + }, + "name": "PhysFS", + "modules": { + "PhysFS-shared": { + "type": "library", + "library": "lib/libphysfs.so", + "includes": { + ".": ["include/physfs.h"] + } + }, + "PhysFS": { + "type": "interface", + "export-libraries": [":PhysFS-shared"] + } + }, + "abis": [ + "armeabi-v7a", + "arm64-v8a", + "x86", + "x86_64" + ], + "api-minimum": 21, + "api-target": 35, + "ndk-minimum": 28, + "aar-files": { + "": [ + "build-scripts/pkg-support/android/aar/__main__.py.in:__main__.py", + "build-scripts/pkg-support/android/aar/description.json.in:description.json" + ], + "META-INF": [ + "LICENSE.txt" + ], + "cmake": [ + "cmake/sdlcpu.cmake", + "build-scripts/pkg-support/android/aar/cmake/PhysFSConfig.cmake", + "build-scripts/pkg-support/android/aar/cmake/PhysFSConfigVersion.cmake.in:PhysFSConfigVersion.cmake" + ] + }, + "files": { + "": [ + "build-scripts/pkg-support/android/INSTALL.md.in:INSTALL.md", + "LICENSE.txt", + "README.md" + ] + } + } +} diff --git a/build-scripts/test-versioning.sh b/build-scripts/test-versioning.sh new file mode 100755 index 00000000..9acc6848 --- /dev/null +++ b/build-scripts/test-versioning.sh @@ -0,0 +1,85 @@ +#!/bin/sh +# Copyright 2022 Collabora Ltd. +# SPDX-License-Identifier: Zlib + +set -eu + +cd `dirname $0`/.. 
+
+# Needed so sed doesn't report illegal byte sequences on macOS
+export LC_CTYPE=C
+
+header=src/physfs.h
+ref_major=$(sed -ne 's/^#define PHYSFS_VER_MAJOR *//p' $header)
+ref_minor=$(sed -ne 's/^#define PHYSFS_VER_MINOR *//p' $header)
+ref_micro=$(sed -ne 's/^#define PHYSFS_VER_PATCH *//p' $header)
+ref_version="${ref_major}.${ref_minor}.${ref_micro}"
+
+tests=0
+failed=0
+
+ok () {
+    tests=$(( tests + 1 ))
+    echo "ok - $*"
+}
+
+not_ok () {
+    tests=$(( tests + 1 ))
+    echo "not ok - $*"
+    failed=1
+}
+
+version=$(sed -ne 's/^set(PHYSFS_VERSION \([0-9.]*\))$/\1/p' CMakeLists.txt)
+
+if [ "$ref_version" = "$version" ]; then
+    ok "CMakeLists.txt $version"
+else
+    not_ok "CMakeLists.txt $version disagrees with physfs.h $ref_version"
+fi
+
+version=$(sed -ne 's/^VERSION = \([0-9.]*\)$/\1/p' src/Makefile.os2)
+
+if [ "$ref_version" = "$version" ]; then
+    ok "src/Makefile.os2 $version"
+else
+    not_ok "src/Makefile.os2 $version disagrees with physfs.h $ref_version"
+fi
+
+for rcfile in src/physfs_version.rc; do
+    tuple=$(sed -ne 's/^ *FILEVERSION *//p' "$rcfile" | tr -d '\r')
+    ref_tuple="${ref_major},${ref_minor},${ref_micro},0"
+
+    if [ "$ref_tuple" = "$tuple" ]; then
+        ok "$rcfile FILEVERSION $tuple"
+    else
+        not_ok "$rcfile FILEVERSION $tuple disagrees with physfs.h $ref_tuple"
+    fi
+
+    tuple=$(sed -ne 's/^ *PRODUCTVERSION *//p' "$rcfile" | tr -d '\r')
+
+    if [ "$ref_tuple" = "$tuple" ]; then
+        ok "$rcfile PRODUCTVERSION $tuple"
+    else
+        not_ok "$rcfile PRODUCTVERSION $tuple disagrees with physfs.h $ref_tuple"
+    fi
+
+    tuple=$(sed -Ene 's/^ *VALUE "FileVersion", "([0-9, ]*)\\0"\r?$/\1/p' "$rcfile" | tr -d '\r')
+    ref_tuple="${ref_major}, ${ref_minor}, ${ref_micro}, 0"
+
+    if [ "$ref_tuple" = "$tuple" ]; then
+        ok "$rcfile FileVersion $tuple"
+    else
+        not_ok "$rcfile FileVersion $tuple disagrees with physfs.h $ref_tuple"
+    fi
+
+    tuple=$(sed -Ene 's/^ *VALUE "ProductVersion", "([0-9, ]*)\\0"\r?$/\1/p' "$rcfile" | tr -d '\r')
+
+    if [ "$ref_tuple" = "$tuple" ]; then
+        ok "$rcfile ProductVersion $tuple"
+    else
+        not_ok "$rcfile ProductVersion $tuple disagrees with physfs.h $ref_tuple"
+    fi
+done
+
+echo "1..$tests"
+exit "$failed"
diff --git a/build-scripts/wikiheaders.pl b/build-scripts/wikiheaders.pl
new file mode 100755
index 00000000..bd843de5
--- /dev/null
+++ b/build-scripts/wikiheaders.pl
@@ -0,0 +1,3482 @@
+#!/usr/bin/perl -w
+
+# Simple DirectMedia Layer
+# Copyright (C) 1997-2025 Sam Lantinga <slouken@libsdl.org>
+#
+# This software is provided 'as-is', without any express or implied
+# warranty. In no event will the authors be held liable for any damages
+# arising from the use of this software.
+#
+# Permission is granted to anyone to use this software for any purpose,
+# including commercial applications, and to alter it and redistribute it
+# freely, subject to the following restrictions:
+#
+# 1. The origin of this software must not be misrepresented; you must not
+#    claim that you wrote the original software. If you use this software
+#    in a product, an acknowledgment in the product documentation would be
+#    appreciated but is not required.
+# 2. Altered source versions must be plainly marked as such, and must not be
+#    misrepresented as being the original software.
+# 3. This notice may not be removed or altered from any source distribution.
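+
+# wikiheaders.pl keeps the documentation in the C headers and the wiki in
+# sync: depending on the --copy-to-* option given, it parses Doxygen-style
+# comments out of the headers and regenerates wiki pages, manpages or LaTeX,
+# or pulls edited wiki content back into the headers.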
+
+use warnings;
+use strict;
+use File::Path;
+use Text::Wrap;
+
+$Text::Wrap::huge = 'overflow';
+
+my $projectfullname = 'Simple Directmedia Layer';
+my $projectshortname = 'SDL';
+my $wikisubdir = '';
+my $incsubdir = 'include';
+my $readmesubdir = undef;
+my $apiprefixregex = undef;
+my $apipropertyregex = undef;
+my $versionfname = 'include/SDL_version.h';
+my $versionmajorregex = '\A\#define\s+SDL_MAJOR_VERSION\s+(\d+)\Z';
+my $versionminorregex = '\A\#define\s+SDL_MINOR_VERSION\s+(\d+)\Z';
+my $versionmicroregex = '\A\#define\s+SDL_MICRO_VERSION\s+(\d+)\Z';
+my $wikidocsectionsym = 'SDL_WIKI_DOCUMENTATION_SECTION';
+my $forceinlinesym = 'SDL_FORCE_INLINE';
+my $deprecatedsym = 'SDL_DEPRECATED';
+my $declspecsym = '(?:SDLMAIN_|SDL_)?DECLSPEC';
+my $callconvsym = 'SDLCALL';
+my $mainincludefname = 'SDL.h';
+my $selectheaderregex = '\ASDL.*?\.h\Z';
+my $projecturl = 'https://libsdl.org/';
+my $wikiurl = 'https://wiki.libsdl.org';
+my $bugreporturl = 'https://github.com/libsdl-org/sdlwiki/issues/new';
+my $srcpath = undef;
+my $wikipath = undef;
+my $warn_about_missing = 0;
+my $copy_direction = 0;
+my $optionsfname = undef;
+my $wikipreamble = undef;
+my $wikiheaderfiletext = 'Defined in %fname%';
+my $manpageheaderfiletext = 'Defined in %fname%';
+my $manpagesymbolfilterregex = undef;
+my $headercategoryeval = undef;
+my $quickrefenabled = 0;
+my @quickrefcategoryorder;
+my $quickreftitle = undef;
+my $quickrefurl = undef;
+my $quickrefdesc = undef;
+my $quickrefmacroregex = undef;
+my $envvarenabled = 0;
+my $envvartitle = 'Environment Variables';
+my $envvardesc = undef;
+my $envvarsymregex = undef;
+my $envvarsymreplace = undef;
+my $changeformat = undef;
+my $manpath = undef;
+my $gitrev = undef;
+
+foreach (@ARGV) {
+    $warn_about_missing = 1, next if $_ eq '--warn-about-missing';
+    $copy_direction = 1, next if $_ eq '--copy-to-headers';
+    $copy_direction = 1, next if $_ eq '--copy-to-header';
+    $copy_direction = -1, next if $_ eq '--copy-to-wiki';
+    $copy_direction = -2, next if $_ eq '--copy-to-manpages';
+    $copy_direction = -3, next if $_ eq '--report-coverage-gaps';
+    $copy_direction = -4, next if $_ eq '--copy-to-latex';
+    if (/\A--options=(.*)\Z/) {
+        $optionsfname = $1;
+        next;
+    } elsif (/\A--changeformat=(.*)\Z/) {
+        $changeformat = $1;
+        next;
+    } elsif (/\A--manpath=(.*)\Z/) {
+        $manpath = $1;
+        next;
+    } elsif (/\A--rev=(.*)\Z/) {
+        $gitrev = $1;
+        next;
+    }
+    $srcpath = $_, next if not defined $srcpath;
+    $wikipath = $_, next if not defined $wikipath;
+}
+
+my $default_optionsfname = '.wikiheaders-options';
+$default_optionsfname = "$srcpath/$default_optionsfname" if defined $srcpath;
+
+if ((not defined $optionsfname) && (-f $default_optionsfname)) {
+    $optionsfname = $default_optionsfname;
+}
+
+if (defined $optionsfname) {
+    open OPTIONS, '<', $optionsfname or die("Failed to open options file '$optionsfname': $!\n");
+    while (<OPTIONS>) {
+        next if /\A\s*\#/;  # Skip lines that start with (optional whitespace, then) '#' as comments.
+ + chomp; + if (/\A(.*?)\=(.*)\Z/) { + my $key = $1; + my $val = $2; + $key =~ s/\A\s+//; + $key =~ s/\s+\Z//; + $val =~ s/\A\s+//; + $val =~ s/\s+\Z//; + $warn_about_missing = int($val), next if $key eq 'warn_about_missing'; + $srcpath = $val, next if $key eq 'srcpath'; + $wikipath = $val, next if $key eq 'wikipath'; + $apiprefixregex = $val, next if $key eq 'apiprefixregex'; + $apipropertyregex = $val, next if $key eq 'apipropertyregex'; + $projectfullname = $val, next if $key eq 'projectfullname'; + $projectshortname = $val, next if $key eq 'projectshortname'; + $wikisubdir = $val, next if $key eq 'wikisubdir'; + $incsubdir = $val, next if $key eq 'incsubdir'; + $readmesubdir = $val, next if $key eq 'readmesubdir'; + $versionmajorregex = $val, next if $key eq 'versionmajorregex'; + $versionminorregex = $val, next if $key eq 'versionminorregex'; + $versionmicroregex = $val, next if $key eq 'versionmicroregex'; + $versionfname = $val, next if $key eq 'versionfname'; + $mainincludefname = $val, next if $key eq 'mainincludefname'; + $selectheaderregex = $val, next if $key eq 'selectheaderregex'; + $projecturl = $val, next if $key eq 'projecturl'; + $wikiurl = $val, next if $key eq 'wikiurl'; + $bugreporturl = $val, next if $key eq 'bugreporturl'; + $wikipreamble = $val, next if $key eq 'wikipreamble'; + $wikiheaderfiletext = $val, next if $key eq 'wikiheaderfiletext'; + $manpageheaderfiletext = $val, next if $key eq 'manpageheaderfiletext'; + $manpagesymbolfilterregex = $val, next if $key eq 'manpagesymbolfilterregex'; + $headercategoryeval = $val, next if $key eq 'headercategoryeval'; + $quickrefenabled = int($val), next if $key eq 'quickrefenabled'; + @quickrefcategoryorder = split(/,/, $val), next if $key eq 'quickrefcategoryorder'; + $quickreftitle = $val, next if $key eq 'quickreftitle'; + $quickrefurl = $val, next if $key eq 'quickrefurl'; + $quickrefdesc = $val, next if $key eq 'quickrefdesc'; + $quickrefmacroregex = $val, next if $key eq 'quickrefmacroregex'; + $envvarenabled = int($val), next if $key eq 'envvarenabled'; + $envvartitle = $val, next if $key eq 'envvartitle'; + $envvardesc = $val, next if $key eq 'envvardesc'; + $envvarsymregex = $val, next if $key eq 'envvarsymregex'; + $envvarsymreplace = $val, next if $key eq 'envvarsymreplace'; + $wikidocsectionsym = $val, next if $key eq 'wikidocsectionsym'; + $forceinlinesym = $val, next if $key eq 'forceinlinesym'; + $deprecatedsym = $val, next if $key eq 'deprecatedsym'; + $declspecsym = $val, next if $key eq 'declspecsym'; + $callconvsym = $val, next if $key eq 'callconvsym'; + + } + } + close(OPTIONS); +} + +sub escLaTeX { + my $str = shift; + $str =~ s/([_\#\&\^])/\\$1/g; + return $str; +} + +my $wordwrap_mode = 'mediawiki'; +sub wordwrap_atom { # don't call this directly. + my $str = shift; + my $retval = ''; + + # wordwrap but leave links intact, even if they overflow. + if ($wordwrap_mode eq 'mediawiki') { + while ($str =~ s/(.*?)\s*(\[https?\:\/\/.*?\s+.*?\])\s*//ms) { + $retval .= fill('', '', $1); # wrap it. + $retval .= "\n$2\n"; # don't wrap it. + } + } elsif ($wordwrap_mode eq 'md') { + while ($str =~ s/(.*?)\s*(\[.*?\]\(https?\:\/\/.*?\))\s*//ms) { + $retval .= fill('', '', $1); # wrap it. + $retval .= "\n$2\n"; # don't wrap it. + } + } + + return $retval . fill('', '', $str); +} + +sub wordwrap_with_bullet_indent { # don't call this directly. 
+ my $bullet = shift; + my $str = shift; + my $retval = ''; + + #print("WORDWRAP BULLET ('$bullet'):\n\n$str\n\n"); + + # You _can't_ (at least with Pandoc) have a bullet item with a newline in + # MediaWiki, so _remove_ wrapping! + if ($wordwrap_mode eq 'mediawiki') { + $retval = "$bullet$str"; + $retval =~ s/\n/ /gms; + $retval =~ s/\s+$//gms; + #print("WORDWRAP BULLET DONE:\n\n$retval\n\n"); + return "$retval\n"; + } + + my $bulletlen = length($bullet); + + # wrap it and then indent each line to be under the bullet. + $Text::Wrap::columns -= $bulletlen; + my @wrappedlines = split /\n/, wordwrap_atom($str); + $Text::Wrap::columns += $bulletlen; + + my $prefix = $bullet; + my $usual_prefix = ' ' x $bulletlen; + + foreach (@wrappedlines) { + s/\s*\Z//; + $retval .= "$prefix$_\n"; + $prefix = $usual_prefix; + } + + return $retval; +} + +sub wordwrap_one_paragraph { # don't call this directly. + my $retval = ''; + my $p = shift; + #print "\n\n\nPARAGRAPH: [$p]\n\n\n"; + if ($p =~ s/\A([\*\-] )//) { # bullet list, starts with "* " or "- ". + my $bullet = $1; + my $item = ''; + my @items = split /\n/, $p; + foreach (@items) { + if (s/\A([\*\-] )//) { + $retval .= wordwrap_with_bullet_indent($bullet, $item); + $item = ''; + } + s/\A\s*//; + $item .= "$_\n"; # accumulate lines until we hit the end or another bullet. + } + if ($item ne '') { + $retval .= wordwrap_with_bullet_indent($bullet, $item); + } + } elsif ($p =~ /\A\s*\|.*\|\s*\n/) { # Markdown table + $retval = "$p\n"; # don't wrap it (!!! FIXME: but maybe parse by lines until we run out of table...) + } else { + $retval = wordwrap_atom($p) . "\n"; + } + + return $retval; +} + +sub wordwrap_paragraphs { # don't call this directly. + my $str = shift; + my $retval = ''; + my @paragraphs = split /\n\n/, $str; + foreach (@paragraphs) { + next if $_ eq ''; + $retval .= wordwrap_one_paragraph($_); + $retval .= "\n"; + } + return $retval; +} + +my $wordwrap_default_columns = 76; +sub wordwrap { + my $str = shift; + my $columns = shift; + + $columns = $wordwrap_default_columns if not defined $columns; + $columns += $wordwrap_default_columns if $columns < 0; + $Text::Wrap::columns = $columns; + + my $retval = ''; + + #print("\n\nWORDWRAP:\n\n$str\n\n\n"); + + $str =~ s/\A\n+//ms; + + while ($str =~ s/(.*?)(\`\`\`.*?\`\`\`|\)//ms) { + #print("\n\nWORDWRAP BLOCK:\n\n$1\n\n ===\n\n$2\n\n\n"); + $retval .= wordwrap_paragraphs($1); # wrap it. + $retval .= "$2\n\n"; # don't wrap it. + } + + $retval .= wordwrap_paragraphs($str); # wrap what's left. + $retval =~ s/\n+\Z//ms; + + #print("\n\nWORDWRAP DONE:\n\n$retval\n\n\n"); + return $retval; +} + +# This assumes you're moving from Markdown (in the Doxygen data) to Wiki, which +# is why the 'md' section is so sparse. +sub wikify_chunk { + my $wikitype = shift; + my $str = shift; + my $codelang = shift; + my $code = shift; + + #print("\n\nWIKIFY CHUNK:\n\n$str\n\n\n"); + + if ($wikitype eq 'mediawiki') { + # convert `code` things first, so they aren't mistaken for other markdown items. + my $codedstr = ''; + while ($str =~ s/\A(.*?)\`(.*?)\`//ms) { + my $codeblock = $2; + $codedstr .= wikify_chunk($wikitype, $1, undef, undef); + if (defined $apiprefixregex) { + # Convert obvious API things to wikilinks, even inside `code` blocks. + $codeblock =~ s/(\A|[^\/a-zA-Z0-9_])($apiprefixregex[a-zA-Z0-9_]+)/$1\[\[$2\]\]/gms; + } + $codedstr .= "$codeblock"; + } + + # Convert obvious API things to wikilinks. 
+ if (defined $apiprefixregex) { + $str =~ s/(\A|[^\/a-zA-Z0-9_])($apiprefixregex[a-zA-Z0-9_]+)/$1\[\[$2\]\]/gms; + } + + # Make some Markdown things into MediaWiki... + + # links + $str =~ s/\[(.*?)\]\((https?\:\/\/.*?)\)/\[$2 $1\]/g; + + # bold+italic + $str =~ s/\*\*\*(.*?)\*\*\*/'''''$1'''''/gms; + + # bold + $str =~ s/\*\*(.*?)\*\*/'''$1'''/gms; + + # italic + $str =~ s/\*(.*?)\*/''$1''/gms; + + # bullets + $str =~ s/^\- /* /gm; + + $str = $codedstr . $str; + + if (defined $code) { + $str .= "$code<\/syntaxhighlight>"; + } + } elsif ($wikitype eq 'md') { + # convert `code` things first, so they aren't mistaken for other markdown items. + my $codedstr = ''; + while ($str =~ s/\A(.*?)(\`.*?\`)//ms) { + my $codeblock = $2; + $codedstr .= wikify_chunk($wikitype, $1, undef, undef); + if (defined $apiprefixregex) { + # Convert obvious API things to wikilinks, even inside `code` blocks, + # BUT ONLY IF the entire code block is the API thing, + # So something like "just call `SDL_Whatever`" will become + # "just call [`SDL_Whatever`](SDL_Whatever)", but + # "just call `SDL_Whatever(7)`" will not. It's just the safest + # way to do this without resorting to wrapping things in html tags. + $codeblock =~ s/\A\`($apiprefixregex[a-zA-Z0-9_]+)\`\Z/[`$1`]($1)/gms; + } + $codedstr .= $codeblock; + } + + # Convert obvious API things to wikilinks. + if (defined $apiprefixregex) { + $str =~ s/(\A|[^\/a-zA-Z0-9_\[])($apiprefixregex[a-zA-Z0-9_]+)/$1\[$2\]\($2\)/gms; + } + + $str = $codedstr . $str; + + if (defined $code) { + $str .= "```$codelang\n$code\n```\n"; + } + } + + #print("\n\nWIKIFY CHUNK DONE:\n\n$str\n\n\n"); + + return $str; +} + +sub wikify { + my $wikitype = shift; + my $str = shift; + my $retval = ''; + + #print("WIKIFY WHOLE:\n\n$str\n\n\n"); + + while ($str =~ s/\A(.*?)\`\`\`(.*?)\n(.*?)\n\`\`\`(\n|\Z)//ms) { + $retval .= wikify_chunk($wikitype, $1, $2, $3); + } + $retval .= wikify_chunk($wikitype, $str, undef, undef); + + #print("WIKIFY WHOLE DONE:\n\n$retval\n\n\n"); + + return $retval; +} + + +my $dewikify_mode = 'md'; +my $dewikify_manpage_code_indent = 1; + +sub dewikify_chunk { + my $wikitype = shift; + my $str = shift; + my $codelang = shift; + my $code = shift; + + #print("\n\nDEWIKIFY CHUNK:\n\n$str\n\n\n"); + + if ($dewikify_mode eq 'md') { + if ($wikitype eq 'mediawiki') { + # Doxygen supports Markdown (and it just simply looks better than MediaWiki + # when looking at the raw headers), so do some conversions here as necessary. + + # Dump obvious wikilinks. + if (defined $apiprefixregex) { + $str =~ s/\[\[($apiprefixregex[a-zA-Z0-9_]+)\]\]/$1/gms; + } + + # links + $str =~ s/\[(https?\:\/\/.*?)\s+(.*?)\]/\[$2\]\($1\)/g; + + # is also popular. :/ + $str =~ s/\(.*?)<\/code>/`$1`/gms; + + # bold+italic + $str =~ s/'''''(.*?)'''''/***$1***/gms; + + # bold + $str =~ s/'''(.*?)'''/**$1**/gms; + + # italic + $str =~ s/''(.*?)''/*$1*/gms; + + # bullets + $str =~ s/^\* /- /gm; + } elsif ($wikitype eq 'md') { + # Dump obvious wikilinks. The rest can just passthrough. + if (defined $apiprefixregex) { + $str =~ s/\[(\`?$apiprefixregex[a-zA-Z0-9_]+\`?)\]\($apiprefixregex[a-zA-Z0-9_]+\)/$1/gms; + } + } + + if (defined $code) { + $str .= "\n```$codelang\n$code\n```\n"; + } + } elsif ($dewikify_mode eq 'manpage') { + # make sure these can't become part of roff syntax. + $str =~ s/\\/\\(rs/gms; + $str =~ s/\./\\[char46]/gms; + $str =~ s/"/\\(dq/gms; + $str =~ s/'/\\(aq/gms; + + if ($wikitype eq 'mediawiki') { + # Dump obvious wikilinks. 
+ if (defined $apiprefixregex) { + $str =~ s/\s*\[\[($apiprefixregex[a-zA-Z0-9_]+)\]\]\s*/\n.BR $1\n/gms; + } + + # links + $str =~ s/\[(https?\:\/\/.*?)\s+(.*?)\]/\n.URL "$1" "$2"\n/g; + + # is also popular. :/ + $str =~ s/\s*\(.*?)<\/code>\s*/\n.BR $1\n/gms; + + # bold+italic (this looks bad, just make it bold). + $str =~ s/\s*'''''(.*?)'''''\s*/\n.B $1\n/gms; + + # bold + $str =~ s/\s*'''(.*?)'''\s*/\n.B $1\n/gms; + + # italic + $str =~ s/\s*''(.*?)''\s*/\n.I $1\n/gms; + + # bullets + $str =~ s/^\* /\n\\\(bu /gm; + } elsif ($wikitype eq 'md') { + # bullets + $str =~ s/^\- /\n\\(bu /gm; + # merge paragraphs + $str =~ s/^[ \t]+//gm; + $str =~ s/([^\-\n])\n([^\-\n])/$1 $2/g; + $str =~ s/\n\n/\n.PP\n/g; + + # Dump obvious wikilinks. + if (defined $apiprefixregex) { + my $apr = $apiprefixregex; + if(!($apr =~ /\A\(.*\)\Z/s)) { + # we're relying on the apiprefixregex having a capturing group. + $apr = "(" . $apr . ")"; + } + $str =~ s/(\S*?)\[\`?($apr[a-zA-Z0-9_]+)\`?\]\($apr[a-zA-Z0-9_]+\)(\S*)\s*/\n.BR "" "$1" "$2" "$5"\n/gm; + # handle cases like "[x](x), [y](y), [z](z)" being separated. + while($str =~ s/(\.BR[^\n]*)\n\n\.BR/$1\n.BR/gm) {} + } + + # links + $str =~ s/\[(.*?)]\((https?\:\/\/.*?)\)/\n.URL "$2" "$1"\n/g; + + # is also popular. :/ + $str =~ s/\s*(\S*?)\`([^\n]*?)\`(\S*)\s*/\n.BR "" "$1" "$2" "$3"\n/gms; + + # bold+italic (this looks bad, just make it bold). + $str =~ s/\s*(\S*?)\*\*\*([^\n]*?)\*\*\*(\S*)\s*/\n.BR "" "$1" "$2" "$3"\n/gms; + + # bold + $str =~ s/\s*(\S*?)\*\*([^\n]*?)\*\*(\S*)\s*/\n.BR "" "$1" "$2" "$3"\n/gms; + + # italic + $str =~ s/\s*(\S*?)\*([^\n]*?)\*(\S*)\s*/\n.IR "" "$1" "$2" "$3"\n/gms; + } + + # cleanup unnecessary quotes + $str =~ s/(\.[IB]R?)(.*?) ""\n/$1$2\n/gm; + $str =~ s/(\.[IB]R?) "" ""(.*?)\n/$1$2\n/gm; + $str =~ s/"(\S+)"/$1/gm; + # cleanup unnecessary whitespace + $str =~ s/ +\n/\n/gm; + + if (defined $code) { + $code =~ s/\A\n+//gms; + $code =~ s/\n+\Z//gms; + $code =~ s/\\/\\(rs/gms; + if ($dewikify_manpage_code_indent) { + $str .= "\n.IP\n" + } else { + $str .= "\n.PP\n" + } + $str .= ".EX\n$code\n.EE\n.PP\n"; + } + } elsif ($dewikify_mode eq 'LaTeX') { + if ($wikitype eq 'mediawiki') { + # Dump obvious wikilinks. + if (defined $apiprefixregex) { + $str =~ s/\s*\[\[($apiprefixregex[a-zA-Z0-9_]+)\]\]/$1/gms; + } + + # links + $str =~ s/\[(https?\:\/\/.*?)\s+(.*?)\]/\\href{$1}{$2}/g; + + # is also popular. :/ + $str =~ s/\s*\(.*?)<\/code>/ \\texttt{$1}/gms; + + # bold+italic + $str =~ s/\s*'''''(.*?)'''''/ \\textbf{\\textit{$1}}/gms; + + # bold + $str =~ s/\s*'''(.*?)'''/ \\textbf{$1}/gms; + + # italic + $str =~ s/\s*''(.*?)''/ \\textit{$1}/gms; + + # bullets + $str =~ s/^\*\s+/ \\item /gm; + } elsif ($wikitype eq 'md') { + # Dump obvious wikilinks. + if (defined $apiprefixregex) { + $str =~ s/\[(\`?$apiprefixregex[a-zA-Z0-9_]+\`?)\]\($apiprefixregex[a-zA-Z0-9_]+\)/$1/gms; + } + + # links + $str =~ s/\[(.*?)]\((https?\:\/\/.*?)\)/\\href{$2}{$1}/g; + + # is also popular. :/ + $str =~ s/\s*\`(.*?)\`/ \\texttt{$1}/gms; + + # bold+italic + $str =~ s/\s*\*\*\*(.*?)\*\*\*/ \\textbf{\\textit{$1}}/gms; + + # bold + $str =~ s/\s*\*\*(.*?)\*\*/ \\textbf{$1}/gms; + + # italic + $str =~ s/\s*\*(.*?)\*/ \\textit{$1}/gms; + + # bullets + $str =~ s/^\-\s+/ \\item /gm; + } + + # Wrap bullet lists in itemize blocks... 
+ $str =~ s/^(\s*\\item .*?)(\n\n|\Z)/\n\\begin{itemize}\n$1$2\n\\end{itemize}\n\n/gms; + + $str = escLaTeX($str); + + if (defined $code) { + $code =~ s/\A\n+//gms; + $code =~ s/\n+\Z//gms; + + if (($codelang eq '') || ($codelang eq 'output')) { + $str .= "\\begin{verbatim}\n$code\n\\end{verbatim}\n"; + } else { + if ($codelang eq 'c') { + $codelang = 'C'; + } elsif ($codelang eq 'c++') { + $codelang = 'C++'; + } else { + die("Unexpected codelang '$codelang'"); + } + $str .= "\n\\lstset{language=$codelang}\n"; + $str .= "\\begin{lstlisting}\n$code\n\\end{lstlisting}\n"; + } + } + } else { + die("Unexpected dewikify_mode"); + } + + #print("\n\nDEWIKIFY CHUNK DONE:\n\n$str\n\n\n"); + + return $str; +} + +sub dewikify { + my $wikitype = shift; + my $str = shift; + return '' if not defined $str; + + #print("DEWIKIFY WHOLE:\n\n$str\n\n\n"); + + $str =~ s/\A[\s\n]*\= .*? \=\s*?\n+//ms; + $str =~ s/\A[\s\n]*\=\= .*? \=\=\s*?\n+//ms; + + my $retval = ''; + if ($wikitype eq 'mediawiki') { + while ($str =~ s/\A(.*?)(.*?)<\/syntaxhighlight\>//ms) { + $retval .= dewikify_chunk($wikitype, $1, $2, $3); + } + } elsif ($wikitype eq 'md') { + while ($str =~ s/\A(.*?)\n?```(.*?)\n(.*?)\n```\n//ms) { + $retval .= dewikify_chunk($wikitype, $1, $2, $3); + } + } + $retval .= dewikify_chunk($wikitype, $str, undef, undef); + + #print("DEWIKIFY WHOLE DONE:\n\n$retval\n\n\n"); + + return $retval; +} + +sub filecopy { + my $src = shift; + my $dst = shift; + my $endline = shift; + $endline = "\n" if not defined $endline; + + open(COPYIN, '<', $src) or die("Failed to open '$src' for reading: $!\n"); + open(COPYOUT, '>', $dst) or die("Failed to open '$dst' for writing: $!\n"); + while () { + chomp; + s/[ \t\r\n]*\Z//; + print COPYOUT "$_$endline"; + } + close(COPYOUT); + close(COPYIN); +} + +sub usage { + die("USAGE: $0 [--copy-to-headers|--copy-to-wiki|--copy-to-manpages] [--warn-about-missing] [--manpath=]\n\n"); +} + +usage() if not defined $srcpath; +usage() if not defined $wikipath; +#usage() if $copy_direction == 0; + +if (not defined $manpath) { + $manpath = "$srcpath/man"; +} + +my @standard_wiki_sections = ( + 'Draft', + '[Brief]', + 'Deprecated', + 'Header File', + 'Syntax', + 'Function Parameters', + 'Macro Parameters', + 'Fields', + 'Values', + 'Return Value', + 'Remarks', + 'Thread Safety', + 'Version', + 'Code Examples', + 'See Also' +); + +# Sections that only ever exist in the wiki and shouldn't be deleted when +# not found in the headers. +my %only_wiki_sections = ( # The ones don't mean anything, I just need to check for key existence. + 'Draft', 1, + 'Code Examples', 1, + 'Header File', 1 +); + + +my %headers = (); # $headers{"SDL_audio.h"} -> reference to an array of all lines of text in SDL_audio.h. +my %headersyms = (); # $headersyms{"SDL_OpenAudio"} -> string of header documentation for SDL_OpenAudio, with comment '*' bits stripped from the start. Newlines embedded! +my %headerdecls = (); +my %headersymslocation = (); # $headersymslocation{"SDL_OpenAudio"} -> name of header holding SDL_OpenAudio define ("SDL_audio.h" in this case). +my %headersymschunk = (); # $headersymschunk{"SDL_OpenAudio"} -> offset in array in %headers that should be replaced for this symbol. +my %headersymshasdoxygen = (); # $headersymshasdoxygen{"SDL_OpenAudio"} -> 1 if there was no existing doxygen for this function. 
+my %headersymstype = (); # $headersymstype{"SDL_OpenAudio"} -> 1 (function), 2 (macro), 3 (struct), 4 (enum), 5 (other typedef) +my %headersymscategory = (); # $headersymscategory{"SDL_OpenAudio"} -> 'Audio' ... this is set with a `/* WIKI CATEGEORY: Audio */` comment in the headers that sets it on all symbols until a new comment changes it. So usually, once at the top of the header file. +my %headercategorydocs = (); # $headercategorydocs{"Audio"} -> (fake) symbol for this category's documentation. Undefined if not documented. +my %headersymsparaminfo = (); # $headersymsparaminfo{"SDL_OpenAudio"} -> reference to array of parameters, pushed by name, then C type string, repeating. Undef'd if void params, or not a function. +my %headersymsrettype = (); # $headersymsrettype{"SDL_OpenAudio"} -> string of C datatype of return value. Undef'd if not a function. +my %wikitypes = (); # contains string of wiki page extension, like $wikitypes{"SDL_OpenAudio"} == 'mediawiki' +my %wikisyms = (); # contains references to hash of strings, each string being the full contents of a section of a wiki page, like $wikisyms{"SDL_OpenAudio"}{"Remarks"}. +my %wikisectionorder = (); # contains references to array, each array item being a key to a wikipage section in the correct order, like $wikisectionorder{"SDL_OpenAudio"}[2] == 'Remarks' +my %quickreffuncorder = (); # contains references to array, each array item being a key to a category with functions in the order they appear in the headers, like $quickreffuncorder{"Audio"}[0] == 'SDL_GetNumAudioDrivers' + +my %referenceonly = (); # $referenceonly{"Y"} -> symbol name that this symbol is bound to. This makes wiki pages that say "See X" where "X" is a typedef and "Y" is a define attached to it. These pages are generated in the wiki only and do not bridge to the headers or manpages. + +my @coverage_gap = (); # array of strings that weren't part of documentation, or blank, or basic preprocessor logic. Lets you see what this script is missing! + +sub add_coverage_gap { + if ($copy_direction == -3) { # --report-coverage-gaps + my $text = shift; + my $dent = shift; + my $lineno = shift; + return if $text =~ /\A\s*\Z/; # skip blank lines + return if $text =~ /\A\s*\#\s*(if|el|endif|include)/; # skip preprocessor floof. + push @coverage_gap, "$dent:$lineno: $text"; + } +} + +sub print_undocumented_section { + my $fh = shift; + my $typestr = shift; + my $typeval = shift; + + print $fh "## $typestr defined in the headers, but not in the wiki\n\n"; + my $header_only_sym = 0; + foreach (sort keys %headersyms) { + my $sym = $_; + if ((not defined $wikisyms{$sym}) && ($headersymstype{$sym} == $typeval)) { + print $fh "- [$sym]($sym)\n"; + $header_only_sym = 1; + } + } + if (!$header_only_sym) { + print $fh "(none)\n"; + } + print $fh "\n"; + + if (0) { # !!! FIXME: this lists things that _shouldn't_ be in the headers, like MigrationGuide, etc, but also we don't know if they're functions, macros, etc at this point (can we parse that from the wiki page, though?) + print $fh "## $typestr defined in the wiki, but not in the headers\n\n"; + + my $wiki_only_sym = 0; + foreach (sort keys %wikisyms) { + my $sym = $_; + if ((not defined $headersyms{$sym}) && ($headersymstype{$sym} == $typeval)) { + print $fh "- [$sym]($sym)\n"; + $wiki_only_sym = 1; + } + } + if (!$wiki_only_sym) { + print $fh "(none)\n"; + } + print $fh "\n"; + } +} + +# !!! FIXME: generalize this for other libraries to use. 
+sub strip_fn_declaration_metadata { + my $decl = shift; + $decl =~ s/SDL_(PRINTF|SCANF)_FORMAT_STRING\s*//; # don't want this metadata as part of the documentation. + $decl =~ s/SDL_ALLOC_SIZE2?\(.*?\)\s*//; # don't want this metadata as part of the documentation. + $decl =~ s/SDL_MALLOC\s*//; # don't want this metadata as part of the documentation. + $decl =~ s/SDL_(IN|OUT|INOUT)_.*?CAP\s*\(.*?\)\s*//g; # don't want this metadata as part of the documentation. + $decl =~ s/\)(\s*SDL_[a-zA-Z_]+(\(.*?\)|))*;/);/; # don't want this metadata as part of the documentation. + return $decl; +} + +sub sanitize_c_typename { + my $str = shift; + $str =~ s/\A\s+//; + $str =~ s/\s+\Z//; + $str =~ s/const\s*(\*+)/const $1/g; # one space between `const` and pointer stars: `char const* const *` becomes `char const * const *`. + $str =~ s/\*\s+\*/**/g; # drop spaces between pointers: `void * *` becomes `void **`. + $str =~ s/\s*(\*+)\Z/ $1/; # one space between pointer stars and what it points to: `void**` becomes `void **`. + return $str; +} + +my %big_ascii = ( + 'A' => [ "\x{20}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}\x{20}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{255A}\x{2550}\x{255D}\x{20}\x{20}\x{255A}\x{2550}\x{255D}" ], + 'B' => [ "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}\x{20}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}\x{20}" ], + 'C' => [ "\x{20}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}\x{20}\x{20}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}\x{20}\x{20}", "\x{255A}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{20}\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}" ], + 'D' => [ "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}\x{20}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}\x{20}" ], + 'E' => [ "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}\x{20}\x{20}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{255D}\x{20}\x{20}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}" ], + 'F' => [ "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}\x{20}\x{20}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{255D}\x{20}\x{20}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}\x{20}\x{20}", "\x{255A}\x{2550}\x{255D}\x{20}\x{20}\x{20}\x{20}\x{20}" ], + 'G' => [ 
"\x{20}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}\x{20}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}\x{20}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{255A}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{20}\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}\x{20}" ], + 'H' => [ "\x{2588}\x{2588}\x{2557}\x{20}\x{20}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{255A}\x{2550}\x{255D}\x{20}\x{20}\x{255A}\x{2550}\x{255D}" ], + 'I' => [ "\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2551}", "\x{255A}\x{2550}\x{255D}" ], + 'J' => [ "\x{20}\x{20}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2557}", "\x{20}\x{20}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{20}\x{20}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{255A}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{20}\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}\x{20}" ], + 'K' => [ "\x{2588}\x{2588}\x{2557}\x{20}\x{20}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2551}\x{20}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}\x{20}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2588}\x{2588}\x{2557}\x{20}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2550}\x{255D}\x{20}\x{20}\x{255A}\x{2550}\x{255D}" ], + 'L' => [ "\x{2588}\x{2588}\x{2557}\x{20}\x{20}\x{20}\x{20}\x{20}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}\x{20}\x{20}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}\x{20}\x{20}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}\x{20}\x{20}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}" ], + 'M' => [ "\x{2588}\x{2588}\x{2588}\x{2557}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}\x{20}\x{2588}\x{2588}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2554}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2551}\x{255A}\x{2588}\x{2588}\x{2554}\x{255D}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2551}\x{20}\x{255A}\x{2550}\x{255D}\x{20}\x{2588}\x{2588}\x{2551}", "\x{255A}\x{2550}\x{255D}\x{20}\x{20}\x{20}\x{20}\x{20}\x{255A}\x{2550}\x{255D}" ], + 'N' => [ "\x{2588}\x{2588}\x{2588}\x{2557}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2554}\x{2588}\x{2588}\x{2557}\x{20}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2551}\x{255A}\x{2588}\x{2588}\x{2557}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2551}\x{20}\x{255A}\x{2588}\x{2588}\x{2588}\x{2588}\x{2551}", "\x{255A}\x{2550}\x{255D}\x{20}\x{20}\x{255A}\x{2550}\x{2550}\x{2550}\x{255D}" ], + 'O' => [ "\x{20}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}\x{20}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2550}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", 
"\x{255A}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{20}\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}\x{20}" ], + 'P' => [ "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}\x{20}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2550}\x{255D}\x{20}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}\x{20}\x{20}", "\x{255A}\x{2550}\x{255D}\x{20}\x{20}\x{20}\x{20}\x{20}" ], + 'Q' => [ "\x{20}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2550}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2551}\x{2584}\x{2584}\x{20}\x{2588}\x{2588}\x{2551}", "\x{255A}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{20}\x{255A}\x{2550}\x{2550}\x{2580}\x{2580}\x{2550}\x{255D}\x{20}" ], + 'R' => [ "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}\x{20}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{255A}\x{2550}\x{255D}\x{20}\x{20}\x{255A}\x{2550}\x{255D}" ], + 'S' => [ "\x{20}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}\x{20}" ], + 'T' => [ "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2550}\x{2550}\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{255D}", "\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}", "\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}", "\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}", "\x{20}\x{20}\x{20}\x{255A}\x{2550}\x{255D}\x{20}\x{20}\x{20}" ], + 'U' => [ "\x{2588}\x{2588}\x{2557}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{255A}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{20}\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}\x{20}" ], + 'V' => [ "\x{2588}\x{2588}\x{2557}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{255A}\x{2588}\x{2588}\x{2557}\x{20}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{20}\x{255A}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}\x{20}", "\x{20}\x{20}\x{255A}\x{2550}\x{2550}\x{2550}\x{255D}\x{20}\x{20}" ], + 'W' => [ "\x{2588}\x{2588}\x{2557}\x{20}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2551}\x{20}\x{2588}\x{2557}\x{20}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2551}\x{2588}\x{2588}\x{2588}\x{2557}\x{2588}\x{2588}\x{2551}", "\x{255A}\x{2588}\x{2588}\x{2588}\x{2554}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", 
"\x{20}\x{255A}\x{2550}\x{2550}\x{255D}\x{255A}\x{2550}\x{2550}\x{255D}\x{20}" ], + 'X' => [ "\x{2588}\x{2588}\x{2557}\x{20}\x{20}\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2588}\x{2588}\x{2557}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{20}\x{255A}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}\x{20}", "\x{20}\x{2588}\x{2588}\x{2554}\x{2588}\x{2588}\x{2557}\x{20}", "\x{2588}\x{2588}\x{2554}\x{255D}\x{20}\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2550}\x{255D}\x{20}\x{20}\x{255A}\x{2550}\x{255D}" ], + 'Y' => [ "\x{2588}\x{2588}\x{2557}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2588}\x{2588}\x{2557}\x{20}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{20}\x{255A}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}\x{20}", "\x{20}\x{20}\x{255A}\x{2588}\x{2588}\x{2554}\x{255D}\x{20}\x{20}", "\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{20}", "\x{20}\x{20}\x{20}\x{255A}\x{2550}\x{255D}\x{20}\x{20}\x{20}" ], + 'Z' => [ "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2550}\x{2550}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{20}\x{20}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}\x{20}", "\x{20}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}\x{20}\x{20}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}" ], + ' ' => [ "\x{20}\x{20}\x{20}\x{20}", "\x{20}\x{20}\x{20}\x{20}", "\x{20}\x{20}\x{20}\x{20}", "\x{20}\x{20}\x{20}\x{20}", "\x{20}\x{20}\x{20}\x{20}", "\x{20}\x{20}\x{20}\x{20}" ], + '.' => [ "\x{20}\x{20}\x{20}", "\x{20}\x{20}\x{20}", "\x{20}\x{20}\x{20}", "\x{20}\x{20}\x{20}", "\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2550}\x{255D}" ], + ',' => [ "\x{20}\x{20}\x{20}", "\x{20}\x{20}\x{20}", "\x{20}\x{20}\x{20}", "\x{20}\x{20}\x{20}", "\x{2584}\x{2588}\x{2557}", "\x{255A}\x{2550}\x{255D}" ], + '/' => [ "\x{20}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2557}", "\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{20}\x{20}\x{2588}\x{2588}\x{2554}\x{255D}\x{20}", "\x{20}\x{2588}\x{2588}\x{2554}\x{255D}\x{20}\x{20}", "\x{2588}\x{2588}\x{2554}\x{255D}\x{20}\x{20}\x{20}", "\x{255A}\x{2550}\x{255D}\x{20}\x{20}\x{20}\x{20}" ], + '!' 
=> [ "\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2551}", "\x{255A}\x{2550}\x{255D}", "\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2550}\x{255D}" ], + '_' => [ "\x{20}\x{20}\x{20}\x{20}\x{20}\x{20}\x{20}\x{20}", "\x{20}\x{20}\x{20}\x{20}\x{20}\x{20}\x{20}\x{20}", "\x{20}\x{20}\x{20}\x{20}\x{20}\x{20}\x{20}\x{20}", "\x{20}\x{20}\x{20}\x{20}\x{20}\x{20}\x{20}\x{20}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}" ], + '0' => [ "\x{20}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}\x{20}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2551}\x{2588}\x{2588}\x{2554}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}\x{2588}\x{2588}\x{2551}", "\x{255A}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{20}\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}\x{20}" ], + '1' => [ "\x{20}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2588}\x{2551}", "\x{255A}\x{2588}\x{2588}\x{2551}", "\x{20}\x{2588}\x{2588}\x{2551}", "\x{20}\x{2588}\x{2588}\x{2551}", "\x{20}\x{255A}\x{2550}\x{255D}" ], + '2' => [ "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}\x{20}", "\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2588}\x{2588}\x{2557}", "\x{20}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2550}\x{255D}\x{20}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}" ], + '3' => [ "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}\x{20}", "\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2588}\x{2588}\x{2557}", "\x{20}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{20}\x{255A}\x{2550}\x{2550}\x{2550}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}\x{20}" ], + '4' => [ "\x{2588}\x{2588}\x{2557}\x{20}\x{20}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2551}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2551}", "\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2588}\x{2588}\x{2551}", "\x{20}\x{20}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2551}", "\x{20}\x{20}\x{20}\x{20}\x{20}\x{255A}\x{2550}\x{255D}" ], + '5' => [ "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2588}\x{2588}\x{2551}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2551}", "\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}" ], + '6' => [ "\x{20}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}\x{20}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}\x{20}", "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}\x{20}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2550}\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{20}\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}\x{20}" ], + '7' => [ "\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{2588}\x{2588}\x{2551}", "\x{20}\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2554}\x{255D}", 
"\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2554}\x{255D}\x{20}", "\x{20}\x{20}\x{20}\x{2588}\x{2588}\x{2551}\x{20}\x{20}", "\x{20}\x{20}\x{20}\x{255A}\x{2550}\x{255D}\x{20}\x{20}" ], + '8' => [ "\x{20}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}\x{20}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{20}\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}\x{20}" ], + '9' => [ "\x{20}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2557}\x{20}", "\x{2588}\x{2588}\x{2554}\x{2550}\x{2550}\x{2588}\x{2588}\x{2557}", "\x{255A}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2551}", "\x{20}\x{255A}\x{2550}\x{2550}\x{2550}\x{2588}\x{2588}\x{2551}", "\x{20}\x{2588}\x{2588}\x{2588}\x{2588}\x{2588}\x{2554}\x{255D}", "\x{20}\x{255A}\x{2550}\x{2550}\x{2550}\x{2550}\x{255D}\x{20}" ], +); + +sub print_big_ascii_string { + my $fh = shift; + my $str = shift; + my $comment = shift; + my $lowascii = shift; + $comment = '' if not defined $comment; + $lowascii = 0 if not defined $lowascii; + + my @chars = split //, $str; + my $charcount = scalar(@chars); + + binmode($fh, ":utf8"); + + my $maxrows = $lowascii ? 5 : 6; + + for(my $rownum = 0; $rownum < $maxrows; $rownum++){ + print $fh $comment; + my $charidx = 0; + foreach my $ch (@chars) { + my $rowsref = $big_ascii{uc($ch)}; + die("Don't have a big ascii entry for '$ch'!\n") if not defined $rowsref; + my $row = @$rowsref[$rownum]; + + my $outstr = ''; + if ($lowascii) { + my @x = split //, $row; + foreach (@x) { + $outstr .= ($_ eq "\x{2588}") ? 'X' : ' '; + } + } else { + $outstr = $row; + } + + $charidx++; + if ($charidx == $charcount) { + $outstr =~ s/\s*\Z//; # dump extra spaces at the end of the line. + } else { + $outstr .= ' '; # space between glyphs. + } + print $fh $outstr; + } + print $fh "\n"; + } +} + +sub generate_quickref { + my $briefsref = shift; + my $path = shift; + my $lowascii = shift; + + # !!! FIXME: this gitrev and majorver/etc stuff is copy/pasted a few times now. + if (!$gitrev) { + $gitrev = `cd "$srcpath" ; git rev-list HEAD~..`; + chomp($gitrev); + } + + # !!! FIXME + open(FH, '<', "$srcpath/$versionfname") or die("Can't open '$srcpath/$versionfname': $!\n"); + my $majorver = 0; + my $minorver = 0; + my $microver = 0; + while () { + chomp; + if (/$versionmajorregex/) { + $majorver = int($1); + } elsif (/$versionminorregex/) { + $minorver = int($1); + } elsif (/$versionmicroregex/) { + $microver = int($1); + } + } + close(FH); + my $fullversion = "$majorver.$minorver.$microver"; + + my $tmppath = "$path.tmp"; + open(my $fh, '>', $tmppath) or die("Can't open '$tmppath': $!\n"); + + if (not @quickrefcategoryorder) { + @quickrefcategoryorder = sort keys %headercategorydocs; + } + + #my ($sec,$min,$hour,$mday,$mon,$year,$wday,$yday,$isdst) = gmtime(time); + #my $datestr = sprintf("%04d-%02d-%02d %02d:%02d:%02d GMT", $year+1900, $mon+1, $mday, $hour, $min, $sec); + + print $fh "\n\n"; + + # Just something to test big_ascii output. 
+ #print_big_ascii_string($fh, "ABCDEFGHIJ", '', $lowascii); + #print_big_ascii_string($fh, "KLMNOPQRST", '', $lowascii); + #print_big_ascii_string($fh, "UVWXYZ0123", '', $lowascii); + #print_big_ascii_string($fh, "456789JT3A", '', $lowascii); + #print_big_ascii_string($fh, "hello, _a.b/c_!!", '', $lowascii); + + # Dan Bechard's work was on an SDL2 cheatsheet: + # https://blog.theprogrammingjunkie.com/post/sdl2-cheatsheet/ + + if ($lowascii) { + print $fh "# QuickReferenceNoUnicode\n\n"; + print $fh "If you want to paste this into a text editor that can handle\n"; + print $fh "fancy Unicode section headers, try using\n"; + print $fh "[QuickReference](QuickReference) instead.\n\n"; + } else { + print $fh "# QuickReference\n\n"; + print $fh "If you want to paste this into a text editor that can't handle\n"; + print $fh "the fancy Unicode section headers, try using\n"; + print $fh "[QuickReferenceNoUnicode](QuickReferenceNoUnicode) instead.\n\n"; + } + + print $fh "```c\n"; + print $fh "// $quickreftitle\n" if defined $quickreftitle; + print $fh "//\n"; + print $fh "// $quickrefurl\n//\n" if defined $quickrefurl; + print $fh "// $quickrefdesc\n" if defined $quickrefdesc; + #print $fh "// When this document was written: $datestr\n"; + print $fh "// Based on $projectshortname version $fullversion\n"; + #print $fh "// git revision $gitrev\n"; + print $fh "//\n"; + print $fh "// This can be useful in an IDE with search and syntax highlighting.\n"; + print $fh "//\n"; + print $fh "// Original idea for this document came from Dan Bechard (thanks!)\n"; + print $fh "// ASCII art generated by: https://patorjk.com/software/taag/#p=display&f=ANSI%20Shadow (with modified 'S' for readability)\n\n"; + + foreach (@quickrefcategoryorder) { + my $cat = $_; + my $maxlen = 0; + my @csigs = (); + my $funcorderref = $quickreffuncorder{$cat}; + next if not defined $funcorderref; + + foreach (@$funcorderref) { + my $sym = $_; + my $csig = ''; + + if ($headersymstype{$sym} == 1) { # function + $csig = "${headersymsrettype{$sym}} $sym"; + my $fnsigparams = $headersymsparaminfo{$sym}; + if (not defined($fnsigparams)) { + $csig .= '(void);'; + } else { + my $sep = '('; + for (my $i = 0; $i < scalar(@$fnsigparams); $i += 2) { + my $paramname = @$fnsigparams[$i]; + my $paramtype = @$fnsigparams[$i+1]; + my $spc = ($paramtype =~ /\*\Z/) ? '' : ' '; + $csig .= "$sep$paramtype$spc$paramname"; + $sep = ', '; + } + $csig .= ");"; + } + } elsif ($headersymstype{$sym} == 2) { # macro + next if defined $quickrefmacroregex && not $sym =~ /$quickrefmacroregex/; + + $csig = (split /\n/, $headerdecls{$sym})[0]; # get the first line from a multiline string. + if (not $csig =~ s/\A(\#define [a-zA-Z0-9_]*\(.*?\))(\s+.*)?\Z/$1/) { + $csig =~ s/\A(\#define [a-zA-Z0-9_]*)(\s+.*)?\Z/$1/; + } + chomp($csig); + } + + my $len = length($csig); + $maxlen = $len if $len > $maxlen; + + push @csigs, $sym; + push @csigs, $csig; + } + + $maxlen += 2; + + next if (not @csigs); + + print $fh "\n"; + print_big_ascii_string($fh, $cat, '// ', $lowascii); + print $fh "\n"; + + while (@csigs) { + my $sym = shift @csigs; + my $csig = shift @csigs; + my $brief = $$briefsref{$sym}; + if (defined $brief) { + $brief = "$brief"; + chomp($brief); + my $thiswikitype = defined $wikitypes{$sym} ? $wikitypes{$sym} : 'md'; # default to MarkDown for new stuff. 
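+                    # Dewikify the brief, then left-pad it so each "// brief" comment lands in one column past the longest signature; e.g. (illustrative symbols): + #   void SDL_DoThing(int x);    // Does the thing. + #   bool SDL_QueryThing(void);  // Reports whether the thing is active.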
+ $brief = dewikify($thiswikitype, $brief); + my $spaces = ' ' x ($maxlen - length($csig)); + $brief = "$spaces// $brief"; + } else { + $brief = ''; + } + print $fh "$csig$brief\n"; + } + } + + print $fh "```\n\n"; + + close($fh); + +# # Don't overwrite the file if nothing has changed besides the timestamp +# # and git revision. +# my $matches = 1; +# if ( not -f $path ) { +# $matches = 0; # always write if the file hasn't been created yet. +# } else { +# open(my $fh_a, '<', $tmppath) or die("Can't open '$tmppath': $!\n"); +# open(my $fh_b, '<', $path) or die("Can't open '$path': $!\n"); +# while (1) { +# my $a = <$fh_a>; +# my $b = <$fh_b>; +# $matches = 0, last if ((not defined $a) != (not defined $b)); +# last if ((not defined $a) || (not defined $b)); +# if ($a ne $b) { +# next if ($a =~ /\A\/\/ When this document was written:/); +# next if ($a =~ /\A\/\/ git revision /); +# $matches = 0; +# last; +# } +# } +# +# close($fh_a); +# close($fh_b); +# } +# +# if ($matches) { +# unlink($tmppath); # it's the same file except maybe the date/gitrev. Don't overwrite it. +# } else { +# rename($tmppath, $path) or die("Can't rename '$tmppath' to '$path': $!\n"); +# } + rename($tmppath, $path) or die("Can't rename '$tmppath' to '$path': $!\n"); +} + + +sub generate_envvar_wiki_page { + my $briefsref = shift; + my $path = shift; + + return if not $envvarenabled or not defined $envvarsymregex or not defined $envvarsymreplace; + + my $replace = "\"$envvarsymreplace\""; + my $tmppath = "$path.tmp"; + open(my $fh, '>', $tmppath) or die("Can't open '$tmppath': $!\n"); + + print $fh "\n\n"; + print $fh "# $envvartitle\n\n"; + + if (defined $envvardesc) { + my $desc = "$envvardesc"; + $desc =~ s/\\n/\n/g; # replace "\n" strings with actual newlines. + print $fh "$desc\n\n"; + } + + print $fh "## Environment Variable List\n\n"; + + foreach (sort keys %headersyms) { + my $sym = $_; + next if $headersymstype{$sym} != 2; # not a #define? skip it. + my $hint = "$_"; + next if not $hint =~ s/$envvarsymregex/$replace/ee; + + my $brief = $$briefsref{$sym}; + if (not defined $brief) { + $brief = ''; + } else { + $brief = "$brief"; + chomp($brief); + my $thiswikitype = defined $wikitypes{$sym} ? $wikitypes{$sym} : 'md'; # default to MarkDown for new stuff. + $brief = ": " . dewikify($thiswikitype, $brief); + } + print $fh "- [$hint]($sym)$brief\n"; + } + + print $fh "\n"; + + close($fh); + + rename($tmppath, $path) or die("Can't rename '$tmppath' to '$path': $!\n"); +} + + + + +my $incpath = "$srcpath"; +$incpath .= "/$incsubdir" if $incsubdir ne ''; + +my $readmepath = undef; +if (defined $readmesubdir) { + $readmepath = "$srcpath/$readmesubdir"; +} + +opendir(DH, $incpath) or die("Can't opendir '$incpath': $!\n"); +while (my $d = readdir(DH)) { + my $dent = $d; + next if not $dent =~ /$selectheaderregex/; # just selected headers. + open(FH, '<', "$incpath/$dent") or die("Can't open '$incpath/$dent': $!\n"); + + # You can optionally set a wiki category with Perl code in .wikiheaders-options that gets eval()'d per-header, + # and also if you put `/* WIKI CATEGORY: blah */` on a line by itself, it'll change the category for any symbols + # below it in the same file. 
If no category is set, one won't be added for the symbol (beyond the standard CategoryFunction, etc) + my $current_wiki_category = undef; + if (defined $headercategoryeval) { + $_ = $dent; + $current_wiki_category = eval($headercategoryeval); + if (($current_wiki_category eq '') || ($current_wiki_category eq '-')) { + $current_wiki_category = undef; + } + #print("CATEGORY FOR '$dent' IS " . (defined($current_wiki_category) ? "'$current_wiki_category'" : '(undef)') . "\n"); + } + + my @contents = (); + my @function_order = (); + my $ignoring_lines = 0; + my $header_comment = -1; + my $saw_category_doxygen = -1; + my $lineno = 0; + + while (<FH>) { + chomp; + $lineno++; + my $symtype = 0; # nothing, yet. + my $decl; + my @templines; + my $str; + my $has_doxygen = 1; + + # Since a lot of macros are just preprocessor logic spam and not all macros are worth documenting anyhow, we only pay attention to them when they have a Doxygen comment attached. + # Functions and other things are a different story, though! + + if ($header_comment == -1) { + $header_comment = /\A\/\*\s*\Z/ ? 1 : 0; + } elsif (($header_comment == 1) && (/\A\*\/\s*\Z/)) { + $header_comment = 0; + } + + if ($ignoring_lines && /\A\s*\#\s*endif\s*\Z/) { + $ignoring_lines = 0; + push @contents, $_; + next; + } elsif ($ignoring_lines) { + push @contents, $_; + next; + } elsif (/\A\s*\#\s*ifndef\s+$wikidocsectionsym\s*\Z/) { + $ignoring_lines = 1; + push @contents, $_; + next; + } elsif (/\A\s*\/\*\s*WIKI CATEGORY:\s*(.*?)\s*\*\/\s*\Z/) { + $current_wiki_category = (($1 eq '') || ($1 eq '-')) ? undef : $1; + #print("CATEGORY FOR '$dent' CHANGED TO " . (defined($current_wiki_category) ? "'$current_wiki_category'" : '(undef)') . "\n"); + push @contents, $_; + next; + } elsif (/\A\s*extern\s+(?:$deprecatedsym\s+|)$declspecsym/) { # a function declaration without a doxygen comment? + $symtype = 1; # function declaration + @templines = (); + $decl = $_; + $str = ''; + $has_doxygen = 0; + } elsif (/\A\s*$forceinlinesym/) { # a (forced-inline) function declaration without a doxygen comment? + $symtype = 1; # function declaration + @templines = (); + $decl = $_; + $str = ''; + $has_doxygen = 0; + } elsif (not /\A\/\*\*\s*\Z/) { # not doxygen comment start? + push @contents, $_; + add_coverage_gap($_, $dent, $lineno) if ($header_comment == 0); + next; + } else { # Start of a doxygen comment, parse it out. + my $is_category_doxygen = 0; + + @templines = ( $_ ); + while (<FH>) { + chomp; + $lineno++; + push @templines, $_; + last if /\A\s*\*\/\Z/; + if (s/\A\s*\*\s*\`\`\`/```/) { # this is a hack, but a lot of other code relies on the whitespace being trimmed, but we can't trim it in code blocks... + $str .= "$_\n"; + while (<FH>) { + chomp; + $lineno++; + push @templines, $_; + s/\A\s*\*\s?//; + if (s/\A\s*\`\`\`/```/) { + $str .= "$_\n"; + last; + } else { + $str .= "$_\n"; + } + } + } else { + s/\A\s*\*\s*//; # Strip off the " * " at the start of the comment line. + + # To add documentation to Category Pages, the rule is it has to + # be the first Doxygen comment in the header, and it must start with `# CategoryX` + # (otherwise we'll treat it as documentation for whatever's below it). `X` is + # the category name, which doesn't _necessarily_ have to match + # $current_wiki_category, but it probably should. + # + # For compatibility with Doxygen, if there's a `\file` here instead of + # `# CategoryName`, we'll accept it and use the $current_wiki_category if set.
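+                # For example (hypothetical header snippet), a header whose first + # Doxygen comment is: + # + #   /** + #    * # CategoryAudio + #    * + #    * Audio functionality for the library. + #    */ + # + # ...has that comment used as the CategoryAudio page documentation, and a + # Doxygen-style \file comment in the same position is treated the same way + # when $current_wiki_category is set.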
+ if ($saw_category_doxygen == -1) { + $saw_category_doxygen = defined($current_wiki_category) && /\A\\file\s+/; + if ($saw_category_doxygen) { + $_ = "# Category$current_wiki_category"; + } else { + $saw_category_doxygen = /\A\# Category/; + } + $is_category_doxygen = $saw_category_doxygen; + } + + $str .= "$_\n"; + } + } + + if ($is_category_doxygen) { + $str =~ s/\s*\Z//; + $decl = ''; + $symtype = -1; # not a symbol at all. + } else { + $decl = <FH>; + $lineno++ if defined $decl; + $decl = '' if not defined $decl; + chomp($decl); + if ($decl =~ /\A\s*extern\s+(?:$deprecatedsym\s+|)$declspecsym/) { + $symtype = 1; # function declaration + } elsif ($decl =~ /\A\s*$forceinlinesym/) { + $symtype = 1; # (forced-inline) function declaration + } elsif ($decl =~ /\A\s*\#\s*define\s+/) { + $symtype = 2; # macro + } elsif ($decl =~ /\A\s*(typedef\s+|)(struct|union)\s*([a-zA-Z0-9_]*?)\s*(\n|\{|\Z)/) { + $symtype = 3; # struct or union + } elsif ($decl =~ /\A\s*(typedef\s+|)enum\s*([a-zA-Z0-9_]*?)\s*(\n|\{|\Z)/) { + $symtype = 4; # enum + } elsif ($decl =~ /\A\s*typedef\s+.*\Z/) { + $symtype = 5; # other typedef + } else { + #print "Found doxygen but no function sig:\n$str\n\n"; + foreach (@templines) { + push @contents, $_; + add_coverage_gap($_, $dent, $lineno); + } + push @contents, $decl; + add_coverage_gap($decl, $dent, $lineno); + next; + } + } + } + + my @paraminfo = (); + my $rettype = undef; + my @decllines = ( $decl ); + my $sym = ''; + + if ($symtype == -1) { # category documentation with no symbol attached. + @decllines = (); + if ($str =~ /^#\s*Category(.*?)\s*$/m) { + $sym = "[category documentation] $1"; # make a fake, unique symbol that's not valid C. + } else { + die("Unexpected category documentation line '$str' in '$incpath/$dent' ...?"); + } + $headercategorydocs{$current_wiki_category} = $sym; + } elsif ($symtype == 1) { # a function + my $is_forced_inline = ($decl =~ /\A\s*$forceinlinesym/); + + if ($is_forced_inline) { + if (not $decl =~ /\)\s*(\{.*|)\s*\Z/) { + while (<FH>) { + chomp; + $lineno++; + push @decllines, $_; + s/\A\s+//; + s/\s+\Z//; + $decl .= " $_"; + last if /\)\s*(\{.*|)\s*\Z/; + } + } + $decl =~ s/\s*\)\s*(\{.*|)\s*\Z/);/; + } else { + if (not $decl =~ /;/) { + while (<FH>) { + chomp; + $lineno++; + push @decllines, $_; + s/\A\s+//; + s/\s+\Z//; + $decl .= " $_"; + last if /;/; + } + } + $decl =~ s/\s+\);\Z/);/; + $decl =~ s/\s+;\Z/;/; + } + + $decl =~ s/\s+\Z//; + + $decl = strip_fn_declaration_metadata($decl); + + my $paramsstr = undef; + + if (!$is_forced_inline && $decl =~ /\A\s*extern\s+(?:$deprecatedsym\s+|)$declspecsym\w*\s+(const\s+|)(unsigned\s+|)(.*?)([\*\s]+)(\*?)\s*$callconvsym\s+(.*?)\s*\((.*?)\);/) { + $sym = $6; + $rettype = "$1$2$3$4$5"; + $paramsstr = $7; + } elsif ($is_forced_inline && $decl =~ /\A\s*$forceinlinesym\s+(?:$deprecatedsym\s+|)(const\s+|)(unsigned\s+|)(.*?)([\*\s]+)(.*?)\s*\((.*?)\);/) { + $sym = $5; + $rettype = "$1$2$3$4"; + $paramsstr = $6; + } else { + #print "Found doxygen but no function sig:\n$str\n\n"; + foreach (@templines) { + push @contents, $_; + } + foreach (@decllines) { + push @contents, $_; + } + next; + } + + $rettype = sanitize_c_typename($rettype); + + if ($paramsstr =~ /\(/) { + die("\n\n$0 FAILURE!\n" . + "There's a '(' in the parameters for function '$sym' '$incpath/$dent'.\n" . + "This usually means there's a parameter that's a function pointer type.\n" . + "This causes problems for wikiheaders.pl and is less readable, too.\n" . + "Please put that function pointer into a typedef,\n" .
+ "and use the new type in this function's signature instead!\n\n"); + } + + my @params = split(/,/, $paramsstr); + my $dotdotdot = 0; + foreach (@params) { + my $p = $_; + $p =~ s/\A\s+//; + $p =~ s/\s+\Z//; + if (($p eq 'void') || ($p eq '')) { + die("Void parameter in a function with multiple params?! ('$sym' in '$incpath/$dent')") if (scalar(@params) != 1); + } elsif ($p eq '...') { + die("Mutiple '...' params?! ('$sym' in '$incpath/$dent')") if ($dotdotdot); + $dotdotdot = 1; + push @paraminfo, '...'; + push @paraminfo, '...'; + } elsif ($p =~ /\A(.*)\s+([a-zA-Z0-9_\*\[\]]+)\Z/) { + die("Parameter after '...' param?! ('$sym' in '$incpath/$dent')") if ($dotdotdot); + my $t = $1; + my $n = $2; + if ($n =~ s/\A(\*+)//) { + $t .= $1; # move any `*` that stuck to the name over. + } + if ($n =~ s/\[\]\Z//) { + $t = "$t*"; # move any `[]` that stuck to the name over, as a pointer. + } + $t = sanitize_c_typename($t); + #print("$t\n"); + #print("$n\n"); + push @paraminfo, $n; + push @paraminfo, $t; + } else { + die("Unexpected parameter '$p' in function '$sym' in '$incpath/$dent'!"); + } + } + + if (!$is_forced_inline) { # don't do with forced-inline because we don't want the implementation inserted in the wiki. + my $shrink_length = 0; + + $decl = ''; # rebuild this with the line breaks, since it looks better for syntax highlighting. + foreach (@decllines) { + if ($decl eq '') { + my $temp; + + $decl = $_; + $temp = $decl; + $temp =~ s/\Aextern\s+(?:$deprecatedsym\s+|)$declspecsym\w*\s+(.*?)\s+(\*?)$callconvsym\s+/$1$2 /; + $shrink_length = length($decl) - length($temp); + $decl = $temp; + } else { + my $trimmed = $_; + $trimmed =~ s/\A\s{$shrink_length}//; # shrink to match the removed "extern SDL_DECLSPEC SDLCALL " + $decl .= $trimmed; + } + $decl .= "\n"; + } + } + + $decl = strip_fn_declaration_metadata($decl); + + # !!! FIXME: code duplication with typedef processing, below. + # We assume any `#define`s directly after the function are related to it: probably bitflags for an integer typedef. + # We'll also allow some other basic preprocessor lines. + # Blank lines are allowed, anything else, even comments, are not. + my $blank_lines = 0; + my $lastpos = tell(FH); + my $lastlineno = $lineno; + my $additional_decl = ''; + my $saw_define = 0; + while () { + chomp; + + $lineno++; + + if (/\A\s*\Z/) { + $blank_lines++; + } elsif (/\A\s*\#\s*(define|if|else|elif|endif)(\s+|\Z)/) { + if (/\A\s*\#\s*define\s+([a-zA-Z0-9_]*)/) { + $referenceonly{$1} = $sym; + $saw_define = 1; + } elsif (!$saw_define) { + # if the first non-blank thing isn't a #define, assume we're done. + seek(FH, $lastpos, 0); # re-read eaten lines again next time. + $lineno = $lastlineno; + last; + } + + # update strings now that we know everything pending is to be applied to this declaration. Add pending blank lines and the new text. + + # At Sam's request, don't list property defines with functions. (See #9440) + my $is_property = (defined $apipropertyregex) ? /$apipropertyregex/ : 0; + if (!$is_property) { + if ($blank_lines > 0) { + while ($blank_lines > 0) { + $additional_decl .= "\n"; + push @decllines, ''; + $blank_lines--; + } + } + $additional_decl .= "\n$_"; + push @decllines, $_; + $lastpos = tell(FH); + } + } else { + seek(FH, $lastpos, 0); # re-read eaten lines again next time. 
+ $lineno = $lastlineno; + last; + } + } + $decl .= $additional_decl; + } elsif ($symtype == 2) { # a macro + if ($decl =~ /\A\s*\#\s*define\s+(.*?)(\(.*?\)|)(\s+|\Z)/) { + $sym = $1; + } else { + #print "Found doxygen but no macro:\n$str\n\n"; + foreach (@templines) { + push @contents, $_; + } + foreach (@decllines) { + push @contents, $_; + } + next; + } + + while ($decl =~ /\\\Z/) { + my $l = <FH>; + last if not $l; + $lineno++; + chomp($l); + push @decllines, $l; + #$l =~ s/\A\s+//; + $l =~ s/\s+\Z//; + $decl .= "\n$l"; + } + } elsif (($symtype == 3) || ($symtype == 4)) { # struct or union or enum + my $has_definition = 0; + if ($decl =~ /\A\s*(typedef\s+|)(struct|union|enum)\s*([a-zA-Z0-9_]*?)\s*(\n|\{|\;|\Z)/) { + my $ctype = $2; + my $origsym = $3; + my $ending = $4; + $sym = $origsym; + if ($sym =~ s/\A(.*?)(\s+)(.*?)\Z/$1/) { + die("Failed to parse '$origsym' correctly!") if ($sym ne $1); # Thought this was "typedef struct MySym MySym;" ... it was not. :( This is a hack! + } + if ($sym eq '') { + die("\n\n$0 FAILURE!\n" . + "There's a 'typedef $ctype' in $incpath/$dent without a name at the top.\n" . + "Instead of `typedef $ctype {} x;`, this should be `typedef $ctype x {} x;`.\n" . + "This causes problems for wikiheaders.pl and scripting language bindings.\n" . + "Please fix it!\n\n"); + } + $has_definition = ($ending ne ';'); + } else { + #print "Found doxygen but no datatype:\n$str\n\n"; + foreach (@templines) { + push @contents, $_; + } + foreach (@decllines) { + push @contents, $_; + } + next; + } + + # This block attempts to find the whole struct/union/enum definition by counting matching brackets. Kind of yucky. + # It also "parses" enums enough to find out the elements of it. + if ($has_definition) { + my $started = 0; + my $brackets = 0; + my $pending = $decl; + my $skipping_comment = 0; + + $decl = ''; + while (!$started || ($brackets != 0)) { + foreach my $seg (split(/([{}])/, $pending)) { # (this will pick up brackets in comments! Be careful!) + $decl .= $seg; + if ($seg eq '{') { + $started = 1; + $brackets++; + } elsif ($seg eq '}') { + die("Something is wrong with header $incpath/$dent while parsing $sym; is a bracket missing?\n") if ($brackets <= 0); + $brackets--; + } + } + + if ($skipping_comment) { + if ($pending =~ s/\A.*?\*\///) { + $skipping_comment = 0; + } + } + + if (!$skipping_comment && $started && ($symtype == 4)) { # Pick out elements of an enum. + my $stripped = "$pending"; + $stripped =~ s/\/\*.*?\*\///g; # dump /* comments */ that exist fully on one line. + if ($stripped =~ /\/\*/) { # uhoh, a /* comment */ that crosses newlines. + $skipping_comment = 1; + } elsif ($stripped =~ /\A\s*([a-zA-Z0-9_]+)(.*)\Z/) { #\s*(\=\s*.*?|)\s*,?(.*?)\Z/) { + if ($1 ne 'typedef') { # make sure we didn't just eat the first line by accident. :/ + #print("ENUM [$1] $incpath/$dent:$lineno\n"); + $referenceonly{$1} = $sym; + } + } + } + + if (!$started || ($brackets != 0)) { + $pending = <FH>; + die("EOF/error reading $incpath/$dent while parsing $sym\n") if not $pending; + $lineno++; + chomp($pending); + push @decllines, $pending; + $decl .= "\n"; + } + } + # this currently assumes the struct/union/enum ends on the line with the final bracket. I'm not writing a C parser here, fix the header!
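+            # For example (illustrative), a one-line definition like + #   typedef struct MyPoint { int x; int y; } MyPoint; + # counts fine, but a stray '{' or '}' inside a comment within the definition, + # or a final bracket that shares its line with the start of something else, + # can throw the count off.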
+ } + } elsif ($symtype == 5) { # other typedef + if ($decl =~ /\A\s*typedef\s+(.*)\Z/) { + my $tdstr = $1; + + if (not $decl =~ /;/) { + while (<FH>) { + chomp; + $lineno++; + push @decllines, $_; + s/\A\s+//; + s/\s+\Z//; + $decl .= " $_"; + last if /;/; + } + } + $decl =~ s/\s+(\))?;\Z/$1;/; + + $tdstr =~ s/;\s*\Z//; + + #my $datatype; + if ($tdstr =~ /\A(.*?)\s*\((.*?)\s*\*\s*(.*?)\)\s*\((.*?)(\))?/) { # a function pointer type + $sym = $3; + #$datatype = "$1 ($2 *$sym)($4)"; + } elsif ($tdstr =~ /\A(.*[\s\*]+)(.*?)\s*\Z/) { + $sym = $2; + #$datatype = $1; + } else { + die("Failed to parse typedef '$tdstr' in $incpath/$dent!\n"); # I'm hitting a C grammar nail with a regexp hammer here, y'all. + } + + $sym =~ s/\A\s+//; + $sym =~ s/\s+\Z//; + #$datatype =~ s/\A\s+//; + #$datatype =~ s/\s+\Z//; + } else { + #print "Found doxygen but no datatype:\n$str\n\n"; + foreach (@templines) { + push @contents, $_; + } + foreach (@decllines) { + push @contents, $_; + } + next; + } + + # We assume any `#define`s directly after the typedef are related to it: probably bitflags for an integer typedef. + # We'll also allow some other basic preprocessor lines. + # Blank lines are allowed, anything else, even comments, are not. + my $blank_lines = 0; + my $lastpos = tell(FH); + my $lastlineno = $lineno; + my $additional_decl = ''; + my $saw_define = 0; + while (<FH>) { + chomp; + + $lineno++; + + if (/\A\s*\Z/) { + $blank_lines++; + } elsif (/\A\s*\#\s*(define|if|else|elif|endif)(\s+|\Z)/) { + if (/\A\s*\#\s*define\s+([a-zA-Z0-9_]*)/) { + $referenceonly{$1} = $sym; + $saw_define = 1; + } elsif (!$saw_define) { + # if the first non-blank thing isn't a #define, assume we're done. + seek(FH, $lastpos, 0); # re-read eaten lines again next time. + $lineno = $lastlineno; + last; + } + # update strings now that we know everything pending is to be applied to this declaration. Add pending blank lines and the new text. + if ($blank_lines > 0) { + while ($blank_lines > 0) { + $additional_decl .= "\n"; + push @decllines, ''; + $blank_lines--; + } + } + $additional_decl .= "\n$_"; + push @decllines, $_; + $lastpos = tell(FH); + } else { + seek(FH, $lastpos, 0); # re-read eaten lines again next time. + $lineno = $lastlineno; + last; + } + } + $decl .= $additional_decl; + } else { + die("Unexpected symtype $symtype"); + } + + #print("DECL: [$decl]\n"); + + #print("$sym:\n$str\n\n"); + + # There might be multiple declarations of a function due to #ifdefs, + # and only one of them will have documentation. If we hit an + # undocumented one before, delete the placeholder line we left for + # it so it doesn't accumulate a new blank line on each run. + my $skipsym = 0; + if (defined $headersymshasdoxygen{$sym}) { + if ($headersymshasdoxygen{$sym} == 0) { # An undocumented declaration already exists, nuke its placeholder line. + delete $contents[$headersymschunk{$sym}]; # delete DOES NOT RENUMBER existing elements! + } else { # documented function already existed? + $skipsym = 1; # don't add this copy to the list of functions. + if ($has_doxygen) { + print STDERR "WARNING: Symbol '$sym' appears to be documented in multiple locations. Only keeping the first one we saw!\n"; + } + push @contents, join("\n", @decllines) if (scalar(@decllines) > 0); # just put the existing declaration in as-is.
+ } + } + + if (!$skipsym) { + $headersymscategory{$sym} = $current_wiki_category if defined $current_wiki_category; + $headersyms{$sym} = $str; + $headerdecls{$sym} = $decl; + $headersymslocation{$sym} = $dent; + $headersymschunk{$sym} = scalar(@contents); + $headersymshasdoxygen{$sym} = $has_doxygen; + $headersymstype{$sym} = $symtype; + $headersymsparaminfo{$sym} = \@paraminfo if (scalar(@paraminfo) > 0); + $headersymsrettype{$sym} = $rettype if (defined($rettype)); + push @function_order, $sym if ($symtype == 1) || ($symtype == 2); + push @contents, join("\n", @templines); + push @contents, join("\n", @decllines) if (scalar(@decllines) > 0); + } + + } + close(FH); + + $headers{$dent} = \@contents; + $quickreffuncorder{$current_wiki_category} = \@function_order if defined $current_wiki_category; +} +closedir(DH); + + +opendir(DH, $wikipath) or die("Can't opendir '$wikipath': $!\n"); +while (my $d = readdir(DH)) { + my $dent = $d; + my $type = ''; + if ($dent =~ /\.(md|mediawiki)\Z/) { + $type = $1; + } else { + next; # only dealing with wiki pages. + } + + my $sym = $dent; + $sym =~ s/\..*\Z//; + + # (There are other pages to ignore, but these are known ones to not bother parsing.) + # Ignore FrontPage. + next if $sym eq 'FrontPage'; + + open(FH, '<', "$wikipath/$dent") or die("Can't open '$wikipath/$dent': $!\n"); + + if ($sym =~ /\ACategory(.*?)\Z/) { # Special case for Category pages. + # Find the end of the category documentation in the existing file and append everything else to the new file. + my $cat = $1; + my $docstr = ''; + my $notdocstr = ''; + my $docs = 1; + while (<FH>) { + chomp; + if ($docs) { + $docs = 0 if /\A\-\-\-\-\Z/; # Hit a footer? We're done. + $docs = 0 if /\A<!\-\-/; # Hit an HTML comment? We're done. + } + if ($docs) { + $docstr .= "$_\n"; + } else { + $notdocstr .= "$_\n"; + } + } + close(FH); + + $docstr =~ s/\s*\Z//; + + $sym = "[category documentation] $cat"; # make a fake, unique symbol that's not valid C. + $wikitypes{$sym} = $type; + my %sections = (); + $sections{'Remarks'} = $docstr; + $sections{'[footer]'} = $notdocstr; + $wikisyms{$sym} = \%sections; + my @section_order = ( 'Remarks', '[footer]' ); + $wikisectionorder{$sym} = \@section_order; + next; + } + + my $current_section = '[start]'; + my @section_order = ( $current_section ); + my %sections = (); + $sections{$current_section} = ''; + + my $firstline = 1; + + while (<FH>) { + chomp; + my $orig = $_; + s/\A\s*//; + s/\s*\Z//; + + if ($type eq 'mediawiki') { + if (defined($wikipreamble) && $firstline && /\A\=\=\=\=\=\= (.*?) \=\=\=\=\=\=\Z/ && ($1 eq $wikipreamble)) { + $firstline = 0; # skip this. + next; + } elsif (/\A\= (.*?) \=\Z/) { + $firstline = 0; + $current_section = ($1 eq $sym) ? '[Brief]' : $1; + die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section}; + push @section_order, $current_section; + $sections{$current_section} = ''; + } elsif (/\A\=\= (.*?) \=\=\Z/) { + $firstline = 0; + $current_section = ($1 eq $sym) ? '[Brief]' : $1; + die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section}; + push @section_order, $current_section; + $sections{$current_section} = ''; + next; + } elsif (/\A\-\-\-\-\Z/) { + $firstline = 0; + $current_section = '[footer]'; + die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section}; + push @section_order, $current_section; + $sections{$current_section} = ''; + next; + } + } elsif ($type eq 'md') { + if (defined($wikipreamble) && $firstline && /\A\#\#\#\#\#\# (.*?)\Z/ && ($1 eq $wikipreamble)) { + $firstline = 0; # skip this. + next; + } elsif (/\A\#+ (.*?)\Z/) { + $firstline = 0; + $current_section = ($1 eq $sym) ?
'[Brief]' : $1; + die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section}; + push @section_order, $current_section; + $sections{$current_section} = ''; + next; + } elsif (/\A\-\-\-\-\Z/) { + $firstline = 0; + $current_section = '[footer]'; + die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section}; + push @section_order, $current_section; + $sections{$current_section} = ''; + next; + } + } else { + die("Unexpected wiki file type. Fixme!"); + } + + if ($firstline) { + $firstline = ($_ ne ''); + } + if (!$firstline) { + $sections{$current_section} .= "$orig\n"; + } + } + close(FH); + + foreach (keys %sections) { + $sections{$_} =~ s/\A\n+//; + $sections{$_} =~ s/\n+\Z//; + $sections{$_} .= "\n"; + } + + # older section name we used, migrate over from it. + if (defined $sections{'Related Functions'}) { + if (not defined $sections{'See Also'}) { + $sections{'See Also'} = $sections{'Related Functions'}; + } + delete $sections{'Related Functions'}; + } + + if (0) { + foreach (@section_order) { + print("$sym SECTION '$_':\n"); + print($sections{$_}); + print("\n\n"); + } + } + + $wikitypes{$sym} = $type; + $wikisyms{$sym} = \%sections; + $wikisectionorder{$sym} = \@section_order; +} +closedir(DH); + +delete $wikisyms{"Undocumented"}; + +{ + my $path = "$wikipath/Undocumented.md"; + open(my $fh, '>', $path) or die("Can't open '$path': $!\n"); + + print $fh "# Undocumented\n\n"; + print_undocumented_section($fh, 'Functions', 1); + #print_undocumented_section($fh, 'Macros', 2); + + close($fh); +} + +if ($warn_about_missing) { + foreach (keys %wikisyms) { + my $sym = $_; + if (not defined $headersyms{$sym}) { + print STDERR "WARNING: $sym defined in the wiki but not the headers!\n"; + } + } + + foreach (keys %headersyms) { + my $sym = $_; + if (not defined $wikisyms{$sym}) { + print STDERR "WARNING: $sym defined in the headers but not the wiki!\n"; + } + } +} + +if ($copy_direction == 1) { # --copy-to-headers + my %changed_headers = (); + + $dewikify_mode = 'md'; + $wordwrap_mode = 'md'; # the headers use Markdown format. + + foreach (keys %headersyms) { + my $sym = $_; + next if not defined $wikisyms{$sym}; # don't have a page for that function, skip it. + my $symtype = $headersymstype{$sym}; + my $wikitype = $wikitypes{$sym}; + my $sectionsref = $wikisyms{$sym}; + my $remarks = $sectionsref->{'Remarks'}; + my $returns = $sectionsref->{'Return Value'}; + my $threadsafety = $sectionsref->{'Thread Safety'}; + my $version = $sectionsref->{'Version'}; + my $related = $sectionsref->{'See Also'}; + my $deprecated = $sectionsref->{'Deprecated'}; + my $brief = $sectionsref->{'[Brief]'}; + my $addblank = 0; + my $str = ''; + + my $params = undef; + my $paramstr = undef; + + if ($symtype == -1) { # category documentation block. + # nothing to be done here. + } elsif (($symtype == 1) || (($symtype == 5))) { # we'll assume a typedef (5) with a \param is a function pointer typedef. + $params = $sectionsref->{'Function Parameters'}; + $paramstr = '\param'; + } elsif ($symtype == 2) { + $params = $sectionsref->{'Macro Parameters'}; + $paramstr = '\param'; + } elsif ($symtype == 3) { + $params = $sectionsref->{'Fields'}; + $paramstr = '\field'; + } elsif ($symtype == 4) { + $params = $sectionsref->{'Values'}; + $paramstr = '\value'; + } else { + die("Unexpected symtype $symtype"); + } + + $headersymshasdoxygen{$sym} = 1; # Added/changed doxygen for this header. + + $brief = dewikify($wikitype, $brief); + $brief =~ s/\A(.*?\.) 
/$1\n/; # \brief should only be one sentence, delimited by a period+space. Split if necessary. + my @briefsplit = split /\n/, $brief; + $brief = shift @briefsplit; + + if (defined $remarks) { + $remarks = join("\n", @briefsplit) . dewikify($wikitype, $remarks); + } + + if (defined $brief) { + $str .= "\n" if $addblank; $addblank = 1; + $str .= wordwrap($brief) . "\n"; + } + + if (defined $remarks) { + $str .= "\n" if $addblank; $addblank = 1; + $str .= wordwrap($remarks) . "\n"; + } + + if (defined $deprecated) { + # !!! FIXME: lots of code duplication in all of these. + $str .= "\n" if $addblank; $addblank = 1; + my $v = dewikify($wikitype, $deprecated); + my $whitespacelen = length("\\deprecated") + 1; + my $whitespace = ' ' x $whitespacelen; + $v = wordwrap($v, -$whitespacelen); + my @desclines = split /\n/, $v; + my $firstline = shift @desclines; + $str .= "\\deprecated $firstline\n"; + foreach (@desclines) { + $str .= "${whitespace}$_\n"; + } + } + + if (defined $params) { + $str .= "\n" if $addblank; $addblank = (defined $returns) ? 0 : 1; + my @lines = split /\n/, dewikify($wikitype, $params); + if ($wikitype eq 'mediawiki') { + die("Unexpected data parsing MediaWiki table") if (shift @lines ne '{|'); # Dump the '{|' start + while (scalar(@lines) >= 3) { + my $c_datatype = shift @lines; + my $name = shift @lines; + my $desc = shift @lines; + my $terminator; # the '|-' or '|}' line. + + if (($desc eq '|-') or ($desc eq '|}') or (not $desc =~ /\A\|/)) { # we seem to be out of cells, which means there was no datatype column on this one. + $terminator = $desc; + $desc = $name; + $name = $c_datatype; + $c_datatype = ''; + } else { + $terminator = shift @lines; + } + + last if ($terminator ne '|-') and ($terminator ne '|}'); # we seem to have run out of table. + $name =~ s/\A\|\s*//; + $name =~ s/\A\*\*(.*?)\*\*/$1/; + $name =~ s/\A\'\'\'(.*?)\'\'\'/$1/; + $desc =~ s/\A\|\s*//; + #print STDERR "SYM: $sym CDATATYPE: $c_datatype NAME: $name DESC: $desc TERM: $terminator\n"; + my $whitespacelen = length($name) + 8; + my $whitespace = ' ' x $whitespacelen; + $desc = wordwrap($desc, -$whitespacelen); + my @desclines = split /\n/, $desc; + my $firstline = shift @desclines; + $str .= "$paramstr $name $firstline\n"; + foreach (@desclines) { + $str .= "${whitespace}$_\n"; + } + } + } elsif ($wikitype eq 'md') { + my $l; + $l = shift @lines; + die("Unexpected data parsing Markdown table") if (not $l =~ /\A(\s*\|)?\s*\|\s*\|\s*\|\s*\Z/); + $l = shift @lines; + die("Unexpected data parsing Markdown table") if (not $l =~ /\A\s*(\|\s*\-*\s*)?\|\s*\-*\s*\|\s*\-*\s*\|\s*\Z/); + while (scalar(@lines) >= 1) { + $l = shift @lines; + my $name; + my $desc; + if ($l =~ /\A\s*\|\s*(.*?)\s*\|\s*(.*?)\s*\|\s*(.*?)\s*\|\s*\Z/) { + # c datatype is $1, but we don't care about it here. + $name = $2; + $desc = $3; + } elsif ($l =~ /\A\s*\|\s*(.*?)\s*\|\s*(.*?)\s*\|\s*\Z/) { + $name = $1; + $desc = $2; + } else { + last; # we seem to have run out of table. 
+ } + + $name =~ s/\A\*\*(.*?)\*\*/$1/; + $name =~ s/\A\'\'\'(.*?)\'\'\'/$1/; + #print STDERR "SYM: $sym NAME: $name DESC: $desc\n"; + my $whitespacelen = length($name) + 8; + my $whitespace = ' ' x $whitespacelen; + $desc = wordwrap($desc, -$whitespacelen); + my @desclines = split /\n/, $desc; + my $firstline = shift @desclines; + $str .= "$paramstr $name $firstline\n"; + foreach (@desclines) { + $str .= "${whitespace}$_\n"; + } + } + } else { + die("write me"); + } + } + + if (defined $returns) { + $str .= "\n" if $addblank; $addblank = 1; + my $r = dewikify($wikitype, $returns); + $r =~ s/\A\(.*?\)\s*//; # Chop datatype in parentheses off the front. + my $retstr = "\\returns"; + if ($r =~ s/\AReturn(s?)\s+//) { + $retstr = "\\return$1"; + } + + my $whitespacelen = length($retstr) + 1; + my $whitespace = ' ' x $whitespacelen; + $r = wordwrap($r, -$whitespacelen); + my @desclines = split /\n/, $r; + my $firstline = shift @desclines; + $str .= "$retstr $firstline\n"; + foreach (@desclines) { + $str .= "${whitespace}$_\n"; + } + } + + if (defined $threadsafety) { + # !!! FIXME: lots of code duplication in all of these. + $str .= "\n" if $addblank; $addblank = 1; + my $v = dewikify($wikitype, $threadsafety); + my $whitespacelen = length("\\threadsafety") + 1; + my $whitespace = ' ' x $whitespacelen; + $v = wordwrap($v, -$whitespacelen); + my @desclines = split /\n/, $v; + my $firstline = shift @desclines; + $str .= "\\threadsafety $firstline\n"; + foreach (@desclines) { + $str .= "${whitespace}$_\n"; + } + } + + if (defined $version) { + # !!! FIXME: lots of code duplication in all of these. + $str .= "\n" if $addblank; $addblank = 1; + my $v = dewikify($wikitype, $version); + my $whitespacelen = length("\\since") + 1; + my $whitespace = ' ' x $whitespacelen; + $v = wordwrap($v, -$whitespacelen); + my @desclines = split /\n/, $v; + my $firstline = shift @desclines; + $str .= "\\since $firstline\n"; + foreach (@desclines) { + $str .= "${whitespace}$_\n"; + } + } + + if (defined $related) { + # !!! FIXME: lots of code duplication in all of these. + $str .= "\n" if $addblank; $addblank = 1; + my $v = dewikify($wikitype, $related); + my @desclines = split /\n/, $v; + foreach (@desclines) { + s/\(\)\Z//; # Convert "SDL_Func()" to "SDL_Func" + s/\[\[(.*?)\]\]/$1/; # in case some wikilinks remain. + s/\[(.*?)\]\(.*?\)/$1/; # in case some wikilinks remain. + s/\A\/*//; + s/\A\s*[\:\*\-]\s*//; + s/\A\s+//; + s/\s+\Z//; + $str .= "\\sa $_\n"; + } + } + + my $header = $headersymslocation{$sym}; + my $contentsref = $headers{$header}; + my $chunk = $headersymschunk{$sym}; + + my @lines = split /\n/, $str; + + my $addnewline = (($chunk > 0) && ($$contentsref[$chunk-1] ne '')) ? "\n" : ''; + + my $output = "$addnewline/**\n"; + foreach (@lines) { + chomp; + s/\s*\Z//; + if ($_ eq '') { + $output .= " *\n"; + } else { + $output .= " * $_\n"; + } + } + $output .= " */"; + + #print("$sym:\n[$output]\n\n"); + + $$contentsref[$chunk] = $output; + #$$contentsref[$chunk+1] = $headerdecls{$sym}; + + $changed_headers{$header} = 1; + } + + foreach (keys %changed_headers) { + my $header = $_; + + # this is kinda inefficient, but oh well. + my @removelines = (); + foreach (keys %headersymslocation) { + my $sym = $_; + next if $headersymshasdoxygen{$sym}; + next if $headersymslocation{$sym} ne $header; + # the index of the blank line we put before the function declaration in case we needed to replace it with new content from the wiki. 
+ push @removelines, $headersymschunk{$sym}; + } + + my $contentsref = $headers{$header}; + foreach (@removelines) { + delete $$contentsref[$_]; # delete DOES NOT RENUMBER existing elements! + } + + my $path = "$incpath/$header.tmp"; + open(FH, '>', $path) or die("Can't open '$path': $!\n"); + foreach (@$contentsref) { + print FH "$_\n" if defined $_; + } + close(FH); + rename($path, "$incpath/$header") or die("Can't rename '$path' to '$incpath/$header': $!\n"); + } + + if (defined $readmepath) { + mkdir($readmepath); # just in case + opendir(DH, $wikipath) or die("Can't opendir '$wikipath': $!\n"); + while (readdir(DH)) { + my $dent = $_; + if ($dent =~ /\A(README|INTRO)\-.*?\.md\Z/) { # we only bridge Markdown files here that start with "README-" or "INTRO-". + filecopy("$wikipath/$dent", "$readmepath/$dent", "\n"); + } + } + closedir(DH); + } + +} elsif ($copy_direction == -1) { # --copy-to-wiki + + my %briefs = (); # $briefs{'SDL_OpenAudio'} -> the \brief string for the function. + + if (defined $changeformat) { + $dewikify_mode = $changeformat; + $wordwrap_mode = $changeformat; + } + + foreach (keys %headersyms) { + my $sym = $_; + next if not $headersymshasdoxygen{$sym}; + next if $sym =~ /\A\[category documentation\]/; # not real symbols, we handle this elsewhere. + my $symtype = $headersymstype{$sym}; + my $origwikitype = defined $wikitypes{$sym} ? $wikitypes{$sym} : 'md'; # default to MarkDown for new stuff. + my $wikitype = (defined $changeformat) ? $changeformat : $origwikitype; + die("Unexpected wikitype '$wikitype'") if (($wikitype ne 'mediawiki') and ($wikitype ne 'md') and ($wikitype ne 'manpage')); + + #print("$sym\n"); next; + + $wordwrap_mode = $wikitype; + + my $raw = $headersyms{$sym}; # raw doxygen text with comment characters stripped from start/end and start of each line. + next if not defined $raw; + $raw =~ s/\A\s*\\brief\s+//; # Technically we don't need \brief (please turn on JAVADOC_AUTOBRIEF if you use Doxygen), so just in case one is present, strip it. + + my @doxygenlines = split /\n/, $raw; + my $brief = ''; + while (@doxygenlines) { + last if $doxygenlines[0] =~ /\A\\/; # some sort of doxygen command, assume we're past the general remarks. + last if $doxygenlines[0] =~ /\A\s*\Z/; # blank line? End of paragraph, done. + my $l = shift @doxygenlines; + chomp($l); + $l =~ s/\A\s*//; + $l =~ s/\s*\Z//; + $brief .= "$l "; + } + + $brief =~ s/\s+\Z//; + $brief =~ s/\A(.*?\.) /$1\n\n/; # \brief should only be one sentence, delimited by a period+space. Split if necessary. + my @briefsplit = split /\n/, $brief; + + next if not defined $briefsplit[0]; # No brief text? Probably a bogus Doxygen comment, skip it. + + $brief = wikify($wikitype, shift @briefsplit) . "\n"; + @doxygenlines = (@briefsplit, @doxygenlines); + + my $remarks = ''; + while (@doxygenlines) { + last if $doxygenlines[0] =~ /\A\\/; # some sort of doxygen command, assume we're past the general remarks. + my $l = shift @doxygenlines; + $remarks .= "$l\n"; + } + + #print("REMARKS:\n\n $remarks\n\n"); + + $remarks = wordwrap(wikify($wikitype, $remarks)); + $remarks =~ s/\A\s*//; + $remarks =~ s/\s*\Z//; + + my $decl = $headerdecls{$sym}; + + my $syntax = ''; + if ($wikitype eq 'mediawiki') { + $syntax = "\n$decl\n"; + } elsif ($wikitype eq 'md') { + $decl =~ s/\n+\Z//; + $syntax = "```c\n$decl\n```\n"; + } else { die("Expected wikitype '$wikitype'"); } + + my %sections = (); + $sections{'[Brief]'} = $brief; # include this section even if blank so we get a title line. 
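+    # %sections maps wiki section titles to rendered bodies; for a hypothetical + # SDL_CreateThing page in 'md' mode, '[Brief]' supplies the one-sentence summary + # under the page title and 'Syntax' carries the fenced C declaration built above.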
+ $sections{'Remarks'} = "$remarks\n" if $remarks ne ''; + $sections{'Syntax'} = $syntax; + + $briefs{$sym} = $brief; + + my %params = (); # have to parse these and build up the wiki tables after, since Markdown needs to know the length of the largest string. :/ + my @paramsorder = (); + my $fnsigparams = $headersymsparaminfo{$sym}; + my $has_returns = 0; + my $has_threadsafety = 0; + + while (@doxygenlines) { + my $l = shift @doxygenlines; + # We allow param/field/value interchangeably, even if it doesn't make sense. The next --copy-to-headers will correct it anyhow. + if ($l =~ /\A\\(param|field|value)\s+(.*?)\s+(.*)\Z/) { + my $arg = $2; + my $desc = $3; + while (@doxygenlines) { + my $subline = $doxygenlines[0]; + $subline =~ s/\A\s*//; + last if $subline =~ /\A\\/; # some sort of doxygen command, assume we're past this thing. + shift @doxygenlines; # dump this line from the array; we're using it. + if ($subline eq '') { # empty line, make sure it keeps the newline char. + $desc .= "\n"; + } else { + $desc .= " $subline"; + } + } + + $desc =~ s/[\s\n]+\Z//ms; + + if (0) { + if (($desc =~ /\A[a-z]/) && (not $desc =~ /$apiprefixregex/)) { + print STDERR "WARNING: $sym\'s '\\param $arg' text starts with a lowercase letter: '$desc'. Fixing.\n"; + $desc = ucfirst($desc); + } + } + + if (not $desc =~ /[\.\!]\Z/) { + print STDERR "WARNING: $sym\'s '\\param $arg' text doesn't end with punctuation: '$desc'. Fixing.\n"; + $desc .= '.'; + } + + # Validate this param. + if (defined($params{$arg})) { + print STDERR "WARNING: Symbol '$sym' has multiple '\\param $arg' declarations! Only keeping the first one!\n"; + } elsif (defined $fnsigparams) { + my $found = 0; + for (my $i = 0; $i < scalar(@$fnsigparams); $i += 2) { + $found = 1, last if (@$fnsigparams[$i] eq $arg); + } + if (!$found) { + print STDERR "WARNING: Symbol '$sym' has a '\\param $arg' for a param that doesn't exist. It will be removed!\n"; + } + } + + # We need to know the length of the longest string to make Markdown tables, so we just store these off until everything is parsed. + $params{$arg} = $desc; + push @paramsorder, $arg; + } elsif ($l =~ /\A\\r(eturns?)\s+(.*)\Z/) { + $has_returns = 1; + # !!! FIXME: complain if this isn't a function or macro. + my $retstr = "R$1"; # "Return" or "Returns" + my $desc = $2; + + while (@doxygenlines) { + my $subline = $doxygenlines[0]; + $subline =~ s/\A\s*//; + last if $subline =~ /\A\\/; # some sort of doxygen command, assume we're past this thing. + shift @doxygenlines; # dump this line from the array; we're using it. + if ($subline eq '') { # empty line, make sure it keeps the newline char. + $desc .= "\n"; + } else { + $desc .= " $subline"; + } + } + $desc =~ s/[\s\n]+\Z//ms; + + if (0) { + if (($desc =~ /\A[A-Z]/) && (not $desc =~ /$apiprefixregex/)) { + print STDERR "WARNING: $sym\'s '\\returns' text starts with a capital letter: '$desc'. Fixing.\n"; + $desc = lcfirst($desc); + } + } + + if (not $desc =~ /[\.\!]\Z/) { + print STDERR "WARNING: $sym\'s '\\returns' text doesn't end with punctuation: '$desc'. Fixing.\n"; + $desc .= '.'; + } + + # Make sure the \returns info is valid. + my $rettype = $headersymsrettype{$sym}; + die("Don't have a rettype for '$sym' for some reason!") if (($symtype == 1) && (not defined($rettype))); + if (defined($sections{'Return Value'})) { + print STDERR "WARNING: Symbol '$sym' has multiple '\\return' declarations! Only keeping the first one!\n"; + } elsif (($symtype != 1) && ($symtype != 2) && ($symtype != 5)) { # !!! 
FIXME: if 5, make sure it's a function pointer typedef! + print STDERR "WARNING: Symbol '$sym' has a '\\return' declaration but isn't a function or macro! Removing it!\n"; + } elsif (($symtype == 1) && ($headersymsrettype{$sym} eq 'void')) { + print STDERR "WARNING: Function '$sym' has a '\\returns' declaration but function returns void! Removing it!\n"; + } else { + my $rettypestr = defined($rettype) ? ('(' . wikify($wikitype, $rettype) . ') ') : ''; + $sections{'Return Value'} = wordwrap("$rettypestr$retstr ". wikify($wikitype, $desc)) . "\n"; + } + } elsif ($l =~ /\A\\deprecated\s+(.*)\Z/) { + my $desc = $1; + while (@doxygenlines) { + my $subline = $doxygenlines[0]; + $subline =~ s/\A\s*//; + last if $subline =~ /\A\\/; # some sort of doxygen command, assume we're past this thing. + shift @doxygenlines; # dump this line from the array; we're using it. + if ($subline eq '') { # empty line, make sure it keeps the newline char. + $desc .= "\n"; + } else { + $desc .= " $subline"; + } + } + $desc =~ s/[\s\n]+\Z//ms; + $sections{'Deprecated'} = wordwrap(wikify($wikitype, $desc)) . "\n"; + } elsif ($l =~ /\A\\since\s+(.*)\Z/) { + my $desc = $1; + while (@doxygenlines) { + my $subline = $doxygenlines[0]; + $subline =~ s/\A\s*//; + last if $subline =~ /\A\\/; # some sort of doxygen command, assume we're past this thing. + shift @doxygenlines; # dump this line from the array; we're using it. + if ($subline eq '') { # empty line, make sure it keeps the newline char. + $desc .= "\n"; + } else { + $desc .= " $subline"; + } + } + $desc =~ s/[\s\n]+\Z//ms; + $sections{'Version'} = wordwrap(wikify($wikitype, $desc)) . "\n"; + } elsif ($l =~ /\A\\threadsafety\s+(.*)\Z/) { + my $desc = $1; + while (@doxygenlines) { + my $subline = $doxygenlines[0]; + $subline =~ s/\A\s*//; + last if $subline =~ /\A\\/; # some sort of doxygen command, assume we're past this thing. + shift @doxygenlines; # dump this line from the array; we're using it. + if ($subline eq '') { # empty line, make sure it keeps the newline char. + $desc .= "\n"; + } else { + $desc .= " $subline"; + } + } + $desc =~ s/[\s\n]+\Z//ms; + $sections{'Thread Safety'} = wordwrap(wikify($wikitype, $desc)) . "\n"; + $has_threadsafety = 1; + } elsif ($l =~ /\A\\sa\s+(.*)\Z/) { + my $sa = $1; + $sa =~ s/\(\)\Z//; # Convert "SDL_Func()" to "SDL_Func" + $sections{'See Also'} = '' if not defined $sections{'See Also'}; + if ($wikitype eq 'mediawiki') { + $sections{'See Also'} .= ":[[$sa]]\n"; + } elsif ($wikitype eq 'md') { + $sections{'See Also'} .= "- [$sa]($sa)\n"; + } else { die("Expected wikitype '$wikitype'"); } + } + } + + if (($symtype == 1) && ($headersymsrettype{$sym} ne 'void') && !$has_returns) { + print STDERR "WARNING: Function '$sym' has a non-void return type but no '\\returns' declaration\n"; + } + + # !!! FIXME: uncomment this when we're trying to clean this up in the headers. + #if (($symtype == 1) && !$has_threadsafety) { + # print STDERR "WARNING: Function '$sym' doesn't have a '\\threadsafety' declaration\n"; + #} + + # Make sure %params is in the same order as the actual function signature and add C datatypes... + my $params_has_c_datatype = 0; + my @final_params = (); + if (($symtype == 1) && (defined($headersymsparaminfo{$sym}))) { # is a function and we have param info for it... 
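+        # @final_params is built as flat (name, C datatype, description) triples, + # ordered to match the actual C signature rather than the order the \param + # lines appeared in the doxygen comment.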
+ my $fnsigparams = $headersymsparaminfo{$sym}; + for (my $i = 0; $i < scalar(@$fnsigparams); $i += 2) { + my $paramname = @$fnsigparams[$i]; + my $paramdesc = $params{$paramname}; + if (defined($paramdesc)) { + push @final_params, $paramname; # name + push @final_params, @$fnsigparams[$i+1]; # C datatype + push @final_params, $paramdesc; # description + $params_has_c_datatype = 1 if (defined(@$fnsigparams[$i+1])); + } else { + print STDERR "WARNING: Symbol '$sym' is missing a '\\param $paramname' declaration!\n"; + } + } + } else { + foreach (@paramsorder) { + my $paramname = $_; + my $paramdesc = $params{$paramname}; + if (defined($paramdesc)) { + push @final_params, $_; + push @final_params, undef; + push @final_params, $paramdesc; + } + } + } + + my $hfiletext = $wikiheaderfiletext; + $hfiletext =~ s/\%fname\%/$headersymslocation{$sym}/g; + $sections{'Header File'} = "$hfiletext\n"; + + # Make sure this ends with a double-newline. + $sections{'See Also'} .= "\n" if defined $sections{'See Also'}; + + if (0) { # !!! FIXME: this was a useful hack, but this needs to be generalized if we're going to do this always. + # Plug in a \since section if one wasn't listed. + if (not defined $sections{'Version'}) { + my $symtypename; + if ($symtype == 1) { + $symtypename = 'function'; + } elsif ($symtype == 2) { + $symtypename = 'macro'; + } elsif ($symtype == 3) { + $symtypename = 'struct'; + } elsif ($symtype == 4) { + $symtypename = 'enum'; + } elsif ($symtype == 5) { + $symtypename = 'datatype'; + } else { + die("Unexpected symbol type $symtype!"); + } + my $str = "This $symtypename is available since $projectshortname 3.0.0."; + $sections{'Version'} = wordwrap(wikify($wikitype, $str)) . "\n"; + } + } + + # We can build the wiki table now that we have all the data. + if (scalar(@final_params) > 0) { + my $str = ''; + if ($wikitype eq 'mediawiki') { + while (scalar(@final_params) > 0) { + my $arg = shift @final_params; + my $c_datatype = shift @final_params; + my $desc = wikify($wikitype, shift @final_params); + $c_datatype = '' if not defined $c_datatype; + $str .= ($str eq '') ? "{|\n" : "|-\n"; + $str .= "|$c_datatype\n" if $params_has_c_datatype; + $str .= "|'''$arg'''\n"; + $str .= "|$desc\n"; + } + $str .= "|}\n"; + } elsif ($wikitype eq 'md') { + my $longest_arg = 0; + my $longest_c_datatype = 0; + my $longest_desc = 0; + my $which = 0; + foreach (@final_params) { + if ($which == 0) { + my $len = length($_); + $longest_arg = $len if ($len > $longest_arg); + $which = 1; + } elsif ($which == 1) { + if (defined($_)) { + my $len = length(wikify($wikitype, $_)); + $longest_c_datatype = $len if ($len > $longest_c_datatype); + } + $which = 2; + } else { + my $len = length(wikify($wikitype, $_)); + $longest_desc = $len if ($len > $longest_desc); + $which = 0; + } + } + + # Markdown tables are sort of obnoxious. + my $c_datatype_cell; + $c_datatype_cell = ($longest_c_datatype > 0) ? ('| ' . (' ' x ($longest_c_datatype)) . ' ') : ''; + $str .= $c_datatype_cell . '| ' . (' ' x ($longest_arg+4)) . ' | ' . (' ' x $longest_desc) . " |\n"; + $c_datatype_cell = ($longest_c_datatype > 0) ? ('| ' . ('-' x ($longest_c_datatype)) . ' ') : ''; + $str .= $c_datatype_cell . '| ' . ('-' x ($longest_arg+4)) . ' | ' . ('-' x $longest_desc) . " |\n"; + + while (@final_params) { + my $arg = shift @final_params; + my $c_datatype = shift @final_params; + $c_datatype_cell = ''; + if ($params_has_c_datatype) { + $c_datatype = defined($c_datatype) ? 
wikify($wikitype, $c_datatype) : ''; + $c_datatype_cell = ($longest_c_datatype > 0) ? ("| $c_datatype " . (' ' x ($longest_c_datatype - length($c_datatype)))) : ''; + } + my $desc = wikify($wikitype, shift @final_params); + $str .= $c_datatype_cell . "| **$arg** " . (' ' x ($longest_arg - length($arg))) . "| $desc" . (' ' x ($longest_desc - length($desc))) . " |\n"; + } + } else { + die("Unexpected wikitype!"); # should have checked this elsewhere. + } + $sections{'Function Parameters'} = $str; + } + + my $path = "$wikipath/$sym.${wikitype}.tmp"; + open(FH, '>', $path) or die("Can't open '$path': $!\n"); + + my $sectionsref = $wikisyms{$sym}; + + foreach (@standard_wiki_sections) { + # drop sections we either replaced or removed from the original wiki's contents. + if (not defined $only_wiki_sections{$_}) { + delete($$sectionsref{$_}); + } + } + + my $wikisectionorderref = $wikisectionorder{$sym}; + + # Make sure there's a footer in the wiki that puts this function in CategoryAPI... + if (not $$sectionsref{'[footer]'}) { + $$sectionsref{'[footer]'} = ''; + push @$wikisectionorderref, '[footer]'; + } + + # If changing format, convert things that otherwise are passed through unmolested. + if (defined $changeformat) { + if (($dewikify_mode eq 'md') and ($origwikitype eq 'mediawiki')) { + $$sectionsref{'[footer]'} =~ s/\[\[(Category[a-zA-Z0-9_]+)\]\]/[$1]($1)/g; + } elsif (($dewikify_mode eq 'mediawiki') and ($origwikitype eq 'md')) { + $$sectionsref{'[footer]'} =~ s/\[(Category[a-zA-Z0-9_]+)\]\(.*?\)/[[$1]]/g; + } + + foreach (keys %only_wiki_sections) { + my $sect = $_; + if (defined $$sectionsref{$sect}) { + $$sectionsref{$sect} = wikify($wikitype, dewikify($origwikitype, $$sectionsref{$sect})); + } + } + } + + if ($symtype != -1) { # Don't do these in category documentation block + my $footer = $$sectionsref{'[footer]'}; + + my $symtypename; + if ($symtype == 1) { + $symtypename = 'Function'; + } elsif ($symtype == 2) { + $symtypename = 'Macro'; + } elsif ($symtype == 3) { + $symtypename = 'Struct'; + } elsif ($symtype == 4) { + $symtypename = 'Enum'; + } elsif ($symtype == 5) { + $symtypename = 'Datatype'; + } else { + die("Unexpected symbol type $symtype!"); + } + + my $symcategory = $headersymscategory{$sym}; + if ($wikitype eq 'mediawiki') { + $footer =~ s/\[\[CategoryAPI\]\],?\s*//g; + $footer =~ s/\[\[CategoryAPI${symtypename}\]\],?\s*//g; + $footer =~ s/\[\[Category${symcategory}\]\],?\s*//g if defined $symcategory; + $footer = "[[CategoryAPI]], [[CategoryAPI$symtypename]]" . (defined $symcategory ? ", [[Category$symcategory]]" : '') . (($footer eq '') ? "\n" : ", $footer"); + } elsif ($wikitype eq 'md') { + $footer =~ s/\[CategoryAPI\]\(CategoryAPI\),?\s*//g; + $footer =~ s/\[CategoryAPI${symtypename}\]\(CategoryAPI${symtypename}\),?\s*//g; + $footer =~ s/\[Category${symcategory}\]\(Category${symcategory}\),?\s*//g if defined $symcategory; + $footer = "[CategoryAPI](CategoryAPI), [CategoryAPI$symtypename](CategoryAPI$symtypename)" . (defined $symcategory ? ", [Category$symcategory](Category$symcategory)" : '') . (($footer eq '') ? '' : ', ') . 
$footer; + } else { die("Unexpected wikitype '$wikitype'"); } + $$sectionsref{'[footer]'} = $footer; + + if (defined $wikipreamble) { + my $wikified_preamble = wikify($wikitype, $wikipreamble); + if ($wikitype eq 'mediawiki') { + print FH "====== $wikified_preamble ======\n"; + } elsif ($wikitype eq 'md') { + print FH "###### $wikified_preamble\n"; + } else { die("Unexpected wikitype '$wikitype'"); } + } + } + + my $prevsectstr = ''; + my @ordered_sections = (@standard_wiki_sections, defined $wikisectionorderref ? @$wikisectionorderref : ()); # this copies the arrays into one. + foreach (@ordered_sections) { + my $sect = $_; + next if $sect eq '[start]'; + next if (not defined $sections{$sect} and not defined $$sectionsref{$sect}); + my $section = defined $sections{$sect} ? $sections{$sect} : $$sectionsref{$sect}; + + if ($sect eq '[footer]') { + # Make sure previous section ends with two newlines. + if (substr($prevsectstr, -1) ne "\n") { + print FH "\n\n"; + } elsif (substr($prevsectstr, -2) ne "\n\n") { + print FH "\n"; + } + print FH "----\n"; # It's the same in Markdown and MediaWiki. + } elsif ($sect eq '[Brief]') { + if ($wikitype eq 'mediawiki') { + print FH "= $sym =\n\n"; + } elsif ($wikitype eq 'md') { + print FH "# $sym\n\n"; + } else { die("Unexpected wikitype '$wikitype'"); } + } else { + my $sectname = $sect; + if ($sectname eq 'Function Parameters') { # We use this same table for different things depending on what we're documenting, so rename it now. + if (($symtype == 1) || ($symtype == 5)) { # function (or typedef, in case it's a function pointer type). + } elsif ($symtype == 2) { # macro + $sectname = 'Macro Parameters'; + } elsif ($symtype == 3) { # struct/union + $sectname = 'Fields'; + } elsif ($symtype == 4) { # enum + $sectname = 'Values'; + } else { + die("Unexpected symtype $symtype"); + } + } + + if ($symtype != -1) { # Not for category documentation block + if ($wikitype eq 'mediawiki') { + print FH "\n== $sectname ==\n\n"; + } elsif ($wikitype eq 'md') { + print FH "\n## $sectname\n\n"; + } else { die("Unexpected wikitype '$wikitype'"); } + } + } + + my $sectstr = defined $sections{$sect} ? $sections{$sect} : $$sectionsref{$sect}; + print FH $sectstr; + + $prevsectstr = $sectstr; + + # make sure these don't show up twice. + delete($sections{$sect}); + delete($$sectionsref{$sect}); + } + + print FH "\n\n"; + close(FH); + + if (defined $changeformat and ($origwikitype ne $wikitype)) { + system("cd '$wikipath' ; git mv '$_.${origwikitype}' '$_.${wikitype}'"); + unlink("$wikipath/$_.${origwikitype}"); + } + + rename($path, "$wikipath/$_.${wikitype}") or die("Can't rename '$path' to '$wikipath/$_.${wikitype}': $!\n"); + } + + # Write out simple redirector pages if they don't already exist. + foreach (keys %referenceonly) { + my $sym = $_; + my $refersto = $referenceonly{$sym}; + my $path = "$wikipath/$sym.md"; # we only do Markdown for these. + next if (-f $path); # don't overwrite if it already exists. Delete the file if you need a rebuild! + open(FH, '>', $path) or die("Can't open '$path': $!\n"); + + if (defined $wikipreamble) { + my $wikified_preamble = wikify('md', $wikipreamble); + print FH "###### $wikified_preamble\n"; + } + + my $category = 'CategoryAPIMacro'; + if ($headersymstype{$refersto} == 4) { + $category = 'CategoryAPIEnumerators'; # NOT CategoryAPIEnum! 
+ } + + print FH "# $sym\n\nPlease refer to [$refersto]($refersto) for details.\n\n"; + print FH "----\n"; + print FH "[CategoryAPI](CategoryAPI), [$category]($category)\n\n"; + + close(FH); + } + + # Write out Category pages... + foreach (keys %headercategorydocs) { + my $cat = $_; + my $sym = $headercategorydocs{$cat}; # fake symbol + my $raw = $headersyms{$sym}; # raw doxygen text with comment characters stripped from start/end and start of each line. + my $wikitype = defined($wikitypes{$sym}) ? $wikitypes{$sym} : 'md'; + my $path = "$wikipath/Category$cat.$wikitype"; + + $raw = wordwrap(wikify($wikitype, $raw)); + + my $tmppath = "$path.tmp"; + open(FH, '>', $tmppath) or die("Can't open '$tmppath': $!\n"); + print FH "$raw\n\n"; + + if (! -f $path) { # Doesn't exist at all? Write out a template file. + # If writing from scratch, it's always a Markdown file. + die("Unexpected wikitype '$wikitype'!") if $wikitype ne 'md'; + print FH <<__EOF__ + + + +## Functions + + + + + +## Datatypes + + + + + +## Structs + + + + + +## Enums + + + + + +## Macros + + + + + +---- +[CategoryAPICategory](CategoryAPICategory) + +__EOF__ +; + } else { + my $endstr = $wikisyms{$sym}->{'[footer]'}; + if (defined($endstr)) { + print FH $endstr; + } + } + + close(FH); + rename($tmppath, $path) or die("Can't rename '$tmppath' to '$path': $!\n"); + }
+ + # Write out READMEs... + if (defined $readmepath) { + if ( -d $readmepath ) { + mkdir($wikipath); # just in case + opendir(DH, $readmepath) or die("Can't opendir '$readmepath': $!\n"); + while (my $d = readdir(DH)) { + my $dent = $d; + if ($dent =~ /\A(README|INTRO)\-.*?\.md\Z/) { # we only bridge Markdown files here that start with "README-" or "INTRO". + filecopy("$readmepath/$dent", "$wikipath/$dent", "\n"); + } + } + closedir(DH); + + my @pages = (); + opendir(DH, $wikipath) or die("Can't opendir '$wikipath': $!\n"); + while (my $d = readdir(DH)) { + my $dent = $d; + if ($dent =~ /\A((README|INTRO)\-.*?)\.md\Z/) { + push @pages, $1; + } + } + closedir(DH); + + open(FH, '>', "$wikipath/READMEs.md") or die("Can't open '$wikipath/READMEs.md': $!\n"); + print FH "# All READMEs available here\n\n"; + foreach (sort @pages) { + my $wikiname = $_; + print FH "- [$wikiname]($wikiname)\n"; + } + close(FH); + } + }
+ + # Write out quick reference pages... + if ($quickrefenabled) { + generate_quickref(\%briefs, "$wikipath/QuickReference.md", 0); + generate_quickref(\%briefs, "$wikipath/QuickReferenceNoUnicode.md", 1); + } + + if ($envvarenabled and defined $envvarsymregex and defined $envvarsymreplace) { + generate_envvar_wiki_page(\%briefs, "$wikipath/EnvironmentVariables.md"); + }
+ +} elsif ($copy_direction == -2) { # --copy-to-manpages + # This only takes from the wiki data, since it has sections we omit from the headers, like code examples. + + File::Path::make_path("$manpath/man3"); + + $dewikify_mode = 'manpage'; + $wordwrap_mode = 'manpage'; + + my $introtxt = ''; + if (0) { + open(FH, '<', "$srcpath/LICENSE.txt") or die("Can't open '$srcpath/LICENSE.txt': $!\n"); + while (<FH>) { + chomp; + $introtxt .= ".\\\" $_\n"; + } + close(FH); + } + + if (!$gitrev) { + $gitrev = `cd "$srcpath" ; git rev-list HEAD~..`; + chomp($gitrev); + } + + # !!! FIXME + open(FH, '<', "$srcpath/$versionfname") or die("Can't open '$srcpath/$versionfname': $!\n"); + my $majorver = 0; + my $minorver = 0; + my $microver = 0; + while (<FH>) { + chomp; + if (/$versionmajorregex/) { + $majorver = int($1); + } elsif (/$versionminorregex/) { + $minorver = int($1); + } elsif (/$versionmicroregex/) { + $microver = int($1); + } + } + close(FH); + my $fullversion = "$majorver.$minorver.$microver";
+ + foreach (keys %headersyms) { + my $sym = $_; + next if not defined $wikisyms{$sym}; # don't have a page for that function, skip it. + next if $sym =~ /\A\[category documentation\]/; # not real symbols + next if (defined $manpagesymbolfilterregex) && ($sym =~ /$manpagesymbolfilterregex/); + my $symtype = $headersymstype{$sym}; + my $wikitype = $wikitypes{$sym}; + my $sectionsref = $wikisyms{$sym}; + my $remarks = $sectionsref->{'Remarks'}; + my $returns = $sectionsref->{'Return Value'}; + my $version = $sectionsref->{'Version'}; + my $threadsafety = $sectionsref->{'Thread Safety'}; + my $related = $sectionsref->{'See Also'}; + my $examples = $sectionsref->{'Code Examples'}; + my $deprecated = $sectionsref->{'Deprecated'}; + my $headerfile = $manpageheaderfiletext; + + my $params = undef; + + if ($symtype == -1) { # category documentation block. + # nothing to be done here. + } elsif (($symtype == 1) || (($symtype == 5))) { # we'll assume a typedef (5) with a \param is a function pointer typedef. + $params = $sectionsref->{'Function Parameters'}; + } elsif ($symtype == 2) { + $params = $sectionsref->{'Macro Parameters'}; + } elsif ($symtype == 3) { + $params = $sectionsref->{'Fields'}; + } elsif ($symtype == 4) { + $params = $sectionsref->{'Values'}; + } else { + die("Unexpected symtype $symtype"); + } + + $headerfile =~ s/\%fname\%/$headersymslocation{$sym}/g; + $headerfile .= "\n"; + + my $mansection; + my $mansectionname; + if (($symtype == 1) || ($symtype == 2)) { # functions or macros + $mansection = '3'; + $mansectionname = 'FUNCTIONS'; + } elsif (($symtype >= 3) && ($symtype <= 5)) { # struct/union/enum/typedef + $mansection = '3type'; + $mansectionname = 'DATATYPES'; + } else { + die("Unexpected symtype $symtype"); + } + + my $brief = $sectionsref->{'[Brief]'}; + my $decl = $headerdecls{$sym}; + my $str = ''; + + # the "$brief" makes sure this is a copy of the string, which is doing some weird reference thing otherwise. + $brief = defined $brief ? "$brief" : ''; + $brief =~ s/\A[\s\n]*\= .*? \=\s*?\n+//ms; + $brief =~ s/\A[\s\n]*\=\= .*? \=\=\s*?\n+//ms; + $brief =~ s/\A(.*?\.) /$1\n/; # \brief should only be one sentence, delimited by a period+space. Split if necessary. + my @briefsplit = split /\n/, $brief; + $brief = shift @briefsplit; + $brief = dewikify($wikitype, $brief); + + if (defined $remarks) { + $remarks = dewikify($wikitype, join("\n", @briefsplit) . $remarks); + }
+ + $str .= $introtxt; + + $str .= ".\\\" This manpage content is licensed under Creative Commons\n"; + $str .= ".\\\" Attribution 4.0 International (CC BY 4.0)\n"; + $str .= ".\\\" https://creativecommons.org/licenses/by/4.0/\n"; + $str .= ".\\\" This manpage was generated from ${projectshortname}'s wiki page for $sym:\n"; + $str .= ".\\\" $wikiurl/$sym\n"; + $str .= ".\\\" Generated with SDL/build-scripts/wikiheaders.pl\n"; + $str .= ".\\\" revision $gitrev\n" if $gitrev ne ''; + $str .= ".\\\" Please report issues in this manpage's content at:\n"; + $str .= ".\\\" $bugreporturl\n"; + $str .= ".\\\" Please report issues in the generation of this manpage from the wiki at:\n"; + $str .= ".\\\" https://github.com/libsdl-org/SDL/issues/new?title=Misgenerated%20manpage%20for%20$sym\n"; # !!! FIXME: if this becomes a problem for other projects, we'll generalize this. + $str .= ".\\\" $projectshortname can be found at $projecturl\n"; + + # Define a .URL macro. The "www.tmac" thing decides if we're using GNU roff (which has a .URL macro already), and if so, overrides the macro we just created. + # This wizardry is from https://web.archive.org/web/20060102165607/http://people.debian.org/~branden/talks/wtfm/wtfm.pdf + $str .= ".de URL\n"; + $str .= '\\$2 \(laURL: \\$1 \(ra\\$3' . "\n"; + $str .= "..\n"; + $str .= '.if \n[.g] .mso www.tmac' . "\n"; + + $str .= ".TH $sym $mansection \"$projectshortname $fullversion\" \"$projectfullname\" \"$projectshortname$majorver $mansectionname\"\n"; + $str .= ".SH NAME\n"; + + $str .= "$sym"; + $str .= " \\- $brief" if (defined $brief); + $str .= "\n"; + + if (defined $deprecated) { + $str .= ".SH DEPRECATED\n"; + $str .= dewikify($wikitype, $deprecated) . "\n"; + } + + my $incfile = $mainincludefname; + if (defined $headerfile) { + if($headerfile =~ /Defined in (.*)/) { + $incfile = $1; + } + } + + $str .= ".SH SYNOPSIS\n"; + $str .= ".nf\n"; + $str .= ".B #include <$incfile>\n"; + $str .= ".PP\n"; + + my @decllines = split /\n/, $decl; + foreach (@decllines) { + $_ =~ s/\\/\\(rs/g; # fix multiline macro defs + $_ =~ s/"/\\(dq/g; + $str .= ".BI \"$_\n"; + } + $str .= ".fi\n"; + + if (defined $remarks) { + $str .= ".SH DESCRIPTION\n"; + $str .= $remarks . "\n"; + } + + if (defined $params) { + if (($symtype == 1) || ($symtype == 5)) { + $str .= ".SH FUNCTION PARAMETERS\n"; + } elsif ($symtype == 2) { # macro + $str .= ".SH MACRO PARAMETERS\n"; + } elsif ($symtype == 3) { # struct/union + $str .= ".SH FIELDS\n"; + } elsif ($symtype == 4) { # enum + $str .= ".SH VALUES\n"; + } else { + die("Unexpected symtype $symtype"); + } + + my @lines = split /\n/, $params; + if ($wikitype eq 'mediawiki') { + die("Unexpected data parsing MediaWiki table") if (shift @lines ne '{|'); # Dump the '{|' start + while (scalar(@lines) >= 3) { + my $c_datatype = shift @lines; + my $name = shift @lines; + my $desc = shift @lines; + my $terminator; # the '|-' or '|}' line. + + if (($desc eq '|-') or ($desc eq '|}') or (not $desc =~ /\A\|/)) { # we seem to be out of cells, which means there was no datatype column on this one. + $terminator = $desc; + $desc = $name; + $name = $c_datatype; + $c_datatype = ''; + } else { + $terminator = shift @lines; + } + + last if ($terminator ne '|-') and ($terminator ne '|}'); # we seem to have run out of table.
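+ # Worked example of the cell-shift above (hypothetical cells, not from a real wiki page): a three-cell row arrives as "|int", "|'''x'''", "|the X coordinate." followed by a "|-" or "|}" terminator; a two-cell row has no C datatype column, so the values shift one slot and $c_datatype is left empty.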
+ $name =~ s/\A\|\s*//; + $name =~ s/\A\*\*(.*?)\*\*/$1/; + $name =~ s/\A\'\'\'(.*?)\'\'\'/$1/; + $desc =~ s/\A\|\s*//; + $desc = dewikify($wikitype, $desc); + #print STDERR "SYM: $sym CDATATYPE: $c_datatype NAME: $name DESC: $desc TERM: $terminator\n"; + + $str .= ".TP\n"; + $str .= ".I $name\n"; + $str .= "$desc\n"; + } + } elsif ($wikitype eq 'md') { + my $l; + $l = shift @lines; + die("Unexpected data parsing Markdown table") if (not $l =~ /\A(\s*\|)?\s*\|\s*\|\s*\|\s*\Z/); + $l = shift @lines; + die("Unexpected data parsing Markdown table") if (not $l =~ /\A\s*(\|\s*\-*\s*)?\|\s*\-*\s*\|\s*\-*\s*\|\s*\Z/); + while (scalar(@lines) >= 1) { + $l = shift @lines; + my $name; + my $desc; + if ($l =~ /\A\s*\|\s*(.*?)\s*\|\s*(.*?)\s*\|\s*(.*?)\s*\|\s*\Z/) { + # c datatype is $1, but we don't care about it here. + $name = $2; + $desc = $3; + } elsif ($l =~ /\A\s*\|\s*(.*?)\s*\|\s*(.*?)\s*\|\s*\Z/) { + $name = $1; + $desc = $2; + } else { + last; # we seem to have run out of table. + } + + $name =~ s/\A\*\*(.*?)\*\*/$1/; + $name =~ s/\A\'\'\'(.*?)\'\'\'/$1/; + $desc = dewikify($wikitype, $desc); + + $str .= ".TP\n"; + $str .= ".I $name\n"; + $str .= "$desc\n"; + } + } else { + die("write me"); + } + } + + if (defined $returns) { + # Check for md link in return type: ([SDL_Renderer](SDL_Renderer) *) + # This would've prevented the next regex from working properly (it'd leave " *)") + $returns =~ s/\A\(\[.*?\]\((.*?)\)/\($1/ms; + # Chop datatype in parentheses off the front. + $returns =~ s/\A\(.*?\) //; + + $returns = dewikify($wikitype, $returns); + $str .= ".SH RETURN VALUE\n"; + $str .= "$returns\n"; + } + + if (defined $examples) { + $str .= ".SH CODE EXAMPLES\n"; + $dewikify_manpage_code_indent = 0; + $str .= dewikify($wikitype, $examples) . "\n"; + $dewikify_manpage_code_indent = 1; + } + + if (defined $threadsafety) { + $str .= ".SH THREAD SAFETY\n"; + $str .= dewikify($wikitype, $threadsafety) . "\n"; + } + + if (defined $version) { + $str .= ".SH AVAILABILITY\n"; + $str .= dewikify($wikitype, $version) . "\n"; + } + + if (defined $related) { + $str .= ".SH SEE ALSO\n"; + # !!! FIXME: lots of code duplication in all of these. + my $v = dewikify($wikitype, $related); + my @desclines = split /\n/, $v; + my $nextstr = ''; + foreach (@desclines) { + s/\(\)\Z//; # Convert "SDL_Func()" to "SDL_Func" + s/\[\[(.*?)\]\]/$1/; # in case some wikilinks remain. + s/\[(.*?)\]\(.*?\)/$1/; # in case some wikilinks remain. + s/\A\*\s*\Z//; + s/\A\/*//; + s/\A\.BR\s+//; # dewikify added this, but we want to handle it. + s/\A\.I\s+//; # dewikify added this, but we want to handle it. + s/\A\.PP\s*//; # dewikify added this, but we want to handle it. + s/\\\(bu//; # dewikify added this, but we want to handle it. 
+ s/\A\s*[\:\*\-]\s*//; + s/\A\s+//; + s/\s+\Z//; + next if $_ eq ''; + my $seealso_symtype = $headersymstype{$_}; + my $seealso_mansection = '3'; + if (defined($seealso_symtype) && ($seealso_symtype >= 3) && ($seealso_symtype <= 5)) { # struct/union/enum/typedef + $seealso_mansection = '3type'; + } + $str .= "$nextstr.BR $_ ($seealso_mansection)"; + $nextstr = ",\n"; + } + $str .= "\n"; + } + + if (0) { + $str .= ".SH COPYRIGHT\n"; + $str .= "This manpage is licensed under\n"; + $str .= ".UR https://creativecommons.org/licenses/by/4.0/\n"; + $str .= "Creative Commons Attribution 4.0 International (CC BY 4.0)\n"; + $str .= ".UE\n"; + $str .= ".PP\n"; + $str .= "This manpage was generated from\n"; + $str .= ".UR $wikiurl/$sym\n"; + $str .= "${projectshortname}'s wiki\n"; + $str .= ".UE\n"; + $str .= "using SDL/build-scripts/wikiheaders.pl"; + $str .= " revision $gitrev" if $gitrev ne ''; + $str .= ".\n"; + $str .= "Please report issues in this manpage at\n"; + $str .= ".UR $bugreporturl\n"; + $str .= "our bugtracker!\n"; + $str .= ".UE\n"; + } + + my $path = "$manpath/man3/$_.$mansection"; + my $tmppath = "$path.tmp"; + open(FH, '>', $tmppath) or die("Can't open '$tmppath': $!\n"); + print FH $str; + close(FH); + rename($tmppath, $path) or die("Can't rename '$tmppath' to '$path': $!\n"); + }
+ +} elsif ($copy_direction == -4) { # --copy-to-latex + # This only takes from the wiki data, since it has sections we omit from the headers, like code examples. + + print STDERR "\n(The --copy-to-latex code is known to not be ready for serious use; send patches, not bug reports, please.)\n\n"; + + $dewikify_mode = 'LaTeX'; + $wordwrap_mode = 'LaTeX'; + + # !!! FIXME: code duplication with --copy-to-manpages section. + + my $introtxt = ''; + if (0) { + open(FH, '<', "$srcpath/LICENSE.txt") or die("Can't open '$srcpath/LICENSE.txt': $!\n"); + while (<FH>) { + chomp; + $introtxt .= ".\\\" $_\n"; + } + close(FH); + } + + if (!$gitrev) { + $gitrev = `cd "$srcpath" ; git rev-list HEAD~..`; + chomp($gitrev); + } + + # !!! FIXME + open(FH, '<', "$srcpath/$versionfname") or die("Can't open '$srcpath/$versionfname': $!\n"); + my $majorver = 0; + my $minorver = 0; + my $microver = 0; + while (<FH>) { + chomp; + if (/$versionmajorregex/) { + $majorver = int($1); + } elsif (/$versionminorregex/) { + $minorver = int($1); + } elsif (/$versionmicroregex/) { + $microver = int($1); + } + } + close(FH); + my $fullversion = "$majorver.$minorver.$microver";
+ + my $latex_fname = "$srcpath/$projectshortname.tex"; + my $latex_tmpfname = "$latex_fname.tmp"; + open(TEXFH, '>', "$latex_tmpfname") or die("Can't open '$latex_tmpfname' for writing: $!\n"); + + print TEXFH <<__EOF__ +\\documentclass{book} + +\\usepackage{listings} +\\usepackage{color} +\\usepackage{hyperref} + +\\definecolor{dkgreen}{rgb}{0,0.6,0} +\\definecolor{gray}{rgb}{0.5,0.5,0.5} +\\definecolor{mauve}{rgb}{0.58,0,0.82} + +\\setcounter{secnumdepth}{0} + +\\lstset{frame=tb, + language=C, + aboveskip=3mm, + belowskip=3mm, + showstringspaces=false, + columns=flexible, + basicstyle={\\small\\ttfamily}, + numbers=none, + numberstyle=\\tiny\\color{gray}, + keywordstyle=\\color{blue}, + commentstyle=\\color{dkgreen}, + stringstyle=\\color{mauve}, + breaklines=true, + breakatwhitespace=true, + tabsize=3 +} + +\\begin{document} +\\frontmatter + +\\title{$projectfullname $majorver.$minorver.$microver Reference Manual} +\\author{The $projectshortname Developers} +\\maketitle + +\\mainmatter + +__EOF__ +; + + # !!! FIXME: Maybe put this in the book intro?
print TEXFH $introtxt; + + # Sort symbols by symbol type, then alphabetically. + my @headersymskeys = sort { + my $symtypea = $headersymstype{$a}; + my $symtypeb = $headersymstype{$b}; + $symtypea = 3 if ($symtypea > 3); + $symtypeb = 3 if ($symtypeb > 3); + my $rc = $symtypea <=> $symtypeb; + if ($rc == 0) { + $rc = lc($a) cmp lc($b); + } + return $rc; + } keys %headersyms; + + my $current_symtype = 0; + my $current_chapter = ''; + + foreach (@headersymskeys) { + my $sym = $_; + next if not defined $wikisyms{$sym}; # don't have a page for that function, skip it. + next if $sym =~ /\A\[category documentation\]/; # not real symbols. + my $symtype = $headersymstype{$sym}; + my $wikitype = $wikitypes{$sym}; + my $sectionsref = $wikisyms{$sym}; + my $remarks = $sectionsref->{'Remarks'}; + my $params = $sectionsref->{'Function Parameters'}; + my $returns = $sectionsref->{'Return Value'}; + my $version = $sectionsref->{'Version'}; + my $threadsafety = $sectionsref->{'Thread Safety'}; + my $related = $sectionsref->{'See Also'}; + my $examples = $sectionsref->{'Code Examples'}; + my $deprecated = $sectionsref->{'Deprecated'}; + my $headerfile = $manpageheaderfiletext; + $headerfile =~ s/\%fname\%/$headersymslocation{$sym}/g; + $headerfile .= "\n"; + + my $brief = $sectionsref->{'[Brief]'}; + my $decl = $headerdecls{$sym}; + my $str = ''; + + if ($current_symtype != $symtype) { + my $newchapter = ''; + if ($symtype == 1) { + $newchapter = 'Functions'; + } elsif ($symtype == 2) { + $newchapter = 'Macros'; + } else { + $newchapter = 'Datatypes'; + } + + if ($current_chapter ne $newchapter) { + $str .= "\n\n\\chapter{$projectshortname $newchapter}\n\n\\clearpage\n\n"; + $current_chapter = $newchapter; + } + $current_symtype = $symtype; + } + + $brief = "$brief"; + $brief =~ s/\A[\s\n]*\= .*? \=\s*?\n+//ms; + $brief =~ s/\A[\s\n]*\=\= .*? \=\=\s*?\n+//ms; + $brief =~ s/\A(.*?\.) /$1\n/; # \brief should only be one sentence, delimited by a period+space. Split if necessary. + my @briefsplit = split /\n/, $brief; + $brief = shift @briefsplit; + $brief = dewikify($wikitype, $brief); + + if (defined $remarks) { + $remarks = dewikify($wikitype, join("\n", @briefsplit) . $remarks); + } + + my $escapedsym = escLaTeX($sym); + $str .= "\\hypertarget{$sym}{%\n\\section{$escapedsym}\\label{$sym}}\n\n"; + $str .= $brief if (defined $brief); + $str .= "\n\n"; + + if (defined $deprecated) { + $str .= "\\subsection{Deprecated}\n\n"; + $str .= dewikify($wikitype, $deprecated) . "\n"; + } + + if (defined $headerfile) { + $str .= "\\subsection{Header File}\n\n"; + $str .= dewikify($wikitype, $headerfile) . "\n"; + } + + $str .= "\\subsection{Syntax}\n\n"; + $str .= "\\begin{lstlisting}\n$decl\n\\end{lstlisting}\n"; + + if (defined $params) { + if (($symtype == 1) || ($symtype == 5)) { + $str .= "\\subsection{Function Parameters}\n\n"; + } elsif ($symtype == 2) { # macro + $str .= "\\subsection{Macro Parameters}\n\n"; + } elsif ($symtype == 3) { # struct/union + $str .= "\\subsection{Fields}\n\n"; + } elsif ($symtype == 4) { # enum + $str .= "\\subsection{Values}\n\n"; + } else { + die("Unexpected symtype $symtype"); + } + + $str .= "\\begin{center}\n"; + $str .= " \\begin{tabular}{ | l | p{0.75\\textwidth} |}\n"; + $str .= " \\hline\n"; + + # !!! FIXME: this table parsing has gotten complicated and is pasted three times in this file; move it to a subroutine! 
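+ # Illustrative inputs for the table parser below (hypothetical rows, not from a real wiki page): + # MediaWiki tables arrive as "{|", then one cell per line ("|'''x'''", "|the X coordinate."), with "|-" between rows and "|}" at the end. + # Markdown tables arrive as "| **x** | the X coordinate. |" rows behind an empty header row and a "| --- | --- |" divider.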
+ my @lines = split /\n/, $params; + if ($wikitype eq 'mediawiki') { + die("Unexpected data parsing MediaWiki table") if (shift @lines ne '{|'); # Dump the '{|' start + while (scalar(@lines) >= 3) { + my $name = shift @lines; + my $desc = shift @lines; + my $terminator = shift @lines; # the '|-' or '|}' line. + last if ($terminator ne '|-') and ($terminator ne '|}'); # we seem to have run out of table. + $name =~ s/\A\|\s*//; + $name =~ s/\A\*\*(.*?)\*\*/$1/; + $name =~ s/\A\'\'\'(.*?)\'\'\'/$1/; + $name = escLaTeX($name); + $desc =~ s/\A\|\s*//; + $desc = dewikify($wikitype, $desc); + #print STDERR "FN: $sym NAME: $name DESC: $desc TERM: $terminator\n"; + $str .= " \\textbf{$name} & $desc \\\\ \\hline\n"; + } + } elsif ($wikitype eq 'md') { + my $l; + $l = shift @lines; + die("Unexpected data parsing Markdown table") if (not $l =~ /\A(\s*\|)?\s*\|\s*\|\s*\|\s*\Z/); + $l = shift @lines; + die("Unexpected data parsing Markdown table") if (not $l =~ /\A\s*(\|\s*\-*\s*)?\|\s*\-*\s*\|\s*\-*\s*\|\s*\Z/); + while (scalar(@lines) >= 1) { + $l = shift @lines; + my $name; + my $desc; + if ($l =~ /\A\s*\|\s*(.*?)\s*\|\s*(.*?)\s*\|\s*(.*?)\s*\|\s*\Z/) { + # c datatype is $1, but we don't care about it here. + $name = $2; + $desc = $3; + } elsif ($l =~ /\A\s*\|\s*(.*?)\s*\|\s*(.*?)\s*\|\s*\Z/) { + $name = $1; + $desc = $2; + } else { + last; # we seem to have run out of table. + } + + $name =~ s/\A\*\*(.*?)\*\*/$1/; + $name =~ s/\A\'\'\'(.*?)\'\'\'/$1/; + $name = escLaTeX($name); + $desc = dewikify($wikitype, $desc); + $str .= " \\textbf{$name} & $desc \\\\ \\hline\n"; + } + } else { + die("write me"); + } + + $str .= " \\end{tabular}\n"; + $str .= "\\end{center}\n"; + } + + if (defined $returns) { + $returns = dewikify($wikitype, $returns); + $returns =~ s/\A\(.*?\)\s*//; # Chop datatype in parentheses off the front. + $str .= "\\subsection{Return Value}\n\n"; + $str .= "$returns\n"; + } + + if (defined $remarks) { + $str .= "\\subsection{Remarks}\n\n"; + $str .= $remarks . "\n"; + } + + if (defined $examples) { + $str .= "\\subsection{Code Examples}\n\n"; + $dewikify_manpage_code_indent = 0; + $str .= dewikify($wikitype, $examples) . "\n"; + $dewikify_manpage_code_indent = 1; + } + + if (defined $threadsafety) { + $str .= "\\subsection{Thread Safety}\n\n"; + $str .= dewikify($wikitype, $threadsafety) . "\n"; + } + + if (defined $version) { + $str .= "\\subsection{Version}\n\n"; + $str .= dewikify($wikitype, $version) . "\n"; + } + + if (defined $related) { + $str .= "\\subsection{See Also}\n\n"; + $str .= "\\begin{itemize}\n"; + # !!! FIXME: lots of code duplication in all of these. + my $v = dewikify($wikitype, $related); + my @desclines = split /\n/, $v; + my $nextstr = ''; + foreach (@desclines) { + s/\(\)\Z//; # Convert "SDL_Func()" to "SDL_Func" + s/\[\[(.*?)\]\]/$1/; # in case some wikilinks remain. + s/\[(.*?)\]\(.*?\)/$1/; # in case some wikilinks remain. + s/\A\*\s*\Z//; + s/\A\s*\\item\s*//; + s/\A\/*//; + s/\A\s*[\:\*\-]\s*//; + s/\A\s+//; + s/\s+\Z//; + next if $_ eq ''; + next if $_ eq '\begin{itemize}'; + next if $_ eq '\end{itemize}'; + $str .= " \\item $_\n"; + } + $str .= "\\end{itemize}\n"; + $str .= "\n"; + } + + # !!! FIXME: Maybe put copyright in the book intro? 
+ if (0) { + $str .= ".SH COPYRIGHT\n"; + $str .= "This manpage is licensed under\n"; + $str .= ".UR https://creativecommons.org/licenses/by/4.0/\n"; + $str .= "Creative Commons Attribution 4.0 International (CC BY 4.0)\n"; + $str .= ".UE\n"; + $str .= ".PP\n"; + $str .= "This manpage was generated from\n"; + $str .= ".UR $wikiurl/$sym\n"; + $str .= "${projectshortname}'s wiki\n"; + $str .= ".UE\n"; + $str .= "using SDL/build-scripts/wikiheaders.pl"; + $str .= " revision $gitrev" if $gitrev ne ''; + $str .= ".\n"; + $str .= "Please report issues in this manpage at\n"; + $str .= ".UR $bugreporturl\n"; + $str .= "our bugtracker!\n"; + $str .= ".UE\n"; + } + + $str .= "\\clearpage\n\n"; + + print TEXFH $str; + } + + print TEXFH "\\end{document}\n\n"; + close(TEXFH); + rename($latex_tmpfname, $latex_fname) or die("Can't rename '$latex_tmpfname' to '$latex_fname': $!\n"); + +} elsif ($copy_direction == -3) { # --report-coverage-gaps + foreach (@coverage_gap) { + print("$_\n"); + } +} + +# end of wikiheaders.pl ... +
diff --git a/cmake/GetGitRevisionDescription.cmake b/cmake/GetGitRevisionDescription.cmake new file mode 100644 index 00000000..a08895c6 --- /dev/null +++ b/cmake/GetGitRevisionDescription.cmake @@ -0,0 +1,284 @@ +# - Returns a version string from Git +# +# These functions force a re-configure on each git commit so that you can +# trust the values of the variables in your build system. +# +# get_git_head_revision(<refspecvar> <hashvar> [ALLOW_LOOKING_ABOVE_CMAKE_SOURCE_DIR]) +# +# Returns the refspec and sha hash of the current head revision +# +# git_describe(<var> [<additional arguments to git describe> ...]) +# +# Returns the results of git describe on the source tree, and adjusts +# the output so that it tests false if an error occurs. +# +# git_describe_working_tree(<var> [<additional arguments to git describe> ...]) +# +# Returns the results of git describe on the working tree (--dirty option), +# and adjusts the output so that it tests false if an error occurs. +# +# git_get_exact_tag(<var> [<additional arguments to git describe> ...]) +# +# Returns the results of git describe --exact-match on the source tree, +# and adjusts the output so that it tests false if there was no exact +# matching tag. +# +# git_local_changes(<var>) +# +# Returns either "CLEAN" or "DIRTY" with respect to uncommitted changes. +# Uses the return code of "git diff-index --quiet HEAD --". +# Does not regard untracked files. +# +# Requires CMake 2.6 or newer (uses the 'function' command) +# +# Original Author: +# 2009-2020 Ryan Pavlik +# http://academic.cleardefinition.com +# +# Copyright 2009-2013, Iowa State University. +# Copyright 2013-2020, Ryan Pavlik +# Copyright 2013-2020, Contributors +# SPDX-License-Identifier: BSL-1.0 +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or copy at +# http://www.boost.org/LICENSE_1_0.txt) + +if(__get_git_revision_description) + return() +endif() +set(__get_git_revision_description YES) + +# We must run the following at "include" time, not at function call time, +# to find the path to this module rather than the path to a calling list file +get_filename_component(_gitdescmoddir ${CMAKE_CURRENT_LIST_FILE} PATH) + +# Function _git_find_closest_git_dir finds the next closest .git directory +# that is part of any directory in the path defined by _start_dir. +# The result is returned in the parent scope variable whose name is passed +# as variable _git_dir_var. If no .git directory can be found, the +# function returns an empty string via _git_dir_var.
+# +# Example: given the path C:/bla/foo/bar, if C:/bla/.git exists and +# neither foo nor bar contains a file/directory named .git, this will +# return C:/bla/.git +# +function(_git_find_closest_git_dir _start_dir _git_dir_var) + set(cur_dir "${_start_dir}") + set(git_dir "${_start_dir}/.git") + while(NOT EXISTS "${git_dir}") + # .git dir not found, search parent directories + set(git_previous_parent "${cur_dir}") + get_filename_component(cur_dir "${cur_dir}" DIRECTORY) + if(cur_dir STREQUAL git_previous_parent) + # We have reached the root directory, we are not in git + set(${_git_dir_var} + "" + PARENT_SCOPE) + return() + endif() + set(git_dir "${cur_dir}/.git") + endwhile() + set(${_git_dir_var} + "${git_dir}" + PARENT_SCOPE) +endfunction() + +function(get_git_head_revision _refspecvar _hashvar) + _git_find_closest_git_dir("${CMAKE_CURRENT_SOURCE_DIR}" GIT_DIR) + + if("${ARGN}" STREQUAL "ALLOW_LOOKING_ABOVE_CMAKE_SOURCE_DIR") + set(ALLOW_LOOKING_ABOVE_CMAKE_SOURCE_DIR TRUE) + else() + set(ALLOW_LOOKING_ABOVE_CMAKE_SOURCE_DIR FALSE) + endif() + if(NOT "${GIT_DIR}" STREQUAL "") + file(RELATIVE_PATH _relative_to_source_dir "${CMAKE_SOURCE_DIR}" + "${GIT_DIR}") + if("${_relative_to_source_dir}" MATCHES "[.][.]" AND NOT ALLOW_LOOKING_ABOVE_CMAKE_SOURCE_DIR) + # We've gone above the CMake root dir. + set(GIT_DIR "") + endif() + endif() + if("${GIT_DIR}" STREQUAL "") + set(${_refspecvar} + "GITDIR-NOTFOUND" + PARENT_SCOPE) + set(${_hashvar} + "GITDIR-NOTFOUND" + PARENT_SCOPE) + return() + endif() + + # Check if the current source dir is a git submodule or a worktree. + # In both cases .git is a file instead of a directory. + # + if(NOT IS_DIRECTORY ${GIT_DIR}) + # The following git command will return a non empty string that + # points to the super project working tree if the current + # source dir is inside a git submodule. + # Otherwise the command will return an empty string. + # + execute_process( + COMMAND "${GIT_EXECUTABLE}" rev-parse + --show-superproject-working-tree + WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" + OUTPUT_VARIABLE out + ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE) + if(NOT "${out}" STREQUAL "") + # If out is non-empty, GIT_DIR/CMAKE_CURRENT_SOURCE_DIR is in a submodule + file(READ ${GIT_DIR} submodule) + string(REGEX REPLACE "gitdir: (.*)$" "\\1" GIT_DIR_RELATIVE + ${submodule}) + string(STRIP ${GIT_DIR_RELATIVE} GIT_DIR_RELATIVE) + get_filename_component(SUBMODULE_DIR ${GIT_DIR} PATH) + get_filename_component(GIT_DIR ${SUBMODULE_DIR}/${GIT_DIR_RELATIVE} + ABSOLUTE) + set(HEAD_SOURCE_FILE "${GIT_DIR}/HEAD") + else() + # GIT_DIR/CMAKE_CURRENT_SOURCE_DIR is in a worktree + file(READ ${GIT_DIR} worktree_ref) + # The .git directory contains a path to the worktree information directory + # inside the parent git repo of the worktree.
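+ # For illustration (hypothetical path): a worktree's .git file holds a single + # line such as "gitdir: /home/user/project/.git/worktrees/mybranch"; the + # REGEX REPLACE below keeps only the path after "gitdir: ".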
+ # + string(REGEX REPLACE "gitdir: (.*)$" "\\1" git_worktree_dir + ${worktree_ref}) + string(STRIP ${git_worktree_dir} git_worktree_dir) + _git_find_closest_git_dir("${git_worktree_dir}" GIT_DIR) + set(HEAD_SOURCE_FILE "${git_worktree_dir}/HEAD") + endif() + else() + set(HEAD_SOURCE_FILE "${GIT_DIR}/HEAD") + endif() + set(GIT_DATA "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/git-data") + if(NOT EXISTS "${GIT_DATA}") + file(MAKE_DIRECTORY "${GIT_DATA}") + endif() + + if(NOT EXISTS "${HEAD_SOURCE_FILE}") + return() + endif() + set(HEAD_FILE "${GIT_DATA}/HEAD") + configure_file("${HEAD_SOURCE_FILE}" "${HEAD_FILE}" COPYONLY) + + configure_file("${_gitdescmoddir}/GetGitRevisionDescription.cmake.in" + "${GIT_DATA}/grabRef.cmake" @ONLY) + include("${GIT_DATA}/grabRef.cmake") + + set(${_refspecvar} + "${HEAD_REF}" + PARENT_SCOPE) + set(${_hashvar} + "${HEAD_HASH}" + PARENT_SCOPE) +endfunction() + +function(git_describe _var) + if(NOT GIT_FOUND) + find_package(Git QUIET) + endif() + get_git_head_revision(refspec hash) + if(NOT GIT_FOUND) + set(${_var} + "GIT-NOTFOUND" + PARENT_SCOPE) + return() + endif() + if(NOT hash) + set(${_var} + "HEAD-HASH-NOTFOUND" + PARENT_SCOPE) + return() + endif() + + # TODO sanitize + #if((${ARGN}" MATCHES "&&") OR + # (ARGN MATCHES "||") OR + # (ARGN MATCHES "\\;")) + # message("Please report the following error to the project!") + # message(FATAL_ERROR "Looks like someone's doing something nefarious with git_describe! Passed arguments ${ARGN}") + #endif() + + #message(STATUS "Arguments to execute_process: ${ARGN}") + + execute_process( + COMMAND "${GIT_EXECUTABLE}" describe --tags --always ${hash} ${ARGN} + WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" + RESULT_VARIABLE res + OUTPUT_VARIABLE out + ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE) + if(NOT res EQUAL 0) + set(out "${out}-${res}-NOTFOUND") + endif() + + set(${_var} + "${out}" + PARENT_SCOPE) +endfunction() + +function(git_describe_working_tree _var) + if(NOT GIT_FOUND) + find_package(Git QUIET) + endif() + if(NOT GIT_FOUND) + set(${_var} + "GIT-NOTFOUND" + PARENT_SCOPE) + return() + endif() + + execute_process( + COMMAND "${GIT_EXECUTABLE}" describe --dirty ${ARGN} + WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" + RESULT_VARIABLE res + OUTPUT_VARIABLE out + ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE) + if(NOT res EQUAL 0) + set(out "${out}-${res}-NOTFOUND") + endif() + + set(${_var} + "${out}" + PARENT_SCOPE) +endfunction() + +function(git_get_exact_tag _var) + git_describe(out --exact-match ${ARGN}) + set(${_var} + "${out}" + PARENT_SCOPE) +endfunction() + +function(git_local_changes _var) + if(NOT GIT_FOUND) + find_package(Git QUIET) + endif() + get_git_head_revision(refspec hash) + if(NOT GIT_FOUND) + set(${_var} + "GIT-NOTFOUND" + PARENT_SCOPE) + return() + endif() + if(NOT hash) + set(${_var} + "HEAD-HASH-NOTFOUND" + PARENT_SCOPE) + return() + endif() + + execute_process( + COMMAND "${GIT_EXECUTABLE}" diff-index --quiet HEAD -- + WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" + RESULT_VARIABLE res + OUTPUT_VARIABLE out + ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE) + if(res EQUAL 0) + set(${_var} + "CLEAN" + PARENT_SCOPE) + else() + set(${_var} + "DIRTY" + PARENT_SCOPE) + endif() +endfunction() diff --git a/cmake/GetGitRevisionDescription.cmake.in b/cmake/GetGitRevisionDescription.cmake.in new file mode 100644 index 00000000..116efc4e --- /dev/null +++ b/cmake/GetGitRevisionDescription.cmake.in @@ -0,0 +1,43 @@ +# +# Internal file for GetGitRevisionDescription.cmake +# +# Requires CMake 2.6 or newer 
(uses the 'function' command) +# +# Original Author: +# 2009-2010 Ryan Pavlik +# http://academic.cleardefinition.com +# Iowa State University HCI Graduate Program/VRAC +# +# Copyright 2009-2012, Iowa State University +# Copyright 2011-2015, Contributors +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or copy at +# http://www.boost.org/LICENSE_1_0.txt) +# SPDX-License-Identifier: BSL-1.0 + +set(HEAD_HASH) + +file(READ "@HEAD_FILE@" HEAD_CONTENTS LIMIT 1024) + +string(STRIP "${HEAD_CONTENTS}" HEAD_CONTENTS) +if(HEAD_CONTENTS MATCHES "ref") + # named branch + string(REPLACE "ref: " "" HEAD_REF "${HEAD_CONTENTS}") + if(EXISTS "@GIT_DIR@/${HEAD_REF}") + configure_file("@GIT_DIR@/${HEAD_REF}" "@GIT_DATA@/head-ref" COPYONLY) + else() + configure_file("@GIT_DIR@/packed-refs" "@GIT_DATA@/packed-refs" COPYONLY) + file(READ "@GIT_DATA@/packed-refs" PACKED_REFS) + if(${PACKED_REFS} MATCHES "([0-9a-z]*) ${HEAD_REF}") + set(HEAD_HASH "${CMAKE_MATCH_1}") + endif() + endif() +else() + # detached HEAD + configure_file("@GIT_DIR@/HEAD" "@GIT_DATA@/head-ref" COPYONLY) +endif() + +if(NOT HEAD_HASH) + file(READ "@GIT_DATA@/head-ref" HEAD_HASH LIMIT 1024) + string(STRIP "${HEAD_HASH}" HEAD_HASH) +endif() diff --git a/cmake/PhysFSConfig.cmake.in b/cmake/PhysFSConfig.cmake.in new file mode 100644 index 00000000..fec6cd6c --- /dev/null +++ b/cmake/PhysFSConfig.cmake.in @@ -0,0 +1,44 @@ +include(FeatureSummary) +set_package_properties(PhysicsFS PROPERTIES + URL "https://icculus.org/physfs/" + DESCRIPTION "Library to provide abstract access to various archives" +) + +@PACKAGE_INIT@ + +set(PhysFS_PhysFS_FOUND) + +set(PhysFS_PhysFS-shared_FOUND) +if(EXISTS "${CMAKE_CURRENT_LIST_DIR}/PhysFS-shared-targets.cmake") + include("${CMAKE_CURRENT_LIST_DIR}/PhysFS-shared-targets.cmake") + set(PhysFS_PhysFS-shared_FOUND TRUE) + set(PhysFS_PhysFS_FOUND TRUE) +endif() + +set(PhysFS_PhysFS-static_FOUND) +if(EXISTS "${CMAKE_CURRENT_LIST_DIR}/PhysFS-static-targets.cmake") + include("${CMAKE_CURRENT_LIST_DIR}/PhysFS-static-targets.cmake") + set(PhysFS_PhysFS-static_FOUND TRUE) + set(PhysFS_PhysFS_FOUND TRUE) +endif() + +function(_physfsl_create_target_alias_compat NEW_TARGET TARGET) + if(CMAKE_VERSION VERSION_LESS "3.18") + # Aliasing local targets is not supported on CMake < 3.18, so make it global. 
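+        # A sketch of what this compat path gives consumers (the target name "app" is + # illustrative, not part of this file): target_link_libraries(app PRIVATE PhysFS::PhysFS) + # resolves through INTERFACE_LINK_LIBRARIES to the real PhysFS::PhysFS-shared or -static target.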
+ add_library(${NEW_TARGET} INTERFACE IMPORTED) + set_target_properties(${NEW_TARGET} PROPERTIES INTERFACE_LINK_LIBRARIES "${TARGET}") + else() + add_library(${NEW_TARGET} ALIAS ${TARGET}) + endif() +endfunction() + +# Make sure PhysFS::PhysFS always exists +if(NOT TARGET PhysFS::PhysFS) + if(TARGET PhysFS::PhysFS-shared) + _physfsl_create_target_alias_compat(PhysFS::PhysFS PhysFS::PhysFS-shared) + elseif(TARGET PhysFS::PhysFS-static) + _physfsl_create_target_alias_compat(PhysFS::PhysFS PhysFS::PhysFS-static) + endif() +endif() + +check_required_components(PhysFS)
diff --git a/cmake/PrivateSdlFunctions.cmake b/cmake/PrivateSdlFunctions.cmake new file mode 100644 index 00000000..76226f7a --- /dev/null +++ b/cmake/PrivateSdlFunctions.cmake @@ -0,0 +1,355 @@ +# This file is shared amongst SDL_image/SDL_mixer/SDL_ttf + +include(CheckCCompilerFlag) +include(CheckCSourceCompiles) +include(CMakePushCheckState) + +macro(sdl_calculate_derived_version_variables MAJOR MINOR MICRO) + set(SO_VERSION_MAJOR "0") + set(SO_VERSION_MINOR "${MINOR}") + set(SO_VERSION_MICRO "${MICRO}") + set(SO_VERSION "${SO_VERSION_MAJOR}.${SO_VERSION_MINOR}.${SO_VERSION_MICRO}") + + if(MINOR MATCHES "[02468]$") + math(EXPR DYLIB_COMPAT_VERSION_MAJOR "100 * ${MINOR} + 1") + set(DYLIB_COMPAT_VERSION_MINOR "0") + math(EXPR DYLIB_CURRENT_VERSION_MAJOR "${DYLIB_COMPAT_VERSION_MAJOR}") + set(DYLIB_CURRENT_VERSION_MINOR "${MICRO}") + else() + math(EXPR DYLIB_COMPAT_VERSION_MAJOR "100 * ${MINOR} + ${MICRO} + 1") + set(DYLIB_COMPAT_VERSION_MINOR "0") + math(EXPR DYLIB_CURRENT_VERSION_MAJOR "${DYLIB_COMPAT_VERSION_MAJOR}") + set(DYLIB_CURRENT_VERSION_MINOR "0") + endif() + set(DYLIB_COMPAT_VERSION_MICRO "0") + set(DYLIB_CURRENT_VERSION_MICRO "0") + + set(DYLIB_CURRENT_VERSION "${DYLIB_CURRENT_VERSION_MAJOR}.${DYLIB_CURRENT_VERSION_MINOR}.${DYLIB_CURRENT_VERSION_MICRO}") + set(DYLIB_COMPAT_VERSION "${DYLIB_COMPAT_VERSION_MAJOR}.${DYLIB_COMPAT_VERSION_MINOR}.${DYLIB_COMPAT_VERSION_MICRO}") +endmacro() + +function(read_absolute_symlink DEST PATH) + file(READ_SYMLINK "${PATH}" p) + if(NOT IS_ABSOLUTE "${p}") + get_filename_component(pdir "${PATH}" DIRECTORY) + set(p "${pdir}/${p}") + endif() + get_filename_component(p "${p}" ABSOLUTE) + set("${DEST}" "${p}" PARENT_SCOPE) +endfunction() + +function(win32_implib_identify_dll DEST IMPLIB) + cmake_parse_arguments(ARGS "NOTFATAL" "" "" ${ARGN}) + if(CMAKE_DLLTOOL) + execute_process( + COMMAND "${CMAKE_DLLTOOL}" --identify "${IMPLIB}" + RESULT_VARIABLE retcode + OUTPUT_VARIABLE stdout + ERROR_VARIABLE stderr) + if(NOT retcode EQUAL 0) + if(NOT ARGS_NOTFATAL) + message(FATAL_ERROR "${CMAKE_DLLTOOL} failed.") + else() + set("${DEST}" "${DEST}-NOTFOUND" PARENT_SCOPE) + return() + endif() + endif() + string(STRIP "${stdout}" result) + set(${DEST} "${result}" PARENT_SCOPE) + elseif(MSVC) + get_filename_component(CMAKE_C_COMPILER_DIRECTORY "${CMAKE_C_COMPILER}" DIRECTORY CACHE) + find_program(CMAKE_DUMPBIN NAMES dumpbin PATHS "${CMAKE_C_COMPILER_DIRECTORY}") + if(CMAKE_DUMPBIN) + execute_process( + COMMAND "${CMAKE_DUMPBIN}" "-headers" "${IMPLIB}" + RESULT_VARIABLE retcode + OUTPUT_VARIABLE stdout + ERROR_VARIABLE stderr) + if(NOT retcode EQUAL 0) + if(NOT ARGS_NOTFATAL) + message(FATAL_ERROR "dumpbin failed.") + else() + set(${DEST} "${DEST}-NOTFOUND" PARENT_SCOPE) + return() + endif() + endif() + string(REGEX MATCH "DLL name[ ]+:[ ]+([^\n]+)\n" match "${stdout}") + if(NOT match) + if(NOT ARGS_NOTFATAL) + message(FATAL_ERROR "dumpbin did not find any associated dll for ${IMPLIB}.")
+ else() + set(${DEST} "${DEST}-NOTFOUND" PARENT_SCOPE) + return() + endif() + endif() + set(result "${CMAKE_MATCH_1}") + set(${DEST} "${result}" PARENT_SCOPE) + else() + message(FATAL_ERROR "Cannot find dumpbin, please set CMAKE_DUMPBIN cmake variable") + endif() + else() + if(NOT ARGS_NOTFATAL) + message(FATAL_ERROR "Don't know how to identify dll from import library. Set CMAKE_DLLTOOL (for mingw) or CMAKE_DUMPBIN (for MSVC)") + else() + set(${DEST} "${DEST}-NOTFOUND") + endif() + endif() +endfunction() + +function(get_actual_target) + set(dst "${ARGV0}") + set(target "${${dst}}") + set(input "${target}") + get_target_property(alias "${target}" ALIASED_TARGET) + while(alias) + set(target "${alias}") + get_target_property(alias "${target}" ALIASED_TARGET) + endwhile() + message(DEBUG "get_actual_target(\"${input}\") -> \"${target}\"") + set("${dst}" "${target}" PARENT_SCOPE) +endfunction() + +function(target_get_dynamic_library DEST TARGET) + set(result) + get_actual_target(TARGET) + if(WIN32) + # Use the target dll of the import library + set(props_to_check IMPORTED_IMPLIB) + if(CMAKE_BUILD_TYPE) + list(APPEND props_to_check IMPORTED_IMPLIB_${CMAKE_BUILD_TYPE}) + endif() + list(APPEND props_to_check IMPORTED_LOCATION) + if(CMAKE_BUILD_TYPE) + list(APPEND props_to_check IMPORTED_LOCATION_${CMAKE_BUILD_TYPE}) + endif() + foreach (config_type ${CMAKE_CONFIGURATION_TYPES} RELEASE DEBUG RELWITHDEBINFO MINSIZEREL) + list(APPEND props_to_check IMPORTED_IMPLIB_${config_type}) + list(APPEND props_to_check IMPORTED_LOCATION_${config_type}) + endforeach() + + foreach(prop_to_check ${props_to_check}) + if(NOT result) + get_target_property(propvalue "${TARGET}" ${prop_to_check}) + if(propvalue AND EXISTS "${propvalue}") + win32_implib_identify_dll(result "${propvalue}" NOTFATAL) + endif() + endif() + endforeach() + else() + # 1. find the target library a file might be symbolically linking to + # 2. find all other files in the same folder that symbolically link to it + # 3. sort all these files, and select the first item on Linux, and the last on macOS
+ set(location_properties IMPORTED_LOCATION) + if(CMAKE_BUILD_TYPE) + list(APPEND location_properties IMPORTED_LOCATION_${CMAKE_BUILD_TYPE}) + endif() + foreach (config_type ${CMAKE_CONFIGURATION_TYPES} RELEASE DEBUG RELWITHDEBINFO MINSIZEREL) + list(APPEND location_properties IMPORTED_LOCATION_${config_type}) + endforeach() + if(APPLE) + set(valid_shared_library_regex "\\.[0-9]+\\.dylib$") + else() + set(valid_shared_library_regex "\\.so\\.([0-9.]+)?[0-9]") + endif() + foreach(location_property ${location_properties}) + if(NOT result) + get_target_property(library_path "${TARGET}" ${location_property}) + message(DEBUG "get_target_property(${TARGET} ${location_property}) -> ${library_path}") + if(EXISTS "${library_path}") + get_filename_component(library_path "${library_path}" ABSOLUTE) + while (IS_SYMLINK "${library_path}") + read_absolute_symlink(library_path "${library_path}") + endwhile() + message(DEBUG "${TARGET} -> ${library_path}") + get_filename_component(libdir "${library_path}" DIRECTORY) + file(GLOB subfiles "${libdir}/*") + set(similar_files "${library_path}") + foreach(subfile ${subfiles}) + if(IS_SYMLINK "${subfile}") + read_absolute_symlink(subfile_target "${subfile}") + while(IS_SYMLINK "${subfile_target}") + read_absolute_symlink(subfile_target "${subfile_target}") + endwhile() + get_filename_component(subfile_target "${subfile_target}" ABSOLUTE) + if(subfile_target STREQUAL library_path AND subfile MATCHES "${valid_shared_library_regex}") + list(APPEND similar_files "${subfile}") + endif() + endif() + endforeach() + list(SORT similar_files) + message(DEBUG "files that are similar to \"${library_path}\"=${similar_files}") + if(APPLE) + list(REVERSE similar_files) + endif() + list(GET similar_files 0 item) + get_filename_component(result "${item}" NAME) + endif() + endif() + endforeach() + endif() + if(result) + string(TOLOWER "${result}" result_lower) + if(WIN32 OR OS2) + if(NOT result_lower MATCHES ".*dll") + message(FATAL_ERROR "\"${result}\" is not a .dll library") + endif() + elseif(APPLE) + if(NOT result_lower MATCHES ".*dylib.*") + message(FATAL_ERROR "\"${result}\" is not a .dylib shared library") + endif() + else() + if(NOT result_lower MATCHES ".*so.*") + message(FATAL_ERROR "\"${result}\" is not a .so shared library") + endif() + endif() + else() + get_target_property(target_type ${TARGET} TYPE) + if(target_type MATCHES "SHARED_LIBRARY|MODULE_LIBRARY") + # OK + elseif(target_type MATCHES "STATIC_LIBRARY|OBJECT_LIBRARY|INTERFACE_LIBRARY|EXECUTABLE") + message(SEND_ERROR "${TARGET} is not a shared library, but has type=${target_type}") + else() + message(WARNING "Unable to extract dynamic library from target=${TARGET}, type=${target_type}.") + endif() + # TARGET_SONAME_FILE is not allowed for DLL target platforms.
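+ # (Illustration only: at generate time these expressions expand to a library file + # name, e.g. "physfs.dll" via TARGET_FILE on Windows or "libphysfs.so.1" via + # TARGET_SONAME_FILE elsewhere; the exact name depends on the target's properties.)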
+ if(WIN32) + set(result "$<TARGET_FILE:${TARGET}>") + else() + set(result "$<TARGET_SONAME_FILE:${TARGET}>") + endif() + endif() + set(${DEST} ${result} PARENT_SCOPE) +endfunction() + +function(sdl_check_project_in_subfolder relative_subfolder name vendored_option) + cmake_parse_arguments(ARG "" "FILE" "" ${ARGN}) + if(NOT ARG_FILE) + set(ARG_FILE "CMakeLists.txt") + endif() + if(NOT EXISTS "${PROJECT_SOURCE_DIR}/${relative_subfolder}/${ARG_FILE}") + message(FATAL_ERROR "Could not find ${ARG_FILE} for ${name} in ${relative_subfolder}.\n" + "Run the download script in the external folder, or re-configure with -D${vendored_option}=OFF to use system packages.") + endif() +endfunction() + +macro(sdl_check_linker_flag flag var) + # FIXME: Use CheckLinkerFlag module once cmake minimum version >= 3.18 + cmake_push_check_state(RESET) + set(CMAKE_REQUIRED_LINK_OPTIONS "${flag}") + check_c_source_compiles("int main() { return 0; }" ${var} FAIL_REGEX "(unsupported|syntax error|unrecognized option)") + cmake_pop_check_state() +endmacro() + +function(SDL_detect_linker) + if(CMAKE_VERSION VERSION_LESS 3.29) + if(NOT DEFINED SDL_CMAKE_C_COMPILER_LINKER_ID) + execute_process(COMMAND ${CMAKE_LINKER} -v OUTPUT_VARIABLE LINKER_OUTPUT ERROR_VARIABLE LINKER_OUTPUT) + string(REGEX REPLACE "[\r\n]" " " LINKER_OUTPUT "${LINKER_OUTPUT}") + if(LINKER_OUTPUT MATCHES ".*Microsoft.*") + set(linker MSVC) + else() + set(linker GNUlike) + endif() + message(STATUS "Linker identification: ${linker}") + set(SDL_CMAKE_C_COMPILER_LINKER_ID "${linker}" CACHE STRING "Linker identification") + mark_as_advanced(SDL_CMAKE_C_COMPILER_LINKER_ID) + endif() + set(CMAKE_C_COMPILER_LINKER_ID "${SDL_CMAKE_C_COMPILER_LINKER_ID}" PARENT_SCOPE) + endif() +endfunction() + +function(check_linker_support_version_script VAR) + SDL_detect_linker() + if(CMAKE_C_COMPILER_LINKER_ID MATCHES "^(MSVC)$") + set(LINKER_SUPPORTS_VERSION_SCRIPT FALSE) + else() + cmake_push_check_state(RESET) + file(WRITE "${CMAKE_CURRENT_BINARY_DIR}/dummy.sym" "n_0 {\n global:\n func;\n local: *;\n};\n") + list(APPEND CMAKE_REQUIRED_LINK_OPTIONS "-Wl,--version-script=${CMAKE_CURRENT_BINARY_DIR}/dummy.sym") + check_c_source_compiles("int func(void) {return 0;} int main(int argc,char*argv[]){(void)argc;(void)argv;return func();}" LINKER_SUPPORTS_VERSION_SCRIPT FAIL_REGEX "(unsupported|syntax error|unrecognized option)") + cmake_pop_check_state() + endif() + set(${VAR} "${LINKER_SUPPORTS_VERSION_SCRIPT}" PARENT_SCOPE) +endfunction() + +function(sdl_target_link_options_no_undefined TARGET) + if(NOT MSVC AND NOT CMAKE_SYSTEM_NAME MATCHES ".*OpenBSD.*") + if(CMAKE_C_COMPILER_ID MATCHES "AppleClang") + target_link_options(${TARGET} PRIVATE "-Wl,-undefined,error") + else() + sdl_check_linker_flag("-Wl,--no-undefined" HAVE_WL_NO_UNDEFINED) + if(HAVE_WL_NO_UNDEFINED AND NOT ((CMAKE_C_COMPILER_ID MATCHES "Clang") AND WIN32)) + target_link_options(${TARGET} PRIVATE "-Wl,--no-undefined") + endif() + endif() + endif() +endfunction() + +function(sdl_target_link_option_version_file TARGET VERSION_SCRIPT) + check_linker_support_version_script(HAVE_WL_VERSION_SCRIPT) + if(HAVE_WL_VERSION_SCRIPT) + target_link_options(${TARGET} PRIVATE "-Wl,--version-script=${VERSION_SCRIPT}") + set_property(TARGET ${TARGET} APPEND PROPERTY LINK_DEPENDS "${VERSION_SCRIPT}") + else() + if(LINUX OR ANDROID) + message(FATAL_ERROR "Linker does not support '-Wl,--version-script=xxx.sym'. This is required on the current host platform.")
+ endif() + endif() +endfunction() + +function(sdl_add_warning_options TARGET) + cmake_parse_arguments(ARGS "" "WARNING_AS_ERROR" "" ${ARGN}) + if(MSVC) + target_compile_options(${TARGET} PRIVATE /W2) + else() + target_compile_options(${TARGET} PRIVATE -Wall -Wextra -Wno-unused-parameter) + endif() + if(ARGS_WARNING_AS_ERROR) + if(MSVC) + target_compile_options(${TARGET} PRIVATE /WX) + else() + target_compile_options(${TARGET} PRIVATE -Werror) + endif() + endif() +endfunction() + +function(sdl_no_deprecated_errors TARGET) + check_c_compiler_flag(-Wno-error=deprecated-declarations HAVE_WNO_ERROR_DEPRECATED_DECLARATIONS) + if(HAVE_WNO_ERROR_DEPRECATED_DECLARATIONS) + target_compile_options(${TARGET} PRIVATE "-Wno-error=deprecated-declarations") +endif() +endfunction() + +function(sdl_get_git_revision_hash VARNAME) + set("${VARNAME}" "" CACHE STRING "${PROJECT_NAME} revision") + set(revision "${${VARNAME}}") + if(NOT revision) + if(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/VERSION.txt") + # If VERSION.txt exists, it contains the SDL version + file(READ "${CMAKE_CURRENT_SOURCE_DIR}/VERSION.txt" revision_version) + string(STRIP "${revision_version}" revision_version) + else() + # If VERSION.txt does not exist, use git to calculate a version + git_describe(revision_version) + if(NOT revision_version) + set(revision_version "${PROJECT_VERSION}-no-vcs") + endif() + endif() + set(revision "${revision_version}") + endif() + set("${VARNAME}" "${revision}" PARENT_SCOPE) +endfunction() + +function(SDL_install_pdb TARGET DIRECTORY) + get_property(type TARGET ${TARGET} PROPERTY TYPE) + if(type MATCHES "^(SHARED_LIBRARY|EXECUTABLE)$") + install(FILES $<TARGET_PDB_FILE:${TARGET}> DESTINATION "${DIRECTORY}" OPTIONAL) + elseif(type STREQUAL "STATIC_LIBRARY") + # FIXME: Use $\"; +") + string(APPEND src_main " + result += arch_${known_arch}[argc];") + endforeach() + + set(src_arch_detect "${src_vars} +int main(int argc, char *argv[]) { + int result = 0; + (void)argv; +${src_main} + return result; +}") + + if(CMAKE_C_COMPILER) + set(ext ".c") + elseif(CMAKE_CXX_COMPILER) + set(ext ".cpp") + else() + enable_language(C) + set(ext ".c") + endif() + set(path_src_arch_detect "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/CMakeTmp/SDL_detect_arch${ext}") + file(WRITE "${path_src_arch_detect}" "${src_arch_detect}") + set(path_dir_arch_detect "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/CMakeTmp/SDL_detect_arch") + set(path_bin_arch_detect "${path_dir_arch_detect}/bin") + + set(detected_archs) + + set(msg "Detecting Target CPU Architecture") + message(STATUS "${msg}") + + include(CMakePushCheckState) + + set(CMAKE_TRY_COMPILE_TARGET_TYPE "STATIC_LIBRARY") + + cmake_push_check_state(RESET) + try_compile(SDL_CPU_CHECK_ALL + "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/CMakeTmp/SDL_detect_arch" + SOURCES "${path_src_arch_detect}" + COPY_FILE "${path_bin_arch_detect}" + ) + cmake_pop_check_state() + if(NOT SDL_CPU_CHECK_ALL) + message(STATUS "${msg} - ") + message(WARNING "Failed to compile source detecting the target CPU architecture") + else() + set(re "INFO<([A-Z0-9]+)=([01])>") + file(STRINGS "${path_bin_arch_detect}" infos REGEX "${re}") + + foreach(info_arch_01 IN LISTS infos) + string(REGEX MATCH "${re}" A "${info_arch_01}") + if(NOT "${CMAKE_MATCH_1}" IN_LIST known_archs) + message(WARNING "Unknown architecture: \"${CMAKE_MATCH_1}\"") + continue() + endif() + set(arch "${CMAKE_MATCH_1}") + set(arch_01 "${CMAKE_MATCH_2}") + set(detected_${arch} "${arch_01}") + endforeach() + + foreach(known_arch IN LISTS known_archs)
+ if(detected_${known_arch}) + list(APPEND detected_archs ${known_arch}) + endif() + endforeach() + endif() + + if(detected_archs) + foreach(known_arch IN LISTS known_archs) + set("SDL_CPU_${known_arch}" "${detected_${known_arch}}" CACHE BOOL "Detected architecture ${known_arch}") + endforeach() + message(STATUS "${msg} - ${detected_archs}") + else() + include(CheckCSourceCompiles) + cmake_push_check_state(RESET) + foreach(known_arch IN LISTS known_archs) + if(NOT detected_archs) + set(cache_variable "SDL_CPU_${known_arch}") + set(test_src " + int main(int argc, char *argv[]) { + #if ${arch_check_${known_arch}} + return 0; + #else + choke + #endif + } + ") + check_c_source_compiles("${test_src}" "${cache_variable}") + if(${cache_variable}) + set(SDL_CPU_${known_arch} "1" CACHE BOOL "Detected architecture ${known_arch}") + set(detected_archs ${known_arch}) + else() + set(SDL_CPU_${known_arch} "0" CACHE BOOL "Detected architecture ${known_arch}") + endif() + endif() + endforeach() + cmake_pop_check_state() + endif() + set("${DETECTED_ARCHS}" "${detected_archs}" PARENT_SCOPE) +endfunction() diff --git a/cmake/sdlmanpages.cmake b/cmake/sdlmanpages.cmake new file mode 100644 index 00000000..dc3ebb6b --- /dev/null +++ b/cmake/sdlmanpages.cmake @@ -0,0 +1,68 @@ +include(CMakeParseArguments) +include(GNUInstallDirs) + +function(SDL_generate_manpages) + cmake_parse_arguments(ARG "" "RESULT_VARIABLE;NAME;BUILD_DOCDIR;HEADERS_DIR;SOURCE_DIR;SYMBOL;OPTION_FILE;WIKIHEADERS_PL_PATH;REVISION" "" ${ARGN}) + + set(wikiheaders_extra_args) + + if(NOT ARG_NAME) + set(ARG_NAME "${PROJECT_NAME}") + endif() + + if(NOT ARG_SOURCE_DIR) + set(ARG_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}") + endif() + + if(NOT ARG_OPTION_FILE) + set(ARG_OPTION_FILE "${PROJECT_SOURCE_DIR}/.wikiheaders-options") + endif() + + if(NOT ARG_HEADERS_DIR) + message(FATAL_ERROR "Missing required HEADERS_DIR argument") + endif() + + # FIXME: get rid of SYMBOL and let the perl script figure out the dependencies + if(NOT ARG_SYMBOL) + message(FATAL_ERROR "Missing required SYMBOL argument") + endif() + + if(ARG_REVISION) + list(APPEND wikiheaders_extra_args "--rev=${ARG_REVISION}") + endif() + + if(NOT ARG_BUILD_DOCDIR) + set(ARG_BUILD_DOCDIR "${CMAKE_CURRENT_BINARY_DIR}/docs") + endif() + set(BUILD_WIKIDIR "${ARG_BUILD_DOCDIR}/wiki") + set(BUILD_MANDIR "${ARG_BUILD_DOCDIR}/man") + + find_package(Perl) + file(GLOB HEADER_FILES "${ARG_HEADERS_DIR}/*.h") + + set(result FALSE) + + if(PERL_FOUND AND EXISTS "${ARG_WIKIHEADERS_PL_PATH}") + add_custom_command( + OUTPUT "${BUILD_WIKIDIR}/${ARG_SYMBOL}.md" + COMMAND "${CMAKE_COMMAND}" -E make_directory "${BUILD_WIKIDIR}" + COMMAND "${PERL_EXECUTABLE}" "${ARG_WIKIHEADERS_PL_PATH}" "${ARG_SOURCE_DIR}" "${BUILD_WIKIDIR}" "--options=${ARG_OPTION_FILE}" --copy-to-wiki ${wikiheaders_extra_args} + DEPENDS ${HEADER_FILES} "${ARG_WIKIHEADERS_PL_PATH}" "${ARG_OPTION_FILE}" + COMMENT "Generating ${ARG_NAME} wiki markdown files" + ) + add_custom_command( + OUTPUT "${BUILD_MANDIR}/man3/${ARG_SYMBOL}.3" + COMMAND "${PERL_EXECUTABLE}" "${ARG_WIKIHEADERS_PL_PATH}" "${ARG_SOURCE_DIR}" "${BUILD_WIKIDIR}" "--options=${ARG_OPTION_FILE}" "--manpath=${BUILD_MANDIR}" --copy-to-manpages ${wikiheaders_extra_args} + DEPENDS "${BUILD_WIKIDIR}/${ARG_SYMBOL}.md" "${ARG_WIKIHEADERS_PL_PATH}" "${ARG_OPTION_FILE}" + COMMENT "Generating ${ARG_NAME} man pages" + ) + add_custom_target(${ARG_NAME}-docs ALL DEPENDS "${BUILD_MANDIR}/man3/${ARG_SYMBOL}.3") + + install(DIRECTORY "${BUILD_MANDIR}/" DESTINATION "${CMAKE_INSTALL_MANDIR}") + 
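+    # Perl and wikiheaders.pl are available and the doc targets are wired up;
+    # report success through RESULT_VARIABLE below so callers can react.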
+    set(result TRUE)
+  endif()
+
+  if(ARG_RESULT_VARIABLE)
+    set(${ARG_RESULT_VARIABLE} ${result} PARENT_SCOPE)
+  endif()
+endfunction()
diff --git a/cmake/test/CMakeLists.txt b/cmake/test/CMakeLists.txt
new file mode 100644
index 00000000..646e8d79
--- /dev/null
+++ b/cmake/test/CMakeLists.txt
@@ -0,0 +1,23 @@
+cmake_minimum_required(VERSION 3.0...4.0)
+project(test_physfs C)
+
+# Override CMAKE_FIND_ROOT_PATH_MODE to allow searching for PhysFS outside of the sysroot
+set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE NEVER)
+
+option(TEST_STATIC "Test static PhysFS library" ON)
+if(TEST_STATIC)
+  find_package(PhysFS CONFIG REQUIRED COMPONENTS PhysFS-static)
+  add_executable(a-static ../../test/test_physfs.c)
+  target_link_libraries(a-static PRIVATE PhysFS::PhysFS-static)
+endif()
+
+option(TEST_SHARED "Test shared PhysFS library" ON)
+if(TEST_SHARED)
+  find_package(PhysFS CONFIG REQUIRED COMPONENTS PhysFS-shared)
+  add_executable(a-shared ../../test/test_physfs.c)
+  target_link_libraries(a-shared PRIVATE PhysFS::PhysFS-shared)
+endif()
+
+find_package(PhysFS CONFIG REQUIRED COMPONENTS PhysFS)
+add_executable(a ../../test/test_physfs.c)
+target_link_libraries(a PRIVATE PhysFS::PhysFS)
diff --git a/extras/physfshttpd.c b/extras/physfshttpd.c
index 9e0df6bb..54a8683e 100644
--- a/extras/physfshttpd.c
+++ b/extras/physfshttpd.c
@@ -86,7 +86,8 @@ static const char *lastError(void)
 static int writeAll(const char *ipstr, const int sock,
                     void *buf, const size_t len)
 {
-    if (write(sock, buf, len) != len)
+    PHYSFS_sint64 amount_written = write(sock, buf, len);
+    if (amount_written < 0 || (size_t)amount_written != len)
     {
         printf("%s: Write error to socket.\n", ipstr);
         return 0;
@@ -218,34 +219,38 @@ static void *do_http(void *_args)
     char ipstr[128];
     char buffer[512];
     char *ptr;
+    int amount_read;
 
     strncpy(ipstr, inet_ntoa(((struct sockaddr_in *) args->addr)->sin_addr),
             sizeof (ipstr));
     ipstr[sizeof (ipstr) - 1] = '\0';
 
     printf("%s: connected.\n", ipstr);
-    read(args->sock, buffer, sizeof (buffer));
-    buffer[sizeof (buffer) - 1] = '\0';
-    ptr = strchr(buffer, '\n');
-    if (!ptr)
-        printf("%s: potentially bogus request.\n", ipstr);
-    else
+    amount_read = read(args->sock, buffer, sizeof (buffer) - 1);
+    if (amount_read > 0)
     {
-        *ptr = '\0';
-        ptr = strchr(buffer, '\r');
-        if (ptr != NULL)
-            *ptr = '\0';
-
-        if ((toupper(buffer[0]) == 'G') &&
-            (toupper(buffer[1]) == 'E') &&
-            (toupper(buffer[2]) == 'T') &&
-            (toupper(buffer[3]) == ' ') &&
-            (toupper(buffer[4]) == '/'))
+        buffer[amount_read] = '\0';
+        ptr = strchr(buffer, '\n');
+        if (!ptr)
+            printf("%s: potentially bogus request.\n", ipstr);
+        else
         {
-            ptr = strchr(buffer + 5, ' ');
+            *ptr = '\0';
+            ptr = strchr(buffer, '\r');
             if (ptr != NULL)
                 *ptr = '\0';
-            feed_http_request(ipstr, args->sock, buffer + 4);
-        } /* if */
+
+            if ((toupper(buffer[0]) == 'G') &&
+                (toupper(buffer[1]) == 'E') &&
+                (toupper(buffer[2]) == 'T') &&
+                (toupper(buffer[3]) == ' ') &&
+                (toupper(buffer[4]) == '/'))
+            {
+                ptr = strchr(buffer + 5, ' ');
+                if (ptr != NULL)
+                    *ptr = '\0';
+                feed_http_request(ipstr, args->sock, buffer + 4);
+            } /* if */
+        } /* else */
     } /* else */
 
     /* !!! FIXME: Time the transfer. */
@@ -377,6 +382,7 @@ int main(int argc, char **argv)
         printf("listen socket failed to create.\n");
         return 42;
     } /* if */
+    printf("Listening on port %d\n", portnum);
 
     while (1) /* infinite loop for now. */
     {
diff --git a/extras/uninstall.sh b/extras/uninstall.sh
deleted file mode 100755
index 69e01111..00000000
--- a/extras/uninstall.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-
-if [ ! -f "./install_manifest.txt" ]; then
-f "./install_manifest.txt" ]; then - echo "ERROR: This needs to be run from your CMake build directory after installing." 1>&2 - exit 1 -fi - -xargs rm -vf < install_manifest.txt -exit 0 - diff --git a/src/physfs_platform_windows.c b/src/physfs_platform_windows.c index 16d5abc3..88536144 100644 --- a/src/physfs_platform_windows.c +++ b/src/physfs_platform_windows.c @@ -11,6 +11,8 @@ #ifdef PHYSFS_PLATFORM_WINDOWS +typedef void (*fnFunctionPointer)(void); + /* Forcibly disable UNICODE macro, since we manage this ourselves. */ #ifdef UNICODE #undef UNICODE @@ -249,7 +251,7 @@ static DWORD pollDiscDrives(void) DWORD i; if (lib) - stem = (fnSTEM) GetProcAddress(lib, "SetThreadErrorMode"); + stem = (fnSTEM)(fnFunctionPointer) GetProcAddress(lib, "SetThreadErrorMode"); if (stem) stem(SEM_FAILCRITICALERRORS, &oldErrorMode); @@ -558,7 +560,7 @@ char *__PHYSFS_platformCalcUserDir(void) lib = LoadLibraryA("userenv.dll"); BAIL_IF(!lib, errcodeFromWinApi(), NULL); - pGetDir=(fnGetUserProfDirW) GetProcAddress(lib,"GetUserProfileDirectoryW"); + pGetDir=(fnGetUserProfDirW)(fnFunctionPointer) GetProcAddress(lib,"GetUserProfileDirectoryW"); GOTO_IF(!pGetDir, errcodeFromWinApi(), done); if (!OpenProcessToken(GetCurrentProcess(), TOKEN_QUERY, &accessToken)) diff --git a/src/physfs_version.rc b/src/physfs_version.rc new file mode 100644 index 00000000..940a88b2 --- /dev/null +++ b/src/physfs_version.rc @@ -0,0 +1,38 @@ + +#include "winresrc.h" + +LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US + +///////////////////////////////////////////////////////////////////////////// +// +// Version +// + +VS_VERSION_INFO VERSIONINFO + FILEVERSION 3,3,0,0 + PRODUCTVERSION 3,3,0,0 + FILEFLAGSMASK 0x3fL + FILEFLAGS 0x0L + FILEOS 0x40004L + FILETYPE 0x2L + FILESUBTYPE 0x0L +BEGIN + BLOCK "StringFileInfo" + BEGIN + BLOCK "040904b0" + BEGIN + VALUE "CompanyName", "\0" + VALUE "FileDescription", "PhysFS\0" + VALUE "FileVersion", "3, 3, 0, 0\0" + VALUE "InternalName", "PhysFS\0" + VALUE "LegalCopyright", "Copyright (C) 2025 Ryan C. Gordon\0" + VALUE "OriginalFilename", "physfs.dll\0" + VALUE "ProductName", "PhysicsFS\0" + VALUE "ProductVersion", "3, 3, 0, 0\0" + END + END + BLOCK "VarFileInfo" + BEGIN + VALUE "Translation", 0x409, 1200 + END +END diff --git a/test/test_physfs.c b/test/test_physfs.c index d90baeb7..8cff0121 100644 --- a/test/test_physfs.c +++ b/test/test_physfs.c @@ -1335,12 +1335,12 @@ static int cmd_write(char *args) static char* modTimeToStr(PHYSFS_sint64 modtime, char *modstr, size_t strsize) { if (modtime < 0) - strncpy(modstr, "Unknown\n", strsize); + strncpy(modstr, "Unknown\n", strsize - 1); else { time_t t = (time_t) modtime; char *str = ctime(&t); - strncpy(modstr, str, strsize); + strncpy(modstr, str, strsize - 1); } /* else */ modstr[strsize-1] = '\0';