From 96050dfd95395e20ebec69a7eb90e03c5fcccdcc Mon Sep 17 00:00:00 2001 From: Matt Young Date: Sun, 26 Feb 2023 10:51:45 +0100 Subject: [PATCH 01/12] Add Meson/bt as alternative build and packaging system; make enum deserialisation more robust --- .github/workflows/linux-ubuntu.yml | 131 +- .github/workflows/mac.yml | 309 ++- .github/workflows/windows.yml | 178 +- .gitmodules | 3 + CMakeLists.txt | 41 +- bt | 2152 +++++++++++++++++ doc/manpage.1.md.in | 29 + .../brewtarget.desktop | 1 - meson.build | 1449 +++++++++++ packaging/config.toml.in | 28 + packaging/darwin/Info.plist.in | 66 + packaging/generateCompressedChangeLog.sh | 130 + packaging/linux/control.in | 124 + packaging/linux/rpm.spec.in | 102 + packaging/{ => linux}/rpmlintFilters.toml | 0 packaging/windows/NsisInstallerScript.nsi.in | 564 +++++ src/Algorithms.cpp | 2 +- src/AncestorDialog.cpp | 20 +- src/AncestorDialog.h | 4 +- src/BrewNoteWidget.cpp | 2 +- src/BtDigitWidget.cpp | 2 +- src/BtFieldType.cpp | 2 +- src/BtFieldType.h | 2 +- src/BtLabel.cpp | 2 +- src/BtTextEdit.cpp | 25 +- src/BtTreeFilterProxyModel.cpp | 32 +- src/BtTreeModel.cpp | 51 +- src/CMakeLists.txt | 2 +- src/EquipmentEditor.cpp | 2 +- src/EquipmentListModel.cpp | 17 +- src/FermentableEditor.cpp | 2 +- src/FermentableSortFilterProxyModel.cpp | 3 +- src/HopSortFilterProxyModel.cpp | 3 +- src/HydrometerTool.cpp | 9 +- src/InstructionWidget.cpp | 7 +- src/Localization.cpp | 2 +- src/Localization.h | 2 +- src/MainWindow.cpp | 2 +- src/MashComboBox.cpp | 6 +- src/MashDesigner.cpp | 2 +- src/MashEditor.cpp | 2 +- src/MashListModel.cpp | 23 +- src/MashStepEditor.cpp | 2 +- src/MashWizard.cpp | 2 +- src/MiscEditor.cpp | 2 +- src/MiscSortFilterProxyModel.cpp | 3 +- src/NamedMashEditor.cpp | 2 +- src/OgAdjuster.cpp | 3 +- src/PersistentSettings.h | 2 +- src/PitchDialog.cpp | 2 +- src/PrimingDialog.cpp | 3 +- src/PrintAndPreviewDialog.cpp | 199 +- src/RangedSlider.cpp | 3 +- src/RecipeExtrasWidget.cpp | 2 +- src/StrikeWaterDialog.cpp | 3 +- 
src/StrikeWaterDialog.h | 2 +- src/StyleEditor.cpp | 2 +- src/StyleListModel.cpp | 25 +- src/StyleSortFilterProxyModel.cpp | 15 +- src/UiAmountWithUnits.cpp | 2 +- src/UiAmountWithUnits.h | 2 +- src/WaterDialog.cpp | 2 +- src/WaterEditor.cpp | 2 +- src/WaterListModel.cpp | 3 +- src/YeastEditor.cpp | 2 +- src/YeastSortFilterProxyModel.cpp | 3 +- src/config.h.in | 50 + src/config.in | 10 +- src/main.cpp | 6 +- src/model/BrewNote.cpp | 2 +- src/model/BrewNote.h | 2 +- src/model/Equipment.cpp | 2 +- src/model/Inventory.cpp | 2 +- src/model/Mash.cpp | 2 +- src/model/MashStep.cpp | 2 +- src/model/Misc.cpp | 2 +- src/model/NamedEntity.cpp | 2 +- src/model/NamedEntityWithInventory.cpp | 2 +- src/model/NamedParameterBundle.cpp | 2 +- src/model/NamedParameterBundle.h | 2 +- src/model/Style.cpp | 2 +- src/model/Water.cpp | 2 +- src/model/Water.h | 21 +- src/model/Yeast.cpp | 2 +- src/tableModels/BtTableModel.cpp | 2 +- src/tableModels/FermentableTableModel.cpp | 2 +- src/tableModels/HopTableModel.cpp | 2 +- src/tableModels/MashStepTableModel.cpp | 2 +- src/tableModels/MiscTableModel.cpp | 2 +- src/tableModels/SaltTableModel.cpp | 2 +- src/tableModels/WaterTableModel.cpp | 2 +- src/tableModels/YeastTableModel.cpp | 2 +- src/unitTests/Testing.cpp | 17 +- src/unitTests/Testing.h | 2 +- src/utils/BtStringConst.cpp | 17 +- src/utils/BtStringConst.h | 19 +- src/utils/EnumStringMapping.cpp | 17 +- src/utils/EnumStringMapping.h | 7 +- src/utils/OptionalHelpers.h | 2 +- third-party/libbacktrace | 1 + translations/bt_ca.ts | 29 + translations/bt_cs.ts | 29 + translations/bt_de.ts | 29 + translations/bt_el.ts | 29 + translations/bt_en.ts | 29 + translations/bt_es.ts | 29 + translations/bt_et.ts | 29 + translations/bt_eu.ts | 29 + translations/bt_fr.ts | 29 + translations/bt_gl.ts | 29 + translations/bt_hu.ts | 29 + translations/bt_it.ts | 29 + translations/bt_lv.ts | 29 + translations/bt_nb.ts | 29 + translations/bt_nl.ts | 18 +- translations/bt_pl.ts | 29 + translations/bt_pt.ts | 29 + 
translations/bt_ru.ts | 29 + translations/bt_sr.ts | 29 + translations/bt_sv.ts | 29 + translations/bt_tr.ts | 29 + translations/bt_zh.ts | 29 + 122 files changed, 6147 insertions(+), 516 deletions(-) create mode 100644 .gitmodules create mode 100755 bt create mode 100644 doc/manpage.1.md.in rename brewtarget.desktop => linux/brewtarget.desktop (99%) create mode 100644 meson.build create mode 100644 packaging/config.toml.in create mode 100644 packaging/darwin/Info.plist.in create mode 100755 packaging/generateCompressedChangeLog.sh create mode 100644 packaging/linux/control.in create mode 100644 packaging/linux/rpm.spec.in rename packaging/{ => linux}/rpmlintFilters.toml (100%) create mode 100644 packaging/windows/NsisInstallerScript.nsi.in create mode 100644 src/config.h.in create mode 160000 third-party/libbacktrace diff --git a/.github/workflows/linux-ubuntu.yml b/.github/workflows/linux-ubuntu.yml index a4fd75951..8884621cb 100644 --- a/.github/workflows/linux-ubuntu.yml +++ b/.github/workflows/linux-ubuntu.yml @@ -1,3 +1,22 @@ +# +# .github/workflows/linux-ubuntu.yml is part of Brewtarget, and is copyright the following authors 2021-2023: +# • Artem Martynov +# • Chris Speck +# • Mattias Måhl +# • Matt Young +# • Mik Firestone +# +# Brewtarget is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Brewtarget is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied +# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more +# details. +# +# You should have received a copy of the GNU General Public License along with this program. If not, see +# . 
+# name: Linux on: @@ -21,45 +40,40 @@ jobs: # for info on runner images # # Usually "ubuntu-latest" is the most recent LTS version of Ubuntu, but there can be a bit of lag between a new - # LTS release and the update of ubuntu-latest (eg in October 2022, it was still Ubuntu 20.04 rather than 22.04. + # LTS release and the update of ubuntu-latest (eg in October 2022, it was still Ubuntu 20.04 rather than 22.04). # So we explicitly specify here which versions we want to build on. # - # Note that ubuntu-18.04 is deprecated and we need to remove it by April 2023 - # - os: [ubuntu-18.04, ubuntu-20.04, ubuntu-22.04] + os: [ubuntu-20.04, ubuntu-22.04] steps: - uses: actions/checkout@v3 with: fetch-depth: 0 - - name: Dependencies + # + # See https://github.com/Brewtarget/brewtarget/wiki/Development:-Getting-Started for more on what is needed to build + # the software. Most of this is now automated in 'bt setup all'. + # + # Some of the things that the bt script installs could be installed via actions (eg jurplel/install-qt-action@v3) + # and some are already installed by default for GitHub actions (eg cmake, git, debhelper, rpm) but there's an + # advantage, where we can, in doing the exact same steps that give as instructions to developers to set up their + # build environment. + # + # Of course, since 'bt' is a Python script, it can't install Python, so we need to do that first. We need Python + # 3.10 or newer, which means you can't just use `sudo apt install` on older Ubuntus. (Eg Ubuntu 18.04 packages + # have only Python 3.6.7 and Ubuntu 20.04 only have Python 3.8.2. However Ubuntu 22.04 has Python 3.10.6.) There + # are ways to get around this, but, in this context, it's simplest to use a canned GitHub action. 
+ # + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - name: Install Frameworks and Libraries, and set up Meson build environment + working-directory: ${{github.workspace}} shell: bash run: | - sudo apt-get update && sudo apt-get install -y \ - libqt5multimedia5-plugins \ - libqt5sql5-psql \ - libqt5sql5-sqlite \ - libqt5svg5-dev \ - libxalan-c-dev \ - libxerces-c-dev \ - lintian \ - qtbase5-dev \ - qtmultimedia5-dev \ - qttools5-dev \ - qttools5-dev-tools \ - rpmlint - cd ~ - mkdir ~/boost-tmp - cd ~/boost-tmp - wget https://boostorg.jfrog.io/artifactory/main/release/1.79.0/source/boost_1_79_0.tar.bz2 - tar --bzip2 -xf boost_1_79_0.tar.bz2 - cd boost_1_79_0 - ./bootstrap.sh --prefix=/usr - sudo ./b2 install - cd ~ - sudo rm -rf ~/boost-tmp - - - name: Create Build Environment + pwd + ./bt -v setup all + + - name: Create CMake build environment run: cmake -E make_directory ${{github.workspace}}/build - name: Configure CMake @@ -73,13 +87,20 @@ jobs: -DNO_MESSING_WITH_FLAGS=ON \ $GITHUB_WORKSPACE - - name: Build + - name: Build (with CMake) working-directory: ${{github.workspace}}/build shell: bash run: | make - - name: Test + - name: Build (with Meson) + working-directory: ${{github.workspace}}/mbuild + shell: bash + run: | + pwd + meson compile + + - name: Test (via CMake) working-directory: ${{github.workspace}}/build shell: bash env: @@ -88,13 +109,31 @@ jobs: run: | make test - - name: Package + # The 'export QT_DEBUG_PLUGINS=1' give us diagnostics in the event that there are problems initialising QT + # The 'export QT_QPA_PLATFORM=offscreen' stops Qt's xcb sub-module trying to connect to a non-existent display + # (which would cause the test runner to abort before running any tests). 
+ - name: Test (via Meson) + working-directory: ${{github.workspace}}/mbuild + shell: bash + run: | + export QT_DEBUG_PLUGINS=1 + export QT_QPA_PLATFORM=offscreen + meson test + + - name: Package (via CMake/CPack) working-directory: ${{github.workspace}}/build shell: bash run: | umask 022 make package + - name: Package (New) + working-directory: ${{github.workspace}}/mbuild + shell: bash + run: | + umask 022 + ../bt package + - name: LintianAndRpmLint continue-on-error: true working-directory: ${{github.workspace}}/build @@ -113,12 +152,36 @@ jobs: build/brewtarget*.deb.sha256 build/brewtarget*.tar.bz2 build/brewtarget*.tar.bz2.sha256 + mbuild/packages/source/brewtarget*.tar.xz + mbuild/packages/source/brewtarget*.tar.xz.sha256sum + mbuild/packages/linux/brewtarget*.deb + mbuild/packages/linux/brewtarget*.deb.sha256sum + mbuild/packages/linux/brewtarget*.rpm + mbuild/packages/linux/brewtarget*.rpm.sha256sum retention-days: 7 - - name: Recover Debris Artifacts + - name: Recover Debris Artifacts (CMake) if: ${{ failure() }} uses: actions/upload-artifact@v3 with: name: build-results-${{matrix.os}} path: build retention-days: 1 + + - name: Recover Debris Artifacts (Meson) + if: ${{ failure() }} + uses: actions/upload-artifact@v3 + with: + name: mbuild-results-${{matrix.os}} + path: mbuild + retention-days: 1 + + # Meson test doesn't show log output on the terminal, but puts it straight to a log file. We don't want to have + # to download the whole compressed mbuild tree just to see the log in event of a test failure, so we show it here + # (provided it exists). 
+ - name: Show Meson test logs + if: ${{ failure() }} + working-directory: ${{github.workspace}} + shell: bash + run: | + if [[ -f mbuild/meson-logs/testlog.txt ]]; then cat mbuild/meson-logs/testlog.txt; fi diff --git a/.github/workflows/mac.yml b/.github/workflows/mac.yml index 82949cbe0..1a9cef5fc 100644 --- a/.github/workflows/mac.yml +++ b/.github/workflows/mac.yml @@ -1,3 +1,20 @@ +# +# .github/workflows/mac.yml is part of Brewtarget, and is copyright the following authors 2021-2023: +# • Artem Martynov +# • Mattias Måhl +# • Matt Young +# +# Brewtarget is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Brewtarget is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied +# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more +# details. +# +# You should have received a copy of the GNU General Public License along with this program. If not, see +# . +# name: Mac on: @@ -5,7 +22,6 @@ on: branches: - develop - "stable/**" - pull_request: branches: - develop @@ -19,26 +35,11 @@ env: jobs: build-mac: runs-on: macos-latest - steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - - name: Install Qt - # Version 5.15.2 is, according to https://github.com/jurplel/install-qt-action, the last Qt 5 LTS - # When we're ready to migrate to Qt 6, we'll need to tweak this - uses: jurplel/install-qt-action@v3 - with: - version: 5.15.2 - - - name: Install dependencies - # - # Installing Xalan-C will cause Xerces-C to be installed too (as the former depends on the latter) - # .:TBD:. 
Installing Boost here doesn't seem to give us libboost_stacktrace_backtrace - # Also, trying to use the "--cc=clang" option to install boost gives an error ("Error: boost: no bottle - # available!") For the moment, we're just using Boost header files on Mac though, so this should be OK. # # The `brew doctor` command just checks that Homebrew (https://brew.sh/) is installed OK (expected output is "Your # system is ready to brew". Having Homebrew installed should imply the Xcode Command Line Tools are also @@ -46,110 +47,168 @@ jobs: # elsewhere we use the echo trick to ensure that a non-zero return value from these diagnostic commands is not # treated as a build script failure. # - # We use the tree command for diagnosing certain build problems (specifically to see what changes certain parts of - # the build have made to the build directory tree). (If need be, you can also download the entire build directory - # within a day of a failed build running, but you need a decent internet connection for this as it's quite large.) - # - run: | - echo "Output from brew doctor: $(brew doctor)" - echo "Output from xcode-select -p: $(xcode-select -p)" - brew install xalan-c - brew install boost - brew install tree - - - name: Build - env: - QT_QPA_PLATFORM: offscreen - # Change `make` to `make VERBOSE=1` to get hugely detailed output - run: | - mkdir build - cd build - cmake .. - make - - - name: Prep for tests - # If a test fails and we get a core, we'd like to analyse it. This will be easier if we have access to the - # relevant directories and there aren't any other files cluttering up the place. - # - # Running the commands inside an echo statement is a bit of a trick to ensure failure of the rm command (eg - # because there are no files to delete) does not count as a build script failure (because the echo command will - # return 0 = success). 
- run: | - sudo chmod -R +rwx /cores - sudo chmod -R +rwx /Library/Logs/DiagnosticReports - echo "Clearing contents of /cores directory: $(ls -ltr /cores) $(rm -rf /cores/*)" - echo "Clearing contents of /Library/Logs/DiagnosticReports directory: $(ls -ltr /Library/Logs/DiagnosticReports) $(rm -rf /Library/Logs/DiagnosticReports/*)" - - - name: Automated tests - # If something does crash we'd like to capture the core, so we need to enable core dumps - hence the call to - # ulimit. - # - # Running "make test" boils down to running ctest (because the invocation of make in the Build step above will - # have done all the necessary prep. Running ctest directly allows us to pass in extra parameters to try to get as - # much diagnostics as possible out of a remote build such as this. - run: | - ulimit -c unlimited - echo "Core size limit is $(ulimit -c)" - cd build - ctest --extra-verbose --output-on-failure 2>&1 - - - name: Make package - # Change `make package` to `make package VERBOSE=1` to get hugely detailed output - run: | - cd build - pwd - make package - pwd - tree -sh - - - name: Upload Mac Packages (Installers) - if: ${{ success() }} - uses: actions/upload-artifact@v3 - with: - name: brewtarget-dev-mac - path: | - ${{github.workspace}}/build/brewtarget*.dmg - ${{github.workspace}}/build/brewtarget*.dmg.sha256 - retention-days: 7 - - - name: Post-processing on any core dump - if: ${{ failure() }} - # It's all very well capturing core files, but if you don't have a Mac to analyse them on they are not a fat lot - # of use. So, if we did get a core, let's at least get a stack trace out of it. + # We install the tree command here as, although it's not needed to do the build itself, it's useful for diagnosing + # certain build problems (eg to see what changes certain parts of the build have made to the build directory + # tree) when the build is running as a GitHub action. 
(If need be, you can also download the entire build + # directory within a day of a failed build running, but you need a decent internet connection for this as it's + # quite large.) # - # The loop in the last line should run either 0 or 1 times, depending on whether the build failure did or did not - # generate a core file. - # ls -1 | while read ii; do echo "bt" | lldb -c $ii; done - run: | - pwd - tree -sh - sudo chmod -R +rwx /cores - sudo chmod -R +rwx /Library/Logs/DiagnosticReports - echo "Contents of /cores directory: $(ls -ltr /cores)" - echo "Contents of /Library/Logs/DiagnosticReports directory: $(ls -ltr /Library/Logs/DiagnosticReports)" - cd /cores - ls -1 | while read ii; do echo "bt" | lldb -c $ii; done - - - name: Recover Debris Artifacts (aka build output) - if: ${{ failure() }} - uses: actions/upload-artifact@v3 - with: - name: build-results - path: ${{github.workspace}}/build - retention-days: 1 - - - name: Recover DiagnosticReports (if any) - if: ${{ failure() }} - uses: actions/upload-artifact@v3 - with: - name: DiagnosticReports - path: /Library/Logs/DiagnosticReports - retention-days: 1 - - - name: Recover Cores (if any) - if: ${{ failure() }} - uses: actions/upload-artifact@v3 - with: - name: cores - path: /cores - retention-days: 1 + - name: Install Frameworks and Libraries, and set up Meson build environment + run: | + echo "Output from brew doctor: $(brew doctor)" + echo "Output from xcode-select -p: $(xcode-select -p)" + brew install tree + brew install python@3.11 + echo "Python3 ($(which python3)) version" + /usr/bin/env python3 --version + echo "Running ./bt -v setup all" + ./bt -v setup all + + - name: Build (with CMake) + env: + QT_QPA_PLATFORM: offscreen + # Change `make` to `make VERBOSE=1` to get hugely detailed output + run: | + export PATH=/usr/local/opt/qt5/bin:$PATH + mkdir build + cd build + cmake .. 
+ make + + - name: Build (with Meson) + run: | + cd mbuild + pwd + meson compile + + - name: Prep for tests + # If a test fails and we get a core, we'd like to analyse it. This will be easier if we have access to the + # relevant directories and there aren't any other files cluttering up the place. + # + # Running the commands inside an echo statement is a bit of a trick to ensure failure of the rm command (eg + # because there are no files to delete) does not count as a build script failure (because the echo command will + # return 0 = success). + run: | + sudo chmod -R +rwx /cores + sudo chmod -R +rwx /Library/Logs/DiagnosticReports + echo "Clearing contents of /cores directory: $(ls -ltr /cores) $(rm -rf /cores/*)" + echo "Clearing contents of /Library/Logs/DiagnosticReports directory: $(ls -ltr /Library/Logs/DiagnosticReports) $(rm -rf /Library/Logs/DiagnosticReports/*)" + + - name: Automated tests (via CMake) + # If something does crash we'd like to capture the core, so we need to enable core dumps - hence the call to + # ulimit. + # + # Running "make test" boils down to running ctest (because the invocation of make in the Build step above will + # have done all the necessary prep. Running ctest directly allows us to pass in extra parameters to try to get as + # much diagnostics as possible out of a remote build such as this. + run: | + ulimit -c unlimited + echo "Core size limit is $(ulimit -c)" + cd build + ctest --extra-verbose --output-on-failure 2>&1 + + - name: Automated tests (via Meson) + # If something does crash we'd like to capture the core, so we need to enable core dumps - hence the call to + # ulimit. + # + # The 'export QT_DEBUG_PLUGINS=1' give us diagnostics in the event that there are problems initialising QT + # The 'export QT_QPA_PLATFORM=offscreen' stops Qt's xcb sub-module trying to connect to a non-existent display + # (which would cause the test runner to abort before running any tests). 
+ run: | + ulimit -c unlimited + echo "Core size limit is $(ulimit -c)" + cd mbuild + export QT_DEBUG_PLUGINS=1 + export QT_QPA_PLATFORM=offscreen + meson test + + - name: Package (via CMake/CPack) + # Change `make package` to `make package VERBOSE=1` to get hugely detailed output + run: | + cd build + pwd + make package + pwd + tree -sh + + - name: Package (New) + shell: bash + run: | + cd mbuild + umask 022 + ../bt -v package + cd packages + pwd + tree -sh + + - name: Upload Mac Packages (Installers) + if: ${{ success() }} + uses: actions/upload-artifact@v3 + with: + name: brewtarget-dev-mac + path: | + ${{github.workspace}}/build/brewtarget*.dmg + ${{github.workspace}}/build/brewtarget*.dmg.sha256 + ${{github.workspace}}/mbuild/packages/darwin/Brewtarget*.dmg + ${{github.workspace}}/mbuild/packages/darwin/Brewtarget*.dmg.sha256sum + retention-days: 7 + + - name: Post-processing on any core dump + if: ${{ failure() }} + # It's all very well capturing core files, but if you don't have a Mac to analyse them on they are not a fat lot + # of use. So, if we did get a core, let's at least get a stack trace out of it. + # + # The loop in the last line should run either 0 or 1 times, depending on whether the build failure did or did not + # generate a core file. 
+ # ls -1 | while read ii; do echo "bt" | lldb -c $ii; done + run: | + pwd + tree -sh + sudo chmod -R +rwx /cores + sudo chmod -R +rwx /Library/Logs/DiagnosticReports + echo "Contents of /cores directory: $(ls -ltr /cores)" + echo "Contents of /Library/Logs/DiagnosticReports directory: $(ls -ltr /Library/Logs/DiagnosticReports)" + cd /cores + ls -1 | while read ii; do echo "bt" | lldb -c $ii; done + + - name: Recover Debris Artifacts (aka build output) - CMake + if: ${{ failure() }} + uses: actions/upload-artifact@v3 + with: + name: build-results + path: ${{github.workspace}}/build + retention-days: 1 + + - name: Recover Debris Artifacts (aka build output) - Meson + if: ${{ failure() }} + uses: actions/upload-artifact@v3 + with: + name: mbuild-results + path: ${{github.workspace}}/mbuild + retention-days: 1 + + - name: Recover DiagnosticReports (if any) + if: ${{ failure() }} + uses: actions/upload-artifact@v3 + with: + name: DiagnosticReports + path: /Library/Logs/DiagnosticReports + retention-days: 1 + + - name: Recover Cores (if any) + if: ${{ failure() }} + uses: actions/upload-artifact@v3 + with: + name: cores + path: /cores + retention-days: 1 + + # Meson test doesn't show log output on the terminal, but puts it straight to a log file. We don't want to have + # to download the whole compressed mbuild tree just to see the log in event of a test failure, so we show it here + # (provided it exists). 
+ - name: Show Meson test logs + if: ${{ failure() }} + working-directory: ${{github.workspace}} + shell: bash + run: | + if [[ -f mbuild/meson-logs/testlog.txt ]]; then cat mbuild/meson-logs/testlog.txt; fi diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml index 95d55244f..218a52b39 100644 --- a/.github/workflows/windows.yml +++ b/.github/workflows/windows.yml @@ -1,3 +1,21 @@ +# +# .github/workflows/windows.yml is part of Brewtarget, and is copyright the following authors 2021-2023: +# • Artem Martynov +# • Chris Speck +# • Mattias Måhl +# • Matt Young +# +# Brewtarget is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Brewtarget is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied +# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more +# details. +# +# You should have received a copy of the GNU General Public License along with this program. If not, see +# . +# name: Windows on: @@ -35,64 +53,170 @@ jobs: fetch-depth: 0 submodules: recursive + # + # Install MSYS2, then Python, then Pip + # + # We need Python 3.10 or later to run the bt script + # + # I tried using the separate actions/setup-python@v4 action, but it doesn't seem to result in the Python + # executable being visible in the MSYS2 environment. So, instead, we install from inside MSYS2. (According to + # https://packages.msys2.org/package/mingw-w64-x86_64-python, this is Python 3.10.9 as of 2022-12-10.) + # + # (In theory, an alternative approach would be to install Python, then run 'python -m ensurepip --upgrade' which, + # per https://docs.python.org/3/library/ensurepip.html, is the official Python way to bootstrap Pip. 
However, + # this did not seem to work properly in MSYS2 when I tried it.) + # + # Note that you _don't_ want to install the 'python' package here as it has some subtle differences from + # installing 'mingw-w64-i686-python'. (Same applies for 'python-pip' vs 'mingw-w64-i686-python-pip'.) Some of + # these differences are about where things are installed, but some are about how Python behaves, eg what + # platform.system() returns. See comments at https://github.com/conan-io/conan/issues/2638 for more.) + # + # We install the tree command here as, although it's not needed to do the build itself, it's useful for diagnosing + # certain build problems (eg to see what changes certain parts of the build have made to the build directory + # tree) when the build is running as a GitHub action. (If need be, you can also download the entire build + # directory within a day of a failed build running, but you need a decent internet connection for this as it's + # quite large.) + # - uses: msys2/setup-msys2@v2 with: msystem: ${{ matrix.msystem }} install: >- - mingw-w64-${{ matrix.arch }}-boost - mingw-w64-${{ matrix.arch }}-cmake - mingw-w64-${{ matrix.arch }}-doxygen - mingw-w64-${{ matrix.arch }}-gcc - mingw-w64-${{ matrix.arch }}-libbacktrace - mingw-w64-${{ matrix.arch }}-make - mingw-w64-${{ matrix.arch }}-nsis - mingw-w64-${{ matrix.arch }}-qt5 - mingw-w64-${{ matrix.arch }}-xalan-c - mingw-w64-${{ matrix.arch }}-xerces-c + mingw-w64-${{ matrix.arch }}-python + mingw-w64-${{ matrix.arch }}-python-pip + tree update: true release: true path-type: strict - - name: Download nsis plugins - run: | - New-Item -ItemType Directory -Force -Path .\temp\build - Invoke-WebRequest -Uri https://nsis.sourceforge.io/mediawiki/images/a/af/Locate.zip -OutFile .\temp\build\Locate.zip - Expand-Archive -Path .\temp\build\Locate.zip -DestinationPath .\temp\build\Locate - Invoke-WebRequest -Uri https://nsis.sourceforge.io/mediawiki/images/7/76/Nsislog.zip -OutFile .\temp\build\Nsislog.zip - 
Expand-Archive -Path .\temp\build\Nsislog.zip -DestinationPath .\temp\build\Nsislog - - name: Move Checkout run: | Copy-Item -Path ".\temp" -Destination "C:\_" -Recurse - - name: CMake config + # + # On Windows, there are a couple of extra things we need to do before running the bt script: + # + # - For historical reasons, Linux and other platforms need to run both Python v2 (still used by some bits of + # system) and Python v3 (eg that you installed yourself) so there are usually two corresponding Python + # executables, python2 and python3. On Windows there is only whatever Python you installed and it's called + # python.exe. To keep the shebang in the bt script working, we just make a softlink to python called python3. + # + # - Getting Unicode input/output to work is fun. We should already have a Unicode locale, but it seems we also + # need to set PYTHONIOENCODING (see https://docs.python.org/3/using/cmdline.html#envvar-PYTHONIOENCODING, even + # though it seems to imply you don't need to set it on recent versions of Python). + # + # - The version of Pip we install above does not put it in the "right" place. Specifically it will not be in the + # PATH when we run bt. The following seems to be the least hacky way around this: + # curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py + # python get-pip.py + # python -m pip install -U --force-reinstall pip + # See https://stackoverflow.com/questions/48087004/installing-pip-on-msys for more discussion on this. 
+ # + - name: Install Frameworks and Libraries, and set up Meson build environment shell: msys2 {0} run: | - cd /C/_/build - cp ./Locate/Include/Locate.nsh /mingw32/share/nsis/Include/ - cp ./Locate/Plugin/locate.dll /mingw32/share/nsis/Plugins/ansi/ - cp ./Nsislog/plugin/nsislog.dll /mingw32/share/nsis/Plugins/ansi + cd /C/_/ + echo "Working directory is:" + pwd + echo "Installed Python is:" + which python + python --version + echo "Installed pip is:" + which pip + pip --version + curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py + python get-pip.py + python -m pip install -U --force-reinstall pip + pip --version + echo "Locale:" + locale + export PYTHONIOENCODING=utf8 + echo "Ensuring that python3 symlink / executable exists" + if [[ ! -f $(dirname $(which python))/python3 ]]; then ln -s $(which python) $(dirname $(which python))/python3; fi + echo "Running ./bt -v setup all" + ./bt -v setup all + + #- name: Download nsis plugins + # run: | + # New-Item -ItemType Directory -Force -Path /C/_/build/nsis + # Invoke-WebRequest -Uri https://nsis.sourceforge.io/mediawiki/images/a/af/Locate.zip -OutFile /C/_/build/nsis/Locate.zip + # Expand-Archive -Path /C/_/build/nsis/Locate.zip -DestinationPath /C/_/build/nsis/Locate + # Invoke-WebRequest -Uri https://nsis.sourceforge.io/mediawiki/images/7/76/Nsislog.zip -OutFile /C/_/build/nsis/Nsislog.zip + # Expand-Archive -Path /C/_/build/nsis/Nsislog.zip -DestinationPath /C/_/build/nsis/Nsislog + + # Somehow, running the configure script sets up CMake to use Ninja rather than Makefiles, which is then in + # conflict with our other assumptions about CMake. Error message says remove CMakeCache.txt before running CMake, + # so we do that as a short-term fix (on the assumption that, longer term, we'll be moving to Meson). + - name: CMake Config + shell: msys2 {0} + run: | + cd /C/_ + ./configure + cd build + rm CMakeCache.txt cmake .. 
-DCMAKE_RC_COMPILER:FILEPATH=windres.exe -G "MinGW Makefiles" - - name: Build + # The pwd and find ../third-party commands below are just diagnostics, but it's generally useful to have too + # much rather than not enough diagnostic info on these GitHub action builds + - name: Build (with CMake) shell: msys2 {0} run: | cd /C/_/build pwd cmake --build . - - name: Test + - name: Build (with Meson) + shell: msys2 {0} + run: | + cd /C/_/mbuild + pwd + meson compile + + - name: Test (via CMake) shell: msys2 {0} run: | cd /C/_/build cmake --build . --target test - - name: Package + # The 'export QT_DEBUG_PLUGINS=1' give us diagnostics in the event that there are problems initialising QT + # The 'export QT_QPA_PLATFORM=offscreen' stops Qt's xcb sub-module trying to connect to a non-existent display + # (which would cause the test runner to abort before running any tests). + - name: Test (via Meson) + shell: msys2 {0} + run: | + cd /C/_/mbuild + export QT_DEBUG_PLUGINS=1 + export QT_QPA_PLATFORM=offscreen + meson test + + - name: Package (via CMake/CPack) shell: msys2 {0} run: | cd /C/_/build cmake --build . --target package --verbose + # + # See above for explanation of the extra things we need to do on Windows before running the bt script. Most of + # that does not need doing again here, but PYTHONIOENCODING does need setting again. 
+ # + - name: Package (New) + shell: msys2 {0} + run: | + cd /C/_/ + echo "Working directory is:" + pwd + echo "Installed Python is:" + which python + python --version + echo "Installed pip is:" + which pip + pip --version + export PYTHONIOENCODING=utf8 + echo "Running ./bt -v package" + ./bt -v package + cd mbuild/packages + pwd + tree -sh + - name: Upload Windows binaries (installers) if: ${{ success()}} uses: actions/upload-artifact@v3 @@ -101,6 +225,8 @@ jobs: path: | C:/_/build/brewtarget*.exe C:/_/build/brewtarget*.exe.sha256 + C:/_/mbuild/packages/windows/Brewtarget*Installer.exe + C:/_/mbuild/packages/windows/Brewtarget*Installer.exe.sha256sum retention-days: 7 - name: Upload error info from failed build diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 000000000..c776e5c92 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "third-party/libbacktrace"] + path = third-party/libbacktrace + url = https://github.com/ianlancetaylor/libbacktrace diff --git a/CMakeLists.txt b/CMakeLists.txt index 2e3b60ed7..1b406db84 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -114,7 +114,7 @@ endif() #======================================================================================================================= # It's simplest to keep the project name all lower-case as it means we can use a lot more of the default settings for # Linux packaging (where directory names etc are expected to be all lower-case) -project(brewtarget VERSION 3.0.6 LANGUAGES CXX) +project(brewtarget VERSION 3.0.7 LANGUAGES CXX) message(STATUS "Building ${PROJECT_NAME} version ${PROJECT_VERSION}") message(STATUS "PROJECT_SOURCE_DIR is ${PROJECT_SOURCE_DIR}") # Sometimes we do need the capitalised version of the project name @@ -274,7 +274,7 @@ if(NOT ${NO_MESSING_WITH_FLAGS}) # "Marks the object as not requiring executable stack". # # However, this is not sufficient. So, for the moment, we suppress the rpmlint error (see - # packaging/rpmLintFilters.toml). 
+   # packaging/linux/rpmLintFilters.toml).
    #
    set(CMAKE_CXX_FLAGS_RELEASE "-Wall -ansi -pedantic -Wno-long-long -O2 -z noexecstack")
    #
@@ -626,21 +626,36 @@ message(STATUS "Using Qt version " ${Qt5Core_VERSION})
 # .:TODO:. So far don't have stacktraces working properly on Windows (everything shows as register_frame_ctor), so
 #          that needs some more investigation. (It could be that it's a bug in Boost, at least according to
 #          https://stackoverflow.com/questions/54333608/boost-stacktrace-not-demangling-names-when-cross-compiled)
+#
+# ******************
+# *** Boost JSON ***
+# ******************
+#
+# Boost JSON is an (optionally) header-only library that was introduced in Boost 1.75 in December 2020. One of the
+# features we use, JSON pointers (the equivalent of XML's XPaths) was only introduced in Boost 1.79. As of March
+# 2022, Ubuntu 20.04 LTS only has packages for Boost 1.71 from August 2019, hence the need to manually install a
+# newer Boost.
+#
+# ******************
+# *** Boost.Core ***
+# ******************
+#
+# Boost.Core, part of the collection of the Boost C++ Libraries, is a collection of core utilities used by other Boost
+# libraries. Boost JSON needs a more recent version than 1.71.
+# set(Boost_USE_STATIC_LIBS ON) if(WIN32) -find_package(Boost REQUIRED) + find_package(Boost 1.79.0 REQUIRED) elseif(APPLE) -find_package(Boost REQUIRED) + find_package(Boost 1.79.0 REQUIRED) else() -# TBD Some users report problems getting CMake to find libboost_stacktrace_backtrace on Ubuntu and Gentoo, so disable it -# for now and fallback to the header-only version -#find_package(Boost REQUIRED COMPONENTS stacktrace_backtrace) -find_package(Boost REQUIRED) + # Note that header-only libraries don't have a component + find_package(Boost 1.79.0 REQUIRED COMPONENTS stacktrace_backtrace) endif() include_directories(${Boost_INCLUDE_DIRS}) # Uncomment the next two lines if you want to find where Boost headers and DLLs are on your system -message( "Boost include directories: " ${Boost_INCLUDE_DIRS} ) -message( "Boost libraries: " ${Boost_LIBRARIES} ) +message("Boost include directories: ${Boost_INCLUDE_DIRS}") +message("Boost libraries: ${Boost_LIBRARIES}") # # Extra requirements for Boost Stacktrace @@ -777,7 +792,7 @@ set(filesToInstall_data ${repoDir}/data/default_db.sqlite ${repoDir}/doc/manual-en.pdf) # Desktop files to install. -set(filesToInstall_desktop ${repoDir}/${PROJECT_NAME}.desktop) +set(filesToInstall_desktop ${repoDir}/linux/${PROJECT_NAME}.desktop) # Icon files to install. set(filesToInstall_icons ${repoDir}/images/${PROJECT_NAME}.svg) @@ -1765,7 +1780,7 @@ if(UNIX AND NOT APPLE) # so we have to set it explicitly here set(CPACK_RPM_PACKAGE_DESCRIPTION ${CPACK_PACKAGE_DESCRIPTION}) - # This has to match one of the ValidLicenses values in packaging/rpmLintFilters.toml. (See comment in that file for + # This has to match one of the ValidLicenses values in packaging/linux/rpmLintFilters.toml. (See comment in that file for # more info.) set(CPACK_RPM_PACKAGE_LICENSE "GPL-3.0-or-later") @@ -1938,7 +1953,7 @@ if(UNIX AND NOT APPLE) COMMAND lintian --no-tag-display-limit *.deb # Running rpmlint is the equivalent exercise for RPMs. 
Most common error and warning codes are listed at # https://fedoraproject.org/wiki/Common_Rpmlint_issues - COMMAND rpmlint --config ${repoDir}/packaging *.rpm + COMMAND rpmlint --config ${repoDir}/packaging/linux *.rpm WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} COMMENT "Running lintian and on deb package and rpmlint on rpm package. Warnings about man pages are expected!" ) diff --git a/bt b/bt new file mode 100755 index 000000000..cd160b03e --- /dev/null +++ b/bt @@ -0,0 +1,2152 @@ +#!/usr/bin/env python3 +# +# bt is part of Brewtarget, and is copyright the following authors 2022-2023: +# • Matt Young +# +# Brewtarget is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Brewtarget is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied +# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more +# details. +# +# You should have received a copy of the GNU General Public License along with this program. If not, see +# . +# + +#----------------------------------------------------------------------------------------------------------------------- +# This build tool (bt) script helps with Git setup, meson build configuration and packaging. Usage is: +# +# ./bt setup Sets up Git options and configures the 'mbuild' meson build directory +# +# ./bt setup all As above but also tries to install all tools and dependencies we need +# +# ./bt package Does the packaging. First runs 'meson install' (with extra options to "install" +# binaries, data etc to a subdirectory of the build directory rather than to where they +# need to be for a local install). Then creates a distributable package, making use +# of various build variables passed back from Meson. +# +# +# +# .:TODO:. 
At some point we should be able to retire: +# configure +# setupgit.sh +# CMakeLists.txt +# src/CMakeLists.txt +# +# .:TODO:. We should probably also break this file up into several smaller ones! +# +# Note that Python allows both single and double quotes for delimiting strings. In Meson, we need single quotes, in +# C++, we need double quotes. We mostly try to use single quotes below for consistency with Meson, except where using +# double quotes avoids having to escape a single quote. +#----------------------------------------------------------------------------------------------------------------------- + +#----------------------------------------------------------------------------------------------------------------------- +# Python built-in modules we use +#----------------------------------------------------------------------------------------------------------------------- +import argparse +import datetime +import glob +import logging +import os +import pathlib +import platform +import re +import shutil +import stat +import subprocess +import sys +import tempfile +from decimal import Decimal + +#----------------------------------------------------------------------------------------------------------------------- +# Global constants +#----------------------------------------------------------------------------------------------------------------------- +# There is some inevitable duplication with constants in meson.build, but we try to keep it to a minimum +projectName = 'brewtarget' +capitalisedProjectName = projectName.capitalize() +projectUrl = 'https://github.com/' + capitalisedProjectName + '/' + projectName + '/' + +#----------------------------------------------------------------------------------------------------------------------- +# Welcome banner and environment info +#----------------------------------------------------------------------------------------------------------------------- +# The '%c' argument to strftime means "Locale’s 
appropriate date and time representation" +print( + '⭐ ' + capitalisedProjectName + ' Build Tool (bt) running on ' + platform.system() + ' (' + platform.release() + + '), using Python ' + platform.python_version() + ', at ' + datetime.datetime.now().strftime('%c') + ' ⭐' +) + +#----------------------------------------------------------------------------------------------------------------------- +# Set up logging to stderr +#----------------------------------------------------------------------------------------------------------------------- +logging.basicConfig(format='%(message)s') +log = logging.getLogger(__name__) +# This is our default log level, but it can be overridden via the -v and -q command line options -- see below +log.setLevel(logging.INFO) +# Include the log level in the message +handler = logging.StreamHandler() +handler.setFormatter( + # You can add timestamps etc to logs, but that's overkill for this script. Source file location of log message is + # however pretty useful for debugging. + logging.Formatter('{levelname:s}: {message} [{filename:s}:{lineno:d}]', style='{') +) +log.addHandler(handler) +# If we don't do this, everything gets printed twice +log.propagate = False + +#----------------------------------------------------------------------------------------------------------------------- +# Python 3rd-party modules we use +#----------------------------------------------------------------------------------------------------------------------- +# Per https://docs.python.org/3/library/ensurepip.html, the official way to ensure Pip is installed and at the latest +# version is via 'python -m ensurepip --upgrade' (which should of course be 'python3 -m ensurepip --upgrade' on systems +# that have both Python 2 and Python 3). However, on Debian/Ubuntu, this will give an error "No module named ensurepip" +# because ensurepip is deliberately disabled to push you towards using 'sudo apt update' + +# 'sudo apt install python3-pip'. 
+if (platform.system() != 'Linux'): + # https://docs.python.org/3/library/sys.html#sys.executable says sys.executable is '"the absolute path of the + # executable binary for the Python interpreter, on systems where this makes sense". + log.info( + 'Attempting to ensure latest version of Pip is installed via ' + sys.executable + ' -m ensurepip --upgrade' + ) + subprocess.run([sys.executable, '-m', 'ensurepip', '--upgrade']) +else: + # We don't want to run a sudo command every time the script is invoked, so check whether it's necessary + exe_pip = shutil.which('pip3') + if (exe_pip is None or exe_pip == ''): + log.info('Attempting to install Pip') + subprocess.run(['sudo', 'apt', 'update']) + subprocess.run(['sudo', 'apt', 'install', 'python3-pip']) + + +# If Pip still isn't installed we need to bail here. +exe_pip = shutil.which('pip3') +if (exe_pip is None or exe_pip == ''): + pathEnvVar = '' + if ('PATH' in os.environ): + pathEnvVar = os.environ['PATH'] + log.critical( + 'Cannot find pip (PATH=' + pathEnvVar + ') - please see https://pip.pypa.io/en/stable/installation/ for how to ' + + 'install' + ) + exit(1) +# +# We use the packaging module (see https://pypi.org/project/packaging/) for handling version numbers (as described at +# https://packaging.pypa.io/en/stable/version.html). +# +# On MacOS at least, we also need to install setuptools to be able to access packaging.version. +# +subprocess.run([exe_pip, 'install', 'packaging']) +subprocess.run([exe_pip, 'install', 'setuptools']) +import packaging.version + +# The requests library (see https://pypi.org/project/requests/) is used for downloading files in a more Pythonic way +# than invoking wget through the shell. +subprocess.run([exe_pip, 'install', 'requests']) +import requests + +# +# Once all platforms we're running on have Python version 3.11 or above, we will be able to use the built-in tomllib +# library (see https://docs.python.org/3/library/tomllib.html) for parsing TOML. 
Until then, it's easier to import the +# tomlkit library (see https://pypi.org/project/tomlkit/) which actually has rather more functionality than we need +# +subprocess.run([exe_pip, 'install', 'tomlkit']) +import tomlkit + +#----------------------------------------------------------------------------------------------------------------------- +# Parse command line arguments +#----------------------------------------------------------------------------------------------------------------------- +# We do this (nearly) first as we want the program to exit straight away if incorrect arguments are specified +# Choosing which action to call is done a the end of the script, after all functions are defined +# +# Using Python argparse saves us writing a lot of boilerplate, although the help text it generates on the command line +# is perhaps a bit more than we want (eg having to separate 'bt --help' and 'bt setup --help' is overkill for us). +# There are ways around this -- eg see +# https://stackoverflow.com/questions/20094215/argparse-subparser-monolithic-help-output -- but they are probably more +# complexity than is merited here. +# +parser = argparse.ArgumentParser( + prog = 'bt', + description = capitalisedProjectName + ' build tool. A utility to help with installing dependencies, Git ' + + 'setup, Meson build configuration and packaging.', + epilog = 'See ' + projectUrl + ' for info and latest releases' +) + +# Log level +group = parser.add_mutually_exclusive_group() +group.add_argument('-v', '--verbose', action = 'store_true', help = 'Enable debug logging of this script') +group.add_argument('-q', '--quiet', action = 'store_true', help = 'Suppress info logging of this script') + +# Per https://docs.python.org/3/library/argparse.html#sub-commands, you use sub-parsers for sub-commands. +subparsers = parser.add_subparsers( + dest = 'subCommand', + required = True, + title = 'action', + description = "Exactly one of the following actions must be specified. 
(For actions marked ✴, specify -h or " + "--help AFTER the action for info about options -- eg '%(prog)s setup --help'.)" +) + +# Parser for 'setup' +parser_setup = subparsers.add_parser('setup', help = '✴ Set up meson build directory (mbuild) and git options') +subparsers_setup = parser_setup.add_subparsers(dest = 'setupOption', required = False) +parser_setup_all = subparsers_setup.add_parser( + 'all', + help = 'Specifying this will also automatically install libraries and frameworks we depend on' +) + +# Parser for 'package' +parser_package = subparsers.add_parser('package', help='Build a distributable installer') + +# +# Process the arguments for use below +# +# This try/expect ensures that help is printed if the script is invoked without arguments. It's not perfect as you get +# the usage line twice (because parser.parse_args() outputs it to stderr before throwing SystemExit) but it's good +# enough for now at least. +# +try: + args = parser.parse_args() +except SystemExit as se: + if (se.code != None and se.code != 0): + parser.print_help() + sys.exit(0) + +# +# The one thing we do set straight away is log level +# Possible levels are 'CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG', 'NOTSET'. We choose 'INFO' for default, 'DEBUG' +# for verbose and 'WARNING' for quiet. You wouldn't want to suppress warnings, would you? 
:-) +# +if (args.verbose): + log.setLevel(logging.DEBUG) +elif (args.quiet): + log.setLevel(logging.WARNING) + +log.debug('Parsed command line arguments as ' + str(args)) + +#----------------------------------------------------------------------------------------------------------------------- +# Note the working directory from which we were invoked -- though it shouldn't matter as we try to be independent of +# this +#----------------------------------------------------------------------------------------------------------------------- +log.debug('Working directory when invoked: ' + pathlib.Path.cwd().as_posix()) + +#----------------------------------------------------------------------------------------------------------------------- +# Directories +#----------------------------------------------------------------------------------------------------------------------- +dir_base = pathlib.Path(__file__).parent.resolve() +dir_gitInfo = dir_base.joinpath('.git') +dir_build = dir_base.joinpath('mbuild') +# Where submodules live and how many there are. Currently there are 2: libbacktrace and valijson +dir_gitSubmodules = dir_base.joinpath('third-party') +num_gitSubmodules = 2 +# Top-level packaging directory - NB deliberately different name from 'packaging' (= dir_base.joinpath('packaging')) +dir_packages = dir_build.joinpath('packages') +dir_packages_platform = dir_packages.joinpath(platform.system().lower()) # Platform-specific packaging directory +dir_packages_source = dir_packages.joinpath('source') + +#----------------------------------------------------------------------------------------------------------------------- +# Helper function for checking result of running external commands +# +# Given a CompletedProcess object returned from subprocess.run(), this checks the return code and, if it is non-zero +# stops this script with an error message and the same return code. 
Otherwise the CompletedProcess object is returned +# to the caller (to make it easier to chain things together). +#----------------------------------------------------------------------------------------------------------------------- +def abortOnRunFail(runResult: subprocess.CompletedProcess): + if (runResult.returncode != 0): + # According to https://docs.python.org/3/library/subprocess.html#subprocess.CompletedProcess, + # CompletedProcess.args (the arguments used to launch the process) "may be a list or a string", but its not clear + # when it would be one or the other. + if (isinstance(runResult.args, str)): + log.critical('Error running ' + runResult.args) + else: + commandName = os.path.basename(runResult.args[0]) + log.critical('Error running ' + commandName + ' (' + ' '.join(str(ii) for ii in runResult.args) + ')') + + exit(runResult.returncode) + + return runResult + +#----------------------------------------------------------------------------------------------------------------------- +# Helper function for copying one or more files to a directory that might not yet exist +#----------------------------------------------------------------------------------------------------------------------- +def copyFilesToDir(files, directory): + os.makedirs(directory, exist_ok=True) + for currentFile in files: + shutil.copy2(currentFile, directory) + return + +#----------------------------------------------------------------------------------------------------------------------- +# Helper function for counting files in a directory tree +#----------------------------------------------------------------------------------------------------------------------- +def numFilesInTree(path): + numFiles = 0 + for root, dirs, files in os.walk(path): + numFiles += len(files) + return numFiles + +#----------------------------------------------------------------------------------------------------------------------- +# Helper function for downloading a file 
+#-----------------------------------------------------------------------------------------------------------------------
+def downloadFile(url):
+   filename = url.split('/')[-1]
+   log.info('Downloading ' + url + ' to ' + filename + ' in directory ' + pathlib.Path.cwd().as_posix())
+   response = requests.get(url)
+   if (response.status_code != 200):
+      log.critical('Error code ' + str(response.status_code) + ' while downloading ' + url) # status_code is an int
+      exit(1)
+   with open(filename, 'wb') as fd:
+      for chunk in response.iter_content(chunk_size = 128):
+         fd.write(chunk)
+   return
+
+#-----------------------------------------------------------------------------------------------------------------------
+# Set global variables exe_git and exe_meson with the locations of the git and meson executables plus mesonVersion with
+# the version of meson installed
+#
+# We want to give helpful error messages if Meson or Git is not installed. For other missing dependencies we can rely
+# on Meson itself to give explanatory error messages.
+#-----------------------------------------------------------------------------------------------------------------------
+def findMesonAndGit():
+   # Advice at https://docs.python.org/3/library/subprocess.html is "For maximum reliability, use a fully qualified path
+   # for the executable. To search for an unqualified name on PATH, use shutil.which()"
+
+   # Check Meson is installed. (See installDependencies() below for what we do to attempt to install it from this
+   # script.)
+ global exe_meson + exe_meson = shutil.which("meson") + if (exe_meson is None or exe_meson == ""): + log.critical('Cannot find meson - please see https://mesonbuild.com/Getting-meson.html for how to install') + exit(1) + + global mesonVersion + rawVersion = abortOnRunFail(subprocess.run([exe_meson, '--version'], capture_output=True)).stdout.decode('UTF-8').rstrip() + log.debug('Meson version raw: ' + rawVersion) + mesonVersion = packaging.version.parse(rawVersion) + log.debug('Meson version parsed: ' + str(mesonVersion)) + + # Check Git is installed if its magic directory is present + global exe_git + exe_git = shutil.which("git") + if (dir_gitInfo.is_dir()): + log.debug('Found git information directory:' + dir_gitInfo.as_posix()) + if (exe_git is None or exe_git == ""): + log.critical('Cannot find git - please see https://git-scm.com/downloads for how to install') + exit(1) + + return + +#----------------------------------------------------------------------------------------------------------------------- +# Copy a file, removing comments and folded lines +# +# Have had various problems with comments in debian package control file, even though they are theoretically allowed, so +# we strip them out here, hence slightly more involved code than just +# shutil.copy2(dir_build.joinpath('control'), dir_packages_deb_control) +# +# Similarly, some of the fields in the debian control file that we want to split across multiple lines are not actually +# allowed to be so "folded" by the Debian package generator. So, we do our own folding here. (At the same time, we +# remove extra spaces that make sense on the unfolded line but not once everything is joined onto single line.) 
+#----------------------------------------------------------------------------------------------------------------------- +def copyWithoutCommentsOrFolds(inputPath, outputPath): + with open(inputPath, 'r') as inputFile, open(outputPath, 'w') as outputFile: + for line in inputFile: + if (not line.startswith('#')): + if (not line.endswith('\\\n')): + outputFile.write(line) + else: + foldedLine = "" + while (line.endswith('\\\n')): + foldedLine += line.removesuffix('\\\n') + line = next(inputFile) + foldedLine += line + # The split and join here is a handy trick for removing repeated spaces from the line without + # fumbling around with regular expressions. Note that this takes the newline off the end, hence + # why we have to add it back manually. + outputFile.write(' '.join(foldedLine.split())) + outputFile.write('\n') + return + +#----------------------------------------------------------------------------------------------------------------------- +# Create fileToDistribute.sha256sum for a given fileToDistribute in a given directory +#----------------------------------------------------------------------------------------------------------------------- +def writeSha256sum(directory, fileToDistribute): + # + # In Python 3.11 we could use the file_digest() function from the hashlib module to do this. But it's rather + # more work to do in Python 3.10, so we just use the `sha256sum` command instead. + # + # Note however, that `sha256sum` includes the supplied directory path of a file in its output. We want just the + # filename, not its full or partial path on the build machine. So we change into the directory of the file before + # running the `sha256sum` command. 
+ # + previousWorkingDirectory = pathlib.Path.cwd().as_posix() + os.chdir(directory) + with open(directory.joinpath(fileToDistribute + '.sha256sum').as_posix(),'w') as sha256File: + abortOnRunFail( + subprocess.run(['sha256sum', fileToDistribute], + capture_output=False, + stdout=sha256File) + ) + os.chdir(previousWorkingDirectory) + return + +#----------------------------------------------------------------------------------------------------------------------- +# Ensure git submodules are present +# +# When a git repository is cloned, the submodules don't get cloned until you specifically ask for it via the +# --recurse-submodules flag. +# +# (Adding submodules is done via Git itself. Eg: +# cd ../third-party +# git submodule add https://github.com/ianlancetaylor/libbacktrace +# But this only needs to be done once, by one person, and committed to our repository, where the connection is +# stored in the .gitmodules file.) +#----------------------------------------------------------------------------------------------------------------------- +def ensureSubmodulesPresent(): + findMesonAndGit() + if (not dir_gitSubmodules.is_dir()): + log.info('Creating submodules directory: ' + dir_gitSubmodules.as_posix()) + os.makedirs(dir_gitSubmodules, exist_ok=True) + if (numFilesInTree(dir_gitSubmodules) < num_gitSubmodules): + log.info('Pulling in submodules in ' + dir_gitSubmodules.as_posix()) + abortOnRunFail(subprocess.run([exe_git, "submodule", "init"], capture_output=False)) + abortOnRunFail(subprocess.run([exe_git, "submodule", "update"], capture_output=False)) + return + +#----------------------------------------------------------------------------------------------------------------------- +# Function to install dependencies -- called if the user runs 'bt setup all' +#----------------------------------------------------------------------------------------------------------------------- +def installDependencies(): + log.info('Checking which dependencies need to be 
installed') + # + # I looked at using ConanCenter (https://conan.io/center/) as a source of libraries, so that we could automate + # installing dependencies, but it does not have all the ones we need. Eg it has Boost, Qt, Xerces-C and Valijson, + # but not Xalan-C. (Someone else has already requested Xalan-C, see + # https://github.com/conan-io/conan-center-index/issues/5546, but that request has been open a long time, so its + # fulfilment doesn't seem imminent.) It also doesn't yet integrate quite as well with meson as we might like (eg + # as at 2023-01-15, https://docs.conan.io/en/latest/reference/conanfile/tools/meson.html is listed as "experimental + # and subject to breaking changes". + # + # Another option is vcpkg (https://vcpkg.io/en/index.html), which does have both Xerces-C and Xalan-C, along with + # Boost, Qt and Valijson. There is an example here https://github.com/Neumann-A/meson-vcpkg of how to use vcpkg from + # Meson. However, it's pretty slow to get started with because it builds from source everything it installs + # (including tools it depends on such as CMake) -- even if they are already installed on your system from another + # source. This is laudably correct but I'm too impatient to do things that way. + # + # Will probably take another look at Conan in future, subject to working out how to have it use already-installed + # versions of libraries/frameworks if they are present. The recommended way to install Conan is via a Python + # package, which makes that part easy. However, there is a fair bit of other ramp-up to do, and some breaking + # changes between "current" Conan 1.X and "soon-to-be-released" Conan 2.0. So, will leave it for now and stick + # mostly to native installs for each of the 3 platforms (Linux, Windows, Mac). + # + # Other notes: + # - GNU coreutils (https://www.gnu.org/software/coreutils/manual/coreutils.html) is probably already installed on + # most Linux distros, but not necessarily on Mac and Windows. 
It gives us sha256sum. + # + match platform.system(): + + #----------------------------------------------------------------------------------------------------------------- + #---------------------------------------------- Linux Dependencies ----------------------------------------------- + #----------------------------------------------------------------------------------------------------------------- + case 'Linux': + # + # NOTE: For the moment at least, we are assuming you are on Ubuntu or another Debian-based Linux. For other + # flavours of the OS you need to install libraries and frameworks manually. + # + + # + # We need a recent version of Boost, ie Boost 1.79 or newer, to use Boost.JSON. For Windows and Mac this is + # fine if you are installing from MSYS2 (https://packages.msys2.org/package/mingw-w64-x86_64-boost) or + # Homebrew (https://formulae.brew.sh/formula/boost) respectively. Unfortunately, as of late-2022, many + # Linux distros provide only older versions of Boost. (Eg, on Ubuntu, you can see this by running + # 'apt-cache policy libboost-dev'.) 
+ # + # First, check whether Boost is installed and if so, what version + # + # We'll look in the following places: + # /usr/include/boost/version.hpp <-- Distro-installed Boost + # /usr/local/include/boost/version.hpp <-- Manually-installed Boost + # ${BOOST_ROOT}/boost/version.hpp <-- If the BOOST_ROOT environment variable is set it gives an + # alternative place to look + # + # Although things should compile with 1.79.0, if we're going to all the bother of installing Boost, we'll + # install a more recent one + minBoostVersion = packaging.version.parse('1.79.0') + boostVersionToInstall = packaging.version.parse('1.80.0') # NB: This _must_ have the patch version + maxBoostVersionFound = packaging.version.parse('0') + possibleBoostVersionHeaders = [pathlib.Path('/usr/include/boost/version.hpp'), + pathlib.Path('/usr/local/include/boost/version.hpp')] + if ('BOOST_ROOT' in os.environ): + possibleBoostVersionHeaders.append(pathlib.Path(os.environ['BOOST_ROOT']).joinpath('boost/version.hpp')) + for boostVersionHeader in possibleBoostVersionHeaders: + if (boostVersionHeader.is_file()): + runResult = abortOnRunFail( + subprocess.run( + ['grep', '#define BOOST_LIB_VERSION ', boostVersionHeader.as_posix()], + encoding = "utf-8", + capture_output = True + ) + ) + log.debug('In ' + boostVersionHeader.as_posix() + ' found ' + str(runResult.stdout)) + versionFoundRaw = re.sub( + r'^.*BOOST_LIB_VERSION "([0-9_]*)".*$', r'\1', str(runResult.stdout).rstrip() + ).replace('_', '.') + versionFound = packaging.version.parse(versionFoundRaw) + if (versionFound > maxBoostVersionFound): + maxBoostVersionFound = versionFound + log.debug('Parsed as ' + str(versionFound) + '. 
(Highest found = ' + str(maxBoostVersionFound) + ')') + + # + # The Boost version.hpp configuration header file gives two constants for defining the version of Boost + # installed: + # + # BOOST_VERSION is a pure numeric value: + # BOOST_VERSION % 100 is the patch level + # BOOST_VERSION / 100 % 1000 is the minor version + # BOOST_VERSION / 100000 is the major version + # So, eg, for Boost 1.79.0 (= 1.079.00), BOOST_VERSION = 107900 + # + # BOOST_LIB_VERSION is a string value with underscores instead of dots (and without the patch level if that's + # 0). So, eg for Boost 1.79.0, BOOST_LIB_VERSION = "1_79" (and for 1.23.45 it would be "1_23_45") + # + # We use BOOST_LIB_VERSION as it's easy to convert it to a version number that Python can understand + # + log.debug( + 'Max version of Boost found: ' + str(maxBoostVersionFound) + '. Need >= ' + str(minBoostVersion) + + ', otherwise will try to install ' + str(boostVersionToInstall) + ) + if (maxBoostVersionFound < minBoostVersion): + log.info( + 'Installing Boost ' + str(boostVersionToInstall) + ' as newest version found was ' + + str(maxBoostVersionFound) + ) + # + # To manually install the latest version of Boost from source, first we uninstall any old version + # installed via the distro (eg, on Ubuntu, this means 'sudo apt remove libboost-all-dev'), then we follow + # the instructions at https://www.boost.org/more/getting_started/index.html. + # + # It's best to leave the default install location: headers in the 'boost' subdirectory of + # /usr/local/include and libraries in /usr/local/lib. + # + # (It might initially _seem_ a good idea to put things in the same place as the distro packages, ie + # running './bootstrap.sh --prefix=/usr' to put headers in /usr/include and libraries in /usr/lib. + # However, this will mean that Meson cannot find the manually-installed Boost, even though it can find + # distro-installed Boost in this location.) 
So, eg, for Boost 1.80 on Linux, this means the following + # in the shell: + # + # cd ~ + # mkdir ~/boost-tmp + # cd ~/boost-tmp + # wget https://boostorg.jfrog.io/artifactory/main/release/1.80.0/source/boost_1_80_0.tar.bz2 + # tar --bzip2 -xf boost_1_80_0.tar.bz2 + # cd boost_1_80_0 + # ./bootstrap.sh + # sudo ./b2 install + # cd ~ + # sudo rm -rf ~/boost-tmp + # + # We can handle the temporary directory stuff more elegantly (ie RAII style) in Python however + # + with tempfile.TemporaryDirectory(ignore_cleanup_errors = True) as tmpDirName: + previousWorkingDirectory = pathlib.Path.cwd().as_posix() + os.chdir(tmpDirName) + log.debug('Working directory now ' + pathlib.Path.cwd().as_posix()) + boostUnderscoreName = 'boost_' + str(boostVersionToInstall).replace('.', '_') + downloadFile( + 'https://boostorg.jfrog.io/artifactory/main/release/' + str(boostVersionToInstall) + '/source/' + + boostUnderscoreName + '.tar.bz2' + ) + log.debug('Boost download completed') + shutil.unpack_archive(boostUnderscoreName + '.tar.bz2') + log.debug('Boost archive extracted') + os.chdir(boostUnderscoreName) + log.debug('Working directory now ' + pathlib.Path.cwd().as_posix()) + abortOnRunFail(subprocess.run(['./bootstrap.sh'])) + log.debug('Boost bootstrap finished') + abortOnRunFail(subprocess.run(['sudo', './b2', 'install'])) + log.debug('Boost install finished') + os.chdir(previousWorkingDirectory) + log.debug('Working directory now ' + pathlib.Path.cwd().as_posix() + '. Removing ' + tmpDirName) + # + # The only issue with the RAII approach to removing the temporary directory is that some of the files + # inside it will be owned by root, so there will be a permissions error when Python attempts to delete + # the directory tree. Fixing the permissions beforehand is a slightly clunky way around this. + # + abortOnRunFail( + subprocess.run( + ['sudo', 'chmod', '--recursive', 'a+rw', tmpDirName] + ) + ) + + # + # Almost everything else we can rely on the distro packages. 
A few notes: + # - We need CMake even for the Meson build because meson uses CMake as one of its library-finding tools + # - The pandoc package helps us create man pages from markdown input + # - The build-essential and debhelper packages are for creating Debian packages + # - The rpm and rpmlint packages are for creating RPM packages + # + log.info('Ensuring other libraries and frameworks are installed') + abortOnRunFail(subprocess.run(['sudo', 'apt-get', 'update'])) + abortOnRunFail( + subprocess.run( + ['sudo', 'apt', 'install', '-y', 'build-essential', + 'cmake', + 'coreutils', + 'debhelper', + 'git', + 'libqt5multimedia5-plugins', + 'libqt5sql5-psql', + 'libqt5sql5-sqlite', + 'libqt5svg5-dev', + 'libxalan-c-dev', + 'libxerces-c-dev', + 'lintian', + 'meson', + 'ninja-build', + 'pandoc', + 'python3', + 'qtbase5-dev', + 'qtmultimedia5-dev', + 'qttools5-dev', + 'qttools5-dev-tools', + 'rpm', + 'rpmlint'] + ) + ) + + # + # Ubuntu 20.04 packages only have Meson 0.53.2, and we need 0.60.0 or later. In this case it means we have to + # install Meson via pip, which is not ideal on Linux. + # + # Specifically, as explained at https://mesonbuild.com/Getting-meson.html#installing-meson-with-pip, although + # using the pip3 install gets a newer version, we have to do the pip install as root (which is normally not + # recommended). If we don't do this, then running `meson install` (or even `sudo meson install`) will barf on + # Linux (because we need to be able to install files into system directories). + # + # So, where a sufficiently recent version of Meson is available in the distro packages (eg + # `sudo apt install meson` on Ubuntu etc) it is much better to install this. Installing via pip is a last + # resort. + # + # The distro ID we get from 'lsb_release -is' will be 'Ubuntu' for all the variants of Ubuntu (eg including + # Kubuntu). Not sure what happens on derivatives such as Linux Mint though. 
+ # + distroName = str( + abortOnRunFail(subprocess.run(['lsb_release', '-is'], encoding = "utf-8", capture_output = True)).stdout + ).rstrip() + log.debug('Linux distro: ' + distroName) + if ('Ubuntu' == distroName): + ubuntuRelease = str( + abortOnRunFail(subprocess.run(['lsb_release', '-rs'], encoding = "utf-8", capture_output = True)).stdout + ).rstrip() + log.debug('Ubuntu release: ' + ubuntuRelease) + if (Decimal(ubuntuRelease) < Decimal('22.04')): + log.info('Installing newer version of Meson the hard way') + abortOnRunFail(subprocess.run(['sudo', 'apt', 'remove', '-y', 'meson'])) + abortOnRunFail(subprocess.run(['sudo', 'pip3', 'install', 'meson'])) + + #----------------------------------------------------------------------------------------------------------------- + #--------------------------------------------- Windows Dependencies ---------------------------------------------- + #----------------------------------------------------------------------------------------------------------------- + case 'Windows': + log.debug('Windows') + # + # First thing is to detect whether we're in the MSYS2 environment, and, if so, whether we're in the right + # version of it. + # + # We take the existence of an executable `uname` in the path as a pretty good indicator that we're in MSYS2 + # or similar environment). Then the result of running that should tell us if we're in the 32-bit version of + # MSYS2. (See comment below on why we don't yet support the 64-bit version, though I'm sure we'll fix this one + # day.) + # + exe_uname = shutil.which('uname') + if (exe_uname is None or exe_uname == ''): + log.critical('Cannot find uname. 
This script needs to be run under MSYS2 - see https://www.msys2.org/') + exit(1) + # We could just run uname without the -a option, but the latter gives some useful diagnostics to log + unameResult = str( + abortOnRunFail(subprocess.run([exe_uname, '-a'], encoding = "utf-8", capture_output = True)).stdout + ).rstrip() + log.debug('Running uname -a gives ' + unameResult) + # Output from `uname -a` will be of the form + # MINGW64_NT-10.0-19044 Matt-Virt-Win 3.4.3.x86_64 2023-01-11 20:20 UTC x86_64 Msys + # We just need the bit before the first underscore, eg + # MINGW64 + terminalVersion = unameResult.split(sep='_', maxsplit=1)[0] + if (terminalVersion != 'MINGW32'): + # One day we'll try to get the 64-bit build working on Windows. I think it's just the packaging step that's + # the problem. For now, it's easier to insist on 32-bit at set-up. (Obviously 32-bit apps run just fine on + # 64-bit Windows. I don't think there would be any noticeable difference to the end user in having a 64-bit + # version of the app.) + log.critical('Running in ' + terminalVersion + ' but need to run in MINGW32 (ie 32-bit build environment)') + exit(1) + + log.info('Ensuring required libraries and frameworks are installed') + # + # Before we install packages, we want to make sure the MSYS2 installation itself is up-to-date, otherwise you + # can hit problems + # + # pacman -S -y should download a fresh copy of the master package database + # pacman -S -u should upgrade all currently-installed packages that are out-of-date + # + abortOnRunFail(subprocess.run(['pacman', '-S', '-y', '--noconfirm'])) + abortOnRunFail(subprocess.run(['pacman', '-S', '-u', '--noconfirm'])) + + # + # We'd normally want to go with the 64-bit versions of things (x86_64) but AIUI it's a bit hard to handle this + # in the NSIS installer, so we use the 32-bit versions (i686). + # + # We _could_ just invoke pacman once with the list of everything we want to install.
However, this can make + # debugging a bit harder when there is a pacman problem, because it doesn't always give the most explanatory + # error messages. So we loop round and install one thing at a time. + # + arch = 'i686' + installList = ['base-devel', + 'cmake', + 'coreutils', + 'doxygen', + 'gcc', + 'git', + 'mingw-w64-' + arch + '-boost', + 'mingw-w64-' + arch + '-cmake', + 'mingw-w64-' + arch + '-libbacktrace', + 'mingw-w64-' + arch + '-meson', + 'mingw-w64-' + arch + '-nsis', + 'mingw-w64-' + arch + '-qt5', + 'mingw-w64-' + arch + '-toolchain', + 'mingw-w64-' + arch + '-xalan-c', + 'mingw-w64-' + arch + '-xerces-c'] + for packageToInstall in installList: + log.debug('Installing ' + packageToInstall) + abortOnRunFail( + subprocess.run( + ['pacman', '-S', '--needed', '--noconfirm', packageToInstall] + ) + ) + + # + # Download NSIS plugins + # + # In theory we can use RAII here, eg: + # + # with tempfile.TemporaryDirectory(ignore_cleanup_errors = True) as tmpDirName: + # previousWorkingDirectory = pathlib.Path.cwd().as_posix() + # os.chdir(tmpDirName) + # ... + # os.chdir(previousWorkingDirectory) + # + # However, in practice, this gets messy when there is an error (eg download fails) because Windows doesn't like + # deleting files or directories that are in use. So, in the event of the script needing to terminate early, + # you get loads of errors, up to and including "maximum recursion depth exceeded" which rather mask whatever + # the original problem was. 
+ # + tmpDirName = tempfile.mkdtemp() + previousWorkingDirectory = pathlib.Path.cwd().as_posix() + os.chdir(tmpDirName) + downloadFile('https://nsis.sourceforge.io/mediawiki/images/a/af/Locate.zip') + shutil.unpack_archive('Locate.zip', 'Locate') + downloadFile('https://nsis.sourceforge.io/mediawiki/images/7/76/Nsislog.zip') + shutil.unpack_archive('Nsislog.zip', 'Nsislog') + copyFilesToDir(['Locate/Include/Locate.nsh'], '/mingw32/share/nsis/Include/') + copyFilesToDir(['Locate/Plugin/locate.dll', + 'Nsislog/plugin/nsislog.dll'],'/mingw32/share/nsis/Plugins/ansi/') + os.chdir(previousWorkingDirectory) + shutil.rmtree(tmpDirName, ignore_errors=False) + + #----------------------------------------------------------------------------------------------------------------- + #---------------------------------------------- Mac OS Dependencies ---------------------------------------------- + #----------------------------------------------------------------------------------------------------------------- + case 'Darwin': + log.debug('Mac') + # + # We could make this list shorter if we wanted as, eg, installing Xalan-C will cause Xerces-C to be installed + # too (as the former depends on the latter). However, I think it's clearer to explicitly list all the direct + # dependencies (eg we do make calls directly into Xerces). + # + # For the moment, we install Qt 5 (= 5.15.7), as there are code changes required to use Qt 6 + # + # .:TBD:. Installing Boost here doesn't seem to give us libboost_stacktrace_backtrace + # Also, trying to use the "--cc=clang" option to install boost gives an error ("Error: boost: no bottle + # available!") For the moment, we're just using Boost header files on Mac though, so this should be + # OK. 
+ # + installList = ['boost', + 'cmake', + 'coreutils', + 'doxygen', + 'gcc', + 'git', + 'llvm', + 'meson', + 'ninja', + 'qt@5', + 'xalan-c', + 'xerces-c'] + for packageToInstall in installList: + log.debug('Installing ' + packageToInstall) + abortOnRunFail(subprocess.run(['brew', 'install', packageToInstall])) + # + # By default, even once Qt5 is installed, Meson will not find it + # + # See https://stackoverflow.com/questions/29431882/get-qt5-up-and-running-on-a-new-mac for suggestion to do + # the following to "symlink the various Qt5 binaries and libraries into your /usr/local/bin and /usr/local/lib + # directories". + # + # Additionally, per lengthy discussion at https://github.com/Homebrew/legacy-homebrew/issues/29938, it seems + # we might also need either: + # ln -s /usr/local/Cellar/qt5/5.15.7/mkspecs /usr/local/mkspecs + # ln -s /usr/local/Cellar/qt5/5.15.7/plugins /usr/local/plugins + # or: + # export PATH=/usr/local/opt/qt5/bin:$PATH + # The former gives permission errors, so we do the latter in mac.yml + # + abortOnRunFail(subprocess.run(['brew', 'link', '--force', 'qt5'])) + + # + # dmgbuild is a Python package that provides a command line tool to create macOS disk images (aka .dmg + # files) -- see https://dmgbuild.readthedocs.io/en/latest/ + # + # Note that we install with the [badge_icons] extra so we can use the badge_icon setting (see + # https://dmgbuild.readthedocs.io/en/latest/settings.html#badge_icon) + # + abortOnRunFail(subprocess.run(['pip3', 'install', 'dmgbuild[badge_icons]'])) + + case _: + log.critical('Unrecognised platform: ' + platform.system()) + exit(1) + + #-------------------------------------------------------------------------------------------------------------------- + #------------------------------------------- Cross-platform Dependencies -------------------------------------------- + #-------------------------------------------------------------------------------------------------------------------- + # + # We use 
libbacktrace from https://github.com/ianlancetaylor/libbacktrace. It's not available as a Debian package + # and not any more included with GCC by default. It's not a large library so, unless and until we start using Conan, + # the easiest approach seems to be to bring it in as a Git submodule and compile from source. + # + ensureSubmodulesPresent() + log.info('Checking libbacktrace is built') + previousWorkingDirectory = pathlib.Path.cwd().as_posix() + backtraceDir = dir_gitSubmodules.joinpath('libbacktrace') + os.chdir(backtraceDir) + log.debug('Run configure and make in ' + backtraceDir.as_posix()) + # + # We only want to configure and compile libbacktrace once, so we do it here rather than in Meson.build + # + # Libbacktrace uses autoconf/automake so it's relatively simple to build, but for a couple of gotchas + # + # Note that, although on Linux you can just invoke `./configure`, this doesn't work in the MSYS2 environment, so, + # knowing that 'configure' is a shell script, we invoke it as such. However, we must be careful to run it with the + # correct shell, specifically `sh` (aka dash on Linux) rather than `bash`. Otherwise, the Makefile it generates will + # not work properly, and we'll end up building a library with missing symbols that gives link errors on our own + # executables. + # + # (I haven't delved deeply into this but, confusingly, if you run `sh ./configure` it puts 'SHELL = /bin/bash' in the + # Makefile, whereas, if you run `bash ./configure`, it puts the line 'SHELL = /bin/sh' in the Makefile.) 
+ # + abortOnRunFail(subprocess.run(['sh', './configure'])) + abortOnRunFail(subprocess.run(['make'])) + os.chdir(previousWorkingDirectory) + + log.info('*** Finished checking / installing dependencies ***') + return + +#----------------------------------------------------------------------------------------------------------------------- +# ./bt setup +#----------------------------------------------------------------------------------------------------------------------- +def doSetup(setupOption): + if (setupOption == 'all'): + installDependencies() + + findMesonAndGit() + + # If this is a git checkout then let's set up git with the project standards + if (dir_gitInfo.is_dir()): + log.info('Setting up ' + capitalisedProjectName + ' git preferences') + # Enforce indentation with spaces, not tabs. + abortOnRunFail( + subprocess.run( + [exe_git, + "config", + "--file", dir_gitInfo.joinpath('config').as_posix(), + "core.whitespace", + "tabwidth=3,tab-in-indent"], + capture_output=False + ) + ) + + # Enable the standard pre-commit hook that warns you about whitespace errors + shutil.copy2(dir_gitInfo.joinpath('hooks/pre-commit.sample'), + dir_gitInfo.joinpath('hooks/pre-commit')) + + ensureSubmodulesPresent() + + # Check whether Meson build directory is already set up. (Although nothing bad happens, if you run setup twice, + # it complains and tells you to run configure.) + # Best clue that set-up has been run (rather than, say, user just created empty mbuild directory by hand) is the + # presence of meson-info/meson-info.json (which is created by setup for IDE integration -- see + # https://mesonbuild.com/IDE-integration.html#ide-integration) + runMesonSetup = True + warnAboutCurrentDirectory = False + if (dir_build.joinpath('meson-info/meson-info.json').is_file()): + log.info('Meson build directory ' + dir_build.as_posix() + ' appears to be already set up') + # + # You usually only need to reset things after you've done certain edits to defaults etc in meson.build. 
There + # are a whole bunch of things you can control with the 'meson configure' command, but it's simplest in some ways + # just to reset the build directory and rely on meson setup picking up defaults from meson.build. + # + # Note that we don't have to worry about this prompt appearing in a GitHub action, because we are always creating + # the mbuild directory for the first time when this script is run in such actions -- ie we should never reach this + # part of the code. + # + response = "" + while (response != 'y' and response != 'n'): + response = input('Do you want to completely reset the build directory? [y or n] ').lower() + if (response == 'n'): + runMesonSetup = False + else: + # It's very possible that the user's current working directory is mbuild. If so, we need to warn them and move + # up a directory (as 'meson setup' gets upset if current working directory does not exist). + log.info('Removing existing Meson build directory ' + dir_build.as_posix()) + if (pathlib.Path.cwd().as_posix() == dir_build.as_posix()): + # We write a warning out here for completeness, but we really need to show it further down as it will have + # scrolled off the top of the terminal with all the output from 'meson setup' + log.warning('You are currently in the directory we are about to delete. ' + + 'You will need to change directory!') + warnAboutCurrentDirectory = True + os.chdir(dir_base) + shutil.rmtree(dir_build) + + if (runMesonSetup): + log.info('Setting up ' + dir_build.as_posix() + ' meson build directory') + # See https://mesonbuild.com/Commands.html#setup for all the optional parameters to meson setup + # Note that meson setup will create the build directory (along with various subdirectories) + abortOnRunFail(subprocess.run([exe_meson, "setup", dir_build.as_posix(), dir_base.as_posix()], + capture_output=False)) + + log.info('Finished setting up Meson build. 
Note that the warnings above about path separator and optimization ' + + 'level are expected!') + + if (warnAboutCurrentDirectory): + print("❗❗❗ Your current directory has been deleted! You need to run 'cd ../mbuild' ❗❗❗") + log.debug('Setup done') + print() + print('You can now build, test, install and run ' + capitalisedProjectName + ' with the following commands:') + print(' cd ' + os.path.relpath(dir_build)) + print(' meson compile') + print(' meson test') + if (platform.system() == 'Linux'): + print(' sudo meson install') + else: + print(' meson install') + print(' ' + projectName) + + + return + +#----------------------------------------------------------------------------------------------------------------------- +# ./bt package +#----------------------------------------------------------------------------------------------------------------------- +def doPackage(): + # + # Meson does not offer a huge amount of help on creating installable packages. It has no equivalent to CMake's CPack + # and there is generally not a lot of info out there about how to do packaging in Meson. In fact, it seems unlikely + # that packaging will ever come within its scope. (Movement is rather in the other direction - eg there _used_ to be + # a Meson module for creating RPMs, but it was discontinued per + # https://mesonbuild.com/Release-notes-for-0-62-0.html#removal-of-the-rpm-module because it was broken and badly + # designed etc.) + # + # At first, this seemed disappointing, but I've rather come around to thinking a different way about it. Although + # CPack has lots of features it is also very painful to use. Some of the things you can do are undocumented; some of + # the things you want to be able to do seem nigh on impossible. So perhaps taking a completely different approach, + # eg using a scripting language rather than a build tool to do packaging, is ultimately a good thing. + # + # I spent some time looking at and trying to use the Qt-Installer-Framework (QtIFW).
Upsides are: + # - In principle we could write one set of install config that would then create install packages for Windows, Mac + # and Linux. + # - It should already know how to package Qt libraries(!) + # - It's the same licence as the rest of Qt. + # - We could use it in GitHub actions (courtesy of https://github.com/jurplel/install-qt-action). + # - It can handle in-place upgrades (including the check for whether an upgraded version is available), per + # https://doc.qt.io/qtinstallerframework/ifw-updates.html. + # Downsides are: + # - Outside of packaging Qt itself, I'm not sure that it's hugely widely used. It can be hard to find "how tos" or + # other assistance. + # - It's not a great advert for itself -- eg when I installed it locally on Kubuntu by downloading directly from + # https://download.qt.io/official_releases/qt-installer-framework/, it didn't put its own tools in the PATH, + # so I had to manually add ~/Qt/QtIFW-4.5.0/bin/ to my PATH. + # - It usually necessary to link against a static build of Qt, which is a bit of a pain as you have to download the + # source files for Qt and compile it locally -- see eg + # https://stackoverflow.com/questions/14932315/how-to-compile-qt-5-under-windows-or-linux-32-or-64-bit-static-or-dynamic-on-v + # for the whole process. + # - It's a change of installation method for people who have previously downloaded deb packages, RPMs, Mac DMG + # files, etc. + # - It puts things in different places than 'native' installers. Eg, on Linux, everything gets installed to + # subdirectories of the user's home directory rather than the "usual" system directories). Amongst other things, + # this makes it harder for distros etc that want to ship our software as "standard" packages. 
+ # + # The alternative approach, which I resisted for a fair while, but have ultimately become persuaded is right, is to + # do Windows, Mac and Linux packaging separately: + # - For Mac, there is some info at https://mesonbuild.com/Creating-OSX-packages.html on creating app bundles + # - For Linux, there is some mention in the Meson manual of building deb and rpm packages eg + # https://mesonbuild.com/Installing.html#destdir-support, but I think you have to do most of the work yourself. + # https://blog.devgenius.io/how-to-build-debian-packages-from-meson-ninja-d1c28b60e709 gives some sketchy + # starting info on how to build deb packages. Maybe we could find the equivalent for creating RPMs. Also look + # at https://openbuildservice.org/. + # - For Windows, we could probably use NSIS (Nullsoft Scriptable Install System -- see + # https://nsis.sourceforge.io/) -- or similar to create a Windows installer. + # + # Although a lot of packaging is platform-specific, the initial set-up is generic. + # + # 1. This script (as invoked directly) creates some packaging sub-directories of the build directory and then + # invokes Meson + # 2. Meson installs all the binaries, data files and so on that we need to ship into the packaging directory tree + # 3. Meson also exports a bunch of build information into a TOML file that we read in. This saves us duplicating + # too many meson.build settings in this file.
+ # + + findMesonAndGit() + + # + # The top-level directory structure we create inside the build directory (mbuild) for packaging is: + # + # packages/ Holds the subdirectories below, plus the source tarball and its checksum + # │ + # ├── windows/ For Windows + # │ + # ├── darwin/ For Mac + # │ + # ├── linux/ For Linux + # │ + # └── source/ For source code tarball + # + # NB: If we wanted to change this, we would need to make corresponding changes in meson.build + # + + # Step 1 : Create a top-level package directory structure + # We'll make the relevant top-level directory and ensure it starts out empty + # (We don't have to make dir_packages as it will automatically get created by os.makedirs when we ask it to + # create dir_packages_platform.) + if (dir_packages_platform.is_dir()): + log.info('Removing existing ' + dir_packages_platform.as_posix() + ' directory tree') + shutil.rmtree(dir_packages_platform) + log.info('Creating directory ' + dir_packages_platform.as_posix()) + os.makedirs(dir_packages_platform) + + # We change into the build directory. This doesn't affect the caller (of this script) because we're a separate + # sub-process from the (typically) shell that invoked us and we cannot change the parent process's working + # directory. + os.chdir(dir_build) + log.debug('Working directory now ' + pathlib.Path.cwd().as_posix()) + + # + # Meson can't do binary packaging, but it can create a source tarball for us via `meson dist`. We use the following + # options: + # --no-tests = stops Meson doing a full build and test, on the assumption that we've already done this by the + # time we come to packaging + # --allow-dirty = allow uncommitted changes, which is needed more often than you think (eg for the automatic + # builds done when a commit is pushed up to github). HOWEVER, note this option is only available + # in Meson 0.62 and later. 
+ # + # Of course, we could create a compressed tarball directly in this script, but the advantage of having Meson do it is + # that it will (I believe) include only source & data files actually in the git repository in meson.build, so you + # won't pick up other things that happen to be hanging around in the source etc directory trees. + # + log.info('Creating source tarball') + if (mesonVersion >= packaging.version.parse('0.62.0')): + abortOnRunFail( + subprocess.run([exe_meson, 'dist', '--no-tests', '--allow-dirty'], capture_output=False) + ) + else: + abortOnRunFail( + subprocess.run([exe_meson, 'dist', '--no-tests'], capture_output=False) + ) + + # + # The source tarball and its checksum end up in the meson-dist subdirectory of mbuild, so we just move them into the + # packages/source directory (first making sure the latter exists and is empty!). + # + # We are only talking about 2 files, so some of this is overkill, but it's easier to be consistent with what we do + # for the other subdirectories of mbuild/packages + # + if (dir_packages_source.is_dir()): + log.info('Removing existing ' + dir_packages_source.as_posix() + ' directory tree') + shutil.rmtree(dir_packages_source) + log.info('Creating directory ' + dir_packages_source.as_posix()) + os.makedirs(dir_packages_source) + meson_dist_dir = dir_build.joinpath('meson-dist') + for fileName in os.listdir(meson_dist_dir.as_posix()): + log.debug('Moving ' + fileName + ' from ' + meson_dist_dir.as_posix() + ' to ' + dir_packages_source.as_posix()) + # shutil.move will error rather than overwrite an existing file, so we handle that case manually (although in + # theory it should never arise) + targetFile = dir_packages_source.joinpath(fileName) + if os.path.exists(targetFile): + log.debug('Removing old ' + targetFile) + os.remove(targetFile) + shutil.move(meson_dist_dir.joinpath(fileName), dir_packages_source) + + # + # Running 'meson install' with the --destdir option will put all the installable files (program 
executable, + # translation files, data files, etc) in subdirectories of the platform-specific packaging directory. However, it + # will not bundle up any shared libraries that we need to ship with the application on Windows and Mac. We handle + # this in the platform-specific code below. + # + log.info('Running meson install with --destdir option') + # See https://mesonbuild.com/Commands.html#install for the optional parameters to meson install + abortOnRunFail(subprocess.run([exe_meson, 'install', '--destdir', dir_packages_platform.as_posix()], + capture_output=False)) + + # + # At the direction of meson.build, Meson should have generated a config.toml file in the build directory that we can + # read in to get useful settings exported from the build system. + # + global buildConfig + with open(dir_build.joinpath('config.toml').as_posix()) as buildConfigFile: + buildConfig = tomlkit.parse(buildConfigFile.read()) + log.debug('Shared libraries: ' + ', '.join(buildConfig["CONFIG_SHARED_LIBRARY_PATHS"])) + + # + # Note however that there are some things that are (often intentionally) difficult or impossible to import to or + # export from Meson. (See + # https://mesonbuild.com/FAQ.html#why-is-meson-not-just-a-python-module-so-i-could-code-my-build-setup-in-python for + # why it is an explicit design goal not to have the Meson configuration language be Turing-complete.) + # + # We deal with some of these in platform-specific code below + # + + # + # If meson install worked, we can now do the actual packaging.
+ # + + + match platform.system(): + + #----------------------------------------------------------------------------------------------------------------- + #------------------------------------------------ Linux Packaging ------------------------------------------------ + #----------------------------------------------------------------------------------------------------------------- + case 'Linux': + # + # There are, of course, multiple package managers in the Linux world. We cater for two of the main ones, + # Debian and RPM. + # + # Note, per https://en.wikipedia.org/wiki/X86-64, that x86_64 and amd64 are the same thing; the latter is just + # a rebranding of the former by AMD. Debian packages use 'amd64' in the filename, while RPM ones use 'x86_64', + # but it's the same code being packaged and pretty much the same directory structure being installed into. + # + # Some of the processing we need to do is the same for Debian and RPM, so do that first before we copy things + # into separate trees for actually building the packages + # + log.debug('Linux Packaging') + + # + # First, note that Meson is geared up for building and installing locally. (It doesn't really know about + # packaging.) This means it installs locally to /usr/local/bin, /usr/local/share, etc. This is "correct" for + # locally-built software but not for packaged software, which needs to go in /usr/bin, /usr/share, etc. 
So, + # inside the mbuild/packages directory tree, we just need to move everything out of linux/usr/local up one + # level into linux/usr and then remove the empty linux/usr/local directory + # + log.debug('Moving usr/local files to usr inside ' + dir_packages_platform.as_posix()) + targetDir = dir_packages_platform.joinpath('usr') + sourceDir = targetDir.joinpath('local') + for fileName in os.listdir(sourceDir.as_posix()): + shutil.move(sourceDir.joinpath(fileName), targetDir) + os.rmdir(sourceDir.as_posix()) + + # + # Debian and RPM both want the debugging information stripped from the executable. + # + # .:TBD:. One day perhaps we could be friendly and still ship the debugging info, just in a separate .dbg + # file. The procedure to do this is described in the 'only-keep-debug' section of `man objcopy`. However, we + # need to work out where to put the .dbg file so that it remains usable but lintian does not complain about it. + # + dir_packages_bin = dir_packages_platform.joinpath('usr').joinpath('bin') + log.debug('Stripping debug symbols') + abortOnRunFail( + subprocess.run( + ['strip', + '--strip-unneeded', + '--remove-section=.comment', + '--remove-section=.note binaries', + dir_packages_bin.joinpath(projectName)], + capture_output=False + ) + ) + + #-------------------------------------------------------------------------------------------------------------- + #-------------------------------------------- Debian .deb Package --------------------------------------------- + #-------------------------------------------------------------------------------------------------------------- + # + # There are some relatively helpful notes on building debian packages at: + # https://unix.stackexchange.com/questions/30303/how-to-create-a-deb-file-manually + # https://www.internalpointers.com/post/build-binary-deb-package-practical-guide + # + # We skip a lot of things because we are not trying to ship a Debian source package, just a binary one.
+ # (Debian wants source packages to be built with an old-fashioned makefile, which seems a bit too painful to + # me. Since there are other very easy routes for people to get the source code, I'm not rushing to jump + # through a lot of hoops to package it up in a .deb file.) + # + # Skipping the source package means we don't (and indeed can't) use all the tools that come with dh-make and it + # means we need to do a tiny bit more manual work in creating some parts of the install tree. But, overall, + # the process itself is simple once you've worked out what you need to do (which was slightly more painful than + # you might have hoped). + # + # To create a deb package, we create the following directory structure, where items marked ✅ are copied as is + # from the tree generated by meson install with --destdir option, and those marked ❇ are ones we need to + # relocate, generate or modify. + # + # (When working on this bit, use ❌ for things that are generated automatically but not actually needed, and ✴ + # for things we still need to add. Not currently not aware of any of either.) 
+ # debbuild + # └── [projectName]-[versionNumber]-1_amd64 + # ├── DEBIAN + # │ └── control ❇ # Contains info about dependencies, maintainer, etc + # │ + # └── usr + # ├── bin + # │ └── [projectName] ✅ <── the executable + # └── share + # ├── applications + # │ └── [projectName].desktop ✅ <── [filesToInstall_desktop] + # ├── [projectName] + # │ ├── DefaultData.xml ✅ <──┬── [filesToInstall_data] + # │ ├── default_db.sqlite ✅ <──┘ + # │ ├── sounds + # │ │ └── [All the filesToInstall_sounds .wav files] ✅ + # │ └── translations_qm + # │ └── [All the .qm files generated by qt.compile_translations] ✅ + # ├── doc + # │ └── [projectName] + # │ ├── changelog.Debian.gz ✅ + # │ ├── copyright ✅ + # │ ├── README.md (or README.markdown) ✅ + # │ └── RelaseNotes.markdown ✅ + # ├── icons + # │ └── hicolor + # │ └── scalable + # │ └── apps + # │ └── [projectName].svg ✅ <── [filesToInstall_icons] + # └── man + # └── man1 + # └── [projectName].1.gz ❇ <── English version of man page (compressed) + # + + # Make the top-level directory for the deb package and the DEBIAN subdirectory for the package control files + # etc + log.debug('Creating debian package top-level directories') + debPackageDirName = projectName + '-' + buildConfig['CONFIG_VERSION_STRING'] + '-1_amd64' + dir_packages_deb = dir_packages_platform.joinpath('debbuild').joinpath(debPackageDirName) + dir_packages_deb_control = dir_packages_deb.joinpath('DEBIAN') + os.makedirs(dir_packages_deb_control) # This will also automatically create parent directories + dir_packages_deb_doc = dir_packages_deb.joinpath('usr/share/doc').joinpath(projectName) + + # Copy the linux/usr tree inside the top-level directory for the deb package + log.debug('Copying deb package contents') + shutil.copytree(dir_packages_platform.joinpath('usr'), dir_packages_deb.joinpath('usr')) + + # + # Copy the Debian Binary package control file to where it belongs + # + log.debug('Copying deb package control file') + 
copyWithoutCommentsOrFolds(dir_build.joinpath('control').as_posix(), + dir_packages_deb_control.joinpath('control').as_posix()) + + + # + # Generate compressed changelog for Debian package from markdown + # + # Each Debian package (which provides a /usr/share/doc/pkg directory) must install a Debian changelog file in + # /usr/share/doc/pkg/changelog.Debian.gz + # + # This is done by a shell script because we already wrote that + # + log.debug('Generating compressed changelog') + os.environ['CONFIG_APPLICATION_NAME_LC' ] = buildConfig['CONFIG_APPLICATION_NAME_LC' ] + os.environ['CONFIG_CHANGE_LOG_UNCOMPRESSED'] = buildConfig['CONFIG_CHANGE_LOG_UNCOMPRESSED'] + os.environ['CONFIG_CHANGE_LOG_COMPRESSED' ] = dir_packages_deb_doc.joinpath('changelog.Debian.gz').as_posix() + os.environ['CONFIG_PACKAGE_MAINTAINER' ] = buildConfig['CONFIG_PACKAGE_MAINTAINER' ] + abortOnRunFail( + subprocess.run([dir_base.joinpath('packaging').joinpath('generateCompressedChangeLog.sh')], + capture_output=False) + ) + # Shell script gives wrong permissions on output (which lintian would complain about), so fix them here (from + # rw-rw-r-- to rw-r--r--). + os.chmod(dir_packages_deb_doc.joinpath('changelog.Debian.gz'), + stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH) + + # + # Debian packages want man pages to be compressed with gzip with the highest compression available (-9n). + # + # TBD: We'll need to expand this slightly when we support man pages in multiple languages. 
+ # + # We _could_ do this all in Python with the gzip module, but it's somewhat less coding just to invoke the gzip + # program directly + # + dir_packages_deb_man = dir_packages_deb.joinpath('usr').joinpath('share').joinpath('man') + dir_packages_deb_man1 = dir_packages_deb_man.joinpath('man1') + log.debug('Compressing man page') + abortOnRunFail( + subprocess.run(['gzip', '-9n', dir_packages_deb_man1.joinpath(projectName + '.1')], capture_output=False) + ) + + # + # Now we actually generate the package + # + # Generates the package with the same name as the package directory plus '.deb' on the end + log.info('Generating deb package') + previousWorkingDirectory = pathlib.Path.cwd().as_posix() + os.chdir(dir_packages_platform.joinpath('debbuild')) + abortOnRunFail( + subprocess.run(['dpkg-deb', '--build', '--root-owner-group', debPackageDirName], capture_output=False) + ) + + # The debian package name is (I think) derived from the name of the directory we supplied as build parameter + debPackageName = debPackageDirName + '.deb' + + # Running lintian does a very strict check on the Debian package. You can find a list of all the error and + # warning codes at https://lintian.debian.org/tags. + # + # Some of the warnings are things that only matter for packages that actually ship with Debian itself - ie they + # won't stop the package working but are not strictly within the standards that the Debian project sets for the + # packages included in the distro. + # + # Still, we try to fix as many warnings as possible. As at 2022-08-11 we currently have one warning that we do + # not ship a man page. We should get to this at some point. 
+ log.info('Running lintian to check the created deb package for errors and warnings') + abortOnRunFail( + subprocess.run(['lintian', '--no-tag-display-limit', debPackageName], capture_output=False) + ) + + # Move the .deb file to the top-level directory + shutil.move(debPackageName, dir_packages_platform) + + # We don't particularly need to change back to the previous working directory, but it's tidy to do so. + os.chdir(previousWorkingDirectory) + + # + # Make the checksum file + # + log.info('Generating checksum file for ' + debPackageName) + writeSha256sum(dir_packages_platform, debPackageName) + + #-------------------------------------------------------------------------------------------------------------- + #---------------------------------------------- RPM .rpm Package ---------------------------------------------- + #-------------------------------------------------------------------------------------------------------------- + # This script is written assuming you are on a Debian-based Linux. + # + # In theory we can use `alien` to convert a .deb to a .rpm, but I worry that this would not handle dependencies + # very well. So we prefer to build a bit more manually. + # + # To create a rpm package, we create the following directory structure, where items marked ✅ are copied as is + # from the tree generated by meson install with --destdir option, and those marked ❇ are ones we either + # generate or modify. + # + # (When working on this bit, use ❌ for things that are generated automatically but not actually needed, and ✴ + # for things we still need to add. Not currently not aware of any of either.) 
+ # rpmbuild + # ├── SPECS + # │ └── rpm.spec ❇ + # └── BUILDROOT + # └── usr + # ├── bin + # │ └── [projectName] ✅ <── the executable + # ├── lib + # │ └── .build-id + # └── share + # ├── applications + # │ └── [projectName].desktop ✅ <── [filesToInstall_desktop] + # ├── [projectName] + # │ ├── DefaultData.xml ✅ <──┬── [filesToInstall_data] + # │ ├── default_db.sqlite ✅ <──┘ + # │ ├── sounds + # │ │ └── [All the filesToInstall_sounds .wav files] ✅ + # │ └── translations_qm + # │ └── [All the .qm files generated by qt.compile_translations] ✅ + # ├── doc + # │ └── [projectName] + # │ ├── copyright ✅ + # │ ├── README.md (or README.markdown) ✅ + # │ └── RelaseNotes.markdown ✅ + # ├── icons + # │ └── hicolor + # │ └── scalable + # │ └── apps + # │ └── [projectName].svg ✅ <── [filesToInstall_icons] + # └── man + # └── man1 + # └── [projectName].1.bz2 ❇ <── English version of man page (compressed) + # + # + + # Make the top-level directory for the rpm package and the SPECS subdirectory etc + log.debug('Creating rpm package top-level directories') + rpmPackageDirName = 'rpmbuild' + dir_packages_rpm = dir_packages_platform.joinpath(rpmPackageDirName) + dir_packages_rpm_specs = dir_packages_rpm.joinpath('SPECS') + os.makedirs(dir_packages_rpm_specs) # This will also automatically create dir_packages_rpm + dir_packages_rpm_buildroot = dir_packages_rpm.joinpath('BUILDROOT') + os.makedirs(dir_packages_rpm_buildroot) + + # Copy the linux/usr tree inside the top-level directory for the rpm package + log.debug('Copying rpm package contents') + shutil.copytree(dir_packages_platform.joinpath('usr'), dir_packages_rpm_buildroot.joinpath('usr')) + + # Copy the RPM spec file, doing the same unfolding etc as for the Debian control file above + log.debug('Copying rpm spec file') + copyWithoutCommentsOrFolds(dir_build.joinpath('rpm.spec').as_posix(), + dir_packages_rpm_specs.joinpath('rpm.spec').as_posix()) + + # + # In Debian packaging, the change log is a separate file. 
However, for RPM packaging, the change log needs to + # be, included in the spec file. The simplest way to do that is for us to append it to the file we've just + # copied. (NB: This relies on the last line of that file being `%changelog` -- ie the macro that introduces + # the change log.) + # + # Since we store our change log internally in markdown, we also convert it to the RPM format at the same time + # as appending it. (This is different from the Debian changelog format, so we can't just reuse what we've done + # above.) Per https://docs.fedoraproject.org/en-US/packaging-guidelines/#changelogs, the format we need is: + # %changelog + # * Wed Jun 14 2003 Joe Packager - 1.0-2 + # - Added README file (#42). + # (Note that we don't have to write '%changelog' as it's already in the spec file.) + # The format we have is: + # ## v3.0.2 + # Minor bug fixes for the 3.0.1 release (ie bugs in 3.0.1 are fixed in this 3.0.2 release). + # + # ### New Features + # + # * None + # + # ### Bug Fixes + # * LGPL-2.1-only and LGPL-3.0-only license text not shipped [#664](https://github.com/Brewtarget/brewtarget/issues/664) + # * Release 3.0.1 is uninstallable on Ubuntu 22.04.1 [#665](https://github.com/Brewtarget/brewtarget/issues/665) + # * Turkish Language selection in settings not working [#670])https://github.com/Brewtarget/brewtarget/issues/670) + # + # ### Release Timestamp + # Wed, 26 Oct 2022 10:10:10 +0100 + # + # ## v3.0.1 + # etc + # + with open(os.environ['CONFIG_CHANGE_LOG_UNCOMPRESSED'], 'r') as markdownChangeLog, open(dir_packages_rpm_specs.joinpath('rpm.spec'), 'a') as specFile: + inIntro = True + releaseDate = '' + versionNumber = '' + changes = [] + for line in markdownChangeLog: + if (inIntro): + # Skip over the introductory headings and paragraphs of CHANGES.markdown until we get to the first + # version line, which begins with '## v'. 
+ if (not line.startswith('## v')): + # Skip straight to processing the next line + continue + # We've reached the end of the introductory stuff, so the current line is the first one that we + # process "as normal" below. + inIntro = False + # If this is a version line, it's the start of a change block (and the end of the previous one if there + # was one). Grab the version number (and write out the previous block if there was one). Note that we + # have to add the '-1' "package release" number on the end of the version number (but before the + # newline!), otherwise rpmlint will complain about "incoherent-version-in-changelog". + if (line.startswith('## v')): + versionNumber = line.removeprefix('## v').replace('\n', '-1\n') + if (len(changes) > 0): + specFile.write('* ' + releaseDate + ' ' + buildConfig['CONFIG_PACKAGE_MAINTAINER'] + ' - ' + + versionNumber) + for change in changes: + specFile.write('- ' + change) + changes = [] + continue + # If this is a line starting with '* ' then it's either a new feature or a bug fix. RPM doesn't + # distinguish, so we just add it to the list, stripping the '* ' off the front. + if (line.startswith('* ')): + changes.append(line.removeprefix('* ')) + continue + # If this line is '### Release Timestamp' then we want to grab the next line as the release timestamp + if (line.startswith('### Release Timestamp')): + # + # We need to: + # - take the comma out after the day of the week + # - change date format from "day month year" to "month day year" + # - strip the time off the end of the line + # - strip the newline off the end of the line + # We can do all of it all in one regexp with relatively little pain(!). Note the use of raw string + # notation (r prefix on string literal) to avoid the backslash plague (see + # https://docs.python.org/3/howto/regex.html#the-backslash-plague). 
+ # + line = next(markdownChangeLog) + releaseDate = re.compile(r', (\d\d) ([A-Z][a-z][a-z]) (\d\d\d\d).*\n$').sub(r' \2 \1 \3', line) + continue + # Once we got to the end of the input, we need to write the last change block + if (len(changes) > 0): + specFile.write('* ' + releaseDate + ' ' + buildConfig['CONFIG_PACKAGE_MAINTAINER'] + ' - ' + + versionNumber) + for change in changes: + specFile.write('- ' + change) + + # + # RPM packages want man pages to be compressed with bzip2. Other than that, the same comments above for + # compressing man pages for deb packages apply here. + # + dir_packages_rpm_man = dir_packages_rpm_buildroot.joinpath('usr').joinpath('share').joinpath('man') + dir_packages_rpm_man1 = dir_packages_rpm_man.joinpath('man1') + log.debug('Compressing man page') + abortOnRunFail( + subprocess.run( + ['bzip2', '--compress', dir_packages_rpm_man1.joinpath(projectName + '.1')], + capture_output=False + ) + ) + + # + # Run rpmbuild to build the package + # + # Again, as with the .deb packaging, we are just trying to build a binary package and not use all the built-in + # magical makefiles of the full RPM build system. + # + # Note, per comments at + # https://unix.stackexchange.com/questions/553169/rpmbuild-isnt-using-the-current-working-directory-instead-using-users-home + # that you have to set the _topdir macro to stop rpmbuild wanting to put all its output under the current + # user's home directory. Also, we do not put quotes around this define because the subprocess module will do + # this already (I think) because it works out there's a space in the string. (If we do put quotes, we get an + # error "Macro % has illegal name".) 
+ # + log.info('Generating rpm package') + abortOnRunFail( + subprocess.run( + ['rpmbuild', + '--define=_topdir ' + dir_packages_rpm.as_posix(), + '--noclean', # Do not remove the build tree after the packages are made + '--buildroot', + dir_packages_rpm_buildroot.as_posix(), + '--bb', + dir_packages_rpm_specs.joinpath('rpm.spec').as_posix()], + capture_output=False + ) + ) + + # rpmbuild will have put its output in RPMS/x86_64/[projectName]-[versionNumber]-1.x86_64.rpm + dir_packages_rpm_output = dir_packages_rpm.joinpath('RPMS').joinpath('x86_64') + rpmPackageName = projectName + '-' + buildConfig['CONFIG_VERSION_STRING'] + '-1.x86_64.rpm' + + # + # Running rpmlint is the lintian equivalent exercise for RPMs. Many, but by no means all, of the error and + # warning codes are listed at https://fedoraproject.org/wiki/Common_Rpmlint_issues, though there are some + # mistakes on that page (eg suggestion for dealing with unstripped-binary-or-object warning is "Make sure + # binaries are executable"!) + # + # See packaging/linux/rpmLintfilters.toml for suppression of various rpmlint warnings (with explanations of + # why). + # + # We don't however run rpmlint on old versions of Ubuntu (ie 20.04 or earlier) because they are still on + # version 1.X of the tool and there were a lot of big changes in the 2.0 release in May 2021, including in the + # call syntax -- see https://github.com/rpm-software-management/rpmlint/releases/tag/2.0.0 for details. + # (Interestingly, as of that 2.0 release, rpmlint is entirely written in Python and can even be installed via + # `pip install rpmlint` and imported as a Python module -- see https://pypi.org/project/rpmlint/. We should + # have a look at this, provided we can use it without messing up anything the user has already installed from + # distro packages.) 
+ # + rawVersion = abortOnRunFail( + subprocess.run(['rpmlint', '--version'], capture_output=True)).stdout.decode('UTF-8' + ).rstrip() + log.debug('rpmlint version raw: ' + rawVersion) + # Older versions of rpmlint output eg "rpmlint version 1.11", whereas newer ones output eg "2.2.0". With the + # magic of regular expressions we can fix this. + trimmedVersion = re.sub(r'^[^0-9]*', '', rawVersion).replace('_', '.') + log.debug('rpmlint version trimmed: ' + trimmedVersion) + rpmlintVersion = packaging.version.parse(trimmedVersion) + log.debug('rpmlint version parsed: ' + str(rpmlintVersion)) + if (rpmlintVersion < packaging.version.parse('2.0.0')): + log.info('Skipping invocation of rpmlint as installed version (' + str(rpmlintVersion) + + ') is too old (< 2.0)') + else: + log.info('Running rpmlint (v' + str(rpmlintVersion) + + ') to check the created rpm package for errors and warnings') + abortOnRunFail( + subprocess.run( + ['rpmlint', + '--config', + dir_base.joinpath('packaging/linux'), + dir_packages_rpm_output.joinpath(rpmPackageName).as_posix()], + capture_output=False + ) + ) + + # Move the .rpm file to the top-level directory + shutil.move(dir_packages_rpm_output.joinpath(rpmPackageName), dir_packages_platform) + + # + # Make the checksum file + # + log.info('Generating checksum file for ' + rpmPackageName) + writeSha256sum(dir_packages_platform, rpmPackageName) + + #----------------------------------------------------------------------------------------------------------------- + #----------------------------------------------- Windows Packaging ----------------------------------------------- + #----------------------------------------------------------------------------------------------------------------- + case 'Windows': + log.debug('Windows Packaging') + # + # There are three main open-source packaging tools available for Windows: + # + # - NSIS (Nullsoft Scriptable Install System) -- see https://nsis.sourceforge.io/ + # This is widely used and 
reputedly simple to learn. Actually the documentation, although OK overall, is + # not brilliant for beginners. When you are trying to write your first installer script, you will find a + # frustrating number of errors, omissions and broken links in the documentation. If you give up on this + # and take an existing working script as a starting point, the reference documentation to explain each + # command is not too bad. Plus there are lots of useful titbits on Stack Overflow etc. + # What's less good is that the scripting language is rather primitive. Once you start looking at + # variable scope and how to pass arguments to functions, you'll have a good feel for what it was like to + # write mainframe assembly language in the 1970s. + # There is one other advantage that NSIS has over Wix and Inno Setup, specifically that it is available + # as an MSYS2 package (mingw-w64-x86_64-nsis for 64-bit and mingw-w64-i686-nsis for 32-bit), whereas the + # others are not. This makes it easier to script installations, including for the automated builds on + # GitHub. + # + # - WiX -- see https://wixtoolset.org/ and https://github.com/wixtoolset/ + # This is apparently used by a lot of Microsoft's own products and is supposedly pretty robust. Looks + # like you configure/script it with XML and PowerShell. Most discussion of it says you really first need + # to have a good understanding of Windows Installer (https://en.wikipedia.org/wiki/Windows_Installer) and + # its MSI package format. There is a 260 page book called "The Definitive Guide to Windows Installer" + # which either is or isn't beginner-friendly depending on who you ask but, either way is about 250 pages + # more than I want to have to know about Windows package installation. If we decided we _needed_ to + # produce MSI installers though, this would be the only choice. 
+ # + # - Inno Setup -- see https://jrsoftware.org/isinfo.php and https://github.com/jrsoftware/issrc + # Has been around for ages, but is less widely used than NSIS. Basic configuration is supposedly simpler + # than NSIS, as it's based on an INI file (https://en.wikipedia.org/wiki/INI_file), but you also, by + # default, have a bit less control over how the installer works. If you do need to script something you + # have to do it in Pascal, so a trip back to the 1980s rather than the 1970s. + # + # For the moment, we're sticking with NSIS, which is the devil we know, aka what we've historically used. + # + + # + # As mentioned above, not all information about what Meson does is readily exportable. In particular, I can + # find no simple way to get the actual directory that a file was installed to. Eg, on Windows, in an MSYS2 + # environment, the main executable will be in mbuild/packages/windows/msys64/mingw32/bin/ or similar. The + # beginning (mbuild/packages/windows) and the end (bin) are parts we specify, but the middle bit + # (msys64/mingw32) is magicked up by Meson and not explicitly exposed to build script commands. + # + # Fortunately, we can just search for a directory called bin inside the platform-specific packaging directory + # and we'll have the right thing. + # + # (An alternative approach would be to invoke meson with the --bindir parameter to manually choose the + # directory for the executable.) + # + packageBinDirList = glob.glob('./**/bin/', root_dir=dir_packages_platform.as_posix(), recursive=True) + if (len(packageBinDirList) == 0): + log.critical( + 'Cannot find bin subdirectory of ' + dir_packages_platform.as_posix() + ' packaging directory' + ) + exit(1) + if (len(packageBinDirList) > 1): + log.warning( + 'Found more than one bin subdirectory of ' + dir_packages_platform.as_posix() + + ' packaging directory: ' + '; '.join(packageBinDirList) + '. 
Assuming first is the one we need' + ) + + dir_packages_win_bin = dir_packages_platform.joinpath(packageBinDirList[0]) + log.debug('Package bin dir: ' + dir_packages_win_bin.as_posix()) + + # + # We could do the same search for data and doc directories, but we happen to know that they should just be + # sibling directories of the bin directory we just found. + # + dir_packages_win_data = dir_packages_win_bin.parent.joinpath('data') + dir_packages_win_doc = dir_packages_win_bin.parent.joinpath('doc') + + # + # Now we have to deal with shared libraries. Windows does not have a built-in package manager and it's not + # realistic for us to require end users to install and use one. So, any shared library that we cannot + # statically link into the application needs to be included in the installer. This mainly applies to Qt. + # (Although you can, in principle, statically link against Qt, it requires downloading the entire Qt source + # code and doing a custom build.) Fortunately, Qt provides a handy utility called windeployqt that should do + # most of the work for us. + # + # Per https://doc.qt.io/qt-6/windows-deployment.html, the windeployqt executable creates all the necessary + # folder tree "containing the Qt-related dependencies (libraries, QML imports, plugins, and translations) + # required to run the application from that folder". + # + log.debug('Running windeployqt') + previousWorkingDirectory = pathlib.Path.cwd().as_posix() + os.chdir(dir_packages_win_bin) + abortOnRunFail( + subprocess.run(['windeployqt', + '--verbose', '2', # 2 is the maximum + projectName + '.exe'], + capture_output=False) + ) + os.chdir(previousWorkingDirectory) + + # + # We're not finished with shared libraries. Although windeployqt is theoretically capable of detecting all the + # shared libraries we need, including non-Qt ones, it doesn't, in practice, seem to be that good on the non-Qt + # bit. 
And although, somewhere in the heart of the Meson implementation, you would think it would or could + # know the full paths to the shared libraries on which we depend, this is not AFAICT extractable in the + # meson.build script. So, here, we have a list of libraries that we know we depend on and we search for them + # in the paths listed in the PATH environment variable. It's a bit less painful than you might think to + # construct and maintain this list of libraries, because, for the most part, if you miss a needed DLL from the + # package, Windows will give you an error message at start-up telling you which DLL(s) it needed but could not + # find. (There are also various platform-specific free-standing tools that claim to examine an executable and + # tell you what shared libraries it depends on. None that I know of is easy to install in an automated way in + # MSYS2 however.) + # + # We assume that the library 'foo' has a dll called 'libfoo.dll' or 'libfoo-X.dll' or 'libfooX.dll' where X is + # a (possibly multi-digit) version number present on some, but not all, libraries. If we find more matches + # than we were expecting, we log a warning and just include everything we found. (Sometimes we include the + # version number in the library name because we really are looking for a specific version or there are always + # multiple versions) It's not super pretty, but it should work. + # + # Just to keep us on our toes, the Python os module has two similarly-named but different things: + # - os.pathsep is the separator between paths (usually ';' or ':') eg in the PATH environment variable + # - os.sep is the separator between directories (usually '/' or '\\') in a path + # + # The comments below about the source of libraries are just FYI. In almost all cases, we are actually + # installing these things on the build machine via pacman, so we don't have to go directly to the upstream + # project. 
+ # + pathsToSearch = os.environ['PATH'].split(os.pathsep) + for extraLib in [ + 'libbrotlicommon', # Brotli compression -- see https://en.wikipedia.org/wiki/Brotli + 'libbrotlidec', # Brotli compression + 'libbrotlienc', # Brotli compression + 'libbz2', # BZip2 compression -- see https://en.wikipedia.org/wiki/Bzip2 + 'libdouble-conversion', # See https://github.com/google/double-conversion + 'libfreetype', # See https://freetype.org/ + 'libgcc_s_dw2', + 'libglib-2.0', + 'libgraphite', + 'libharfbuzz', # HarfBuzz text shaping engine -- see https://github.com/harfbuzz/harfbuzz + 'libiconv', # See https://www.gnu.org/software/libiconv/ + 'libicudt', # Part of International Components for Unicode + 'libicuin', # Part of International Components for Unicode + 'libicuuc', # Part of International Components for Unicode + 'libintl', # See https://www.gnu.org/software/gettext/ + 'libmd4c', # Markdown for C -- see https://github.com/mity/md4c + 'libpcre2-8', # Perl Compatible Regular Expressions + 'libpcre2-16', # Perl Compatible Regular Expressions + 'libpcre2-32', # Perl Compatible Regular Expressions + 'libpng16', # Official PNG reference library -- see http://www.libpng.org/pub/png/libpng.html + 'libsqlite3', # Need this IN ADDITION to bin/sqldrivers/qsqlite.dll, which gets installed by windeployqt + 'libstdc++', + 'libwinpthread', + 'libxalan-c', + 'libxalanMsg', + 'libxerces-c-3', + 'libzstd', # ZStandard (aka zstd) = fast lossless compression algorithm + 'zlib', # ZLib compression library + ]: + found = False + for searchDir in pathsToSearch: + # We do a glob match to get approximate matches and then filter it with a regular expression for exact + # ones + matches = [] + globMatches = glob.glob(extraLib + '*.dll', root_dir=searchDir, recursive=False) + for globMatch in globMatches: + # We need to remove the first part of the glob match before doing a regexp match because we don't want + # the first part of the filename to be treated as a regular expression. 
In particular, this would be + # a problem for 'libstdc++'! + suffixOfGlobMatch = globMatch.removeprefix(extraLib) + # On Python 3.11 or later, we would write flags=re.NOFLAG instead of flags=0 + if re.fullmatch(re.compile('-?[0-9]*.dll'), suffixOfGlobMatch, flags=0): + matches.append(globMatch) + numMatches = len(matches) + if (numMatches > 0): + log.debug('Found ' + str(numMatches) + ' match(es) for ' + extraLib + ' in ' + searchDir) + if (numMatches > 1): + log.warning('Found more matches than expected (' + str(numMatches) + ' ' + + 'instead of 1) when searching for library "' + extraLib + '". This is not an ' + + 'error, but means we are possibly shipping additional shared libraries that we '+ + 'don\'t need to.') + for match in matches: + fullPathOfMatch = pathlib.Path(searchDir).joinpath(match) + log.debug('Copying ' + fullPathOfMatch.as_posix() + ' to ' + dir_packages_win_bin.as_posix()) + shutil.copy2(fullPathOfMatch, dir_packages_win_bin) + found = True + break; + if (not found): + log.critical('Could not find '+ extraLib + ' library in PATH ' + os.environ['PATH']) + exit(1) + + # Copy the NSIS installer script to where it belongs + shutil.copy2(dir_build.joinpath('NsisInstallerScript.nsi'), dir_packages_platform) + + # We change into the packaging directory and invoke the NSIS Compiler (aka MakeNSIS.exe) + os.chdir(dir_packages_platform) + log.debug('Working directory now ' + pathlib.Path.cwd().as_posix()) + abortOnRunFail( + # FYI, we don't need it here, but if you run makensis from the MSYS2 command line (Mintty), you need double + # slashes on the options (//V4 instead of /V4 etc). + subprocess.run( + [ + 'MakeNSIS.exe', # 'makensis' would also work on MSYS2 + '/V4', # Max verbosity/logging + # Variables coming from this script are passed in as command-line defines. Fortunately there aren't + # too many of them. 
+ '/DBT_PACKAGING_BIN_DIR="' + dir_packages_win_bin.as_posix() + '"', + '/DBT_PACKAGING_DATA_DIR="' + dir_packages_win_data.as_posix() + '"', + '/DBT_PACKAGING_DOC_DIR="' + dir_packages_win_doc.as_posix() + '"', + 'NsisInstallerScript.nsi', + ], + capture_output=False + ) + ) + + # + # Make the checksum file TODO + # + winInstallerName = capitalisedProjectName + ' ' + buildConfig['CONFIG_VERSION_STRING'] + ' Installer.exe' + log.info('Generating checksum file for ' + winInstallerName) + writeSha256sum(dir_packages_platform, winInstallerName) + + #----------------------------------------------------------------------------------------------------------------- + #------------------------------------------------- Mac Packaging ------------------------------------------------- + #----------------------------------------------------------------------------------------------------------------- + case 'Darwin': + log.debug('Mac Packaging - TODO!') + # + # See https://stackoverflow.com/questions/1596945/building-osx-app-bundle for essential info on building Mac + # app bundles. Also https://mesonbuild.com/Creating-OSX-packages.html suggests how to do this with Meson, + # though it's mostly through having Meson call shell scripts, so I think we're better off sticking to this + # Python script. + # + # https://developer.apple.com/library/archive/documentation/CoreFoundation/Conceptual/CFBundles/BundleTypes/BundleTypes.html + # is the "official" Apple info about the directory structure. + # + # To create a Mac app bundle , we create the following directory structure, where items marked ✅ are copied as + # is from the tree generated by meson install with --destdir option, those marked 🟢 are handled by + # `macdeployqt`, and those marked ❇ are ones we need to relocate, generate or modify ourselves. + # + # (When working on this bit, use ❌ for things that are generated automatically but not actually needed, and ✴ + # for things we still need to add.) 
+ # [projectName]_[versionNumber].app + # └── Contents + # ├── Info.plist ❇ <── "Information property list" file = required configuration information (in XML) + # ├── Frameworks <── Contains any private shared libraries and frameworks used by the executable + # │ ├── QtCore.framework * NB: Directory and its contents * 🟢 + # │ ├── [Other Qt .framework directories and their contents] 🟢 + # │ ├── libfreetype.6.dylib 🟢 + # │ ├── libglib-2.0.0.dylib 🟢 + # │ ├── libgthread-2.0.0.dylib 🟢 + # │ ├── libintl.8.dylib 🟢 + # │ ├── libjpeg.8.dylib 🟢 + # │ ├── libpcre2-16.0.dylib 🟢 + # │ ├── libpcre2-8.0.dylib 🟢 + # │ ├── libpng16.16.dylib 🟢 + # │ ├── libsharpyuv.0.dylib 🟢 + # │ ├── libtiff.5.dylib 🟢 + # │ ├── libwebp.7.dylib 🟢 + # │ ├── libwebpdemux.2.dylib 🟢 + # │ ├── libwebpmux.3.dylib 🟢 + # │ ├── libxalan-c.112.dylib 🟢 + # │ ├── libxerces-c-3.2.dylib 🟢 + # │ ├── libzstd.1.dylib 🟢 + # │ └── libxalanMsg.112.dylib ❇ ✴ + # ├── MacOS + # │ └── [capitalisedProjectName] ❇ <── the executable + # ├── Plugins <── Contains loadable bundles that extend the basic features of the application + # │ ├── audio + # │ │   └── libqtaudio_coreaudio.dylib 🟢 + # │ ├── bearer + # │ │   └── libqgenericbearer.dylib 🟢 + # │ ├── iconengines + # │ │   └── libqsvgicon.dylib 🟢 + # │ ├── imageformats + # │ │   ├── libqgif.dylib 🟢 + # │ │   ├── libqicns.dylib 🟢 + # │ │   ├── libqico.dylib 🟢 + # │ │   ├── libqjpeg.dylib 🟢 + # │ │   ├── libqmacheif.dylib 🟢 + # │ │   ├── libqmacjp2.dylib 🟢 + # │ │   ├── libqsvg.dylib 🟢 + # │ │   ├── libqtga.dylib 🟢 + # │ │   ├── libqtiff.dylib 🟢 + # │ │   ├── libqwbmp.dylib 🟢 + # │ │   └── libqwebp.dylib 🟢 + # │ ├── mediaservice + # │ │   ├── libqavfcamera.dylib 🟢 + # │ │   ├── libqavfmediaplayer.dylib 🟢 + # │ │   └── libqtmedia_audioengine.dylib 🟢 + # │ ├── platforms + # │ │   └── libqcocoa.dylib 🟢 + # │ ├── printsupport + # │ │   └── libcocoaprintersupport.dylib 🟢 + # │ ├── sqldrivers + # │ │   ├── libqsqlite.dylib 🟢 + # │ │   ├── libqsqlodbc.dylib ✴ Not sure we need this 
one, but it got shipped with Brewtarget 2.3 + # │ │   └── libqsqlpsql.dylib ✴ + # │ ├── styles + # │ │ └── libqmacstyle.dylib 🟢 + # │ └── virtualkeyboard + # │ ├── libqtvirtualkeyboard_hangul.dylib 🟢 + # │ ├── libqtvirtualkeyboard_openwnn.dylib 🟢 + # │ ├── libqtvirtualkeyboard_pinyin.dylib 🟢 + # │ ├── libqtvirtualkeyboard_tcime.dylib 🟢 + # │ └── libqtvirtualkeyboard_thai.dylib 🟢 + # └── Resources + # ├── [capitalisedProjectName]Icon.icns ✅ <── Icon file + # ├── DefaultData.xml ✅ + # ├── default_db.sqlite ✅ + # ├── en.lproj <── Localized resources + # │ ├── COPYRIGHT ✅ + # │ └── README.md ✅ + # ├── sounds + # │ └── [All the filesToInstall_sounds .wav files] ✅ + # └── translations_qm + # └── [All the .qm files generated by qt.compile_translations] ✅ + # + # This will ultimately get bundled up into a disk image (.dmg) file. + # + + # + # Make the top-level directories + # + log.debug('Creating Mac app bundle top-level directories') + macBundleDirName = projectName + '_' + buildConfig['CONFIG_VERSION_STRING'] + '.app' + dir_packages_mac = dir_packages_platform.joinpath(macBundleDirName).joinpath('Contents') + dir_packages_mac_bin = dir_packages_mac.joinpath('MacOS') + dir_packages_mac_rsc = dir_packages_mac.joinpath('Resources') + dir_packages_mac_frm = dir_packages_mac.joinpath('Frameworks') + dir_packages_mac_plg = dir_packages_mac.joinpath('Plugins') + os.makedirs(dir_packages_mac_bin) # This will also automatically create parent directories + os.makedirs(dir_packages_mac_frm) + os.makedirs(dir_packages_mac_plg) + # Rather than create dir_packages_mac_rsc directly, it's simplest to copy the whole Resources tree from + # mbuild/mackages/darwin/usr/local/Contents/Resources, as we want everything that's inside it + log.debug('Copying Resources') + shutil.copytree(dir_packages_platform.joinpath('usr/local/Contents/Resources'), dir_packages_mac_rsc) + + # Copy the Information Property List file to where it belongs + log.debug('Copying Information Property List file') + 
shutil.copy2(dir_build.joinpath('Info.plist').as_posix(), dir_packages_mac) + + # Because Meson is geared towards local installs, in the mbuild/mackages/darwin directory, it is going to have + # placed the executable in the usr/local/bin subdirectory. Copy it to the right place. + log.debug('Copying executable') + shutil.copy2(dir_packages_platform.joinpath('usr/local/bin').joinpath(capitalisedProjectName).as_posix(), + dir_packages_mac_bin) + + # + # The macdeployqt executable shipped with Qt does for Mac what windeployqt does for Windows -- see + # https://doc.qt.io/qt-6/macos-deployment.html#the-mac-deployment-tool + # + # At first glance, you might think that, with a few name changes, we might share all the bt code for + # macdeployqt and windeployqt. However, the two programs share _only_ a top-level goal ("automate the process + # of creating a deployable [folder / application bundle] that contains [the necessary Qt dependencies]" - ie so + # that the end user does not have to install Qt to run our software). They have completely different + # implementations and command line options, so it would be unhelpful to try to treat them identically. + # + # With the verbose logging on, you can see that macdeployqt is calling: + # - otool (see https://www.unix.com/man-page/osx/1/otool/) to get information about which libraries etc the + # executable depends on + # - install_name_tool (see https://www.unix.com/man-page/osx/1/install_name_tool/) to change the paths in + # which the executable looks for a library + # - strip (see https://www.unix.com/man-page/osx/1/strip/) to remove symbols from shared libraries + # + # As discussed at https://stackoverflow.com/questions/2809930/macdeployqt-and-third-party-libraries, there are + # usually cases where you have to do some of the same work by hand because macdeployqt doesn't automatically + # detect all the dependencies. 
One example of this is that, if a shared library depends on another shared + # library then macdeployqt won't detect it, because it does not recursively run its dependency checking. + # + # For us, macdeployqt does seem to cover almost all the shared libraries and frameworks we need, including + # those that are not part of Qt. The exceptions are: + # - libxalanMsg -- a library that libxalan-c uses (so an indirect rather than direct dependency) + # - libqsqlpsql.dylib -- which would be needed for any user that wants to use PostgreSQL instead of SQLite + # + previousWorkingDirectory = pathlib.Path.cwd().as_posix() + log.debug('Running otool before macdeployqt') + os.chdir(dir_packages_mac_bin) + otoolOutputExe = abortOnRunFail( + subprocess.run(['otool', + '-L', + capitalisedProjectName], + capture_output=True) + ).stdout.decode('UTF-8') + log.debug('Output of `otool -L' + capitalisedProjectName + '`: ' + otoolOutputExe) + # + # The output from otool at this stage will be along the following lines: + # + # [capitalisedProjectName]: + # /usr/local/opt/qt@5/lib/QtCore.framework/Versions/5/QtCore (compatibility version 5.15.0, current version 5.15.8) + # /usr/local/opt/qt@5/lib/QtGui.framework/Versions/5/QtGui (compatibility version 5.15.0, current version 5.15.8) + # /usr/local/opt/qt@5/lib/QtMultimedia.framework/Versions/5/QtMultimedia (compatibility version 5.15.0, current version 5.15.8) + # /usr/local/opt/qt@5/lib/QtNetwork.framework/Versions/5/QtNetwork (compatibility version 5.15.0, current version 5.15.8) + # /usr/local/opt/qt@5/lib/QtPrintSupport.framework/Versions/5/QtPrintSupport (compatibility version 5.15.0, current version 5.15.8) + # /usr/local/opt/qt@5/lib/QtSql.framework/Versions/5/QtSql (compatibility version 5.15.0, current version 5.15.8) + # /usr/local/opt/qt@5/lib/QtWidgets.framework/Versions/5/QtWidgets (compatibility version 5.15.0, current version 5.15.8) + # /usr/local/opt/xerces-c/lib/libxerces-c-3.2.dylib (compatibility version 0.0.0, 
current version 0.0.0) + # /usr/local/opt/xalan-c/lib/libxalan-c.112.dylib (compatibility version 112.0.0, current version 112.0.0) + # /usr/lib/libc++.1.dylib (compatibility version 1.0.0, current version 1300.36.0) + # /usr/lib/libSystem.B.dylib (compatibility version 1.0.0, current version 1319.0.0) + # + # After running `macdeployqt`, all the paths for non-system libraries will be changed to ones beginning + # '@loader_path/../Frameworks/', as will be seen from the subsequent output of running `otool`. + # + # We want to grab: + # - the directory containing libxalan-c, as that's the same directory in which we should find libxalanMsg + # - information that would allow us to find libqsqlpsql.dylib .:TODO:. Still to work out how to do this. For + # now, I think that means users requiring PostgreSQL support on MacOS will need to build the app from + # source. + # + xalanDir = '' + xalanLibName = '' + xalanMatch = re.search(r'^\s*(\S+/)(libxalan-c\S*.dylib)', otoolOutputExe, re.MULTILINE) + if (xalanMatch): + # The [1] index gives us the first parenthesized subgroup of the regexp match, which in this case should be + # the directory path to libxalan-c.xxx.dylib + xalanDir = xalanMatch[1] + xalanLibName = xalanMatch[2] + else: + log.warning( + 'Could not find libxalan dependency in ' + capitalisedProjectName + + ' so assuming /usr/local/opt/xalan-c/lib/' + ) + xalanDir = '/usr/local/opt/xalan-c/lib/' + xalanLibName = 'libxalan-c.112.dylib' + log.debug('xalanDir: ' + xalanDir + '; contents:') + abortOnRunFail(subprocess.run(['ls', '-l', xalanDir], capture_output=False)) + + # + # Strictly speaking, we should look at every /usr/local/opt/.../*.dylib dependency of our executable, and run + # each of those .dylib files through otool to get its dependencies, then repeat until we find no new + # dependencies. Then we should ensure each dependency is copied into the app bundle and whatever depends on it + # knows where to find it etc. 
Pretty soon we'd have ended up reimplementing macdeployqt. Fortunately, in + # practice, for Xalan, it suffices to grab libxalanMsg and put it in the same directory in the bundle as + # libxalanc. + # + # We use otool to get the right name for libxalanMsg, which is typically listed as a relative path dependency + # eg '@rpath/libxalanMsg.112.dylib'. + # + log.debug('Running otool -L on ' + xalanLibName) + otoolOutputXalan = abortOnRunFail( + subprocess.run(['otool', + '-L', + xalanDir + xalanLibName], + capture_output=True) + ).stdout.decode('UTF-8') + log.debug('Output of `otool -L' + xalanDir + xalanLibName + '`: ' + otoolOutputXalan) + xalanMsgLibName = '' + xalanMsgMatch = re.search(r'^\s*(\S+/)(libxalanMsg\S*.dylib)', otoolOutputXalan, re.MULTILINE) + if (xalanMsgMatch): + xalanMsgLibName = xalanMsgMatch[2] + else: + log.warning( + 'Could not find libxalanMsg dependency in ' + xalanDir + xalanLibName + + ' so assuming libxalanMsg.112.dylib' + ) + xalanMsgLibName = 'libxalanMsg.112.dylib' + log.debug('Copying ' + xalanDir + xalanMsgLibName + ' to ' + dir_packages_mac_frm.as_posix()) + shutil.copy2(xalanDir + xalanMsgLibName, dir_packages_mac_frm) + + # + # Now let macdeployqt do most of the heavy lifting + # + log.debug('Running macdeployqt') + os.chdir(dir_packages_platform) + abortOnRunFail( + # + # Note that app bundle name has to be the first parameter and options come afterwards. + # The -executable argument is to automatically alter the search path of the executable for the Qt libraries + # (ie so the executable knows where to find them inside the bundle) + # + # We do not use the -dmg option, partly because we might still have some more fix-up work to do before + # packaging up the directory tree (although so far I think we can do it all beforehand) and partly because + # we want more control over the dmg (eg to specify an icon for it). 
+ # + subprocess.run(['macdeployqt', + macBundleDirName, + '-verbose=2', # 0 = no output, 1 = error/warning (default), 2 = normal, 3 = debug + '-executable=' + macBundleDirName + '/Contents/MacOS/' + capitalisedProjectName], + capture_output=False) + ) + + log.debug('Running otool after macdeployqt') + os.chdir(dir_packages_mac_bin) + abortOnRunFail(subprocess.run(['otool', '-L', capitalisedProjectName], capture_output=False)) + abortOnRunFail(subprocess.run(['otool', '-l', capitalisedProjectName], capture_output=False)) + + # + # Now that we have the app bundle directory structure, we need to compress it into a dmg file + # + # You can pass parameters in to dmgbuild on the command line, but it gets a bit unwieldy if you have a lot of + # them. We use the alternative method of providing a configuration file (which is actually just a Python + # script). So we generate that first. + # + # It would be neat if we could invoke dmgbuild directly from Python, but I haven't found a way to do this. 
+ # + log.debug('Creating .dmg file') + os.chdir(dir_packages_platform) + settingsFileName = 'dmgSettings.py' + dmgFileName = capitalisedProjectName + '-' + buildConfig['CONFIG_VERSION_STRING'] + '.dmg' + with open(settingsFileName, 'wt') as sf: + sf.write('#------------------------------------------------------------------------------------\n') + sf.write('# Settings file for dmgbuild to generate ' + dmgFileName + '\n') + sf.write('#------------------------------------------------------------------------------------\n') + sf.write('\n') + sf.write('# Compressed (bzip2) format\n') + sf.write('format = "UDBZ"\n') + sf.write('\n') + sf.write('# This is the default file system, but no harm to be explicit\n') + sf.write('filesystem = "HFS+"\n') + sf.write('\n') + sf.write('# Disk image holds just one folder\n') + sf.write('file = "'+ macBundleDirName + '"\n') + sf.write('\n') + sf.write('# Icon used to badge the system’s standard external disk icon\n') + sf.write('# This is a convenient way to construct a suitable icon from your application’s icon\n') + # Doco implies path should start with /Applications/, but this does not work + sf.write('badge_icon = "'+ macBundleDirName + '/Contents/Resources/' + capitalisedProjectName + 'Icon.icns"\n') + sf.write('\n') + sf.write('# Expected usage is drag to install, so default view of icons makes most sense\n') + sf.write('default_view = "icon-view"\n') + # + # Confusingly, although the .dmg file name and the volume name can be passed in via the settings file they are + # nonetheless required parameters on the command line. 
Specifically, usage is: + # dmgbuild [-h] [-s SETTINGS] [-D DEFINES] [--no-hidpi] volume-name output.dmg + # + abortOnRunFail( + subprocess.run( + ['dmgbuild', + '-s', settingsFileName, + capitalisedProjectName + ' ' + buildConfig['CONFIG_VERSION_STRING'], + dmgFileName], + capture_output=False + ) + ) + os.chdir(previousWorkingDirectory) + log.info('Created ' + dmgFileName + ' in directory ' + dir_packages_platform.as_posix()) + + # + # Make the checksum file + # + log.info('Generating checksum file for ' + dmgFileName) + writeSha256sum(dir_packages_platform, dmgFileName) + + case _: + log.critical('Unrecognised platform: ' + platform.system()) + exit(1) + + # If we got this far, everything must have worked + print() + print('⭐ Packaging complete ⭐') + print('See:') + print(' ' + dir_packages_platform.as_posix() + ' for binaries') + print(' ' + dir_packages_source.as_posix() + ' for source') + return + +#----------------------------------------------------------------------------------------------------------------------- +# .:TBD:. 
Let's see if we can do a .deb package +#----------------------------------------------------------------------------------------------------------------------- +def doDebianPackage(): + return + +#----------------------------------------------------------------------------------------------------------------------- +# Act on command line arguments +#----------------------------------------------------------------------------------------------------------------------- +# See above for parsing +match args.subCommand: + + case 'setup': + doSetup(setupOption = args.setupOption) + + case 'package': + doPackage() + + # If we get here, it's a coding error as argparse should have already validated the command line arguments + case _: + log.error('Unrecognised command "' + args.subCommand + '"') + exit(1) diff --git a/doc/manpage.1.md.in b/doc/manpage.1.md.in new file mode 100644 index 000000000..cc35a9ab6 --- /dev/null +++ b/doc/manpage.1.md.in @@ -0,0 +1,29 @@ +% @CONFIG_APPLICATION_NAME_AC@(1) @CONFIG_APPLICATION_NAME_LC@ @CONFIG_VERSION_STRING@ +% @CONFIG_ORGANIZATION_NAME@ +% @CONFIG_BUILD_MONTH_AND_YEAR@ + +# NAME +@CONFIG_APPLICATION_NAME_LC@ - GUI beer brewing software + +# SYNOPSIS +**@CONFIG_APPLICATION_NAME_LC@** + +# DESCRIPTION +@CONFIG_APPLICATION_NAME_UC@ is a calculator for brewing beer. It is a Qt-based program which +allows you to create recipes from a database of ingredients. It calculates +all the important parameters, helps you with mash temperatures, and just +makes the process of recipe formulation much easier. + +# COPYRIGHT +Copyright © various authors 2009-2023 -- see accompanying documentation + +@CONFIG_APPLICATION_NAME_UC@ is free software: you can redistribute it and/or modify it under the terms of the GNU +General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your +option) any later version. 
+ +@CONFIG_APPLICATION_NAME_UC@ is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even +the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for +more details. + +You should have received a copy of the GNU General Public License along with this program. If not, see +. diff --git a/brewtarget.desktop b/linux/brewtarget.desktop similarity index 99% rename from brewtarget.desktop rename to linux/brewtarget.desktop index de206ff4b..3fee540d1 100644 --- a/brewtarget.desktop +++ b/linux/brewtarget.desktop @@ -8,4 +8,3 @@ X-KDE-StartupNotify=true Icon=brewtarget Terminal=false Type=Application - diff --git a/meson.build b/meson.build new file mode 100644 index 000000000..ad5a4e3df --- /dev/null +++ b/meson.build @@ -0,0 +1,1449 @@ +# +# meson.build is part of Brewtarget, and is copyright the following authors 2022-2023: +# • Matt Young +# +# Brewtarget is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Brewtarget is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied +# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more +# details. +# +# You should have received a copy of the GNU General Public License along with this program. If not, see +# . 
+# + +#----------------------------------------------------------------------------------------------------------------------- +# +# ⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐ +# ⭐⭐⭐ THIS IS EXPERIMENTAL - YOU CAN ALSO STILL USE TRIED-AND-TESTED CMAKE TO BUILD THE PRODUCT +# ⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐⭐ +# +# STEP 1: Ensure Python is installed: +# ----------------------------------- +# On Ubuntu and other Debian-based versions of Linux: +# sudo apt install python3 +# +# On Windows, in the 32-bit MSYS2 (https://www.msys2.org/) environment: +# pacman -S --needed mingw-w64-i686-python +# pacman -S --needed mingw-w64-i686-python-pip +# On Windows, in the 64-bit MSYS2 environment you would do the following HOWEVER NB WE HAVE NOT GOT PACKAGING WORKING +# FOR 64-BIT BUILDS YET SO THIS IS NOT SUPPORTED AND MAY REQUIRE CHANGES TO THE bt SCRIPT: +# pacman -S --needed mingw-w64-x86_64-python +# pacman -S --needed mingw-w64-x86_64-python-pip +# +# On a Mac with homebrew (https://brew.sh/) installed +# brew install python@3.11 +# +# +# STEP 2 (WINDOWS ONLY): Extra set-up +# ----------------------------------- +# On Windows, there are a couple of extra things we need to do before running the bt script: +# +# - For historical reasons, Linux and other platforms need to run both Python v2 (still used by some bits of +# system) and Python v3 (eg that you installed yourself) so there are usually two corresponding Python +# executables, python2 and python3. On Windows there is only whatever Python you installed and it's called +# python.exe. To keep the shebang in the bt script working, we just make a softlink to python called python3. +# +# - Getting Unicode input/output to work is fun. 
We should already have a Unicode locale, but it seems we also +# need to set PYTHONIOENCODING (see https://docs.python.org/3/using/cmdline.html#envvar-PYTHONIOENCODING, even +# though it seems to imply you don't need to set it on recent versions of Python). +# +# - The version of Pip we install above does not put it in the "right" place. Specifically it will not be in the +# PATH when we run bt. The following seems to be the least hacky way around this: +# curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py +# python get-pip.py +# python -m pip install -U --force-reinstall pip +# See https://stackoverflow.com/questions/48087004/installing-pip-on-msys for more discussion on this. +# +# TLDR: Here's what you need to run in the MSYS2 Mintty terminal: +# if [[ ! -f $(dirname $(which python))/python3 ]]; then ln -s $(which python) $(dirname $(which python))/python3; fi +# export PYTHONIOENCODING=utf8 +# curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py +# python get-pip.py +# python -m pip install -U --force-reinstall pip +# +# +# STEP 3: Automatically install other dependencies and set up the Meson build: +# ---------------------------------------------------------------------------- +# Then everything else can be installed by running: +# ./bt setup all +# +# This will also set up the Meson build. Amongst other things, this creates the 'mbuild' directory tree for the Meson +# build (so no clashes with a CMake build in the 'build' directory tree). 
+# +# Alternatively, if you decided to install all the dependencies manually, or if you need to reset the build directory +# after it got in a bad state, you can run: +# ./bt setup +# +# +# STEP 4: Compile, test, install: +# ------------------------------- +# Everything else is done from the 'mbuild' directory, so start with: +# cd mbuild +# +# To compile: +# meson compile +# Alternatively, to get more detailed output: +# meson compile --verbose +# +# To run unit tests: +# meson test +# +# Then to install: +# meson install +# Or, on Linux, you can do: +# sudo meson install +# which avoids the pop-up window +# +# To build source packages (in the meson-dist subdirectory of the build directory): +# meson dist +# This will build a .tar.xz file and create a corresponding .tar.xz.sha256sum file for it. +# +# +# STEP 5 (OPTIONAL): Build distributable packages +# ----------------------------------------------- +# To build binary packages: +# ../bt package ⭐⭐⭐ This is the bit that is not yet working on all platforms ⭐⭐⭐ +# +# +# Finally, note that if you want to add new build targets or change the names of existing targets, you have to run the +# following command from the same directory as this file: +# meson --reconfigure mbuild +# See https://stackoverflow.com/questions/63329923/how-to-make-meson-scan-for-new-target for more on this. +# Alternatively, you can run 'bt setup' again. 
+# +#----------------------------------------------------------------------------------------------------------------------- + + +#======================================================================================================================= +#================================================== Project Settings =================================================== +#======================================================================================================================= +# +# We'll get an error if 'project' isn't the first call in this file +# +# Note that we need: +# - Meson 0.56.0 or newer to use Meson's 'substring' call. +# - Meson 0.59.0 or newer to use qt.compile_resources, qt.compile_ui and qt.compile_moc +# - Meson 0.60.0 or newer to use + to append items to lists (aka 'list.' feature -- at least that's what the +# warning message says if you've specified a lower minimum version of Meson) +# We would/will need: +# - Meson 0.62.0 or newer for dep 'dl' custom lookup +# +# NB: Per https://mesonbuild.com/Reference-manual_functions.html#project the default_options settings "are only used +# when running Meson for the first time"! So if you change any of the default_options settings you *MUST* delete +# the entire build directory and run +# meson setup +# again to recreate the build directory and all its contained config. Eg, if you are in the mbuild directory, you +# need to run: +# cd .. +# rm -r mbuild +# meson setup mbuild +# cd mbuild +# meson compile +# Otherwise, as explained at +# https://mesonbuild.com/FAQ.html#why-are-changes-to-default-project-options-ignored, your changes WILL HAVE NO +# EFFECT. TLDR this is because you can change all the settings via the command line and it would be hard to keep +# track of where a setting had last been modified, so the default_options are only ever read once. +# See also https://github.com/mesonbuild/meson/issues/2193 for further discussion about this. 
+# +# Default options (mostly ultimately controlling compiler settings): +# +# - cpp_std We need C++20 for std::map::contains(), C++17 or later for nested namespaces and structured +# bindings, and C++11 or later for lambdas. +# +# - warning_level 3 is the highest level ie warns about the most things. For gcc it translates to +# -Wall -Winvalid-pch -Wnon-virtual-dtor -Wextra -Wpedantic +# +# - prefer_static We want to try static linking before shared linking because it makes packaging a lot easier on +# Windows and Mac. NB: This requires meson 0.63.0 or newer. Current version of meson in Ubuntu +# 22.04 repositories is 0.61.2. For the moment, we do static setting on a library-by-library basis +# (by setting 'static : true' on all the dependency() calls. +# +# - buildtype For the moment at least, I'm not making a distinction between debug and release builds. Unless we +# find some compelling performance etc reason to do otherwise, my instinct is to have as much diagnostic +# information in the build in "release" as we would in "development/debug", on the grounds that it can +# only help if an end user hits a core-dumping bug. +# Meson encourages you to use either the buildtype option or the debug and optimization options +# rather than setting compiler options directly. However, this does not give us as much control as we +# would like over compiler flags. Eg switching 'debug' to 'true' turns on the '-g' flag (equivalent to +# '-g2') on GCC, but there isn't a way via the meson options to set '-g3' for GCC. So, we set +# 'buildtype=plain' and manage compiler flags directly. 
+# +# +project('brewtarget', 'cpp', + version: '3.0.7', + license: 'GPL-3.0-or-later', + meson_version: '>=0.60.0', + default_options : ['cpp_std=c++20', + 'warning_level=3', +# 'prefer_static=true', See comment above for why this is commented-out for now + 'buildtype=plain']) + +# +# Although Meson itself is written in Python, Meson build files use a slightly different syntax and have less +# functionality than Python. See +# https://mesonbuild.com/FAQ.html#why-is-meson-not-just-a-python-module-so-i-could-code-my-build-setup-in-python and +# links therefrom for the rationale for avoiding being a full programming language. +# +# Besides some (sometimes annoying) variations in syntax, this also means that you sometimes have to do things in a +# slightly more cumbersome way than you would in a Python script. Eg here, in regular Python, we would write: +# capitalisedProjectName = projectName.capitalize() +# But meson.project_name() returns a Meson String, not a Python String, so there's a bit more work to do to get the same +# result. +# +projectName = meson.project_name().to_lower() +capitalisedProjectName = projectName.substring(0, 1).to_upper() + projectName.substring(1) + +message('⭐ Building', projectName, 'version', meson.project_version(), 'for', host_machine.system(), + 'on', build_machine.system(), 'in', meson.project_source_root(), '⭐') +compiler = meson.get_compiler('cpp') +message('Using Meson', meson.version(), ', ', compiler.get_id(), 'compiler and', compiler.get_linker_id(), 'linker') + +# +# We need two versions of the main executable name because they are different on Windows: +# - We need a "target" name _without_ the '.exe' suffix for the executable() command because, on Windows, Meson will +# always add its own '.exe' suffix and we don't want to end up with '.exe.exe' as the suffix! +# - Everywhere else, we want the actual name of the file, including the '.exe' suffix. 
+# +# The default (on Windows and Linux) is to use Unix-style executable names, ie all lower case +# +mainExecutableTargetName = projectName +if host_machine.system() == 'darwin' + # On Mac we don't need a suffix but have always capitalised the executable name because "Don't question the APPLE". + mainExecutableTargetName = capitalisedProjectName +endif +if host_machine.system() == 'windows' + mainExecutableFileName = mainExecutableTargetName + '.exe' +else + mainExecutableFileName = mainExecutableTargetName +endif + +testRunnerTargetName = mainExecutableTargetName + '_tests' + +#======================================================================================================================= +#==================================================== Meson modules ==================================================== +#======================================================================================================================= +# Import the Qt tools. See https://mesonbuild.com/Qt5-module.html +qt = import('qt5') +# File System module - see https://mesonbuild.com/Fs-module.html +fs = import('fs') + +#======================================================================================================================= +#==================================================== Build options ==================================================== +#======================================================================================================================= +prefix = get_option('prefix') + +#======================================================================================================================= +#============================================== Frameworks and Libraries =============================================== +#======================================================================================================================= +# +# It would be neat within the build system to automate the _installation_ and upgrade of libraries and 
frameworks on +# which we depend. However, I have yet to find a universal pain-free solution. +# +# Meson has its own dependency management system, Wrap, but the list of pre-provided projects at +# https://mesonbuild.com/Wrapdb-projects.html does not include Qt, Boost, Xerces, Xalan or Valijson. +# +# You can bridge out to other systems such as Conan or vcpkg, but it's a bit clunky. +# +# So, for now at least, we manage dependency installation in the `bt` Python script. +# +# Although we request static libraries in a lot of places, we don't always get them, so we assume we need to deal with +# shared libraries (aka DLLs on Windows). +# +# Aside from ensuring all dependencies are present on the build machine, we also have to worry about run-time +# dependencies for packaging. In particular, on Windows and Mac, because there is not built-in default package manager, +# we typically need to include in our package all the non-system shared libraries on which it depends. For the Qt +# libraries, there are handy tools `windeployqt` and `macdeployqt` that do most of the necessary work. However, these +# tools do not (reliably) detect other shared libraries on which we depend. The paths of these shared libraries should +# be knowable during compilation (or more specifically linking). We want to get the paths during the build so that we +# can export them for use in the packaging step (which is done outside of Meson by the "build tool" bt Python script. +# +# Finding out how to get shared library paths information was, err, fun because it's not very well documented. +# Eventually, I realised that you can look in meson-private/cmake_XercesC/cmake_trace.txt, +# meson-private/cmake_XalanC/cmake_trace.txt and so on to see what information Meson got back from CMake and thus know +# which CMake variables are exposed via the get_variable() call. 
+# +sharedLibraryPaths = [] + +#========================================================= Qt ========================================================== +# We need not just the "core" bit of Qt but various "optional" elements. +# +# We try to keep the minimum Qt version we need as low as we can. +# +# Note that if you change the minimum Qt version, you need to make corresponding changes to the .github/workflows/*.yml +# files so that GitHub uses the appropriate version of Qt for the automated builds. +# +# For the moment, max version we can have here is 5.9.5, because that's what Ubuntu 18.04 topped out at + +# Tell meson which Qt modules we need +qtCommonDependencies = dependency('qt5', + version : '>=5.9.5', + modules : ['Core', + 'Gui', # Needed for QColor on Mac? + 'Multimedia', + 'Network', + 'PrintSupport', + 'Sql', + 'Svg', # Needed to make the deploy scripts pick up the svg plugins + 'Widgets', + 'Xml'], # TBD: Not sure we need this any more + include_type : 'system', + static : true) +# The Qt Gui module is only needed for the main program. (We don't want the tests to try to load it or it could barf +# in a GitHub action that does not have a display running.) +qtMainExeDependencies = dependency('qt5', version : '>=5.9.5', modules: ['Gui']) +# The Qt Test module is only needed for the unit tests +qtTestRunnerDependencies = dependency('qt5', version : '>=5.9.5', modules: ['Test']) + +#===================================================== Find Boost ====================================================== +# Boost is a collection of separate libraries, some, but not all, of which are header-only. We only specify the Boost +# libraries that we actually use. +# +# On Linux, there are cases where we need a more recent version of a Boost library than is readily-available in system- +# supplied packages. I haven't found a slick way to solve this in CMake, though https://github.com/Orphis/boost-cmake +# looks promising. 
(For header-only Boost libraries, you might think it would be relatively painless to pull them in +# from where they are hosted on GitHub (see https://github.com/boostorg), but this is not the case. AFAICT you can't +# easily pull a specific release, and just pulling master doesn't guarantee that everything compiles.) So, anyway, on +# Debian-based distros of Linux, such as Ubuntu, you need to do the following to install Boost 1.79 in place of whatever +# (if anything) is already installed: +# +# $ sudo apt remove boost-all-dev +# $ cd ~ +# $ mkdir boost-tmp +# $ cd boost-tmp +# $ wget https://boostorg.jfrog.io/artifactory/main/release/1.79.0/source/boost_1_79_0.tar.bz2 +# $ tar --bzip2 -xf boost_1_79_0.tar.bz2 +# $ cd boost_1_79_0 +# $ ./bootstrap.sh --prefix=/usr +# $ sudo ./b2 install +# $ cd ../.. +# $ sudo rm -rf boost-tmp +# +# (Obviously if you want to make the necessary change to install an even more recent version than Boost 1.79 then that +# should be fine.) +# +# We do the same in .github/workflows/linux-ubuntu.yml to make GitHub automated builds work. +# +# Note that this means we want to _statically_ link Boost rather than force end users to have to do all the palava above +# +# ************************ +# *** Boost Stacktrace *** +# ************************ +# +# We use this for diagnostics. In certain error cases it's very helpful to be able to log the call stack. +# +# On Windows, using MSYS2, the mingw-w64-boost packages do not include libboost_stacktrace_backtrace, but +# https://www.boost.org/doc/libs/1_76_0/doc/html/stacktrace/configuration_and_build.html suggests it is not required +# (because on Windows, if you have libbacktrace installed, you can set BOOST_STACKTRACE_USE_BACKTRACE in header-only +# mode). +# +# .:TODO:. Not sure how to get libboost_stacktrace_backtrace installed on Mac. It doesn't seem to be findable by +# CMake after installing Boost via Homebrew (https://brew.sh/). 
For the moment, skip trying to use
+# libboost_stacktrace_backtrace on Mac
+#
+# .:TODO:. So far don't have stacktraces working properly on Windows (everything shows as register_frame_ctor), so
+# that needs some more investigation. (It could be that it's a bug in Boost, at least according to
+# https://stackoverflow.com/questions/54333608/boost-stacktrace-not-demangling-names-when-cross-compiled)
+#
+# ******************
+# *** Boost JSON ***
+# ******************
+#
+# Boost JSON is an (optionally) header-only library that was introduced in Boost 1.75 in December 2020. One of the
+# features we use, JSON pointers (the equivalent of XML's XPaths) was only introduced in Boost 1.79. As of March
+# 2022, Ubuntu 20.04 LTS only has packages for Boost 1.71 from August 2019, hence the need to manually install a
+# newer Boost.
+#
+# ******************
+# *** Boost.Core ***
+# ******************
+#
+# Boost.Core, part of collection of the Boost C++ Libraries, is a collection of core utilities used by other Boost
+# libraries. Boost JSON needs a more recent version than 1.71.
+#
+# For Boost, per https://mesonbuild.com/Dependencies.html#boost, we only need to supply module names for libraries we
+# need to link against. For the header-only Boost libraries, the 'boost' dependency suffices.
+boostModules = []
+if host_machine.system() == 'linux'
+ boostModules += 'stacktrace_backtrace'
+ add_global_arguments('-DBOOST_STACKTRACE_LINK', language : 'cpp')
+ add_global_arguments('-DBOOST_STACKTRACE_USE_BACKTRACE', language : 'cpp')
+endif
+boostDependency = dependency('boost',
+ version : '>=1.79.0',
+ modules : boostModules,
+ static : true)
+message('Boost:', boostDependency.name(), 'found =', boostDependency.found(), 'version =', boostDependency.version())
+
+#
+# Extra requirements for Boost Stacktrace
+#
+# Per https://www.boost.org/doc/libs/1_76_0/doc/html/stacktrace/configuration_and_build.html, by default
+# Boost.Stacktrace is a header-only library. 
However, you get better results by linking (either statically or
+# dynamically) with a helper library. Of the various options, it seems like boost_stacktrace_backtrace gives the most
+# functionality over the most platforms. This has dependencies on:
+# - libdl on POSIX platforms -- but see note below
+# - libbacktrace
+# The latter is a widely-used cross-platform open source library available at
+# https://github.com/ianlancetaylor/libbacktrace. On some POSIX platforms it's already either installed on the system
+# (eg see https://man7.org/linux/man-pages/man3/backtrace.3.html) or available as an optional component of the GCC
+# compiler. However, it seems this is something that can change over time. It's a small and stable library, so we
+# just build it from sources -- which is done in the `bt` script when you run `bt setup all`.
+#
+# Just to make things extra fun, in 2021, the GNU compilers did away with libdl and incorporated its functionality into
+# libc, per section 2.3 of release info at https://sourceware.org/glibc/wiki/Release/2.34. This means, if we're using
+# the GNU tool chain and libc is v2.34 or newer, then we should NOT look for libdl, as we won't find it! To find the
+# version of libc, you execute 'ldd --version', which gives multi-line output, of which the first line will be something
+# such as:
+# ldd (Ubuntu GLIBC 2.35-0ubuntu3.1) 2.35
+# In this case, that means libc is version 2.35
+#
+# If we _don't_ need libdl, we just create an "unrequired" dependency. This saves having to repeat a bunch of logic
+# later on when we get to the main build. 
+# +needLibdl = false +if compiler.get_id() == 'gcc' + lddOutput = run_command('ldd', '--version', check: true).stdout().strip() + lddOutputLine1 = lddOutput.split('\n')[0] + message('ldd --version gives:', lddOutputLine1) + libcVersion = lddOutputLine1.split(' ')[-1] + message('libc version is:', libcVersion) + if libcVersion.version_compare('<2.34') + needLibdl = true + endif +endif +dlDependency = dependency('dl', required : needLibdl, static : true) +# +# On newer versions of Meson (0.62.0 and newer), the dependency has special-case code for dealing with libdl being built +# into the compiler. This means on Mac, for instance, we can have dlDependency.found() is true without there being any +# cmake variable such as PACKAGE_LIBRARIES (because the library was found by Meson special case code, not by cmake). +# +# This is why we have to supply a default dummy value, which we then check for. +# +if dlDependency.found() + dlLibraryPath = dlDependency.get_variable(default_value : '', cmake : 'PACKAGE_LIBRARIES') + message('dlLibraryPath is:', dlLibraryPath) + if dlLibraryPath != '' + sharedLibraryPaths += dlLibraryPath + endif +endif + +# Note that, unlike, say, the parameters to include_directories(), the dirs argument to find_library() must be absolute +# paths +libbacktraceDir = join_paths(meson.project_source_root(), 'third-party/libbacktrace/.libs') +backtraceDependency = compiler.find_library('backtrace', + required : true, + static : true, + dirs : [libbacktraceDir]) + +#======================================== Find the other libraries we depend on ======================================== +# +# See https://mesonbuild.com/Reference-manual_returned_dep.html for what info we can pull from a dependency object +# +# For BeerXML processing we need Xerces-C++ and Xalan-C++. Meson can find both of these automatically using CMake's +# find_package(), as long as (a) CMake is installed(!) 
and (b) we provide the right library names ('XercesC' per +# https://cmake.org/cmake/help/latest/module/FindXercesC.html and 'XalanC' per +# https://cmake.org/cmake/help/latest/module/FindXalanC.html) +# +xercesDependency = dependency('XercesC', + version : '>=3.2.2', + required : true, + static : true) +xercesLibPaths = xercesDependency.get_variable(cmake : 'PACKAGE_LIBRARIES') +message('Xerces Library:', xercesDependency.name(), 'found =', xercesDependency.found(), + 'version =', xercesDependency.version(), 'path(s)=', xercesLibPaths) +sharedLibraryPaths += xercesLibPaths +xalanDependency = dependency('XalanC', + version : '>=1.11.0', + required : true, + static : true) +xalanLibPaths = xalanDependency.get_variable(cmake : 'PACKAGE_LIBRARIES') +message('Xalan Library:', xalanDependency.name(), 'found =', xalanDependency.found(), + 'version =', xalanDependency.version(), 'path(s)=', xalanLibPaths) +sharedLibraryPaths += xalanLibPaths + +#====================================================== Valijson ======================================================= +# Don't need to do anything special, other than set include directories below, as it's header-only and we pull it in as +# a Git submodule. + +#==================================================== Other headers ==================================================== +# Other directories to search in for headers. Meson will barf an error if any of these directories does not exist. 
+includeDirs = include_directories('src', 'third-party/libbacktrace') + + +#======================================================================================================================= +#============================================= Extra Windows dependencies ============================================== +#======================================================================================================================= +if host_machine.system() == 'windows' + # + # We can't assume that the person running the code will have MSYS2/MinGW installed, so we need to include the DLLs + # that ship with them and get pulled in by the packaging process. There is a bit of trial-and-error in compiling + # this list, but, mostly, if you miss a needed DLL from the package, Windows will give you an error message at + # start-up telling you which DLL(s) it needed but could not find. + # + foreach extraLib : ['gcc', + 'winpthread', + 'stdc++', + 'xalanMsg'] + extraLibDependency = compiler.find_library(extraLib, required : true) + if extraLibDependency.found() +# extraLibPath = extraLibDependency.get_variable(cmake : 'LIB_ARCH_LIST') +# message(extraLib, ' found at', extraLibPath) +# sharedLibraryPaths += extraLibPath + else +# message(extraLib, ' not found') + endif + endforeach +endif + +#======================================================================================================================= +#============================================== Extra Apple dependencies =============================================== +#======================================================================================================================= +if host_machine.system() == 'darwin' + # Statically linking Xalan, Xerces etc requires CFStringLowercase, CFStringUppercase, etc on Mac + corefoundationDependency = dependency( + 'appleframeworks', + modules: ['CoreFoundation'], + required: false, + ) +endif + 
+#======================================================================================================================= +#===================================================== Input Files ===================================================== +#======================================================================================================================= +# Sub-directories of the one containing this (meson.build) file are +# src = C++ source code +# ui = QML UI layout files +# data = Binary files, including sounds and default database +# translations = Translated texts +# mac = Mac-specific files (desktop icon) +# win = Windows-specific files (desktop icon) +# packaging = Packaging-related config +#======================================================================================================================= + +# +# List of the C++ source files that are common to the app and the unit tests - ie all .cpp files _except_ main.cpp and +# test.cpp +# +# See https://mesonbuild.com/FAQ.html#why-cant-i-specify-target-files-with-a-wildcard for why it is strongly recommended +# not to use wildcard specification. (This is common to many build systems.) 
+# +# You can recreate the body of this list by running the following from the bash prompt in the mbuild directory: +# find ../src -name '*.cpp' | grep -v 'src/unitTests/' | grep -v '/main.cpp$' | sed "s+^../+ \'+; s+$+\',+" | sort +# +# The files() wrapper around the array ensures that all the files exist and means you don't have to worry as much about +# subdirectories as you might otherwise -- see https://mesonbuild.com/Reference-manual_functions.html#files +# +commonSourceFiles = files([ + 'src/AboutDialog.cpp', + 'src/AlcoholTool.cpp', + 'src/Algorithms.cpp', + 'src/AncestorDialog.cpp', + 'src/Application.cpp', + 'src/BeerColorWidget.cpp', + 'src/boiltime.cpp', + 'src/BrewDayScrollWidget.cpp', + 'src/BrewDayFormatter.cpp', + 'src/BrewNoteWidget.cpp', + 'src/BtColor.cpp', + 'src/BtDatePopup.cpp', + 'src/BtDigitWidget.cpp', + 'src/BtFieldType.cpp', + 'src/BtFolder.cpp', + 'src/BtLabel.cpp', + 'src/BtLineEdit.cpp', + 'src/BtSplashScreen.cpp', + 'src/BtTabWidget.cpp', + 'src/BtTextEdit.cpp', + 'src/BtTreeFilterProxyModel.cpp', + 'src/BtTreeItem.cpp', + 'src/BtTreeModel.cpp', + 'src/BtTreeView.cpp', + 'src/ConverterTool.cpp', + 'src/CustomComboBox.cpp', + 'src/database/BtSqlQuery.cpp', + 'src/database/Database.cpp', + 'src/database/DatabaseSchemaHelper.cpp', + 'src/database/DbTransaction.cpp', + 'src/database/ObjectStore.cpp', + 'src/database/ObjectStoreTyped.cpp', + 'src/EquipmentButton.cpp', + 'src/EquipmentEditor.cpp', + 'src/EquipmentListModel.cpp', + 'src/FermentableDialog.cpp', + 'src/FermentableEditor.cpp', + 'src/FermentableSortFilterProxyModel.cpp', + 'src/HeatCalculations.cpp', + 'src/HopDialog.cpp', + 'src/HopEditor.cpp', + 'src/HopSortFilterProxyModel.cpp', + 'src/Html.cpp', + 'src/HydrometerTool.cpp', + 'src/IbuGuSlider.cpp', + 'src/InstructionWidget.cpp', + 'src/InventoryFormatter.cpp', + 'src/Localization.cpp', + 'src/Logging.cpp', + 'src/MainWindow.cpp', + 'src/MashButton.cpp', + 'src/MashComboBox.cpp', + 'src/MashDesigner.cpp', + 
'src/MashEditor.cpp', + 'src/MashListModel.cpp', + 'src/MashStepEditor.cpp', + 'src/MashStepTableWidget.cpp', + 'src/MashWizard.cpp', + 'src/matrix.cpp', + 'src/measurement/Amount.cpp', + 'src/measurement/ColorMethods.cpp', + 'src/measurement/IbuMethods.cpp', + 'src/measurement/Measurement.cpp', + 'src/measurement/PhysicalQuantity.cpp', + 'src/measurement/SucroseConversion.cpp', + 'src/measurement/SystemOfMeasurement.cpp', + 'src/measurement/Unit.cpp', + 'src/measurement/UnitSystem.cpp', + 'src/MiscDialog.cpp', + 'src/MiscEditor.cpp', + 'src/MiscSortFilterProxyModel.cpp', + 'src/model/BrewNote.cpp', + 'src/model/Equipment.cpp', + 'src/model/Fermentable.cpp', + 'src/model/Hop.cpp', + 'src/model/Instruction.cpp', + 'src/model/Inventory.cpp', + 'src/model/Mash.cpp', + 'src/model/MashStep.cpp', + 'src/model/Misc.cpp', + 'src/model/NamedEntity.cpp', + 'src/model/NamedEntityWithInventory.cpp', + 'src/model/NamedParameterBundle.cpp', + 'src/model/Recipe.cpp', + 'src/model/Salt.cpp', + 'src/model/Style.cpp', + 'src/model/Water.cpp', + 'src/model/Yeast.cpp', + 'src/NamedEntitySortProxyModel.cpp', + 'src/NamedMashEditor.cpp', + 'src/OgAdjuster.cpp', + 'src/OptionDialog.cpp', + 'src/PersistentSettings.cpp', + 'src/PitchDialog.cpp', + 'src/PreInstruction.cpp', + 'src/PrimingDialog.cpp', + 'src/PrintAndPreviewDialog.cpp', + 'src/RadarChart.cpp', + 'src/RangedSlider.cpp', + 'src/RecipeExtrasWidget.cpp', + 'src/RecipeFormatter.cpp', + 'src/RefractoDialog.cpp', + 'src/ScaleRecipeTool.cpp', + 'src/SimpleUndoableUpdate.cpp', + 'src/StrikeWaterDialog.cpp', + 'src/StyleButton.cpp', + 'src/StyleEditor.cpp', + 'src/StyleListModel.cpp', + 'src/StyleRangeWidget.cpp', + 'src/StyleSortFilterProxyModel.cpp', + 'src/tableModels/BtTableModel.cpp', + 'src/tableModels/BtTableModelInventory.cpp', + 'src/tableModels/FermentableTableModel.cpp', + 'src/tableModels/HopTableModel.cpp', + 'src/tableModels/MashStepTableModel.cpp', + 'src/tableModels/MiscTableModel.cpp', + 
'src/tableModels/SaltTableModel.cpp', + 'src/tableModels/WaterTableModel.cpp', + 'src/tableModels/YeastTableModel.cpp', + 'src/TimerListDialog.cpp', + 'src/TimerMainDialog.cpp', + 'src/TimerWidget.cpp', + 'src/UiAmountWithUnits.cpp', + 'src/utils/BtException.cpp', + 'src/utils/BtStringConst.cpp', + 'src/utils/BtStringStream.cpp', + 'src/utils/EnumStringMapping.cpp', + 'src/utils/ImportRecordCount.cpp', + 'src/utils/TimerUtils.cpp', + 'src/WaterButton.cpp', + 'src/WaterDialog.cpp', + 'src/WaterEditor.cpp', + 'src/WaterListModel.cpp', + 'src/WaterSortFilterProxyModel.cpp', + 'src/WaterTableWidget.cpp', + 'src/widgets/Animator.cpp', + 'src/widgets/SelectionControl.cpp', + 'src/widgets/ToggleSwitch.cpp', + 'src/widgets/UnitAndScalePopUpMenu.cpp', + 'src/xml/BeerXml.cpp', + 'src/xml/BtDomErrorHandler.cpp', + 'src/xml/XercesHelpers.cpp', + 'src/xml/XmlCoding.cpp', + 'src/xml/XmlMashRecord.cpp', + 'src/xml/XmlMashStepRecord.cpp', + 'src/xml/XmlRecipeRecord.cpp', + 'src/xml/XmlRecord.cpp', + 'src/YeastDialog.cpp', + 'src/YeastEditor.cpp', + 'src/YeastSortFilterProxyModel.cpp' +]) + +applicationMainSourceFile = files([ + 'src/main.cpp' +]) + +unitTestMainSourceFile = files([ + 'src/unitTests/Testing.cpp' +]) + +# +# These are the headers that need to be processed by the Qt Meta Object Compiler (MOC). Note that this is _not_ all the +# headers in the project. Also, note that there is a separate (trivial) list of MOC headers for the unit test runner. 
+# +# You can recreate the body of this list by running the following from the bash prompt in the mbuild directory: +# grep -rl Q_OBJECT ../src | grep -v Testing.h | sort | sed "s+^../src/+ \'src/+; s/$/\',/" +# +mocHeaders = files([ + 'src/AboutDialog.h', + 'src/AlcoholTool.h', + 'src/AncestorDialog.h', + 'src/BeerColorWidget.h', + 'src/boiltime.h', + 'src/BrewDayFormatter.h', + 'src/BrewDayScrollWidget.h', + 'src/BrewNoteWidget.h', + 'src/BtDatePopup.h', + 'src/BtDigitWidget.h', + 'src/BtFolder.h', + 'src/BtLabel.h', + 'src/BtLineEdit.h', + 'src/BtSplashScreen.h', + 'src/BtTabWidget.h', + 'src/BtTextEdit.h', + 'src/BtTreeFilterProxyModel.h', + 'src/BtTreeModel.h', + 'src/BtTreeView.h', + 'src/ConverterTool.h', + 'src/CustomComboBox.h', + 'src/database/ObjectStore.h', + 'src/EquipmentButton.h', + 'src/EquipmentEditor.h', + 'src/EquipmentListModel.h', + 'src/FermentableDialog.h', + 'src/FermentableEditor.h', + 'src/FermentableSortFilterProxyModel.h', + 'src/HopDialog.h', + 'src/HopEditor.h', + 'src/HopSortFilterProxyModel.h', + 'src/HydrometerTool.h', + 'src/IbuGuSlider.h', + 'src/InstructionWidget.h', + 'src/MainWindow.h', + 'src/MashButton.h', + 'src/MashComboBox.h', + 'src/MashDesigner.h', + 'src/MashEditor.h', + 'src/MashListModel.h', + 'src/MashStepEditor.h', + 'src/MashStepTableWidget.h', + 'src/MashWizard.h', + 'src/MiscDialog.h', + 'src/MiscEditor.h', + 'src/MiscSortFilterProxyModel.h', + 'src/model/BrewNote.h', + 'src/model/Equipment.h', + 'src/model/Fermentable.h', + 'src/model/Hop.h', + 'src/model/Instruction.h', + 'src/model/Inventory.h', + 'src/model/Mash.h', + 'src/model/MashStep.h', + 'src/model/Misc.h', + 'src/model/NamedEntity.h', + 'src/model/NamedEntityWithInventory.h', + 'src/model/Recipe.h', + 'src/model/Salt.h', + 'src/model/Style.h', + 'src/model/Water.h', + 'src/model/Yeast.h', + 'src/NamedEntitySortProxyModel.h', + 'src/NamedMashEditor.h', + 'src/OgAdjuster.h', + 'src/OptionDialog.h', + 'src/PitchDialog.h', + 'src/PrimingDialog.h', + 
'src/PrintAndPreviewDialog.h', + 'src/RangedSlider.h', + 'src/RecipeExtrasWidget.h', + 'src/RecipeFormatter.h', + 'src/RefractoDialog.h', + 'src/ScaleRecipeTool.h', + 'src/SimpleUndoableUpdate.h', + 'src/StrikeWaterDialog.h', + 'src/StyleButton.h', + 'src/StyleEditor.h', + 'src/StyleListModel.h', + 'src/StyleRangeWidget.h', + 'src/StyleSortFilterProxyModel.h', + 'src/tableModels/BtTableModel.h', + 'src/tableModels/FermentableTableModel.h', + 'src/tableModels/HopTableModel.h', + 'src/tableModels/MashStepTableModel.h', + 'src/tableModels/MiscTableModel.h', + 'src/tableModels/SaltTableModel.h', + 'src/tableModels/WaterTableModel.h', + 'src/tableModels/YeastTableModel.h', + 'src/TimerListDialog.h', + 'src/TimerMainDialog.h', + 'src/TimerWidget.h', + 'src/WaterButton.h', + 'src/WaterDialog.h', + 'src/WaterEditor.h', + 'src/WaterListModel.h', + 'src/WaterSortFilterProxyModel.h', + 'src/WaterTableWidget.h', + 'src/widgets/Animator.h', + 'src/widgets/SelectionControl.h', + 'src/widgets/ToggleSwitch.h', + 'src/YeastDialog.h', + 'src/YeastEditor.h', + 'src/YeastSortFilterProxyModel.h', +]) + +unitTestMocHeaders = files([ + 'src/unitTests/Testing.h' +]) + +# +# List of UI files +# +# You can recreate the body of this list by running the following from the bash prompt in the mbuild directory: +# find ../ui -name '*.ui' | sort | sed "s+^../ui/+ \'ui/+; s/$/\',/" +# +uiFiles = files([ + 'ui/ancestorDialog.ui', + 'ui/brewDayScrollWidget.ui', + 'ui/brewNoteWidget.ui', + 'ui/BtPrintAndPreview.ui', + 'ui/equipmentEditor.ui', + 'ui/fermentableEditor.ui', + 'ui/hopEditor.ui', + 'ui/instructionWidget.ui', + 'ui/mainWindow.ui', + 'ui/mashDesigner.ui', + 'ui/mashEditor.ui', + 'ui/mashStepEditor.ui', + 'ui/mashWizard.ui', + 'ui/miscEditor.ui', + 'ui/namedMashEditor.ui', + 'ui/ogAdjuster.ui', + 'ui/optionsDialog.ui', + 'ui/pitchDialog.ui', + 'ui/primingDialog.ui', + 'ui/recipeExtrasWidget.ui', + 'ui/refractoDialog.ui', + 'ui/strikeWaterDialog.ui', + 'ui/styleEditor.ui', + 
'ui/timerDialog.ui', + 'ui/timerListDialog.ui', + 'ui/timerMainDialog.ui', + 'ui/timerWidget.ui', + 'ui/waterDialog.ui', + 'ui/waterEditor.ui', + 'ui/yeastEditor.ui' +]) + +# +# List of translation files to update (from translatable strings in the source code) and from which the binary .qm files +# will be generated and shipped. Note that src/OptionDialog.cpp controls which languages are shown to the user as +# options for the UI +# +# .:TBD:. At the moment we are hitting a warning message similar to the one described at +# https://github.com/mesonbuild/meson/issues/5019. I _think_ this is a minor Meson bug, but it might be that I've +# misunderstood how best to reference files in subdirectories. +# +translationSourceFiles = files([ + 'translations/bt_ca.ts', # Catalan + 'translations/bt_cs.ts', # Czech + 'translations/bt_de.ts', # German + 'translations/bt_en.ts', # English + 'translations/bt_el.ts', # Greek + 'translations/bt_es.ts', # Spanish + 'translations/bt_et.ts', # Estonian + 'translations/bt_eu.ts', # Basque + 'translations/bt_fr.ts', # French + 'translations/bt_gl.ts', # Galician + 'translations/bt_nb.ts', # Norwegian Bokmal + 'translations/bt_it.ts', # Italian + 'translations/bt_lv.ts', # Latvian + 'translations/bt_nl.ts', # Dutch + 'translations/bt_pl.ts', # Polish + 'translations/bt_pt.ts', # Portuguese + 'translations/bt_hu.ts', # Hungarian + 'translations/bt_ru.ts', # Russian + 'translations/bt_sr.ts', # Serbian + 'translations/bt_sv.ts', # Swedish + 'translations/bt_tr.ts', # Turkish + 'translations/bt_zh.ts', # Chinese +]) + +# List of documentation files to be installed. Note that ${repoDir}/COPYRIGHT is NOT included here as it needs special +# case handling below. 
+ +filesToInstall_docs = files([ + 'README.markdown' +]) + +filesToInstall_data = files([ + 'data/default_db.sqlite', + 'data/DefaultData.xml' +]) + +filesToInstall_desktop = files([ + 'linux/' + projectName + '.desktop' +]) + +filesToInstall_icons = files([ + 'images/' + projectName + '.svg' +]) + +filesToInstall_windowsIcon = files([ + 'win/icon.rc' +]) + +filesToInstall_sounds = files([ + 'data/sounds/45minLeft.wav', + 'data/sounds/addFuckinHops.wav', + 'data/sounds/aromaHops.wav', + 'data/sounds/beep.wav', + 'data/sounds/bitteringHops.wav', + 'data/sounds/checkBoil.wav', + 'data/sounds/checkFirstRunnings.wav', + 'data/sounds/checkGravity.wav', + 'data/sounds/checkHydrometer.wav', + 'data/sounds/checkMashTemps.wav', + 'data/sounds/checkTemp.wav', + 'data/sounds/clarifyingAgent.wav', + 'data/sounds/cleanup.wav', + 'data/sounds/closeFuckinValves.wav', + 'data/sounds/closeValves.wav', + 'data/sounds/doughIn.wav', + 'data/sounds/drinkAnotherHomebrew.wav', + 'data/sounds/drinkHomebrew.wav', + 'data/sounds/emptyMashTun.wav', + 'data/sounds/extraPropane.wav', + 'data/sounds/flameout.wav', + 'data/sounds/flavorHops.wav', + 'data/sounds/heatWater.wav', + 'data/sounds/mashHops.wav', + 'data/sounds/pitchYeast.wav', + 'data/sounds/sanitize.wav', + 'data/sounds/sparge.wav', + 'data/sounds/startBurner.wav', + 'data/sounds/startChill.wav', + 'data/sounds/stirMash.wav', +]) + +filesToInstall_macPropertyList = files([ + 'mac/Info.plist' +]) + +filesToInstall_macIcons = files([ + 'mac/' + capitalisedProjectName + 'Icon.icns' +]) + +# This has to be a string because we're going to pass it into a script. 
+# AFAICT Meson does not provide a way for you to extract, say, full path from a file object +filesToInstall_changeLogUncompressed = 'CHANGES.markdown' + +# Summary copyright file, with names of all authors +filesToInstall_copyright = files([ + 'COPYRIGHT' +]) + +# +# GPL v3 Licence +# +# See https://www.gnu.org/licenses/translations.html for why this is only in English +# +# TBD: We have two files "COPYING.GPLv3" and "LICENSE" with identical content. I wonder if we can do away with one of +# them +# We cannot wrap this in a files() call because we need to be able to pass the name into join_paths below +# +filesToInstall_license = 'LICENSE' + +#======================================================================================================================= +#============================================ Installation sub-directories ============================================= +#======================================================================================================================= +# .:TBD:. We don't currently use installSubDir_bin, instead letting Meson decide where to put the binary +if host_machine.system() == 'linux' + #============================================= Linux Install Directories ============================================ + installSubDir_data = 'share/' + projectName + installSubDir_doc = 'share/doc/' + projectName + installSubDir_bin = 'bin' + # According to https://specifications.freedesktop.org/menu-spec/menu-spec-1.0.html#paths, .desktop files need to live + # in one of the $XDG_DATA_DIRS/applications/. (Note that $XDG_DATA_DIRS is a colon-separated list of directories, + # typically defaulting to /usr/local/share/:/usr/share/. but on another system it might be + # /usr/share/plasma:/usr/local/share:/usr/share:/var/lib/snapd/desktop:/var/lib/snapd/desktop). When combined with + # CMAKE_INSTALL_PREFIX, "share/applications" should end up being one of these. 
+ installSubDir_applications = 'share/applications' + # It's a similar but slightly more complicated story for where to put icons. (See + # https://specifications.freedesktop.org/icon-theme-spec/icon-theme-spec-latest.html#directory_layout for all the + # details.) + installSubDir_icons = 'share/icons' +elif host_machine.system() == 'windows' + #============================================ Windows Install Directories =========================================== + installSubDir_data = 'data' + installSubDir_doc = 'doc' + installSubDir_bin = 'bin' +elif host_machine.system() == 'darwin' + #============================================== Mac Install Directories ============================================= + installSubDir_data = 'Contents/Resources' + installSubDir_doc = 'Contents/Resources/en.lproj' + installSubDir_bin = 'Contents/MacOS' +else + error('Unrecognised target OS type:', host_machine.system()) +endif + +#============================================== Common Install Directories ============================================= +installSubDir_translations = installSubDir_data + '/translations_qm' + + +#======================================================================================================================= +#=========================================== Qt Meta Object Compilation etc ============================================ +#======================================================================================================================= + +# Compile Qt's resources collection files (.qrc) into C++ files for compilation +generatedFromQrc = qt.compile_resources(sources : projectName + '.qrc') + +# Compile Qt's ui files (.ui) into header files. 
+generatedFromUi = qt.compile_ui(sources : uiFiles) + +# Compile Qt's moc files (.moc) into header and/or source files +generatedFromMoc = qt.compile_moc(headers : mocHeaders, + dependencies : qtCommonDependencies) +generatedFromMocForUnitTests = qt.compile_moc(headers : unitTestMocHeaders, + dependencies : qtCommonDependencies) + +# +# We need to do two processes with Translation Source (.ts) XML files: +# - Update them from the source code, ie to ensure they have all the tr(), QObject::tr() etc calls from the .cpp files +# and all the translatable strings from the .ui files -- which can be done manually from the command line with +# lupdate +# - Generate the binary .qm files that ship with the application and are used at run time -- which can be done +# manually from the command line with lrelease +# Calling qt.compile_translations will do only the latter, so we need to do the former directly +# + +# Call lupdate to ensure the .ts files are synced with the source code. We need: +# lupdate meson.project_source_root()/src meson.project_source_root()/ui -ts [list of .ts files] +# This tells lupdate to recursively scan the src/ and ui/ subdirectories and update the specified ts files +# Fortunately, we can pass a list of File objects as a parameter to run_command and Meson does the right thing +# +# We make a point here of displaying the output of run_command because we want to show message emitted by lupdate about +# what it did. +message('Running lupdate on the following ts files:', run_command('ls', translationSourceFiles, check: true).stdout()) +message( + run_command('lupdate', + meson.project_source_root() + '/src', + meson.project_source_root() + '/ui', + '-ts', + translationSourceFiles, + check: true).stdout() +) + +# Now we can generate the necessary targets to build translation files with lrelease +# Setting install to true means we want to ship all the .qm files (so users can change language at run time). 
+translations = qt.compile_translations(ts_files : translationSourceFiles, + build_by_default : true, + install : true, + install_dir : installSubDir_translations) + +#======================================================================================================================= +#=============================================== Lists of Dependencies ================================================= +#======================================================================================================================= +commonDependencies = [qtCommonDependencies, + xercesDependency, + xalanDependency, + boostDependency, + dlDependency, + backtraceDependency] +mainExeDependencies = commonDependencies + qtMainExeDependencies +testRunnerDependencies = commonDependencies + qtTestRunnerDependencies + +#======================================================================================================================= +#================================================= Exported variables ================================================== +#======================================================================================================================= +# +# There are a number of places where we want to "export" variables from this file. In a couple of instances we are +# generating a file (using configure_file()) and in other places we are running a shell script (with run_command() or +# run_target()). Although it's not always the exact same set of variables that we need to export, there is, I think, +# enough overlap that it's worth defining all the exports once to avoid repeating ourselves. +# +# The file generation and script execution take different types of object for their "exported variables": a +# Configuration Data (cfg_data) object (see https://mesonbuild.com/Reference-manual_returned_cfg_data.html) in for the +# former and an Environment (env) object (see https://mesonbuild.com/Reference-manual_returned_env.html) for the latter. 
+# Fortunately however, both types of object can be constructed from a Dictionary (see
+# https://mesonbuild.com/Syntax.html#dictionaries), so that is what we define here.
+#
+# Also, note that the export is done during build, not install, so, eg the value of prefix below will typically _not_ be
+# affected by any `--destdir` option passed in to `meson install` because the generation was done previously when
+# `meson compile` was called.
+#
+exportedVariables = {
+ 'CONFIG_VERSION_STRING' : meson.project_version(),
+ 'CONFIG_DATA_DIR' : prefix + '/' + installSubDir_data, # This is a bit of a hack...
+ 'CONFIG_APPLICATION_NAME_UC' : capitalisedProjectName,
+ 'CONFIG_APPLICATION_NAME_LC' : projectName,
+ 'CONFIG_APPLICATION_NAME_AC' : projectName.to_upper(),
+ 'CONFIG_EXECUTABLE_NAME' : mainExecutableFileName,
+ # NB CMAKE_HOST_SYSTEM means something different than meson host_machine
+ 'CONFIG_BUILD_SYSTEM' : build_machine.system(),
+ 'CONFIG_RUN_SYSTEM' : host_machine.system(),
+ 'CONFIG_CXX_COMPILER_ID' : compiler.get_id(),
+ # Meson doesn't directly give you a way to obtain the current date and time. But it does allow you to run an external
+ # command, so this is one way to get it - relying on the fact that MSYS2 on Windows, Linux and Mac all have date
+ # available from the command line. 
+ 'CONFIG_BUILD_TIMESTAMP' : run_command('date', check: true).stdout().strip(), + # Similarly, an approximate date (eg February 2023) is generated for use on man pages + 'CONFIG_BUILD_MONTH_AND_YEAR' : run_command('date', '+"%B %Y"', check: true).stdout().strip(), + # + # This is exported for generating the compressed changelog for building a Debian package + # + 'CONFIG_CHANGE_LOG_UNCOMPRESSED' : join_paths(meson.current_source_dir(), filesToInstall_changeLogUncompressed), + + # See https://www.debian.org/doc/debian-policy/ch-binary.html#s-maintainer for more on the "maintainer", but + # essentially it's a required package property that needs to be either one person or a group mailing list. In the + # latter case, the individual maintainers need be listed in a separate property called "uploaders". Right now, we + # don't have a mailing list, so this is a moot point. + # + # Also note, per https://www.debian.org/doc/debian-policy/ch-controlfields.html#s-f-maintainer, that it's simplest + # to avoid having full stops in the maintainer's name. + 'CONFIG_PACKAGE_MAINTAINER' : 'Matt Young ', + + # Info for InstallerWindowIcon which is "Filename for a custom window icon in PNG format for the Installer + # application. + # Used on Windows and Linux, no functionality on macOS." + 'CONFIG_INSTALLER_WINDOW_ICON' : capitalisedProjectName + 'Logo.png', + 'CONFIG_INSTALLER_WINDOW_ICON_DIR' : 'images', + + # Full path of file containing full GPL v3 license text + 'CONFIG_LICENSE_TEXT_PATH' : join_paths(meson.current_source_dir(), filesToInstall_license), + + # Some installers/package formats want a one-line description + 'CONFIG_DESCRIPTION_STRING' : 'Open source brewing software', + + # Some installers, eg NSIS on Windows, want a brief copyright string + 'CONFIG_COPYRIGHT_STRING' : 'Copyright 2009-2023. 
Distributed under the terms of the GNU General Public License (version 3).', + + # Installers often want the name of the organisation supplying the product, so we need something to put there + 'CONFIG_ORGANIZATION_NAME' : 'The ' + capitalisedProjectName + ' Team', + + # Similarly, installers often want a URL link + 'CONFIG_HOMEPAGE_URL' : 'https://github.com/' + capitalisedProjectName + '/' + projectName, + + # + # On Windows and Mac, the external packaging step (managed by the `bt` build tool script) needs to know about all the + # non-Qt shared libraries on which we depend. + # + # Meson only allows you to directly export strings and ints, not lists. So, we do a bit of a hack to convert the + # list to a string that will, in the TOML format file that we care about, be interpreted as a list. This isn't + # strictly correct when the list is empty, but we hereby assert that it never will be, so it doesn't matter. + # + 'CONFIG_SHARED_LIBRARY_PATHS' : '["' + '", "'.join(sharedLibraryPaths) + '"]', + +} + +# We need to set TargetDir in config.xml for the Qt IFW installer +if host_machine.system() == 'linux' + exportedVariables += { + 'CONFIG_TARGET_INSTALL_DIR' : '/' + } +endif + +# +# Export the name/location of the desktop icon (which, on Windows, is also the same icon for the installer) for the +# packaging scripts. 
+# +if host_machine.system() == 'windows' + exportedVariables += { + 'CONFIG_INSTALLER_APPLICATION_ICON_PATH' : join_paths(meson.current_source_dir(), 'win', capitalisedProjectName + 'Icon_96.ico'), + } +elif host_machine.system() == 'darwin' + exportedVariables += { + 'CONFIG_INSTALLER_APPLICATION_ICON' : capitalisedProjectName + 'Icon.icns', + } +endif + +#======================================================================================================================= +#===================================== Generate config.h etc from config.h.in etc ====================================== +#======================================================================================================================= + +# +# First we inject build-system variables into the source code. This saves defining certain things twice - in this file +# and in a C++ source code file. It also makes it easier for Brewken and Brewtarget to share code, because there are +# fewer places where the application name is hard-coded. +# +# Taking src/config.h.in as input, we generate (in the build subdirectory only) config.h. +# +# All variables written as "@VAR@" in src/config.h.in (input file) will be replaced in config.h (output file) by the +# value of VAR in the configuration_data dictionary we define here. +# +configure_file(input : 'src/config.h.in', + output : 'config.h', + configuration : exportedVariables, + install : false) + +# +# Next we make build-system variables available to the `bt` build helper Python script by injecting them into a TOML +# file. +# +configure_file(input : 'packaging/config.toml.in', + output : 'config.toml', + configuration : exportedVariables, + install : false) + +# On Linux (and TBD Mac) we inject build system variables into the markdown text that will be used to generate the man +# page for the application. The bt build tool script will then turn this markdown into a man page and include it in +# the package. (.:TBD:. 
This means man pages don't get installed locally, but that could be fixed by extending bt to +# wrap around all meson operations.) +configure_file(input : 'doc/manpage.1.md.in', + output : 'manpage.1.md', + configuration : exportedVariables, + install : false) + +# +# We also want to inject build-system variables into various platform-specific packaging scripts, control files and so +# on. +# +# We ultimately want the generated files inside the 'packaging' subtree of the build directory, but that is not +# permitted by configure_file, so, that gets done by the bt script. +# +if host_machine.system() == 'linux' + # + # Linux: Debian Binary package control file + # + configure_file(input : 'packaging/linux/control.in', + output : 'control', + configuration : exportedVariables, + install : false) + # + # Linux: RPM binary package spec file + # + configure_file(input : 'packaging/linux/rpm.spec.in', + output : 'rpm.spec', + configuration : exportedVariables, + install : false) +elif host_machine.system() == 'windows' + # + # Windows: NSIS installer script + # + configure_file(input : 'packaging/windows/NsisInstallerScript.nsi.in', + output : 'NsisInstallerScript.nsi', + configuration : exportedVariables, + install : false) +elif host_machine.system() == 'darwin' + # + # Mac: Information Property List file + # + configure_file(input : 'packaging/darwin/Info.plist.in', + output : 'Info.plist', + configuration : exportedVariables, + install : false) +endif + +#======================================================================================================================= +#============================================= Generate manpage for Linux ============================================== +#======================================================================================================================= +if host_machine.system() == 'linux' + # + # Generate man page from markdown (which already had build system variables injected into it via meson.build) 
+ # using the pandoc utility: + # --verbose = give verbose debugging output (though currently this does not have much effect) + # -t man = generate output in man format + # -s = generate a complete man page (rather than just some text in man format) + # -o = specifies the output file + # + # Note that, although man pages are frequently compressed, the exact compression mechanism is distro-specific, so + # Meson now considers such compression outside the scope of the build system. (We therefore do it in the bt build + # tool script.) + # + # TODO: For the moment we are only producing an English man page. It would not be huge effort to produce them for + # non-English locales, so we should do that at some point. + # + pandocResult = run_command( + 'pandoc', + join_paths(meson.current_build_dir(), 'manpage.1.md'), + '--verbose', + '-t', 'man', + '-s', + '-o', join_paths(meson.current_build_dir(), projectName + '.1'), + capture : true, + check : true + ) + message('Generating man page output:', pandocResult.stdout().strip()) + if pandocResult.returncode() != 0 + error('Generating man page ERRORS:', pandocResult.stderr().strip()) + else + install_man(join_paths(meson.current_build_dir(), projectName + '.1')) + endif +endif + +#======================================================================================================================= +#======================= Install files that we ship with the software (sounds, default DB, etc) ======================== +#======================================================================================================================= +# Note that we'll get a bunch of stuff in the meson logs about 'Failed to guess install tag' but this is entirely +# harmless as we are not using tags. 
+install_data(filesToInstall_data, install_dir : installSubDir_data) +install_data(filesToInstall_docs, install_dir : installSubDir_doc) +install_data(filesToInstall_sounds, install_dir : installSubDir_data + '/sounds') +if host_machine.system() == 'linux' + # Install the icons + # Per https://specifications.freedesktop.org/icon-theme-spec/icon-theme-spec-latest.html#install_icons, "installing a + # svg icon in $prefix/share/icons/hicolor/scalable/apps means most desktops will have one icon that works for all + # sizes". + install_data(filesToInstall_icons, install_dir : installSubDir_icons + '/hicolor/scalable/apps/') + + # Install the .desktop file + install_data(filesToInstall_desktop, install_dir : installSubDir_applications) + + # Install friendly-format change log aka release notes + # Note that lintian does not like having a file called CHANGES.markdown in the doc directory, as it thinks it is a + # misnamed changelog.Debian.gz (even when changelog.Debian.gz is also present!) so you get a + # wrong-name-for-upstream-changelog warning. + # The simplest way round this is to rename CHANGES.markdown to ReleaseNotes.markdown + install_data(filesToInstall_changeLogUncompressed, rename : 'ReleaseNotes.markdown', install_dir : installSubDir_doc) + + # Debian packages need to have the copyright file in a particular place (/usr/share/doc/PACKAGE/copyright) + # RPM packages don't like having duplicate files in the same directory (eg copyright and COPYRIGHT with same + # contents). So the simplest thing is to rename COPYRIGHT to copyright for both. 
+ install_data(filesToInstall_copyright, rename : 'copyright', install_dir : installSubDir_doc) + +else + #----------- Windows and Mac ----------- + install_data(filesToInstall_copyright, install_dir : installSubDir_doc) +endif + +if host_machine.system() == 'darwin' + # Desktop icon + install_data(filesToInstall_macIcons, install_dir : installSubDir_data) +endif + +#======================================================================================================================= +#========================================= Compiler-specific settings & flags ========================================== +#======================================================================================================================= +if compiler.get_id() == 'gcc' + # + # -g3 should give even more debugging information than -g (which is equivalent to -g2) + # + # -O2 is hopefully a sensible optimisation level. It means "GCC performs nearly all supported optimizations that do + # not involve a space-speed tradeoff. As compared to -O, this option increases both compilation time and the + # performance of the generated code." + # + # -z noexecstack Is, in theory at least, to ensure/assert we do not have an executable stack. This is partly as a + # good thing in itself, and partly because, by default, rpmlint will throw a + # missing-PT_GNU_STACK-section error if we don't. + # In theory, the compiler should work out automatically whether we need an executable stack, + # decide the answer is "No" and pass all the right options to the linker. In practice, it seems + # this doesn't happen for reasons I have, as yet, to discover. + # So, we attempt to assert manually that the stack should not be executable. 
The "-z noexecstack" + # should get passed through by gcc to the linker (see + # https://gcc.gnu.org/onlinedocs/gcc/Link-Options.html#Link-Options) and the GNU linker + # (https://sourceware.org/binutils/docs/ld/Options.html) should recognise "-z noexecstack" as "Marks + # the object as not requiring executable stack". + # However, even this is not sufficient(!). So, for the moment, we suppress the rpmlint error (see + # packaging/rpmLintFilters.toml). + # + # The following are, according to some comments at + # https://stackoverflow.com/questions/52583544/boost-stack-trace-not-showing-function-names-and-line-numbers, needed + # for Boost stacktrace to work properly: + # -no-pie + # -fno-pie + # -rdynamic + # + # HOWEVER, there are a couple of gotchas: + # - For some reason, gcc on Windows does not accept -rdynamic -- so we only set this on Linux + # - On Linux, executables in Debian packages are supposed to be compiled as position-independent code, otherwise + # we'll get a 'hardening-no-pie' Lintian warning -- so we only set no-pie options on Windows + # + add_global_arguments(['-g3', + '-O2', + '-z', 'noexecstack', # NB Not '-z noexecstack' as otherwise will be passed to gcc in quotes! 
+ ], language : 'cpp') + if host_machine.system() == 'windows' + add_global_arguments (['-no-pie', '-fno-pie'], language : 'cpp') + if compiler.get_linker_id() == 'ld.bfd' + # + # GNU Linker + # + # See https://gcc.gnu.org/onlinedocs/gcc/Link-Options.html for options + # + add_global_link_arguments(['-no-pie', + '-fno-pie'], language : 'cpp') + endif + else + add_global_arguments (['-pie', '-fpie'], language : 'cpp') + add_global_link_arguments(['-pie', '-fpie'], language : 'cpp') + add_global_arguments ('-rdynamic', language : 'cpp') + add_global_link_arguments('-rdynamic', language : 'cpp') + endif +endif + +if host_machine.system() == 'darwin' + # As explained at https://stackoverflow.com/questions/5582211/what-does-define-gnu-source-imply, defining _GNU_SOURCE + # gives access to various non-standard GNU/Linux extension functions and changes the behaviour of some POSIX + # functions. + # + # This is needed for Boost stacktrace on Mac + add_project_arguments('-D_GNU_SOURCE', language : 'cpp') +endif + +#======================================================================================================================= +#========================================== Linker-specific settings & flags =========================================== +#======================================================================================================================= + +#======================================================================================================================= +#===================================================== Main builds ===================================================== +#======================================================================================================================= +# +# To keep things simple, we share almost all code between the actual executable and the unit test runner. However, we +# don't want to compile everything twice. 
So, as a trick we compile into a static library everything except the code +# that differs between actual executable and unit test runner, then this library is linked into both programs. +# +# Note however that you cannot put generatedFromQrc in the static_library as it doesn't work there. +# +commonCodeStaticLib = static_library('common_code', + commonSourceFiles, generatedFromUi, generatedFromMoc, translations, + include_directories : includeDirs, + dependencies: commonDependencies, + install : false) + +mainExecutable = executable(mainExecutableTargetName, + applicationMainSourceFile, + generatedFromQrc, + include_directories : includeDirs, + dependencies : mainExeDependencies, + link_with : commonCodeStaticLib, + install : true) + +testRunner = executable(testRunnerTargetName, + unitTestMainSourceFile, + generatedFromQrc, + generatedFromMocForUnitTests, + include_directories : includeDirs, + dependencies : testRunnerDependencies, + link_with : commonCodeStaticLib, + install : false) + +#======================================================================================================================= +#===================================================== Unit Tests ====================================================== +#======================================================================================================================= +test('Test integer sizes', testRunner, args : ['pstdintTest']) +test('Test recipe calculations - all grain', testRunner, args : ['recipeCalcTest_allGrain']) +test('Test post boil loss OG', testRunner, args : ['postBoilLossOgTest']) +test('Test unit conversions', testRunner, args : ['testUnitConversions']) +test('Test NamedParameterBundle', testRunner, args : ['testNamedParameterBundle']) +test('Test algorithms', testRunner, args : ['testAlgorithms']) +# Need a bit longer than the default 30 second timeout for the log rotation test on some platforms +test('Test log rotation', testRunner, args : ['testLogRotation'], 
timeout : 60) diff --git a/packaging/config.toml.in b/packaging/config.toml.in new file mode 100644 index 000000000..2d451e7e5 --- /dev/null +++ b/packaging/config.toml.in @@ -0,0 +1,28 @@ +# +# packaging/config.toml.in is part of Brewtarget, and is copyright the following authors 2023: +# • Matt Young +# +# Brewtarget is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Brewtarget is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied +# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more +# details. +# +# You should have received a copy of the GNU General Public License along with this program. If not, see +# . +# + +# +# This file is used by meson.build to export build system config into a TOML (https://toml.io/en/) file in the build +# directory that the bt Python script can use during packaging +# +CONFIG_VERSION_STRING = "@CONFIG_VERSION_STRING@" +CONFIG_APPLICATION_NAME_UC = "@CONFIG_APPLICATION_NAME_UC@" +CONFIG_APPLICATION_NAME_LC = "@CONFIG_APPLICATION_NAME_LC@" +# NB: This is an array so we don't want it in quotes! 
+CONFIG_SHARED_LIBRARY_PATHS = @CONFIG_SHARED_LIBRARY_PATHS@ + +CONFIG_CHANGE_LOG_UNCOMPRESSED = "@CONFIG_CHANGE_LOG_UNCOMPRESSED@" +CONFIG_PACKAGE_MAINTAINER = "@CONFIG_PACKAGE_MAINTAINER@" diff --git a/packaging/darwin/Info.plist.in b/packaging/darwin/Info.plist.in new file mode 100644 index 000000000..8d2703eec --- /dev/null +++ b/packaging/darwin/Info.plist.in @@ -0,0 +1,66 @@ + + + + + + + CFBundleDevelopmentRegion + en + + CFBundleExecutable + @CONFIG_APPLICATION_NAME_UC@ + CFBundleGetInfoString + + + CFBundleIconFile + @CONFIG_INSTALLER_APPLICATION_ICON@ + CFBundleIdentifier + com.brewtarget.Brewtarget + CFBundleInfoDictionaryVersion + 6.0 + CFBundleLongVersionString + + CFBundleName + @CONFIG_APPLICATION_NAME_UC@ + CFBundlePackageType + APPL + CFBundleShortVersionString + @CONFIG_VERSION_STRING@ + CFBundleSignature + ???? + CFBundleVersion + @CONFIG_VERSION_STRING@ + CSResourcesFileMapped + + NSHumanReadableCopyright + @CONFIG_COPYRIGHT_STRING@ + + diff --git a/packaging/generateCompressedChangeLog.sh b/packaging/generateCompressedChangeLog.sh new file mode 100755 index 000000000..cb35e90d8 --- /dev/null +++ b/packaging/generateCompressedChangeLog.sh @@ -0,0 +1,130 @@ +#!/bin/bash +# +# packaging/generateCompressedChangeLog.sh is part of Brewtarget, and is copyright the following authors 2022: +# • Matt Young +# +# Brewtarget is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Brewtarget is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied +# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more +# details. +# +# You should have received a copy of the GNU General Public License along with this program. If not, see +# . 
+# + +#----------------------------------------------------------------------------------------------------------------------- +# NB: This script is intended to be invoked from the bt build tool (see ../bt) with the following environment variables +# set: +# CONFIG_APPLICATION_NAME_LC - Same as projectName in meson.build +# CONFIG_CHANGE_LOG_UNCOMPRESSED - Input file - same as filesToInstall_changeLogUncompressed in meson.build +# CONFIG_CHANGE_LOG_COMPRESSED - Output file +# CONFIG_PACKAGE_MAINTAINER - Name and email of a project maintainer conforming to +# https://www.debian.org/doc/debian-policy/ch-binary.html#s-maintainer +# +# We assume that none of these variables contains single or double quotes (so we can save ourselves having to escape +# the values when we use them below). +# +# First thing we do is check that all these variables are set to something. +#----------------------------------------------------------------------------------------------------------------------- +for var in CONFIG_APPLICATION_NAME_LC CONFIG_CHANGE_LOG_UNCOMPRESSED CONFIG_CHANGE_LOG_COMPRESSED CONFIG_PACKAGE_MAINTAINER +do + if [ -z "${!var}" ] + then + echo "ERROR $var is unset or blank" >&2 + exit 1 + fi +done + +echo "Parsing ${CONFIG_CHANGE_LOG_UNCOMPRESSED}" + +# +# The rest of this script creates a compressed changelog in a Debian-friendly format +# +# Our change log (CHANGES.markdown) uses markdown format, with the following raw structure: +# ## v1.2.3 +# +# Optional one-line description of the release. 
+# +# ### New Features +# +# * Blah blah blah +# * etc +# +# ### Bug Fixes +# +# * Blah blah blah +# * etc +# +# ### Incompatibilities +# +# None +# +# ### Release Timestamp +# Sun, 06 Feb 2022 12:02:58 +0100 +# +# However, per https://www.debian.org/doc/debian-policy/ch-source.html#debian-changelog-debian-changelog, Debian change +# logs need to be in the following format: +# package (version) distribution(s); urgency=urgency +# [optional blank line(s), stripped] +# * change details +# more change details +# [blank line(s), included in output of dpkg-parsechangelog] +# * even more change details +# [optional blank line(s), stripped] +# -- maintainer name [two spaces] date +# +# We are being a bit fast-and-loose in hard-coding the same maintainer name for each release, but I don't thing it's a +# huge issue. +# +# Note that, to keep us on our toes, Debian change log lines are not supposed to be more than 80 characters long. This +# is non-trivial, but the ghastly bit of awk below gets us most of the way there. +# +cat "${CONFIG_CHANGE_LOG_UNCOMPRESSED}" | + # Skip over the introductory headings and paragraphs of CHANGES.markdown until we get to the first version line + sed -n '/^## v/,$p' | + # We want to change the release timestamp to maintainer + timestamp, but we don't want to create too long a line + # before we do the fold command below, so use "÷÷maintainer÷÷" as a placeholder for + # " -- ${CONFIG_PACKAGE_MAINTAINER} " + sed -z "s/\\n### Release Timestamp\\n\\([^\\n]*\\)\\n/\\n÷÷maintainer÷÷\\1\\n/g" | + # Join continued lines in bullet lists + sed -z "s/\\n / /g" | + # Change the version to package (version) etc. 
Stick a '÷' on the front of the line to protect it from + # modification below + sed "s/^## v\\(.*\\)$/÷${CONFIG_APPLICATION_NAME_LC} (\\1-1) unstable\; urgency=low/" | + # Change bullets to sub-bullets + sed "s/^\\* / - /" | + # Change headings to bullets + sed "s/^### / * /" | + # Change any lines that don't start with space OR a ÷ character to be bullets + sed "s/^\\([^ ÷]\\)/ * \\1/" | + # Split any long lines. Make the width less than 80 so we've got a margin go insert spaces at the start of + # bullet continuation lines. + fold -s --width=72 | + # With a lot of help from awk, reindent the lines that were split off from a long bullet line so that they align + # with that previous line. + awk "BEGIN { inBullet=0 } + { + if (!inBullet) { + inBullet=match(\$0, \"^( +)[^ ] \", spaces); + print; + } else { + bulletContinues=match(\$0, \"^[^ ]\"); + if (!bulletContinues) { + inBullet=match(\$0, \"^( +)[^ ] \", spaces); + print; + } else { + print spaces[1] \" \" \$0; + } + } + }" | + # Fix the "÷÷maintainer÷÷" placeholders + sed "s/÷÷maintainer÷÷/ -- ${CONFIG_PACKAGE_MAINTAINER} /" | + # Remove the protective "÷" from the start of any other lines + sed "s/^÷//" | + gzip --best -n --to-stdout > "${CONFIG_CHANGE_LOG_COMPRESSED}" + +echo "Wrote to ${CONFIG_CHANGE_LOG_COMPRESSED}" +exit 0 diff --git a/packaging/linux/control.in b/packaging/linux/control.in new file mode 100644 index 000000000..8366480c2 --- /dev/null +++ b/packaging/linux/control.in @@ -0,0 +1,124 @@ +# +# packaging/linux/control.in is part of Brewtarget, and is copyright the following authors 2023: +# • Matt Young +# +# Brewtarget is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later +# version. 
+# +# Brewtarget is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied +# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more +# details. +# +# You should have received a copy of the GNU General Public License along with this program. If not, see +# . +# + +# +# See comments in meson.build for how this file gets processed into mbuild/control. Then see comments in the build tool +# script (bt) for how we strip out comments, join "folded" lines and output to +# mbuild/packaging/linux/[projectName]-[versionNumber]_amd64/DEBIAN/control. +# +# See https://www.debian.org/doc/debian-policy/ch-controlfields.html for the format of a Debian package control file +# +# NB: The lack of blank lines below is deliberate! A control file consists of one or more "stanzas" of fields. The +# stanzas are separated by empty lines. Some control files allow only one stanza; others allow several (eg one for a +# source package and another for the binary packages generated from that source). To keep things simple, we only ship +# the binaries in the deb package, because the source code is easily available by other routes. So we only want one +# stanza. So, no blank lines. (I'm very much hoping that comments are OK inside a stanza.) +# +# See https://www.debian.org/doc/debian-policy/ch-controlfields.html#binary-package-control-files-debian-control for the +# fields in the stanza of a binary package control file. +# +# +# Package (Mandatory) : name of the binary package +# Package names (both source and binary) must consist only of lower case letters (a-z), digits (0-9), plus (+) and +# minus (-) signs, and periods (.). They must be at least two characters long and must start with an alphanumeric +# character. 
+# +Package: @CONFIG_APPLICATION_NAME_LC@ +# +# Source (Optional) : source package name +# We don't specify this as we don't ship the source as a deb package +# +# Version (Mandatory) : version number of a package. The format is: [epoch:]upstream_version[-debian_revision]. +# +Version: @CONFIG_VERSION_STRING@-1 +# +# Section (Recommended) : application area into which the package has been classified +# See https://packages.debian.org/unstable/ for a list of all the sections. TLDR is that misc is the closest fit for +# us. +# +Section: misc +# +# Priority (Recommended) : Represents how important it is that the user have the package installed +# Since not all Linux users brew beer, optional seems pretty reasonable here, especially as it is "the default +# priority for the majority of the [Debian] archive" +# +Priority: optional +# +# Architecture (Mandatory) : in this context it's "a unique single word identifying a Debian machine architecture" +# Fortunately we don't have to worry about catering to every possibility (which you can see eg by running +# `dpkg-architecture -L` on the command line on Ubuntu. +# +Architecture: amd64 +# +# Essential (Optional) : We don't need this. It's only for packages that aren't supposed to be removeable +# +# Depends, Recommends, Suggests, Enhances, Pre-Depends : Dependencies on, conflicts with, other packages +# If we were doing everything the true Debian way, including shipping a source package and its makefile (yikes!) then +# there are various tools such as `dh_makeshlibs` and `dh_shlibdeps` that help us generate the right dependencies. +# All we would have to put here is 'Depends: ${shlibs:Depends}' or some such. However, if we only want to ship a +# binary and not maintain a separate build with its own makefile for the source code, then those tools won't help and +# we need to maintain things manually here. Fortunately our list of dependencies is not horrendous. 
+# +# Note that you can see the version of a package libfoobar by running the following command from the shell: +# apt-cache show foobar | grep Version +# +# Note too that we need either libqt5gui5 or libqt5gui5-gles, not both (which is not possible as they conflict). +# +# Normally, this field is (surprisingly) not allowed to be "folded" (ie split across multiple lines). However, we do +# our own folding in the bt build script, so the backslash line continuations are OK here! +# +Depends: \ + libc6 (>= 2.34 ), \ + libgcc-s1 (>= 3.3 ), \ + libqt5core5a (>= 5.9.5), \ + libqt5gui5 (>= 5.9.5) | \ + libqt5gui5-gles (>= 5.9.5), \ + libqt5multimedia5 (>= 5.9.5), \ + libqt5network5 (>= 5.9.5), \ + libqt5printsupport5 (>= 5.9.5), \ + libqt5sql5 (>= 5.9.5), \ + libqt5widgets5 (>= 5.9.5), \ + libstdc++6 (>= 11 ), \ + libxalan-c112 (>= 1.12 ), \ + libxerces-c3.2 (>= 3.2 ) +# +# Installed-Size (Optional) : an estimate of the total amount of disk space required to install the named package +# The disk space is given as the integer value of the estimated installed size in bytes, divided by 1024 and rounded +# up. .:TODO:. At some point we should implement this, ideally by having the build system calculate the value +# +#Installed-Size: 17758 +# +# Maintainer (Mandatory) : The package maintainer’s name and email address. +# The name must come first, then the email address inside angle brackets <> (in RFC822 format). If the maintainer’s +# name contains a full stop then the whole field will not work directly as an email address due to a misfeature in +# the syntax specified in RFC822; a program using this field as an address must check for this and correct the +# problem if necessary (for example by putting the name in round brackets and moving it to the end, and bringing the +# email address forward). 
+# +Maintainer: @CONFIG_PACKAGE_MAINTAINER@ +# +# Description (Mandatory) : a description of the binary package, consisting of two parts, the synopsis or the short +# description, and the long description +# +Description: GUI beer brewing software + @CONFIG_APPLICATION_NAME_UC@ is a calculator for brewing beer. It is a Qt-based program which + allows you to create recipes from a database of ingredients. It calculates + all the important parameters, helps you with mash temperatures, and just + makes the process of recipe formulation much easier. +# +# Homepage (Optional) +# +Homepage: @CONFIG_HOMEPAGE_URL@ diff --git a/packaging/linux/rpm.spec.in b/packaging/linux/rpm.spec.in new file mode 100644 index 000000000..160a15e6b --- /dev/null +++ b/packaging/linux/rpm.spec.in @@ -0,0 +1,102 @@ +# +# packaging/linux/rpm.spec.in is part of Brewtarget, and is copyright the following authors 2023: +# • Matt Young +# +# Brewtarget is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Brewtarget is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied +# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more +# details. +# +# You should have received a copy of the GNU General Public License along with this program. If not, see +# . +# + +# +# See comments in meson.build for how this file gets processed into mbuild/packaging/linux/rpm.spec. Then see comments +# +# See https://rpm-software-management.github.io/rpm/manual/spec.html for format of an RPM spec file +# +# From the command line, you can use `rpm -qi` to query a lot of this info for an existing RPM package (without having +# to install it) +# + +# Proper name of the package. 
Must not include whitespace or any numeric operators (‘<’, ‘>’,’=’) but may include a +# hyphen ‘-‘ +Name : @CONFIG_APPLICATION_NAME_LC@ + +# Version +Version : @CONFIG_VERSION_STRING@ + +# Package release: used for distinguishing between different builds of the same software version. +Release : 1 + +# Short (< 70 characters) summary of the package license +License : GPL-3.0-or-later + +# Optional, short (< 70 characters) group of the package. +Group : Applications/Productivity + +# Short (< 70 characters) summary of the package. +Summary : GUI beer brewing software + +# URL supplying further information about the package, typically upstream website. +URL : @CONFIG_HOMEPAGE_URL@ +Vendor : @CONFIG_ORGANIZATION_NAME@ + +# Specifies the architecture which the resulting binary package will run on. Typically this is a CPU architecture. +BuildArch : x86_64 + +# +# Dependencies +# +# Format is similar to Dependencies in Debian (.deb) package control file, but (a) without brackets around version +# numbers and (b) '-' and '.' are sometimes replaced by '_' in package names. +# +# You can search online for rpm packages at, eg, http://download.opensuse.org/tumbleweed/repo/oss/x86_64/ +# +# As with .deb package control file, we do our own line folding in the bt build script, so the backslash line +# continuations are OK here! +# +Requires : \ + libgcc_s1 >= 3.3 , \ + libqt5core5 >= 5.9.5, \ + libqt5gui5 >= 5.9.5, \ + libqt5multimedia5 >= 5.9.5, \ + libqt5network5 >= 5.9.5, \ + libqt5printsupport5 >= 5.9.5, \ + libqt5sql5 >= 5.9.5, \ + libqt5widgets5 >= 5.9.5, \ + libstdc++6 >= 11 , \ + libxalan-c112 >= 1.12 , \ + libxerces-c3_2 >= 3.2 + +# Description is done in a different way, perhaps because it's a multi-line field +%description +@CONFIG_APPLICATION_NAME_UC@ is a calculator for brewing beer. It is a Qt-based program which +allows you to create recipes from a database of ingredients. 
It calculates +all the important parameters, helps you with mash temperatures, and just +makes the process of recipe formulation much easier. + +# The files in the package +# These are specified by where they will be installed, hence the absolute paths but we can use glob patterns based on +# what's in the build tree +%files +/usr/bin/* +/usr/share/applications/* +/usr/share/brewtarget/* +/usr/share/doc/brewtarget/* +/usr/share/icons/hicolor/scalable/apps/* +/usr/share/man/man1/* + +# +# Change log is a required section +# By default, you are expected to have the full change log right here in this spec file. (I think there might be a way +# to pull in the change log data from a separate file, but I didn't yet figure it out.) So, for now at least, we get +# the `bt` build tool script to append the changelog data after this file is processed. +# +# *** NB: THIS MEANS %changelog MUST BE THE LAST ENTRY IN THE FILE. DO NOT ADD ANY LINES AFTER IT. *** +# +%changelog diff --git a/packaging/rpmlintFilters.toml b/packaging/linux/rpmlintFilters.toml similarity index 100% rename from packaging/rpmlintFilters.toml rename to packaging/linux/rpmlintFilters.toml diff --git a/packaging/windows/NsisInstallerScript.nsi.in b/packaging/windows/NsisInstallerScript.nsi.in new file mode 100644 index 000000000..93dd61e6b --- /dev/null +++ b/packaging/windows/NsisInstallerScript.nsi.in @@ -0,0 +1,564 @@ +# +# packaging/windows/NsisInstallerScript.nsi.in is part of Brewtarget, and is copyright the following authors 2023: +# • Matt Young +# +# Brewtarget is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Brewtarget is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied +# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more +# details. +# +# You should have received a copy of the GNU General Public License along with this program. If not, see +# . +# + +# +# See comments in meson.build for how this file gets processed into mbuild/packaging/NsisInstallerScript.nsi +# This latter is then what we pass in to NSIS (aka nullsoft scriptable install system -- see +# https://nsis.sourceforge.io/) to tell it how to make the Windows installer. +# +# Note that, despite what it says at https://nsis.sourceforge.io/Simple_tutorials, I could not get trivial "Hello World" +# versions of a .nsi script to work properly. The generation of the installer would only appear to work and we'll then +# get a cryptic "cannot execute binary file: Exec format error" message when trying to run it. So, we started with one +# of the longer examples. +# + +#======================================================================================================================= +#================================================ Pre-Include Settings ================================================= +#======================================================================================================================= +# Without this appearing in the script, we'll get a '7998: ANSI targets are deprecated' error. If it appears too late +# in the script, eg after `!include MultiUser.nsh` below, we'll get a 'Can't change target charset after data already +# got compressed or header already changed!' error +Unicode True + +# Set the compression algorithm used to compress files/data in the installer. Options are zlib, bzip2 and lzma. This +# command can only be used outside of sections and functions and before any data is compressed. "It is recommended to +# use it on the very top of the script to avoid compilation errors." +SetCompressor lzma + +# Specifies the requested execution level of the installer. 
In particular, this helps determine whether the installer +# can install the software for all users or only for the current user. Possible values are: none, user, highest, & +# admin. "Installers that need not install anything into system folders or write to the local machine registry (HKLM) +# should specify user execution level." +# +# TBD: I think we need admin level, or at least we won't go far wrong by requesting it, but we could revisit this in +# future if need be. +RequestExecutionLevel admin + +#======================================================================================================================= +#====================================================== Includes ======================================================= +#======================================================================================================================= + +# Use the latest version of the "Modern User Interface" -- see +# https://nsis.sourceforge.io/Docs/Modern%20UI%202/Readme.html +!include "MUI2.nsh" + +# Allows us to detect the version of Windows on which we are running -- see +# https://nsis.sourceforge.io/Get_Windows_version +!include "WinVer.nsh" + +# Logic Lib adds some "familiar" flow control and logic to NSI Scripts, eg if, else, while loops, for loops and similar. +# Also known as the NSIS Logic Library. See https://nsis.sourceforge.io/LogicLib which mentions that it is "appallingly +# non-documented, but certainly handy". Sigh. 
+!include "LogicLib.nsh" + +# Allows us to detect whether we're running on 32-bit or 64-bit Windows +!include "x64.nsh" + +# Defines and macros for section control +!include "Sections.nsh" + +# File Functions Header +!include "FileFunc.nsh" + +# Installer configuration for multi-user Windows environments +# See https://nsis.sourceforge.io/Docs/MultiUser/Readme.html for more info +# `MULTIUSER_EXECUTIONLEVEL Highest` is for "Mixed-mode installer that can both be installed per-machine or per-user" +!define MULTIUSER_EXECUTIONLEVEL Highest +!include MultiUser.nsh + +#======================================================================================================================= +#================================================= Injected variables ================================================== +#======================================================================================================================= +# +# Paths - from Meson +# +# Most of the time, because the MSYS2 environments makes things work more like Linux, we can use forward slashes in file +# system paths on the Windows build and everything works. However, inside the NSIS scripts, this is not universally the +# case. In some circumstances forward slashes work and in others they don't. (In particular, I think it's a problem to +# have a mixture of forward and back slashes in a single path. But this is a common need where we're combining an NSIS +# built-in variable such as $INSTDIR or $PROGRAMFILES with something we've injected from Meson.) To keep life simple we +# convert all forward slashes to back slashes in any file system path that we inject from Meson. 
+# +# This means that, instead of, eg, using "@CONFIG_LICENSE_TEXT_PATH@" directly, we use the !searchreplace variant of +# !define to create a compile-time constant holding a modified version of the injected string: +# !searchreplace INJECTED_LICENSE_TEXT_PATH "@CONFIG_LICENSE_TEXT_PATH@" "\" "/" +# Then when we need to use the injected string, we refer to "${INJECTED_LICENSE_TEXT_PATH}" +# +# Note too that NSIS distinguishes between compile-time defines and run-time variables. +# +!searchreplace INJECTED_INSTALLER_APPLICATION_ICON_PATH "@CONFIG_INSTALLER_APPLICATION_ICON_PATH@" "/" "\" +!searchreplace INJECTED_LICENSE_TEXT_PATH "@CONFIG_LICENSE_TEXT_PATH@" "/" "\" +# +# Paths from the bt (build tool) Python script +# +# Per the comment in the bt script, some paths are not easily exportable from Meson. We work them out in the bt script +# and pass them in to NSIS as command-line defines. We still do the forward slash - backslash substitutions here in the +# NSIS script because (a) it's consistent to do them all in one place and (b) the escaping is easier (because you don't +# need any!) +# +# For some reason, NSIS doesn't like quotes around the inputs here so, I think we have to hope they don't have any +# spaces in. +# +!searchreplace INJECTED_PACKAGING_BIN_DIR ${BT_PACKAGING_BIN_DIR} "/" "\" +!searchreplace INJECTED_PACKAGING_DATA_DIR ${BT_PACKAGING_DATA_DIR} "/" "\" +!searchreplace INJECTED_PACKAGING_DOC_DIR ${BT_PACKAGING_DOC_DIR} "/" "\" + +# +# Other variables injected from Meson +# +# Similarly, although we could use other injected variables directly, we don't to avoid another gotcha. When Meson +# is processing the file to do @BLAH@ substitutions, if it sees a backslash followed by an @, then it will think you're +# escaping the first @ symbol, so, eg "C:\Blah\@CONFIG_APPLICATION_NAME_UC@" will not get converted to +# "C:\Blah\Brewtarget" or "C:\Blah\Brewken". 
Instead, we take the injected variable into an NSIS compile-time constant +# (aka a 'define') via: +# !define INJECTED_APPLICATION_NAME_UC "@CONFIG_APPLICATION_NAME_UC@" +# and then we can write "C:\Blah\${INJECTED_APPLICATION_NAME_UC}" and the right substitutions will happen. (The +# alternative, of adding an extra slash, eg "C:\Blah\\@CONFIG_APPLICATION_NAME_UC@", would work but seems a bit less +# robust. +# +!define INJECTED_APPLICATION_NAME_UC "@CONFIG_APPLICATION_NAME_UC@" +!define INJECTED_APPLICATION_NAME_LC "@CONFIG_APPLICATION_NAME_LC@" +!define INJECTED_EXECUTABLE_NAME "@CONFIG_EXECUTABLE_NAME@" +!define INJECTED_VERSION_STRING "@CONFIG_VERSION_STRING@" +!define INJECTED_DESCRIPTION_STRING "@CONFIG_DESCRIPTION_STRING@" +!define INJECTED_COPYRIGHT_STRING "@CONFIG_COPYRIGHT_STRING@" +!define INJECTED_ORGANIZATION_NAME "@CONFIG_ORGANIZATION_NAME@" +!define INJECTED_HOMEPAGE_URL "@CONFIG_HOMEPAGE_URL@" + +#======================================================================================================================= +#==================================================== Our Constants ==================================================== +#======================================================================================================================= +# Some things get used in multiple places and it's convenient to have a single define for consistency + +# +# There are two schools of thought about whether we should include the version number in the application name. The +# advantage of doing it is that it makes super clear which version is installed. The disadvantage is that it makes +# upgrades not so easy. +# +!define APPLICATION_DISPLAY_NAME "${INJECTED_APPLICATION_NAME_UC} ${INJECTED_VERSION_STRING}" +!define APPLICATION_FOLDER_NAME "${INJECTED_APPLICATION_NAME_UC}-${INJECTED_VERSION_STRING}" + +# +# In some places, eg VIProductVersion, we'll get an error if the version is not in X.X.X.X format. Our version strings +# are X.X.X format. 
If we were a Windows-only product, we'd probably define the version as +# ${PRODUCT_MAJOR}.${PRODUCT_MINOR}.${PRODUCT_TIMESTAMP}.${PRODUCT_BUILD}. But if we did this, we'd either break things +# on other platforms or have to have different version numbers for different platforms. So we don't. Instead, for +# Windows, we just add a '.0' on the end and call it done +# +!define PRODUCT_VERSION "${INJECTED_VERSION_STRING}.0" + +# +# In theory, the installer can have a separate version number from the program it's installing. We don't need that +# level of sophistication, so we just give it the same version number as the program. +# +!define INSTALLER_VERSION "${PRODUCT_VERSION}" + +#======================================================================================================================= +#======================================================= Macros ======================================================== +#======================================================================================================================= +# See https://nsis.sourceforge.io/Macro_vs_Function for the differences between a function and a macro in NSIS +# +# We define our macros before our functions because some of our functions use macros. +# +#----------------------------------------------------------------------------------------------------------------------- +# VerifyUserIsAdmin +# +# We currently use this during install and uninstall, following the model at +# https://nsis.sourceforge.io/A_simple_installer_with_start_menu_shortcut_and_uninstaller +#----------------------------------------------------------------------------------------------------------------------- +!macro VerifyUserIsAdmin +UserInfo::GetAccountType +pop $0 +${If} $0 != "admin" + messageBox mb_iconstop "Administrator rights required!" 
+ setErrorLevel 740 ;ERROR_ELEVATION_REQUIRED + quit +${EndIf} +!macroend + +#======================================================================================================================= +#====================================================== Functions ====================================================== +#======================================================================================================================= +# +# Functions are relatively primitive in NSIS. Amongst the things to be aware of are: +# +# - Parameters have to be passed on the stack (so the order the function retrieves them is the opposite of that in +# which the caller supplied them). +# +# - Functions do not have their own scope for variables. You either declare variables with names that you hope are +# globally unique, or you use the "register variables" ($0, $1, $2, $3, $4, $5, $6, $7, $8, $9, $R0, $R1, $R2, $R3, +# $R4, $R5, $R6, $R7, $R8, $R9) which do not have to be declared and "are usually used in shared functions or +# macros". Of course, if one function calls another then you might be in trouble with both functions using the +# same register variables, so "it's recommended [to] use the stack to save and restore their original values". +# Yes, that's the same global stack that you're using to pass parameters in to functions. +# + +#----------------------------------------------------------------------------------------------------------------------- +# .onInit +# +# Per https://nsis.sourceforge.io/Reference/.onInit, this is a special callback function that is invoked "when the +# installer is nearly finished initializing. If the '.onInit' function calls Abort, the installer will quit instantly". 
+#----------------------------------------------------------------------------------------------------------------------- +function .onInit + setShellVarContext all + !insertmacro VerifyUserIsAdmin +functionEnd + +#----------------------------------------------------------------------------------------------------------------------- +# un.onInit +# +# Per https://nsis.sourceforge.io/Reference/un.onInit, this is a special callback function that is invoked "when the +# uninstaller is nearly finished initializing. If the 'un.onInit' function calls Abort, the uninstaller will quit +# instantly. Note that this function can verify and/or modify $INSTDIR if necessary". +#----------------------------------------------------------------------------------------------------------------------- +function un.onInit + SetShellVarContext all + + # Verify the uninstaller - last chance to back out + MessageBox MB_OKCANCEL "Permanently remove ${APPLICATION_DISPLAY_NAME}?" IDOK next + Abort + next: + !insertmacro VerifyUserIsAdmin +functionEnd + +#======================================================================================================================= +#=================================================== Global Settings =================================================== +#======================================================================================================================= + +# Name of the installer, usually the same as the product name. We put the version number in here too so that people can +# be clear that they're installing the version they want. 
+Name "${APPLICATION_DISPLAY_NAME}" + +# Name of the installer executable to create +OutFile "${APPLICATION_DISPLAY_NAME} Installer.exe" + +# +# Default installation directory +# +# See https://nsis.sourceforge.io/Reference/InstallDir +# +# Note that omitting a trailing backslash means that, even if the user chooses a different installation location, the +# last folder in this path (the one called "${APPLICATION_FOLDER_NAME}") will be appended to that location. In other +# words, it ensures we always install inside a folder named after our application. This makes uninstall a lot easier +# because it's safe to remove "$INSTDIR" because it should only contain stuff we installed. +# +# TODO: Per https://nsis.sourceforge.io/Docs/Chapter4.html#varconstant both $PROGRAMFILES and $PROGRAMFILES32 point to +# the 32-bit program folder. If we were installing a 64-bit application, this would need to be replaced by +# $PROGRAMFILES64 +# +InstallDir "$PROGRAMFILES\${APPLICATION_FOLDER_NAME}" + +# +# Remembered installation directory +# +# See https://nsis.sourceforge.io/Reference/InstallDirRegKey +# +# If the given Windows registry setting is found it is used to override the default installation directory set with +# InstallDir above. AIUI this means that, if the software was installed before, we can "remember" that location and +# propose it to the user as the default location. This means when the user re-installs or installs a new version of the +# app it will overwrite/upgrade the existing install. 
+# +# Windows Registry settings are grouped into "hives", which have abbreviations as listed at +# https://nsis.sourceforge.io/Reference/WriteRegExpandStr: +# HKCR = HKEY_CLASSES_ROOT +# HKLM = HKEY_LOCAL_MACHINE +# HKCU = HKEY_CURRENT_USER +# HKU = HKEY_USERS +# HKCC = HKEY_CURRENT_CONFIG +# HKDD = HKEY_DYN_DATA +# HKPD = HKEY_PERFORMANCE_DATA +# SHCTX = SHELL_CONTEXT <-- This is an NSIS pseudo registry root key that will evaluate to HKLM or HKCU depending +# on whether SetShellVarContext is set to all or current (the default) +# +# .:TBD:. For the moment, I am leaving this commented out as it rather conflicts with using the version number in the +# install folder name. There are pros and cons to both approaches, but I don't think we can have our cake and +# eat it! +# +#InstallDirRegKey SHCTX "Software\${INJECTED_ORGANIZATION_NAME}\${INJECTED_APPLICATION_NAME_UC}" "" + +#======================================================================================================================= +#================================================= Modern UI Settings ================================================== +#======================================================================================================================= +# See https://nsis.sourceforge.io/Docs/Modern%20UI/Readme.html for details on a lot of the settings and options + +# Icon for the installer +!define MUI_ICON "${INJECTED_INSTALLER_APPLICATION_ICON_PATH}" + +# Setting this tells the installer to display an image on the header of the page +!define MUI_HEADERIMAGE + +# Bitmap image to display on the header of installers pages (recommended size: 150x57 pixels) +!define MUI_HEADERIMAGE_BITMAP "${NSISDIR}\Contrib\Graphics\Header\orange.bmp" + +# Bitmap for the Welcome page and the Finish page (recommended size: 164x314 pixels) +!define MUI_WELCOMEFINISHPAGE_BITMAP "${NSISDIR}\Contrib\Graphics\Wizard\orange.bmp" + +# Setting this tells the installer not to automatically jump to the finish page. 
This allows the user to check the +# install log. +!define MUI_FINISHPAGE_NOAUTOCLOSE + +# Setting this tells the installer to show a message box with a warning when the user wants to close the installer. +!define MUI_ABORTWARNING + +# Include WinMessages.nsh to have all of Windows messages defined in your script. +!include "WinMessages.NSH" + +# +# These macros control which pages appear in the installer. Available pages are: +# +# MUI_PAGE_WELCOME +# MUI_PAGE_LICENSE textfile <-- Shows the license (in English because legal reasons) +# MUI_PAGE_COMPONENTS <-- Don't need as we don't really have bits of the program that the user can +# choose whether to install +# MUI_PAGE_DIRECTORY <-- Allows the user to override the default install directory +# MUI_PAGE_STARTMENU pageid variable <-- Don't offer this as it's extra complexity for very small benefit (IMHO) +# MUI_PAGE_INSTFILES <-- Shows progress of the actual install +# MUI_PAGE_FINISH +# +!insertmacro MUI_PAGE_WELCOME +!insertmacro MUI_PAGE_LICENSE "${INJECTED_LICENSE_TEXT_PATH}" +!insertmacro MUI_PAGE_DIRECTORY +!insertmacro MUI_PAGE_INSTFILES +!insertmacro MUI_PAGE_FINISH + +# +# These macros control which pages appear in the uninstaller. It's pretty self-explanatory +# +!insertmacro MUI_UNPAGE_WELCOME +!insertmacro MUI_UNPAGE_CONFIRM +!insertmacro MUI_UNPAGE_INSTFILES +!insertmacro MUI_UNPAGE_FINISH + +#======================================================================================================================= +#=============================================== Installer UI Languages ================================================ +#======================================================================================================================= +# +# Insert the Modern UI language files for the languages we want to include. The first one is the default one. +# These calls obviate the need to call LoadLanguageFile directly, and also set language-related variables such as +# LANG_ENGLISH. 
+# +# Here, we try to list only the languages that we have translations for in the app itself (eg as listed in +# src/OptionDialog.cpp). It's a bit approximate as, eg, NSIS has both "SimpChinese" and "TradChinese", whereas we have +# just "Chinese", but the general idea is not to frustrate the user by presenting the installer in a language that is +# not available in the application they are trying to install. +# +# You can see the complete list of languages supported in NSIS at +# https://sourceforge.net/p/nsis/code/HEAD/tree/NSIS/trunk/Contrib/Language%20files/ +# +# ********************************************************************************************************************* +# * Note that this section needs to go _after_ the MUI_PAGE_* and MUI_UNPAGE_* macro invocations, otherwise we'll get * +# * a bunch of warnings when we run MakeNSIS.exe * +# ********************************************************************************************************************* +# +!insertmacro MUI_LANGUAGE "English" # Default +!insertmacro MUI_LANGUAGE "Basque" +!insertmacro MUI_LANGUAGE "Catalan" +!insertmacro MUI_LANGUAGE "Czech" +!insertmacro MUI_LANGUAGE "Danish" +!insertmacro MUI_LANGUAGE "Dutch" +!insertmacro MUI_LANGUAGE "Estonian" +!insertmacro MUI_LANGUAGE "French" +!insertmacro MUI_LANGUAGE "Galician" +!insertmacro MUI_LANGUAGE "German" +!insertmacro MUI_LANGUAGE "Greek" +!insertmacro MUI_LANGUAGE "Hungarian" +!insertmacro MUI_LANGUAGE "Italian" +!insertmacro MUI_LANGUAGE "Latvian" +!insertmacro MUI_LANGUAGE "Norwegian" +!insertmacro MUI_LANGUAGE "Polish" +!insertmacro MUI_LANGUAGE "Portuguese" +!insertmacro MUI_LANGUAGE "PortugueseBR" +!insertmacro MUI_LANGUAGE "Russian" +!insertmacro MUI_LANGUAGE "Serbian" +!insertmacro MUI_LANGUAGE "SimpChinese" +!insertmacro MUI_LANGUAGE "Spanish" +!insertmacro MUI_LANGUAGE "SpanishInternational" +!insertmacro MUI_LANGUAGE "Swedish" +!insertmacro MUI_LANGUAGE "TradChinese" +!insertmacro MUI_LANGUAGE "Turkish" + 
+#======================================================================================================================= +#==================================================== Version Info ===================================================== +#======================================================================================================================= +# Add the Product Version on top of the Version Tab in the Properties of the file. +# +VIProductVersion "${PRODUCT_VERSION}" + +# VIAddVersionKey adds a field in the Version Tab of the File Properties. This can either be a field provided by the +# system or a user defined field. The following fields are provided by the System: +# +# ProductName +# Comments +# CompanyName +# LegalCopyright +# FileDescription +# FileVersion +# ProductVersion +# InternalName +# LegalTrademarks +# OriginalFilename +# PrivateBuild +# SpecialBuild +# +# The name of these fields are translated on the target system, whereas user defined fields remain untranslated. +# +# ********************************************************************************************************************* +# * Note that this needs to go after the calls to MUI_LANGUAGE, otherwise LANG_ENGLISH won't be set and we'll get an * +# * error saying '"/LANG=${LANG_ENGLISH}" is not a valid language code!' 
* +# ********************************************************************************************************************* +# +VIAddVersionKey /LANG=${LANG_ENGLISH} "ProductName" "${INJECTED_APPLICATION_NAME_UC}" +VIAddVersionKey /LANG=${LANG_ENGLISH} "ProductVersion" "${PRODUCT_VERSION}" +VIAddVersionKey /LANG=${LANG_ENGLISH} "FileDescription" "${INJECTED_DESCRIPTION_STRING}" +VIAddVersionKey /LANG=${LANG_ENGLISH} "FileVersion" "${INSTALLER_VERSION}" +VIAddVersionKey /LANG=${LANG_ENGLISH} "CompanyName" "${INJECTED_ORGANIZATION_NAME}" +VIAddVersionKey /LANG=${LANG_ENGLISH} "LegalCopyright" "${INJECTED_COPYRIGHT_STRING}" + +# +# This is where we tell the installer what files to install where +# +# On Windows, applications typically get installed in an application-specific subdirectory of the relevant program files +# directory (typically "C:\Program Files (x86)" or something similar for 32-bit applications on 64-bit Windows). We +# don't have to know exactly where as NSIS can figure it out for us at run-time via $PROGRAMFILES, $PROGRAMFILES32, +# $PROGRAMFILES64. In fact, we don't even use these variables directly. Instead, we use $INSTDIR, which is a special +# variable holding the installation directory (see https://nsis.sourceforge.io/Reference/$INSTDIR). A sane default value +# is proposed to the user on the MUI_PAGE_DIRECTORY page, but the user can modify it to install the program somewhere +# else. +# +# Inside $INSTDIR, we want a folder named for the app and its version (eg "Brewtarget 3.1.0" or "Brewken 0.1.0"). And +# inside this folder we want: +# +# ├── bin # Directory containing the executable and any shared libraries (DLLs) that we need to ship +# │ # with it. 
This is the directory whose path will be returned by +# │ # QCoreApplication::applicationDirPath() to the application code at runtime +# │ +# ├── data # Directory containing any data files that are not built-in to the executable as resources +# │ +# ├── docs # Directory containing any documentation or read-me files that we want to ship +# │ +# └── Uninstall.exe # The uninstaller generated by NSIS +# +!define ADD_REMOVE_PROGRAMS_REG_KEY "Software\Microsoft\Windows\CurrentVersion\Uninstall\${APPLICATION_FOLDER_NAME}" + +# +# See https://nsis.sourceforge.io/A_simple_installer_with_start_menu_shortcut_and_uninstaller for a good starting point +# for install and uninstall. +# +# We could probably actually do a single recursive copy, but splitting it into three sticks with our general approach +# to packaging and opens the possibility that we might put, eg, docs, somewhere else at a future date. +# +Section "Install" + + SetOutPath "$INSTDIR" + !echo "Using icon ${INJECTED_PACKAGING_BIN_DIR}" + File /oname=logo.ico ${INJECTED_INSTALLER_APPLICATION_ICON_PATH} + + SetOutPath "$INSTDIR\bin" + !echo "Taking executable, DLLs and Qt stuff from ${INJECTED_PACKAGING_BIN_DIR}" + File /r "${INJECTED_PACKAGING_BIN_DIR}\*.*" + + SetOutPath "$INSTDIR\data" + !echo "Data files from ${INJECTED_PACKAGING_DATA_DIR}" + File /r "${INJECTED_PACKAGING_DATA_DIR}\*.*" + + SetOutPath "$INSTDIR\doc" + !echo "Documentation files from ${INJECTED_PACKAGING_DOC_DIR}" + File /r "${INJECTED_PACKAGING_DOC_DIR}\*.*" + + # + # Uninstall info + # + # Per https://nsis.sourceforge.io/Add_uninstall_information_to_Add/Remove_Programs, in order for the app to appear in + # the Windows add/remove program list, we need to set at least a couple of registry keys + # + WriteRegStr SHCTX "${ADD_REMOVE_PROGRAMS_REG_KEY}" "DisplayName" "${APPLICATION_DISPLAY_NAME}" + WriteRegStr SHCTX "${ADD_REMOVE_PROGRAMS_REG_KEY}" "UninstallString" "$\"$INSTDIR\uninstall.exe$\" /$MultiUser.InstallMode" + WriteRegStr SHCTX 
"${ADD_REMOVE_PROGRAMS_REG_KEY}" "QuietUninstallString" "$\"$INSTDIR\uninstall.exe$\" /$MultiUser.InstallMode /S" + WriteRegStr SHCTX "${ADD_REMOVE_PROGRAMS_REG_KEY}" "InstallLocation" "$\"$INSTDIR$\"" + WriteRegStr SHCTX "${ADD_REMOVE_PROGRAMS_REG_KEY}" "DisplayIcon" "$\"$INSTDIR\logo.ico$\"" + WriteRegStr SHCTX "${ADD_REMOVE_PROGRAMS_REG_KEY}" "Publisher" "$\"${INJECTED_ORGANIZATION_NAME}$\"" + WriteRegStr SHCTX "${ADD_REMOVE_PROGRAMS_REG_KEY}" "HelpLink" "$\"${INJECTED_HOMEPAGE_URL}$\"" + WriteRegStr SHCTX "${ADD_REMOVE_PROGRAMS_REG_KEY}" "URLUpdateInfo" "$\"${INJECTED_HOMEPAGE_URL}$\"" + WriteRegStr SHCTX "${ADD_REMOVE_PROGRAMS_REG_KEY}" "URLInfoAbout" "$\"${INJECTED_HOMEPAGE_URL}$\"" + WriteRegStr SHCTX "${ADD_REMOVE_PROGRAMS_REG_KEY}" "DisplayVersion" "$\"${INJECTED_VERSION_STRING}$\"" + # These tell Windows there is no option for modifying or repairing the install + WriteRegDWORD SHCTX "${ADD_REMOVE_PROGRAMS_REG_KEY}" "NoModify" 1 + WriteRegDWORD SHCTX "${ADD_REMOVE_PROGRAMS_REG_KEY}" "NoRepair" 1 + # We don't (yet) pass in major/minor version. If we did, here's where we'd note them in the registry +# WriteRegDWORD SHCTX "${ADD_REMOVE_PROGRAMS_REG_KEY}" "VersionMajor" ${INJECTED_VERSION_MAJOR} +# WriteRegDWORD SHCTX "${ADD_REMOVE_PROGRAMS_REG_KEY}" "VersionMinor" ${INJECTED_VERSION_MINOR} + + # + # Start Menu + # + # $SMPROGRAMS is the start menu programs folder. Per https://nsis.sourceforge.io/Docs/Chapter4.html#varconstant, the + # context of this constant (All Users or Current user) depends on the SetShellVarContext setting. The default is the + # current user. + # + # (I know it's traditional to ask the user whether they want the program added to the Start Menu, but surely >99.99% + # of people either just select the default or actively choose "Yes". The rest can remove the start menu shortcut + # manually if they really want.) 
+ # + createDirectory "$SMPROGRAMS\${INJECTED_ORGANIZATION_NAME}" + createShortCut "$SMPROGRAMS\${INJECTED_ORGANIZATION_NAME}\${APPLICATION_DISPLAY_NAME}.lnk" "$INSTDIR\bin\${INJECTED_EXECUTABLE_NAME}" "" "$INSTDIR\logo.ico" + + # + # Put the estimated size of the program in the registry so that Windows can show on the add/remove programs menu how + # much space uninstalling it will free up + # + ${GetSize} "$INSTDIR" "/S=0K" $0 $1 $2 + IntFmt $0 "0x%08X" $0 + WriteRegDWORD SHCTX "${ADD_REMOVE_PROGRAMS_REG_KEY}" "EstimatedSize" "$0" + + # + # Write out the actual uninstaller + # + WriteUninstaller $INSTDIR\Uninstall.exe + +SectionEnd + + +Section +SectionEnd + +#------------------------------------------------------------------------------- +# Uninstaller Sections +Section "Uninstall" + + # Remove Start Menu short-cut + Delete "$SMPROGRAMS\${INJECTED_ORGANIZATION_NAME}\${APPLICATION_DISPLAY_NAME}.lnk" + # Remove the Start Menu folder, but only if it is empty + RMDir "$SMPROGRAMS\${INJECTED_ORGANIZATION_NAME}" + + # Remove the substance of the install: docs, default data files, executable, DLLs, and icon + RMDir /r "$INSTDIR\doc" + RMDir /r "$INSTDIR\data" + RMDir /r "$INSTDIR\bin" + Delete "$INSTDIR\logo.ico" + + # Note that we do _not_ delete any user data + + # Remove all the uninstall info from the registry + DeleteRegKey SHCTX "${ADD_REMOVE_PROGRAMS_REG_KEY}" + + # Always delete the uninstaller as the all-but-last action + Delete "$INSTDIR\Uninstall.exe" + + # This directory removal will only succeed if the directory is empty (which it should be, but it's best to be + # cautious). 
+ RMDir "$INSTDIR" + +SectionEnd diff --git a/src/Algorithms.cpp b/src/Algorithms.cpp index ccd46ee0e..5e458b03d 100644 --- a/src/Algorithms.cpp +++ b/src/Algorithms.cpp @@ -1,6 +1,6 @@ /* * Algorithms.cpp is part of Brewtarget, and is Copyright the following - * authors 2009-2014 + * authors 2009-2023 * - Eric Tamme * - Matt Young * - Philip Greggory Lee diff --git a/src/AncestorDialog.cpp b/src/AncestorDialog.cpp index 4c0cc09f9..6a36f7771 100644 --- a/src/AncestorDialog.cpp +++ b/src/AncestorDialog.cpp @@ -40,8 +40,7 @@ #include "model/NamedEntity.h" #include "model/Recipe.h" -AncestorDialog::AncestorDialog(QWidget * parent) - : QDialog(parent) { +AncestorDialog::AncestorDialog(QWidget * parent) : QDialog(parent) { setupUi(this); @@ -57,8 +56,11 @@ AncestorDialog::AncestorDialog(QWidget * parent) // just some nice things connect(comboBox_ancestor, SIGNAL(activated(int)), this, SLOT(ancestorSelected(int))); // connect( comboBox_descendant, SIGNAL(activated(int)), this, SLOT(activateButton())); + return; } +AncestorDialog::~AncestorDialog() = default; + bool AncestorDialog::recipeLessThan(Recipe * right, Recipe * left) { if (right->name() == left->name()) { return right->key() < left->key(); @@ -77,6 +79,7 @@ void AncestorDialog::buildAncestorBox() { } } comboBox_ancestor->setCurrentIndex(-1); + return; } void AncestorDialog::buildDescendantBox(Recipe * ignore) { @@ -99,13 +102,12 @@ void AncestorDialog::buildDescendantBox(Recipe * ignore) { } comboBox_descendant->addItem(recipe->name(), recipe->key()); } + return; } void AncestorDialog::connectDescendant() { - Recipe * ancestor, *descendant; - - ancestor = ObjectStoreWrapper::getByIdRaw(comboBox_ancestor->currentData().toInt()); - descendant = ObjectStoreWrapper::getByIdRaw(comboBox_descendant->currentData().toInt()); + Recipe * ancestor = ObjectStoreWrapper::getByIdRaw(comboBox_ancestor->currentData().toInt()); + Recipe * descendant = ObjectStoreWrapper::getByIdRaw(comboBox_descendant->currentData().toInt()); // 
No loops in the inheritance if (! descendant->isMyAncestor(*ancestor)) { @@ -124,6 +126,7 @@ void AncestorDialog::connectDescendant() { // and rebuild the ancestors box comboBox_ancestor->clear(); buildAncestorBox(); + return; } void AncestorDialog::setAncestor(Recipe * anc) { @@ -132,19 +135,22 @@ void AncestorDialog::setAncestor(Recipe * anc) { comboBox_descendant->setEnabled(true); activateButton(); + return; } -void AncestorDialog::ancestorSelected(int ndx) { +void AncestorDialog::ancestorSelected([[maybe_unused]] int ndx) { Recipe * ancestor = ObjectStoreWrapper::getByIdRaw(comboBox_ancestor->currentData().toInt()); comboBox_descendant->setEnabled(true); buildDescendantBox(ancestor); activateButton(); + return; } void AncestorDialog::activateButton() { if (! pushButton_apply->isEnabled()) { pushButton_apply->setEnabled(true); } + return; } diff --git a/src/AncestorDialog.h b/src/AncestorDialog.h index c13d95ee5..1f657cd16 100644 --- a/src/AncestorDialog.h +++ b/src/AncestorDialog.h @@ -1,6 +1,6 @@ /* * AncestorDialog.h is part of Brewtarget, and is Copyright the following - * authors 2016-2021 + * authors 2016-2023 * - Matt Young * - Mik Firestone * @@ -43,7 +43,7 @@ class AncestorDialog : public QDialog, public Ui::ancestorDialog { public: AncestorDialog(QWidget * parent = nullptr); - virtual ~AncestorDialog() {} + virtual ~AncestorDialog(); void setAncestor(Recipe * anc); diff --git a/src/BrewNoteWidget.cpp b/src/BrewNoteWidget.cpp index 142999e5a..4125f1b07 100644 --- a/src/BrewNoteWidget.cpp +++ b/src/BrewNoteWidget.cpp @@ -1,6 +1,6 @@ /* * BrewNoteWidget.cpp is part of Brewtarget, and is Copyright the following - * authors 2009-2021 + * authors 2009-2023 * - Jeff Bailey * - Matt Young * - Mik Firestone diff --git a/src/BtDigitWidget.cpp b/src/BtDigitWidget.cpp index 8dcd036a8..ad171ca0e 100644 --- a/src/BtDigitWidget.cpp +++ b/src/BtDigitWidget.cpp @@ -1,6 +1,6 @@ /* * BtDigitWidget.cpp is part of Brewtarget, and is Copyright the following - * authors 
2009-2021 + * authors 2009-2023 * = Matt Young * - Philip Greggory Lee * diff --git a/src/BtFieldType.cpp b/src/BtFieldType.cpp index 8fd488eb4..b5a538f6e 100644 --- a/src/BtFieldType.cpp +++ b/src/BtFieldType.cpp @@ -1,6 +1,6 @@ /* * BtFieldType.cpp is part of Brewtarget, and is copyright the following - * authors 2022: + * authors 2022-2023 * - Matt Young * * Brewtarget is free software: you can redistribute it and/or modify diff --git a/src/BtFieldType.h b/src/BtFieldType.h index 787056632..b8068b63b 100644 --- a/src/BtFieldType.h +++ b/src/BtFieldType.h @@ -1,6 +1,6 @@ /* * BtFieldType.h is part of Brewtarget, and is copyright the following - * authors 2022: + * authors 2022-2023 * • Matt Young * * Brewtarget is free software: you can redistribute it and/or modify diff --git a/src/BtLabel.cpp b/src/BtLabel.cpp index 4c3acc278..9fd47b8f2 100644 --- a/src/BtLabel.cpp +++ b/src/BtLabel.cpp @@ -1,6 +1,6 @@ /* * BtLabel.cpp is part of Brewtarget, and is Copyright the following - * authors 2009-2021 + * authors 2009-2023 * - Matt Young * - Mik Firestone * - Philip Greggory Lee diff --git a/src/BtTextEdit.cpp b/src/BtTextEdit.cpp index 002f6f4fc..5c0f41cba 100644 --- a/src/BtTextEdit.cpp +++ b/src/BtTextEdit.cpp @@ -22,42 +22,37 @@ #include #include -BtTextEdit::BtTextEdit(QWidget *parent) -{ - wasModified = false; +BtTextEdit::BtTextEdit([[maybe_unused]] QWidget * parent) : wasModified{false} { // We will see if this works... connect(this, &BtTextEdit::textChanged, this, &BtTextEdit::setTextChanged); - + return; } -BtTextEdit::BtTextEdit(const QString &text, QWidget *parent) -{ +BtTextEdit::BtTextEdit(QString const & text, + [[maybe_unused]] QWidget * parent) : wasModified{false} { setPlainText(text); - wasModified = false; - // We will see if this works... connect(this, &BtTextEdit::textChanged, this, &BtTextEdit::setTextChanged); - + return; } // I don't have faith in this. The concept is to call the super and then clear // the modified flag. 
The intent is that this is only done via the code, not // the user (e.g., loads and things) -void BtTextEdit::setPlainText(const QString & text) -{ +void BtTextEdit::setPlainText(const QString & text) { QPlainTextEdit::setPlainText(text); wasModified = false; + return; } -void BtTextEdit::focusOutEvent(QFocusEvent *e) -{ - if ( wasModified ) - { +void BtTextEdit::focusOutEvent([[maybe_unused]] QFocusEvent * e) { + if (wasModified) { wasModified = false; emit textModified(); } + return; } bool BtTextEdit::isModified() { return wasModified; } diff --git a/src/BtTreeFilterProxyModel.cpp b/src/BtTreeFilterProxyModel.cpp index 117c0f6ba..c9510217a 100644 --- a/src/BtTreeFilterProxyModel.cpp +++ b/src/BtTreeFilterProxyModel.cpp @@ -42,9 +42,9 @@ namespace { T * lhs, T * rhs); - template<> bool lessThan(BtTreeModel * model, + template<> bool lessThan([[maybe_unused]] BtTreeModel * model, QModelIndex const & left, - QModelIndex const & right, + [[maybe_unused]] QModelIndex const & right, Recipe * lhs, Recipe * rhs) { // Yog-Sothoth knows the gate @@ -72,9 +72,9 @@ namespace { return lhs->name() < rhs->name(); } - template<> bool lessThan(BtTreeModel * model, + template<> bool lessThan([[maybe_unused]] BtTreeModel * model, QModelIndex const & left, - QModelIndex const & right, + [[maybe_unused]] QModelIndex const & right, Equipment * lhs, Equipment * rhs) { switch (left.column()) { @@ -86,9 +86,9 @@ namespace { return lhs->name() < rhs->name(); } - template<> bool lessThan(BtTreeModel * model, + template<> bool lessThan([[maybe_unused]] BtTreeModel * model, QModelIndex const & left, - QModelIndex const & right, + [[maybe_unused]] QModelIndex const & right, Fermentable * lhs, Fermentable * rhs) { switch (left.column()) { @@ -102,9 +102,9 @@ namespace { return lhs->name() < rhs->name(); } - template<> bool lessThan(BtTreeModel * model, + template<> bool lessThan([[maybe_unused]] BtTreeModel * model, QModelIndex const & left, - QModelIndex const & right, + [[maybe_unused]] 
QModelIndex const & right, Hop * lhs, Hop * rhs) { switch (left.column()) { @@ -118,9 +118,9 @@ namespace { return lhs->name() < rhs->name(); } - template<> bool lessThan(BtTreeModel * model, + template<> bool lessThan([[maybe_unused]] BtTreeModel * model, QModelIndex const & left, - QModelIndex const & right, + [[maybe_unused]] QModelIndex const & right, Misc * lhs, Misc * rhs) { switch (left.column()) { @@ -134,9 +134,9 @@ namespace { return lhs->name() < rhs->name(); } - template<> bool lessThan