diff --git a/changes/1062.feature.rst b/changes/1062.feature.rst new file mode 100644 index 000000000..3a69cb7bb --- /dev/null +++ b/changes/1062.feature.rst @@ -0,0 +1 @@ +A Linux System backend was added, supporting ``.deb`` as a packaging format. diff --git a/changes/1063.feature.rst b/changes/1063.feature.rst new file mode 100644 index 000000000..0cdcb5506 --- /dev/null +++ b/changes/1063.feature.rst @@ -0,0 +1 @@ +Support for `.rpm` packaging was added to the Linux system backend. diff --git a/changes/986.feature.rst b/changes/986.feature.rst new file mode 100644 index 000000000..85e1089fb --- /dev/null +++ b/changes/986.feature.rst @@ -0,0 +1 @@ +When installing application sources and dependencies, any ``__pycache__`` folders are now automatically removed. diff --git a/docs/reference/configuration.rst b/docs/reference/configuration.rst index 8070f2580..a785bac29 100644 --- a/docs/reference/configuration.rst +++ b/docs/reference/configuration.rst @@ -83,7 +83,13 @@ be obtained by running ``briefcase -h``, and inspecting the help for the -------------------------------------------------------------- Configuration options that are specific to a particular output format. For -example, ``macOS`` applications can be generated in ``app`` or ``dmg`` format. +example, macOS applications can be generated in ``app`` or ``dmg`` format. + +This section can contain additional layers. for example, an app targeting the +Linux ``system`` backend can define a ``tool.briefcase.app..linux.system.ubuntu.jammy`` section to provide configurations specific to +Ubuntu 22.04 "Jammy" deployments. See the documentation for each backend for +more details. Project configuration ===================== @@ -278,6 +284,14 @@ to an image to use as the background for the installer. As with ``splash``, the path should *exclude* the extension, and a platform-appropriate extension will be appended when the application is built. 
+``long_description`` +~~~~~~~~~~~~~~~~~~~~ + +A longer description of the purpose of the application. This description can be +multiple paragraphs, if necessary. The long description *must not* be a copy of +the ``description``, or include the ``description`` as the first line of the +``long_description``. + ``requires`` ~~~~~~~~~~~~ @@ -289,6 +303,12 @@ application level, *and* platform level, the final set of requirements will be the *concatenation* of requirements from all levels, starting from least to most specific. +``revision`` +~~~~~~~~~~~~ + +An identifier used to differentiate specific builds of the same version of an +app. Defaults to ``1`` if not provided. + ``splash`` ~~~~~~~~~~ diff --git a/docs/reference/platforms/android.rst b/docs/reference/platforms/android.rst index 7a5e8ff6c..f8e10261a 100644 --- a/docs/reference/platforms/android.rst +++ b/docs/reference/platforms/android.rst @@ -10,21 +10,21 @@ Icon format Android projects use ``.png`` format icons, in round and square variants. An application must provide the icons in the following sizes, for 2 variants: - * ``round``: +* ``round``: - * 48px - * 72px - * 96px - * 144px - * 192px + * 48px + * 72px + * 96px + * 144px + * 192px - * ``square``: +* ``square``: - * 48px - * 72px - * 96px - * 144px - * 192px + * 48px + * 72px + * 96px + * 144px + * 192px Splash Image format =================== @@ -34,27 +34,27 @@ should be a square image with a transparent background. 
It must be specified in a range of sizes and variants, to suit different possible device sizes and device display densities: - * ``normal`` (typical phones; up to 480 density-independent pixels): +* ``normal`` (typical phones; up to 480 density-independent pixels): - * 320px - * 480px (hdpi) - * 640px (xhdpi) - * 1280px (xxxhdpi) + * 320px + * 480px (hdpi) + * 640px (xhdpi) + * 1280px (xxxhdpi) - * ``large`` (large format phones, or phone-tablet "phablet" hybrids; up to - 720 density-indpendent pixels): +* ``large`` (large format phones, or phone-tablet "phablet" hybrids; up to + 720 density-indpendent pixels): - * 480px - * 720px (hdpi) - * 960px (xhdpi) - * 1920px (xxxhdpi) + * 480px + * 720px (hdpi) + * 960px (xhdpi) + * 1920px (xxxhdpi) - * ``xlarge`` (tablets; larger than 720 density-independent pixels) +* ``xlarge`` (tablets; larger than 720 density-independent pixels) - * 720px - * 1080px (hdpi) - * 1440px (xhdpi) - * 2880px (xxxhdpi) + * 720px + * 1080px (hdpi) + * 1440px (xhdpi) + * 2880px (xxxhdpi) Consult `the Android documentation `__ diff --git a/docs/reference/platforms/iOS.rst b/docs/reference/platforms/iOS.rst index 2ea165d1b..c148aa778 100644 --- a/docs/reference/platforms/iOS.rst +++ b/docs/reference/platforms/iOS.rst @@ -10,19 +10,19 @@ Icon format iOS projects use ``.png`` format icons. An application must provide icons of the following sizes: - * 20px - * 29px - * 40px - * 58px - * 60px - * 76px - * 80px - * 87px - * 120px - * 152px - * 167px - * 180px - * 1024px +* 20px +* 29px +* 40px +* 58px +* 60px +* 76px +* 80px +* 87px +* 120px +* 152px +* 167px +* 180px +* 1024px Splash Image format =================== @@ -30,9 +30,9 @@ Splash Image format iOS projects use ``.png`` format splash screen images. 
A splash screen should be a square, transparent image, provided in the following sizes: - * 1024px - * 2048px - * 3072px +* 1024px +* 2048px +* 3072px You can specify a background color for the splash screen using the ``splash_background_color`` configuration setting. diff --git a/docs/reference/platforms/linux/appimage.rst b/docs/reference/platforms/linux/appimage.rst index 12d70eb24..67cdc873a 100644 --- a/docs/reference/platforms/linux/appimage.rst +++ b/docs/reference/platforms/linux/appimage.rst @@ -49,12 +49,12 @@ Icon format AppImages uses ``.png`` format icons. An application must provide icons in the following sizes: - * 16px - * 32px - * 64px - * 128px - * 256px - * 512px +* 16px +* 32px +* 64px +* 128px +* 256px +* 512px Splash Image format =================== @@ -76,8 +76,8 @@ Application configuration ========================= The following options can be added to the -``tool.briefcase.app..linux`` section of your ``pyproject.toml`` -file. +``tool.briefcase.app..linux.appimage`` section of your +``pyproject.toml`` file. ``system_requires`` ~~~~~~~~~~~~~~~~~~~ diff --git a/docs/reference/platforms/linux/flatpak.rst b/docs/reference/platforms/linux/flatpak.rst index 7daba049f..a865c3366 100644 --- a/docs/reference/platforms/linux/flatpak.rst +++ b/docs/reference/platforms/linux/flatpak.rst @@ -44,12 +44,12 @@ Icon format Flatpak uses ``.png`` format icons. An application must provide icons in the following sizes: - * 16px - * 32px - * 64px - * 128px - * 256px - * 512px +* 16px +* 32px +* 64px +* 128px +* 256px +* 512px Splash Image format =================== @@ -60,8 +60,8 @@ Application configuration ========================= The following options can be added to the -``tool.briefcase.app..linux`` section of your ``pyproject.toml`` -file. +``tool.briefcase.app..linux.flatpak`` section of your +``pyproject.toml`` file. 
``flatpak_runtime_repo_alias`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/docs/reference/platforms/linux/index.rst b/docs/reference/platforms/linux/index.rst index 47ceea5a4..aedd3ec20 100644 --- a/docs/reference/platforms/linux/index.rst +++ b/docs/reference/platforms/linux/index.rst @@ -2,13 +2,14 @@ Linux ===== -Briefcase supports packaging Linux apps in `AppImage `__ -and `Flatpak `__ formats. +Briefcase supports packaging Linux apps as native system packages, as an `AppImage +`__, and in `Flatpak `__ format. -The default output format for Linux is :doc:`AppImage <./appimage>`. +The default output format for Linux is :doc:`system packages <./system>`. .. toctree:: :maxdepth: 1 + system appimage flatpak diff --git a/docs/reference/platforms/linux/system.rst b/docs/reference/platforms/linux/system.rst new file mode 100644 index 000000000..f75a56b57 --- /dev/null +++ b/docs/reference/platforms/linux/system.rst @@ -0,0 +1,236 @@ +====================== +Native System Packages +====================== + +All modern Linux distributions have a native format for distributing packages +that are integrated into their overall operating system: + +* `.deb`, used by Debian, Ubuntu, Mint (and others) +* `.rpm`, used by Red Hat, Fedora, CentOS, AlmaLinux (and others) +* `.pkg.tar.zst`, used by Arch Linux and Manjaro Linux + +The Briefcase ``system`` backend provides a way to build your app in these +system package formats. + +.. note:: Not all Linux distributions are currently supported! + + At present, Briefcase only has full support for Debian-based distributions. + It should be possible to build and run an application on other Linux + distributions; however, it won't be possible to package the app for + distribution on those platforms. We intend to add support for other + packaging formats - patches are welcome. + + In addition, Briefcase cannot reliably identify *every* Linux vendor. 
If + your Linux distribution isn't being identified (or isn't being identified + correctly), please `open a ticket + `__ with the contents of your + ``/etc/os-release`` file. + +The packaged app includes a stub binary, so that the app will appear in process +lists using your app's name. It also includes a FreeDesktop registration so the +app will appear in system menus. + +When installed from a Briefcase-produced system package, the app will use the +system Python install, and the standard library provided by the system. However, +the app will be isolated from any packages that have been installed at a system +level. + +As the app uses the system Python, system packages are highly dependent on the +distribution version. It is therefore necessary to build a different system +package for every distribution you want to target. To help simplify this +process, Briefcase uses Docker to provide build environments. Using these Docker +environments, it is possible to build a system package for any target +distribution and version, regardless of the host distribution and version - that +is, you can build a Debian Buster package on an Ubuntu 20.04 machine, or an +Ubuntu 22.04 package on a RHEL8 machine. + +The usage of the system Python also means that system packages are different +from most other Briefcase-packaged apps. On other target platforms (macOS and +Windows apps, Linux AppImage, etc), the version of Python used to run Briefcase +will be the version of Python used by the bundled app. However, when building a +system package, Briefcase will use the operating system's Python3 installation +for system packages, regardless of the host Python version. This means you +will need to perform additional platform testing to ensure that your app is +compatible with that version of Python. + +Icon format +=========== + +Deb packages uses ``.png`` format icons. 
An application must provide icons in +the following sizes: + +* 16px +* 32px +* 64px +* 128px +* 256px +* 512px + +Splash Image format +=================== + +Linux System packages do not support splash screens or installer images. + +Additional files +================ + +The Linux system app template includes a ``LICENSE`` and ``CHANGELOG`` file, +with stub content. When the application is generated from template, Briefcase +will look in the project root folder (i.e., the folder that contains your +``pyproject.toml``) for files with the same name. If these files are found, they +will be copied into your project. You should ensure these files are complete and +correct before publishing your app. + +The Linux system app template also includes an initial draft manfile for your +app. This manfile will be populated with the ``description`` and +``long_description`` of your app. You may wish to add more details on app usage. + +Additional options +================== + +The following options can be provided at the command line when producing +Deb packages. + +``--target`` +~~~~~~~~~~~~ + +A Docker base image identifier for the Linux distribution you want to target. +The identifier will be in the pattern ``:`` (e.g., +``debian:buster`` or ``ubuntu:jammy``). You can also use the version number in +place of the codename (e.g., ``debian:10``, ``ubuntu:22.04``, or ``fedora:37``). +Whichever form you choose, you should be consistent; no normalization of +codename and version is performed, so ``ubuntu:jammy`` and ``ubuntu:22.04`` will +be identified as different versions (even though they the same version). + +You can specify any identifier you want, provided the distribution is still +supported by the vendor, and system Python is Python 3.8 or later. 
+ +The following Linux vendors are known to work as Docker targets: + + * Debian (e.g., ``debian:bullseye`` or ``debian:11``) + * Ubuntu (e.g., ``ubuntu:jammy`` or ``ubuntu:22.04``) + * Fedora (e.g, ``fedora:37``) + * AlmaLinux (e.g., ``almalinux:9``) + * Red Hat Enterprise Linux (e.g., ``redhat/ubi9:9``) + * Arch Linux (e.g., ``archlinux:latest``) + * Manjaro Linux (e.g., ``manjarolinux/base:latest``) + +Application configuration +========================= + +The following options can be added to the +``tool.briefcase.app..linux.system`` section of your ``pyproject.toml`` +file; if defined in this section, the values will apply for *all* Linux +distributions for which you build packages. + +If you need to override these settings for a specific target vendor, or for a +specific distribution version, you can provide increasingly specific sections for +vendor and version information. Each distribution is identified by: + +* Vendor base (e.g., ``debian``, ``rhel``, ``arch``) +* Vendor (e.g, ``debian``, ``ubuntu``, ``rhel``, ``fedora``). The vendor + identifier *may* be the same as the vendor base (e.g, in the case of Debian or + Redhat) +* Codename (e.g., a version number, or ``jammy``). + +For example, a full configuration for ``myapp`` running on Ubuntu 22.04 (jammy) +would consist of the following sectionss: + +* ``tool.briefcase.app.myapp`` providing global configuration options +* ``tool.briefcase.app.myapp.linux`` providing definitions common to *all* Linux + packaging backends +* ``tool.briefcase.app.myapp.linux.system`` providing definitions for all Linux + system package targets +* ``tool.briefcase.app.myapp.linux.system.debian`` providing definitions common + to all Debian-based packaging targets +* ``tool.briefcase.app.myapp.linux.system.ubuntu`` providing definitions common + to all Ubuntu-based packaging targets +* ``tool.briefcase.app.myapp.linux.system.ubuntu.jammy`` providing definitions + specific to for Ubuntu 22.04 (Jammy). 
+ +These configurations will be merged at runtime; any version-specific definitions +will override the generic vendor definitions; any vendor definitions will +override the vendor-base definitions; and any venor-base definitions will +override generic system package definitions. + +``system_requires`` +~~~~~~~~~~~~~~~~~~~ + +A list of operating system packages that must be installed for the system package +build to succeed. If a Docker build is requested, this list will be passed to +the Docker context when building the container for the app build. These entries +should be the format the target Linux distribution will accept. For example, if you're +using a Debian-derived distribution, you might use:: + + system_requires = ["libgirepository1.0-dev", "libcairo2-dev"] + +to make the GTK GI and Cairo operating system development packages available +to your app. However, if you're on a RedHat-derived distribution, you would use:: + + system_requires = ["gobject-introspection-devel", "python3-cairo-devel"] + +If you see errors during ``briefcase build`` of the form:: + + Could not find dependency: libSomething.so.1 + +but the app works under ``briefcase dev``, the problem may be an incomplete +``system_requires`` definition. The ``briefcase build`` process generates +a new environment that is completely isolated from your development +environment, so if your app has any operating system dependencies, they +*must* be listed in your ``system_requires`` definition. + +``system_requires`` are the packages required at *build* time. To specify +*runtime* system requirements, use the ``system_runtime_requires`` setting. + +``system_runtime_requires`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +A list of system packages that your app requires at *runtime*. These will be +closely related to the ``system_requires`` setting, but will likely be +different; most notably, you will probably need ``-dev`` packages at build time, +but non ``-dev`` packages at runtime. 
+ +``system_runtime_requires`` should be specified as system package requirements; +they can optionally include version pins. Briefcase will automatically include +the dependencies needed for Python. For example:: + + system_runtime_requires = ["libgtk-3-0 (>=3.14)", "libwebkit2gtk-4.0-37"] + +will specify that your app needs Python3, a version of libgtk >= 3.14, and any +version of libwebkit2gtk. + +Any problems with installing or running your system package likely indicate an +issue with your ``system_runtime_requires`` definition. + +``system_section`` +~~~~~~~~~~~~~~~~~~ + +When an application is published as a ``.deb`` file, Debian requires that you +specify a "section", describing a classification of the application area. The +template will provide a default section of ``utils``; if you want to override +that default, you can specify a value for ``system_section``. For details on the +allowed values for ``system_section``, refer to the `Debian Policy Manual +`__. + +``dockerfile_extra_content`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Any additional Docker instructions that are required to configure the container +used to build your Python app. For example, any dependencies that cannot be +configured with ``apt-get`` could be installed. ``dockerfile_extra_content`` is +string literal that will be added verbatim to the end of the project Dockerfile. + +Any Dockerfile instructions added by ``dockerfile_extra_content`` will be +executed as the ``brutus`` user, rather than the ``root`` user. 
If you need to +perform container setup operations as ``root``, switch the container's user to +``root``, perform whatever operations are required, then switch back to the +``brutus`` user - e.g.:: + + dockerfile_extra_content = """ + RUN + + USER root + RUN + + USER brutus + """ diff --git a/docs/reference/platforms/macOS/xcode.rst b/docs/reference/platforms/macOS/xcode.rst index 26e8041ee..3c0506562 100644 --- a/docs/reference/platforms/macOS/xcode.rst +++ b/docs/reference/platforms/macOS/xcode.rst @@ -18,13 +18,13 @@ Icon format macOS Xcode projects use ``.png`` format icons. An application must provide icons of the following sizes: - * 16px - * 32px - * 64px - * 128px - * 256px - * 512px - * 1024px +* 16px +* 32px +* 64px +* 128px +* 256px +* 512px +* 1024px Splash Image format =================== diff --git a/docs/reference/platforms/windows/app.rst b/docs/reference/platforms/windows/app.rst index 674a58664..3724b662b 100644 --- a/docs/reference/platforms/windows/app.rst +++ b/docs/reference/platforms/windows/app.rst @@ -9,10 +9,10 @@ Briefcase uses the `WiX Toolset `__ to build an MSI installer for a Windows App. WiX, in turn, requires that .NET Framework 3.5 is enabled. To ensure .NET Framework 3.5 is enabled: - 1. Open the Windows Control Panel - 2. Traverse to Programs -> Programs and Features - 3. Select "Turn Windows features On or Off" - 4. Ensure that ".NET framework 3.5 (includes .NET 2.0 and 3.0)" is selected. +1. Open the Windows Control Panel +2. Traverse to Programs -> Programs and Features +3. Select "Turn Windows features On or Off" +4. Ensure that ".NET framework 3.5 (includes .NET 2.0 and 3.0)" is selected. 
Icon format =========== @@ -20,11 +20,11 @@ Icon format Windows apps installers use multiformat ``.ico`` icons; these icons should contain images in the following sizes: -* 16x16 -* 32x32 -* 48x48 -* 64x64 -* 256x256 +* 16px +* 32px +* 48px +* 64px +* 256px Splash Image format =================== diff --git a/docs/reference/platforms/windows/visualstudio.rst b/docs/reference/platforms/windows/visualstudio.rst index 0d404206b..dc5d59b69 100644 --- a/docs/reference/platforms/windows/visualstudio.rst +++ b/docs/reference/platforms/windows/visualstudio.rst @@ -17,25 +17,25 @@ provided one of the following three things are true: 1. You install Visual Studio in the standard location in your Program Files folder. 2. ``MSBuild.exe`` is on your path. 3. You define the environment variable ``MSBUILD`` that points at the location of - your ``MSBuild.exe`` executable + your ``MSBuild.exe`` executable. When you install Visual Studio, there are many optional components. You should ensure that you have installed the following: - * .NET Desktop Development - - All default packages - * Desktop Development with C++ - - All default packages - - C++/CLI support for v143 build tools +* .NET Desktop Development + - All default packages +* Desktop Development with C++ + - All default packages + - C++/CLI support for v143 build tools Briefcase uses the `WiX Toolset `__ to build an MSI installer for a Windows App. WiX, in turn, requires that .NET Framework 3.5 is enabled. To ensure .NET Framework 3.5 is enabled: - 1. Open the Windows Control Panel - 2. Traverse to Programs -> Programs and Features - 3. Select "Turn Windows features On or Off" - 4. Ensure that ".NET framework 3.5 (includes .NET 2.0 and 3.0)" is selected. +1. Open the Windows Control Panel +2. Traverse to Programs -> Programs and Features +3. Select "Turn Windows features On or Off" +4. Ensure that ".NET framework 3.5 (includes .NET 2.0 and 3.0)" is selected. 
Icon format =========== @@ -43,11 +43,11 @@ Icon format Windows apps installers use multiformat ``.ico`` icons; these icons should contain images in the following sizes: -* 16x16 -* 32x32 -* 48x48 -* 64x64 -* 256x256 +* 16px +* 32px +* 48px +* 64px +* 256px Splash Image format =================== diff --git a/setup.cfg b/setup.cfg index 49011441f..19252d7d3 100644 --- a/setup.cfg +++ b/setup.cfg @@ -125,8 +125,7 @@ briefcase.formats.iOS = briefcase.formats.linux = appimage = briefcase.platforms.linux.appimage flatpak = briefcase.platforms.linux.flatpak - # deb = briefcase.platforms.linux.deb - # rpm = briefcase.platforms.linux.rpm + system = briefcase.platforms.linux.system # snap = briefcase.platforms.linux.snap briefcase.formats.macOS = app = briefcase.platforms.macOS.app diff --git a/setup.py b/setup.py deleted file mode 100644 index c82334553..000000000 --- a/setup.py +++ /dev/null @@ -1,4 +0,0 @@ -#!/usr/bin/env python -from setuptools import setup - -setup() diff --git a/src/briefcase/commands/base.py b/src/briefcase/commands/base.py index bca970f36..ff36b31ec 100644 --- a/src/briefcase/commands/base.py +++ b/src/briefcase/commands/base.py @@ -9,12 +9,16 @@ from abc import ABC, abstractmethod from argparse import RawDescriptionHelpFormatter from pathlib import Path +from typing import Optional from cookiecutter import exceptions as cookiecutter_exceptions from cookiecutter.repository import is_repo_url from platformdirs import PlatformDirs -from briefcase.platforms import get_output_formats, get_platforms +try: + import importlib_metadata +except ImportError: + import importlib.metadata as importlib_metadata try: import tomllib @@ -35,6 +39,7 @@ from briefcase.integrations.base import ToolCache from briefcase.integrations.download import Download from briefcase.integrations.subprocess import Subprocess +from briefcase.platforms import get_output_formats, get_platforms def create_config(klass, config, msg): @@ -436,6 +441,21 @@ def app_module_path(self, app): 
return path + @property + def briefcase_required_python_version(self): + """The major.minor of the minimum Python version required by Briefcase itself. + + This is extracted from packaging metadata. + """ + # Native format is ">=3.8" + return tuple( + int(v) + for v in importlib_metadata.metadata("briefcase")["Requires-Python"] + .split("=")[1] + .strip() + .split(".") + ) + @property def python_version_tag(self): """The major.minor of the Python version in use, as a string. @@ -459,6 +479,51 @@ def verify_tools(self): """ pass + def finalize_app_config(self, app: BaseConfig): + """Finalize the application config. + + Some app configurations (notably, Linux system packages like .deb) have + configurations that are deeper than other platforms, because they need + to include components that are dependent on command-line arguments. They + may also require the existence of system tools to complete + configuration. + + The final app configuration merges those "deep" properties into the app + configuration, and performs any other app-specific platform + configuration and verification that is required as a result of + command-line arguments. + + :param app: The app configuration to finalize. + """ + pass + + def finalize(self, app: Optional[BaseConfig] = None): + """Finalize Briefcase configuration. + + This will: + + 1. Ensure that the host has been verified + 2. Ensure that the platform tools have been verified + 3. Ensure that app configurations have been finalized. + + App finalization will only occur once per invocation. + + :param app: If provided, the specific app configuration + to finalize. By default, all apps will be finalized. 
+ """ + self.verify_host() + self.verify_tools() + + if app is None: + for app in self.apps.values(): + if hasattr(app, "__draft__"): + self.finalize_app_config(app) + delattr(app, "__draft__") + else: + if hasattr(app, "__draft__"): + self.finalize_app_config(app) + delattr(app, "__draft__") + def verify_app_tools(self, app: BaseConfig): """Verify that tools needed to run the command for this app exist.""" pass diff --git a/src/briefcase/commands/build.py b/src/briefcase/commands/build.py index d6213342c..03b8787d4 100644 --- a/src/briefcase/commands/build.py +++ b/src/briefcase/commands/build.py @@ -102,9 +102,9 @@ def __call__( "Cannot specify both --update-resources and --no-update" ) - # Confirm host compatibility and all required tools are available - self.verify_host() - self.verify_tools() + # Confirm host compatibility, that all required tools are available, + # and that the app configuration is finalized. + self.finalize(app) if app: state = self._build_app( diff --git a/src/briefcase/commands/create.py b/src/briefcase/commands/create.py index 4bfb84de9..88e671f62 100644 --- a/src/briefcase/commands/create.py +++ b/src/briefcase/commands/create.py @@ -228,6 +228,8 @@ def generate_app_template(self, app: BaseConfig): # Properties of the generating environment # The full Python version string, including minor and dev/a/b/c suffixes (e.g., 3.11.0rc2) "python_version": platform.python_version(), + # The Briefcase version + "briefcase_version": briefcase.__version__, # Transformations of explicit properties into useful forms "class_name": app.class_name, "module_name": app.module_name, @@ -708,21 +710,24 @@ def cleanup_app_content(self, app: BaseConfig): except AttributeError: pass - if paths_to_remove: - with self.input.wait_bar("Removing unneeded app bundle content..."): - for glob in paths_to_remove: - # Expand each glob into a full list of files that actually exist - # on the file system. 
- for path in self.bundle_path(app).glob(glob): - relative_path = path.relative_to(self.bundle_path(app)) - if path.is_dir(): - self.logger.info(f"Removing directory {relative_path}") - self.tools.shutil.rmtree(path) - else: - self.logger.info(f"Removing {relative_path}") - path.unlink() - else: - self.logger.info("No app content clean up required.") + # Remove __pycache__ folders. These folders might contain stale PYC + # artefacts, or encode file paths that reflect the original source + # location. The stub binaries all disable PYC generation, to avoid + # corrupting any app bundle signatures. + paths_to_remove.append("**/__pycache__") + + with self.input.wait_bar("Removing unneeded app bundle content..."): + for glob in paths_to_remove: + # Expand each glob into a full list of files that actually exist + # on the file system. + for path in self.bundle_path(app).glob(glob): + relative_path = path.relative_to(self.bundle_path(app)) + if path.is_dir(): + self.logger.info(f"Removing directory {relative_path}") + self.tools.shutil.rmtree(path) + else: + self.logger.info(f"Removing {relative_path}") + path.unlink() def create_app(self, app: BaseConfig, test_mode: bool = False, **options): """Create an application bundle. @@ -788,9 +793,9 @@ def verify_app_tools(self, app: BaseConfig): NativeAppContext.verify(tools=self.tools, app=app) def __call__(self, app: Optional[BaseConfig] = None, **options): - # Confirm host compatibility and all required tools are available - self.verify_host() - self.verify_tools() + # Confirm host compatibility, that all required tools are available, + # and that the app configuration is finalized. 
+ self.finalize(app) if app: state = self.create_app(app, **options) diff --git a/src/briefcase/commands/dev.py b/src/briefcase/commands/dev.py index 21e4108a8..07d2c1996 100644 --- a/src/briefcase/commands/dev.py +++ b/src/briefcase/commands/dev.py @@ -161,10 +161,6 @@ def __call__( passthrough: Optional[List[str]] = None, **options, ): - # Confirm host compatibility and all required tools are available - self.verify_host() - self.verify_tools() - # Which app should we run? If there's only one defined # in pyproject.toml, then we can use it as a default; # otherwise look for a -a/--app option. @@ -182,6 +178,9 @@ def __call__( raise BriefcaseCommandError( "Project specifies more than one application; use --app to specify which one to start." ) + # Confirm host compatibility, that all required tools are available, + # and that the app configuration is finalized. + self.finalize(app) self.verify_app_tools(app) diff --git a/src/briefcase/commands/new.py b/src/briefcase/commands/new.py index cfc0a242f..9a35e5cb3 100644 --- a/src/briefcase/commands/new.py +++ b/src/briefcase/commands/new.py @@ -521,9 +521,9 @@ def __call__( template_branch: Optional[str] = None, **options, ): - # Confirm host compatibility and all required tools are available - self.verify_host() - self.verify_tools() + # Confirm host compatibility, and that all required tools are available. + # There are no apps, so finalize() will be a no op on app configurations. 
+ self.finalize() return self.new_app( template=template, template_branch=template_branch, **options diff --git a/src/briefcase/commands/open.py b/src/briefcase/commands/open.py index 6e1062bc8..3b0fe4d3e 100644 --- a/src/briefcase/commands/open.py +++ b/src/briefcase/commands/open.py @@ -39,9 +39,9 @@ def open_app(self, app: BaseConfig, **options): return state def __call__(self, app: Optional[BaseConfig] = None, **options): - # Confirm host compatibility and all required tools are available - self.verify_host() - self.verify_tools() + # Confirm host compatibility, that all required tools are available, + # and that the app configuration is finalized. + self.finalize(app) if app: state = self.open_app(app, **options) diff --git a/src/briefcase/commands/package.py b/src/briefcase/commands/package.py index 6dac26e37..9a5341290 100644 --- a/src/briefcase/commands/package.py +++ b/src/briefcase/commands/package.py @@ -80,6 +80,11 @@ def _package_app( # Annotate the packaging format onto the app app.packaging_format = packaging_format + + # Verify the app tools, which will do final confirmation that we can + # package in the requested format. + self.verify_app_tools(app) + # If the distribution artefact already exists, remove it. if self.distribution_path(app).exists(): self.distribution_path(app).unlink() @@ -87,8 +92,7 @@ def _package_app( # Ensure the dist folder exists. self.dist_path.mkdir(exist_ok=True) - self.verify_app_tools(app) - + # Package the app state = self.package_app(app, **full_options(state, options)) filename = self.distribution_path(app).relative_to(self.base_path) @@ -132,9 +136,9 @@ def add_options(self, parser): def __call__( self, app: Optional[BaseConfig] = None, update: bool = False, **options ): - # Confirm host compatibility and all required tools are available - self.verify_host() - self.verify_tools() + # Confirm host compatibility, that all required tools are available, + # and that the app configuration is finalized. 
+ self.finalize(app) if app: state = self._package_app(app, update=update, **options) diff --git a/src/briefcase/commands/publish.py b/src/briefcase/commands/publish.py index ab3bd8848..0c5d5b2af 100644 --- a/src/briefcase/commands/publish.py +++ b/src/briefcase/commands/publish.py @@ -54,9 +54,9 @@ def _publish_app(self, app: BaseConfig, channel: str, **options): return state def __call__(self, channel=None, **options): - # Confirm host compatibility and all required tools are available - self.verify_host() - self.verify_tools() + # Confirm host compatibility, that all required tools are available, + # and that all app configurations are finalized. + self.finalize() # Check the apps have been built first. for app_name, app in self.apps.items(): diff --git a/src/briefcase/commands/run.py b/src/briefcase/commands/run.py index 6d884c62e..07310c6a3 100644 --- a/src/briefcase/commands/run.py +++ b/src/briefcase/commands/run.py @@ -251,10 +251,6 @@ def __call__( passthrough: Optional[List[str]] = None, **options, ): - # Confirm host compatibility and all required tools are available - self.verify_host() - self.verify_tools() - # Which app should we run? If there's only one defined # in pyproject.toml, then we can use it as a default; # otherwise look for a -a/--app option. @@ -271,6 +267,11 @@ def __call__( raise BriefcaseCommandError( "Project specifies more than one application; use --app to specify which one to start." ) + + # Confirm host compatibility, that all required tools are available, + # and that the app configuration is finalized. 
+ self.finalize(app) + template_file = self.bundle_path(app) binary_file = self.binary_path(app) if ( diff --git a/src/briefcase/commands/update.py b/src/briefcase/commands/update.py index 7bd8c6b40..c8c2da6d8 100644 --- a/src/briefcase/commands/update.py +++ b/src/briefcase/commands/update.py @@ -66,9 +66,9 @@ def __call__( test_mode: bool = False, **options, ): - # Confirm host compatibility and all required tools are available - self.verify_host() - self.verify_tools() + # Confirm host compatibility, that all required tools are available, + # and that the app configuration is finalized. + self.finalize(app) if app: state = self.update_app( diff --git a/src/briefcase/config.py b/src/briefcase/config.py index 5f14fcc65..8adfb4466 100644 --- a/src/briefcase/config.py +++ b/src/briefcase/config.py @@ -349,14 +349,19 @@ def __init__( test_sources=None, test_requires=None, supported=True, + long_description=None, **kwargs, ): super().__init__(**kwargs) + # All app configs are created in unfinalized draft form. + self.__draft__ = True + self.app_name = app_name self.version = version self.bundle = bundle - self.description = description + # Description can only be a single line. Ignore everything else. 
+ self.description = description.split("\n")[0] self.sources = sources self.formal_name = app_name if formal_name is None else formal_name self.url = url @@ -371,6 +376,7 @@ def __init__( self.test_sources = test_sources self.test_requires = test_requires self.supported = supported + self.long_description = long_description if not is_valid_app_name(self.app_name): raise BriefcaseConfigError( diff --git a/src/briefcase/console.py b/src/briefcase/console.py index 97e037130..144a33996 100644 --- a/src/briefcase/console.py +++ b/src/briefcase/console.py @@ -137,6 +137,33 @@ def __init__(self, printer=Printer(), verbosity=1): self.stacktraces = [] # functions to run for additional logging if creating a logfile self.log_file_extras = [] + # The current context for the log + self._context = "" + + @contextmanager + def context(self, context): + """Wrap a collection of output in a logging context. + + A logging context is a prefix on every logging line. It is used when a + set of commands (and output) is being run in a very specific way that + needs to be highlighted, such as running a command in a Docker + container. + + :param context: The name of the context to enter. This *must* be + simple text, with no markup or other special characters. + """ + try: + self.info() + self.info(f"Entering {context} context...") + old_context = self._context + self._context = f"{context}| " + self.info("-" * (72 - len(context))) + yield + finally: + self.info("-" * (72 - len(context))) + self._context = old_context + self.info(f"Leaving {context} context.") + self.info() def _log( self, @@ -161,13 +188,13 @@ def _log( :param style: Rich style to apply to everything printed for message. """ if not message: - # When a message is not provided, do not output anything; + # When a message is not provided, only output the context; # This type of call is just clearing some vertical space. 
- self.print(show=show) + self.print(self._context, show=show) else: if prefix: # insert vertical space before for all messages with a prefix - self.print(show=show) + self.print(self._context, show=show) if not markup: preface, prefix, message = ( escape(text) for text in (preface, prefix, message) @@ -176,7 +203,7 @@ def _log( markup = True for line in message.splitlines(): self.print( - f"{preface}{prefix}{line}", + f"{self._context}{preface}{prefix}{line}", show=show, markup=markup, style=style, diff --git a/src/briefcase/exceptions.py b/src/briefcase/exceptions.py index c17f0c585..1abaab806 100644 --- a/src/briefcase/exceptions.py +++ b/src/briefcase/exceptions.py @@ -202,3 +202,7 @@ def __init__(self): class NoDistributionArtefact(BriefcaseWarning): def __init__(self, msg): super().__init__(error_code=0, msg=msg) + + +class ParseError(Exception): + """Raised by parser functions to signal parsing was unsuccessful.""" diff --git a/src/briefcase/integrations/base.py b/src/briefcase/integrations/base.py index 02f9afca9..e91f8f756 100644 --- a/src/briefcase/integrations/base.py +++ b/src/briefcase/integrations/base.py @@ -39,6 +39,9 @@ class Tool(ABC): class ToolCache(Mapping): + # Useful fixed filesystem locations + ETC_OS_RELEASE: Path = Path("/etc/os-release") + # Briefcase tools android_sdk: AndroidSDK app_context: Subprocess | DockerAppContext diff --git a/src/briefcase/integrations/docker.py b/src/briefcase/integrations/docker.py index bc7b3d3e4..d0abb875b 100644 --- a/src/briefcase/integrations/docker.py +++ b/src/briefcase/integrations/docker.py @@ -177,6 +177,52 @@ def verify(cls, tools: ToolCache): tools.docker = Docker(tools=tools) return tools.docker + def check_output(self, args, image_tag): + """Run a process inside a Docker container, capturing output. + + This is a bare Docker invocation; it's really only useful for running + simple commands on an image, ensuring that the container is destroyed + afterwards. 
In most cases, you'll want to use an app context, rather + than this. + + :param args: The list of arguments to pass to the Docker instance. + :param image_tag: The Docker image to run + """ + # Any exceptions from running the process are *not* caught. + # This ensures that "docker.check_output()" behaves as closely to + # "subprocess.check_output()" as possible. + return self.tools.subprocess.check_output( + [ + "docker", + "run", + "--rm", + image_tag, + ] + + args, + ) + + def prepare(self, image_tag): + """Ensure that the given image exists, and is cached locally. + + This is achieved by trying to run a no-op command (echo) on the image; + if it succeeds, the image exists locally. + + A pull is forced, so you can be certain that the image is up to date. + + :param image_tag: The Docker image to prepare + """ + try: + self.tools.subprocess.run( + ["docker", "run", "--rm", image_tag, "printf", ""], + check=True, + stream_output=False, + ) + except subprocess.CalledProcessError as e: + raise BriefcaseCommandError( + f"Unable to obtain the Docker base image {image_tag}. " + "Is the image name correct?" + ) from e + class DockerAppContext(Tool): def __init__(self, tools: ToolCache, app: AppConfig): @@ -259,36 +305,37 @@ def prepare( prefix=self.app.app_name, ) with self.tools.input.wait_bar("Building Docker image..."): - try: - self.tools.subprocess.run( - [ - "docker", - "build", - "--progress", - "plain", - "--tag", - self.image_tag, - "--file", - dockerfile_path, - "--build-arg", - f"PY_VERSION={self.python_version}", - "--build-arg", - f"SYSTEM_REQUIRES={' '.join(getattr(self.app, 'system_requires', ''))}", - "--build-arg", - f"HOST_UID={self.tools.os.getuid()}", - "--build-arg", - f"HOST_GID={self.tools.os.getgid()}", - Path( - self.app_base_path, - *self.app.sources[0].split("/")[:-1], - ), - ], - check=True, - ) - except subprocess.CalledProcessError as e: - raise BriefcaseCommandError( - f"Error building Docker container image for {self.app.app_name}." 
- ) from e + with self.tools.logger.context("Docker"): + try: + self.tools.subprocess.run( + [ + "docker", + "build", + "--progress", + "plain", + "--tag", + self.image_tag, + "--file", + dockerfile_path, + "--build-arg", + f"PY_VERSION={self.python_version}", + "--build-arg", + f"SYSTEM_REQUIRES={' '.join(getattr(self.app, 'system_requires', ''))}", + "--build-arg", + f"HOST_UID={self.tools.os.getuid()}", + "--build-arg", + f"HOST_GID={self.tools.os.getgid()}", + Path( + self.app_base_path, + *self.app.sources[0].split("/")[:-1], + ), + ], + check=True, + ) + except subprocess.CalledProcessError as e: + raise BriefcaseCommandError( + f"Error building Docker container image for {self.app.app_name}." + ) from e def _dockerize_path(self, arg: str): """Relocate any local path into the equivalent location on the docker @@ -309,7 +356,7 @@ def _dockerize_path(self, arg: str): return arg - def _dockerize_args(self, args, interactive=False, mounts=None, env=None): + def _dockerize_args(self, args, interactive=False, mounts=None, env=None, cwd=None): """Convert arguments and environment into a Docker-compatible form. Convert an argument and environment specification into a form that can be used as arguments to invoke Docker. This involves: @@ -321,6 +368,7 @@ def _dockerize_args(self, args, interactive=False, mounts=None, env=None): :param args: The arguments for the command to be invoked :param env: The environment specification for the command to be executed + :param cwd: The working directory for the command to be executed :returns: A list of arguments that can be used to invoke the command inside a docker container. """ @@ -355,6 +403,10 @@ def _dockerize_args(self, args, interactive=False, mounts=None, env=None): for key, value in env.items(): docker_args.extend(["--env", f"{key}={self._dockerize_path(value)}"]) + # If a working directory has been specified, pass it + if cwd: + docker_args.extend(["--workdir", self._dockerize_path(os.fsdecode(cwd))]) + # ... 
then the image name to create the temporary container with docker_args.append(self.image_tag) @@ -363,32 +415,32 @@ def _dockerize_args(self, args, interactive=False, mounts=None, env=None): return docker_args - def run(self, args, env=None, interactive=False, mounts=None, **kwargs): + def run(self, args, env=None, cwd=None, interactive=False, mounts=None, **kwargs): """Run a process inside a Docker container.""" # Any exceptions from running the process are *not* caught. # This ensures that "docker.run()" behaves as closely to # "subprocess.run()" as possible. - self.tools.logger.info("Entering Docker context...", prefix=self.app.app_name) - if interactive: - kwargs["stream_output"] = False - - self.tools.subprocess.run( - self._dockerize_args( - args, - interactive=interactive, - mounts=mounts, - env=env, - ), - **kwargs, - ) - self.tools.logger.info("Leaving Docker context", prefix=self.app.app_name) - - def check_output(self, args, env=None, mounts=None, **kwargs): + with self.tools.logger.context("Docker"): + if interactive: + kwargs["stream_output"] = False + + self.tools.subprocess.run( + self._dockerize_args( + args, + interactive=interactive, + mounts=mounts, + env=env, + cwd=cwd, + ), + **kwargs, + ) + + def check_output(self, args, env=None, cwd=None, mounts=None, **kwargs): """Run a process inside a Docker container, capturing output.""" # Any exceptions from running the process are *not* caught. # This ensures that "docker.check_output()" behaves as closely to # "subprocess.check_output()" as possible. 
return self.tools.subprocess.check_output( - self._dockerize_args(args, mounts=mounts, env=env), + self._dockerize_args(args, mounts=mounts, env=env, cwd=cwd), **kwargs, ) diff --git a/src/briefcase/integrations/subprocess.py b/src/briefcase/integrations/subprocess.py index d9b6f40a1..f8ec2af6e 100644 --- a/src/briefcase/integrations/subprocess.py +++ b/src/briefcase/integrations/subprocess.py @@ -13,14 +13,10 @@ from briefcase.config import AppConfig from briefcase.console import Log -from briefcase.exceptions import CommandOutputParseError +from briefcase.exceptions import CommandOutputParseError, ParseError from briefcase.integrations.base import Tool, ToolCache -class ParseError(Exception): - """Raised by parser functions to signal parsing was unsuccessful.""" - - class StopStreaming(Exception): """Raised by streaming filters to terminate the stream.""" diff --git a/src/briefcase/platforms/linux/__init__.py b/src/briefcase/platforms/linux/__init__.py index a18498a68..26f4f6047 100644 --- a/src/briefcase/platforms/linux/__init__.py +++ b/src/briefcase/platforms/linux/__init__.py @@ -1,5 +1,208 @@ -DEFAULT_OUTPUT_FORMAT = "appimage" +import ast +import re +import subprocess +import sys +from pathlib import Path +from typing import List + +from briefcase.commands.create import _is_local_requirement +from briefcase.commands.open import OpenCommand +from briefcase.config import AppConfig +from briefcase.exceptions import BriefcaseCommandError, ParseError + +DEFAULT_OUTPUT_FORMAT = "system" + +DEBIAN = "debian" +RHEL = "rhel" +ARCH = "arch" + + +def parse_freedesktop_os_release(content): + """Parse the content of an /etc/os-release file. + + Implementation adapted from Example 5 of + https://www.freedesktop.org/software/systemd/man/os-release.html + + :param content: The text content of the /etc/os-release file. + :returns: A dictionary of key-value pairs, in the same format returned by + `platform.freedesktop_os_release()`. 
+ """ + values = {} + for line_number, line in enumerate(content.split("\n"), start=1): + line = line.rstrip() + if not line or line.startswith("#"): + continue + m = re.match(r"([A-Z][A-Z_0-9]+)=(.*)", line) + if m: + name, val = m.groups() + if val and val[0] in "\"'": + try: + val = ast.literal_eval(val) + except SyntaxError as e: + raise ParseError( + "Failed to parse output of FreeDesktop os-release file; " + f"Line {line_number}: {e}" + ) + values[name] = val + else: + raise ParseError( + "Failed to parse output of FreeDesktop os-release file; " + f"Line {line_number}: {line!r}" + ) + + return values class LinuxMixin: platform = "linux" + + def vendor_details(self, freedesktop_info): + """Normalize the identity of the target Linux vendor, version, and base. + + :param freedesktop_info: The parsed content of the FreeDesktop + /etc/os-release file. This is the same format returned by + `platform.freedesktop_os_release()`. + :returns: A tuple of (vendor, version, vendor_base). + """ + vendor = freedesktop_info["ID"] + try: + codename = freedesktop_info["VERSION_CODENAME"] + if not codename: + # Fedora *has* a VERSION_CODENAME key, but it is empty. + # Treat it as missing. + raise KeyError("VERSION_CODENAME") + except KeyError: + try: + # Arch uses a specific constant in VERSION_ID + if freedesktop_info["VERSION_ID"] == "TEMPLATE_VERSION_ID": + codename = "rolling" + else: + codename = freedesktop_info["VERSION_ID"].split(".")[0] + except KeyError: + # Manjaro doesn't have a VERSION_ID key + codename = "rolling" + + # Process the vendor_base from the vendor. 
+ id_like = freedesktop_info.get("ID_LIKE", "").split() + if vendor == DEBIAN or DEBIAN in id_like or "ubuntu" in id_like: + vendor_base = DEBIAN + elif vendor == RHEL or vendor == "fedora" or RHEL in id_like: + vendor_base = RHEL + elif vendor == ARCH or ARCH in id_like: + vendor_base = ARCH + else: + vendor_base = None + + return vendor, codename, vendor_base + + +class LocalRequirementsMixin: + # A mixin that captures the process of compiling requirements that are specified + # as local file references into sdists, and then installing those requirements + # from the sdist. + + def local_requirements_path(self, app): + return self.bundle_path(app) / "_requirements" + + def _install_app_requirements( + self, + app: AppConfig, + requires: List[str], + app_packages_path: Path, + ): + """Install requirements for the app with pip. + + This method pre-compiles any requirement defined using a local path + reference into an sdist tarball. This will be used when installing under + Docker, as local file references can't be accessed in the Docker + container. + + :param app: The app configuration + :param requires: The list of requirements to install + :param app_packages_path: The full path of the app_packages folder into + which requirements should be installed. + """ + # If we're re-building requirements, purge any pre-existing local + # requirements. 
+        local_requirements_path = self.local_requirements_path(app)
+        if local_requirements_path.exists():
+            self.tools.shutil.rmtree(local_requirements_path)
+        self.tools.os.mkdir(local_requirements_path)
+
+        # Iterate over every requirement, looking for local references
+        for requirement in requires:
+            if _is_local_requirement(requirement):
+                if Path(requirement).is_dir():
+                    # Requirement is a filesystem reference
+                    # Build an sdist for the local requirement
+                    with self.input.wait_bar(f"Building sdist for {requirement}..."):
+                        try:
+                            self.tools.subprocess.check_output(
+                                [
+                                    sys.executable,
+                                    "-m",
+                                    "build",
+                                    "--sdist",
+                                    "--outdir",
+                                    local_requirements_path,
+                                    requirement,
+                                ],
+                            )
+                        except subprocess.CalledProcessError as e:
+                            raise BriefcaseCommandError(
+                                f"Unable to build sdist for {requirement}"
+                            ) from e
+                else:
+                    try:
+                        # Requirement is an existing sdist or wheel file.
+                        self.tools.shutil.copy(requirement, local_requirements_path)
+                    except FileNotFoundError as e:
+                        raise BriefcaseCommandError(
+                            f"Unable to find local requirement {requirement}"
+                        ) from e
+
+        # Continue with the default app requirement handling.
+        return super()._install_app_requirements(
+            app,
+            requires=requires,
+            app_packages_path=app_packages_path,
+        )
+
+    def _pip_requires(self, app: AppConfig, requires: List[str]):
+        """Convert the requirements list to a backend-compatible format.
+
+        Any local file requirements are converted into a reference to the file
+        generated by _install_app_requirements().
+
+        :param app: The app configuration
+        :param requires: The user-specified list of app requirements
+        :returns: The final list of requirement arguments to pass to pip
+        """
+        # Copy all the requirements that are non-local
+        final = [
+            requirement
+            for requirement in super()._pip_requires(app, requires)
+            if not _is_local_requirement(requirement)
+        ]
+
+        # Add in any local packages.
+ # The sort is needed to ensure testing consistency + for filename in sorted(self.local_requirements_path(app).iterdir()): + final.append(filename) + + return final + + +class DockerOpenCommand(OpenCommand): + # A command that redirects Open to an interactive shell in the container + # if Docker is being used. Relies on the final command to provide + # verification that Docker is available, and verify the app context. + + def _open_app(self, app: AppConfig): + # If we're using Docker, open an interactive shell in the container. + # Rely on the default CMD statement in the image's Dockerfile to + # define a default shell. + if self.use_docker: + self.tools[app].app_context.run([], interactive=True) + else: + super()._open_app(app) diff --git a/src/briefcase/platforms/linux/appimage.py b/src/briefcase/platforms/linux/appimage.py index 601222ac0..5f06cf2d2 100644 --- a/src/briefcase/platforms/linux/appimage.py +++ b/src/briefcase/platforms/linux/appimage.py @@ -1,25 +1,25 @@ import os import subprocess -import sys -from pathlib import Path from typing import List from briefcase.commands import ( BuildCommand, CreateCommand, - OpenCommand, PackageCommand, PublishCommand, RunCommand, UpdateCommand, ) -from briefcase.commands.create import _is_local_requirement from briefcase.config import AppConfig from briefcase.exceptions import BriefcaseCommandError, UnsupportedHostError from briefcase.integrations.docker import Docker, DockerAppContext from briefcase.integrations.linuxdeploy import LinuxDeploy from briefcase.integrations.subprocess import NativeAppContext -from briefcase.platforms.linux import LinuxMixin +from briefcase.platforms.linux import ( + DockerOpenCommand, + LinuxMixin, + LocalRequirementsMixin, +) class LinuxAppImagePassiveMixin(LinuxMixin): @@ -38,9 +38,6 @@ def appdir_path(self, app): def project_path(self, app): return self.bundle_path(app) - def local_requirements_path(self, app): - return self.bundle_path(app) / "_requirements" - def 
binary_name(self, app): safe_name = app.formal_name.replace(" ", "_") return f"{safe_name}-{app.version}-{self.tools.host_arch}.AppImage" @@ -122,7 +119,9 @@ def verify_host(self): raise UnsupportedHostError(self.supported_host_os_reason) -class LinuxAppImageCreateCommand(LinuxAppImageMixin, CreateCommand): +class LinuxAppImageCreateCommand( + LinuxAppImageMixin, LocalRequirementsMixin, CreateCommand +): description = "Create and populate a Linux AppImage." def support_package_filename(self, support_revision): @@ -137,111 +136,16 @@ def support_package_url(self, support_revision): + self.support_package_filename(support_revision) ) - def _install_app_requirements( - self, - app: AppConfig, - requires: List[str], - app_packages_path: Path, - ): - """Install requirements for the app with pip. - - This method pre-compiles any requirement defined using a local path - reference into an sdist tarball. This will be used when installing under - Docker, as local file references can't be accessed in the Docker - container. - - :param app: The app configuration - :param requires: The list of requirements to install - :param app_packages_path: The full path of the app_packages folder into - which requirements should be installed. - """ - # If we're re-building requirements, purge any pre-existing local - # requirements. 
- local_requirements_path = self.local_requirements_path(app) - if local_requirements_path.exists(): - self.tools.shutil.rmtree(local_requirements_path) - self.tools.os.mkdir(local_requirements_path) - - # Iterate over every requirements, looking for local references - for requirement in requires: - if _is_local_requirement(requirement): - if Path(requirement).is_dir(): - # Requirement is a filesystem reference - # Build an sdist for the local requirement - with self.input.wait_bar(f"Building sdist for {requirement}..."): - try: - self.tools.subprocess.check_output( - [ - sys.executable, - "-m", - "build", - "--sdist", - "--outdir", - local_requirements_path, - requirement, - ], - ) - except subprocess.CalledProcessError as e: - raise BriefcaseCommandError( - f"Unable to build sdist for {requirement}" - ) from e - else: - try: - # Requirement is an existing sdist or wheel file. - self.tools.shutil.copy(requirement, local_requirements_path) - except FileNotFoundError as e: - raise BriefcaseCommandError( - f"Unable to find local requirement {requirement}" - ) from e - - # Continue with the default app requirement handling. - return super()._install_app_requirements( - app, - requires=requires, - app_packages_path=app_packages_path, - ) - - def _pip_requires(self, app: AppConfig, requires: List[str]): - """Convert the requirements list to an AppImage compatible format. - - Any local file requirements are converted into a reference to the file - generated by _install_app_requirements(). - - :param app: The app configuration - :param requires: The user-specified list of app requirements - :returns: The final list of requirement arguments to pass to pip - """ - # Copy all the requirements that are non-local - final = [ - requirement - for requirement in super()._pip_requires(app, requires) - if not _is_local_requirement(requirement) - ] - - # Add in any local packages. 
- # The sort is needed to ensure testing consistency - for filename in sorted(self.local_requirements_path(app).iterdir()): - final.append(filename) - - return final - class LinuxAppImageUpdateCommand(LinuxAppImageCreateCommand, UpdateCommand): description = "Update an existing Linux AppImage." -class LinuxAppImageOpenCommand(LinuxAppImageMostlyPassiveMixin, OpenCommand): +class LinuxAppImageOpenCommand(LinuxAppImageMostlyPassiveMixin, DockerOpenCommand): description = ( "Open a shell in a Docker container for an existing Linux AppImage project." ) - def _open_app(self, app: AppConfig): - # If we're using Docker, open an interactive shell in the container - if self.use_docker: - self.tools[app].app_context.run(["/bin/bash"], interactive=True) - else: - super()._open_app(app) - class LinuxAppImageBuildCommand(LinuxAppImageMixin, BuildCommand): description = "Build a Linux AppImage." diff --git a/src/briefcase/platforms/linux/deb.py b/src/briefcase/platforms/linux/deb.py deleted file mode 100644 index b3897f42a..000000000 --- a/src/briefcase/platforms/linux/deb.py +++ /dev/null @@ -1 +0,0 @@ -# An implementation would go here! diff --git a/src/briefcase/platforms/linux/rpm.py b/src/briefcase/platforms/linux/rpm.py deleted file mode 100644 index b3897f42a..000000000 --- a/src/briefcase/platforms/linux/rpm.py +++ /dev/null @@ -1 +0,0 @@ -# An implementation would go here! 
diff --git a/src/briefcase/platforms/linux/system.py b/src/briefcase/platforms/linux/system.py new file mode 100644 index 000000000..17eb900e8 --- /dev/null +++ b/src/briefcase/platforms/linux/system.py @@ -0,0 +1,1009 @@ +import gzip +import os +import re +import subprocess +import sys +from pathlib import Path +from typing import List + +from briefcase.commands import ( + BuildCommand, + CreateCommand, + PackageCommand, + PublishCommand, + RunCommand, + UpdateCommand, +) +from briefcase.config import AppConfig +from briefcase.exceptions import BriefcaseCommandError, UnsupportedHostError +from briefcase.integrations.docker import Docker, DockerAppContext +from briefcase.integrations.subprocess import NativeAppContext +from briefcase.platforms.linux import ( + ARCH, + DEBIAN, + RHEL, + DockerOpenCommand, + LinuxMixin, + LocalRequirementsMixin, + parse_freedesktop_os_release, +) + + +class LinuxSystemPassiveMixin(LinuxMixin): + # The Passive mixin honors the Docker options, but doesn't try to verify + # Docker exists. It is used by commands that are "passive" from the + # perspective of the build system (e.g., Run). + output_format = "system" + supported_host_os = {"Darwin", "Linux"} + supported_host_os_reason = ( + "Linux system projects can only be built on Linux, or on macOS using Docker." + ) + + @property + def use_docker(self): + # The system backend doesn't have a literal "--use-docker" option, but + # `use_docker` is a useful flag for shared logic purposes, so evaluate + # what "use docker" means in terms of target_image. 
+ return bool(self.target_image) + + @property + def linux_arch(self): + # Linux uses different architecture identifiers for some platforms + return { + "x86_64": "amd64", + "aarch64": "arm64", + "armv6l": "armhf", + }.get(self.tools.host_arch, self.tools.host_arch) + + def build_path(self, app): + # Override the default build path to use the vendor name, + # rather than "linux" + return self.base_path / "build" / app.app_name / app.target_vendor + + def bundle_path(self, app): + # Override the default bundle path to use the codename, + # rather than "system" + return self.build_path(app) / app.target_codename + + def project_path(self, app): + return self.bundle_path(app) / f"{app.app_name}-{app.version}" + + def binary_path(self, app): + return self.project_path(app) / "usr" / "bin" / app.app_name + + def rpm_tag(self, app): + if app.target_vendor == "fedora": + return f"fc{app.target_codename}" + else: + return f"el{app.target_codename}" + + def distribution_filename(self, app): + if app.packaging_format == "deb": + return ( + f"{app.app_name}_{app.version}-{getattr(app, 'revision', 1)}" + f"~{app.target_vendor}-{app.target_codename}_{self.linux_arch}.deb" + ) + elif app.packaging_format == "rpm": + return ( + f"{app.app_name}-{app.version}-{getattr(app, 'revision', 1)}" + f".{self.rpm_tag(app)}.{self.tools.host_arch}.rpm" + ) + else: + raise BriefcaseCommandError( + "Briefcase doesn't currently know how to build system packages in " + f"{app.packaging_format.upper()} format." 
+ ) + + def distribution_path(self, app): + return self.dist_path / self.distribution_filename(app) + + def add_options(self, parser): + super().add_options(parser) + parser.add_argument( + "--target", + dest="target", + help="Docker base image tag for the distribution to target for the build (e.g., `ubuntu:jammy`)", + required=False, + ) + + def parse_options(self, extra): + """Extract the target_image option.""" + options = super().parse_options(extra) + self.target_image = options.pop("target") + + return options + + def clone_options(self, command): + """Clone the target_image option.""" + super().clone_options(command) + self.target_image = command.target_image + + def target_glibc_version(self, app): + """Determine the glibc version. + + If running in Docker, this is done by interrogating libc.so.6; outside + docker, we can use os.confstr(). + """ + if self.use_docker: + try: + output = self.tools.docker.check_output( + ["ldd", "--version"], + image_tag=app.target_image, + ) + # On Debian/Ubuntu, ldd --version will give you output of the form: + # + # ldd (Ubuntu GLIBC 2.31-0ubuntu9.9) 2.31 + # Copyright (C) 2020 Free Software Foundation, Inc. + # ... + # + # Other platforms produce output of the form: + # + # ldd (GNU libc) 2.36 + # Copyright (C) 2020 Free Software Foundation, Inc. + # ... + # + # Note that the exact text will vary version to version. + # Look for the "2.NN" pattern. + if match := re.search(r"\d\.\d+", output): + target_glibc = match.group(0) + else: + raise BriefcaseCommandError( + "Unable to parse glibc dependency version from version string." + ) + except subprocess.CalledProcessError: + raise BriefcaseCommandError( + "Unable to determine glibc dependency version." + ) + + else: + target_glibc = self.tools.os.confstr("CS_GNU_LIBC_VERSION").split()[1] + + return target_glibc + + def finalize_app_config(self, app: AppConfig): + """Finalize app configuration. 
+
+        Linux system app configurations are deeper than other platforms, because
+        they need to include components that are dependent on the target vendor
+        and codename. Those properties are extracted from command-line options.
+
+        The final app configuration merges the target-specific configuration
+        into the generic "linux.system" app configuration, as well as setting
+        the Python version.
+
+        :param app: The app configuration to finalize.
+        """
+        self.logger.info("Finalizing application configuration...", prefix=app.app_name)
+        if self.use_docker:
+            # Preserve the target image on the command line as the app's target
+            app.target_image = self.target_image
+
+            # Ensure that the Docker base image is available.
+            self.logger.info(f"Checking Docker target image {app.target_image}...")
+            self.tools.docker.prepare(app.target_image)
+
+            # Extract release information from the image.
+            output = self.tools.docker.check_output(
+                ["cat", "/etc/os-release"],
+                image_tag=app.target_image,
+            )
+            freedesktop_info = parse_freedesktop_os_release(output)
+        else:
+            try:
+                if sys.version_info < (3, 10):
+                    # This reproduces the Python 3.10
+                    # platform.freedesktop_os_release() function. Yes, this
+                    # should use a context manager, rather than raw file
+                    # open/close operations. If you can get the context manager
+                    # form of this to pass coverage, you get a shiny penny. For
+                    # some reason, coverage generated on Py3.9, but reported on
+                    # Py3.10+, finds a missing branch from the `with` statement
+                    # to the first line after the `except FileNotFoundError` below.
+                    # Since this is (a) a very simple file I/O sequence, and
+                    # (b) will be removed once we're at a Python3.10 minimum,
+                    # I can live with the Old Skool I/O calls.
+                    f = self.tools.ETC_OS_RELEASE.open(encoding="utf-8")
+                    freedesktop_info = parse_freedesktop_os_release(f.read())
+                    f.close()
+                else:
+                    freedesktop_info = self.tools.platform.freedesktop_os_release()
+
+            except FileNotFoundError:
+                raise BriefcaseCommandError(
+                    "Could not find the /etc/os-release file. "
+                    "Is this a FreeDesktop-compliant Linux distribution?"
+                )
+
+        # Process the FreeDesktop content to give the vendor, codename and vendor base.
+        (
+            app.target_vendor,
+            app.target_codename,
+            app.target_vendor_base,
+        ) = self.vendor_details(freedesktop_info)
+
+        self.logger.info(
+            f"Targeting {app.target_vendor}:{app.target_codename} (Vendor base {app.target_vendor_base})"
+        )
+
+        # Non-docker builds need an app representation of the target image
+        # for templating purposes.
+        if not self.use_docker:
+            app.target_image = f"{app.target_vendor}:{app.target_codename}"
+
+        # Merge target-specific configuration items into the app config. This
+        # means:
+        # * merging app.linux.debian into app, overwriting anything global
+        # * merging app.linux.ubuntu into app, overwriting anything vendor-base
+        #   specific
+        # * merging app.linux.ubuntu.focal into app, overwriting anything vendor
+        #   specific
+        # The vendor base config (e.g., redhat). The vendor base might not
+        # be known, so fall back to an empty vendor config.
+ if app.target_vendor_base: + vendor_base_config = getattr(app, app.target_vendor_base, {}) + else: + vendor_base_config = {} + vendor_config = getattr(app, app.target_vendor, {}) + try: + codename_config = vendor_config[app.target_codename] + except KeyError: + codename_config = {} + + # Copy all the specific configurations to the app config + for config in [ + vendor_base_config, + vendor_config, + codename_config, + ]: + for key, value in config.items(): + setattr(app, key, value) + + with self.input.wait_bar("Determining glibc version..."): + app.glibc_version = self.target_glibc_version(app) + self.logger.info(f"Targeting glibc {app.glibc_version}") + + if self.use_docker: + # If we're running in Docker, we can't know the Python3 version + # before rolling out the template; so we fall back to "3". Later, + # once we have a container in which we can run Python, this will be + # updated to the actual Python version as part of the + # `verify_python` app check. + app.python_version_tag = "3" + else: + # Use the version of Python that was used to run Briefcase. + app.python_version_tag = self.python_version_tag + + self.logger.info(f"Targeting Python{app.python_version_tag}") + + +class LinuxSystemMostlyPassiveMixin(LinuxSystemPassiveMixin): + # The Mostly Passive mixin verifies that Docker exists and can be run, but + # doesn't require that we're actually in a Linux environment. + + def docker_image_tag(self, app): + """The Docker image tag for an app.""" + return f"briefcase/{app.bundle}.{app.app_name.lower()}:{app.target_vendor}-{app.target_codename}" + + def verify_tools(self): + """If we're using Docker, verify that it is available.""" + super().verify_tools() + if self.use_docker: + Docker.verify(tools=self.tools) + + def verify_python(self, app): + """Verify that the version of Python being used to build the app in + Docker is compatible with the version being used to run Briefcase. 
+ + Will raise an exception if the Python version is fundamentally + incompatible (i.e., if Briefcase doesn't support it); any other version + discrepancy will log a warning, but continue. + + Requires that the app tools have been verified. + + As a side effect of verifying Python, the `python_version_tag` will be + updated to reflect the *actual* python version, not just a generic "3". + + :param app: The application being built + """ + output = self.tools[app].app_context.check_output( + [ + f"python{app.python_version_tag}", + "-c", + ( + "import sys; " + "print(f'{sys.version_info.major}.{sys.version_info.minor}')" + ), + ] + ) + # Update the python version tag with the *actual* python version. + app.python_version_tag = output.split("\n")[0] + target_python_version = tuple(int(v) for v in app.python_version_tag.split(".")) + + if target_python_version < self.briefcase_required_python_version: + briefcase_min_version = ".".join( + str(v) for v in self.briefcase_required_python_version + ) + raise BriefcaseCommandError( + f"The system python3 version provided by {app.target_image} " + f"is {app.python_version_tag}; Briefcase requires a " + f"minimum Python3 version of {briefcase_min_version}." + ) + elif target_python_version != ( + self.tools.sys.version_info.major, + self.tools.sys.version_info.minor, + ): + self.logger.warning( + f""" +************************************************************************* +** WARNING: Python version mismatch! ** +************************************************************************* + + The system python3 provided by {app.target_image} is {app.python_version_tag}. + This is not the same as your local system ({self.python_version_tag}). + + Ensure you have tested for Python version compatibility before + releasing this app. 
+ +************************************************************************* +""" + ) + + def verify_system_python(self): + """Verify that the Python being used to run Briefcase is the + default system python. + + Will raise an exception if the system Python isn't an obvious Python3, + or the Briefcase Python isn't the same version as the system Python. + + Requires that the app tools have been verified. + """ + system_python_bin = Path("/usr/bin/python3").resolve() + system_version = system_python_bin.name.split(".") + if system_version[0] != "python3" or len(system_version) == 1: + raise BriefcaseCommandError("Can't determine the system python version") + + if system_version[1] != str(self.tools.sys.version_info.minor): + raise BriefcaseCommandError( + f"The version of Python being used to run Briefcase ({self.python_version_tag}) " + f"is not the system python3 (3.{system_version[1]})." + ) + + def _system_requirement_tools(self, app: AppConfig): + """Utility method returning the packages and tools needed to verify + system requirements. + + :param app: The app being built. + :returns: A triple containing (0) The list of package names that must + be installed at a bare minimum; (1) the arguments for the command + used to verify the existence of a package on a system, and (2) + the command used to install packages. All three values are `None` + if the system cannot be identified. 
+ """ + if app.target_vendor_base == DEBIAN: + base_system_packages = ["python3-dev", "build-essential"] + system_verify = ["dpkg", "-s"] + system_installer = "apt" + elif app.target_vendor_base == RHEL: + base_system_packages = [ + "python3-devel", + "gcc", + "make", + "pkgconf-pkg-config", + ] + system_verify = ["rpm", "-q"] + system_installer = "dnf" + else: + base_system_packages = None + system_verify = None + system_installer = None + + return base_system_packages, system_verify, system_installer + + def verify_system_packages(self, app: AppConfig): + """Verify that the required system packages are installed. + + :param app: The app being built. + """ + ( + base_system_packages, + system_verify, + system_installer, + ) = self._system_requirement_tools(app) + + if system_installer is None: + self.logger.warning( + """ +************************************************************************* +** WARNING: Can't verify system packages ** +************************************************************************* + + Briefcase doesn't know how to verify the installation of system + packages on your Linux distribution. If you have any problems + building this app, ensure that the packages listed in the app's + `system_requires` setting have been installed. + +************************************************************************* +""" + ) + return + + # Run a check for each packages listed in the app's system_requires, + # plus the baseline system packages that are required. + missing = [] + for package in base_system_packages + getattr(app, "system_requires", []): + try: + self.tools.subprocess.check_output(system_verify + [package]) + except subprocess.CalledProcessError: + missing.append(package) + + # If any required packages are missing, raise an error. + if missing: + raise BriefcaseCommandError( + f"""\ +Unable to build {app.app_name} due to missing system dependencies. 
Run: + + sudo {system_installer} install {' '.join(missing)} + +to install the missing dependencies, and re-run Briefcase. +""" + ) + + def verify_app_tools(self, app: AppConfig): + """Verify App environment is prepared and available. + + When Docker is used, create or update a Docker image for the App. + Without Docker, the host machine will be used as the App environment. + + :param app: The application being built + """ + # Verifying the App context is idempotent; but we have some + # additional logic that we only want to run the first time through. + # Check (and store) the pre-verify app tool state. + verify_python = not hasattr(self.tools[app], "app_context") + + if self.use_docker: + DockerAppContext.verify( + tools=self.tools, + app=app, + image_tag=self.docker_image_tag(app), + dockerfile_path=self.bundle_path(app) / "Dockerfile", + app_base_path=self.base_path, + host_bundle_path=self.bundle_path(app), + host_data_path=self.data_path, + python_version=app.python_version_tag, + ) + + # Check the system Python on the target system to see if it is + # compatible with Briefcase. + if verify_python: + self.verify_python(app) + else: + NativeAppContext.verify(tools=self.tools, app=app) + + # Check the system Python on the target system to see if it is + # compatible with Briefcase, and that the required system packages + # are installed. 
+ if verify_python: + self.verify_system_python() + self.verify_system_packages(app) + + # Establish Docker as app context before letting super set subprocess + super().verify_app_tools(app) + + +class LinuxSystemMixin(LinuxSystemMostlyPassiveMixin): + def verify_host(self): + """If we're *not* using Docker, verify that we're actually on Linux.""" + super().verify_host() + if not self.use_docker: + if self.tools.host_os != "Linux": + raise UnsupportedHostError(self.supported_host_os_reason) + + +class LinuxSystemCreateCommand(LinuxSystemMixin, LocalRequirementsMixin, CreateCommand): + description = "Create and populate a Linux system project." + + def output_format_template_context(self, app: AppConfig): + context = super().output_format_template_context(app) + + # Linux system templates use the target codename, rather than + # the format "system" as the leaf of the bundle path + context["format"] = app.target_codename + + # The base template context includes the host Python version; + # override that with an app-specific Python version, allowing + # for the app to be built with the system Python. + context["python_version"] = app.python_version_tag + + # Add the docker base image + context["docker_base_image"] = app.target_image + + # Add the vendor base + context["vendor_base"] = app.target_vendor_base + + return context + + +class LinuxSystemUpdateCommand(LinuxSystemCreateCommand, UpdateCommand): + description = "Update an existing Linux system project." + + +class LinuxSystemOpenCommand(LinuxSystemMostlyPassiveMixin, DockerOpenCommand): + description = ( + "Open a shell in a Docker container for an existing Linux system project." + ) + + +class LinuxSystemBuildCommand(LinuxSystemMixin, BuildCommand): + description = "Build a Linux system project." + + def build_app(self, app: AppConfig, **kwargs): + """Build an application. 
+ + :param app: The application to build + """ + self.logger.info("Building application...", prefix=app.app_name) + + self.logger.info("Build bootstrap binary...") + with self.input.wait_bar("Building bootstrap binary..."): + try: + # Build the bootstrap binary. + self.tools[app].app_context.run( + [ + "make", + "-C", + "bootstrap", + "install", + ], + check=True, + cwd=self.bundle_path(app), + ) + except subprocess.CalledProcessError as e: + raise BriefcaseCommandError( + f"Error building bootstrap binary for {app.app_name}." + ) from e + + # Make the folder for docs + doc_folder = ( + self.bundle_path(app) + / f"{app.app_name}-{app.version}" + / "usr" + / "share" + / "doc" + / app.app_name + ) + doc_folder.mkdir(parents=True, exist_ok=True) + + with self.input.wait_bar("Installing license..."): + license_file = self.base_path / "LICENSE" + if license_file.exists(): + self.tools.shutil.copy(license_file, doc_folder / "copyright") + else: + raise BriefcaseCommandError( + """\ +Your project does not contain a LICENSE file. + +Create a file named `LICENSE` in the same directory as your `pyproject.toml` +with your app's licensing terms. +""" + ) + + with self.input.wait_bar("Installing changelog..."): + changelog = self.base_path / "CHANGELOG" + if changelog.exists(): + with changelog.open() as infile: + outfile = gzip.GzipFile( + doc_folder / "changelog.gz", mode="wb", mtime=0 + ) + outfile.write(infile.read().encode("utf-8")) + outfile.close() + else: + raise BriefcaseCommandError( + """\ +Your project does not contain a CHANGELOG file. + +Create a file named `CHANGELOG` in the same directory as your `pyproject.toml` +with details about the release. 
+""" + ) + + # Make a folder for manpages + man_folder = ( + self.bundle_path(app) + / f"{app.app_name}-{app.version}" + / "usr" + / "share" + / "man" + / "man1" + ) + man_folder.mkdir(parents=True, exist_ok=True) + + with self.input.wait_bar("Installing man page..."): + manpage_source = self.bundle_path(app) / f"{app.app_name}.1" + if manpage_source.exists(): + with manpage_source.open() as infile: + outfile = gzip.GzipFile( + man_folder / f"{app.app_name}.1.gz", mode="wb", mtime=0 + ) + outfile.write(infile.read().encode("utf-8")) + outfile.close() + else: + raise BriefcaseCommandError( + f"Template does not provide a manpage source file `{app.app_name}.1`" + ) + + self.logger.info("Update file permissions...") + with self.input.wait_bar("Updating file permissions..."): + for path in self.project_path(app).glob("**/*"): + old_perms = self.tools.os.stat(path).st_mode & 0o777 + user_perms = old_perms & 0o700 + world_perms = old_perms & 0o007 + + # File permissions like 775 and 664 (where the group and user + # permissions are the same), cause Debian heartburn. So, make + # sure the group and world permissions are the same + new_perms = user_perms | (world_perms << 3) | world_perms + + # If there's been any change in permissions, apply them + if new_perms != old_perms: + self.logger.info( + "Updating file permissions on " + f"{path.relative_to(self.bundle_path(app))} " + f"from {oct(old_perms)[2:]} to {oct(new_perms)[2:]}" + ) + path.chmod(new_perms) + + with self.input.wait_bar("Stripping binary..."): + self.tools.subprocess.check_output(["strip", self.binary_path(app)]) + + +class LinuxSystemRunCommand(LinuxSystemPassiveMixin, RunCommand): + description = "Run a Linux system project." + supported_host_os = {"Linux"} + supported_host_os_reason = "Linux system projects can only be executed on Linux." + + def run_app( + self, app: AppConfig, test_mode: bool, passthrough: List[str], **kwargs + ): + """Start the application. 
+ + :param app: The config object for the app + :param test_mode: Boolean; Is the app running in test mode? + :param passthrough: The list of arguments to pass to the app + """ + # Set up the log stream + kwargs = self._prepare_app_env(app=app, test_mode=test_mode) + + # Start the app in a way that lets us stream the logs + app_popen = self.tools.subprocess.Popen( + [os.fsdecode(self.binary_path(app))] + passthrough, + cwd=self.tools.home_path, + **kwargs, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + bufsize=1, + ) + + # Start streaming logs for the app. + self._stream_app_logs( + app, + popen=app_popen, + test_mode=test_mode, + clean_output=False, + ) + + +def debian_multiline_description(description): + """Generate a Debian multiline description string. + + The long description in a Debian control file must + *not* contain any blank lines, and each line must start with a single space. + Convert a long description into Debian format. + + :param description: A multi-line long description string. + :returns: A string in Debian's multiline format + """ + return "\n ".join(line for line in description.split("\n") if line.strip() != "") + + +class LinuxSystemPackageCommand(LinuxSystemMixin, PackageCommand): + description = "Package a Linux system project." + + @property + def packaging_formats(self): + return ["deb", "rpm", "pkg", "system"] + + def _verify_deb_tools(self): + """Verify that the local environment contains the debian packaging tools.""" + if not Path("/usr/bin/dpkg-deb").exists(): + raise BriefcaseCommandError( + "Can't find the dpkg tools. Try running `sudo apt install dpkg-dev`." + ) + + def _verify_rpm_tools(self): + """Verify that the local environment contains the redhat packaging tools.""" + if not Path("/usr/bin/rpmbuild").exists(): + raise BriefcaseCommandError( + "Can't find the rpm-build tools. Try running `sudo dnf install rpm-build`." 
+ ) + + def verify_app_tools(self, app): + super().verify_app_tools(app) + # If "system" packaging format was selected, determine what that means. + if app.packaging_format == "system": + app.packaging_format = { + DEBIAN: "deb", + RHEL: "rpm", + ARCH: "pkg", + }.get(app.target_vendor_base, None) + + if app.packaging_format is None: + raise BriefcaseCommandError( + "Briefcase doesn't know the system packaging format for " + f"{app.target_vendor}. You may be able to build a package " + "by manually specifying a format with -p/--packaging-format" + ) + + if not self.use_docker: + # Check for the format-specific packaging tools. + if app.packaging_format == "deb": + self._verify_deb_tools() + elif app.packaging_format == "rpm": + self._verify_rpm_tools() + + def package_app(self, app: AppConfig, **kwargs): + if app.packaging_format == "deb": + self._package_deb(app, **kwargs) + elif app.packaging_format == "rpm": + self._package_rpm(app, **kwargs) + else: + raise BriefcaseCommandError( + "Briefcase doesn't currently know how to build system packages in " + f"{app.packaging_format.upper()} format." + ) + + def _package_deb(self, app: AppConfig, **kwargs): + self.logger.info("Building .deb package...", prefix=app.app_name) + + # The long description *must* exist. + if app.long_description is None: + raise BriefcaseCommandError( + "App configuration does not define `long_description`. " + "Debian projects require a long description." + ) + + # Write the Debian metadata control file. + with self.input.wait_bar("Write Debian package control file..."): + if (self.project_path(app) / "DEBIAN").exists(): + self.tools.shutil.rmtree(self.project_path(app) / "DEBIAN") + + (self.project_path(app) / "DEBIAN").mkdir() + + # Add runtime package dependencies. App config has been finalized, + # so this will be the target-specific definition, if one exists. 
+ # libc6 is added because lintian complains without it, even though + # it's a dependency of the thing we *do* care about - python. + system_runtime_requires = ", ".join( + [ + f"libc6 (>={app.glibc_version})", + f"libpython{app.python_version_tag}", + ] + + getattr(app, "system_runtime_requires", []) + ) + + with (self.project_path(app) / "DEBIAN" / "control").open( + "w", encoding="utf-8" + ) as f: + f.write( + "\n".join( + [ + f"Package: { app.app_name }", + f"Version: { app.version }", + f"Architecture: { self.linux_arch }", + f"Maintainer: { app.author } <{ app.author_email }>", + f"Homepage: { app.url }", + f"Description: { app.description }", + f" { debian_multiline_description(app.long_description) }", + f"Depends: { system_runtime_requires }", + f"Section: { getattr(app, 'system_section', 'utils') }", + "Priority: optional\n", + ] + ) + ) + + with self.input.wait_bar("Building Debian package..."): + try: + # Build the dpkg. + self.tools[app].app_context.run( + [ + "dpkg-deb", + "--build", + "--root-owner-group", + f"{app.app_name}-{app.version}", + ], + check=True, + cwd=self.bundle_path(app), + ) + except subprocess.CalledProcessError as e: + raise BriefcaseCommandError( + f"Error while building .deb package for {app.app_name}." + ) from e + + # Move the deb file to its final location + self.tools.shutil.move( + self.bundle_path(app) / f"{app.app_name}-{app.version}.deb", + self.distribution_path(app), + ) + + def _package_rpm(self, app: AppConfig, **kwargs): + self.logger.info("Building .rpm package...", prefix=app.app_name) + + # The long description *must* exist. + if app.long_description is None: + raise BriefcaseCommandError( + "App configuration does not define `long_description`. " + "Red Hat projects require a long description." 
+ ) + + # Generate the rpmbuild layout + rpmbuild_path = self.bundle_path(app) / "rpmbuild" + with self.input.wait_bar("Generating rpmbuild layout..."): + if rpmbuild_path.exists(): + self.tools.shutil.rmtree(rpmbuild_path) + + (rpmbuild_path / "BUILD").mkdir(parents=True) + (rpmbuild_path / "BUILDROOT").mkdir(parents=True) + (rpmbuild_path / "RPMS").mkdir(parents=True) + (rpmbuild_path / "SOURCES").mkdir(parents=True) + (rpmbuild_path / "SRPMS").mkdir(parents=True) + (rpmbuild_path / "SPECS").mkdir(parents=True) + + # Add runtime package dependencies. App config has been finalized, + # so this will be the target-specific definition, if one exists. + system_runtime_requires = [ + "python3", + ] + getattr(app, "system_runtime_requires", []) + + # Write the spec file + with self.input.wait_bar("Write RPM spec file..."): + with (rpmbuild_path / "SPECS" / f"{app.app_name}.spec").open( + "w", encoding="utf-8" + ) as f: + f.write( + "\n".join( + [ + # By default, rpmbuild thinks all .py files are executable, + # and if a .py doesn't have a shebang line, it will + # tell you that it will remove the executable bit - + # even if the executable bit isn't set. + # We disable the processor that does this. + "%global __brp_mangle_shebangs %{nil}", + # rpmbuild tries to strip binaries, which messes with + # binary wheels. Disable these checks. + "%global __brp_strip %{nil}", + "%global __brp_strip_static_archive %{nil}", + "%global __brp_strip_comment_note %{nil}", + # Disable RPATH checking, because check-rpaths can't deal with + # the structure of manylinux wheels + "%global __brp_check_rpaths %{nil}", + # Disable all the auto-detection that tries to magically + # determine requirements from the binaries + f"%global __requires_exclude_from ^%{{_libdir}}/{app.app_name}/.*$", + f"%global __provides_exclude_from ^%{{_libdir}}/{app.app_name}/.*$", + # Disable debug processing. 
+ "%global _enable_debug_package 0", + "%global debug_package %{nil}", + "", + # Base package metadata + f"Name: {app.app_name}", + f"Version: {app.version}", + f"Release: {getattr(app, 'revision', 1)}%{{?dist}}", + f"Summary: {app.description}", + "", + f"License: {getattr(app, 'license', 'Unknown')}", + f"URL: {app.url}", + "Source0: %{name}-%{version}.tar.gz", + "", + ] + + [ + f"Requires: {requirement}" + for requirement in system_runtime_requires + ] + + [ + "", + f"ExclusiveArch: {self.tools.host_arch}", + "", + "%description", + app.long_description, + "", + "%prep", + "%autosetup", + "", + "%build", + "", + "%install", + "cp -r usr %{buildroot}/usr", + ] + ) + ) + + f.write("\n\n%files\n") + # Build the file manifest. Include any file that is found; also include + # any directory that includes an app_name component, as those paths + # will need to be cleaned up afterwards. Files that *aren't* + # in (sub)directories (e.g., /usr/bin/ or + # /usr/share/man/man1/.1.gz) will be included, but paths + # *not* cleaned up, as they're part of more general system structures. + for filename in sorted(self.project_path(app).glob("**/*")): + path = filename.relative_to(self.project_path(app)) + + if filename.is_dir(): + if app.app_name in path.parts: + f.write(f"%dir /{path}\n") + else: + f.write(f"/{path}\n") + + # Add the changelog content to the bottom of the spec file. + f.write("\n%changelog\n") + changelog_source = self.base_path / "CHANGELOG" + if not changelog_source.exists(): + raise BriefcaseCommandError( + """\ +Your project does not contain a CHANGELOG file. + +Create a file named `CHANGELOG` in the same directory as your `pyproject.toml` +with details about the release. 
+""" + ) + with changelog_source.open(encoding="utf-8") as c: + f.write(c.read()) + + with self.input.wait_bar("Building source archive..."): + self.tools.shutil.make_archive( + rpmbuild_path / "SOURCES" / f"{app.app_name}-{app.version}", + format="gztar", + root_dir=self.bundle_path(app), + base_dir=f"{app.app_name}-{app.version}", + ) + + with self.input.wait_bar("Building RPM package..."): + try: + # Build the dpkg. + self.tools[app].app_context.run( + [ + "rpmbuild", + "-bb", + "--define", + f"_topdir {self.bundle_path(app) / 'rpmbuild'}", + f"./rpmbuild/SPECS/{app.app_name}.spec", + ], + check=True, + cwd=self.bundle_path(app), + ) + except subprocess.CalledProcessError as e: + raise BriefcaseCommandError( + f"Error while building .rpm package for {app.app_name}." + ) from e + + # Move the rpm file to its final location + self.tools.shutil.move( + rpmbuild_path + / "RPMS" + / self.tools.host_arch + / self.distribution_filename(app), + self.distribution_path(app), + ) + + +class LinuxSystemPublishCommand(LinuxSystemMixin, PublishCommand): + description = "Publish a Linux system project." 
+ + +# Declare the briefcase command bindings +create = LinuxSystemCreateCommand # noqa +update = LinuxSystemUpdateCommand # noqa +open = LinuxSystemOpenCommand # noqa +build = LinuxSystemBuildCommand # noqa +run = LinuxSystemRunCommand # noqa +package = LinuxSystemPackageCommand # noqa +publish = LinuxSystemPublishCommand # noqa diff --git a/tests/commands/base/conftest.py b/tests/commands/base/conftest.py index 56b631cb3..de9f65e60 100644 --- a/tests/commands/base/conftest.py +++ b/tests/commands/base/conftest.py @@ -19,6 +19,8 @@ def __init__(self, *args, **kwargs): kwargs.setdefault("console", Console()) super().__init__(*args, **kwargs) + self.actions = [] + def add_options(self, parser): # Provide some extra arguments: # * some optional arguments @@ -30,6 +32,18 @@ def add_options(self, parser): def binary_path(self, app): raise NotImplementedError() + def verify_host(self): + super().verify_host() + self.actions.append(("verify-host",)) + + def verify_tools(self): + super().verify_tools() + self.actions.append(("verify-tools",)) + + def finalize_app_config(self, app): + super().finalize_app_config(app=app) + self.actions.append(("finalize-app-config", app.app_name)) + @pytest.fixture def base_command(tmp_path): diff --git a/tests/commands/base/test_finalize.py b/tests/commands/base/test_finalize.py new file mode 100644 index 000000000..e0740702b --- /dev/null +++ b/tests/commands/base/test_finalize.py @@ -0,0 +1,137 @@ +import pytest + +from briefcase.config import AppConfig + +from .conftest import DummyCommand + + +@pytest.fixture +def first_app(): + return AppConfig( + app_name="first", + bundle="com.example", + version="0.0.1", + description="The first simple app", + sources=["src/first"], + ) + + +@pytest.fixture +def second_app(): + return AppConfig( + app_name="second", + bundle="com.example", + version="0.0.2", + description="The second simple app", + sources=["src/second"], + ) + + +@pytest.fixture +def base_command(tmp_path, first_app, second_app): 
+    return DummyCommand(
+        base_path=tmp_path,
+        apps={
+            "first": first_app,
+            "second": second_app,
+        },
+    )
+
+
+def test_finalize_all(base_command, first_app, second_app):
+    "A call to finalize verifies host, tools, and finalizes all app configs"
+    base_command.finalize()
+
+    # The right sequence of things will be done
+    assert base_command.actions == [
+        # Host OS is verified
+        ("verify-host",),
+        # Tools are verified
+        ("verify-tools",),
+        # App config has been finalized
+        ("finalize-app-config", "first"),
+        # App config has been finalized
+        ("finalize-app-config", "second"),
+    ]
+
+    # Apps are no longer in draft mode
+    assert not hasattr(first_app, "__draft__")
+    assert not hasattr(second_app, "__draft__")
+
+
+def test_finalize_single(base_command, first_app, second_app):
+    "A call to finalize with one app verifies host, tools, and finalizes that app's config"
+    base_command.finalize(first_app)
+
+    # The right sequence of things will be done
+    assert base_command.actions == [
+        # Host OS is verified
+        ("verify-host",),
+        # Tools are verified
+        ("verify-tools",),
+        # App config has been finalized
+        ("finalize-app-config", "first"),
+    ]
+
+    # First app is no longer in draft mode; second is
+    assert not hasattr(first_app, "__draft__")
+    assert hasattr(second_app, "__draft__")
+
+
+def test_finalize_all_repeat(base_command, first_app, second_app):
+    "Multiple calls to finalize verify host & tools each time, but finalize app configs only once"
+    # Finalize apps twice. This is an approximation of what happens
+    # when a command chain is executed; create, update, build and run will
+    # all finalize; create will finalize the app configs, each command will
+    # have its own tools verified.
+    base_command.finalize()
+    base_command.finalize()
+
+    # The right sequence of things will be done
+    assert base_command.actions == [
+        # Host OS is verified
+        ("verify-host",),
+        # Tools are verified
+        ("verify-tools",),
+        # App config has been finalized
+        ("finalize-app-config", "first"),
+        # App config has been finalized
+        ("finalize-app-config", "second"),
+        # Host OS is verified again
+        ("verify-host",),
+        # Tools are verified again
+        ("verify-tools",),
+    ]
+
+    # Apps are no longer in draft mode
+    assert not hasattr(first_app, "__draft__")
+    assert not hasattr(second_app, "__draft__")
+
+
+def test_finalize_single_repeat(base_command, first_app, second_app):
+    "Multiple calls to finalize verify host & tools each time, but finalize the app config only once"
+
+    # Finalize app twice. This is an approximation of what happens
+    # when a command chain is executed; create, update, build and run will
+    # all finalize; create will finalize the app config, each command will
+    # have its own tools verified.
+ base_command.finalize(first_app) + base_command.finalize(first_app) + + # The right sequence of things will be done + assert base_command.actions == [ + # Host OS is verified + ("verify-host",), + # Tools are verified + ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), + # Host OS is verified again + ("verify-host",), + # Tools are verified again + ("verify-tools",), + ] + + # First app is no longer in draft mode; second is + assert not hasattr(first_app, "__draft__") + assert hasattr(second_app, "__draft__") diff --git a/tests/commands/base/test_properties.py b/tests/commands/base/test_properties.py index a0b62bcef..4e4951b4b 100644 --- a/tests/commands/base/test_properties.py +++ b/tests/commands/base/test_properties.py @@ -1,6 +1,10 @@ from .conftest import DummyCommand +def test_briefcase_required_python_version(base_command): + assert base_command.briefcase_required_python_version == (3, 8) + + def test_bundle_path(base_command, my_app, tmp_path): bundle_path = base_command.bundle_path(my_app) diff --git a/tests/commands/build/conftest.py b/tests/commands/build/conftest.py index ff593acae..e7b4e1fa8 100644 --- a/tests/commands/build/conftest.py +++ b/tests/commands/build/conftest.py @@ -27,7 +27,7 @@ def __init__(self, *args, **kwargs): self.actions = [] def binary_path(self, app): - return self.bundle_path(app) / f"{app.app_name}.bin" + return self.bundle_path(app) / f"{app.app_name}.dummy.bin" def verify_host(self): super().verify_host() @@ -37,6 +37,10 @@ def verify_tools(self): super().verify_tools() self.actions.append(("verify-tools",)) + def finalize_app_config(self, app): + super().finalize_app_config(app=app) + self.actions.append(("finalize-app-config", app.app_name)) + def verify_app_tools(self, app): super().verify_app_tools(app=app) self.actions.append(("verify-app-tools", app.app_name)) diff --git a/tests/commands/build/test_call.py b/tests/commands/build/test_call.py index 952cd08da..ee7b4836a 100644 --- 
a/tests/commands/build/test_call.py +++ b/tests/commands/build/test_call.py @@ -23,6 +23,8 @@ def test_specific_app(build_command, first_app, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # App tools are verified for app ("verify-app-tools", "first"), # Build the first app; no state @@ -50,6 +52,9 @@ def test_multiple_apps(build_command, first_app, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # App tools are verified for first app ("verify-app-tools", "first"), # Build the first app; no state @@ -81,6 +86,9 @@ def test_non_existent(build_command, first_app_config, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # First App doesn't exist, so it will be created, then built ("create", "first", {"test_mode": False}), # App tools are verified for first app @@ -118,6 +126,9 @@ def test_unbuilt(build_command, first_app_unbuilt, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # App tools are verified for first app ("verify-app-tools", "first"), # First App exists, but hasn't been built; it will be built. 
@@ -149,6 +160,9 @@ def test_update_app(build_command, first_app, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # Update then build the first app ( "update", @@ -204,6 +218,9 @@ def test_update_app_requirements(build_command, first_app, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # Update then build the first app ( "update", @@ -259,6 +276,9 @@ def test_update_app_resources(build_command, first_app, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # Update then build the first app ( "update", @@ -314,6 +334,9 @@ def test_update_non_existent(build_command, first_app_config, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # First App doesn't exist, so it will be created, then built ("create", "first", {"test_mode": False}), # App tools are verified for first app @@ -366,6 +389,9 @@ def test_update_unbuilt(build_command, first_app_unbuilt, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # First App exists, but hasn't been built; it will be updated then built. 
( "update", @@ -421,6 +447,9 @@ def test_build_test(build_command, first_app, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # Update then build the first app ( "update", @@ -477,6 +506,9 @@ def test_build_test_no_update(build_command, first_app, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # No update of the first app # App tools are verified for first app ("verify-app-tools", "first"), @@ -513,6 +545,9 @@ def test_build_test_update_dependencies(build_command, first_app, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # Update then build the first app ( "update", @@ -569,6 +604,9 @@ def test_build_test_update_resources(build_command, first_app, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # Update then build the first app ( "update", @@ -683,6 +721,9 @@ def test_test_app_non_existent(build_command, first_app_config, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # First App doesn't exist, so it will be created, then built ("create", "first", {"test_mode": True}), # App tools are verified for first app @@ -736,6 +777,9 @@ def test_test_app_unbuilt(build_command, first_app_unbuilt, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # First App exists, but hasn't been built; it will be 
updated then built. ( "update", diff --git a/tests/commands/create/conftest.py b/tests/commands/create/conftest.py index c41af55ba..1ae0033fe 100644 --- a/tests/commands/create/conftest.py +++ b/tests/commands/create/conftest.py @@ -21,9 +21,6 @@ class DefaultCreateCommand(CreateCommand): def binary_path(self, app): return NotImplementedError() - def distribution_path(self, app, packaging_format): - return NotImplementedError() - @pytest.fixture def default_create_command(tmp_path): @@ -102,32 +99,36 @@ def verify_tools(self): super().verify_tools() self.actions.append(("verify-tools",)) + def finalize_app_config(self, app): + super().finalize_app_config(app=app) + self.actions.append(("finalize-app-config", app.app_name)) + def verify_app_tools(self, app): super().verify_app_tools(app=app) - self.actions.append(("verify-app-tools", app)) + self.actions.append(("verify-app-tools", app.app_name)) # Override all the body methods of a CreateCommand # with versions that we can use to track actions performed. def generate_app_template(self, app): - self.actions.append(("generate", app)) + self.actions.append(("generate", app.app_name)) # A mock version of template generation. 
create_file(self.bundle_path(app) / "new", "new template!") def install_app_support_package(self, app): - self.actions.append(("support", app)) + self.actions.append(("support", app.app_name)) def install_app_requirements(self, app, test_mode): - self.actions.append(("requirements", app, test_mode)) + self.actions.append(("requirements", app.app_name, test_mode)) def install_app_code(self, app, test_mode): - self.actions.append(("code", app, test_mode)) + self.actions.append(("code", app.app_name, test_mode)) def install_app_resources(self, app): - self.actions.append(("resources", app)) + self.actions.append(("resources", app.app_name)) def cleanup_app_content(self, app): - self.actions.append(("cleanup", app)) + self.actions.append(("cleanup", app.app_name)) @pytest.fixture diff --git a/tests/commands/create/test_call.py b/tests/commands/create/test_call.py index 1fb726563..8e40ec993 100644 --- a/tests/commands/create/test_call.py +++ b/tests/commands/create/test_call.py @@ -30,22 +30,25 @@ def test_create(tracking_create_command, tmp_path): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # Create the first app - ("generate", tracking_create_command.apps["first"]), - ("support", tracking_create_command.apps["first"]), - ("verify-app-tools", tracking_create_command.apps["first"]), - ("code", tracking_create_command.apps["first"], False), - ("requirements", tracking_create_command.apps["first"], False), - ("resources", tracking_create_command.apps["first"]), - ("cleanup", tracking_create_command.apps["first"]), + ("generate", "first"), + ("support", "first"), + ("verify-app-tools", "first"), + ("code", "first", False), + ("requirements", "first", False), + ("resources", "first"), + ("cleanup", "first"), # Create the second app - ("generate", tracking_create_command.apps["second"]), - ("support", tracking_create_command.apps["second"]), - 
("verify-app-tools", tracking_create_command.apps["second"]), - ("code", tracking_create_command.apps["second"], False), - ("requirements", tracking_create_command.apps["second"], False), - ("resources", tracking_create_command.apps["second"]), - ("cleanup", tracking_create_command.apps["second"]), + ("generate", "second"), + ("support", "second"), + ("verify-app-tools", "second"), + ("code", "second", False), + ("requirements", "second", False), + ("resources", "second"), + ("cleanup", "second"), ] # New app content has been created @@ -67,14 +70,16 @@ def test_create_single(tracking_create_command, tmp_path): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # Create the first app - ("generate", tracking_create_command.apps["first"]), - ("support", tracking_create_command.apps["first"]), - ("verify-app-tools", tracking_create_command.apps["first"]), - ("code", tracking_create_command.apps["first"], False), - ("requirements", tracking_create_command.apps["first"], False), - ("resources", tracking_create_command.apps["first"]), - ("cleanup", tracking_create_command.apps["first"]), + ("generate", "first"), + ("support", "first"), + ("verify-app-tools", "first"), + ("code", "first", False), + ("requirements", "first", False), + ("resources", "first"), + ("cleanup", "first"), ] # New app content has been created diff --git a/tests/commands/create/test_cleanup_app_content.py b/tests/commands/create/test_cleanup_app_content.py index ada9ba327..663548479 100644 --- a/tests/commands/create/test_cleanup_app_content.py +++ b/tests/commands/create/test_cleanup_app_content.py @@ -9,6 +9,8 @@ def myapp_unrolled(myapp, support_path, app_packages_path_index): create_file(support_path / "dir1" / "a_file1.txt", "pork") create_file(support_path / "dir1" / "a_file2.doc", "ham") create_file(support_path / "dir1" / "b_file.txt", "eggs") + create_file(support_path / "dir1" / "__pycache__" / "first.pyc", "pyc 
1") + create_file(support_path / "dir1" / "__pycache__" / "second.pyc", "pyc 2") create_file(support_path / "dir2" / "b_file.txt", "spam") create_file(support_path / "other" / "deep" / "b_file.doc", "wigs") create_file(support_path / "other" / "deep" / "other.doc", "wigs") @@ -17,15 +19,16 @@ def myapp_unrolled(myapp, support_path, app_packages_path_index): def test_no_cleanup(create_command, myapp_unrolled, support_path): - """If there are no cleanup directives, bundle content isn't touched.""" + """If there are no cleanup directives, bundle content isn't touched; but __pycache__ is cleaned""" # Cleanup app content create_command.cleanup_app_content(myapp_unrolled) - # Confirm the files are still there + # Confirm the files are still there, except for the pycache assert (support_path / "dir1" / "a_file1.txt").exists() assert (support_path / "dir1" / "a_file2.doc").exists() assert (support_path / "dir1" / "b_file.txt").exists() + assert not (support_path / "dir1" / "__pycache__").exists() assert (support_path / "dir2" / "b_file.txt").exists() assert (support_path / "other" / "deep" / "b_file.doc").exists() @@ -50,11 +53,12 @@ def test_file_cleanup(create_command, myapp_unrolled, support_path): # Cleanup app content create_command.cleanup_app_content(myapp_unrolled) - # Confirm the named file has been removed + # Confirm the named file (plus __pycache__) has been removed assert not (support_path / "dir1" / "a_file1.txt").exists() assert (support_path / "dir1" / "a_file2.doc").exists() assert (support_path / "dir1" / "b_file.txt").exists() assert (support_path / "dir2" / "b_file.txt").exists() + assert not (support_path / "dir1" / "__pycache__").exists() assert (support_path / "other" / "deep" / "b_file.doc").exists() @@ -65,10 +69,12 @@ def test_all_files_in_dir_cleanup(create_command, myapp_unrolled, support_path): # Cleanup app content create_command.cleanup_app_content(myapp_unrolled) - # Confirm the named files have been removed, but the dir still exists + # 
Confirm the named files (and __pycache__) have been removed, + # but the dir still exists assert not (support_path / "dir1" / "a_file1.txt").exists() assert not (support_path / "dir1" / "a_file2.doc").exists() assert not (support_path / "dir1" / "b_file.txt").exists() + assert not (support_path / "dir1" / "__pycache__").exists() assert (support_path / "dir1").exists() assert (support_path / "dir2" / "b_file.txt").exists() assert (support_path / "other" / "deep" / "b_file.doc").exists() @@ -94,10 +100,11 @@ def test_file_glob_cleanup(create_command, myapp_unrolled, support_path): # Cleanup app content create_command.cleanup_app_content(myapp_unrolled) - # Confirm the matching files have been removed + # Confirm the matching files (plus __pycache__) have been removed assert not (support_path / "dir1" / "a_file1.txt").exists() assert (support_path / "dir1" / "a_file2.doc").exists() assert not (support_path / "dir1" / "b_file.txt").exists() + assert not (support_path / "dir1" / "__pycache__").exists() assert (support_path / "dir2" / "b_file.txt").exists() assert (support_path / "other" / "deep" / "b_file.doc").exists() @@ -109,10 +116,11 @@ def test_deep_glob_cleanup(create_command, myapp_unrolled, support_path): # Cleanup app content create_command.cleanup_app_content(myapp_unrolled) - # Confirm the matching files have been removed + # Confirm the matching files (plus __pycache__) have been removed assert (support_path / "dir1" / "a_file1.txt").exists() assert (support_path / "dir1" / "a_file2.doc").exists() assert not (support_path / "dir1" / "b_file.txt").exists() + assert not (support_path / "dir1" / "__pycache__").exists() assert not (support_path / "dir2" / "b_file.txt").exists() assert not (support_path / "other" / "deep" / "b_file.doc").exists() assert (support_path / "other" / "deep" / "other.doc").exists() @@ -131,10 +139,12 @@ def test_template_glob_cleanup(create_command, myapp_unrolled, support_path): # Cleanup app content 
create_command.cleanup_app_content(myapp_unrolled) - # Confirm the files from the app config and template config have been removed + # Confirm the files from the app config and template config have been + # removed, as well as __pycache__ assert not (support_path / "dir1" / "a_file1.txt").exists() assert not (support_path / "dir1" / "a_file2.doc").exists() assert (support_path / "dir1" / "b_file.txt").exists() + assert not (support_path / "dir1" / "__pycache__").exists() assert (support_path / "dir2" / "b_file.txt").exists() assert not (support_path / "other" / "deep" / "b_file.doc").exists() @@ -153,9 +163,11 @@ def test_non_existent_cleanup(create_command, myapp_unrolled, support_path): # Cleanup app content create_command.cleanup_app_content(myapp_unrolled) - # Confirm the single existing file named has been removed + # Confirm the single existing file named has been removed, + # as well as __pycache__ assert not (support_path / "dir1" / "a_file1.txt").exists() assert (support_path / "dir1" / "a_file2.doc").exists() assert (support_path / "dir1" / "b_file.txt").exists() + assert not (support_path / "dir1" / "__pycache__").exists() assert (support_path / "dir2" / "b_file.txt").exists() assert (support_path / "other" / "deep" / "b_file.doc").exists() diff --git a/tests/commands/create/test_create_app.py b/tests/commands/create/test_create_app.py index ff501419d..d24a84cdb 100644 --- a/tests/commands/create/test_create_app.py +++ b/tests/commands/create/test_create_app.py @@ -13,13 +13,13 @@ def test_create_app(tracking_create_command, tmp_path): # The right sequence of things will be done assert tracking_create_command.actions == [ - ("generate", tracking_create_command.apps["first"]), - ("support", tracking_create_command.apps["first"]), - ("verify-app-tools", tracking_create_command.apps["first"]), - ("code", tracking_create_command.apps["first"], False), - ("requirements", tracking_create_command.apps["first"], False), - ("resources", 
tracking_create_command.apps["first"]), - ("cleanup", tracking_create_command.apps["first"]), + ("generate", "first"), + ("support", "first"), + ("verify-app-tools", "first"), + ("code", "first", False), + ("requirements", "first", False), + ("resources", "first"), + ("cleanup", "first"), ] # New app content has been created @@ -48,13 +48,13 @@ def test_create_existing_app_overwrite(tracking_create_command, tmp_path): # The right sequence of things will be done assert tracking_create_command.actions == [ - ("generate", tracking_create_command.apps["first"]), - ("support", tracking_create_command.apps["first"]), - ("verify-app-tools", tracking_create_command.apps["first"]), - ("code", tracking_create_command.apps["first"], False), - ("requirements", tracking_create_command.apps["first"], False), - ("resources", tracking_create_command.apps["first"]), - ("cleanup", tracking_create_command.apps["first"]), + ("generate", "first"), + ("support", "first"), + ("verify-app-tools", "first"), + ("code", "first", False), + ("requirements", "first", False), + ("resources", "first"), + ("cleanup", "first"), ] # Original content has been deleted diff --git a/tests/commands/create/test_generate_app_template.py b/tests/commands/create/test_generate_app_template.py index be9519c2d..f5f4358d4 100644 --- a/tests/commands/create/test_generate_app_template.py +++ b/tests/commands/create/test_generate_app_template.py @@ -26,6 +26,7 @@ def full_context(): "bundle": "com.example", "version": "1.2.3", "description": "This is a simple app", + "long_description": None, "sources": ["src/my_app"], "test_sources": None, "test_requires": None, @@ -39,6 +40,7 @@ def full_context(): "document_types": {}, # Properties of the generating environment "python_version": platform.python_version(), + "briefcase_version": briefcase.__version__, # Fields generated from other properties "module_name": "my_app", "class_name": "MyApp", @@ -48,6 +50,9 @@ def full_context(): "month": date.today().strftime("%B"), 
# Fields added by the output format. "output_format": "dummy", + # These tests don't do a full finalization, so the context will still be + # marked as draft. + "__draft__": True, } @@ -76,6 +81,7 @@ def test_default_template( the template branch.""" # Set the Briefcase version monkeypatch.setattr(briefcase, "__version__", briefcase_version) + full_context["briefcase_version"] = briefcase_version # There won't be a cookiecutter cache, so there won't be # a cache path (yet). @@ -110,6 +116,7 @@ def test_default_template_dev( template doesn't exist.""" # Set the Briefcase version monkeypatch.setattr(briefcase, "__version__", briefcase_version) + full_context["briefcase_version"] = briefcase_version # There won't be a cookiecutter cache, so there won't be # a cache path (yet). @@ -164,6 +171,7 @@ def test_default_template_dev_explicit_branch( """In a dev version, if an explicit branch is provided, it is used.""" # Set the Briefcase version monkeypatch.setattr(briefcase, "__version__", briefcase_version) + full_context["briefcase_version"] = briefcase_version # Set an explicit branch branch = "some_branch" @@ -206,6 +214,7 @@ def test_default_template_dev_explicit_invalid_branch( to the 'main' branch will not occur.""" # Set the Briefcase version to a dev version monkeypatch.setattr(briefcase, "__version__", briefcase_version) + full_context["briefcase_version"] = briefcase_version # Set an explicit branch branch = "some_branch" @@ -242,6 +251,7 @@ def test_explicit_branch(monkeypatch, create_command, myapp, full_context, tmp_p """user can choose which branch to take the template from.""" # Set the Briefcase version monkeypatch.setattr(briefcase, "__version__", "37.42.7") + full_context["briefcase_version"] = "37.42.7" # Set an explicit branch branch = "some_branch" @@ -268,6 +278,7 @@ def test_platform_exists(monkeypatch, create_command, myapp, full_context, tmp_p """If the platform directory already exists, it's ok.""" # Set the Briefcase version 
monkeypatch.setattr(briefcase, "__version__", "37.42.7") + full_context["briefcase_version"] = "37.42.7" # There won't be a cookiecutter cache, so there won't be # a cache path (yet). @@ -295,6 +306,7 @@ def test_explicit_repo_template( """If a template is specified in the app config, it is used.""" # Set the Briefcase version monkeypatch.setattr(briefcase, "__version__", "37.42.7") + full_context["briefcase_version"] = "37.42.7" myapp.template = "https://example.com/magic/special-template.git" @@ -325,6 +337,7 @@ def test_explicit_repo_template_and_branch( """If a template and branch is specified in the app config, it is used.""" # Set the Briefcase version monkeypatch.setattr(briefcase, "__version__", "37.42.7") + full_context["briefcase_version"] = "37.42.7" # Set an explicit template and branch myapp.template = "https://example.com/magic/special-template.git" @@ -354,6 +367,7 @@ def test_explicit_local_template( """If a local template path is specified in the app config, it is used.""" # Set the Briefcase version monkeypatch.setattr(briefcase, "__version__", "37.42.7") + full_context["briefcase_version"] = "37.42.7" myapp.template = "/path/to/special-template" @@ -384,6 +398,7 @@ def test_explicit_local_template_and_branch( used.""" # Set the Briefcase version monkeypatch.setattr(briefcase, "__version__", "37.42.7") + full_context["briefcase_version"] = "37.42.7" myapp.template = "/path/to/special-template" branch = "some_branch" @@ -412,6 +427,7 @@ def test_offline_repo_template( raised.""" # Set the Briefcase version monkeypatch.setattr(briefcase, "__version__", "37.42.7") + full_context["briefcase_version"] = "37.42.7" # There won't be a cookiecutter cache, so there won't be # a repo path (yet). 
@@ -447,6 +463,7 @@ def test_invalid_repo_template( """If the provided template URL isn't valid, an error is raised.""" # Set the Briefcase version monkeypatch.setattr(briefcase, "__version__", "37.42.7") + full_context["briefcase_version"] = "37.42.7" myapp.template = "https://example.com/somewhere/not-a-repo.git" @@ -480,6 +497,7 @@ def test_missing_branch_template( version, an error is raised.""" # Set the Briefcase version monkeypatch.setattr(briefcase, "__version__", "37.42.7") + full_context["briefcase_version"] = "37.42.7" myapp.template = "https://example.com/somewhere/missing-branch.git" @@ -511,6 +529,7 @@ def test_cached_template(monkeypatch, create_command, myapp, full_context, tmp_p """If a template has already been used, the cached version will be used.""" # Set the Briefcase version monkeypatch.setattr(briefcase, "__version__", "37.42.7") + full_context["briefcase_version"] = "37.42.7" mock_repo = mock.MagicMock() mock_remote = mock.MagicMock() @@ -554,6 +573,7 @@ def test_cached_template_offline( work.""" # Set the Briefcase version monkeypatch.setattr(briefcase, "__version__", "37.42.7") + full_context["briefcase_version"] = "37.42.7" mock_repo = mock.MagicMock() mock_remote = mock.MagicMock() @@ -591,12 +611,7 @@ def test_cached_template_offline( ) -def test_cached_missing_branch_template( - monkeypatch, - create_command, - myapp, - full_context, -): +def test_cached_missing_branch_template(monkeypatch, create_command, myapp): """If the cached repo doesn't have a branch for this Briefcase version, an error is raised.""" # Set the Briefcase version diff --git a/tests/commands/dev/test_call.py b/tests/commands/dev/test_call.py index 1f8e4b4f4..748a13b24 100644 --- a/tests/commands/dev/test_call.py +++ b/tests/commands/dev/test_call.py @@ -80,7 +80,7 @@ def test_no_args_one_app(dev_command, first_app): def test_no_args_two_apps(dev_command, first_app, second_app): - """If there are one app, dev starts that app by default.""" + """If there are two 
apps and no explicit app provided, an error is raised.""" # Add two apps dev_command.apps = { "first": first_app, @@ -94,13 +94,8 @@ def test_no_args_two_apps(dev_command, first_app, second_app): with pytest.raises(BriefcaseCommandError): dev_command(**options) - # No apps will be launched - assert dev_command.actions == [ - # Host OS is verified - ("verify-host",), - # Tools are verified - ("verify-tools",), - ] + # Finalization will not occur. + assert dev_command.actions == [] def test_with_arg_one_app(dev_command, first_app): @@ -172,13 +167,8 @@ def test_bad_app_reference(dev_command, first_app, second_app): with pytest.raises(BriefcaseCommandError): dev_command(**options) - # No apps will be launched - assert dev_command.actions == [ - # Host OS is verified - ("verify-host",), - # Tools are verified - ("verify-tools",), - ] + # Finalization will not occur. + assert dev_command.actions == [] def test_update_requirements(dev_command, first_app): diff --git a/tests/commands/new/conftest.py b/tests/commands/new/conftest.py index c6860e050..790029b33 100644 --- a/tests/commands/new/conftest.py +++ b/tests/commands/new/conftest.py @@ -30,6 +30,14 @@ def verify_tools(self): super().verify_tools() self.actions.append(("verify-tools",)) + def finalize_app_config(self, app): + super().finalize_app_config(app=app) + self.actions.append(("finalize-app-config", app)) + + def verify_app_tools(self, app): + super().verify_app_tools(app=app) + self.actions.append(("verify-app-tools", app.app_name)) + def new_app(self, **kwargs): self.actions.append(("new", kwargs)) return full_options({"new_state": "done"}, kwargs) diff --git a/tests/commands/open/conftest.py b/tests/commands/open/conftest.py index ed22dbddf..98ee5c65c 100644 --- a/tests/commands/open/conftest.py +++ b/tests/commands/open/conftest.py @@ -50,6 +50,10 @@ def verify_tools(self): super().verify_tools() self.actions.append(("verify-tools",)) + def finalize_app_config(self, app): + 
super().finalize_app_config(app=app) + self.actions.append(("finalize-app-config", app.app_name)) + def verify_app_tools(self, app): super().verify_app_tools(app=app) self.actions.append(("verify-app-tools", app.app_name)) diff --git a/tests/commands/open/test_call.py b/tests/commands/open/test_call.py index 64d585ba4..de246a04f 100644 --- a/tests/commands/open/test_call.py +++ b/tests/commands/open/test_call.py @@ -11,6 +11,10 @@ def test_open(open_command, first_app, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), + # App tools are verified ("verify-app-tools", "first"), # open the first app ("open", "first"), @@ -33,6 +37,9 @@ def test_open_single(open_command, first_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), + # App tools are verified ("verify-app-tools", "first"), # open the first app ("open", "first"), @@ -52,8 +59,11 @@ def test_create_before_open(open_command, tmp_path): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # create, then open the first app ("create", "first", {}), + # App tools are verified ("verify-app-tools", "first"), ("open", "first"), ] diff --git a/tests/commands/package/conftest.py b/tests/commands/package/conftest.py index cb115b39f..1b751daaa 100644 --- a/tests/commands/package/conftest.py +++ b/tests/commands/package/conftest.py @@ -68,6 +68,10 @@ def verify_tools(self): super().verify_tools() self.actions.append(("verify-tools",)) + def finalize_app_config(self, app): + super().finalize_app_config(app=app) + self.actions.append(("finalize-app-config", app.app_name)) + def verify_app_tools(self, app): super().verify_app_tools(app=app) self.actions.append(("verify-app-tools", app.app_name)) diff --git a/tests/commands/package/test_call.py 
b/tests/commands/package/test_call.py index ed3edcbef..075d9c731 100644 --- a/tests/commands/package/test_call.py +++ b/tests/commands/package/test_call.py @@ -20,6 +20,8 @@ def test_no_args_package_one_app(package_command, first_app, tmp_path): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # App tools are verified for app ("verify-app-tools", "first"), # Package the first app @@ -61,6 +63,8 @@ def test_package_one_explicit_app(package_command, first_app, second_app, tmp_pa ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # App tools are verified for app ("verify-app-tools", "first"), # Package the first app @@ -103,6 +107,9 @@ def test_no_args_package_two_app(package_command, first_app, second_app, tmp_pat ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # App tools are verified for first app ("verify-app-tools", "first"), # Package the first app @@ -158,6 +165,8 @@ def test_no_sign_package_one_app(package_command, first_app, tmp_path): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # App tools are verified for app ("verify-app-tools", "first"), # Package the first app @@ -199,6 +208,8 @@ def test_identity_arg_package_one_app(package_command, first_app, tmp_path): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # App tools are verified for app ("verify-app-tools", "first"), # Package the first app @@ -240,6 +251,8 @@ def test_adhoc_sign_package_one_app(package_command, first_app, tmp_path): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # App tools are 
verified for app ("verify-app-tools", "first"), # Package the first app @@ -282,6 +295,9 @@ def test_no_sign_args_package_two_app(package_command, first_app, second_app, tm ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # App tools are verified for first app ("verify-app-tools", "first"), # Package the first app @@ -343,6 +359,9 @@ def test_adhoc_sign_args_package_two_app( ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # App tools are verified for first app ("verify-app-tools", "first"), # Package the first app @@ -402,6 +421,9 @@ def test_identity_sign_args_package_two_app( ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # App tools are verified for first app ("verify-app-tools", "first"), # Package the first app @@ -457,6 +479,8 @@ def test_package_alternate_format(package_command, first_app, tmp_path): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # App tools are verified for app ("verify-app-tools", "first"), # Package the first app @@ -497,6 +521,8 @@ def test_create_before_package(package_command, first_app_config, tmp_path): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # Create and then build the first app ( "create", @@ -559,6 +585,8 @@ def test_update_package_one_app(package_command, first_app, tmp_path): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # Update (and then build) the first app ( "update", @@ -624,6 +652,9 @@ def 
test_update_package_two_app(package_command, first_app, second_app, tmp_path ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # Update (and then build) the first app ( "update", @@ -734,6 +765,8 @@ def test_build_before_package(package_command, first_app_unbuilt, tmp_path): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # Build the first app ( "build", @@ -789,6 +822,8 @@ def test_already_packaged(package_command, first_app, tmp_path): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # App tools are verified for app ("verify-app-tools", "first"), # Package the first app diff --git a/tests/commands/publish/conftest.py b/tests/commands/publish/conftest.py index 19bc09fb4..e13675f1a 100644 --- a/tests/commands/publish/conftest.py +++ b/tests/commands/publish/conftest.py @@ -37,6 +37,10 @@ def verify_tools(self): super().verify_tools() self.actions.append(("verify-tools",)) + def finalize_app_config(self, app): + super().finalize_app_config(app=app) + self.actions.append(("finalize-app-config", app.app_name)) + def verify_app_tools(self, app): super().verify_app_tools(app=app) self.actions.append(("verify-app-tools", app.app_name)) diff --git a/tests/commands/publish/test_call.py b/tests/commands/publish/test_call.py index b5800132f..d86f65dfc 100644 --- a/tests/commands/publish/test_call.py +++ b/tests/commands/publish/test_call.py @@ -23,6 +23,9 @@ def test_publish(publish_command, first_app, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # App tools are verified for first app ("verify-app-tools", "first"), # Publish the first app to s3 @@ -54,6 +57,9 @@ def 
test_publish_alternative_channel(publish_command, first_app, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # App tools are verified for first app ("verify-app-tools", "first"), # Publish the first app to the alternative channel @@ -86,6 +92,9 @@ def test_non_existent(publish_command, first_app_config, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), ] @@ -111,4 +120,7 @@ def test_unbuilt(publish_command, first_app_unbuilt, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), ] diff --git a/tests/commands/run/conftest.py b/tests/commands/run/conftest.py index a9848e516..2bb808c97 100644 --- a/tests/commands/run/conftest.py +++ b/tests/commands/run/conftest.py @@ -37,6 +37,14 @@ def verify_tools(self): super().verify_tools() self.actions.append(("verify-tools",)) + def finalize_app_config(self, app): + super().finalize_app_config(app) + self.actions.append(("finalize-app-config", app.app_name)) + + def verify_app_tools(self, app): + super().verify_app_tools(app=app) + self.actions.append(("verify-app-tools", app.app_name)) + def run_app(self, app, **kwargs): self.actions.append(("run", app.app_name, kwargs.copy())) # Remove arguments consumed by the underlying call to run_app() diff --git a/tests/commands/run/test_call.py b/tests/commands/run/test_call.py index 4bd167cc6..4ea34d817 100644 --- a/tests/commands/run/test_call.py +++ b/tests/commands/run/test_call.py @@ -22,6 +22,10 @@ def test_no_args_one_app(run_command, first_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), + # App tools are verified 
+ ("verify-app-tools", "first"), # Run the first app ("run", "first", {"test_mode": False, "passthrough": []}), ] @@ -47,13 +51,17 @@ def test_no_args_one_app_with_passthrough(run_command, first_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), + # App tools have been verified + ("verify-app-tools", "first"), # Run the first app ("run", "first", {"test_mode": False, "passthrough": ["foo", "--bar"]}), ] def test_no_args_two_apps(run_command, first_app, second_app): - """If there are one app, run starts that app by default.""" + """If there are two apps and no explicit app is started, an error is raised.""" # Add two apps run_command.apps = { "first": first_app, @@ -67,13 +75,8 @@ def test_no_args_two_apps(run_command, first_app, second_app): with pytest.raises(BriefcaseCommandError): run_command(**options) - # Only verification actions will be performed - assert run_command.actions == [ - # Host OS is verified - ("verify-host",), - # Tools are verified - ("verify-tools",), - ] + # No verification actions will be performed + assert run_command.actions == [] def test_with_arg_one_app(run_command, first_app): @@ -95,6 +98,10 @@ def test_with_arg_one_app(run_command, first_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), + # App tools are verified + ("verify-app-tools", "first"), # Run the first app ("run", "first", {"test_mode": False, "passthrough": []}), ] @@ -120,6 +127,10 @@ def test_with_arg_two_apps(run_command, first_app, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "second"), + # App tools have been verified + ("verify-app-tools", "second"), # Run the second app ("run", "second", {"test_mode": False, "passthrough": []}), ] @@ -141,13 +152,8 @@ def test_bad_app_reference(run_command, first_app, second_app): with 
pytest.raises(BriefcaseCommandError): run_command(**options) - # Only verification actions will be performed - assert run_command.actions == [ - # Host OS is verified - ("verify-host",), - # Tools are verified - ("verify-tools",), - ] + # No verification actions will be performed + assert run_command.actions == [] def test_create_app_before_start(run_command, first_app_config): @@ -169,6 +175,8 @@ def test_create_app_before_start(run_command, first_app_config): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # App doesn't exist, so it will be built # (which will transitively create) ( @@ -182,6 +190,8 @@ def test_create_app_before_start(run_command, first_app_config): "no_update": False, }, ), + # App tools are verified + ("verify-app-tools", "first"), # Then, it will be started ( "run", @@ -210,6 +220,8 @@ def test_build_app_before_start(run_command, first_app_unbuilt): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # A build was requested, with no update ( "build", @@ -222,6 +234,8 @@ def test_build_app_before_start(run_command, first_app_unbuilt): "no_update": False, }, ), + # App tools are verified + ("verify-app-tools", "first"), # Then, it will be started ( "run", @@ -250,6 +264,8 @@ def test_update_app(run_command, first_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # A build with an explicit update was requested ( "build", @@ -262,6 +278,8 @@ def test_update_app(run_command, first_app): "no_update": False, }, ), + # App tools are verified + ("verify-app-tools", "first"), # Then, it will be started ( "run", @@ -290,6 +308,8 @@ def test_update_app_requirements(run_command, first_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # A 
build with an explicit update was requested ( "build", @@ -302,6 +322,8 @@ def test_update_app_requirements(run_command, first_app): "no_update": False, }, ), + # App tools are verified + ("verify-app-tools", "first"), # Then, it will be started ( "run", @@ -330,6 +352,8 @@ def test_update_app_resources(run_command, first_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # A build with an explicit update was requested ( "build", @@ -342,6 +366,8 @@ def test_update_app_resources(run_command, first_app): "no_update": False, }, ), + # App tools are verified + ("verify-app-tools", "first"), # Then, it will be started ( "run", @@ -370,6 +396,8 @@ def test_update_unbuilt_app(run_command, first_app_unbuilt): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # An update was requested, so a build with an explicit update # will be performed ( @@ -383,6 +411,8 @@ def test_update_unbuilt_app(run_command, first_app_unbuilt): "no_update": False, }, ), + # App tools are verified + ("verify-app-tools", "first"), # Then, it will be started ( "run", @@ -411,6 +441,8 @@ def test_update_non_existent(run_command, first_app_config): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # App doesn't exist, so it will be built, with an # update requested ( @@ -424,6 +456,8 @@ def test_update_non_existent(run_command, first_app_config): "no_update": False, }, ), + # App tools are verified + ("verify-app-tools", "first"), # Then, it will be started ( "run", @@ -452,6 +486,8 @@ def test_test_mode_existing_app(run_command, first_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # App is built in test mode ( "build", @@ -464,6 +500,8 @@ def 
test_test_mode_existing_app(run_command, first_app): "no_update": False, }, ), + # App tools are verified + ("verify-app-tools", "first"), # Run the first app ( "run", @@ -492,6 +530,8 @@ def test_test_mode_existing_app_with_passthrough(run_command, first_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # App is built in test mode ( "build", @@ -504,6 +544,8 @@ def test_test_mode_existing_app_with_passthrough(run_command, first_app): "no_update": False, }, ), + # App tools have been verified + ("verify-app-tools", "first"), # Run the first app ( "run", @@ -536,7 +578,11 @@ def test_test_mode_existing_app_no_update(run_command, first_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # App will not be built; update is disabled + # App tools are verified + ("verify-app-tools", "first"), # Run the first app ( "run", @@ -565,6 +611,8 @@ def test_test_mode_existing_app_update_requirements(run_command, first_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # App will be built with a requirements update ( "build", @@ -577,6 +625,8 @@ def test_test_mode_existing_app_update_requirements(run_command, first_app): "no_update": False, }, ), + # App tools are verified + ("verify-app-tools", "first"), # Run the first app ( "run", @@ -605,6 +655,8 @@ def test_test_mode_existing_app_update_resources(run_command, first_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # App will be built with a resource update ( "build", @@ -617,6 +669,8 @@ def test_test_mode_existing_app_update_resources(run_command, first_app): "no_update": False, }, ), + # App tools are verified + ("verify-app-tools", "first"), # Run the first app ( "run", @@ -645,6 +699,8 @@ def 
test_test_mode_update_existing_app(run_command, first_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # App will be built; update is explicit ( "build", @@ -657,6 +713,8 @@ def test_test_mode_update_existing_app(run_command, first_app): "no_update": False, }, ), + # App tools are verified + ("verify-app-tools", "first"), # Run the first app ( "run", @@ -685,6 +743,8 @@ def test_test_mode_non_existent(run_command, first_app_config): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # App will be built in test mode, (which will transitively create) ( "build", @@ -697,6 +757,8 @@ def test_test_mode_non_existent(run_command, first_app_config): "no_update": False, }, ), + # App tools are verified + ("verify-app-tools", "first"), # Then, it will be started ( "run", diff --git a/tests/commands/update/conftest.py b/tests/commands/update/conftest.py index 429e61005..275a30b51 100644 --- a/tests/commands/update/conftest.py +++ b/tests/commands/update/conftest.py @@ -36,22 +36,30 @@ def verify_tools(self): super().verify_tools() self.actions.append(("verify-tools",)) + def finalize_app_config(self, app): + super().finalize_app_config(app=app) + self.actions.append(("finalize-app-config", app.app_name)) + + def verify_app_tools(self, app): + super().verify_app_tools(app=app) + self.actions.append(("verify-app-tools", app.app_name)) + # Override all the body methods of a UpdateCommand # with versions that we can use to track actions performed. 
def install_app_requirements(self, app, test_mode): - self.actions.append(("requirements", app, test_mode)) + self.actions.append(("requirements", app.app_name, test_mode)) create_file(self.bundle_path(app) / "requirements", "app requirements") def install_app_code(self, app, test_mode): - self.actions.append(("code", app, test_mode)) + self.actions.append(("code", app.app_name, test_mode)) create_file(self.bundle_path(app) / "code.py", "print('app')") def install_app_resources(self, app): - self.actions.append(("resources", app)) + self.actions.append(("resources", app.app_name)) create_file(self.bundle_path(app) / "resources", "app resources") def cleanup_app_content(self, app): - self.actions.append(("cleanup", app)) + self.actions.append(("cleanup", app.app_name)) @pytest.fixture diff --git a/tests/commands/update/test_call.py b/tests/commands/update/test_call.py index dc9738ab6..21332f5ac 100644 --- a/tests/commands/update/test_call.py +++ b/tests/commands/update/test_call.py @@ -32,12 +32,17 @@ def test_update(update_command, first_app, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # Update the first app - ("code", update_command.apps["first"], False), - ("cleanup", update_command.apps["first"]), + ("verify-app-tools", "first"), + ("code", "first", False), + ("cleanup", "first"), # Update the second app - ("code", update_command.apps["second"], False), - ("cleanup", update_command.apps["second"]), + ("verify-app-tools", "second"), + ("code", "second", False), + ("cleanup", "second"), ] @@ -54,9 +59,12 @@ def test_update_single(update_command, first_app, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App config has been finalized + ("finalize-app-config", "first"), # update the first app - ("code", update_command.apps["first"], False), - ("cleanup", update_command.apps["first"]), + 
("verify-app-tools", "first"), + ("code", "first", False), + ("cleanup", "first"), ] @@ -73,14 +81,19 @@ def test_update_with_requirements(update_command, first_app, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # Update the first app - ("code", update_command.apps["first"], False), - ("requirements", update_command.apps["first"], False), - ("cleanup", update_command.apps["first"]), + ("verify-app-tools", "first"), + ("code", "first", False), + ("requirements", "first", False), + ("cleanup", "first"), # Update the second app - ("code", update_command.apps["second"], False), - ("requirements", update_command.apps["second"], False), - ("cleanup", update_command.apps["second"]), + ("verify-app-tools", "second"), + ("code", "second", False), + ("requirements", "second", False), + ("cleanup", "second"), ] @@ -97,12 +110,17 @@ def test_update_with_resources(update_command, first_app, second_app): ("verify-host",), # Tools are verified ("verify-tools",), + # App configs have been finalized + ("finalize-app-config", "first"), + ("finalize-app-config", "second"), # Update the first app - ("code", update_command.apps["first"], False), - ("resources", update_command.apps["first"]), - ("cleanup", update_command.apps["first"]), + ("verify-app-tools", "first"), + ("code", "first", False), + ("resources", "first"), + ("cleanup", "first"), # Update the second app - ("code", update_command.apps["second"], False), - ("resources", update_command.apps["second"]), - ("cleanup", update_command.apps["second"]), + ("verify-app-tools", "second"), + ("code", "second", False), + ("resources", "second"), + ("cleanup", "second"), ] diff --git a/tests/commands/update/test_update_app.py b/tests/commands/update/test_update_app.py index e7b9b7502..f96c377ab 100644 --- a/tests/commands/update/test_update_app.py +++ b/tests/commands/update/test_update_app.py @@ -9,8 
+9,9 @@ def test_update_app(update_command, first_app, tmp_path): # The right sequence of things will be done assert update_command.actions == [ - ("code", update_command.apps["first"], False), - ("cleanup", update_command.apps["first"]), + ("verify-app-tools", "first"), + ("code", "first", False), + ("cleanup", "first"), ] # App content and resources have been updated @@ -63,9 +64,10 @@ def test_update_app_with_requirements(update_command, first_app, tmp_path): # The right sequence of things will be done assert update_command.actions == [ - ("code", update_command.apps["first"], False), - ("requirements", update_command.apps["first"], False), - ("cleanup", update_command.apps["first"]), + ("verify-app-tools", "first"), + ("code", "first", False), + ("requirements", "first", False), + ("cleanup", "first"), ] # App content has been updated @@ -96,9 +98,10 @@ def test_update_app_with_resources(update_command, first_app, tmp_path): # The right sequence of things will be done assert update_command.actions == [ - ("code", update_command.apps["first"], False), - ("resources", update_command.apps["first"]), - ("cleanup", update_command.apps["first"]), + ("verify-app-tools", "first"), + ("code", "first", False), + ("resources", "first"), + ("cleanup", "first"), ] # App content and resources have been updated @@ -130,8 +133,9 @@ def test_update_app_test_mode(update_command, first_app, tmp_path): # The right sequence of things will be done assert update_command.actions == [ - ("code", update_command.apps["first"], True), - ("cleanup", update_command.apps["first"]), + ("verify-app-tools", "first"), + ("code", "first", True), + ("cleanup", "first"), ] # App code has been updated @@ -163,9 +167,10 @@ def test_update_app_test_mode_requirements(update_command, first_app, tmp_path): # The right sequence of things will be done assert update_command.actions == [ - ("code", update_command.apps["first"], True), - ("requirements", update_command.apps["first"], True), - ("cleanup", 
update_command.apps["first"]), + ("verify-app-tools", "first"), + ("code", "first", True), + ("requirements", "first", True), + ("cleanup", "first"), ] # App content and requirements have been updated @@ -197,9 +202,10 @@ def test_update_app_test_mode_resources(update_command, first_app, tmp_path): # The right sequence of things will be done assert update_command.actions == [ - ("code", update_command.apps["first"], True), - ("resources", update_command.apps["first"]), - ("cleanup", update_command.apps["first"]), + ("verify-app-tools", "first"), + ("code", "first", True), + ("resources", "first"), + ("cleanup", "first"), ] # App content and resources have been updated diff --git a/tests/config/test_AppConfig.py b/tests/config/test_AppConfig.py index 243ce5c1d..42d811082 100644 --- a/tests/config/test_AppConfig.py +++ b/tests/config/test_AppConfig.py @@ -47,6 +47,7 @@ def test_extra_attrs(): version="1.2.3", bundle="org.beeware", description="A simple app", + long_description="A longer description\nof the app", template="/path/to/template", sources=["src/myapp"], requires=["first", "second", "third"], @@ -65,6 +66,7 @@ def test_extra_attrs(): assert config.version == "1.2.3" assert config.bundle == "org.beeware" assert config.description == "A simple app" + assert config.long_description == "A longer description\nof the app" assert config.template == "/path/to/template" assert config.requires == ["first", "second", "third"] diff --git a/tests/console/test_Log.py b/tests/console/test_Log.py index 5c50f4064..7e573b8ed 100644 --- a/tests/console/test_Log.py +++ b/tests/console/test_Log.py @@ -101,17 +101,16 @@ def test_save_log_to_file_no_exception(tmp_path, now): prefix="wibble", markup=True, ) - logger.save_log_to_file(command=command) - log_filepath = tmp_path / logger.LOG_DIR / "briefcase.2022_06_25-16_12_29.dev.log" + log_filepath = tmp_path / "logs" / "briefcase.2022_06_25-16_12_29.dev.log" assert log_filepath.exists() with open(log_filepath, encoding="utf-8") as 
log: log_contents = log.read() assert log_contents.startswith("Date/Time: 2022-06-25 16:12:29") - assert f"{Log.DEBUG_PREFACE}this is debug output" in log_contents + assert ">>> this is debug output" in log_contents assert "this is info output" in log_contents assert "this is [bold]info output with markup[/bold]" in log_contents assert "this is info output with escaped markup" in log_contents @@ -146,7 +145,7 @@ def test_save_log_to_file_with_exception(tmp_path, now): logger.capture_stacktrace() logger.save_log_to_file(command=command) - log_filepath = tmp_path / logger.LOG_DIR / "briefcase.2022_06_25-16_12_29.dev.log" + log_filepath = tmp_path / "logs" / "briefcase.2022_06_25-16_12_29.dev.log" assert log_filepath.exists() with open(log_filepath, encoding="utf-8") as log: @@ -175,7 +174,7 @@ def test_save_log_to_file_with_multiple_exceptions(tmp_path, now): logger.save_log_to_file(command=command) - log_filepath = tmp_path / logger.LOG_DIR / "briefcase.2022_06_25-16_12_29.dev.log" + log_filepath = tmp_path / "logs" / "briefcase.2022_06_25-16_12_29.dev.log" assert log_filepath.exists() with open(log_filepath, encoding="utf-8") as log: @@ -210,7 +209,7 @@ def extra3(): for extra in [extra1, extra2, extra3]: logger.add_log_file_extra(extra) logger.save_log_to_file(command=command) - log_filepath = tmp_path / logger.LOG_DIR / "briefcase.2022_06_25-16_12_29.dev.log" + log_filepath = tmp_path / "logs" / "briefcase.2022_06_25-16_12_29.dev.log" with open(log_filepath, encoding="utf-8") as log: log_contents = log.read() @@ -239,7 +238,7 @@ def extra1(): with pytest.raises(KeyboardInterrupt): logger.save_log_to_file(command=command) extra2.assert_not_called() - log_filepath = tmp_path / logger.LOG_DIR / "briefcase.2022_06_25-16_12_29.dev.log" + log_filepath = tmp_path / "logs" / "briefcase.2022_06_25-16_12_29.dev.log" assert log_filepath.stat().st_size == 0 @@ -258,3 +257,89 @@ def test_save_log_to_file_fail_to_write_file(capsys): last_line_of_output = 
capsys.readouterr().out.strip().splitlines()[-1] assert last_line_of_output.startswith("Failed to save log to ") + + +def test_log_with_context(tmp_path, capsys): + """Log file can be given a persistent context.""" + command = MagicMock() + command.base_path = Path(tmp_path) + + logger = Log(verbosity=2) + logger.save_log = False + + logger.info("this is info output") + with logger.context("Deep"): + logger.info("this is deep context") + logger.info("prefixed deep context", prefix="prefix") + logger.info() + logger.debug("this is deep debug") + with logger.context("Really Deep"): + logger.info("this is really deep context") + logger.info("prefixed really deep context", prefix="prefix2") + logger.info() + logger.debug("this is really deep debug") + logger.info("Pop back to deep") + logger.info("Pop back to normal") + + assert capsys.readouterr().out == "\n".join( + [ + "this is info output", + "", + "Entering Deep context...", + "Deep| --------------------------------------------------------------------", + "Deep| this is deep context", + "Deep| ", + "Deep| [prefix] prefixed deep context", + "Deep| ", + "Deep| >>> this is deep debug", + "Deep| ", + "Deep| Entering Really Deep context...", + "Really Deep| -------------------------------------------------------------", + "Really Deep| this is really deep context", + "Really Deep| ", + "Really Deep| [prefix2] prefixed really deep context", + "Really Deep| ", + "Really Deep| >>> this is really deep debug", + "Really Deep| -------------------------------------------------------------", + "Deep| Leaving Really Deep context.", + "Deep| ", + "Deep| Pop back to deep", + "Deep| --------------------------------------------------------------------", + "Leaving Deep context.", + "", + "Pop back to normal", + "", + ] + ) + + +def test_log_error_with_context_(tmp_path, capsys): + """If an exception is raised in a logging context, the context is cleared.""" + command = MagicMock() + command.base_path = Path(tmp_path) + + logger = 
Log(verbosity=2) + logger.save_log = False + + logger.info("this is info output") + try: + with logger.context("Deep"): + logger.info("this is deep context") + raise ValueError() + except ValueError: + logger.info("this is cleanup") + + assert capsys.readouterr().out == "\n".join( + [ + "this is info output", + "", + "Entering Deep context...", + "Deep| --------------------------------------------------------------------", + "Deep| this is deep context", + "Deep| --------------------------------------------------------------------", + "Leaving Deep context.", + "", + "this is cleanup", + "", + ] + ) diff --git a/tests/integrations/base/test_ToolCache.py b/tests/integrations/base/test_ToolCache.py index 658396242..960ac469d 100644 --- a/tests/integrations/base/test_ToolCache.py +++ b/tests/integrations/base/test_ToolCache.py @@ -46,7 +46,13 @@ def test_toolcache_typing(): # Tools that are intentionally not annotated in ToolCache. tools_unannotated = {"cookiecutter"} # Tool names to exclude from the dynamic annotation checks; they are manually checked. - tool_names_skip_dynamic_check = {"app_context", "git", "xcode", "xcode_cli"} + tool_names_skip_dynamic_check = { + "app_context", + "git", + "xcode", + "xcode_cli", + "ETC_OS_RELEASE", + } # Tool classes to exclude from dynamic annotation checks. 
tool_klasses_skip_dynamic_checks = {"DockerAppContext", "NativeAppContext"} @@ -90,6 +96,8 @@ def test_toolcache_typing(): assert ToolCache.__annotations__["git"] == "git_" + assert ToolCache.__annotations__["ETC_OS_RELEASE"] == "Path" + def test_third_party_tools_available(): """Third party tools are available.""" diff --git a/tests/integrations/docker/test_DockerAppContext__check_output.py b/tests/integrations/docker/test_DockerAppContext__check_output.py index 509d575de..a9b7162d5 100644 --- a/tests/integrations/docker/test_DockerAppContext__check_output.py +++ b/tests/integrations/docker/test_DockerAppContext__check_output.py @@ -68,6 +68,41 @@ def test_extra_mounts(mock_docker_app_context, tmp_path, capsys): assert capsys.readouterr().out == "" +@pytest.mark.skipif( + sys.platform == "win32", reason="Windows paths aren't converted in Docker context" +) +def test_cwd(mock_docker_app_context, tmp_path, capsys): + """A call can use a working directory relative to the project folder.""" + assert ( + mock_docker_app_context.check_output( + ["hello", "world"], + cwd=tmp_path / "bundle" / "foobar", + ) + == "goodbye\n" + ) + + mock_docker_app_context.tools.subprocess._subprocess.check_output.assert_called_once_with( + [ + "docker", + "run", + "--rm", + "--volume", + f"{tmp_path / 'bundle'}:/app:z", + "--volume", + f"{tmp_path / 'briefcase'}:/home/brutus/.cache/briefcase:z", + "--workdir", + "/app/foobar", + "briefcase/com.example.myapp:py3.X", + "hello", + "world", + ], + text=True, + encoding=ANY, + stderr=subprocess.STDOUT, + ) + assert capsys.readouterr().out == "" + + def test_call_with_arg_and_env(mock_docker_app_context, tmp_path, capsys): """Extra keyword arguments are passed through as-is; env modifications are converted.""" @@ -135,11 +170,12 @@ def test_call_with_path_arg_and_env(mock_docker_app_context, tmp_path, capsys): "MAGIC=True", "--env", "PATH=/somewhere/safe:/home/brutus/.cache/briefcase/tools:/app/location", + "--workdir", + f"{tmp_path / 'cwd'}", 
"briefcase/com.example.myapp:py3.X", "hello", os.fsdecode(tmp_path / "location"), ], - cwd=os.fsdecode(tmp_path / "cwd"), text=True, encoding=ANY, stderr=subprocess.STDOUT, diff --git a/tests/integrations/docker/test_DockerAppContext__run.py b/tests/integrations/docker/test_DockerAppContext__run.py index 837096bf3..895cdbb3b 100644 --- a/tests/integrations/docker/test_DockerAppContext__run.py +++ b/tests/integrations/docker/test_DockerAppContext__run.py @@ -33,9 +33,11 @@ def test_simple_call(mock_docker_app_context, tmp_path, capsys): ) assert capsys.readouterr().out == ( "\n" - "[myapp] Entering Docker context...\n" + "Entering Docker context...\n" + "Docker| ------------------------------------------------------------------\n" + "Docker| ------------------------------------------------------------------\n" + "Leaving Docker context.\n" "\n" - "[myapp] Leaving Docker context\n" ) @@ -63,9 +65,11 @@ def test_interactive(mock_docker_app_context, tmp_path, capsys): ) assert capsys.readouterr().out == ( "\n" - "[myapp] Entering Docker context...\n" + "Entering Docker context...\n" + "Docker| ------------------------------------------------------------------\n" + "Docker| ------------------------------------------------------------------\n" + "Leaving Docker context.\n" "\n" - "[myapp] Leaving Docker context\n" ) @@ -105,9 +109,53 @@ def test_extra_mounts(mock_docker_app_context, tmp_path, capsys): ) assert capsys.readouterr().out == ( "\n" - "[myapp] Entering Docker context...\n" + "Entering Docker context...\n" + "Docker| ------------------------------------------------------------------\n" + "Docker| ------------------------------------------------------------------\n" + "Leaving Docker context.\n" + "\n" + ) + + +@pytest.mark.skipif( + sys.platform == "win32", reason="Windows paths aren't converted in Docker context" +) +def test_cwd(mock_docker_app_context, tmp_path, capsys): + """A subprocess call can use a working directory relative to the project folder.""" + 
+ mock_docker_app_context.run( + ["hello", "world"], + cwd=tmp_path / "bundle" / "foobar", + ) + + mock_docker_app_context.tools.subprocess._subprocess.Popen.assert_called_once_with( + [ + "docker", + "run", + "--rm", + "--volume", + f"{tmp_path / 'bundle'}:/app:z", + "--volume", + f"{tmp_path / 'briefcase'}:/home/brutus/.cache/briefcase:z", + "--workdir", + "/app/foobar", + "briefcase/com.example.myapp:py3.X", + "hello", + "world", + ], + text=True, + encoding=ANY, + bufsize=1, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) + assert capsys.readouterr().out == ( + "\n" + "Entering Docker context...\n" + "Docker| ------------------------------------------------------------------\n" + "Docker| ------------------------------------------------------------------\n" + "Leaving Docker context.\n" "\n" - "[myapp] Leaving Docker context\n" ) @@ -149,9 +197,11 @@ def test_call_with_arg_and_env(mock_docker_app_context, tmp_path, capsys): ) assert capsys.readouterr().out == ( "\n" - "[myapp] Entering Docker context...\n" + "Entering Docker context...\n" + "Docker| ------------------------------------------------------------------\n" + "Docker| ------------------------------------------------------------------\n" + "Leaving Docker context.\n" "\n" - "[myapp] Leaving Docker context\n" ) @@ -184,11 +234,12 @@ def test_call_with_path_arg_and_env(mock_docker_app_context, tmp_path, capsys): "MAGIC=True", "--env", "PATH=/somewhere/safe:/home/brutus/.cache/briefcase/tools:/app/location", + "--workdir", + f"{tmp_path / 'cwd'}", "briefcase/com.example.myapp:py3.X", "hello", os.fsdecode(tmp_path / "location"), ], - cwd=os.fsdecode(tmp_path / "cwd"), text=True, encoding=ANY, bufsize=1, @@ -197,9 +248,11 @@ def test_call_with_path_arg_and_env(mock_docker_app_context, tmp_path, capsys): ) assert capsys.readouterr().out == ( "\n" - "[myapp] Entering Docker context...\n" + "Entering Docker context...\n" + "Docker| 
------------------------------------------------------------------\n" + "Docker| ------------------------------------------------------------------\n" + "Leaving Docker context.\n" "\n" - "[myapp] Leaving Docker context\n" ) @@ -243,19 +296,22 @@ def test_interactive_with_path_arg_and_env_and_mounts( "MAGIC=True", "--env", "PATH=/somewhere/safe:/home/brutus/.cache/briefcase/tools:/app/location", + "--workdir", + f"{tmp_path / 'cwd'}", "briefcase/com.example.myapp:py3.X", "hello", os.fsdecode(tmp_path / "location"), ], - cwd=os.fsdecode(tmp_path / "cwd"), text=True, encoding=ANY, ) assert capsys.readouterr().out == ( "\n" - "[myapp] Entering Docker context...\n" + "Entering Docker context...\n" + "Docker| ------------------------------------------------------------------\n" + "Docker| ------------------------------------------------------------------\n" + "Leaving Docker context.\n" "\n" - "[myapp] Leaving Docker context\n" ) @@ -289,18 +345,20 @@ def test_simple_verbose_call(mock_docker_app_context, tmp_path, capsys): ) assert capsys.readouterr().out == ( "\n" - "[myapp] Entering Docker context...\n" - "\n" - ">>> Running Command:\n" - ">>> docker run " + "Entering Docker context...\n" + "Docker| ------------------------------------------------------------------\n" + "Docker| \n" + "Docker| >>> Running Command:\n" + "Docker| >>> docker run " "--rm " f"--volume {tmp_path / 'bundle'}:/app:z " f"--volume {tmp_path / 'briefcase'}:/home/brutus/.cache/briefcase:z " "briefcase/com.example.myapp:py3.X " "hello world\n" - ">>> Working Directory:\n" - f">>> {Path.cwd()}\n" - ">>> Return code: 0\n" + "Docker| >>> Working Directory:\n" + f"Docker| >>> {Path.cwd()}\n" + "Docker| >>> Return code: 0\n" + "Docker| ------------------------------------------------------------------\n" + "Leaving Docker context.\n" "\n" - "[myapp] Leaving Docker context\n" ) diff --git a/tests/integrations/docker/test_Docker__check_output.py b/tests/integrations/docker/test_Docker__check_output.py 
new file mode 100644 index 000000000..14a52e744 --- /dev/null +++ b/tests/integrations/docker/test_Docker__check_output.py @@ -0,0 +1,25 @@ +from unittest.mock import MagicMock + +import pytest + +from briefcase.integrations.base import ToolCache +from briefcase.integrations.docker import Docker +from briefcase.integrations.subprocess import Subprocess + + +@pytest.fixture +def mock_tools(mock_tools) -> ToolCache: + mock_tools.subprocess = MagicMock(spec_set=Subprocess) + mock_tools.docker = Docker(mock_tools) + return mock_tools + + +def test_check_output(mock_tools): + "A command can be invoked on a bare Docker image." + + # Run the command in a container + mock_tools.docker.check_output(["cmd", "arg1", "arg2"], image_tag="ubuntu:jammy") + + mock_tools.subprocess.check_output.assert_called_once_with( + ["docker", "run", "--rm", "ubuntu:jammy", "cmd", "arg1", "arg2"] + ) diff --git a/tests/integrations/docker/test_Docker__prepare.py b/tests/integrations/docker/test_Docker__prepare.py new file mode 100644 index 000000000..7b97cd7df --- /dev/null +++ b/tests/integrations/docker/test_Docker__prepare.py @@ -0,0 +1,52 @@ +import subprocess +from unittest.mock import MagicMock + +import pytest + +from briefcase.exceptions import BriefcaseCommandError +from briefcase.integrations.base import ToolCache +from briefcase.integrations.docker import Docker +from briefcase.integrations.subprocess import Subprocess + + +@pytest.fixture +def mock_tools(mock_tools) -> ToolCache: + mock_tools.subprocess = MagicMock(spec_set=Subprocess) + mock_tools.docker = Docker(mock_tools) + return mock_tools + + +def test_prepare(mock_tools): + "A docker image can be prepared" + + # Prepare an image + mock_tools.docker.prepare("ubuntu:jammy") + + mock_tools.subprocess.run.assert_called_once_with( + ["docker", "run", "--rm", "ubuntu:jammy", "printf", ""], + check=True, + stream_output=False, + ) + + +def test_prepare_bad_image(mock_tools): + "If an image is invalid, an exception raised" + # Mock 
a Docker failure due to a bad image + mock_tools.subprocess.run.side_effect = subprocess.CalledProcessError( + returncode=125, + cmd="docker...", + ) + + # Try to prepare an image that doesn't exist: + with pytest.raises( + BriefcaseCommandError, + match=r"Unable to obtain the Docker base image ubuntu:does-not-exist.", + ): + mock_tools.docker.prepare("ubuntu:does-not-exist") + + # The subprocess call was made. + mock_tools.subprocess.run.assert_called_once_with( + ["docker", "run", "--rm", "ubuntu:does-not-exist", "printf", ""], + check=True, + stream_output=False, + ) diff --git a/tests/platforms/linux/appimage/test_build.py b/tests/platforms/linux/appimage/test_build.py index ce26f12d2..55d959415 100644 --- a/tests/platforms/linux/appimage/test_build.py +++ b/tests/platforms/linux/appimage/test_build.py @@ -431,6 +431,8 @@ def test_build_appimage_in_docker(build_command, first_app, tmp_path, monkeypatc "APPIMAGE_EXTRACT_AND_RUN=1", "--env", "ARCH=wonky", + "--workdir", + "/app", f"briefcase/com.example.first-app:py3.{sys.version_info.minor}", "/home/brutus/.cache/briefcase/tools/linuxdeploy-wonky.AppImage", "--appdir", @@ -446,9 +448,6 @@ def test_build_appimage_in_docker(build_command, first_app, tmp_path, monkeypatc "--deploy-deps-only", "/app/First App.AppDir/usr/app_packages/secondlib", ], - cwd=os.fsdecode( - tmp_path / "base_path" / "build" / "first-app" / "linux" / "appimage" - ), text=True, encoding=mock.ANY, stdout=subprocess.PIPE, @@ -553,6 +552,8 @@ def test_build_appimage_with_plugins_in_docker(build_command, first_app, tmp_pat "APPIMAGE_EXTRACT_AND_RUN=1", "--env", "ARCH=wonky", + "--workdir", + "/app", f"briefcase/com.example.first-app:py3.{sys.version_info.minor}", "/home/brutus/.cache/briefcase/tools/linuxdeploy-wonky.AppImage", "--appdir", @@ -572,9 +573,6 @@ def test_build_appimage_with_plugins_in_docker(build_command, first_app, tmp_pat "--plugin", "something", ], - cwd=os.fsdecode( - tmp_path / "base_path" / "build" / "first-app" / "linux" / 
"appimage" - ), text=True, encoding=mock.ANY, stdout=subprocess.PIPE, diff --git a/tests/platforms/linux/appimage/test_create.py b/tests/platforms/linux/appimage/test_create.py index c2afac098..4660ea622 100644 --- a/tests/platforms/linux/appimage/test_create.py +++ b/tests/platforms/linux/appimage/test_create.py @@ -1,152 +1,43 @@ -import shutil -import subprocess import sys -from pathlib import Path -from unittest.mock import MagicMock, call import pytest from briefcase.console import Console, Log -from briefcase.exceptions import BriefcaseCommandError, UnsupportedHostError -from briefcase.integrations.docker import DockerAppContext -from briefcase.integrations.subprocess import Subprocess +from briefcase.exceptions import UnsupportedHostError from briefcase.platforms.linux.appimage import LinuxAppImageCreateCommand -from ....utils import create_file, create_tgz_file, create_zip_file - @pytest.fixture -def no_docker_create_command(first_app_config, tmp_path): - command = LinuxAppImageCreateCommand( +def create_command(first_app_config, tmp_path): + return LinuxAppImageCreateCommand( logger=Log(), console=Console(), base_path=tmp_path / "base_path", data_path=tmp_path / "briefcase", ) - # Disable Docker use - command.use_docker = False - - # Set the host architecture to something known for test purposes. - command.tools.host_arch = "wonky" - - # Set the host system to Linux for test purposes - command.tools.host_os = "Linux" - - # Mock the existence of Docker - command.tools.subprocess = MagicMock(spec_set=Subprocess) - - # Mock shutil.copy to do the copy, but be observable - command.tools.shutil.copy = MagicMock(side_effect=shutil.copy) - - command._path_index = { - first_app_config: {"app_packages_path": "path/to/app_packages"} - } - - # At the time app requirements are installed, the project folder will exist. 
- command.project_path(first_app_config).mkdir(parents=True, exist_ok=True) - return command - - -@pytest.fixture -def create_command(no_docker_create_command, first_app_config, tmp_path): - # Enable Docker use - no_docker_create_command.use_docker = True - - # Provide Docker app context - no_docker_create_command.tools[first_app_config].app_context = DockerAppContext( - tools=no_docker_create_command.tools, - app=first_app_config, - ) - no_docker_create_command.tools[first_app_config].app_context.prepare( - image_tag="briefcase/com.example.first-app:py3.X", - dockerfile_path=tmp_path - / "base_path" - / "build" - / "first-app" - / "linux" - / "appimage" - / "Dockerfile", - app_base_path=tmp_path / "base_path", - host_bundle_path=tmp_path - / "base_path" - / "build" - / "first-app" - / "linux" - / "appimage", - host_data_path=tmp_path / "briefcase", - python_version="3.X", - ) - - # Reset the subprocess.run mock, removing the Docker setup call - no_docker_create_command.tools.subprocess.run.reset_mock() - - return no_docker_create_command - - -@pytest.fixture -def first_package(tmp_path): - # Create a local package to be built - create_file( - tmp_path / "local" / "first" / "setup.py", - content="Python config", - ) - create_file( - tmp_path / "local" / "first" / "first.py", - content="Python source", - ) - - return str(tmp_path / "local" / "first") - - -@pytest.fixture -def second_package(tmp_path): - # Create a local pre-built sdist - create_tgz_file( - tmp_path / "local" / "second-2.3.4.tar.gz", - content=[ - ("setup.py", "Python config"), - ("second.py", "Python source"), - ], - ) +def test_default_options(create_command): + """The default options are as expected.""" + options = create_command.parse_options([]) - return str(tmp_path / "local" / "second-2.3.4.tar.gz") + assert options == {} + assert create_command.use_docker -@pytest.fixture -def third_package(tmp_path): - # Create a local pre-built wheel - create_zip_file( - tmp_path / "local" / 
"third-3.4.5-py3-none-any.whl", - content=[ - ("MANIFEST.in", "Wheel config"), - ("third.py", "Python source"), - ], - ) - return str(tmp_path / "local" / "third-3.4.5-py3-none-any.whl") +def test_options(create_command): + """The extra options can be parsed.""" + options = create_command.parse_options(["--no-docker"]) + assert options == {} -@pytest.fixture -def other_package(create_command, first_app_config): - # A stale sdist, built in a previous pass - create_tgz_file( - create_command.local_requirements_path(first_app_config) - / "other_package-0.1.2.tar.gz", - content=[ - ("setup.py", "Python config"), - ("other.py", "Python source"), - ], - ) + assert not create_command.use_docker @pytest.mark.parametrize("host_os", ["Windows", "WeirdOS"]) -def test_unsupported_host_os_with_docker(no_docker_create_command, host_os, tmp_path): +def test_unsupported_host_os_with_docker(create_command, host_os): """Error raised for an unsupported OS when using Docker.""" - # start with "no_docker_create_command" to avoid the initialization of a - # docker env when the underlying OS may not support it during the test. 
- create_command = no_docker_create_command create_command.use_docker = True create_command.tools.host_os = host_os @@ -159,371 +50,29 @@ def test_unsupported_host_os_with_docker(no_docker_create_command, host_os, tmp_ @pytest.mark.parametrize("host_os", ["Darwin", "Windows", "WeirdOS"]) def test_unsupported_host_os_without_docker( - no_docker_create_command, + create_command, host_os, - tmp_path, ): """Error raised for an unsupported OS when not using Docker.""" - no_docker_create_command.tools.host_os = host_os + create_command.use_docker = False + create_command.tools.host_os = host_os with pytest.raises( UnsupportedHostError, match="Linux AppImages can only be built on Linux, or on macOS using Docker.", ): - no_docker_create_command() + create_command() -def test_support_package_url(no_docker_create_command): +def test_support_package_url(create_command): """The URL of the support package is predictable.""" + # Set the host arch to something predictable + create_command.tools.host_arch = "wonky" + revision = 52 expected_url = ( f"https://briefcase-support.s3.amazonaws.com/python" f"/3.{sys.version_info.minor}/linux/wonky" f"/Python-3.{sys.version_info.minor}-linux-wonky-support.b{revision}.tar.gz" ) - assert no_docker_create_command.support_package_url(revision) == expected_url - - -@pytest.mark.skipif( - sys.platform == "win32", reason="Windows paths aren't converted in Docker context" -) -def test_install_app_requirements_in_docker(create_command, first_app_config, tmp_path): - """If Docker is in use, a docker context is used to invoke pip.""" - - # Install requirements - create_command.install_app_requirements(first_app_config, test_mode=False) - - # pip was invoked inside docker. 
- create_command.tools.subprocess.run.assert_called_once_with( - [ - "docker", - "run", - "--rm", - "--volume", - f"{tmp_path / 'base_path' / 'build' / 'first-app' / 'linux' / 'appimage'}:/app:z", - "--volume", - f"{tmp_path / 'briefcase'}:/home/brutus/.cache/briefcase:z", - "briefcase/com.example.first-app:py3.X", - "python3.X", - "-u", - "-m", - "pip", - "install", - "--upgrade", - "--no-user", - "--target=/app/path/to/app_packages", - "foo==1.2.3", - "bar>=4.5", - ], - check=True, - ) - - # The local requirements path exists, but is empty - local_requirements_path = create_command.local_requirements_path(first_app_config) - assert local_requirements_path.exists() - assert len(list(local_requirements_path.iterdir())) == 0 - - -@pytest.mark.skipif( - sys.platform == "win32", reason="Windows paths aren't converted in Docker context" -) -def test_install_app_requirements_no_docker( - no_docker_create_command, - first_app_config, - tmp_path, -): - """If docker is *not* in use, calls are made on raw subprocess.""" - # Verify the tools; this should operate in the non-docker context - no_docker_create_command.verify_tools() - no_docker_create_command.verify_app_tools(first_app_config) - - # Install requirements - no_docker_create_command.install_app_requirements(first_app_config, test_mode=False) - - # Docker is not verified. 
- assert not hasattr(no_docker_create_command.tools, "docker") - - # Subprocess is used for app_context - assert isinstance( - no_docker_create_command.tools[first_app_config].app_context, Subprocess - ) - assert ( - no_docker_create_command.tools[first_app_config].app_context - is no_docker_create_command.tools.subprocess - ) - - # pip was invoked natively - no_docker_create_command.tools[ - first_app_config - ].app_context.run.assert_called_once_with( - [ - sys.executable, - "-u", - "-m", - "pip", - "install", - "--upgrade", - "--no-user", - f"--target={tmp_path}/base_path/build/first-app/linux/appimage/path/to/app_packages", - "foo==1.2.3", - "bar>=4.5", - ], - check=True, - ) - - # The local requirements path exists, but is empty - local_requirements_path = no_docker_create_command.local_requirements_path( - first_app_config - ) - assert local_requirements_path.exists() - assert len(list(local_requirements_path.iterdir())) == 0 - - -@pytest.mark.skipif( - sys.platform == "win32", reason="Windows paths aren't converted in Docker context" -) -def test_install_app_requirements_with_locals( - create_command, - first_app_config, - tmp_path, - first_package, # A local folder to be built - second_package, # A pre-built sdist - third_package, # A pre-built wheel - other_package, # A stale local requirement -): - """If the app has local requirements, they are compiled into sdists for - installation.""" - # Add local requirements - first_app_config.requires.extend([first_package, second_package, third_package]) - - # Mock the side effect of building an sdist - def build_sdist(*args, **kwargs): - # Extract the folder name; assume that's the name of the package - name = Path(args[0][-1]).name - create_tgz_file( - create_command.local_requirements_path(first_app_config) - / f"{name}-1.2.3.tar.gz", - content=[ - ("setup.py", "Python config"), - ("local.py", "Python source"), - ], - ) - - create_command.tools.subprocess.check_output.side_effect = build_sdist - - # Install 
requirements - create_command.install_app_requirements(first_app_config, test_mode=False) - - # An sdist was built for the local package - create_command.tools.subprocess.check_output.assert_called_once_with( - [ - sys.executable, - "-m", - "build", - "--sdist", - "--outdir", - tmp_path - / "base_path" - / "build" - / "first-app" - / "linux" - / "appimage" - / "_requirements", - str(tmp_path / "local" / "first"), - ] - ) - - # An attempt was made to copy the prebuilt packages - create_command.tools.shutil.copy.mock_calls = [ - call( - str(tmp_path / "local" / "second-2.3.4.tar.gz"), - tmp_path - / "base_path" - / "build" - / "first-app" - / "linux" - / "appimage" - / "_requirements", - ), - call( - str(tmp_path / "local" / "third-3.4.5-py3-none-any.whl"), - tmp_path - / "base_path" - / "build" - / "first-app" - / "linux" - / "appimage" - / "_requirements", - ), - ] - - # pip was invoked inside docker. - create_command.tools.subprocess.run.assert_called_once_with( - [ - "docker", - "run", - "--rm", - "--volume", - f"{tmp_path / 'base_path' / 'build' / 'first-app' / 'linux' / 'appimage'}:/app:z", - "--volume", - f"{tmp_path / 'briefcase'}:/home/brutus/.cache/briefcase:z", - "briefcase/com.example.first-app:py3.X", - "python3.X", - "-u", - "-m", - "pip", - "install", - "--upgrade", - "--no-user", - "--target=/app/path/to/app_packages", - "foo==1.2.3", - "bar>=4.5", - "/app/_requirements/first-1.2.3.tar.gz", - "/app/_requirements/second-2.3.4.tar.gz", - "/app/_requirements/third-3.4.5-py3-none-any.whl", - ], - check=True, - ) - - # The local requirements path exists, and contains the compiled sdist, the - # pre-existing sdist, and the pre-existing wheel; the old requirement has - # been purged. 
- local_requirements_path = create_command.local_requirements_path(first_app_config) - assert local_requirements_path.exists() - assert [f.name for f in sorted(local_requirements_path.iterdir())] == [ - "first-1.2.3.tar.gz", - "second-2.3.4.tar.gz", - "third-3.4.5-py3-none-any.whl", - ] - - -@pytest.mark.skipif( - sys.platform == "win32", reason="Windows paths aren't converted in Docker context" -) -def test_install_app_requirements_with_bad_local( - create_command, - first_app_config, - tmp_path, - first_package, # A local folder to be built - other_package, # A stale local requirement -): - """If the app has local requirement that can't be built, an error is raised.""" - # Add a local requirement - first_app_config.requires.append(first_package) - - # Mock the building an sdist raising an error - create_command.tools.subprocess.check_output.side_effect = ( - subprocess.CalledProcessError( - cmd=["python", "-m", "build", "..."], returncode=1 - ) - ) - - # Install requirements - with pytest.raises( - BriefcaseCommandError, - match=r"Unable to build sdist for .*/local/first", - ): - create_command.install_app_requirements(first_app_config, test_mode=False) - - # An attempt to build the sdist was made - create_command.tools.subprocess.check_output.assert_called_once_with( - [ - sys.executable, - "-m", - "build", - "--sdist", - "--outdir", - tmp_path - / "base_path" - / "build" - / "first-app" - / "linux" - / "appimage" - / "_requirements", - str(tmp_path / "local" / "first"), - ] - ) - - # pip was *not* invoked inside docker. - create_command.tools.subprocess.run.assert_not_called() - - # The local requirements path exists, and is empty. It has been purged, but not refilled. 
- local_requirements_path = create_command.local_requirements_path(first_app_config) - assert local_requirements_path.exists() - assert len(list(local_requirements_path.iterdir())) == 0 - - -@pytest.mark.skipif( - sys.platform == "win32", reason="Windows paths aren't converted in Docker context" -) -def test_install_app_requirements_with_missing_local_build( - create_command, - first_app_config, - tmp_path, -): - """If the app references a requirement that needs to be built, but is missing, an - error is raised.""" - # Define a local requirement, but don't create the files it points at - first_app_config.requires.append(str(tmp_path / "local" / "first")) - - # Install requirements - with pytest.raises( - BriefcaseCommandError, - match=r"Unable to find local requirement .*/local/first", - ): - create_command.install_app_requirements(first_app_config, test_mode=False) - - # No attempt to build the sdist was made - create_command.tools.subprocess.check_output.assert_not_called() - - # pip was *not* invoked inside docker. - create_command.tools.subprocess.run.assert_not_called() - - # The local requirements path exists, and is empty. It has been purged, but not refilled. 
- local_requirements_path = create_command.local_requirements_path(first_app_config) - assert local_requirements_path.exists() - assert len(list(local_requirements_path.iterdir())) == 0 - - -@pytest.mark.skipif( - sys.platform == "win32", reason="Windows paths aren't converted in Docker context" -) -def test_install_app_requirements_with_bad_local_file( - create_command, - first_app_config, - tmp_path, -): - """If the app references a local requirement file that doesn't exist, an error is - raised.""" - # Add a local requirement that doesn't exist - first_app_config.requires.append(str(tmp_path / "local" / "missing-2.3.4.tar.gz")) - - # Install requirements - with pytest.raises( - BriefcaseCommandError, - match=r"Unable to find local requirement .*/local/missing-2.3.4.tar.gz", - ): - create_command.install_app_requirements(first_app_config, test_mode=False) - - # An attempt was made to copy the package - create_command.tools.shutil.copy.assert_called_once_with( - str(tmp_path / "local" / "missing-2.3.4.tar.gz"), - tmp_path - / "base_path" - / "build" - / "first-app" - / "linux" - / "appimage" - / "_requirements", - ) - - # No attempt was made to build the sdist - create_command.tools.subprocess.check_output.assert_not_called() - - # pip was *not* invoked inside docker. - create_command.tools.subprocess.run.assert_not_called() - - # The local requirements path exists, and is empty. It has been purged, but not refilled. 
- local_requirements_path = create_command.local_requirements_path(first_app_config) - assert local_requirements_path.exists() - assert len(list(local_requirements_path.iterdir())) == 0 + assert create_command.support_package_url(revision) == expected_url diff --git a/tests/platforms/linux/appimage/test_run.py b/tests/platforms/linux/appimage/test_run.py index 7aaaaf12c..83480be3d 100644 --- a/tests/platforms/linux/appimage/test_run.py +++ b/tests/platforms/linux/appimage/test_run.py @@ -34,6 +34,8 @@ def run_command(tmp_path): def test_unsupported_host_os(run_command, host_os): """Error raised for an unsupported OS.""" run_command.tools.host_os = host_os + # Mock the existence of a single app + run_command.apps = {"app": None} with pytest.raises( UnsupportedHostError, diff --git a/tests/platforms/linux/os_release.py b/tests/platforms/linux/os_release.py new file mode 100644 index 000000000..a92ef85f9 --- /dev/null +++ b/tests/platforms/linux/os_release.py @@ -0,0 +1,228 @@ +# Sample data that is the literal content of /etc/os-release +# for each release, keyed by Docker image (or equivalent) +OS_RELEASE = { + ####################################################### + # RHEL 8.7 + ####################################################### + "rhel/ubi8:8.7": """\ +NAME="Red Hat Enterprise Linux" +VERSION="8.7 (Ootpa)" +ID="rhel" +ID_LIKE="fedora" +VERSION_ID="8.7" +PLATFORM_ID="platform:el8" +PRETTY_NAME="Red Hat Enterprise Linux 8.7 (Ootpa)" +ANSI_COLOR="0;31" +CPE_NAME="cpe:/o:redhat:enterprise_linux:8::baseos" +HOME_URL="https://www.redhat.com/" +DOCUMENTATION_URL="https://access.redhat.com/documentation/red_hat_enterprise_linux/8/" +BUG_REPORT_URL="https://bugzilla.redhat.com/" + +REDHAT_BUGZILLA_PRODUCT="Red Hat Enterprise Linux 8" +REDHAT_BUGZILLA_PRODUCT_VERSION=8.7 +REDHAT_SUPPORT_PRODUCT="Red Hat Enterprise Linux" +REDHAT_SUPPORT_PRODUCT_VERSION="8.7" +""", + ####################################################### + # Fedora 37 + 
####################################################### + "fedora:37": """\ +NAME="Fedora Linux" +VERSION="37 (Container Image)" +ID=fedora +VERSION_ID=37 +VERSION_CODENAME="" +PLATFORM_ID="platform:f37" +PRETTY_NAME="Fedora Linux 37 (Container Image)" +ANSI_COLOR="0;38;2;60;110;180" +LOGO=fedora-logo-icon +CPE_NAME="cpe:/o:fedoraproject:fedora:37" +DEFAULT_HOSTNAME="fedora" +HOME_URL="https://fedoraproject.org/" +DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora/f37/system-administrators-guide/" +SUPPORT_URL="https://ask.fedoraproject.org/" +BUG_REPORT_URL="https://bugzilla.redhat.com/" +REDHAT_BUGZILLA_PRODUCT="Fedora" +REDHAT_BUGZILLA_PRODUCT_VERSION=37 +REDHAT_SUPPORT_PRODUCT="Fedora" +REDHAT_SUPPORT_PRODUCT_VERSION=37 +SUPPORT_END=2023-11-14 +VARIANT="Container Image" +VARIANT_ID=container +""", + ####################################################### + # AlmaLinux 9.1 + ####################################################### + "almalinux:9.1": """\ +NAME="AlmaLinux" +VERSION="9.1 (Lime Lynx)" +ID="almalinux" +ID_LIKE="rhel centos fedora" +VERSION_ID="9.1" +PLATFORM_ID="platform:el9" +PRETTY_NAME="AlmaLinux 9.1 (Lime Lynx)" +ANSI_COLOR="0;34" +LOGO="fedora-logo-icon" +CPE_NAME="cpe:/o:almalinux:almalinux:9::baseos" +HOME_URL="https://almalinux.org/" +DOCUMENTATION_URL="https://wiki.almalinux.org/" +BUG_REPORT_URL="https://bugs.almalinux.org/" + +ALMALINUX_MANTISBT_PROJECT="AlmaLinux-9" +ALMALINUX_MANTISBT_PROJECT_VERSION="9.1" +REDHAT_SUPPORT_PRODUCT="AlmaLinux" +REDHAT_SUPPORT_PRODUCT_VERSION="9.1" +""", + ####################################################### + # Centos 8 + ####################################################### + "centos:8": """\ +NAME="CentOS Linux" +VERSION="8" +ID="centos" +ID_LIKE="rhel fedora" +VERSION_ID="8" +PLATFORM_ID="platform:el8" +PRETTY_NAME="CentOS Linux 8" +ANSI_COLOR="0;31" +CPE_NAME="cpe:/o:centos:centos:8" +HOME_URL="https://centos.org/" +BUG_REPORT_URL="https://bugs.centos.org/" 
+CENTOS_MANTISBT_PROJECT="CentOS-8" +CENTOS_MANTISBT_PROJECT_VERSION="8" +""", + ####################################################### + # OpenSuSE Leap 15.4 + ####################################################### + "opensuse/leap:15.4": """\ +NAME="openSUSE Leap" +VERSION="15.4" +ID="opensuse-leap" +ID_LIKE="suse opensuse" +VERSION_ID="15.4" +PRETTY_NAME="openSUSE Leap 15.4" +ANSI_COLOR="0;32" +CPE_NAME="cpe:/o:opensuse:leap:15.4" +BUG_REPORT_URL="https://bugs.opensuse.org" +HOME_URL="https://www.opensuse.org/" +DOCUMENTATION_URL="https://en.opensuse.org/Portal:Leap" +LOGO="distributor-logo-Leap" +""", + ####################################################### + # OpenSuse Tumbleweed (20230304) + ####################################################### + "opensuse/tumbleweed:latest": """\ +NAME="openSUSE Tumbleweed" +# VERSION="20230304" +ID="opensuse-tumbleweed" +ID_LIKE="opensuse suse" +VERSION_ID="20230304" +PRETTY_NAME="openSUSE Tumbleweed" +ANSI_COLOR="0;32" +CPE_NAME="cpe:/o:opensuse:tumbleweed:20230304" +BUG_REPORT_URL="https://bugs.opensuse.org" +HOME_URL="https://www.opensuse.org/" +DOCUMENTATION_URL="https://en.opensuse.org/Portal:Tumbleweed" +LOGO="distributor-logo-Tumbleweed" +""", + ####################################################### + # Debian 11 + ####################################################### + "debian:11": """\ +PRETTY_NAME="Debian GNU/Linux 11 (bullseye)" +NAME="Debian GNU/Linux" +VERSION_ID="11" +VERSION="11 (bullseye)" +VERSION_CODENAME=bullseye +ID=debian +HOME_URL="https://www.debian.org/" +SUPPORT_URL="https://www.debian.org/support" +BUG_REPORT_URL="https://bugs.debian.org/" +""", + ####################################################### + # Ubuntu 22.04 (Jammy Jellyfish) + ####################################################### + "ubuntu:22.04": """\ +PRETTY_NAME="Ubuntu 22.04.2 LTS" +NAME="Ubuntu" +VERSION_ID="22.04" +VERSION="22.04.2 LTS (Jammy Jellyfish)" +VERSION_CODENAME=jammy +ID=ubuntu +ID_LIKE=debian 
+HOME_URL="https://www.ubuntu.com/" +SUPPORT_URL="https://help.ubuntu.com/" +BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/" +PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy" +UBUNTU_CODENAME=jammy +""", + ####################################################### + # Pop! OS 22.04 + ####################################################### + "pop:22.04": """\ +NAME="Pop!_OS" +VERSION="22.04 LTS" +ID=pop +ID_LIKE="ubuntu debian" +PRETTY_NAME="Pop!_OS 22.04 LTS" +VERSION_ID="22.04" +HOME_URL="https://pop.system76.com" +SUPPORT_URL="https://support.system76.com" +BUG_REPORT_URL="https://github.com/pop-os/pop/issues" +PRIVACY_POLICY_URL="https://system76.com/privacy" +VERSION_CODENAME=jammy +UBUNTU_CODENAME=jammy +LOGO=distributor-logo-pop-os +""", + ####################################################### + # LinuxMint 19.2 + ####################################################### + "linuxmint:19.2": """\ +NAME="Linux Mint" +VERSION="19.2 (Tina)" +ID=linuxmint +ID_LIKE=ubuntu +PRETTY_NAME="Linux Mint 19.2" +VERSION_ID="19.2" +HOME_URL="https://www.linuxmint.com/" +SUPPORT_URL="https://support.ubnutu.com/" +BUG_REPORT_URL="https://linuxmint-troubleshooting-guide.readthedocs.io/en/latest" +PRIVACY_POLICY_URL="https://www.linuxmint.com/" +VERSION_CODENAME=tina +UBUNTU_CODENAME=bionic +""", + ####################################################### + # Arch Linux (2023-03-03) + ####################################################### + "archlinux:latest": """\ +NAME="Arch Linux" +PRETTY_NAME="Arch Linux" +ID=arch +BUILD_ID=rolling +VERSION_ID=TEMPLATE_VERSION_ID +ANSI_COLOR="38;2;23;147;209" +HOME_URL="https://archlinux.org/" +DOCUMENTATION_URL="https://wiki.archlinux.org/" +SUPPORT_URL="https://bbs.archlinux.org/" +BUG_REPORT_URL="https://bugs.archlinux.org/" +PRIVACY_POLICY_URL="https://terms.archlinux.org/docs/privacy-policy/" +LOGO=archlinux-logo +""", + ####################################################### + # Manjaro Linux 
(2023-03-03) + ####################################################### + "manjaro/base:latest": """\ +NAME="Manjaro Linux" +PRETTY_NAME="Manjaro Linux" +ID=manjaro +ID_LIKE=arch +BUILD_ID=rolling +ANSI_COLOR="32;1;24;144;200" +HOME_URL="https://manjaro.org/" +DOCUMENTATION_URL="https://wiki.manjaro.org/" +SUPPORT_URL="https://forum.manjaro.org/" +BUG_REPORT_URL="https://docs.manjaro.org/reporting-bugs/" +PRIVACY_POLICY_URL="https://manjaro.org/privacy-policy/" +LOGO=manjarolinux +""", +} diff --git a/tests/platforms/linux/system/__init__.py b/tests/platforms/linux/system/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/platforms/linux/system/conftest.py b/tests/platforms/linux/system/conftest.py new file mode 100644 index 000000000..e8d6220b8 --- /dev/null +++ b/tests/platforms/linux/system/conftest.py @@ -0,0 +1,58 @@ +import pytest + +from briefcase.console import Console, Log +from briefcase.platforms.linux.system import LinuxSystemCreateCommand + +from ....utils import create_file + + +@pytest.fixture +def create_command(tmp_path): + return LinuxSystemCreateCommand( + logger=Log(), + console=Console(), + base_path=tmp_path / "base_path", + data_path=tmp_path / "briefcase", + ) + + +@pytest.fixture +def first_app(first_app_config, tmp_path): + """A fixture for the first app, rolled out on disk.""" + # Specify a system python app for a dummy vendor + first_app_config.target_vendor = "somevendor" + first_app_config.target_codename = "surprising" + first_app_config.target_vendor_base = "basevendor" + + # Some project-level files. 
+ create_file(tmp_path / "base_path" / "LICENSE", "First App License") + create_file(tmp_path / "base_path" / "CHANGELOG", "First App Changelog") + + # Make it look like the template has been generated + bundle_dir = ( + tmp_path / "base_path" / "build" / "first-app" / "somevendor" / "surprising" + ) + + create_file(bundle_dir / "first-app.1", "First App manpage") + + lib_dir = bundle_dir / "first-app-0.0.1" / "usr" / "lib" / "first-app" + (lib_dir / "app").mkdir(parents=True, exist_ok=True) + (lib_dir / "app_packages" / "firstlib").mkdir(parents=True, exist_ok=True) + (lib_dir / "app_packages" / "secondlib").mkdir(parents=True, exist_ok=True) + + # Create some .so files + # An SO file with 775 permissions + (lib_dir / "app" / "support.so").touch() + (lib_dir / "app" / "support.so").chmod(0o775) + + # A SO file with both .so and .so.1.0 forms + (lib_dir / "app_packages" / "firstlib" / "first.so").touch() + (lib_dir / "app_packages" / "firstlib" / "first.so.1.0").touch() + + # An SO file with 664 permissions + (lib_dir / "app_packages" / "secondlib" / "second_a.so").touch() + (lib_dir / "app_packages" / "secondlib" / "second_a.so").chmod(0o664) + + (lib_dir / "app_packages" / "secondlib" / "second_b.so").touch() + + return first_app_config diff --git a/tests/platforms/linux/system/test_build.py b/tests/platforms/linux/system/test_build.py new file mode 100644 index 000000000..670f4b6c3 --- /dev/null +++ b/tests/platforms/linux/system/test_build.py @@ -0,0 +1,200 @@ +import gzip +import os +import subprocess +import sys +from unittest import mock + +import pytest + +from briefcase.console import Console, Log +from briefcase.exceptions import BriefcaseCommandError +from briefcase.platforms.linux.system import LinuxSystemBuildCommand + + +@pytest.fixture +def build_command(tmp_path, first_app): + command = LinuxSystemBuildCommand( + logger=Log(), + console=Console(), + base_path=tmp_path / "base_path", + data_path=tmp_path / "briefcase", + apps={"first": first_app}, + 
)
+    command.tools.host_os = "Linux"
+    command.tools.host_arch = "wonky"
+
+    # Mock subprocess
+    command.tools.subprocess = mock.MagicMock()
+
+    # Mock the app context
+    command.tools.app_tools[first_app].app_context = mock.MagicMock()
+
+    return command
+
+
+@pytest.mark.skipif(sys.platform == "win32", reason="Can't build Linux apps on Windows")
+def test_build_app(build_command, first_app, tmp_path):
+    """An app can be built as a deb."""
+    # Build the app
+    build_command.build_app(first_app)
+
+    # The bootstrap binary was compiled
+    bundle_path = (
+        tmp_path / "base_path" / "build" / "first-app" / "somevendor" / "surprising"
+    )
+    build_command.tools[first_app].app_context.run.assert_called_with(
+        ["make", "-C", "bootstrap", "install"],
+        check=True,
+        cwd=bundle_path,
+    )
+
+    # The license file has been installed
+    doc_path = bundle_path / "first-app-0.0.1" / "usr" / "share" / "doc" / "first-app"
+    assert (doc_path / "copyright").exists()
+    with (doc_path / "copyright").open() as f:
+        assert f.read() == "First App License"
+
+    # The Changelog has been compressed and installed
+    assert (doc_path / "changelog.gz").exists()
+    with gzip.open(doc_path / "changelog.gz") as f:
+        assert f.read().decode() == "First App Changelog"
+
+    # The manpage has been installed
+    man_path = bundle_path / "first-app-0.0.1" / "usr" / "share" / "man" / "man1"
+    assert (man_path / "first-app.1.gz").exists()
+    with gzip.open(man_path / "first-app.1.gz") as f:
+        assert f.read().decode() == "First App manpage"
+
+    # Problematic permissions have been updated
+    lib_dir = bundle_path / "first-app-0.0.1" / "usr" / "lib" / "first-app"
+    # 775 -> 755
+    assert os.stat(lib_dir / "app" / "support.so").st_mode & 0o777 == 0o755
+    # 664 -> 644
+    assert (
+        os.stat(lib_dir / "app_packages" / "secondlib" / "second_a.so").st_mode & 0o777
+        == 0o644
+    )
+
+    # Strip has been invoked on the binary
+    build_command.tools.subprocess.check_output.assert_called_once_with(
+        [
+            "strip",
+            bundle_path / "first-app-0.0.1" / "usr" / "bin" / "first-app",
+        ]
+    )
+
+
+def test_build_bootstrap_failed(build_command, first_app, tmp_path):
+    """If the bootstrap binary can't be compiled, an error is raised."""
+    # Mock a build failure
+    build_command.tools[
+        first_app
+    ].app_context.run.side_effect = subprocess.CalledProcessError(
+        cmd=["make ..."], returncode=-1
+    )
+
+    # Build the app; it will fail
+    with pytest.raises(
+        BriefcaseCommandError,
+        match=r"Error building bootstrap binary for first-app.",
+    ):
+        build_command.build_app(first_app)
+
+    # An attempt to do the compile occurred.
+    bundle_path = (
+        tmp_path / "base_path" / "build" / "first-app" / "somevendor" / "surprising"
+    )
+    build_command.tools[first_app].app_context.run.assert_called_with(
+        ["make", "-C", "bootstrap", "install"],
+        check=True,
+        cwd=bundle_path,
+    )
+
+
+def test_missing_license(build_command, first_app, tmp_path):
+    """If the license source file is missing, an error is raised."""
+    bundle_path = (
+        tmp_path / "base_path" / "build" / "first-app" / "somevendor" / "surprising"
+    )
+
+    # Delete the license source
+    (tmp_path / "base_path" / "LICENSE").unlink()
+
+    # Build the app; it will fail
+    with pytest.raises(
+        BriefcaseCommandError,
+        match=r"Your project does not contain a LICENSE file.",
+    ):
+        build_command.build_app(first_app)
+
+    # The bootstrap binary was compiled
+    build_command.tools[first_app].app_context.run.assert_called_with(
+        ["make", "-C", "bootstrap", "install"],
+        check=True,
+        cwd=bundle_path,
+    )
+
+
+def test_missing_changelog(build_command, first_app, tmp_path):
+    """If the changelog source file is missing, an error is raised."""
+    bundle_path = (
+        tmp_path / "base_path" / "build" / "first-app" / "somevendor" / "surprising"
+    )
+
+    # Delete the changelog source
+    (tmp_path / "base_path" / "CHANGELOG").unlink()
+
+    # Build the app; it will fail
+    with pytest.raises(
+        BriefcaseCommandError,
+        match=r"Your project does not contain a CHANGELOG file.",
+    ):
+        
build_command.build_app(first_app) + + # The bootstrap binary was compiled + build_command.tools[first_app].app_context.run.assert_called_with( + ["make", "-C", "bootstrap", "install"], + check=True, + cwd=bundle_path, + ) + + # The license file has been installed + doc_path = bundle_path / "first-app-0.0.1" / "usr" / "share" / "doc" / "first-app" + assert (doc_path / "copyright").exists() + with (doc_path / "copyright").open() as f: + assert f.read() == "First App License" + + +def test_missing_manpage(build_command, first_app, tmp_path): + """If the manpage source file is missing, an error is raised.""" + bundle_path = ( + tmp_path / "base_path" / "build" / "first-app" / "somevendor" / "surprising" + ) + + # Delete the manpage source + (bundle_path / "first-app.1").unlink() + + # Build the app; it will fail + with pytest.raises( + BriefcaseCommandError, + match=r"Template does not provide a manpage source file `first-app\.1`", + ): + build_command.build_app(first_app) + + # The bootstrap binary was compiled + build_command.tools[first_app].app_context.run.assert_called_with( + ["make", "-C", "bootstrap", "install"], + check=True, + cwd=bundle_path, + ) + + # The license file has been installed + doc_path = bundle_path / "first-app-0.0.1" / "usr" / "share" / "doc" / "first-app" + assert (doc_path / "copyright").exists() + with (doc_path / "copyright").open() as f: + assert f.read() == "First App License" + + # The Changelog has been compressed and installed + assert (doc_path / "changelog.gz").exists() + with gzip.open(doc_path / "changelog.gz") as f: + assert f.read().decode() == "First App Changelog" diff --git a/tests/platforms/linux/system/test_create.py b/tests/platforms/linux/system/test_create.py new file mode 100644 index 000000000..c78e3d1a9 --- /dev/null +++ b/tests/platforms/linux/system/test_create.py @@ -0,0 +1,90 @@ +import pytest + +from briefcase.exceptions import UnsupportedHostError + + +def test_default_options(create_command): + """The default 
options are as expected.""" + options = create_command.parse_options([]) + + assert options == {} + + assert create_command.target_image is None + + +def test_options(create_command): + """The extra options can be parsed.""" + options = create_command.parse_options(["--target", "somevendor:surprising"]) + + assert options == {} + + assert create_command.target_image == "somevendor:surprising" + + +@pytest.mark.parametrize("host_os", ["Windows", "WeirdOS"]) +def test_unsupported_host_os_with_docker(create_command, host_os, tmp_path): + """Error raised for an unsupported OS when using Docker.""" + create_command.target_image = "somevendor:surprising" + create_command.tools.host_os = host_os + + with pytest.raises( + UnsupportedHostError, + match="Linux system projects can only be built on Linux, or on macOS using Docker.", + ): + create_command() + + +@pytest.mark.parametrize("host_os", ["Darwin", "Windows", "WeirdOS"]) +def test_unsupported_host_os_without_docker(create_command, host_os, tmp_path): + """Error raised for an unsupported OS when not using Docker.""" + create_command.target_image = None + create_command.tools.host_os = host_os + + with pytest.raises( + UnsupportedHostError, + match="Linux system projects can only be built on Linux, or on macOS using Docker.", + ): + create_command() + + +def test_supported_host_os_docker(create_command): + """If using Docker on a supported host, no error is raised""" + create_command.target_image = "somevendor:surprising" + create_command.tools.host_os = "Linux" + + # Verify the host + create_command.verify_host() + + +def test_supported_host_os_without_docker(create_command): + """If not using Docker on a supported host, no error is raised""" + create_command.target_image = None + create_command.tools.host_os = "Linux" + + # Verify the host + create_command.verify_host() + + +def test_output_format_template_context(create_command, first_app_config): + "The template context contains additional deb-specific properties" + 
# Add some properties defined in config finalization + first_app_config.python_version_tag = "3.X" + first_app_config.target_image = "somevendor:surprising" + first_app_config.target_vendor = "somevendor" + first_app_config.target_codename = "surprising" + first_app_config.target_vendor_base = "basevendor" + first_app_config.glibc_version = "2.42" + + # Add a long description + first_app_config.long_description = "This is a long\ndescription." + + # Generate the context + context = create_command.output_format_template_context(first_app_config) + + # Context extras are what we expect + assert context == { + "format": "surprising", + "python_version": "3.X", + "docker_base_image": "somevendor:surprising", + "vendor_base": "basevendor", + } diff --git a/tests/platforms/linux/system/test_mixin__finalize_app_config.py b/tests/platforms/linux/system/test_mixin__finalize_app_config.py new file mode 100644 index 000000000..40d5df084 --- /dev/null +++ b/tests/platforms/linux/system/test_mixin__finalize_app_config.py @@ -0,0 +1,413 @@ +import sys +from unittest.mock import MagicMock + +import pytest + +from briefcase.exceptions import BriefcaseCommandError +from briefcase.platforms.linux import parse_freedesktop_os_release + +from ....utils import create_file + + +def test_docker(create_command, first_app_config): + "An app can be finalized inside docker" + # Build the app on a specific target + create_command.target_image = "somevendor:surprising" + create_command.tools.docker = MagicMock() + create_command.target_glibc_version = MagicMock(return_value="2.42") + + # Mock a minimal response from checking /etc/os-release + create_command.tools.docker.check_output.return_value = "\n".join( + [ + "ID=somevendor", + "VERSION_CODENAME=surprising", + "ID_LIKE=debian", + ] + ) + + # Finalize the app config + create_command.finalize_app_config(first_app_config) + + # The base image has been prepared + 
create_command.tools.docker.prepare.assert_called_once_with("somevendor:surprising")
+
+    # The app's image, vendor and codename have been constructed from the target image
+    assert first_app_config.target_image == "somevendor:surprising"
+    assert first_app_config.target_vendor == "somevendor"
+    assert first_app_config.target_codename == "surprising"
+    assert first_app_config.target_vendor_base == "debian"
+
+    # For tests of other properties merged in finalization, see
+    # test_properties
+
+
+def test_nodocker(create_command, first_app_config, tmp_path):
+    "An app can be finalized without docker"
+    # Build the app without docker
+    create_command.target_image = None
+    create_command.target_glibc_version = MagicMock(return_value="2.42")
+
+    os_release = "\n".join(
+        [
+            "ID=somevendor",
+            "VERSION_CODENAME=surprising",
+            "ID_LIKE=debian",
+        ]
+    )
+    if sys.version_info >= (3, 10):
+        # mock platform.freedesktop_os_release()
+        create_command.tools.platform.freedesktop_os_release = MagicMock(
+            return_value=parse_freedesktop_os_release(os_release)
+        )
+    else:
+        # For Pre Python3.10, mock the /etc/os-release file
+        create_file(tmp_path / "os-release", os_release)
+        create_command.tools.ETC_OS_RELEASE = tmp_path / "os-release"
+
+    # Finalize the app config
+    create_command.finalize_app_config(first_app_config)
+
+    # The app's image, vendor and codename have been constructed from the target image
+    assert first_app_config.target_image == "somevendor:surprising"
+    assert first_app_config.target_vendor == "somevendor"
+    assert first_app_config.target_codename == "surprising"
+    assert first_app_config.target_vendor_base == "debian"
+
+    # For tests of other properties merged in finalization, see
+    # test_properties
+
+
+def test_nodocker_non_freedesktop(create_command, first_app_config, tmp_path):
+    "If the system isn't FreeDesktop compliant raise an error"
+    # Build the app without docker
+    create_command.target_image = None
+    create_command.target_glibc_version = MagicMock(return_value="2.42")
+
+    if sys.version_info >= (3, 10):
+        # mock platform.freedesktop_os_release()
+        create_command.tools.platform.freedesktop_os_release = MagicMock(
+            side_effect=FileNotFoundError
+        )
+    else:
+        # For Pre Python3.10, mock the /etc/os-release file
+        # but don't create the file
+        create_command.tools.ETC_OS_RELEASE = tmp_path / "os-release"
+
+    # Finalize the app config
+    with pytest.raises(
+        BriefcaseCommandError,
+        match=r"Could not find the /etc/os-release file. Is this a FreeDesktop-compliant Linux distribution\?",
+    ):
+        create_command.finalize_app_config(first_app_config)
+
+
+def test_properties(create_command, first_app_config):
+    """The final app config is the result of merging target properties, plus other derived properties."""
+    # Run this test as "docker"; however, the things we're testing aren't docker specific.
+    create_command.target_image = "somevendor:surprising"
+    create_command.tools.docker = MagicMock()
+    create_command.target_glibc_version = MagicMock(return_value="2.42")
+
+    # Mock a minimal response from checking /etc/os-release
+    create_command.tools.docker.check_output.return_value = "\n".join(
+        [
+            "ID=somevendor",
+            "VERSION_CODENAME=surprising",
+            "ID_LIKE=debian",
+        ]
+    )
+
+    # Augment the app config with some extra attributes
+    first_app_config.surprise_0 = "AAAA"
+    first_app_config.surprise_1 = "BBBB"
+    first_app_config.surprise_2 = "CCCC"
+    first_app_config.surprise_3 = "DDDD"
+
+    first_app_config.debian = {
+        "surprise_1": "1111",
+        "surprise_2": "1112",
+        "surprise_3": "1113",
+        "surprise_4": "1114",
+    }
+    first_app_config.somevendor = {
+        "surprise_2": "2222",
+        "surprise_3": "2223",
+        "surprise_5": "2225",
+        # A version config that will override
+        "surprising": {
+            "surprise_3": "3333",
+            "surprise_6": "3336",
+        },
+        # A version config that will be ignored
+        "normal": {
+            "surprise_1": "XXXX",
+        },
+    }
+    # A different vendor and version that will be ignored
+    first_app_config.ubuntu = {
+        "surprise_1": 
"YYYY", + "jammy": { + "surprise_1": "ZZZZ", + }, + } + + create_command.finalize_app_config(first_app_config) + + # The target's config attributes have been merged into the app + # Base app properties that aren't overwritten persist + assert first_app_config.surprise_0 == "AAAA" + # Properties can be overwritten at the base vendor level + assert first_app_config.surprise_1 == "1111" + # Properties can be overwritten at the vendor level + assert first_app_config.surprise_2 == "2222" + # Properties can be overwritten at the version level + assert first_app_config.surprise_3 == "3333" + + # New properties can be defined at the base vendor level + assert first_app_config.surprise_4 == "1114" + # New properties can be defined at the vendor level + assert first_app_config.surprise_5 == "2225" + # New properties can be defined at the version level + assert first_app_config.surprise_6 == "3336" + + # The glibc version was determined + assert first_app_config.glibc_version == "2.42" + + # Since it's system python, the python version is 3 + assert first_app_config.python_version_tag == "3" + + +def test_properties_unknown_basevendor(create_command, first_app_config): + """If the base vendor can't be identified, the merge still succeeds.""" + # Run this test as "docker"; however, the things we're testing aren't docker specific. 
+ create_command.target_image = "somevendor:surprising" + create_command.tools.docker = MagicMock() + create_command.target_glibc_version = MagicMock(return_value="2.42") + + # Mock a minimal response from checking /etc/os-release + create_command.tools.docker.check_output.return_value = "\n".join( + [ + "ID=somevendor", + "VERSION_CODENAME=surprising", + ] + ) + + # Augment the app config with some extra attributes + first_app_config.surprise_0 = "AAAA" + first_app_config.surprise_1 = "BBBB" + first_app_config.surprise_2 = "CCCC" + first_app_config.surprise_3 = "DDDD" + + first_app_config.somevendor = { + "surprise_2": "2222", + "surprise_3": "2223", + "surprise_5": "2225", + # A version config that will override + "surprising": { + "surprise_3": "3333", + "surprise_6": "3336", + }, + # A version config that will be ignored + "normal": { + "surprise_1": "XXXX", + }, + } + # A different vendor and version that will be ignored + first_app_config.ubuntu = { + "surprise_1": "YYYY", + "jammy": { + "surprise_1": "ZZZZ", + }, + } + + create_command.finalize_app_config(first_app_config) + + # The target's config attributes have been merged into the app + # Base app properties that aren't overwritten persist + assert first_app_config.surprise_0 == "AAAA" + assert first_app_config.surprise_1 == "BBBB" + # Properties can be overwritten at the vendor level + assert first_app_config.surprise_2 == "2222" + # Properties can be overwritten at the version level + assert first_app_config.surprise_3 == "3333" + + # The glibc version was determined + assert first_app_config.glibc_version == "2.42" + + # Since it's system python, the python version is 3 + assert first_app_config.python_version_tag == "3" + + +def test_properties_no_basevendor_config(create_command, first_app_config): + """If there's no basevendor config, the merge still succeeds.""" + # Run this test as "docker"; however, the things we're testing aren't docker specific. 
+ create_command.target_image = "somevendor:surprising" + create_command.tools.docker = MagicMock() + create_command.target_glibc_version = MagicMock(return_value="2.42") + + # Mock a minimal response from checking /etc/os-release + create_command.tools.docker.check_output.return_value = "\n".join( + [ + "ID=somevendor", + "VERSION_CODENAME=surprising", + "ID_LIKE=debian", + ] + ) + + # Augment the app config with some extra attributes + first_app_config.surprise_0 = "AAAA" + first_app_config.surprise_1 = "BBBB" + first_app_config.surprise_2 = "CCCC" + first_app_config.surprise_3 = "DDDD" + + first_app_config.somevendor = { + "surprise_2": "2222", + "surprise_3": "2223", + "surprise_5": "2225", + # A version config that will override + "surprising": { + "surprise_3": "3333", + "surprise_6": "3336", + }, + # A version config that will be ignored + "normal": { + "surprise_1": "XXXX", + }, + } + # A different vendor and version that will be ignored + first_app_config.ubuntu = { + "surprise_1": "YYYY", + "jammy": { + "surprise_1": "ZZZZ", + }, + } + + create_command.finalize_app_config(first_app_config) + + # The target's config attributes have been merged into the app + # Base app properties that aren't overwritten persist + assert first_app_config.surprise_0 == "AAAA" + assert first_app_config.surprise_1 == "BBBB" + # Properties can be overwritten at the vendor level + assert first_app_config.surprise_2 == "2222" + # Properties can be overwritten at the version level + assert first_app_config.surprise_3 == "3333" + + # The glibc version was determined + assert first_app_config.glibc_version == "2.42" + + # Since it's system python, the python version is 3 + assert first_app_config.python_version_tag == "3" + + +def test_properties_no_vendor(create_command, first_app_config): + """If there's no vendor-specific config, the merge succeeds""" + # Run this test as "docker"; however, the things we're testing aren't docker specific. 
+ create_command.target_image = "somevendor:surprising" + create_command.tools.docker = MagicMock() + create_command.target_glibc_version = MagicMock(return_value="2.42") + + # Mock a minimal response from checking /etc/os-release + create_command.tools.docker.check_output.return_value = "\n".join( + [ + "ID=somevendor", + "VERSION_CODENAME=surprising", + "ID_LIKE=debian", + ] + ) + + # Augment the app config with some extra attributes + first_app_config.surprise_0 = "AAAA" + first_app_config.surprise_1 = "BBBB" + first_app_config.surprise_2 = "CCCC" + first_app_config.surprise_3 = "DDDD" + + first_app_config.debian = { + "surprise_1": "1111", + "surprise_2": "1112", + "surprise_3": "1113", + } + # A different vendor and version that will be ignored + first_app_config.ubuntu = { + "surprise_1": "YYYY", + "jammy": { + "surprise_1": "ZZZZ", + }, + } + + create_command.finalize_app_config(first_app_config) + + # The target's config attributes have been merged into the app + # Base app properties that aren't overwritten persist + assert first_app_config.surprise_0 == "AAAA" + # Properties can be overwritten at the base vendor level + assert first_app_config.surprise_1 == "1111" + assert first_app_config.surprise_2 == "1112" + assert first_app_config.surprise_3 == "1113" + + # The glibc version was determined + assert first_app_config.glibc_version == "2.42" + + # Since it's system python, the python version is 3 + assert first_app_config.python_version_tag == "3" + + +def test_properties_no_version(create_command, first_app_config): + """If there's no version-specific config, the merge succeeds""" + # Run this test as "docker"; however, the things we're testing aren't docker specific. 
+ create_command.target_image = "somevendor:surprising" + create_command.tools.docker = MagicMock() + create_command.target_glibc_version = MagicMock(return_value="2.42") + + # Mock a minimal response from checking /etc/os-release + create_command.tools.docker.check_output.return_value = "\n".join( + [ + "ID=somevendor", + "VERSION_CODENAME=surprising", + "ID_LIKE=debian", + ] + ) + + # Augment the app config with some extra attributes + first_app_config.surprise_0 = "AAAA" + first_app_config.surprise_1 = "BBBB" + first_app_config.surprise_2 = "CCCC" + first_app_config.surprise_3 = "DDDD" + + first_app_config.debian = { + "surprise_1": "1111", + "surprise_2": "1112", + "surprise_3": "1113", + } + first_app_config.somevendor = { + "surprise_2": "2222", + "surprise_3": "2223", + # A version config that will be ignored + "normal": { + "surprise_1": "XXXX", + }, + } + # A different vendor and version that will be ignored + first_app_config.ubuntu = { + "surprise_1": "YYYY", + "jammy": { + "surprise_1": "ZZZZ", + }, + } + + create_command.finalize_app_config(first_app_config) + + # The target's config attributes have been merged into the app + # Base app properties that aren't overwritten persist + assert first_app_config.surprise_0 == "AAAA" + # Properties can be overwritten at the base vendor level + assert first_app_config.surprise_1 == "1111" + # Properties can be overwritten at the vendor level + assert first_app_config.surprise_2 == "2222" + assert first_app_config.surprise_3 == "2223" + + # The glibc version was determined + assert first_app_config.glibc_version == "2.42" + + # Since it's system python, the python version is 3 + assert first_app_config.python_version_tag == "3" diff --git a/tests/platforms/linux/system/test_mixin__properties.py b/tests/platforms/linux/system/test_mixin__properties.py new file mode 100644 index 000000000..35f40ee78 --- /dev/null +++ b/tests/platforms/linux/system/test_mixin__properties.py @@ -0,0 +1,185 @@ +import pytest + +from 
briefcase.console import Console, Log
+from briefcase.exceptions import BriefcaseCommandError
+from briefcase.platforms.linux.system import LinuxSystemBuildCommand
+
+
+@pytest.mark.parametrize(
+    "host_arch, linux_arch",
+    [
+        ("x86_64", "amd64"),
+        ("aarch64", "arm64"),
+        ("armv6l", "armhf"),
+    ],
+)
+def test_linux_arch(create_command, host_arch, linux_arch):
+    """Host architectures are transformed to Linux-accepted values."""
+    create_command.tools.host_arch = host_arch
+    assert create_command.linux_arch == linux_arch
+
+
+@pytest.mark.parametrize(
+    "vendor, codename",
+    [
+        ("ubuntu", "jammy"),
+        ("debian", "bullseye"),
+    ],
+)
+def test_build_path(
+    create_command,
+    first_app_config,
+    vendor,
+    codename,
+    tmp_path,
+):
+    """The build path contains vendor and Python source details."""
+    first_app_config.target_vendor = vendor
+    first_app_config.target_codename = codename
+
+    assert (
+        create_command.build_path(first_app_config)
+        == tmp_path / "base_path" / "build" / "first-app" / vendor
+    )
+
+
+@pytest.mark.parametrize(
+    "vendor, codename",
+    [
+        ("ubuntu", "jammy"),
+        ("debian", "bullseye"),
+    ],
+)
+def test_bundle_path(
+    create_command,
+    first_app_config,
+    vendor,
+    codename,
+    tmp_path,
+):
+    """The bundle path contains vendor and Python source details."""
+    first_app_config.target_vendor = vendor
+    first_app_config.target_codename = codename
+
+    assert (
+        create_command.bundle_path(first_app_config)
+        == tmp_path / "base_path" / "build" / "first-app" / vendor / codename
+    )
+
+
+def test_binary_path(create_command, first_app_config, tmp_path):
+    """The binary path contains vendor and Python source details."""
+    # Force the architecture to x86_64 for test purposes.
+    create_command.tools.host_arch = "x86_64"
+
+    # Force a dummy vendor:codename for test purposes.
+ first_app_config.target_vendor = "somevendor" + first_app_config.target_codename = "surprising" + + assert ( + create_command.binary_path(first_app_config) + == tmp_path + / "base_path" + / "build" + / "first-app" + / "somevendor" + / "surprising" + / "first-app-0.0.1" + / "usr" + / "bin" + / "first-app" + ) + + +@pytest.mark.parametrize( + "packaging_format, filename", + [("deb", "first-app_0.0.1-1~somevendor-surprising_amd64.deb")], +) +def test_distribution_path( + create_command, first_app_config, packaging_format, filename, tmp_path +): + """The distribution path contains vendor details.""" + # Force the architecture to x86_64 for test purposes. + create_command.tools.host_arch = "x86_64" + + # Force a dummy vendor:codename for test purposes. + first_app_config.target_vendor = "somevendor" + first_app_config.target_codename = "surprising" + first_app_config.packaging_format = packaging_format + + assert ( + create_command.distribution_path(first_app_config) + == tmp_path / "base_path" / "dist" / filename + ) + + +def test_distribution_path_unknown(create_command, first_app_config, tmp_path): + """If the packaging format isn't known, an error is raised.""" + # Force the architecture to x86_64 for test purposes. + create_command.tools.host_arch = "x86_64" + + # Force a dummy vendor:codename for test purposes. 
+ first_app_config.target_vendor = "somevendor" + first_app_config.target_codename = "surprising" + first_app_config.packaging_format = "unknown" + + with pytest.raises( + BriefcaseCommandError, + match=r"Briefcase doesn't currently know how to build system packages in UNKNOWN format.", + ): + create_command.distribution_path(first_app_config) + + +@pytest.mark.parametrize( + "vendor, codename", + [ + ("ubuntu", "jammy"), + ("debian", "bullseye"), + ], +) +def test_docker_image_tag( + create_command, + first_app_config, + vendor, + codename, + tmp_path, +): + """The docker image tag contains vendor and Python source details.""" + first_app_config.target_vendor = vendor + first_app_config.target_codename = codename + + assert ( + create_command.docker_image_tag(first_app_config) + == f"briefcase/com.example.first-app:{vendor}-{codename}" + ) + + +def test_docker_image_tag_uppercase_name( + create_command, + uppercase_app_config, + tmp_path, +): + uppercase_app_config.target_vendor = "somevendor" + uppercase_app_config.target_codename = "surprising" + + assert ( + create_command.docker_image_tag(uppercase_app_config) + == "briefcase/com.example.first-app:somevendor-surprising" + ) + + +def test_clone_options(create_command, tmp_path): + """Docker options are cloned.""" + build_command = LinuxSystemBuildCommand( + logger=Log(), + console=Console(), + base_path=tmp_path / "base_path", + data_path=tmp_path / "briefcase", + ) + build_command.target_image = "somevendor:surprising" + + create_command = build_command.create_command + + # Confirm the use_docker option has been cloned. 
+ assert create_command.is_clone + assert create_command.target_image == "somevendor:surprising" diff --git a/tests/platforms/linux/system/test_mixin__target_glibc_version.py b/tests/platforms/linux/system/test_mixin__target_glibc_version.py new file mode 100644 index 000000000..22dfcf328 --- /dev/null +++ b/tests/platforms/linux/system/test_mixin__target_glibc_version.py @@ -0,0 +1,115 @@ +import subprocess +from unittest.mock import MagicMock + +import pytest + +from briefcase.exceptions import BriefcaseCommandError + + +@pytest.mark.parametrize( + "ldd_output, version", + [ + # ubuntu:focal + ( + [ + "ldd (Ubuntu GLIBC 2.31-0ubuntu9.9) 2.31", + "Copyright (C) 2020 Free Software Foundation, Inc.", + "...", + ], + "2.31", + ), + # fedora:37 + ( + [ + "ldd (GNU libc) 2.36", + "Copyright (C) 2020 Free Software Foundation, Inc.", + "...", + ], + "2.36", + ), + ], +) +def test_target_glibc_version_docker( + create_command, first_app_config, ldd_output, version +): + "Test that the glibc version in a docker container can be determined" + # Mock an app being built on docker + create_command.target_image = "somevendor:surprising" + first_app_config.target_image = "somevendor:surprising" + + # Mock a verified Docker, and the output of ldd. 
+ create_command.tools.docker = MagicMock() + create_command.tools.docker.check_output.return_value = "\n".join(ldd_output) + + # The glibc version was returned + assert create_command.target_glibc_version(first_app_config) == version + + # Docker was consulted for the glibc version + create_command.tools.docker.check_output.assert_called_once_with( + ["ldd", "--version"], + image_tag="somevendor:surprising", + ) + + +def test_target_glibc_version_docker_no_ldd(create_command, first_app_config): + "If the Docker container can't run ldd, an error is raised" + # Mock an app being built on docker + create_command.target_image = "somevendor:surprising" + first_app_config.target_image = "somevendor:surprising" + + # Mock a verified Docker, and an error from invoking ldd. + create_command.tools.docker = MagicMock() + create_command.tools.docker.check_output.side_effect = ( + subprocess.CalledProcessError(cmd=["docker ..."], returncode=-1) + ) + + # An error is raised when getting the glibc version + with pytest.raises( + BriefcaseCommandError, match=r"Unable to determine glibc dependency version." + ): + create_command.target_glibc_version(first_app_config) + + # Docker was consulted for the glibc version + create_command.tools.docker.check_output.assert_called_once_with( + ["ldd", "--version"], + image_tag="somevendor:surprising", + ) + + +def test_target_glibc_version_docker_bad_ldd_output(create_command, first_app_config): + "If ldd returns unexpected content, an error is raised" + # Mock an app being built on docker + create_command.target_image = "somevendor:surprising" + first_app_config.target_image = "somevendor:surprising" + + # Mock a verified Docker, and unexpected ldd output + create_command.tools.docker = MagicMock() + create_command.tools.docker.check_output.return_value = ( + "I don't know what this is, but it isn't ldd output." 
+ ) + + # An error is raised when getting the glibc version + with pytest.raises( + BriefcaseCommandError, + match=r"Unable to parse glibc dependency version from version string.", + ): + create_command.target_glibc_version(first_app_config) + + # Docker was consulted for the glibc version + create_command.tools.docker.check_output.assert_called_once_with( + ["ldd", "--version"], + image_tag="somevendor:surprising", + ) + + +def test_target_glibc_version_nodocker(create_command, first_app_config): + "Test that the glibc version of the local system can be returned" + # Mock a non-docker setup + create_command.target_image = None + create_command.tools.os.confstr = MagicMock(return_value="glibc 2.42") + + # The glibc version was returned + assert create_command.target_glibc_version(first_app_config) == "2.42" + + # The OS module was consulted for the glibc version + create_command.tools.os.confstr.assert_called_once_with("CS_GNU_LIBC_VERSION") diff --git a/tests/platforms/linux/system/test_mixin__verify.py b/tests/platforms/linux/system/test_mixin__verify.py new file mode 100644 index 000000000..fad3299bc --- /dev/null +++ b/tests/platforms/linux/system/test_mixin__verify.py @@ -0,0 +1,146 @@ +from unittest.mock import MagicMock + +from briefcase.integrations.docker import Docker, DockerAppContext +from briefcase.integrations.subprocess import Subprocess + + +def test_linux_no_docker(monkeypatch, create_command, first_app_config): + """If Docker is disabled on Linux, the app_context is Subprocess.""" + create_command.tools.host_os = "Linux" + create_command.target_image = None + + # Force a dummy vendor:codename for test purposes. 
+ first_app_config.target_vendor = "somevendor" + first_app_config.target_codename = "surprising" + first_app_config.target_vendor_base = "basevendor" + + # Mock the existence of a valid non-docker system Python + create_command.verify_system_python = MagicMock() + + # Verify the tools + create_command.verify_tools() + create_command.verify_app_tools(app=first_app_config) + + # No error and Subprocess is used. + assert isinstance(create_command.tools[first_app_config].app_context, Subprocess) + # Docker is not verified. + assert not hasattr(create_command.tools, "docker") + # System python is verified + create_command.verify_system_python.assert_called_once_with() + + # Reset the mock, then invoke verify_app_tools a second time. + create_command.verify_system_python.reset_mock() + create_command.verify_app_tools(app=first_app_config) + + # Python will *not* be verified a second time. + create_command.verify_system_python.assert_not_called() + + +def test_linux_docker(create_command, tmp_path, first_app_config, monkeypatch): + """If Docker is enabled on Linux, the Docker alias is set.""" + create_command.tools.host_os = "Linux" + create_command.target_image = "somevendor:surprising" + + # Force a dummy vendor:codename for test purposes. 
+ first_app_config.target_vendor = "somevendor" + first_app_config.target_codename = "surprising" + first_app_config.target_vendor_base = "basevendor" + first_app_config.python_version_tag = "3" + + # Mock Docker tool verification + Docker.verify = MagicMock() + DockerAppContext.verify = MagicMock() + create_command.verify_python = MagicMock() + + # Verify the tools + create_command.verify_tools() + create_command.verify_app_tools(app=first_app_config) + + # Docker and Docker app context are verified + Docker.verify.assert_called_with(tools=create_command.tools) + DockerAppContext.verify.assert_called_with( + tools=create_command.tools, + app=first_app_config, + image_tag="briefcase/com.example.first-app:somevendor-surprising", + dockerfile_path=tmp_path + / "base_path" + / "build" + / "first-app" + / "somevendor" + / "surprising" + / "Dockerfile", + app_base_path=tmp_path / "base_path", + host_bundle_path=tmp_path + / "base_path" + / "build" + / "first-app" + / "somevendor" + / "surprising", + host_data_path=tmp_path / "briefcase", + python_version="3", + ) + + # Python was also verified + create_command.verify_python.assert_called_once_with(first_app_config) + + # Reset the mock, then invoke verify_app_tools a second time. + create_command.verify_python.reset_mock() + create_command.verify_app_tools(app=first_app_config) + + # Python will *not* be verified a second time. + create_command.verify_python.assert_not_called() + + +def test_non_linux_docker(create_command, tmp_path, first_app_config): + """If Docker is enabled on non-Linux, the Docker alias is set.""" + create_command.tools.host_os = "Darwin" + create_command.target_image = "somevendor:surprising" + + # Force a dummy vendor:codename for test purposes. 
+ first_app_config.target_vendor = "somevendor" + first_app_config.target_codename = "surprising" + first_app_config.target_vendor_base = "basevendor" + first_app_config.python_version_tag = "3" + + # Mock Docker tool verification + Docker.verify = MagicMock() + DockerAppContext.verify = MagicMock() + create_command.verify_python = MagicMock() + + # Verify the tools + create_command.verify_tools() + create_command.verify_app_tools(app=first_app_config) + + # Docker and Docker app context are verified + Docker.verify.assert_called_with(tools=create_command.tools) + DockerAppContext.verify.assert_called_with( + tools=create_command.tools, + app=first_app_config, + image_tag="briefcase/com.example.first-app:somevendor-surprising", + dockerfile_path=tmp_path + / "base_path" + / "build" + / "first-app" + / "somevendor" + / "surprising" + / "Dockerfile", + app_base_path=tmp_path / "base_path", + host_bundle_path=tmp_path + / "base_path" + / "build" + / "first-app" + / "somevendor" + / "surprising", + host_data_path=tmp_path / "briefcase", + python_version="3", + ) + + # Python was also verified + create_command.verify_python.assert_called_once_with(first_app_config) + + # Reset the mock, then invoke verify_app_tools a second time. + create_command.verify_python.reset_mock() + create_command.verify_app_tools(app=first_app_config) + + # Python will *not* be verified a second time. 
+ create_command.verify_python.assert_not_called() diff --git a/tests/platforms/linux/system/test_mixin__verify_python.py b/tests/platforms/linux/system/test_mixin__verify_python.py new file mode 100644 index 000000000..b03ec2607 --- /dev/null +++ b/tests/platforms/linux/system/test_mixin__verify_python.py @@ -0,0 +1,124 @@ +import sys +from unittest.mock import MagicMock + +import pytest + +from briefcase.exceptions import BriefcaseCommandError +from briefcase.integrations.docker import DockerAppContext + + +def test_match(create_command, first_app_config, capsys): + """If the system python matches the target python, everything is OK""" + first_app_config.python_version_tag = "3" + first_app_config.target_image = "somevendor:surprising" + + create_command.tools[first_app_config].app_context = DockerAppContext( + tools=create_command.tools, + app=first_app_config, + ) + + # Mock a return value from Docker that matches the system Python + create_command.tools[first_app_config].app_context.check_output = MagicMock( + return_value=f"3.{sys.version_info.minor}\n" + ) + + # Verify python for the app + create_command.verify_python(first_app_config) + + # The docker container was interrogated for a Python version + create_command.tools[ + first_app_config + ].app_context.check_output.assert_called_once_with( + [ + "python3", + "-c", + ( + "import sys; " + "print(f'{sys.version_info.major}.{sys.version_info.minor}')" + ), + ] + ) + + # Warning message was not recorded + assert "WARNING: Python version mismatch!" 
not in capsys.readouterr().out + + # The python version has been updated + assert first_app_config.python_version_tag == f"3.{sys.version_info.minor}" + + +def test_mismatch(create_command, first_app_config, capsys): + """If the system python doesn't match the target python, a warning is raised""" + first_app_config.python_version_tag = "3" + first_app_config.target_image = "somevendor:surprising" + + create_command.tools[first_app_config].app_context = DockerAppContext( + tools=create_command.tools, + app=first_app_config, + ) + + # Mock a return value from Docker that matches the system Python + create_command.tools[first_app_config].app_context.check_output = MagicMock( + return_value="3.42\n" + ) + + # Verify python for the app + create_command.verify_python(first_app_config) + + # The docker container was interrogated for a Python version + create_command.tools[ + first_app_config + ].app_context.check_output.assert_called_once_with( + [ + "python3", + "-c", + ( + "import sys; " + "print(f'{sys.version_info.major}.{sys.version_info.minor}')" + ), + ] + ) + + # Warning message was recorded + assert "WARNING: Python version mismatch!" 
in capsys.readouterr().out + + # The python version has been updated + assert first_app_config.python_version_tag == "3.42" + + +def test_target_too_old(create_command, first_app_config): + """If the target python is too old, an error is raised""" + first_app_config.python_version_tag = "3" + first_app_config.target_image = "somevendor:surprising" + + create_command.logger.warning = MagicMock() + create_command.tools[first_app_config].app_context = DockerAppContext( + tools=create_command.tools, + app=first_app_config, + ) + + # Mock a return value from Docker that is too old for Briefcase + create_command.tools[first_app_config].app_context.check_output = MagicMock( + return_value="3.7.16\n" + ) + + # Verify python for the app + with pytest.raises( + BriefcaseCommandError, + match=r"The system python3 version provided by somevendor:surprising " + r"is 3\.7\.16; Briefcase requires a minimum Python3 version of 3\.8\.", + ): + create_command.verify_python(first_app_config) + + # The docker container was interrogated for a Python version + create_command.tools[ + first_app_config + ].app_context.check_output.assert_called_once_with( + [ + "python3", + "-c", + ( + "import sys; " + "print(f'{sys.version_info.major}.{sys.version_info.minor}')" + ), + ] + ) diff --git a/tests/platforms/linux/system/test_mixin__verify_system_packages.py b/tests/platforms/linux/system/test_mixin__verify_system_packages.py new file mode 100644 index 000000000..05acaa3e2 --- /dev/null +++ b/tests/platforms/linux/system/test_mixin__verify_system_packages.py @@ -0,0 +1,138 @@ +import subprocess +from unittest.mock import MagicMock, call + +import pytest + +from briefcase.console import Console, Log +from briefcase.exceptions import BriefcaseCommandError +from briefcase.platforms.linux.system import LinuxSystemBuildCommand + + +@pytest.fixture +def build_command(tmp_path, first_app): + command = LinuxSystemBuildCommand( + logger=Log(), + console=Console(), + base_path=tmp_path / "base_path", + 
data_path=tmp_path / "briefcase", + apps={"first": first_app}, + ) + command.tools.host_os = "Linux" + command.tools.host_arch = "wonky" + + # Mock subprocess + command.tools.subprocess = MagicMock() + + return command + + +def test_deb_requirements(build_command, first_app_config): + """Debian requirements can be verified""" + first_app_config.target_vendor_base = "debian" + + build_command.verify_system_packages(first_app_config) + + # The packages were verified + assert build_command.tools.subprocess.check_output.mock_calls == [ + call(["dpkg", "-s", "python3-dev"]), + call(["dpkg", "-s", "build-essential"]), + ] + + +def test_rpm_requirements(build_command, first_app_config): + """RHEL requirements can be verified""" + first_app_config.target_vendor_base = "rhel" + + build_command.verify_system_packages(first_app_config) + + assert build_command.tools.subprocess.check_output.mock_calls == [ + call(["rpm", "-q", "python3-devel"]), + call(["rpm", "-q", "gcc"]), + call(["rpm", "-q", "make"]), + call(["rpm", "-q", "pkgconf-pkg-config"]), + ] + + +def test_arch_requirements(build_command, first_app_config, capsys): + """Arch requirements can be verified""" + first_app_config.target_vendor_base = "arch" + + build_command.verify_system_packages(first_app_config) + + # For now, we don't know how to verify arch packages. + + # No packages verified + build_command.tools.subprocess.check_output.assert_not_called() + + # A warning was logged. + output = capsys.readouterr().out + assert "WARNING: Can't verify system packages" in output + + +def test_unknown_requirements(build_command, first_app_config, capsys): + """An unknown system can't be verified""" + first_app_config.target_vendor_base = "somevendor" + + build_command.verify_system_packages(first_app_config) + + # No packages verified + build_command.tools.subprocess.check_output.assert_not_called() + + # A warning was logged. 
+    output = capsys.readouterr().out
+    assert "WARNING: Can't verify system packages" in output
+
+
+def test_missing_packages(build_command, first_app_config, capsys):
+    """If there are missing system packages, an error is raised"""
+    # Mock the system requirement tools; there's a base requirement of
+    # a package called "compiler", verified using "check ", and
+    # installed using "system "
+    build_command._system_requirement_tools = MagicMock(
+        return_value=(["compiler"], ["check"], "system")
+    )
+
+    # Add some system requirements.
+    first_app_config.system_requires = ["first", "second", "third"]
+
+    # Mock the side effect of checking those requirements.
+    build_command.tools.subprocess.check_output.side_effect = [
+        subprocess.CalledProcessError(cmd="check", returncode=1),
+        "installed",
+        subprocess.CalledProcessError(cmd="check", returncode=1),
+        "installed",
+    ]
+
+    # Verify the requirements. This will raise an error, but the error
+    # message will tell you how to install the system packages.
+    with pytest.raises(
+        BriefcaseCommandError,
+        match=r" sudo system install compiler second",
+    ):
+        build_command.verify_system_packages(first_app_config)
+
+
+def test_packages_installed(build_command, first_app_config, capsys):
+    """If all required packages are installed, no error is raised"""
+    # Mock the system requirement tools; there's a base requirement of
+    # a package called "compiler", verified using "check ", and
+    # installed using "system "
+    build_command._system_requirement_tools = MagicMock(
+        return_value=(["compiler"], ["check"], "system")
+    )
+
+    # Add some system requirements.
+    first_app_config.system_requires = ["first", "second", "third"]
+
+    # Mock the effect of checking requirements that are all present
+    build_command.tools.subprocess.check_output.return_value = "installed"
+
+    # Verify the requirements. No error will be raised.
+ build_command.verify_system_packages(first_app_config) + + assert build_command.tools.subprocess.check_output.mock_calls == [ + call(["check", "compiler"]), + call(["check", "first"]), + call(["check", "second"]), + call(["check", "third"]), + ] diff --git a/tests/platforms/linux/system/test_mixin__verify_system_python.py b/tests/platforms/linux/system/test_mixin__verify_system_python.py new file mode 100644 index 000000000..b45b2319f --- /dev/null +++ b/tests/platforms/linux/system/test_mixin__verify_system_python.py @@ -0,0 +1,49 @@ +import sys +from pathlib import Path +from unittest.mock import MagicMock + +import pytest + +from briefcase.exceptions import BriefcaseCommandError +from briefcase.platforms.linux import system + + +def test_valid_python3(monkeypatch, create_command): + """If Briefcase is being run with the system python, verification passes.""" + + # Mock the existence of a valid non-docker system Python + # with the same major/minor as the current Python + python3 = MagicMock() + python3.resolve.return_value = Path( + f"/usr/bin/python{sys.version_info.major}.{sys.version_info.minor}" + ) + mock_Path = MagicMock(return_value=python3) + monkeypatch.setattr(system, "Path", mock_Path) + + # System Python can be verified + create_command.verify_system_python() + + +@pytest.mark.parametrize( + "resolved_path, expected_error", + [ + ("/usr/bin/pythonX", "Can't determine the system python version"), + ("/usr/bin/python3", "Can't determine the system python version"), + ( + "/usr/bin/python3.X", + r"The version of Python being used to run Briefcase \(3\..*\) is not the system python3 \(3.X\)\.", + ), + ], +) +def test_bad_python3(monkeypatch, create_command, resolved_path, expected_error): + """If the system Python3 isn't obviously a Python3, an error is raised.""" + # Mock a Python3 symlink that isn'tthe existence of a valid non-docker system Python + # with the same major/minor as the current Python + python3 = MagicMock() + 
python3.resolve.return_value = Path(resolved_path) + mock_Path = MagicMock(return_value=python3) + monkeypatch.setattr(system, "Path", mock_Path) + + # Verifying python raises an error + with pytest.raises(BriefcaseCommandError, match=expected_error): + create_command.verify_system_python() diff --git a/tests/platforms/linux/system/test_package.py b/tests/platforms/linux/system/test_package.py new file mode 100644 index 000000000..7fa03c7d6 --- /dev/null +++ b/tests/platforms/linux/system/test_package.py @@ -0,0 +1,168 @@ +from unittest import mock + +import pytest + +from briefcase.console import Console, Log +from briefcase.exceptions import BriefcaseCommandError +from briefcase.platforms.linux.system import LinuxSystemPackageCommand + + +@pytest.fixture +def package_command(monkeypatch, first_app, tmp_path): + command = LinuxSystemPackageCommand( + logger=Log(), + console=Console(), + base_path=tmp_path / "base_path", + data_path=tmp_path / "briefcase", + ) + command.tools.home_path = tmp_path / "home" + + # Set the host architecture for test purposes. + command.tools.host_arch = "wonky" + + # Run outside docker for these tests. + command.target_image = None + + # Mock the detection of system python. + command.verify_system_python = mock.MagicMock() + command.verify_system_packages = mock.MagicMock() + + # Mock the packaging tools. + command._verify_deb_tools = mock.MagicMock() + command._verify_rpm_tools = mock.MagicMock() + + return command + + +def test_formats(package_command): + "The supported packaging formats are as expected." 
+    assert package_command.packaging_formats == ["deb", "rpm", "pkg", "system"]
+
+
+@pytest.mark.parametrize(
+    "format, vendor, codename, revision, filename",
+    [
+        ["rpm", "rhel", "9", None, "first-app-0.0.1-1.el9.wonky.rpm"],
+        ["rpm", "rhel", "9", 5, "first-app-0.0.1-5.el9.wonky.rpm"],
+        ["rpm", "fedora", "37", None, "first-app-0.0.1-1.fc37.wonky.rpm"],
+        [
+            "deb",
+            "debian",
+            "bullseye",
+            None,
+            "first-app_0.0.1-1~debian-bullseye_wonky.deb",
+        ],
+        ["deb", "debian", "bullseye", 5, "first-app_0.0.1-5~debian-bullseye_wonky.deb"],
+        ["deb", "ubuntu", "jammy", None, "first-app_0.0.1-1~ubuntu-jammy_wonky.deb"],
+        [
+            "deb",
+            "linuxmint",
+            "vera",
+            None,
+            "first-app_0.0.1-1~linuxmint-vera_wonky.deb",
+        ],
+    ],
+)
+def test_distribution_path(
+    package_command, first_app, format, vendor, codename, revision, filename, tmp_path
+):
+    first_app.packaging_format = format
+    first_app.target_vendor = vendor
+    first_app.target_codename = codename
+
+    if revision:
+        first_app.revision = revision
+
+    assert (
+        package_command.distribution_path(first_app)
+        == tmp_path / "base_path" / "dist" / filename
+    )
+
+
+@pytest.mark.parametrize(
+    "base_vendor, input_format, output_format",
+    [
+        # System packaging maps to known formats
+        ("debian", "system", "deb"),
+        ("rhel", "system", "rpm"),
+        ("arch", "system", "pkg"),
+        # Explicit output format is preserved
+        ("debian", "deb", "deb"),
+        ("redhat", "rpm", "rpm"),
+        ("arch", "pkg", "pkg"),
+        # This is technically possible, but probably ill-advised
+        ("debian", "rpm", "rpm"),
+        # Unknown base vendor, but explicit packaging format
+        (None, "deb", "deb"),
+        (None, "rpm", "rpm"),
+        (None, "pkg", "pkg"),
+    ],
+)
+def test_adjust_packaging_format(
+    package_command, first_app, base_vendor, input_format, output_format
+):
+    "The packaging format can be adjusted based on host system knowledge"
+    first_app.target_vendor_base = base_vendor
+    first_app.packaging_format = input_format
+
+    package_command.verify_app_tools(first_app)
+
+ assert first_app.packaging_format == output_format + + +def test_unknown_packaging_format(package_command, first_app): + """An unknown packaging format raises an error.""" + first_app.target_vendor_base = None + first_app.packaging_format = "system" + + with pytest.raises( + BriefcaseCommandError, + match=r"Briefcase doesn't know the system packaging format for somevendor.", + ): + package_command.verify_app_tools(first_app) + + +def test_package_deb_app(package_command, first_app): + """A debian app can be packaged""" + # Set the packaging format + first_app.packaging_format = "deb" + + # Mock the actual packaging call + package_command._package_deb = mock.MagicMock() + + # Package the app + package_command.package_app(first_app) + + # Assert the right backend was called. + package_command._package_deb.assert_called_once_with(first_app) + + +def test_package_rpm_app(package_command, first_app): + """A Red Hat app can be packaged""" + # Set the packaging format + first_app.packaging_format = "rpm" + + # Mock the actual packaging call + package_command._package_rpm = mock.MagicMock() + + # Package the app + package_command.package_app(first_app) + + # Assert the right backend was called. 
+ package_command._package_rpm.assert_called_once_with(first_app) + + +def test_package_unknown_format(package_command, first_app): + "Unknown/unsupported packaging formats raise an error" + # Set the packaging format + first_app.packaging_format = "unknown" + + # Mock the actual packaging call + package_command._package_deb = mock.MagicMock() + + # Package the app + with pytest.raises( + BriefcaseCommandError, + match=r"Briefcase doesn't currently know how to build system packages in UNKNOWN format.", + ): + package_command.package_app(first_app) diff --git a/tests/platforms/linux/system/test_package__deb.py b/tests/platforms/linux/system/test_package__deb.py new file mode 100644 index 000000000..b6fef4830 --- /dev/null +++ b/tests/platforms/linux/system/test_package__deb.py @@ -0,0 +1,399 @@ +import shutil +import subprocess +import sys +from pathlib import Path +from unittest import mock + +import pytest + +from briefcase.console import Console, Log +from briefcase.exceptions import BriefcaseCommandError +from briefcase.platforms.linux import system +from briefcase.platforms.linux.system import ( + LinuxSystemPackageCommand, + debian_multiline_description, +) + +from ....utils import create_file + + +@pytest.fixture +def package_command(first_app, tmp_path): + command = LinuxSystemPackageCommand( + logger=Log(), + console=Console(), + base_path=tmp_path / "base_path", + data_path=tmp_path / "briefcase", + ) + command.tools.home_path = tmp_path / "home" + + # Set the host architecture for test purposes. 
+ command.tools.host_arch = "wonky" + + # Mock the app context + command.tools.app_tools[first_app].app_context = mock.MagicMock() + + # Mock shutil + command.tools.shutil = mock.MagicMock() + # Make the mock rmtree still remove content + command.tools.shutil.rmtree.side_effect = shutil.rmtree + + return command + + +@pytest.fixture +def first_app_deb(first_app): + # Mock a debian app + first_app.python_version_tag = "3.10" + first_app.target_vendor_base = "debian" + first_app.packaging_format = "deb" + first_app.glibc_version = "2.99" + first_app.long_description = "Long description\nfor the app" + + return first_app + + +def test_verify_no_docker(monkeypatch, package_command, first_app_deb): + """If not using docker, existence of dpkg-deb is verified""" + # Mock not using docker + package_command.target_image = None + + # Mock the path of dpkg-deb + dpkg_deb = mock.MagicMock() + dpkg_deb.exists.return_value = True + + mock_Path = mock.MagicMock(return_value=dpkg_deb) + monkeypatch.setattr(system, "Path", mock_Path) + + # App tools can be verified + package_command.verify_app_tools(first_app_deb) + + # dpkg_deb was inspected + dpkg_deb.exists.assert_called_once() + + +def test_verify_dpkg_deb_missing(monkeypatch, package_command, first_app_deb): + """If dpkg_deb isn't installed, an error is raised""" + # Mock not using docker + package_command.target_image = None + + # Mock the path of dpkg-deb + dpkg_deb = mock.MagicMock() + dpkg_deb.exists.return_value = False + + mock_Path = mock.MagicMock(return_value=dpkg_deb) + monkeypatch.setattr(system, "Path", mock_Path) + + # Verifying app tools will raise an error + with pytest.raises( + BriefcaseCommandError, + match=r"Can't find the dpkg tools. 
Try running `sudo apt install dpkg-dev`.", + ): + package_command.verify_app_tools(first_app_deb) + + # dpkg_deb was inspected + dpkg_deb.exists.assert_called_once() + + +def test_verify_docker(monkeypatch, package_command, first_app_deb): + """If using Docker, no tool checks are needed""" + # Mock using docker + package_command.target_image = "somevendor:surprising" + + # Mock the existence of a valid non-docker system Python + # with the same major/minor as the current Python, + # plus the existence of dpkg-deb + python3 = mock.MagicMock() + python3.resolve.return_value = Path( + f"/usr/bin/python{sys.version_info.major}.{sys.version_info.minor}" + ) + + dpkg_deb = mock.MagicMock() + dpkg_deb.exists.return_value = False + + mock_Path = mock.MagicMock(side_effect=[python3, dpkg_deb]) + monkeypatch.setattr(system, "Path", mock_Path) + + # App tools can be verified + package_command.verify_app_tools(first_app_deb) + + # dpkg_deb was not inspected + dpkg_deb.exists.assert_not_called() + + +def test_deb_package(package_command, first_app_deb, tmp_path): + """A deb app can be packaged.""" + bundle_path = ( + tmp_path / "base_path" / "build" / "first-app" / "somevendor" / "surprising" + ) + + # Package the app + package_command.package_app(first_app_deb) + + # The control file is written + assert (bundle_path / "first-app-0.0.1" / "DEBIAN" / "control").exists() + with (bundle_path / "first-app-0.0.1" / "DEBIAN" / "control").open( + encoding="utf-8" + ) as f: + assert ( + f.read() + == "\n".join( + [ + "Package: first-app", + "Version: 0.0.1", + "Architecture: wonky", + "Maintainer: Megacorp ", + "Homepage: None", + "Description: The first simple app \\ demonstration", + " Long description", + " for the app", + "Depends: libc6 (>=2.99), libpython3.10", + "Section: utils", + "Priority: optional", + ] + ) + + "\n" + ) + + package_command.tools.app_tools[ + first_app_deb + ].app_context.run.assert_called_once_with( + [ + "dpkg-deb", + "--build", + "--root-owner-group", + 
"first-app-0.0.1", + ], + check=True, + cwd=bundle_path, + ) + + # The deb was moved into the final location + package_command.tools.shutil.move.assert_called_once_with( + bundle_path / "first-app-0.0.1.deb", + tmp_path + / "base_path" + / "dist" + / "first-app_0.0.1-1~somevendor-surprising_wonky.deb", + ) + + +def test_deb_re_package(package_command, first_app_deb, tmp_path): + """A deb app that has previously been packaged can be re-packaged.""" + bundle_path = ( + tmp_path / "base_path" / "build" / "first-app" / "somevendor" / "surprising" + ) + + # Create an old control file that will be overwritten. + create_file( + bundle_path / "first-app-0.0.1" / "DEBIAN" / "control", "Old control content" + ) + + # Package the app + package_command.package_app(first_app_deb) + + # The control file is re-written + assert (bundle_path / "first-app-0.0.1" / "DEBIAN" / "control").exists() + with (bundle_path / "first-app-0.0.1" / "DEBIAN" / "control").open( + encoding="utf-8" + ) as f: + assert ( + f.read() + == "\n".join( + [ + "Package: first-app", + "Version: 0.0.1", + "Architecture: wonky", + "Maintainer: Megacorp ", + "Homepage: None", + "Description: The first simple app \\ demonstration", + " Long description", + " for the app", + "Depends: libc6 (>=2.99), libpython3.10", + "Section: utils", + "Priority: optional", + ] + ) + + "\n" + ) + + package_command.tools.app_tools[ + first_app_deb + ].app_context.run.assert_called_once_with( + [ + "dpkg-deb", + "--build", + "--root-owner-group", + "first-app-0.0.1", + ], + check=True, + cwd=bundle_path, + ) + + # The deb was moved into the final location + package_command.tools.shutil.move.assert_called_once_with( + bundle_path / "first-app-0.0.1.deb", + tmp_path + / "base_path" + / "dist" + / "first-app_0.0.1-1~somevendor-surprising_wonky.deb", + ) + + +def test_deb_package_no_long_description(package_command, first_app_deb, tmp_path): + """A deb app without a long description raises an error.""" + bundle_path = ( + tmp_path / 
"base_path" / "build" / "first-app" / "somevendor" / "surprising" + ) + + # Delete the long description + first_app_deb.long_description = None + + # Packaging the app will fail + with pytest.raises( + BriefcaseCommandError, + match=r"App configuration does not define `long_description`. Debian projects require a long description.", + ): + package_command.package_app(first_app_deb) + + # The control file won't be written + assert not (bundle_path / "first-app-0.0.1" / "DEBIAN" / "control").exists() + + +@pytest.mark.parametrize( + "input, output", + [ + ["", ""], + ["one line", "one line"], + ["first line\nsecond line", "first line\n second line"], + ["first line\n\nsecond line", "first line\n second line"], + ["first line\n \nsecond line", "first line\n second line"], + ], +) +def test_multiline_long_description(input, output): + """Multiline debian descriptions are transformed""" + assert debian_multiline_description(input) == output + + +def test_deb_package_extra_requirements(package_command, first_app_deb, tmp_path): + """A deb app can be packaged with extra runtime requirements and configuration options.""" + bundle_path = ( + tmp_path / "base_path" / "build" / "first-app" / "somevendor" / "surprising" + ) + + # Add system requirements and other optional settings. 
+ first_app_deb.system_runtime_requires = ["first", "second (>=1.2.3)"] + first_app_deb.revision = 42 + first_app_deb.system_section = "Funny stuff" + + # Package the app + package_command.package_app(first_app_deb) + + # The control file is written + assert (bundle_path / "first-app-0.0.1" / "DEBIAN" / "control").exists() + with (bundle_path / "first-app-0.0.1" / "DEBIAN" / "control").open( + encoding="utf-8" + ) as f: + assert ( + f.read() + == "\n".join( + [ + "Package: first-app", + "Version: 0.0.1", + "Architecture: wonky", + "Maintainer: Megacorp ", + "Homepage: None", + "Description: The first simple app \\ demonstration", + " Long description", + " for the app", + "Depends: libc6 (>=2.99), libpython3.10, first, second (>=1.2.3)", + "Section: Funny stuff", + "Priority: optional", + ] + ) + + "\n" + ) + + package_command.tools.app_tools[ + first_app_deb + ].app_context.run.assert_called_once_with( + [ + "dpkg-deb", + "--build", + "--root-owner-group", + "first-app-0.0.1", + ], + check=True, + cwd=bundle_path, + ) + + # The deb was moved into the final location + package_command.tools.shutil.move.assert_called_once_with( + bundle_path / "first-app-0.0.1.deb", + tmp_path + / "base_path" + / "dist" + / "first-app_0.0.1-42~somevendor-surprising_wonky.deb", + ) + + +def test_deb_package_failure(package_command, first_app_deb, tmp_path): + """If an packaging doesn't succeed, an error is raised.""" + bundle_path = ( + tmp_path / "base_path" / "build" / "first-app" / "somevendor" / "surprising" + ) + + # Mock a packaging failure + package_command.tools.app_tools[ + first_app_deb + ].app_context.run.side_effect = subprocess.CalledProcessError( + cmd="dpkg-deb ...", returncode=-1 + ) + + # Package the app + with pytest.raises( + BriefcaseCommandError, match=r"Error while building .deb package for first-app." 
+ ): + package_command.package_app(first_app_deb) + + # The control file is written + assert (bundle_path / "first-app-0.0.1" / "DEBIAN" / "control").exists() + with (bundle_path / "first-app-0.0.1" / "DEBIAN" / "control").open( + encoding="utf-8" + ) as f: + assert ( + f.read() + == "\n".join( + [ + "Package: first-app", + "Version: 0.0.1", + "Architecture: wonky", + "Maintainer: Megacorp ", + "Homepage: None", + "Description: The first simple app \\ demonstration", + " Long description", + " for the app", + "Depends: libc6 (>=2.99), libpython3.10", + "Section: utils", + "Priority: optional", + ] + ) + + "\n" + ) + + # The call to package was made + package_command.tools.app_tools[ + first_app_deb + ].app_context.run.assert_called_once_with( + [ + "dpkg-deb", + "--build", + "--root-owner-group", + "first-app-0.0.1", + ], + check=True, + cwd=bundle_path, + ) + + # The deb wasn't built, so it wasn't moved. + package_command.tools.shutil.move.assert_not_called() diff --git a/tests/platforms/linux/system/test_package__rpm.py b/tests/platforms/linux/system/test_package__rpm.py new file mode 100644 index 000000000..032c9b717 --- /dev/null +++ b/tests/platforms/linux/system/test_package__rpm.py @@ -0,0 +1,664 @@ +import shutil +import subprocess +import sys +import tarfile +from pathlib import Path +from unittest import mock + +import pytest + +from briefcase.console import Console, Log +from briefcase.exceptions import BriefcaseCommandError +from briefcase.platforms.linux import system +from briefcase.platforms.linux.system import LinuxSystemPackageCommand + +from ....utils import create_file, create_tgz_file + + +@pytest.fixture +def package_command(first_app, tmp_path): + command = LinuxSystemPackageCommand( + logger=Log(), + console=Console(), + base_path=tmp_path / "base_path", + data_path=tmp_path / "briefcase", + ) + command.tools.home_path = tmp_path / "home" + + # Set the host architecture for test purposes. 
+ command.tools.host_arch = "wonky" + + # Mock the app context + command.tools.app_tools[first_app].app_context = mock.MagicMock() + + # Mock shutil move and rmtree + command.tools.shutil.move = mock.MagicMock() + # Make the mock rmtree still remove content + command.tools.shutil.rmtree = mock.MagicMock(side_effect=shutil.rmtree) + + # Mock the RPM tag, since "somevendor" won't identify as rpm. + command.rpm_tag = mock.MagicMock(return_value="fcXX") + return command + + +@pytest.fixture +def first_app_rpm(first_app, tmp_path): + # Mock a Red Hat app + first_app.python_version_tag = "3" + first_app.target_vendor_base = "redhat" + first_app.packaging_format = "rpm" + first_app.glibc_version = "2.99" + first_app.long_description = "Long description\nfor the app" + + # Mock the side effects of building the app + usr_dir = ( + tmp_path + / "base_path" + / "build" + / "first-app" + / "somevendor" + / "surprising" + / "first-app-0.0.1" + / "usr" + ) + + # Create the binary + create_file(usr_dir / "bin" / "first-app", "binary") + + # Files in an app-named folder + create_file(usr_dir / "share" / "doc" / "first-app" / "license", "license") + create_file(usr_dir / "share" / "doc" / "first-app" / "UserManual", "manual") + + # A share file in an app-named folder + create_file(usr_dir / "share" / "man" / "man1" / "first-app.1.gz", "man") + + return first_app + + +def test_verify_no_docker(monkeypatch, package_command, first_app_rpm): + """If not using docker, existence of rpmbuild is verified""" + # Mock not using docker + package_command.target_image = None + + # Mock the path of rpmbuild + rpmbuild = mock.MagicMock() + rpmbuild.exists.return_value = True + + mock_Path = mock.MagicMock(return_value=rpmbuild) + monkeypatch.setattr(system, "Path", mock_Path) + + # App tools can be verified + package_command.verify_app_tools(first_app_rpm) + + # rpmbuild was inspected + rpmbuild.exists.assert_called_once() + + +def test_verify_rpmbuild_missing(monkeypatch, package_command, 
first_app_rpm): + """If rpmbuild isn't installed, an error is raised""" + # Mock not using docker + package_command.target_image = None + + # Mock the path of rpmbuild + rpmbuild = mock.MagicMock() + rpmbuild.exists.return_value = False + + mock_Path = mock.MagicMock(return_value=rpmbuild) + monkeypatch.setattr(system, "Path", mock_Path) + + # Verifying app tools will raise an error + with pytest.raises( + BriefcaseCommandError, + match=r"Can't find the rpm-build tools. Try running `sudo dnf install rpm-build`.", + ): + package_command.verify_app_tools(first_app_rpm) + + # rpmbuild was inspected + rpmbuild.exists.assert_called_once() + + +def test_verify_docker(monkeypatch, package_command, first_app_rpm): + """If using Docker, no tool checks are needed""" + # Mock using docker + package_command.target_image = "somevendor:surprising" + + # Mock the existence of a valid non-docker system Python + # with the same major/minor as the current Python, + # plus the existence of rpmbuild + python3 = mock.MagicMock() + python3.resolve.return_value = Path( + f"/usr/bin/python{sys.version_info.major}.{sys.version_info.minor}" + ) + + rpmbuild = mock.MagicMock() + rpmbuild.exists.return_value = False + + mock_Path = mock.MagicMock(side_effect=[python3, rpmbuild]) + monkeypatch.setattr(system, "Path", mock_Path) + + # App tools can be verified + package_command.verify_app_tools(first_app_rpm) + + # rpmbuild was not inspected + rpmbuild.exists.assert_not_called() + + +@pytest.mark.skipif(sys.platform == "win32", reason="Can't build RPMs on Windows") +def test_rpm_package(package_command, first_app_rpm, tmp_path): + """A rpm app can be packaged.""" + bundle_path = ( + tmp_path / "base_path" / "build" / "first-app" / "somevendor" / "surprising" + ) + + # Package the app + package_command.package_app(first_app_rpm) + + # rpmbuild layout has been generated + assert (bundle_path / "rpmbuild" / "BUILD").exists() + assert (bundle_path / "rpmbuild" / "BUILDROOT").exists() + assert 
(bundle_path / "rpmbuild" / "RPMS").exists() + assert (bundle_path / "rpmbuild" / "SOURCES").exists() + assert (bundle_path / "rpmbuild" / "SRPMS").exists() + assert (bundle_path / "rpmbuild" / "SPECS").exists() + + # The spec file is written + assert (bundle_path / "rpmbuild" / "SPECS" / "first-app.spec").exists() + with (bundle_path / "rpmbuild" / "SPECS" / "first-app.spec").open( + encoding="utf-8" + ) as f: + assert f.read() == "\n".join( + [ + "%global __brp_mangle_shebangs %{nil}", + "%global __brp_strip %{nil}", + "%global __brp_strip_static_archive %{nil}", + "%global __brp_strip_comment_note %{nil}", + "%global __brp_check_rpaths %{nil}", + "%global __requires_exclude_from ^%{_libdir}/first-app/.*$", + "%global __provides_exclude_from ^%{_libdir}/first-app/.*$", + "%global _enable_debug_package 0", + "%global debug_package %{nil}", + "", + "Name: first-app", + "Version: 0.0.1", + "Release: 1%{?dist}", + "Summary: The first simple app \\ demonstration", + "", + "License: Unknown", + "URL: None", + "Source0: %{name}-%{version}.tar.gz", + "", + "Requires: python3", + "", + "ExclusiveArch: wonky", + "", + "%description", + "Long description", + "for the app", + "", + "%prep", + "%autosetup", + "", + "%build", + "", + "%install", + "cp -r usr %{buildroot}/usr", + "", + "%files", + "/usr/bin/first-app", + "%dir /usr/lib/first-app", + "%dir /usr/lib/first-app/app", + "/usr/lib/first-app/app/support.so", + "%dir /usr/lib/first-app/app_packages", + "%dir /usr/lib/first-app/app_packages/firstlib", + "/usr/lib/first-app/app_packages/firstlib/first.so", + "/usr/lib/first-app/app_packages/firstlib/first.so.1.0", + "%dir /usr/lib/first-app/app_packages/secondlib", + "/usr/lib/first-app/app_packages/secondlib/second_a.so", + "/usr/lib/first-app/app_packages/secondlib/second_b.so", + "%dir /usr/share/doc/first-app", + "/usr/share/doc/first-app/UserManual", + "/usr/share/doc/first-app/license", + "/usr/share/man/man1/first-app.1.gz", + "", + "%changelog", + "First App 
Changelog", + ] + ) + + # A source tarball was created with the right content + archive_file = bundle_path / "rpmbuild" / "SOURCES" / "first-app-0.0.1.tar.gz" + assert archive_file.exists() + with tarfile.open(archive_file, "r:gz") as archive: + assert sorted(archive.getnames()) == [ + "first-app-0.0.1", + "first-app-0.0.1/usr", + "first-app-0.0.1/usr/bin", + "first-app-0.0.1/usr/bin/first-app", + "first-app-0.0.1/usr/lib", + "first-app-0.0.1/usr/lib/first-app", + "first-app-0.0.1/usr/lib/first-app/app", + "first-app-0.0.1/usr/lib/first-app/app/support.so", + "first-app-0.0.1/usr/lib/first-app/app_packages", + "first-app-0.0.1/usr/lib/first-app/app_packages/firstlib", + "first-app-0.0.1/usr/lib/first-app/app_packages/firstlib/first.so", + "first-app-0.0.1/usr/lib/first-app/app_packages/firstlib/first.so.1.0", + "first-app-0.0.1/usr/lib/first-app/app_packages/secondlib", + "first-app-0.0.1/usr/lib/first-app/app_packages/secondlib/second_a.so", + "first-app-0.0.1/usr/lib/first-app/app_packages/secondlib/second_b.so", + "first-app-0.0.1/usr/share", + "first-app-0.0.1/usr/share/doc", + "first-app-0.0.1/usr/share/doc/first-app", + "first-app-0.0.1/usr/share/doc/first-app/UserManual", + "first-app-0.0.1/usr/share/doc/first-app/license", + "first-app-0.0.1/usr/share/man", + "first-app-0.0.1/usr/share/man/man1", + "first-app-0.0.1/usr/share/man/man1/first-app.1.gz", + ] + + # rpmbuild was invoked + package_command.tools.app_tools[ + first_app_rpm + ].app_context.run.assert_called_once_with( + [ + "rpmbuild", + "-bb", + "--define", + f"_topdir {bundle_path / 'rpmbuild'}", + "./rpmbuild/SPECS/first-app.spec", + ], + check=True, + cwd=bundle_path, + ) + + # The rpm was moved into the final location + package_command.tools.shutil.move.assert_called_once_with( + bundle_path + / "rpmbuild" + / "RPMS" + / "wonky" + / "first-app-0.0.1-1.fcXX.wonky.rpm", + tmp_path / "base_path" / "dist" / "first-app-0.0.1-1.fcXX.wonky.rpm", + ) + + +@pytest.mark.skipif(sys.platform == "win32", 
reason="Can't build RPMs on Windows") +def test_rpm_re_package(package_command, first_app_rpm, tmp_path): + """A rpm app that has previously been packaged can be re-packaged.""" + bundle_path = ( + tmp_path / "base_path" / "build" / "first-app" / "somevendor" / "surprising" + ) + + # Create an old spec file and tarball that will be overwritten. + create_file( + bundle_path / "rpmbuild" / "SPECS" / "first-app.spec", + "Old spec content", + ) + create_tgz_file( + bundle_path / "rpmbuild" / "SOURCES" / "first-app-0.0.1.tar.gz", + [("old.txt", "old content")], + ) + + # Package the app + package_command.package_app(first_app_rpm) + + # rpmbuild layout has been generated + assert (bundle_path / "rpmbuild" / "BUILD").exists() + assert (bundle_path / "rpmbuild" / "BUILDROOT").exists() + assert (bundle_path / "rpmbuild" / "RPMS").exists() + assert (bundle_path / "rpmbuild" / "SOURCES").exists() + assert (bundle_path / "rpmbuild" / "SRPMS").exists() + assert (bundle_path / "rpmbuild" / "SPECS").exists() + + # The spec file is written + assert (bundle_path / "rpmbuild" / "SPECS" / "first-app.spec").exists() + with (bundle_path / "rpmbuild" / "SPECS" / "first-app.spec").open( + encoding="utf-8" + ) as f: + assert f.read() == "\n".join( + [ + "%global __brp_mangle_shebangs %{nil}", + "%global __brp_strip %{nil}", + "%global __brp_strip_static_archive %{nil}", + "%global __brp_strip_comment_note %{nil}", + "%global __brp_check_rpaths %{nil}", + "%global __requires_exclude_from ^%{_libdir}/first-app/.*$", + "%global __provides_exclude_from ^%{_libdir}/first-app/.*$", + "%global _enable_debug_package 0", + "%global debug_package %{nil}", + "", + "Name: first-app", + "Version: 0.0.1", + "Release: 1%{?dist}", + "Summary: The first simple app \\ demonstration", + "", + "License: Unknown", + "URL: None", + "Source0: %{name}-%{version}.tar.gz", + "", + "Requires: python3", + "", + "ExclusiveArch: wonky", + "", + "%description", + "Long description", + "for the app", + "", + "%prep", + 
"%autosetup", + "", + "%build", + "", + "%install", + "cp -r usr %{buildroot}/usr", + "", + "%files", + "/usr/bin/first-app", + "%dir /usr/lib/first-app", + "%dir /usr/lib/first-app/app", + "/usr/lib/first-app/app/support.so", + "%dir /usr/lib/first-app/app_packages", + "%dir /usr/lib/first-app/app_packages/firstlib", + "/usr/lib/first-app/app_packages/firstlib/first.so", + "/usr/lib/first-app/app_packages/firstlib/first.so.1.0", + "%dir /usr/lib/first-app/app_packages/secondlib", + "/usr/lib/first-app/app_packages/secondlib/second_a.so", + "/usr/lib/first-app/app_packages/secondlib/second_b.so", + "%dir /usr/share/doc/first-app", + "/usr/share/doc/first-app/UserManual", + "/usr/share/doc/first-app/license", + "/usr/share/man/man1/first-app.1.gz", + "", + "%changelog", + "First App Changelog", + ] + ) + + # A source tarball was created with the right content + archive_file = bundle_path / "rpmbuild" / "SOURCES" / "first-app-0.0.1.tar.gz" + assert archive_file.exists() + with tarfile.open(archive_file, "r:gz") as archive: + assert sorted(archive.getnames()) == [ + "first-app-0.0.1", + "first-app-0.0.1/usr", + "first-app-0.0.1/usr/bin", + "first-app-0.0.1/usr/bin/first-app", + "first-app-0.0.1/usr/lib", + "first-app-0.0.1/usr/lib/first-app", + "first-app-0.0.1/usr/lib/first-app/app", + "first-app-0.0.1/usr/lib/first-app/app/support.so", + "first-app-0.0.1/usr/lib/first-app/app_packages", + "first-app-0.0.1/usr/lib/first-app/app_packages/firstlib", + "first-app-0.0.1/usr/lib/first-app/app_packages/firstlib/first.so", + "first-app-0.0.1/usr/lib/first-app/app_packages/firstlib/first.so.1.0", + "first-app-0.0.1/usr/lib/first-app/app_packages/secondlib", + "first-app-0.0.1/usr/lib/first-app/app_packages/secondlib/second_a.so", + "first-app-0.0.1/usr/lib/first-app/app_packages/secondlib/second_b.so", + "first-app-0.0.1/usr/share", + "first-app-0.0.1/usr/share/doc", + "first-app-0.0.1/usr/share/doc/first-app", + "first-app-0.0.1/usr/share/doc/first-app/UserManual", + 
"first-app-0.0.1/usr/share/doc/first-app/license", + "first-app-0.0.1/usr/share/man", + "first-app-0.0.1/usr/share/man/man1", + "first-app-0.0.1/usr/share/man/man1/first-app.1.gz", + ] + + # rpmbuild was invoked + package_command.tools.app_tools[ + first_app_rpm + ].app_context.run.assert_called_once_with( + [ + "rpmbuild", + "-bb", + "--define", + f"_topdir {bundle_path / 'rpmbuild'}", + "./rpmbuild/SPECS/first-app.spec", + ], + check=True, + cwd=bundle_path, + ) + + # The rpm was moved into the final location + package_command.tools.shutil.move.assert_called_once_with( + bundle_path + / "rpmbuild" + / "RPMS" + / "wonky" + / "first-app-0.0.1-1.fcXX.wonky.rpm", + tmp_path / "base_path" / "dist" / "first-app-0.0.1-1.fcXX.wonky.rpm", + ) + + +@pytest.mark.skipif(sys.platform == "win32", reason="Can't build RPMs on Windows") +def test_rpm_package_no_long_description(package_command, first_app_rpm, tmp_path): + """A rpm app without a long description raises an error.""" + bundle_path = ( + tmp_path / "base_path" / "build" / "first-app" / "somevendor" / "surprising" + ) + + # Delete the long description + first_app_rpm.long_description = None + + # Packaging the app will fail + with pytest.raises( + BriefcaseCommandError, + match=r"App configuration does not define `long_description`. 
Red Hat projects require a long description.", + ): + package_command.package_app(first_app_rpm) + + # The spec file and tarball won't be written + assert not (bundle_path / "rpmbuild" / "SPECS" / "first-app.spec").exists() + assert not ( + bundle_path / "rpmbuild" / "SOURCES" / "first-app-0.0.1.tar.gz" + ).exists() + + +@pytest.mark.skipif(sys.platform == "win32", reason="Can't build RPMs on Windows") +def test_rpm_package_extra_requirements(package_command, first_app_rpm, tmp_path): + """A rpm app can be packaged with extra runtime requirements and config features.""" + bundle_path = ( + tmp_path / "base_path" / "build" / "first-app" / "somevendor" / "surprising" + ) + + # Add system requirements and other optional settings. + first_app_rpm.system_runtime_requires = ["first", "second"] + first_app_rpm.revision = 42 + first_app_rpm.license = "BSD License" + + # Package the app + package_command.package_app(first_app_rpm) + + # rpmbuild layout has been generated + assert (bundle_path / "rpmbuild" / "BUILD").exists() + assert (bundle_path / "rpmbuild" / "BUILDROOT").exists() + assert (bundle_path / "rpmbuild" / "RPMS").exists() + assert (bundle_path / "rpmbuild" / "SOURCES").exists() + assert (bundle_path / "rpmbuild" / "SRPMS").exists() + assert (bundle_path / "rpmbuild" / "SPECS").exists() + + # The spec file is written + assert (bundle_path / "rpmbuild" / "SPECS" / "first-app.spec").exists() + with (bundle_path / "rpmbuild" / "SPECS" / "first-app.spec").open( + encoding="utf-8" + ) as f: + assert f.read() == "\n".join( + [ + "%global __brp_mangle_shebangs %{nil}", + "%global __brp_strip %{nil}", + "%global __brp_strip_static_archive %{nil}", + "%global __brp_strip_comment_note %{nil}", + "%global __brp_check_rpaths %{nil}", + "%global __requires_exclude_from ^%{_libdir}/first-app/.*$", + "%global __provides_exclude_from ^%{_libdir}/first-app/.*$", + "%global _enable_debug_package 0", + "%global debug_package %{nil}", + "", + "Name: first-app", + "Version: 0.0.1", 
+ "Release: 42%{?dist}", + "Summary: The first simple app \\ demonstration", + "", + "License: BSD License", + "URL: None", + "Source0: %{name}-%{version}.tar.gz", + "", + "Requires: python3", + "Requires: first", + "Requires: second", + "", + "ExclusiveArch: wonky", + "", + "%description", + "Long description", + "for the app", + "", + "%prep", + "%autosetup", + "", + "%build", + "", + "%install", + "cp -r usr %{buildroot}/usr", + "", + "%files", + "/usr/bin/first-app", + "%dir /usr/lib/first-app", + "%dir /usr/lib/first-app/app", + "/usr/lib/first-app/app/support.so", + "%dir /usr/lib/first-app/app_packages", + "%dir /usr/lib/first-app/app_packages/firstlib", + "/usr/lib/first-app/app_packages/firstlib/first.so", + "/usr/lib/first-app/app_packages/firstlib/first.so.1.0", + "%dir /usr/lib/first-app/app_packages/secondlib", + "/usr/lib/first-app/app_packages/secondlib/second_a.so", + "/usr/lib/first-app/app_packages/secondlib/second_b.so", + "%dir /usr/share/doc/first-app", + "/usr/share/doc/first-app/UserManual", + "/usr/share/doc/first-app/license", + "/usr/share/man/man1/first-app.1.gz", + "", + "%changelog", + "First App Changelog", + ] + ) + + # A source tarball was created + archive_file = bundle_path / "rpmbuild" / "SOURCES" / "first-app-0.0.1.tar.gz" + assert archive_file.exists() + + # rpmbuild was invoked + package_command.tools.app_tools[ + first_app_rpm + ].app_context.run.assert_called_once_with( + [ + "rpmbuild", + "-bb", + "--define", + f"_topdir {bundle_path / 'rpmbuild'}", + "./rpmbuild/SPECS/first-app.spec", + ], + check=True, + cwd=bundle_path, + ) + + # The rpm was moved into the final location + package_command.tools.shutil.move.assert_called_once_with( + bundle_path + / "rpmbuild" + / "RPMS" + / "wonky" + / "first-app-0.0.1-42.fcXX.wonky.rpm", + tmp_path / "base_path" / "dist" / "first-app-0.0.1-42.fcXX.wonky.rpm", + ) + + +@pytest.mark.skipif(sys.platform == "win32", reason="Can't build RPMs on Windows") +def 
test_rpm_package_failure(package_command, first_app_rpm, tmp_path): + """If packaging doesn't succeed, an error is raised.""" + bundle_path = ( + tmp_path / "base_path" / "build" / "first-app" / "somevendor" / "surprising" + ) + + # Mock a packaging failure + package_command.tools.app_tools[ + first_app_rpm + ].app_context.run.side_effect = subprocess.CalledProcessError( + cmd="rpmbuild ...", returncode=-1 + ) + + # Package the app; this will fail + with pytest.raises( + BriefcaseCommandError, match=r"Error while building .rpm package for first-app." + ): + package_command.package_app(first_app_rpm) + + # rpmbuild layout has been generated + assert (bundle_path / "rpmbuild" / "BUILD").exists() + assert (bundle_path / "rpmbuild" / "BUILDROOT").exists() + assert (bundle_path / "rpmbuild" / "RPMS").exists() + assert (bundle_path / "rpmbuild" / "SOURCES").exists() + assert (bundle_path / "rpmbuild" / "SRPMS").exists() + assert (bundle_path / "rpmbuild" / "SPECS").exists() + + # The spec file is written + assert (bundle_path / "rpmbuild" / "SPECS" / "first-app.spec").exists() + + # A source tarball was created + archive_file = bundle_path / "rpmbuild" / "SOURCES" / "first-app-0.0.1.tar.gz" + assert archive_file.exists() + + # rpmbuild was invoked + package_command.tools.app_tools[ + first_app_rpm + ].app_context.run.assert_called_once_with( + [ + "rpmbuild", + "-bb", + "--define", + f"_topdir {bundle_path / 'rpmbuild'}", + "./rpmbuild/SPECS/first-app.spec", + ], + check=True, + cwd=bundle_path, + ) + + # The rpm wasn't built, so it wasn't moved. 
+ package_command.tools.shutil.move.assert_not_called() + + +@pytest.mark.skipif(sys.platform == "win32", reason="Can't build RPMs on Windows") +def test_no_changelog(package_command, first_app_rpm, tmp_path): + """If the project has no CHANGELOG file, an error is raised.""" + bundle_path = ( + tmp_path / "base_path" / "build" / "first-app" / "somevendor" / "surprising" + ) + + # Remove the changelog file + (tmp_path / "base_path" / "CHANGELOG").unlink() + + # Package the app; this will fail + with pytest.raises( + BriefcaseCommandError, match=r"Your project does not contain a CHANGELOG file." + ): + package_command.package_app(first_app_rpm) + + # rpmbuild layout has been generated + assert (bundle_path / "rpmbuild" / "BUILD").exists() + assert (bundle_path / "rpmbuild" / "BUILDROOT").exists() + assert (bundle_path / "rpmbuild" / "RPMS").exists() + assert (bundle_path / "rpmbuild" / "SOURCES").exists() + assert (bundle_path / "rpmbuild" / "SRPMS").exists() + assert (bundle_path / "rpmbuild" / "SPECS").exists() + + # The spec file will exist (however, it will be incomplete) + assert (bundle_path / "rpmbuild" / "SPECS" / "first-app.spec").exists() + + # No source tarball was created + archive_file = bundle_path / "rpmbuild" / "SOURCES" / "first-app-0.0.1.tar.gz" + assert not archive_file.exists() + + # rpmbuild wasn't invoked + package_command.tools.app_tools[first_app_rpm].app_context.run.assert_not_called() + + # The rpm wasn't built, so it wasn't moved. 
+ package_command.tools.shutil.move.assert_not_called() diff --git a/tests/platforms/linux/system/test_run.py b/tests/platforms/linux/system/test_run.py new file mode 100644 index 000000000..710074209 --- /dev/null +++ b/tests/platforms/linux/system/test_run.py @@ -0,0 +1,256 @@ +import os +import subprocess +from unittest import mock + +import pytest + +from briefcase.console import Console, Log +from briefcase.exceptions import UnsupportedHostError +from briefcase.integrations.subprocess import Subprocess +from briefcase.platforms.linux.system import LinuxSystemRunCommand + + +@pytest.fixture +def run_command(tmp_path): + command = LinuxSystemRunCommand( + logger=Log(), + console=Console(), + base_path=tmp_path / "base_path", + data_path=tmp_path / "briefcase", + ) + command.tools.home_path = tmp_path / "home" + + # Set the host architecture for test purposes. + command.tools.host_arch = "wonky" + + command.tools.subprocess = mock.MagicMock(spec_set=Subprocess) + + command._stream_app_logs = mock.MagicMock() + + return command + + +@pytest.mark.parametrize("host_os", ["Darwin", "Windows", "WeirdOS"]) +def test_unsupported_host_os(run_command, host_os): + """Error raised for an unsupported OS.""" + run_command.tools.host_os = host_os + # Mock the existence of a single app + run_command.apps = {"app": None} + + with pytest.raises( + UnsupportedHostError, + match="Linux system projects can only be executed on Linux.", + ): + run_command() + + +def test_run_app(run_command, first_app, tmp_path): + """A bootstrap binary can be started.""" + + # Set up the log streamer to return a known stream + log_popen = mock.MagicMock() + run_command.tools.subprocess.Popen.return_value = log_popen + + # Run the app + run_command.run_app(first_app, test_mode=False, passthrough=[]) + + # The process was started + run_command.tools.subprocess.Popen.assert_called_with( + [ + os.fsdecode( + tmp_path + / "base_path" + / "build" + / "first-app" + / "somevendor" + / "surprising" + / 
"first-app-0.0.1" + / "usr" + / "bin" + / "first-app" + ) + ], + cwd=tmp_path / "home", + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + bufsize=1, + ) + + # The streamer was started + run_command._stream_app_logs.assert_called_once_with( + first_app, + popen=log_popen, + test_mode=False, + clean_output=False, + ) + + +def test_run_app_with_passthrough(run_command, first_app, tmp_path): + """A linux App can be started with args.""" + + # Set up the log streamer to return a known stream + log_popen = mock.MagicMock() + run_command.tools.subprocess.Popen.return_value = log_popen + + # Run the app with args + run_command.run_app( + first_app, + test_mode=False, + passthrough=["foo", "--bar"], + ) + + # The process was started + run_command.tools.subprocess.Popen.assert_called_with( + [ + os.fsdecode( + tmp_path + / "base_path" + / "build" + / "first-app" + / "somevendor" + / "surprising" + / "first-app-0.0.1" + / "usr" + / "bin" + / "first-app" + ), + "foo", + "--bar", + ], + cwd=tmp_path / "home", + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + bufsize=1, + ) + + # The streamer was started + run_command._stream_app_logs.assert_called_once_with( + first_app, + popen=log_popen, + test_mode=False, + clean_output=False, + ) + + +def test_run_app_failed(run_command, first_app, tmp_path): + """If there's a problem starting the app, an exception is raised.""" + + run_command.tools.subprocess.Popen.side_effect = OSError + + with pytest.raises(OSError): + run_command.run_app(first_app, test_mode=False, passthrough=[]) + + # The run command was still invoked + run_command.tools.subprocess.Popen.assert_called_with( + [ + os.fsdecode( + tmp_path + / "base_path" + / "build" + / "first-app" + / "somevendor" + / "surprising" + / "first-app-0.0.1" + / "usr" + / "bin" + / "first-app" + ) + ], + cwd=tmp_path / "home", + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + bufsize=1, + ) + + # No attempt to stream was made + 
run_command._stream_app_logs.assert_not_called() + + +def test_run_app_test_mode(run_command, first_app, tmp_path): + """A linux App can be started in test mode.""" + + # Set up the log streamer to return a known stream + log_popen = mock.MagicMock() + run_command.tools.subprocess.Popen.return_value = log_popen + + # Run the app + run_command.run_app(first_app, test_mode=True, passthrough=[]) + + # The process was started + run_command.tools.subprocess.Popen.assert_called_with( + [ + os.fsdecode( + tmp_path + / "base_path" + / "build" + / "first-app" + / "somevendor" + / "surprising" + / "first-app-0.0.1" + / "usr" + / "bin" + / "first-app" + ) + ], + cwd=tmp_path / "home", + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + bufsize=1, + env={"BRIEFCASE_MAIN_MODULE": "tests.first_app"}, + ) + + # The streamer was started + run_command._stream_app_logs.assert_called_once_with( + first_app, + popen=log_popen, + test_mode=True, + clean_output=False, + ) + + +def test_run_app_test_mode_with_args(run_command, first_app, tmp_path): + """A linux App can be started in test mode with args.""" + + # Set up the log streamer to return a known stream + log_popen = mock.MagicMock() + run_command.tools.subprocess.Popen.return_value = log_popen + + # Run the app with args + run_command.run_app( + first_app, + test_mode=True, + passthrough=["foo", "--bar"], + ) + + # The process was started + run_command.tools.subprocess.Popen.assert_called_with( + [ + os.fsdecode( + tmp_path + / "base_path" + / "build" + / "first-app" + / "somevendor" + / "surprising" + / "first-app-0.0.1" + / "usr" + / "bin" + / "first-app" + ), + "foo", + "--bar", + ], + cwd=tmp_path / "home", + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + bufsize=1, + env={"BRIEFCASE_MAIN_MODULE": "tests.first_app"}, + ) + + # The streamer was started + run_command._stream_app_logs.assert_called_once_with( + first_app, + popen=log_popen, + test_mode=True, + clean_output=False, + ) diff --git 
a/tests/platforms/linux/appimage/test_open.py b/tests/platforms/linux/test_DockerOpenCommand.py similarity index 95% rename from tests/platforms/linux/appimage/test_open.py rename to tests/platforms/linux/test_DockerOpenCommand.py index 20e7e83bb..1b813ee71 100644 --- a/tests/platforms/linux/appimage/test_open.py +++ b/tests/platforms/linux/test_DockerOpenCommand.py @@ -9,11 +9,11 @@ from briefcase.integrations.subprocess import Subprocess from briefcase.platforms.linux.appimage import LinuxAppImageOpenCommand -from ....utils import create_file +from ...utils import create_file @pytest.fixture -def open_command(tmp_path, first_app_config): +def open_command(tmp_path): command = LinuxAppImageOpenCommand( logger=Log(), console=Console(), @@ -85,11 +85,10 @@ def test_open_docker(open_command, first_app_config, tmp_path): "--rm", "-it", "--volume", - f"{tmp_path / 'base_path' / 'build' / 'first-app' / 'linux' / 'appimage'}:/app:z", + f"{open_command.base_path}/build/first-app/linux/appimage:/app:z", "--volume", f"{open_command.data_path}:/home/brutus/.cache/briefcase:z", f"briefcase/com.example.first-app:py3.{sys.version_info.minor}", - "/bin/bash", ], stream_output=False, ) diff --git a/tests/platforms/linux/test_LinuxMixin__vendor_details.py b/tests/platforms/linux/test_LinuxMixin__vendor_details.py new file mode 100644 index 000000000..1605b4c19 --- /dev/null +++ b/tests/platforms/linux/test_LinuxMixin__vendor_details.py @@ -0,0 +1,43 @@ +from unittest.mock import MagicMock + +import pytest + +from briefcase.platforms.linux import LinuxMixin, parse_freedesktop_os_release + +from .os_release import OS_RELEASE + + +@pytest.fixture +def linux_mixin(): + "A Linux mixin with a mocked tools collection" + linux_mixin = LinuxMixin() + linux_mixin.tools = MagicMock() + return linux_mixin + + +@pytest.mark.parametrize( + "docker, vendor, codename, vendor_base", + [ + ("archlinux:latest", "arch", "rolling", "arch"), + ("manjaro/base:latest", "manjaro", "rolling", "arch"), + 
("fedora:37", "fedora", "37", "rhel"), + ("rhel/ubi8:8.7", "rhel", "8", "rhel"), + ("almalinux:9.1", "almalinux", "9", "rhel"), + ("centos:8", "centos", "8", "rhel"), + ("opensuse/leap:15.4", "opensuse-leap", "15", None), + ("opensuse/tumbleweed:latest", "opensuse-tumbleweed", "20230304", None), + ("debian:11", "debian", "bullseye", "debian"), + ("ubuntu:22.04", "ubuntu", "jammy", "debian"), + ("pop:22.04", "pop", "jammy", "debian"), + ("linuxmint:19.2", "linuxmint", "tina", "debian"), + ], +) +def test_vendor_details(linux_mixin, docker, vendor, codename, vendor_base): + "Assert real-world examples of vendor details work" + assert linux_mixin.vendor_details( + parse_freedesktop_os_release(OS_RELEASE[docker]) + ) == ( + vendor, + codename, + vendor_base, + ) diff --git a/tests/platforms/linux/test_LocalRequirementsMixin.py b/tests/platforms/linux/test_LocalRequirementsMixin.py new file mode 100644 index 000000000..744e72ad2 --- /dev/null +++ b/tests/platforms/linux/test_LocalRequirementsMixin.py @@ -0,0 +1,496 @@ +import shutil +import subprocess +import sys +from pathlib import Path +from unittest.mock import MagicMock, call + +import pytest + +from briefcase.commands import CreateCommand +from briefcase.console import Console, Log +from briefcase.exceptions import BriefcaseCommandError +from briefcase.integrations.docker import DockerAppContext +from briefcase.integrations.subprocess import Subprocess +from briefcase.platforms.linux import LocalRequirementsMixin + +from ...utils import create_file, create_tgz_file, create_zip_file + + +class DummyCreateCommand(LocalRequirementsMixin, CreateCommand): + # A command that provides the stubs required to satisfy LocalRequirementsMixin + platform = "Tester" + output_format = "Dummy" + + def binary_path(self, app): + return self.bundle_path(app) / f"{app.app_name}.bin" + + +@pytest.fixture +def no_docker_create_command(first_app_config, tmp_path): + command = DummyCreateCommand( + logger=Log(), + console=Console(), + 
base_path=tmp_path / "base_path", + data_path=tmp_path / "briefcase", + ) + + # Disable Docker use + command.use_docker = False + + # Set the host architecture to something known for test purposes. + command.tools.host_arch = "wonky" + + # Set the host system to Linux for test purposes + command.tools.host_os = "Linux" + + # Mock the existence of Docker + command.tools.subprocess = MagicMock(spec_set=Subprocess) + + # Mock shutil.copy to do the copy, but be observable + command.tools.shutil.copy = MagicMock(side_effect=shutil.copy) + + command._path_index = { + first_app_config: {"app_packages_path": "path/to/app_packages"} + } + + # At the time app requirements are installed, the project folder will exist. + command.bundle_path(first_app_config).mkdir(parents=True, exist_ok=True) + + return command + + +@pytest.fixture +def create_command(no_docker_create_command, first_app_config, tmp_path): + # Enable Docker use + no_docker_create_command.use_docker = True + + # Provide Docker app context + no_docker_create_command.tools[first_app_config].app_context = DockerAppContext( + tools=no_docker_create_command.tools, + app=first_app_config, + ) + no_docker_create_command.tools[first_app_config].app_context.prepare( + image_tag="briefcase/com.example.first-app:py3.X", + dockerfile_path=tmp_path + / "base_path" + / "build" + / "first-app" + / "tester" + / "dummy" + / "Dockerfile", + app_base_path=tmp_path / "base_path", + host_bundle_path=tmp_path + / "base_path" + / "build" + / "first-app" + / "tester" + / "dummy", + host_data_path=tmp_path / "briefcase", + python_version="3.X", + ) + + # Reset the subprocess.run mock, removing the Docker setup call + no_docker_create_command.tools.subprocess.run.reset_mock() + + return no_docker_create_command + + +@pytest.fixture +def first_package(tmp_path): + # Create a local package to be built + create_file( + tmp_path / "local" / "first" / "setup.py", + content="Python config", + ) + create_file( + tmp_path / "local" / "first" / 
"first.py", + content="Python source", + ) + + return str(tmp_path / "local" / "first") + + +@pytest.fixture +def second_package(tmp_path): + # Create a local pre-built sdist + create_tgz_file( + tmp_path / "local" / "second-2.3.4.tar.gz", + content=[ + ("setup.py", "Python config"), + ("second.py", "Python source"), + ], + ) + + return str(tmp_path / "local" / "second-2.3.4.tar.gz") + + +@pytest.fixture +def third_package(tmp_path): + # Create a local pre-built wheel + create_zip_file( + tmp_path / "local" / "third-3.4.5-py3-none-any.whl", + content=[ + ("MANIFEST.in", "Wheel config"), + ("third.py", "Python source"), + ], + ) + + return str(tmp_path / "local" / "third-3.4.5-py3-none-any.whl") + + +@pytest.fixture +def other_package(create_command, first_app_config): + # A stale sdist, built in a previous pass + create_tgz_file( + create_command.local_requirements_path(first_app_config) + / "other_package-0.1.2.tar.gz", + content=[ + ("setup.py", "Python config"), + ("other.py", "Python source"), + ], + ) + + +@pytest.mark.skipif( + sys.platform == "win32", reason="Windows paths aren't converted in Docker context" +) +def test_install_app_requirements_in_docker(create_command, first_app_config, tmp_path): + """If Docker is in use, a docker context is used to invoke pip.""" + + # Install requirements + create_command.install_app_requirements(first_app_config, test_mode=False) + + # pip was invoked inside docker. 
+ create_command.tools.subprocess.run.assert_called_once_with( + [ + "docker", + "run", + "--rm", + "--volume", + f"{tmp_path / 'base_path' / 'build' / 'first-app' / 'tester' / 'dummy'}:/app:z", + "--volume", + f"{tmp_path / 'briefcase'}:/home/brutus/.cache/briefcase:z", + "briefcase/com.example.first-app:py3.X", + "python3.X", + "-u", + "-m", + "pip", + "install", + "--upgrade", + "--no-user", + "--target=/app/path/to/app_packages", + "foo==1.2.3", + "bar>=4.5", + ], + check=True, + ) + + # The local requirements path exists, but is empty + local_requirements_path = create_command.local_requirements_path(first_app_config) + assert local_requirements_path.exists() + assert len(list(local_requirements_path.iterdir())) == 0 + + +@pytest.mark.skipif( + sys.platform == "win32", reason="Windows paths aren't converted in Docker context" +) +def test_install_app_requirements_no_docker( + no_docker_create_command, + first_app_config, + tmp_path, +): + """If docker is *not* in use, calls are made on raw subprocess.""" + # Verify the tools; this should operate in the non-docker context + no_docker_create_command.verify_tools() + no_docker_create_command.verify_app_tools(first_app_config) + + # Install requirements + no_docker_create_command.install_app_requirements(first_app_config, test_mode=False) + + # Docker is not verified. 
+ assert not hasattr(no_docker_create_command.tools, "docker") + + # Subprocess is used for app_context + assert isinstance( + no_docker_create_command.tools[first_app_config].app_context, Subprocess + ) + assert ( + no_docker_create_command.tools[first_app_config].app_context + is no_docker_create_command.tools.subprocess + ) + + # pip was invoked natively + no_docker_create_command.tools[ + first_app_config + ].app_context.run.assert_called_once_with( + [ + sys.executable, + "-u", + "-m", + "pip", + "install", + "--upgrade", + "--no-user", + f"--target={tmp_path}/base_path/build/first-app/tester/dummy/path/to/app_packages", + "foo==1.2.3", + "bar>=4.5", + ], + check=True, + ) + + # The local requirements path exists, but is empty + local_requirements_path = no_docker_create_command.local_requirements_path( + first_app_config + ) + assert local_requirements_path.exists() + assert len(list(local_requirements_path.iterdir())) == 0 + + +@pytest.mark.skipif( + sys.platform == "win32", reason="Windows paths aren't converted in Docker context" +) +def test_install_app_requirements_with_locals( + create_command, + first_app_config, + tmp_path, + first_package, # A local folder to be built + second_package, # A pre-built sdist + third_package, # A pre-built wheel + other_package, # A stale local requirement +): + """If the app has local requirements, they are compiled into sdists for + installation.""" + # Add local requirements + first_app_config.requires.extend([first_package, second_package, third_package]) + + # Mock the side effect of building an sdist + def build_sdist(*args, **kwargs): + # Extract the folder name; assume that's the name of the package + name = Path(args[0][-1]).name + create_tgz_file( + create_command.local_requirements_path(first_app_config) + / f"{name}-1.2.3.tar.gz", + content=[ + ("setup.py", "Python config"), + ("local.py", "Python source"), + ], + ) + + create_command.tools.subprocess.check_output.side_effect = build_sdist + + # Install 
requirements + create_command.install_app_requirements(first_app_config, test_mode=False) + + # An sdist was built for the local package + create_command.tools.subprocess.check_output.assert_called_once_with( + [ + sys.executable, + "-m", + "build", + "--sdist", + "--outdir", + tmp_path + / "base_path" + / "build" + / "first-app" + / "tester" + / "dummy" + / "_requirements", + str(tmp_path / "local" / "first"), + ] + ) + + # An attempt was made to copy the prebuilt packages + assert create_command.tools.shutil.copy.mock_calls == [ + call( + str(tmp_path / "local" / "second-2.3.4.tar.gz"), + tmp_path + / "base_path" + / "build" + / "first-app" + / "tester" + / "dummy" + / "_requirements", + ), + call( + str(tmp_path / "local" / "third-3.4.5-py3-none-any.whl"), + tmp_path + / "base_path" + / "build" + / "first-app" + / "tester" + / "dummy" + / "_requirements", + ), + ] + + # pip was invoked inside docker. + create_command.tools.subprocess.run.assert_called_once_with( + [ + "docker", + "run", + "--rm", + "--volume", + f"{tmp_path / 'base_path' / 'build' / 'first-app' / 'tester' / 'dummy'}:/app:z", + "--volume", + f"{tmp_path / 'briefcase'}:/home/brutus/.cache/briefcase:z", + "briefcase/com.example.first-app:py3.X", + "python3.X", + "-u", + "-m", + "pip", + "install", + "--upgrade", + "--no-user", + "--target=/app/path/to/app_packages", + "foo==1.2.3", + "bar>=4.5", + "/app/_requirements/first-1.2.3.tar.gz", + "/app/_requirements/second-2.3.4.tar.gz", + "/app/_requirements/third-3.4.5-py3-none-any.whl", + ], + check=True, + ) + + # The local requirements path exists, and contains the compiled sdist, the + # pre-existing sdist, and the pre-existing wheel; the old requirement has + # been purged. 
+ local_requirements_path = create_command.local_requirements_path(first_app_config) + assert local_requirements_path.exists() + assert [f.name for f in sorted(local_requirements_path.iterdir())] == [ + "first-1.2.3.tar.gz", + "second-2.3.4.tar.gz", + "third-3.4.5-py3-none-any.whl", + ] + + +@pytest.mark.skipif( + sys.platform == "win32", reason="Windows paths aren't converted in Docker context" +) +def test_install_app_requirements_with_bad_local( + create_command, + first_app_config, + tmp_path, + first_package, # A local folder to be built + other_package, # A stale local requirement +): + """If the app has a local requirement that can't be built, an error is raised.""" + # Add a local requirement + first_app_config.requires.append(first_package) + + # Mock building an sdist raising an error + create_command.tools.subprocess.check_output.side_effect = ( + subprocess.CalledProcessError( + cmd=["python", "-m", "build", "..."], returncode=1 + ) + ) + + # Install requirements + with pytest.raises( + BriefcaseCommandError, + match=r"Unable to build sdist for .*/local/first", + ): + create_command.install_app_requirements(first_app_config, test_mode=False) + + # An attempt to build the sdist was made + create_command.tools.subprocess.check_output.assert_called_once_with( + [ + sys.executable, + "-m", + "build", + "--sdist", + "--outdir", + tmp_path + / "base_path" + / "build" + / "first-app" + / "tester" + / "dummy" + / "_requirements", + str(tmp_path / "local" / "first"), + ] + ) + + # pip was *not* invoked inside docker. + create_command.tools.subprocess.run.assert_not_called() + + # The local requirements path exists, and is empty. It has been purged, but not refilled. 
+ local_requirements_path = create_command.local_requirements_path(first_app_config) + assert local_requirements_path.exists() + assert len(list(local_requirements_path.iterdir())) == 0 + + +@pytest.mark.skipif( + sys.platform == "win32", reason="Windows paths aren't converted in Docker context" +) +def test_install_app_requirements_with_missing_local_build( + create_command, + first_app_config, + tmp_path, +): + """If the app references a requirement that needs to be built, but is missing, an + error is raised.""" + # Define a local requirement, but don't create the files it points at + first_app_config.requires.append(str(tmp_path / "local" / "first")) + + # Install requirements + with pytest.raises( + BriefcaseCommandError, + match=r"Unable to find local requirement .*/local/first", + ): + create_command.install_app_requirements(first_app_config, test_mode=False) + + # No attempt to build the sdist was made + create_command.tools.subprocess.check_output.assert_not_called() + + # pip was *not* invoked inside docker. + create_command.tools.subprocess.run.assert_not_called() + + # The local requirements path exists, and is empty. It has been purged, but not refilled. 
+ local_requirements_path = create_command.local_requirements_path(first_app_config) + assert local_requirements_path.exists() + assert len(list(local_requirements_path.iterdir())) == 0 + + +@pytest.mark.skipif( + sys.platform == "win32", reason="Windows paths aren't converted in Docker context" +) +def test_install_app_requirements_with_bad_local_file( + create_command, + first_app_config, + tmp_path, +): + """If the app references a local requirement file that doesn't exist, an error is + raised.""" + # Add a local requirement that doesn't exist + first_app_config.requires.append(str(tmp_path / "local" / "missing-2.3.4.tar.gz")) + + # Install requirements + with pytest.raises( + BriefcaseCommandError, + match=r"Unable to find local requirement .*/local/missing-2.3.4.tar.gz", + ): + create_command.install_app_requirements(first_app_config, test_mode=False) + + # An attempt was made to copy the package + create_command.tools.shutil.copy.assert_called_once_with( + str(tmp_path / "local" / "missing-2.3.4.tar.gz"), + tmp_path + / "base_path" + / "build" + / "first-app" + / "tester" + / "dummy" + / "_requirements", + ) + + # No attempt was made to build the sdist + create_command.tools.subprocess.check_output.assert_not_called() + + # pip was *not* invoked inside docker. + create_command.tools.subprocess.run.assert_not_called() + + # The local requirements path exists, and is empty. It has been purged, but not refilled. 
+ local_requirements_path = create_command.local_requirements_path(first_app_config) + assert local_requirements_path.exists() + assert len(list(local_requirements_path.iterdir())) == 0 diff --git a/tests/platforms/linux/test_parse_freedesktop_os_release.py b/tests/platforms/linux/test_parse_freedesktop_os_release.py new file mode 100644 index 000000000..c346188d6 --- /dev/null +++ b/tests/platforms/linux/test_parse_freedesktop_os_release.py @@ -0,0 +1,37 @@ +import pytest + +from briefcase.exceptions import ParseError +from briefcase.platforms.linux import parse_freedesktop_os_release + + +def test_parse(): + "Content can be parsed from a freedesktop file." + content = """ +KEY1=value +KEY2="quoted value" +KEY3='another quoted value' + +# Commented line +KEY4=42 +""" + assert parse_freedesktop_os_release(content) == { + "KEY1": "value", + "KEY2": "quoted value", + "KEY3": "another quoted value", + "KEY4": "42", + } + + +@pytest.mark.parametrize( + "content, error", + [ + ("KEY=value\nnot valid content", r"Line 2: 'not valid content'"), + ("KEY=value\nBAD='unbalanced quote", r"string literal"), + ], +) +def test_parse_error(content, error): + with pytest.raises( + ParseError, + match=error, + ): + parse_freedesktop_os_release(content) diff --git a/tests/platforms/macOS/app/conftest.py b/tests/platforms/macOS/app/conftest.py index 28c77c42d..79c79aaca 100644 --- a/tests/platforms/macOS/app/conftest.py +++ b/tests/platforms/macOS/app/conftest.py @@ -103,7 +103,7 @@ def first_app_with_binaries(first_app_config, tmp_path): with (lib_path / "unknown.binary").open("wb") as f: f.write(b"\xCA\xFE\xBA\xBEother") - # Set a default packaging format + # Select dmg packaging by default first_app_config.packaging_format = "dmg" return first_app_config diff --git a/tests/platforms/macOS/app/test_package.py b/tests/platforms/macOS/app/test_package.py index 0127a20a5..70fe8d7b4 100644 --- a/tests/platforms/macOS/app/test_package.py +++ b/tests/platforms/macOS/app/test_package.py @@ 
-401,6 +401,9 @@ def test_package_app_adhoc_sign_default_notarization( def test_package_bare_app(package_command, first_app_with_binaries, tmp_path): """A macOS App can be packaged without building dmg.""" + # Select app packaging + first_app_with_binaries.packaging_format = "app" + # Select a code signing identity package_command.select_identity.return_value = ( "CAFEBEEF", @@ -469,6 +472,9 @@ def test_package_bare_app(package_command, first_app_with_binaries, tmp_path): def test_package_bare_app_no_sign(package_command, first_app_with_binaries): """A macOS App can be packaged without building dmg, and without signing.""" + # Select app packaging + first_app_with_binaries.packaging_format = "app" + # Select a code signing identity package_command.select_identity.return_value = ( "CAFEBEEF", @@ -500,6 +506,9 @@ def test_package_bare_app_no_sign(package_command, first_app_with_binaries): def test_package_bare_app_no_notarization(package_command, first_app_with_binaries): """A macOS App can be packaged without building dmg, and without notarization.""" + # Select app packaging + first_app_with_binaries.packaging_format = "app" + # Select a code signing identity package_command.select_identity.return_value = ( "CAFEBEEF", @@ -584,6 +593,7 @@ def test_dmg_with_missing_installer_icon( warning.""" # Specify an installer icon, but don't create the matching file. first_app_with_binaries.installer_icon = "pretty" + first_app_with_binaries.packaging_format = "dmg" # Package the app without signing or notarization package_command.package_app( @@ -782,6 +792,7 @@ def test_dmg_with_missing_installer_background( warning.""" # Specify an installer background, but don't create the matching file. 
first_app_with_binaries.installer_background = "pretty_background" +    first_app_with_binaries.packaging_format = "dmg"   # Package the app without signing or notarization     package_command.package_app( diff --git a/tests/test_cmdline.py b/tests/test_cmdline.py index 90a674f33..da9a263a8 100644 --- a/tests/test_cmdline.py +++ b/tests/test_cmdline.py @@ -12,7 +12,7 @@ NoCommandError, UnsupportedCommandError, ) -from briefcase.platforms.linux.appimage import LinuxAppImageCreateCommand +from briefcase.platforms.linux.system import LinuxSystemCreateCommand from briefcase.platforms.macOS.app import ( macOSAppCreateCommand, macOSAppPublishCommand, @@ -238,13 +238,13 @@ def test_bare_command(monkeypatch, logger, console): @pytest.mark.skipif(sys.platform != "linux", reason="requires Linux") def test_linux_default(logger, console): - """``briefcase create`` returns the linux create appimage command on Linux.""" + """``briefcase create`` returns the linux create system command on Linux.""" cmd, options = do_cmdline_parse("create".split(), logger, console) - assert isinstance(cmd, LinuxAppImageCreateCommand) + assert isinstance(cmd, LinuxSystemCreateCommand) assert cmd.platform == "linux" - assert cmd.output_format == "appimage" + assert cmd.output_format == "system" assert cmd.input.enabled assert cmd.logger.verbosity == 1 assert cmd.logger is logger @@ -254,7 +254,7 @@ def test_linux_default(logger, console): @pytest.mark.skipif(sys.platform != "darwin", reason="requires macOS") def test_macOS_default(logger, console): - """``briefcase create`` returns the linux create appimage command on Linux.""" + """``briefcase create`` returns the macOS create command on macOS.""" cmd, options = do_cmdline_parse("create".split(), logger, console) @@ -337,9 +337,9 @@ def test_command_explicit_platform(monkeypatch, logger, console): cmd, options = do_cmdline_parse("create linux".split(), logger, console) - assert isinstance(cmd, LinuxAppImageCreateCommand) + assert isinstance(cmd, 
LinuxSystemCreateCommand) assert cmd.platform == "linux" - assert cmd.output_format == "appimage" + assert cmd.output_format == "system" assert cmd.input.enabled assert cmd.logger.verbosity == 1 assert cmd.logger is logger