diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 00000000..b442c82b --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,3 @@ +# AGENTS.md + +This repository doesn't contain any agent specific instructions other than its README.md and its linked resources. diff --git a/README.md b/README.md index 94d6b9e6..0db76faf 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,35 @@ # CPPython -A Python management solution for C++ dependencies + +A transparent Python management solution for C++ dependencies and building. + +[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](LICENSE.md) +[![PyPI version](https://img.shields.io/pypi/v/cppython.svg)](https://pypi.org/project/cppython/) + +## Goals + +1. **CLI** — Provide imperative commands (`build`, `test`, `bench`, `run`, `install`) for managing C++ projects within a Python ecosystem. +2. **Plugin Architecture** — Support pluggable generators (CMake, Meson) and providers (Conan, vcpkg) so users can mix and match toolchains. +3. **PEP 517 Build Backend** — Act as a transparent build backend that delegates to scikit-build-core or meson-python after ensuring C++ dependencies are in place. +4. **Package Manager Integration** — Integrate with Python package managers so that `pdm install` seamlessly handles C++ dependency installation alongside Python dependencies. + +## Features + +## Setup + +See [Setup](https://synodic.github.io/cppython/setup) for setup instructions. + +## Development + +We use [pdm](https://pdm-project.org/en/latest/) as our build system and package manager. Scripts for development tasks are defined in `pyproject.toml` under the `[tool.pdm.scripts]` section. + +See [Development](https://synodic.github.io/cppython/development) for additional build, test, and installation instructions. + +For contribution guidelines, see [CONTRIBUTING.md](https://github.com/synodic/.github/blob/stable/CONTRIBUTING.md). 
+ +## Documentation + +## License + +This project is licensed under the MIT License — see [LICENSE.md](LICENSE.md) for details. + +Copyright © 2026 Synodic Software diff --git a/cppython/build/__init__.py b/cppython/build/__init__.py index 72daf34a..05dbe90f 100644 --- a/cppython/build/__init__.py +++ b/cppython/build/__init__.py @@ -1,8 +1,9 @@ -"""CPPython build backend wrapping scikit-build-core. +"""CPPython build backend wrapping scikit-build-core and meson-python. -This module provides PEP 517/518 build backend hooks that wrap scikit-build-core, -automatically running CPPython's provider workflow before building -to inject the generated toolchain file into the CMake configuration. +This module provides PEP 517/518 build backend hooks that wrap scikit-build-core +or meson-python depending on the active generator, automatically running +CPPython's provider workflow before building to inject the generated +toolchain or native/cross files into the build configuration. Usage in pyproject.toml: [build-system] diff --git a/cppython/build/backend.py b/cppython/build/backend.py index 0f47a036..445c2342 100644 --- a/cppython/build/backend.py +++ b/cppython/build/backend.py @@ -1,21 +1,65 @@ -"""PEP 517 build backend implementation wrapping scikit-build-core. +"""PEP 517 build backend implementation wrapping scikit-build-core and meson-python. -This module provides the actual build hooks that delegate to scikit-build-core +This module provides the actual build hooks that delegate to the appropriate +underlying build backend (scikit-build-core for CMake, meson-python for Meson) after running CPPython's preparation workflow. 
""" import logging +import tomllib from pathlib import Path +from types import ModuleType from typing import Any +import mesonpy from scikit_build_core import build as skbuild -from cppython.build.prepare import prepare_build +from cppython.build.prepare import BuildPreparationResult, prepare_build +from cppython.plugins.cmake.schema import CMakeSyncData +from cppython.plugins.meson.schema import MesonSyncData logger = logging.getLogger('cppython.build') -def _inject_toolchain(config_settings: dict[str, Any] | None, toolchain_file: Path | None) -> dict[str, Any]: +def _is_meson_project() -> bool: + """Detect if the current project uses Meson by checking pyproject.toml. + + Looks for ``[tool.cppython.generator]`` containing "meson" or the + presence of a ``meson.build`` file in the source directory. + + Returns: + True if the project appears to be Meson-based + """ + source_dir = Path.cwd() + + # Check pyproject.toml for cppython generator configuration + pyproject_path = source_dir / 'pyproject.toml' + if pyproject_path.exists(): + with open(pyproject_path, 'rb') as f: + data = tomllib.load(f) + generator = data.get('tool', {}).get('cppython', {}).get('generator', '') + if isinstance(generator, str) and 'meson' in generator.lower(): + return True + + # Fallback: check for meson.build file + return (source_dir / 'meson.build').exists() + + +def _get_backend(is_meson: bool) -> ModuleType: + """Get the appropriate backend module. + + Args: + is_meson: Whether to use meson-python instead of scikit-build-core + + Returns: + The backend module (mesonpy or scikit_build_core.build) + """ + if is_meson: + return mesonpy + return skbuild + + +def _inject_cmake_toolchain(config_settings: dict[str, Any] | None, toolchain_file: Path | None) -> dict[str, Any]: """Inject the toolchain file into config settings for scikit-build-core. 
Args: @@ -49,70 +93,110 @@ def _inject_toolchain(config_settings: dict[str, Any] | None, toolchain_file: Pa return settings -def _prepare_and_get_settings( +def _inject_meson_files( config_settings: dict[str, Any] | None, + native_file: Path | None, + cross_file: Path | None, ) -> dict[str, Any]: - """Run CPPython preparation and merge toolchain into config settings. + """Inject native/cross files into config settings for meson-python. + + Args: + config_settings: The original config settings (may be None) + native_file: Path to the Meson native file to inject + cross_file: Path to the Meson cross file to inject + + Returns: + Updated config settings with Meson files injected + """ + settings = dict(config_settings) if config_settings else {} + + setup_args_key = 'setup-args' + existing_args = settings.get(setup_args_key, '') + + args_to_add: list[str] = [] + + if native_file and native_file.exists(): + native_arg = f'--native-file={native_file.absolute()}' + if '--native-file' not in existing_args: + args_to_add.append(native_arg) + logger.info('CPPython: Injected --native-file=%s', native_file) + else: + logger.info('CPPython: User-specified native file takes precedence') + + if cross_file and cross_file.exists(): + cross_arg = f'--cross-file={cross_file.absolute()}' + if '--cross-file' not in existing_args: + args_to_add.append(cross_arg) + logger.info('CPPython: Injected --cross-file=%s', cross_file) + else: + logger.info('CPPython: User-specified cross file takes precedence') + + if args_to_add: + if existing_args: + settings[setup_args_key] = f'{existing_args};' + ';'.join(args_to_add) + else: + settings[setup_args_key] = ';'.join(args_to_add) + + return settings + + +def _prepare_and_get_result( + config_settings: dict[str, Any] | None, +) -> tuple[BuildPreparationResult, dict[str, Any]]: + """Run CPPython preparation and merge config into settings. 
Args: config_settings: The original config settings Returns: - Config settings with CPPython toolchain injected + Tuple of (preparation result, updated config settings) """ # Determine source directory (current working directory during build) source_dir = Path.cwd() # Run CPPython preparation - toolchain_file = prepare_build(source_dir) + result = prepare_build(source_dir) - # Inject toolchain into config settings - return _inject_toolchain(config_settings, toolchain_file) + # Inject settings based on sync data type + settings = dict(config_settings) if config_settings else {} + if result.sync_data is not None: + if isinstance(result.sync_data, CMakeSyncData): + settings = _inject_cmake_toolchain(settings, result.sync_data.toolchain_file) + elif isinstance(result.sync_data, MesonSyncData): + settings = _inject_meson_files(settings, result.sync_data.native_file, result.sync_data.cross_file) -# PEP 517 Hooks - delegating to scikit-build-core after preparation + return result, settings -def get_requires_for_build_wheel( - config_settings: dict[str, Any] | None = None, -) -> list[str]: - """Get additional requirements for building a wheel. +def _is_meson_build(result: BuildPreparationResult) -> bool: + """Determine if the build should use meson-python based on sync data. Args: - config_settings: Build configuration settings + result: The build preparation result Returns: - List of additional requirements + True if meson-python should be used, False for scikit-build-core """ - return skbuild.get_requires_for_build_wheel(config_settings) + return isinstance(result.sync_data, MesonSyncData) -def get_requires_for_build_sdist( - config_settings: dict[str, Any] | None = None, -) -> list[str]: - """Get additional requirements for building an sdist. 
+# PEP 517 Hooks - dispatching to the appropriate backend after preparation - Args: - config_settings: Build configuration settings - Returns: - List of additional requirements - """ - return skbuild.get_requires_for_build_sdist(config_settings) +def get_requires_for_build_wheel(config_settings: dict[str, Any] | None = None) -> list[str]: + """Get additional requirements for building a wheel.""" + return _get_backend(_is_meson_project()).get_requires_for_build_wheel(config_settings) -def get_requires_for_build_editable( - config_settings: dict[str, Any] | None = None, -) -> list[str]: - """Get additional requirements for building an editable install. +def get_requires_for_build_sdist(config_settings: dict[str, Any] | None = None) -> list[str]: + """Get additional requirements for building an sdist.""" + return _get_backend(_is_meson_project()).get_requires_for_build_sdist(config_settings) - Args: - config_settings: Build configuration settings - Returns: - List of additional requirements - """ - return skbuild.get_requires_for_build_editable(config_settings) +def get_requires_for_build_editable(config_settings: dict[str, Any] | None = None) -> list[str]: + """Get additional requirements for building an editable install.""" + return _get_backend(_is_meson_project()).get_requires_for_build_editable(config_settings) def build_wheel( @@ -120,49 +204,19 @@ def build_wheel( config_settings: dict[str, Any] | None = None, metadata_directory: str | None = None, ) -> str: - """Build a wheel from the source distribution. - - This runs CPPython's provider workflow first to ensure C++ dependencies - are installed and the toolchain file is generated, then delegates to - scikit-build-core for the actual wheel build. 
- - Args: - wheel_directory: Directory to place the built wheel - config_settings: Build configuration settings - metadata_directory: Directory containing wheel metadata - - Returns: - The basename of the built wheel - """ + """Build a wheel, running CPPython preparation first.""" logger.info('CPPython: Starting wheel build') - - # Prepare CPPython and get updated settings - settings = _prepare_and_get_settings(config_settings) - - # Delegate to scikit-build-core - return skbuild.build_wheel(wheel_directory, settings, metadata_directory) + result, settings = _prepare_and_get_result(config_settings) + return _get_backend(_is_meson_build(result)).build_wheel(wheel_directory, settings, metadata_directory) def build_sdist( sdist_directory: str, config_settings: dict[str, Any] | None = None, ) -> str: - """Build a source distribution. - - For sdist, we don't run the full CPPython workflow since the C++ dependencies - should be resolved at wheel build time, not sdist creation time. - - Args: - sdist_directory: Directory to place the built sdist - config_settings: Build configuration settings - - Returns: - The basename of the built sdist - """ + """Build a source distribution (no CPPython workflow needed).""" logger.info('CPPython: Starting sdist build') - - # Delegate directly to scikit-build-core (no preparation needed for sdist) - return skbuild.build_sdist(sdist_directory, config_settings) + return _get_backend(_is_meson_project()).build_sdist(sdist_directory, config_settings) def build_editable( @@ -170,54 +224,23 @@ def build_editable( config_settings: dict[str, Any] | None = None, metadata_directory: str | None = None, ) -> str: - """Build an editable wheel. - - This runs CPPython's provider workflow first, similar to build_wheel. 
- - Args: - wheel_directory: Directory to place the built wheel - config_settings: Build configuration settings - metadata_directory: Directory containing wheel metadata - - Returns: - The basename of the built wheel - """ + """Build an editable wheel, running CPPython preparation first.""" logger.info('CPPython: Starting editable build') - - # Prepare CPPython and get updated settings - settings = _prepare_and_get_settings(config_settings) - - # Delegate to scikit-build-core - return skbuild.build_editable(wheel_directory, settings, metadata_directory) + result, settings = _prepare_and_get_result(config_settings) + return _get_backend(_is_meson_build(result)).build_editable(wheel_directory, settings, metadata_directory) def prepare_metadata_for_build_wheel( metadata_directory: str, config_settings: dict[str, Any] | None = None, ) -> str: - """Prepare metadata for wheel build. - - Args: - metadata_directory: Directory to place the metadata - config_settings: Build configuration settings - - Returns: - The basename of the metadata directory - """ - return skbuild.prepare_metadata_for_build_wheel(metadata_directory, config_settings) + """Prepare metadata for wheel build.""" + return _get_backend(_is_meson_project()).prepare_metadata_for_build_wheel(metadata_directory, config_settings) def prepare_metadata_for_build_editable( metadata_directory: str, config_settings: dict[str, Any] | None = None, ) -> str: - """Prepare metadata for editable build. 
- - Args: - metadata_directory: Directory to place the metadata - config_settings: Build configuration settings - - Returns: - The basename of the metadata directory - """ - return skbuild.prepare_metadata_for_build_editable(metadata_directory, config_settings) + """Prepare metadata for editable build.""" + return _get_backend(_is_meson_project()).prepare_metadata_for_build_editable(metadata_directory, config_settings) diff --git a/cppython/build/prepare.py b/cppython/build/prepare.py index 4084367d..b864ae9b 100644 --- a/cppython/build/prepare.py +++ b/cppython/build/prepare.py @@ -1,35 +1,40 @@ """Build preparation utilities for CPPython. This module handles the pre-build workflow: running CPPython's provider -to install C++ dependencies and extract the toolchain file path for -injection into scikit-build-core's CMake configuration. +to install C++ dependencies and extract sync data for injection into +the appropriate build backend (scikit-build-core or meson-python). """ import logging import tomllib +from dataclasses import dataclass from pathlib import Path from typing import Any -from cppython.core.schema import Interface, ProjectConfiguration -from cppython.plugins.cmake.schema import CMakeSyncData +from cppython.core.interface import NoOpInterface +from cppython.core.schema import ProjectConfiguration, SyncData from cppython.project import Project +from cppython.utility.exception import InstallationVerificationError -class BuildInterface(Interface): - """Minimal interface implementation for build backend usage.""" +@dataclass +class BuildPreparationResult: + """Result of the build preparation step. - def write_pyproject(self) -> None: - """No-op for build backend - we don't modify pyproject.toml during builds.""" + Contains the sync data from the provider, which the build backend + uses to determine which underlying backend to delegate to and what + configuration to inject. 
+ """ + + sync_data: SyncData | None = None - def write_configuration(self) -> None: - """No-op for build backend - we don't modify configuration during builds.""" - def write_user_configuration(self) -> None: - """No-op for build backend - we don't modify user configuration during builds.""" +BuildInterface = NoOpInterface +"""Interface implementation for the build backend (no-op write-backs).""" class BuildPreparation: - """Handles CPPython preparation before scikit-build-core runs.""" + """Handles CPPython preparation before the build backend runs.""" def __init__(self, source_dir: Path) -> None: """Initialize build preparation. @@ -56,49 +61,23 @@ def _load_pyproject(self) -> dict[str, Any]: with open(pyproject_path, 'rb') as f: return tomllib.load(f) - def _get_toolchain_file(self, project: Project) -> Path | None: - """Extract the toolchain file path from the project's sync data. + def prepare(self) -> BuildPreparationResult: + """Run CPPython preparation and return the build preparation result. - Args: - project: The initialized CPPython project + Syncs provider config and verifies that C++ dependencies have been + installed by a prior ``install()`` call. Does **not** install + dependencies itself — the build backend is not responsible for that. Returns: - Path to the toolchain file, or None if not available - """ - if not project.enabled: - return None - - # Access the internal data to get sync information - # The toolchain file is generated during the sync process - data = project._data # noqa: SLF001 - - # Get sync data from provider for the generator - sync_data = data.plugins.provider.sync_data(data.plugins.generator) + BuildPreparationResult containing sync data for the active generator - if isinstance(sync_data, CMakeSyncData): - return sync_data.toolchain_file - - return None - - def prepare(self) -> Path | None: - """Run CPPython preparation and return the toolchain file path. 
- - This runs the provider workflow (download tools, sync, install) - and extracts the generated toolchain file path. - - Returns: - Path to the generated toolchain file, or None if CPPython is not configured + Raises: + InstallationVerificationError: If provider artifacts are missing """ self.logger.info('CPPython: Preparing build environment') pyproject_data = self._load_pyproject() - # Check if CPPython is configured - tool_data = pyproject_data.get('tool', {}) - if 'cppython' not in tool_data: - self.logger.info('CPPython: No [tool.cppython] configuration found, skipping preparation') - return None - # Get version from pyproject if available project_data = pyproject_data.get('project', {}) version = project_data.get('version') @@ -116,31 +95,35 @@ def prepare(self) -> Path | None: if not project.enabled: self.logger.info('CPPython: Project not enabled, skipping preparation') - return None + return BuildPreparationResult() - # Run the install workflow to ensure dependencies are ready - self.logger.info('CPPython: Installing C++ dependencies') - project.install() + # Sync and verify — does NOT install dependencies + self.logger.info('CPPython: Verifying C++ dependencies are installed') - # Extract the toolchain file path - toolchain_file = self._get_toolchain_file(project) + try: + sync_data = project.prepare_build() + except InstallationVerificationError: + self.logger.error( + "CPPython: C++ dependencies not installed. Run 'cppython install' or 'pdm install' before building." 
+ ) + raise - if toolchain_file: - self.logger.info('CPPython: Using toolchain file: %s', toolchain_file) + if sync_data: + self.logger.info('CPPython: Sync data obtained from provider: %s', type(sync_data).__name__) else: - self.logger.warning('CPPython: No toolchain file generated') + self.logger.warning('CPPython: No sync data generated') - return toolchain_file + return BuildPreparationResult(sync_data=sync_data) -def prepare_build(source_dir: Path) -> Path | None: +def prepare_build(source_dir: Path) -> BuildPreparationResult: """Convenience function to prepare the build environment. Args: source_dir: The source directory containing pyproject.toml Returns: - Path to the generated toolchain file, or None if not available + BuildPreparationResult containing sync data for the active generator """ preparation = BuildPreparation(source_dir) return preparation.prepare() diff --git a/cppython/builder.py b/cppython/builder.py index 6a4fb209..43a94174 100644 --- a/cppython/builder.py +++ b/cppython/builder.py @@ -36,6 +36,7 @@ DataPlugin, PEP621Configuration, PEP621Data, + Plugin, ProjectConfiguration, ProjectData, ) @@ -65,21 +66,21 @@ def generate_plugins( Returns: The resolved plugin data """ - raw_generator_plugins = self.find_generators() + raw_generator_plugins = self._find_plugins('generator', Generator) generator_plugins = self.filter_plugins( raw_generator_plugins, - self._get_effective_generator_name(cppython_local_configuration), + self._get_effective_plugin_name(cppython_local_configuration.generators), 'Generator', ) - raw_provider_plugins = self.find_providers() + raw_provider_plugins = self._find_plugins('provider', Provider) provider_plugins = self.filter_plugins( raw_provider_plugins, - self._get_effective_provider_name(cppython_local_configuration), + self._get_effective_plugin_name(cppython_local_configuration.providers), 'Provider', ) - scm_plugins = self.find_source_managers() + scm_plugins = self._find_plugins('scm', SCM) scm_type = 
self.select_scm(scm_plugins, project_data) @@ -88,72 +89,34 @@ def generate_plugins( return PluginBuildData(generator_type=generator_type, provider_type=provider_type, scm_type=scm_type) - def _get_effective_generator_name(self, config: CPPythonLocalConfiguration) -> str | None: - """Get the effective generator name from configuration + @staticmethod + def _get_effective_plugin_name(plugins: dict[TypeName, Any]) -> str | None: + """Get the effective plugin name from a plugins configuration dict. Args: - config: The local configuration + plugins: The plugins dict (e.g. config.generators or config.providers) Returns: - The generator name to use, or None for auto-detection + The first plugin name if any are configured, or None for auto-detection """ - if config.generators: - # For now, pick the first generator (in future, could support selection logic) - return list(config.generators.keys())[0] - - # No generators specified, use auto-detection + if plugins: + return list(plugins.keys())[0] return None - def _get_effective_provider_name(self, config: CPPythonLocalConfiguration) -> str | None: - """Get the effective provider name from configuration - - Args: - config: The local configuration - - Returns: - The provider name to use, or None for auto-detection - """ - if config.providers: - # For now, pick the first provider (in future, could support selection logic) - return list(config.providers.keys())[0] - - # No providers specified, use auto-detection - return None - - def _get_effective_generator_config( - self, config: CPPythonLocalConfiguration, generator_name: str - ) -> dict[str, Any]: - """Get the effective generator configuration - - Args: - config: The local configuration - generator_name: The name of the generator being used - - Returns: - The configuration dict for the generator - """ - generator_type_name = TypeName(generator_name) - if config.generators and generator_type_name in config.generators: - return config.generators[generator_type_name] - - # 
Return empty config if not found - return {} - - def _get_effective_provider_config(self, config: CPPythonLocalConfiguration, provider_name: str) -> dict[str, Any]: - """Get the effective provider configuration + @staticmethod + def get_plugin_config(plugins: dict[TypeName, Any], plugin_name: str) -> dict[str, Any]: + """Get the configuration dict for a specific plugin. Args: - config: The local configuration - provider_name: The name of the provider being used + plugins: The plugins dict (e.g. config.generators or config.providers) + plugin_name: The name of the plugin Returns: - The configuration dict for the provider + The configuration dict for the plugin, or empty dict if not found """ - provider_type_name = TypeName(provider_name) - if config.providers and provider_type_name in config.providers: - return config.providers[provider_type_name] - - # Return empty config if not found + type_name = TypeName(plugin_name) + if type_name in plugins: + return plugins[type_name] return {} @staticmethod @@ -201,92 +164,32 @@ def resolve_global_config() -> CPPythonGlobalConfiguration: global_config_data = loader.load_global_config() if global_config_data: return resolve_model(CPPythonGlobalConfiguration, global_config_data) - except (FileNotFoundError, ValueError): + except (FileNotFoundError, ValueError): # If global config doesn't exist or is invalid, use defaults pass return CPPythonGlobalConfiguration() - def find_generators(self) -> list[type[Generator]]: - """Extracts the generator plugins from the package's entry points - - Raises: - PluginError: Raised if no plugins can be found - - Returns: - The list of generator plugin types - """ - group_name = 'generator' - plugin_types: list[type[Generator]] = [] - - entries = entry_points(group=f'cppython.{group_name}') - - # Filter entries by type - for entry_point in list(entries): - loaded_type = entry_point.load() - if not issubclass(loaded_type, Generator): - self._logger.warning( - f"Found incompatible plugin. 
The '{loaded_type.name()}' plugin must be an instance of" - f" '{group_name}'" - ) - else: - self._logger.info(f'{group_name} plugin found: {loaded_type.name()} from {getmodule(loaded_type)}') - plugin_types.append(loaded_type) - - if not plugin_types: - raise PluginError(f'No {group_name} plugin was found') - - return plugin_types - - def find_providers(self) -> list[type[Provider]]: - """Extracts the provider plugins from the package's entry points - - Raises: - PluginError: Raised if no plugins can be found - - Returns: - The list of provider plugin types - """ - group_name = 'provider' - plugin_types: list[type[Provider]] = [] - - entries = entry_points(group=f'cppython.{group_name}') + def _find_plugins[T: Plugin](self, group_name: str, base_type: type[T]) -> list[type[T]]: + """Extracts plugins of a given type from entry points. - # Filter entries by type - for entry_point in list(entries): - loaded_type = entry_point.load() - if not issubclass(loaded_type, Provider): - self._logger.warning( - f"Found incompatible plugin. The '{loaded_type.name()}' plugin must be an instance of" - f" '{group_name}'" - ) - else: - self._logger.info(f'{group_name} plugin found: {loaded_type.name()} from {getmodule(loaded_type)}') - plugin_types.append(loaded_type) - - if not plugin_types: - raise PluginError(f'No {group_name} plugin was found') - - return plugin_types - - def find_source_managers(self) -> list[type[SCM]]: - """Extracts the source control manager plugins from the package's entry points + Args: + group_name: The entry point group suffix (e.g. 
'generator', 'provider', 'scm') + base_type: The expected base type to filter against Raises: PluginError: Raised if no plugins can be found Returns: - The list of source control manager plugin types + The list of discovered plugin types """ - group_name = 'scm' - plugin_types: list[type[SCM]] = [] + plugin_types: list[type[T]] = [] entries = entry_points(group=f'cppython.{group_name}') - # Filter entries by type for entry_point in list(entries): loaded_type = entry_point.load() - if not issubclass(loaded_type, SCM): + if not issubclass(loaded_type, base_type): self._logger.warning( f"Found incompatible plugin. The '{loaded_type.name()}' plugin must be an instance of" f" '{group_name}'" @@ -554,15 +457,15 @@ def build( pep621_data = self._resolver.generate_pep621_data(pep621_configuration, self._project_configuration, scm) # Create the chosen plugins - generator_config = self._resolver._get_effective_generator_config( - cppython_local_configuration, plugin_build_data.generator_type.name() + generator_config = Resolver.get_plugin_config( + cppython_local_configuration.generators, plugin_build_data.generator_type.name() ) generator = self._resolver.create_generator( core_data, pep621_data, generator_config, plugin_build_data.generator_type ) - provider_config = self._resolver._get_effective_provider_config( - cppython_local_configuration, plugin_build_data.provider_type.name() + provider_config = Resolver.get_plugin_config( + cppython_local_configuration.providers, plugin_build_data.provider_type.name() ) provider = self._resolver.create_provider( core_data, pep621_data, provider_config, plugin_build_data.provider_type diff --git a/cppython/console/entry.py b/cppython/console/entry.py index 123aa4ec..f2ee5891 100644 --- a/cppython/console/entry.py +++ b/cppython/console/entry.py @@ -1,4 +1,4 @@ -"""A click CLI for CPPython interfacing""" +"""A Typer CLI for CPPython interfacing""" from pathlib import Path from typing import Annotated @@ -78,23 +78,25 @@ def 
_parse_groups_argument(groups: str | None) -> list[str] | None: def _find_pyproject_file() -> Path: - """Searches upward for a pyproject.toml file + """Searches upward for a pyproject.toml file. Returns: - The found directory + The directory containing pyproject.toml + + Raises: + AssertionError: If no pyproject.toml is found up to the filesystem root """ - # Search for a path upward path = Path.cwd() - while not path.glob('pyproject.toml'): - if path.is_absolute(): + while True: + if (path / 'pyproject.toml').exists(): + return path + parent = path.parent + if parent == path: raise AssertionError( 'This is not a valid project. No pyproject.toml found in the current directory or any of its parents.' ) - - path = Path(path) - - return path + path = parent @app.callback() @@ -204,3 +206,88 @@ def publish( """ project = get_enabled_project(context) project.publish() + + +@app.command() +def build( + context: typer.Context, + configuration: Annotated[ + str | None, + typer.Option(help='Named build configuration to use (e.g. CMake preset name, Meson build directory)'), + ] = None, +) -> None: + """Build the project + + Assumes dependencies have been installed via `install`. + + Args: + context: The CLI configuration object + configuration: Optional named configuration + """ + project = get_enabled_project(context) + project.build(configuration=configuration) + + +@app.command() +def test( + context: typer.Context, + configuration: Annotated[ + str | None, + typer.Option(help='Named build configuration to use (e.g. CMake preset name, Meson build directory)'), + ] = None, +) -> None: + """Run project tests + + Assumes dependencies have been installed via `install`. 
+ + Args: + context: The CLI configuration object + configuration: Optional named configuration + """ + project = get_enabled_project(context) + project.test(configuration=configuration) + + +@app.command() +def bench( + context: typer.Context, + configuration: Annotated[ + str | None, + typer.Option(help='Named build configuration to use (e.g. CMake preset name, Meson build directory)'), + ] = None, +) -> None: + """Run project benchmarks + + Assumes dependencies have been installed via `install`. + + Args: + context: The CLI configuration object + configuration: Optional named configuration + """ + project = get_enabled_project(context) + project.bench(configuration=configuration) + + +@app.command() +def run( + context: typer.Context, + target: Annotated[ + str, + typer.Argument(help='The name of the build target/executable to run'), + ], + configuration: Annotated[ + str | None, + typer.Option(help='Named build configuration to use (e.g. CMake preset name, Meson build directory)'), + ] = None, +) -> None: + """Run a built executable + + Assumes dependencies have been installed via `install`. 
+ + Args: + context: The CLI configuration object + target: The name of the build target to run + configuration: Optional named configuration + """ + project = get_enabled_project(context) + project.run(target, configuration=configuration) diff --git a/cppython/console/schema.py b/cppython/console/schema.py index 99aaf595..32590001 100644 --- a/cppython/console/schema.py +++ b/cppython/console/schema.py @@ -2,20 +2,11 @@ from pydantic import ConfigDict +from cppython.core.interface import NoOpInterface from cppython.core.schema import CPPythonModel, Interface, ProjectConfiguration - -class ConsoleInterface(Interface): - """Interface implementation to pass to the project""" - - def write_pyproject(self) -> None: - """Write output to pyproject.toml""" - - def write_configuration(self) -> None: - """Write output to primary configuration (pyproject.toml or cppython.toml)""" - - def write_user_configuration(self) -> None: - """Write output to global user configuration (~/.cppython/config.toml)""" +ConsoleInterface = NoOpInterface +"""Interface implementation for the console application (no-op write-backs).""" class ConsoleConfiguration(CPPythonModel): diff --git a/cppython/core/exception.py b/cppython/core/exception.py index a77bffc0..bc7d5216 100644 --- a/cppython/core/exception.py +++ b/cppython/core/exception.py @@ -1,28 +1,5 @@ """Custom exceptions used by CPPython""" -from pydantic import BaseModel - - -class ConfigError(BaseModel): - """Data for ConfigError""" - - message: str - class ConfigException(ValueError): - """Raised when there is a configuration error""" - - def __init__(self, message: str, errors: list[ConfigError]): - """Initializes the exception""" - super().__init__(message) - self._errors = errors - - @property - def error_count(self) -> int: - """The number of configuration errors associated with this exception""" - return len(self._errors) - - @property - def errors(self) -> list[ConfigError]: - """The list of configuration errors""" - return 
self._errors + """Raised when there is a configuration error.""" diff --git a/cppython/core/interface.py b/cppython/core/interface.py new file mode 100644 index 00000000..0cf612b4 --- /dev/null +++ b/cppython/core/interface.py @@ -0,0 +1,20 @@ +"""Default interface implementations.""" + +from cppython.core.schema import Interface + + +class NoOpInterface(Interface): + """No-op implementation of Interface. + + Used when no write-back to configuration files is needed, + e.g. in the build backend and console application contexts. + """ + + def write_pyproject(self) -> None: + """No-op.""" + + def write_configuration(self) -> None: + """No-op.""" + + def write_user_configuration(self) -> None: + """No-op.""" diff --git a/cppython/core/plugin_schema/generator.py b/cppython/core/plugin_schema/generator.py index 83bb373f..9e170bd4 100644 --- a/cppython/core/plugin_schema/generator.py +++ b/cppython/core/plugin_schema/generator.py @@ -69,3 +69,43 @@ def features(directory: DirectoryPath) -> SupportedFeatures: The supported features - `SupportedGeneratorFeatures`. Cast to this type to help us avoid generic typing """ raise NotImplementedError + + @abstractmethod + def build(self, configuration: str | None = None) -> None: + """Builds the project using the generator's build system. + + Executes the build step. The interpretation of ``configuration`` is + generator-specific (e.g. CMake preset name, Meson build directory). + + Args: + configuration: Optional named configuration override. + """ + raise NotImplementedError + + @abstractmethod + def test(self, configuration: str | None = None) -> None: + """Runs tests using the generator's build system. + + Args: + configuration: Optional named configuration override. + """ + raise NotImplementedError + + @abstractmethod + def bench(self, configuration: str | None = None) -> None: + """Runs benchmarks using the generator's build system. + + Args: + configuration: Optional named configuration override. 
+ """ + raise NotImplementedError + + @abstractmethod + def run(self, target: str, configuration: str | None = None) -> None: + """Runs a built executable by target name. + + Args: + target: The name of the build target/executable to run. + configuration: Optional named configuration override. + """ + raise NotImplementedError diff --git a/cppython/core/plugin_schema/provider.py b/cppython/core/plugin_schema/provider.py index b4028660..740a00c6 100644 --- a/cppython/core/plugin_schema/provider.py +++ b/cppython/core/plugin_schema/provider.py @@ -79,6 +79,19 @@ def features(directory: DirectoryPath) -> SupportedFeatures: """ raise NotImplementedError + @abstractmethod + def verify_installed(self) -> None: + """Verify that provider artifacts exist on disk. + + This is called by the build backend to confirm that C++ dependencies + have been installed (via a prior ``install()`` call) before delegating + to the downstream build backend. + + Raises: + InstallationVerificationError: If required artifacts are missing + """ + raise NotImplementedError + @abstractmethod def install(self, groups: list[str] | None = None) -> None: """Called when dependencies need to be installed from a lock file. 
diff --git a/cppython/core/resolution.py b/cppython/core/resolution.py index 76431449..e2e4f67c 100644 --- a/cppython/core/resolution.py +++ b/cppython/core/resolution.py @@ -20,6 +20,7 @@ PEP621Configuration, PEP621Data, Plugin, + PluginGroupData, ProjectConfiguration, ProjectData, ) @@ -180,7 +181,7 @@ def resolve_cppython( dependency_groups[group_name] = resolved_group if invalid_requirements: - raise ConfigException('\n'.join(invalid_requirements), []) + raise ConfigException('\n'.join(invalid_requirements)) cppython_data = CPPythonData( configuration_path=modified_configuration_path, @@ -212,20 +213,7 @@ def resolve_cppython_plugin(cppython_data: CPPythonData, plugin_type: type[Plugi # Add plugin specific paths to the base path modified_install_path = cppython_data.install_path / plugin_type.name() - plugin_data = CPPythonData( - configuration_path=cppython_data.configuration_path, - install_path=modified_install_path, - tool_path=cppython_data.tool_path, - build_path=cppython_data.build_path, - current_check=cppython_data.current_check, - provider_name=cppython_data.provider_name, - generator_name=cppython_data.generator_name, - scm_name=cppython_data.scm_name, - dependencies=cppython_data.dependencies, - dependency_groups=cppython_data.dependency_groups, - provider_data=cppython_data.provider_data, - generator_data=cppython_data.generator_data, - ) + plugin_data = cppython_data.model_copy(update={'install_path': modified_install_path}) return cast(CPPythonPluginData, plugin_data) @@ -245,24 +233,43 @@ def _write_tool_directory(cppython_data: CPPythonData, directory: Path) -> Direc return plugin_directory -def resolve_generator(project_data: ProjectData, cppython_data: CPPythonPluginData) -> GeneratorPluginGroupData: - """Creates an instance from the given project +def _resolve_plugin_group[T: PluginGroupData]( + project_data: ProjectData, cppython_data: CPPythonPluginData, category: str, plugin_name: str, group_type: type[T] +) -> T: + """Generic helper to 
resolve plugin group data. Args: project_data: The input project data cppython_data: The input cppython data + category: The subfolder category (e.g. 'generators', 'providers', 'managers') + plugin_name: The name of the specific plugin + group_type: The PluginGroupData subclass to construct Returns: The plugin specific configuration """ root_directory = project_data.project_root - tool_directory = _write_tool_directory(cppython_data, Path('generators') / cppython_data.generator_name) - configuration = GeneratorPluginGroupData(root_directory=root_directory, tool_directory=tool_directory) - return configuration + tool_directory = _write_tool_directory(cppython_data, Path(category) / plugin_name) + return group_type(root_directory=root_directory, tool_directory=tool_directory) + + +def resolve_generator(project_data: ProjectData, cppython_data: CPPythonPluginData) -> GeneratorPluginGroupData: + """Creates generator plugin group data from the given project. + + Args: + project_data: The input project data + cppython_data: The input cppython data + + Returns: + The plugin specific configuration + """ + return _resolve_plugin_group( + project_data, cppython_data, 'generators', cppython_data.generator_name, GeneratorPluginGroupData + ) def resolve_provider(project_data: ProjectData, cppython_data: CPPythonPluginData) -> ProviderPluginGroupData: - """Creates an instance from the given project + """Creates provider plugin group data from the given project. 
Args: project_data: The input project data @@ -271,14 +278,13 @@ def resolve_provider(project_data: ProjectData, cppython_data: CPPythonPluginDat Returns: The plugin specific configuration """ - root_directory = project_data.project_root - tool_directory = _write_tool_directory(cppython_data, Path('providers') / cppython_data.provider_name) - configuration = ProviderPluginGroupData(root_directory=root_directory, tool_directory=tool_directory) - return configuration + return _resolve_plugin_group( + project_data, cppython_data, 'providers', cppython_data.provider_name, ProviderPluginGroupData + ) def resolve_scm(project_data: ProjectData, cppython_data: CPPythonPluginData) -> SCMPluginGroupData: - """Creates an instance from the given project + """Creates SCM plugin group data from the given project. Args: project_data: The input project data @@ -287,10 +293,7 @@ def resolve_scm(project_data: ProjectData, cppython_data: CPPythonPluginData) -> Returns: The plugin specific configuration """ - root_directory = project_data.project_root - tool_directory = _write_tool_directory(cppython_data, Path('managers') / cppython_data.scm_name) - configuration = SCMPluginGroupData(root_directory=root_directory, tool_directory=tool_directory) - return configuration + return _resolve_plugin_group(project_data, cppython_data, 'managers', cppython_data.scm_name, SCMPluginGroupData) def resolve_model[T: BaseModel](model: type[T], data: dict[str, Any]) -> T: @@ -321,4 +324,4 @@ def resolve_model[T: BaseModel](model: type[T], data: dict[str, Any]) -> T: else: formatted_errors = 'An unknown validation error occurred.' 
- raise ConfigException(f'The input project failed validation:\n{formatted_errors}', []) from e + raise ConfigException(f'The input project failed validation:\n{formatted_errors}') from e diff --git a/cppython/plugins/cmake/builder.py b/cppython/plugins/cmake/builder.py index ee76c4c2..b68d4e3d 100644 --- a/cppython/plugins/cmake/builder.py +++ b/cppython/plugins/cmake/builder.py @@ -8,6 +8,7 @@ CMakePresets, CMakeSyncData, ConfigurePreset, + TestPreset, ) @@ -100,22 +101,25 @@ def write_cppython_preset( @staticmethod def _create_presets( cmake_data: CMakeData, build_directory: Path - ) -> tuple[list[ConfigurePreset], list[BuildPreset]]: - """Create the default configure and build presets for the user. + ) -> tuple[list[ConfigurePreset], list[BuildPreset], list[TestPreset]]: + """Create the default configure, build, and test presets for the user. Args: cmake_data: The CMake data to use build_directory: The build directory to use Returns: - A tuple containing the configure preset and list of build presets + A tuple containing the configure presets, build presets, and test presets """ user_configure_presets: list[ConfigurePreset] = [] user_build_presets: list[BuildPreset] = [] + user_test_presets: list[TestPreset] = [] name = cmake_data.configuration_name release_name = name + '-release' debug_name = name + '-debug' + bench_release_name = name + '-bench-release' + bench_debug_name = name + '-bench-debug' user_configure_presets.append( ConfigurePreset( @@ -161,7 +165,43 @@ def _create_presets( ) ) - return user_configure_presets, user_build_presets + # Test presets + user_test_presets.append( + TestPreset( + name=release_name, + description='Run tests for release configuration', + configurePreset=release_name, + ) + ) + + user_test_presets.append( + TestPreset( + name=debug_name, + description='Run tests for debug configuration', + configurePreset=debug_name, + ) + ) + + # Benchmark test presets with label filter + user_test_presets.append( + TestPreset( + 
name=bench_release_name, + description='Run benchmark tests for release configuration', + configurePreset=release_name, + filter={'include': {'label': 'benchmark'}}, + ) + ) + + user_test_presets.append( + TestPreset( + name=bench_debug_name, + description='Run benchmark tests for debug configuration', + configurePreset=debug_name, + filter={'include': {'label': 'benchmark'}}, + ) + ) + + return user_configure_presets, user_build_presets, user_test_presets @staticmethod def _load_existing_preset(preset_file: Path) -> CMakePresets | None: @@ -200,11 +240,35 @@ def _update_configure_preset(existing_preset: ConfigurePreset, build_directory: if not existing_preset.binaryDir: existing_preset.binaryDir = '${sourceDir}/' + build_directory.as_posix() # type: ignore[misc] + @staticmethod + def _merge_presets[T: (ConfigurePreset, BuildPreset, TestPreset)]( + existing: list[T] | None, + new_presets: list[T], + ) -> list[T]: + """Merge new presets into an existing list, adding only those not already present. + + Args: + existing: The existing preset list (may be None) + new_presets: The new presets to merge in + + Returns: + The merged list of presets + """ + if existing is None: + return new_presets.copy() + + for preset in new_presets: + if not any(p.name == preset.name for p in existing): + existing.append(preset) + + return existing + @staticmethod def _modify_presets( root_preset: CMakePresets, user_configure_presets: list[ConfigurePreset], user_build_presets: list[BuildPreset], + user_test_presets: list[TestPreset], build_directory: Path, ) -> None: """Handle presets in the root preset. 
@@ -213,6 +277,7 @@ def _modify_presets( root_preset: The root preset to modify user_configure_presets: The user's configure presets user_build_presets: The user's build presets + user_test_presets: The user's test presets build_directory: The build directory to use """ if root_preset.configurePresets is None: @@ -228,14 +293,8 @@ def _modify_presets( else: root_preset.configurePresets.append(user_configure_preset) - if root_preset.buildPresets is None: - root_preset.buildPresets = user_build_presets.copy() # type: ignore[misc] - else: - # Add build presets if they don't exist - for build_preset in user_build_presets: - existing = next((p for p in root_preset.buildPresets if p.name == build_preset.name), None) - if not existing: - root_preset.buildPresets.append(build_preset) + root_preset.buildPresets = Builder._merge_presets(root_preset.buildPresets, user_build_presets) # type: ignore[misc] + root_preset.testPresets = Builder._merge_presets(root_preset.testPresets, user_test_presets) # type: ignore[misc] @staticmethod def _modify_includes(root_preset: CMakePresets, preset_file: Path, cppython_preset_file: Path) -> None: @@ -273,7 +332,9 @@ def generate_root_preset( A CMakePresets object """ # Create user presets - user_configure_presets, user_build_presets = Builder._create_presets(cmake_data, build_directory) + user_configure_presets, user_build_presets, user_test_presets = Builder._create_presets( + cmake_data, build_directory + ) # Load existing preset or create new one root_preset = Builder._load_existing_preset(preset_file) @@ -281,9 +342,12 @@ def generate_root_preset( root_preset = CMakePresets( configurePresets=user_configure_presets, buildPresets=user_build_presets, + testPresets=user_test_presets, ) else: - Builder._modify_presets(root_preset, user_configure_presets, user_build_presets, build_directory) + Builder._modify_presets( + root_preset, user_configure_presets, user_build_presets, user_test_presets, build_directory + ) 
Builder._modify_includes(root_preset, preset_file, cppython_preset_file) diff --git a/cppython/plugins/cmake/plugin.py b/cppython/plugins/cmake/plugin.py index c86ca4e7..6c64af2e 100644 --- a/cppython/plugins/cmake/plugin.py +++ b/cppython/plugins/cmake/plugin.py @@ -1,5 +1,6 @@ """The CMake generator implementation""" +import subprocess from pathlib import Path from typing import Any @@ -77,3 +78,106 @@ def sync(self, sync_data: SyncData) -> None: ) case _: raise ValueError('Unsupported sync data type') + + def _cmake_command(self) -> str: + """Returns the cmake command to use. + + Returns: + The cmake binary path as a string + """ + if self.data.cmake_binary: + return str(self.data.cmake_binary) + return 'cmake' + + def _ctest_command(self) -> str: + """Returns the ctest command to use. + + Derives the ctest path from the cmake binary path when available. + + Returns: + The ctest binary path as a string + """ + if self.data.cmake_binary: + # ctest is typically in the same directory as cmake + ctest_path = self.data.cmake_binary.parent / 'ctest' + if ctest_path.exists(): + return str(ctest_path) + # Try with .exe on Windows + ctest_exe = self.data.cmake_binary.parent / 'ctest.exe' + if ctest_exe.exists(): + return str(ctest_exe) + return 'ctest' + + def _resolve_configuration(self, configuration: str | None) -> str: + """Resolves the effective CMake preset from CLI argument or default config. + + Args: + configuration: The configuration value passed from the CLI, or None + + Returns: + The resolved CMake preset name + + Raises: + ValueError: If no configuration is available from either CLI or default-configuration config + """ + effective = configuration or self.data.default_configuration + if effective is None: + raise ValueError( + 'CMake generator requires a configuration. ' + "Provide --configuration on the CLI or set 'default-configuration' in [tool.cppython.generators.cmake]." 
+ ) + return effective + + def build(self, configuration: str | None = None) -> None: + """Builds the project using cmake --build with the resolved preset. + + Args: + configuration: Optional CMake preset name. Overrides default-configuration from config. + """ + preset = self._resolve_configuration(configuration) + cmd = [self._cmake_command(), '--build', '--preset', preset] + subprocess.run(cmd, check=True, cwd=self.data.preset_file.parent) + + def test(self, configuration: str | None = None) -> None: + """Runs tests using ctest with the resolved preset. + + Args: + configuration: Optional CMake preset name. Overrides default-configuration from config. + """ + preset = self._resolve_configuration(configuration) + cmd = [self._ctest_command(), '--preset', preset] + subprocess.run(cmd, check=True, cwd=self.data.preset_file.parent) + + def bench(self, configuration: str | None = None) -> None: + """Runs benchmarks using ctest with the resolved preset. + + Args: + configuration: Optional CMake preset name. Overrides default-configuration from config. + """ + preset = self._resolve_configuration(configuration) + cmd = [self._ctest_command(), '--preset', preset] + subprocess.run(cmd, check=True, cwd=self.data.preset_file.parent) + + def run(self, target: str, configuration: str | None = None) -> None: + """Runs a built executable by target name. + + Searches the build directory for the executable matching the target name. + + Args: + target: The name of the build target/executable to run + configuration: Optional CMake preset name. Overrides default-configuration from config. 
+ + Raises: + FileNotFoundError: If the target executable cannot be found + """ + build_path = self.core_data.cppython_data.build_path + + # Search for the executable in the build directory + candidates = list(build_path.rglob(target)) + list(build_path.rglob(f'{target}.exe')) + executables = [c for c in candidates if c.is_file()] + + if not executables: + raise FileNotFoundError(f"Could not find executable '{target}' in build directory: {build_path}") + + executable = executables[0] + subprocess.run([str(executable)], check=True, cwd=self.data.preset_file.parent) diff --git a/cppython/plugins/cmake/resolution.py b/cppython/plugins/cmake/resolution.py index c55b3817..e5229c25 100644 --- a/cppython/plugins/cmake/resolution.py +++ b/cppython/plugins/cmake/resolution.py @@ -35,8 +35,7 @@ def _resolve_cmake_binary(configured_path: Path | None) -> Path | None: if env_path.exists(): return env_path logger.warning( - 'CMAKE_BINARY environment variable points to non-existent path: %s. ' - 'Falling back to PATH lookup.', + 'CMAKE_BINARY environment variable points to non-existent path: %s. Falling back to PATH lookup.', env_binary, ) @@ -45,8 +44,7 @@ def _resolve_cmake_binary(configured_path: Path | None) -> Path | None: if configured_path.exists(): return configured_path logger.warning( - 'Configured cmake_binary path does not exist: %s. ' - 'Falling back to PATH lookup.', + 'Configured cmake_binary path does not exist: %s. 
Falling back to PATH lookup.', configured_path, ) @@ -79,5 +77,8 @@ def resolve_cmake_data(data: dict[str, Any], core_data: CorePluginData) -> CMake cmake_binary = _resolve_cmake_binary(parsed_data.cmake_binary) return CMakeData( - preset_file=modified_preset_file, configuration_name=parsed_data.configuration_name, cmake_binary=cmake_binary + preset_file=modified_preset_file, + configuration_name=parsed_data.configuration_name, + cmake_binary=cmake_binary, + default_configuration=parsed_data.default_configuration, ) diff --git a/cppython/plugins/cmake/schema.py b/cppython/plugins/cmake/schema.py index 0c3d89bb..4f4adc62 100644 --- a/cppython/plugins/cmake/schema.py +++ b/cppython/plugins/cmake/schema.py @@ -89,6 +89,33 @@ class BuildPreset(CPPythonModel, extra='allow'): ] = None +class TestPreset(CPPythonModel, extra='allow'): + """Partial Test Preset specification for CMake test presets (ctest --preset)""" + + name: str + description: Annotated[str | None, Field(description='A human-readable description of the preset.')] = None + + hidden: Annotated[bool | None, Field(description='If true, the preset is hidden and cannot be used directly.')] = ( + None + ) + + inherits: Annotated[ + str | list[str] | None, Field(description='The inherits field allows inheriting from other presets.') + ] = None + configurePreset: Annotated[ + str | None, + Field(description='The name of a configure preset to associate with this test preset.'), + ] = None + configuration: Annotated[ + str | None, + Field(description='Build configuration. Equivalent to --config on the command line.'), + ] = None + filter: Annotated[ + dict | None, + Field(description='Filter for test selection, e.g. 
include/exclude by label or name.'), + ] = None + + class CMakePresets(CPPythonModel, extra='allow'): """The schema for the CMakePresets and CMakeUserPresets files.""" @@ -98,6 +125,7 @@ class CMakePresets(CPPythonModel, extra='allow'): ] = None configurePresets: Annotated[list[ConfigurePreset] | None, Field(description='The list of configure presets')] = None buildPresets: Annotated[list[BuildPreset] | None, Field(description='The list of build presets')] = None + testPresets: Annotated[list[TestPreset] | None, Field(description='The list of test presets')] = None class CMakeSyncData(SyncData): @@ -112,6 +140,7 @@ class CMakeData(CPPythonModel): preset_file: Path configuration_name: str cmake_binary: Path | None + default_configuration: str | None = None class CMakeConfiguration(CPPythonModel): @@ -139,3 +168,12 @@ class CMakeConfiguration(CPPythonModel): 'Can be overridden via CMAKE_BINARY environment variable.' ), ] = None + default_configuration: Annotated[ + str | None, + Field( + alias='default-configuration', + description='Default CMake preset name to use for build/test/bench commands. ' + 'When set, the --configuration CLI option is no longer required. 
' + 'The CLI --configuration value takes precedence over this default.', + ), + ] = None diff --git a/cppython/plugins/conan/builder.py b/cppython/plugins/conan/builder.py index db69cba1..59654b50 100644 --- a/cppython/plugins/conan/builder.py +++ b/cppython/plugins/conan/builder.py @@ -83,10 +83,11 @@ def _create_conanfile( ) -> None: """Creates a conanfile.py file that inherits from CPPython base.""" class_name = name.replace('-', '_').title().replace('_', '') - content = f'''import os -from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain + content = f'''from conan.tools.cmake import CMake, CMakeConfigDeps, CMakeToolchain from conan.tools.files import copy +import os + from conanfile_base import CPPythonBase @@ -125,7 +126,7 @@ def layout(self): super().layout() # Get CPPython managed layout def generate(self): - deps = CMakeDeps(self) + deps = CMakeConfigDeps(self) deps.generate() tc = CMakeToolchain(self) tc.user_presets_path = None @@ -142,9 +143,11 @@ def package(self): def package_info(self): # Use native CMake config files to preserve FILE_SET information for C++ modules - # This tells CMakeDeps to skip generating files and use the package's native config + # This tells CMakeConfigDeps to skip generating files and use the package's native config self.cpp_info.set_property("cmake_find_mode", "none") - self.cpp_info.builddirs = ["."] + # Point CMakeConfigDeps to the directory containing the native config files + # so conan_cmakedeps_paths.cmake populates the search paths for find_package() + self.cpp_info.builddirs.append(os.path.join("lib", "cmake", self.name)) def export_sources(self): copy(self, "CMakeLists.txt", src=self.recipe_folder, dst=self.export_sources_folder) diff --git a/cppython/plugins/conan/plugin.py b/cppython/plugins/conan/plugin.py index 1197532c..60c3bae8 100644 --- a/cppython/plugins/conan/plugin.py +++ b/cppython/plugins/conan/plugin.py @@ -21,7 +21,9 @@ from cppython.plugins.conan.builder import Builder from 
cppython.plugins.conan.resolution import resolve_conan_data, resolve_conan_dependency from cppython.plugins.conan.schema import ConanData, ConanfileGenerationData -from cppython.utility.exception import NotSupportedError, ProviderInstallationError +from cppython.plugins.meson.plugin import MesonGenerator +from cppython.plugins.meson.schema import MesonSyncData +from cppython.utility.exception import InstallationVerificationError, NotSupportedError, ProviderInstallationError from cppython.utility.utility import TypeName @@ -174,6 +176,20 @@ def _run_conan_install(self, conanfile_path: Path, update: bool, build_type: str # Add build type setting if specified if build_type: command_args.extend(['-s', f'build_type={build_type}']) + # Enable CMakeConfigDeps (the modern CMake config-mode generator) + command_args.extend(['-c', 'tools.cmake.cmakedeps:new=will_break_next']) + + # Enable 'import std;' support by providing the experimental UUID in the toolchain + # The UUID must be in the toolchain file (before try_compile block) so compiler + # detection can create __CMAKE::CXX23 for projects using 'import std;' + command_args.extend( + [ + '-c', + 'tools.cmake.cmaketoolchain:extra_variables={' + "'CMAKE_EXPERIMENTAL_CXX_IMPORT_STD': 'd0edc3af-4c50-42ea-a356-e2862fe7a444'" + '}', + ] + ) # Add cmake binary configuration if specified if self._cmake_binary: @@ -195,6 +211,33 @@ def _run_conan_install(self, conanfile_path: Path, update: bool, build_type: str logger.error('Conan install failed: %s', error_msg, exc_info=True) raise ProviderInstallationError('conan', error_msg, e) from e + def verify_installed(self) -> None: + """Verify that Conan-generated artifacts exist on disk. + + Checks for the toolchain/native files that ``conan install`` produces + in the generators output directory. 
+ + Raises: + InstallationVerificationError: If expected artifacts are missing + """ + generators_path = self.core_data.cppython_data.build_path / 'generators' + missing: list[str] = [] + + if not generators_path.is_dir(): + missing.append(f'generators directory ({generators_path})') + else: + # Check for at least one of the expected toolchain files + cmake_toolchain = generators_path / 'conan_toolchain.cmake' + meson_native = generators_path / 'conan_meson_native.ini' + + if not cmake_toolchain.exists() and not meson_native.exists(): + missing.append( + f'toolchain files in {generators_path} (expected conan_toolchain.cmake or conan_meson_native.ini)' + ) + + if missing: + raise InstallationVerificationError('conan', missing) + def install(self, groups: list[str] | None = None) -> None: """Installs the provider @@ -221,7 +264,7 @@ def supported_sync_type(sync_type: type[SyncData]) -> bool: Returns: True if the sync type is supported, False otherwise. """ - return sync_type in CMakeGenerator.sync_types() + return sync_type in CMakeGenerator.sync_types() or sync_type in MesonGenerator.sync_types() def sync_data(self, consumer: SyncConsumer) -> SyncData: """Generates synchronization data for the given consumer. @@ -238,6 +281,8 @@ def sync_data(self, consumer: SyncConsumer) -> SyncData: for sync_type in consumer.sync_types(): if sync_type == CMakeSyncData: return self._sync_with_cmake(consumer) + if sync_type == MesonSyncData: + return self._create_meson_sync_data() raise NotSupportedError(f'Unsupported sync types: {consumer.sync_types()}') @@ -272,6 +317,26 @@ def _create_cmake_sync_data(self) -> CMakeSyncData: toolchain_file=conan_toolchain_path, ) + def _create_meson_sync_data(self) -> MesonSyncData: + """Creates Meson synchronization data with Conan toolchain configuration. + + Conan's MesonToolchain generator produces ``conan_meson_native.ini`` + and ``conan_meson_cross.ini`` files in the generators directory. 
+ + Returns: + MesonSyncData configured for Conan integration + """ + generators_path = self.core_data.cppython_data.build_path / 'generators' + + native_file = generators_path / 'conan_meson_native.ini' + cross_file = generators_path / 'conan_meson_cross.ini' + + return MesonSyncData( + provider_name=TypeName('conan'), + native_file=native_file if native_file.exists() else None, + cross_file=cross_file if cross_file.exists() else None, + ) + @classmethod async def download_tooling(cls, directory: Path) -> None: """Download external tooling required by the Conan provider. @@ -329,6 +394,30 @@ def _run_conan_create(self, conanfile_path: Path, build_type: str, logger: Logge command_args.extend(['-c', 'tools.graph:skip_test=True']) command_args.extend(['-c', 'tools.build:skip_test=True']) + # Enable CMakeConfigDeps (the modern CMake config-mode generator) + command_args.extend(['-c', 'tools.cmake.cmakedeps:new=will_break_next']) + + # Force Ninja Multi-Config generator for C++ module support + # The Visual Studio generator does not support BMI-only compilation + # needed for consuming C++ modules across package boundaries + command_args.extend(['-c', 'tools.cmake.cmaketoolchain:generator=Ninja Multi-Config']) + + # Enable 'import std;' support in the CMake toolchain + # CMAKE_EXPERIMENTAL_CXX_IMPORT_STD must be in the toolchain file (before + # the try_compile block) so compiler detection can create __CMAKE::CXX23. + # Note: CMAKE_CXX_MODULE_STD must NOT be in the toolchain because it would + # cause ABI detection try_compile to fail (chicken-and-egg with __CMAKE::CXX23). + # The UUID is specific to the CMake version and will need updating + # when the CMake version changes until import std graduates from experimental. 
+ command_args.extend( + [ + '-c', + 'tools.cmake.cmaketoolchain:extra_variables={' + "'CMAKE_EXPERIMENTAL_CXX_IMPORT_STD': 'd0edc3af-4c50-42ea-a356-e2862fe7a444'" + '}', + ] + ) + # Add build type setting command_args.extend(['-s', f'build_type={build_type}']) diff --git a/cppython/plugins/conan/resolution.py b/cppython/plugins/conan/resolution.py index 85cf6e22..d8dd98d2 100644 --- a/cppython/plugins/conan/resolution.py +++ b/cppython/plugins/conan/resolution.py @@ -41,8 +41,7 @@ def _handle_single_specifier(name: str, specifier) -> ConanDependency: return ConanDependency(name=name, version_range=ConanVersionRange(expression=f'>={specifier.version}')) else: raise ConfigException( - f"Unsupported single specifier '{specifier.operator}'. Supported: '==', '>=', '>', '<', '<=', '!=', '~='", - [], + f"Unsupported single specifier '{specifier.operator}'. Supported: '==', '>=', '>', '<', '<=', '!=', '~='" ) @@ -79,17 +78,14 @@ def resolve_conan_dependency(requirement: Requirement) -> ConanDependency: specifier_groups[specifier.operator].append(specifier.version) elif specifier.operator == '==': # Multiple == operators would be contradictory - raise ConfigException( - "Multiple '==' specifiers are contradictory. Use a single '==' or range operators.", [] - ) + raise ConfigException("Multiple '==' specifiers are contradictory. Use a single '==' or range operators.") elif specifier.operator == '~=': # ~= with other operators is complex, for now treat as >= specifier_groups['>='].append(specifier.version) else: raise ConfigException( f"Unsupported specifier '{specifier.operator}' in multi-specifier requirement. 
" - f"Supported: '>=', '>', '<', '<=', '!='", - [], + f"Supported: '>=', '>', '<', '<=', '!='" ) # Build range parts in consistent order diff --git a/cppython/plugins/meson/__init__.py b/cppython/plugins/meson/__init__.py new file mode 100644 index 00000000..6f6c29d6 --- /dev/null +++ b/cppython/plugins/meson/__init__.py @@ -0,0 +1,6 @@ +"""The Meson generator plugin for CPPython. + +This module implements the Meson generator plugin, which integrates CPPython with +the Meson build system. It includes functionality for resolving configuration data, +writing native/cross files, and synchronizing project data. +""" diff --git a/cppython/plugins/meson/builder.py b/cppython/plugins/meson/builder.py new file mode 100644 index 00000000..ebd87831 --- /dev/null +++ b/cppython/plugins/meson/builder.py @@ -0,0 +1,139 @@ +"""Plugin builder for Meson native/cross file management.""" + +import configparser +import io +from pathlib import Path + +from cppython.plugins.meson.schema import MesonSyncData + + +class Builder: + """Aids in building the information needed for the Meson plugin. + + Manages generation and writing of Meson native and cross files + that configure dependency paths from providers. + """ + + def __init__(self) -> None: + """Initialize the builder.""" + + @staticmethod + def generate_native_file(sync_data: MesonSyncData, project_root: Path) -> str: + """Generates a Meson native file that references provider-managed dependencies. + + The native file points Meson to the provider's dependency paths via + ``pkg_config_path`` and ``cmake_prefix_path`` in the ``[built-in options]`` + section. If the provider supplies its own native file, an include + directive is used instead. 
+ + Args: + sync_data: The provider's synchronization data + project_root: The project root directory + + Returns: + The native file content as a string + """ + config = configparser.ConfigParser() + # Preserve key casing + config.optionxform = str # type: ignore[assignment] + + if sync_data.native_file: + # Reference the provider's native file via properties + config['properties'] = { + 'cppython_provider': f"'{sync_data.provider_name}'", + 'cppython_native_file': f"'{sync_data.native_file.as_posix()}'", + } + + output = io.StringIO() + config.write(output) + return output.getvalue() + + @staticmethod + def generate_cross_file(sync_data: MesonSyncData, project_root: Path) -> str: + """Generates a Meson cross file that references provider-managed toolchain. + + Args: + sync_data: The provider's synchronization data + project_root: The project root directory + + Returns: + The cross file content as a string + """ + config = configparser.ConfigParser() + config.optionxform = str # type: ignore[assignment] + + if sync_data.cross_file: + config['properties'] = { + 'cppython_provider': f"'{sync_data.provider_name}'", + 'cppython_cross_file': f"'{sync_data.cross_file.as_posix()}'", + } + + output = io.StringIO() + config.write(output) + return output.getvalue() + + @staticmethod + def write_native_file(directory: Path, sync_data: MesonSyncData, project_root: Path) -> Path | None: + """Write a CPPython-managed native file to disk. + + Only writes if the provider supplied a native file. The generated file + is written to ``{directory}/cppython_native.ini`` and is only updated + if the content has changed. 
+ + Args: + directory: The tool directory to write the file to + sync_data: The provider's synchronization data + project_root: The project root directory + + Returns: + Path to the written native file, or None if no native file was provided + """ + if not sync_data.native_file: + return None + + directory.mkdir(parents=True, exist_ok=True) + native_file_path = directory / 'cppython_native.ini' + + content = Builder.generate_native_file(sync_data, project_root) + + # Only write if content changed + if native_file_path.exists(): + existing = native_file_path.read_text(encoding='utf-8') + if existing == content: + return native_file_path + + native_file_path.write_text(content, encoding='utf-8') + return native_file_path + + @staticmethod + def write_cross_file(directory: Path, sync_data: MesonSyncData, project_root: Path) -> Path | None: + """Write a CPPython-managed cross file to disk. + + Only writes if the provider supplied a cross file. The generated file + is written to ``{directory}/cppython_cross.ini`` and is only updated + if the content has changed. 
+ + Args: + directory: The tool directory to write the file to + sync_data: The provider's synchronization data + project_root: The project root directory + + Returns: + Path to the written cross file, or None if no cross file was provided + """ + if not sync_data.cross_file: + return None + + directory.mkdir(parents=True, exist_ok=True) + cross_file_path = directory / 'cppython_cross.ini' + + content = Builder.generate_cross_file(sync_data, project_root) + + # Only write if content changed + if cross_file_path.exists(): + existing = cross_file_path.read_text(encoding='utf-8') + if existing == content: + return cross_file_path + + cross_file_path.write_text(content, encoding='utf-8') + return cross_file_path diff --git a/cppython/plugins/meson/plugin.py b/cppython/plugins/meson/plugin.py new file mode 100644 index 00000000..e66abe91 --- /dev/null +++ b/cppython/plugins/meson/plugin.py @@ -0,0 +1,196 @@ +"""The Meson generator implementation""" + +import subprocess +from pathlib import Path +from typing import Any + +from cppython.core.plugin_schema.generator import ( + Generator, + GeneratorPluginGroupData, + SupportedGeneratorFeatures, +) +from cppython.core.schema import CorePluginData, Information, SupportedFeatures, SyncData +from cppython.plugins.meson.builder import Builder +from cppython.plugins.meson.resolution import resolve_meson_data +from cppython.plugins.meson.schema import MesonSyncData + + +class MesonGenerator(Generator): + """Meson generator""" + + def __init__(self, group_data: GeneratorPluginGroupData, core_data: CorePluginData, data: dict[str, Any]) -> None: + """Initializes the generator.""" + self.group_data = group_data + self.core_data = core_data + self.data = resolve_meson_data(data, core_data) + self.builder = Builder() + + self._cppython_meson_directory = self.core_data.cppython_data.tool_path / 'cppython' / 'meson' + + # Track injected native/cross files for use in meson setup + self._native_file: Path | None = None + self._cross_file: 
Path | None = None + + @staticmethod + def features(directory: Path) -> SupportedFeatures: + """Queries if Meson is supported. + + Args: + directory: The root directory where features are evaluated + + Returns: + The supported features + """ + return SupportedGeneratorFeatures() + + @staticmethod + def information() -> Information: + """Queries plugin info. + + Returns: + Plugin information + """ + return Information() + + @staticmethod + def sync_types() -> list[type[SyncData]]: + """Returns types in order of preference. + + Returns: + The available types + """ + return [MesonSyncData] + + def sync(self, sync_data: SyncData) -> None: + """Disk sync point. + + Receives sync data from the provider and writes native/cross files + that will be passed to ``meson setup``. + + Args: + sync_data: The input data + """ + match sync_data: + case MesonSyncData(): + project_root = self.core_data.project_data.project_root + + self._native_file = self.builder.write_native_file( + self._cppython_meson_directory, sync_data, project_root + ) + self._cross_file = self.builder.write_cross_file( + self._cppython_meson_directory, sync_data, project_root + ) + case _: + raise ValueError('Unsupported sync data type') + + def _meson_command(self) -> str: + """Returns the meson command to use. + + Returns: + The meson binary path as a string + """ + if self.data.meson_binary: + return str(self.data.meson_binary) + return 'meson' + + def _build_dir(self) -> Path: + """Returns the absolute path to the meson build directory. + + Returns: + The build directory path + """ + return self.data.build_file.parent / self.data.build_directory + + def _ensure_setup(self) -> None: + """Ensure the meson build directory is configured. + + Runs ``meson setup`` if the build directory doesn't exist yet, + or ``meson setup --reconfigure`` if it does. 
+ """ + build_dir = self._build_dir() + source_dir = self.data.build_file.parent + + cmd = [self._meson_command(), 'setup'] + + # Add native file if available + if self._native_file and self._native_file.exists(): + cmd.extend(['--native-file', str(self._native_file)]) + + # Add cross file if available + if self._cross_file and self._cross_file.exists(): + cmd.extend(['--cross-file', str(self._cross_file)]) + + if build_dir.exists(): + cmd.append('--reconfigure') + + cmd.extend([str(build_dir), str(source_dir)]) + + subprocess.run(cmd, check=True, cwd=source_dir) + + def _effective_build_dir(self, configuration: str | None) -> Path: + """Returns the build directory, optionally overridden by a configuration name. + + Args: + configuration: If provided, used as the build directory name instead of the + configured ``build_directory``. + + Returns: + The absolute path to the build directory + """ + directory = configuration if configuration else self.data.build_directory + return self.data.build_file.parent / directory + + def build(self, configuration: str | None = None) -> None: + """Builds the project using meson compile. + + Args: + configuration: Optional build directory name override. + """ + self._ensure_setup() + build_dir = self._effective_build_dir(configuration) + cmd = [self._meson_command(), 'compile', '-C', str(build_dir)] + subprocess.run(cmd, check=True, cwd=self.data.build_file.parent) + + def test(self, configuration: str | None = None) -> None: + """Runs tests using meson test. + + Args: + configuration: Optional build directory name override. + """ + build_dir = self._effective_build_dir(configuration) + cmd = [self._meson_command(), 'test', '-C', str(build_dir)] + subprocess.run(cmd, check=True, cwd=self.data.build_file.parent) + + def bench(self, configuration: str | None = None) -> None: + """Runs benchmarks using meson test --benchmark. + + Args: + configuration: Optional build directory name override. 
+ """ + build_dir = self._effective_build_dir(configuration) + cmd = [self._meson_command(), 'test', '--benchmark', '-C', str(build_dir)] + subprocess.run(cmd, check=True, cwd=self.data.build_file.parent) + + def run(self, target: str, configuration: str | None = None) -> None: + """Runs a built executable by target name. + + Searches the build directory for the executable matching the target name. + + Args: + target: The name of the build target/executable to run + configuration: Optional build directory name override. + + Raises: + FileNotFoundError: If the target executable cannot be found + """ + build_dir = self._effective_build_dir(configuration) + + # Search for the executable in the build directory + candidates = list(build_dir.rglob(target)) + list(build_dir.rglob(f'{target}.exe')) + executables = [c for c in candidates if c.is_file()] + + if not executables: + raise FileNotFoundError(f"Could not find executable '{target}' in build directory: {build_dir}") + + executable = executables[0] + subprocess.run([str(executable)], check=True, cwd=self.data.build_file.parent) diff --git a/cppython/plugins/meson/resolution.py b/cppython/plugins/meson/resolution.py new file mode 100644 index 00000000..60056fc0 --- /dev/null +++ b/cppython/plugins/meson/resolution.py @@ -0,0 +1,81 @@ +"""Builder to help resolve meson state""" + +import logging +import os +import shutil +from pathlib import Path +from typing import Any + +from cppython.core.schema import CorePluginData +from cppython.plugins.meson.schema import MesonConfiguration, MesonData + + +def _resolve_meson_binary(configured_path: Path | None) -> Path | None: + """Resolve the meson binary path with validation. + + Resolution order: + 1. MESON_BINARY environment variable (highest priority) + 2. Configured path from meson_binary setting + 3. meson from PATH (fallback) + + If a path is specified (via env or config) but doesn't exist, + a warning is logged and we fall back to PATH lookup. 
+ + Args: + configured_path: The meson_binary path from configuration, if any + + Returns: + Resolved meson path, or None if not found anywhere + """ + logger = logging.getLogger('cppython.meson') + + # Environment variable takes precedence + if env_binary := os.environ.get('MESON_BINARY'): + env_path = Path(env_binary) + if env_path.exists(): + return env_path + logger.warning( + 'MESON_BINARY environment variable points to non-existent path: %s. Falling back to PATH lookup.', + env_binary, + ) + + # Try configured path + if configured_path: + if configured_path.exists(): + return configured_path + logger.warning( + 'Configured meson_binary path does not exist: %s. Falling back to PATH lookup.', + configured_path, + ) + + # Fall back to PATH lookup + if meson_in_path := shutil.which('meson'): + return Path(meson_in_path) + + return None + + +def resolve_meson_data(data: dict[str, Any], core_data: CorePluginData) -> MesonData: + """Resolves the input data table from defaults to requirements. 
def resolve_meson_data(data: dict[str, Any], core_data: CorePluginData) -> MesonData:
    """Resolves the input data table from defaults to requirements.

    Args:
        data: The input table
        core_data: The core data to help with the resolve

    Returns:
        The resolved data
    """
    configuration = MesonConfiguration(**data)

    # Anchor a relative meson.build path at the absolute project root
    build_file = configuration.build_file
    if not build_file.is_absolute():
        build_file = core_data.project_data.project_root.absolute() / build_file

    return MesonData(
        build_file=build_file,
        build_directory=configuration.build_directory,
        # Environment variable takes precedence over configuration
        meson_binary=_resolve_meson_binary(configuration.meson_binary),
    )
class MesonData(CPPythonModel):
    """Resolved Meson data used at runtime by the generator plugin."""

    # Absolute path to the project's meson.build after resolution
    build_file: Path
    # Name of the build directory passed to "meson setup"
    build_directory: str
    # Resolved meson executable, or None when unavailable
    meson_binary: Path | None


class MesonConfiguration(CPPythonModel):
    """Configuration for the Meson generator plugin.

    User-facing configuration from ``[tool.cppython.generators.meson]``.
    """

    build_file: Annotated[
        Path,
        Field(
            description=(
                'The meson.build file that defines the project. '
                'Relative paths are resolved against the project root.'
            ),
        ),
    ] = Path('meson.build')
    build_directory: Annotated[
        str,
        Field(
            description=(
                'The Meson build directory name. This is passed to '
                '"meson setup" as the build directory argument.'
            ),
        ),
    ] = 'builddir'
    meson_binary: Annotated[
        Path | None,
        Field(
            description=(
                'Path to a specific Meson binary to use. If not specified, uses "meson" from PATH. '
                'Can be overridden via MESON_BINARY environment variable.'
            )
        ),
    ] = None
cppython_project = CPPythonProject(project_configuration, self, pdm_pyproject) - if not dry_run: - cppython_project.install() + if not dry_run: + cppython_project.install() + except Exception: + self.logger.debug('CPPython: Error during post-install hook', exc_info=True) class CPPythonCommand(BaseCommand): diff --git a/cppython/plugins/vcpkg/plugin.py b/cppython/plugins/vcpkg/plugin.py index 0f64e616..05f08add 100644 --- a/cppython/plugins/vcpkg/plugin.py +++ b/cppython/plugins/vcpkg/plugin.py @@ -15,9 +15,16 @@ from cppython.core.schema import CorePluginData, Information, SupportedFeatures, SyncData from cppython.plugins.cmake.plugin import CMakeGenerator from cppython.plugins.cmake.schema import CMakeSyncData +from cppython.plugins.meson.plugin import MesonGenerator +from cppython.plugins.meson.schema import MesonSyncData from cppython.plugins.vcpkg.resolution import generate_manifest, resolve_vcpkg_data from cppython.plugins.vcpkg.schema import VcpkgData -from cppython.utility.exception import NotSupportedError, ProviderInstallationError, ProviderToolingError +from cppython.utility.exception import ( + InstallationVerificationError, + NotSupportedError, + ProviderInstallationError, + ProviderToolingError, +) from cppython.utility.utility import TypeName @@ -87,7 +94,7 @@ def supported_sync_type(sync_type: type[SyncData]) -> bool: Returns: True if the sync type is supported, False otherwise. """ - return sync_type in CMakeGenerator.sync_types() + return sync_type in CMakeGenerator.sync_types() or sync_type in MesonGenerator.sync_types() @staticmethod def information() -> Information: @@ -130,7 +137,7 @@ def _update_provider(cls, path: Path) -> None: cls._handle_subprocess_error(logger, 'bootstrap the vcpkg repository', e, ProviderToolingError) def sync_data(self, consumer: SyncConsumer) -> SyncData: - """Gathers a data object for the given generator + """Gathers a data object for the given generator. 
Args: consumer: The input consumer @@ -139,11 +146,13 @@ def sync_data(self, consumer: SyncConsumer) -> SyncData: NotSupportedError: If not supported Returns: - The synch data object + The sync data object """ for sync_type in consumer.sync_types(): if sync_type == CMakeSyncData: return self._create_cmake_sync_data() + if sync_type == MesonSyncData: + return self._create_meson_sync_data() raise NotSupportedError('OOF') @@ -161,6 +170,33 @@ def _create_cmake_sync_data(self) -> CMakeSyncData: toolchain_file=vcpkg_cmake_path, ) + def _create_meson_sync_data(self) -> MesonSyncData: + """Creates Meson synchronization data with vcpkg configuration. + + vcpkg exposes installed dependencies via pkg-config. The native file + points Meson's ``pkg_config_path`` to vcpkg's installed pkgconfig directory. + + Returns: + MesonSyncData configured for vcpkg integration + """ + # vcpkg installs pkg-config files under installed//lib/pkgconfig + # We point Meson to the installed directory via a native file reference + # The native file itself is generated by the Meson builder during sync + vcpkg_pkgconfig_path = self.core_data.cppython_data.install_path / 'installed' + + # Create a native file path in the tool directory + native_file = self.core_data.cppython_data.tool_path / 'cppython' / 'meson' / 'vcpkg_native.ini' + + # Write a minimal native file pointing to vcpkg's pkg-config + native_file.parent.mkdir(parents=True, exist_ok=True) + content = f"[built-in options]\npkg_config_path = '{vcpkg_pkgconfig_path.as_posix()}'\n" + native_file.write_text(content, encoding='utf-8') + + return MesonSyncData( + provider_name=TypeName('vcpkg'), + native_file=native_file, + ) + @classmethod def tooling_downloaded(cls, path: Path) -> bool: """Returns whether the provider tooling needs to be downloaded @@ -231,6 +267,30 @@ async def download_tooling(cls, directory: Path) -> None: cls._update_provider(directory) + def verify_installed(self) -> None: + """Verify that vcpkg tooling and installed 
packages exist on disk. + + Checks that the vcpkg repository has been cloned and that the install + directory contains packages from a prior ``install()`` call. + + Raises: + InstallationVerificationError: If required artifacts are missing + """ + missing: list[str] = [] + + # Check that vcpkg tooling has been downloaded + tooling_path = self.core_data.cppython_data.install_path + if not self.tooling_downloaded(tooling_path): + missing.append(f'vcpkg repository ({tooling_path})') + + # Check that packages have been installed + install_directory = self.data.install_directory + if not install_directory.is_dir() or not any(install_directory.iterdir()): + missing.append(f'installed packages directory ({install_directory})') + + if missing: + raise InstallationVerificationError('vcpkg', missing) + def install(self, groups: list[str] | None = None) -> None: """Called when dependencies need to be installed from a lock file. diff --git a/cppython/plugins/vcpkg/resolution.py b/cppython/plugins/vcpkg/resolution.py index 4dcfde52..1725f46d 100644 --- a/cppython/plugins/vcpkg/resolution.py +++ b/cppython/plugins/vcpkg/resolution.py @@ -34,7 +34,7 @@ def generate_manifest(core_data: CorePluginData, data: VcpkgData) -> Manifest: result = check_output(['git', 'rev-parse', 'HEAD'], cwd=cwd) data.builtin_baseline = result.decode('utf-8').strip() except (CalledProcessError, FileNotFoundError) as e: - raise ConfigException('Failed to get the current commit hash from the vcpkg repository.', []) from e + raise ConfigException('Failed to get the current commit hash from the vcpkg repository.') from e return Manifest( name=core_data.pep621_data.name, @@ -92,14 +92,14 @@ def resolve_vcpkg_dependency(requirement: Requirement) -> VcpkgDependency: # If the length of specifiers is greater than one, raise a configuration error if len(specifiers) > 1: - raise ConfigException('Multiple specifiers are not supported. 
Please provide a single specifier.', []) + raise ConfigException('Multiple specifiers are not supported. Please provide a single specifier.') # Extract the version from the single specifier min_version = None if len(specifiers) == 1: specifier = next(iter(specifiers)) if specifier.operator != '>=': - raise ConfigException(f"Unsupported specifier '{specifier.operator}'. Only '>=' is supported.", []) + raise ConfigException(f"Unsupported specifier '{specifier.operator}'. Only '>=' is supported.") min_version = specifier.version return VcpkgDependency( diff --git a/cppython/project.py b/cppython/project.py index 9087e8f2..5c8c741d 100644 --- a/cppython/project.py +++ b/cppython/project.py @@ -7,7 +7,7 @@ from cppython.builder import Builder from cppython.core.exception import ConfigException from cppython.core.resolution import resolve_model -from cppython.core.schema import Interface, ProjectConfiguration, PyProject +from cppython.core.schema import Interface, ProjectConfiguration, PyProject, SyncData from cppython.schema import API @@ -28,6 +28,11 @@ def __init__( self._interface = interface self.logger = logging.getLogger('cppython') + # Early exit: if no CPPython configuration table, do nothing silently + tool_data = pyproject_data.get('tool') + if not tool_data or not isinstance(tool_data, dict) or not tool_data.get('cppython'): + return + builder = Builder(project_configuration, self.logger) self.logger.info('Initializing project') @@ -141,3 +146,102 @@ def publish(self) -> None: # Let provider handle its own exceptions for better error context self._data.plugins.provider.publish() + + def prepare_build(self) -> SyncData | None: + """Prepare for a PEP 517 build without installing C++ dependencies. + + Syncs generated files (presets, native files) and verifies that a prior + ``install()`` call has produced the expected provider artifacts. 
This is + used by the build backend so that ``pdm build`` / ``pip wheel`` can + delegate to scikit-build-core or meson-python without re-running the + full provider install workflow. + + Returns: + The sync data from the provider, or None if the project is not enabled + + Raises: + InstallationVerificationError: If provider artifacts are missing + """ + if not self._enabled: + self.logger.info('Skipping prepare_build because the project is not enabled') + return None + + self.logger.info('Preparing build environment') + + # Sync config files so the generator has up-to-date presets / native files + self._data.sync() + + # Verify that a prior install() produced the expected artifacts + self._data.plugins.provider.verify_installed() + + # Return sync data for the build backend to inject into config_settings + return self._data.plugins.provider.sync_data(self._data.plugins.generator) + + def build(self, configuration: str | None = None) -> None: + """Builds the project + + Assumes dependencies have been installed via `install`. + Syncs generated files to ensure they are up-to-date, then executes the build. + + Args: + configuration: Optional named configuration to use + """ + if not self._enabled: + self.logger.info('Skipping build because the project is not enabled') + return + + self.logger.info('Building project') + self._data.sync() + self._data.plugins.generator.build(configuration=configuration) + + def test(self, configuration: str | None = None) -> None: + """Runs project tests + + Assumes dependencies have been installed via `install`. + Syncs generated files to ensure they are up-to-date, then executes tests. 
+ + Args: + configuration: Optional named configuration to use + """ + if not self._enabled: + self.logger.info('Skipping test because the project is not enabled') + return + + self.logger.info('Running tests') + self._data.sync() + self._data.plugins.generator.test(configuration=configuration) + + def bench(self, configuration: str | None = None) -> None: + """Runs project benchmarks + + Assumes dependencies have been installed via `install`. + Syncs generated files to ensure they are up-to-date, then executes benchmarks. + + Args: + configuration: Optional named configuration to use + """ + if not self._enabled: + self.logger.info('Skipping bench because the project is not enabled') + return + + self.logger.info('Running benchmarks') + self._data.sync() + self._data.plugins.generator.bench(configuration=configuration) + + def run(self, target: str, configuration: str | None = None) -> None: + """Runs a built executable + + Assumes dependencies have been installed via `install`. + Syncs generated files to ensure they are up-to-date, then executes the target. + + Args: + target: The name of the build target to run + configuration: Optional named configuration to use + """ + if not self._enabled: + self.logger.info('Skipping run because the project is not enabled') + return + + self.logger.info('Running target: %s', target) + self._data.sync() + self._data.plugins.generator.run(target, configuration=configuration) diff --git a/cppython/schema.py b/cppython/schema.py index ab2d7964..8b7038ef 100644 --- a/cppython/schema.py +++ b/cppython/schema.py @@ -24,3 +24,41 @@ def update(self, groups: list[str] | None = None) -> None: groups: Optional list of dependency groups to update """ raise NotImplementedError() + + @abstractmethod + def build(self, configuration: str | None = None) -> None: + """Builds the project + + Args: + configuration: Optional named configuration to use. Interpretation is generator-specific + (e.g. CMake preset name, Meson build directory). 
+ """ + raise NotImplementedError() + + @abstractmethod + def test(self, configuration: str | None = None) -> None: + """Runs project tests + + Args: + configuration: Optional named configuration to use. Interpretation is generator-specific. + """ + raise NotImplementedError() + + @abstractmethod + def bench(self, configuration: str | None = None) -> None: + """Runs project benchmarks + + Args: + configuration: Optional named configuration to use. Interpretation is generator-specific. + """ + raise NotImplementedError() + + @abstractmethod + def run(self, target: str, configuration: str | None = None) -> None: + """Runs a built executable + + Args: + target: The name of the build target to run + configuration: Optional named configuration to use. Interpretation is generator-specific. + """ + raise NotImplementedError() diff --git a/cppython/test/mock/generator.py b/cppython/test/mock/generator.py index 85e96d3f..536b3b62 100644 --- a/cppython/test/mock/generator.py +++ b/cppython/test/mock/generator.py @@ -60,3 +60,15 @@ def sync_types() -> list[type[SyncData]]: def sync(self, sync_data: SyncData) -> None: """Synchronizes generator files and state with the providers input""" + + def build(self, configuration: str | None = None) -> None: + """No-op build for testing""" + + def test(self, configuration: str | None = None) -> None: + """No-op test for testing""" + + def bench(self, configuration: str | None = None) -> None: + """No-op bench for testing""" + + def run(self, target: str, configuration: str | None = None) -> None: + """No-op run for testing""" diff --git a/cppython/test/mock/provider.py b/cppython/test/mock/provider.py index 65c2090d..1735fd03 100644 --- a/cppython/test/mock/provider.py +++ b/cppython/test/mock/provider.py @@ -83,6 +83,12 @@ async def download_tooling(cls, directory: DirectoryPath) -> None: """Downloads the provider tooling""" cls.downloaded = directory + def verify_installed(self) -> None: + """Verify that mock provider artifacts exist. 
+ + Always passes since this is a mock. + """ + def install(self, groups: list[str] | None = None) -> None: """Installs the provider diff --git a/cppython/test/pytest/contracts.py b/cppython/test/pytest/contracts.py index 2c6f0f53..4208b2a5 100644 --- a/cppython/test/pytest/contracts.py +++ b/cppython/test/pytest/contracts.py @@ -137,6 +137,13 @@ def test_install(plugin: T) -> None: """Ensure that the provider install command functions""" plugin.install() + @staticmethod + def test_verify_installed_after_install(plugin: T) -> None: + """Ensure that verify_installed passes after a successful install""" + plugin.install() + # Should not raise + plugin.verify_installed() + @staticmethod def test_update(plugin: T) -> None: """Ensure that the provider update command functions""" diff --git a/cppython/utility/exception.py b/cppython/utility/exception.py index bee69599..89744cc6 100644 --- a/cppython/utility/exception.py +++ b/cppython/utility/exception.py @@ -2,111 +2,62 @@ class PluginError(Exception): - """Raised when there is a plugin error""" + """Raised when there is a plugin error.""" def __init__(self, error: str) -> None: - """Initializes the error + """Initializes the error. Args: error: The error message """ - self._error = error - + self.error = error super().__init__(error) - @property - def error(self) -> str: - """Returns the underlying error - - Returns: - str -- The underlying error - """ - return self._error - class NotSupportedError(Exception): - """Raised when something is not supported""" + """Raised when something is not supported.""" def __init__(self, error: str) -> None: - """Initializes the error + """Initializes the error. 
Args: error: The error message """ - self._error = error - + self.error = error super().__init__(error) - @property - def error(self) -> str: - """Returns the underlying error - - Returns: - str -- The underlying error - """ - return self._error - class ProviderInstallationError(Exception): - """Raised when provider installation fails""" + """Raised when provider installation fails.""" def __init__(self, provider_name: str, error: str, original_error: Exception | None = None) -> None: - """Initializes the error + """Initializes the error. Args: provider_name: The name of the provider that failed error: The error message original_error: The original exception that caused this error """ - self._provider_name = provider_name - self._error = error - self._original_error = original_error - - message = f"Provider '{provider_name}' installation failed: {error}" - super().__init__(message) - - @property - def provider_name(self) -> str: - """Returns the provider name - - Returns: - str -- The provider name - """ - return self._provider_name - - @property - def error(self) -> str: - """Returns the underlying error - - Returns: - str -- The underlying error - """ - return self._error - - @property - def original_error(self) -> Exception | None: - """Returns the original exception - - Returns: - Exception | None -- The original exception if available - """ - return self._original_error + self.provider_name = provider_name + self.error = error + self.original_error = original_error + super().__init__(f"Provider '{provider_name}' installation failed: {error}") class ProviderConfigurationError(Exception): - """Raised when provider configuration is invalid""" + """Raised when provider configuration is invalid.""" def __init__(self, provider_name: str, error: str, configuration_key: str | None = None) -> None: - """Initializes the error + """Initializes the error. 
Args: provider_name: The name of the provider with invalid configuration error: The error message configuration_key: The specific configuration key that caused the error """ - self._provider_name = provider_name - self._error = error - self._configuration_key = configuration_key + self.provider_name = provider_name + self.error = error + self.configuration_key = configuration_key message = f"Provider '{provider_name}' configuration error" if configuration_key: @@ -114,39 +65,32 @@ def __init__(self, provider_name: str, error: str, configuration_key: str | None message += f': {error}' super().__init__(message) - @property - def provider_name(self) -> str: - """Returns the provider name - Returns: - str -- The provider name - """ - return self._provider_name +class InstallationVerificationError(Exception): + """Raised when provider artifacts are missing and the user needs to run install first.""" - @property - def error(self) -> str: - """Returns the underlying error + def __init__(self, provider_name: str, missing_artifacts: list[str]) -> None: + """Initializes the error. - Returns: - str -- The underlying error + Args: + provider_name: The name of the provider whose artifacts are missing + missing_artifacts: List of descriptions of what is missing """ - return self._error + self.provider_name = provider_name + self.missing_artifacts = missing_artifacts - @property - def configuration_key(self) -> str | None: - """Returns the configuration key - - Returns: - str | None -- The configuration key if available - """ - return self._configuration_key + artifact_list = ', '.join(missing_artifacts) + super().__init__( + f"Provider '{provider_name}' artifacts not found: {artifact_list}. " + f"Run 'cppython install' or 'pdm install' before building." 
+ ) class ProviderToolingError(Exception): - """Raised when provider tooling operations fail""" + """Raised when provider tooling operations fail.""" def __init__(self, provider_name: str, operation: str, error: str, original_error: Exception | None = None) -> None: - """Initializes the error + """Initializes the error. Args: provider_name: The name of the provider that failed @@ -154,46 +98,8 @@ def __init__(self, provider_name: str, operation: str, error: str, original_erro error: The error message original_error: The original exception that caused this error """ - self._provider_name = provider_name - self._operation = operation - self._error = error - self._original_error = original_error - - message = f"Provider '{provider_name}' {operation} failed: {error}" - super().__init__(message) - - @property - def provider_name(self) -> str: - """Returns the provider name - - Returns: - str -- The provider name - """ - return self._provider_name - - @property - def operation(self) -> str: - """Returns the operation that failed - - Returns: - str -- The operation name - """ - return self._operation - - @property - def error(self) -> str: - """Returns the underlying error - - Returns: - str -- The underlying error - """ - return self._error - - @property - def original_error(self) -> Exception | None: - """Returns the original exception - - Returns: - Exception | None -- The original exception if available - """ - return self._original_error + self.provider_name = provider_name + self.operation = operation + self.error = error + self.original_error = original_error + super().__init__(f"Provider '{provider_name}' {operation} failed: {error}") diff --git a/docs/plugins/conan/integration.md b/docs/plugins/conan/integration.md index f925c616..988471b2 100644 --- a/docs/plugins/conan/integration.md +++ b/docs/plugins/conan/integration.md @@ -27,7 +27,7 @@ You can customize this file for package metadata like name, version, and setting If you have an existing `conanfile.py`, 
back it up and run `cppython install` to generate both files. Then update your conanfile to inherit from `CPPythonBase`. ```python -from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout +from conan.tools.cmake import CMake, CMakeConfigDeps, CMakeToolchain, cmake_layout from conanfile_base import CPPythonBase # Import the base class diff --git a/examples/conan_cmake/library/CMakeLists.txt b/examples/conan_cmake/library/CMakeLists.txt index 93e1c54c..bcc1214f 100644 --- a/examples/conan_cmake/library/CMakeLists.txt +++ b/examples/conan_cmake/library/CMakeLists.txt @@ -46,10 +46,6 @@ install( DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/mathutils ) -# Create empty include directory to satisfy CMake's exported target requirements -# (module-only library with no headers, but CMake expects the directory to exist) -install(DIRECTORY DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) - # Export and package configuration install( EXPORT mathutilsTargets diff --git a/examples/conan_cmake/library/pyproject.toml b/examples/conan_cmake/library/pyproject.toml index 3b5a30fe..bf0edffe 100644 --- a/examples/conan_cmake/library/pyproject.toml +++ b/examples/conan_cmake/library/pyproject.toml @@ -18,6 +18,8 @@ install-path = "install" dependencies = ["fmt>=12.1.0"] +[tool.cppython.generators.cmake] + [tool.cppython.providers.conan] [tool.pdm] diff --git a/examples/conan_cmake/library/src/mathutils.ixx b/examples/conan_cmake/library/src/mathutils.ixx index 88834193..30685354 100644 --- a/examples/conan_cmake/library/src/mathutils.ixx +++ b/examples/conan_cmake/library/src/mathutils.ixx @@ -52,7 +52,7 @@ namespace mathutils void print_result(const char *operation, double a, double b, double result) { fmt::print(fg(fmt::terminal_color::green), - "MathUtils {}: {} + {} = {}\n", + "MathUtils {}: ({}, {}) = {}\n", operation, a, b, result); } } diff --git a/examples/conan_cmake/library/test_package/CMakeLists.txt b/examples/conan_cmake/library/test_package/CMakeLists.txt index 
780bd9a9..b4d7f732 100644 --- a/examples/conan_cmake/library/test_package/CMakeLists.txt +++ b/examples/conan_cmake/library/test_package/CMakeLists.txt @@ -1,12 +1,8 @@ cmake_minimum_required(VERSION 4.0) -# Enable std module support for MSVC - MUST be before project() -set(CMAKE_EXPERIMENTAL_CXX_IMPORT_STD "d0edc3af-4c50-42ea-a356-e2862fe7a444") - project(MathUtilsConsumer LANGUAGES CXX) -set(CMAKE_CXX_STANDARD 23) -set(CMAKE_CXX_STANDARD_REQUIRED ON) +# Enable 'import std;' support (requires __CMAKE::CXX23 from compiler detection) set(CMAKE_CXX_MODULE_STD ON) find_package(mathutils REQUIRED) diff --git a/examples/conan_cmake/library/test_package/conanfile.py b/examples/conan_cmake/library/test_package/conanfile.py index 57e15ffe..25d24a9b 100644 --- a/examples/conan_cmake/library/test_package/conanfile.py +++ b/examples/conan_cmake/library/test_package/conanfile.py @@ -4,7 +4,7 @@ from conan import ConanFile from conan.tools.build import can_run -from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout +from conan.tools.cmake import CMake, CMakeConfigDeps, CMakeToolchain, cmake_layout class MathUtilsTestConan(ConanFile): @@ -22,7 +22,7 @@ def layout(self): def generate(self): """Generate CMake dependencies and toolchain.""" - deps = CMakeDeps(self) + deps = CMakeConfigDeps(self) deps.generate() tc = CMakeToolchain(self) tc.generate() diff --git a/examples/conan_cmake/library/test_package/main.cpp b/examples/conan_cmake/library/test_package/main.cpp index ea73b3d1..b19807ef 100644 --- a/examples/conan_cmake/library/test_package/main.cpp +++ b/examples/conan_cmake/library/test_package/main.cpp @@ -3,11 +3,10 @@ import std; int main() { - // Test the mathutils library std::cout << "Testing MathUtils library..." 
<< std::endl; - double result1 = mathutils::add(5.0, 3.0); - double result2 = mathutils::multiply(4.0, 2.5); + std::cout << "add(5, 3) = " << mathutils::add(5.0, 3.0) << std::endl; + std::cout << "multiply(4, 2.5) = " << mathutils::multiply(4.0, 2.5) << std::endl; std::cout << "MathUtils tests completed successfully!" << std::endl; return 0; diff --git a/pdm.lock b/pdm.lock index ba4ece9f..c2e59cda 100644 --- a/pdm.lock +++ b/pdm.lock @@ -5,11 +5,21 @@ groups = ["default", "cmake", "conan", "docs", "git", "lint", "pdm", "pytest", "test"] strategy = [] lock_version = "4.5.0" -content_hash = "sha256:1bae711fef71d97486071202b156a2e3a75f019f55372aa7800d031a809447e5" +content_hash = "sha256:d7a6b39f750be5a4340b21439665064b066fd926c616853f914b3c0340ac3453" [[metadata.targets]] requires_python = ">=3.14" +[[package]] +name = "annotated-doc" +version = "0.0.4" +requires_python = ">=3.8" +summary = "Document parameters, class attributes, return types, and variables inline, with Annotated." +files = [ + {file = "annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320"}, + {file = "annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4"}, +] + [[package]] name = "annotated-types" version = "0.7.0" @@ -32,6 +42,19 @@ files = [ {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, ] +[[package]] +name = "anysqlite" +version = "0.0.5" +requires_python = ">=3.8" +summary = "" +dependencies = [ + "anyio>3.4.0", +] +files = [ + {file = "anysqlite-0.0.5-py3-none-any.whl", hash = "sha256:cb345dc4f76f6b37f768d7a0b3e9cf5c700dfcb7a6356af8ab46a11f666edbe7"}, + {file = "anysqlite-0.0.5.tar.gz", hash = "sha256:9dfcf87baf6b93426ad1d9118088c41dbf24ef01b445eea4a5d486bac2755cce"}, +] + [[package]] name = "blinker" version = "1.9.0" @@ -74,41 +97,42 @@ files = [ [[package]] name = "click" -version = "8.1.8" -summary = "" 
+version = "8.3.1" +requires_python = ">=3.10" +summary = "Composable command line interface toolkit" dependencies = [ - "colorama; sys_platform == \"win32\"", + "colorama; platform_system == \"Windows\"", ] files = [ - {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, - {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, + {file = "click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6"}, + {file = "click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a"}, ] [[package]] name = "cmake" -version = "4.2.0" +version = "4.2.1" requires_python = ">=3.8" summary = "CMake is an open-source, cross-platform family of tools designed to build, test and package software" files = [ - {file = "cmake-4.2.0-py3-none-macosx_10_10_universal2.whl", hash = "sha256:28595ec42fb6f81128b7a9bdbdfcb7b785ad197dbfb1b785cec5727a97a521f4"}, - {file = "cmake-4.2.0-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1a914c39a9349246b66089e6d741f1a3009c32fcd3a5110f9ddfc49adb4952c2"}, - {file = "cmake-4.2.0-py3-none-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0940b5b00d2b65efbd409bfe83c4144a1a4f9bac5845c2c2f52b5cb71d5ca87f"}, - {file = "cmake-4.2.0-py3-none-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a94596c64c3a302ad27fd2aa23dd19829b3a64e9493adf87758b0c7ceee6e544"}, - {file = "cmake-4.2.0-py3-none-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:1b537c69c4e91a29e82e2651e54f92b9794f4f7e9bb5385951065272cd11abe0"}, - {file = "cmake-4.2.0-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5461ceca47ad352bdb3db2fdd5affdbc5707aaee415c5ff12773b8cc0d5f5949"}, - {file = "cmake-4.2.0-py3-none-manylinux_2_31_armv7l.whl", hash = 
"sha256:c4ea343eba9896b8ae94ffc7141902c2a40ce5ade5be1ebe5d2dc14109a4d9b4"}, - {file = "cmake-4.2.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9f34c9018425767e4ff42b66442a57dea34809341208c5de5432ec2a87bdce59"}, - {file = "cmake-4.2.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:de8784c73dc24c34f6e9cadafc4848db5ff124aaf372e58b6550ed50726a81f9"}, - {file = "cmake-4.2.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:3b71cc13ba664b19eddbdf68ab30f12c27af93f987ee5ef66ce585d0b4ef5614"}, - {file = "cmake-4.2.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3dd6dcb08b5562e22f6b433d45bd07e3ef2e156284ddeefcb9da4ec68b9ba6bb"}, - {file = "cmake-4.2.0-py3-none-musllinux_1_2_ppc64le.whl", hash = "sha256:1971a8ef69a31e814cb72c48f39bcbe6b45fff4afced4a3970c85dda7f4a755c"}, - {file = "cmake-4.2.0-py3-none-musllinux_1_2_riscv64.whl", hash = "sha256:ce565817a47798d75d6b17b21b2389826dee069e2a9eeb07beefc6f055e79191"}, - {file = "cmake-4.2.0-py3-none-musllinux_1_2_s390x.whl", hash = "sha256:c43baab5a829b92660d4eaf2896063da49d500a066a5088139d87793cb75b2e0"}, - {file = "cmake-4.2.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:bf11883a4cb3529f69746857df9733cae6175f07361f8016f8f050a3177e7767"}, - {file = "cmake-4.2.0-py3-none-win32.whl", hash = "sha256:a052030a9722c55d50025fac1f74b499aa2ce0cb137733aa1c6fb49689f560cb"}, - {file = "cmake-4.2.0-py3-none-win_amd64.whl", hash = "sha256:fb33a0c0486c3f4923a133dbeef4d009b798f1d4e6768381670736665a7f8c0a"}, - {file = "cmake-4.2.0-py3-none-win_arm64.whl", hash = "sha256:5c0dbe7a37991720d89c84825a4818f19debc8b10d5e4636b56c8fc08bec7a00"}, - {file = "cmake-4.2.0.tar.gz", hash = "sha256:7744c20e4a23e68dea276d819767d2bdbb45442cc342560b03ff693b755cd181"}, + {file = "cmake-4.2.1-py3-none-macosx_10_10_universal2.whl", hash = "sha256:ec44fa08b6ca25a63f7356a442469840841145d7b7b6f4d65318b6bd59a0f7f6"}, + {file = "cmake-4.2.1-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:c8bdf88f8d50b64c88ffc75fb671f3ab017d803f36589f21c3f1e9f3a1b236a7"}, + {file = "cmake-4.2.1-py3-none-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:6ca394cdea61534f12e30f0188b19ace8ba844088105b77b9fd70e6df18ef241"}, + {file = "cmake-4.2.1-py3-none-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c5742041f8e641d977928207e2697e9cc3242d0d01f7cb8671f63ad45dcc447b"}, + {file = "cmake-4.2.1-py3-none-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:ae0f51d2b8dd00a7ac1578c19364140358596e449d2ac1b978af3f0b35737d01"}, + {file = "cmake-4.2.1-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6333a2b16e1d55373419b9c1572a155b315bfb9d834fbdbba0f7d3428437c785"}, + {file = "cmake-4.2.1-py3-none-manylinux_2_31_armv7l.whl", hash = "sha256:4d7a62c462cc81a6f7a5e4db7b298b4e66d851010418c8cdc5a9de0a8701f60f"}, + {file = "cmake-4.2.1-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:3455391ffce8a860bbbd22b83c2188f13806100a21f28b8ab2c6a785def25616"}, + {file = "cmake-4.2.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4d0dfe33c993e3d58cfebe2ab1205668411aae1e6cb78430f3b9d070a97e1274"}, + {file = "cmake-4.2.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:52db8740e81d10c8d103899c87e0100e6aab969295ab99ce51eb11de4c36c9ce"}, + {file = "cmake-4.2.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:493abf42c003034c2bb1ad58a221542174a5c0fd2a76e9fdd91709ae6e53263c"}, + {file = "cmake-4.2.1-py3-none-musllinux_1_2_ppc64le.whl", hash = "sha256:3d8d7632bb27cf1d0ac78098f2f7dfb7019927f35fb5a8c1508b17524af70000"}, + {file = "cmake-4.2.1-py3-none-musllinux_1_2_riscv64.whl", hash = "sha256:3e89d391096fdbdaab82e28b7e1fa964a873c0ba8d77c3542260c7d115aaac1f"}, + {file = "cmake-4.2.1-py3-none-musllinux_1_2_s390x.whl", hash = "sha256:e758ae635c75aaf0258e2c46fe95a3821f01011d5dbe29b7f045976b88ce3ca8"}, + {file = "cmake-4.2.1-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:fecc03edef6257b2bc8784f7880e84fe8a0b0fb54c952528c61ce14a4d693e16"}, + {file = "cmake-4.2.1-py3-none-win32.whl", hash = "sha256:72c860dae7c0315b05f59fd8e19253861c6e42f8d391a26aa6e2b4c9bd6014b8"}, + {file = "cmake-4.2.1-py3-none-win_amd64.whl", hash = "sha256:c186e7b826978f86bcbada91845e949e1f5ce5c670d6db49f7ecf5bac1b334e3"}, + {file = "cmake-4.2.1-py3-none-win_arm64.whl", hash = "sha256:82224245741cf389d7c9072002ae2a81b63accb42732803db9b449c9423d546d"}, + {file = "cmake-4.2.1.tar.gz", hash = "sha256:a07a790ca65946667c0fb286549e8e0b5a850e2f8170ae60d3418573011ca218"}, ] [[package]] @@ -122,7 +146,7 @@ files = [ [[package]] name = "conan" -version = "2.23.0" +version = "2.25.2" requires_python = ">=3.7" summary = "Conan C/C++ package manager" dependencies = [ @@ -137,7 +161,7 @@ dependencies = [ "urllib3<3.0,>=1.26.6", ] files = [ - {file = "conan-2.23.0.tar.gz", hash = "sha256:1a7b34be6fcaa4a4c0342c54187fb87aa4b487a9b4cf2136f2207f8b3a127868"}, + {file = "conan-2.25.2.tar.gz", hash = "sha256:3a5214a095cee5c3d21ed45ea31139705703e49fa9c4bb45c4c73f5ee17a1031"}, ] [[package]] @@ -314,18 +338,28 @@ files = [ [[package]] name = "dulwich" -version = "0.24.10" -requires_python = ">=3.9" +version = "1.1.0" +requires_python = ">=3.10" summary = "Python Git Library" dependencies = [ "typing-extensions>=4.6.0; python_version < \"3.12\"", "urllib3>=2.2.2", ] files = [ - {file = "dulwich-0.24.10-cp314-cp314-android_24_arm64_v8a.whl", hash = "sha256:44f62e0244531a8c43ca7771e201ec9e7f6a2fb27f8c3c623939bc03c1f50423"}, - {file = "dulwich-0.24.10-cp314-cp314-android_24_x86_64.whl", hash = "sha256:e2eda4a634d6f1ac4c0d4786f8772495c8840dfc2b3e595507376bf5e5b0f9c5"}, - {file = "dulwich-0.24.10-py3-none-any.whl", hash = "sha256:15b32f8c3116a1c0a042dde8da96f65a607e263e860ee42b3d4a98ce2c2f4a06"}, - {file = "dulwich-0.24.10.tar.gz", hash = "sha256:30e028979b6fa7220c913da9c786026611c10746c06496149742602b36a11f6b"}, + {file = "dulwich-1.1.0-cp314-cp314-android_24_arm64_v8a.whl", hash = 
"sha256:fc38cc6f60c5e475fa61dcd2b743113f35377602c1ba1c82264898d97a7d3c48"}, + {file = "dulwich-1.1.0-cp314-cp314-android_24_x86_64.whl", hash = "sha256:c9752d25f01e92587f8db52e50daf3e970deb49555340653ea44ba5e60f0f416"}, + {file = "dulwich-1.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:693c450a5d327a6a5276f5292d3dd0bc473066d2fd2a2d69a990d7738535deb6"}, + {file = "dulwich-1.1.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:dff1b67e0f76fcaae8f7345c05b1c4f00c11a6c42ace20864e80e7964af31827"}, + {file = "dulwich-1.1.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:1b1b9adaf82301fd7b360a5fa521cec1623cb9d77a0c5a09d04396637b39eb48"}, + {file = "dulwich-1.1.0-cp314-cp314-win32.whl", hash = "sha256:eb5440145bb2bbab71cdfa149fd297a8b7d4db889ab90c58d7a07009a73c1d28"}, + {file = "dulwich-1.1.0-cp314-cp314-win_amd64.whl", hash = "sha256:333b0f93b289b14f98870317fb0583fdf73d5341f21fd09c694aa88bb06ad911"}, + {file = "dulwich-1.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a0f3421802225caedd11e95ce40f6a8d3c7a5df906489b6a5f49a20f88f62928"}, + {file = "dulwich-1.1.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:518307ab080746ee9c32fc13e76ad4f7df8f7665bb85922e974037dd9415541a"}, + {file = "dulwich-1.1.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:0890fff677c617efbac0cd4584bec9753388e6cd6336e7131338ea034b47e899"}, + {file = "dulwich-1.1.0-cp314-cp314t-win32.whl", hash = "sha256:a05a1049b3928205672913f4c490cf7b08afaa3e7ee7e55e15476e696412672f"}, + {file = "dulwich-1.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:ba6f3f0807868f788b7f1d53b9ac0be3e425136b16563994f5ef6ecf5b7c7863"}, + {file = "dulwich-1.1.0-py3-none-any.whl", hash = "sha256:bcd67e7f9bdffb4b660330c4597d251cd33e74f5df6898a2c1e6a1730a62af06"}, + {file = "dulwich-1.1.0.tar.gz", hash = "sha256:9aa855db9fee0a7065ae9ffb38e14e353876d82f17e33e1a1fb3830eb8d0cf43"}, ] [[package]] @@ -370,14 +404,33 @@ files = [ [[package]] name = "hishel" -version = "0.1.3" -summary = "" 
+version = "1.1.9" +requires_python = ">=3.10" +summary = "Elegant HTTP Caching for Python" dependencies = [ - "httpx", + "msgpack>=1.1.2", + "typing-extensions>=4.14.1", +] +files = [ + {file = "hishel-1.1.9-py3-none-any.whl", hash = "sha256:6b6f294cb7593f170a9bf874849cc85330ff81f5e35d2ca189548498fed10806"}, + {file = "hishel-1.1.9.tar.gz", hash = "sha256:47248a50e4cff4fbaa141832782d8c07b2169914916f4bd792f37449176dfa23"}, +] + +[[package]] +name = "hishel" +version = "1.1.9" +extras = ["httpx"] +requires_python = ">=3.10" +summary = "Elegant HTTP Caching for Python" +dependencies = [ + "anyio>=4.9.0", + "anysqlite>=0.0.5", + "hishel==1.1.9", + "httpx>=0.28.1", ] files = [ - {file = "hishel-0.1.3-py3-none-any.whl", hash = "sha256:bae3ba9970ffc56f90014aea2b3019158fb0a5b0b635a56f414ba6b96651966e"}, - {file = "hishel-0.1.3.tar.gz", hash = "sha256:db3e07429cb739dcda851ff9b35b0f3e7589e21b90ee167df54336ac608b6ec3"}, + {file = "hishel-1.1.9-py3-none-any.whl", hash = "sha256:6b6f294cb7593f170a9bf874849cc85330ff81f5e35d2ca189548498fed10806"}, + {file = "hishel-1.1.9.tar.gz", hash = "sha256:47248a50e4cff4fbaa141832782d8c07b2169914916f4bd792f37449176dfa23"}, ] [[package]] @@ -532,23 +585,78 @@ files = [ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] +[[package]] +name = "meson" +version = "1.10.1" +requires_python = ">=3.7" +summary = "A high performance build system" +files = [ + {file = "meson-1.10.1-py3-none-any.whl", hash = "sha256:fe43d1cc2e6de146fbea78f3a062194bcc0e779efc8a0f0d7c35544dfb86731f"}, + {file = "meson-1.10.1.tar.gz", hash = "sha256:c42296f12db316a4515b9375a5df330f2e751ccdd4f608430d41d7d6210e4317"}, +] + +[[package]] +name = "meson-python" +version = "0.19.0" +requires_python = ">=3.9" +summary = "Meson Python build backend (PEP 517)" +dependencies = [ + "meson>=0.64.0; python_version < \"3.12\"", + "meson>=1.2.3; python_version >= \"3.12\"", + "packaging>=23.2; sys_platform != \"ios\"", + 
"packaging>=24.2; sys_platform == \"ios\"", + "pyproject-metadata>=0.9.0", + "tomli>=1.0.0; python_version < \"3.11\"", +] +files = [ + {file = "meson_python-0.19.0-py3-none-any.whl", hash = "sha256:67b5906c37404396d23c195e12c8825506074460d4a2e7083266b845d14f0298"}, + {file = "meson_python-0.19.0.tar.gz", hash = "sha256:9959d198aa69b57fcfd354a34518c6f795b781a73ed0656f4d01660160cc2553"}, +] + +[[package]] +name = "msgpack" +version = "1.1.2" +requires_python = ">=3.9" +summary = "MessagePack serializer" +files = [ + {file = "msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00"}, + {file = "msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939"}, + {file = "msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e"}, + {file = "msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931"}, + {file = "msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014"}, + {file = "msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2"}, + {file = "msgpack-1.1.2-cp314-cp314-win32.whl", hash = "sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717"}, + {file = "msgpack-1.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b"}, + {file = "msgpack-1.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af"}, + {file = "msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = 
"sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a"}, + {file = "msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b"}, + {file = "msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245"}, + {file = "msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90"}, + {file = "msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20"}, + {file = "msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27"}, + {file = "msgpack-1.1.2-cp314-cp314t-win32.whl", hash = "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b"}, + {file = "msgpack-1.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff"}, + {file = "msgpack-1.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46"}, + {file = "msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e"}, +] + [[package]] name = "nanobind" -version = "2.9.2" +version = "2.11.0" summary = "nanobind: tiny and efficient C++/Python bindings" files = [ - {file = "nanobind-2.9.2-py3-none-any.whl", hash = "sha256:c37957ffd5eac7eda349cff3622ecd32e5ee1244ecc912c99b5bc8188bafd16e"}, - {file = "nanobind-2.9.2.tar.gz", hash = "sha256:e7608472de99d375759814cab3e2c94aba3f9ec80e62cfef8ced495ca5c27d6e"}, + {file = "nanobind-2.11.0-py3-none-any.whl", hash = "sha256:8097442c3e55d011a67f016ce1d9567ed9e3cdb3ad6749f13a76dbbc2721f0ee"}, + {file = "nanobind-2.11.0.tar.gz", hash = 
"sha256:6d98d063c61dbbd05a2d903e59be398bfcff9d59c54fbbc9d4488960485d40d0"}, ] [[package]] name = "packaging" -version = "25.0" +version = "26.0" requires_python = ">=3.8" -summary = "" +summary = "Core utilities for Python packages" files = [ - {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, - {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, + {file = "packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529"}, + {file = "packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4"}, ] [[package]] @@ -581,7 +689,7 @@ files = [ [[package]] name = "pdm" -version = "2.26.2" +version = "2.26.6" requires_python = ">=3.9" summary = "A modern Python package and dependency manager supporting the latest PEP standards" dependencies = [ @@ -590,7 +698,7 @@ dependencies = [ "dep-logic>=0.5", "filelock>=3.13", "findpython<1.0.0a0,>=0.7.0", - "hishel<1.0.0,>=0.0.32", + "hishel[httpx]>=1.0.0", "httpcore>=1.0.6", "httpx[socks]<1,>0.20", "id>=1.5.0", @@ -611,8 +719,8 @@ dependencies = [ "virtualenv>=20", ] files = [ - {file = "pdm-2.26.2-py3-none-any.whl", hash = "sha256:b3b0199f6eec37284192a6feb26bef21f911d45d5aa4b02323ff211752abf04b"}, - {file = "pdm-2.26.2.tar.gz", hash = "sha256:98207f8aabd6913a25ee0b4985e79e1652e2db274915f3ccf9408e33191ede4e"}, + {file = "pdm-2.26.6-py3-none-any.whl", hash = "sha256:39583edee738cc62b9ec2d5b4e434f44e501b55f609401a89732b5e8a451944f"}, + {file = "pdm-2.26.6.tar.gz", hash = "sha256:771f95b9a484f9eb34dcf8d851be6ff95333e4f3c46189f9004cfd5cc2e925f9"}, ] [[package]] @@ -721,26 +829,39 @@ files = [ {file = "pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8"}, ] +[[package]] +name = "pyproject-metadata" +version = "0.11.0" +requires_python = ">=3.8" 
+summary = "PEP 621 metadata parsing" +dependencies = [ + "packaging>=23.2", +] +files = [ + {file = "pyproject_metadata-0.11.0-py3-none-any.whl", hash = "sha256:85bbecca8694e2c00f63b492c96921d6c228454057c88e7c352b2077fcaa4096"}, + {file = "pyproject_metadata-0.11.0.tar.gz", hash = "sha256:c72fa49418bb7c5a10f25e050c418009898d1c051721d19f98a6fb6da59a66cf"}, +] + [[package]] name = "pyrefly" -version = "0.43.1" +version = "0.53.0" requires_python = ">=3.8" summary = "A fast type checker and language server for Python with powerful IDE features" files = [ - {file = "pyrefly-0.43.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e60ada6bd3eb203e72457f3613d35e9997db1b378298c851301cfe84b58c3be9"}, - {file = "pyrefly-0.43.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:488b5b77ab8c0998fe9fa48a0ac8c443b13a57a40ba2d998bf29296b5c674cab"}, - {file = "pyrefly-0.43.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e2af14499acce4382aec72d36fcf2807bf667f839bc528bacac83b75106643f"}, - {file = "pyrefly-0.43.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5cad0ffbed68e99cd072a8bbe1faa30a4a26efa485775e2a9d0e5426a84ef19f"}, - {file = "pyrefly-0.43.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc79bdb599b377178457950c5cc4a61dfb82d8ab388758d1958accddfcef7f6b"}, - {file = "pyrefly-0.43.1-py3-none-win32.whl", hash = "sha256:e7253b185bb5e5149fb0698f909ccfe7f95d128fbc52fadada0ed539991bcc60"}, - {file = "pyrefly-0.43.1-py3-none-win_amd64.whl", hash = "sha256:8befa8a7d529db11ed075e1cfec3e30607dffea4a6e4c7622cce50fcec2467cf"}, - {file = "pyrefly-0.43.1-py3-none-win_arm64.whl", hash = "sha256:8359bb854f5a238c364346836291947ef084516329a50bb400fddf0dd8a9b461"}, - {file = "pyrefly-0.43.1.tar.gz", hash = "sha256:f97b09a45cbff445025f2581bd7bc20ff904baef19b97d134262e11f88fb154e"}, + {file = "pyrefly-0.53.0-py3-none-macosx_10_12_x86_64.whl", hash = 
"sha256:79d7fb35dff0988b3943c26f74cc752fad54357a0bc33f7db665f02d1c9a5bcc"}, + {file = "pyrefly-0.53.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e1d98b1e86f3c38db44860695b7986e731238e1b19c3cad7a3050476a8f6f84d"}, + {file = "pyrefly-0.53.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb9f2440f7e0c70aa18400f44aed994c326a1ab00f2b01cf7253a63fc62d7c6b"}, + {file = "pyrefly-0.53.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4e826a5ff2aba2c41e02e6094580751c512db7916e60728cd8612dbcf178d7b"}, + {file = "pyrefly-0.53.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4c69410c7a96b417a390a0e3d340f4370fdab02f9d3eaa222c4bd42e3ce24a"}, + {file = "pyrefly-0.53.0-py3-none-win32.whl", hash = "sha256:00687bb6be6e366b8c0137a89625da40ced3b9212a65e561857ff888fe88e6e8"}, + {file = "pyrefly-0.53.0-py3-none-win_amd64.whl", hash = "sha256:e0512e6f7af44ae01cfddba096ff7740e15cbd1d0497a3d34a7afcb504e2b300"}, + {file = "pyrefly-0.53.0-py3-none-win_arm64.whl", hash = "sha256:5066e2102769683749102421b8b8667cae26abe1827617f04e8df4317e0a94af"}, + {file = "pyrefly-0.53.0.tar.gz", hash = "sha256:aef117e8abb9aa4cf17fc64fbf450d825d3c65fc9de3c02ed20129ebdd57aa74"}, ] [[package]] name = "pytest" -version = "9.0.1" +version = "9.0.2" requires_python = ">=3.10" summary = "pytest: simple powerful testing with Python" dependencies = [ @@ -753,8 +874,8 @@ dependencies = [ "tomli>=1; python_version < \"3.11\"", ] files = [ - {file = "pytest-9.0.1-py3-none-any.whl", hash = "sha256:67be0030d194df2dfa7b556f2e56fb3c3315bd5c8822c6951162b92b32ce7dad"}, - {file = "pytest-9.0.1.tar.gz", hash = "sha256:3e9c069ea73583e255c3b21cf46b8d3c56f6e3a1a8f6da94ccb0fcf57b9d73c8"}, + {file = "pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b"}, + {file = "pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11"}, ] [[package]] @@ -873,29 
+994,28 @@ files = [ [[package]] name = "ruff" -version = "0.14.7" +version = "0.15.1" requires_python = ">=3.7" summary = "An extremely fast Python linter and code formatter, written in Rust." files = [ - {file = "ruff-0.14.7-py3-none-linux_armv6l.whl", hash = "sha256:b9d5cb5a176c7236892ad7224bc1e63902e4842c460a0b5210701b13e3de4fca"}, - {file = "ruff-0.14.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:3f64fe375aefaf36ca7d7250292141e39b4cea8250427482ae779a2aa5d90015"}, - {file = "ruff-0.14.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:93e83bd3a9e1a3bda64cb771c0d47cda0e0d148165013ae2d3554d718632d554"}, - {file = "ruff-0.14.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3838948e3facc59a6070795de2ae16e5786861850f78d5914a03f12659e88f94"}, - {file = "ruff-0.14.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24c8487194d38b6d71cd0fd17a5b6715cda29f59baca1defe1e3a03240f851d1"}, - {file = "ruff-0.14.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79c73db6833f058a4be8ffe4a0913b6d4ad41f6324745179bd2aa09275b01d0b"}, - {file = "ruff-0.14.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:12eb7014fccff10fc62d15c79d8a6be4d0c2d60fe3f8e4d169a0d2def75f5dad"}, - {file = "ruff-0.14.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c623bbdc902de7ff715a93fa3bb377a4e42dd696937bf95669118773dbf0c50"}, - {file = "ruff-0.14.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f53accc02ed2d200fa621593cdb3c1ae06aa9b2c3cae70bc96f72f0000ae97a9"}, - {file = "ruff-0.14.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:281f0e61a23fcdcffca210591f0f53aafaa15f9025b5b3f9706879aaa8683bc4"}, - {file = "ruff-0.14.7-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:dbbaa5e14148965b91cb090236931182ee522a5fac9bc5575bafc5c07b9f9682"}, - {file = "ruff-0.14.7-py3-none-musllinux_1_2_aarch64.whl", hash = 
"sha256:1464b6e54880c0fe2f2d6eaefb6db15373331414eddf89d6b903767ae2458143"}, - {file = "ruff-0.14.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f217ed871e4621ea6128460df57b19ce0580606c23aeab50f5de425d05226784"}, - {file = "ruff-0.14.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6be02e849440ed3602d2eb478ff7ff07d53e3758f7948a2a598829660988619e"}, - {file = "ruff-0.14.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:19a0f116ee5e2b468dfe80c41c84e2bbd6b74f7b719bee86c2ecde0a34563bcc"}, - {file = "ruff-0.14.7-py3-none-win32.whl", hash = "sha256:e33052c9199b347c8937937163b9b149ef6ab2e4bb37b042e593da2e6f6cccfa"}, - {file = "ruff-0.14.7-py3-none-win_amd64.whl", hash = "sha256:e17a20ad0d3fad47a326d773a042b924d3ac31c6ca6deb6c72e9e6b5f661a7c6"}, - {file = "ruff-0.14.7-py3-none-win_arm64.whl", hash = "sha256:be4d653d3bea1b19742fcc6502354e32f65cd61ff2fbdb365803ef2c2aec6228"}, - {file = "ruff-0.14.7.tar.gz", hash = "sha256:3417deb75d23bd14a722b57b0a1435561db65f0ad97435b4cf9f85ffcef34ae5"}, + {file = "ruff-0.15.1-py3-none-linux_armv6l.whl", hash = "sha256:b101ed7cf4615bda6ffe65bdb59f964e9f4a0d3f85cbf0e54f0ab76d7b90228a"}, + {file = "ruff-0.15.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:939c995e9277e63ea632cc8d3fae17aa758526f49a9a850d2e7e758bfef46602"}, + {file = "ruff-0.15.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1d83466455fdefe60b8d9c8df81d3c1bbb2115cede53549d3b522ce2bc703899"}, + {file = "ruff-0.15.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9457e3c3291024866222b96108ab2d8265b477e5b1534c7ddb1810904858d16"}, + {file = "ruff-0.15.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:92c92b003e9d4f7fbd33b1867bb15a1b785b1735069108dfc23821ba045b29bc"}, + {file = "ruff-0.15.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fe5c41ab43e3a06778844c586251eb5a510f67125427625f9eb2b9526535779"}, + {file = "ruff-0.15.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:66a6dd6df4d80dc382c6484f8ce1bcceb55c32e9f27a8b94c32f6c7331bf14fb"}, + {file = "ruff-0.15.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a4a42cbb8af0bda9bcd7606b064d7c0bc311a88d141d02f78920be6acb5aa83"}, + {file = "ruff-0.15.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ab064052c31dddada35079901592dfba2e05f5b1e43af3954aafcbc1096a5b2"}, + {file = "ruff-0.15.1-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:5631c940fe9fe91f817a4c2ea4e81f47bee3ca4aa646134a24374f3c19ad9454"}, + {file = "ruff-0.15.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:68138a4ba184b4691ccdc39f7795c66b3c68160c586519e7e8444cf5a53e1b4c"}, + {file = "ruff-0.15.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:518f9af03bfc33c03bdb4cb63fabc935341bb7f54af500f92ac309ecfbba6330"}, + {file = "ruff-0.15.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:da79f4d6a826caaea95de0237a67e33b81e6ec2e25fc7e1993a4015dffca7c61"}, + {file = "ruff-0.15.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3dd86dccb83cd7d4dcfac303ffc277e6048600dfc22e38158afa208e8bf94a1f"}, + {file = "ruff-0.15.1-py3-none-win32.whl", hash = "sha256:660975d9cb49b5d5278b12b03bb9951d554543a90b74ed5d366b20e2c57c2098"}, + {file = "ruff-0.15.1-py3-none-win_amd64.whl", hash = "sha256:c820fef9dd5d4172a6570e5721704a96c6679b80cf7be41659ed439653f62336"}, + {file = "ruff-0.15.1-py3-none-win_arm64.whl", hash = "sha256:5ff7d5f0f88567850f45081fac8f4ec212be8d0b963e385c3f7d0d2eb4899416"}, + {file = "ruff-0.15.1.tar.gz", hash = "sha256:c590fe13fb57c97141ae975c03a1aedb3d3156030cabd740d6ff0b0d601e203f"}, ] [[package]] @@ -973,31 +1093,31 @@ files = [ [[package]] name = "typer" -version = "0.20.0" -requires_python = ">=3.8" +version = "0.24.0" +requires_python = ">=3.10" summary = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
dependencies = [ - "click>=8.0.0", - "rich>=10.11.0", + "annotated-doc>=0.0.2", + "click>=8.2.1", + "rich>=12.3.0", "shellingham>=1.3.0", - "typing-extensions>=3.7.4.3", ] files = [ - {file = "typer-0.20.0-py3-none-any.whl", hash = "sha256:5b463df6793ec1dca6213a3cf4c0f03bc6e322ac5e16e13ddd622a889489784a"}, - {file = "typer-0.20.0.tar.gz", hash = "sha256:1aaf6494031793e4876fb0bacfa6a912b551cf43c1e63c800df8b1a866720c37"}, + {file = "typer-0.24.0-py3-none-any.whl", hash = "sha256:5fc435a9c8356f6160ed6e85a6301fdd6e3d8b2851da502050d1f92c5e9eddc8"}, + {file = "typer-0.24.0.tar.gz", hash = "sha256:f9373dc4eff901350694f519f783c29b6d7a110fc0dcc11b1d7e353b85ca6504"}, ] [[package]] name = "types-requests" -version = "2.32.4.20250913" +version = "2.32.4.20260107" requires_python = ">=3.9" summary = "Typing stubs for requests" dependencies = [ "urllib3>=2", ] files = [ - {file = "types_requests-2.32.4.20250913-py3-none-any.whl", hash = "sha256:78c9c1fffebbe0fa487a418e0fa5252017e9c60d1a2da394077f1780f655d7e1"}, - {file = "types_requests-2.32.4.20250913.tar.gz", hash = "sha256:abd6d4f9ce3a9383f269775a9835a4c24e5cd6b9f647d64f88aa4613c33def5d"}, + {file = "types_requests-2.32.4.20260107-py3-none-any.whl", hash = "sha256:b703fe72f8ce5b31ef031264fe9395cac8f46a04661a79f7ed31a80fb308730d"}, + {file = "types_requests-2.32.4.20260107.tar.gz", hash = "sha256:018a11ac158f801bfa84857ddec1650750e393df8a004a8a9ae2a9bec6fcb24f"}, ] [[package]] @@ -1061,7 +1181,7 @@ files = [ [[package]] name = "zensical" -version = "0.0.10" +version = "0.0.23" requires_python = ">=3.10" summary = "A modern static site generator built by the creators of Material for MkDocs" dependencies = [ @@ -1074,17 +1194,17 @@ dependencies = [ "tomli>=2.0; python_full_version < \"3.11\"", ] files = [ - {file = "zensical-0.0.10-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:d371c9ff57cb2dc8d3b7ec630d4cf0ffa4cba3b414619c4975eb801a032105c0"}, - {file = "zensical-0.0.10-cp310-abi3-macosx_11_0_arm64.whl", hash = 
"sha256:5bd06c9a1fb536e23cf6f5acb83af492a0aaf457708a81589731a33ab1695a1e"}, - {file = "zensical-0.0.10-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd6c4447aaa58e2545c3b7a5d39fa1f5a0e9062f1a2d380c3c21aac0a8979b5a"}, - {file = "zensical-0.0.10-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95a91f13920975fc9744f052c028800e0b0553f52c9417bef8a6e16864459bd8"}, - {file = "zensical-0.0.10-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a22908517458428bd91adfbcf4813c7b63011cd1b6761956f81255993d903ae7"}, - {file = "zensical-0.0.10-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a482443e7aded812c3e72250f2ba8b4e77f09a626c1c9892766168dd2acf8dfd"}, - {file = "zensical-0.0.10-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:679d32bceac749a79ef11c08e5c0eb7c3e8e73f4c35732a2f6bcaa3ffaa61a70"}, - {file = "zensical-0.0.10-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:db42271a085bf8efe2d6dc9f97325e866fcfa2fafe7e079c5bc0f0e8407275fa"}, - {file = "zensical-0.0.10-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:11e9acdfd6622dc43862c99e4669ebe56f4e7ab2cefe73a74005549a09ac679e"}, - {file = "zensical-0.0.10-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3fc7e47efbd90f476276492049ade0587f32f95216062ed4ecc02857f2cbca39"}, - {file = "zensical-0.0.10-cp310-abi3-win32.whl", hash = "sha256:feaab818c28170a8b542f729c2941c2a766d7026d24bcf13b403f843250fa15b"}, - {file = "zensical-0.0.10-cp310-abi3-win_amd64.whl", hash = "sha256:64f208a3168eb616985680e84a76ca2dbf0064508c4ce8655f7fd81db10636aa"}, - {file = "zensical-0.0.10.tar.gz", hash = "sha256:9a9b60a3757fb0f4cf2ec33844bf29141f02223e99440698cfb6911bb1f49956"}, + {file = "zensical-0.0.23-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:35d6d3eb803fe73a67187a1a25443408bd02a8dd50e151f4a4bafd40de3f0928"}, + {file = "zensical-0.0.23-cp310-abi3-macosx_11_0_arm64.whl", hash = 
"sha256:5973267460a190f348f24d445ff0c01e8ed334fd075947687b305e68257f6b18"}, + {file = "zensical-0.0.23-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:953adf1f0b346a6c65fc6e05e6cc1c38a6440fec29c50c76fb29700cc1927006"}, + {file = "zensical-0.0.23-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:49c1cbd6131dafa056be828e081759184f9b8dd24b99bf38d1e77c8c31b0c720"}, + {file = "zensical-0.0.23-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5b7fe22c5d33b2b91899c5df7631ad4ce9cccfabac2560cc92ba73eafe2d297"}, + {file = "zensical-0.0.23-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a3679d6bf6374f503afb74d9f6061da5de83c25922f618042b63a30b16f0389"}, + {file = "zensical-0.0.23-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:54d981e21a19c3dcec6e7fa77c4421db47389dfdff20d29fea70df8e1be4062e"}, + {file = "zensical-0.0.23-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:afde7865cc3c79c99f6df4a911d638fb2c3b472a1b81367d47163f8e3c36f910"}, + {file = "zensical-0.0.23-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:c484674d7b0a3e6d39db83914db932249bccdef2efaf8a5669671c66c16f584d"}, + {file = "zensical-0.0.23-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:927d12fe2851f355fb3206809e04641d6651bdd2ff4afe9c205721aa3a32aa82"}, + {file = "zensical-0.0.23-cp310-abi3-win32.whl", hash = "sha256:ffb79db4244324e9cc063d16adff25a40b145153e5e76d75e0012ba3c05af25d"}, + {file = "zensical-0.0.23-cp310-abi3-win_amd64.whl", hash = "sha256:a8cfe240dca75231e8e525985366d010d09ee73aec0937930e88f7230694ce01"}, + {file = "zensical-0.0.23.tar.gz", hash = "sha256:5c4fc3aaf075df99d8cf41b9f2566e4d588180d9a89493014d3607dfe50ac4bc"}, ] diff --git a/pyproject.toml b/pyproject.toml index c9e50b92..b55e1db1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,20 +12,33 @@ dynamic = ["version"] requires-python = ">=3.14" dependencies = [ - "typer>=0.20.0", + "typer>=0.24.0", "pydantic>=2.12.5", 
- "packaging>=25.0", + "packaging>=26.0", "requests>=2.32.5", - "types-requests>=2.32.4.20250913", + "types-requests>=2.32.4.20260107", "scikit-build-core>=0.11.6", + "meson-python>=0.19.0", ] [project.optional-dependencies] -pytest = ["pytest>=9.0.1", "pytest-mock>=3.15.1"] -git = ["dulwich>=0.24.10"] -pdm = ["pdm>=2.26.2"] -cmake = ["cmake>=4.2.0"] -conan = ["conan>=2.23.0"] +pytest = [ + "pytest>=9.0.2", + "pytest-mock>=3.15.1", +] +git = [ + "dulwich>=1.1.0", +] +pdm = [ + "pdm>=2.26.6", +] +cmake = [ + "cmake>=4.2.1", +] +meson = ["meson>=1.10.1"] +conan = [ + "conan>=2.25.2", +] [project.urls] homepage = "https://github.com/Synodic-Software/CPPython" @@ -36,6 +49,7 @@ git = "cppython.plugins.git.plugin:GitSCM" [project.entry-points."cppython.generator"] cmake = "cppython.plugins.cmake.plugin:CMakeGenerator" +meson = "cppython.plugins.meson.plugin:MesonGenerator" [project.entry-points."cppython.provider"] conan = "cppython.plugins.conan.plugin:ConanProvider" @@ -48,9 +62,19 @@ cppython = "cppython.plugins.pdm.plugin:CPPythonPlugin" cppython = "cppython.test.pytest.fixtures" [dependency-groups] -lint = ["ruff>=0.14.7", "pyrefly>=0.43.1"] -test = ["pytest>=9.0.1", "pytest-cov>=7.0.0", "pytest-mock>=3.15.1", "nanobind>=2.4.0"] -docs = ["zensical>=0.0.10"] +lint = [ + "ruff>=0.15.1", + "pyrefly>=0.53.0", +] +test = [ + "pytest>=9.0.2", + "pytest-cov>=7.0.0", + "pytest-mock>=3.15.1", + "nanobind>=2.11.0", +] +docs = [ + "zensical>=0.0.23", +] [project.scripts] cppython = "cppython.console.entry:app" @@ -80,6 +104,9 @@ select = [ [tool.ruff.lint.pydocstyle] convention = "google" +[tool.ruff.lint.per-file-ignores] +"cppython/console/entry.py" = ["PT"] # CLI commands, not pytest tests + [tool.ruff.format] docstring-code-format = true indent-style = "space" diff --git a/tests/fixtures/meson.py b/tests/fixtures/meson.py new file mode 100644 index 00000000..388b4807 --- /dev/null +++ b/tests/fixtures/meson.py @@ -0,0 +1,40 @@ +"""Fixtures for the meson plugin""" + +from 
pathlib import Path +from typing import cast + +import pytest + +from cppython.plugins.meson.schema import MesonConfiguration + + +def _meson_data_list() -> list[MesonConfiguration]: + """Creates a list of mocked configuration types. + + Returns: + A list of variants to test + """ + # Default + default = MesonConfiguration() + + # Non-root build file + config = MesonConfiguration(build_file=Path('subdir/meson.build'), build_directory='custom-builddir') + + return [default, config] + + +@pytest.fixture( + name='meson_data', + scope='session', + params=_meson_data_list(), +) +def fixture_meson_data(request: pytest.FixtureRequest) -> MesonConfiguration: + """A fixture to provide a list of configuration types. + + Args: + request: Parameterization list + + Returns: + A configuration type instance + """ + return cast(MesonConfiguration, request.param) diff --git a/tests/integration/examples/test_conan_cmake.py b/tests/integration/examples/test_conan_cmake.py index 8c932c89..99ac4052 100644 --- a/tests/integration/examples/test_conan_cmake.py +++ b/tests/integration/examples/test_conan_cmake.py @@ -91,6 +91,28 @@ def _ensure_conan_config(pyproject_data: dict) -> None: if 'conan' not in pyproject_data['tool']['cppython']['providers']: pyproject_data['tool']['cppython']['providers']['conan'] = {} + @staticmethod + def _verify_conan_package_configs(package_name: str, expected_build_types: list[str]) -> None: + """Verify that specified build types exist in the Conan local cache. 
+ + Args: + package_name: Name of the package to check (e.g., 'mathutils') + expected_build_types: List of build types that should exist (e.g., ['Release', 'Debug']) + """ + result = subprocess.run( + ['conan', 'list', f'{package_name}/*:*'], + capture_output=True, + text=True, + check=False, + ) + assert result.returncode == 0, f'Failed to list Conan packages: {result.stderr}' + + output = result.stdout + for build_type in expected_build_types: + assert f'build_type: {build_type}' in output, ( + f'{build_type} configuration not found in Conan cache for {package_name}. Output: {output}' + ) + @staticmethod def test_simple(example_runner: CliRunner) -> None: """Simple project""" @@ -153,12 +175,20 @@ def test_library(example_runner: CliRunner) -> None: publish_project = TestConanCMake._create_project(skip_upload=True) publish_project.publish() + # Verify both Debug and Release configurations were published and consumed successfully + # conan create already runs test_package for each build type, verifying consumption works + TestConanCMake._verify_conan_package_configs('mathutils', ['Release', 'Debug']) + @staticmethod def test_extension(example_runner: CliRunner) -> None: """Test Python extension module built with cppython.build backend and scikit-build-core""" # This test uses the cppython.build backend which wraps scikit-build-core # The build backend automatically runs CPPython's provider workflow + # Install C++ dependencies first (creates generators/ with conan_toolchain.cmake) + project = TestConanCMake._create_project() + project.install() + # Create dist directory for the wheel dist_path = Path('dist') dist_path.mkdir(exist_ok=True) diff --git a/tests/unit/plugins/meson/__init__.py b/tests/unit/plugins/meson/__init__.py new file mode 100644 index 00000000..928f71b3 --- /dev/null +++ b/tests/unit/plugins/meson/__init__.py @@ -0,0 +1,7 @@ +"""Unit tests for the Meson generator plugin. 
+ +This module contains unit tests for the Meson generator plugin, ensuring that +the plugin behaves as expected under various conditions. The tests cover +different aspects of the plugin's functionality, including native/cross file +writing, data synchronization, and feature extraction. +""" diff --git a/tests/unit/plugins/meson/test_generator.py b/tests/unit/plugins/meson/test_generator.py new file mode 100644 index 00000000..cca974b7 --- /dev/null +++ b/tests/unit/plugins/meson/test_generator.py @@ -0,0 +1,40 @@ +"""Unit test the Meson generator plugin""" + +from typing import Any + +import pytest + +from cppython.plugins.meson.plugin import MesonGenerator +from cppython.plugins.meson.schema import ( + MesonConfiguration, +) +from cppython.test.pytest.contracts import GeneratorUnitTestContract + +pytest_plugins = ['tests.fixtures.meson'] + + +class TestMesonGenerator(GeneratorUnitTestContract[MesonGenerator]): + """The tests for the Meson generator""" + + @staticmethod + @pytest.fixture(name='plugin_data', scope='session') + def fixture_plugin_data(meson_data: MesonConfiguration) -> dict[str, Any]: + """A required testing hook that allows data generation. + + Args: + meson_data: The input data + + Returns: + The constructed plugin data + """ + return meson_data.model_dump() + + @staticmethod + @pytest.fixture(name='plugin_type', scope='session') + def fixture_plugin_type() -> type[MesonGenerator]: + """A required testing hook that allows type generation. 
+ + Returns: + The type of the Generator + """ + return MesonGenerator diff --git a/tests/unit/plugins/meson/test_schema.py b/tests/unit/plugins/meson/test_schema.py new file mode 100644 index 00000000..494ade26 --- /dev/null +++ b/tests/unit/plugins/meson/test_schema.py @@ -0,0 +1,82 @@ +"""Tests for the Meson schema""" + +from pathlib import Path + +from cppython.plugins.meson.schema import MesonConfiguration, MesonData, MesonSyncData +from cppython.utility.utility import TypeName + + +class TestMesonSyncData: + """Tests for the MesonSyncData class""" + + @staticmethod + def test_default() -> None: + """Tests MesonSyncData with default values.""" + data = MesonSyncData(provider_name=TypeName('test')) + assert data.native_file is None + assert data.cross_file is None + + @staticmethod + def test_native_file() -> None: + """Tests MesonSyncData with a native file.""" + data = MesonSyncData(provider_name=TypeName('conan'), native_file=Path('/path/to/native.ini')) + assert data.native_file == Path('/path/to/native.ini') + assert data.cross_file is None + + @staticmethod + def test_cross_file() -> None: + """Tests MesonSyncData with a cross file.""" + data = MesonSyncData(provider_name=TypeName('conan'), cross_file=Path('/path/to/cross.ini')) + assert data.native_file is None + assert data.cross_file == Path('/path/to/cross.ini') + + @staticmethod + def test_both_files() -> None: + """Tests MesonSyncData with both native and cross files.""" + data = MesonSyncData( + provider_name=TypeName('conan'), + native_file=Path('/path/to/native.ini'), + cross_file=Path('/path/to/cross.ini'), + ) + assert data.native_file == Path('/path/to/native.ini') + assert data.cross_file == Path('/path/to/cross.ini') + + +class TestMesonConfiguration: + """Tests for the MesonConfiguration class""" + + @staticmethod + def test_defaults() -> None: + """Tests MesonConfiguration with default values.""" + config = MesonConfiguration() + assert config.build_file == Path('meson.build') + assert 
config.build_directory == 'builddir' + assert config.meson_binary is None + + @staticmethod + def test_custom_values() -> None: + """Tests MesonConfiguration with custom values.""" + config = MesonConfiguration( + build_file=Path('subdir/meson.build'), + build_directory='custom-build', + meson_binary=Path('/usr/bin/meson'), + ) + assert config.build_file == Path('subdir/meson.build') + assert config.build_directory == 'custom-build' + assert config.meson_binary == Path('/usr/bin/meson') + + +class TestMesonData: + """Tests for the MesonData class""" + + @staticmethod + def test_construction() -> None: + """Tests MesonData construction.""" + data = MesonData( + build_file=Path('/project/meson.build'), + build_directory='builddir', + meson_binary=None, + ) + assert data.build_file == Path('/project/meson.build') + assert data.build_directory == 'builddir' + assert data.meson_binary is None diff --git a/tests/unit/test_project.py b/tests/unit/test_project.py index eff1981f..7c514fdf 100644 --- a/tests/unit/test_project.py +++ b/tests/unit/test_project.py @@ -4,6 +4,7 @@ import tomllib from importlib import metadata from pathlib import Path +from typing import Any import pytest from pytest_mock import MockerFixture @@ -20,6 +21,7 @@ from cppython.test.mock.interface import MockInterface from cppython.test.mock.provider import MockProvider from cppython.test.mock.scm import MockSCM +from cppython.utility.exception import InstallationVerificationError pep621 = PEP621Configuration(name='test-project', version='0.1.0') @@ -45,6 +47,31 @@ def test_self_construction(request: pytest.FixtureRequest) -> None: # Doesn't have the cppython table assert not project.enabled + @staticmethod + def test_missing_tool_table_raw_dict(tmp_path: Path, caplog: pytest.LogCaptureFixture) -> None: + """Constructing Project with a raw dict lacking tool.cppython should produce zero log output. 
+ + This simulates input from a host tool like PDM that passes raw pyproject data + rather than a model_dump() result. + + Args: + tmp_path: Temporary directory for dummy data + caplog: Pytest fixture for capturing logs + """ + project_configuration = ProjectConfiguration(project_root=tmp_path, version=None) + interface = MockInterface() + + # Raw dict as PDM would provide — no tool table at all + raw_data: dict[str, Any] = {'project': {'name': 'some-other-project', 'version': '1.0.0'}} + + with caplog.at_level(logging.DEBUG): + project = Project(project_configuration, interface, raw_data) + + # Absolutely no log output for projects without CPPython configuration + assert len(caplog.records) == 0 + + assert not project.enabled + @staticmethod def test_missing_tool_table(tmp_path: Path, caplog: pytest.LogCaptureFixture) -> None: """The project type should be constructable without the tool table @@ -134,3 +161,98 @@ def test_default_cppython_table(tmp_path: Path, mocker: MockerFixture, caplog: p assert len(caplog.records) == 0 assert project.enabled + + +class TestPrepareBuild: + """Tests for Project.prepare_build()""" + + @staticmethod + def _create_enabled_project(tmp_path: Path, mocker: MockerFixture) -> Project: + """Helper to create an enabled project with mock plugins. 
+ + Args: + tmp_path: Temporary directory + mocker: Pytest mocker fixture + + Returns: + An enabled Project instance + """ + mocker.patch( + 'cppython.builder.entry_points', + return_value=[metadata.EntryPoint(name='mock', value='mock', group='mock')], + ) + mocker.patch.object(metadata.EntryPoint, 'load', side_effect=[MockGenerator, MockProvider, MockSCM]) + + file_path = tmp_path / 'pyproject.toml' + with open(file_path, 'a', encoding='utf8'): + pass + + project_configuration = ProjectConfiguration(project_root=file_path.parent, version=None) + interface = MockInterface() + + cppython_config = CPPythonLocalConfiguration() + tool_data = ToolData(cppython=cppython_config) + pyproject = PyProject(project=pep621, tool=tool_data) + + return Project(project_configuration, interface, pyproject.model_dump(by_alias=True)) + + def test_prepare_build_calls_sync_and_verify(self, tmp_path: Path, mocker: MockerFixture) -> None: + """prepare_build() should call sync and verify_installed, not install. + + Args: + tmp_path: Temporary directory + mocker: Pytest mocker fixture + """ + project = self._create_enabled_project(tmp_path, mocker) + assert project.enabled + + # Spy on the key methods + sync_spy = mocker.patch.object(project._data, 'sync') # noqa: SLF001 + verify_spy = mocker.patch.object(project._data.plugins.provider, 'verify_installed') # noqa: SLF001 + install_spy = mocker.patch.object(project._data.plugins.provider, 'install') # noqa: SLF001 + + project.prepare_build() + + sync_spy.assert_called_once() + verify_spy.assert_called_once() + install_spy.assert_not_called() + + def test_prepare_build_returns_none_when_disabled(self, tmp_path: Path) -> None: + """prepare_build() should return None for a disabled project. 
+ + Args: + tmp_path: Temporary directory + """ + file_path = tmp_path / 'pyproject.toml' + with open(file_path, 'a', encoding='utf8'): + pass + + project_configuration = ProjectConfiguration(project_root=file_path.parent, version=None) + interface = MockInterface() + + pyproject = PyProject(project=pep621) + project = Project(project_configuration, interface, pyproject.model_dump(by_alias=True)) + + assert not project.enabled + assert project.prepare_build() is None + + def test_prepare_build_raises_on_missing_artifacts(self, tmp_path: Path, mocker: MockerFixture) -> None: + """prepare_build() should propagate InstallationVerificationError. + + Args: + tmp_path: Temporary directory + mocker: Pytest mocker fixture + """ + project = self._create_enabled_project(tmp_path, mocker) + assert project.enabled + + # Make verify_installed raise + mocker.patch.object(project._data, 'sync') # noqa: SLF001 + mocker.patch.object( + project._data.plugins.provider, # noqa: SLF001 + 'verify_installed', + side_effect=InstallationVerificationError('mock', ['test artifact']), + ) + + with pytest.raises(InstallationVerificationError, match='mock'): + project.prepare_build()