diff --git a/.github/workflows/build-and-publish.yml b/.github/workflows/build-and-publish.yml
index 906dd98..c93f6b5 100644
--- a/.github/workflows/build-and-publish.yml
+++ b/.github/workflows/build-and-publish.yml
@@ -1,7 +1,14 @@
# THIS WORKFLOW WILL BUILD WHEELS FOR ALL MAJOR PLATFORMS AND UPLOAD THEM TO PYPI
+# TO BUILD AND INSTALL LOCALLY FOR TESTING, RUN THE FOLLOWING COMMAND:
+# pip install "/path/to/PythonLibraryXulbuX" --no-deps --no-cache-dir --force-reinstall --no-build-isolation
+
+# TO CREATE A NEW RELEASE, TAG A COMMIT WITH THE FOLLOWING FORMAT:
# git tag v1.X.Y
# git push origin v1.X.Y
+# IF THE TAG v1.X.Y ALREADY EXISTS, RUN THE FOLLOWING COMMANDS FIRST:
+# git tag -d v1.X.Y
+# git push origin :refs/tags/v1.X.Y
name: Build and Publish
permissions:
@@ -29,6 +36,19 @@ jobs:
env:
CIBW_BUILD: cp310-* cp311-* cp312-* cp313-* cp314-*
CIBW_SKIP: "*-musllinux_*"
+ CIBW_BEFORE_BUILD: pip install setuptools>=80.0.0 wheel>=0.45.0 mypy>=1.19.0 mypy-extensions>=1.1.0 types-regex types-keyboard prompt_toolkit>=3.0.41
+ CIBW_BUILD_FRONTEND: "pip; args: --no-build-isolation"
+ CIBW_ENVIRONMENT: XULBUX_USE_MYPYC=1
+
+ - name: Verify wheels were built
+ run: |
+ ls -la ./wheelhouse/
+ if [ -z "$(ls -A ./wheelhouse/)" ]; then
+ echo "[ERROR] No wheels were built!"
+ exit 1
+ fi
+ echo "[SUCCESS] Built $(ls ./wheelhouse/*.whl | wc -l) wheels."
+ shell: bash
- uses: actions/upload-artifact@v6
with:
@@ -50,6 +70,7 @@ jobs:
path: dist/*.tar.gz
upload_pypi:
+ name: Upload to PyPI
needs: [build_wheels, build_sdist]
runs-on: ubuntu-latest
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v')
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a7d058a..1418d3e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -15,6 +15,25 @@
#
Changelog
+
+
+## 06.01.2026 `v1.9.4`
+
+* Added a new base module `base.decorators` which contains custom decorators used throughout the library.
+* Made `mypy_extensions` an optional dependency by wrapping all uses of `mypy_extensions.mypyc_attr` in a custom decorator that acts as a no-op if `mypy_extensions` is not installed.
+* The `env_path` methods that modify the PATH environment variable no longer sort all paths alphabetically; they keep the original order so the user's intended PATH ordering is preserved.
+* Added a new TypeAlias `PathsList` to the `base.types` module, which matches a list of paths as strings or `pathlib.Path` objects.
+
+**BREAKING CHANGES:**
+* Renamed the module `path` to `file_sys` and its main class `Path` to `FileSys`, so it can be used alongside the built-in `pathlib.Path` class without always needing to import one of them under an alias.
+* Renamed most `FileSys` methods to better describe their functionality:
+ - `Path.extend()` is now `FileSys.extend_path()`
+ - `Path.extend_or_make()` is now `FileSys.extend_or_make_path()`
+* Renamed the param `use_closest_match` in `FileSys.extend_path()` and `FileSys.extend_or_make_path()` to `fuzzy_match`, since that name is more commonly used for that functionality.
+* Updated all library methods that work with paths to accept `pathlib.Path` objects in addition to strings as path inputs.
+* All library methods that return paths now return `pathlib.Path` objects instead of strings (see the sketch below).
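+
+A minimal before/after sketch of the rename and the new `pathlib.Path` return type (the file path is hypothetical; the method and parameter names are as listed above):
+
+```python
+# v1.9.3 (old API)
+from xulbux import Path
+config = Path.extend_or_make("config/settings.json", use_closest_match=True)   # returned a str
+
+# v1.9.4 (new API)
+from xulbux import FileSys
+config = FileSys.extend_or_make_path("config/settings.json", fuzzy_match=True)  # returns a pathlib.Path
+```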
+
+
## 01.01.2026 `v1.9.3` Big Update 🚀
diff --git a/README.md b/README.md
index 67b5500..adf497f 100644
--- a/README.md
+++ b/README.md
@@ -121,6 +121,10 @@ from xulbux.color import rgba, hsla, hexa
 |
Regex class, which includes methods to dynamically generate complex regex patterns
diff --git a/pyproject.toml b/pyproject.toml
index df5e6d3..cd43574 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,4 +1,5 @@
[build-system]
+# THE SAME BUILD DEPENDENCIES MUST ALSO BE SPECIFIED IN CIBW_BEFORE_BUILD IN .github/workflows/build-and-publish.yml
requires = [
"setuptools>=80.0.0",
"wheel>=0.45.0",
@@ -13,26 +14,24 @@ build-backend = "setuptools.build_meta"
[project]
name = "xulbux"
-version = "1.9.3"
-authors = [{ name = "XulbuX", email = "xulbux.real@gmail.com" }]
-maintainers = [{ name = "XulbuX", email = "xulbux.real@gmail.com" }]
+version = "1.9.4"
description = "A Python library to simplify common programming tasks."
readme = "README.md"
+authors = [{ name = "XulbuX", email = "xulbux.real@gmail.com" }]
+maintainers = [{ name = "XulbuX", email = "xulbux.real@gmail.com" }]
license = "MIT"
license-files = ["LICENSE"]
requires-python = ">=3.10.0"
dependencies = [
"keyboard>=0.13.5",
- "mypy-extensions>=1.1.0",
"prompt_toolkit>=3.0.41",
"regex>=2023.10.3",
]
optional-dependencies = { dev = [
- "black>=23.7.0",
"flake8-pyproject>=1.2.3",
"flake8>=6.1.0",
- "isort>=5.12.0",
"pytest>=7.4.2",
+ "toml>=0.10.2",
] }
classifiers = [
"Development Status :: 5 - Production/Stable",
@@ -118,33 +117,6 @@ keywords = [
[project.scripts]
xulbux-help = "xulbux.cli.help:show_help"
-[tool.black]
-line-length = 127
-target-version = ['py310', 'py311', 'py312', 'py313', 'py314']
-include = '\.pyi?$'
-extend-exclude = '''
-/(
- # directories
- \.eggs
- | \.git
- | \.hg
- | \.mypy_cache
- | \.tox
- | \.venv
- | build
- | dist
-)/
-'''
-
-[tool.isort]
-profile = "black"
-line_length = 127
-multi_line_output = 3
-include_trailing_comma = true
-force_grid_wrap = 0
-use_parentheses = true
-ensure_newline_before_comments = true
-
[tool.flake8]
max-complexity = 12
max-line-length = 127
@@ -169,12 +141,12 @@ testpaths = [
"tests/test_console.py",
"tests/test_data.py",
"tests/test_env_path.py",
+ "tests/test_file_sys.py",
"tests/test_file.py",
"tests/test_format_codes.py",
"tests/test_json.py",
- "tests/test_path.py",
+ "tests/test_metadata_consistency.py",
"tests/test_regex.py",
"tests/test_string.py",
"tests/test_system.py",
- "tests/test_version_consistency.py",
]
diff --git a/setup.py b/setup.py
index 34d21a1..5761a66 100644
--- a/setup.py
+++ b/setup.py
@@ -1,20 +1,32 @@
-from mypyc.build import mypycify
from setuptools import setup
+from pathlib import Path
import os
def find_python_files(directory: str) -> list[str]:
python_files: list[str] = []
- for root, _, files in os.walk(directory):
- for file in files:
- if file.endswith(".py"):
- python_files.append(os.path.join(root, file))
+ for file in Path(directory).rglob("*.py"):
+ python_files.append(str(file))
return python_files
-source_files = find_python_files("src/xulbux")
+# OPTIONALLY USE MYPYC COMPILATION
+ext_modules = []
+if os.environ.get("XULBUX_USE_MYPYC", "1") == "1":
+ try:
+ from mypyc.build import mypycify
+ print("\nCompiling with mypyc...\n")
+ source_files = find_python_files("src/xulbux")
+ ext_modules = mypycify(source_files)
+
+    except Exception as e:  # INCLUDES ImportError WHEN mypyc IS NOT INSTALLED
+ fmt_error = "\n ".join(str(e).splitlines())
+ print(
+ f"\n[WARNING] mypyc compilation disabled (not available or failed):\n {fmt_error}\n"
+ "\nInstalling as pure Python package...\n"
+ )
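+
+# NOTE: TO FORCE A PURE-PYTHON BUILD LOCALLY, SET XULBUX_USE_MYPYC TO ANYTHING OTHER THAN "1",
+# E.G. (ILLUSTRATIVE, POSIX SHELL SYNTAX): XULBUX_USE_MYPYC=0 pip install . --no-build-isolation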
setup(
name="xulbux",
- ext_modules=mypycify(source_files),
+ ext_modules=ext_modules,
)
diff --git a/src/xulbux/__init__.py b/src/xulbux/__init__.py
index e40a2f3..2fd5608 100644
--- a/src/xulbux/__init__.py
+++ b/src/xulbux/__init__.py
@@ -1,11 +1,21 @@
-__version__ = "1.9.3"
+__package_name__ = "xulbux"
+__version__ = "1.9.4"
+__description__ = "A Python library to simplify common programming tasks."
+__status__ = "Production/Stable"
+
+__url__ = "https://github.com/XulbuX/PythonLibraryXulbuX"
__author__ = "XulbuX"
__email__ = "xulbux.real@gmail.com"
__license__ = "MIT"
__copyright__ = "Copyright (c) 2024 XulbuX"
-__url__ = "https://github.com/XulbuX/PythonLibraryXulbuX"
-__description__ = "A Python library to simplify common programming tasks."
+
+__requires_python__ = ">=3.10.0"
+__dependencies__ = [
+ "keyboard>=0.13.5",
+ "prompt_toolkit>=3.0.41",
+ "regex>=2023.10.3",
+]
__all__ = [
"Code",
@@ -14,9 +24,9 @@
"Data",
"EnvPath",
"File",
+ "FileSys",
"FormatCodes",
"Json",
- "Path",
"Regex",
"String",
"System",
@@ -28,9 +38,9 @@
from .data import Data
from .env_path import EnvPath
from .file import File
+from .file_sys import FileSys
from .format_codes import FormatCodes
from .json import Json
-from .path import Path
from .regex import Regex
from .string import String
from .system import System
diff --git a/src/xulbux/base/decorators.py b/src/xulbux/base/decorators.py
new file mode 100644
index 0000000..7d3d092
--- /dev/null
+++ b/src/xulbux/base/decorators.py
@@ -0,0 +1,28 @@
+"""
+This module contains custom decorators used throughout the library.
+"""
+
+from typing import Callable, TypeVar, Any
+
+
+T = TypeVar("T")
+
+
+def _noop_decorator(obj: T) -> T:
+ """No-op decorator that returns the object unchanged."""
+ return obj
+
+
+def mypyc_attr(**kwargs: Any) -> Callable[[T], T]:
+ """A custom decorator that wraps `mypy_extensions.mypyc_attr` when available,
+ or acts as a no-op decorator when `mypy_extensions` is not installed.\n
+ This allows the use of `mypyc` compilation hints for compiling without making
+ `mypy_extensions` a required dependency.\n
+ -----------------------------------------------------------------------------------------
+ - `**kwargs` -⠀keyword arguments to pass to `mypy_extensions.mypyc_attr` if available"""
+ try:
+ from mypy_extensions import mypyc_attr as _mypyc_attr
+ return _mypyc_attr(**kwargs)
+ except ImportError:
+ # IF 'mypy_extensions' IS NOT INSTALLED, JUST RETURN A NO-OP DECORATOR
+ return _noop_decorator
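+
+
+# EXAMPLE USAGE (ILLUSTRATIVE; MIRRORS HOW THE DECORATOR IS APPLIED IN 'file_sys.py'):
+#
+#   @mypyc_attr(native_class=False)
+#   class _SomeMeta(type):
+#       ...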
diff --git a/src/xulbux/base/exceptions.py b/src/xulbux/base/exceptions.py
index c403253..352c929 100644
--- a/src/xulbux/base/exceptions.py
+++ b/src/xulbux/base/exceptions.py
@@ -2,7 +2,7 @@
This module contains all custom exception classes used throughout the library.
"""
-from mypy_extensions import mypyc_attr
+from .decorators import mypyc_attr
#
################################################## FILE ##################################################
diff --git a/src/xulbux/base/types.py b/src/xulbux/base/types.py
index 9ad98e1..e61a84f 100644
--- a/src/xulbux/base/types.py
+++ b/src/xulbux/base/types.py
@@ -3,6 +3,7 @@
"""
from typing import TYPE_CHECKING, Annotated, TypeAlias, TypedDict, Optional, Protocol, Union, Any
+from pathlib import Path
# PREVENT CIRCULAR IMPORTS
if TYPE_CHECKING:
@@ -26,6 +27,9 @@
#
################################################## TypeAlias ##################################################
+PathsList: TypeAlias = Union[list[Path], list[str], list[Path | str]]
+"""Union of all supported list types for a list of paths."""
+
DataStructure: TypeAlias = Union[list, tuple, set, frozenset, dict]
"""Union of supported data structures used in the `data` module."""
DataStructureTypes = (list, tuple, set, frozenset, dict)
diff --git a/src/xulbux/code.py b/src/xulbux/code.py
index afbdedf..1971176 100644
--- a/src/xulbux/code.py
+++ b/src/xulbux/code.py
@@ -104,7 +104,7 @@ def is_js(cls, code: str, funcs: set[str] = {"__", "$t", "$lang"}) -> bool:
return True
js_score = 0.0
- funcs_pattern = r"(" + "|".join(_rx.escape(f) for f in funcs) + r")" + Regex.brackets("()")
+ funcs_pattern = r"(" + "|".join(_rx.escape(func) for func in funcs) + r")" + Regex.brackets("()")
js_indicators: list[tuple[str, float]] = [
(r"\b(var|let|const)\s+[\w_$]+", 2.0), # JS VARIABLE DECLARATIONS
(r"\$[\w_$]+\s*=", 2.0), # jQuery-STYLE VARIABLES
diff --git a/src/xulbux/console.py b/src/xulbux/console.py
index b4f5d2e..b37c76f 100644
--- a/src/xulbux/console.py
+++ b/src/xulbux/console.py
@@ -4,6 +4,7 @@
"""
from .base.types import ArgConfigWithDefault, ArgResultRegular, ArgResultPositional, ProgressUpdater, AllTextChars, Rgba, Hexa
+from .base.decorators import mypyc_attr
from .base.consts import COLOR, CHARS, ANSI
from .format_codes import _PATTERNS as _FC_PATTERNS, FormatCodes
@@ -16,7 +17,6 @@
from prompt_toolkit.validation import ValidationError, Validator
from prompt_toolkit.styles import Style
from prompt_toolkit.keys import Keys
-from mypy_extensions import mypyc_attr
from contextlib import contextmanager
from io import StringIO
import prompt_toolkit as _pt
diff --git a/src/xulbux/data.py b/src/xulbux/data.py
index 801c3ff..06d1c81 100644
--- a/src/xulbux/data.py
+++ b/src/xulbux/data.py
@@ -318,13 +318,13 @@ def get_value_by_path_id(cls, data: DataStructure, path_id: str, get_key: bool =
elif isinstance(current_data, IndexIterableTypes):
if i == len(path) - 1 and get_key:
if parent is None or not isinstance(parent, dict):
- raise ValueError(f"Cannot get key from a non-dict parent at path '{path[:i+1]}'")
+ raise ValueError(f"Cannot get key from a non-dict parent at path '{path[:i + 1]}'")
return next(key for key, value in parent.items() if value is current_data)
parent = current_data
current_data = list(current_data)[path_idx] # CONVERT TO LIST FOR INDEXING
else:
- raise TypeError(f"Unsupported type '{type(current_data)}' at path '{path[:i+1]}'")
+ raise TypeError(f"Unsupported type '{type(current_data)}' at path '{path[:i + 1]}'")
return current_data
diff --git a/src/xulbux/env_path.py b/src/xulbux/env_path.py
index 9957237..b70d125 100644
--- a/src/xulbux/env_path.py
+++ b/src/xulbux/env_path.py
@@ -3,9 +3,10 @@
methods to work with the PATH environment variable.
"""
-from .path import Path
+from .file_sys import FileSys
-from typing import Optional
+from typing import Optional, cast
+from pathlib import Path
import sys as _sys
import os as _os
@@ -14,76 +15,82 @@ class EnvPath:
"""This class includes methods to work with the PATH environment variable."""
@classmethod
- def paths(cls, as_list: bool = False) -> str | list:
+ def paths(cls, as_list: bool = False) -> Path | list[Path]:
"""Get the PATH environment variable.\n
- ------------------------------------------------------------------------------
- - `as_list` -⠀if true, returns the paths as a list; otherwise, as a string"""
- paths = _os.environ.get("PATH", "")
- return paths.split(_os.pathsep) if as_list else paths
+ ------------------------------------------------------------------------------------------------
+ - `as_list` -⠀if true, returns the paths as a list of `Path`s; otherwise, as a single `Path`"""
+ paths_str = _os.environ.get("PATH", "")
+ if as_list:
+ return [Path(path) for path in paths_str.split(_os.pathsep) if path]
+ return Path(paths_str)
@classmethod
- def has_path(cls, path: Optional[str] = None, cwd: bool = False, base_dir: bool = False) -> bool:
+ def has_path(cls, path: Optional[Path | str] = None, cwd: bool = False, base_dir: bool = False) -> bool:
"""Check if a path is present in the PATH environment variable.\n
------------------------------------------------------------------------
- `path` -⠀the path to check for
- `cwd` -⠀if true, uses the current working directory as the path
- `base_dir` -⠀if true, uses the script's base directory as the path"""
- return _os.path.normpath(cls._get(path, cwd, base_dir)) \
- in {_os.path.normpath(p) for p in cls.paths(as_list=True)}
+ check_path = cls._get(path, cwd, base_dir).resolve()
+ return check_path in {path.resolve() for path in cast(list[Path], cls.paths(as_list=True))}
@classmethod
- def add_path(cls, path: Optional[str] = None, cwd: bool = False, base_dir: bool = False) -> None:
+ def add_path(cls, path: Optional[Path | str] = None, cwd: bool = False, base_dir: bool = False) -> None:
"""Add a path to the PATH environment variable.\n
------------------------------------------------------------------------
- `path` -⠀the path to add
- `cwd` -⠀if true, uses the current working directory as the path
- `base_dir` -⠀if true, uses the script's base directory as the path"""
- if not cls.has_path(path := cls._get(path, cwd, base_dir)):
- cls._persistent(path)
+ path_obj = cls._get(path, cwd, base_dir)
+ if not cls.has_path(path_obj):
+ cls._persistent(path_obj)
@classmethod
- def remove_path(cls, path: Optional[str] = None, cwd: bool = False, base_dir: bool = False) -> None:
+ def remove_path(cls, path: Optional[Path | str] = None, cwd: bool = False, base_dir: bool = False) -> None:
"""Remove a path from the PATH environment variable.\n
------------------------------------------------------------------------
- `path` -⠀the path to remove
- `cwd` -⠀if true, uses the current working directory as the path
- `base_dir` -⠀if true, uses the script's base directory as the path"""
- if cls.has_path(path := cls._get(path, cwd, base_dir)):
- cls._persistent(path, remove=True)
+ path_obj = cls._get(path, cwd, base_dir)
+ if cls.has_path(path_obj):
+ cls._persistent(path_obj, remove=True)
@staticmethod
- def _get(path: Optional[str] = None, cwd: bool = False, base_dir: bool = False) -> str:
+ def _get(path: Optional[Path | str] = None, cwd: bool = False, base_dir: bool = False) -> Path:
"""Internal method to get the normalized `path`, CWD path or script directory path.\n
--------------------------------------------------------------------------------------
Raise an error if no path is provided and neither `cwd` or `base_dir` is true."""
if cwd:
if base_dir:
raise ValueError("Both 'cwd' and 'base_dir' cannot be True at the same time.")
- path = Path.cwd
+ return FileSys.cwd
elif base_dir:
- path = Path.script_dir
+ return FileSys.script_dir
if path is None:
raise ValueError("No path provided.\nPlease provide a 'path' or set either 'cwd' or 'base_dir' to True.")
- return _os.path.normpath(path)
+ return Path(path) if isinstance(path, str) else path
@classmethod
- def _persistent(cls, path: str, remove: bool = False) -> None:
+ def _persistent(cls, path: Path, remove: bool = False) -> None:
"""Internal method to add or remove a path from the PATH environment variable,
persistently, across sessions, as well as the current session."""
- current_paths = list(cls.paths(as_list=True))
- path = _os.path.normpath(path)
+ current_paths = cast(list[Path], cls.paths(as_list=True))
+ path_resolved = path.resolve()
if remove:
- current_paths = [
- path for path in current_paths \
- if _os.path.normpath(path) != _os.path.normpath(path)
- ]
+ # FILTER OUT THE PATH TO REMOVE
+ current_paths = [path for path in current_paths if path.resolve() != path_resolved]
else:
- current_paths.append(path)
+ # ADD THE NEW PATH IF NOT ALREADY PRESENT
+ if path_resolved not in {path.resolve() for path in current_paths}:
+ current_paths = [*current_paths, path_resolved]
- _os.environ["PATH"] = new_path = _os.pathsep.join(sorted(set(filter(bool, current_paths))))
+ # CONVERT TO STRINGS ONLY FOR SETTING THE ENVIRONMENT VARIABLE
+ path_strings = [str(path) for path in current_paths]
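+        # dict.fromkeys() REMOVES DUPLICATES WHILE KEEPING THE ORIGINAL PATH ORDER (UNLIKE sorted(set(...)))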
+ _os.environ["PATH"] = new_path = _os.pathsep.join(dict.fromkeys(filter(bool, path_strings)))
if _sys.platform == "win32": # WINDOWS
try:
@@ -95,20 +102,21 @@ def _persistent(cls, path: str, remove: bool = False) -> None:
raise RuntimeError("Failed to update PATH in registry:\n " + str(e).replace("\n", " \n"))
else: # UNIX-LIKE (LINUX/macOS)
- shell_rc_file = _os.path.expanduser(
- "~/.bashrc" if _os.path.exists(_os.path.expanduser("~/.bashrc")) \
- else "~/.zshrc"
- )
+ home_path = Path.home()
+ bashrc = home_path / ".bashrc"
+ zshrc = home_path / ".zshrc"
+ shell_rc_file = bashrc if bashrc.exists() else zshrc
with open(shell_rc_file, "r+") as file:
content = file.read()
file.seek(0)
if remove:
- new_content = [line for line in content.splitlines() if not line.endswith(f':{path}"')]
+ new_content = [line for line in content.splitlines() if not line.endswith(f':{path_resolved}"')]
file.write("\n".join(new_content))
else:
- file.write(f'{content.rstrip()}\n# Added by XulbuX\nexport PATH="{new_path}"\n')
+ file.write(f"{content.rstrip()}\n# Added by 'xulbux'\n"
+ f'export PATH="{new_path}"\n')
file.truncate()
diff --git a/src/xulbux/file.py b/src/xulbux/file.py
index 98afd92..7621b8d 100644
--- a/src/xulbux/file.py
+++ b/src/xulbux/file.py
@@ -6,7 +6,7 @@
from .base.exceptions import SameContentFileExistsError
from .string import String
-import os as _os
+from pathlib import Path
class File:
@@ -15,11 +15,11 @@ class File:
@classmethod
def rename_extension(
cls,
- file_path: str,
+ file_path: Path | str,
new_extension: str,
full_extension: bool = False,
camel_case_filename: bool = False,
- ) -> str:
+ ) -> Path:
"""Rename the extension of a file.\n
----------------------------------------------------------------------------
- `file_path` -⠀the path to the file whose extension should be changed
@@ -28,8 +28,8 @@ def rename_extension(
or just the last part of it (e.g. `.gz`)
- `camel_case_filename` -⠀whether to convert the filename to CamelCase
in addition to changing the files extension"""
- normalized_file = _os.path.normpath(file_path)
- directory, filename_with_ext = _os.path.split(normalized_file)
+ path = Path(file_path)
+ filename_with_ext = path.name
if full_extension:
try:
@@ -38,17 +38,17 @@ def rename_extension(
except ValueError:
filename = filename_with_ext
else:
- filename, _ = _os.path.splitext(filename_with_ext)
+ filename = path.stem
if camel_case_filename:
filename = String.to_camel_case(filename)
if new_extension and not new_extension.startswith("."):
new_extension = "." + new_extension
- return _os.path.join(directory, f"{filename}{new_extension}")
+ return path.parent / f"{filename}{new_extension}"
@classmethod
- def create(cls, file_path: str, content: str = "", force: bool = False) -> str:
+ def create(cls, file_path: Path | str, content: str = "", force: bool = False) -> Path:
"""Create a file with ot without content.\n
------------------------------------------------------------------
- `file_path` -⠀the path where the file should be created
@@ -59,14 +59,16 @@ def create(cls, file_path: str, content: str = "", force: bool = False) -> str:
The method will throw a `FileExistsError` if a file with the same
name already exists and a `SameContentFileExistsError` if a file
with the same name and same content already exists."""
- if _os.path.exists(file_path) and not force:
- with open(file_path, "r", encoding="utf-8") as existing_file:
+ path = Path(file_path)
+
+ if path.exists() and not force:
+ with open(path, "r", encoding="utf-8") as existing_file:
existing_content = existing_file.read()
if existing_content == content:
raise SameContentFileExistsError("Already created this file. (nothing changed)")
raise FileExistsError("File already exists.")
- with open(file_path, "w", encoding="utf-8") as f:
- f.write(content)
+ with open(path, "w", encoding="utf-8") as file:
+ file.write(content)
- return _os.path.abspath(file_path)
+ return path.resolve()
diff --git a/src/xulbux/file_sys.py b/src/xulbux/file_sys.py
new file mode 100644
index 0000000..f5ef031
--- /dev/null
+++ b/src/xulbux/file_sys.py
@@ -0,0 +1,266 @@
+"""
+This module provides the `FileSys` class, which includes
+methods to work with the file system and directories.
+"""
+
+from .base.types import PathsList
+from .base.exceptions import PathNotFoundError
+from .base.decorators import mypyc_attr
+
+from typing import Optional
+from pathlib import Path
+import tempfile as _tempfile
+import difflib as _difflib
+import shutil as _shutil
+import sys as _sys
+import os as _os
+
+
+@mypyc_attr(native_class=False)
+class _FileSysMeta(type):
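+    # A METACLASS IS USED SO THAT 'cwd', 'home' AND 'script_dir' BEHAVE AS READ-ONLY PROPERTIES
+    # ON THE 'FileSys' CLASS ITSELF (E.G. 'FileSys.cwd' WITHOUT PARENTHESES OR INSTANTIATION).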
+
+ @property
+ def cwd(cls) -> Path:
+ """The path to the current working directory."""
+ return Path.cwd()
+
+ @property
+ def home(cls) -> Path:
+ """The path to the user's home directory."""
+ return Path.home()
+
+ @property
+ def script_dir(cls) -> Path:
+ """The path to the directory of the current script."""
+ if getattr(_sys, "frozen", False):
+ base_path = Path(_sys.executable).parent
+ else:
+ main_module = _sys.modules["__main__"]
+ if hasattr(main_module, "__file__") and main_module.__file__ is not None:
+ base_path = Path(main_module.__file__).resolve().parent
+ elif (hasattr(main_module, "__spec__") and main_module.__spec__ and main_module.__spec__.origin is not None):
+ base_path = Path(main_module.__spec__.origin).resolve().parent
+ else:
+ raise RuntimeError("Can only get base directory if accessed from a file.")
+ return base_path
+
+
+class FileSys(metaclass=_FileSysMeta):
+ """This class provides methods to work with file and directory paths."""
+
+ @classmethod
+ def extend_path(
+ cls,
+ rel_path: Path | str,
+ search_in: Optional[Path | str | PathsList] = None,
+ fuzzy_match: bool = False,
+ raise_error: bool = False,
+ ) -> Optional[Path]:
+ """Tries to resolve and extend a relative path to an absolute path.\n
+ -------------------------------------------------------------------------------------------
+ - `rel_path` -⠀the relative path to extend
+ - `search_in` -⠀a directory or a list of directories to search in,
+ in addition to the predefined directories (see exact procedure below)
+ - `fuzzy_match` -⠀if true, it will try to find the closest matching file/folder
+ names in the `search_in` directories, allowing for typos in `rel_path` and `search_in`
+ - `raise_error` -⠀if true, raises a `PathNotFoundError` if
+ the path couldn't be found (otherwise it returns `None`)\n
+ -------------------------------------------------------------------------------------------
+ If the `rel_path` couldn't be located in predefined directories,
+ it will be searched in the `search_in` directory/s.
+ If the `rel_path` is still not found, it returns `None` or
+ raises a `PathNotFoundError` if `raise_error` is true."""
+ search_dirs: list[Path] = []
+ path: Path
+
+ if isinstance(rel_path, str):
+ if rel_path == "":
+ if raise_error:
+ raise PathNotFoundError("Given 'rel_path' is an empty string.")
+ return None
+ else:
+ path = Path(rel_path)
+ else:
+ path = rel_path
+
+ if path.is_absolute():
+ return path
+
+ if search_in is not None:
+ if isinstance(search_in, (str, Path)):
+ search_dirs.extend([Path(search_in)])
+ elif isinstance(search_in, list):
+ search_dirs.extend([Path(path) for path in search_in])
+ else:
+ raise TypeError(
+ f"The 'search_in' parameter must be a string, Path, or a list of strings/Paths, got {type(search_in)}"
+ )
+
+ return _ExtendPathHelper(
+ cls,
+ rel_path=path,
+ search_dirs=search_dirs,
+ fuzzy_match=fuzzy_match,
+ raise_error=raise_error,
+ )()
+
+ @classmethod
+ def extend_or_make_path(
+ cls,
+ rel_path: Path | str,
+ search_in: Optional[Path | str | list[Path | str]] = None,
+ prefer_script_dir: bool = True,
+ fuzzy_match: bool = False,
+ ) -> Path:
+ """Tries to locate and extend a relative path to an absolute path, and if the `rel_path`
+ couldn't be located, it generates a path, as if it was located.\n
+ -------------------------------------------------------------------------------------------
+ - `rel_path` -⠀the relative path to extend or make
+ - `search_in` -⠀a directory or a list of directories to search in,
+ in addition to the predefined directories (see exact procedure below)
+ - `prefer_script_dir` -⠀if true, the script directory is preferred
+ when making a new path (otherwise the CWD is preferred)
+ - `fuzzy_match` -⠀if true, it will try to find the closest matching file/folder
+ names in the `search_in` directories, allowing for typos in `rel_path` and `search_in`\n
+ -------------------------------------------------------------------------------------------
+ If the `rel_path` couldn't be located in predefined directories,
+ it will be searched in the `search_in` directory/s.
+        If the `rel_path` is still not found, it will make a path
+ that points to where the `rel_path` would be in the script directory,
+ even though the `rel_path` doesn't exist there.
+ If `prefer_script_dir` is false, it will instead make a path
+ that points to where the `rel_path` would be in the CWD."""
+ try:
+ result = cls.extend_path(
+ rel_path=rel_path,
+ search_in=search_in,
+ raise_error=True,
+ fuzzy_match=fuzzy_match,
+ )
+ return result if result is not None else Path()
+
+ except PathNotFoundError:
+ path = Path(str(rel_path))
+ base_dir = cls.script_dir if prefer_script_dir else Path.cwd()
+ return base_dir / path
+
+ @classmethod
+ def remove(cls, path: Path | str, only_content: bool = False) -> None:
+ """Removes the directory or the directory's content at the specified path.\n
+ -----------------------------------------------------------------------------
+ - `path` -⠀the path to the directory or file to remove
+ - `only_content` -⠀if true, only the content of the directory is removed
+ and the directory itself is kept"""
+ if not (path_obj := Path(path)).exists():
+ return None
+
+ if not only_content:
+ if path_obj.is_file() or path_obj.is_symlink():
+ path_obj.unlink()
+ elif path_obj.is_dir():
+ _shutil.rmtree(path_obj)
+
+ elif path_obj.is_dir():
+ for item in path_obj.iterdir():
+ try:
+ if item.is_file() or item.is_symlink():
+ item.unlink()
+ elif item.is_dir():
+ _shutil.rmtree(item)
+ except Exception as e:
+ fmt_error = "\n ".join(str(e).splitlines())
+ raise Exception(f"Failed to delete {item!r}:\n {fmt_error}") from e
+
+
+class _ExtendPathHelper:
+ """Internal, callable helper class to extend a relative path to an absolute path."""
+
+ def __init__(
+ self,
+ cls: type[FileSys],
+ rel_path: Path,
+ search_dirs: list[Path],
+ fuzzy_match: bool,
+ raise_error: bool,
+ ):
+ self.cls = cls
+ self.rel_path = rel_path
+ self.search_dirs = search_dirs
+ self.fuzzy_match = fuzzy_match
+ self.raise_error = raise_error
+
+ def __call__(self) -> Optional[Path]:
+ """Execute the path extension logic."""
+ expanded_path = self.expand_env_vars(self.rel_path)
+
+ if expanded_path.is_absolute():
+ # ADD ROOT TO SEARCH DIRS
+ if expanded_path.drive:
+ self.search_dirs.extend([Path(expanded_path.drive + _os.sep)])
+ else:
+ self.search_dirs.extend([Path(_os.sep)])
+ # REMOVE ROOT FROM PATH PARTS FOR SEARCHING
+ expanded_path = Path(*expanded_path.parts[1:])
+ else:
+ # ADD PREDEFINED SEARCH DIRS
+ self.search_dirs.extend([
+ self.cls.cwd,
+ self.cls.home,
+ self.cls.script_dir,
+ Path(_tempfile.gettempdir()),
+ ])
+
+ return self.search_in_dirs(expanded_path)
+
+ @staticmethod
+ def expand_env_vars(path: Path) -> Path:
+ """Expand all environment variables in the given path."""
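+        # E.G. (ILLUSTRATIVE): "%USERPROFILE%/Documents" ON WINDOWS EXPANDS TO SOMETHING LIKE "C:\Users\<user>/Documents"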
+ if "%" not in (str_path := str(path)):
+ return path
+
+ for i in range(1, len(parts := str_path.split("%")), 2):
+ if parts[i].upper() in _os.environ:
+ parts[i] = _os.environ[parts[i].upper()]
+
+ return Path("".join(parts))
+
+ def search_in_dirs(self, path: Path) -> Optional[Path]:
+ """Search for the path in all configured directories."""
+ for search_dir in self.search_dirs:
+ if (full_path := search_dir / path).exists():
+ return full_path
+ elif self.fuzzy_match:
+ if (match := self.find_path( \
+ base_dir=search_dir,
+ target_path=path,
+ fuzzy_match=self.fuzzy_match,
+ )) is not None:
+ return match
+
+ if self.raise_error:
+ raise PathNotFoundError(f"Path {self.rel_path!r} not found in specified directories.")
+ return None
+
+ def find_path(self, base_dir: Path, target_path: Path, fuzzy_match: bool) -> Optional[Path]:
+ """Find a path by traversing the given parts from the base directory,
+ optionally using closest matches for each part."""
+ current_path: Path = base_dir
+
+ for part in target_path.parts:
+ if current_path.is_file():
+ return current_path
+ elif (closest_match := self.get_closest_match(current_path, part) if fuzzy_match else part) is None:
+ return None
+ current_path = current_path / closest_match
+
+ return current_path if current_path.exists() and current_path != base_dir else None
+
+ @staticmethod
+ def get_closest_match(dir: Path, path_part: str) -> Optional[str]:
+ """Internal method to get the closest matching file or folder name
+ in the given directory for the given path part."""
+ try:
+ items = [item.name for item in dir.iterdir()]
+ return matches[0] if (matches := _difflib.get_close_matches(path_part, items, n=1, cutoff=0.6)) else None
+ except Exception:
+ return None
diff --git a/src/xulbux/format_codes.py b/src/xulbux/format_codes.py
index 66c21b3..62ea6c4 100644
--- a/src/xulbux/format_codes.py
+++ b/src/xulbux/format_codes.py
@@ -693,7 +693,7 @@ def build_output(self, match: _rx.Match[str]) -> str:
"""Build the final output string based on processed formats and resets."""
# CHECK IF ALL FORMATS WERE VALID
has_single_valid_ansi = len(self.ansi_formats) == 1 and self.ansi_formats[0].count(f"{ANSI.CHAR}{ANSI.START}") >= 1
- all_formats_valid = all(f.startswith(f"{ANSI.CHAR}{ANSI.START}") for f in self.ansi_formats)
+ all_formats_valid = all(ansi_format.startswith(f"{ANSI.CHAR}{ANSI.START}") for ansi_format in self.ansi_formats)
if not has_single_valid_ansi and not all_formats_valid:
return match.group(0)
diff --git a/src/xulbux/json.py b/src/xulbux/json.py
index f5e7fa7..c60bca7 100644
--- a/src/xulbux/json.py
+++ b/src/xulbux/json.py
@@ -3,11 +3,12 @@
create and update JSON files, with support for comments inside the JSON data.
"""
+from .file_sys import FileSys
from .data import Data
from .file import File
-from .path import Path
from typing import Literal, Any, cast
+from pathlib import Path
import json as _json
@@ -18,7 +19,7 @@ class Json:
@classmethod
def read(
cls,
- json_file: str,
+ json_file: Path | str,
comment_start: str = ">>",
comment_end: str = "<<",
return_original: bool = False,
@@ -35,33 +36,33 @@ def read(
------------------------------------------------------------------------------------
For more detailed information about the comment handling,
see the `Data.remove_comments()` method documentation."""
- if not json_file.endswith(".json"):
- json_file += ".json"
- if (file_path := Path.extend_or_make(json_file, prefer_script_dir=True)) is None:
- raise FileNotFoundError(f"Could not find JSON file: {json_file}")
+ if (json_path := Path(json_file) if isinstance(json_file, str) else json_file).suffix != ".json":
+ json_path = json_path.with_suffix(".json")
+ file_path = FileSys.extend_or_make_path(json_path, prefer_script_dir=True)
- with open(file_path, "r") as f:
- content = f.read()
+ with open(file_path, "r") as file:
+ content = file.read()
try:
data = _json.loads(content)
except _json.JSONDecodeError as e:
- raise ValueError(f"Error parsing JSON in '{file_path}': {str(e)}")
+ fmt_error = "\n ".join(str(e).splitlines())
+ raise ValueError(f"Error parsing JSON in {file_path!r}:\n {fmt_error}") from e
if not (processed_data := dict(Data.remove_comments(data, comment_start, comment_end))):
- raise ValueError(f"The JSON file '{file_path}' is empty or contains only comments.")
+ raise ValueError(f"The JSON file {file_path!r} is empty or contains only comments.")
return (processed_data, data) if return_original else processed_data
@classmethod
def create(
cls,
- json_file: str,
+ json_file: Path | str,
data: dict,
indent: int = 2,
compactness: Literal[0, 1, 2] = 1,
force: bool = False,
- ) -> str:
+ ) -> Path:
"""Create a nicely formatted JSON file from a dictionary.\n
---------------------------------------------------------------------------
- `json_file` -⠀the path (relative or absolute) to the JSON file to create
@@ -75,12 +76,19 @@ def create(
The method will throw a `FileExistsError` if a file with the same
name already exists and a `SameContentFileExistsError` if a file
with the same name and same content already exists."""
- if not json_file.endswith(".json"):
- json_file += ".json"
+ if (json_path := Path(json_file) if isinstance(json_file, str) else json_file).suffix != ".json":
+ json_path = json_path.with_suffix(".json")
+ file_path = FileSys.extend_or_make_path(json_path, prefer_script_dir=True)
File.create(
- file_path=(file_path := Path.extend_or_make(json_file, prefer_script_dir=True)),
- content=Data.render(data=data, indent=indent, compactness=compactness, as_json=True),
+ file_path=file_path,
+ content=Data.render(
+ data=data,
+ indent=indent,
+ compactness=compactness,
+ as_json=True,
+ syntax_highlighting=False,
+ ),
force=force,
)
@@ -89,7 +97,7 @@ def create(
@classmethod
def update(
cls,
- json_file: str,
+ json_file: Path | str,
update_values: dict[str, Any],
comment_start: str = ">>",
comment_end: str = "<<",
diff --git a/src/xulbux/path.py b/src/xulbux/path.py
deleted file mode 100644
index 4266475..0000000
--- a/src/xulbux/path.py
+++ /dev/null
@@ -1,215 +0,0 @@
-"""
-This module provides the `Path` class, which includes methods to work with file and directory paths.
-"""
-
-from .base.exceptions import PathNotFoundError
-
-from typing import Optional
-from mypy_extensions import mypyc_attr
-import tempfile as _tempfile
-import difflib as _difflib
-import shutil as _shutil
-import sys as _sys
-import os as _os
-
-
-@mypyc_attr(native_class=False)
-class _PathMeta(type):
-
- @property
- def cwd(cls) -> str:
- """The path to the current working directory."""
- return _os.getcwd()
-
- @property
- def home(cls) -> str:
- """The path to the user's home directory."""
- return _os.path.expanduser("~")
-
- @property
- def script_dir(cls) -> str:
- """The path to the directory of the current script."""
- if getattr(_sys, "frozen", False):
- base_path = _os.path.dirname(_sys.executable)
- else:
- main_module = _sys.modules["__main__"]
- if hasattr(main_module, "__file__") and main_module.__file__ is not None:
- base_path = _os.path.dirname(_os.path.abspath(main_module.__file__))
- elif (hasattr(main_module, "__spec__") and main_module.__spec__ and main_module.__spec__.origin is not None):
- base_path = _os.path.dirname(_os.path.abspath(main_module.__spec__.origin))
- else:
- raise RuntimeError("Can only get base directory if accessed from a file.")
- return base_path
-
-
-class Path(metaclass=_PathMeta):
- """This class provides methods to work with file and directory paths."""
-
- @classmethod
- def extend(
- cls,
- rel_path: str,
- search_in: Optional[str | list[str]] = None,
- raise_error: bool = False,
- use_closest_match: bool = False,
- ) -> Optional[str]:
- """Tries to resolve and extend a relative path to an absolute path.\n
- -------------------------------------------------------------------------------------------
- - `rel_path` -⠀the relative path to extend
- - `search_in` -⠀a directory or a list of directories to search in,
- in addition to the predefined directories (see exact procedure below)
- - `raise_error` -⠀if true, raises a `PathNotFoundError` if
- the path couldn't be found (otherwise it returns `None`)
- - `use_closest_match` -⠀if true, it will try to find the closest matching file/folder
- names in the `search_in` directories, allowing for typos in `rel_path` and `search_in`\n
- -------------------------------------------------------------------------------------------
- If the `rel_path` couldn't be located in predefined directories,
- it will be searched in the `search_in` directory/s.
- If the `rel_path` is still not found, it returns `None` or
- raises a `PathNotFoundError` if `raise_error` is true."""
- search_dirs: list[str] = []
-
- if search_in is not None:
- if isinstance(search_in, str):
- search_dirs.extend([search_in])
- elif isinstance(search_in, list):
- search_dirs.extend(search_in)
- else:
- raise TypeError(f"The 'search_in' parameter must be a string or a list of strings, got {type(search_in)}")
-
- if rel_path == "":
- if raise_error:
- raise PathNotFoundError("Path is empty.")
- else:
- return None
- elif _os.path.isabs(rel_path):
- return rel_path
-
- rel_path = _os.path.normpath(cls._expand_env_path(rel_path))
-
- if _os.path.isabs(rel_path):
- drive, rel_path = _os.path.splitdrive(rel_path)
- rel_path = rel_path.lstrip(_os.sep)
- search_dirs.extend([(drive + _os.sep) if drive else _os.sep])
- else:
- rel_path = rel_path.lstrip(_os.sep)
- search_dirs.extend([_os.getcwd(), cls.script_dir, _os.path.expanduser("~"), _tempfile.gettempdir()])
-
- for search_dir in search_dirs:
- if _os.path.exists(full_path := _os.path.join(search_dir, rel_path)):
- return full_path
- if (match := (
- cls._find_path(search_dir, rel_path.split(_os.sep), use_closest_match) \
- if use_closest_match else None
- )):
- return match
-
- if raise_error:
- raise PathNotFoundError(f"Path '{rel_path}' not found in specified directories.")
- else:
- return None
-
- @classmethod
- def extend_or_make(
- cls,
- rel_path: str,
- search_in: Optional[str | list[str]] = None,
- prefer_script_dir: bool = True,
- use_closest_match: bool = False,
- ) -> str:
- """Tries to locate and extend a relative path to an absolute path, and if the `rel_path`
- couldn't be located, it generates a path, as if it was located.\n
- -------------------------------------------------------------------------------------------
- - `rel_path` -⠀the relative path to extend or make
- - `search_in` -⠀a directory or a list of directories to search in,
- in addition to the predefined directories (see exact procedure below)
- - `prefer_script_dir` -⠀if true, the script directory is preferred
- when making a new path (otherwise the CWD is preferred)
- - `use_closest_match` -⠀if true, it will try to find the closest matching file/folder
- names in the `search_in` directories, allowing for typos in `rel_path` and `search_in`\n
- -------------------------------------------------------------------------------------------
- If the `rel_path` couldn't be located in predefined directories,
- it will be searched in the `search_in` directory/s.
- If the `rel_path` is still not found, it will makes a path
- that points to where the `rel_path` would be in the script directory,
- even though the `rel_path` doesn't exist there.
- If `prefer_script_dir` is false, it will instead make a path
- that points to where the `rel_path` would be in the CWD."""
- try:
- return str(cls.extend( \
- rel_path=rel_path,
- search_in=search_in,
- raise_error=True,
- use_closest_match=use_closest_match,
- ))
-
- except PathNotFoundError:
- return _os.path.join(
- cls.script_dir if prefer_script_dir else _os.getcwd(),
- _os.path.normpath(rel_path),
- )
-
- @classmethod
- def remove(cls, path: str, only_content: bool = False) -> None:
- """Removes the directory or the directory's content at the specified path.\n
- -----------------------------------------------------------------------------
- - `path` -⠀the path to the directory or file to remove
- - `only_content` -⠀if true, only the content of the directory is removed
- and the directory itself is kept"""
- if not _os.path.exists(path):
- return None
-
- if not only_content:
- if _os.path.isfile(path) or _os.path.islink(path):
- _os.unlink(path)
- elif _os.path.isdir(path):
- _shutil.rmtree(path)
-
- elif _os.path.isdir(path):
- for filename in _os.listdir(path):
- file_path = _os.path.join(path, filename)
- try:
- if _os.path.isfile(file_path) or _os.path.islink(file_path):
- _os.unlink(file_path)
- elif _os.path.isdir(file_path):
- _shutil.rmtree(file_path)
- except Exception as e:
- raise Exception(f"Failed to delete {file_path}. Reason: {e}")
-
- @staticmethod
- def _expand_env_path(path_str: str) -> str:
- """Internal method that expands all environment variables in the given path string."""
- if "%" not in path_str:
- return path_str
-
- for i in range(1, len(parts := path_str.split("%")), 2):
- if parts[i].upper() in _os.environ:
- parts[i] = _os.environ[parts[i].upper()]
-
- return "".join(parts)
-
- @classmethod
- def _find_path(cls, start_dir: str, path_parts: list[str], use_closest_match: bool) -> Optional[str]:
- """Internal method to find a path by traversing the given parts from
- the start directory, optionally using closest matches for each part."""
- current_dir: str = start_dir
-
- for part in path_parts:
- if _os.path.isfile(current_dir):
- return current_dir
- if (closest_match := cls._get_closest_match(current_dir, part) if use_closest_match else part) is None:
- return None
- current_dir = _os.path.join(current_dir, closest_match)
-
- return current_dir if _os.path.exists(current_dir) and current_dir != start_dir else None
-
- @staticmethod
- def _get_closest_match(dir: str, path_part: str) -> Optional[str]:
- """Internal method to get the closest matching file or folder name
- in the given directory for the given path part."""
- try:
- return matches[0] if (
- matches := _difflib.get_close_matches(path_part, _os.listdir(dir), n=1, cutoff=0.6)
- ) else None
- except Exception:
- return None
diff --git a/src/xulbux/regex.py b/src/xulbux/regex.py
index c7ad5d9..a9ef866 100644
--- a/src/xulbux/regex.py
+++ b/src/xulbux/regex.py
@@ -3,8 +3,9 @@
to dynamically generate complex regex patterns for common use cases.
"""
+from .base.decorators import mypyc_attr
+
from typing import Optional
-from mypy_extensions import mypyc_attr
import regex as _rx
import re as _re
diff --git a/src/xulbux/system.py b/src/xulbux/system.py
index d62f634..e7ec66a 100644
--- a/src/xulbux/system.py
+++ b/src/xulbux/system.py
@@ -4,12 +4,12 @@
"""
from .base.types import MissingLibsMsgs
+from .base.decorators import mypyc_attr
from .format_codes import FormatCodes
from .console import Console
from typing import Optional
-from mypy_extensions import mypyc_attr
import subprocess as _subprocess
import platform as _platform
import ctypes as _ctypes
diff --git a/tests/test_console.py b/tests/test_console.py
index 87c56c6..ca98cd7 100644
--- a/tests/test_console.py
+++ b/tests/test_console.py
@@ -15,7 +15,10 @@
@pytest.fixture
def mock_terminal_size(monkeypatch):
TerminalSize = namedtuple("TerminalSize", ["columns", "lines"])
- mock_get_terminal_size = lambda: TerminalSize(columns=80, lines=24)
+
+ def mock_get_terminal_size():
+ return TerminalSize(columns=80, lines=24)
+
monkeypatch.setattr(console._os, "get_terminal_size", mock_get_terminal_size)
diff --git a/tests/test_env_path.py b/tests/test_env_path.py
index 497f3a1..ce71e9a 100644
--- a/tests/test_env_path.py
+++ b/tests/test_env_path.py
@@ -1,5 +1,7 @@
from xulbux.env_path import EnvPath
+from pathlib import Path
+
#
################################################## EnvPath TESTS ##################################################
@@ -9,10 +11,11 @@ def test_get_paths():
paths_list = EnvPath.paths(as_list=True)
assert paths
assert paths_list
- assert isinstance(paths, str)
+ assert isinstance(paths, Path)
assert isinstance(paths_list, list)
assert len(paths_list) > 0
- assert isinstance(paths_list[0], str)
+ assert all(isinstance(path, Path) for path in paths_list)
+ assert isinstance(paths_list[0], Path)
def test_add_path():
diff --git a/tests/test_file.py b/tests/test_file.py
index 6760996..0f04aa2 100644
--- a/tests/test_file.py
+++ b/tests/test_file.py
@@ -1,8 +1,8 @@
from xulbux.base.exceptions import SameContentFileExistsError
from xulbux.file import File
+from pathlib import Path
import pytest
-import os
#
################################################## File TESTS ##################################################
@@ -13,11 +13,11 @@
("myfile.txt", ".log", False, False, "myfile.log"),
("my_file_name.data", ".csv", False, False, "my_file_name.csv"),
("another-file.json", ".xml", False, False, "another-file.xml"),
- ("path/to/myfile.txt", ".md", False, False, os.path.join("path", "to", "myfile.md")),
+ ("path/to/myfile.txt", ".md", False, False, str(Path("path") / "to" / "myfile.md")),
("my_file_name.data", ".csv", True, False, "MyFileName.csv"),
("another-file.json", ".xml", True, False, "AnotherFile.xml"),
("alreadyCamelCase.config", ".yaml", True, False, "AlreadyCamelCase.yaml"),
- (os.path.join("path", "to", "my_file.txt"), ".log", True, False, os.path.join("path", "to", "MyFile.log")),
+ (Path("path") / "to" / "my_file.txt", ".log", True, False, str(Path("path") / "to" / "MyFile.log")),
("filename", ".ext", False, False, "filename.ext"),
("file_name", ".ext", True, False, "FileName.ext"),
("test_file.blade.php", ".vue", False, False, "test_file.blade.vue"),
@@ -26,46 +26,49 @@
("test_file.blade.php", ".vue", False, True, "test_file.vue"),
("archive.tar.gz", ".zip", False, True, "archive.zip"),
("my_archive.tar.gz", ".zip", True, True, "MyArchive.zip"),
- (os.path.join("some", "dir", "file.config.yaml"), ".json", False, True, os.path.join("some", "dir", "file.json")),
+ (Path("some") / "dir" / "file.config.yaml", ".json", False, True, str(Path("some") / "dir" / "file.json")),
(
- os.path.join("some", "dir", "file_name.config.yaml"),
+ Path("some") / "dir" / "file_name.config.yaml",
".json",
True,
True,
- os.path.join("some", "dir", "FileName.json"),
+ str(Path("some") / "dir" / "FileName.json"),
),
("nodotfile", ".txt", False, True, "nodotfile.txt"),
("no_dot_file", ".txt", True, True, "NoDotFile.txt"),
]
)
def test_rename_extension(input_file, new_extension, full_extension, camel_case, expected_output):
- expected_output = expected_output.replace("/", os.sep).replace("\\", os.sep)
- assert File.rename_extension(input_file, new_extension, full_extension, camel_case) == expected_output
+ result = File.rename_extension(input_file, new_extension, full_extension, camel_case)
+ assert isinstance(result, Path)
+ assert str(result) == expected_output
def test_create_new_file(tmp_path):
file_path = tmp_path / "new_file.txt"
abs_path = File.create(str(file_path))
- assert os.path.exists(file_path)
- assert os.path.abspath(str(file_path)) == abs_path
- with open(file_path, "r", encoding="utf-8") as f:
- assert f.read() == ""
+ assert isinstance(abs_path, Path)
+ assert file_path.exists()
+ assert abs_path.resolve() == file_path.resolve()
+ with open(file_path, "r", encoding="utf-8") as file:
+ assert file.read() == ""
def test_create_file_with_content(tmp_path):
file_path = tmp_path / "content_file.log"
content = "This is the file content.\nWith multiple lines."
abs_path = File.create(str(file_path), content=content)
- assert os.path.exists(file_path)
- assert os.path.abspath(str(file_path)) == abs_path
- with open(file_path, "r", encoding="utf-8") as f:
- assert f.read() == content
+ assert isinstance(abs_path, Path)
+ assert file_path.exists()
+ assert abs_path.resolve() == file_path.resolve()
+ with open(file_path, "r", encoding="utf-8") as file:
+ assert file.read() == content
def test_create_file_exists_error(tmp_path):
file_path = tmp_path / "existing_file.txt"
- with open(file_path, "w", encoding="utf-8") as f:
- f.write("Initial content")
+ with open(file_path, "w", encoding="utf-8") as file:
+ file.write("Initial content")
with pytest.raises(FileExistsError):
File.create(str(file_path), content="New content", force=False)
@@ -87,10 +90,11 @@ def test_create_file_force_overwrite_different_content(tmp_path):
assert open(file_path, "r", encoding="utf-8").read() == initial_content
abs_path = File.create(str(file_path), content=new_content, force=True)
- assert os.path.exists(file_path)
- assert os.path.abspath(str(file_path)) == abs_path
- with open(file_path, "r", encoding="utf-8") as f:
- assert f.read() == new_content
+ assert isinstance(abs_path, Path)
+ assert file_path.exists()
+ assert abs_path.resolve() == file_path.resolve()
+ with open(file_path, "r", encoding="utf-8") as file:
+ assert file.read() == new_content
def test_create_file_force_overwrite_same_content(tmp_path):
@@ -101,10 +105,11 @@ def test_create_file_force_overwrite_same_content(tmp_path):
assert open(file_path, "r", encoding="utf-8").read() == content
abs_path = File.create(str(file_path), content=content, force=True)
- assert os.path.exists(file_path)
- assert os.path.abspath(str(file_path)) == abs_path
- with open(file_path, "r", encoding="utf-8") as f:
- assert f.read() == content
+ assert isinstance(abs_path, Path)
+ assert file_path.exists()
+ assert abs_path.resolve() == file_path.resolve()
+ with open(file_path, "r", encoding="utf-8") as file:
+ assert file.read() == content
def test_create_file_in_subdirectory(tmp_path):
@@ -115,9 +120,10 @@ def test_create_file_in_subdirectory(tmp_path):
with pytest.raises(FileNotFoundError):
File.create(str(file_path), content=content)
- os.makedirs(dir_path)
+ dir_path.mkdir()
abs_path = File.create(str(file_path), content=content)
- assert os.path.exists(file_path)
- assert os.path.abspath(str(file_path)) == abs_path
- with open(file_path, "r", encoding="utf-8") as f:
- assert f.read() == content
+ assert isinstance(abs_path, Path)
+ assert file_path.exists()
+ assert abs_path.resolve() == file_path.resolve()
+ with open(file_path, "r", encoding="utf-8") as file:
+ assert file.read() == content
diff --git a/tests/test_path.py b/tests/test_file_sys.py
similarity index 53%
rename from tests/test_path.py
rename to tests/test_file_sys.py
index 7ac268f..0444ee8 100644
--- a/tests/test_path.py
+++ b/tests/test_file_sys.py
@@ -1,6 +1,7 @@
from xulbux.base.exceptions import PathNotFoundError
-from xulbux.path import Path
+from xulbux.file_sys import FileSys
+from pathlib import Path
import tempfile
import pytest
import sys
@@ -16,8 +17,8 @@ def setup_test_environment(tmp_path, monkeypatch):
mock_temp = tmp_path / "mock_temp"
mock_search_in = tmp_path / "mock_search_in"
- for p in [mock_cwd, mock_script_dir, mock_home, mock_temp, mock_search_in]:
- p.mkdir()
+ for path in [mock_cwd, mock_script_dir, mock_home, mock_temp, mock_search_in]:
+ path.mkdir()
(mock_cwd / "file_in_cwd.txt").touch()
(mock_script_dir / "subdir").mkdir()
@@ -30,7 +31,8 @@ def setup_test_environment(tmp_path, monkeypatch):
abs_file = mock_cwd / "absolute_file.txt"
abs_file.touch()
- monkeypatch.setattr(os, "getcwd", lambda: str(mock_cwd))
+ monkeypatch.setattr(Path, "cwd", staticmethod(lambda: mock_cwd))
+ monkeypatch.setattr(Path, "home", staticmethod(lambda: mock_home))
monkeypatch.setattr(sys.modules["__main__"], "__file__", str(mock_script_dir / "mock_script.py"))
monkeypatch.setattr(os.path, "expanduser", lambda path: str(mock_home) if path == "~" else path)
monkeypatch.setattr(tempfile, "gettempdir", lambda: str(mock_temp))
@@ -49,23 +51,23 @@ def setup_test_environment(tmp_path, monkeypatch):
def test_path_cwd(setup_test_environment):
- cwd_output = Path.cwd
- assert isinstance(cwd_output, str)
- assert cwd_output == str(setup_test_environment["cwd"])
+ cwd_output = FileSys.cwd
+ assert isinstance(cwd_output, Path)
+ assert str(cwd_output) == str(setup_test_environment["cwd"])
def test_path_script_dir(setup_test_environment):
- script_dir_output = Path.script_dir
- assert isinstance(script_dir_output, str)
- assert script_dir_output == str(setup_test_environment["script_dir"])
+ script_dir_output = FileSys.script_dir
+ assert isinstance(script_dir_output, Path)
+ assert str(script_dir_output) == str(setup_test_environment["script_dir"])
def test_path_home():
- home = Path.home
- assert isinstance(home, str)
- assert len(home) > 0
- assert os.path.exists(home)
- assert os.path.isdir(home)
+ home = FileSys.home
+ assert isinstance(home, Path)
+ assert len(str(home)) > 0
+ assert home.exists()
+ assert home.is_dir()
def test_extend(setup_test_environment):
@@ -74,34 +76,40 @@ def test_extend(setup_test_environment):
search_dirs = [str(env["cwd"]), search_dir]
# ABSOLUTE PATH
- assert Path.extend(str(env["abs_file"])) == str(env["abs_file"])
+ result = FileSys.extend_path(str(env["abs_file"]))
+ assert isinstance(result, Path)
+ assert str(result) == str(env["abs_file"])
# EMPTY PATH
- assert Path.extend("") is None
- with pytest.raises(PathNotFoundError, match="Path is empty."):
- Path.extend("", raise_error=True)
+ assert FileSys.extend_path("") is None
+ with pytest.raises(PathNotFoundError, match="Given 'rel_path' is an empty string."):
+ FileSys.extend_path("", raise_error=True)
# FOUND IN STANDARD LOCATIONS
- assert Path.extend("file_in_cwd.txt") == str(env["cwd"] / "file_in_cwd.txt")
- assert Path.extend("subdir/file_in_script_subdir.txt") == str(env["script_dir"] / "subdir" / "file_in_script_subdir.txt")
- assert Path.extend("file_in_home.txt") == str(env["home"] / "file_in_home.txt")
- assert Path.extend("temp_file.tmp") == str(env["temp"] / "temp_file.tmp")
+ assert str(FileSys.extend_path("file_in_cwd.txt")) == str(env["cwd"] / "file_in_cwd.txt")
+ assert str(FileSys.extend_path("subdir/file_in_script_subdir.txt")
+ ) == str(env["script_dir"] / "subdir" / "file_in_script_subdir.txt")
+ assert str(FileSys.extend_path("file_in_home.txt")) == str(env["home"] / "file_in_home.txt")
+ assert str(FileSys.extend_path("temp_file.tmp")) == str(env["temp"] / "temp_file.tmp")
# FOUND IN search_in
- assert Path.extend("custom_file.dat", search_in=search_dir) == str(env["search_in"] / "custom_file.dat")
- assert Path.extend("custom_file.dat", search_in=search_dirs) == str(env["search_in"] / "custom_file.dat")
+ assert str(FileSys.extend_path("custom_file.dat", search_in=search_dir)) == str(env["search_in"] / "custom_file.dat")
+ assert str(FileSys.extend_path("custom_file.dat", search_in=search_dirs)) == str(env["search_in"] / "custom_file.dat")
# NOT FOUND
- assert Path.extend("non_existent_file.xyz") is None
- with pytest.raises(PathNotFoundError, match="'non_existent_file.xyz' not found"):
- Path.extend("non_existent_file.xyz", raise_error=True)
+ assert FileSys.extend_path("non_existent_file.xyz") is None
+ with pytest.raises( \
+ PathNotFoundError,
+ match=r"Path [A-Za-z]*Path\('non_existent_file\.xyz'\) not found in specified directories\.",
+ ):
+ FileSys.extend_path("non_existent_file.xyz", raise_error=True)
# CLOSEST MATCH
expected_typo = env["search_in"] / "TypoDir" / "file_in_typo.txt"
- assert Path.extend("TypoDir/file_in_typo.txt", search_in=search_dir, use_closest_match=False) == str(expected_typo)
- assert Path.extend("TypoDir/file_in_typo.txt", search_in=search_dir, use_closest_match=True) == str(expected_typo)
- assert Path.extend("TypoDir/file_in_typx.txt", search_in=search_dir, use_closest_match=True) == str(expected_typo)
- assert Path.extend("CompletelyWrong/no_file_here.dat", search_in=search_dir, use_closest_match=True) is None
+ assert str(FileSys.extend_path("TypoDir/file_in_typo.txt", search_in=search_dir, fuzzy_match=False)) == str(expected_typo)
+ assert str(FileSys.extend_path("TypoDir/file_in_typo.txt", search_in=search_dir, fuzzy_match=True)) == str(expected_typo)
+ assert str(FileSys.extend_path("TypoDir/file_in_typx.txt", search_in=search_dir, fuzzy_match=True)) == str(expected_typo)
+ assert FileSys.extend_path("CompletelyWrong/no_file_here.dat", search_in=search_dir, fuzzy_match=True) is None
def test_extend_or_make(setup_test_environment):
@@ -109,42 +117,45 @@ def test_extend_or_make(setup_test_environment):
search_dir = str(env["search_in"])
# FOUND
- assert Path.extend_or_make("file_in_cwd.txt") == str(env["cwd"] / "file_in_cwd.txt")
+ result = FileSys.extend_or_make_path("file_in_cwd.txt")
+ assert isinstance(result, Path)
+ assert str(result) == str(env["cwd"] / "file_in_cwd.txt")
# NOT FOUND - MAKE PATH (PREFER SCRIPT DIR)
rel_path_script = "new_dir/new_file.txt"
expected_script = env["script_dir"] / rel_path_script
- assert Path.extend_or_make(rel_path_script, prefer_script_dir=True) == str(expected_script)
+ assert str(FileSys.extend_or_make_path(rel_path_script, prefer_script_dir=True)) == str(expected_script)
# NOT FOUND - MAKE PATH (PREFER CWD)
rel_path_cwd = "another_new_dir/another_new_file.txt"
expected_cwd = env["cwd"] / rel_path_cwd
- assert Path.extend_or_make(rel_path_cwd, prefer_script_dir=False) == str(expected_cwd)
+ assert str(FileSys.extend_or_make_path(rel_path_cwd, prefer_script_dir=False)) == str(expected_cwd)
# USES CLOSEST MATCH WHEN FINDING
expected_typo = env["search_in"] / "TypoDir" / "file_in_typo.txt"
- assert Path.extend_or_make("TypoDir/file_in_typx.txt", search_in=search_dir, use_closest_match=True) == str(expected_typo)
+ assert str(FileSys.extend_or_make_path("TypoDir/file_in_typx.txt", search_in=search_dir,
+ fuzzy_match=True)) == str(expected_typo)
# MAKES PATH WHEN CLOSEST MATCH FAILS
rel_path_wrong = "VeryWrong/made_up.file"
expected_made = env["script_dir"] / rel_path_wrong
- assert Path.extend_or_make(rel_path_wrong, search_in=search_dir, use_closest_match=True) == str(expected_made)
+ assert str(FileSys.extend_or_make_path(rel_path_wrong, search_in=search_dir, fuzzy_match=True)) == str(expected_made)
def test_remove(tmp_path):
# NON-EXISTENT
non_existent_path = tmp_path / "does_not_exist"
assert not non_existent_path.exists()
- Path.remove(str(non_existent_path))
+ FileSys.remove(str(non_existent_path))
assert not non_existent_path.exists()
- Path.remove(str(non_existent_path), only_content=True)
+ FileSys.remove(str(non_existent_path), only_content=True)
assert not non_existent_path.exists()
# FILE REMOVAL
file_to_remove = tmp_path / "remove_me.txt"
file_to_remove.touch()
assert file_to_remove.exists()
- Path.remove(str(file_to_remove))
+ FileSys.remove(str(file_to_remove))
assert not file_to_remove.exists()
# DIRECTORY REMOVAL (FULL)
@@ -154,7 +165,7 @@ def test_remove(tmp_path):
(dir_to_remove / "subdir").mkdir()
(dir_to_remove / "subdir" / "file2.txt").touch()
assert dir_to_remove.exists()
- Path.remove(str(dir_to_remove))
+ FileSys.remove(str(dir_to_remove))
assert not dir_to_remove.exists()
# DIRECTORY REMOVAL (ONLY CONTENT)
@@ -164,7 +175,7 @@ def test_remove(tmp_path):
(dir_to_empty / "subdir").mkdir()
(dir_to_empty / "subdir" / "file2.txt").touch()
assert dir_to_empty.exists()
- Path.remove(str(dir_to_empty), only_content=True)
+ FileSys.remove(str(dir_to_empty), only_content=True)
assert dir_to_empty.exists()
assert not list(dir_to_empty.iterdir())
@@ -172,6 +183,6 @@ def test_remove(tmp_path):
file_path_content = tmp_path / "file_content.txt"
file_path_content.write_text("content")
assert file_path_content.exists()
- Path.remove(str(file_path_content), only_content=True)
+ FileSys.remove(str(file_path_content), only_content=True)
assert file_path_content.exists()
assert file_path_content.read_text() == "content"
diff --git a/tests/test_json.py b/tests/test_json.py
index 8ae5e49..3344e30 100644
--- a/tests/test_json.py
+++ b/tests/test_json.py
@@ -1,22 +1,22 @@
from xulbux.base.exceptions import SameContentFileExistsError
from xulbux.json import Json
+from pathlib import Path
import pytest
import json
-import os
def create_test_json(tmp_path, filename, data):
file_path = tmp_path / filename
- with open(file_path, "w") as f:
- json.dump(data, f, indent=2)
+ with open(file_path, "w") as file:
+ json.dump(data, file, indent=2)
return file_path
def create_test_json_string(tmp_path, filename, content):
file_path = tmp_path / filename
- with open(file_path, "w") as f:
- f.write(content)
+ with open(file_path, "w") as file:
+ file.write(content)
return file_path
@@ -133,18 +133,18 @@ def test_read_comment_only_json(tmp_path):
def test_create_simple(tmp_path):
file_path_str = str(tmp_path / "created.json")
created_path = Json.create(file_path_str, SIMPLE_DATA)
- assert os.path.exists(created_path)
- assert file_path_str == created_path
- with open(created_path, "r") as f:
- data = json.load(f)
+ assert isinstance(created_path, Path)
+ assert created_path.exists()
+ with open(created_path, "r") as file:
+ data = json.load(file)
assert data == SIMPLE_DATA
def test_create_with_indent_compactness(tmp_path):
file_path_str = str(tmp_path / "formatted.json")
Json.create(file_path_str, SIMPLE_DATA, indent=4, compactness=0)
- with open(file_path_str, "r") as f:
- content = f.read()
+ with open(file_path_str, "r") as file:
+ content = file.read()
assert '\n "name":' in content
@@ -155,7 +155,9 @@ def test_create_force_false_exists(tmp_path):
def test_create_force_false_same_content(tmp_path):
file_path = Json.create(f"{tmp_path}/existing_same.json", SIMPLE_DATA, force=False)
+ assert isinstance(file_path, Path)
with pytest.raises(SameContentFileExistsError):
Json.create(file_path, SIMPLE_DATA, force=False)
@@ -163,16 +165,16 @@ def test_create_force_false_same_content(tmp_path):
def test_create_force_true_exists(tmp_path):
file_path = create_test_json(tmp_path, "overwrite.json", {"a": 1})
Json.create(str(file_path), {"b": 2}, force=True)
- with open(file_path, "r") as f:
- data = json.load(f)
+ with open(file_path, "r") as file:
+ data = json.load(file)
assert data == {"b": 2}
def test_update_existing_values(tmp_path):
file_path = create_test_json(tmp_path, "update_test.json", UPDATE_DATA_START)
Json.update(str(file_path), UPDATE_VALUES)
- with open(file_path, "r") as f:
- data = json.load(f)
+ with open(file_path, "r") as file:
+ data = json.load(file)
assert data == UPDATE_DATA_END
@@ -192,14 +194,14 @@ def test_update_with_comments(tmp_path):
def test_update_different_path_sep(tmp_path):
file_path = create_test_json(tmp_path, "update_sep.json", {"a": {"b": 1}})
Json.update(str(file_path), {"a/b": 2}, path_sep="/")
- with open(file_path, "r") as f:
- data = json.load(f)
+ with open(file_path, "r") as file:
+ data = json.load(file)
assert data == {"a": {"b": 2}}
def test_update_create_non_existent_path(tmp_path):
file_path = create_test_json(tmp_path, "update_create.json", {"existing": 1})
Json.update(str(file_path), {"new->nested->value": "created"})
- with open(file_path, "r") as f:
- data = json.load(f)
+ with open(file_path, "r") as file:
+ data = json.load(file)
assert data == {"existing": 1, "new": {"nested": {"value": "created"}}}
diff --git a/tests/test_metadata_consistency.py b/tests/test_metadata_consistency.py
new file mode 100644
index 0000000..8c0cde9
--- /dev/null
+++ b/tests/test_metadata_consistency.py
@@ -0,0 +1,121 @@
+from typing import Optional
+from pathlib import Path
+import subprocess
+import pytest
+import toml
+import os
+import re
+
+# DEFINE PATHS RELATIVE TO THIS TEST FILE tests/test_metadata_consistency.py
+ROOT_DIR = Path(__file__).parent.parent
+PYPROJECT_PATH = ROOT_DIR / "pyproject.toml"
+INIT_PATH = ROOT_DIR / "src" / "xulbux" / "__init__.py"
+
+
+def get_current_branch() -> Optional[str]:
+ # CHECK GITHUB ACTIONS ENVIRONMENT VARIABLES FIRST
+ # GITHUB_HEAD_REF IS SET FOR PULL REQUESTS (SOURCE BRANCH)
+ if branch := os.environ.get("GITHUB_HEAD_REF"):
+ return branch
+ # GITHUB_REF_NAME IS SET FOR PUSHES (BRANCH NAME)
+ if branch := os.environ.get("GITHUB_REF_NAME"):
+ return branch
+
+ # FALLBACK TO GIT COMMAND FOR LOCAL DEV
+ try:
+ result = subprocess.run(["git", "branch", "--show-current"], capture_output=True, text=True, check=True)
+ return result.stdout.strip() or None
+ except (subprocess.CalledProcessError, FileNotFoundError):
+ return None
+
+
+################################################## VERSION CONSISTENCY TEST ##################################################
+
+
+def test_version_consistency():
+ """Verifies that the version numbers in `pyproject.toml` and `__init__.py`
+ match the version specified in the current release branch name (`dev/1.X.Y`)."""
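+ # e.g. ON BRANCH dev/1.9.4, BOTH pyproject.toml AND __init__.py MUST DECLARE VERSION "1.9.4"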
+ # SKIP IF WE CAN'T DETERMINE THE BRANCH (DETACHED HEAD OR NOT A GIT REPO)
+ if not (branch_name := get_current_branch()):
+ pytest.skip("Could not determine git branch name")
+
+ # SKIP IF BRANCH NAME DOESN'T MATCH RELEASE PATTERN dev/1.X.Y
+ if not (branch_match := re.match(r"^dev/(1\.[0-9]+\.[0-9]+)$", branch_name)):
+ pytest.skip(f"Current branch '{branch_name}' is not a release branch (dev/1.X.Y)")
+
+ expected_version = branch_match.group(1)
+
+ # EXTRACT VERSION FROM __init__.py
+ with open(INIT_PATH, "r", encoding="utf-8") as file:
+ init_content = file.read()
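+ # __version__ MUST BE ASSIGNED AT THE START OF A LINE, e.g. __version__ = "1.9.4"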
+ init_version_match = re.search(r'^__version__\s*=\s*"([^"]+)"', init_content, re.MULTILINE)
+ init_version = init_version_match.group(1) if init_version_match else None
+
+ # EXTRACT VERSION FROM pyproject.toml
+ with open(PYPROJECT_PATH, "r", encoding="utf-8") as file:
+ pyproject_data = toml.load(file)
+ pyproject_version = pyproject_data.get("project", {}).get("version", "")
+
+ assert init_version is not None, f"Could not find var '__version__' in {INIT_PATH}"
+ assert pyproject_version, f"Could not find var 'version' in {PYPROJECT_PATH}"
+
+ assert init_version == expected_version, \
+ f"Hardcoded lib-version in src/xulbux/__init__.py ({init_version}) does not match branch version ({expected_version})"
+
+ assert pyproject_version == expected_version, \
+ f"Hardcoded lib-version in pyproject.toml ({pyproject_version}) does not match branch version ({expected_version})"
+
+
+################################################## DEPENDENCIES CONSISTENCY TEST ##################################################
+
+
+def test_dependencies_consistency():
+ """Verifies that dependencies in `pyproject.toml` match `__dependencies__` in `__init__.py`."""
+ # EXTRACT DEPENDENCIES FROM __init__.py
+ with open(INIT_PATH, "r", encoding="utf-8") as file:
+ init_content = file.read()
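+ # re.DOTALL LETS THE NON-GREEDY GROUP CAPTURE THE LIST CONTENTS EVEN WHEN THEY SPAN MULTIPLE LINES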
+ init_deps = re.search(r'__dependencies__\s*=\s*\[(.*?)\]', init_content, re.DOTALL)
+
+ # EXTRACT DEPENDENCIES FROM pyproject.toml
+ with open(PYPROJECT_PATH, "r", encoding="utf-8") as file:
+ pyproject_data = toml.load(file)
+ pyproject_deps = pyproject_data.get("project", {}).get("dependencies", [])
+
+ assert init_deps is not None, f"Could not find var '__dependencies__' in {INIT_PATH}"
+ assert pyproject_deps, f"Could not find 'dependencies' in {PYPROJECT_PATH}"
+
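+ # COLLECT THE QUOTED REQUIREMENT STRINGS (SPECIFIERS LIKE ">=1.0,<2.0" MAY THEMSELVES CONTAIN COMMAS)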
+ init_deps = re.findall(r'["\']([^"\']+)["\']', init_deps.group(1))
+
+ # SORT FOR COMPARISON
+ pyproject_deps_sorted = sorted(pyproject_deps)
+ init_deps_sorted = sorted(init_deps)
+
+ assert init_deps_sorted == pyproject_deps_sorted, \
+ f"\nDependencies mismatch:\n" \
+ f" __init__.py : {init_deps_sorted}\n" \
+ f" pyproject.toml : {pyproject_deps_sorted}\n"
+
+
+################################################## DESCRIPTION CONSISTENCY TEST ##################################################
+
+
+def test_description_consistency():
+ """Verifies that the description in `pyproject.toml` matches `__description__` in `__init__.py`."""
+ # EXTRACT DESCRIPTION FROM __init__.py
+ with open(INIT_PATH, "r", encoding="utf-8") as file:
+ init_content = file.read()
+ init_desc_match = re.search(r'^__description__\s*=\s*"([^"]+)"', init_content, re.MULTILINE)
+ init_desc = init_desc_match.group(1) if init_desc_match else None
+
+ # EXTRACT DESCRIPTION FROM pyproject.toml
+ with open(PYPROJECT_PATH, "r", encoding="utf-8") as file:
+ pyproject_data = toml.load(file)
+ pyproject_desc = pyproject_data.get("project", {}).get("description", "")
+
+ assert init_desc is not None, f"Could not find var '__description__' in {INIT_PATH}"
+ assert pyproject_desc, f"Could not find 'description' in {PYPROJECT_PATH}"
+
+ assert init_desc == pyproject_desc, \
+ f"\nDescription mismatch:\n" \
+ f" __init__.py : {init_desc}\n" \
+ f" pyproject.toml : {pyproject_desc}\n"
diff --git a/tests/test_version_consistency.py b/tests/test_version_consistency.py
deleted file mode 100644
index 5d0e71d..0000000
--- a/tests/test_version_consistency.py
+++ /dev/null
@@ -1,71 +0,0 @@
-from typing import Optional
-from pathlib import Path
-import subprocess
-import pytest
-import os
-import re
-
-# DEFINE PATHS RELATIVE TO THIS TEST FILE tests/test_version.py
-ROOT_DIR = Path(__file__).parent.parent
-PYPROJECT_PATH = ROOT_DIR / "pyproject.toml"
-INIT_PATH = ROOT_DIR / "src" / "xulbux" / "__init__.py"
-
-
-def get_current_branch() -> Optional[str]:
- # CHECK GITHUB ACTIONS ENVIRONMENT VARIABLES FIRST
- # GITHUB_HEAD_REF IS SET FOR PULL REQUESTS (SOURCE BRANCH)
- if branch := os.environ.get("GITHUB_HEAD_REF"):
- return branch
- # GITHUB_REF_NAME IS SET FOR PUSHES (BRANCH NAME)
- if branch := os.environ.get("GITHUB_REF_NAME"):
- return branch
-
- # FALLBACK TO GIT COMMAND FOR LOCAL DEV
- try:
- result = subprocess.run(["git", "branch", "--show-current"], capture_output=True, text=True, check=True)
- return result.stdout.strip() or None
- except (subprocess.CalledProcessError, FileNotFoundError):
- return None
-
-
-def get_file_version(file_path: Path, pattern: str) -> Optional[str]:
- if not file_path.exists():
- return None
-
- with open(file_path, "r", encoding="utf-8") as f:
- content = f.read()
- match = re.search(pattern, content, re.MULTILINE)
- if match:
- return match.group(1)
-
- return None
-
-
-################################################## VERSION CONSISTENCY TEST ##################################################
-
-
-def test_version_consistency():
- """Verifies that the version numbers in `pyproject.toml` and `__init__.py`
- match the version specified in the current release branch name (`dev/1.X.Y`)."""
- # SKIP IF WE CAN'T DETERMINE THE BRANCH (DETACHED HEAD OR NOT A GIT REPO)
- if not (branch_name := get_current_branch()):
- pytest.skip("Could not determine git branch name")
-
- # SKIP IF BRANCH NAME DOESN'T MATCH RELEASE PATTERN dev/1.X.Y
- if not (branch_match := re.match(r"^dev/(1\.[0-9]+\.[0-9]+)$", branch_name)):
- pytest.skip(f"Current branch '{branch_name}' is not a release branch (dev/1.X.Y)")
-
- expected_version = branch_match.group(1)
-
- # EXTRACT VERSIONS
- pyproject_version = get_file_version(PYPROJECT_PATH, r'^version\s*=\s*"([^"]+)"')
- init_version = get_file_version(INIT_PATH, r'^__version__\s*=\s*"([^"]+)"')
-
- assert pyproject_version is not None, f"Could not find var 'version' in {PYPROJECT_PATH}"
- assert init_version is not None, f"Could not find var '__version__' in {INIT_PATH}"
-
- assert pyproject_version == expected_version, \
- f"Hardcoded lib-version in pyproject.toml ({pyproject_version}) does not match branch version ({expected_version})"
-
- assert init_version == expected_version, \
- f"Hardcoded lib-version in src/xulbux/__init__.py ({init_version}) does not match branch version ({expected_version})"