From 5871ae30ad05d747bb7493750e092bab25014491 Mon Sep 17 00:00:00 2001 From: Javanaut Date: Thu, 9 Apr 2026 01:06:09 +0200 Subject: [PATCH 01/28] ffn --- tools/ffx_update.sh | 9 --------- 1 file changed, 9 deletions(-) delete mode 100644 tools/ffx_update.sh diff --git a/tools/ffx_update.sh b/tools/ffx_update.sh deleted file mode 100644 index 2217c0a..0000000 --- a/tools/ffx_update.sh +++ /dev/null @@ -1,9 +0,0 @@ -#! /bin/bash - -. ~/.local/share/ffx.venv/bin/activate -pushd ~/.local/src/ffx/ -git checkout "${1:-main}" -git pull -pip install --editable . -popd -deactivate From f9c8b8ac5e4828c1f0eb76de517c7c9601f356c7 Mon Sep 17 00:00:00 2001 From: Javanaut Date: Thu, 9 Apr 2026 01:13:06 +0200 Subject: [PATCH 02/28] ffn2 --- SCRATCHPAD.md | 225 +++++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 186 insertions(+), 39 deletions(-) diff --git a/SCRATCHPAD.md b/SCRATCHPAD.md index 22431c8..cd16568 100644 --- a/SCRATCHPAD.md +++ b/SCRATCHPAD.md @@ -1,62 +1,209 @@ - +- Delete this scratchpad once the optimization backlog is either converted into issues/work items or distilled into durable project guidance. 
From 60ae58500a9b39f1a57e8c5aca8b93af2df4ad2c Mon Sep 17 00:00:00 2001 From: Javanaut Date: Thu, 9 Apr 2026 12:46:24 +0200 Subject: [PATCH 03/28] Tidy up logging and rework tests from scratch --- .gitignore | 5 +- SCRATCHPAD.md | 56 +-- pyproject.toml | 16 +- requirements/architecture.md | 5 +- requirements/subtrack_mapping.md | 74 ++++ requirements/tests.md | 130 +++++++ src/ffx/__main__.py | 9 + src/ffx/{ffx.py => cli.py} | 45 +-- src/ffx/database.py | 5 +- src/ffx/ffx_controller.py | 57 ++- src/ffx/helper.py | 7 +- src/ffx/logging_utils.py | 68 ++++ src/ffx/media_descriptor.py | 39 +- src/ffx/media_descriptor_change_set.py | 83 ++--- src/ffx/model/__init__.py | 20 ++ src/ffx/process.py | 7 +- src/ffx/show_descriptor.py | 5 +- src/ffx/tmdb_controller.py | 7 +- src/ffx/track_descriptor.py | 5 +- src/ffx/video_encoder.py | 1 + tests/__init__.py | 1 + tests/integration/__init__.py | 1 + .../integration/subtrack_mapping/__init__.py | 1 + .../subtrack_mapping/test_cli_bundle.py | 283 +++++++++++++++ tests/legacy/__init__.py | 1 + .../legacy}/_basename_combinator_1.py | 0 .../legacy}/basename_combinator.py | 5 +- .../legacy}/basename_combinator_0.py | 0 .../legacy}/basename_combinator_2.py | 0 {src/ffx/test => tests/legacy}/combinator.py | 0 .../legacy}/disposition_combinator_2.py | 5 +- .../legacy}/disposition_combinator_2_0.py | 0 .../legacy}/disposition_combinator_2_1.py | 0 .../legacy}/disposition_combinator_2_2.py | 0 .../legacy}/disposition_combinator_2_3 .py | 0 .../legacy}/disposition_combinator_3.py | 5 +- .../legacy}/disposition_combinator_3_0.py | 0 .../legacy}/disposition_combinator_3_1.py | 0 .../legacy}/disposition_combinator_3_2.py | 0 .../legacy}/disposition_combinator_3_3.py | 0 .../legacy}/disposition_combinator_3_4.py | 0 {src/ffx/test => tests/legacy}/helper.py | 10 +- .../legacy}/indicator_combinator.py | 0 .../test => tests/legacy}/label_combinator.py | 5 +- .../legacy}/label_combinator_0.py | 0 .../legacy}/label_combinator_1.py | 0 .../test => 
tests/legacy}/media_combinator.py | 5 +- .../legacy}/media_combinator_0.py | 0 .../legacy}/media_combinator_1.py | 0 .../legacy}/media_combinator_2.py | 0 .../legacy}/media_combinator_3.py | 0 .../legacy}/media_combinator_4.py | 0 .../legacy}/media_combinator_5.py | 0 .../legacy}/media_combinator_6.py | 0 .../legacy}/media_combinator_7.py | 0 .../legacy}/media_tag_combinator.py | 5 +- .../legacy}/media_tag_combinator_0.py | 0 .../legacy}/media_tag_combinator_1.py | 0 .../legacy}/media_tag_combinator_2.py | 0 .../legacy}/permutation_combinator_2.py | 0 .../legacy}/permutation_combinator_3.py | 0 .../legacy}/release_combinator.py | 0 {src/ffx/test => tests/legacy}/scenario.py | 13 +- {src/ffx/test => tests/legacy}/scenario_1.py | 9 +- {src/ffx/test => tests/legacy}/scenario_2.py | 7 +- {src/ffx/test => tests/legacy}/scenario_4.py | 11 +- .../test => tests/legacy}/show_combinator.py | 0 .../test => tests/legacy}/title_combinator.py | 0 .../legacy}/track_tag_combinator_2.py | 5 +- .../legacy}/track_tag_combinator_2_0.py | 0 .../legacy}/track_tag_combinator_2_1.py | 0 .../legacy}/track_tag_combinator_2_2.py | 0 .../legacy}/track_tag_combinator_2_3.py | 0 .../legacy}/track_tag_combinator_3.py | 5 +- .../legacy}/track_tag_combinator_3_0.py | 0 .../legacy}/track_tag_combinator_3_1.py | 0 .../legacy}/track_tag_combinator_3_2.py | 0 .../legacy}/track_tag_combinator_3_3.py | 0 .../legacy}/track_tag_combinator_3_4.py | 0 .../ffx_tests.py => tests/legacy_runner.py | 24 +- tests/support/__init__.py | 1 + tests/support/ffx_bundle.py | 337 ++++++++++++++++++ tests/unit/__init__.py | 1 + tests/unit/test_logging.py | 86 +++++ 84 files changed, 1283 insertions(+), 187 deletions(-) create mode 100644 requirements/subtrack_mapping.md create mode 100644 requirements/tests.md create mode 100644 src/ffx/__main__.py rename src/ffx/{ffx.py => cli.py} (97%) create mode 100644 src/ffx/logging_utils.py create mode 100644 tests/__init__.py create mode 100644 tests/integration/__init__.py create 
mode 100644 tests/integration/subtrack_mapping/__init__.py create mode 100644 tests/integration/subtrack_mapping/test_cli_bundle.py create mode 100644 tests/legacy/__init__.py rename {src/ffx/test => tests/legacy}/_basename_combinator_1.py (100%) rename {src/ffx/test => tests/legacy}/basename_combinator.py (85%) rename {src/ffx/test => tests/legacy}/basename_combinator_0.py (100%) rename {src/ffx/test => tests/legacy}/basename_combinator_2.py (100%) rename {src/ffx/test => tests/legacy}/combinator.py (100%) rename {src/ffx/test => tests/legacy}/disposition_combinator_2.py (85%) rename {src/ffx/test => tests/legacy}/disposition_combinator_2_0.py (100%) rename {src/ffx/test => tests/legacy}/disposition_combinator_2_1.py (100%) rename {src/ffx/test => tests/legacy}/disposition_combinator_2_2.py (100%) rename {src/ffx/test => tests/legacy}/disposition_combinator_2_3 .py (100%) rename {src/ffx/test => tests/legacy}/disposition_combinator_3.py (84%) rename {src/ffx/test => tests/legacy}/disposition_combinator_3_0.py (100%) rename {src/ffx/test => tests/legacy}/disposition_combinator_3_1.py (100%) rename {src/ffx/test => tests/legacy}/disposition_combinator_3_2.py (100%) rename {src/ffx/test => tests/legacy}/disposition_combinator_3_3.py (100%) rename {src/ffx/test => tests/legacy}/disposition_combinator_3_4.py (100%) rename {src/ffx/test => tests/legacy}/helper.py (97%) rename {src/ffx/test => tests/legacy}/indicator_combinator.py (100%) rename {src/ffx/test => tests/legacy}/label_combinator.py (85%) rename {src/ffx/test => tests/legacy}/label_combinator_0.py (100%) rename {src/ffx/test => tests/legacy}/label_combinator_1.py (100%) rename {src/ffx/test => tests/legacy}/media_combinator.py (84%) rename {src/ffx/test => tests/legacy}/media_combinator_0.py (100%) rename {src/ffx/test => tests/legacy}/media_combinator_1.py (100%) rename {src/ffx/test => tests/legacy}/media_combinator_2.py (100%) rename {src/ffx/test => tests/legacy}/media_combinator_3.py (100%) rename 
{src/ffx/test => tests/legacy}/media_combinator_4.py (100%) rename {src/ffx/test => tests/legacy}/media_combinator_5.py (100%) rename {src/ffx/test => tests/legacy}/media_combinator_6.py (100%) rename {src/ffx/test => tests/legacy}/media_combinator_7.py (100%) rename {src/ffx/test => tests/legacy}/media_tag_combinator.py (84%) rename {src/ffx/test => tests/legacy}/media_tag_combinator_0.py (100%) rename {src/ffx/test => tests/legacy}/media_tag_combinator_1.py (100%) rename {src/ffx/test => tests/legacy}/media_tag_combinator_2.py (100%) rename {src/ffx/test => tests/legacy}/permutation_combinator_2.py (100%) rename {src/ffx/test => tests/legacy}/permutation_combinator_3.py (100%) rename {src/ffx/test => tests/legacy}/release_combinator.py (100%) rename {src/ffx/test => tests/legacy}/scenario.py (88%) rename {src/ffx/test => tests/legacy}/scenario_1.py (95%) rename {src/ffx/test => tests/legacy}/scenario_2.py (96%) rename {src/ffx/test => tests/legacy}/scenario_4.py (96%) rename {src/ffx/test => tests/legacy}/show_combinator.py (100%) rename {src/ffx/test => tests/legacy}/title_combinator.py (100%) rename {src/ffx/test => tests/legacy}/track_tag_combinator_2.py (84%) rename {src/ffx/test => tests/legacy}/track_tag_combinator_2_0.py (100%) rename {src/ffx/test => tests/legacy}/track_tag_combinator_2_1.py (100%) rename {src/ffx/test => tests/legacy}/track_tag_combinator_2_2.py (100%) rename {src/ffx/test => tests/legacy}/track_tag_combinator_2_3.py (100%) rename {src/ffx/test => tests/legacy}/track_tag_combinator_3.py (84%) rename {src/ffx/test => tests/legacy}/track_tag_combinator_3_0.py (100%) rename {src/ffx/test => tests/legacy}/track_tag_combinator_3_1.py (100%) rename {src/ffx/test => tests/legacy}/track_tag_combinator_3_2.py (100%) rename {src/ffx/test => tests/legacy}/track_tag_combinator_3_3.py (100%) rename {src/ffx/test => tests/legacy}/track_tag_combinator_3_4.py (100%) rename src/ffx/ffx_tests.py => tests/legacy_runner.py (81%) create mode 100644 
tests/support/__init__.py create mode 100644 tests/support/ffx_bundle.py create mode 100644 tests/unit/__init__.py create mode 100644 tests/unit/test_logging.py diff --git a/.gitignore b/.gitignore index eb22b5a..913e25a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ -__pycache__ +__pycache__/ +*.py[cod] junk/ .vscode .ipynb_checkpoints/ @@ -12,4 +13,6 @@ bin/conversiontest.py build/ dist/ *.egg-info/ +.venv/ +venv/ .codex diff --git a/SCRATCHPAD.md b/SCRATCHPAD.md index cd16568..5c4d5c2 100644 --- a/SCRATCHPAD.md +++ b/SCRATCHPAD.md @@ -8,6 +8,8 @@ - The biggest near-term wins are in startup cost, repeated subprocess work, repeated database query patterns, and general repo hygiene. - This list is intentionally optimization-oriented rather than bug-oriented. Some items below also improve correctness or maintainability, but they were selected because they can reduce runtime cost, operator friction, or iteration overhead. +- A first modern integration slice now exists under [`tests/integration/subtrack_mapping`](/home/osgw/.local/src/codex/ffx/tests/integration/subtrack_mapping). Remaining test-suite cleanup is now mostly about migrating and shrinking the legacy harness surface under [`tests/legacy`](/home/osgw/.local/src/codex/ffx/tests/legacy). +- FFX logger setup now reuses named handlers, and fallback logger access no longer mutates handlers in ordinary constructors and helpers. ## Focused Snapshot @@ -16,17 +18,15 @@ - Collapse repeated `ffprobe` calls into a single probe result per source file. - Replace `query.count()` plus `first()` patterns with single-query ORM accessors. - Cache or precompile filename pattern regexes instead of scanning every pattern for every file. - - Guard logger handler installation to avoid duplicated handlers and noisy repeated setup. - Highest-leverage repo and workflow optimizations: - - Stop tracking nested `__pycache__` output and other generated artifacts. 
- Consolidate setup and upgrade tooling to reduce overlapping shell-script responsibilities. - - Trim or reorganize the oversized test/combinator surface so it is easier to run, debug, and extend. + - Continue migrating the oversized legacy test/combinator surface into focused modern tests so it is easier to run, debug, and extend. ## Optimization Candidates 1. CLI startup and import cost -- [`src/ffx/ffx.py`](/home/osgw/.local/src/codex/ffx/src/ffx/ffx.py) imports a large portion of the application at module import time, even for cheap commands such as `version`, `help`, `setup_dependencies`, and `upgrade`. +- [`src/ffx/cli.py`](/home/osgw/.local/src/codex/ffx/src/ffx/cli.py) imports a large portion of the application at module import time, even for cheap commands such as `version`, `help`, `setup_dependencies`, and `upgrade`. - Optimization: - Move heavy imports into the commands that actually need them. - Keep the CLI root importable with only core stdlib and Click dependencies. @@ -80,28 +80,8 @@ - Better failure diagnosis. - Cleaner process management semantics. -7. Logger handlers can be added repeatedly -- [`src/ffx/ffx.py`](/home/osgw/.local/src/codex/ffx/src/ffx/ffx.py) adds file and console handlers each invocation. -- Several helper classes install `NullHandler` instances ad hoc, for example [`src/ffx/process.py`](/home/osgw/.local/src/codex/ffx/src/ffx/process.py), [`src/ffx/tmdb_controller.py`](/home/osgw/.local/src/codex/ffx/src/ffx/tmdb_controller.py), [`src/ffx/media_descriptor.py`](/home/osgw/.local/src/codex/ffx/src/ffx/media_descriptor.py), and [`src/ffx/helper.py`](/home/osgw/.local/src/codex/ffx/src/ffx/helper.py). -- Optimization: - - Guard handler installation so each logger is configured once. - - Prefer module-level logger setup patterns over per-instance handler mutation. -- Expected value: - - Less duplicate logging. - - Lower confusion in long-running or repeatedly invoked contexts. - -8. 
Repo-local hygiene for generated Python artifacts -- The repo currently contains nested compiled artifacts under `src/ffx/__pycache__/...`. -- `.gitignore` only ignores `__pycache__` at the repo root, not recursive `__pycache__/`. -- Optimization: - - Ignore `__pycache__/` recursively and clean tracked generated files. - - Consider ignoring local virtualenv or other generated tool directories if they may appear in-repo later. -- Expected value: - - Cleaner diffs and scans. - - Lower repo noise. - -9. Tooling overlap and naming drift -- There are now multiple prep-related scripts: [`tools/prepare.sh`](/home/osgw/.local/src/codex/ffx/tools/prepare.sh), [`tools/setup.sh`](/home/osgw/.local/src/codex/ffx/tools/setup.sh), and the legacy-like [`tools/ffx_update.sh`](/home/osgw/.local/src/codex/ffx/tools/ffx_update.sh). +7. Tooling overlap and naming drift +- There are still overlapping prep and setup entrypoints across [`tools/prepare.sh`](/home/osgw/.local/src/codex/ffx/tools/prepare.sh), [`tools/setup.sh`](/home/osgw/.local/src/codex/ffx/tools/setup.sh), and newer CLI maintenance commands. - Optimization: - Decide which scripts remain canonical. - Replace or remove legacy wrappers once equivalent CLI commands exist. @@ -110,7 +90,7 @@ - Less operator confusion. - Fewer duplicated procedures to maintain. -10. Placeholder UI surfaces should either ship or disappear +8. Placeholder UI surfaces should either ship or disappear - [`src/ffx/help_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/help_screen.py) and [`src/ffx/settings_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/settings_screen.py) are placeholders. - Optimization: - Either remove them from the active UI surface or complete them. @@ -119,7 +99,7 @@ - Leaner interface. - Lower UX ambiguity. -11. Large Textual screens repeat configuration and controller loading +9. 
Large Textual screens repeat configuration and controller loading - Screens such as [`src/ffx/media_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/media_details_screen.py), [`src/ffx/pattern_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/pattern_details_screen.py), and [`src/ffx/show_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/show_details_screen.py) repeat setup patterns and local metadata filtering extraction. - Optimization: - Extract a shared screen base or helper for common config/controller/bootstrap logic. @@ -128,7 +108,7 @@ - Lower maintenance overhead. - Easier UI iteration. -12. Several helper functions are unfinished or dead-weight +10. Several helper functions are unfinished or dead-weight - [`src/ffx/helper.py`](/home/osgw/.local/src/codex/ffx/src/ffx/helper.py) contains `permutateList(...): pass`. - There are many combinator and conversion placeholders across tests and migrations. - Optimization: @@ -138,17 +118,18 @@ - Smaller mental model. - Less time spent re-evaluating inactive paths. -13. Test suite shape is expensive to understand and likely expensive to run -- The project has a large matrix of combinator files under [`src/ffx/test`](/home/osgw/.local/src/codex/ffx/src/ffx/test), several placeholder `pass` implementations, and at least one suspicious filename with an embedded space: [`src/ffx/test/disposition_combinator_2_3 .py`](/home/osgw/.local/src/codex/ffx/src/ffx/test/disposition_combinator_2_3 .py). +11. Test suite shape is expensive to understand and likely expensive to run +- The project still carries a large legacy matrix of combinator files under [`tests/legacy`](/home/osgw/.local/src/codex/ffx/tests/legacy), several placeholder `pass` implementations, and at least one suspicious filename with an embedded space: [`tests/legacy/disposition_combinator_2_3 .py`](/home/osgw/.local/src/codex/ffx/tests/legacy/disposition_combinator_2_3 .py). 
+- A first focused replacement slice now exists in [`tests/integration/subtrack_mapping/test_cli_bundle.py`](/home/osgw/.local/src/codex/ffx/tests/integration/subtrack_mapping/test_cli_bundle.py), so the remaining work is migration and consolidation rather than creating the modern test shape from scratch. - Optimization: - - Consolidate combinator families. - - Add a lighter smoke-test path. + - Continue replacing broad combinator matrices with focused parametrized integration and unit tests. + - Retire the bespoke legacy discovery and runner path once equivalent coverage exists. - Normalize file naming and test discovery conventions. - Expected value: - Faster contributor onboarding. - Easier CI adoption later. -14. Process resource limiting semantics could be clearer +12. Process resource limiting semantics could be clearer - [`src/ffx/process.py`](/home/osgw/.local/src/codex/ffx/src/ffx/process.py) prepends `nice` and `cpulimit` directly when values are set. - Optimization: - Validate and document effective behavior for combined `nice` + `cpulimit`. @@ -157,7 +138,7 @@ - Fewer surprises in production-like runs. - Easier support for user-reported performance behavior. -15. Import-time dependency coupling makes maintenance commands brittle +13. Import-time dependency coupling makes maintenance commands brittle - Even after recent CLI maintenance additions, the top-level CLI module still imports most application modules before Click dispatch. - Optimization: - Push imports for ORM, Textual, TMDB, ffmpeg helpers, and descriptors behind the commands that actually need them. @@ -165,7 +146,7 @@ - Maintenance commands such as setup and upgrade stay usable when optional runtime dependencies are broken. - Better separation between media runtime code and maintenance tooling. -16. Regex and string utility cleanup +14. 
Regex and string utility cleanup - [`src/ffx/helper.py`](/home/osgw/.local/src/codex/ffx/src/ffx/helper.py) still emits a `SyntaxWarning` for `RICH_COLOR_PATTERN`. - Optimization: - Convert regex literals to raw strings where appropriate. @@ -174,7 +155,7 @@ - Cleaner runtime output. - Less warning noise during dry-run maintenance commands. -17. Database startup always runs schema creation and version checks +15. Database startup always runs schema creation and version checks - [`src/ffx/database.py`](/home/osgw/.local/src/codex/ffx/src/ffx/database.py) runs `Base.metadata.create_all(...)` and version checks every time a DB-backed context is created. - Optimization: - Measure startup cost and consider separating bootstrapping from ordinary command execution. @@ -198,7 +179,6 @@ 1. Triage the list into quick wins, medium refactors, and long-horizon cleanup. 2. Tackle the cheapest high-impact items first: - - recursive `__pycache__/` ignore and cleanup, - regex raw-string warning cleanup, - `count()` plus `first()` query cleanup, - single-call `ffprobe` refactor. 
diff --git a/pyproject.toml b/pyproject.toml index d77a077..224d73f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,6 +27,11 @@ Homepage = "https://gitea.maveno.de/Javanaut/ffx" Repository = "https://gitea.maveno.de/Javanaut/ffx.git" Issues = "https://gitea.maveno.de/Javanaut/ffx/issues" +[project.optional-dependencies] +test = [ + "pytest", +] + [build-system] requires = [ "setuptools", @@ -35,4 +40,13 @@ requires = [ build-backend = "setuptools.build_meta" [project.scripts] -ffx = "ffx.ffx:ffx" +ffx = "ffx.cli:ffx" + +[tool.pytest.ini_options] +testpaths = ["tests"] +python_files = ["test_*.py"] +addopts = "-ra" +markers = [ + "integration: exercises the FFX bundle with real ffmpeg/ffprobe processes", + "subtrack_mapping: covers requirements/subtrack_mapping.md", +] diff --git a/requirements/architecture.md b/requirements/architecture.md index a31476c..4e7f8e9 100644 --- a/requirements/architecture.md +++ b/requirements/architecture.md @@ -32,7 +32,7 @@ ## High-Level Building Blocks - Frontend, CLI, API, or worker: - - A Click-based CLI in [`src/ffx/ffx.py`](/home/osgw/.local/src/codex/ffx/src/ffx/ffx.py). + - A Click-based CLI in [`src/ffx/cli.py`](/home/osgw/.local/src/codex/ffx/src/ffx/cli.py), exposed as the `ffx` command and via `python -m ffx`. - A Textual terminal UI rooted in [`src/ffx/ffx_app.py`](/home/osgw/.local/src/codex/ffx/src/ffx/ffx_app.py) with screens for shows, patterns, file inspection, tracks, tags, and shifted seasons. - Core business logic: - Descriptor objects model media files, shows, and tracks. @@ -50,7 +50,7 @@ - Key entities or records: - `Show`: canonical TV show metadata plus digit-formatting rules for generated filenames. - `Pattern`: regex rule tying filenames to one show and one target media schema. - - `Track` and `TrackTag`: persisted target stream layout, codec, dispositions, audio layout, and stream-level tags. 
+ - `Track` and `TrackTag`: persisted target stream records, codec, dispositions, audio layout, and stream-level tags. Detailed source-to-target mapping rules live in `requirements/subtrack_mapping.md`. - `MediaTag`: persisted container-level metadata for a pattern. - `ShiftedSeason`: mapping from source numbering ranges to adjusted season and episode numbers. - `Property`: internal key-value storage currently used for database versioning. @@ -63,7 +63,6 @@ - Only supported media-file extensions are accepted for conversion. - Stored database version must match the runtime-required version. - A normalized descriptor may have at most one default and one forced stream per relevant track type. - - Stored target tracks must refer to valid source tracks of matching types. - Shifted-season ranges are intended not to overlap for the same show and season. - TMDB lookups require a show ID and season and episode numbers. - Error-handling approach: diff --git a/requirements/subtrack_mapping.md b/requirements/subtrack_mapping.md new file mode 100644 index 0000000..9339aab --- /dev/null +++ b/requirements/subtrack_mapping.md @@ -0,0 +1,74 @@ +# Subtrack Mapping + +This file defines the behavioral contract for mapping input subtracks to output +subtracks during conversion. + +Primary source: actual tool code in `src/ffx/`. +Secondary source: `tests/legacy/`, used only to clarify intent and reveal gaps. + +## Scope + +- Ensuring each target subtrack is created from the corresponding source-subtrack information, including stream-level metadata. +- Mapping input streams to output streams during conversion. +- Using persisted pattern-track definitions from the database as the target schema. +- Allowing omission and reordering of retained tracks. +- Keeping stream-level metadata attached to the correct source-derived logical track after remapping. +- Normalizing target output into ordered track groups: video, audio, subtitle, then special types such as fonts or images. 
+ +## Terms + +- `source_index`: identity of the originating input stream from ffprobe or an imported source descriptor. +- `index`: final output-track order across all retained tracks. +- `sub_index`: per-type position within the retained tracks of one type, for example audio stream `0` or subtitle stream `1`. +- `target schema`: stored or constructed output-track definition that decides which tracks are kept, omitted, reordered, and rewritten. +- `separate source file`: additional file bound to one target track slot whose media payload replaces the regular source payload for that slot. + +## Rules + +- `SUBTRACK_MAPPING-0001`: The system shall represent source-stream identity separately from output order. `source_index`, `index`, and `sub_index` are distinct concepts and shall not be collapsed into one field. +- `SUBTRACK_MAPPING-0002`: The system shall derive `source_index` for probed tracks from the original ffprobe stream index and preserve that identity through conversion planning. +- `SUBTRACK_MAPPING-0003`: Pattern-backed track definitions stored in the database shall persist both target output order and originating source-stream identity. +- `SUBTRACK_MAPPING-0004`: When a filename matches a pattern, the pattern target schema shall be the source of truth for which source tracks are retained, which are omitted, and in what order retained tracks appear in the output. +- `SUBTRACK_MAPPING-0005`: A target track may refer only to an existing source track of the same type. Conversion shall fail fast when a target track refers to a nonexistent source stream or a source stream of a different type. +- `SUBTRACK_MAPPING-0006`: The ffmpeg mapping phase shall be generated from target output order while resolving each retained output track back to its originating source stream via `source_index`. +- `SUBTRACK_MAPPING-0007`: Reordering and omission shall preserve logical track identity. 
Stream-level metadata, titles, languages, and disposition decisions shall stay attached to the correct source-derived logical track after mapping. +- `SUBTRACK_MAPPING-0008`: The system shall support one-off CLI stream-order overrides without requiring prior database edits. +- `SUBTRACK_MAPPING-0009`: Operator-facing inspection and editing surfaces shall expose enough source-versus-target information to let a user reason about subtrack mapping decisions. +- `SUBTRACK_MAPPING-0010`: Test coverage for subtrack mapping shall assert source-derived identity, omission, and output order explicitly. Final track counts or final type sequences alone are insufficient proof of correct mapping. +- `SUBTRACK_MAPPING-0011`: Retained target tracks shall appear in ordered groups: video track or tracks first, then audio tracks, then subtitle tracks, then special types such as fonts or images. Within each group, the target schema shall define the order. +- `SUBTRACK_MAPPING-0012`: Track omission is valid when required by output compatibility, when needed to normalize source tracks into the required target group order and schema, or when explicitly requested by database rules or CLI options. +- `SUBTRACK_MAPPING-0013`: If source tracks do not already comply with the required target group order, conversion shall reorder retained tracks to match the target ordering contract without losing source-track identity or stream-level metadata lineage. + +## Separate Additional Source Files + +- `SUBTRACK_MAPPING-0014`: A separate source file may substitute the media payload of one target subtrack without changing that target track's intended output position. +- `SUBTRACK_MAPPING-0015`: When a separate source file is used, the target track shall remain bound to the corresponding logical source track for mapping, validation, and metadata lineage. 
+- `SUBTRACK_MAPPING-0016`: Metadata for a substituted target track shall be merged from the regular source track and the separate source file when available. +- `SUBTRACK_MAPPING-0017`: If the separate source file provides a metadata field that is also present on the regular source track, the separate source file value shall win in the target output. +- `SUBTRACK_MAPPING-0018`: If a metadata field is absent from the separate source file, the system shall fall back to the corresponding metadata from the regular source track or target schema rewrite rules. + +## Acceptance + +- Given a source media descriptor and a pattern-backed target schema, the planned output tracks can be listed in final output order and each retained track can still be traced to one originating source stream. +- Planned output order follows grouped target order: video, audio, subtitle, then special types. +- Tracks not referenced by the target schema are omitted from output mapping. +- Tracks may also be omitted when they are incompatible with the chosen output format or explicitly excluded by database or CLI rules. +- Two retained target tracks never originate from the same source stream unless duplication is implemented explicitly as a separate feature. +- If target-track metadata is rewritten after reordering, it is written onto the correct source-derived logical track rather than the track that merely occupies the same final output position. +- Invalid target-to-source references fail deterministically before the conversion job is launched. +- If a separate source file substitutes one target track, that track keeps its target slot and ordering while metadata is merged with separate-file values taking precedence when both sides provide the same field. +- A test proving subtrack mapping must assert at least one of: exact `source_index` to output-order mapping, omission of named source tracks, or preservation of per-track metadata after reorder. 
+ +## Test Notes + +- `tests/legacy/scenario.py` names pattern behavior as `Filter/Reorder Tracks`. +- `tests/legacy/scenario_4.py` is the strongest end-to-end signal because it runs DB-backed conversion and reapplies source indices before assertion. +- `tests/legacy/track_tag_combinator_2_0.py` and `tests/legacy/track_tag_combinator_3_4.py` sort result tracks by `source_index` before checking tags, which matches the intended identity model. +- Legacy permutation combinators define permutations but their assertion functions are stubs. +- Some legacy scenarios produce `AP` and `SP` selectors but do not execute them. + +## Risks + +- `src/ffx/media_descriptor.py` contains an explicit `rearrangeTrackDescriptors()` path whose current implementation appears defective and under-tested. +- Separate-source-file metadata precedence is only partly expressed in current implementation paths and should be covered directly in the rewritten test suite. +- Production code expresses the mapping contract more clearly than the legacy harness, so a rewrite should add direct logic-level tests for mapping and reorder planning. diff --git a/requirements/tests.md b/requirements/tests.md new file mode 100644 index 0000000..933f28e --- /dev/null +++ b/requirements/tests.md @@ -0,0 +1,130 @@ +# Test Rewrite + +This file captures the structure executed by `tests/legacy_runner.py` today and +defines the target shape for a complete rewrite. + +Detailed product rules for source-to-target subtrack mapping live in +`requirements/subtrack_mapping.md`. This file describes only how tests cover +that area. 
+ +## Current Harness + +- Entrypoint: `python tests/legacy_runner.py run` +- Runner style: custom Click CLI, not `pytest` or `unittest` +- Commands: + - `run`: discover scenario files, instantiate each scenario, run yielded jobs + - `dupe`: helper command that creates duplicate media fixtures; not part of the test run +- Filters: `--scenario`, `--variant`, `--limit` +- Shared context: + - builds one mutable dict for the whole run + - installs loggers and writes `ffx_test_report.log` + - creates `ConfigurationController` eagerly + - tracks only passed and failed counters +- Discovery: + - scenario files: `tests/legacy/scenario_*.py` + - combinators: `glob + importlib + inspect` by filename convention + - ordering: implicit glob order, no explicit sorting +- Skip behavior: + - Scenario 4 is skipped when `TMDB_API_KEY` is missing + - only `TMDB_API_KEY_NOT_PRESENT_EXCEPTION` is caught at scenario construction time + +## Current Scenarios + +- `1`: `tests/legacy/scenario_1.py` + - focus: basename generation without pattern lookup or TMDB + - inputs per job: `1` + - jobs: `140` + - expected failures: `0` + - execution: build one synthetic source file, run `python -m ffx convert`, assert filename selectors only + - selectors executed: `B`, `L`, `I` + - selectors defined but not executed: `S`, `R` +- `2`: `tests/legacy/scenario_2.py` + - focus: conversion matrix over media layouts, dispositions, tags, and permutations + - inputs per job: `1` + - jobs: `8193` + - expected failures: `3267` + - execution: build one synthetic source file, run `python -m ffx convert`, probe result with `FileProperties`, assert track layout and selected audio and subtitle metadata + - selectors executed: `M`, `AD`, `AT`, `SD`, `ST` + - selectors defined but not executed: `MT`, `AP`, `SP`, `J` +- `4`: `tests/legacy/scenario_4.py` + - focus: pattern-driven batch conversion with SQLite state and live TMDB naming + - inputs per job: `6` + - jobs: `768` + - expected failures: `336` + - execution: 
build six synthetic preset files, recreate temp SQLite DB, insert show and pattern, run one batch convert command, query TMDB during assertions + - selectors executed: `M`, `AD`, `AT`, `SD`, `ST` + - selectors defined but not executed: `MT`, `AP`, `SP`, `J` + - notes: + - uses `MediaCombinator6` only + - issues live HTTP requests through `TmdbController` with no request cache + +## Current Combinator Families + +- scenario files discovered: `3` +- basename combinators discovered: `2` +- media combinators discovered: `8` +- media tag combinators discovered: `3` +- disposition combinator 2 variants: `4` +- disposition combinator 3 variants: `5` +- track tag combinator 2 variants: `4` +- track tag combinator 3 variants: `5` +- indicator variants: `7` +- label variants: `2` +- show variants: `3` +- release variants: `3` +- permutation 2 variants: `2` +- permutation 3 variants: `3` + +## Current Totals + +- full run without TMDB: `8333` +- full run with TMDB: `9101` +- Scenario 4 generated source files: `4608` +- Scenario 4 live TMDB episode queries: `4608` + +## Current Behavior Areas + +- output basename rules for label, season and episode indicator, show name, and release suffix combinations +- track layout normalization across the eight media combinator shapes from `VA` through `VAASSS` +- two-track and three-track disposition edge cases, including intentional failure cases +- two-track and three-track track-tag preservation checks, including checks that sort results by source identity +- container-level media tag handling +- pattern-backed conversion against a temporary SQLite database +- TMDB-assisted episode naming for batch conversion + +## Structural Findings + +- The suite is process-heavy: most jobs run `ffmpeg` to generate a fixture and then spawn the FFX CLI as a subprocess. +- The suite is integration-first and has almost no isolated unit-level coverage for pure logic. 
+- The base `Combinator` class is a placeholder and is not the real abstraction boundary used by the suite. +- Many combinator methods are placeholders: there are `25` `pass` statements across the current test modules. +- Several assertion families are never executed because scenario selector dispatch is incomplete. +- Scenario comments mention a Scenario 3, but no `scenario_3.py` exists. +- `tests/legacy/_basename_combinator_1.py` is effectively orphaned because discovery only matches `basename_combinator_*.py`. +- `tests/legacy/disposition_combinator_2_3 .py` contains an embedded space in the filename and is still part of discovery. +- Expected failures are validated only as subprocess return-code matches, not as specific error types or messages. +- The current suite depends on `ffmpeg`, `ffprobe`, SQLite, the local Python environment, and for Scenario 4 a live TMDB API key plus network access. + +## Rewrite Target + +- Replace the custom Click harness with a standard test runner, preferably `pytest`. +- Split the suite into explicit layers: unit, integration, and optional external-system tests. +- Keep unit tests as the default path and make them runnable without `ffmpeg`, `ffprobe`, TMDB, or a user config directory. +- Model discovery explicitly in code instead of relying on glob-plus-reflection naming conventions. +- Convert the current Cartesian-product combinators into readable parametrized cases grouped by behavior area. +- Preserve the current behavior areas, but represent them with targeted cases instead of thousands of opaque variant IDs. +- Make every assertion family explicit and executable; there must be no selector that is produced but never consumed. +- Replace live TMDB access with fixtures or mocks in normal runs; any live-contract test must be opt-in. +- Replace ad hoc subprocess return-code checks with assertions on typed exceptions, stderr content, or structured outputs. 
+- Provide small reusable media fixtures or fixture builders so only a narrow integration slice needs `ffmpeg`-generated media. +- Make database tests self-contained and fast through temporary databases and direct controller-level assertions. +- Make ordering, naming, and selection deterministic so a contributor can predict exactly what will run. +- Expose a small smoke suite for quick local runs and CI, plus a separately marked slower integration suite. +- Prefer domain-oriented test modules over combinator-family modules: basename, pattern matching, metadata rewrite, track ordering, TMDB naming, CLI smoke, and failure handling. + +## Rewrite Acceptance + +- A default local test run finishes quickly and without network access. +- A contributor can identify which behavior a failing test covers without decoding variant strings like `VAASSS-A:D10-S:T001`. +- All current intended failure behaviors remain covered, but each one is asserted directly and readably. +- The rewritten suite can be adopted by CI without requiring live TMDB credentials. diff --git a/src/ffx/__main__.py b/src/ffx/__main__.py new file mode 100644 index 0000000..9d2ccea --- /dev/null +++ b/src/ffx/__main__.py @@ -0,0 +1,9 @@ +from .cli import ffx + + +def main(): + ffx() + + +if __name__ == "__main__": + main() diff --git a/src/ffx/ffx.py b/src/ffx/cli.py similarity index 97% rename from src/ffx/ffx.py rename to src/ffx/cli.py index fe6b56f..dfa559e 100755 --- a/src/ffx/ffx.py +++ b/src/ffx/cli.py @@ -1,6 +1,14 @@ #! /usr/bin/python3 -import os, click, time, logging, shutil, subprocess +import os, sys, click, time, shutil, subprocess + +# Allow direct execution via `python src/ffx/cli.py` by preferring the package +# root on sys.path. 
+if __package__ in (None, ''): + script_dir = os.path.dirname(__file__) + package_root = os.path.dirname(os.path.dirname(__file__)) + sys.path = [p for p in sys.path if os.path.abspath(p) != os.path.abspath(script_dir)] + sys.path.insert(0, package_root) from ffx.configuration_controller import ConfigurationController @@ -37,6 +45,7 @@ from ffx.filter.deinterlace_filter import DeinterlaceFilter from ffx.constants import VERSION from ffx.shifted_season_controller import ShiftedSeasonController +from ffx.logging_utils import configure_ffx_logger @click.group() @@ -70,23 +79,11 @@ def ffx(ctx, database_file, verbose, dry_run): fileLogVerbosity = max(40 - verbose * 10, 10) consoleLogVerbosity = max(20 - verbose * 10, 10) - ctx.obj['logger'] = logging.getLogger('FFX') - ctx.obj['logger'].setLevel(logging.DEBUG) - - ffxFileHandler = logging.FileHandler(ctx.obj['config'].getLogFilePath()) - ffxFileHandler.setLevel(fileLogVerbosity) - ffxConsoleHandler = logging.StreamHandler() - ffxConsoleHandler.setLevel(consoleLogVerbosity) - - fileFormatter = logging.Formatter( - '%(asctime)s - %(name)s - %(levelname)s - %(message)s') - ffxFileHandler.setFormatter(fileFormatter) - consoleFormatter = logging.Formatter( - '%(message)s') - ffxConsoleHandler.setFormatter(consoleFormatter) - - ctx.obj['logger'].addHandler(ffxConsoleHandler) - ctx.obj['logger'].addHandler(ffxFileHandler) + ctx.obj['logger'] = configure_ffx_logger( + ctx.obj['config'].getLogFilePath(), + fileLogVerbosity, + consoleLogVerbosity, + ) # Define a subcommand @@ -392,7 +389,7 @@ def checkUniqueDispositions(context, mediaDescriptor: MediaDescriptor): @click.option('-l', '--label', type=str, default='', help='Label to be used as filename prefix') -@click.option('-v', '--video-encoder', type=str, default=FfxController.DEFAULT_VIDEO_ENCODER, help=f"Target video encoder (vp9, av1 or h264)", show_default=True) +@click.option('-v', '--video-encoder', type=str, default=FfxController.DEFAULT_VIDEO_ENCODER, help=f"Target 
video encoder (vp9, av1, h264 or copy)", show_default=True) @click.option('-q', '--quality', type=str, default="", help=f"Quality settings to be used with VP9/H264 encoder") @click.option('-p', '--preset', type=str, default="", help=f"Quality preset to be used with AV1 encoder") @@ -516,9 +513,13 @@ def convert(ctx, context['video_encoder'] = VideoEncoder.fromLabel(video_encoder) - #HINT: quick and dirty override for h264, todo improve - targetFormat = '' if context['video_encoder'] == VideoEncoder.H264 else FfxController.DEFAULT_FILE_FORMAT - targetExtension = 'mkv' if context['video_encoder'] == VideoEncoder.H264 else FfxController.DEFAULT_FILE_EXTENSION + # HINT: quick and dirty override for h264, todo improve + if context['video_encoder'] in (VideoEncoder.H264, VideoEncoder.COPY): + targetFormat = '' + targetExtension = 'mkv' + else: + targetFormat = FfxController.DEFAULT_FILE_FORMAT + targetExtension = FfxController.DEFAULT_FILE_EXTENSION context['use_tmdb'] = not no_tmdb context['use_pattern'] = not no_pattern diff --git a/src/ffx/database.py b/src/ffx/database.py index 5c46034..239817d 100644 --- a/src/ffx/database.py +++ b/src/ffx/database.py @@ -3,6 +3,9 @@ import os, click from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker +# Import the full model package so SQLAlchemy registers every mapped class +# before metadata creation and the first ORM query. 
+import ffx.model from ffx.model.show import Base from ffx.model.property import Property @@ -99,4 +102,4 @@ def setDatabaseVersion(databaseContext, databaseVersion: int): except Exception as ex: raise click.ClickException(f"setDatabaseVersion(): {repr(ex)}") finally: - s.close() \ No newline at end of file + s.close() diff --git a/src/ffx/ffx_controller.py b/src/ffx/ffx_controller.py index fb2f1b9..131809e 100644 --- a/src/ffx/ffx_controller.py +++ b/src/ffx/ffx_controller.py @@ -99,6 +99,37 @@ class FfxController(): def generateVideoCopyTokens(self, subIndex): return [f"-c:v:{int(subIndex)}", 'copy'] + + def generateAudioCopyTokens(self, subIndex): + return [f"-c:a:{int(subIndex)}", 'copy'] + + def generateSubtitleCopyTokens(self, subIndex): + return [f"-c:s:{int(subIndex)}", 'copy'] + + def generateAttachmentCopyTokens(self, subIndex): + return [f"-c:t:{int(subIndex)}", 'copy'] + + def generateCopyTokens(self): + copyTokens = [] + + for trackDescriptor in self.__targetMediaDescriptor.getTrackDescriptors(trackType=TrackType.VIDEO): + copyTokens += self.generateVideoCopyTokens(trackDescriptor.getSubIndex()) + + for trackDescriptor in self.__targetMediaDescriptor.getTrackDescriptors(trackType=TrackType.AUDIO): + copyTokens += self.generateAudioCopyTokens(trackDescriptor.getSubIndex()) + + for trackDescriptor in self.__targetMediaDescriptor.getTrackDescriptors(trackType=TrackType.SUBTITLE): + copyTokens += self.generateSubtitleCopyTokens(trackDescriptor.getSubIndex()) + + attachmentDescriptors = ( + self.__sourceMediaDescriptor.getTrackDescriptors(trackType=TrackType.ATTACHMENT) + if self.__sourceMediaDescriptor is not None + else self.__targetMediaDescriptor.getTrackDescriptors(trackType=TrackType.ATTACHMENT) + ) + for trackDescriptor in attachmentDescriptors: + copyTokens += self.generateAttachmentCopyTokens(trackDescriptor.getSubIndex()) + + return copyTokens def generateCropTokens(self): @@ -204,7 +235,7 @@ class FfxController(): if qualityFilters and (quality 
:= qualityFilters[0]['parameters']['quality']): self.__logger.info(f"Setting quality {quality} from command line parameter") - elif (quality := currentPattern.quality): + elif currentPattern is not None and (quality := currentPattern.quality): self.__logger.info(f"Setting quality {quality} from pattern default") else: quality = (QualityFilter.DEFAULT_H264_QUALITY @@ -238,6 +269,30 @@ class FfxController(): commandTokens = FfxController.COMMAND_TOKENS + ['-i', sourcePath] + if videoEncoder == VideoEncoder.COPY: + + commandSequence = (commandTokens + + self.__targetMediaDescriptor.getImportFileTokens() + + self.__targetMediaDescriptor.getInputMappingTokens(sourceMediaDescriptor = self.__sourceMediaDescriptor) + + self.__mdcs.generateDispositionTokens()) + + commandSequence += self.__mdcs.generateMetadataTokens() + commandSequence += self.generateCopyTokens() + + if self.__context['perform_cut']: + commandSequence += self.generateCropTokens() + + commandSequence += self.generateOutputTokens(targetPath, + targetFormat) + + self.__logger.debug("FfxController.runJob(): Running command sequence") + + if not self.__context['dry_run']: + out, err, rc = executeProcess(commandSequence, context=self.__context) + if rc: + raise click.ClickException(f"Command resulted in error: rc={rc} error={err}") + return + if videoEncoder == VideoEncoder.AV1: commandSequence = (commandTokens diff --git a/src/ffx/helper.py b/src/ffx/helper.py index e175bbb..4c40292 100644 --- a/src/ffx/helper.py +++ b/src/ffx/helper.py @@ -1,8 +1,9 @@ -import re, logging +import re from jinja2 import Environment, Undefined from .constants import DEFAULT_OUTPUT_FILENAME_TEMPLATE from .configuration_controller import ConfigurationController +from .logging_utils import get_ffx_logger class EmptyStringUndefined(Undefined): @@ -192,8 +193,7 @@ def getEpisodeFileBasename(showName, if context is not None and 'logger' in context.keys(): logger = context['logger'] else: - logger = logging.getLogger('FFX') - 
logger.addHandler(logging.NullHandler()) + logger = get_ffx_logger() indexSeparator = ' ' if indexSeasonDigits or indexEpisodeDigits else '' @@ -236,4 +236,3 @@ def removeRichColor(text: str): return text else: return str(richColorMatch.group(1)) - diff --git a/src/ffx/logging_utils.py b/src/ffx/logging_utils.py new file mode 100644 index 0000000..1e27601 --- /dev/null +++ b/src/ffx/logging_utils.py @@ -0,0 +1,68 @@ +import logging +import os + + +FFX_LOGGER_NAME = "FFX" +CONSOLE_HANDLER_NAME = "ffx-console" +FILE_HANDLER_NAME = "ffx-file" + + +def get_ffx_logger(name: str = FFX_LOGGER_NAME) -> logging.Logger: + logger = logging.getLogger(name) + logger.setLevel(logging.DEBUG) + + if not logger.handlers: + logger.addHandler(logging.NullHandler()) + + return logger + + +def configure_ffx_logger( + log_file_path: str, + file_level: int, + console_level: int, + name: str = FFX_LOGGER_NAME, +) -> logging.Logger: + logger = get_ffx_logger(name) + logger.propagate = False + + for handler in list(logger.handlers): + if isinstance(handler, logging.NullHandler): + logger.removeHandler(handler) + + console_handler = next( + (handler for handler in logger.handlers if handler.get_name() == CONSOLE_HANDLER_NAME), + None, + ) + if console_handler is None: + console_handler = logging.StreamHandler() + console_handler.set_name(CONSOLE_HANDLER_NAME) + logger.addHandler(console_handler) + + console_handler.setLevel(console_level) + console_handler.setFormatter(logging.Formatter("%(message)s")) + + normalized_log_path = os.path.abspath(log_file_path) + file_handler = next( + (handler for handler in logger.handlers if handler.get_name() == FILE_HANDLER_NAME), + None, + ) + if ( + file_handler is not None + and os.path.abspath(file_handler.baseFilename) != normalized_log_path + ): + logger.removeHandler(file_handler) + file_handler.close() + file_handler = None + + if file_handler is None: + file_handler = logging.FileHandler(normalized_log_path) + 
file_handler.set_name(FILE_HANDLER_NAME) + logger.addHandler(file_handler) + + file_handler.setLevel(file_level) + file_handler.setFormatter( + logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") + ) + + return logger diff --git a/src/ffx/media_descriptor.py b/src/ffx/media_descriptor.py index 5ff74a2..c0db35d 100644 --- a/src/ffx/media_descriptor.py +++ b/src/ffx/media_descriptor.py @@ -1,4 +1,4 @@ -import os, re, click, logging +import os, re, click from typing import List, Self @@ -9,6 +9,7 @@ from ffx.track_disposition import TrackDisposition from ffx.track_codec import TrackCodec from ffx.track_descriptor import TrackDescriptor +from ffx.logging_utils import get_ffx_logger class MediaDescriptor: @@ -46,8 +47,7 @@ class MediaDescriptor: self.__logger = self.__context['logger'] else: self.__context = {} - self.__logger = logging.getLogger('FFX') - self.__logger.addHandler(logging.NullHandler()) + self.__logger = get_ffx_logger() if MediaDescriptor.TAGS_KEY in kwargs.keys(): if type(kwargs[MediaDescriptor.TAGS_KEY]) is not dict: @@ -207,7 +207,7 @@ class MediaDescriptor: def rearrangeTrackDescriptors(self, newOrder: List[int]): if len(newOrder) != len(self.__trackDescriptors): raise ValueError('Length of list with reordered indices does not match number of track descriptors') - reorderedTrackDescriptors = {} + reorderedTrackDescriptors = [] for oldIndex in newOrder: reorderedTrackDescriptors.append(self.__trackDescriptors[oldIndex]) self.__trackDescriptors = reorderedTrackDescriptors @@ -362,6 +362,14 @@ class MediaDescriptor: inputMappingTokens = [] sortedTrackDescriptors = sorted(self.__trackDescriptors, key=lambda d: d.getIndex()) + sourceTrackDescriptorsByIndex = { + td.getIndex(): td + for td in ( + sourceMediaDescriptor.getTrackDescriptors() + if sourceMediaDescriptor is not None + else sortedTrackDescriptors + ) + } # raise click.ClickException(' '.join([f"\nindex={td.getIndex()} subIndex={td.getSubIndex()} 
srcIndex={td.getSourceIndex()} type={td.getType().label()}" for td in self.__trackDescriptors])) @@ -373,8 +381,12 @@ class MediaDescriptor: #HINT: Attached thumbnails are not supported by .webm container format if td.getCodec() != TrackCodec.PNG: - stdi = sortedTrackDescriptors[td.getSourceIndex()].getIndex() - stdsi = sortedTrackDescriptors[td.getSourceIndex()].getSubIndex() + sourceTrackDescriptor = sourceTrackDescriptorsByIndex.get(td.getSourceIndex()) + if sourceTrackDescriptor is None: + raise ValueError(f"No source track descriptor found for source index {td.getSourceIndex()}") + + stdi = sourceTrackDescriptor.getIndex() + stdsi = sourceTrackDescriptor.getSubIndex() trackType = td.getType() trackCodec = td.getCodec() @@ -507,7 +519,10 @@ class MediaDescriptor: d for d in availableFileSubtitleDescriptors if ((season == -1 and episode == -1) - or (d["season"] == int(season) and d["episode"] == int(episode))) + or ( + d.get("season") == int(season) + and d.get("episode") == int(episode) + )) ], key=lambda d: d["index"], ) @@ -522,10 +537,14 @@ class MediaDescriptor: if matchingSubtitleTrackDescriptor: # click.echo(f"Found matching subtitle file {msfd["path"]}\n") self.__logger.debug(f"importSubtitles(): Found matching subtitle file {msfd['path']}") - matchingSubtitleTrackDescriptor[0].setExternalSourceFilePath(msfd["path"]) + matchingTrack = matchingSubtitleTrackDescriptor[0] + matchingTrack.setExternalSourceFilePath(msfd["path"]) - # TODO: Check if useful - # matchingSubtitleTrackDescriptor[0].setDispositionSet(msfd["disposition_set"]) + # Prefer metadata coming from the external single-track source when + # it is provided explicitly by the filename contract. 
+ matchingTrack.getTags()["language"] = msfd["language"] + if msfd["disposition_set"]: + matchingTrack.setDispositionSet(msfd["disposition_set"]) def getConfiguration(self, label: str = ''): diff --git a/src/ffx/media_descriptor_change_set.py b/src/ffx/media_descriptor_change_set.py index 46ea26b..cf98391 100644 --- a/src/ffx/media_descriptor_change_set.py +++ b/src/ffx/media_descriptor_change_set.py @@ -42,6 +42,14 @@ class MediaDescriptorChangeSet(): self.__targetTrackDescriptors = targetMediaDescriptor.getTrackDescriptors() if targetMediaDescriptor is not None else [] self.__sourceTrackDescriptors = sourceMediaDescriptor.getTrackDescriptors() if sourceMediaDescriptor is not None else [] + self.__targetTrackDescriptorsByIndex = { + trackDescriptor.getIndex(): trackDescriptor + for trackDescriptor in self.__targetTrackDescriptors + } + self.__sourceTrackDescriptorsByIndex = { + trackDescriptor.getIndex(): trackDescriptor + for trackDescriptor in self.__sourceTrackDescriptors + } targetMediaTags = targetMediaDescriptor.getTags() if targetMediaDescriptor is not None else {} sourceMediaTags = sourceMediaDescriptor.getTags() if sourceMediaDescriptor is not None else {} @@ -70,51 +78,34 @@ class MediaDescriptorChangeSet(): self.__numSourceTracks = len(self.__sourceTrackDescriptors) - maxNumOfTracks = max(self.__numSourceTracks, self.__numTargetTracks) - trackCompareResult = {} + for targetTrackDescriptor in self.__targetTrackDescriptors: + sourceTrackDescriptor = self.__sourceTrackDescriptorsByIndex.get( + targetTrackDescriptor.getSourceIndex() + ) - for trackIndex in range(maxNumOfTracks): - - correspondingSourceTrackDescriptors = [st for st in self.__sourceTrackDescriptors if st.getIndex() == trackIndex] - correspondingTargetTrackDescriptors = [tt for tt in self.__targetTrackDescriptors if tt.getIndex() == trackIndex] - - # Track present in target but not in source - if (not correspondingSourceTrackDescriptors - and correspondingTargetTrackDescriptors): - + if 
sourceTrackDescriptor is None: if DIFF_ADDED_KEY not in trackCompareResult.keys(): trackCompareResult[DIFF_ADDED_KEY] = {} - - trackCompareResult[DIFF_ADDED_KEY][trackIndex] = correspondingTargetTrackDescriptors[0] + trackCompareResult[DIFF_ADDED_KEY][targetTrackDescriptor.getIndex()] = targetTrackDescriptor continue - # Track present in target but not in source - if (correspondingSourceTrackDescriptors - and not correspondingTargetTrackDescriptors): + trackDiff = self.compareTracks(targetTrackDescriptor, sourceTrackDescriptor) + if trackDiff: + if DIFF_CHANGED_KEY not in trackCompareResult.keys(): + trackCompareResult[DIFF_CHANGED_KEY] = {} + trackCompareResult[DIFF_CHANGED_KEY][targetTrackDescriptor.getIndex()] = trackDiff + targetSourceIndices = { + targetTrackDescriptor.getSourceIndex() + for targetTrackDescriptor in self.__targetTrackDescriptors + } + for sourceTrackDescriptor in self.__sourceTrackDescriptors: + if sourceTrackDescriptor.getIndex() not in targetSourceIndices: if DIFF_REMOVED_KEY not in trackCompareResult.keys(): trackCompareResult[DIFF_REMOVED_KEY] = {} - - trackCompareResult[DIFF_REMOVED_KEY][trackIndex] = correspondingSourceTrackDescriptors[0] - continue - - if (correspondingSourceTrackDescriptors - and correspondingTargetTrackDescriptors): - - # if correspondingTargetTrackDescriptors[0].getIndex() == 3: - # raise click.ClickException(f"{correspondingSourceTrackDescriptors[0].getDispositionSet()} {correspondingTargetTrackDescriptors[0].getDispositionSet()}") - - - trackDiff = self.compareTracks(correspondingTargetTrackDescriptors[0], - correspondingSourceTrackDescriptors[0]) - - if trackDiff: - if DIFF_CHANGED_KEY not in trackCompareResult.keys(): - trackCompareResult[DIFF_CHANGED_KEY] = {} - - trackCompareResult[DIFF_CHANGED_KEY][trackIndex] = trackDiff + trackCompareResult[DIFF_REMOVED_KEY][sourceTrackDescriptor.getIndex()] = sourceTrackDescriptor if trackCompareResult: @@ -274,26 +265,28 @@ class MediaDescriptorChangeSet(): outputTrackTags 
= addedTrackTags | changedTrackTags - trackDescriptor = self.__targetTrackDescriptors[trackIndex] + trackDescriptor = self.__targetTrackDescriptorsByIndex[trackIndex] for tagKey, tagValue in outputTrackTags.items(): metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}" + f":{trackDescriptor.getSubIndex()}", f"{tagKey}={tagValue}"] - for removeKey in removedTrackTags.keys(): - metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}" - + f":{trackDescriptor.getSubIndex()}", - f"{removeKey}="] - - #HINT: In case of loading a track from an external file - # no tags from source are present for the track so - # the unchanged tracks are passed to the output file as well if trackDescriptor.getExternalSourceFilePath(): - for tagKey, tagValue in unchangedTrackTags.items(): + # When a single-track external file substitutes the + # media payload, keep metadata from the regular + # source track unless the external/target side + # overrides it explicitly. + preservedTrackTags = removedTrackTags | unchangedTrackTags + for tagKey, tagValue in preservedTrackTags.items(): metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}" + f":{trackDescriptor.getSubIndex()}", f"{tagKey}={tagValue}"] + else: + for removeKey in removedTrackTags.keys(): + metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}" + + f":{trackDescriptor.getSubIndex()}", + f"{removeKey}="] return metadataTokens diff --git a/src/ffx/model/__init__.py b/src/ffx/model/__init__.py index e69de29..7f349ec 100644 --- a/src/ffx/model/__init__.py +++ b/src/ffx/model/__init__.py @@ -0,0 +1,20 @@ +"""Load ORM model modules so SQLAlchemy relationship strings can resolve.""" + +from .show import Base, Show +from .pattern import Pattern +from .track import Track +from .track_tag import TrackTag +from .media_tag import MediaTag +from .shifted_season import ShiftedSeason +from .property import Property + +__all__ = [ + 'Base', + 'Show', + 'Pattern', + 'Track', 
+ 'TrackTag', + 'MediaTag', + 'ShiftedSeason', + 'Property', +] diff --git a/src/ffx/process.py b/src/ffx/process.py index 08953bd..b2ab4c4 100644 --- a/src/ffx/process.py +++ b/src/ffx/process.py @@ -1,6 +1,8 @@ -import subprocess, logging +import subprocess from typing import List +from .logging_utils import get_ffx_logger + def executeProcess(commandSequence: List[str], directory: str = None, context: dict = None): """ niceness -20 bis +19 @@ -8,8 +10,7 @@ def executeProcess(commandSequence: List[str], directory: str = None, context: d """ if context is None: - logger = logging.getLogger('FFX') - logger.addHandler(logging.NullHandler()) + logger = get_ffx_logger() else: logger = context['logger'] diff --git a/src/ffx/show_descriptor.py b/src/ffx/show_descriptor.py index a045e8d..9a5a270 100644 --- a/src/ffx/show_descriptor.py +++ b/src/ffx/show_descriptor.py @@ -1,4 +1,4 @@ -import logging +from .logging_utils import get_ffx_logger class ShowDescriptor(): @@ -32,8 +32,7 @@ class ShowDescriptor(): self.__logger = self.__context['logger'] else: self.__context = {} - self.__logger = logging.getLogger('FFX') - self.__logger.addHandler(logging.NullHandler()) + self.__logger = get_ffx_logger() if ShowDescriptor.ID_KEY in kwargs.keys(): if type(kwargs[ShowDescriptor.ID_KEY]) is not int: diff --git a/src/ffx/tmdb_controller.py b/src/ffx/tmdb_controller.py index 1190564..090ee52 100644 --- a/src/ffx/tmdb_controller.py +++ b/src/ffx/tmdb_controller.py @@ -1,6 +1,8 @@ -import os, requests, time, logging +import os, requests, time from datetime import datetime +from .logging_utils import get_ffx_logger + class TMDB_REQUEST_EXCEPTION(Exception): def __init__(self, statusCode, statusMessage): @@ -27,8 +29,7 @@ class TmdbController(): self.__context = context if context is None: - self.__logger = logging.getLogger('FFX') - self.__logger.addHandler(logging.NullHandler()) + self.__logger = get_ffx_logger() else: self.__logger = context['logger'] diff --git 
a/src/ffx/track_descriptor.py b/src/ffx/track_descriptor.py index 84b9d6b..9a102b0 100644 --- a/src/ffx/track_descriptor.py +++ b/src/ffx/track_descriptor.py @@ -1,4 +1,3 @@ -import logging from typing import Self from .iso_language import IsoLanguage @@ -6,6 +5,7 @@ from .track_type import TrackType from .audio_layout import AudioLayout from .track_disposition import TrackDisposition from .track_codec import TrackCodec +from .logging_utils import get_ffx_logger # from .helper import dictDiff, setDiff @@ -46,8 +46,7 @@ class TrackDescriptor: self.__logger = self.__context['logger'] else: self.__context = {} - self.__logger = logging.getLogger('FFX') - self.__logger.addHandler(logging.NullHandler()) + self.__logger = get_ffx_logger() if TrackDescriptor.ID_KEY in kwargs.keys(): if type(kwargs[TrackDescriptor.ID_KEY]) is not int: diff --git a/src/ffx/video_encoder.py b/src/ffx/video_encoder.py index 573c4fc..ec9fe59 100644 --- a/src/ffx/video_encoder.py +++ b/src/ffx/video_encoder.py @@ -5,6 +5,7 @@ class VideoEncoder(Enum): AV1 = {'label': 'av1', 'index': 1} VP9 = {'label': 'vp9', 'index': 2} H264 = {'label': 'h264', 'index': 3} + COPY = {'label': 'copy', 'index': 4} UNDEFINED = {'label': 'undefined', 'index': 0} diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..3b48c5e --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +# Repo-root tests package for legacy and future test code. 
"""End-to-end CLI tests for FFX subtrack mapping.

Each test builds a real media fixture with ffmpeg (via the helpers in
``tests.support.ffx_bundle``), optionally seeds a pattern database, runs the
``ffx`` CLI as a subprocess and inspects the converted output with ffprobe.
"""

from __future__ import annotations

from pathlib import Path
import tempfile
import unittest

from tests.support.ffx_bundle import (
    PatternTrackSpec,
    SourceTrackSpec,
    create_source_fixture,
    expected_output_path,
    extract_first_subtitle_text,
    ffprobe_json,
    get_tag,
    prepare_pattern_database,
    run_ffx_convert,
    write_vtt,
)

from ffx.track_type import TrackType

# pytest is optional: the module must stay runnable with plain unittest.
try:
    import pytest
except ImportError:  # pragma: no cover - unittest-only environments
    pytest = None

if pytest is not None:
    pytestmark = [pytest.mark.integration, pytest.mark.subtrack_mapping]


class SubtrackMappingBundleTests(unittest.TestCase):
    """Integration tests covering pattern-driven and CLI-driven track mapping."""

    def setUp(self):
        # Fresh working directory, HOME directory and database per test so
        # runs cannot interfere with each other.
        self.tempdir = tempfile.TemporaryDirectory()
        self.workdir = Path(self.tempdir.name)
        self.home_dir = self.workdir / "home"
        self.home_dir.mkdir()
        self.database_path = self.workdir / "test.db"

    def tearDown(self):
        self.tempdir.cleanup()

    def assertCompleted(self, completed):
        # Custom assertion: surface the subprocess output on failure instead
        # of only the non-zero return code.
        if completed.returncode != 0:
            self.fail(
                "FFX convert failed\n"
                f"STDOUT:\n{completed.stdout}\n"
                f"STDERR:\n{completed.stderr}"
            )

    def test_pattern_reorders_and_omits_tracks_preserving_metadata_and_group_order(self):
        """A database pattern reorders mapped tracks, drops unmapped ones,
        keeps per-track tags and leaves attachments at the end."""
        source_filename = "reorder_s01e01.mkv"
        # Source order: video, subtitle, audio, subtitle, attachment.  The
        # pattern below maps only three of them, in a different order.
        source_path = create_source_fixture(
            self.workdir,
            source_filename,
            [
                SourceTrackSpec(TrackType.VIDEO, identity="video-0", title="Video Zero"),
                SourceTrackSpec(
                    TrackType.SUBTITLE,
                    identity="subtitle-1",
                    language="eng",
                    title="First Subtitle",
                    subtitle_lines=("first embedded subtitle",),
                ),
                SourceTrackSpec(
                    TrackType.AUDIO,
                    identity="audio-2",
                    language="deu",
                    title="German Audio",
                ),
                SourceTrackSpec(
                    TrackType.SUBTITLE,
                    identity="subtitle-3",
                    language="fra",
                    title="Second Subtitle",
                    subtitle_lines=("second embedded subtitle",),
                ),
                SourceTrackSpec(TrackType.ATTACHMENT, attachment_name="ordered.ttf"),
            ],
        )

        # Pattern omits subtitle-3 entirely and swaps audio/subtitle order.
        prepare_pattern_database(
            self.database_path,
            r"^reorder_(s[0-9]+e[0-9]+)\.mkv$",
            [
                PatternTrackSpec(
                    index=0,
                    source_index=0,
                    track_type=TrackType.VIDEO,
                    tags={"THIS_IS": "video-0", "title": "Video Zero"},
                ),
                PatternTrackSpec(
                    index=1,
                    source_index=2,
                    track_type=TrackType.AUDIO,
                    tags={"THIS_IS": "audio-2", "language": "deu", "title": "German Audio"},
                ),
                PatternTrackSpec(
                    index=2,
                    source_index=1,
                    track_type=TrackType.SUBTITLE,
                    tags={"THIS_IS": "subtitle-1", "language": "eng", "title": "First Subtitle"},
                ),
            ],
        )

        completed = run_ffx_convert(
            self.workdir,
            self.home_dir,
            self.database_path,
            "--video-encoder",
            "copy",
            "--no-tmdb",
            "--no-prompt",
            "--no-signature",
            str(source_path),
        )
        self.assertCompleted(completed)

        output_path = expected_output_path(self.workdir, source_filename)
        self.assertTrue(output_path.is_file(), output_path)

        streams = ffprobe_json(output_path)["streams"]
        # Pattern order (video, audio, subtitle) with the attachment last.
        self.assertEqual(
            [stream["codec_type"] for stream in streams],
            ["video", "audio", "subtitle", "attachment"],
        )
        # The THIS_IS marker tags identify which source track each output
        # stream came from.
        self.assertEqual(
            [get_tag(streams[index], "THIS_IS") for index in range(3)],
            ["video-0", "audio-2", "subtitle-1"],
        )
        # subtitle-3 was not mapped by the pattern and must be gone.
        self.assertNotIn(
            "subtitle-3",
            [get_tag(stream, "THIS_IS") for stream in streams if stream["codec_type"] != "attachment"],
        )
        self.assertEqual(streams[-1]["codec_name"], "ttf")

        # Only the mapped subtitle payload survives in the output.
        extracted_subtitle = extract_first_subtitle_text(self.workdir, output_path)
        self.assertIn("first embedded subtitle", extracted_subtitle)
        self.assertNotIn("second embedded subtitle", extracted_subtitle)

    def test_cli_rearrange_streams_reorders_tracks_without_database_pattern(self):
        """--rearrange-streams reorders tracks purely from the CLI
        (pattern matching disabled with --no-pattern)."""
        source_filename = "cli_s01e01.mkv"
        source_path = create_source_fixture(
            self.workdir,
            source_filename,
            [
                SourceTrackSpec(TrackType.VIDEO, identity="video-0"),
                SourceTrackSpec(TrackType.AUDIO, identity="audio-1", language="eng", title="First Audio"),
                SourceTrackSpec(TrackType.AUDIO, identity="audio-2", language="deu", title="Second Audio"),
                SourceTrackSpec(TrackType.SUBTITLE, identity="subtitle-3", language="eng", title="Subtitle"),
            ],
        )

        completed = run_ffx_convert(
            self.workdir,
            self.home_dir,
            self.database_path,
            "--video-encoder",
            "copy",
            "--no-pattern",
            "--no-tmdb",
            "--no-prompt",
            "--no-signature",
            "--rearrange-streams",
            "0,2,1,3",
            str(source_path),
        )
        self.assertCompleted(completed)

        output_path = expected_output_path(self.workdir, source_filename)
        streams = ffprobe_json(output_path)["streams"]

        self.assertEqual(
            [stream["codec_type"] for stream in streams],
            ["video", "audio", "audio", "subtitle"],
        )
        # "0,2,1,3" swaps the two audio tracks.
        self.assertEqual(
            [get_tag(stream, "THIS_IS") for stream in streams],
            ["video-0", "audio-2", "audio-1", "subtitle-3"],
        )

    def test_pattern_validation_fails_for_nonexistent_source_track_reference(self):
        """A pattern pointing at a source index that does not exist must make
        the conversion fail and produce no output file."""
        source_filename = "invalid_s01e01.mkv"
        source_path = create_source_fixture(
            self.workdir,
            source_filename,
            [
                SourceTrackSpec(TrackType.VIDEO, identity="video-0"),
                SourceTrackSpec(TrackType.AUDIO, identity="audio-1"),
                SourceTrackSpec(TrackType.SUBTITLE, identity="subtitle-2"),
            ],
        )

        prepare_pattern_database(
            self.database_path,
            r"^invalid_(s[0-9]+e[0-9]+)\.mkv$",
            [
                PatternTrackSpec(index=0, source_index=0, track_type=TrackType.VIDEO),
                # source_index=99 does not exist in the three-track fixture.
                PatternTrackSpec(index=1, source_index=99, track_type=TrackType.SUBTITLE),
            ],
        )

        completed = run_ffx_convert(
            self.workdir,
            self.home_dir,
            self.database_path,
            "--video-encoder",
            "copy",
            "--no-tmdb",
            "--no-prompt",
            "--no-signature",
            str(source_path),
        )

        self.assertNotEqual(completed.returncode, 0)
        error_output = f"{completed.stdout}\n{completed.stderr}"
        self.assertIn("non-existent source track #99", error_output)
        self.assertFalse(expected_output_path(self.workdir, source_filename).exists())

    def test_external_subtitle_file_replaces_payload_and_overrides_metadata(self):
        """An external VTT replaces the embedded subtitle payload; its
        filename-derived language ("deu") wins while the embedded title and
        identity tags are kept."""
        source_filename = "substitute_s01e01.mkv"
        source_path = create_source_fixture(
            self.workdir,
            source_filename,
            [
                SourceTrackSpec(TrackType.VIDEO, identity="video-0"),
                SourceTrackSpec(TrackType.AUDIO, identity="audio-1", language="eng", title="Main Audio"),
                SourceTrackSpec(
                    TrackType.SUBTITLE,
                    identity="embedded-subtitle",
                    language="eng",
                    title="Embedded Title",
                    subtitle_lines=("embedded subtitle payload",),
                ),
            ],
        )

        # External subtitle named <prefix>_<episode>_<index>_<lang>.vtt —
        # presumably the naming convention the subtitle matcher expects;
        # TODO confirm against the CLI's subtitle discovery code.
        write_vtt(
            self.workdir / "substitute_s01e01_2_deu.vtt",
            ("external subtitle payload",),
        )

        prepare_pattern_database(
            self.database_path,
            r"^substitute_(s[0-9]+e[0-9]+)\.mkv$",
            [
                PatternTrackSpec(index=0, source_index=0, track_type=TrackType.VIDEO),
                PatternTrackSpec(index=1, source_index=1, track_type=TrackType.AUDIO),
                PatternTrackSpec(index=2, source_index=2, track_type=TrackType.SUBTITLE),
            ],
        )

        completed = run_ffx_convert(
            self.workdir,
            self.home_dir,
            self.database_path,
            "--video-encoder",
            "copy",
            "--no-tmdb",
            "--no-prompt",
            "--no-signature",
            "--subtitle-directory",
            str(self.workdir),
            "--subtitle-prefix",
            "substitute",
            str(source_path),
        )
        self.assertCompleted(completed)

        output_path = expected_output_path(self.workdir, source_filename)
        streams = ffprobe_json(output_path)["streams"]
        subtitle_stream = [stream for stream in streams if stream["codec_type"] == "subtitle"][0]

        self.assertEqual(get_tag(subtitle_stream, "language"), "deu")
        self.assertEqual(get_tag(subtitle_stream, "title"), "Embedded Title")
        self.assertEqual(get_tag(subtitle_stream, "THIS_IS"), "embedded-subtitle")

        extracted_subtitle = extract_first_subtitle_text(self.workdir, output_path)
        self.assertIn("external subtitle payload", extracted_subtitle)
        self.assertNotIn("embedded subtitle payload", extracted_subtitle)


if __name__ == "__main__":
    unittest.main()
if inspect.isclass(obj) and name != 'BasenameCombinator' and name.startswith('BasenameCombinator'): return obj diff --git a/src/ffx/test/basename_combinator_0.py b/tests/legacy/basename_combinator_0.py similarity index 100% rename from src/ffx/test/basename_combinator_0.py rename to tests/legacy/basename_combinator_0.py diff --git a/src/ffx/test/basename_combinator_2.py b/tests/legacy/basename_combinator_2.py similarity index 100% rename from src/ffx/test/basename_combinator_2.py rename to tests/legacy/basename_combinator_2.py diff --git a/src/ffx/test/combinator.py b/tests/legacy/combinator.py similarity index 100% rename from src/ffx/test/combinator.py rename to tests/legacy/combinator.py diff --git a/src/ffx/test/disposition_combinator_2.py b/tests/legacy/disposition_combinator_2.py similarity index 85% rename from src/ffx/test/disposition_combinator_2.py rename to tests/legacy/disposition_combinator_2.py index b2e7767..d908e21 100644 --- a/src/ffx/test/disposition_combinator_2.py +++ b/tests/legacy/disposition_combinator_2.py @@ -24,8 +24,9 @@ class DispositionCombinator2(): @staticmethod def getClassReference(identifier): - importlib.import_module(f"ffx.test.disposition_combinator_2_{ identifier }") - for name, obj in inspect.getmembers(sys.modules[f"ffx.test.disposition_combinator_2_{ identifier }"]): + module_name = f"tests.legacy.disposition_combinator_2_{ identifier }" + importlib.import_module(module_name) + for name, obj in inspect.getmembers(sys.modules[module_name]): #HINT: Excluding DispositionCombination as it seems to be included by import (?) 
if inspect.isclass(obj) and name != 'DispositionCombinator2' and name.startswith('DispositionCombinator2'): return obj diff --git a/src/ffx/test/disposition_combinator_2_0.py b/tests/legacy/disposition_combinator_2_0.py similarity index 100% rename from src/ffx/test/disposition_combinator_2_0.py rename to tests/legacy/disposition_combinator_2_0.py diff --git a/src/ffx/test/disposition_combinator_2_1.py b/tests/legacy/disposition_combinator_2_1.py similarity index 100% rename from src/ffx/test/disposition_combinator_2_1.py rename to tests/legacy/disposition_combinator_2_1.py diff --git a/src/ffx/test/disposition_combinator_2_2.py b/tests/legacy/disposition_combinator_2_2.py similarity index 100% rename from src/ffx/test/disposition_combinator_2_2.py rename to tests/legacy/disposition_combinator_2_2.py diff --git a/src/ffx/test/disposition_combinator_2_3 .py b/tests/legacy/disposition_combinator_2_3 .py similarity index 100% rename from src/ffx/test/disposition_combinator_2_3 .py rename to tests/legacy/disposition_combinator_2_3 .py diff --git a/src/ffx/test/disposition_combinator_3.py b/tests/legacy/disposition_combinator_3.py similarity index 84% rename from src/ffx/test/disposition_combinator_3.py rename to tests/legacy/disposition_combinator_3.py index 154a072..b08affe 100644 --- a/src/ffx/test/disposition_combinator_3.py +++ b/tests/legacy/disposition_combinator_3.py @@ -23,8 +23,9 @@ class DispositionCombinator3(): @staticmethod def getClassReference(identifier): - importlib.import_module(f"ffx.test.disposition_combinator_3_{ identifier }") - for name, obj in inspect.getmembers(sys.modules[f"ffx.test.disposition_combinator_3_{ identifier }"]): + module_name = f"tests.legacy.disposition_combinator_3_{ identifier }" + importlib.import_module(module_name) + for name, obj in inspect.getmembers(sys.modules[module_name]): #HINT: Excluding DispositionCombination as it seems to be included by import (?) 
if inspect.isclass(obj) and name != 'DispositionCombinator3' and name.startswith('DispositionCombinator3'): return obj diff --git a/src/ffx/test/disposition_combinator_3_0.py b/tests/legacy/disposition_combinator_3_0.py similarity index 100% rename from src/ffx/test/disposition_combinator_3_0.py rename to tests/legacy/disposition_combinator_3_0.py diff --git a/src/ffx/test/disposition_combinator_3_1.py b/tests/legacy/disposition_combinator_3_1.py similarity index 100% rename from src/ffx/test/disposition_combinator_3_1.py rename to tests/legacy/disposition_combinator_3_1.py diff --git a/src/ffx/test/disposition_combinator_3_2.py b/tests/legacy/disposition_combinator_3_2.py similarity index 100% rename from src/ffx/test/disposition_combinator_3_2.py rename to tests/legacy/disposition_combinator_3_2.py diff --git a/src/ffx/test/disposition_combinator_3_3.py b/tests/legacy/disposition_combinator_3_3.py similarity index 100% rename from src/ffx/test/disposition_combinator_3_3.py rename to tests/legacy/disposition_combinator_3_3.py diff --git a/src/ffx/test/disposition_combinator_3_4.py b/tests/legacy/disposition_combinator_3_4.py similarity index 100% rename from src/ffx/test/disposition_combinator_3_4.py rename to tests/legacy/disposition_combinator_3_4.py diff --git a/src/ffx/test/helper.py b/tests/legacy/helper.py similarity index 97% rename from src/ffx/test/helper.py rename to tests/legacy/helper.py index 619ad3d..798ef09 100644 --- a/src/ffx/test/helper.py +++ b/tests/legacy/helper.py @@ -1,11 +1,9 @@ import os, math, tempfile, click - -from ffx.ffx_controller import FfxController - from ffx.process import executeProcess from ffx.media_descriptor import MediaDescriptor +from ffx.media_descriptor_change_set import MediaDescriptorChangeSet from ffx.track_type import TrackType from ffx.helper import dictCache @@ -149,7 +147,6 @@ def createMediaTestFile(mediaDescriptor: MediaDescriptor, # subtitleFilePath = createVttFile(SHORT_SUBTITLE_SEQUENCE) - # commandTokens = 
FfxController.COMMAND_TOKENS commandTokens = ['ffmpeg', '-y'] generatorCache = [] @@ -232,15 +229,14 @@ def createMediaTestFile(mediaDescriptor: MediaDescriptor, f"{mediaTagKey}={mediaTagValue}"] subIndexCounter[trackType] += 1 - #TODO: Optimize too many runs ffxContext = {'config': ConfigurationController(), 'logger': logger} - fc = FfxController(ffxContext, mediaDescriptor) + mdcs = MediaDescriptorChangeSet(ffxContext, mediaDescriptor) commandTokens += (generatorTokens + importTokens + mappingTokens + metadataTokens - + fc.generateDispositionTokens()) + + mdcs.generateDispositionTokens()) commandTokens += ['-t', str(length)] diff --git a/src/ffx/test/indicator_combinator.py b/tests/legacy/indicator_combinator.py similarity index 100% rename from src/ffx/test/indicator_combinator.py rename to tests/legacy/indicator_combinator.py diff --git a/src/ffx/test/label_combinator.py b/tests/legacy/label_combinator.py similarity index 85% rename from src/ffx/test/label_combinator.py rename to tests/legacy/label_combinator.py index 7965f92..2731129 100644 --- a/src/ffx/test/label_combinator.py +++ b/tests/legacy/label_combinator.py @@ -25,8 +25,9 @@ class LabelCombinator(): @staticmethod def getClassReference(identifier): - importlib.import_module(f"ffx.test.{LabelCombinator.PREFIX}{ identifier }") - for name, obj in inspect.getmembers(sys.modules[f"ffx.test.{LabelCombinator.PREFIX}{ identifier }"]): + module_name = f"tests.legacy.{LabelCombinator.PREFIX}{ identifier }" + importlib.import_module(module_name) + for name, obj in inspect.getmembers(sys.modules[module_name]): #HINT: Excluding MediaCombinator as it seems to be included by import (?) 
if inspect.isclass(obj) and name != 'LabelCombinator' and name.startswith('LabelCombinator'): return obj diff --git a/src/ffx/test/label_combinator_0.py b/tests/legacy/label_combinator_0.py similarity index 100% rename from src/ffx/test/label_combinator_0.py rename to tests/legacy/label_combinator_0.py diff --git a/src/ffx/test/label_combinator_1.py b/tests/legacy/label_combinator_1.py similarity index 100% rename from src/ffx/test/label_combinator_1.py rename to tests/legacy/label_combinator_1.py diff --git a/src/ffx/test/media_combinator.py b/tests/legacy/media_combinator.py similarity index 84% rename from src/ffx/test/media_combinator.py rename to tests/legacy/media_combinator.py index 4053e35..bb00e70 100644 --- a/src/ffx/test/media_combinator.py +++ b/tests/legacy/media_combinator.py @@ -22,8 +22,9 @@ class MediaCombinator(): @staticmethod def getClassReference(identifier): - importlib.import_module(f"ffx.test.media_combinator_{ identifier }") - for name, obj in inspect.getmembers(sys.modules[f"ffx.test.media_combinator_{ identifier }"]): + module_name = f"tests.legacy.media_combinator_{ identifier }" + importlib.import_module(module_name) + for name, obj in inspect.getmembers(sys.modules[module_name]): #HINT: Excluding MediaCombinator as it seems to be included by import (?) 
if inspect.isclass(obj) and name != 'MediaCombinator' and name.startswith('MediaCombinator'): return obj diff --git a/src/ffx/test/media_combinator_0.py b/tests/legacy/media_combinator_0.py similarity index 100% rename from src/ffx/test/media_combinator_0.py rename to tests/legacy/media_combinator_0.py diff --git a/src/ffx/test/media_combinator_1.py b/tests/legacy/media_combinator_1.py similarity index 100% rename from src/ffx/test/media_combinator_1.py rename to tests/legacy/media_combinator_1.py diff --git a/src/ffx/test/media_combinator_2.py b/tests/legacy/media_combinator_2.py similarity index 100% rename from src/ffx/test/media_combinator_2.py rename to tests/legacy/media_combinator_2.py diff --git a/src/ffx/test/media_combinator_3.py b/tests/legacy/media_combinator_3.py similarity index 100% rename from src/ffx/test/media_combinator_3.py rename to tests/legacy/media_combinator_3.py diff --git a/src/ffx/test/media_combinator_4.py b/tests/legacy/media_combinator_4.py similarity index 100% rename from src/ffx/test/media_combinator_4.py rename to tests/legacy/media_combinator_4.py diff --git a/src/ffx/test/media_combinator_5.py b/tests/legacy/media_combinator_5.py similarity index 100% rename from src/ffx/test/media_combinator_5.py rename to tests/legacy/media_combinator_5.py diff --git a/src/ffx/test/media_combinator_6.py b/tests/legacy/media_combinator_6.py similarity index 100% rename from src/ffx/test/media_combinator_6.py rename to tests/legacy/media_combinator_6.py diff --git a/src/ffx/test/media_combinator_7.py b/tests/legacy/media_combinator_7.py similarity index 100% rename from src/ffx/test/media_combinator_7.py rename to tests/legacy/media_combinator_7.py diff --git a/src/ffx/test/media_tag_combinator.py b/tests/legacy/media_tag_combinator.py similarity index 84% rename from src/ffx/test/media_tag_combinator.py rename to tests/legacy/media_tag_combinator.py index b434694..8735eb2 100644 --- a/src/ffx/test/media_tag_combinator.py +++ 
b/tests/legacy/media_tag_combinator.py @@ -22,8 +22,9 @@ class MediaTagCombinator(): @staticmethod def getClassReference(identifier): - importlib.import_module(f"ffx.test.media_tag_combinator_{ identifier }") - for name, obj in inspect.getmembers(sys.modules[f"ffx.test.media_tag_combinator_{ identifier }"]): + module_name = f"tests.legacy.media_tag_combinator_{ identifier }" + importlib.import_module(module_name) + for name, obj in inspect.getmembers(sys.modules[module_name]): #HINT: Excluding MediaCombinator as it seems to be included by import (?) if inspect.isclass(obj) and name != 'MediaTagCombinator' and name.startswith('MediaTagCombinator'): return obj diff --git a/src/ffx/test/media_tag_combinator_0.py b/tests/legacy/media_tag_combinator_0.py similarity index 100% rename from src/ffx/test/media_tag_combinator_0.py rename to tests/legacy/media_tag_combinator_0.py diff --git a/src/ffx/test/media_tag_combinator_1.py b/tests/legacy/media_tag_combinator_1.py similarity index 100% rename from src/ffx/test/media_tag_combinator_1.py rename to tests/legacy/media_tag_combinator_1.py diff --git a/src/ffx/test/media_tag_combinator_2.py b/tests/legacy/media_tag_combinator_2.py similarity index 100% rename from src/ffx/test/media_tag_combinator_2.py rename to tests/legacy/media_tag_combinator_2.py diff --git a/src/ffx/test/permutation_combinator_2.py b/tests/legacy/permutation_combinator_2.py similarity index 100% rename from src/ffx/test/permutation_combinator_2.py rename to tests/legacy/permutation_combinator_2.py diff --git a/src/ffx/test/permutation_combinator_3.py b/tests/legacy/permutation_combinator_3.py similarity index 100% rename from src/ffx/test/permutation_combinator_3.py rename to tests/legacy/permutation_combinator_3.py diff --git a/src/ffx/test/release_combinator.py b/tests/legacy/release_combinator.py similarity index 100% rename from src/ffx/test/release_combinator.py rename to tests/legacy/release_combinator.py diff --git a/src/ffx/test/scenario.py 
b/tests/legacy/scenario.py similarity index 88% rename from src/ffx/test/scenario.py rename to tests/legacy/scenario.py index 4a924c6..9bb9c88 100644 --- a/src/ffx/test/scenario.py +++ b/tests/legacy/scenario.py @@ -4,7 +4,7 @@ from ffx.show_controller import ShowController from ffx.pattern_controller import PatternController from ffx.media_controller import MediaController -from ffx.test.helper import createEmptyDirectory +from .helper import createEmptyDirectory from ffx.database import databaseContext class Scenario(): @@ -90,11 +90,7 @@ class Scenario(): def __init__(self, context = None): self._context = context self._testDirectory = createEmptyDirectory() - self._ffxExecutablePath = os.path.join( - os.path.dirname( - os.path.dirname( - os.path.dirname(__file__))), - 'ffx.py') + self._ffxModuleName = 'ffx' self._logger = context['logger'] self._reportLogger = context['report_logger'] @@ -146,8 +142,9 @@ class Scenario(): @staticmethod def getClassReference(identifier): - importlib.import_module(f"ffx.test.scenario_{ identifier }") - for name, obj in inspect.getmembers(sys.modules[f"ffx.test.scenario_{ identifier }"]): + module_name = f"tests.legacy.scenario_{ identifier }" + importlib.import_module(module_name) + for name, obj in inspect.getmembers(sys.modules[module_name]): #HINT: Excluding Scenario as it seems to be included by import (?) 
if inspect.isclass(obj) and name != 'Scenario' and name.startswith('Scenario'): return obj diff --git a/src/ffx/test/scenario_1.py b/tests/legacy/scenario_1.py similarity index 95% rename from src/ffx/test/scenario_1.py rename to tests/legacy/scenario_1.py index a313e2f..57e42f5 100644 --- a/src/ffx/test/scenario_1.py +++ b/tests/legacy/scenario_1.py @@ -2,7 +2,7 @@ import os, sys, click, glob from .scenario import Scenario -from ffx.test.helper import createMediaTestFile +from .helper import createMediaTestFile from ffx.process import executeProcess from ffx.file_properties import FileProperties @@ -13,9 +13,9 @@ from ffx.track_descriptor import TrackDescriptor from ffx.track_type import TrackType from ffx.track_disposition import TrackDisposition -from ffx.test.media_combinator_0 import MediaCombinator0 +from .media_combinator_0 import MediaCombinator0 -from ffx.test.basename_combinator import BasenameCombinator +from .basename_combinator import BasenameCombinator class Scenario1(Scenario): @@ -92,8 +92,7 @@ class Scenario1(Scenario): # Phase 2: Run ffx - commandSequence = [sys.executable, - self._ffxExecutablePath] + commandSequence = [sys.executable, '-m', self._ffxModuleName] if self._context['verbosity']: commandSequence += ['--verbose', diff --git a/src/ffx/test/scenario_2.py b/tests/legacy/scenario_2.py similarity index 96% rename from src/ffx/test/scenario_2.py rename to tests/legacy/scenario_2.py index 4fa7ea8..567c967 100644 --- a/src/ffx/test/scenario_2.py +++ b/tests/legacy/scenario_2.py @@ -2,7 +2,7 @@ import os, sys, click from .scenario import Scenario -from ffx.test.helper import createMediaTestFile +from .helper import createMediaTestFile from ffx.process import executeProcess from ffx.file_properties import FileProperties @@ -13,7 +13,7 @@ from ffx.track_descriptor import TrackDescriptor from ffx.track_type import TrackType from ffx.track_disposition import TrackDisposition -from ffx.test.media_combinator import MediaCombinator +from 
.media_combinator import MediaCombinator class Scenario2(Scenario): @@ -77,8 +77,7 @@ class Scenario2(Scenario): # Phase 2: Run ffx - commandSequence = [sys.executable, - self._ffxExecutablePath] + commandSequence = [sys.executable, '-m', self._ffxModuleName] if self._context['verbosity']: commandSequence += ['--verbose', diff --git a/src/ffx/test/scenario_4.py b/tests/legacy/scenario_4.py similarity index 96% rename from src/ffx/test/scenario_4.py rename to tests/legacy/scenario_4.py index 677da08..74eebd0 100644 --- a/src/ffx/test/scenario_4.py +++ b/tests/legacy/scenario_4.py @@ -2,11 +2,11 @@ import os, sys, click from .scenario import Scenario -from ffx.test.helper import createMediaTestFile +from .helper import createMediaTestFile from ffx.process import executeProcess from ffx.database import databaseContext -from ffx.test.helper import createEmptyDirectory +from .helper import createEmptyDirectory from ffx.helper import getEpisodeFileBasename from ffx.file_properties import FileProperties @@ -17,8 +17,8 @@ from ffx.track_descriptor import TrackDescriptor from ffx.track_type import TrackType from ffx.track_disposition import TrackDisposition -from ffx.test.media_combinator import MediaCombinator -from ffx.test.indicator_combinator import IndicatorCombinator +from .media_combinator import MediaCombinator +from .indicator_combinator import IndicatorCombinator from ffx.show_descriptor import ShowDescriptor @@ -163,8 +163,7 @@ class Scenario4(Scenario): # Phase 3: Run ffx - commandSequence = [sys.executable, - self._ffxExecutablePath] + commandSequence = [sys.executable, '-m', self._ffxModuleName] if self._context['verbosity']: commandSequence += ['--verbose', diff --git a/src/ffx/test/show_combinator.py b/tests/legacy/show_combinator.py similarity index 100% rename from src/ffx/test/show_combinator.py rename to tests/legacy/show_combinator.py diff --git a/src/ffx/test/title_combinator.py b/tests/legacy/title_combinator.py similarity index 100% rename from 
src/ffx/test/title_combinator.py rename to tests/legacy/title_combinator.py diff --git a/src/ffx/test/track_tag_combinator_2.py b/tests/legacy/track_tag_combinator_2.py similarity index 84% rename from src/ffx/test/track_tag_combinator_2.py rename to tests/legacy/track_tag_combinator_2.py index 9d62845..85f715d 100644 --- a/src/ffx/test/track_tag_combinator_2.py +++ b/tests/legacy/track_tag_combinator_2.py @@ -22,8 +22,9 @@ class TrackTagCombinator2(): @staticmethod def getClassReference(identifier): - importlib.import_module(f"ffx.test.track_tag_combinator_2_{ identifier }") - for name, obj in inspect.getmembers(sys.modules[f"ffx.test.track_tag_combinator_2_{ identifier }"]): + module_name = f"tests.legacy.track_tag_combinator_2_{ identifier }" + importlib.import_module(module_name) + for name, obj in inspect.getmembers(sys.modules[module_name]): #HINT: Excluding DispositionCombination as it seems to be included by import (?) if inspect.isclass(obj) and name != 'TrackTagCombinator2' and name.startswith('TrackTagCombinator2'): return obj diff --git a/src/ffx/test/track_tag_combinator_2_0.py b/tests/legacy/track_tag_combinator_2_0.py similarity index 100% rename from src/ffx/test/track_tag_combinator_2_0.py rename to tests/legacy/track_tag_combinator_2_0.py diff --git a/src/ffx/test/track_tag_combinator_2_1.py b/tests/legacy/track_tag_combinator_2_1.py similarity index 100% rename from src/ffx/test/track_tag_combinator_2_1.py rename to tests/legacy/track_tag_combinator_2_1.py diff --git a/src/ffx/test/track_tag_combinator_2_2.py b/tests/legacy/track_tag_combinator_2_2.py similarity index 100% rename from src/ffx/test/track_tag_combinator_2_2.py rename to tests/legacy/track_tag_combinator_2_2.py diff --git a/src/ffx/test/track_tag_combinator_2_3.py b/tests/legacy/track_tag_combinator_2_3.py similarity index 100% rename from src/ffx/test/track_tag_combinator_2_3.py rename to tests/legacy/track_tag_combinator_2_3.py diff --git a/src/ffx/test/track_tag_combinator_3.py 
b/tests/legacy/track_tag_combinator_3.py similarity index 84% rename from src/ffx/test/track_tag_combinator_3.py rename to tests/legacy/track_tag_combinator_3.py index 41345f5..adc3b98 100644 --- a/src/ffx/test/track_tag_combinator_3.py +++ b/tests/legacy/track_tag_combinator_3.py @@ -22,8 +22,9 @@ class TrackTagCombinator3(): @staticmethod def getClassReference(identifier): - importlib.import_module(f"ffx.test.track_tag_combinator_3_{ identifier }") - for name, obj in inspect.getmembers(sys.modules[f"ffx.test.track_tag_combinator_3_{ identifier }"]): + module_name = f"tests.legacy.track_tag_combinator_3_{ identifier }" + importlib.import_module(module_name) + for name, obj in inspect.getmembers(sys.modules[module_name]): #HINT: Excluding DispositionCombination as it seems to be included by import (?) if inspect.isclass(obj) and name != 'TrackTagCombinator3' and name.startswith('TrackTagCombinator3'): return obj diff --git a/src/ffx/test/track_tag_combinator_3_0.py b/tests/legacy/track_tag_combinator_3_0.py similarity index 100% rename from src/ffx/test/track_tag_combinator_3_0.py rename to tests/legacy/track_tag_combinator_3_0.py diff --git a/src/ffx/test/track_tag_combinator_3_1.py b/tests/legacy/track_tag_combinator_3_1.py similarity index 100% rename from src/ffx/test/track_tag_combinator_3_1.py rename to tests/legacy/track_tag_combinator_3_1.py diff --git a/src/ffx/test/track_tag_combinator_3_2.py b/tests/legacy/track_tag_combinator_3_2.py similarity index 100% rename from src/ffx/test/track_tag_combinator_3_2.py rename to tests/legacy/track_tag_combinator_3_2.py diff --git a/src/ffx/test/track_tag_combinator_3_3.py b/tests/legacy/track_tag_combinator_3_3.py similarity index 100% rename from src/ffx/test/track_tag_combinator_3_3.py rename to tests/legacy/track_tag_combinator_3_3.py diff --git a/src/ffx/test/track_tag_combinator_3_4.py b/tests/legacy/track_tag_combinator_3_4.py similarity index 100% rename from src/ffx/test/track_tag_combinator_3_4.py rename to 
#! /usr/bin/python3

import os, sys, logging, click

# Allow direct execution from the source tree by exposing both the repository
# root for `tests.*` imports and `src/` for `ffx.*` imports.
script_dir = os.path.dirname(os.path.abspath(__file__))
repo_root = os.path.dirname(script_dir)
src_root = os.path.join(repo_root, 'src')

# Drop the tests/ directory itself from sys.path (Python prepends the script's
# own directory when run directly), then put repo root and src/ in front.
# NOTE(review): the containment checks compare raw sys.path entries, which may
# be relative — duplicates with differing spellings would slip through;
# harmless here but worth confirming.
sys.path = [p for p in sys.path if os.path.abspath(p) != script_dir]
for path in [repo_root, src_root]:
    if path not in sys.path:
        sys.path.insert(0, path)

# Export the same roots via PYTHONPATH, preserving any pre-existing entries,
# so the `python -m ffx` child processes launched by the scenarios inherit
# the import paths.
existing_pythonpath = [p for p in os.environ.get('PYTHONPATH', '').split(os.pathsep) if p]
pythonpath_entries = []
for path in [src_root, repo_root] + existing_pythonpath:
    if path not in pythonpath_entries:
        pythonpath_entries.append(path)
os.environ['PYTHONPATH'] = os.pathsep.join(pythonpath_entries)

from ffx.configuration_controller import ConfigurationController
from ffx.file_properties import FileProperties
from ffx.ffx_controller import FfxController

from tests.legacy.helper import createMediaTestFile

from tests.legacy.scenario import Scenario

from ffx.tmdb_controller import TMDB_API_KEY_NOT_PRESENT_EXCEPTION
class StaticConfig:
    """Minimal in-memory stand-in for the FFX configuration controller.

    Wraps a plain dict so test code can satisfy the ``config`` entry of an
    FFX context without touching any configuration files.
    """

    def __init__(self, data: dict | None = None):
        # Any falsy value (None or an empty dict) is replaced by a fresh
        # empty dict, mirroring the `data or {}` coalescing behaviour.
        self._data = {} if not data else data

    def getData(self):
        """Return the underlying configuration mapping."""
        return self._data
def write_vtt(path: Path, lines: tuple[str, ...]) -> Path:
    """Write a minimal WebVTT file at *path* with one cue per entry in *lines*.

    Cues start 600 ms apart and last 500 ms each.  Returns *path* so calls
    can be inlined at the call site.
    """

    def stamp(total_ms: int) -> str:
        # WebVTT timestamp: hh:mm:ss.mmm
        hours, remainder = divmod(total_ms, 3600000)
        minutes, remainder = divmod(remainder, 60000)
        seconds, millis = divmod(remainder, 1000)
        return f"{hours:02d}:{minutes:02d}:{seconds:02d}.{millis:03d}"

    document = ["WEBVTT", ""]
    for position, text in enumerate(lines):
        begin = position * 600
        document += [f"{stamp(begin)} --> {stamp(begin + 500)}", text, ""]

    path.write_text("\n".join(document), encoding="utf-8")
    return path
+ subtitle_path = write_vtt( + workdir / f"{output_path.stem}_subtitle_{subtitle_counter}.vtt", + track.subtitle_lines, + ) + command += ["-i", str(subtitle_path)] + subtitle_input_indices.append(next_input_index) + next_input_index += 1 + subtitle_counter += 1 + + map_tokens: list[str] = [] + metadata_tokens: list[str] = [] + disposition_tokens: list[str] = [] + attachment_tokens: list[str] = [] + + per_type_subindex: dict[TrackType, int] = {} + subtitle_input_cursor = 0 + attachment_subindex = 0 + + for track in tracks: + if track.track_type == TrackType.VIDEO: + map_tokens += ["-map", f"{input_indices['video']}:v:0"] + stream_group = "v" + elif track.track_type == TrackType.AUDIO: + map_tokens += ["-map", f"{input_indices['audio']}:a:0"] + stream_group = "a" + elif track.track_type == TrackType.SUBTITLE: + map_tokens += ["-map", f"{subtitle_input_indices[subtitle_input_cursor]}:s:0"] + subtitle_input_cursor += 1 + stream_group = "s" + elif track.track_type == TrackType.ATTACHMENT: + attachment_path = workdir / track.attachment_name + attachment_path.write_bytes(b"dummy font bytes") + attachment_tokens += [ + "-attach", + str(attachment_path), + f"-metadata:s:t:{attachment_subindex}", + "mimetype=application/x-truetype-font", + f"-metadata:s:t:{attachment_subindex}", + f"filename={attachment_path.name}", + ] + attachment_subindex += 1 + continue + else: + raise ValueError(f"Unsupported track type {track.track_type}") + + subindex = per_type_subindex.get(track.track_type, 0) + per_type_subindex[track.track_type] = subindex + 1 + + tags = {} + if track.identity is not None: + tags["THIS_IS"] = track.identity + if track.language is not None: + tags["language"] = track.language + if track.title is not None: + tags["title"] = track.title + tags.update(track.extra_tags) + + for key, value in tags.items(): + metadata_tokens += [f"-metadata:s:{stream_group}:{subindex}", f"{key}={value}"] + + if track.dispositions: + disposition_tokens += [ + 
f"-disposition:{stream_group}:{subindex}", + "+".join(disposition.label() for disposition in track.dispositions), + ] + + command += map_tokens + command += metadata_tokens + command += disposition_tokens + command += [ + "-c:v", + "libx264", + "-preset", + "ultrafast", + "-crf", + "35", + "-pix_fmt", + "yuv420p", + "-c:a", + "aac", + "-b:a", + "48k", + "-c:s", + "webvtt", + "-t", + str(duration_seconds), + "-shortest", + ] + command += attachment_tokens + command += [str(output_path)] + + completed = subprocess.run(command, cwd=workdir, capture_output=True, text=True) + if completed.returncode != 0: + raise AssertionError(f"ffmpeg fixture creation failed\nSTDOUT:\n{completed.stdout}\nSTDERR:\n{completed.stderr}") + + return output_path + + +def add_show_and_pattern(context: dict, filename_pattern: str, show_id: int = 1) -> int: + show_descriptor = ShowDescriptor( + id=show_id, + name="Bundle Test Show", + year=2000, + ) + ShowController(context).updateShow(show_descriptor) + pattern_id = PatternController(context).addPattern( + { + "show_id": show_id, + "pattern": filename_pattern, + } + ) + if not pattern_id: + raise AssertionError("Failed to create pattern in test database") + return pattern_id + + +def add_pattern_tracks(context: dict, pattern_id: int, track_specs: list[PatternTrackSpec]) -> None: + track_controller = TrackController(context) + for track in track_specs: + kwargs = { + TrackDescriptor.INDEX_KEY: track.index, + TrackDescriptor.SOURCE_INDEX_KEY: track.source_index, + TrackDescriptor.TRACK_TYPE_KEY: track.track_type, + TrackDescriptor.TAGS_KEY: dict(track.tags), + TrackDescriptor.DISPOSITION_SET_KEY: set(track.dispositions), + } + if track.track_type == TrackType.AUDIO: + kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = track.audio_layout + track_controller.addTrack(TrackDescriptor(**kwargs), pattern_id) + + +def prepare_pattern_database(database_path: Path, filename_pattern: str, track_specs: list[PatternTrackSpec], show_id: int = 1) -> None: + context 
= build_controller_context(database_path) + try: + pattern_id = add_show_and_pattern(context, filename_pattern, show_id=show_id) + add_pattern_tracks(context, pattern_id, track_specs) + finally: + dispose_controller_context(context) + + +def run_ffx_convert(workdir: Path, home_dir: Path, database_path: Path, *args: str) -> subprocess.CompletedProcess[str]: + env = os.environ.copy() + env["HOME"] = str(home_dir) + existing_pythonpath = env.get("PYTHONPATH", "") + env["PYTHONPATH"] = str(SRC_ROOT) if not existing_pythonpath else f"{SRC_ROOT}{os.pathsep}{existing_pythonpath}" + + command = [ + sys.executable, + "-m", + "ffx", + "--database-file", + str(database_path), + "convert", + *args, + ] + return subprocess.run(command, cwd=workdir, env=env, capture_output=True, text=True) + + +def ffprobe_json(path: Path) -> dict: + completed = subprocess.run( + [ + "ffprobe", + "-hide_banner", + "-show_streams", + "-show_format", + "-of", + "json", + str(path), + ], + capture_output=True, + text=True, + ) + if completed.returncode != 0: + raise AssertionError(f"ffprobe failed for {path}\nSTDERR:\n{completed.stderr}") + return json.loads(completed.stdout) + + +def stream_tags(stream: dict) -> dict[str, str]: + return {str(key): str(value) for key, value in stream.get("tags", {}).items()} + + +def get_tag(stream: dict, key: str) -> str | None: + tags = stream_tags(stream) + for candidate in (key, key.lower(), key.upper()): + if candidate in tags: + return tags[candidate] + return None + + +def extract_first_subtitle_text(workdir: Path, media_path: Path) -> str: + extracted_path = workdir / f"{media_path.stem}.subtitle.vtt" + completed = subprocess.run( + [ + "ffmpeg", + "-y", + "-i", + str(media_path), + "-map", + "0:s:0", + "-c", + "copy", + str(extracted_path), + ], + cwd=workdir, + capture_output=True, + text=True, + ) + if completed.returncode != 0: + raise AssertionError(f"Subtitle extraction failed\nSTDERR:\n{completed.stderr}") + return 
extracted_path.read_text(encoding="utf-8") + + +def expected_output_path(workdir: Path, source_filename: str) -> Path: + return workdir / f"out_{source_filename}" diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/unit/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py new file mode 100644 index 0000000..0f44c7f --- /dev/null +++ b/tests/unit/test_logging.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +import logging +from pathlib import Path +import sys +import tempfile +import unittest + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx.logging_utils import ( # noqa: E402 + CONSOLE_HANDLER_NAME, + FILE_HANDLER_NAME, + configure_ffx_logger, + get_ffx_logger, +) + + +class LoggingUtilsTests(unittest.TestCase): + def cleanup_logger(self, logger_name: str) -> None: + logger = logging.getLogger(logger_name) + for handler in list(logger.handlers): + logger.removeHandler(handler) + handler.close() + + def test_get_ffx_logger_adds_only_one_null_handler(self): + logger_name = "ffx-test-null-handler" + self.cleanup_logger(logger_name) + + logger = get_ffx_logger(logger_name) + logger = get_ffx_logger(logger_name) + + null_handlers = [ + handler for handler in logger.handlers if isinstance(handler, logging.NullHandler) + ] + self.assertEqual(1, len(null_handlers)) + + self.cleanup_logger(logger_name) + + def test_configure_ffx_logger_reuses_named_handlers(self): + logger_name = "ffx-test-configure-handler" + self.cleanup_logger(logger_name) + + with tempfile.TemporaryDirectory() as tempdir: + first_log_path = Path(tempdir) / "first.log" + second_log_path = Path(tempdir) / "second.log" + + logger = configure_ffx_logger( + str(first_log_path), + logging.ERROR, + logging.INFO, + name=logger_name, + ) + logger = configure_ffx_logger( + 
str(second_log_path), + logging.DEBUG, + logging.WARNING, + name=logger_name, + ) + + console_handlers = [ + handler for handler in logger.handlers if handler.get_name() == CONSOLE_HANDLER_NAME + ] + file_handlers = [ + handler for handler in logger.handlers if handler.get_name() == FILE_HANDLER_NAME + ] + + self.assertEqual(1, len(console_handlers)) + self.assertEqual(1, len(file_handlers)) + self.assertFalse( + any(isinstance(handler, logging.NullHandler) for handler in logger.handlers) + ) + self.assertEqual(logging.WARNING, console_handlers[0].level) + self.assertEqual(logging.DEBUG, file_handlers[0].level) + self.assertEqual(str(second_log_path.resolve()), file_handlers[0].baseFilename) + + self.cleanup_logger(logger_name) + + +if __name__ == "__main__": + unittest.main() From 01b5fdb2896221553ee703b1451f43ed01d5a813 Mon Sep 17 00:00:00 2001 From: Javanaut Date: Thu, 9 Apr 2026 13:34:38 +0200 Subject: [PATCH 04/28] Refine tests, CLI --- README.md | 129 +++++++++++++++--- SCRATCHPAD.md | 33 ++--- pyproject.toml | 1 + requirements/project.md | 11 +- src/ffx/cli.py | 25 ++-- src/ffx/ffx_controller.py | 23 ++-- src/ffx/process.py | 91 ++++++++++-- tests/unit/test_process.py | 52 +++++++ .../{prepare.sh => configure_workstation.sh} | 33 ++++- tools/setup.sh | 63 +++++++-- tools/test.sh | 22 +++ 11 files changed, 391 insertions(+), 92 deletions(-) create mode 100644 tests/unit/test_process.py rename tools/{prepare.sh => configure_workstation.sh} (91%) create mode 100755 tools/test.sh diff --git a/README.md b/README.md index 5fa42fc..6eed68c 100644 --- a/README.md +++ b/README.md @@ -1,48 +1,135 @@ # FFX +FFX is a local CLI and Textual TUI for inspecting TV episode files, storing normalization rules in SQLite, and converting outputs into a predictable stream, metadata, and filename layout. + +## Requirements + +- Linux-like environment +- `python3` +- `ffmpeg` +- `ffprobe` +- `cpulimit` + ## Installation -per https: +FFX uses a two-step local setup flow. + +### 1. 
Install The Bundle + +This step creates or reuses the persistent bundle virtualenv in `~/.local/share/ffx.venv`, installs FFX into it, and ensures `ffx` is exposed through a shell alias. ```sh -pip install https:////ffx.git@ +bash tools/setup.sh ``` -per git: +If you also want the Python packages needed for the modern test suite: ```sh -pip install git+ssh://@//ffx.git@ +bash tools/setup.sh --with-tests ``` -## Version history +You can verify the bundle state without changing anything: -### 0.1.1 +```sh +bash tools/setup.sh --check +``` -Bugfixes, TMBD identify shows +### 2. Prepare System Dependencies And Local User Files -### 0.1.2 +This step installs or verifies workstation dependencies and seeds local config and data directories. It is the step wrapped by the CLI command `ffx configure_workstation`. -Bugfixes +Run it directly: -### 0.1.3 +```sh +bash tools/configure_workstation.sh +``` -Subtitle file imports +Or through the installed CLI: -### 0.2.0 +```sh +ffx configure_workstation +``` -Tests, Config-File +Check-only mode is available in both forms: -### 0.2.1 +```sh +bash tools/configure_workstation.sh --check +ffx configure_workstation --check +``` -Signature, Tags cleaning, Bugfixes, Refactoring +`tools/configure_workstation.sh` does not manage the bundle virtualenv. Python-side test packages belong to `tools/setup.sh --with-tests`. -### 0.2.2 +## Basic Usage -CLI-Overrides +Examples: + +```sh +ffx version +ffx inspect /path/to/episode.mkv +ffx convert /path/to/episode.mkv +ffx shows +``` + +## Modern Tests + +Install Python test packages first: + +```sh +bash tools/setup.sh --with-tests +``` + +Then run the modern automatically discovered test suite: + +```sh +./tools/test.sh +``` + +This runner uses `pytest` and intentionally excludes the legacy harness under `tests/legacy/`. 
+ +## Default Local Paths + +- Config: `~/.local/etc/ffx.json` +- Database: `~/.local/var/ffx/ffx.db` +- Log file: `~/.local/var/log/ffx.log` +- Bundle venv: `~/.local/share/ffx.venv` + +## TMDB + +TMDB-backed metadata enrichment requires `TMDB_API_KEY` to be set in the environment. + +## Version History ### 0.2.3 -PyPi packaging -Templating output filename -Season shiftung -DB-Versionierung +- PyPI packaging +- output filename templating +- season shifting +- DB versioning + +### 0.2.2 + +- CLI overrides + +### 0.2.1 + +- signature handling +- tag cleanup +- bugfixes and refactoring + +### 0.2.0 + +- tests +- config file + +### 0.1.3 + +- subtitle file imports + +### 0.1.2 + +- bugfixes + +### 0.1.1 + +- bugfixes +- TMDB show identification diff --git a/SCRATCHPAD.md b/SCRATCHPAD.md index 5c4d5c2..ce4ba22 100644 --- a/SCRATCHPAD.md +++ b/SCRATCHPAD.md @@ -10,6 +10,7 @@ - This list is intentionally optimization-oriented rather than bug-oriented. Some items below also improve correctness or maintainability, but they were selected because they can reduce runtime cost, operator friction, or iteration overhead. - A first modern integration slice now exists under [`tests/integration/subtrack_mapping`](/home/osgw/.local/src/codex/ffx/tests/integration/subtrack_mapping). Remaining test-suite cleanup is now mostly about migrating and shrinking the legacy harness surface under [`tests/legacy`](/home/osgw/.local/src/codex/ffx/tests/legacy). - FFX logger setup now reuses named handlers, and fallback logger access no longer mutates handlers in ordinary constructors and helpers. +- The process wrapper now uses `subprocess.run(...)` with centralized command formatting plus stable timeout and missing-command error mapping. ## Focused Snapshot @@ -26,7 +27,7 @@ ## Optimization Candidates 1. 
CLI startup and import cost -- [`src/ffx/cli.py`](/home/osgw/.local/src/codex/ffx/src/ffx/cli.py) imports a large portion of the application at module import time, even for cheap commands such as `version`, `help`, `setup_dependencies`, and `upgrade`. +- [`src/ffx/cli.py`](/home/osgw/.local/src/codex/ffx/src/ffx/cli.py) imports a large portion of the application at module import time, even for cheap commands such as `version`, `help`, `configure_workstation`, and `upgrade`. - Optimization: - Move heavy imports into the commands that actually need them. - Keep the CLI root importable with only core stdlib and Click dependencies. @@ -70,18 +71,8 @@ - Expected value: - Lower latency on repeated experimentation. -6. Process wrapper lacks stronger execution controls -- [`src/ffx/process.py`](/home/osgw/.local/src/codex/ffx/src/ffx/process.py) uses `Popen(...).communicate()` without timeout handling, structured error mapping, or direct missing-command handling. -- Optimization: - - Add timeout support and clearer `FileNotFoundError` handling. - - Consider `subprocess.run(..., check=False, text=True)` where streaming is not required. - - Centralize return/error formatting. -- Expected value: - - Better failure diagnosis. - - Cleaner process management semantics. - -7. Tooling overlap and naming drift -- There are still overlapping prep and setup entrypoints across [`tools/prepare.sh`](/home/osgw/.local/src/codex/ffx/tools/prepare.sh), [`tools/setup.sh`](/home/osgw/.local/src/codex/ffx/tools/setup.sh), and newer CLI maintenance commands. +6. Tooling overlap and naming drift +- There are still overlapping workstation-setup entrypoints across [`tools/configure_workstation.sh`](/home/osgw/.local/src/codex/ffx/tools/configure_workstation.sh), [`tools/setup.sh`](/home/osgw/.local/src/codex/ffx/tools/setup.sh), and newer CLI maintenance commands. - Optimization: - Decide which scripts remain canonical. - Replace or remove legacy wrappers once equivalent CLI commands exist. 
@@ -90,7 +81,7 @@ - Less operator confusion. - Fewer duplicated procedures to maintain. -8. Placeholder UI surfaces should either ship or disappear +7. Placeholder UI surfaces should either ship or disappear - [`src/ffx/help_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/help_screen.py) and [`src/ffx/settings_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/settings_screen.py) are placeholders. - Optimization: - Either remove them from the active UI surface or complete them. @@ -99,7 +90,7 @@ - Leaner interface. - Lower UX ambiguity. -9. Large Textual screens repeat configuration and controller loading +8. Large Textual screens repeat configuration and controller loading - Screens such as [`src/ffx/media_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/media_details_screen.py), [`src/ffx/pattern_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/pattern_details_screen.py), and [`src/ffx/show_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/show_details_screen.py) repeat setup patterns and local metadata filtering extraction. - Optimization: - Extract a shared screen base or helper for common config/controller/bootstrap logic. @@ -108,7 +99,7 @@ - Lower maintenance overhead. - Easier UI iteration. -10. Several helper functions are unfinished or dead-weight +9. Several helper functions are unfinished or dead-weight - [`src/ffx/helper.py`](/home/osgw/.local/src/codex/ffx/src/ffx/helper.py) contains `permutateList(...): pass`. - There are many combinator and conversion placeholders across tests and migrations. - Optimization: @@ -118,7 +109,7 @@ - Smaller mental model. - Less time spent re-evaluating inactive paths. -11. Test suite shape is expensive to understand and likely expensive to run +10. 
Test suite shape is expensive to understand and likely expensive to run - The project still carries a large legacy matrix of combinator files under [`tests/legacy`](/home/osgw/.local/src/codex/ffx/tests/legacy), several placeholder `pass` implementations, and at least one suspicious filename with an embedded space: [`tests/legacy/disposition_combinator_2_3 .py`](/home/osgw/.local/src/codex/ffx/tests/legacy/disposition_combinator_2_3 .py). - A first focused replacement slice now exists in [`tests/integration/subtrack_mapping/test_cli_bundle.py`](/home/osgw/.local/src/codex/ffx/tests/integration/subtrack_mapping/test_cli_bundle.py), so the remaining work is migration and consolidation rather than creating the modern test shape from scratch. - Optimization: @@ -129,7 +120,7 @@ - Faster contributor onboarding. - Easier CI adoption later. -12. Process resource limiting semantics could be clearer +11. Process resource limiting semantics could be clearer - [`src/ffx/process.py`](/home/osgw/.local/src/codex/ffx/src/ffx/process.py) prepends `nice` and `cpulimit` directly when values are set. - Optimization: - Validate and document effective behavior for combined `nice` + `cpulimit`. @@ -138,7 +129,7 @@ - Fewer surprises in production-like runs. - Easier support for user-reported performance behavior. -13. Import-time dependency coupling makes maintenance commands brittle +12. Import-time dependency coupling makes maintenance commands brittle - Even after recent CLI maintenance additions, the top-level CLI module still imports most application modules before Click dispatch. - Optimization: - Push imports for ORM, Textual, TMDB, ffmpeg helpers, and descriptors behind the commands that actually need them. @@ -146,7 +137,7 @@ - Maintenance commands such as setup and upgrade stay usable when optional runtime dependencies are broken. - Better separation between media runtime code and maintenance tooling. -14. Regex and string utility cleanup +13. 
Regex and string utility cleanup - [`src/ffx/helper.py`](/home/osgw/.local/src/codex/ffx/src/ffx/helper.py) still emits a `SyntaxWarning` for `RICH_COLOR_PATTERN`. - Optimization: - Convert regex literals to raw strings where appropriate. @@ -155,7 +146,7 @@ - Cleaner runtime output. - Less warning noise during dry-run maintenance commands. -15. Database startup always runs schema creation and version checks +14. Database startup always runs schema creation and version checks - [`src/ffx/database.py`](/home/osgw/.local/src/codex/ffx/src/ffx/database.py) runs `Base.metadata.create_all(...)` and version checks every time a DB-backed context is created. - Optimization: - Measure startup cost and consider separating bootstrapping from ordinary command execution. diff --git a/pyproject.toml b/pyproject.toml index 224d73f..da2dc02 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,6 +45,7 @@ ffx = "ffx.cli:ffx" [tool.pytest.ini_options] testpaths = ["tests"] python_files = ["test_*.py"] +norecursedirs = ["tests/legacy", "tests/support"] addopts = "-ra" markers = [ "integration: exercises the FFX bundle with real ffmpeg/ffprobe processes", diff --git a/requirements/project.md b/requirements/project.md index 574b2a7..d47b826 100644 --- a/requirements/project.md +++ b/requirements/project.md @@ -35,7 +35,11 @@ ## Functional Requirements -- The system shall provide a CLI entrypoint named `ffx` with commands for `convert`, `inspect`, `shows`, `unmux`, `cropdetect`, `version`, and `help`. +- The system shall provide a CLI entrypoint named `ffx` with commands for `convert`, `inspect`, `shows`, `unmux`, `cropdetect`, `configure_workstation`, `upgrade`, `version`, and `help`. +- The system shall support a two-step local installation and preparation flow: + - `tools/setup.sh` is the first step and shall own bundle virtualenv creation, package installation, shell alias exposure, and optional Python test-package installation. 
+ - `tools/configure_workstation.sh` is the second step and shall own workstation dependency checks and installation plus local config and directory seeding. +- The CLI command `ffx configure_workstation` shall act as a wrapper for the second-step preparation flow in `tools/configure_workstation.sh`. - The system shall persist reusable normalization rules in SQLite for: - shows and show formatting digits, - regex-based filename patterns, @@ -65,7 +69,7 @@ - The system should stay understandable as a small local tool: controllers, descriptors, models, and screens should remain separate enough for contributors to trace a workflow end to end. - The system should produce predictable output for the same database rules, CLI overrides, and source files. - The system should preserve a lightweight operational footprint: local SQLite state, local log file, no mandatory background services. -- The system should be testable through the existing combinatorial CLI-oriented test harness and through isolated logic in descriptors and controllers. +- The system should be testable through modern automatically discovered tests and through remaining legacy harness coverage during migration. - The system should expose enough logging to diagnose failed probes, failed conversions, and rule mismatches without requiring a debugger. ## Constraints And Assumptions @@ -84,6 +88,9 @@ - Third-party dependencies: - `ffmpeg`, `ffprobe`, and `cpulimit`. - TMDB API access through `TMDB_API_KEY` for metadata enrichment. +- Installation assumptions: + - The Python-side bundle install step and optional Python test extras are managed by `tools/setup.sh`. + - The workstation-preparation step is managed separately by `tools/configure_workstation.sh` or `ffx configure_workstation`. 
## Acceptance Scope diff --git a/src/ffx/cli.py b/src/ffx/cli.py index dfa559e..2d18395 100755 --- a/src/ffx/cli.py +++ b/src/ffx/cli.py @@ -58,7 +58,7 @@ def ffx(ctx, database_file, verbose, dry_run): ctx.obj = {} - if ctx.invoked_subcommand in ('setup_dependencies', 'upgrade'): + if ctx.invoked_subcommand in ('configure_workstation', 'upgrade'): ctx.obj['dry_run'] = dry_run ctx.obj['verbosity'] = verbose return @@ -104,8 +104,8 @@ def getRepoRootPath(): return os.path.dirname(os.path.dirname(os.path.dirname(currentFilePath))) -def getPrepareScriptPath(): - return os.path.join(getRepoRootPath(), 'tools', 'prepare.sh') +def getConfigureWorkstationScriptPath(): + return os.path.join(getRepoRootPath(), 'tools', 'configure_workstation.sh') def getBundleVenvDirectory(): @@ -120,22 +120,23 @@ def getBundleRepoPath(): return getRepoRootPath() -@ffx.command(name='setup_dependencies') +@ffx.command(name='configure_workstation') @click.pass_context -@click.option('--check', is_flag=True, default=False, help='Only verify dependency readiness') -@click.argument('prepare_args', nargs=-1, type=click.UNPROCESSED) -def setup_dependencies(ctx, check, prepare_args): - prepareScriptPath = getPrepareScriptPath() +@click.option('--check', is_flag=True, default=False, help='Only verify workstation-configuration readiness') +@click.argument('configure_args', nargs=-1, type=click.UNPROCESSED) +def configure_workstation(ctx, check, configure_args): + """Prepare workstation dependencies and local config after bundle install.""" + configureScriptPath = getConfigureWorkstationScriptPath() - if not os.path.isfile(prepareScriptPath): - raise click.ClickException(f"Preparation script not found at {prepareScriptPath}") + if not os.path.isfile(configureScriptPath): + raise click.ClickException(f"Workstation configuration script not found at {configureScriptPath}") - commandSequence = ['bash', prepareScriptPath] + commandSequence = ['bash', configureScriptPath] if check: 
commandSequence.append('--check') - commandSequence += list(prepare_args) + commandSequence += list(configure_args) if ctx.obj.get('dry_run', False): click.echo(' '.join(commandSequence)) diff --git a/src/ffx/ffx_controller.py b/src/ffx/ffx_controller.py index 131809e..f3241fc 100644 --- a/src/ffx/ffx_controller.py +++ b/src/ffx/ffx_controller.py @@ -54,6 +54,13 @@ class FfxController(): self.__logger: Logger = context['logger'] + def executeCommandSequence(self, commandSequence): + out, err, rc = executeProcess(commandSequence, context=self.__context) + if rc: + raise click.ClickException(f"Command resulted in error: rc={rc} error={err}") + return out, err, rc + + def generateAV1Tokens(self, quality, preset, subIndex : int = 0): return [f"-c:v:{int(subIndex)}", 'libsvtav1', @@ -288,9 +295,7 @@ class FfxController(): self.__logger.debug("FfxController.runJob(): Running command sequence") if not self.__context['dry_run']: - out, err, rc = executeProcess(commandSequence, context=self.__context) - if rc: - raise click.ClickException(f"Command resulted in error: rc={rc} error={err}") + self.executeCommandSequence(commandSequence) return if videoEncoder == VideoEncoder.AV1: @@ -320,7 +325,7 @@ class FfxController(): self.__logger.debug(f"FfxController.runJob(): Running command sequence") if not self.__context['dry_run']: - executeProcess(commandSequence, context = self.__context) + self.executeCommandSequence(commandSequence) if videoEncoder == VideoEncoder.H264: @@ -350,7 +355,7 @@ class FfxController(): self.__logger.debug(f"FfxController.runJob(): Running command sequence") if not self.__context['dry_run']: - executeProcess(commandSequence, context = self.__context) + self.executeCommandSequence(commandSequence) @@ -382,7 +387,7 @@ class FfxController(): self.__logger.debug(f"FfxController.runJob(): Running command sequence 1") if not self.__context['dry_run']: - executeProcess(commandSequence1, context = self.__context) + 
self.executeCommandSequence(commandSequence1) commandSequence2 = (commandTokens + self.__targetMediaDescriptor.getImportFileTokens() @@ -409,9 +414,7 @@ class FfxController(): self.__logger.debug(f"FfxController.runJob(): Running command sequence 2") if not self.__context['dry_run']: - out, err, rc = executeProcess(commandSequence2, context = self.__context) - if rc: - raise click.ClickException(f"Command resulted in error: rc={rc} error={err}") + self.executeCommandSequence(commandSequence2) @@ -436,4 +439,4 @@ class FfxController(): str(length), path] - out, err, rc = executeProcess(commandTokens, context = self.__context) + self.executeCommandSequence(commandTokens) diff --git a/src/ffx/process.py b/src/ffx/process.py index b2ab4c4..7db5492 100644 --- a/src/ffx/process.py +++ b/src/ffx/process.py @@ -1,19 +1,23 @@ +import shlex import subprocess -from typing import List +from typing import Iterable, List from .logging_utils import get_ffx_logger -def executeProcess(commandSequence: List[str], directory: str = None, context: dict = None): +COMMAND_TIMED_OUT_RETURN_CODE = 124 +COMMAND_NOT_FOUND_RETURN_CODE = 127 + + +def formatCommandSequence(commandSequence: Iterable[str]) -> str: + return shlex.join([str(token) for token in commandSequence]) + + +def getWrappedCommandSequence(commandSequence: List[str], context: dict = None) -> List[str]: """ niceness -20 bis +19 cpu_percent: 1 bis 99 """ - if context is None: - logger = get_ffx_logger() - else: - logger = context['logger'] - niceSequence = [] niceness = int((context or {}).get('resource_limits', {}).get('niceness', 99)) @@ -24,11 +28,72 @@ def executeProcess(commandSequence: List[str], directory: str = None, context: d if cpu_percent >= 1: niceSequence += ['cpulimit', '-l', str(cpu_percent), '--'] - niceCommand = niceSequence + commandSequence + return niceSequence + [str(token) for token in commandSequence] - logger.debug(f"executeProcess() command sequence: {' '.join(niceCommand)}") - process = 
subprocess.Popen(niceCommand, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf-8', cwd = directory) - output, error = process.communicate() - - return output, error, process.returncode +def getProcessTimeoutSeconds(context: dict = None, timeoutSeconds: float = None): + if timeoutSeconds is None: + timeoutSeconds = (context or {}).get('resource_limits', {}).get('timeout_seconds') + + if timeoutSeconds is None: + return None + + timeoutSeconds = float(timeoutSeconds) + + return timeoutSeconds if timeoutSeconds > 0 else None + + +def executeProcess( + commandSequence: List[str], + directory: str = None, + context: dict = None, + timeoutSeconds: float = None, +): + + logger = context['logger'] if context is not None and 'logger' in context else get_ffx_logger() + wrappedCommandSequence = getWrappedCommandSequence(commandSequence, context=context) + timeoutSeconds = getProcessTimeoutSeconds(context=context, timeoutSeconds=timeoutSeconds) + + logger.debug( + "executeProcess() cwd=%s timeout=%s command=%s", + directory or '.', + timeoutSeconds if timeoutSeconds is not None else 'none', + formatCommandSequence(wrappedCommandSequence), + ) + + try: + completed = subprocess.run( + wrappedCommandSequence, + capture_output=True, + text=True, + cwd=directory, + timeout=timeoutSeconds, + check=False, + ) + except FileNotFoundError as ex: + error = ( + "Command not found while running " + + f"{formatCommandSequence(wrappedCommandSequence)}: {ex.filename or ex}" + ) + logger.error(error) + return '', error, COMMAND_NOT_FOUND_RETURN_CODE + except subprocess.TimeoutExpired as ex: + stdout = ex.stdout or '' + stderr = ex.stderr or '' + error = ( + f"Command timed out after {timeoutSeconds} seconds while running " + + formatCommandSequence(wrappedCommandSequence) + ) + if stderr: + error = f"{error}\n{stderr}" + logger.error(error) + return stdout, error, COMMAND_TIMED_OUT_RETURN_CODE + + if completed.returncode != 0: + logger.warning( + "executeProcess() rc=%s 
command=%s", + completed.returncode, + formatCommandSequence(wrappedCommandSequence), + ) + + return completed.stdout, completed.stderr, completed.returncode diff --git a/tests/unit/test_process.py b/tests/unit/test_process.py new file mode 100644 index 0000000..a379444 --- /dev/null +++ b/tests/unit/test_process.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +from pathlib import Path +import sys +import unittest + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx.process import ( # noqa: E402 + COMMAND_NOT_FOUND_RETURN_CODE, + COMMAND_TIMED_OUT_RETURN_CODE, + executeProcess, +) + + +class ProcessTests(unittest.TestCase): + def test_execute_process_returns_stdout_for_success(self): + out, err, rc = executeProcess( + [sys.executable, "-c", "print('hello from process')"] + ) + + self.assertEqual(0, rc) + self.assertEqual("", err) + self.assertEqual("hello from process\n", out) + + def test_execute_process_maps_missing_command_to_stable_error(self): + out, err, rc = executeProcess(["ffx-command-that-does-not-exist"]) + + self.assertEqual("", out) + self.assertEqual(COMMAND_NOT_FOUND_RETURN_CODE, rc) + self.assertIn("Command not found while running", err) + self.assertIn("ffx-command-that-does-not-exist", err) + + def test_execute_process_maps_timeout_to_stable_error(self): + out, err, rc = executeProcess( + [sys.executable, "-c", "import time; time.sleep(0.2)"], + timeoutSeconds=0.05, + ) + + self.assertEqual("", out) + self.assertEqual(COMMAND_TIMED_OUT_RETURN_CODE, rc) + self.assertIn("Command timed out", err) + self.assertIn(sys.executable, err) + + +if __name__ == "__main__": + unittest.main() diff --git a/tools/prepare.sh b/tools/configure_workstation.sh similarity index 91% rename from tools/prepare.sh rename to tools/configure_workstation.sh index f3c49ed..7d302bc 100755 --- a/tools/prepare.sh +++ b/tools/configure_workstation.sh @@ -2,8 +2,6 @@ set -u 
-SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)" - CONFIG_DIR="${FFX_CONFIG_DIR:-${HOME}/.local/etc}" CONFIG_FILE="${FFX_CONFIG_FILE:-${CONFIG_DIR}/ffx.json}" VAR_DIR="${FFX_VAR_DIR:-${HOME}/.local/var/ffx}" @@ -11,6 +9,7 @@ LOG_DIR="${FFX_LOG_DIR:-${HOME}/.local/var/log}" DATABASE_FILE="${FFX_DATABASE_FILE:-${VAR_DIR}/ffx.db}" CHECK_ONLY=0 +WITH_TESTS=0 MUTATIONS=0 INSTALL_FAILURES=0 @@ -33,12 +32,13 @@ fi usage() { cat < ${VENV_FFX} + - optionally install Python packages required for modern tests Options: - --check Report readiness only. Do not create or modify anything. - --help Show this help text. + --check Report readiness only. Do not create or modify anything. + --with-tests Also install and verify Python packages required for modern tests. + --help Show this help text. + +Notes: + - This is the first installation step. + - tools/configure_workstation.sh is the second step and configures system dependencies plus local user files. EOF } @@ -100,6 +107,10 @@ check_venv_ffx() { [ -x "${VENV_FFX}" ] } +check_venv_pytest() { + check_venv_dir && "${VENV_PYTHON}" -m pytest --version >/dev/null 2>&1 +} + check_bashrc_file() { [ -f "${BASHRC_FILE}" ] } @@ -136,6 +147,14 @@ detail_venv_ffx() { fi } +detail_venv_pytest() { + if check_venv_pytest; then + "${VENV_PYTHON}" -m pytest --version 2>/dev/null | head -n 1 + else + printf 'missing pytest in %s' "${VENV_DIR}" + fi +} + detail_bashrc_file() { if check_bashrc_file; then printf '%s' "${BASHRC_FILE}" @@ -186,6 +205,17 @@ print_status_report() { READINESS_FAILURES=$((READINESS_FAILURES + 1)) fi + if [ "${WITH_TESTS}" -eq 1 ]; then + echo + echo "Bundle test package status:" + if check_venv_pytest; then + report_component ok "bundle pytest" "$(detail_venv_pytest)" + else + report_component failed "bundle pytest" "$(detail_venv_pytest)" + READINESS_FAILURES=$((READINESS_FAILURES + 1)) + fi + fi + echo echo "Shell exposure status:" if check_bashrc_file; then @@ -220,11 +250,23 @@ ensure_bundle_venv() { 
return 1 fi - printf 'Installing FFX package into %s...\n' "${VENV_DIR}" - if ! "${VENV_PIP}" install --editable "${ROOT_DIR}"; then - printf 'Failed to install FFX package into %s.\n' "${VENV_DIR}" >&2 - INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) - return 1 + if [ "${WITH_TESTS}" -eq 1 ]; then + printf 'Installing FFX package and test extras into %s...\n' "${VENV_DIR}" + if ! ( + cd "${ROOT_DIR}" && + "${VENV_PIP}" install --editable '.[test]' + ); then + printf 'Failed to install FFX package and test extras into %s.\n' "${VENV_DIR}" >&2 + INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) + return 1 + fi + else + printf 'Installing FFX package into %s...\n' "${VENV_DIR}" + if ! "${VENV_PIP}" install --editable "${ROOT_DIR}"; then + printf 'Failed to install FFX package into %s.\n' "${VENV_DIR}" >&2 + INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) + return 1 + fi fi return 0 @@ -300,6 +342,9 @@ parse_args() { --check) CHECK_ONLY=1 ;; + --with-tests) + WITH_TESTS=1 + ;; --help|-h) usage exit 0 diff --git a/tools/test.sh b/tools/test.sh new file mode 100755 index 0000000..9480290 --- /dev/null +++ b/tools/test.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash + +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)" + +PYTHON_BIN="${FFX_PYTHON:-${HOME}/.local/share/ffx.venv/bin/python}" + +if [[ ! -x "${PYTHON_BIN}" ]]; then + echo "Missing Python interpreter: ${PYTHON_BIN}" >&2 + echo "Set FFX_PYTHON to a suitable interpreter if needed." 
>&2 + exit 1 +fi + +cd "${REPO_ROOT}" + +exec "${PYTHON_BIN}" -m pytest \ + --ignore=tests/legacy \ + --ignore=tests/support \ + tests \ + "$@" From be0f4b4c4ed5341c7e02a0887f4641946d96d358 Mon Sep 17 00:00:00 2001 From: Javanaut Date: Thu, 9 Apr 2026 13:49:14 +0200 Subject: [PATCH 05/28] Optimize database queries --- SCRATCHPAD.md | 36 +++++++------------ src/ffx/database.py | 4 +-- src/ffx/media_controller.py | 5 ++- src/ffx/pattern_controller.py | 33 +++++++++-------- src/ffx/shifted_season_controller.py | 27 +++++++------- src/ffx/show_controller.py | 29 +++++---------- src/ffx/tag_controller.py | 54 ++++++++++------------------ src/ffx/track_controller.py | 28 ++++++--------- 8 files changed, 82 insertions(+), 134 deletions(-) diff --git a/SCRATCHPAD.md b/SCRATCHPAD.md index ce4ba22..4502742 100644 --- a/SCRATCHPAD.md +++ b/SCRATCHPAD.md @@ -11,13 +11,13 @@ - A first modern integration slice now exists under [`tests/integration/subtrack_mapping`](/home/osgw/.local/src/codex/ffx/tests/integration/subtrack_mapping). Remaining test-suite cleanup is now mostly about migrating and shrinking the legacy harness surface under [`tests/legacy`](/home/osgw/.local/src/codex/ffx/tests/legacy). - FFX logger setup now reuses named handlers, and fallback logger access no longer mutates handlers in ordinary constructors and helpers. - The process wrapper now uses `subprocess.run(...)` with centralized command formatting plus stable timeout and missing-command error mapping. +- Active ORM controllers now use single-query accessors instead of paired `count()` plus `first()` lookups. ## Focused Snapshot - Highest-leverage application optimizations: - Lazy-load CLI command dependencies so lightweight commands do not import most of the app. - Collapse repeated `ffprobe` calls into a single probe result per source file. - - Replace `query.count()` plus `first()` patterns with single-query ORM accessors. 
- Cache or precompile filename pattern regexes instead of scanning every pattern for every file. - Highest-leverage repo and workflow optimizations: @@ -35,16 +35,7 @@ - Faster startup for scripting and tooling commands. - Less coupling between maintenance commands and the runtime stack. -2. Repeated database queries via `count()` plus `first()` -- Controllers such as [`src/ffx/show_controller.py`](/home/osgw/.local/src/codex/ffx/src/ffx/show_controller.py), [`src/ffx/pattern_controller.py`](/home/osgw/.local/src/codex/ffx/src/ffx/pattern_controller.py), and [`src/ffx/database.py`](/home/osgw/.local/src/codex/ffx/src/ffx/database.py) often do `q.count()` and then `q.first()`. -- Optimization: - - Replace with `first()`, `one_or_none()`, or existence checks that do not issue two queries. - - Standardize this across all controllers. -- Expected value: - - Lower SQLite query volume. - - Simpler controller code. - -3. Filename pattern matching scales linearly across all patterns +2. Filename pattern matching scales linearly across all patterns - [`src/ffx/pattern_controller.py`](/home/osgw/.local/src/codex/ffx/src/ffx/pattern_controller.py) loads every pattern and runs `re.search` against each filename on every lookup. - Optimization: - Cache compiled regexes in process memory. @@ -54,7 +45,7 @@ - Faster per-file setup when many patterns exist. - More predictable matching behavior. -4. Media probing does two separate `ffprobe` subprocesses per file +3. Media probing does two separate `ffprobe` subprocesses per file - [`src/ffx/file_properties.py`](/home/osgw/.local/src/codex/ffx/src/ffx/file_properties.py) calls `ffprobe` once for format data and once for stream data. - Optimization: - Use one probe call that requests both format and streams. @@ -63,7 +54,7 @@ - Less subprocess overhead. - Faster inspect and convert flows. -5. Crop detection is always a full extra ffmpeg scan +4. 
Crop detection is always a full extra ffmpeg scan - [`src/ffx/file_properties.py`](/home/osgw/.local/src/codex/ffx/src/ffx/file_properties.py) runs a dedicated `ffmpeg -vf cropdetect` pass for each file when crop detection is requested. - Optimization: - Cache crop results for repeated runs on the same source. @@ -71,7 +62,7 @@ - Expected value: - Lower latency on repeated experimentation. -6. Tooling overlap and naming drift +5. Tooling overlap and naming drift - There are still overlapping workstation-setup entrypoints across [`tools/configure_workstation.sh`](/home/osgw/.local/src/codex/ffx/tools/configure_workstation.sh), [`tools/setup.sh`](/home/osgw/.local/src/codex/ffx/tools/setup.sh), and newer CLI maintenance commands. - Optimization: - Decide which scripts remain canonical. @@ -81,7 +72,7 @@ - Less operator confusion. - Fewer duplicated procedures to maintain. -7. Placeholder UI surfaces should either ship or disappear +6. Placeholder UI surfaces should either ship or disappear - [`src/ffx/help_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/help_screen.py) and [`src/ffx/settings_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/settings_screen.py) are placeholders. - Optimization: - Either remove them from the active UI surface or complete them. @@ -90,7 +81,7 @@ - Leaner interface. - Lower UX ambiguity. -8. Large Textual screens repeat configuration and controller loading +7. Large Textual screens repeat configuration and controller loading - Screens such as [`src/ffx/media_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/media_details_screen.py), [`src/ffx/pattern_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/pattern_details_screen.py), and [`src/ffx/show_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/show_details_screen.py) repeat setup patterns and local metadata filtering extraction. - Optimization: - Extract a shared screen base or helper for common config/controller/bootstrap logic. 
@@ -99,7 +90,7 @@ - Lower maintenance overhead. - Easier UI iteration. -9. Several helper functions are unfinished or dead-weight +8. Several helper functions are unfinished or dead-weight - [`src/ffx/helper.py`](/home/osgw/.local/src/codex/ffx/src/ffx/helper.py) contains `permutateList(...): pass`. - There are many combinator and conversion placeholders across tests and migrations. - Optimization: @@ -109,7 +100,7 @@ - Smaller mental model. - Less time spent re-evaluating inactive paths. -10. Test suite shape is expensive to understand and likely expensive to run +9. Test suite shape is expensive to understand and likely expensive to run - The project still carries a large legacy matrix of combinator files under [`tests/legacy`](/home/osgw/.local/src/codex/ffx/tests/legacy), several placeholder `pass` implementations, and at least one suspicious filename with an embedded space: [`tests/legacy/disposition_combinator_2_3 .py`](/home/osgw/.local/src/codex/ffx/tests/legacy/disposition_combinator_2_3 .py). - A first focused replacement slice now exists in [`tests/integration/subtrack_mapping/test_cli_bundle.py`](/home/osgw/.local/src/codex/ffx/tests/integration/subtrack_mapping/test_cli_bundle.py), so the remaining work is migration and consolidation rather than creating the modern test shape from scratch. - Optimization: @@ -120,7 +111,7 @@ - Faster contributor onboarding. - Easier CI adoption later. -11. Process resource limiting semantics could be clearer +10. Process resource limiting semantics could be clearer - [`src/ffx/process.py`](/home/osgw/.local/src/codex/ffx/src/ffx/process.py) prepends `nice` and `cpulimit` directly when values are set. - Optimization: - Validate and document effective behavior for combined `nice` + `cpulimit`. @@ -129,7 +120,7 @@ - Fewer surprises in production-like runs. - Easier support for user-reported performance behavior. -12. Import-time dependency coupling makes maintenance commands brittle +11. 
Import-time dependency coupling makes maintenance commands brittle - Even after recent CLI maintenance additions, the top-level CLI module still imports most application modules before Click dispatch. - Optimization: - Push imports for ORM, Textual, TMDB, ffmpeg helpers, and descriptors behind the commands that actually need them. @@ -137,7 +128,7 @@ - Maintenance commands such as setup and upgrade stay usable when optional runtime dependencies are broken. - Better separation between media runtime code and maintenance tooling. -13. Regex and string utility cleanup +12. Regex and string utility cleanup - [`src/ffx/helper.py`](/home/osgw/.local/src/codex/ffx/src/ffx/helper.py) still emits a `SyntaxWarning` for `RICH_COLOR_PATTERN`. - Optimization: - Convert regex literals to raw strings where appropriate. @@ -146,7 +137,7 @@ - Cleaner runtime output. - Less warning noise during dry-run maintenance commands. -14. Database startup always runs schema creation and version checks +13. Database startup always runs schema creation and version checks - [`src/ffx/database.py`](/home/osgw/.local/src/codex/ffx/src/ffx/database.py) runs `Base.metadata.create_all(...)` and version checks every time a DB-backed context is created. - Optimization: - Measure startup cost and consider separating bootstrapping from ordinary command execution. @@ -171,7 +162,6 @@ 1. Triage the list into quick wins, medium refactors, and long-horizon cleanup. 2. Tackle the cheapest high-impact items first: - regex raw-string warning cleanup, - - `count()` plus `first()` query cleanup, - single-call `ffprobe` refactor. 3. Decide whether maintenance/tooling command imports should be split from media-runtime imports before adding more CLI maintenance surface. 
diff --git a/src/ffx/database.py b/src/ffx/database.py index 239817d..10430e3 100644 --- a/src/ffx/database.py +++ b/src/ffx/database.py @@ -70,9 +70,9 @@ def getDatabaseVersion(databaseContext): Session = databaseContext['session'] s = Session() - q = s.query(Property).filter(Property.key == DATABASE_VERSION_KEY) + versionProperty = s.query(Property).filter(Property.key == DATABASE_VERSION_KEY).first() - return int(q.first().value) if q.count() else 0 + return int(versionProperty.value) if versionProperty is not None else 0 except Exception as ex: raise click.ClickException(f"getDatabaseVersion(): {repr(ex)}") diff --git a/src/ffx/media_controller.py b/src/ffx/media_controller.py index bb2d0de..d46285d 100644 --- a/src/ffx/media_controller.py +++ b/src/ffx/media_controller.py @@ -25,10 +25,9 @@ class MediaController(): pid = int(patternId) s = self.Session() - q = s.query(Pattern).filter(Pattern.id == pid) + pattern = s.query(Pattern).filter(Pattern.id == pid).first() - if q.count(): - pattern = q.first + if pattern is not None: for mediaTagKey, mediaTagValue in mediaDescriptor.getTags(): self.__tac.updateMediaTag(pid, mediaTagKey, mediaTagValue) diff --git a/src/ffx/pattern_controller.py b/src/ffx/pattern_controller.py index 089c30c..2e2cdd1 100644 --- a/src/ffx/pattern_controller.py +++ b/src/ffx/pattern_controller.py @@ -19,10 +19,12 @@ class PatternController(): try: s = self.Session() - q = s.query(Pattern).filter(Pattern.show_id == int(patternObj['show_id']), - Pattern.pattern == str(patternObj['pattern'])) + pattern = s.query(Pattern).filter( + Pattern.show_id == int(patternObj['show_id']), + Pattern.pattern == str(patternObj['pattern']), + ).first() - if not q.count(): + if pattern is None: pattern = Pattern(show_id = int(patternObj['show_id']), pattern = str(patternObj['pattern'])) s.add(pattern) @@ -41,11 +43,9 @@ class PatternController(): try: s = self.Session() - q = s.query(Pattern).filter(Pattern.id == int(patternId)) + pattern = 
s.query(Pattern).filter(Pattern.id == int(patternId)).first() - if q.count(): - - pattern: Pattern = q.first() + if pattern is not None: pattern.show_id = int(patternObj['show_id']) pattern.pattern = str(patternObj['pattern']) @@ -69,10 +69,12 @@ class PatternController(): try: s = self.Session() - q = s.query(Pattern).filter(Pattern.show_id == int(patternObj['show_id']), Pattern.pattern == str(patternObj['pattern'])) + pattern = s.query(Pattern).filter( + Pattern.show_id == int(patternObj['show_id']), + Pattern.pattern == str(patternObj['pattern']), + ).first() - if q.count(): - pattern = q.first() + if pattern is not None: return int(pattern.id) else: return None @@ -90,9 +92,7 @@ class PatternController(): try: s = self.Session() - q = s.query(Pattern).filter(Pattern.id == int(patternId)) - - return q.first() if q.count() else None + return s.query(Pattern).filter(Pattern.id == int(patternId)).first() except Exception as ex: raise click.ClickException(f"PatternController.getPattern(): {repr(ex)}") @@ -103,13 +103,12 @@ class PatternController(): def deletePattern(self, patternId): try: s = self.Session() - q = s.query(Pattern).filter(Pattern.id == int(patternId)) + pattern = s.query(Pattern).filter(Pattern.id == int(patternId)).first() - if q.count(): + if pattern is not None: #DAFUQ: https://stackoverflow.com/a/19245058 # q.delete() - pattern = q.first() s.delete(pattern) s.commit() @@ -158,4 +157,4 @@ class PatternController(): # except Exception as ex: # raise click.ClickException(f"PatternController.getMediaDescriptor(): {repr(ex)}") # finally: -# s.close() \ No newline at end of file +# s.close() diff --git a/src/ffx/shifted_season_controller.py b/src/ffx/shifted_season_controller.py index b6e01e6..6fc254d 100644 --- a/src/ffx/shifted_season_controller.py +++ b/src/ffx/shifted_season_controller.py @@ -101,11 +101,9 @@ class ShiftedSeasonController(): try: s = self.Session() - q = s.query(ShiftedSeason).filter(ShiftedSeason.id == int(shiftedSeasonId)) + 
shiftedSeason = s.query(ShiftedSeason).filter(ShiftedSeason.id == int(shiftedSeasonId)).first() - if q.count(): - - shiftedSeason = q.first() + if shiftedSeason is not None: shiftedSeason.original_season = int(shiftedSeasonObj['original_season']) shiftedSeason.first_episode = int(shiftedSeasonObj['first_episode']) @@ -141,12 +139,14 @@ class ShiftedSeasonController(): try: s = self.Session() - q = s.query(ShiftedSeason).filter(ShiftedSeason.show_id == int(showId), - ShiftedSeason.original_season == int(originalSeason), - ShiftedSeason.first_episode == int(firstEpisode), - ShiftedSeason.last_episode == int(lastEpisode)) + shiftedSeason = s.query(ShiftedSeason).filter( + ShiftedSeason.show_id == int(showId), + ShiftedSeason.original_season == int(originalSeason), + ShiftedSeason.first_episode == int(firstEpisode), + ShiftedSeason.last_episode == int(lastEpisode), + ).first() - return q.first().getId() if q.count() else None + return shiftedSeason.getId() if shiftedSeason is not None else None except Exception as ex: raise click.ClickException(f"PatternController.findShiftedSeason(): {repr(ex)}") @@ -177,9 +177,7 @@ class ShiftedSeasonController(): try: s = self.Session() - q = s.query(ShiftedSeason).filter(ShiftedSeason.id == int(shiftedSeasonId)) - - return q.first() if q.count() else None + return s.query(ShiftedSeason).filter(ShiftedSeason.id == int(shiftedSeasonId)).first() except Exception as ex: raise click.ClickException(f"ShiftedSeasonController.getShiftedSeason(): {repr(ex)}") @@ -194,13 +192,12 @@ class ShiftedSeasonController(): try: s = self.Session() - q = s.query(ShiftedSeason).filter(ShiftedSeason.id == int(shiftedSeasonId)) + shiftedSeason = s.query(ShiftedSeason).filter(ShiftedSeason.id == int(shiftedSeasonId)).first() - if q.count(): + if shiftedSeason is not None: #DAFUQ: https://stackoverflow.com/a/19245058 # q.delete() - shiftedSeason = q.first() s.delete(shiftedSeason) s.commit() diff --git a/src/ffx/show_controller.py 
b/src/ffx/show_controller.py index a426eaf..7407a19 100644 --- a/src/ffx/show_controller.py +++ b/src/ffx/show_controller.py @@ -16,10 +16,9 @@ class ShowController(): try: s = self.Session() - q = s.query(Show).filter(Show.id == showId) + show = s.query(Show).filter(Show.id == showId).first() - if q.count(): - show: Show = q.first() + if show is not None: return show.getDescriptor(self.context) except Exception as ex: @@ -31,9 +30,7 @@ class ShowController(): try: s = self.Session() - q = s.query(Show).filter(Show.id == showId) - - return q.first() if q.count() else None + return s.query(Show).filter(Show.id == showId).first() except Exception as ex: raise click.ClickException(f"ShowController.getShow(): {repr(ex)}") @@ -44,12 +41,7 @@ class ShowController(): try: s = self.Session() - q = s.query(Show) - - if q.count(): - return q.all() - else: - return [] + return s.query(Show).all() except Exception as ex: raise click.ClickException(f"ShowController.getAllShows(): {repr(ex)}") @@ -61,9 +53,9 @@ class ShowController(): try: s = self.Session() - q = s.query(Show).filter(Show.id == showDescriptor.getId()) + currentShow = s.query(Show).filter(Show.id == showDescriptor.getId()).first() - if not q.count(): + if currentShow is None: show = Show(id = int(showDescriptor.getId()), name = str(showDescriptor.getName()), year = int(showDescriptor.getYear()), @@ -76,9 +68,6 @@ class ShowController(): s.commit() return True else: - - currentShow = q.first() - changed = False if currentShow.name != str(showDescriptor.getName()): currentShow.name = str(showDescriptor.getName()) @@ -113,14 +102,12 @@ class ShowController(): def deleteShow(self, show_id): try: s = self.Session() - q = s.query(Show).filter(Show.id == int(show_id)) + show = s.query(Show).filter(Show.id == int(show_id)).first() - - if q.count(): + if show is not None: #DAFUQ: https://stackoverflow.com/a/19245058 # q.delete() - show = q.first() s.delete(show) s.commit() diff --git a/src/ffx/tag_controller.py 
b/src/ffx/tag_controller.py index 792dad7..4e14479 100644 --- a/src/ffx/tag_controller.py +++ b/src/ffx/tag_controller.py @@ -67,10 +67,11 @@ class TagController(): try: s = self.Session() - q = s.query(MediaTag).filter(MediaTag.pattern_id == int(patternId), - MediaTag.key == str(tagKey)) - if q.count(): - tag = q.first() + tag = s.query(MediaTag).filter( + MediaTag.pattern_id == int(patternId), + MediaTag.key == str(tagKey), + ).first() + if tag is not None: s.delete(tag) s.commit() return True @@ -107,12 +108,8 @@ class TagController(): try: s = self.Session() - q = s.query(MediaTag).filter(MediaTag.pattern_id == int(patternId)) - - if q.count(): - return {t.key:t.value for t in q.all()} - else: - return {} + tags = s.query(MediaTag).filter(MediaTag.pattern_id == int(patternId)).all() + return {t.key:t.value for t in tags} except Exception as ex: raise click.ClickException(f"TagController.findAllMediaTags(): {repr(ex)}") @@ -125,12 +122,8 @@ class TagController(): try: s = self.Session() - q = s.query(TrackTag).filter(TrackTag.track_id == int(trackId)) - - if q.count(): - return {t.key:t.value for t in q.all()} - else: - return {} + tags = s.query(TrackTag).filter(TrackTag.track_id == int(trackId)).all() + return {t.key:t.value for t in tags} except Exception as ex: raise click.ClickException(f"TagController.findAllTracks(): {repr(ex)}") @@ -142,12 +135,7 @@ class TagController(): try: s = self.Session() - q = s.query(Track).filter(MediaTag.track_id == int(trackId), MediaTag.key == str(trackKey)) - - if q.count(): - return q.first() - else: - return None + return s.query(Track).filter(MediaTag.track_id == int(trackId), MediaTag.key == str(trackKey)).first() except Exception as ex: raise click.ClickException(f"TagController.findMediaTag(): {repr(ex)}") @@ -158,12 +146,10 @@ class TagController(): try: s = self.Session() - q = s.query(TrackTag).filter(TrackTag.track_id == int(trackId), TrackTag.key == str(tagKey)) - - if q.count(): - return q.first() - else: - 
return None + return s.query(TrackTag).filter( + TrackTag.track_id == int(trackId), + TrackTag.key == str(tagKey), + ).first() except Exception as ex: raise click.ClickException(f"TagController.findTrackTag(): {repr(ex)}") @@ -175,11 +161,9 @@ class TagController(): def deleteMediaTag(self, tagId) -> bool: try: s = self.Session() - q = s.query(MediaTag).filter(MediaTag.id == int(tagId)) + tag = s.query(MediaTag).filter(MediaTag.id == int(tagId)).first() - if q.count(): - - tag = q.first() + if tag is not None: s.delete(tag) @@ -201,11 +185,9 @@ class TagController(): try: s = self.Session() - q = s.query(TrackTag).filter(TrackTag.id == int(tagId)) + tag = s.query(TrackTag).filter(TrackTag.id == int(tagId)).first() - if q.count(): - - tag = q.first() + if tag is not None: s.delete(tag) diff --git a/src/ffx/track_controller.py b/src/ffx/track_controller.py index 5b1de31..ece0a24 100644 --- a/src/ffx/track_controller.py +++ b/src/ffx/track_controller.py @@ -75,11 +75,9 @@ class TrackController(): try: s = self.Session() - q = s.query(Track).filter(Track.id == int(trackId)) + track = s.query(Track).filter(Track.id == int(trackId)).first() - if q.count(): - - track : Track = q.first() + if track is not None: track.index = int(trackDescriptor.getIndex()) @@ -193,12 +191,10 @@ class TrackController(): try: s = self.Session() - q = s.query(Track).filter(Track.pattern_id == int(patternId), Track.index == int(index)) - - if q.count(): - return q.first() - else: - return None + return s.query(Track).filter( + Track.pattern_id == int(patternId), + Track.index == int(index), + ).first() except Exception as ex: raise click.ClickException(f"TrackController.getTrack(): {repr(ex)}") @@ -218,11 +214,9 @@ class TrackController(): try: s = self.Session() - q = s.query(Track).filter(Track.pattern_id == patternId, Track.index == index) + track = s.query(Track).filter(Track.pattern_id == patternId, Track.index == index).first() - if q.count(): - - track : Track = q.first() + if track is 
not None: if state: track.setDisposition(disposition) @@ -244,10 +238,10 @@ class TrackController(): try: s = self.Session() - q = s.query(Track).filter(Track.id == int(trackId)) + track = s.query(Track).filter(Track.id == int(trackId)).first() - if q.count(): - patternId = int(q.first().pattern_id) + if track is not None: + patternId = int(track.pattern_id) q_siblings = s.query(Track).filter(Track.pattern_id == patternId).order_by(Track.index) From d19e69990a8883734a39087b65f1430e6a7f160d Mon Sep 17 00:00:00 2001 From: Javanaut Date: Thu, 9 Apr 2026 16:11:51 +0200 Subject: [PATCH 06/28] Opt pattern matching --- SCRATCHPAD.md | 34 +- pyproject.toml | 1 + requirements/pattern_management.md | 68 +++ requirements/project.md | 1 + src/ffx/file_properties.py | 3 +- src/ffx/media_details_screen.py | 24 +- src/ffx/model/pattern.py | 5 +- src/ffx/pattern_controller.py | 412 +++++++++++++---- src/ffx/pattern_details_screen.py | 331 ++++++++------ src/ffx/track_controller.py | 8 +- src/ffx/track_delete_screen.py | 23 +- src/ffx/track_details_screen.py | 429 +++++++++--------- .../pattern_management/__init__.py | 1 + .../test_cli_pattern_matching.py | 138 ++++++ tests/support/ffx_bundle.py | 52 +-- tests/unit/test_pattern_management.py | 240 ++++++++++ 16 files changed, 1248 insertions(+), 522 deletions(-) create mode 100644 requirements/pattern_management.md create mode 100644 tests/integration/pattern_management/__init__.py create mode 100644 tests/integration/pattern_management/test_cli_pattern_matching.py create mode 100644 tests/unit/test_pattern_management.py diff --git a/SCRATCHPAD.md b/SCRATCHPAD.md index 4502742..c0ab74d 100644 --- a/SCRATCHPAD.md +++ b/SCRATCHPAD.md @@ -12,13 +12,13 @@ - FFX logger setup now reuses named handlers, and fallback logger access no longer mutates handlers in ordinary constructors and helpers. - The process wrapper now uses `subprocess.run(...)` with centralized command formatting plus stable timeout and missing-command error mapping. 
- Active ORM controllers now use single-query accessors instead of paired `count()` plus `first()` lookups. +- Pattern matching now uses cached compiled regexes plus explicit duplicate-match errors, and pattern creation flows no longer persist zero-track patterns. ## Focused Snapshot - Highest-leverage application optimizations: - Lazy-load CLI command dependencies so lightweight commands do not import most of the app. - Collapse repeated `ffprobe` calls into a single probe result per source file. - - Cache or precompile filename pattern regexes instead of scanning every pattern for every file. - Highest-leverage repo and workflow optimizations: - Consolidate setup and upgrade tooling to reduce overlapping shell-script responsibilities. @@ -35,17 +35,7 @@ - Faster startup for scripting and tooling commands. - Less coupling between maintenance commands and the runtime stack. -2. Filename pattern matching scales linearly across all patterns -- [`src/ffx/pattern_controller.py`](/home/osgw/.local/src/codex/ffx/src/ffx/pattern_controller.py) loads every pattern and runs `re.search` against each filename on every lookup. -- Optimization: - - Cache compiled regexes in process memory. - - Stop after the first intentional match instead of silently returning the last match. - - Consider explicit pattern priority if overlapping rules are valid. -- Expected value: - - Faster per-file setup when many patterns exist. - - More predictable matching behavior. - -3. Media probing does two separate `ffprobe` subprocesses per file +2. Media probing does two separate `ffprobe` subprocesses per file - [`src/ffx/file_properties.py`](/home/osgw/.local/src/codex/ffx/src/ffx/file_properties.py) calls `ffprobe` once for format data and once for stream data. - Optimization: - Use one probe call that requests both format and streams. @@ -54,7 +44,7 @@ - Less subprocess overhead. - Faster inspect and convert flows. -4. Crop detection is always a full extra ffmpeg scan +3. 
Crop detection is always a full extra ffmpeg scan - [`src/ffx/file_properties.py`](/home/osgw/.local/src/codex/ffx/src/ffx/file_properties.py) runs a dedicated `ffmpeg -vf cropdetect` pass for each file when crop detection is requested. - Optimization: - Cache crop results for repeated runs on the same source. @@ -62,7 +52,7 @@ - Expected value: - Lower latency on repeated experimentation. -5. Tooling overlap and naming drift +4. Tooling overlap and naming drift - There are still overlapping workstation-setup entrypoints across [`tools/configure_workstation.sh`](/home/osgw/.local/src/codex/ffx/tools/configure_workstation.sh), [`tools/setup.sh`](/home/osgw/.local/src/codex/ffx/tools/setup.sh), and newer CLI maintenance commands. - Optimization: - Decide which scripts remain canonical. @@ -72,7 +62,7 @@ - Less operator confusion. - Fewer duplicated procedures to maintain. -6. Placeholder UI surfaces should either ship or disappear +5. Placeholder UI surfaces should either ship or disappear - [`src/ffx/help_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/help_screen.py) and [`src/ffx/settings_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/settings_screen.py) are placeholders. - Optimization: - Either remove them from the active UI surface or complete them. @@ -81,7 +71,7 @@ - Leaner interface. - Lower UX ambiguity. -7. Large Textual screens repeat configuration and controller loading +6. Large Textual screens repeat configuration and controller loading - Screens such as [`src/ffx/media_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/media_details_screen.py), [`src/ffx/pattern_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/pattern_details_screen.py), and [`src/ffx/show_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/show_details_screen.py) repeat setup patterns and local metadata filtering extraction. - Optimization: - Extract a shared screen base or helper for common config/controller/bootstrap logic. 
@@ -90,7 +80,7 @@ - Lower maintenance overhead. - Easier UI iteration. -8. Several helper functions are unfinished or dead-weight +7. Several helper functions are unfinished or dead-weight - [`src/ffx/helper.py`](/home/osgw/.local/src/codex/ffx/src/ffx/helper.py) contains `permutateList(...): pass`. - There are many combinator and conversion placeholders across tests and migrations. - Optimization: @@ -100,7 +90,7 @@ - Smaller mental model. - Less time spent re-evaluating inactive paths. -9. Test suite shape is expensive to understand and likely expensive to run +8. Test suite shape is expensive to understand and likely expensive to run - The project still carries a large legacy matrix of combinator files under [`tests/legacy`](/home/osgw/.local/src/codex/ffx/tests/legacy), several placeholder `pass` implementations, and at least one suspicious filename with an embedded space: [`tests/legacy/disposition_combinator_2_3 .py`](/home/osgw/.local/src/codex/ffx/tests/legacy/disposition_combinator_2_3 .py). - A first focused replacement slice now exists in [`tests/integration/subtrack_mapping/test_cli_bundle.py`](/home/osgw/.local/src/codex/ffx/tests/integration/subtrack_mapping/test_cli_bundle.py), so the remaining work is migration and consolidation rather than creating the modern test shape from scratch. - Optimization: @@ -111,7 +101,7 @@ - Faster contributor onboarding. - Easier CI adoption later. -10. Process resource limiting semantics could be clearer +9. Process resource limiting semantics could be clearer - [`src/ffx/process.py`](/home/osgw/.local/src/codex/ffx/src/ffx/process.py) prepends `nice` and `cpulimit` directly when values are set. - Optimization: - Validate and document effective behavior for combined `nice` + `cpulimit`. @@ -120,7 +110,7 @@ - Fewer surprises in production-like runs. - Easier support for user-reported performance behavior. -11. Import-time dependency coupling makes maintenance commands brittle +10. 
Import-time dependency coupling makes maintenance commands brittle - Even after recent CLI maintenance additions, the top-level CLI module still imports most application modules before Click dispatch. - Optimization: - Push imports for ORM, Textual, TMDB, ffmpeg helpers, and descriptors behind the commands that actually need them. @@ -128,7 +118,7 @@ - Maintenance commands such as setup and upgrade stay usable when optional runtime dependencies are broken. - Better separation between media runtime code and maintenance tooling. -12. Regex and string utility cleanup +11. Regex and string utility cleanup - [`src/ffx/helper.py`](/home/osgw/.local/src/codex/ffx/src/ffx/helper.py) still emits a `SyntaxWarning` for `RICH_COLOR_PATTERN`. - Optimization: - Convert regex literals to raw strings where appropriate. @@ -137,7 +127,7 @@ - Cleaner runtime output. - Less warning noise during dry-run maintenance commands. -13. Database startup always runs schema creation and version checks +12. Database startup always runs schema creation and version checks - [`src/ffx/database.py`](/home/osgw/.local/src/codex/ffx/src/ffx/database.py) runs `Base.metadata.create_all(...)` and version checks every time a DB-backed context is created. - Optimization: - Measure startup cost and consider separating bootstrapping from ordinary command execution. 
diff --git a/pyproject.toml b/pyproject.toml index da2dc02..7c00f6d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,5 +49,6 @@ norecursedirs = ["tests/legacy", "tests/support"] addopts = "-ra" markers = [ "integration: exercises the FFX bundle with real ffmpeg/ffprobe processes", + "pattern_management: covers requirements/pattern_management.md", "subtrack_mapping: covers requirements/subtrack_mapping.md", ] diff --git a/requirements/pattern_management.md b/requirements/pattern_management.md new file mode 100644 index 0000000..51f6674 --- /dev/null +++ b/requirements/pattern_management.md @@ -0,0 +1,68 @@ +# Pattern Management + +This file defines the behavioral contract for managing shows, patterns, and +pattern-backed filename matching. + +Primary source: actual tool code in `src/ffx/`. +Secondary source: operator intent captured in task discussion. + +## Scope + +- The show, pattern, and track hierarchy stored in SQLite. +- The role of a pattern as a reusable normalization definition for related media files. +- Filename-driven assignment of a scanned media file to one show through one matching pattern. +- Duplicate-match handling when more than one pattern matches the same filename. + +## Terms + +- `show`: logical series identity such as one TV show entry in the database. +- `pattern`: regex-backed normalization definition attached to one show. +- `track`: one persisted target-track definition attached to one pattern. +- `scanned media file`: one source file currently being inspected or converted. +- `duplicate pattern match`: a filename state where more than one stored pattern matches the same scanned media file. +- `pattern-backed target schema`: the combination of one pattern's stored media tags and stored track definitions. + +## Rules + +- `PATTERN_MANAGEMENT-0001`: The domain model shall treat a show as the parent entity for patterns that describe distinct release families or normalization schemas for that show. 
A show may temporarily exist without patterns during editing or initial TUI creation. +- `PATTERN_MANAGEMENT-0002`: Each persisted pattern shall belong to exactly one show. +- `PATTERN_MANAGEMENT-0003`: The domain model shall treat a pattern as the reusable normalization definition for a series of media files expected to share the same internal track layout and materially similar stream and container metadata. +- `PATTERN_MANAGEMENT-0004`: Each persisted track definition shall belong to exactly one pattern. +- `PATTERN_MANAGEMENT-0005`: A pattern may also carry pattern-level media tags. The pattern's media tags plus its track definitions together form the pattern-backed target schema. +- `PATTERN_MANAGEMENT-0006`: A scanned media file shall resolve to at most one pattern and therefore at most one show. +- `PATTERN_MANAGEMENT-0007`: If no pattern matches a filename, the file shall remain unmatched rather than being assigned implicitly. +- `PATTERN_MANAGEMENT-0008`: If more than one pattern matches the same filename, the system shall raise a duplicate pattern match error instead of silently selecting one. +- `PATTERN_MANAGEMENT-0009`: Duplicate-match detection shall apply regardless of whether the competing patterns belong to the same show or to different shows. +- `PATTERN_MANAGEMENT-0010`: Exact duplicate pattern definitions for the same show should not create multiple persisted pattern rows. +- `PATTERN_MANAGEMENT-0011`: A persisted pattern shall define one or more tracks. Creating or retaining a zero-track pattern in the database is invalid managed state and shall be prohibited. +- `PATTERN_MANAGEMENT-0012`: A show may exist without patterns as an intermediate editing state, for example when a user creates the show first in the TUI and adds patterns later. 
+- `PATTERN_MANAGEMENT-0013`: Operator-facing pattern management should expose the owning show, regex pattern, stored track set, and stored media-tag set so a user can reason about matching and normalization behavior. +- `PATTERN_MANAGEMENT-0014`: Matching semantics shall be deterministic and documented. Implicit "last matching pattern wins" behavior is not acceptable released behavior. + +## Acceptance + +- A filename that matches exactly one pattern yields one matched pattern and one show identity. +- A filename that matches no pattern yields no matched pattern and an unmatched state. +- A filename that matches more than one pattern yields an explicit duplicate-match error. +- A pattern-backed target schema can be reconstructed from one pattern's stored media tags and stored track definitions. +- A show may be stored before any patterns are attached to it. +- A pattern cannot be stored or retained as a valid managed pattern unless at least one track is defined for it. +- Pattern-backed conversion never proceeds with two competing matching patterns for the same input filename. + +## Current Code Fit + +- `src/ffx/model/show.py` implements a one-to-many `Show -> Pattern` relationship. +- `src/ffx/model/pattern.py` implements `Pattern.show_id`, a one-to-many `Pattern -> Track` relationship, a one-to-many `Pattern -> MediaTag` relationship, and a unique `(show_id, pattern)` constraint for freshly created databases. +- `src/ffx/model/track.py` implements `Track.pattern_id`, so each persisted track belongs to one pattern. +- `src/ffx/model/pattern.py` reconstructs a pattern-backed target schema through `Pattern.getMediaDescriptor(...)`, combining stored media tags and stored tracks. +- `src/ffx/file_properties.py` assumes a scanned file resolves to at most one pattern, because it stores only one `self.__pattern` and derives one `show_id` from it. 
+- `src/ffx/pattern_controller.py` prevents exact duplicate `(show_id, pattern)` definitions during create and update flows, and it refreshes cached compiled regexes when stored pattern expressions change. +- `src/ffx/pattern_controller.py` now complies with duplicate-match safety. `matchFilename(...)` scans deterministically, returns exactly one match, returns `{}` for no match, and raises an explicit duplicate-pattern-match error when more than one pattern matches the same filename. +- The current persistence layer already aligns with the intended empty-show workflow because a show can exist without patterns. +- New pattern creation and schema replacement flows now require at least one track, and `TrackController.deleteTrack(...)` prevents deleting the last persisted track from a pattern. +- Trackless legacy rows can still exist in preexisting databases, but matching now rejects them explicitly instead of letting them participate silently. + +## Risks + +- The intended "release family" meaning of a pattern is a domain assumption, not something the code verifies automatically across all files matching that pattern. +- Preexisting databases created before the newer validation rules may still contain invalid rows, so upgrade and cleanup paths should continue to treat explicit validation failures as recoverable operator signals. diff --git a/requirements/project.md b/requirements/project.md index d47b826..2e8130c 100644 --- a/requirements/project.md +++ b/requirements/project.md @@ -47,6 +47,7 @@ - per-pattern stream definitions, - shifted-season mappings, - internal database version properties. +- Detailed show, pattern, and duplicate-match management rules live in `requirements/pattern_management.md`. - The system shall inspect source media using `ffprobe` and derive a structured description of container metadata and streams. 
- The system shall optionally open a Textual UI to browse shows, inspect files, and create, edit, or delete shows, patterns, stream definitions, tags, and shifted-season rules. - The system shall match filenames against stored regex patterns to decide whether an input file should inherit a target stream and metadata schema. diff --git a/src/ffx/file_properties.py b/src/ffx/file_properties.py index e8134db..09f676e 100644 --- a/src/ffx/file_properties.py +++ b/src/ffx/file_properties.py @@ -44,9 +44,10 @@ class FileProperties(): self.__sourceFilenameExtension = '' self.__pc = PatternController(context) + self.__usePattern = bool(self.context.get('use_pattern', True)) # Checking if database contains matching pattern - matchResult = self.__pc.matchFilename(self.__sourceFilename) + matchResult = self.__pc.matchFilename(self.__sourceFilename) if self.__usePattern else {} self.__logger.debug(f"FileProperties.__init__(): Match result: {matchResult}") diff --git a/src/ffx/media_details_screen.py b/src/ffx/media_details_screen.py index 9afbd71..dfb837f 100644 --- a/src/ffx/media_details_screen.py +++ b/src/ffx/media_details_screen.py @@ -602,20 +602,21 @@ class MediaDetailsScreen(Screen): patternObj = self.getPatternObjFromInput() if patternObj: - patternId = self.__pc.addPattern(patternObj) + mediaTags = {} + for tagKey, tagValue in self.__sourceMediaDescriptor.getTags().items(): + + # Filter tags that make no sense to preserve + if tagKey not in self.__ignoreGlobalKeys and not tagKey in self.__removeGlobalKeys: + mediaTags[tagKey] = tagValue + + patternId = self.__pc.savePatternSchema( + patternObj, + trackDescriptors=self.__sourceMediaDescriptor.getTrackDescriptors(), + mediaTags=mediaTags, + ) if patternId: self.highlightPattern(False) - for tagKey, tagValue in self.__sourceMediaDescriptor.getTags().items(): - - # Filter tags that make no sense to preserve - if tagKey not in self.__ignoreGlobalKeys and not tagKey in self.__removeGlobalKeys: - 
self.__tac.updateMediaTag(patternId, tagKey, tagValue) - - # for trackDescriptor in self.__sourceMediaDescriptor.getAllTrackDescriptors(): - for trackDescriptor in self.__sourceMediaDescriptor.getTrackDescriptors(): - self.__tc.addTrack(trackDescriptor, patternId = patternId) - def action_new_pattern(self): """Adding new patterns @@ -754,4 +755,3 @@ class MediaDetailsScreen(Screen): def handle_edit_pattern(self, screenResult): self.query_one("#pattern_input", Input).value = screenResult['pattern'] self.updateDifferences() - diff --git a/src/ffx/model/pattern.py b/src/ffx/model/pattern.py index 9fc8595..8d810d3 100644 --- a/src/ffx/model/pattern.py +++ b/src/ffx/model/pattern.py @@ -1,6 +1,6 @@ import click -from sqlalchemy import Column, Integer, String, Text, ForeignKey +from sqlalchemy import Column, Integer, String, Text, ForeignKey, UniqueConstraint from sqlalchemy.orm import relationship from .show import Base, Show @@ -12,6 +12,9 @@ from ffx.show_descriptor import ShowDescriptor class Pattern(Base): __tablename__ = 'patterns' + __table_args__ = ( + UniqueConstraint('show_id', 'pattern', name='uq_patterns_show_id_pattern'), + ) # v1.x id = Column(Integer, primary_key=True) diff --git a/src/ffx/pattern_controller.py b/src/ffx/pattern_controller.py index 2e2cdd1..e10d5a7 100644 --- a/src/ffx/pattern_controller.py +++ b/src/ffx/pattern_controller.py @@ -1,160 +1,388 @@ -import click, re +import re +import click + +from ffx.model.media_tag import MediaTag from ffx.model.pattern import Pattern +from ffx.model.track import Track +from ffx.model.track_tag import TrackTag +from ffx.track_descriptor import TrackDescriptor +from ffx.track_disposition import TrackDisposition -class PatternController(): - +class DuplicatePatternMatchError(click.ClickException): + pass + + +class InvalidPatternSchemaError(click.ClickException): + pass + + +class PatternController: + _compiled_regex_cache: dict[str, re.Pattern] = {} + def __init__(self, context): - + self.context = context 
- self.Session = self.context['database']['session'] # convenience + self.Session = self.context["database"]["session"] + self.__configurationData = self.context["config"].getData() - def addPattern(self, patternObj): - """Adds pattern to database from obj - - Returns database id or 0 if pattern already exists""" + metadataConfiguration = ( + self.__configurationData["metadata"] + if "metadata" in self.__configurationData.keys() + else {} + ) + + self.__removeTrackKeys = ( + metadataConfiguration["streams"]["remove"] + if "streams" in metadataConfiguration.keys() + and "remove" in metadataConfiguration["streams"].keys() + else [] + ) + self.__ignoreTrackKeys = ( + metadataConfiguration["streams"]["ignore"] + if "streams" in metadataConfiguration.keys() + and "ignore" in metadataConfiguration["streams"].keys() + else [] + ) + + @classmethod + def _clear_regex_cache(cls): + cls._compiled_regex_cache.clear() + + @classmethod + def _compile_pattern_expression(cls, pattern_id: int, expression: str) -> re.Pattern: + expression_text = str(expression) + compiled = cls._compiled_regex_cache.get(expression_text) + if compiled is None: + try: + compiled = re.compile(expression_text) + except re.error as ex: + raise click.ClickException( + f"Pattern #{pattern_id} contains an invalid regex {expression_text!r}: {ex}" + ) + cls._compiled_regex_cache[expression_text] = compiled + return compiled + + def _coerce_pattern_fields(self, patternObj): + return { + "show_id": int(patternObj["show_id"]), + "pattern": str(patternObj["pattern"]), + "quality": int(patternObj.get("quality", 0) or 0), + "notes": str(patternObj.get("notes", "")), + } + + def _coerce_media_tags(self, mediaTags): + return { + str(tagKey): str(tagValue) + for tagKey, tagValue in (mediaTags or {}).items() + } + + def _normalize_track_descriptors(self, trackDescriptors): + if trackDescriptors is None: + raise InvalidPatternSchemaError( + "Patterns must define at least one track before they can be stored." 
+ ) + + normalized_descriptors = [] + for trackDescriptor in trackDescriptors: + if type(trackDescriptor) is not TrackDescriptor: + raise TypeError( + "PatternController: All track descriptors are required to be of type TrackDescriptor" + ) + normalized_descriptors.append(trackDescriptor) + + if not normalized_descriptors: + raise InvalidPatternSchemaError( + "Patterns must define at least one track before they can be stored." + ) + + normalized_descriptors = sorted( + normalized_descriptors, key=lambda descriptor: descriptor.getIndex() + ) + + index_set = {descriptor.getIndex() for descriptor in normalized_descriptors} + expected_indexes = set(range(len(normalized_descriptors))) + if index_set != expected_indexes: + raise click.ClickException( + "Pattern tracks must use a contiguous zero-based index order." + ) + + return normalized_descriptors + + def _ensure_unique_pattern_definition( + self, + session, + show_id: int, + pattern_expression: str, + exclude_pattern_id: int | None = None, + ): + query = session.query(Pattern).filter( + Pattern.show_id == show_id, + Pattern.pattern == pattern_expression, + ) + if exclude_pattern_id is not None: + query = query.filter(Pattern.id != int(exclude_pattern_id)) + + existing_pattern = query.first() + if existing_pattern is not None: + raise click.ClickException( + f"Pattern {pattern_expression!r} already exists for show #{show_id}." 
+ ) + + def _build_track_row(self, trackDescriptor: TrackDescriptor) -> Track: + track = Track( + track_type=int(trackDescriptor.getType().index()), + codec_name=str(trackDescriptor.getCodec().identifier()), + index=int(trackDescriptor.getIndex()), + source_index=int(trackDescriptor.getSourceIndex()), + disposition_flags=int( + TrackDisposition.toFlags(trackDescriptor.getDispositionSet()) + ), + audio_layout=trackDescriptor.getAudioLayout().index(), + ) + + for tagKey, tagValue in trackDescriptor.getTags().items(): + if tagKey in self.__ignoreTrackKeys or tagKey in self.__removeTrackKeys: + continue + track.track_tags.append(TrackTag(key=str(tagKey), value=str(tagValue))) + + return track + + def _replace_pattern_schema( + self, + session, + pattern: Pattern, + mediaTags: dict[str, str], + trackDescriptors: list[TrackDescriptor], + ): + for mediaTag in list(pattern.media_tags): + session.delete(mediaTag) + for track in list(pattern.tracks): + session.delete(track) + session.flush() + + for tagKey, tagValue in mediaTags.items(): + pattern.media_tags.append(MediaTag(key=str(tagKey), value=str(tagValue))) + + for trackDescriptor in trackDescriptors: + pattern.tracks.append(self._build_track_row(trackDescriptor)) + + def _validate_persisted_pattern(self, pattern: Pattern): + if not pattern.tracks: + raise InvalidPatternSchemaError( + f"Pattern #{pattern.getId()} ({pattern.getPattern()!r}) is invalid because it has no tracks." 
+ ) + + def savePatternSchema( + self, + patternObj, + trackDescriptors, + mediaTags=None, + patternId: int | None = None, + ) -> int: + fields = self._coerce_pattern_fields(patternObj) + normalized_tracks = self._normalize_track_descriptors(trackDescriptors) + normalized_tags = self._coerce_media_tags(mediaTags) + session = None try: + session = self.Session() + self._ensure_unique_pattern_definition( + session, + fields["show_id"], + fields["pattern"], + exclude_pattern_id=patternId, + ) - s = self.Session() - pattern = s.query(Pattern).filter( - Pattern.show_id == int(patternObj['show_id']), - Pattern.pattern == str(patternObj['pattern']), - ).first() - - if pattern is None: - pattern = Pattern(show_id = int(patternObj['show_id']), - pattern = str(patternObj['pattern'])) - s.add(pattern) - s.commit() - return pattern.getId() + if patternId is None: + pattern = Pattern( + show_id=fields["show_id"], + pattern=fields["pattern"], + quality=fields["quality"], + notes=fields["notes"], + ) + session.add(pattern) + session.flush() else: - return 0 + pattern = session.query(Pattern).filter(Pattern.id == int(patternId)).first() + if pattern is None: + raise click.ClickException( + f"PatternController.savePatternSchema(): Pattern #{patternId} not found" + ) + pattern.show_id = fields["show_id"] + pattern.pattern = fields["pattern"] + pattern.quality = fields["quality"] + pattern.notes = fields["notes"] + self._replace_pattern_schema( + session, + pattern, + normalized_tags, + normalized_tracks, + ) + + session.commit() + self._clear_regex_cache() + return pattern.getId() + + except click.ClickException: + raise except Exception as ex: - raise click.ClickException(f"PatternController.addPattern(): {repr(ex)}") + raise click.ClickException( + f"PatternController.savePatternSchema(): {repr(ex)}" + ) finally: - s.close() + if session is not None: + session.close() + def addPattern(self, patternObj, trackDescriptors=None, mediaTags=None): + return self.savePatternSchema( + 
patternObj, + trackDescriptors=trackDescriptors, + mediaTags=mediaTags, + ) def updatePattern(self, patternId, patternObj): + fields = self._coerce_pattern_fields(patternObj) + session = None + try: - s = self.Session() - pattern = s.query(Pattern).filter(Pattern.id == int(patternId)).first() + session = self.Session() + pattern = session.query(Pattern).filter(Pattern.id == int(patternId)).first() if pattern is not None: + self._ensure_unique_pattern_definition( + session, + fields["show_id"], + fields["pattern"], + exclude_pattern_id=patternId, + ) + self._validate_persisted_pattern(pattern) - pattern.show_id = int(patternObj['show_id']) - pattern.pattern = str(patternObj['pattern']) - pattern.quality = str(patternObj['quality']) - pattern.notes = str(patternObj['notes']) + pattern.show_id = fields["show_id"] + pattern.pattern = fields["pattern"] + pattern.quality = fields["quality"] + pattern.notes = fields["notes"] - s.commit() + session.commit() + self._clear_regex_cache() return True - else: - return False + return False + except click.ClickException: + raise except Exception as ex: raise click.ClickException(f"PatternController.updatePattern(): {repr(ex)}") finally: - s.close() - - + if session is not None: + session.close() def findPattern(self, patternObj): - + session = None + try: - s = self.Session() - pattern = s.query(Pattern).filter( - Pattern.show_id == int(patternObj['show_id']), - Pattern.pattern == str(patternObj['pattern']), - ).first() + session = self.Session() + pattern = ( + session.query(Pattern) + .filter( + Pattern.show_id == int(patternObj["show_id"]), + Pattern.pattern == str(patternObj["pattern"]), + ) + .first() + ) if pattern is not None: return int(pattern.id) - else: - return None + return None except Exception as ex: raise click.ClickException(f"PatternController.findPattern(): {repr(ex)}") finally: - s.close() + if session is not None: + session.close() - - def getPattern(self, patternId : int): + def getPattern(self, patternId: 
int): if type(patternId) is not int: - raise ValueError(f"PatternController.getPattern(): Argument patternId is required to be of type int") + raise ValueError( + "PatternController.getPattern(): Argument patternId is required to be of type int" + ) + session = None try: - s = self.Session() - return s.query(Pattern).filter(Pattern.id == int(patternId)).first() + session = self.Session() + return session.query(Pattern).filter(Pattern.id == int(patternId)).first() except Exception as ex: raise click.ClickException(f"PatternController.getPattern(): {repr(ex)}") finally: - s.close() - + if session is not None: + session.close() def deletePattern(self, patternId): + session = None try: - s = self.Session() - pattern = s.query(Pattern).filter(Pattern.id == int(patternId)).first() + session = self.Session() + pattern = session.query(Pattern).filter(Pattern.id == int(patternId)).first() if pattern is not None: - - #DAFUQ: https://stackoverflow.com/a/19245058 - # q.delete() - s.delete(pattern) - - s.commit() + session.delete(pattern) + session.commit() + self._clear_regex_cache() return True return False except Exception as ex: raise click.ClickException(f"PatternController.deletePattern(): {repr(ex)}") finally: - s.close() + if session is not None: + session.close() - - def matchFilename(self, filename : str) -> dict: - """Returns dict {'match': , 'pattern': } or empty dict of no pattern was found""" + def matchFilename(self, filename: str) -> dict: + """Return {'match': regex match, 'pattern': Pattern} or {} when unmatched.""" + session = None try: - s = self.Session() - q = s.query(Pattern) + session = self.Session() + matches = [] + query = session.query(Pattern).order_by(Pattern.show_id, Pattern.id) - matchResult = {} - - for pattern in q.all(): - patternMatch = re.search(str(pattern.pattern), str(filename)) - if patternMatch is not None: - matchResult['match'] = patternMatch - matchResult['pattern'] = pattern + for pattern in query.all(): + compiled = 
self._compile_pattern_expression( + pattern.getId(), + pattern.getPattern(), + ) + patternMatch = compiled.search(str(filename)) + if patternMatch is None: + continue - return matchResult - + self._validate_persisted_pattern(pattern) + matches.append({"match": patternMatch, "pattern": pattern}) + + if not matches: + return {} + + if len(matches) > 1: + duplicateDescriptions = ", ".join( + [ + f"show #{match['pattern'].getShowId()} pattern #{match['pattern'].getId()} {match['pattern'].getPattern()!r}" + for match in matches + ] + ) + raise DuplicatePatternMatchError( + f"Filename {filename!r} matched more than one pattern: {duplicateDescriptions}" + ) + + return matches[0] + + except click.ClickException: + raise except Exception as ex: raise click.ClickException(f"PatternController.matchFilename(): {repr(ex)}") finally: - s.close() - -# def getMediaDescriptor(self, context, patternId): -# -# try: -# s = self.Session() -# q = s.query(Pattern).filter(Pattern.id == int(patternId)) -# -# if q.count(): -# return q.first().getMediaDescriptor(context) -# else: -# return None -# -# except Exception as ex: -# raise click.ClickException(f"PatternController.getMediaDescriptor(): {repr(ex)}") -# finally: -# s.close() + if session is not None: + session.close() diff --git a/src/ffx/pattern_details_screen.py b/src/ffx/pattern_details_screen.py index da64b7b..bdb1257 100644 --- a/src/ffx/pattern_details_screen.py +++ b/src/ffx/pattern_details_screen.py @@ -6,7 +6,6 @@ from textual.widgets import Header, Footer, Static, Button, Input, DataTable, Te from textual.containers import Grid from ffx.model.pattern import Pattern -from ffx.model.track import Track from .pattern_controller import PatternController from .show_controller import ShowController @@ -132,6 +131,8 @@ class PatternDetailsScreen(Screen): self.__pattern : Pattern = self.__pc.getPattern(patternId) if patternId is not None else None self.__showDescriptor = self.__sc.getShowDescriptor(showId) if showId is not None else 
None + self.__draftTracks : List[TrackDescriptor] = [] + self.__draftTags : dict[str, str] = {} #TODO: per controller @@ -158,42 +159,60 @@ class PatternDetailsScreen(Screen): self.tracksTable.clear() + tracks = self.getCurrentTrackDescriptors() + + typeCounter = {} + + td: TrackDescriptor + for td in tracks: + + if (trackType := td.getType()) != TrackType.ATTACHMENT: + + if not trackType in typeCounter.keys(): + typeCounter[trackType] = 0 + + dispoSet = td.getDispositionSet() + + trackLanguage = td.getLanguage() + audioLayout = td.getAudioLayout() + + row = (td.getIndex(), + trackType.label(), + typeCounter[trackType], + td.getCodec().label(), + audioLayout.label() if trackType == TrackType.AUDIO + and audioLayout != AudioLayout.LAYOUT_UNDEFINED else ' ', + trackLanguage.label() if trackLanguage != IsoLanguage.UNDEFINED else ' ', + td.getTitle(), + 'Yes' if TrackDisposition.DEFAULT in dispoSet else 'No', + 'Yes' if TrackDisposition.FORCED in dispoSet else 'No', + td.getSourceIndex()) + + self.tracksTable.add_row(*map(str, row)) + + typeCounter[trackType] += 1 + + + def getCurrentTrackDescriptors(self) -> List[TrackDescriptor]: if self.__pattern is not None: + return self.__tc.findSiblingDescriptors(self.__pattern.getId()) + return list(self.__draftTracks) - tracks = self.__tc.findTracks(self.__pattern.getId()) - typeCounter = {} + def normalizeDraftTracks(self): - tr: Track - for tr in tracks: + typeCounter = {} - td : TrackDescriptor = tr.getDescriptor(self.context) + for index, trackDescriptor in enumerate(self.__draftTracks): + trackDescriptor.setIndex(index) - if (trackType := td.getType()) != TrackType.ATTACHMENT: + trackType = trackDescriptor.getType() + subIndex = typeCounter.get(trackType, 0) + trackDescriptor.setSubIndex(subIndex) + typeCounter[trackType] = subIndex + 1 - if not trackType in typeCounter.keys(): - typeCounter[trackType] = 0 - - dispoSet = td.getDispositionSet() - - trackLanguage = td.getLanguage() - audioLayout = td.getAudioLayout() - - 
row = (td.getIndex(), - trackType.label(), - typeCounter[trackType], - td.getCodec().label(), - audioLayout.label() if trackType == TrackType.AUDIO - and audioLayout != AudioLayout.LAYOUT_UNDEFINED else ' ', - trackLanguage.label() if trackLanguage != IsoLanguage.UNDEFINED else ' ', - td.getTitle(), - 'Yes' if TrackDisposition.DEFAULT in dispoSet else 'No', - 'Yes' if TrackDisposition.FORCED in dispoSet else 'No', - td.getSourceIndex()) - - self.tracksTable.add_row(*map(str, row)) - - typeCounter[trackType] += 1 + if trackDescriptor.getSourceIndex() < 0: + trackDescriptor.setSourceIndex(index) def swapTracks(self, trackIndex1: int, trackIndex2: int): @@ -201,6 +220,20 @@ class PatternDetailsScreen(Screen): ti1 = int(trackIndex1) ti2 = int(trackIndex2) + if self.__pattern is None: + numSiblings = len(self.__draftTracks) + + if ti1 < 0 or ti1 >= numSiblings: + raise ValueError(f"PatternDetailsScreen.swapTracks(): trackIndex1 ({ti1}) is out of range ({numSiblings})") + + if ti2 < 0 or ti2 >= numSiblings: + raise ValueError(f"PatternDetailsScreen.swapTracks(): trackIndex2 ({ti2}) is out of range ({numSiblings})") + + self.__draftTracks[ti1], self.__draftTracks[ti2] = self.__draftTracks[ti2], self.__draftTracks[ti1] + self.normalizeDraftTracks() + self.updateTracks() + return + siblingDescriptors: List[TrackDescriptor] = self.__tc.findSiblingDescriptors(self.__pattern.getId()) numSiblings = len(siblingDescriptors) @@ -236,21 +269,22 @@ class PatternDetailsScreen(Screen): self.tagsTable.clear() - if self.__pattern is not None: + tags = ( + self.__tac.findAllMediaTags(self.__pattern.getId()) + if self.__pattern is not None + else self.__draftTags + ) - tags = self.__tac.findAllMediaTags(self.__pattern.getId()) + for tagKey, tagValue in tags.items(): - for tagKey, tagValue in tags.items(): + textColor = None + if tagKey in self.__ignoreGlobalKeys: + textColor = 'blue' + if tagKey in self.__removeGlobalKeys: + textColor = 'red' - textColor = None - if tagKey in 
self.__ignoreGlobalKeys: - textColor = 'blue' - if tagKey in self.__removeGlobalKeys: - textColor = 'red' - - # if tagKey not in self.__ignoreTrackKeys: - row = (formatRichColor(tagKey, textColor), formatRichColor(tagValue, textColor)) - self.tagsTable.add_row(*map(str, row)) + row = (formatRichColor(tagKey, textColor), formatRichColor(tagValue, textColor)) + self.tagsTable.add_row(*map(str, row)) def on_mount(self): @@ -340,16 +374,9 @@ class PatternDetailsScreen(Screen): # 9 yield Static("Media Tags") - - - if self.__pattern is not None: - yield Button("Add", id="button_add_tag") - yield Button("Edit", id="button_edit_tag") - yield Button("Delete", id="button_delete_tag") - else: - yield Static(" ") - yield Static(" ") - yield Static(" ") + yield Button("Add", id="button_add_tag") + yield Button("Edit", id="button_edit_tag") + yield Button("Delete", id="button_delete_tag") yield Static(" ") yield Static(" ") @@ -363,16 +390,9 @@ class PatternDetailsScreen(Screen): # 12 yield Static("Streams") - - - if self.__pattern is not None: - yield Button("Add", id="button_add_track") - yield Button("Edit", id="button_edit_track") - yield Button("Delete", id="button_delete_track") - else: - yield Static(" ") - yield Static(" ") - yield Static(" ") + yield Button("Add", id="button_add_track") + yield Button("Edit", id="button_edit_track") + yield Button("Delete", id="button_delete_track") yield Static(" ") yield Button("Up", id="button_track_up") @@ -413,13 +433,8 @@ class PatternDetailsScreen(Screen): def getSelectedTrackDescriptor(self): - if not self.__pattern: - return None - try: - # Fetch the currently selected row when 'Enter' is pressed - #selected_row_index = self.table.cursor_row row_key, col_key = self.tracksTable.coordinate_to_cell_key(self.tracksTable.cursor_coordinate) if row_key is not None: @@ -428,10 +443,12 @@ class PatternDetailsScreen(Screen): trackIndex = int(selected_track_data[0]) trackSubIndex = int(selected_track_data[2]) - return 
self.__tc.getTrack(self.__pattern.getId(), trackIndex).getDescriptor(self.context, subIndex=trackSubIndex) + for trackDescriptor in self.getCurrentTrackDescriptors(): + if (trackDescriptor.getIndex() == trackIndex + and trackDescriptor.getSubIndex() == trackSubIndex): + return trackDescriptor - else: - return None + return None except CellDoesNotExist: return None @@ -482,7 +499,11 @@ class PatternDetailsScreen(Screen): self.app.pop_screen() else: - patternId = self.__pc.addPattern(patternDescriptor) + patternId = self.__pc.savePatternSchema( + patternDescriptor, + trackDescriptors=self.__draftTracks, + mediaTags=self.__draftTags, + ) if patternId: self.dismiss(patternDescriptor) else: @@ -494,33 +515,52 @@ class PatternDetailsScreen(Screen): self.app.pop_screen() - # Save pattern when just created before adding streams - if self.__pattern is not None: + numTracks = len(self.getCurrentTrackDescriptors()) - numTracks = len(self.tracksTable.rows) + if event.button.id == "button_add_track": + self.app.push_screen( + TrackDetailsScreen( + patternId=self.__pattern.getId() if self.__pattern is not None else None, + patternLabel=self.getPatternFromInput(), + siblingTrackDescriptors=self.getCurrentTrackDescriptors(), + index=numTracks, + ), + self.handle_add_track, + ) - if event.button.id == "button_add_track": - self.app.push_screen(TrackDetailsScreen(patternId = self.__pattern.getId(), index = numTracks), self.handle_add_track) - - selectedTrack = self.getSelectedTrackDescriptor() - if selectedTrack is not None: - if event.button.id == "button_edit_track": - self.app.push_screen(TrackDetailsScreen(trackDescriptor = selectedTrack), self.handle_edit_track) - if event.button.id == "button_delete_track": - self.app.push_screen(TrackDeleteScreen(trackDescriptor = selectedTrack), self.handle_delete_track) + selectedTrack = self.getSelectedTrackDescriptor() + if selectedTrack is not None: + if event.button.id == "button_edit_track": + self.app.push_screen( + 
TrackDetailsScreen( + trackDescriptor=selectedTrack, + patternId=self.__pattern.getId() if self.__pattern is not None else None, + patternLabel=self.getPatternFromInput(), + siblingTrackDescriptors=self.getCurrentTrackDescriptors(), + ), + self.handle_edit_track, + ) + if event.button.id == "button_delete_track": + self.app.push_screen( + TrackDeleteScreen(trackDescriptor = selectedTrack), + self.handle_delete_track, + ) if event.button.id == "button_add_tag": - if self.__pattern is not None: - self.app.push_screen(TagDetailsScreen(), self.handle_update_tag) + self.app.push_screen(TagDetailsScreen(), self.handle_update_tag) if event.button.id == "button_edit_tag": - tagKey, tagValue = self.getSelectedTag() - self.app.push_screen(TagDetailsScreen(key=tagKey, value=tagValue), self.handle_update_tag) + selectedTag = self.getSelectedTag() + if selectedTag is not None: + tagKey, tagValue = selectedTag + self.app.push_screen(TagDetailsScreen(key=tagKey, value=tagValue), self.handle_update_tag) if event.button.id == "button_delete_tag": - tagKey, tagValue = self.getSelectedTag() - self.app.push_screen(TagDeleteScreen(key=tagKey, value=tagValue), self.handle_delete_tag) + selectedTag = self.getSelectedTag() + if selectedTag is not None: + tagKey, tagValue = selectedTag + self.app.push_screen(TagDeleteScreen(key=tagKey, value=tagValue), self.handle_delete_tag) if event.button.id == "pattern_button": @@ -537,83 +577,106 @@ class PatternDetailsScreen(Screen): if event.button.id == "button_track_up": selectedTrackDescriptor = self.getSelectedTrackDescriptor() - selectedTrackIndex = selectedTrackDescriptor.getIndex() + if selectedTrackDescriptor is not None: + selectedTrackIndex = selectedTrackDescriptor.getIndex() - if selectedTrackIndex > 0 and selectedTrackIndex < self.tracksTable.row_count: - correspondingTrackIndex = selectedTrackIndex - 1 - self.swapTracks(selectedTrackIndex, correspondingTrackIndex) + if selectedTrackIndex > 0 and selectedTrackIndex < 
self.tracksTable.row_count: + correspondingTrackIndex = selectedTrackIndex - 1 + self.swapTracks(selectedTrackIndex, correspondingTrackIndex) if event.button.id == "button_track_down": selectedTrackDescriptor = self.getSelectedTrackDescriptor() - selectedTrackIndex = selectedTrackDescriptor.getIndex() + if selectedTrackDescriptor is not None: + selectedTrackIndex = selectedTrackDescriptor.getIndex() - if selectedTrackIndex >= 0 and selectedTrackIndex < (self.tracksTable.row_count - 1): - correspondingTrackIndex = selectedTrackIndex + 1 - self.swapTracks(selectedTrackIndex, correspondingTrackIndex) + if selectedTrackIndex >= 0 and selectedTrackIndex < (self.tracksTable.row_count - 1): + correspondingTrackIndex = selectedTrackIndex + 1 + self.swapTracks(selectedTrackIndex, correspondingTrackIndex) def handle_add_track(self, trackDescriptor : TrackDescriptor): + if trackDescriptor is None: + return - dispoSet = trackDescriptor.getDispositionSet() - trackType = trackDescriptor.getType() - index = trackDescriptor.getIndex() - subIndex = trackDescriptor.getSubIndex() - codec = trackDescriptor.getCodec() - language = trackDescriptor.getLanguage() - title = trackDescriptor.getTitle() + if self.__pattern is not None: + self.__tc.addTrack(trackDescriptor, patternId=self.__pattern.getId()) + else: + self.__draftTracks.append(trackDescriptor) + self.normalizeDraftTracks() - row = (index, - trackType.label(), - subIndex, - codec.label(), - language.label(), - title, - 'Yes' if TrackDisposition.DEFAULT in dispoSet else 'No', - 'Yes' if TrackDisposition.FORCED in dispoSet else 'No') - - self.tracksTable.add_row(*map(str, row)) + self.updateTracks() def handle_edit_track(self, trackDescriptor : TrackDescriptor): + if trackDescriptor is None: + return - try: + if self.__pattern is not None: + if not self.__tc.updateTrack(trackDescriptor.getId(), trackDescriptor): + raise click.ClickException("PatternDetailsScreen.handle_edit_track(): track update failed") + else: + selectedTrack = 
self.getSelectedTrackDescriptor() + for index, currentTrack in enumerate(self.__draftTracks): + if (selectedTrack is not None + and currentTrack.getIndex() == selectedTrack.getIndex() + and currentTrack.getSubIndex() == selectedTrack.getSubIndex()): + self.__draftTracks[index] = trackDescriptor + break + self.normalizeDraftTracks() - row_key, col_key = self.tracksTable.coordinate_to_cell_key(self.tracksTable.cursor_coordinate) - - self.tracksTable.update_cell(row_key, self.column_key_track_audio_layout, - trackDescriptor.getAudioLayout().label() - if trackDescriptor.getType() == TrackType.AUDIO else ' ') - - self.tracksTable.update_cell(row_key, self.column_key_track_language, trackDescriptor.getLanguage().label()) - self.tracksTable.update_cell(row_key, self.column_key_track_title, trackDescriptor.getTitle()) - self.tracksTable.update_cell(row_key, self.column_key_track_default, - 'Yes' if TrackDisposition.DEFAULT in trackDescriptor.getDispositionSet() else 'No') - self.tracksTable.update_cell(row_key, self.column_key_track_forced, - 'Yes' if TrackDisposition.FORCED in trackDescriptor.getDispositionSet() else 'No') - - except CellDoesNotExist: - pass + self.updateTracks() def handle_delete_track(self, trackDescriptor : TrackDescriptor): + if trackDescriptor is None: + return + + if self.__pattern is not None: + track = self.__tc.getTrack(trackDescriptor.getPatternId(), trackDescriptor.getIndex()) + + if track is None: + raise click.ClickException( + f"Track is none: patternId={trackDescriptor.getPatternId()} type={trackDescriptor.getType()} subIndex={trackDescriptor.getSubIndex()}" + ) + + self.__tc.deleteTrack(track.getId()) + else: + self.__draftTracks = [ + currentTrack + for currentTrack in self.__draftTracks + if not ( + currentTrack.getIndex() == trackDescriptor.getIndex() + and currentTrack.getSubIndex() == trackDescriptor.getSubIndex() + ) + ] + self.normalizeDraftTracks() + self.updateTracks() def handle_update_tag(self, tag): + if tag is None: + return 
if self.__pattern is None: - raise click.ClickException(f"PatternDetailsScreen.handle_update_tag: pattern not set") + self.__draftTags[str(tag[0])] = str(tag[1]) + else: + if self.__tac.updateMediaTag(self.__pattern.getId(), tag[0], tag[1]) is None: + raise click.ClickException("PatternDetailsScreen.handle_update_tag(): tag update failed") - if self.__tac.updateMediaTag(self.__pattern.getId(), tag[0], tag[1]) is not None: - self.updateTags() + self.updateTags() def handle_delete_tag(self, tag): + if tag is None: + return if self.__pattern is None: - raise click.ClickException(f"PatternDetailsScreen.handle_delete_tag: pattern not set") + self.__draftTags.pop(str(tag[0]), None) + self.updateTags() + return if self.__tac.deleteMediaTagByKey(self.__pattern.getId(), tag[0]): self.updateTags() diff --git a/src/ffx/track_controller.py b/src/ffx/track_controller.py index ece0a24..3288dd8 100644 --- a/src/ffx/track_controller.py +++ b/src/ffx/track_controller.py @@ -244,9 +244,15 @@ class TrackController(): patternId = int(track.pattern_id) q_siblings = s.query(Track).filter(Track.pattern_id == patternId).order_by(Track.index) + siblingTracks = q_siblings.all() + + if len(siblingTracks) <= 1: + raise click.ClickException( + f"Cannot delete the last track from pattern #{patternId}. Patterns must define at least one track." 
+ ) index = 0 - for track in q_siblings.all(): + for track in siblingTracks: if track.id == int(trackId): s.delete(track) diff --git a/src/ffx/track_delete_screen.py b/src/ffx/track_delete_screen.py index c944525..4743538 100644 --- a/src/ffx/track_delete_screen.py +++ b/src/ffx/track_delete_screen.py @@ -6,8 +6,6 @@ from textual.containers import Grid from ffx.track_descriptor import TrackDescriptor -from .track_controller import TrackController - # Screen[dict[int, str, int]] class TrackDeleteScreen(Screen): @@ -52,14 +50,9 @@ class TrackDeleteScreen(Screen): def __init__(self, trackDescriptor : TrackDescriptor): super().__init__() - self.context = self.app.getContext() - self.Session = self.context['database']['session'] # convenience - if type(trackDescriptor) is not TrackDescriptor: raise click.ClickException('TrackDeleteScreen.init(): trackDescriptor is required to be of type TrackDescriptor') - self.__tc = TrackController(context = self.context) - self.__trackDescriptor = trackDescriptor @@ -116,21 +109,7 @@ class TrackDeleteScreen(Screen): def on_button_pressed(self, event: Button.Pressed) -> None: if event.button.id == "delete_button": - - track = self.__tc.getTrack(self.__trackDescriptor.getPatternId(), self.__trackDescriptor.getIndex()) - - if track is None: - raise click.ClickException(f"Track is none: patternId={self.__trackDescriptor.getPatternId()} type={self.__trackDescriptor.getType()} subIndex={self.__trackDescriptor.getSubIndex()}") - - if track is not None: - - if self.__tc.deleteTrack(track.getId()): - self.dismiss(self.__trackDescriptor) - - else: - #TODO: Meldung - self.app.pop_screen() + self.dismiss(self.__trackDescriptor) if event.button.id == "cancel_button": self.app.pop_screen() - diff --git a/src/ffx/track_details_screen.py b/src/ffx/track_details_screen.py index dfa226c..f0d1c15 100644 --- a/src/ffx/track_details_screen.py +++ b/src/ffx/track_details_screen.py @@ -3,31 +3,20 @@ import click from textual.screen import Screen from 
textual.widgets import Header, Footer, Static, Button, SelectionList, Select, DataTable, Input from textual.containers import Grid - -from ffx.model.pattern import Pattern - -from .track_controller import TrackController -from .pattern_controller import PatternController -from .tag_controller import TagController - -from .track_type import TrackType -from .track_codec import TrackCodec - -from .iso_language import IsoLanguage -from .track_disposition import TrackDisposition -from .audio_layout import AudioLayout - -from .track_descriptor import TrackDescriptor - -from .tag_details_screen import TagDetailsScreen -from .tag_delete_screen import TagDeleteScreen - from textual.widgets._data_table import CellDoesNotExist +from .audio_layout import AudioLayout +from .iso_language import IsoLanguage +from .tag_delete_screen import TagDeleteScreen +from .tag_details_screen import TagDetailsScreen +from .track_codec import TrackCodec +from .track_descriptor import TrackDescriptor +from .track_disposition import TrackDisposition +from .track_type import TrackType + from ffx.helper import formatRichColor, removeRichColor -# Screen[dict[int, str, int]] class TrackDetailsScreen(Screen): CSS = """ @@ -79,7 +68,7 @@ class TrackDetailsScreen(Screen): .three { column-span: 3; } - + .four { column-span: 4; } @@ -97,257 +86,288 @@ class TrackDetailsScreen(Screen): } """ - def __init__(self, trackDescriptor : TrackDescriptor = None, patternId = None, trackType : TrackType = None, index = None, subIndex = None): + def __init__( + self, + trackDescriptor: TrackDescriptor = None, + patternId=None, + patternLabel: str = "", + siblingTrackDescriptors=None, + trackType: TrackType = None, + index=None, + subIndex=None, + ): super().__init__() self.context = self.app.getContext() - self.Session = self.context['database']['session'] # convenience - self.__configurationData = self.context['config'].getData() + self.__configurationData = self.context["config"].getData() - metadataConfiguration = 
self.__configurationData['metadata'] if 'metadata' in self.__configurationData.keys() else {} + metadataConfiguration = ( + self.__configurationData["metadata"] + if "metadata" in self.__configurationData.keys() + else {} + ) - self.__signatureTags = metadataConfiguration['signature'] if 'signature' in metadataConfiguration.keys() else {} - self.__removeGlobalKeys = metadataConfiguration['remove'] if 'remove' in metadataConfiguration.keys() else [] - self.__ignoreGlobalKeys = metadataConfiguration['ignore'] if 'ignore' in metadataConfiguration.keys() else [] - self.__removeTrackKeys = (metadataConfiguration['streams']['remove'] - if 'streams' in metadataConfiguration.keys() - and 'remove' in metadataConfiguration['streams'].keys() else []) - self.__ignoreTrackKeys = (metadataConfiguration['streams']['ignore'] - if 'streams' in metadataConfiguration.keys() - and 'ignore' in metadataConfiguration['streams'].keys() else []) - - - self.__tc = TrackController(context = self.context) - self.__pc = PatternController(context = self.context) - self.__tac = TagController(context = self.context) + self.__removeTrackKeys = ( + metadataConfiguration["streams"]["remove"] + if "streams" in metadataConfiguration.keys() + and "remove" in metadataConfiguration["streams"].keys() + else [] + ) + self.__ignoreTrackKeys = ( + metadataConfiguration["streams"]["ignore"] + if "streams" in metadataConfiguration.keys() + and "ignore" in metadataConfiguration["streams"].keys() + else [] + ) self.__isNew = trackDescriptor is None + self.__trackDescriptor = trackDescriptor + self.__patternId = ( + int(patternId) + if patternId is not None + else ( + int(trackDescriptor.getPatternId()) + if trackDescriptor is not None and trackDescriptor.getPatternId() != -1 + else -1 + ) + ) + self.__patternLabel = str(patternLabel) + self.__siblingTrackDescriptors = list(siblingTrackDescriptors or []) + if self.__isNew: self.__trackType = trackType self.__trackCodec = TrackCodec.UNKNOWN self.__audioLayout = 
AudioLayout.LAYOUT_UNDEFINED self.__index = index self.__subIndex = subIndex - self.__trackDescriptor : TrackDescriptor = None - self.__pattern : Pattern = self.__pc.getPattern(patternId) if patternId is not None else {} + self.__draftTrackTags = {} else: self.__trackType = trackDescriptor.getType() self.__trackCodec = trackDescriptor.getCodec() self.__audioLayout = trackDescriptor.getAudioLayout() self.__index = trackDescriptor.getIndex() self.__subIndex = trackDescriptor.getSubIndex() - self.__trackDescriptor : TrackDescriptor = trackDescriptor - self.__pattern : Pattern = self.__pc.getPattern(self.__trackDescriptor.getPatternId()) - + self.__draftTrackTags = { + key: value + for key, value in trackDescriptor.getTags().items() + if key not in ("language", "title") + } + def _descriptor_refs_same_track(self, descriptor: TrackDescriptor) -> bool: + if self.__trackDescriptor is None: + return False + if descriptor.getId() != -1 and self.__trackDescriptor.getId() != -1: + return descriptor.getId() == self.__trackDescriptor.getId() + return ( + descriptor.getPatternId() == self.__trackDescriptor.getPatternId() + and descriptor.getIndex() == self.__trackDescriptor.getIndex() + and descriptor.getSubIndex() == self.__trackDescriptor.getSubIndex() + ) def updateTags(self): self.trackTagsTable.clear() - trackId = self.__trackDescriptor.getId() - - if trackId != -1: - - trackTags = self.__tac.findAllTrackTags(trackId) - - for k,v in trackTags.items(): - - if k != 'language' and k != 'title': - - textColor = None - if k in self.__ignoreTrackKeys: - textColor = 'blue' - if k in self.__removeTrackKeys: - textColor = 'red' - - row = (formatRichColor(k, textColor), formatRichColor(v, textColor)) - self.trackTagsTable.add_row(*map(str, row)) + for key, value in self.__draftTrackTags.items(): + textColor = None + if key in self.__ignoreTrackKeys: + textColor = "blue" + if key in self.__removeTrackKeys: + textColor = "red" + row = (formatRichColor(key, textColor), 
formatRichColor(value, textColor)) + self.trackTagsTable.add_row(*map(str, row)) def on_mount(self): - self.query_one("#index_label", Static).update(str(self.__index) if self.__index is not None else '-') - self.query_one("#subindex_label", Static).update(str(self.__subIndex)if self.__subIndex is not None else '-') - - if self.__pattern is not None: - self.query_one("#pattern_label", Static).update(self.__pattern.getPattern()) + self.query_one("#index_label", Static).update( + str(self.__index) if self.__index is not None else "-" + ) + self.query_one("#subindex_label", Static).update( + str(self.__subIndex) if self.__subIndex is not None else "-" + ) + self.query_one("#pattern_label", Static).update(self.__patternLabel) if self.__trackType is not None: self.query_one("#type_select", Select).value = self.__trackType.label() - if self.__trackType == TrackType.AUDIO: - self.query_one("#audio_layout_select", Select).value = self.__audioLayout.label() - for d in TrackDisposition: + self.query_one("#audio_layout_select", Select).value = self.__audioLayout.label() - dispositionIsSet = (self.__trackDescriptor is not None - and d in self.__trackDescriptor.getDispositionSet()) + for disposition in TrackDisposition: - dispositionOption = (d.label(), d.index(), dispositionIsSet) - self.query_one("#dispositions_selection_list", SelectionList).add_option(dispositionOption) + dispositionIsSet = ( + self.__trackDescriptor is not None + and disposition in self.__trackDescriptor.getDispositionSet() + ) + + dispositionOption = ( + disposition.label(), + disposition.index(), + dispositionIsSet, + ) + self.query_one("#dispositions_selection_list", SelectionList).add_option( + dispositionOption + ) if self.__trackDescriptor is not None: - - self.query_one("#language_select", Select).value = self.__trackDescriptor.getLanguage().label() + self.query_one("#language_select", Select).value = ( + self.__trackDescriptor.getLanguage().label() + ) self.query_one("#title_input", Input).value = 
self.__trackDescriptor.getTitle() self.updateTags() - def compose(self): self.trackTagsTable = DataTable(classes="five") - # Define the columns with headers self.column_key_track_tag_key = self.trackTagsTable.add_column("Key", width=50) self.column_key_track_tag_value = self.trackTagsTable.add_column("Value", width=100) - self.trackTagsTable.cursor_type = 'row' + self.trackTagsTable.cursor_type = "row" - - languages = [l.label() for l in IsoLanguage] + languages = [language.label() for language in IsoLanguage] yield Header() with Grid(): - # 1 - yield Static(f"New stream" if self.__isNew else f"Edit stream", id="toplabel", classes="five") + yield Static( + "New stream" if self.__isNew else "Edit stream", + id="toplabel", + classes="five", + ) - # 2 yield Static("for pattern") yield Static("", id="pattern_label", classes="four", markup=False) - # 3 yield Static(" ", classes="five") - # 4 yield Static("Index / Subindex") yield Static("", id="index_label", classes="two") yield Static("", id="subindex_label", classes="two") - # 5 yield Static(" ", classes="five") - # 6 yield Static("Type") - yield Select.from_values([t.label() for t in TrackType], classes="four", id="type_select") + yield Select.from_values( + [trackType.label() for trackType in TrackType], + classes="four", + id="type_select", + ) - # 7 - if self.__trackType == TrackType.AUDIO: - yield Static("Audio Layout") - yield Select.from_values([t.label() for t in AudioLayout], classes="four", id="audio_layout_select") - else: - yield Static(" ", classes="five") + yield Static("Audio Layout") + yield Select.from_values( + [layout.label() for layout in AudioLayout], + classes="four", + id="audio_layout_select", + ) - # 8 yield Static(" ", classes="five") - # 9 yield Static(" ", classes="five") - # 10 yield Static("Language") yield Select.from_values(languages, classes="four", id="language_select") - # 11 + yield Static(" ", classes="five") - # 12 yield Static("Title") yield Input(id="title_input", 
classes="four") - # 13 yield Static(" ", classes="five") - # 14 yield Static(" ", classes="five") - # 15 yield Static("Stream tags") yield Static(" ") yield Button("Add", id="button_add_stream_tag") yield Button("Edit", id="button_edit_stream_tag") yield Button("Delete", id="button_delete_stream_tag") - # 16 + yield self.trackTagsTable - # 17 yield Static(" ", classes="five") - # 18 yield Static("Stream dispositions", classes="five") - # 19 yield SelectionList[int]( classes="five", - id = "dispositions_selection_list" + id="dispositions_selection_list", ) - # 20 yield Static(" ", classes="five") - # 21 yield Static(" ", classes="five") - # 22 yield Button("Save", id="save_button") yield Button("Cancel", id="cancel_button") - # 23 yield Static(" ", classes="five") - # 24 yield Static(" ", classes="five", id="messagestatic") - yield Footer(id="footer") - def getTrackDescriptorFromInput(self): kwargs = {} - kwargs[TrackDescriptor.CONTEXT_KEY] = self.context - kwargs[TrackDescriptor.PATTERN_ID_KEY] = int(self.__pattern.getId()) + if self.__trackDescriptor is not None and self.__trackDescriptor.getId() != -1: + kwargs[TrackDescriptor.ID_KEY] = self.__trackDescriptor.getId() - kwargs[TrackDescriptor.INDEX_KEY] = self.__index - kwargs[TrackDescriptor.SUB_INDEX_KEY] = self.__subIndex #! 
+ if self.__patternId != -1: + kwargs[TrackDescriptor.PATTERN_ID_KEY] = int(self.__patternId) - kwargs[TrackDescriptor.TRACK_TYPE_KEY] = TrackType.fromLabel(self.query_one("#type_select", Select).value) + kwargs[TrackDescriptor.INDEX_KEY] = int(self.__index) + kwargs[TrackDescriptor.SOURCE_INDEX_KEY] = ( + int(self.__trackDescriptor.getSourceIndex()) + if self.__trackDescriptor is not None + else int(self.__index) + ) + if self.__subIndex is not None and int(self.__subIndex) >= 0: + kwargs[TrackDescriptor.SUB_INDEX_KEY] = int(self.__subIndex) + selectedTrackType = TrackType.fromLabel( + self.query_one("#type_select", Select).value + ) + kwargs[TrackDescriptor.TRACK_TYPE_KEY] = selectedTrackType kwargs[TrackDescriptor.CODEC_KEY] = self.__trackCodec - - if self.__trackType == TrackType.AUDIO: - kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = AudioLayout.fromLabel(self.query_one("#audio_layout_select", Select).value) + + if selectedTrackType == TrackType.AUDIO: + kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = AudioLayout.fromLabel( + self.query_one("#audio_layout_select", Select).value + ) else: kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = AudioLayout.LAYOUT_UNDEFINED - trackTags = {} + trackTags = dict(self.__draftTrackTags) + language = self.query_one("#language_select", Select).value if language: - trackTags['language'] = IsoLanguage.find(language).threeLetter() + trackTags["language"] = IsoLanguage.find(language).threeLetter() + title = self.query_one("#title_input", Input).value if title: - trackTags['title'] = title + trackTags["title"] = title - tableTags = {row[0]:row[1] for r in self.trackTagsTable.rows if (row := self.trackTagsTable.get_row(r)) and row[0] != 'language' and row[0] != 'title'} + kwargs[TrackDescriptor.TAGS_KEY] = trackTags - kwargs[TrackDescriptor.TAGS_KEY] = trackTags | tableTags - - dispositionFlags = sum([2**f for f in self.query_one("#dispositions_selection_list", SelectionList).selected]) - kwargs[TrackDescriptor.DISPOSITION_SET_KEY] = 
TrackDisposition.toSet(dispositionFlags) + dispositionFlags = sum( + [2 ** flag for flag in self.query_one("#dispositions_selection_list", SelectionList).selected] + ) + kwargs[TrackDescriptor.DISPOSITION_SET_KEY] = TrackDisposition.toSet( + dispositionFlags + ) return TrackDescriptor(**kwargs) - - def getSelectedTag(self): try: - - # Fetch the currently selected row when 'Enter' is pressed - #selected_row_index = self.table.cursor_row - row_key, col_key = self.trackTagsTable.coordinate_to_cell_key(self.trackTagsTable.cursor_coordinate) + row_key, _ = self.trackTagsTable.coordinate_to_cell_key( + self.trackTagsTable.cursor_coordinate + ) if row_key is not None: selected_tag_data = self.trackTagsTable.get_row(row_key) @@ -357,101 +377,92 @@ class TrackDetailsScreen(Screen): return tagKey, tagValue - else: - return None + return None except CellDoesNotExist: return None - - - # Event handler for button press def on_button_pressed(self, event: Button.Pressed) -> None: - # Check if the button pressed is the one we are interested in if event.button.id == "save_button": - - # Check for multiple default/forced disposition flags - - if self.__trackType == TrackType.VIDEO: - trackList = self.__tc.findVideoTracks(self.__pattern.getId()) - if self.__trackType == TrackType.AUDIO: - trackList = self.__tc.findAudioTracks(self.__pattern.getId()) - elif self.__trackType == TrackType.SUBTITLE: - trackList = self.__tc.findSubtitleTracks(self.__pattern.getId()) - else: - trackList = [] - - siblingTrackList = [t for t in trackList if t.getType() == self.__trackType and t.getIndex() != self.__index] - - numDefaultTracks = len([t for t in siblingTrackList if TrackDisposition.DEFAULT in t.getDispositionSet()]) - numForcedTracks = len([t for t in siblingTrackList if TrackDisposition.FORCED in t.getDispositionSet()]) - - self.__subIndex = len(trackList) trackDescriptor = self.getTrackDescriptorFromInput() - if ((TrackDisposition.DEFAULT in trackDescriptor.getDispositionSet() and 
numDefaultTracks) - or (TrackDisposition.FORCED in trackDescriptor.getDispositionSet() and numForcedTracks)): + siblingTrackList = [ + descriptor + for descriptor in self.__siblingTrackDescriptors + if not self._descriptor_refs_same_track(descriptor) + ] + siblingTrackList = [ + descriptor + for descriptor in siblingTrackList + if descriptor.getType() == trackDescriptor.getType() + ] - self.query_one("#messagestatic", Static).update("Cannot add another stream with disposition flag 'debug' or 'forced' set") + numDefaultTracks = len( + [ + descriptor + for descriptor in siblingTrackList + if TrackDisposition.DEFAULT in descriptor.getDispositionSet() + ] + ) + numForcedTracks = len( + [ + descriptor + for descriptor in siblingTrackList + if TrackDisposition.FORCED in descriptor.getDispositionSet() + ] + ) + if self.__isNew: + trackDescriptor.setSubIndex(len(siblingTrackList)) + elif self.__subIndex is not None and int(self.__subIndex) >= 0: + trackDescriptor.setSubIndex(int(self.__subIndex)) + + if ( + TrackDisposition.DEFAULT in trackDescriptor.getDispositionSet() + and numDefaultTracks + ) or ( + TrackDisposition.FORCED in trackDescriptor.getDispositionSet() + and numForcedTracks + ): + + self.query_one("#messagestatic", Static).update( + "Cannot add another stream with disposition flag 'default' or 'forced' set" + ) else: - self.query_one("#messagestatic", Static).update(" ") - - if self.__isNew: - - # Track per Screen hinzufügen - self.__tc.addTrack(trackDescriptor) - self.dismiss(trackDescriptor) - - else: - - track = self.__tc.getTrack(self.__pattern.getId(), self.__index) - - # Track per details screen updaten - if self.__tc.updateTrack(track.getId(), trackDescriptor): - self.dismiss(trackDescriptor) - - else: - self.app.pop_screen() + self.dismiss(trackDescriptor) if event.button.id == "cancel_button": self.app.pop_screen() - if event.button.id == "button_add_stream_tag": - if not self.__isNew: - self.app.push_screen(TagDetailsScreen(), self.handle_update_tag) 
+ self.app.push_screen(TagDetailsScreen(), self.handle_update_tag) if event.button.id == "button_edit_stream_tag": - tagKey, tagValue = self.getSelectedTag() - self.app.push_screen(TagDetailsScreen(key=tagKey, value=tagValue), self.handle_update_tag) + selectedTag = self.getSelectedTag() + if selectedTag is not None: + self.app.push_screen( + TagDetailsScreen(key=selectedTag[0], value=selectedTag[1]), + self.handle_update_tag, + ) if event.button.id == "button_delete_stream_tag": - tagKey, tagValue = self.getSelectedTag() - self.app.push_screen(TagDeleteScreen(key=tagKey, value=tagValue), self.handle_delete_tag) - + selectedTag = self.getSelectedTag() + if selectedTag is not None: + self.app.push_screen( + TagDeleteScreen(key=selectedTag[0], value=selectedTag[1]), + self.handle_delete_tag, + ) def handle_update_tag(self, tag): - - trackId = self.__trackDescriptor.getId() - - if trackId == -1: - raise click.ClickException(f"TrackDetailsScreen.handle_update_tag: trackId not set (-1) trackDescriptor={self.__trackDescriptor}") - - if self.__tac.updateTrackTag(trackId, tag[0], tag[1]) is not None: - self.updateTags() + if tag is None: + return + self.__draftTrackTags[str(tag[0])] = str(tag[1]) + self.updateTags() def handle_delete_tag(self, trackTag): - - trackId = self.__trackDescriptor.getId() - - if trackId == -1: - raise click.ClickException(f"TrackDetailsScreen.handle_delete_tag: trackId not set (-1) trackDescriptor={self.__trackDescriptor}") - - tag = self.__tac.findTrackTag(trackId, trackTag[0]) - - if tag is not None: - if self.__tac.deleteTrackTag(tag.id): - self.updateTags() + if trackTag is None: + return + self.__draftTrackTags.pop(str(trackTag[0]), None) + self.updateTags() diff --git a/tests/integration/pattern_management/__init__.py b/tests/integration/pattern_management/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/integration/pattern_management/__init__.py @@ -0,0 +1 @@ + diff --git 
a/tests/integration/pattern_management/test_cli_pattern_matching.py b/tests/integration/pattern_management/test_cli_pattern_matching.py new file mode 100644 index 0000000..4e3bf97 --- /dev/null +++ b/tests/integration/pattern_management/test_cli_pattern_matching.py @@ -0,0 +1,138 @@ +from __future__ import annotations + +from pathlib import Path +import tempfile +import unittest + +from tests.support.ffx_bundle import ( + PatternTrackSpec, + SourceTrackSpec, + add_show, + build_controller_context, + create_source_fixture, + dispose_controller_context, + expected_output_path, + run_ffx_convert, +) + +from ffx.pattern_controller import PatternController +from ffx.track_type import TrackType + +try: + import pytest +except ImportError: # pragma: no cover - unittest-only environments + pytest = None + +if pytest is not None: + pytestmark = [pytest.mark.integration, pytest.mark.pattern_management] + + +class PatternManagementCliTests(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + self.workdir = Path(self.tempdir.name) + self.home_dir = self.workdir / "home" + self.home_dir.mkdir() + self.database_path = self.workdir / "test.db" + + def tearDown(self): + self.tempdir.cleanup() + + def prepare_duplicate_matching_patterns(self): + context = build_controller_context(self.database_path) + try: + add_show(context, show_id=1) + add_show(context, show_id=2) + + controller = PatternController(context) + track_descriptors = [ + PatternTrackSpec(index=0, source_index=0, track_type=TrackType.VIDEO) + ] + + def to_track_descriptor(spec: PatternTrackSpec): + from ffx.track_descriptor import TrackDescriptor + + kwargs = { + TrackDescriptor.INDEX_KEY: spec.index, + TrackDescriptor.SOURCE_INDEX_KEY: spec.source_index, + TrackDescriptor.TRACK_TYPE_KEY: spec.track_type, + TrackDescriptor.TAGS_KEY: dict(spec.tags), + TrackDescriptor.DISPOSITION_SET_KEY: set(spec.dispositions), + } + return TrackDescriptor(**kwargs) + + controller.savePatternSchema( 
+ {"show_id": 1, "pattern": r"^dup_(s[0-9]+e[0-9]+)\.mkv$"}, + [to_track_descriptor(track_descriptors[0])], + ) + controller.savePatternSchema( + {"show_id": 2, "pattern": r"^dup_.*$"}, + [to_track_descriptor(track_descriptors[0])], + ) + finally: + dispose_controller_context(context) + + def test_convert_fails_when_filename_matches_more_than_one_pattern(self): + self.prepare_duplicate_matching_patterns() + source_filename = "dup_s01e01.mkv" + source_path = create_source_fixture( + self.workdir, + source_filename, + [ + SourceTrackSpec(TrackType.VIDEO, identity="video-0"), + SourceTrackSpec(TrackType.AUDIO, identity="audio-1", language="eng"), + ], + ) + + completed = run_ffx_convert( + self.workdir, + self.home_dir, + self.database_path, + "--video-encoder", + "copy", + "--no-tmdb", + "--no-prompt", + "--no-signature", + str(source_path), + ) + + self.assertNotEqual(completed.returncode, 0) + error_output = f"{completed.stdout}\n{completed.stderr}" + self.assertIn("matched more than one pattern", error_output) + self.assertFalse(expected_output_path(self.workdir, source_filename).exists()) + + def test_convert_can_ignore_duplicate_matches_when_no_pattern_is_requested(self): + self.prepare_duplicate_matching_patterns() + source_filename = "dup_s01e01.mkv" + source_path = create_source_fixture( + self.workdir, + source_filename, + [ + SourceTrackSpec(TrackType.VIDEO, identity="video-0"), + SourceTrackSpec(TrackType.AUDIO, identity="audio-1", language="eng"), + ], + ) + + completed = run_ffx_convert( + self.workdir, + self.home_dir, + self.database_path, + "--video-encoder", + "copy", + "--no-pattern", + "--no-tmdb", + "--no-prompt", + "--no-signature", + str(source_path), + ) + + self.assertEqual( + 0, + completed.returncode, + f"STDOUT:\n{completed.stdout}\nSTDERR:\n{completed.stderr}", + ) + self.assertTrue(expected_output_path(self.workdir, source_filename).exists()) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/support/ffx_bundle.py 
b/tests/support/ffx_bundle.py index 943d33b..1fa5942 100644 --- a/tests/support/ffx_bundle.py +++ b/tests/support/ffx_bundle.py @@ -22,7 +22,6 @@ from ffx.database import databaseContext from ffx.pattern_controller import PatternController from ffx.show_controller import ShowController from ffx.show_descriptor import ShowDescriptor -from ffx.track_controller import TrackController from ffx.track_descriptor import TrackDescriptor from ffx.track_disposition import TrackDisposition from ffx.track_type import TrackType @@ -219,44 +218,41 @@ def create_source_fixture(workdir: Path, filename: str, tracks: list[SourceTrack return output_path -def add_show_and_pattern(context: dict, filename_pattern: str, show_id: int = 1) -> int: +def add_show(context: dict, show_id: int = 1) -> None: show_descriptor = ShowDescriptor( id=show_id, name="Bundle Test Show", year=2000, ) ShowController(context).updateShow(show_descriptor) - pattern_id = PatternController(context).addPattern( - { - "show_id": show_id, - "pattern": filename_pattern, - } - ) - if not pattern_id: - raise AssertionError("Failed to create pattern in test database") - return pattern_id - - -def add_pattern_tracks(context: dict, pattern_id: int, track_specs: list[PatternTrackSpec]) -> None: - track_controller = TrackController(context) - for track in track_specs: - kwargs = { - TrackDescriptor.INDEX_KEY: track.index, - TrackDescriptor.SOURCE_INDEX_KEY: track.source_index, - TrackDescriptor.TRACK_TYPE_KEY: track.track_type, - TrackDescriptor.TAGS_KEY: dict(track.tags), - TrackDescriptor.DISPOSITION_SET_KEY: set(track.dispositions), - } - if track.track_type == TrackType.AUDIO: - kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = track.audio_layout - track_controller.addTrack(TrackDescriptor(**kwargs), pattern_id) def prepare_pattern_database(database_path: Path, filename_pattern: str, track_specs: list[PatternTrackSpec], show_id: int = 1) -> None: context = build_controller_context(database_path) try: - pattern_id = 
add_show_and_pattern(context, filename_pattern, show_id=show_id) - add_pattern_tracks(context, pattern_id, track_specs) + add_show(context, show_id=show_id) + track_descriptors = [] + for track in track_specs: + kwargs = { + TrackDescriptor.INDEX_KEY: track.index, + TrackDescriptor.SOURCE_INDEX_KEY: track.source_index, + TrackDescriptor.TRACK_TYPE_KEY: track.track_type, + TrackDescriptor.TAGS_KEY: dict(track.tags), + TrackDescriptor.DISPOSITION_SET_KEY: set(track.dispositions), + } + if track.track_type == TrackType.AUDIO: + kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = track.audio_layout + track_descriptors.append(TrackDescriptor(**kwargs)) + + pattern_id = PatternController(context).savePatternSchema( + { + "show_id": show_id, + "pattern": filename_pattern, + }, + trackDescriptors=track_descriptors, + ) + if not pattern_id: + raise AssertionError("Failed to create pattern in test database") finally: dispose_controller_context(context) diff --git a/tests/unit/test_pattern_management.py b/tests/unit/test_pattern_management.py new file mode 100644 index 0000000..eb5ef60 --- /dev/null +++ b/tests/unit/test_pattern_management.py @@ -0,0 +1,240 @@ +from __future__ import annotations + +import logging +from pathlib import Path +import sys +import tempfile +import unittest + +import click + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx.audio_layout import AudioLayout # noqa: E402 +from ffx.database import databaseContext # noqa: E402 +from ffx.file_properties import FileProperties # noqa: E402 +from ffx.model.pattern import Pattern # noqa: E402 +from ffx.pattern_controller import ( # noqa: E402 + DuplicatePatternMatchError, + InvalidPatternSchemaError, + PatternController, +) +from ffx.show_controller import ShowController # noqa: E402 +from ffx.show_descriptor import ShowDescriptor # noqa: E402 +from ffx.track_controller import TrackController # noqa: E402 +from 
ffx.track_descriptor import TrackDescriptor # noqa: E402 +from ffx.track_disposition import TrackDisposition # noqa: E402 +from ffx.track_type import TrackType # noqa: E402 + + +class StaticConfig: + def __init__(self, data: dict | None = None): + self._data = data or {} + + def getData(self): + return self._data + + +def make_logger(name: str) -> logging.Logger: + logger = logging.getLogger(name) + logger.handlers = [] + logger.setLevel(logging.DEBUG) + logger.propagate = False + logger.addHandler(logging.NullHandler()) + return logger + + +def make_context(database_path: Path) -> dict: + return { + "logger": make_logger(f"ffx-test-pattern-{database_path.stem}"), + "config": StaticConfig(), + "database": databaseContext(str(database_path)), + "use_pattern": True, + } + + +def make_track_descriptor( + index: int = 0, + *, + source_index: int | None = None, + track_type: TrackType = TrackType.VIDEO, + title: str = "", + dispositions: set[TrackDisposition] | None = None, +) -> TrackDescriptor: + kwargs = { + TrackDescriptor.INDEX_KEY: index, + TrackDescriptor.SOURCE_INDEX_KEY: index if source_index is None else source_index, + TrackDescriptor.TRACK_TYPE_KEY: track_type, + TrackDescriptor.TAGS_KEY: {"title": title} if title else {}, + TrackDescriptor.DISPOSITION_SET_KEY: dispositions or set(), + } + if track_type == TrackType.AUDIO: + kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = AudioLayout.LAYOUT_STEREO + return TrackDescriptor(**kwargs) + + +class PatternManagementTests(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + self.database_path = Path(self.tempdir.name) / "pattern-test.db" + self.context = make_context(self.database_path) + self.pattern_controller = PatternController(self.context) + self.track_controller = TrackController(self.context) + self.show_controller = ShowController(self.context) + PatternController._clear_regex_cache() + + def tearDown(self): + self.context["database"]["engine"].dispose() + 
self.tempdir.cleanup() + PatternController._clear_regex_cache() + + def add_show(self, show_id: int, name: str) -> None: + self.show_controller.updateShow( + ShowDescriptor( + id=show_id, + name=name, + year=2000 + show_id, + ) + ) + + def save_pattern( + self, + show_id: int, + pattern_expression: str, + *, + tracks: list[TrackDescriptor] | None = None, + ) -> int: + self.add_show(show_id, f"Show {show_id}") + return self.pattern_controller.savePatternSchema( + { + "show_id": show_id, + "pattern": pattern_expression, + "quality": 0, + "notes": "", + }, + trackDescriptors=tracks or [make_track_descriptor(0)], + ) + + def insert_trackless_pattern_row(self, show_id: int, pattern_expression: str) -> int: + self.add_show(show_id, f"Show {show_id}") + Session = self.context["database"]["session"] + session = Session() + try: + pattern = Pattern(show_id=show_id, pattern=pattern_expression) + session.add(pattern) + session.commit() + return int(pattern.id) + finally: + session.close() + + def test_match_filename_returns_single_matching_pattern(self): + pattern_id = self.save_pattern(1, r"^single_(s[0-9]+e[0-9]+)\.mkv$") + + match = self.pattern_controller.matchFilename("single_s01e01.mkv") + + self.assertEqual(pattern_id, match["pattern"].getId()) + self.assertEqual("s01e01", match["match"].group(1)) + + def test_match_filename_raises_for_duplicate_matches_in_same_show(self): + self.save_pattern(1, r"^same_(s[0-9]+e[0-9]+)\.mkv$") + self.save_pattern(1, r"^same_.*$") + + with self.assertRaises(DuplicatePatternMatchError) as caught: + self.pattern_controller.matchFilename("same_s01e01.mkv") + + self.assertIn("matched more than one pattern", str(caught.exception)) + self.assertIn("show #1", str(caught.exception)) + + def test_match_filename_raises_for_duplicate_matches_across_shows(self): + self.save_pattern(1, r"^cross_(s[0-9]+e[0-9]+)\.mkv$") + self.save_pattern(2, r"^cross_.*$") + + with self.assertRaises(DuplicatePatternMatchError) as caught: + 
self.pattern_controller.matchFilename("cross_s01e01.mkv") + + self.assertIn("show #1", str(caught.exception)) + self.assertIn("show #2", str(caught.exception)) + + def test_update_pattern_refreshes_regex_matching_after_change(self): + pattern_id = self.save_pattern(1, r"^before_(s[0-9]+e[0-9]+)\.mkv$") + + self.assertTrue( + self.pattern_controller.updatePattern( + pattern_id, + { + "show_id": 1, + "pattern": r"^after_(s[0-9]+e[0-9]+)\.mkv$", + "quality": 0, + "notes": "", + }, + ) + ) + + self.assertEqual({}, self.pattern_controller.matchFilename("before_s01e01.mkv")) + match = self.pattern_controller.matchFilename("after_s01e01.mkv") + self.assertEqual(pattern_id, match["pattern"].getId()) + + def test_save_pattern_schema_rejects_zero_track_patterns(self): + self.add_show(1, "Empty Pattern Show") + + with self.assertRaises(InvalidPatternSchemaError) as caught: + self.pattern_controller.savePatternSchema( + { + "show_id": 1, + "pattern": r"^empty_(s[0-9]+e[0-9]+)\.mkv$", + }, + trackDescriptors=[], + ) + + self.assertIn("at least one track", str(caught.exception)) + + def test_match_filename_rejects_existing_trackless_pattern_rows(self): + self.insert_trackless_pattern_row(1, r"^invalid_(s[0-9]+e[0-9]+)\.mkv$") + + with self.assertRaises(InvalidPatternSchemaError) as caught: + self.pattern_controller.matchFilename("invalid_s01e01.mkv") + + self.assertIn("has no tracks", str(caught.exception)) + + def test_file_properties_skips_pattern_matching_when_disabled(self): + self.save_pattern(1, r"^nopattern_(s[0-9]+e[0-9]+)\.mkv$") + self.save_pattern(2, r"^nopattern_.*$") + + no_pattern_context = dict(self.context) + no_pattern_context["use_pattern"] = False + + file_properties = FileProperties( + no_pattern_context, + "/tmp/nopattern_s01e01.mkv", + ) + + self.assertIsNone(file_properties.getPattern()) + self.assertEqual(-1, file_properties.getShowId()) + self.assertEqual(1, file_properties.getSeason()) + self.assertEqual(1, file_properties.getEpisode()) + + def 
test_track_controller_refuses_to_delete_last_track(self): + pattern_id = self.save_pattern(1, r"^delete_(s[0-9]+e[0-9]+)\.mkv$") + track = self.track_controller.getTrack(pattern_id, 0) + + with self.assertRaises(click.ClickException) as caught: + self.track_controller.deleteTrack(track.getId()) + + self.assertIn("last track", str(caught.exception)) + + def test_exact_duplicate_pattern_definition_is_rejected(self): + self.save_pattern(1, r"^unique_(s[0-9]+e[0-9]+)\.mkv$") + + with self.assertRaises(click.ClickException) as caught: + self.save_pattern(1, r"^unique_(s[0-9]+e[0-9]+)\.mkv$") + + self.assertIn("already exists", str(caught.exception)) + + +if __name__ == "__main__": + unittest.main() From 71553aad329932a2337695b0d1ca06cde10e2b4f Mon Sep 17 00:00:00 2001 From: Javanaut Date: Sat, 11 Apr 2026 14:57:01 +0200 Subject: [PATCH 07/28] Streamlines imports and app start --- SCRATCHPAD.md | 49 +++++-------- src/ffx/cli.py | 93 ++++++++++++++----------- src/ffx/constants.py | 7 ++ src/ffx/ffx_controller.py | 23 ++++-- src/ffx/helper.py | 2 +- tests/unit/test_cli_lazy_imports.py | 104 ++++++++++++++++++++++++++++ 6 files changed, 199 insertions(+), 79 deletions(-) create mode 100644 tests/unit/test_cli_lazy_imports.py diff --git a/SCRATCHPAD.md b/SCRATCHPAD.md index c0ab74d..4864883 100644 --- a/SCRATCHPAD.md +++ b/SCRATCHPAD.md @@ -9,6 +9,8 @@ - The biggest near-term wins are in startup cost, repeated subprocess work, repeated database query patterns, and general repo hygiene. - This list is intentionally optimization-oriented rather than bug-oriented. Some items below also improve correctness or maintainability, but they were selected because they can reduce runtime cost, operator friction, or iteration overhead. - A first modern integration slice now exists under [`tests/integration/subtrack_mapping`](/home/osgw/.local/src/codex/ffx/tests/integration/subtrack_mapping). 
Remaining test-suite cleanup is now mostly about migrating and shrinking the legacy harness surface under [`tests/legacy`](/home/osgw/.local/src/codex/ffx/tests/legacy). +- The CLI root now lazy-loads heavy runtime dependencies so lightweight commands such as `version`, `help`, `configure_workstation`, and `upgrade` stay import-light. +- Shared CLI defaults for container/output tokens now live outside [`src/ffx/ffx_controller.py`](/home/osgw/.local/src/codex/ffx/src/ffx/ffx_controller.py), and a focused unit test locks in the lazy-import contract. - FFX logger setup now reuses named handlers, and fallback logger access no longer mutates handlers in ordinary constructors and helpers. - The process wrapper now uses `subprocess.run(...)` with centralized command formatting plus stable timeout and missing-command error mapping. - Active ORM controllers now use single-query accessors instead of paired `count()` plus `first()` lookups. @@ -17,8 +19,8 @@ ## Focused Snapshot - Highest-leverage application optimizations: - - Lazy-load CLI command dependencies so lightweight commands do not import most of the app. - Collapse repeated `ffprobe` calls into a single probe result per source file. + - Revisit crop detection cost after the probe path is consolidated. - Highest-leverage repo and workflow optimizations: - Consolidate setup and upgrade tooling to reduce overlapping shell-script responsibilities. @@ -26,16 +28,7 @@ ## Optimization Candidates -1. CLI startup and import cost -- [`src/ffx/cli.py`](/home/osgw/.local/src/codex/ffx/src/ffx/cli.py) imports a large portion of the application at module import time, even for cheap commands such as `version`, `help`, `configure_workstation`, and `upgrade`. -- Optimization: - - Move heavy imports into the commands that actually need them. - - Keep the CLI root importable with only core stdlib and Click dependencies. -- Expected value: - - Faster startup for scripting and tooling commands. 
- - Less coupling between maintenance commands and the runtime stack. - -2. Media probing does two separate `ffprobe` subprocesses per file +1. Media probing does two separate `ffprobe` subprocesses per file - [`src/ffx/file_properties.py`](/home/osgw/.local/src/codex/ffx/src/ffx/file_properties.py) calls `ffprobe` once for format data and once for stream data. - Optimization: - Use one probe call that requests both format and streams. @@ -44,7 +37,7 @@ - Less subprocess overhead. - Faster inspect and convert flows. -3. Crop detection is always a full extra ffmpeg scan +2. Crop detection is always a full extra ffmpeg scan - [`src/ffx/file_properties.py`](/home/osgw/.local/src/codex/ffx/src/ffx/file_properties.py) runs a dedicated `ffmpeg -vf cropdetect` pass for each file when crop detection is requested. - Optimization: - Cache crop results for repeated runs on the same source. @@ -52,7 +45,7 @@ - Expected value: - Lower latency on repeated experimentation. -4. Tooling overlap and naming drift +3. Tooling overlap and naming drift - There are still overlapping workstation-setup entrypoints across [`tools/configure_workstation.sh`](/home/osgw/.local/src/codex/ffx/tools/configure_workstation.sh), [`tools/setup.sh`](/home/osgw/.local/src/codex/ffx/tools/setup.sh), and newer CLI maintenance commands. - Optimization: - Decide which scripts remain canonical. @@ -62,7 +55,7 @@ - Less operator confusion. - Fewer duplicated procedures to maintain. -5. Placeholder UI surfaces should either ship or disappear +4. Placeholder UI surfaces should either ship or disappear - [`src/ffx/help_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/help_screen.py) and [`src/ffx/settings_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/settings_screen.py) are placeholders. - Optimization: - Either remove them from the active UI surface or complete them. @@ -71,7 +64,7 @@ - Leaner interface. - Lower UX ambiguity. -6. Large Textual screens repeat configuration and controller loading +5. 
Large Textual screens repeat configuration and controller loading - Screens such as [`src/ffx/media_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/media_details_screen.py), [`src/ffx/pattern_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/pattern_details_screen.py), and [`src/ffx/show_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/show_details_screen.py) repeat setup patterns and local metadata filtering extraction. - Optimization: - Extract a shared screen base or helper for common config/controller/bootstrap logic. @@ -80,7 +73,7 @@ - Lower maintenance overhead. - Easier UI iteration. -7. Several helper functions are unfinished or dead-weight +6. Several helper functions are unfinished or dead-weight - [`src/ffx/helper.py`](/home/osgw/.local/src/codex/ffx/src/ffx/helper.py) contains `permutateList(...): pass`. - There are many combinator and conversion placeholders across tests and migrations. - Optimization: @@ -90,7 +83,7 @@ - Smaller mental model. - Less time spent re-evaluating inactive paths. -8. Test suite shape is expensive to understand and likely expensive to run +7. Test suite shape is expensive to understand and likely expensive to run - The project still carries a large legacy matrix of combinator files under [`tests/legacy`](/home/osgw/.local/src/codex/ffx/tests/legacy), several placeholder `pass` implementations, and at least one suspicious filename with an embedded space: [`tests/legacy/disposition_combinator_2_3 .py`](/home/osgw/.local/src/codex/ffx/tests/legacy/disposition_combinator_2_3 .py). - A first focused replacement slice now exists in [`tests/integration/subtrack_mapping/test_cli_bundle.py`](/home/osgw/.local/src/codex/ffx/tests/integration/subtrack_mapping/test_cli_bundle.py), so the remaining work is migration and consolidation rather than creating the modern test shape from scratch. - Optimization: @@ -101,7 +94,7 @@ - Faster contributor onboarding. - Easier CI adoption later. -9. 
Process resource limiting semantics could be clearer +8. Process resource limiting semantics could be clearer - [`src/ffx/process.py`](/home/osgw/.local/src/codex/ffx/src/ffx/process.py) prepends `nice` and `cpulimit` directly when values are set. - Optimization: - Validate and document effective behavior for combined `nice` + `cpulimit`. @@ -110,24 +103,16 @@ - Fewer surprises in production-like runs. - Easier support for user-reported performance behavior. -10. Import-time dependency coupling makes maintenance commands brittle -- Even after recent CLI maintenance additions, the top-level CLI module still imports most application modules before Click dispatch. +9. Regex and string utility cleanup +- [`src/ffx/helper.py`](/home/osgw/.local/src/codex/ffx/src/ffx/helper.py) still has repeated string-replacement churn in filename/TMDB normalization helpers, and regex handling in helpers is easy to regress quietly. - Optimization: - - Push imports for ORM, Textual, TMDB, ffmpeg helpers, and descriptors behind the commands that actually need them. -- Expected value: - - Maintenance commands such as setup and upgrade stay usable when optional runtime dependencies are broken. - - Better separation between media runtime code and maintenance tooling. - -11. Regex and string utility cleanup -- [`src/ffx/helper.py`](/home/osgw/.local/src/codex/ffx/src/ffx/helper.py) still emits a `SyntaxWarning` for `RICH_COLOR_PATTERN`. -- Optimization: - - Convert regex literals to raw strings where appropriate. + - Keep regex literals raw and centralized where appropriate. - Review filename and TMDB substitution helpers for repeated string churn. - Expected value: - Cleaner runtime output. - Less warning noise during dry-run maintenance commands. -12. Database startup always runs schema creation and version checks +10. 
Database startup always runs schema creation and version checks - [`src/ffx/database.py`](/home/osgw/.local/src/codex/ffx/src/ffx/database.py) runs `Base.metadata.create_all(...)` and version checks every time a DB-backed context is created. - Optimization: - Measure startup cost and consider separating bootstrapping from ordinary command execution. @@ -151,9 +136,9 @@ 1. Triage the list into quick wins, medium refactors, and long-horizon cleanup. 2. Tackle the cheapest high-impact items first: - - regex raw-string warning cleanup, - single-call `ffprobe` refactor. -3. Decide whether maintenance/tooling command imports should be split from media-runtime imports before adding more CLI maintenance surface. + - crop detection sampling or caching pass. +3. Decide which setup and upgrade entrypoints stay canonical before adding more maintenance surface. ## Delete When diff --git a/src/ffx/cli.py b/src/ffx/cli.py index 2d18395..d36f8ad 100755 --- a/src/ffx/cli.py +++ b/src/ffx/cli.py @@ -1,6 +1,9 @@ #! /usr/bin/python3 +from __future__ import annotations + import os, sys, click, time, shutil, subprocess +from typing import TYPE_CHECKING # Allow direct execution via `python src/ffx/cli.py` by preferring the package # root on sys.path. 
@@ -10,42 +13,24 @@ if __package__ in (None, ''): sys.path = [p for p in sys.path if os.path.abspath(p) != os.path.abspath(script_dir)] sys.path.insert(0, package_root) -from ffx.configuration_controller import ConfigurationController +from ffx.constants import ( + DEFAULT_AC3_BANDWIDTH, + DEFAULT_CONTAINER_EXTENSION, + DEFAULT_CONTAINER_FORMAT, + DEFAULT_DTS_BANDWIDTH, + DEFAULT_STEREO_BANDWIDTH, + DEFAULT_VIDEO_ENCODER_LABEL, + FFMPEG_COMMAND_TOKENS, + SUPPORTED_INPUT_FILE_EXTENSIONS, + VERSION, +) -from ffx.file_properties import FileProperties +if TYPE_CHECKING: + from ffx.media_descriptor import MediaDescriptor + from ffx.track_descriptor import TrackDescriptor -from ffx.ffx_app import FfxApp -from ffx.ffx_controller import FfxController -from ffx.tmdb_controller import TmdbController +LIGHTWEIGHT_COMMANDS = {None, 'version', 'help', 'configure_workstation', 'upgrade'} -from ffx.database import databaseContext - -from ffx.media_descriptor import MediaDescriptor -from ffx.track_descriptor import TrackDescriptor -from ffx.show_descriptor import ShowDescriptor - -from ffx.track_type import TrackType -from ffx.video_encoder import VideoEncoder -from ffx.track_disposition import TrackDisposition -from ffx.track_codec import TrackCodec - -from ffx.process import executeProcess -from ffx.helper import filterFilename, substituteTmdbFilename -from ffx.helper import getEpisodeFileBasename - -from ffx.constants import DEFAULT_STEREO_BANDWIDTH, DEFAULT_AC3_BANDWIDTH, DEFAULT_DTS_BANDWIDTH, DEFAULT_7_1_BANDWIDTH - -from ffx.filter.quality_filter import QualityFilter -from ffx.filter.preset_filter import PresetFilter - -from ffx.filter.crop_filter import CropFilter -from ffx.filter.nlmeans_filter import NlmeansFilter -from ffx.filter.deinterlace_filter import DeinterlaceFilter - -from ffx.constants import VERSION - -from ffx.shifted_season_controller import ShiftedSeasonController -from ffx.logging_utils import configure_ffx_logger @click.group() @@ -58,11 +43,18 @@ def 
ffx(ctx, database_file, verbose, dry_run): ctx.obj = {} - if ctx.invoked_subcommand in ('configure_workstation', 'upgrade'): + if ctx.resilient_parsing: + return + + if ctx.invoked_subcommand in LIGHTWEIGHT_COMMANDS: ctx.obj['dry_run'] = dry_run ctx.obj['verbosity'] = verbose return + from ffx.configuration_controller import ConfigurationController + from ffx.database import databaseContext + from ffx.logging_utils import configure_ffx_logger + ctx.obj['config'] = ConfigurationController() ctx.obj['database'] = databaseContext(databasePath=database_file @@ -184,6 +176,7 @@ def upgrade(ctx, branch): @click.pass_context @click.argument('filename', nargs=1) def inspect(ctx, filename): + from ffx.ffx_app import FfxApp ctx.obj['command'] = 'inspect' ctx.obj['arguments'] = {} @@ -196,7 +189,7 @@ def inspect(ctx, filename): def getUnmuxSequence(trackDescriptor: TrackDescriptor, sourcePath, targetPrefix, targetDirectory = ''): # executable and input file - commandTokens = FfxController.COMMAND_TOKENS + ['-i', sourcePath] + commandTokens = list(FFMPEG_COMMAND_TOKENS) + ['-i', sourcePath] trackType = trackDescriptor.getType() @@ -237,6 +230,10 @@ def unmux(ctx, subtitles_only, nice, cpu): + from ffx.file_properties import FileProperties + from ffx.process import executeProcess + from ffx.track_disposition import TrackDisposition + from ffx.track_type import TrackType existingSourcePaths = [p for p in paths if os.path.isfile(p)] ctx.obj['logger'].debug(f"\nUnmuxing {len(existingSourcePaths)} files") @@ -307,6 +304,7 @@ def cropdetect(ctx, paths, nice, cpu): + from ffx.file_properties import FileProperties existingSourcePaths = [p for p in paths if os.path.isfile(p)] ctx.obj['logger'].debug(f"\nUnmuxing {len(existingSourcePaths)} files") @@ -333,6 +331,7 @@ def cropdetect(ctx, @click.pass_context def shows(ctx): + from ffx.ffx_app import FfxApp ctx.obj['command'] = 'shows' @@ -341,6 +340,8 @@ def shows(ctx): def checkUniqueDispositions(context, mediaDescriptor: 
MediaDescriptor): + from ffx.track_disposition import TrackDisposition + from ffx.track_type import TrackType # Check for multiple default or forced dispositions if not set by user input or database requirements # @@ -390,7 +391,7 @@ def checkUniqueDispositions(context, mediaDescriptor: MediaDescriptor): @click.option('-l', '--label', type=str, default='', help='Label to be used as filename prefix') -@click.option('-v', '--video-encoder', type=str, default=FfxController.DEFAULT_VIDEO_ENCODER, help=f"Target video encoder (vp9, av1, h264 or copy)", show_default=True) +@click.option('-v', '--video-encoder', type=str, default=DEFAULT_VIDEO_ENCODER_LABEL, help=f"Target video encoder (vp9, av1, h264 or copy)", show_default=True) @click.option('-q', '--quality', type=str, default="", help=f"Quality settings to be used with VP9/H264 encoder") @click.option('-p', '--preset', type=str, default="", help=f"Quality preset to be used with AV1 encoder") @@ -507,6 +508,20 @@ def convert(ctx, Filename extensions will be changed appropriately. 
Suffices will we appended to filename in case of multiple created files or if the filename has not changed.""" + from ffx.ffx_controller import FfxController + from ffx.file_properties import FileProperties + from ffx.filter.crop_filter import CropFilter + from ffx.filter.deinterlace_filter import DeinterlaceFilter + from ffx.filter.nlmeans_filter import NlmeansFilter + from ffx.filter.preset_filter import PresetFilter + from ffx.filter.quality_filter import QualityFilter + from ffx.helper import filterFilename, getEpisodeFileBasename, substituteTmdbFilename + from ffx.shifted_season_controller import ShiftedSeasonController + from ffx.show_descriptor import ShowDescriptor + from ffx.tmdb_controller import TmdbController + from ffx.track_codec import TrackCodec + from ffx.track_disposition import TrackDisposition + from ffx.video_encoder import VideoEncoder startTime = time.perf_counter() @@ -519,8 +534,8 @@ def convert(ctx, targetFormat = '' targetExtension = 'mkv' else: - targetFormat = FfxController.DEFAULT_FILE_FORMAT - targetExtension = FfxController.DEFAULT_FILE_EXTENSION + targetFormat = DEFAULT_CONTAINER_FORMAT + targetExtension = DEFAULT_CONTAINER_EXTENSION context['use_tmdb'] = not no_tmdb context['use_pattern'] = not no_pattern @@ -540,7 +555,7 @@ def convert(ctx, context['subtitle_prefix'] = subtitle_prefix - existingSourcePaths = [p for p in paths if os.path.isfile(p) and p.split('.')[-1] in FfxController.INPUT_FILE_EXTENSIONS] + existingSourcePaths = [p for p in paths if os.path.isfile(p) and p.split('.')[-1] in SUPPORTED_INPUT_FILE_EXTENSIONS] # CLI Overrides diff --git a/src/ffx/constants.py b/src/ffx/constants.py index b4f9d87..b1471db 100644 --- a/src/ffx/constants.py +++ b/src/ffx/constants.py @@ -4,6 +4,13 @@ DATABASE_VERSION = 2 DEFAULT_QUALITY = 32 DEFAULT_AV1_PRESET = 5 +DEFAULT_VIDEO_ENCODER_LABEL = "vp9" +DEFAULT_CONTAINER_FORMAT = "webm" +DEFAULT_CONTAINER_EXTENSION = "webm" +SUPPORTED_INPUT_FILE_EXTENSIONS = ("mkv", "mp4", "avi", "flv", 
"webm") +FFMPEG_COMMAND_TOKENS = ("ffmpeg", "-y") +FFMPEG_NULL_OUTPUT_TOKENS = ("-f", "null", "/dev/null") + DEFAULT_STEREO_BANDWIDTH = "112" DEFAULT_AC3_BANDWIDTH = "256" DEFAULT_DTS_BANDWIDTH = "320" diff --git a/src/ffx/ffx_controller.py b/src/ffx/ffx_controller.py index f3241fc..a4907ff 100644 --- a/src/ffx/ffx_controller.py +++ b/src/ffx/ffx_controller.py @@ -10,7 +10,16 @@ from ffx.track_codec import TrackCodec from ffx.video_encoder import VideoEncoder from ffx.process import executeProcess -from ffx.constants import DEFAULT_cut_start, DEFAULT_cut_length +from ffx.constants import ( + DEFAULT_CONTAINER_EXTENSION, + DEFAULT_CONTAINER_FORMAT, + DEFAULT_VIDEO_ENCODER_LABEL, + DEFAULT_cut_start, + DEFAULT_cut_length, + FFMPEG_COMMAND_TOKENS, + FFMPEG_NULL_OUTPUT_TOKENS, + SUPPORTED_INPUT_FILE_EXTENSIONS, +) from ffx.filter.quality_filter import QualityFilter from ffx.filter.preset_filter import PresetFilter @@ -21,17 +30,17 @@ from ffx.model.pattern import Pattern class FfxController(): - COMMAND_TOKENS = ['ffmpeg', '-y'] - NULL_TOKENS = ['-f', 'null', '/dev/null'] # -f null /dev/null + COMMAND_TOKENS = list(FFMPEG_COMMAND_TOKENS) + NULL_TOKENS = list(FFMPEG_NULL_OUTPUT_TOKENS) # -f null /dev/null TEMP_FILE_NAME = "ffmpeg2pass-0.log" - DEFAULT_VIDEO_ENCODER = VideoEncoder.VP9.label() + DEFAULT_VIDEO_ENCODER = DEFAULT_VIDEO_ENCODER_LABEL - DEFAULT_FILE_FORMAT = 'webm' - DEFAULT_FILE_EXTENSION = 'webm' + DEFAULT_FILE_FORMAT = DEFAULT_CONTAINER_FORMAT + DEFAULT_FILE_EXTENSION = DEFAULT_CONTAINER_EXTENSION - INPUT_FILE_EXTENSIONS = ['mkv', 'mp4', 'avi', 'flv', 'webm'] + INPUT_FILE_EXTENSIONS = list(SUPPORTED_INPUT_FILE_EXTENSIONS) CHANNEL_MAP_5_1 = 'FL-FL|FR-FR|FC-FC|LFE-LFE|SL-BL|SR-BR:5.1' diff --git a/src/ffx/helper.py b/src/ffx/helper.py index 4c40292..cbb8e46 100644 --- a/src/ffx/helper.py +++ b/src/ffx/helper.py @@ -16,7 +16,7 @@ DIFF_REMOVED_KEY = 'removed' DIFF_CHANGED_KEY = 'changed' DIFF_UNCHANGED_KEY = 'unchanged' -RICH_COLOR_PATTERN = 
'\[[a-z_]+\](.+)\[\/[a-z_]+\]' +RICH_COLOR_PATTERN = '\\[[a-z_]+\\](.+)\\[\\/[a-z_]+\\]' def dictDiff(a : dict, b : dict, ignoreKeys: list = [], removeKeys: list = []): diff --git a/tests/unit/test_cli_lazy_imports.py b/tests/unit/test_cli_lazy_imports.py new file mode 100644 index 0000000..3707219 --- /dev/null +++ b/tests/unit/test_cli_lazy_imports.py @@ -0,0 +1,104 @@ +from __future__ import annotations + +import json +from pathlib import Path +import subprocess +import sys +import textwrap +import unittest + + +REPO_ROOT = Path(__file__).resolve().parents[2] +SRC_ROOT = REPO_ROOT / "src" +HEAVY_MODULES = [ + "ffx.configuration_controller", + "ffx.database", + "ffx.ffx_app", + "ffx.ffx_controller", + "ffx.file_properties", + "ffx.tmdb_controller", +] + + +class CliLazyImportTests(unittest.TestCase): + def run_python(self, code: str) -> dict: + completed = subprocess.run( + [sys.executable, "-c", code], + capture_output=True, + cwd=REPO_ROOT, + text=True, + ) + if completed.returncode != 0: + self.fail( + "Python helper failed\n" + f"STDOUT:\n{completed.stdout}\n" + f"STDERR:\n{completed.stderr}" + ) + return json.loads(completed.stdout) + + def test_importing_cli_keeps_runtime_modules_unloaded(self): + result = self.run_python( + textwrap.dedent( + f""" + import json + import sys + + sys.path.insert(0, {str(SRC_ROOT)!r}) + + import ffx.cli + + print(json.dumps({{ + "modules": {{ + module_name: module_name in sys.modules + for module_name in {HEAVY_MODULES!r} + }}, + }})) + """ + ) + ) + + self.assertTrue( + all(not is_loaded for is_loaded in result["modules"].values()), + result["modules"], + ) + + def test_lightweight_configure_workstation_command_stays_light(self): + result = self.run_python( + textwrap.dedent( + f""" + import json + import sys + from click.testing import CliRunner + + sys.path.insert(0, {str(SRC_ROOT)!r}) + + import ffx.cli + + runner = CliRunner() + invoke_result = runner.invoke( + ffx.cli.ffx, + ["--dry-run", "configure_workstation", 
"--check"], + ) + if invoke_result.exit_code != 0: + raise SystemExit(invoke_result.output) + + print(json.dumps({{ + "output": invoke_result.output, + "modules": {{ + module_name: module_name in sys.modules + for module_name in {HEAVY_MODULES!r} + }}, + }})) + """ + ) + ) + + self.assertIn("configure_workstation.sh --check", result["output"]) + self.assertTrue( + all(not is_loaded for is_loaded in result["modules"].values()), + result["modules"], + ) + + +if __name__ == "__main__": + unittest.main() From c384d54c12d37db5fc9705530006f83c9d3a1079 Mon Sep 17 00:00:00 2001 From: Javanaut Date: Sat, 11 Apr 2026 15:08:08 +0200 Subject: [PATCH 08/28] Impr upgrade --- src/ffx/cli.py | 36 +++++++++++++ tests/unit/test_cli_upgrade.py | 99 ++++++++++++++++++++++++++++++++++ 2 files changed, 135 insertions(+) create mode 100644 tests/unit/test_cli_upgrade.py diff --git a/src/ffx/cli.py b/src/ffx/cli.py index d36f8ad..189c198 100755 --- a/src/ffx/cli.py +++ b/src/ffx/cli.py @@ -112,6 +112,24 @@ def getBundleRepoPath(): return getRepoRootPath() +def getTrackedGitChanges(repoPath): + completed = subprocess.run( + ['git', 'status', '--porcelain', '--untracked-files=no'], + cwd=repoPath, + capture_output=True, + text=True, + ) + + if completed.returncode != 0: + commandLabel = 'git status --porcelain --untracked-files=no' + errorOutput = completed.stderr.strip() or completed.stdout.strip() + raise click.ClickException( + f"Unable to inspect bundle repository state using '{commandLabel}': {errorOutput}" + ) + + return [line for line in completed.stdout.splitlines() if line.strip()] + + @ffx.command(name='configure_workstation') @click.pass_context @click.option('--check', is_flag=True, default=False, help='Only verify workstation-configuration readiness') @@ -152,6 +170,24 @@ def upgrade(ctx, branch): raise click.ClickException(f"Bundle pip not found at {bundlePipPath}") commandSequences = [] + trackedChanges = getTrackedGitChanges(bundleRepoPath) + + if trackedChanges: + 
click.echo("Tracked local changes detected in the bundle repository:") + for trackedChange in trackedChanges: + click.echo(f" {trackedChange}") + + shouldReset = click.confirm( + "Discard these tracked changes with 'git reset --hard HEAD' before upgrade?", + default=False, + ) + + if not shouldReset: + raise click.ClickException( + "Upgrade aborted because tracked local changes are present." + ) + + commandSequences.append(['git', 'reset', '--hard', 'HEAD']) if branch: commandSequences.append(['git', 'checkout', branch]) diff --git a/tests/unit/test_cli_upgrade.py b/tests/unit/test_cli_upgrade.py new file mode 100644 index 0000000..90211ba --- /dev/null +++ b/tests/unit/test_cli_upgrade.py @@ -0,0 +1,99 @@ +from __future__ import annotations + +from pathlib import Path +import subprocess +import sys +import unittest +from unittest.mock import patch + +from click.testing import CliRunner + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx import cli # noqa: E402 + + +class UpgradeCommandTests(unittest.TestCase): + def make_completed(self, args, *, stdout: str = "", stderr: str = "", returncode: int = 0): + return subprocess.CompletedProcess(args=args, returncode=returncode, stdout=stdout, stderr=stderr) + + def test_upgrade_aborts_when_tracked_changes_are_present_and_reset_is_declined(self): + runner = CliRunner() + repo_path = "/tmp/ffx-repo" + pip_path = "/tmp/ffx-venv/bin/pip" + + subprocess_calls = [] + + def fake_run(args, **kwargs): + subprocess_calls.append((args, kwargs)) + if args == ['git', 'status', '--porcelain', '--untracked-files=no']: + return self.make_completed(args, stdout=" M src/ffx/cli.py\n") + raise AssertionError(f"Unexpected subprocess invocation: args={args} kwargs={kwargs}") + + with ( + patch.object(cli, "getBundleRepoPath", return_value=repo_path), + patch.object(cli, "getBundlePipPath", return_value=pip_path), + patch.object(cli.os.path, 
"isdir", return_value=True), + patch.object(cli.os.path, "isfile", return_value=True), + patch.object(cli.subprocess, "run", side_effect=fake_run), + ): + result = runner.invoke(cli.ffx, ["upgrade"], input="n\n") + + self.assertNotEqual(0, result.exit_code) + self.assertIn("Tracked local changes detected in the bundle repository:", result.output) + self.assertIn("Discard these tracked changes with 'git reset --hard HEAD' before upgrade?", result.output) + self.assertIn("Upgrade aborted because tracked local changes are present.", result.output) + self.assertEqual(1, len(subprocess_calls)) + self.assertEqual( + ['git', 'status', '--porcelain', '--untracked-files=no'], + subprocess_calls[0][0], + ) + self.assertEqual(repo_path, subprocess_calls[0][1]["cwd"]) + self.assertTrue(subprocess_calls[0][1]["capture_output"]) + self.assertTrue(subprocess_calls[0][1]["text"]) + + def test_upgrade_resets_before_checkout_and_pull_when_user_confirms(self): + runner = CliRunner() + repo_path = "/tmp/ffx-repo" + pip_path = "/tmp/ffx-venv/bin/pip" + + subprocess_calls = [] + + def fake_run(args, **kwargs): + subprocess_calls.append((args, kwargs)) + if args == ['git', 'status', '--porcelain', '--untracked-files=no']: + return self.make_completed(args, stdout="M src/ffx/constants.py\n") + return self.make_completed(args) + + with ( + patch.object(cli, "getBundleRepoPath", return_value=repo_path), + patch.object(cli, "getBundlePipPath", return_value=pip_path), + patch.object(cli.os.path, "isdir", return_value=True), + patch.object(cli.os.path, "isfile", return_value=True), + patch.object(cli.subprocess, "run", side_effect=fake_run), + ): + result = runner.invoke(cli.ffx, ["upgrade", "--branch", "main"], input="y\n") + + self.assertEqual(0, result.exit_code, result.output) + self.assertIn("Tracked local changes detected in the bundle repository:", result.output) + self.assertEqual( + [ + ['git', 'status', '--porcelain', '--untracked-files=no'], + ['git', 'reset', '--hard', 'HEAD'], + 
['git', 'checkout', 'main'], + ['git', 'pull'], + [pip_path, 'install', '--editable', '.'], + ], + [call[0] for call in subprocess_calls], + ) + for args, kwargs in subprocess_calls[1:]: + self.assertEqual(repo_path, kwargs["cwd"], args) + + +if __name__ == "__main__": + unittest.main() From 0939a0c6c2d9288ad10234ae8f9ad114521e7ffc Mon Sep 17 00:00:00 2001 From: Javanaut Date: Sat, 11 Apr 2026 16:00:01 +0200 Subject: [PATCH 09/28] Optimizes ffprobe usage --- .gitignore | 5 + SCRATCHPAD.md | 32 +++---- requirements/tests.md | 15 ++- src/ffx/file_properties.py | 57 +++++------- tests/unit/test_file_properties_probe.py | 111 +++++++++++++++++++++++ 5 files changed, 161 insertions(+), 59 deletions(-) create mode 100644 tests/unit/test_file_properties_probe.py diff --git a/.gitignore b/.gitignore index 913e25a..cbc9287 100644 --- a/.gitignore +++ b/.gitignore @@ -16,3 +16,8 @@ dist/ .venv/ venv/ .codex + + +*.mkv +*.webm +ffmpeg2pass-0.log diff --git a/SCRATCHPAD.md b/SCRATCHPAD.md index 4864883..fa4d84b 100644 --- a/SCRATCHPAD.md +++ b/SCRATCHPAD.md @@ -11,6 +11,7 @@ - A first modern integration slice now exists under [`tests/integration/subtrack_mapping`](/home/osgw/.local/src/codex/ffx/tests/integration/subtrack_mapping). Remaining test-suite cleanup is now mostly about migrating and shrinking the legacy harness surface under [`tests/legacy`](/home/osgw/.local/src/codex/ffx/tests/legacy). - The CLI root now lazy-loads heavy runtime dependencies so lightweight commands such as `version`, `help`, `configure_workstation`, and `upgrade` stay import-light. - Shared CLI defaults for container/output tokens now live outside [`src/ffx/ffx_controller.py`](/home/osgw/.local/src/codex/ffx/src/ffx/ffx_controller.py), and a focused unit test locks in the lazy-import contract. 
+- `FileProperties` now uses one cached `ffprobe -show_format -show_streams -of json` call per source file, and the combined payload was confirmed against the Dragonball asset to satisfy both previous probe call sites fully. - FFX logger setup now reuses named handlers, and fallback logger access no longer mutates handlers in ordinary constructors and helpers. - The process wrapper now uses `subprocess.run(...)` with centralized command formatting plus stable timeout and missing-command error mapping. - Active ORM controllers now use single-query accessors instead of paired `count()` plus `first()` lookups. @@ -19,8 +20,7 @@ ## Focused Snapshot - Highest-leverage application optimizations: - - Collapse repeated `ffprobe` calls into a single probe result per source file. - - Revisit crop detection cost after the probe path is consolidated. + - Revisit crop detection cost now that the probe path is consolidated. - Highest-leverage repo and workflow optimizations: - Consolidate setup and upgrade tooling to reduce overlapping shell-script responsibilities. @@ -28,16 +28,7 @@ ## Optimization Candidates -1. Media probing does two separate `ffprobe` subprocesses per file -- [`src/ffx/file_properties.py`](/home/osgw/.local/src/codex/ffx/src/ffx/file_properties.py) calls `ffprobe` once for format data and once for stream data. -- Optimization: - - Use one probe call that requests both format and streams. - - Cache that result inside `FileProperties`. -- Expected value: - - Less subprocess overhead. - - Faster inspect and convert flows. - -2. Crop detection is always a full extra ffmpeg scan +1. Crop detection is always a full extra ffmpeg scan - [`src/ffx/file_properties.py`](/home/osgw/.local/src/codex/ffx/src/ffx/file_properties.py) runs a dedicated `ffmpeg -vf cropdetect` pass for each file when crop detection is requested. - Optimization: - Cache crop results for repeated runs on the same source. 
@@ -45,7 +36,7 @@ - Expected value: - Lower latency on repeated experimentation. -3. Tooling overlap and naming drift +2. Tooling overlap and naming drift - There are still overlapping workstation-setup entrypoints across [`tools/configure_workstation.sh`](/home/osgw/.local/src/codex/ffx/tools/configure_workstation.sh), [`tools/setup.sh`](/home/osgw/.local/src/codex/ffx/tools/setup.sh), and newer CLI maintenance commands. - Optimization: - Decide which scripts remain canonical. @@ -55,7 +46,7 @@ - Less operator confusion. - Fewer duplicated procedures to maintain. -4. Placeholder UI surfaces should either ship or disappear +3. Placeholder UI surfaces should either ship or disappear - [`src/ffx/help_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/help_screen.py) and [`src/ffx/settings_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/settings_screen.py) are placeholders. - Optimization: - Either remove them from the active UI surface or complete them. @@ -64,7 +55,7 @@ - Leaner interface. - Lower UX ambiguity. -5. Large Textual screens repeat configuration and controller loading +4. Large Textual screens repeat configuration and controller loading - Screens such as [`src/ffx/media_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/media_details_screen.py), [`src/ffx/pattern_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/pattern_details_screen.py), and [`src/ffx/show_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/show_details_screen.py) repeat setup patterns and local metadata filtering extraction. - Optimization: - Extract a shared screen base or helper for common config/controller/bootstrap logic. @@ -73,7 +64,7 @@ - Lower maintenance overhead. - Easier UI iteration. -6. Several helper functions are unfinished or dead-weight +5. Several helper functions are unfinished or dead-weight - [`src/ffx/helper.py`](/home/osgw/.local/src/codex/ffx/src/ffx/helper.py) contains `permutateList(...): pass`. 
- There are many combinator and conversion placeholders across tests and migrations. - Optimization: @@ -83,7 +74,7 @@ - Smaller mental model. - Less time spent re-evaluating inactive paths. -7. Test suite shape is expensive to understand and likely expensive to run +6. Test suite shape is expensive to understand and likely expensive to run - The project still carries a large legacy matrix of combinator files under [`tests/legacy`](/home/osgw/.local/src/codex/ffx/tests/legacy), several placeholder `pass` implementations, and at least one suspicious filename with an embedded space: [`tests/legacy/disposition_combinator_2_3 .py`](/home/osgw/.local/src/codex/ffx/tests/legacy/disposition_combinator_2_3 .py). - A first focused replacement slice now exists in [`tests/integration/subtrack_mapping/test_cli_bundle.py`](/home/osgw/.local/src/codex/ffx/tests/integration/subtrack_mapping/test_cli_bundle.py), so the remaining work is migration and consolidation rather than creating the modern test shape from scratch. - Optimization: @@ -94,7 +85,7 @@ - Faster contributor onboarding. - Easier CI adoption later. -8. Process resource limiting semantics could be clearer +7. Process resource limiting semantics could be clearer - [`src/ffx/process.py`](/home/osgw/.local/src/codex/ffx/src/ffx/process.py) prepends `nice` and `cpulimit` directly when values are set. - Optimization: - Validate and document effective behavior for combined `nice` + `cpulimit`. @@ -103,7 +94,7 @@ - Fewer surprises in production-like runs. - Easier support for user-reported performance behavior. -9. Regex and string utility cleanup +8. Regex and string utility cleanup - [`src/ffx/helper.py`](/home/osgw/.local/src/codex/ffx/src/ffx/helper.py) still has repeated string-replacement churn in filename/TMDB normalization helpers, and regex handling in helpers is easy to regress quietly. - Optimization: - Keep regex literals raw and centralized where appropriate. @@ -112,7 +103,7 @@ - Cleaner runtime output. 
- Less warning noise during dry-run maintenance commands. -10. Database startup always runs schema creation and version checks +9. Database startup always runs schema creation and version checks - [`src/ffx/database.py`](/home/osgw/.local/src/codex/ffx/src/ffx/database.py) runs `Base.metadata.create_all(...)` and version checks every time a DB-backed context is created. - Optimization: - Measure startup cost and consider separating bootstrapping from ordinary command execution. @@ -136,7 +127,6 @@ 1. Triage the list into quick wins, medium refactors, and long-horizon cleanup. 2. Tackle the cheapest high-impact items first: - - single-call `ffprobe` refactor. - crop detection sampling or caching pass. 3. Decide which setup and upgrade entrypoints stay canonical before adding more maintenance surface. diff --git a/requirements/tests.md b/requirements/tests.md index 933f28e..61c269c 100644 --- a/requirements/tests.md +++ b/requirements/tests.md @@ -7,9 +7,16 @@ Detailed product rules for source-to-target subtrack mapping live in `requirements/subtrack_mapping.md`. This file describes only how tests cover that area. +## Interpreter Requirement + +- Agents shall run Python-side test commands with `~/.local/share/ffx.venv/bin/python`. +- This applies to the legacy harness, `unittest`, `pytest`, helper scripts, and `python -m ffx ...` test invocations. +- Agents shall not silently substitute `python`, `python3`, or another interpreter for Python-side test work. +- If `~/.local/share/ffx.venv/bin/python` is missing or not executable, agents shall stop and report the missing venv instead of continuing with Python-side test execution. + ## Current Harness -- Entrypoint: `python tests/legacy_runner.py run` +- Entrypoint: `~/.local/share/ffx.venv/bin/python tests/legacy_runner.py run` - Runner style: custom Click CLI, not `pytest` or `unittest` - Commands: - `run`: discover scenario files, instantiate each scenario, run yielded jobs @@ -35,7 +42,7 @@ that area. 
- inputs per job: `1` - jobs: `140` - expected failures: `0` - - execution: build one synthetic source file, run `python -m ffx convert`, assert filename selectors only + - execution: build one synthetic source file, run `~/.local/share/ffx.venv/bin/python -m ffx convert`, assert filename selectors only - selectors executed: `B`, `L`, `I` - selectors defined but not executed: `S`, `R` - `2`: `tests/legacy/scenario_2.py` @@ -43,7 +50,7 @@ that area. - inputs per job: `1` - jobs: `8193` - expected failures: `3267` - - execution: build one synthetic source file, run `python -m ffx convert`, probe result with `FileProperties`, assert track layout and selected audio and subtitle metadata + - execution: build one synthetic source file, run `~/.local/share/ffx.venv/bin/python -m ffx convert`, probe result with `FileProperties`, assert track layout and selected audio and subtitle metadata - selectors executed: `M`, `AD`, `AT`, `SD`, `ST` - selectors defined but not executed: `MT`, `AP`, `SP`, `J` - `4`: `tests/legacy/scenario_4.py` @@ -51,7 +58,7 @@ that area. 
- inputs per job: `6` - jobs: `768` - expected failures: `336` - - execution: build six synthetic preset files, recreate temp SQLite DB, insert show and pattern, run one batch convert command, query TMDB during assertions + - execution: build six synthetic preset files, recreate temp SQLite DB, insert show and pattern, run one batch convert command via `~/.local/share/ffx.venv/bin/python`, query TMDB during assertions - selectors executed: `M`, `AD`, `AT`, `SD`, `ST` - selectors defined but not executed: `MT`, `AP`, `SP`, `J` - notes: diff --git a/src/ffx/file_properties.py b/src/ffx/file_properties.py index 09f676e..1b45a06 100644 --- a/src/ffx/file_properties.py +++ b/src/ffx/file_properties.py @@ -13,6 +13,7 @@ from ffx.model.pattern import Pattern class FileProperties(): FILE_EXTENSIONS = ['mkv', 'mp4', 'avi', 'flv', 'webm'] + FFPROBE_COMMAND_TOKENS = ["ffprobe", "-hide_banner", "-show_format", "-show_streams", "-of", "json"] SE_INDICATOR_PATTERN = '([sS][0-9]+[eE][0-9]+)' SEASON_EPISODE_INDICATOR_MATCH = '[sS]([0-9]+)[eE]([0-9]+)' @@ -78,6 +79,26 @@ class FileProperties(): self.__season = -1 self.__episode = -1 + self.__ffprobeData = None + + + def _getFfprobeData(self): + if self.__ffprobeData is not None: + return self.__ffprobeData + + ffprobeOutput, ffprobeError, returnCode = executeProcess( + FileProperties.FFPROBE_COMMAND_TOKENS + [self.__sourcePath] + ) + + if 'Invalid data found when processing input' in ffprobeError: + raise Exception(f"File {self.__sourcePath} does not contain valid stream data") + + if returnCode != 0: + raise Exception(f"ffprobe returned with error {returnCode}") + + self.__ffprobeData = json.loads(ffprobeOutput) + return self.__ffprobeData + def getFormatData(self): """ @@ -99,22 +120,7 @@ class FileProperties(): } } """ - - # ffprobe -hide_banner -show_format -of json - ffprobeOutput, ffprobeError, returnCode = executeProcess(["ffprobe", - "-hide_banner", - "-show_format", - "-of", "json", - self.__sourcePath]) #, - #context = 
self.context) - - if 'Invalid data found when processing input' in ffprobeError: - raise Exception(f"File {self.__sourcePath} does not contain valid stream data") - - if returnCode != 0: - raise Exception(f"ffprobe returned with error {returnCode}") - - return json.loads(ffprobeOutput)['format'] + return self._getFfprobeData()['format'] def getStreamData(self): @@ -159,24 +165,7 @@ class FileProperties(): } } """ - - # ffprobe -hide_banner -show_streams -of json - ffprobeOutput, ffprobeError, returnCode = executeProcess(["ffprobe", - "-hide_banner", - "-show_streams", - "-of", "json", - self.__sourcePath]) #, - #context = self.context) - - if 'Invalid data found when processing input' in ffprobeError: - raise Exception(f"File {self.__sourcePath} does not contain valid stream data") - - - if returnCode != 0: - raise Exception(f"ffprobe returned with error {returnCode}") - - - return json.loads(ffprobeOutput)['streams'] + return self._getFfprobeData()['streams'] diff --git a/tests/unit/test_file_properties_probe.py b/tests/unit/test_file_properties_probe.py new file mode 100644 index 0000000..6961d0a --- /dev/null +++ b/tests/unit/test_file_properties_probe.py @@ -0,0 +1,111 @@ +from __future__ import annotations + +import json +import logging +from pathlib import Path +import sys +import unittest +from unittest.mock import patch + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +class StaticConfig: + def getData(self): + return {} + + +class DummyPatternController: + def __init__(self, context): + self.context = context + + def matchFilename(self, filename): + return {} + + +def make_logger(name: str) -> logging.Logger: + logger = logging.getLogger(name) + logger.handlers = [] + logger.setLevel(logging.DEBUG) + logger.propagate = False + logger.addHandler(logging.NullHandler()) + return logger + + +class FilePropertiesProbeTests(unittest.TestCase): + def import_module(self): + 
try: + import ffx.file_properties as file_properties_module + except ModuleNotFoundError as ex: + if ex.name == "sqlalchemy": + self.skipTest("sqlalchemy is not installed in this environment") + raise + return file_properties_module + + def make_context(self): + return { + "logger": make_logger("ffx-test-file-properties-probe"), + "config": StaticConfig(), + "database": {"session": object()}, + "use_pattern": False, + } + + def sample_probe_data(self): + return { + "format": { + "filename": "/tmp/example_s01e01.mkv", + "nb_streams": 2, + "format_name": "matroska,webm", + }, + "streams": [ + { + "index": 0, + "codec_name": "h264", + "codec_type": "video", + "disposition": {"default": 1}, + "tags": {}, + }, + { + "index": 1, + "codec_name": "aac", + "codec_type": "audio", + "channel_layout": "stereo", + "channels": 2, + "disposition": {"default": 0}, + "tags": {"language": "eng"}, + }, + ], + } + + def test_format_and_stream_accessors_share_one_combined_probe(self): + file_properties_module = self.import_module() + probe_output = self.sample_probe_data() + + with ( + patch.object(file_properties_module, "PatternController", DummyPatternController), + patch.object( + file_properties_module, + "executeProcess", + return_value=(json.dumps(probe_output), "", 0), + ) as mocked_execute, + ): + file_properties = file_properties_module.FileProperties( + self.make_context(), + "/tmp/example_s01e01.mkv", + ) + + self.assertEqual(probe_output["format"], file_properties.getFormatData()) + self.assertEqual(probe_output["streams"], file_properties.getStreamData()) + + mocked_execute.assert_called_once_with( + file_properties_module.FileProperties.FFPROBE_COMMAND_TOKENS + + ["/tmp/example_s01e01.mkv"] + ) + + +if __name__ == "__main__": + unittest.main() From fc729a241436b2009c0c8dcc5cde4a4488d0889e Mon Sep 17 00:00:00 2001 From: Javanaut Date: Sat, 11 Apr 2026 16:04:54 +0200 Subject: [PATCH 10/28] Opt database bootstrapping --- SCRATCHPAD.md | 10 +---- src/ffx/database.py | 18 
+++++++- tests/unit/test_database.py | 83 +++++++++++++++++++++++++++++++++++++ 3 files changed, 100 insertions(+), 11 deletions(-) create mode 100644 tests/unit/test_database.py diff --git a/SCRATCHPAD.md b/SCRATCHPAD.md index fa4d84b..a1ba86a 100644 --- a/SCRATCHPAD.md +++ b/SCRATCHPAD.md @@ -12,6 +12,7 @@ - The CLI root now lazy-loads heavy runtime dependencies so lightweight commands such as `version`, `help`, `configure_workstation`, and `upgrade` stay import-light. - Shared CLI defaults for container/output tokens now live outside [`src/ffx/ffx_controller.py`](/home/osgw/.local/src/codex/ffx/src/ffx/ffx_controller.py), and a focused unit test locks in the lazy-import contract. - `FileProperties` now uses one cached `ffprobe -show_format -show_streams -of json` call per source file, and the combined payload was confirmed against the Dragonball asset to satisfy both previous probe call sites fully. +- Database startup now bootstraps schema only when required tables are actually missing, while version enforcement still runs on ordinary DB-backed context creation. - FFX logger setup now reuses named handlers, and fallback logger access no longer mutates handlers in ordinary constructors and helpers. - The process wrapper now uses `subprocess.run(...)` with centralized command formatting plus stable timeout and missing-command error mapping. - Active ORM controllers now use single-query accessors instead of paired `count()` plus `first()` lookups. @@ -103,15 +104,6 @@ - Cleaner runtime output. - Less warning noise during dry-run maintenance commands. -9. Database startup always runs schema creation and version checks -- [`src/ffx/database.py`](/home/osgw/.local/src/codex/ffx/src/ffx/database.py) runs `Base.metadata.create_all(...)` and version checks every time a DB-backed context is created. -- Optimization: - - Measure startup cost and consider separating bootstrapping from ordinary command execution. - - Keep schema migration/version enforcement explicit. 
-- Expected value: - - Faster command startup. - - Clearer operational boundaries. - ## Open - Should optimization work focus first on operator-perceived latency, internal maintainability, or correctness-risk cleanup that also has performance upside? diff --git a/src/ffx/database.py b/src/ffx/database.py index 10430e3..3d5e551 100644 --- a/src/ffx/database.py +++ b/src/ffx/database.py @@ -1,6 +1,6 @@ import os, click -from sqlalchemy import create_engine +from sqlalchemy import create_engine, inspect from sqlalchemy.orm import sessionmaker # Import the full model package so SQLAlchemy registers every mapped class @@ -14,6 +14,7 @@ from ffx.constants import DATABASE_VERSION DATABASE_VERSION_KEY = 'database_version' +EXPECTED_TABLE_NAMES = set(Base.metadata.tables.keys()) class DatabaseVersionException(Exception): def __init__(self, errorMessage): @@ -37,7 +38,7 @@ def databaseContext(databasePath: str = ''): databaseContext['engine'] = create_engine(databaseContext['url']) databaseContext['session'] = sessionmaker(bind=databaseContext['engine']) - Base.metadata.create_all(databaseContext['engine']) + bootstrapDatabaseIfNeeded(databaseContext) # isSyncronuous = False # while not isSyncronuous: @@ -54,6 +55,19 @@ def databaseContext(databasePath: str = ''): return databaseContext + +def databaseNeedsBootstrap(databaseContext) -> bool: + inspector = inspect(databaseContext['engine']) + existingTableNames = set(inspector.get_table_names()) + return not EXPECTED_TABLE_NAMES.issubset(existingTableNames) + + +def bootstrapDatabaseIfNeeded(databaseContext): + if not databaseNeedsBootstrap(databaseContext): + return + + Base.metadata.create_all(databaseContext['engine']) + def ensureDatabaseVersion(databaseContext): currentDatabaseVersion = getDatabaseVersion(databaseContext) diff --git a/tests/unit/test_database.py b/tests/unit/test_database.py new file mode 100644 index 0000000..27fa2da --- /dev/null +++ b/tests/unit/test_database.py @@ -0,0 +1,83 @@ +from __future__ 
import annotations + +from pathlib import Path +import sys +import tempfile +import unittest +from unittest.mock import patch + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx.constants import DATABASE_VERSION # noqa: E402 +from ffx.database import DATABASE_VERSION_KEY, databaseContext, getDatabaseVersion # noqa: E402 +from ffx.model.property import Property # noqa: E402 +from ffx.model.show import Base # noqa: E402 + + +class DatabaseContextTests(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + self.database_path = Path(self.tempdir.name) / "ffx-test.db" + + def tearDown(self): + self.tempdir.cleanup() + + def test_database_context_bootstraps_new_database_with_current_version(self): + with patch("ffx.database.Base.metadata.create_all", wraps=Base.metadata.create_all) as mocked_create_all: + context = databaseContext(str(self.database_path)) + try: + self.assertTrue(self.database_path.exists()) + self.assertEqual(DATABASE_VERSION, getDatabaseVersion(context)) + finally: + context["engine"].dispose() + + mocked_create_all.assert_called_once() + + def test_database_context_skips_create_all_when_schema_is_already_present(self): + initial_context = databaseContext(str(self.database_path)) + initial_context["engine"].dispose() + + with patch("ffx.database.Base.metadata.create_all") as mocked_create_all: + context = databaseContext(str(self.database_path)) + try: + self.assertEqual(DATABASE_VERSION, getDatabaseVersion(context)) + finally: + context["engine"].dispose() + + mocked_create_all.assert_not_called() + + def test_database_context_restores_missing_version_property_without_schema_bootstrap(self): + context = databaseContext(str(self.database_path)) + Session = context["session"] + try: + session = Session() + try: + version_row = ( + session.query(Property) + .filter(Property.key == DATABASE_VERSION_KEY) + .first() + ) + 
session.delete(version_row) + session.commit() + finally: + session.close() + finally: + context["engine"].dispose() + + with patch("ffx.database.Base.metadata.create_all") as mocked_create_all: + reopened_context = databaseContext(str(self.database_path)) + try: + self.assertEqual(DATABASE_VERSION, getDatabaseVersion(reopened_context)) + finally: + reopened_context["engine"].dispose() + + mocked_create_all.assert_not_called() + + +if __name__ == "__main__": + unittest.main() From 358ef18f7768afb4bcfa2137fd853e5812ee346a Mon Sep 17 00:00:00 2001 From: Javanaut Date: Sat, 11 Apr 2026 16:10:41 +0200 Subject: [PATCH 11/28] Fix regex issues --- SCRATCHPAD.md | 10 +----- src/ffx/helper.py | 66 ++++++++++++++++++++++----------------- tests/unit/test_helper.py | 61 ++++++++++++++++++++++++++++++++++++ 3 files changed, 100 insertions(+), 37 deletions(-) create mode 100644 tests/unit/test_helper.py diff --git a/SCRATCHPAD.md b/SCRATCHPAD.md index a1ba86a..28da596 100644 --- a/SCRATCHPAD.md +++ b/SCRATCHPAD.md @@ -13,6 +13,7 @@ - Shared CLI defaults for container/output tokens now live outside [`src/ffx/ffx_controller.py`](/home/osgw/.local/src/codex/ffx/src/ffx/ffx_controller.py), and a focused unit test locks in the lazy-import contract. - `FileProperties` now uses one cached `ffprobe -show_format -show_streams -of json` call per source file, and the combined payload was confirmed against the Dragonball asset to satisfy both previous probe call sites fully. - Database startup now bootstraps schema only when required tables are actually missing, while version enforcement still runs on ordinary DB-backed context creation. +- Helper filename and rich-text utilities now use compiled raw regexes plus translate-based filename filtering, with unit coverage for TMDB suffix rewriting and Rich color stripping. - FFX logger setup now reuses named handlers, and fallback logger access no longer mutates handlers in ordinary constructors and helpers. 
- The process wrapper now uses `subprocess.run(...)` with centralized command formatting plus stable timeout and missing-command error mapping. - Active ORM controllers now use single-query accessors instead of paired `count()` plus `first()` lookups. @@ -95,15 +96,6 @@ - Fewer surprises in production-like runs. - Easier support for user-reported performance behavior. -8. Regex and string utility cleanup -- [`src/ffx/helper.py`](/home/osgw/.local/src/codex/ffx/src/ffx/helper.py) still has repeated string-replacement churn in filename/TMDB normalization helpers, and regex handling in helpers is easy to regress quietly. -- Optimization: - - Keep regex literals raw and centralized where appropriate. - - Review filename and TMDB substitution helpers for repeated string churn. -- Expected value: - - Cleaner runtime output. - - Less warning noise during dry-run maintenance commands. - ## Open - Should optimization work focus first on operator-perceived latency, internal maintainability, or correctness-risk cleanup that also has performance upside? 
diff --git a/src/ffx/helper.py b/src/ffx/helper.py index cbb8e46..742dbc1 100644 --- a/src/ffx/helper.py +++ b/src/ffx/helper.py @@ -16,7 +16,21 @@ DIFF_REMOVED_KEY = 'removed' DIFF_CHANGED_KEY = 'changed' DIFF_UNCHANGED_KEY = 'unchanged' -RICH_COLOR_PATTERN = '\\[[a-z_]+\\](.+)\\[\\/[a-z_]+\\]' +FILENAME_FILTER_TRANSLATION = str.maketrans( + { + "/": "-", + ":": ";", + "*": "", + "'": "", + "?": "#", + "♥": "", + "’": "", + } +) +TMDB_FILLER_MARKERS = (" (*)", "(*)") +TMDB_EPISODE_RANGE_SUFFIX_REGEX = re.compile(r"\(([0-9]+)[-/]([0-9]+)\)$") +TMDB_EPISODE_PART_SUFFIX_REGEX = re.compile(r"\(([0-9]+)\)$") +RICH_COLOR_REGEX = re.compile(r"\[[a-z_]+\](.+)\[/[a-z_]+\]") def dictDiff(a : dict, b : dict, ignoreKeys: list = [], removeKeys: list = []): @@ -115,39 +129,35 @@ def filterFilename(fileName: str) -> str: """This filter replaces charactes from TMDB responses with characters less problemating when using in filenames or removes them""" - fileName = str(fileName).replace('/', '-') - fileName = str(fileName).replace(':', ';') - fileName = str(fileName).replace('*', '') - fileName = str(fileName).replace("'", '') - fileName = str(fileName).replace("?", '#') - fileName = str(fileName).replace('♥', '') - fileName = str(fileName).replace('’', '') - - return fileName.strip() + return str(fileName).translate(FILENAME_FILTER_TRANSLATION).strip() def substituteTmdbFilename(fileName: str) -> str: """If chaining this method with filterFilename use this one first as the latter will destroy some patterns""" - # This indicates filler episodes in TMDB episode names - fileName = str(fileName).replace(' (*)', '') - fileName = str(fileName).replace('(*)', '') + normalizedFileName = str(fileName) - # This indicates the index of multi-episode files - episodePartMatch = re.search("\\(([0-9]+)\\)$", fileName) + for fillerMarker in TMDB_FILLER_MARKERS: + normalizedFileName = normalizedFileName.replace(fillerMarker, '') + + episodeRangeMatch = 
TMDB_EPISODE_RANGE_SUFFIX_REGEX.search(normalizedFileName) + if episodeRangeMatch is not None: + partFirstIndex, partLastIndex = episodeRangeMatch.groups() + return TMDB_EPISODE_RANGE_SUFFIX_REGEX.sub( + f"Teil {partFirstIndex}-{partLastIndex}", + normalizedFileName, + count=1, + ) + + episodePartMatch = TMDB_EPISODE_PART_SUFFIX_REGEX.search(normalizedFileName) if episodePartMatch is not None: - partSuffix = str(episodePartMatch.group(0)) - partIndex = episodePartMatch.groups()[0] - fileName = str(fileName).replace(partSuffix, f"Teil {partIndex}") + partIndex = episodePartMatch.group(1) + return TMDB_EPISODE_PART_SUFFIX_REGEX.sub( + f"Teil {partIndex}", + normalizedFileName, + count=1, + ) - # Also multi-episodes with first and last episode index - episodePartMatch = re.search("\\(([0-9]+)[-\\/]([0-9]+)\\)$", fileName) - if episodePartMatch is not None: - partSuffix = str(episodePartMatch.group(0)) - partFirstIndex = episodePartMatch.groups()[0] - partLastIndex = episodePartMatch.groups()[1] - fileName = str(fileName).replace(partSuffix, f"Teil {partFirstIndex}-{partLastIndex}") - - return fileName + return normalizedFileName def getEpisodeFileBasename(showName, @@ -231,7 +241,7 @@ def formatRichColor(text: str, color: str = None): return f"[{color}]{text}[/{color}]" def removeRichColor(text: str): - richColorMatch = re.search(RICH_COLOR_PATTERN, text) + richColorMatch = RICH_COLOR_REGEX.search(str(text)) if richColorMatch is None: return text else: diff --git a/tests/unit/test_helper.py b/tests/unit/test_helper.py new file mode 100644 index 0000000..450877d --- /dev/null +++ b/tests/unit/test_helper.py @@ -0,0 +1,61 @@ +from __future__ import annotations + +from pathlib import Path +import sys +import unittest + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx.helper import ( # noqa: E402 + filterFilename, + formatRichColor, + removeRichColor, + substituteTmdbFilename, 
+) + + +class HelperTests(unittest.TestCase): + def test_filter_filename_replaces_and_removes_problem_characters(self): + self.assertEqual( + "A-B;C#", + filterFilename(" A/B:C*'?♥’ "), + ) + + def test_substitute_tmdb_filename_removes_filler_marker(self): + self.assertEqual( + "Episode Name", + substituteTmdbFilename("Episode Name (*)"), + ) + + def test_substitute_tmdb_filename_rewrites_single_episode_suffix(self): + self.assertEqual( + "Episode Name Teil 2", + substituteTmdbFilename("Episode Name (2)"), + ) + + def test_substitute_tmdb_filename_rewrites_episode_range_suffix(self): + self.assertEqual( + "Episode Name Teil 2-3", + substituteTmdbFilename("Episode Name (2/3)"), + ) + + def test_remove_rich_color_returns_inner_text(self): + self.assertEqual( + "value", + removeRichColor(formatRichColor("value", "green")), + ) + + def test_remove_rich_color_leaves_plain_text_unchanged(self): + self.assertEqual( + "plain text", + removeRichColor("plain text"), + ) + + +if __name__ == "__main__": + unittest.main() From 52c6462fa8a6cfa0785d0ae7e5be5bfa82ad843a Mon Sep 17 00:00:00 2001 From: Javanaut Date: Sat, 11 Apr 2026 16:21:17 +0200 Subject: [PATCH 12/28] Optimizes niceness and cpulimit usage --- SCRATCHPAD.md | 10 +---- requirements/architecture.md | 2 +- requirements/project.md | 4 ++ src/ffx/cli.py | 73 +++++++++++++++++++++++++++++++--- src/ffx/process.py | 65 +++++++++++++++++++++++++----- tests/unit/test_cli_upgrade.py | 1 + tests/unit/test_process.py | 36 +++++++++++++++++ 7 files changed, 164 insertions(+), 27 deletions(-) diff --git a/SCRATCHPAD.md b/SCRATCHPAD.md index 28da596..1315a27 100644 --- a/SCRATCHPAD.md +++ b/SCRATCHPAD.md @@ -14,6 +14,7 @@ - `FileProperties` now uses one cached `ffprobe -show_format -show_streams -of json` call per source file, and the combined payload was confirmed against the Dragonball asset to satisfy both previous probe call sites fully. 
- Database startup now bootstraps schema only when required tables are actually missing, while version enforcement still runs on ordinary DB-backed context creation. - Helper filename and rich-text utilities now use compiled raw regexes plus translate-based filename filtering, with unit coverage for TMDB suffix rewriting and Rich color stripping. +- Process resource limiting now has explicit disabled/default states in the CLI and requirements, and combined CPU-plus-niceness wrapping now executes as `cpulimit -- nice -n ... ` instead of a less explicit prefix chain. - FFX logger setup now reuses named handlers, and fallback logger access no longer mutates handlers in ordinary constructors and helpers. - The process wrapper now uses `subprocess.run(...)` with centralized command formatting plus stable timeout and missing-command error mapping. - Active ORM controllers now use single-query accessors instead of paired `count()` plus `first()` lookups. @@ -87,15 +88,6 @@ - Faster contributor onboarding. - Easier CI adoption later. -7. Process resource limiting semantics could be clearer -- [`src/ffx/process.py`](/home/osgw/.local/src/codex/ffx/src/ffx/process.py) prepends `nice` and `cpulimit` directly when values are set. -- Optimization: - - Validate and document effective behavior for combined `nice` + `cpulimit`. - - Consider explicit no-limit vs configured-limit states in the CLI and requirements. -- Expected value: - - Fewer surprises in production-like runs. - - Easier support for user-reported performance behavior. - ## Open - Should optimization work focus first on operator-perceived latency, internal maintainability, or correctness-risk cleanup that also has performance upside? 
diff --git a/requirements/architecture.md b/requirements/architecture.md index 4e7f8e9..e5c86b4 100644 --- a/requirements/architecture.md +++ b/requirements/architecture.md @@ -42,7 +42,7 @@ - SQLite via SQLAlchemy ORM, with schema rooted in shows, patterns, tracks, media tags, track tags, shifted seasons, and generic properties. - A configuration JSON file supplies optional path, metadata-filtering, and filename-template settings. - Integration adapters: - - Process execution wrapper for `ffmpeg`, `ffprobe`, `nice`, and `cpulimit`. + - Process execution wrapper for `ffmpeg`, `ffprobe`, `nice`, and `cpulimit`, with explicit disabled states for niceness and CPU limiting and a combined `cpulimit -- nice -n ... ` execution shape when both limits are configured. - HTTP adapter for TMDB via `requests`. ## Data And Interface Notes diff --git a/requirements/project.md b/requirements/project.md index 2e8130c..2c38517 100644 --- a/requirements/project.md +++ b/requirements/project.md @@ -62,6 +62,10 @@ - The system shall support optional TMDB lookups to resolve show names, years, and episode titles when a show ID, season, and episode are available. - The system shall generate output filenames from show metadata, season and episode indices, and episode names using the configured filename template. - The system shall allow CLI overrides for stream languages, stream titles, default and forced tracks, stream order, TMDB show and episode data, output directory, label prefix, and processing resource limits. +- Processing resource limit rules: + - `--nice` shall accept niceness values from `-20` through `19`; omitting the option shall disable niceness adjustment. + - `--cpu` shall accept CPU limit values from `1` through `99`; omitting the option shall disable CPU limiting. + - When both limits are configured, the process wrapper shall execute the target command through `cpulimit` around a `nice -n ...` invocation so both limits apply to the launched media command. 
- The system shall support extracting streams into separate files via `unmux` and reporting suggested crop parameters via `cropdetect`. - The system shall handle invalid input and system failures gracefully by logging warnings or raising `click` errors for missing files, invalid media, missing TMDB credentials, incompatible database versions, and ambiguous track dispositions when prompting is disabled. diff --git a/src/ffx/cli.py b/src/ffx/cli.py index 189c198..4854297 100755 --- a/src/ffx/cli.py +++ b/src/ffx/cli.py @@ -32,6 +32,24 @@ if TYPE_CHECKING: LIGHTWEIGHT_COMMANDS = {None, 'version', 'help', 'configure_workstation', 'upgrade'} +def normalizeNicenessOption(ctx, param, value): + from ffx.process import normalizeNiceness + + try: + return normalizeNiceness(value) + except ValueError as ex: + raise click.BadParameter(str(ex)) from ex + + +def normalizeCpuOption(ctx, param, value): + from ffx.process import normalizeCpuPercent + + try: + return normalizeCpuPercent(value) + except ValueError as ex: + raise click.BadParameter(str(ex)) from ex + + @click.group() @click.pass_context @@ -194,6 +212,7 @@ def upgrade(ctx, branch): commandSequences += [ ['git', 'pull'], + [bundlePipPath, 'install', '--upgrade', 'pip', 'setuptools', 'wheel'], [bundlePipPath, 'install', '--editable', '.'], ] @@ -257,8 +276,22 @@ def getUnmuxSequence(trackDescriptor: TrackDescriptor, sourcePath, targetPrefix, @click.option('-l', '--label', type=str, default='', help='Label to be used as filename prefix') @click.option("-o", "--output-directory", type=str, default='') @click.option("-s", "--subtitles-only", is_flag=True, default=False) -@click.option('--nice', type=int, default=99, help='Niceness of started processes') -@click.option('--cpu', type=int, default=0, help='Limit CPU for started processes to percent') +@click.option( + '--nice', + type=int, + default=None, + callback=normalizeNicenessOption, + show_default='disabled', + help='Adjust niceness of started processes (-20..19). 
Omit to disable; 99 also disables.', +) +@click.option( + '--cpu', + type=int, + default=None, + callback=normalizeCpuOption, + show_default='disabled', + help='Limit CPU percent of started processes (1..99). Omit to disable; 0 also disables.', +) def unmux(ctx, paths, label, @@ -334,8 +367,22 @@ def unmux(ctx, @click.pass_context @click.argument('paths', nargs=-1) -@click.option('--nice', type=int, default=99, help='Niceness of started processes') -@click.option('--cpu', type=int, default=0, help='Limit CPU for started processes to percent') +@click.option( + '--nice', + type=int, + default=None, + callback=normalizeNicenessOption, + show_default='disabled', + help='Adjust niceness of started processes (-20..19). Omit to disable; 99 also disables.', +) +@click.option( + '--cpu', + type=int, + default=None, + callback=normalizeCpuOption, + show_default='disabled', + help='Limit CPU percent of started processes (1..99). Omit to disable; 0 also disables.', +) def cropdetect(ctx, paths, nice, @@ -479,8 +526,22 @@ def checkUniqueDispositions(context, mediaDescriptor: MediaDescriptor): @click.option("--no-signature", is_flag=True, default=False) @click.option("--keep-mkvmerge-metadata", is_flag=True, default=False) -@click.option('--nice', type=int, default=99, help='Niceness of started processes') -@click.option('--cpu', type=int, default=0, help='Limit CPU for started processes to percent') +@click.option( + '--nice', + type=int, + default=None, + callback=normalizeNicenessOption, + show_default='disabled', + help='Adjust niceness of started processes (-20..19). Omit to disable; 99 also disables.', +) +@click.option( + '--cpu', + type=int, + default=None, + callback=normalizeCpuOption, + show_default='disabled', + help='Limit CPU percent of started processes (1..99). 
Omit to disable; 0 also disables.', +) @click.option('--rename-only', is_flag=True, default=False, help='Only renaming, no recoding') diff --git a/src/ffx/process.py b/src/ffx/process.py index 7db5492..c186f9c 100644 --- a/src/ffx/process.py +++ b/src/ffx/process.py @@ -6,29 +6,72 @@ from .logging_utils import get_ffx_logger COMMAND_TIMED_OUT_RETURN_CODE = 124 COMMAND_NOT_FOUND_RETURN_CODE = 127 +MIN_NICENESS = -20 +MAX_NICENESS = 19 +DISABLED_NICENESS_SENTINEL = 99 +MIN_CPU_PERCENT = 1 +MAX_CPU_PERCENT = 99 +DISABLED_CPU_PERCENT_SENTINEL = 0 def formatCommandSequence(commandSequence: Iterable[str]) -> str: return shlex.join([str(token) for token in commandSequence]) +def normalizeNiceness(niceness) -> int | None: + if niceness is None: + return None + + niceness = int(niceness) + if niceness == DISABLED_NICENESS_SENTINEL: + return None + + if niceness < MIN_NICENESS or niceness > MAX_NICENESS: + raise ValueError( + f"Niceness must be between {MIN_NICENESS} and {MAX_NICENESS}, " + + f"or {DISABLED_NICENESS_SENTINEL} to disable." + ) + + return niceness + + +def normalizeCpuPercent(cpuPercent) -> int | None: + if cpuPercent is None: + return None + + cpuPercent = int(cpuPercent) + if cpuPercent == DISABLED_CPU_PERCENT_SENTINEL: + return None + + if cpuPercent < MIN_CPU_PERCENT or cpuPercent > MAX_CPU_PERCENT: + raise ValueError( + f"CPU limit must be between {MIN_CPU_PERCENT} and {MAX_CPU_PERCENT}, " + + f"or {DISABLED_CPU_PERCENT_SENTINEL} to disable." 
+ ) + + return cpuPercent + + def getWrappedCommandSequence(commandSequence: List[str], context: dict = None) -> List[str]: """ - niceness -20 bis +19 - cpu_percent: 1 bis 99 + niceness: -20 to 19, disabled when unset + cpu_percent: 1 to 99, disabled when unset + + When both limits are configured, cpulimit wraps a nice-adjusted command: + cpulimit -l -- nice -n """ - niceSequence = [] + resourceLimits = (context or {}).get('resource_limits', {}) + niceness = normalizeNiceness(resourceLimits.get('niceness')) + cpu_percent = normalizeCpuPercent(resourceLimits.get('cpu_percent')) + wrappedCommandSequence = [str(token) for token in commandSequence] - niceness = int((context or {}).get('resource_limits', {}).get('niceness', 99)) - cpu_percent = int((context or {}).get('resource_limits', {}).get('cpu_percent', 0)) + if niceness is not None: + wrappedCommandSequence = ['nice', '-n', str(niceness)] + wrappedCommandSequence + if cpu_percent is not None: + wrappedCommandSequence = ['cpulimit', '-l', str(cpu_percent), '--'] + wrappedCommandSequence - if niceness >= -20 and niceness <= 19: - niceSequence += ['nice', '-n', str(niceness)] - if cpu_percent >= 1: - niceSequence += ['cpulimit', '-l', str(cpu_percent), '--'] - - return niceSequence + [str(token) for token in commandSequence] + return wrappedCommandSequence def getProcessTimeoutSeconds(context: dict = None, timeoutSeconds: float = None): diff --git a/tests/unit/test_cli_upgrade.py b/tests/unit/test_cli_upgrade.py index 90211ba..d392f27 100644 --- a/tests/unit/test_cli_upgrade.py +++ b/tests/unit/test_cli_upgrade.py @@ -87,6 +87,7 @@ class UpgradeCommandTests(unittest.TestCase): ['git', 'reset', '--hard', 'HEAD'], ['git', 'checkout', 'main'], ['git', 'pull'], + [pip_path, 'install', '--upgrade', 'pip', 'setuptools', 'wheel'], [pip_path, 'install', '--editable', '.'], ], [call[0] for call in subprocess_calls], diff --git a/tests/unit/test_process.py b/tests/unit/test_process.py index a379444..62ea007 100644 --- 
a/tests/unit/test_process.py +++ b/tests/unit/test_process.py @@ -15,6 +15,9 @@ from ffx.process import ( # noqa: E402 COMMAND_NOT_FOUND_RETURN_CODE, COMMAND_TIMED_OUT_RETURN_CODE, executeProcess, + getWrappedCommandSequence, + normalizeCpuPercent, + normalizeNiceness, ) @@ -47,6 +50,39 @@ class ProcessTests(unittest.TestCase): self.assertIn("Command timed out", err) self.assertIn(sys.executable, err) + def test_get_wrapped_command_sequence_leaves_command_unwrapped_when_limits_disabled(self): + wrapped = getWrappedCommandSequence( + ["ffmpeg", "-i", "input.mkv"], + context={"resource_limits": {"niceness": None, "cpu_percent": None}}, + ) + + self.assertEqual(["ffmpeg", "-i", "input.mkv"], wrapped) + + def test_get_wrapped_command_sequence_wraps_nice_when_configured(self): + wrapped = getWrappedCommandSequence( + ["ffmpeg", "-i", "input.mkv"], + context={"resource_limits": {"niceness": 5, "cpu_percent": None}}, + ) + + self.assertEqual(["nice", "-n", "5", "ffmpeg", "-i", "input.mkv"], wrapped) + + def test_get_wrapped_command_sequence_wraps_cpulimit_around_nice_when_both_configured(self): + wrapped = getWrappedCommandSequence( + ["ffmpeg", "-i", "input.mkv"], + context={"resource_limits": {"niceness": 5, "cpu_percent": 42}}, + ) + + self.assertEqual( + ["cpulimit", "-l", "42", "--", "nice", "-n", "5", "ffmpeg", "-i", "input.mkv"], + wrapped, + ) + + def test_normalize_niceness_accepts_disabled_sentinel(self): + self.assertIsNone(normalizeNiceness(99)) + + def test_normalize_cpu_percent_accepts_disabled_sentinel(self): + self.assertIsNone(normalizeCpuPercent(0)) + if __name__ == "__main__": unittest.main() From 609f93b78380dc9ce4e7ddfbde8d7b36c73e3e4d Mon Sep 17 00:00:00 2001 From: Javanaut Date: Sat, 11 Apr 2026 16:30:41 +0200 Subject: [PATCH 13/28] Fix cpu percentage interpretations --- requirements/architecture.md | 2 +- requirements/project.md | 2 +- src/ffx/cli.py | 20 +++++++++----- src/ffx/process.py | 41 ++++++++++++++++++++++++----- 
tests/unit/test_cli_lazy_imports.py | 32 ++++++++++++++++++++++ tests/unit/test_process.py | 12 +++++++-- 6 files changed, 92 insertions(+), 17 deletions(-) diff --git a/requirements/architecture.md b/requirements/architecture.md index e5c86b4..1f0cd17 100644 --- a/requirements/architecture.md +++ b/requirements/architecture.md @@ -42,7 +42,7 @@ - SQLite via SQLAlchemy ORM, with schema rooted in shows, patterns, tracks, media tags, track tags, shifted seasons, and generic properties. - A configuration JSON file supplies optional path, metadata-filtering, and filename-template settings. - Integration adapters: - - Process execution wrapper for `ffmpeg`, `ffprobe`, `nice`, and `cpulimit`, with explicit disabled states for niceness and CPU limiting and a combined `cpulimit -- nice -n ... ` execution shape when both limits are configured. + - Process execution wrapper for `ffmpeg`, `ffprobe`, `nice`, and `cpulimit`, with explicit disabled states for niceness and CPU limiting, support for both absolute `cpulimit` values and machine-wide percent input, and a combined `cpulimit -- nice -n ... ` execution shape when both limits are configured. - HTTP adapter for TMDB via `requests`. ## Data And Interface Notes diff --git a/requirements/project.md b/requirements/project.md index 2c38517..9edf200 100644 --- a/requirements/project.md +++ b/requirements/project.md @@ -64,7 +64,7 @@ - The system shall allow CLI overrides for stream languages, stream titles, default and forced tracks, stream order, TMDB show and episode data, output directory, label prefix, and processing resource limits. - Processing resource limit rules: - `--nice` shall accept niceness values from `-20` through `19`; omitting the option shall disable niceness adjustment. - - `--cpu` shall accept CPU limit values from `1` through `99`; omitting the option shall disable CPU limiting. 
+ - `--cpu` shall accept either a positive absolute `cpulimit` value such as `200`, or a percentage suffixed with `%` such as `25%` to represent a share of present CPUs; omitting the option or using `0` shall disable CPU limiting. - When both limits are configured, the process wrapper shall execute the target command through `cpulimit` around a `nice -n ...` invocation so both limits apply to the launched media command. - The system shall support extracting streams into separate files via `unmux` and reporting suggested crop parameters via `cropdetect`. - The system shall handle invalid input and system failures gracefully by logging warnings or raising `click` errors for missing files, invalid media, missing TMDB credentials, incompatible database versions, and ambiguous track dispositions when prompting is disabled. diff --git a/src/ffx/cli.py b/src/ffx/cli.py index 4854297..50d6e25 100755 --- a/src/ffx/cli.py +++ b/src/ffx/cli.py @@ -30,6 +30,11 @@ if TYPE_CHECKING: from ffx.track_descriptor import TrackDescriptor LIGHTWEIGHT_COMMANDS = {None, 'version', 'help', 'configure_workstation', 'upgrade'} +CPU_OPTION_HELP = ( + "Limit CPU for started processes. Use an absolute cpulimit value such as 200 " + + "(about 2 cores), or use a percentage such as 25% for a share of present cores. " + + "Omit to disable; 0 also disables." +) def normalizeNicenessOption(ctx, param, value): @@ -286,11 +291,11 @@ def getUnmuxSequence(trackDescriptor: TrackDescriptor, sourcePath, targetPrefix, ) @click.option( '--cpu', - type=int, + type=str, default=None, callback=normalizeCpuOption, show_default='disabled', - help='Limit CPU percent of started processes (1..99). 
Omit to disable; 0 also disables.', + help=CPU_OPTION_HELP, ) def unmux(ctx, paths, @@ -309,6 +314,7 @@ def unmux(ctx, ctx.obj['resource_limits'] = {} ctx.obj['resource_limits']['niceness'] = nice + ctx.obj['resource_limits']['cpu_limit'] = cpu ctx.obj['resource_limits']['cpu_percent'] = cpu for sourcePath in existingSourcePaths: @@ -377,11 +383,11 @@ def unmux(ctx, ) @click.option( '--cpu', - type=int, + type=str, default=None, callback=normalizeCpuOption, show_default='disabled', - help='Limit CPU percent of started processes (1..99). Omit to disable; 0 also disables.', + help=CPU_OPTION_HELP, ) def cropdetect(ctx, paths, @@ -394,6 +400,7 @@ def cropdetect(ctx, ctx.obj['resource_limits'] = {} ctx.obj['resource_limits']['niceness'] = nice + ctx.obj['resource_limits']['cpu_limit'] = cpu ctx.obj['resource_limits']['cpu_percent'] = cpu for sourcePath in existingSourcePaths: @@ -536,11 +543,11 @@ def checkUniqueDispositions(context, mediaDescriptor: MediaDescriptor): ) @click.option( '--cpu', - type=int, + type=str, default=None, callback=normalizeCpuOption, show_default='disabled', - help='Limit CPU percent of started processes (1..99). 
Omit to disable; 0 also disables.', + help=CPU_OPTION_HELP, ) @click.option('--rename-only', is_flag=True, default=False, help='Only renaming, no recoding') @@ -643,6 +650,7 @@ def convert(ctx, context['resource_limits'] = {} context['resource_limits']['niceness'] = nice + context['resource_limits']['cpu_limit'] = cpu context['resource_limits']['cpu_percent'] = cpu diff --git a/src/ffx/process.py b/src/ffx/process.py index c186f9c..429961c 100644 --- a/src/ffx/process.py +++ b/src/ffx/process.py @@ -1,3 +1,4 @@ +import os import shlex import subprocess from typing import Iterable, List @@ -9,9 +10,9 @@ COMMAND_NOT_FOUND_RETURN_CODE = 127 MIN_NICENESS = -20 MAX_NICENESS = 19 DISABLED_NICENESS_SENTINEL = 99 -MIN_CPU_PERCENT = 1 -MAX_CPU_PERCENT = 99 DISABLED_CPU_PERCENT_SENTINEL = 0 +MIN_CPU_PERCENT = 1 +MAX_CPU_PERCENT = 100 def formatCommandSequence(commandSequence: Iterable[str]) -> str: @@ -35,18 +36,42 @@ def normalizeNiceness(niceness) -> int | None: return niceness +def getPresentCpuCount() -> int: + if hasattr(os, 'sched_getaffinity'): + affinity = os.sched_getaffinity(0) + if affinity: + return len(affinity) + + cpuCount = os.cpu_count() + return cpuCount if cpuCount and cpuCount > 0 else 1 + + def normalizeCpuPercent(cpuPercent) -> int | None: if cpuPercent is None: return None + cpuPercent = str(cpuPercent).strip() + if cpuPercent.endswith('%'): + percentValue = int(cpuPercent[:-1].strip()) + if percentValue == DISABLED_CPU_PERCENT_SENTINEL: + return None + + if percentValue < MIN_CPU_PERCENT or percentValue > MAX_CPU_PERCENT: + raise ValueError( + f"CPU percentage must be between {MIN_CPU_PERCENT}% and {MAX_CPU_PERCENT}%, " + + f"or {DISABLED_CPU_PERCENT_SENTINEL} to disable." 
+ ) + + return percentValue * getPresentCpuCount() + cpuPercent = int(cpuPercent) if cpuPercent == DISABLED_CPU_PERCENT_SENTINEL: return None - if cpuPercent < MIN_CPU_PERCENT or cpuPercent > MAX_CPU_PERCENT: + if cpuPercent < MIN_CPU_PERCENT: raise ValueError( - f"CPU limit must be between {MIN_CPU_PERCENT} and {MAX_CPU_PERCENT}, " - + f"or {DISABLED_CPU_PERCENT_SENTINEL} to disable." + "CPU limit must be a positive absolute value such as 200, " + + f"a percentage such as 25%, or {DISABLED_CPU_PERCENT_SENTINEL} to disable." ) return cpuPercent @@ -55,7 +80,7 @@ def normalizeCpuPercent(cpuPercent) -> int | None: def getWrappedCommandSequence(commandSequence: List[str], context: dict = None) -> List[str]: """ niceness: -20 to 19, disabled when unset - cpu_percent: 1 to 99, disabled when unset + cpu limit: positive absolute cpulimit value, or a machine-wide percentage When both limits are configured, cpulimit wraps a nice-adjusted command: cpulimit -l -- nice -n @@ -63,7 +88,9 @@ def getWrappedCommandSequence(commandSequence: List[str], context: dict = None) resourceLimits = (context or {}).get('resource_limits', {}) niceness = normalizeNiceness(resourceLimits.get('niceness')) - cpu_percent = normalizeCpuPercent(resourceLimits.get('cpu_percent')) + cpu_percent = normalizeCpuPercent( + resourceLimits.get('cpu_limit', resourceLimits.get('cpu_percent')) + ) wrappedCommandSequence = [str(token) for token in commandSequence] if niceness is not None: diff --git a/tests/unit/test_cli_lazy_imports.py b/tests/unit/test_cli_lazy_imports.py index 3707219..36b6553 100644 --- a/tests/unit/test_cli_lazy_imports.py +++ b/tests/unit/test_cli_lazy_imports.py @@ -99,6 +99,38 @@ class CliLazyImportTests(unittest.TestCase): result["modules"], ) + def test_convert_help_describes_absolute_and_percent_cpu_limits(self): + result = self.run_python( + textwrap.dedent( + f""" + import click + import json + import sys + + sys.path.insert(0, {str(SRC_ROOT)!r}) + + import ffx.cli + + help_output 
= ffx.cli.convert.get_help(click.Context(ffx.cli.convert)) + + print(json.dumps({{ + "output": help_output, + "modules": {{ + module_name: module_name in sys.modules + for module_name in {HEAVY_MODULES!r} + }}, + }})) + """ + ) + ) + + self.assertIn("200", result["output"]) + self.assertIn("25%", result["output"]) + self.assertTrue( + all(not is_loaded for is_loaded in result["modules"].values()), + result["modules"], + ) + if __name__ == "__main__": unittest.main() diff --git a/tests/unit/test_process.py b/tests/unit/test_process.py index 62ea007..05ef254 100644 --- a/tests/unit/test_process.py +++ b/tests/unit/test_process.py @@ -3,6 +3,7 @@ from __future__ import annotations from pathlib import Path import sys import unittest +from unittest.mock import patch SRC_ROOT = Path(__file__).resolve().parents[2] / "src" @@ -69,11 +70,11 @@ class ProcessTests(unittest.TestCase): def test_get_wrapped_command_sequence_wraps_cpulimit_around_nice_when_both_configured(self): wrapped = getWrappedCommandSequence( ["ffmpeg", "-i", "input.mkv"], - context={"resource_limits": {"niceness": 5, "cpu_percent": 42}}, + context={"resource_limits": {"niceness": 5, "cpu_limit": 200}}, ) self.assertEqual( - ["cpulimit", "-l", "42", "--", "nice", "-n", "5", "ffmpeg", "-i", "input.mkv"], + ["cpulimit", "-l", "200", "--", "nice", "-n", "5", "ffmpeg", "-i", "input.mkv"], wrapped, ) @@ -83,6 +84,13 @@ class ProcessTests(unittest.TestCase): def test_normalize_cpu_percent_accepts_disabled_sentinel(self): self.assertIsNone(normalizeCpuPercent(0)) + def test_normalize_cpu_percent_accepts_absolute_cpulimit_values(self): + self.assertEqual(200, normalizeCpuPercent(200)) + + def test_normalize_cpu_percent_converts_percent_of_present_cores(self): + with patch("ffx.process.getPresentCpuCount", return_value=8): + self.assertEqual(200, normalizeCpuPercent("25%")) + if __name__ == "__main__": unittest.main() From 961193094959921642ebd152c76c3c88a05be89d Mon Sep 17 00:00:00 2001 From: Javanaut Date: Sat, 11 
Apr 2026 16:52:58 +0200 Subject: [PATCH 14/28] Misc Opts --- SCRATCHPAD.md | 48 +++------ requirements/architecture.md | 3 +- requirements/project.md | 11 +- src/ffx/cli.py | 122 +++++++++++++++++++---- src/ffx/constants.py | 3 + src/ffx/file_properties.py | 77 +++++++++++--- src/ffx/media_details_screen.py | 42 +++----- src/ffx/pattern_controller.py | 23 +++++ src/ffx/pattern_details_screen.py | 60 +++-------- src/ffx/screen_support.py | 65 ++++++++++++ src/ffx/show_details_screen.py | 51 ++++------ tests/unit/test_cli_lazy_imports.py | 37 +++++++ tests/unit/test_file_properties_probe.py | 64 ++++++++++++ tests/unit/test_screen_support.py | 86 ++++++++++++++++ tools/configure_workstation.sh | 1 + tools/setup.sh | 1 + 16 files changed, 516 insertions(+), 178 deletions(-) create mode 100644 src/ffx/screen_support.py create mode 100644 tests/unit/test_screen_support.py diff --git a/SCRATCHPAD.md b/SCRATCHPAD.md index 1315a27..368f60f 100644 --- a/SCRATCHPAD.md +++ b/SCRATCHPAD.md @@ -9,9 +9,10 @@ - The biggest near-term wins are in startup cost, repeated subprocess work, repeated database query patterns, and general repo hygiene. - This list is intentionally optimization-oriented rather than bug-oriented. Some items below also improve correctness or maintainability, but they were selected because they can reduce runtime cost, operator friction, or iteration overhead. - A first modern integration slice now exists under [`tests/integration/subtrack_mapping`](/home/osgw/.local/src/codex/ffx/tests/integration/subtrack_mapping). Remaining test-suite cleanup is now mostly about migrating and shrinking the legacy harness surface under [`tests/legacy`](/home/osgw/.local/src/codex/ffx/tests/legacy). -- The CLI root now lazy-loads heavy runtime dependencies so lightweight commands such as `version`, `help`, `configure_workstation`, and `upgrade` stay import-light. 
+- The CLI root now lazy-loads heavy runtime dependencies so lightweight commands such as `version`, `help`, `setup`, `configure_workstation`, and `upgrade` stay import-light. - Shared CLI defaults for container/output tokens now live outside [`src/ffx/ffx_controller.py`](/home/osgw/.local/src/codex/ffx/src/ffx/ffx_controller.py), and a focused unit test locks in the lazy-import contract. - `FileProperties` now uses one cached `ffprobe -show_format -show_streams -of json` call per source file, and the combined payload was confirmed against the Dragonball asset to satisfy both previous probe call sites fully. +- Crop detection now uses configurable sampling windows plus per-process caching keyed by source file and sampling range, and the `cropdetect` CLI command now calls the real `FileProperties.findCropArguments()` path. - Database startup now bootstraps schema only when required tables are actually missing, while version enforcement still runs on ordinary DB-backed context creation. - Helper filename and rich-text utilities now use compiled raw regexes plus translate-based filename filtering, with unit coverage for TMDB suffix rewriting and Rich color stripping. - Process resource limiting now has explicit disabled/default states in the CLI and requirements, and combined CPU-plus-niceness wrapping now executes as `cpulimit -- nice -n ... ` instead of a less explicit prefix chain. @@ -19,37 +20,21 @@ - The process wrapper now uses `subprocess.run(...)` with centralized command formatting plus stable timeout and missing-command error mapping. - Active ORM controllers now use single-query accessors instead of paired `count()` plus `first()` lookups. - Pattern matching now uses cached compiled regexes plus explicit duplicate-match errors, and pattern creation flows no longer persist zero-track patterns. 
+- The two-step local setup flow now has aligned CLI wrappers for both phases: `ffx setup` for bundle prep and `ffx configure_workstation` for workstation prep, while the shell scripts remain the bootstrap entrypoints before the bundle exists. +- The large detail screens now share one screen-bootstrap helper for context, metadata-filter extraction, and controller wiring, and show-pattern loading now goes through `PatternController` instead of a screen-local session query. ## Focused Snapshot - Highest-leverage application optimizations: - - Revisit crop detection cost now that the probe path is consolidated. + - Decide whether placeholder help/settings screens should ship or disappear. + - Trim dead helpers and other dormant surface that still looks active. - Highest-leverage repo and workflow optimizations: - - Consolidate setup and upgrade tooling to reduce overlapping shell-script responsibilities. - Continue migrating the oversized legacy test/combinator surface into focused modern tests so it is easier to run, debug, and extend. ## Optimization Candidates -1. Crop detection is always a full extra ffmpeg scan -- [`src/ffx/file_properties.py`](/home/osgw/.local/src/codex/ffx/src/ffx/file_properties.py) runs a dedicated `ffmpeg -vf cropdetect` pass for each file when crop detection is requested. -- Optimization: - - Cache crop results for repeated runs on the same source. - - Consider exposing shorter sampling windows or probe presets for large files. -- Expected value: - - Lower latency on repeated experimentation. - -2. Tooling overlap and naming drift -- There are still overlapping workstation-setup entrypoints across [`tools/configure_workstation.sh`](/home/osgw/.local/src/codex/ffx/tools/configure_workstation.sh), [`tools/setup.sh`](/home/osgw/.local/src/codex/ffx/tools/setup.sh), and newer CLI maintenance commands. -- Optimization: - - Decide which scripts remain canonical. - - Replace or remove legacy wrappers once equivalent CLI commands exist. 
- - Keep CLI maintenance commands and shell wrappers aligned. -- Expected value: - - Less operator confusion. - - Fewer duplicated procedures to maintain. - -3. Placeholder UI surfaces should either ship or disappear +1. Placeholder UI surfaces should either ship or disappear - [`src/ffx/help_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/help_screen.py) and [`src/ffx/settings_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/settings_screen.py) are placeholders. - Optimization: - Either remove them from the active UI surface or complete them. @@ -58,16 +43,7 @@ - Leaner interface. - Lower UX ambiguity. -4. Large Textual screens repeat configuration and controller loading -- Screens such as [`src/ffx/media_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/media_details_screen.py), [`src/ffx/pattern_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/pattern_details_screen.py), and [`src/ffx/show_details_screen.py`](/home/osgw/.local/src/codex/ffx/src/ffx/show_details_screen.py) repeat setup patterns and local metadata filtering extraction. -- Optimization: - - Extract a shared screen base or helper for common config/controller/bootstrap logic. - - Reduce repeated table refresh and repeated DB fetch code where possible. -- Expected value: - - Lower maintenance overhead. - - Easier UI iteration. - -5. Several helper functions are unfinished or dead-weight +2. Several helper functions are unfinished or dead-weight - [`src/ffx/helper.py`](/home/osgw/.local/src/codex/ffx/src/ffx/helper.py) contains `permutateList(...): pass`. - There are many combinator and conversion placeholders across tests and migrations. - Optimization: @@ -77,7 +53,7 @@ - Smaller mental model. - Less time spent re-evaluating inactive paths. -6. Test suite shape is expensive to understand and likely expensive to run +3. 
Test suite shape is expensive to understand and likely expensive to run - The project still carries a large legacy matrix of combinator files under [`tests/legacy`](/home/osgw/.local/src/codex/ffx/tests/legacy), several placeholder `pass` implementations, and at least one suspicious filename with an embedded space: [`tests/legacy/disposition_combinator_2_3 .py`](/home/osgw/.local/src/codex/ffx/tests/legacy/disposition_combinator_2_3 .py). - A first focused replacement slice now exists in [`tests/integration/subtrack_mapping/test_cli_bundle.py`](/home/osgw/.local/src/codex/ffx/tests/integration/subtrack_mapping/test_cli_bundle.py), so the remaining work is migration and consolidation rather than creating the modern test shape from scratch. - Optimization: @@ -102,9 +78,9 @@ ## Next 1. Triage the list into quick wins, medium refactors, and long-horizon cleanup. -2. Tackle the cheapest high-impact items first: - - crop detection sampling or caching pass. -3. Decide which setup and upgrade entrypoints stay canonical before adding more maintenance surface. +2. Tackle the cheapest remaining product-surface cleanup first: + - placeholder UI surfaces and dead helper cleanup. +3. Continue replacing oversized legacy test matrices with focused modern integration and unit coverage. ## Delete When diff --git a/requirements/architecture.md b/requirements/architecture.md index 1f0cd17..42be71b 100644 --- a/requirements/architecture.md +++ b/requirements/architecture.md @@ -32,12 +32,13 @@ ## High-Level Building Blocks - Frontend, CLI, API, or worker: - - A Click-based CLI in [`src/ffx/cli.py`](/home/osgw/.local/src/codex/ffx/src/ffx/cli.py), exposed as the `ffx` command and via `python -m ffx`. + - A Click-based CLI in [`src/ffx/cli.py`](/home/osgw/.local/src/codex/ffx/src/ffx/cli.py), exposed as the `ffx` command and via `python -m ffx`, including lightweight maintenance wrappers for bundle setup, workstation preparation, and upgrade tasks. 
- A Textual terminal UI rooted in [`src/ffx/ffx_app.py`](/home/osgw/.local/src/codex/ffx/src/ffx/ffx_app.py) with screens for shows, patterns, file inspection, tracks, tags, and shifted seasons. - Core business logic: - Descriptor objects model media files, shows, and tracks. - Controllers encapsulate CRUD operations and workflow orchestration for shows, patterns, tags, tracks, season shifts, configuration, and conversion. - `MediaDescriptorChangeSet` computes differences between a file and its stored target schema to drive metadata and disposition updates. + - File inspection caches combined `ffprobe` data and crop-detection results per source and sampling window within one process to avoid repeated subprocess work. - Storage: - SQLite via SQLAlchemy ORM, with schema rooted in shows, patterns, tracks, media tags, track tags, shifted seasons, and generic properties. - A configuration JSON file supplies optional path, metadata-filtering, and filename-template settings. diff --git a/requirements/project.md b/requirements/project.md index 9edf200..8182705 100644 --- a/requirements/project.md +++ b/requirements/project.md @@ -35,10 +35,12 @@ ## Functional Requirements -- The system shall provide a CLI entrypoint named `ffx` with commands for `convert`, `inspect`, `shows`, `unmux`, `cropdetect`, `configure_workstation`, `upgrade`, `version`, and `help`. +- The system shall provide a CLI entrypoint named `ffx` with commands for `convert`, `inspect`, `shows`, `unmux`, `cropdetect`, `setup`, `configure_workstation`, `upgrade`, `version`, and `help`. - The system shall support a two-step local installation and preparation flow: - - `tools/setup.sh` is the first step and shall own bundle virtualenv creation, package installation, shell alias exposure, and optional Python test-package installation. - - `tools/configure_workstation.sh` is the second step and shall own workstation dependency checks and installation plus local config and directory seeding. 
+ - `tools/setup.sh` is the bootstrap entrypoint for the first step and shall own bundle virtualenv creation, package installation, shell alias exposure, and optional Python test-package installation. + - `tools/configure_workstation.sh` is the bootstrap entrypoint for the second step and shall own workstation dependency checks and installation plus local config and directory seeding. + - After the bundle is installed, `ffx setup` and `ffx configure_workstation` shall remain aligned wrapper entrypoints for those same two steps. +- The CLI command `ffx setup` shall act as a wrapper for the first-step bundle-preparation flow in `tools/setup.sh`. - The CLI command `ffx configure_workstation` shall act as a wrapper for the second-step preparation flow in `tools/configure_workstation.sh`. - The system shall persist reusable normalization rules in SQLite for: - shows and show formatting digits, @@ -67,6 +69,7 @@ - `--cpu` shall accept either a positive absolute `cpulimit` value such as `200`, or a percentage suffixed with `%` such as `25%` to represent a share of present CPUs; omitting the option or using `0` shall disable CPU limiting. - When both limits are configured, the process wrapper shall execute the target command through `cpulimit` around a `nice -n ...` invocation so both limits apply to the launched media command. - The system shall support extracting streams into separate files via `unmux` and reporting suggested crop parameters via `cropdetect`. +- Crop detection shall use a configurable sampling window, defaulting to a 60-second seek and a 180-second analysis duration, and repeated crop-detection requests for the same source plus sampling window shall reuse cached results within one process. - The system shall handle invalid input and system failures gracefully by logging warnings or raising `click` errors for missing files, invalid media, missing TMDB credentials, incompatible database versions, and ambiguous track dispositions when prompting is disabled. 
## Quality Requirements @@ -94,7 +97,7 @@ - `ffmpeg`, `ffprobe`, and `cpulimit`. - TMDB API access through `TMDB_API_KEY` for metadata enrichment. - Installation assumptions: - - The Python-side bundle install step and optional Python test extras are managed by `tools/setup.sh`. + - The Python-side bundle install step and optional Python test extras are managed by `tools/setup.sh`, with `ffx setup` as the aligned wrapper after bootstrap. - The workstation-preparation step is managed separately by `tools/configure_workstation.sh` or `ffx configure_workstation`. ## Acceptance Scope diff --git a/src/ffx/cli.py b/src/ffx/cli.py index 50d6e25..296fbfd 100755 --- a/src/ffx/cli.py +++ b/src/ffx/cli.py @@ -15,6 +15,8 @@ if __package__ in (None, ''): from ffx.constants import ( DEFAULT_AC3_BANDWIDTH, + DEFAULT_CROPDETECT_DURATION_SECONDS, + DEFAULT_CROPDETECT_SEEK_SECONDS, DEFAULT_CONTAINER_EXTENSION, DEFAULT_CONTAINER_FORMAT, DEFAULT_DTS_BANDWIDTH, @@ -29,12 +31,20 @@ if TYPE_CHECKING: from ffx.media_descriptor import MediaDescriptor from ffx.track_descriptor import TrackDescriptor -LIGHTWEIGHT_COMMANDS = {None, 'version', 'help', 'configure_workstation', 'upgrade'} +LIGHTWEIGHT_COMMANDS = {None, 'version', 'help', 'setup', 'configure_workstation', 'upgrade'} CPU_OPTION_HELP = ( "Limit CPU for started processes. Use an absolute cpulimit value such as 200 " + "(about 2 cores), or use a percentage such as 25% for a share of present cores. " + "Omit to disable; 0 also disables." ) +CROPDETECT_SEEK_OPTION_HELP = ( + "Start crop detection this many seconds into the input. " + + "Useful for skipping logos, intros, or black frames." +) +CROPDETECT_DURATION_OPTION_HELP = ( + "Analyze this many seconds for crop detection. " + + "Shorter windows are faster; longer windows are usually steadier." 
+) def normalizeNicenessOption(ctx, param, value): @@ -111,7 +121,9 @@ def version(): @ffx.command() def help(): click.echo(f"ffx {VERSION}\n") - click.echo(f"Usage: ffx [input file] [output file] [vp9|av1] [q=[nn[,nn,...]]] [p=nn] [a=nnn[k]] [ac3=nnn[k]] [dts=nnn[k]] [crop]") + click.echo("Maintenance commands: setup, configure_workstation, upgrade") + click.echo("Media commands: shows, inspect, convert, unmux, cropdetect") + click.echo("Use 'ffx --help' or 'ffx <command> --help' for full command help.") def getRepoRootPath(): @@ -123,6 +135,10 @@ def getConfigureWorkstationScriptPath(): return os.path.join(getRepoRootPath(), 'tools', 'configure_workstation.sh') +def getSetupScriptPath(): + return os.path.join(getRepoRootPath(), 'tools', 'setup.sh') + + def getBundleVenvDirectory(): return os.path.join(os.path.expanduser('~'), '.local', 'share', 'ffx.venv') @@ -153,23 +169,11 @@ def getTrackedGitChanges(repoPath): return [line for line in completed.stdout.splitlines() if line.strip()] -@ffx.command(name='configure_workstation') -@click.pass_context -@click.option('--check', is_flag=True, default=False, help='Only verify workstation-configuration readiness') -@click.argument('configure_args', nargs=-1, type=click.UNPROCESSED) -def configure_workstation(ctx, check, configure_args): - """Prepare workstation dependencies and local config after bundle install.""" - configureScriptPath = getConfigureWorkstationScriptPath() +def runScriptWrapper(ctx, scriptPath, missingDescription, commandArgs): + if not os.path.isfile(scriptPath): + raise click.ClickException(f"{missingDescription} not found at {scriptPath}") - if not os.path.isfile(configureScriptPath): - raise click.ClickException(f"Workstation configuration script not found at {configureScriptPath}") - - commandSequence = ['bash', configureScriptPath] - - if check: - commandSequence.append('--check') - - commandSequence += list(configure_args) + commandSequence = ['bash', scriptPath] + list(commandArgs) if 
ctx.obj.get('dry_run', False): click.echo(' '.join(commandSequence)) @@ -179,6 +183,44 @@ def configure_workstation(ctx, check, configure_args): ctx.exit(completed.returncode) +@ffx.command(name='setup') +@click.pass_context +@click.option('--check', is_flag=True, default=False, help='Only verify bundle-setup readiness') +@click.option('--with-tests', is_flag=True, default=False, help='Also install or verify Python test packages in the bundle venv') +@click.argument('setup_args', nargs=-1, type=click.UNPROCESSED) +def setup(ctx, check, with_tests, setup_args): + """Prepare or repair the FFX bundle virtualenv and shell alias.""" + commandArgs = [] + + if check: + commandArgs.append('--check') + if with_tests: + commandArgs.append('--with-tests') + + commandArgs += list(setup_args) + runScriptWrapper(ctx, getSetupScriptPath(), "Bundle setup script", commandArgs) + + +@ffx.command(name='configure_workstation') +@click.pass_context +@click.option('--check', is_flag=True, default=False, help='Only verify workstation-configuration readiness') +@click.argument('configure_args', nargs=-1, type=click.UNPROCESSED) +def configure_workstation(ctx, check, configure_args): + """Prepare workstation dependencies and local config after bundle install.""" + commandArgs = [] + + if check: + commandArgs.append('--check') + + commandArgs += list(configure_args) + runScriptWrapper( + ctx, + getConfigureWorkstationScriptPath(), + "Workstation configuration script", + commandArgs, + ) + + @ffx.command(name='upgrade') @click.pass_context @click.option('--branch', type=str, default='', help='Checkout this branch before pulling') @@ -389,10 +431,26 @@ def unmux(ctx, show_default='disabled', help=CPU_OPTION_HELP, ) +@click.option( + '--crop-seek', + type=click.IntRange(min=0), + default=DEFAULT_CROPDETECT_SEEK_SECONDS, + show_default=True, + help=CROPDETECT_SEEK_OPTION_HELP, +) +@click.option( + '--crop-duration', + type=click.IntRange(min=1), + default=DEFAULT_CROPDETECT_DURATION_SECONDS, + 
show_default=True, + help=CROPDETECT_DURATION_OPTION_HELP, +) def cropdetect(ctx, paths, nice, - cpu): + cpu, + crop_seek, + crop_duration): from ffx.file_properties import FileProperties existingSourcePaths = [p for p in paths if os.path.isfile(p)] @@ -402,6 +460,10 @@ def cropdetect(ctx, ctx.obj['resource_limits']['niceness'] = nice ctx.obj['resource_limits']['cpu_limit'] = cpu ctx.obj['resource_limits']['cpu_percent'] = cpu + ctx.obj['cropdetect'] = { + 'seek_seconds': crop_seek, + 'duration_seconds': crop_duration, + } for sourcePath in existingSourcePaths: @@ -409,7 +471,7 @@ def cropdetect(ctx, try: fp = FileProperties(ctx.obj, sourcePath) - cropParams = fp.findCropParams() + cropParams = fp.findCropArguments() click.echo(cropParams) @@ -506,6 +568,20 @@ def checkUniqueDispositions(context, mediaDescriptor: MediaDescriptor): @click.option('--rearrange-streams', type=str, default="", help='Rearrange output streams order. Use format comma separated integers') @click.option("--crop", is_flag=False, flag_value="auto", default="none") +@click.option( + '--crop-seek', + type=click.IntRange(min=0), + default=DEFAULT_CROPDETECT_SEEK_SECONDS, + show_default=True, + help='When --crop auto is used, start crop detection this many seconds into the input.', +) +@click.option( + '--crop-duration', + type=click.IntRange(min=1), + default=DEFAULT_CROPDETECT_DURATION_SECONDS, + show_default=True, + help='When --crop auto is used, analyze this many seconds for crop detection.', +) @click.option("--cut", is_flag=False, flag_value="default", default="none") @click.option("--output-directory", type=str, default='') @@ -578,6 +654,8 @@ def convert(ctx, rearrange_streams, crop, + crop_seek, + crop_duration, cut, output_directory, @@ -652,6 +730,10 @@ def convert(ctx, context['resource_limits']['niceness'] = nice context['resource_limits']['cpu_limit'] = cpu context['resource_limits']['cpu_percent'] = cpu + context['cropdetect'] = { + 'seek_seconds': crop_seek, + 'duration_seconds': 
crop_duration, + } context['import_subtitles'] = (subtitle_directory and subtitle_prefix) diff --git a/src/ffx/constants.py b/src/ffx/constants.py index b1471db..63f67b9 100644 --- a/src/ffx/constants.py +++ b/src/ffx/constants.py @@ -16,6 +16,9 @@ DEFAULT_AC3_BANDWIDTH = "256" DEFAULT_DTS_BANDWIDTH = "320" DEFAULT_7_1_BANDWIDTH = "384" +DEFAULT_CROPDETECT_SEEK_SECONDS = 60 +DEFAULT_CROPDETECT_DURATION_SECONDS = 180 + DEFAULT_cut_start = 60 DEFAULT_cut_length = 180 diff --git a/src/ffx/file_properties.py b/src/ffx/file_properties.py index 1b45a06..2f8d0af 100644 --- a/src/ffx/file_properties.py +++ b/src/ffx/file_properties.py @@ -1,5 +1,11 @@ import os, re, json +from .constants import ( + DEFAULT_CROPDETECT_DURATION_SECONDS, + DEFAULT_CROPDETECT_SEEK_SECONDS, + FFMPEG_COMMAND_TOKENS, + FFMPEG_NULL_OUTPUT_TOKENS, +) from .media_descriptor import MediaDescriptor from .pattern_controller import PatternController @@ -11,6 +17,7 @@ from ffx.model.pattern import Pattern class FileProperties(): + _cropdetect_cache: dict[tuple[str, int, int, int, int], dict[str, str]] = {} FILE_EXTENSIONS = ['mkv', 'mp4', 'avi', 'flv', 'webm'] FFPROBE_COMMAND_TOKENS = ["ffprobe", "-hide_banner", "-show_format", "-show_streams", "-of", "json"] @@ -81,6 +88,34 @@ class FileProperties(): self.__ffprobeData = None + def _getCropdetectWindow(self): + cropdetectContext = self.context.get('cropdetect', {}) + + seekSeconds = int(cropdetectContext.get('seek_seconds', DEFAULT_CROPDETECT_SEEK_SECONDS)) + durationSeconds = int(cropdetectContext.get('duration_seconds', DEFAULT_CROPDETECT_DURATION_SECONDS)) + + if seekSeconds < 0: + raise ValueError("Crop detection seek seconds must be zero or greater.") + if durationSeconds <= 0: + raise ValueError("Crop detection duration seconds must be greater than zero.") + + return seekSeconds, durationSeconds + + def _getCropdetectCacheKey(self): + sourceStat = os.stat(self.__sourcePath) + seekSeconds, durationSeconds = self._getCropdetectWindow() + + return ( 
+ os.path.abspath(self.__sourcePath), + sourceStat.st_mtime_ns, + sourceStat.st_size, + seekSeconds, + durationSeconds, + ) + + @classmethod + def _clear_cropdetect_cache(cls): + cls._cropdetect_cache.clear() def _getFfprobeData(self): if self.__ffprobeData is not None: @@ -172,16 +207,25 @@ class FileProperties(): def findCropArguments(self): """""" - # ffmpeg -i -vf cropdetect -f null - - ffprobeOutput, ffprobeError, returnCode = executeProcess(["ffmpeg", "-i", - self.__sourcePath, - "-vf", "cropdetect", - "-ss", "60", - "-t", "180", - "-f", "null", "-" - ]) + cacheKey = self._getCropdetectCacheKey() + cachedCropArguments = FileProperties._cropdetect_cache.get(cacheKey) + if cachedCropArguments is not None: + self.__logger.debug( + "FileProperties.findCropArguments(): Reusing cached cropdetect result for %s", + self.__sourcePath, + ) + return dict(cachedCropArguments) - errorLines = ffprobeError.split('\n') + seekSeconds, durationSeconds = self._getCropdetectWindow() + + cropdetectCommand = ( + list(FFMPEG_COMMAND_TOKENS) + + ["-ss", str(seekSeconds), "-i", self.__sourcePath, "-t", str(durationSeconds), "-vf", "cropdetect"] + + list(FFMPEG_NULL_OUTPUT_TOKENS) + ) + _ffmpegOutput, ffmpegError, returnCode = executeProcess(cropdetectCommand, context=self.context) + + errorLines = ffmpegError.split('\n') crops = {} for el in errorLines: @@ -194,21 +238,26 @@ class FileProperties(): crops[cropParam] = crops.get(cropParam, 0) + 1 if crops: - cropHistogram = sorted(crops, reverse=True) - cropString = cropHistogram[0] + cropString = max(crops.items(), key=lambda item: (item[1], item[0]))[0] cropTokens = cropString.split('=') cropValueTokens = cropTokens[1] cropValues = cropValueTokens.split(':') - return { + cropArguments = { CropFilter.OUTPUT_WIDTH_KEY: cropValues[0], CropFilter.OUTPUT_HEIGHT_KEY: cropValues[1], CropFilter.OFFSET_X_KEY: cropValues[2], CropFilter.OFFSET_Y_KEY: cropValues[3] } - else: - return {} + FileProperties._cropdetect_cache[cacheKey] = 
dict(cropArguments) + return cropArguments + + if returnCode != 0: + raise Exception(f"ffmpeg cropdetect returned with error {returnCode}") + + FileProperties._cropdetect_cache[cacheKey] = {} + return {} def getMediaDescriptor(self): diff --git a/src/ffx/media_details_screen.py b/src/ffx/media_details_screen.py index dfb837f..7f61622 100644 --- a/src/ffx/media_details_screen.py +++ b/src/ffx/media_details_screen.py @@ -6,13 +6,9 @@ from textual.containers import Grid from ffx.audio_layout import AudioLayout -from .pattern_controller import PatternController -from .show_controller import ShowController -from .track_controller import TrackController -from .tag_controller import TagController - from .show_details_screen import ShowDetailsScreen from .pattern_details_screen import PatternDetailsScreen +from .screen_support import build_screen_bootstrap, build_screen_controllers from ffx.track_type import TrackType from ffx.track_codec import TrackCodec @@ -135,29 +131,23 @@ class MediaDetailsScreen(Screen): def __init__(self): super().__init__() - self.context = self.app.getContext() - self.Session = self.context['database']['session'] # convenience + bootstrap = build_screen_bootstrap(self.app.getContext()) + self.context = bootstrap.context + self.__removeGlobalKeys = bootstrap.remove_global_keys + self.__ignoreGlobalKeys = bootstrap.ignore_global_keys - self.__configurationData = self.context['config'].getData() - - metadataConfiguration = self.__configurationData['metadata'] if 'metadata' in self.__configurationData.keys() else {} - - self.__signatureTags = metadataConfiguration['signature'] if 'signature' in metadataConfiguration.keys() else {} - self.__removeGlobalKeys = metadataConfiguration['remove'] if 'remove' in metadataConfiguration.keys() else [] - self.__ignoreGlobalKeys = metadataConfiguration['ignore'] if 'ignore' in metadataConfiguration.keys() else [] - self.__removeTrackKeys = (metadataConfiguration['streams']['remove'] - if 'streams' in 
metadataConfiguration.keys() - and 'remove' in metadataConfiguration['streams'].keys() else []) - self.__ignoreTrackKeys = (metadataConfiguration['streams']['ignore'] - if 'streams' in metadataConfiguration.keys() - and 'ignore' in metadataConfiguration['streams'].keys() else []) - - - self.__pc = PatternController(context = self.context) - self.__sc = ShowController(context = self.context) - self.__tc = TrackController(context = self.context) - self.__tac = TagController(context = self.context) + controllers = build_screen_controllers( + self.context, + pattern=True, + show=True, + track=True, + tag=True, + ) + self.__pc = controllers['pattern'] + self.__sc = controllers['show'] + self.__tc = controllers['track'] + self.__tac = controllers['tag'] if not 'command' in self.context.keys() or self.context['command'] != 'inspect': raise click.ClickException(f"MediaDetailsScreen.__init__(): Can only perform command 'inspect'") diff --git a/src/ffx/pattern_controller.py b/src/ffx/pattern_controller.py index e10d5a7..b0886ee 100644 --- a/src/ffx/pattern_controller.py +++ b/src/ffx/pattern_controller.py @@ -305,6 +305,29 @@ class PatternController: if session is not None: session.close() + def getPatternsForShow(self, showId: int) -> list[Pattern]: + + if type(showId) is not int: + raise ValueError( + "PatternController.getPatternsForShow(): Argument showId is required to be of type int" + ) + + session = None + try: + session = self.Session() + return ( + session.query(Pattern) + .filter(Pattern.show_id == int(showId)) + .order_by(Pattern.id) + .all() + ) + + except Exception as ex: + raise click.ClickException(f"PatternController.getPatternsForShow(): {repr(ex)}") + finally: + if session is not None: + session.close() + def getPattern(self, patternId: int): if type(patternId) is not int: diff --git a/src/ffx/pattern_details_screen.py b/src/ffx/pattern_details_screen.py index bdb1257..fe64352 100644 --- a/src/ffx/pattern_details_screen.py +++ 
b/src/ffx/pattern_details_screen.py @@ -7,16 +7,12 @@ from textual.containers import Grid from ffx.model.pattern import Pattern -from .pattern_controller import PatternController -from .show_controller import ShowController -from .track_controller import TrackController -from .tag_controller import TagController - from .track_details_screen import TrackDetailsScreen from .track_delete_screen import TrackDeleteScreen from .tag_details_screen import TagDetailsScreen from .tag_delete_screen import TagDeleteScreen +from .screen_support import build_screen_bootstrap, build_screen_controllers from ffx.track_type import TrackType @@ -107,27 +103,23 @@ class PatternDetailsScreen(Screen): def __init__(self, patternId = None, showId = None): super().__init__() - self.context = self.app.getContext() - self.Session = self.context['database']['session'] # convenience + bootstrap = build_screen_bootstrap(self.app.getContext()) + self.context = bootstrap.context - self.__configurationData = self.context['config'].getData() + self.__removeGlobalKeys = bootstrap.remove_global_keys + self.__ignoreGlobalKeys = bootstrap.ignore_global_keys - metadataConfiguration = self.__configurationData['metadata'] if 'metadata' in self.__configurationData.keys() else {} - - self.__signatureTags = metadataConfiguration['signature'] if 'signature' in metadataConfiguration.keys() else {} - self.__removeGlobalKeys = metadataConfiguration['remove'] if 'remove' in metadataConfiguration.keys() else [] - self.__ignoreGlobalKeys = metadataConfiguration['ignore'] if 'ignore' in metadataConfiguration.keys() else [] - self.__removeTrackKeys = (metadataConfiguration['streams']['remove'] - if 'streams' in metadataConfiguration.keys() - and 'remove' in metadataConfiguration['streams'].keys() else []) - self.__ignoreTrackKeys = (metadataConfiguration['streams']['ignore'] - if 'streams' in metadataConfiguration.keys() - and 'ignore' in metadataConfiguration['streams'].keys() else []) - - self.__pc = 
PatternController(context = self.context) - self.__sc = ShowController(context = self.context) - self.__tc = TrackController(context = self.context) - self.__tac = TagController(context = self.context) + controllers = build_screen_controllers( + self.context, + pattern=True, + show=True, + track=True, + tag=True, + ) + self.__pc = controllers['pattern'] + self.__sc = controllers['show'] + self.__tc = controllers['track'] + self.__tac = controllers['tag'] self.__pattern : Pattern = self.__pc.getPattern(patternId) if patternId is not None else None self.__showDescriptor = self.__sc.getShowDescriptor(showId) if showId is not None else None @@ -135,26 +127,6 @@ class PatternDetailsScreen(Screen): self.__draftTags : dict[str, str] = {} - #TODO: per controller - def loadTracks(self, show_id): - - try: - - tracks = {} - tracks['audio'] = {} - tracks['subtitle'] = {} - - s = self.Session() - q = s.query(Pattern).filter(Pattern.show_id == int(show_id)) - - return [{'id': int(p.id), 'pattern': p.pattern} for p in q.all()] - - except Exception as ex: - raise click.ClickException(f"loadTracks(): {repr(ex)}") - finally: - s.close() - - def updateTracks(self): self.tracksTable.clear() diff --git a/src/ffx/screen_support.py b/src/ffx/screen_support.py new file mode 100644 index 0000000..a7e24b6 --- /dev/null +++ b/src/ffx/screen_support.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +from dataclasses import dataclass + +from .pattern_controller import PatternController +from .show_controller import ShowController +from .shifted_season_controller import ShiftedSeasonController +from .tag_controller import TagController +from .tmdb_controller import TmdbController +from .track_controller import TrackController + + +@dataclass(frozen=True) +class ScreenBootstrap: + context: dict + configuration_data: dict + signature_tags: dict + remove_global_keys: list + ignore_global_keys: list + remove_track_keys: list + ignore_track_keys: list + + +def build_screen_bootstrap(context: 
dict) -> ScreenBootstrap: + configurationData = context['config'].getData() + metadataConfiguration = configurationData.get('metadata', {}) + streamMetadataConfiguration = metadataConfiguration.get('streams', {}) + + return ScreenBootstrap( + context=context, + configuration_data=configurationData, + signature_tags=metadataConfiguration.get('signature', {}), + remove_global_keys=metadataConfiguration.get('remove', []), + ignore_global_keys=metadataConfiguration.get('ignore', []), + remove_track_keys=streamMetadataConfiguration.get('remove', []), + ignore_track_keys=streamMetadataConfiguration.get('ignore', []), + ) + + +def build_screen_controllers( + context: dict, + *, + pattern: bool = False, + show: bool = False, + track: bool = False, + tag: bool = False, + tmdb: bool = False, + shifted_season: bool = False, +) -> dict[str, object]: + controllers = {} + + if pattern: + controllers['pattern'] = PatternController(context=context) + if show: + controllers['show'] = ShowController(context=context) + if track: + controllers['track'] = TrackController(context=context) + if tag: + controllers['tag'] = TagController(context=context) + if tmdb: + controllers['tmdb'] = TmdbController() + if shifted_season: + controllers['shifted_season'] = ShiftedSeasonController(context=context) + + return controllers diff --git a/src/ffx/show_details_screen.py b/src/ffx/show_details_screen.py index c0e2153..8d840c1 100644 --- a/src/ffx/show_details_screen.py +++ b/src/ffx/show_details_screen.py @@ -5,16 +5,9 @@ from textual.widgets import Header, Footer, Static, Button, DataTable, Input from textual.containers import Grid from textual.widgets._data_table import CellDoesNotExist -from ffx.model.pattern import Pattern - from .pattern_details_screen import PatternDetailsScreen from .pattern_delete_screen import PatternDeleteScreen -from .show_controller import ShowController -from .pattern_controller import PatternController -from .tmdb_controller import TmdbController -from 
.shifted_season_controller import ShiftedSeasonController - from .show_descriptor import ShowDescriptor from .shifted_season_details_screen import ShiftedSeasonDetailsScreen @@ -23,6 +16,7 @@ from .shifted_season_delete_screen import ShiftedSeasonDeleteScreen from ffx.model.shifted_season import ShiftedSeason from .helper import filterFilename +from .screen_support import build_screen_bootstrap, build_screen_controllers # Screen[dict[int, str, int]] @@ -94,31 +88,24 @@ class ShowDetailsScreen(Screen): def __init__(self, showId = None): super().__init__() - self.context = self.app.getContext() - self.Session = self.context['database']['session'] # convenience - - self.__sc = ShowController(context = self.context) - self.__pc = PatternController(context = self.context) - self.__tc = TmdbController() - self.__ssc = ShiftedSeasonController(context = self.context) + bootstrap = build_screen_bootstrap(self.app.getContext()) + self.context = bootstrap.context + + controllers = build_screen_controllers( + self.context, + pattern=True, + show=True, + tmdb=True, + shifted_season=True, + ) + self.__sc = controllers['show'] + self.__pc = controllers['pattern'] + self.__tc = controllers['tmdb'] + self.__ssc = controllers['shifted_season'] self.__showDescriptor = self.__sc.getShowDescriptor(showId) if showId is not None else None - def loadPatterns(self, show_id : int): - - try: - s = self.Session() - q = s.query(Pattern).filter(Pattern.show_id == int(show_id)) - - return [{'id': int(p.id), 'pattern': str(p.pattern)} for p in q.all()] - - except Exception as ex: - raise click.ClickException(f"ShowDetailsScreen.loadPatterns(): {repr(ex)}") - finally: - s.close() - - def updateShiftedSeasons(self): @@ -166,10 +153,8 @@ class ShowDetailsScreen(Screen): #raise click.ClickException(f"show_id {showId}") - patternList = self.loadPatterns(showId) - # raise click.ClickException(f"patternList {patternList}") - for pattern in patternList: - row = (pattern['pattern'],) + for pattern in 
self.__pc.getPatternsForShow(showId): + row = (pattern.getPattern(),) self.patternTable.add_row(*map(str, row)) self.updateShiftedSeasons() @@ -489,4 +474,4 @@ class ShowDetailsScreen(Screen): self.updateShiftedSeasons() def handle_delete_shifted_season(self, screenResult): - self.updateShiftedSeasons() \ No newline at end of file + self.updateShiftedSeasons() diff --git a/tests/unit/test_cli_lazy_imports.py b/tests/unit/test_cli_lazy_imports.py index 36b6553..c535cb7 100644 --- a/tests/unit/test_cli_lazy_imports.py +++ b/tests/unit/test_cli_lazy_imports.py @@ -99,6 +99,43 @@ class CliLazyImportTests(unittest.TestCase): result["modules"], ) + def test_lightweight_setup_command_stays_light(self): + result = self.run_python( + textwrap.dedent( + f""" + import json + import sys + from click.testing import CliRunner + + sys.path.insert(0, {str(SRC_ROOT)!r}) + + import ffx.cli + + runner = CliRunner() + invoke_result = runner.invoke( + ffx.cli.ffx, + ["--dry-run", "setup", "--check", "--with-tests"], + ) + if invoke_result.exit_code != 0: + raise SystemExit(invoke_result.output) + + print(json.dumps({{ + "output": invoke_result.output, + "modules": {{ + module_name: module_name in sys.modules + for module_name in {HEAVY_MODULES!r} + }}, + }})) + """ + ) + ) + + self.assertIn("tools/setup.sh --check --with-tests", result["output"]) + self.assertTrue( + all(not is_loaded for is_loaded in result["modules"].values()), + result["modules"], + ) + def test_convert_help_describes_absolute_and_percent_cpu_limits(self): result = self.run_python( textwrap.dedent( diff --git a/tests/unit/test_file_properties_probe.py b/tests/unit/test_file_properties_probe.py index 6961d0a..d99012b 100644 --- a/tests/unit/test_file_properties_probe.py +++ b/tests/unit/test_file_properties_probe.py @@ -4,6 +4,7 @@ import json import logging from pathlib import Path import sys +from types import SimpleNamespace import unittest from unittest.mock import patch @@ -106,6 +107,69 @@ class 
FilePropertiesProbeTests(unittest.TestCase): + ["/tmp/example_s01e01.mkv"] ) + def test_cropdetect_uses_configured_window_and_caches_results(self): + file_properties_module = self.import_module() + file_properties_module.FileProperties._clear_cropdetect_cache() + + cropdetect_stderr = "\n".join( + [ + "[Parsed_cropdetect_0] crop=1440:1080:240:0", + "[Parsed_cropdetect_0] crop=1440:1080:240:0", + "[Parsed_cropdetect_0] crop=1438:1080:242:0", + ] + ) + context = self.make_context() + context["cropdetect"] = {"seek_seconds": 15, "duration_seconds": 45} + + with ( + patch.object( + file_properties_module.os, + "stat", + return_value=SimpleNamespace(st_mtime_ns=1234, st_size=5678), + ), + patch.object(file_properties_module, "PatternController", DummyPatternController), + patch.object( + file_properties_module, + "executeProcess", + return_value=("", cropdetect_stderr, 0), + ) as mocked_execute, + ): + file_properties = file_properties_module.FileProperties( + context, + "/tmp/example_s01e01.mkv", + ) + + first = file_properties.findCropArguments() + second = file_properties.findCropArguments() + + self.assertEqual(first, second) + self.assertEqual( + { + "output_width": "1440", + "output_height": "1080", + "x_offset": "240", + "y_offset": "0", + }, + first, + ) + mocked_execute.assert_called_once_with( + list(file_properties_module.FFMPEG_COMMAND_TOKENS) + + [ + "-ss", + "15", + "-i", + "/tmp/example_s01e01.mkv", + "-t", + "45", + "-vf", + "cropdetect", + ] + + list(file_properties_module.FFMPEG_NULL_OUTPUT_TOKENS), + context=context, + ) + + file_properties_module.FileProperties._clear_cropdetect_cache() + if __name__ == "__main__": unittest.main() diff --git a/tests/unit/test_screen_support.py b/tests/unit/test_screen_support.py new file mode 100644 index 0000000..5bc8b3e --- /dev/null +++ b/tests/unit/test_screen_support.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +from pathlib import Path +import sys +import unittest +from unittest.mock import patch + 
+ +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx import screen_support # noqa: E402 + + +class StaticConfig: + def __init__(self, data): + self._data = data + + def getData(self): + return self._data + + +class ScreenSupportTests(unittest.TestCase): + def make_context(self): + return { + "config": StaticConfig( + { + "metadata": { + "signature": {"RECODED_WITH": "FFX"}, + "remove": ["VERSION-eng"], + "ignore": ["ENCODER"], + "streams": { + "remove": ["BPS"], + "ignore": ["language"], + }, + } + } + ), + "database": {"session": object()}, + } + + def test_build_screen_bootstrap_extracts_metadata_filters(self): + context = self.make_context() + + bootstrap = screen_support.build_screen_bootstrap(context) + + self.assertIs(context, bootstrap.context) + self.assertEqual({"RECODED_WITH": "FFX"}, bootstrap.signature_tags) + self.assertEqual(["VERSION-eng"], bootstrap.remove_global_keys) + self.assertEqual(["ENCODER"], bootstrap.ignore_global_keys) + self.assertEqual(["BPS"], bootstrap.remove_track_keys) + self.assertEqual(["language"], bootstrap.ignore_track_keys) + + def test_build_screen_controllers_only_creates_requested_instances(self): + context = self.make_context() + + with ( + patch.object(screen_support, "PatternController", side_effect=lambda context: ("pattern", context)), + patch.object(screen_support, "ShowController", side_effect=lambda context: ("show", context)), + patch.object(screen_support, "TmdbController", side_effect=lambda: "tmdb"), + patch.object(screen_support, "ShiftedSeasonController", side_effect=lambda context: ("shifted", context)), + ): + controllers = screen_support.build_screen_controllers( + context, + pattern=True, + show=True, + tmdb=True, + shifted_season=True, + ) + + self.assertEqual( + { + "pattern": ("pattern", context), + "show": ("show", context), + "tmdb": "tmdb", + "shifted_season": ("shifted", context), + }, + controllers, + ) + + 
+if __name__ == "__main__": + unittest.main() diff --git a/tools/configure_workstation.sh b/tools/configure_workstation.sh index 7d302bc..5f86948 100755 --- a/tools/configure_workstation.sh +++ b/tools/configure_workstation.sh @@ -51,6 +51,7 @@ Environment overrides: Notes: - tools/setup.sh is the first installation step and owns bundle venv setup. - This script is the second step and owns system dependencies plus local config. + - After the bundle is installed, the aligned CLI wrapper is: ffx configure_workstation - Python test packages are installed by tools/setup.sh --with-tests, not here. EOF } diff --git a/tools/setup.sh b/tools/setup.sh index 8cc378a..aaa89e5 100755 --- a/tools/setup.sh +++ b/tools/setup.sh @@ -50,6 +50,7 @@ Options: Notes: - This is the first installation step. + - After the bundle is installed, the aligned CLI wrapper is: ffx setup - tools/configure_workstation.sh is the second step and configures system dependencies plus local user files. EOF } From ebdc23c3ce44f6b75ee0cd08cc1630e9d109767a Mon Sep 17 00:00:00 2001 From: Javanaut Date: Sat, 11 Apr 2026 17:31:10 +0200 Subject: [PATCH 15/28] Fixes remove stream tags per list --- src/ffx/media_descriptor_change_set.py | 9 +- .../subtrack_mapping/test_cli_bundle.py | 18 ++++ .../unit/test_media_descriptor_change_set.py | 84 +++++++++++++++++++ 3 files changed, 110 insertions(+), 1 deletion(-) create mode 100644 tests/unit/test_media_descriptor_change_set.py diff --git a/src/ffx/media_descriptor_change_set.py b/src/ffx/media_descriptor_change_set.py index cf98391..fdfaaf8 100644 --- a/src/ffx/media_descriptor_change_set.py +++ b/src/ffx/media_descriptor_change_set.py @@ -277,7 +277,14 @@ class MediaDescriptorChangeSet(): # media payload, keep metadata from the regular # source track unless the external/target side # overrides it explicitly. 
- preservedTrackTags = removedTrackTags | unchangedTrackTags + preservedTrackTags = ( + { + tagKey: tagValue + for tagKey, tagValue in removedTrackTags.items() + if tagKey not in self.__removeTrackKeys + } + | unchangedTrackTags + ) for tagKey, tagValue in preservedTrackTags.items(): metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}" + f":{trackDescriptor.getSubIndex()}", diff --git a/tests/integration/subtrack_mapping/test_cli_bundle.py b/tests/integration/subtrack_mapping/test_cli_bundle.py index 94a171f..3b26eea 100644 --- a/tests/integration/subtrack_mapping/test_cli_bundle.py +++ b/tests/integration/subtrack_mapping/test_cli_bundle.py @@ -1,5 +1,6 @@ from __future__ import annotations +import json from pathlib import Path import tempfile import unittest @@ -39,6 +40,11 @@ class SubtrackMappingBundleTests(unittest.TestCase): def tearDown(self): self.tempdir.cleanup() + def write_config(self, data: dict) -> None: + config_dir = self.home_dir / ".local" / "etc" + config_dir.mkdir(parents=True, exist_ok=True) + (config_dir / "ffx.json").write_text(json.dumps(data), encoding="utf-8") + def assertCompleted(self, completed): if completed.returncode != 0: self.fail( @@ -218,6 +224,15 @@ class SubtrackMappingBundleTests(unittest.TestCase): def test_external_subtitle_file_replaces_payload_and_overrides_metadata(self): source_filename = "substitute_s01e01.mkv" + self.write_config( + { + "metadata": { + "streams": { + "remove": ["BPS"], + } + } + } + ) source_path = create_source_fixture( self.workdir, source_filename, @@ -229,6 +244,7 @@ class SubtrackMappingBundleTests(unittest.TestCase): identity="embedded-subtitle", language="eng", title="Embedded Title", + extra_tags={"BPS": "remove-me", "EXTERNAL_KEEP": "keep-me"}, subtitle_lines=("embedded subtitle payload",), ), ], @@ -273,6 +289,8 @@ class SubtrackMappingBundleTests(unittest.TestCase): self.assertEqual(get_tag(subtitle_stream, "language"), "deu") self.assertEqual(get_tag(subtitle_stream, 
"title"), "Embedded Title") self.assertEqual(get_tag(subtitle_stream, "THIS_IS"), "embedded-subtitle") + self.assertEqual(get_tag(subtitle_stream, "EXTERNAL_KEEP"), "keep-me") + self.assertIsNone(get_tag(subtitle_stream, "BPS")) extracted_subtitle = extract_first_subtitle_text(self.workdir, output_path) self.assertIn("external subtitle payload", extracted_subtitle) diff --git a/tests/unit/test_media_descriptor_change_set.py b/tests/unit/test_media_descriptor_change_set.py new file mode 100644 index 0000000..5bc84c2 --- /dev/null +++ b/tests/unit/test_media_descriptor_change_set.py @@ -0,0 +1,84 @@ +from __future__ import annotations + +from pathlib import Path +import sys +import unittest + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx.media_descriptor import MediaDescriptor # noqa: E402 +from ffx.media_descriptor_change_set import MediaDescriptorChangeSet # noqa: E402 +from ffx.track_descriptor import TrackDescriptor # noqa: E402 +from ffx.track_type import TrackType # noqa: E402 +from ffx.logging_utils import get_ffx_logger # noqa: E402 + + +class StaticConfig: + def __init__(self, data: dict): + self._data = data + + def getData(self): + return self._data + + +class MediaDescriptorChangeSetTests(unittest.TestCase): + def test_external_subtitle_preserves_source_only_tags_except_removed_keys(self): + context = { + "logger": get_ffx_logger(), + "config": StaticConfig( + { + "metadata": { + "streams": { + "remove": ["BPS"], + } + } + } + ), + } + + source_track = TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.SUBTITLE, + tags={ + "language": "eng", + "title": "Embedded Title", + "THIS_IS": "embedded-subtitle", + "EXTERNAL_KEEP": "keep-me", + "BPS": "remove-me", + }, + ) + target_track = TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.SUBTITLE, + tags={"language": "deu"}, + 
external_source_file="/tmp/external-subtitle.vtt", + ) + + change_set = MediaDescriptorChangeSet( + context, + MediaDescriptor(track_descriptors=[target_track]), + MediaDescriptor(track_descriptors=[source_track]), + ) + + metadata_tokens = change_set.generateMetadataTokens() + + self.assertIn("-metadata:s:s:0", metadata_tokens) + self.assertIn("language=deu", metadata_tokens) + self.assertIn("title=Embedded Title", metadata_tokens) + self.assertIn("THIS_IS=embedded-subtitle", metadata_tokens) + self.assertIn("EXTERNAL_KEEP=keep-me", metadata_tokens) + self.assertNotIn("BPS=remove-me", metadata_tokens) + self.assertNotIn("BPS=", metadata_tokens) + + +if __name__ == "__main__": + unittest.main() From 0a41998e29a918436e21fb0f9c7590737b599a96 Mon Sep 17 00:00:00 2001 From: Javanaut Date: Sat, 11 Apr 2026 17:46:16 +0200 Subject: [PATCH 16/28] Adds Q/P values to output file metadata --- SCRATCHPAD.md | 2 + requirements/tests.md | 7 ++ src/ffx/ffx_controller.py | 17 +++ src/ffx/media_descriptor_change_set.py | 3 + tests/unit/test_ffx_controller.py | 139 +++++++++++++++++++++++++ 5 files changed, 168 insertions(+) create mode 100644 tests/unit/test_ffx_controller.py diff --git a/SCRATCHPAD.md b/SCRATCHPAD.md index 368f60f..f9931d0 100644 --- a/SCRATCHPAD.md +++ b/SCRATCHPAD.md @@ -74,6 +74,7 @@ - No explicit prioritization owner or milestone for the optimization backlog. - No benchmark or timing harness exists for startup, probe, DB, or conversion orchestration overhead. - Repo hygiene is still mixed with generated artifacts and some clearly unfinished files. +- The legacy TMDB-backed `Scenario 4` path is currently blocked by a pattern/track regression: `Patterns must define at least one track before they can be stored.` This surfaced while rerunning TMDB-dependent checks after the zero-track pattern hardening. ## Next @@ -81,6 +82,7 @@ 2. Tackle the cheapest remaining product-surface cleanup first: - placeholder UI surfaces and dead helper cleanup. 3. 
Continue replacing oversized legacy test matrices with focused modern integration and unit coverage. +4. Triage the legacy `Scenario 4` pattern/track failure and decide whether to fix the harness, adapt it to the zero-track guard, or retire that path during the ongoing test-suite migration. ## Delete When diff --git a/requirements/tests.md b/requirements/tests.md index 61c269c..9dbfd92 100644 --- a/requirements/tests.md +++ b/requirements/tests.md @@ -14,6 +14,13 @@ that area. - Agents shall not silently substitute `python`, `python3`, or another interpreter for Python-side test work. - If `~/.local/share/ffx.venv/bin/python` is missing or not executable, agents shall stop and report the missing venv instead of continuing with Python-side test execution. +## Shell Environment Requirement + +- Agents shall source `~/.bashrc` from an interactive Bash shell before running TMDB-dependent test commands or TMDB-dependent `python -m ffx ...` test invocations. +- Agents shall not source `~/.bashrc.d/interactive/77_tmdb.sh` directly for normal test work; `~/.bashrc` is the required entry point. +- In automation this means agents shall use an interactive Bash invocation such as `bash -ic 'source ~/.bashrc && ...'`, because a non-interactive `bash -lc` returns from `~/.bashrc` before the interactive fragments are loaded. +- If sourcing `~/.bashrc` still does not provide required shell environment such as `TMDB_API_KEY`, agents shall stop and report the missing environment instead of continuing with TMDB-dependent test execution. 
+ ## Current Harness - Entrypoint: `~/.local/share/ffx.venv/bin/python tests/legacy_runner.py run` diff --git a/src/ffx/ffx_controller.py b/src/ffx/ffx_controller.py index a4907ff..52ec099 100644 --- a/src/ffx/ffx_controller.py +++ b/src/ffx/ffx_controller.py @@ -171,6 +171,18 @@ class FfxController(): return [outputFilePath] + def generateEncodingMetadataTags(self, videoEncoder: VideoEncoder, quality, preset) -> dict: + metadataTags = {} + + if videoEncoder in (VideoEncoder.AV1, VideoEncoder.H264, VideoEncoder.VP9): + metadataTags["ENCODING_QUALITY"] = str(quality) + + if videoEncoder == VideoEncoder.AV1: + metadataTags["ENCODING_PRESET"] = str(preset) + + return metadataTags + + def generateAudioEncodingTokens(self): """Generates ffmpeg options audio streams including channel remapping, codec and bitrate""" @@ -261,6 +273,11 @@ class FfxController(): preset = presetFilters[0]['parameters']['preset'] if presetFilters else PresetFilter.DEFAULT_PRESET + self.__context['encoding_metadata_tags'] = self.generateEncodingMetadataTags( + videoEncoder, + quality, + preset, + ) filterParamTokens = [] diff --git a/src/ffx/media_descriptor_change_set.py b/src/ffx/media_descriptor_change_set.py index fdfaaf8..458259d 100644 --- a/src/ffx/media_descriptor_change_set.py +++ b/src/ffx/media_descriptor_change_set.py @@ -295,6 +295,9 @@ class MediaDescriptorChangeSet(): + f":{trackDescriptor.getSubIndex()}", f"{removeKey}="] + for tagKey, tagValue in self.__context.get('encoding_metadata_tags', {}).items(): + metadataTokens += [f"-metadata:g", f"{tagKey}={tagValue}"] + return metadataTokens diff --git a/tests/unit/test_ffx_controller.py b/tests/unit/test_ffx_controller.py new file mode 100644 index 0000000..197d818 --- /dev/null +++ b/tests/unit/test_ffx_controller.py @@ -0,0 +1,139 @@ +from __future__ import annotations + +from pathlib import Path +import sys +import unittest +from unittest.mock import patch + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if 
str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx.ffx_controller import FfxController # noqa: E402 +from ffx.logging_utils import get_ffx_logger # noqa: E402 +from ffx.media_descriptor import MediaDescriptor # noqa: E402 +from ffx.track_codec import TrackCodec # noqa: E402 +from ffx.track_descriptor import TrackDescriptor # noqa: E402 +from ffx.track_type import TrackType # noqa: E402 +from ffx.video_encoder import VideoEncoder # noqa: E402 + + +class StaticConfig: + def __init__(self, data: dict | None = None): + self._data = data or {} + + def getData(self): + return self._data + + +class FfxControllerTests(unittest.TestCase): + def make_context(self, video_encoder: VideoEncoder) -> dict: + return { + "logger": get_ffx_logger(), + "config": StaticConfig(), + "video_encoder": video_encoder, + "dry_run": False, + "perform_cut": False, + "bitrates": { + "stereo": "112k", + "ac3": "256k", + "dts": "320k", + }, + } + + def make_media_descriptors(self) -> tuple[MediaDescriptor, MediaDescriptor]: + descriptor = MediaDescriptor( + track_descriptors=[ + TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.VIDEO, + codec_name=TrackCodec.H264, + ) + ] + ) + source_descriptor = MediaDescriptor( + track_descriptors=[ + TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.VIDEO, + codec_name=TrackCodec.H264, + ) + ] + ) + return descriptor, source_descriptor + + def test_vp9_run_job_emits_file_level_encoding_quality_metadata(self): + context = self.make_context(VideoEncoder.VP9) + target_descriptor, source_descriptor = self.make_media_descriptors() + controller = FfxController(context, target_descriptor, source_descriptor) + commands = [] + + with ( + patch.object( + controller, + "executeCommandSequence", + side_effect=lambda command: commands.append(command) or ("", "", 0), + ), + patch("ffx.ffx_controller.os.path.exists", return_value=False), + ): + controller.runJob( + "input.mkv", 
+ "output.webm", + targetFormat="webm", + chainIteration=[ + { + "identifier": "quality", + "parameters": {"quality": 27}, + } + ], + ) + + self.assertEqual(2, len(commands)) + self.assertIn("-metadata:g", commands[1]) + self.assertIn("ENCODING_QUALITY=27", commands[1]) + self.assertFalse( + any(token.startswith("ENCODING_PRESET=") for token in commands[1]) + ) + + def test_av1_run_job_emits_file_level_quality_and_preset_metadata(self): + context = self.make_context(VideoEncoder.AV1) + target_descriptor, source_descriptor = self.make_media_descriptors() + controller = FfxController(context, target_descriptor, source_descriptor) + commands = [] + + with patch.object( + controller, + "executeCommandSequence", + side_effect=lambda command: commands.append(command) or ("", "", 0), + ): + controller.runJob( + "input.mkv", + "output.webm", + targetFormat="webm", + chainIteration=[ + { + "identifier": "quality", + "parameters": {"quality": 29}, + }, + { + "identifier": "preset", + "parameters": {"preset": 7}, + }, + ], + ) + + self.assertEqual(1, len(commands)) + self.assertIn("-metadata:g", commands[0]) + self.assertIn("ENCODING_QUALITY=29", commands[0]) + self.assertIn("ENCODING_PRESET=7", commands[0]) + + +if __name__ == "__main__": + unittest.main() From 5eee7e11618ae5cbf928a7d358ab962527423755 Mon Sep 17 00:00:00 2001 From: Javanaut Date: Sat, 11 Apr 2026 20:27:58 +0200 Subject: [PATCH 17/28] Extd cut parameter --- src/ffx/cli.py | 77 ++++++++++++++++++++++++++--- tests/unit/test_cli_cut_option.py | 64 ++++++++++++++++++++++++ tests/unit/test_cli_lazy_imports.py | 61 +++++++++++++++++++++++ 3 files changed, 195 insertions(+), 7 deletions(-) create mode 100644 tests/unit/test_cli_cut_option.py diff --git a/src/ffx/cli.py b/src/ffx/cli.py index 296fbfd..f5eb251 100755 --- a/src/ffx/cli.py +++ b/src/ffx/cli.py @@ -17,6 +17,8 @@ from ffx.constants import ( DEFAULT_AC3_BANDWIDTH, DEFAULT_CROPDETECT_DURATION_SECONDS, DEFAULT_CROPDETECT_SEEK_SECONDS, + DEFAULT_cut_length, + 
DEFAULT_cut_start, DEFAULT_CONTAINER_EXTENSION, DEFAULT_CONTAINER_FORMAT, DEFAULT_DTS_BANDWIDTH, @@ -45,6 +47,14 @@ CROPDETECT_DURATION_OPTION_HELP = ( "Analyze this many seconds for crop detection. " + "Shorter windows are faster; longer windows are usually steadier." ) +DEFAULT_CUT_OPTION_VALUE = f"{DEFAULT_cut_start},{DEFAULT_cut_length}" +CUT_OPTION_HELP = ( + "Cut output in seconds. " + + f"Use --cut for the default {DEFAULT_CUT_OPTION_VALUE}, " + + "--cut DURATION to cut from 0 for DURATION seconds, " + + "or --cut START,DURATION for an explicit start and duration. " + + "Omit to disable." +) def normalizeNicenessOption(ctx, param, value): @@ -65,6 +75,48 @@ def normalizeCpuOption(ctx, param, value): raise click.BadParameter(str(ex)) from ex +def parseCutOptionValue(value) -> tuple[int, int] | None: + if value is None: + return None + + cutValue = str(value).strip() + if not cutValue: + raise ValueError( + "Cut value must be DURATION or START,DURATION, or use --cut without a value." + ) + + cutTokens = [token.strip() for token in cutValue.split(',')] + + try: + if len(cutTokens) == 1: + cutStart = 0 + cutLength = int(cutTokens[0]) + elif len(cutTokens) == 2: + cutStart = int(cutTokens[0]) + cutLength = int(cutTokens[1]) + else: + raise ValueError + except ValueError as ex: + raise ValueError( + "Cut value must be DURATION or START,DURATION, or use --cut without a value." 
+ ) from ex + + if cutStart < 0: + raise ValueError("Cut start must be 0 or greater.") + + if cutLength <= 0: + raise ValueError("Cut duration must be greater than 0.") + + return cutStart, cutLength + + +def normalizeCutOption(ctx, param, value): + try: + return parseCutOptionValue(value) + except ValueError as ex: + raise click.BadParameter(str(ex)) from ex + + @click.group() @click.pass_context @@ -582,7 +634,16 @@ def checkUniqueDispositions(context, mediaDescriptor: MediaDescriptor): show_default=True, help='When --crop auto is used, analyze this many seconds for crop detection.', ) -@click.option("--cut", is_flag=False, flag_value="default", default="none") +@click.option( + "--cut", + type=str, + metavar="DURATION|START,DURATION", + is_flag=False, + flag_value=DEFAULT_CUT_OPTION_VALUE, + default=None, + callback=normalizeCutOption, + help=CUT_OPTION_HELP, +) @click.option("--output-directory", type=str, default='') @@ -823,13 +884,15 @@ def convert(ctx, #-> # Process cut parameters - context['perform_cut'] = (cut != 'none') + context['perform_cut'] = (cut is not None) if context['perform_cut']: - cutTokens = cut.split(',') - if cutTokens and len(cutTokens) == 2: - context['cut_start'] = int(cutTokens[0]) - context['cut_length'] = int(cutTokens[1]) - ctx.obj['logger'].debug(f"Cut start={context['cut_start']} length={context['cut_length']}") + context['cut_start'], context['cut_length'] = cut + click.echo( + f"Cutting enabled: start {context['cut_start']} s, duration {context['cut_length']} s." 
+ ) + ctx.obj['logger'].debug( + f"Cut start={context['cut_start']} length={context['cut_length']}" + ) tc = TmdbController() if context['use_tmdb'] else None diff --git a/tests/unit/test_cli_cut_option.py b/tests/unit/test_cli_cut_option.py new file mode 100644 index 0000000..11509f5 --- /dev/null +++ b/tests/unit/test_cli_cut_option.py @@ -0,0 +1,64 @@ +from __future__ import annotations + +import os +from pathlib import Path +import sys +import tempfile +import unittest + +from click.testing import CliRunner + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx import cli # noqa: E402 + + +class CutOptionCliTests(unittest.TestCase): + def invoke_convert(self, *args: str): + runner = CliRunner() + + with tempfile.TemporaryDirectory() as home_dir: + result = runner.invoke( + cli.ffx, + [ + "--database-file", + os.path.join(home_dir, "ffx.db"), + "--dry-run", + "convert", + "--no-tmdb", + *args, + ], + env={**os.environ, "HOME": home_dir}, + ) + + self.assertEqual(0, result.exit_code, result.output) + return result.output + + def test_convert_without_cut_prints_no_cut_message(self): + output = self.invoke_convert() + + self.assertNotIn("Cutting enabled:", output) + + def test_convert_with_cut_flag_prints_default_cut_message(self): + output = self.invoke_convert("--cut") + + self.assertIn("Cutting enabled: start 60 s, duration 180 s.", output) + + def test_convert_with_cut_duration_prints_zero_start_message(self): + output = self.invoke_convert("--cut", "45") + + self.assertIn("Cutting enabled: start 0 s, duration 45 s.", output) + + def test_convert_with_cut_start_and_duration_prints_both_values(self): + output = self.invoke_convert("--cut", "12,34") + + self.assertIn("Cutting enabled: start 12 s, duration 34 s.", output) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_cli_lazy_imports.py b/tests/unit/test_cli_lazy_imports.py index 
c535cb7..d55d630 100644 --- a/tests/unit/test_cli_lazy_imports.py +++ b/tests/unit/test_cli_lazy_imports.py @@ -168,6 +168,67 @@ class CliLazyImportTests(unittest.TestCase): result["modules"], ) + def test_convert_cut_option_supports_flag_duration_and_start_duration_forms(self): + result = self.run_python( + textwrap.dedent( + f""" + import click + import json + import sys + + sys.path.insert(0, {str(SRC_ROOT)!r}) + + import ffx.cli + + flag_context = ffx.cli.convert.make_context( + "convert", + ["--cut"], + resilient_parsing=True, + ) + duration_context = ffx.cli.convert.make_context( + "convert", + ["--cut", "12"], + resilient_parsing=True, + ) + explicit_context = ffx.cli.convert.make_context( + "convert", + ["--cut=12,34"], + resilient_parsing=True, + ) + disabled_context = ffx.cli.convert.make_context( + "convert", + [], + resilient_parsing=True, + ) + help_output = ffx.cli.convert.get_help(click.Context(ffx.cli.convert)) + + print(json.dumps({{ + "flag_cut": flag_context.params["cut"], + "duration_cut": duration_context.params["cut"], + "explicit_cut": explicit_context.params["cut"], + "disabled_cut": disabled_context.params["cut"], + "output": help_output, + "modules": {{ + module_name: module_name in sys.modules + for module_name in {HEAVY_MODULES!r} + }}, + }})) + """ + ) + ) + + self.assertEqual([60, 180], result["flag_cut"]) + self.assertEqual([0, 12], result["duration_cut"]) + self.assertEqual([12, 34], result["explicit_cut"]) + self.assertIsNone(result["disabled_cut"]) + self.assertIn("--cut DURATION|START,DURATION", result["output"]) + self.assertIn("60,180", result["output"]) + self.assertIn("START,DURATION", result["output"]) + self.assertTrue( + all(not is_loaded for is_loaded in result["modules"].values()), + result["modules"], + ) + if __name__ == "__main__": unittest.main() From 9a980b5766d83ceab73c83a23b37b568b78d7924 Mon Sep 17 00:00:00 2001 From: Javanaut Date: Sat, 11 Apr 2026 20:50:09 +0200 Subject: [PATCH 18/28] Fix streamtags remove list 
--- src/ffx/media_descriptor_change_set.py | 17 ++++++ .../subtrack_mapping/test_cli_bundle.py | 58 +++++++++++++++++++ .../unit/test_media_descriptor_change_set.py | 43 +++++++++++++- 3 files changed, 117 insertions(+), 1 deletion(-) diff --git a/src/ffx/media_descriptor_change_set.py b/src/ffx/media_descriptor_change_set.py index 458259d..a5448bc 100644 --- a/src/ffx/media_descriptor_change_set.py +++ b/src/ffx/media_descriptor_change_set.py @@ -298,8 +298,25 @@ class MediaDescriptorChangeSet(): for tagKey, tagValue in self.__context.get('encoding_metadata_tags', {}).items(): metadataTokens += [f"-metadata:g", f"{tagKey}={tagValue}"] + metadataTokens += self.generateConfiguredRemovalMetadataTokens() + return metadataTokens def getChangeSetObj(self): return self.__changeSetObj + + def generateConfiguredRemovalMetadataTokens(self): + metadataTokens = [] + + for removeKey in self.__removeGlobalKeys: + metadataTokens += ["-metadata:g", f"{removeKey}="] + + for trackDescriptor in self.__targetTrackDescriptors: + for removeKey in self.__removeTrackKeys: + metadataTokens += [ + f"-metadata:s:{trackDescriptor.getType().indicator()}:{trackDescriptor.getSubIndex()}", + f"{removeKey}=", + ] + + return metadataTokens diff --git a/tests/integration/subtrack_mapping/test_cli_bundle.py b/tests/integration/subtrack_mapping/test_cli_bundle.py index 3b26eea..874062f 100644 --- a/tests/integration/subtrack_mapping/test_cli_bundle.py +++ b/tests/integration/subtrack_mapping/test_cli_bundle.py @@ -184,6 +184,64 @@ class SubtrackMappingBundleTests(unittest.TestCase): ["video-0", "audio-2", "audio-1", "subtitle-3"], ) + def test_no_pattern_stream_remove_list_clears_copied_stream_metadata(self): + source_filename = "remove_tags_s01e01.mkv" + self.write_config( + { + "metadata": { + "streams": { + "remove": ["BPS"], + } + } + } + ) + source_path = create_source_fixture( + self.workdir, + source_filename, + [ + SourceTrackSpec( + TrackType.VIDEO, + identity="video-0", + extra_tags={"BPS": 
"remove-me", "KEEP_ME": "video-keep"}, + ), + SourceTrackSpec( + TrackType.AUDIO, + identity="audio-1", + language="eng", + title="Main Audio", + extra_tags={"BPS": "remove-me", "KEEP_ME": "audio-keep"}, + ), + ], + ) + + completed = run_ffx_convert( + self.workdir, + self.home_dir, + self.database_path, + "--video-encoder", + "copy", + "--no-pattern", + "--no-tmdb", + "--no-prompt", + "--no-signature", + str(source_path), + ) + self.assertCompleted(completed) + + output_path = expected_output_path(self.workdir, source_filename) + streams = ffprobe_json(output_path)["streams"] + + self.assertEqual( + [stream["codec_type"] for stream in streams], + ["video", "audio"], + ) + self.assertEqual(get_tag(streams[0], "THIS_IS"), "video-0") + self.assertEqual(get_tag(streams[0], "KEEP_ME"), "video-keep") + self.assertIsNone(get_tag(streams[0], "BPS")) + self.assertEqual(get_tag(streams[1], "THIS_IS"), "audio-1") + self.assertEqual(get_tag(streams[1], "KEEP_ME"), "audio-keep") + self.assertIsNone(get_tag(streams[1], "BPS")) + def test_pattern_validation_fails_for_nonexistent_source_track_reference(self): source_filename = "invalid_s01e01.mkv" source_path = create_source_fixture( diff --git a/tests/unit/test_media_descriptor_change_set.py b/tests/unit/test_media_descriptor_change_set.py index 5bc84c2..bf9be09 100644 --- a/tests/unit/test_media_descriptor_change_set.py +++ b/tests/unit/test_media_descriptor_change_set.py @@ -77,7 +77,48 @@ class MediaDescriptorChangeSetTests(unittest.TestCase): self.assertIn("THIS_IS=embedded-subtitle", metadata_tokens) self.assertIn("EXTERNAL_KEEP=keep-me", metadata_tokens) self.assertNotIn("BPS=remove-me", metadata_tokens) - self.assertNotIn("BPS=", metadata_tokens) + self.assertIn("BPS=", metadata_tokens) + + def test_target_only_tracks_still_emit_remove_tokens_for_configured_stream_keys(self): + context = { + "logger": get_ffx_logger(), + "config": StaticConfig( + { + "metadata": { + "remove": ["creation_time"], + "streams": { + "remove": 
["BPS"], + } + } + } + ), + } + + target_track = TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.AUDIO, + tags={ + "language": "eng", + "title": "Main Audio", + "BPS": "remove-me", + "KEEP_ME": "keep-me", + }, + ) + + change_set = MediaDescriptorChangeSet( + context, + MediaDescriptor(tags={"creation_time": "remove-me"}, track_descriptors=[target_track]), + ) + + metadata_tokens = change_set.generateMetadataTokens() + + self.assertIn("-metadata:g", metadata_tokens) + self.assertIn("creation_time=", metadata_tokens) + self.assertIn("-metadata:s:a:0", metadata_tokens) + self.assertIn("BPS=", metadata_tokens) + self.assertIn("KEEP_ME=keep-me", metadata_tokens) if __name__ == "__main__": From 528915a235b200c1727de7d96a87959a5c9f7568 Mon Sep 17 00:00:00 2001 From: Javanaut Date: Sat, 11 Apr 2026 21:17:21 +0200 Subject: [PATCH 19/28] Adds subtitle default dir --- src/ffx/cli.py | 61 ++++++++++++-- src/ffx/configuration_controller.py | 9 +- .../subtrack_mapping/test_cli_bundle.py | 77 +++++++++++++++++ tests/unit/test_cli_subtitle_directory.py | 84 +++++++++++++++++++ tools/configure_workstation.sh | 24 ++++++ 5 files changed, 249 insertions(+), 6 deletions(-) create mode 100644 tests/unit/test_cli_subtitle_directory.py diff --git a/src/ffx/cli.py b/src/ffx/cli.py index f5eb251..4be3451 100755 --- a/src/ffx/cli.py +++ b/src/ffx/cli.py @@ -39,6 +39,14 @@ CPU_OPTION_HELP = ( + "(about 2 cores), or use a percentage such as 25% for a share of present cores. " + "Omit to disable; 0 also disables." ) +SUBTITLE_DIRECTORY_OPTION_HELP = ( + "Load subtitles from here. When omitted and --subtitle-prefix is set, " + + "FFX uses the configured subtitlesDirectory base path plus the prefix as a subdirectory." +) +SUBTITLE_PREFIX_OPTION_HELP = ( + "Subtitle filename prefix. Requires --subtitle-directory, or a configured " + + "subtitlesDirectory base path that contains a matching / subdirectory." 
+) CROPDETECT_SEEK_OPTION_HELP = ( "Start crop detection this many seconds into the input. " + "Useful for skipping logos, intros, or black frames." @@ -117,6 +125,41 @@ def normalizeCutOption(ctx, param, value): raise click.BadParameter(str(ex)) from ex +def resolveSubtitleImportOptions(context, subtitleDirectory, subtitlePrefix): + resolvedSubtitlePrefix = str(subtitlePrefix).strip() + resolvedSubtitleDirectory = ( + os.path.expanduser(str(subtitleDirectory).strip()) + if subtitleDirectory + else '' + ) + + if not resolvedSubtitlePrefix: + return False, resolvedSubtitleDirectory, resolvedSubtitlePrefix + + if resolvedSubtitleDirectory: + return True, resolvedSubtitleDirectory, resolvedSubtitlePrefix + + configuredSubtitlesBaseDirectory = context['config'].getSubtitlesDirectoryPath() + if not configuredSubtitlesBaseDirectory: + raise click.ClickException( + "Subtitle prefix was set but no --subtitle-directory was provided and " + + "no subtitlesDirectory default is configured in ffx.json." 
+ ) + + resolvedSubtitleDirectory = os.path.join( + configuredSubtitlesBaseDirectory, + resolvedSubtitlePrefix, + ) + + if not os.path.isdir(resolvedSubtitleDirectory): + raise click.ClickException( + "Subtitle prefix was set but the resolved subtitle directory does not exist: " + + resolvedSubtitleDirectory + ) + + return True, resolvedSubtitleDirectory, resolvedSubtitlePrefix + + @click.group() @click.pass_context @@ -604,8 +647,8 @@ def checkUniqueDispositions(context, mediaDescriptor: MediaDescriptor): @click.option('--ac3', type=int, default=DEFAULT_AC3_BANDWIDTH, help=f"Bitrate in kbit/s to be used to encode 5.1 audio streams", show_default=True) @click.option('--dts', type=int, default=DEFAULT_DTS_BANDWIDTH, help=f"Bitrate in kbit/s to be used to encode 6.1 audio streams", show_default=True) -@click.option('--subtitle-directory', type=str, default='', help='Load subtitles from here') -@click.option('--subtitle-prefix', type=str, default='', help='Subtitle filename prefix') +@click.option('--subtitle-directory', type=str, default='', help=SUBTITLE_DIRECTORY_OPTION_HELP) +@click.option('--subtitle-prefix', type=str, default='', help=SUBTITLE_PREFIX_OPTION_HELP) @click.option('--language', type=str, multiple=True, help='Set stream language. Use format :<3 letter iso code>') @click.option('--title', type=str, multiple=True, help='Set stream title. 
Use format :') @@ -797,10 +840,18 @@ def convert(ctx, } - context['import_subtitles'] = (subtitle_directory and subtitle_prefix) + ( + context['import_subtitles'], + resolvedSubtitleDirectory, + resolvedSubtitlePrefix, + ) = resolveSubtitleImportOptions( + context, + subtitle_directory, + subtitle_prefix, + ) if context['import_subtitles']: - context['subtitle_directory'] = subtitle_directory - context['subtitle_prefix'] = subtitle_prefix + context['subtitle_directory'] = resolvedSubtitleDirectory + context['subtitle_prefix'] = resolvedSubtitlePrefix existingSourcePaths = [p for p in paths if os.path.isfile(p) and p.split('.')[-1] in SUPPORTED_INPUT_FILE_EXTENSIONS] diff --git a/src/ffx/configuration_controller.py b/src/ffx/configuration_controller.py index 74cc960..8f907a3 100644 --- a/src/ffx/configuration_controller.py +++ b/src/ffx/configuration_controller.py @@ -8,6 +8,7 @@ class ConfigurationController(): DATABASE_PATH_CONFIG_KEY = 'databasePath' LOG_DIRECTORY_CONFIG_KEY = 'logDirectory' + SUBTITLES_DIRECTORY_CONFIG_KEY = 'subtitlesDirectory' OUTPUT_FILENAME_TEMPLATE_KEY = 'outputFilenameTemplate' @@ -49,6 +50,12 @@ class ConfigurationController(): def getDatabaseFilePath(self): return self.__databaseFilePath + def getSubtitlesDirectoryPath(self): + subtitlesDirectory = self.__configurationData.get( + ConfigurationController.SUBTITLES_DIRECTORY_CONFIG_KEY, + '', + ) + return os.path.expanduser(str(subtitlesDirectory)) if subtitlesDirectory else '' def getData(self): return self.__configurationData @@ -139,4 +146,4 @@ class ConfigurationController(): # raise click.ClickException(f"PatternController.getPattern(): {repr(ex)}") # finally: # s.close() -# \ No newline at end of file +# diff --git a/tests/integration/subtrack_mapping/test_cli_bundle.py b/tests/integration/subtrack_mapping/test_cli_bundle.py index 874062f..11e0a30 100644 --- a/tests/integration/subtrack_mapping/test_cli_bundle.py +++ b/tests/integration/subtrack_mapping/test_cli_bundle.py @@ -354,6 
+354,83 @@ class SubtrackMappingBundleTests(unittest.TestCase): self.assertIn("external subtitle payload", extracted_subtitle) self.assertNotIn("embedded subtitle payload", extracted_subtitle) + def test_subtitle_prefix_uses_configured_base_directory_when_directory_is_omitted(self): + source_filename = "substitute_default_s01e01.mkv" + subtitle_prefix = "substitute_default" + subtitles_base_dir = self.home_dir / ".local" / "var" / "sync" / "subtitles" + resolved_subtitle_dir = subtitles_base_dir / subtitle_prefix + resolved_subtitle_dir.mkdir(parents=True, exist_ok=True) + self.write_config( + { + "subtitlesDirectory": "~/.local/var/sync/subtitles", + "metadata": { + "streams": { + "remove": ["BPS"], + } + } + } + ) + source_path = create_source_fixture( + self.workdir, + source_filename, + [ + SourceTrackSpec(TrackType.VIDEO, identity="video-0"), + SourceTrackSpec(TrackType.AUDIO, identity="audio-1", language="eng", title="Main Audio"), + SourceTrackSpec( + TrackType.SUBTITLE, + identity="embedded-subtitle", + language="eng", + title="Embedded Title", + extra_tags={"BPS": "remove-me", "EXTERNAL_KEEP": "keep-me"}, + subtitle_lines=("embedded subtitle payload",), + ), + ], + ) + + write_vtt( + resolved_subtitle_dir / f"{subtitle_prefix}_s01e01_2_deu.vtt", + ("external subtitle payload",), + ) + + prepare_pattern_database( + self.database_path, + r"^substitute_default_(s[0-9]+e[0-9]+)\.mkv$", + [ + PatternTrackSpec(index=0, source_index=0, track_type=TrackType.VIDEO), + PatternTrackSpec(index=1, source_index=1, track_type=TrackType.AUDIO), + PatternTrackSpec(index=2, source_index=2, track_type=TrackType.SUBTITLE), + ], + ) + + completed = run_ffx_convert( + self.workdir, + self.home_dir, + self.database_path, + "--video-encoder", + "copy", + "--no-tmdb", + "--no-prompt", + "--no-signature", + "--subtitle-prefix", + subtitle_prefix, + str(source_path), + ) + self.assertCompleted(completed) + + output_path = expected_output_path(self.workdir, source_filename) + streams 
= ffprobe_json(output_path)["streams"] + subtitle_stream = [stream for stream in streams if stream["codec_type"] == "subtitle"][0] + + self.assertEqual(get_tag(subtitle_stream, "language"), "deu") + self.assertEqual(get_tag(subtitle_stream, "title"), "Embedded Title") + self.assertEqual(get_tag(subtitle_stream, "THIS_IS"), "embedded-subtitle") + self.assertEqual(get_tag(subtitle_stream, "EXTERNAL_KEEP"), "keep-me") + self.assertIsNone(get_tag(subtitle_stream, "BPS")) + + extracted_subtitle = extract_first_subtitle_text(self.workdir, output_path) + self.assertIn("external subtitle payload", extracted_subtitle) + self.assertNotIn("embedded subtitle payload", extracted_subtitle) + if __name__ == "__main__": unittest.main() diff --git a/tests/unit/test_cli_subtitle_directory.py b/tests/unit/test_cli_subtitle_directory.py new file mode 100644 index 0000000..d0ef29c --- /dev/null +++ b/tests/unit/test_cli_subtitle_directory.py @@ -0,0 +1,84 @@ +from __future__ import annotations + +import json +import os +from pathlib import Path +import sys +import tempfile +import unittest + +from click.testing import CliRunner + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx import cli # noqa: E402 + + +class SubtitleDirectoryCliTests(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + self.home_dir = Path(self.tempdir.name) / "home" + self.home_dir.mkdir() + self.database_path = Path(self.tempdir.name) / "test.db" + + def tearDown(self): + self.tempdir.cleanup() + + def write_config(self, data: dict) -> None: + config_dir = self.home_dir / ".local" / "etc" + config_dir.mkdir(parents=True, exist_ok=True) + (config_dir / "ffx.json").write_text(json.dumps(data), encoding="utf-8") + + def invoke_convert(self, *args: str): + runner = CliRunner() + return runner.invoke( + cli.ffx, + [ + "--database-file", + str(self.database_path), + "convert", + 
"--no-tmdb", + *args, + ], + env={**os.environ, "HOME": str(self.home_dir)}, + ) + + def test_subtitle_prefix_without_directory_or_default_fails(self): + result = self.invoke_convert("--subtitle-prefix", "dball") + + self.assertNotEqual(0, result.exit_code) + self.assertIn("no --subtitle-directory was provided", result.output) + self.assertIn("no subtitlesDirectory default is configured", result.output) + + def test_subtitle_prefix_without_directory_fails_when_configured_subdir_is_missing(self): + subtitles_base_dir = self.home_dir / ".local" / "var" / "sync" / "subtitles" + subtitles_base_dir.mkdir(parents=True, exist_ok=True) + self.write_config({"subtitlesDirectory": "~/.local/var/sync/subtitles"}) + + result = self.invoke_convert("--subtitle-prefix", "dball") + + self.assertNotEqual(0, result.exit_code) + self.assertIn("resolved subtitle directory does not exist", result.output) + self.assertIn(str(subtitles_base_dir / "dball"), result.output) + + def test_explicit_subtitle_directory_wins_over_missing_default(self): + explicit_subtitle_directory = self.home_dir / "manual-subtitles" + explicit_subtitle_directory.mkdir(parents=True, exist_ok=True) + + result = self.invoke_convert( + "--subtitle-directory", + str(explicit_subtitle_directory), + "--subtitle-prefix", + "dball", + ) + + self.assertEqual(0, result.exit_code, result.output) + + +if __name__ == "__main__": + unittest.main() diff --git a/tools/configure_workstation.sh b/tools/configure_workstation.sh index 5f86948..5c2112f 100755 --- a/tools/configure_workstation.sh +++ b/tools/configure_workstation.sh @@ -7,6 +7,7 @@ CONFIG_FILE="${FFX_CONFIG_FILE:-${CONFIG_DIR}/ffx.json}" VAR_DIR="${FFX_VAR_DIR:-${HOME}/.local/var/ffx}" LOG_DIR="${FFX_LOG_DIR:-${HOME}/.local/var/log}" DATABASE_FILE="${FFX_DATABASE_FILE:-${VAR_DIR}/ffx.db}" +SUBTITLES_BASE_DIR="${FFX_SUBTITLES_BASE_DIR:-${HOME}/.local/var/sync/subtitles}" CHECK_ONLY=0 WITH_TESTS=0 @@ -47,6 +48,7 @@ Environment overrides: FFX_VAR_DIR Override the default 
data directory. FFX_LOG_DIR Override the default log directory. FFX_DATABASE_FILE Override the database path written into a newly seeded config. + FFX_SUBTITLES_BASE_DIR Override the default subtitles base directory written into a newly seeded config. Notes: - tools/setup.sh is the first installation step and owns bundle venv setup. @@ -142,6 +144,13 @@ component_detail() { printf 'missing; prep can create it' fi ;; + subtitles-base-dir) + if check_seeded_dir "${SUBTITLES_BASE_DIR}"; then + printf '%s' "${SUBTITLES_BASE_DIR}" + else + printf 'missing; prep can create it' + fi + ;; ffx-config) if check_seeded_file "${CONFIG_FILE}"; then printf '%s' "${CONFIG_FILE}" @@ -195,6 +204,9 @@ report_seeded_component() { log-dir) check_seeded_dir "${LOG_DIR}" || ok=0 ;; + subtitles-base-dir) + check_seeded_dir "${SUBTITLES_BASE_DIR}" || ok=0 + ;; ffx-config) check_seeded_file "${CONFIG_FILE}" || ok=0 ;; @@ -231,6 +243,7 @@ print_seeded_file_status() { report_seeded_component "Config dir" "config-dir" "optional" report_seeded_component "Var dir" "var-dir" "optional" report_seeded_component "Log dir" "log-dir" "optional" + report_seeded_component "Subtitles base dir" "subtitles-base-dir" "optional" report_seeded_component "ffx config" "ffx-config" "optional" } @@ -340,12 +353,23 @@ seed_default_config() { created_any=1 fi + if [ ! -d "${SUBTITLES_BASE_DIR}" ]; then + printf 'Creating subtitles base dir at %s...\n' "${SUBTITLES_BASE_DIR}" + if ! mkdir -p "${SUBTITLES_BASE_DIR}"; then + printf 'Failed to create subtitles base dir at %s.\n' "${SUBTITLES_BASE_DIR}" >&2 + INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) + return 1 + fi + created_any=1 + fi + if [ ! -f "${CONFIG_FILE}" ]; then printf 'Seeding ffx config at %s...\n' "${CONFIG_FILE}" if ! 
cat >"${CONFIG_FILE}" <<EOF { "databasePath": "${DATABASE_FILE}", "logDirectory": "${LOG_DIR}", + "subtitlesDirectory": "${SUBTITLES_BASE_DIR}", "metadata": { "signature": { "RECODED_WITH": "FFX" From 4365e083dcc13cacb2029583b343fed75811773c Mon Sep 17 00:00:00 2001 From: Javanaut <javanaut@refulgent.de> Date: Sat, 11 Apr 2026 22:31:04 +0200 Subject: [PATCH 20/28] Adapt unmux command to changes in convert command --- src/ffx/cli.py | 36 +++++- tests/integration/test_cli_unmux.py | 106 ++++++++++++++++++ tests/unit/test_cli_unmux_output_directory.py | 94 ++++++++++++++++ 3 files changed, 235 insertions(+), 1 deletion(-) create mode 100644 tests/integration/test_cli_unmux.py create mode 100644 tests/unit/test_cli_unmux_output_directory.py diff --git a/src/ffx/cli.py b/src/ffx/cli.py index 4be3451..5507bef 100755 --- a/src/ffx/cli.py +++ b/src/ffx/cli.py @@ -47,6 +47,10 @@ SUBTITLE_PREFIX_OPTION_HELP = ( "Subtitle filename prefix. Requires --subtitle-directory, or a configured " + "subtitlesDirectory base path that contains a matching <prefix>/ subdirectory." ) +UNMUX_OUTPUT_DIRECTORY_OPTION_HELP = ( + "Write extracted streams here. When omitted together with --subtitles-only and " + + "--label, FFX uses the configured subtitlesDirectory base path plus the label." +) CROPDETECT_SEEK_OPTION_HELP = ( "Start crop detection this many seconds into the input. " + "Useful for skipping logos, intros, or black frames." 
@@ -160,6 +164,27 @@ def resolveSubtitleImportOptions(context, subtitleDirectory, subtitlePrefix): return True, resolvedSubtitleDirectory, resolvedSubtitlePrefix +def resolveUnmuxOutputDirectory(context, outputDirectory, subtitlesOnly, label): + resolvedOutputDirectory = ( + os.path.expanduser(str(outputDirectory).strip()) + if outputDirectory + else '' + ) + resolvedLabel = str(label).strip() + + if resolvedOutputDirectory or not subtitlesOnly or not resolvedLabel: + return resolvedOutputDirectory, False + + configuredSubtitlesBaseDirectory = context['config'].getSubtitlesDirectoryPath() + if not configuredSubtitlesBaseDirectory: + raise click.ClickException( + "Subtitles-only unmux with --label requires --output-directory or a configured " + + "subtitlesDirectory default in ffx.json." + ) + + return os.path.join(configuredSubtitlesBaseDirectory, resolvedLabel), True + + @click.group() @click.pass_context @@ -416,7 +441,7 @@ def getUnmuxSequence(trackDescriptor: TrackDescriptor, sourcePath, targetPrefix, @click.argument('paths', nargs=-1) @click.option('-l', '--label', type=str, default='', help='Label to be used as filename prefix') -@click.option("-o", "--output-directory", type=str, default='') +@click.option("-o", "--output-directory", type=str, default='', help=UNMUX_OUTPUT_DIRECTORY_OPTION_HELP) @click.option("-s", "--subtitles-only", is_flag=True, default=False) @click.option( '--nice', @@ -454,6 +479,15 @@ def unmux(ctx, ctx.obj['resource_limits']['cpu_limit'] = cpu ctx.obj['resource_limits']['cpu_percent'] = cpu + output_directory, create_output_directory = resolveUnmuxOutputDirectory( + ctx.obj, + output_directory, + subtitles_only, + label, + ) + if create_output_directory and existingSourcePaths and not ctx.obj.get('dry_run', False): + os.makedirs(output_directory, exist_ok=True) + for sourcePath in existingSourcePaths: fp = FileProperties(ctx.obj, sourcePath) diff --git a/tests/integration/test_cli_unmux.py b/tests/integration/test_cli_unmux.py new 
file mode 100644 index 0000000..a47241a --- /dev/null +++ b/tests/integration/test_cli_unmux.py @@ -0,0 +1,106 @@ +from __future__ import annotations + +import json +import os +from pathlib import Path +import subprocess +import sys +import tempfile +import unittest + +from tests.support.ffx_bundle import SourceTrackSpec, create_source_fixture + +from ffx.track_type import TrackType + +try: + import pytest +except ImportError: # pragma: no cover - unittest-only environments + pytest = None + +if pytest is not None: + pytestmark = [pytest.mark.integration] + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + + +def run_ffx_unmux(workdir: Path, home_dir: Path, database_path: Path, *args: str) -> subprocess.CompletedProcess[str]: + env = os.environ.copy() + env["HOME"] = str(home_dir) + existing_pythonpath = env.get("PYTHONPATH", "") + env["PYTHONPATH"] = str(SRC_ROOT) if not existing_pythonpath else f"{SRC_ROOT}{os.pathsep}{existing_pythonpath}" + + command = [ + sys.executable, + "-m", + "ffx", + "--database-file", + str(database_path), + "unmux", + *args, + ] + return subprocess.run(command, cwd=workdir, env=env, capture_output=True, text=True) + + +class UnmuxCliTests(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + self.workdir = Path(self.tempdir.name) + self.home_dir = self.workdir / "home" + self.home_dir.mkdir() + self.database_path = self.workdir / "test.db" + + def tearDown(self): + self.tempdir.cleanup() + + def write_config(self, data: dict) -> None: + config_dir = self.home_dir / ".local" / "etc" + config_dir.mkdir(parents=True, exist_ok=True) + (config_dir / "ffx.json").write_text(json.dumps(data), encoding="utf-8") + + def assertCompleted(self, completed): + if completed.returncode != 0: + self.fail( + "FFX unmux failed\n" + f"STDOUT:\n{completed.stdout}\n" + f"STDERR:\n{completed.stderr}" + ) + + def test_subtitles_only_without_output_directory_uses_configured_base_plus_label(self): + self.write_config( 
+ { + "subtitlesDirectory": "~/.local/var/sync/subtitles", + } + ) + source_filename = "unmux_s01e01.mkv" + source_path = create_source_fixture( + self.workdir, + source_filename, + [ + SourceTrackSpec(TrackType.VIDEO, identity="video-0"), + SourceTrackSpec( + TrackType.SUBTITLE, + identity="subtitle-1", + language="eng", + subtitle_lines=("subtitle payload",), + ), + ], + ) + + completed = run_ffx_unmux( + self.workdir, + self.home_dir, + self.database_path, + "--subtitles-only", + "--label", + "dball", + str(source_path), + ) + self.assertCompleted(completed) + + expected_directory = self.home_dir / ".local" / "var" / "sync" / "subtitles" / "dball" + self.assertTrue(expected_directory.is_dir(), expected_directory) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_cli_unmux_output_directory.py b/tests/unit/test_cli_unmux_output_directory.py new file mode 100644 index 0000000..f417fc6 --- /dev/null +++ b/tests/unit/test_cli_unmux_output_directory.py @@ -0,0 +1,94 @@ +from __future__ import annotations + +from pathlib import Path +import sys +import tempfile +import unittest + +import click + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx import cli # noqa: E402 + + +class StaticConfig: + def __init__(self, subtitles_directory: str = ""): + self._subtitles_directory = subtitles_directory + + def getSubtitlesDirectoryPath(self): + return self._subtitles_directory + + +class UnmuxOutputDirectoryTests(unittest.TestCase): + def test_subtitles_only_with_label_uses_configured_subtitles_base_directory(self): + with tempfile.TemporaryDirectory() as tempdir: + context = { + "config": StaticConfig(str(Path(tempdir) / "subtitles")), + } + + resolved_output_directory, should_create = cli.resolveUnmuxOutputDirectory( + context, + "", + True, + "dball", + ) + + self.assertEqual(str(Path(tempdir) / "subtitles" / "dball"), resolved_output_directory) + 
self.assertTrue(should_create) + + def test_explicit_output_directory_keeps_existing_behavior(self): + with tempfile.TemporaryDirectory() as tempdir: + context = { + "config": StaticConfig(str(Path(tempdir) / "subtitles")), + } + explicit_output_directory = str(Path(tempdir) / "manual") + + resolved_output_directory, should_create = cli.resolveUnmuxOutputDirectory( + context, + explicit_output_directory, + True, + "dball", + ) + + self.assertEqual(explicit_output_directory, resolved_output_directory) + self.assertFalse(should_create) + + def test_subtitles_only_without_label_keeps_existing_behavior(self): + context = { + "config": StaticConfig("/tmp/subtitles"), + } + + resolved_output_directory, should_create = cli.resolveUnmuxOutputDirectory( + context, + "", + True, + "", + ) + + self.assertEqual("", resolved_output_directory) + self.assertFalse(should_create) + + def test_subtitles_only_with_label_requires_configured_default_when_output_directory_is_missing(self): + context = { + "config": StaticConfig(""), + } + + with self.assertRaises(click.ClickException) as caught: + cli.resolveUnmuxOutputDirectory( + context, + "", + True, + "dball", + ) + + self.assertIn("subtitlesDirectory default", str(caught.exception)) + + +if __name__ == "__main__": + unittest.main() From 20bdfc0dd7bd8f54f315bf4bd611b7bfe2183d79 Mon Sep 17 00:00:00 2001 From: Javanaut <javanaut@refulgent.de> Date: Sun, 12 Apr 2026 10:06:01 +0200 Subject: [PATCH 21/28] Fix pri lang for rename mode --- requirements/project.md | 2 +- src/ffx/cli.py | 4 +- src/ffx/iso_language.py | 275 ++++++++++++++++++++--------- tests/unit/test_cli_rename_only.py | 125 +++++++++++++ tests/unit/test_iso_language.py | 41 +++++ 5 files changed, 361 insertions(+), 86 deletions(-) create mode 100644 tests/unit/test_cli_rename_only.py create mode 100644 tests/unit/test_iso_language.py diff --git a/requirements/project.md b/requirements/project.md index 8182705..dfcac9c 100644 --- a/requirements/project.md +++ 
b/requirements/project.md @@ -60,7 +60,7 @@ - optional crop detection and crop application, - optional deinterlacing and denoising, - optional subtitle import from external files, - - rename-only copy mode. + - rename-only move mode. - The system shall support optional TMDB lookups to resolve show names, years, and episode titles when a show ID, season, and episode are available. - The system shall generate output filenames from show metadata, season and episode indices, and episode names using the configured filename template. - The system shall allow CLI overrides for stream languages, stream titles, default and forced tracks, stream order, TMDB show and episode data, output directory, label prefix, and processing resource limits. diff --git a/src/ffx/cli.py b/src/ffx/cli.py index 5507bef..6703198 100755 --- a/src/ffx/cli.py +++ b/src/ffx/cli.py @@ -764,7 +764,7 @@ def checkUniqueDispositions(context, mediaDescriptor: MediaDescriptor): help=CPU_OPTION_HELP, ) -@click.option('--rename-only', is_flag=True, default=False, help='Only renaming, no recoding') +@click.option('--rename-only', is_flag=True, default=False, help='Only renaming and moving, no recoding') def convert(ctx, paths, @@ -1247,7 +1247,7 @@ def convert(ctx, if rename_only: - shutil.copyfile(sourcePath, targetPath) + shutil.move(sourcePath, targetPath) else: fc.runJob(sourcePath, targetPath, diff --git a/src/ffx/iso_language.py b/src/ffx/iso_language.py index cc01000..4dab399 100644 --- a/src/ffx/iso_language.py +++ b/src/ffx/iso_language.py @@ -1,85 +1,196 @@ from enum import Enum import difflib + class IsoLanguage(Enum): - AFRIKAANS = {"name": "Afrikaans", "iso639_1": "af", "iso639_2": ["afr"]} - ALBANIAN = {"name": "Albanian", "iso639_1": "sq", "iso639_2": ["alb"]} - ARABIC = {"name": "Arabic", "iso639_1": "ar", "iso639_2": ["ara"]} - ARMENIAN = {"name": "Armenian", "iso639_1": "hy", "iso639_2": ["arm"]} - AZERBAIJANI = {"name": "Azerbaijani", "iso639_1": "az", "iso639_2": ["aze"]} - BASQUE = 
{"name": "Basque", "iso639_1": "eu", "iso639_2": ["baq"]} - BELARUSIAN = {"name": "Belarusian", "iso639_1": "be", "iso639_2": ["bel"]} - BOKMAL = {"name": "Bokmål", "iso639_1": "nb", "iso639_2": ["nob"]} # Norwegian Bokmål - BULGARIAN = {"name": "Bulgarian", "iso639_1": "bg", "iso639_2": ["bul"]} - CATALAN = {"name": "Catalan", "iso639_1": "ca", "iso639_2": ["cat"]} - CHINESE = {"name": "Chinese", "iso639_1": "zh", "iso639_2": ["zho", "chi"]} - CROATIAN = {"name": "Croatian", "iso639_1": "hr", "iso639_2": ["hrv"]} - CZECH = {"name": "Czech", "iso639_1": "cs", "iso639_2": ["cze"]} - DANISH = {"name": "Danish", "iso639_1": "da", "iso639_2": ["dan"]} - DUTCH = {"name": "Dutch", "iso639_1": "nl", "iso639_2": ["nld", "dut"]} - ENGLISH = {"name": "English", "iso639_1": "en", "iso639_2": ["eng"]} - ESTONIAN = {"name": "Estonian", "iso639_1": "et", "iso639_2": ["est"]} - FILIPINO = {"name": "Filipino", "iso639_1": "tl", "iso639_2": ["fil"]} # Tagalog - FINNISH = {"name": "Finnish", "iso639_1": "fi", "iso639_2": ["fin"]} - FRENCH = {"name": "French", "iso639_1": "fr", "iso639_2": ["fra", "fre"]} - GALICIAN = {"name": "Galician", "iso639_1": "gl", "iso639_2": ["glg"]} - GEORGIAN = {"name": "Georgian", "iso639_1": "ka", "iso639_2": ["geo"]} - GERMAN = {"name": "German", "iso639_1": "de", "iso639_2": ["deu", "ger"]} - GREEK = {"name": "Greek", "iso639_1": "el", "iso639_2": ["gre"]} - HEBREW = {"name": "Hebrew", "iso639_1": "he", "iso639_2": ["heb"]} - HINDI = {"name": "Hindi", "iso639_1": "hi", "iso639_2": ["hin"]} - HUNGARIAN = {"name": "Hungarian", "iso639_1": "hu", "iso639_2": ["hun"]} - ICELANDIC = {"name": "Icelandic", "iso639_1": "is", "iso639_2": ["ice"]} - INDONESIAN = {"name": "Indonesian", "iso639_1": "id", "iso639_2": ["ind"]} - IRISH = {"name": "Irish", "iso639_1": "ga", "iso639_2": ["gle"]} - ITALIAN = {"name": "Italian", "iso639_1": "it", "iso639_2": ["ita"]} - JAPANESE = {"name": "Japanese", "iso639_1": "ja", "iso639_2": ["jpn"]} - KANNADA = {"name": "Kannada", 
"iso639_1": "kn", "iso639_2": ["kan"]} - KAZAKH = {"name": "Kazakh", "iso639_1": "kk", "iso639_2": ["kaz"]} - KOREAN = {"name": "Korean", "iso639_1": "ko", "iso639_2": ["kor"]} - LATIN = {"name": "Latin", "iso639_1": "la", "iso639_2": ["lat"]} - LATVIAN = {"name": "Latvian", "iso639_1": "lv", "iso639_2": ["lav"]} - LITHUANIAN = {"name": "Lithuanian", "iso639_1": "lt", "iso639_2": ["lit"]} - MACEDONIAN = {"name": "Macedonian", "iso639_1": "mk", "iso639_2": ["mac"]} - MALAY = {"name": "Malay", "iso639_1": "ms", "iso639_2": ["may"]} - MALAYALAM = {"name": "Malayalam", "iso639_1": "ml", "iso639_2": ["mal"]} - MALTESE = {"name": "Maltese", "iso639_1": "mt", "iso639_2": ["mlt"]} - NORWEGIAN = {"name": "Norwegian", "iso639_1": "no", "iso639_2": ["nor"]} - PERSIAN = {"name": "Persian", "iso639_1": "fa", "iso639_2": ["per"]} - POLISH = {"name": "Polish", "iso639_1": "pl", "iso639_2": ["pol"]} - PORTUGUESE = {"name": "Portuguese", "iso639_1": "pt", "iso639_2": ["por"]} - ROMANIAN = {"name": "Romanian", "iso639_1": "ro", "iso639_2": ["rum"]} - RUSSIAN = {"name": "Russian", "iso639_1": "ru", "iso639_2": ["rus"]} - NORTHERN_SAMI = {"name": "Northern Sami", "iso639_1": "se", "iso639_2": ["sme"]} - SAMOAN = {"name": "Samoan", "iso639_1": "sm", "iso639_2": ["smo"]} - SANGO = {"name": "Sango", "iso639_1": "sg", "iso639_2": ["sag"]} - SANSKRIT = {"name": "Sanskrit", "iso639_1": "sa", "iso639_2": ["san"]} - SARDINIAN = {"name": "Sardinian", "iso639_1": "sc", "iso639_2": ["srd"]} - SERBIAN = {"name": "Serbian", "iso639_1": "sr", "iso639_2": ["srp"]} - SHONA = {"name": "Shona", "iso639_1": "sn", "iso639_2": ["sna"]} - SINDHI = {"name": "Sindhi", "iso639_1": "sd", "iso639_2": ["snd"]} - SINHALA = {"name": "Sinhala", "iso639_1": "si", "iso639_2": ["sin"]} - SLOVAK = {"name": "Slovak", "iso639_1": "sk", "iso639_2": ["slk"]} - SLOVENIAN = {"name": "Slovenian", "iso639_1": "sl", "iso639_2": ["slv"]} - SOMALI = {"name": "Somali", "iso639_1": "so", "iso639_2": ["som"]} - SOUTHERN_SOTHO = 
{"name": "Southern Sotho", "iso639_1": "st", "iso639_2": ["sot"]} - SPANISH = {"name": "Spanish", "iso639_1": "es", "iso639_2": ["spa"]} - SUNDANESE = {"name": "Sundanese", "iso639_1": "su", "iso639_2": ["sun"]} - SWAHILI = {"name": "Swahili", "iso639_1": "sw", "iso639_2": ["swa"]} - SWATI = {"name": "Swati", "iso639_1": "ss", "iso639_2": ["ssw"]} - SWEDISH = {"name": "Swedish", "iso639_1": "sv", "iso639_2": ["swe"]} - TAGALOG = {"name": "Tagalog", "iso639_1": "tl", "iso639_2": ["tgl"]} - TAMIL = {"name": "Tamil", "iso639_1": "ta", "iso639_2": ["tam"]} - TELUGU = {"name": "Telugu", "iso639_1": "te", "iso639_2": ["tel"]} - THAI = {"name": "Thai", "iso639_1": "th", "iso639_2": ["tha"]} - TURKISH = {"name": "Turkish", "iso639_1": "tr", "iso639_2": ["tur"]} - UKRAINIAN = {"name": "Ukrainian", "iso639_1": "uk", "iso639_2": ["ukr"]} - URDU = {"name": "Urdu", "iso639_1": "ur", "iso639_2": ["urd"]} - VIETNAMESE = {"name": "Vietnamese", "iso639_1": "vi", "iso639_2":[ "vie"]} - WELSH = {"name": "Welsh", "iso639_1": "cy", "iso639_2": ["wel"]} + ABKHAZIAN = {"name": "Abkhazian", "iso639_1": "ab", "iso639_2": ["abk"]} + AFAR = {"name": "Afar", "iso639_1": "aa", "iso639_2": ["aar"]} + AFRIKAANS = {"name": "Afrikaans", "iso639_1": "af", "iso639_2": ["afr"]} + AKAN = {"name": "Akan", "iso639_1": "ak", "iso639_2": ["aka"]} + ALBANIAN = {"name": "Albanian", "iso639_1": "sq", "iso639_2": ["sqi", "alb"]} + AMHARIC = {"name": "Amharic", "iso639_1": "am", "iso639_2": ["amh"]} + ARABIC = {"name": "Arabic", "iso639_1": "ar", "iso639_2": ["ara"]} + ARAGONESE = {"name": "Aragonese", "iso639_1": "an", "iso639_2": ["arg"]} + ARMENIAN = {"name": "Armenian", "iso639_1": "hy", "iso639_2": ["hye", "arm"]} + ASSAMESE = {"name": "Assamese", "iso639_1": "as", "iso639_2": ["asm"]} + AVARIC = {"name": "Avaric", "iso639_1": "av", "iso639_2": ["ava"]} + AVESTAN = {"name": "Avestan", "iso639_1": "ae", "iso639_2": ["ave"]} + AYMARA = {"name": "Aymara", "iso639_1": "ay", "iso639_2": ["aym"]} + AZERBAIJANI 
= {"name": "Azerbaijani", "iso639_1": "az", "iso639_2": ["aze"]} + BAMBARA = {"name": "Bambara", "iso639_1": "bm", "iso639_2": ["bam"]} + BASHKIR = {"name": "Bashkir", "iso639_1": "ba", "iso639_2": ["bak"]} + BASQUE = {"name": "Basque", "iso639_1": "eu", "iso639_2": ["eus", "baq"]} + BELARUSIAN = {"name": "Belarusian", "iso639_1": "be", "iso639_2": ["bel"]} + BENGALI = {"name": "Bengali", "iso639_1": "bn", "iso639_2": ["ben"]} + BISLAMA = {"name": "Bislama", "iso639_1": "bi", "iso639_2": ["bis"]} + BOKMAL = {"name": "Bokmål", "iso639_1": "nb", "iso639_2": ["nob"]} + BOSNIAN = {"name": "Bosnian", "iso639_1": "bs", "iso639_2": ["bos"]} + BRETON = {"name": "Breton", "iso639_1": "br", "iso639_2": ["bre"]} + BULGARIAN = {"name": "Bulgarian", "iso639_1": "bg", "iso639_2": ["bul"]} + BURMESE = {"name": "Burmese", "iso639_1": "my", "iso639_2": ["mya", "bur"]} + CATALAN = {"name": "Catalan", "iso639_1": "ca", "iso639_2": ["cat"]} + CHAMORRO = {"name": "Chamorro", "iso639_1": "ch", "iso639_2": ["cha"]} + CHECHEN = {"name": "Chechen", "iso639_1": "ce", "iso639_2": ["che"]} + CHICHEWA = {"name": "Chichewa", "iso639_1": "ny", "iso639_2": ["nya"]} + CHINESE = {"name": "Chinese", "iso639_1": "zh", "iso639_2": ["zho", "chi"]} + CHURCH_SLAVIC = {"name": "Church Slavic", "iso639_1": "cu", "iso639_2": ["chu"]} + CHUVASH = {"name": "Chuvash", "iso639_1": "cv", "iso639_2": ["chv"]} + CORNISH = {"name": "Cornish", "iso639_1": "kw", "iso639_2": ["cor"]} + CORSICAN = {"name": "Corsican", "iso639_1": "co", "iso639_2": ["cos"]} + CREE = {"name": "Cree", "iso639_1": "cr", "iso639_2": ["cre"]} + CROATIAN = {"name": "Croatian", "iso639_1": "hr", "iso639_2": ["hrv"]} + CZECH = {"name": "Czech", "iso639_1": "cs", "iso639_2": ["ces", "cze"]} + DANISH = {"name": "Danish", "iso639_1": "da", "iso639_2": ["dan"]} + DIVEHI = {"name": "Divehi", "iso639_1": "dv", "iso639_2": ["div"]} + DUTCH = {"name": "Dutch", "iso639_1": "nl", "iso639_2": ["nld", "dut"]} + DZONGKHA = {"name": "Dzongkha", "iso639_1": 
"dz", "iso639_2": ["dzo"]} + ENGLISH = {"name": "English", "iso639_1": "en", "iso639_2": ["eng"]} + ESPERANTO = {"name": "Esperanto", "iso639_1": "eo", "iso639_2": ["epo"]} + ESTONIAN = {"name": "Estonian", "iso639_1": "et", "iso639_2": ["est"]} + EWE = {"name": "Ewe", "iso639_1": "ee", "iso639_2": ["ewe"]} + FAROESE = {"name": "Faroese", "iso639_1": "fo", "iso639_2": ["fao"]} + FIJIAN = {"name": "Fijian", "iso639_1": "fj", "iso639_2": ["fij"]} + FINNISH = {"name": "Finnish", "iso639_1": "fi", "iso639_2": ["fin"]} + FRENCH = {"name": "French", "iso639_1": "fr", "iso639_2": ["fra", "fre"]} + FULAH = {"name": "Fulah", "iso639_1": "ff", "iso639_2": ["ful"]} + GALICIAN = {"name": "Galician", "iso639_1": "gl", "iso639_2": ["glg"]} + GANDA = {"name": "Ganda", "iso639_1": "lg", "iso639_2": ["lug"]} + GEORGIAN = {"name": "Georgian", "iso639_1": "ka", "iso639_2": ["kat", "geo"]} + GERMAN = {"name": "German", "iso639_1": "de", "iso639_2": ["deu", "ger"]} + GREEK = {"name": "Greek", "iso639_1": "el", "iso639_2": ["ell", "gre"]} + GUARANI = {"name": "Guarani", "iso639_1": "gn", "iso639_2": ["grn"]} + GUJARATI = {"name": "Gujarati", "iso639_1": "gu", "iso639_2": ["guj"]} + HAITIAN = {"name": "Haitian", "iso639_1": "ht", "iso639_2": ["hat"]} + HAUSA = {"name": "Hausa", "iso639_1": "ha", "iso639_2": ["hau"]} + HEBREW = {"name": "Hebrew", "iso639_1": "he", "iso639_2": ["heb"]} + HERERO = {"name": "Herero", "iso639_1": "hz", "iso639_2": ["her"]} + HINDI = {"name": "Hindi", "iso639_1": "hi", "iso639_2": ["hin"]} + HIRI_MOTU = {"name": "Hiri Motu", "iso639_1": "ho", "iso639_2": ["hmo"]} + HUNGARIAN = {"name": "Hungarian", "iso639_1": "hu", "iso639_2": ["hun"]} + ICELANDIC = {"name": "Icelandic", "iso639_1": "is", "iso639_2": ["isl", "ice"]} + IDO = {"name": "Ido", "iso639_1": "io", "iso639_2": ["ido"]} + IGBO = {"name": "Igbo", "iso639_1": "ig", "iso639_2": ["ibo"]} + INDONESIAN = {"name": "Indonesian", "iso639_1": "id", "iso639_2": ["ind"]} + INTERLINGUA = {"name": "Interlingua", 
"iso639_1": "ia", "iso639_2": ["ina"]} + INTERLINGUE = {"name": "Interlingue", "iso639_1": "ie", "iso639_2": ["ile"]} + INUKTITUT = {"name": "Inuktitut", "iso639_1": "iu", "iso639_2": ["iku"]} + INUPIAQ = {"name": "Inupiaq", "iso639_1": "ik", "iso639_2": ["ipk"]} + IRISH = {"name": "Irish", "iso639_1": "ga", "iso639_2": ["gle"]} + ITALIAN = {"name": "Italian", "iso639_1": "it", "iso639_2": ["ita"]} + JAPANESE = {"name": "Japanese", "iso639_1": "ja", "iso639_2": ["jpn"]} + JAVANESE = {"name": "Javanese", "iso639_1": "jv", "iso639_2": ["jav"]} + KALAALLISUT = {"name": "Kalaallisut", "iso639_1": "kl", "iso639_2": ["kal"]} + KANNADA = {"name": "Kannada", "iso639_1": "kn", "iso639_2": ["kan"]} + KANURI = {"name": "Kanuri", "iso639_1": "kr", "iso639_2": ["kau"]} + KASHMIRI = {"name": "Kashmiri", "iso639_1": "ks", "iso639_2": ["kas"]} + KAZAKH = {"name": "Kazakh", "iso639_1": "kk", "iso639_2": ["kaz"]} + KHMER = {"name": "Khmer", "iso639_1": "km", "iso639_2": ["khm"]} + KIKUYU = {"name": "Kikuyu", "iso639_1": "ki", "iso639_2": ["kik"]} + KINYARWANDA = {"name": "Kinyarwanda", "iso639_1": "rw", "iso639_2": ["kin"]} + KIRGHIZ = {"name": "Kirghiz", "iso639_1": "ky", "iso639_2": ["kir"]} + KOMI = {"name": "Komi", "iso639_1": "kv", "iso639_2": ["kom"]} + KONGO = {"name": "Kongo", "iso639_1": "kg", "iso639_2": ["kon"]} + KOREAN = {"name": "Korean", "iso639_1": "ko", "iso639_2": ["kor"]} + KUANYAMA = {"name": "Kuanyama", "iso639_1": "kj", "iso639_2": ["kua"]} + KURDISH = {"name": "Kurdish", "iso639_1": "ku", "iso639_2": ["kur"]} + LAO = {"name": "Lao", "iso639_1": "lo", "iso639_2": ["lao"]} + LATIN = {"name": "Latin", "iso639_1": "la", "iso639_2": ["lat"]} + LATVIAN = {"name": "Latvian", "iso639_1": "lv", "iso639_2": ["lav"]} + LIMBURGAN = {"name": "Limburgan", "iso639_1": "li", "iso639_2": ["lim"]} + LINGALA = {"name": "Lingala", "iso639_1": "ln", "iso639_2": ["lin"]} + LITHUANIAN = {"name": "Lithuanian", "iso639_1": "lt", "iso639_2": ["lit"]} + LUBA_KATANGA = {"name": 
"Luba-Katanga", "iso639_1": "lu", "iso639_2": ["lub"]} + LUXEMBOURGISH = {"name": "Luxembourgish", "iso639_1": "lb", "iso639_2": ["ltz"]} + MACEDONIAN = {"name": "Macedonian", "iso639_1": "mk", "iso639_2": ["mkd", "mac"]} + MALAGASY = {"name": "Malagasy", "iso639_1": "mg", "iso639_2": ["mlg"]} + MALAY = {"name": "Malay", "iso639_1": "ms", "iso639_2": ["msa", "may"]} + MALAYALAM = {"name": "Malayalam", "iso639_1": "ml", "iso639_2": ["mal"]} + MALTESE = {"name": "Maltese", "iso639_1": "mt", "iso639_2": ["mlt"]} + MANX = {"name": "Manx", "iso639_1": "gv", "iso639_2": ["glv"]} + MAORI = {"name": "Maori", "iso639_1": "mi", "iso639_2": ["mri", "mao"]} + MARATHI = {"name": "Marathi", "iso639_1": "mr", "iso639_2": ["mar"]} + MARSHALLESE = {"name": "Marshallese", "iso639_1": "mh", "iso639_2": ["mah"]} + MONGOLIAN = {"name": "Mongolian", "iso639_1": "mn", "iso639_2": ["mon"]} + NAURU = {"name": "Nauru", "iso639_1": "na", "iso639_2": ["nau"]} + NAVAJO = {"name": "Navajo", "iso639_1": "nv", "iso639_2": ["nav"]} + NDONGA = {"name": "Ndonga", "iso639_1": "ng", "iso639_2": ["ndo"]} + NEPALI = {"name": "Nepali", "iso639_1": "ne", "iso639_2": ["nep"]} + NORTH_NDEBELE = {"name": "North Ndebele", "iso639_1": "nd", "iso639_2": ["nde"]} + NORTHERN_SAMI = {"name": "Northern Sami", "iso639_1": "se", "iso639_2": ["sme"]} + NORWEGIAN = {"name": "Norwegian", "iso639_1": "no", "iso639_2": ["nor"]} + NORWEGIAN_NYNORSK = {"name": "Nynorsk", "iso639_1": "nn", "iso639_2": ["nno"]} + OCCITAN = {"name": "Occitan", "iso639_1": "oc", "iso639_2": ["oci"]} + OJIBWA = {"name": "Ojibwa", "iso639_1": "oj", "iso639_2": ["oji"]} + ORIYA = {"name": "Oriya", "iso639_1": "or", "iso639_2": ["ori"]} + OROMO = {"name": "Oromo", "iso639_1": "om", "iso639_2": ["orm"]} + OSSETIAN = {"name": "Ossetian", "iso639_1": "os", "iso639_2": ["oss"]} + PALI = {"name": "Pali", "iso639_1": "pi", "iso639_2": ["pli"]} + PANJABI = {"name": "Panjabi", "iso639_1": "pa", "iso639_2": ["pan"]} + PERSIAN = {"name": "Persian", 
"iso639_1": "fa", "iso639_2": ["fas", "per"]} + POLISH = {"name": "Polish", "iso639_1": "pl", "iso639_2": ["pol"]} + PORTUGUESE = {"name": "Portuguese", "iso639_1": "pt", "iso639_2": ["por"]} + PUSHTO = {"name": "Pushto", "iso639_1": "ps", "iso639_2": ["pus"]} + QUECHUA = {"name": "Quechua", "iso639_1": "qu", "iso639_2": ["que"]} + ROMANIAN = {"name": "Romanian", "iso639_1": "ro", "iso639_2": ["ron", "rum"]} + ROMANSH = {"name": "Romansh", "iso639_1": "rm", "iso639_2": ["roh"]} + RUNDI = {"name": "Rundi", "iso639_1": "rn", "iso639_2": ["run"]} + RUSSIAN = {"name": "Russian", "iso639_1": "ru", "iso639_2": ["rus"]} + SAMOAN = {"name": "Samoan", "iso639_1": "sm", "iso639_2": ["smo"]} + SANGO = {"name": "Sango", "iso639_1": "sg", "iso639_2": ["sag"]} + SANSKRIT = {"name": "Sanskrit", "iso639_1": "sa", "iso639_2": ["san"]} + SARDINIAN = {"name": "Sardinian", "iso639_1": "sc", "iso639_2": ["srd"]} + SCOTTISH_GAELIC = {"name": "Scottish Gaelic", "iso639_1": "gd", "iso639_2": ["gla"]} + SERBIAN = {"name": "Serbian", "iso639_1": "sr", "iso639_2": ["srp"]} + SHONA = {"name": "Shona", "iso639_1": "sn", "iso639_2": ["sna"]} + SICHUAN_YI = {"name": "Sichuan Yi", "iso639_1": "ii", "iso639_2": ["iii"]} + SINDHI = {"name": "Sindhi", "iso639_1": "sd", "iso639_2": ["snd"]} + SINHALA = {"name": "Sinhala", "iso639_1": "si", "iso639_2": ["sin"]} + SLOVAK = {"name": "Slovak", "iso639_1": "sk", "iso639_2": ["slk", "slo"]} + SLOVENIAN = {"name": "Slovenian", "iso639_1": "sl", "iso639_2": ["slv"]} + SOMALI = {"name": "Somali", "iso639_1": "so", "iso639_2": ["som"]} + SOUTH_NDEBELE = {"name": "South Ndebele", "iso639_1": "nr", "iso639_2": ["nbl"]} + SOUTHERN_SOTHO = {"name": "Southern Sotho", "iso639_1": "st", "iso639_2": ["sot"]} + SPANISH = {"name": "Spanish", "iso639_1": "es", "iso639_2": ["spa"]} + SUNDANESE = {"name": "Sundanese", "iso639_1": "su", "iso639_2": ["sun"]} + SWAHILI = {"name": "Swahili", "iso639_1": "sw", "iso639_2": ["swa"]} + SWATI = {"name": "Swati", "iso639_1": "ss", 
"iso639_2": ["ssw"]} + SWEDISH = {"name": "Swedish", "iso639_1": "sv", "iso639_2": ["swe"]} + TAGALOG = {"name": "Tagalog", "iso639_1": "tl", "iso639_2": ["tgl"]} + TAHITIAN = {"name": "Tahitian", "iso639_1": "ty", "iso639_2": ["tah"]} + TAJIK = {"name": "Tajik", "iso639_1": "tg", "iso639_2": ["tgk"]} + TAMIL = {"name": "Tamil", "iso639_1": "ta", "iso639_2": ["tam"]} + TATAR = {"name": "Tatar", "iso639_1": "tt", "iso639_2": ["tat"]} + TELUGU = {"name": "Telugu", "iso639_1": "te", "iso639_2": ["tel"]} + THAI = {"name": "Thai", "iso639_1": "th", "iso639_2": ["tha"]} + TIBETAN = {"name": "Tibetan", "iso639_1": "bo", "iso639_2": ["bod", "tib"]} + TIGRINYA = {"name": "Tigrinya", "iso639_1": "ti", "iso639_2": ["tir"]} + TONGA = {"name": "Tonga", "iso639_1": "to", "iso639_2": ["ton"]} + TSONGA = {"name": "Tsonga", "iso639_1": "ts", "iso639_2": ["tso"]} + TSWANA = {"name": "Tswana", "iso639_1": "tn", "iso639_2": ["tsn"]} + TURKISH = {"name": "Turkish", "iso639_1": "tr", "iso639_2": ["tur"]} + TURKMEN = {"name": "Turkmen", "iso639_1": "tk", "iso639_2": ["tuk"]} + TWI = {"name": "Twi", "iso639_1": "tw", "iso639_2": ["twi"]} + UIGHUR = {"name": "Uighur", "iso639_1": "ug", "iso639_2": ["uig"]} + UKRAINIAN = {"name": "Ukrainian", "iso639_1": "uk", "iso639_2": ["ukr"]} + URDU = {"name": "Urdu", "iso639_1": "ur", "iso639_2": ["urd"]} + UZBEK = {"name": "Uzbek", "iso639_1": "uz", "iso639_2": ["uzb"]} + VENDA = {"name": "Venda", "iso639_1": "ve", "iso639_2": ["ven"]} + VIETNAMESE = {"name": "Vietnamese", "iso639_1": "vi", "iso639_2": ["vie"]} + VOLAPUK = {"name": "Volapük", "iso639_1": "vo", "iso639_2": ["vol"]} + WALLOON = {"name": "Walloon", "iso639_1": "wa", "iso639_2": ["wln"]} + WELSH = {"name": "Welsh", "iso639_1": "cy", "iso639_2": ["cym", "wel"]} + WESTERN_FRISIAN = {"name": "Western Frisian", "iso639_1": "fy", "iso639_2": ["fry"]} + WOLOF = {"name": "Wolof", "iso639_1": "wo", "iso639_2": ["wol"]} + XHOSA = {"name": "Xhosa", "iso639_1": "xh", "iso639_2": ["xho"]} + YIDDISH 
= {"name": "Yiddish", "iso639_1": "yi", "iso639_2": ["yid"]} + YORUBA = {"name": "Yoruba", "iso639_1": "yo", "iso639_2": ["yor"]} + ZHUANG = {"name": "Zhuang", "iso639_1": "za", "iso639_2": ["zha"]} + ZULU = {"name": "Zulu", "iso639_1": "zu", "iso639_2": ["zul"]} - UNDEFINED = {"name": "undefined", "iso639_1": "xx", "iso639_2": ["und"]} + FILIPINO = {"name": "Filipino", "iso639_1": "tl", "iso639_2": ["fil"]} + + UNDEFINED = {"name": "undefined", "iso639_1": "xx", "iso639_2": ["und"]} @staticmethod @@ -88,24 +199,22 @@ class IsoLanguage(Enum): closestMatches = difflib.get_close_matches(label, [l.value["name"] for l in IsoLanguage], n=1) if closestMatches: - foundLangs = [l for l in IsoLanguage if l.value['name'] == closestMatches[0]] + foundLangs = [l for l in IsoLanguage if l.value["name"] == closestMatches[0]] return foundLangs[0] if foundLangs else IsoLanguage.UNDEFINED else: return IsoLanguage.UNDEFINED @staticmethod def findThreeLetter(theeLetter : str): - foundLangs = [l for l in IsoLanguage if str(theeLetter) in l.value['iso639_2']] + foundLangs = [l for l in IsoLanguage if str(theeLetter) in l.value["iso639_2"]] return foundLangs[0] if foundLangs else IsoLanguage.UNDEFINED def label(self): - return str(self.value['name']) + return str(self.value["name"]) def twoLetter(self): - return str(self.value['iso639_1']) + return str(self.value["iso639_1"]) def threeLetter(self): - return str(self.value['iso639_2'][0]) - - + return str(self.value["iso639_2"][0]) diff --git a/tests/unit/test_cli_rename_only.py b/tests/unit/test_cli_rename_only.py new file mode 100644 index 0000000..377c246 --- /dev/null +++ b/tests/unit/test_cli_rename_only.py @@ -0,0 +1,125 @@ +from __future__ import annotations + +import os +from pathlib import Path +import sys +import tempfile +import unittest +from unittest.mock import patch + +from click.testing import CliRunner + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, 
str(SRC_ROOT)) + + +from ffx import cli # noqa: E402 + + +class _FakeMediaDescriptor: + def getVideoTracks(self): + return [] + + def getAudioTracks(self): + return [] + + def getSubtitleTracks(self): + return [] + + def getAttachmentTracks(self): + return [] + + +class _FakeFileProperties: + def __init__(self, context, source_path): + self.source_path = source_path + + def getShowId(self): + return -1 + + def getSeason(self): + return -1 + + def getEpisode(self): + return -1 + + def getMediaDescriptor(self): + return _FakeMediaDescriptor() + + def getPattern(self): + return None + + +class _FakeShiftedSeasonController: + def __init__(self, context): + self.context = context + + def shiftSeason(self, show_id, season, episode): + return season, episode + + +class _FakeFfxController: + def __init__(self, *args, **kwargs): + pass + + def runJob(self, *args, **kwargs): + raise AssertionError("runJob should not be called for --rename-only") + + +class RenameOnlyCliTests(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + self.home_dir = Path(self.tempdir.name) / "home" + self.home_dir.mkdir() + self.database_path = Path(self.tempdir.name) / "test.db" + self.source_dir = Path(self.tempdir.name) / "source" + self.source_dir.mkdir() + self.output_dir = Path(self.tempdir.name) / "output" + self.output_dir.mkdir() + self.source_path = self.source_dir / "episode.mkv" + self.source_bytes = b"rename-only-source" + self.source_path.write_bytes(self.source_bytes) + + def tearDown(self): + self.tempdir.cleanup() + + def test_rename_only_moves_source_file_into_output_directory(self): + runner = CliRunner() + + with ( + patch("ffx.file_properties.FileProperties", _FakeFileProperties), + patch("ffx.ffx_controller.FfxController", _FakeFfxController), + patch( + "ffx.shifted_season_controller.ShiftedSeasonController", + _FakeShiftedSeasonController, + ), + ): + result = runner.invoke( + cli.ffx, + [ + "--database-file", + str(self.database_path), + 
"convert", + "--no-tmdb", + "--no-pattern", + "--rename-only", + "--output-directory", + str(self.output_dir), + str(self.source_path), + ], + env={**os.environ, "HOME": str(self.home_dir)}, + ) + + self.assertEqual(0, result.exit_code, result.output) + + target_path = self.output_dir / "out_episode.mkv" + self.assertFalse(self.source_path.exists()) + self.assertTrue(target_path.exists()) + self.assertEqual(self.source_bytes, target_path.read_bytes()) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_iso_language.py b/tests/unit/test_iso_language.py new file mode 100644 index 0000000..a5aee99 --- /dev/null +++ b/tests/unit/test_iso_language.py @@ -0,0 +1,41 @@ +from __future__ import annotations + +from pathlib import Path +import sys +import unittest + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx.iso_language import IsoLanguage # noqa: E402 + + +class IsoLanguageTests(unittest.TestCase): + def test_language_constant_set_covers_iso_639_1_plus_filipino_alias(self): + languages = [language for language in IsoLanguage if language is not IsoLanguage.UNDEFINED] + + self.assertEqual(184, len(languages)) + self.assertEqual(183, len({language.twoLetter() for language in languages})) + + def test_primary_three_letter_code_is_returned_first(self): + self.assertEqual("sqi", IsoLanguage.ALBANIAN.threeLetter()) + self.assertEqual("deu", IsoLanguage.GERMAN.threeLetter()) + self.assertEqual("cym", IsoLanguage.WELSH.threeLetter()) + + def test_secondary_three_letter_codes_still_resolve_to_the_same_language(self): + self.assertIs(IsoLanguage.ALBANIAN, IsoLanguage.findThreeLetter("alb")) + self.assertIs(IsoLanguage.GERMAN, IsoLanguage.findThreeLetter("ger")) + self.assertIs(IsoLanguage.WELSH, IsoLanguage.findThreeLetter("wel")) + + def test_newly_added_languages_and_media_aliases_resolve(self): + self.assertIs(IsoLanguage.ASSAMESE, 
IsoLanguage.find("Assamese")) + self.assertIs(IsoLanguage.YORUBA, IsoLanguage.findThreeLetter("yor")) + self.assertIs(IsoLanguage.FILIPINO, IsoLanguage.findThreeLetter("fil")) + + +if __name__ == "__main__": + unittest.main() From 9dc08d48e9216a9f5cfa2d7623f1d512383d5bc8 Mon Sep 17 00:00:00 2001 From: Javanaut <javanaut@refulgent.de> Date: Sun, 12 Apr 2026 10:06:19 +0200 Subject: [PATCH 22/28] ff --- src/ffx/media_descriptor_change_set.py | 32 ++++++- .../unit/test_media_descriptor_change_set.py | 92 +++++++++++++++++++ 2 files changed, 120 insertions(+), 4 deletions(-) diff --git a/src/ffx/media_descriptor_change_set.py b/src/ffx/media_descriptor_change_set.py index a5448bc..093542b 100644 --- a/src/ffx/media_descriptor_change_set.py +++ b/src/ffx/media_descriptor_change_set.py @@ -1,5 +1,6 @@ import click +from ffx.iso_language import IsoLanguage from ffx.media_descriptor import MediaDescriptor from ffx.track_descriptor import TrackDescriptor @@ -117,7 +118,11 @@ class MediaDescriptorChangeSet(): sourceTrackDescriptor: TrackDescriptor = None): sourceTrackTags = sourceTrackDescriptor.getTags() if sourceTrackDescriptor is not None else {} - targetTrackTags = targetTrackDescriptor.getTags() if targetTrackDescriptor is not None else {} + targetTrackTags = ( + self.normalizeTrackTags(targetTrackDescriptor.getTags()) + if targetTrackDescriptor is not None + else {} + ) trackCompareResult = {} @@ -142,6 +147,25 @@ class MediaDescriptorChangeSet(): return trackCompareResult + def normalizeTrackTagValue(self, tagKey, tagValue): + if tagKey != "language": + return tagValue + + if isinstance(tagValue, IsoLanguage): + return tagValue.threeLetter() + + trackLanguage = IsoLanguage.findThreeLetter(str(tagValue)) + if trackLanguage != IsoLanguage.UNDEFINED: + return trackLanguage.threeLetter() + + return tagValue + + def normalizeTrackTags(self, trackTags: dict): + return { + tagKey: self.normalizeTrackTagValue(tagKey, tagValue) + for tagKey, tagValue in trackTags.items() + } + 
def generateDispositionTokens(self): """ @@ -243,7 +267,7 @@ class MediaDescriptorChangeSet(): addedTracks: dict = self.__changeSetObj[MediaDescriptorChangeSet.TRACKS_KEY][DIFF_ADDED_KEY] trackDescriptor: TrackDescriptor for trackDescriptor in addedTracks.values(): - for tagKey, tagValue in trackDescriptor.getTags().items(): + for tagKey, tagValue in self.normalizeTrackTags(trackDescriptor.getTags()).items(): if not tagKey in self.__removeTrackKeys: metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}" + f":{trackDescriptor.getSubIndex()}", @@ -267,7 +291,7 @@ class MediaDescriptorChangeSet(): trackDescriptor = self.__targetTrackDescriptorsByIndex[trackIndex] - for tagKey, tagValue in outputTrackTags.items(): + for tagKey, tagValue in self.normalizeTrackTags(outputTrackTags).items(): metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}" + f":{trackDescriptor.getSubIndex()}", f"{tagKey}={tagValue}"] @@ -285,7 +309,7 @@ class MediaDescriptorChangeSet(): } | unchangedTrackTags ) - for tagKey, tagValue in preservedTrackTags.items(): + for tagKey, tagValue in self.normalizeTrackTags(preservedTrackTags).items(): metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}" + f":{trackDescriptor.getSubIndex()}", f"{tagKey}={tagValue}"] diff --git a/tests/unit/test_media_descriptor_change_set.py b/tests/unit/test_media_descriptor_change_set.py index bf9be09..93c641a 100644 --- a/tests/unit/test_media_descriptor_change_set.py +++ b/tests/unit/test_media_descriptor_change_set.py @@ -27,6 +27,64 @@ class StaticConfig: class MediaDescriptorChangeSetTests(unittest.TestCase): + def test_non_primary_source_language_code_is_normalized_in_changed_track_metadata(self): + context = { + "logger": get_ffx_logger(), + "config": StaticConfig({}), + } + + source_track = TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.AUDIO, + tags={"language": "ger", "title": "German Main"}, + ) + target_track = 
TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.AUDIO, + tags={"language": "ger", "title": "German Main"}, + ) + + change_set = MediaDescriptorChangeSet( + context, + MediaDescriptor(track_descriptors=[target_track]), + MediaDescriptor(track_descriptors=[source_track]), + ) + + metadata_tokens = change_set.generateMetadataTokens() + + self.assertIn("-metadata:s:a:0", metadata_tokens) + self.assertIn("language=deu", metadata_tokens) + self.assertNotIn("language=ger", metadata_tokens) + + def test_target_only_track_language_metadata_uses_primary_code(self): + context = { + "logger": get_ffx_logger(), + "config": StaticConfig({}), + } + + target_track = TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.AUDIO, + tags={"language": "ger", "title": "German Main"}, + ) + + change_set = MediaDescriptorChangeSet( + context, + MediaDescriptor(track_descriptors=[target_track]), + ) + + metadata_tokens = change_set.generateMetadataTokens() + + self.assertIn("-metadata:s:a:0", metadata_tokens) + self.assertIn("language=deu", metadata_tokens) + self.assertNotIn("language=ger", metadata_tokens) + def test_external_subtitle_preserves_source_only_tags_except_removed_keys(self): context = { "logger": get_ffx_logger(), @@ -79,6 +137,40 @@ class MediaDescriptorChangeSetTests(unittest.TestCase): self.assertNotIn("BPS=remove-me", metadata_tokens) self.assertIn("BPS=", metadata_tokens) + def test_external_subtitle_normalizes_preserved_source_language_metadata(self): + context = { + "logger": get_ffx_logger(), + "config": StaticConfig({}), + } + + source_track = TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.SUBTITLE, + tags={"language": "ger", "title": "German Subtitle"}, + ) + target_track = TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.SUBTITLE, + tags={}, + external_source_file="/tmp/external-subtitle.vtt", + ) + + change_set = 
MediaDescriptorChangeSet( + context, + MediaDescriptor(track_descriptors=[target_track]), + MediaDescriptor(track_descriptors=[source_track]), + ) + + metadata_tokens = change_set.generateMetadataTokens() + + self.assertIn("-metadata:s:s:0", metadata_tokens) + self.assertIn("language=deu", metadata_tokens) + self.assertNotIn("language=ger", metadata_tokens) + def test_target_only_tracks_still_emit_remove_tokens_for_configured_stream_keys(self): context = { "logger": get_ffx_logger(), From d05b01cfb202c00d744d508ce2fc39265f617d2e Mon Sep 17 00:00:00 2001 From: Javanaut <javanaut@refulgent.de> Date: Sun, 12 Apr 2026 10:38:36 +0200 Subject: [PATCH 23/28] Adds rename command --- requirements/project.md | 5 +- src/ffx/cli.py | 80 ++++++++++++++++++++++++- src/ffx/file_properties.py | 35 ++++++----- tests/unit/test_cli_rename.py | 108 ++++++++++++++++++++++++++++++++++ 4 files changed, 209 insertions(+), 19 deletions(-) create mode 100644 tests/unit/test_cli_rename.py diff --git a/requirements/project.md b/requirements/project.md index dfcac9c..73bbe47 100644 --- a/requirements/project.md +++ b/requirements/project.md @@ -18,7 +18,7 @@ - Inspect existing media files through `ffprobe` and compare discovered stream metadata with stored normalization rules. - Convert media files through `ffmpeg` into a normalized output layout, including video recoding, audio transcoding to Opus, metadata cleanup and rewrite, and controlled disposition flags. - Build output filenames from detected or configured show, season, and episode information, optionally enriched from TMDB and a configurable Jinja-style filename template. - - Support auxiliary file operations such as subtitle import, unmuxing, crop detection, and rename-only runs. + - Support auxiliary file operations such as subtitle import, unmuxing, crop detection, rename-only conversion runs, and direct in-place episode renaming. - Supported environments: - Local execution on a Python-capable workstation. 
- Best-supported on Linux-like systems because the implementation assumes `~/.local`, `/dev/null`, `nice`, and `cpulimit`. @@ -35,7 +35,7 @@ ## Functional Requirements -- The system shall provide a CLI entrypoint named `ffx` with commands for `convert`, `inspect`, `shows`, `unmux`, `cropdetect`, `setup`, `configure_workstation`, `upgrade`, `version`, and `help`. +- The system shall provide a CLI entrypoint named `ffx` with commands for `convert`, `inspect`, `shows`, `rename`, `unmux`, `cropdetect`, `setup`, `configure_workstation`, `upgrade`, `version`, and `help`. - The system shall support a two-step local installation and preparation flow: - `tools/setup.sh` is the bootstrap entrypoint for the first step and shall own bundle virtualenv creation, package installation, shell alias exposure, and optional Python test-package installation. - `tools/configure_workstation.sh` is the bootstrap entrypoint for the second step and shall own workstation dependency checks and installation plus local config and directory seeding. @@ -69,6 +69,7 @@ - `--cpu` shall accept either a positive absolute `cpulimit` value such as `200`, or a percentage suffixed with `%` such as `25%` to represent a share of present CPUs; omitting the option or using `0` shall disable CPU limiting. - When both limits are configured, the process wrapper shall execute the target command through `cpulimit` around a `nice -n ...` invocation so both limits apply to the launched media command. - The system shall support extracting streams into separate files via `unmux` and reporting suggested crop parameters via `cropdetect`. +- The system shall support in-place episode renaming via `rename`, requiring a `--prefix`, accepting optional `--season` and `--suffix` overrides, preserving the source extension, and supporting dry-run output without moving files. 
- Crop detection shall use a configurable sampling window, defaulting to a 60-second seek and a 180-second analysis duration, and repeated crop-detection requests for the same source plus sampling window shall reuse cached results within one process. - The system shall handle invalid input and system failures gracefully by logging warnings or raising `click` errors for missing files, invalid media, missing TMDB credentials, incompatible database versions, and ambiguous track dispositions when prompting is disabled. diff --git a/src/ffx/cli.py b/src/ffx/cli.py index 6703198..e3c3594 100755 --- a/src/ffx/cli.py +++ b/src/ffx/cli.py @@ -33,7 +33,7 @@ if TYPE_CHECKING: from ffx.media_descriptor import MediaDescriptor from ffx.track_descriptor import TrackDescriptor -LIGHTWEIGHT_COMMANDS = {None, 'version', 'help', 'setup', 'configure_workstation', 'upgrade'} +LIGHTWEIGHT_COMMANDS = {None, 'version', 'help', 'setup', 'configure_workstation', 'upgrade', 'rename'} CPU_OPTION_HELP = ( "Limit CPU for started processes. Use an absolute cpulimit value such as 200 " + "(about 2 cores), or use a percentage such as 25% for a share of present cores. 
" @@ -185,6 +185,33 @@ def resolveUnmuxOutputDirectory(context, outputDirectory, subtitlesOnly, label): return os.path.join(configuredSubtitlesBaseDirectory, resolvedLabel), True +def buildRenameTargetFilename(sourcePath, prefix, seasonOverride=None, suffix=''): + from ffx.file_properties import FileProperties + + sourceFilename = os.path.basename(sourcePath) + seasonEpisodeValues = FileProperties.extractSeasonEpisodeValues(sourceFilename) + if seasonEpisodeValues is None: + return None + + sourceSeason, sourceEpisode = seasonEpisodeValues + resolvedSeason = int(seasonOverride) if seasonOverride is not None else ( + int(sourceSeason) if sourceSeason is not None else 1 + ) + + _sourceBasename, sourceExtension = os.path.splitext(sourceFilename) + + targetFilenameTokens = [ + str(prefix).strip(), + f"s{resolvedSeason}e{int(sourceEpisode)}", + ] + + resolvedSuffix = str(suffix).strip() + if resolvedSuffix: + targetFilenameTokens.append(resolvedSuffix) + + return f"{'_'.join(targetFilenameTokens)}{sourceExtension}" + + @click.group() @click.pass_context @@ -242,7 +269,7 @@ def version(): def help(): click.echo(f"ffx {VERSION}\n") click.echo("Maintenance commands: setup, configure_workstation, upgrade") - click.echo("Media commands: shows, inspect, convert, unmux, cropdetect") + click.echo("Media commands: shows, inspect, convert, rename, unmux, cropdetect") click.echo("Use 'ffx --help' or 'ffx <command> --help' for full command help.") @@ -408,6 +435,55 @@ def inspect(ctx, filename): app.run() +@ffx.command() +@click.pass_context +@click.argument('paths', nargs=-1) +@click.option('--prefix', type=str, required=True, help='Required target filename prefix') +@click.option('--season', type=int, default=None, help='Override target season index') +@click.option('--suffix', type=str, default='', help='Optional target filename suffix') +@click.option('--dry-run', is_flag=True, default=False, help='Only print planned renames') +def rename(ctx, paths, prefix, season, suffix, 
dry_run): + """Rename matching episode files in place.""" + + resolvedPrefix = str(prefix).strip() + resolvedSuffix = str(suffix).strip() + effectiveDryRun = bool(ctx.obj.get('dry_run', False) or dry_run) + + if not resolvedPrefix: + raise click.ClickException("Rename prefix must not be empty.") + + processedCount = 0 + + for sourcePath in paths: + if not os.path.isfile(sourcePath): + continue + + targetFilename = buildRenameTargetFilename( + sourcePath, + resolvedPrefix, + seasonOverride=season, + suffix=resolvedSuffix, + ) + if targetFilename is None: + continue + + sourceFilename = os.path.basename(sourcePath) + targetPath = os.path.join(os.path.dirname(sourcePath), targetFilename) + click.echo(f"{sourceFilename} -> {targetFilename}") + processedCount += 1 + + if effectiveDryRun or os.path.abspath(sourcePath) == os.path.abspath(targetPath): + continue + + if os.path.exists(targetPath): + raise click.ClickException(f"Target file already exists: {targetPath}") + + shutil.move(sourcePath, targetPath) + + if processedCount == 0: + click.echo("No matching files found.") + + def getUnmuxSequence(trackDescriptor: TrackDescriptor, sourcePath, targetPrefix, targetDirectory = ''): # executable and input file diff --git a/src/ffx/file_properties.py b/src/ffx/file_properties.py index 2f8d0af..20c5d94 100644 --- a/src/ffx/file_properties.py +++ b/src/ffx/file_properties.py @@ -30,6 +30,18 @@ class FileProperties(): DEFAULT_INDEX_DIGITS = 3 + @classmethod + def extractSeasonEpisodeValues(cls, sourceText: str) -> tuple[int | None, int] | None: + seasonEpisodeMatch = re.search(cls.SEASON_EPISODE_INDICATOR_MATCH, str(sourceText)) + if seasonEpisodeMatch is not None: + return int(seasonEpisodeMatch.group(1)), int(seasonEpisodeMatch.group(2)) + + episodeMatch = re.search(cls.EPISODE_INDICATOR_MATCH, str(sourceText)) + if episodeMatch is not None: + return None, int(episodeMatch.group(1)) + + return None + def __init__(self, context, sourcePath): self.context = context @@ -65,26 
+77,19 @@ class FileProperties(): databaseMatchedGroups = matchResult['match'].groups() self.__logger.debug(f"FileProperties.__init__(): Matched groups: {databaseMatchedGroups}") - seIndicator = databaseMatchedGroups[0] - - se_match = re.search(FileProperties.SEASON_EPISODE_INDICATOR_MATCH, seIndicator) - e_match = re.search(FileProperties.EPISODE_INDICATOR_MATCH, seIndicator) - + indicatorSource = databaseMatchedGroups[0] else: self.__logger.debug(f"FileProperties.__init__(): Checking file name for indicator {self.__sourceFilename}") + indicatorSource = self.__sourceFilename - se_match = re.search(FileProperties.SEASON_EPISODE_INDICATOR_MATCH, self.__sourceFilename) - e_match = re.search(FileProperties.EPISODE_INDICATOR_MATCH, self.__sourceFilename) - - if se_match is not None: - self.__season = int(se_match.group(1)) - self.__episode = int(se_match.group(2)) - elif e_match is not None: - self.__season = -1 - self.__episode = int(e_match.group(1)) - else: + seasonEpisodeValues = self.extractSeasonEpisodeValues(indicatorSource) + if seasonEpisodeValues is None: self.__season = -1 self.__episode = -1 + else: + sourceSeason, sourceEpisode = seasonEpisodeValues + self.__season = -1 if sourceSeason is None else int(sourceSeason) + self.__episode = int(sourceEpisode) self.__ffprobeData = None diff --git a/tests/unit/test_cli_rename.py b/tests/unit/test_cli_rename.py new file mode 100644 index 0000000..5d0d57e --- /dev/null +++ b/tests/unit/test_cli_rename.py @@ -0,0 +1,108 @@ +from __future__ import annotations + +from pathlib import Path +import sys +import tempfile +import unittest + +from click.testing import CliRunner + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx import cli # noqa: E402 + + +class RenameCliTests(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + self.workspace = Path(self.tempdir.name) + + def tearDown(self): + 
self.tempdir.cleanup() + + def write_source(self, filename: str, payload: bytes = b"episode") -> Path: + source_path = self.workspace / filename + source_path.write_bytes(payload) + return source_path + + def invoke_rename(self, *args: str): + runner = CliRunner() + result = runner.invoke(cli.ffx, ["rename", *args]) + self.assertEqual(0, result.exit_code, result.output) + return result + + def test_rename_moves_matching_file_in_place(self): + source_path = self.write_source("demo_S02E03.mkv", b"season-episode") + + result = self.invoke_rename("--prefix", "dball", str(source_path)) + + target_path = self.workspace / "dball_s2e3.mkv" + self.assertIn("demo_S02E03.mkv -> dball_s2e3.mkv", result.output) + self.assertFalse(source_path.exists()) + self.assertTrue(target_path.exists()) + self.assertEqual(b"season-episode", target_path.read_bytes()) + + def test_rename_uses_default_season_and_suffix_for_episode_only_match(self): + source_path = self.write_source("demo_E07.mp4", b"episode-only") + + result = self.invoke_rename( + "--prefix", + "dball", + "--suffix", + "bonus", + str(source_path), + ) + + target_path = self.workspace / "dball_s1e7_bonus.mp4" + self.assertIn("demo_E07.mp4 -> dball_s1e7_bonus.mp4", result.output) + self.assertFalse(source_path.exists()) + self.assertTrue(target_path.exists()) + self.assertEqual(b"episode-only", target_path.read_bytes()) + + def test_rename_cli_season_overrides_source_season(self): + source_path = self.write_source("demo_s02e07.webm") + + result = self.invoke_rename( + "--prefix", + "dball", + "--season", + "5", + str(source_path), + ) + + target_path = self.workspace / "dball_s5e7.webm" + self.assertIn("demo_s02e07.webm -> dball_s5e7.webm", result.output) + self.assertFalse(source_path.exists()) + self.assertTrue(target_path.exists()) + + def test_rename_dry_run_prints_mapping_without_moving(self): + source_path = self.write_source("demo_E07.mkv") + + result = self.invoke_rename( + "--dry-run", + "--prefix", + "dball", + 
str(source_path), + ) + + target_path = self.workspace / "dball_s1e7.mkv" + self.assertIn("demo_E07.mkv -> dball_s1e7.mkv", result.output) + self.assertTrue(source_path.exists()) + self.assertFalse(target_path.exists()) + + def test_rename_skips_non_matching_filenames(self): + source_path = self.write_source("demo_finale.mkv") + + result = self.invoke_rename("--prefix", "dball", str(source_path)) + + self.assertIn("No matching files found.", result.output) + self.assertTrue(source_path.exists()) + + +if __name__ == "__main__": + unittest.main() From ef0d6e9274021de2599ee18a80b87424054c86e7 Mon Sep 17 00:00:00 2001 From: Javanaut <javanaut@refulgent.de> Date: Sun, 12 Apr 2026 11:44:32 +0200 Subject: [PATCH 24/28] Extd rename/unmux to pad with zeroes --- assets/ffx.json.j2 | 36 +++++ src/ffx/cli.py | 68 +++++++- src/ffx/configuration_controller.py | 47 ++++++ src/ffx/constants.py | 5 + src/ffx/helper.py | 19 ++- src/ffx/media_details_screen.py | 1 + src/ffx/show_descriptor.py | 58 +++++-- src/ffx/show_details_screen.py | 21 ++- tests/integration/test_cli_unmux.py | 124 ++++++++++++++- tests/unit/test_cli_rename.py | 47 ++++-- .../unit/test_configure_workstation_script.py | 150 ++++++++++++++++++ tests/unit/test_show_descriptor_defaults.py | 97 +++++++++++ tools/configure_workstation.sh | 130 ++++++++++----- 13 files changed, 730 insertions(+), 73 deletions(-) create mode 100644 assets/ffx.json.j2 create mode 100644 tests/unit/test_configure_workstation_script.py create mode 100644 tests/unit/test_show_descriptor_defaults.py diff --git a/assets/ffx.json.j2 b/assets/ffx.json.j2 new file mode 100644 index 0000000..666e603 --- /dev/null +++ b/assets/ffx.json.j2 @@ -0,0 +1,36 @@ +{ + "databasePath": {{ database_path_json }}, + "logDirectory": {{ log_directory_json }}, + "subtitlesDirectory": {{ subtitles_directory_json }}, + "defaultIndexSeasonDigits": {{ default_index_season_digits }}, + "defaultIndexEpisodeDigits": {{ default_index_episode_digits }}, + 
"defaultIndicatorSeasonDigits": {{ default_indicator_season_digits }}, + "defaultIndicatorEpisodeDigits": {{ default_indicator_episode_digits }}, + "metadata": { + "signature": { + "RECODED_WITH": "FFX" + }, + "remove": [ + "VERSION-eng", + "creation_time", + "NAME" + ], + "streams": { + "remove": [ + "BPS", + "NUMBER_OF_FRAMES", + "NUMBER_OF_BYTES", + "_STATISTICS_WRITING_APP", + "_STATISTICS_WRITING_DATE_UTC", + "_STATISTICS_TAGS", + "BPS-eng", + "DURATION-eng", + "NUMBER_OF_FRAMES-eng", + "NUMBER_OF_BYTES-eng", + "_STATISTICS_WRITING_APP-eng", + "_STATISTICS_WRITING_DATE_UTC-eng", + "_STATISTICS_TAGS-eng" + ] + } + } +} diff --git a/src/ffx/cli.py b/src/ffx/cli.py index e3c3594..f3040c2 100755 --- a/src/ffx/cli.py +++ b/src/ffx/cli.py @@ -185,8 +185,32 @@ def resolveUnmuxOutputDirectory(context, outputDirectory, subtitlesOnly, label): return os.path.join(configuredSubtitlesBaseDirectory, resolvedLabel), True -def buildRenameTargetFilename(sourcePath, prefix, seasonOverride=None, suffix=''): +def resolveIndicatorDigitLengths(context=None, showDescriptor=None): + from ffx.show_descriptor import ShowDescriptor + + defaultDigitLengths = ShowDescriptor.getDefaultDigitLengths(context) + if showDescriptor is None: + return ( + defaultDigitLengths[ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY], + defaultDigitLengths[ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY], + ) + + return ( + int(showDescriptor.getIndicatorSeasonDigits()), + int(showDescriptor.getIndicatorEpisodeDigits()), + ) + + +def buildRenameTargetFilename( + sourcePath, + prefix, + seasonOverride=None, + suffix='', + indicatorSeasonDigits=None, + indicatorEpisodeDigits=None, +): from ffx.file_properties import FileProperties + from ffx.show_descriptor import ShowDescriptor sourceFilename = os.path.basename(sourcePath) seasonEpisodeValues = FileProperties.extractSeasonEpisodeValues(sourceFilename) @@ -197,12 +221,22 @@ def buildRenameTargetFilename(sourcePath, prefix, seasonOverride=None, suffix='' resolvedSeason 
= int(seasonOverride) if seasonOverride is not None else ( int(sourceSeason) if sourceSeason is not None else 1 ) + resolvedIndicatorSeasonDigits = ( + int(indicatorSeasonDigits) + if indicatorSeasonDigits is not None + else ShowDescriptor.DEFAULT_INDICATOR_SEASON_DIGITS + ) + resolvedIndicatorEpisodeDigits = ( + int(indicatorEpisodeDigits) + if indicatorEpisodeDigits is not None + else ShowDescriptor.DEFAULT_INDICATOR_EPISODE_DIGITS + ) _sourceBasename, sourceExtension = os.path.splitext(sourceFilename) targetFilenameTokens = [ str(prefix).strip(), - f"s{resolvedSeason}e{int(sourceEpisode)}", + f"s{resolvedSeason:0{resolvedIndicatorSeasonDigits}d}e{int(sourceEpisode):0{resolvedIndicatorEpisodeDigits}d}", ] resolvedSuffix = str(suffix).strip() @@ -444,10 +478,15 @@ def inspect(ctx, filename): @click.option('--dry-run', is_flag=True, default=False, help='Only print planned renames') def rename(ctx, paths, prefix, season, suffix, dry_run): """Rename matching episode files in place.""" + from ffx.configuration_controller import ConfigurationController resolvedPrefix = str(prefix).strip() resolvedSuffix = str(suffix).strip() effectiveDryRun = bool(ctx.obj.get('dry_run', False) or dry_run) + renameContext = { + 'config': ctx.obj.get('config') or ConfigurationController(), + } + indicatorSeasonDigits, indicatorEpisodeDigits = resolveIndicatorDigitLengths(renameContext) if not resolvedPrefix: raise click.ClickException("Rename prefix must not be empty.") @@ -463,6 +502,8 @@ def rename(ctx, paths, prefix, season, suffix, dry_run): resolvedPrefix, seasonOverride=season, suffix=resolvedSuffix, + indicatorSeasonDigits=indicatorSeasonDigits, + indicatorEpisodeDigits=indicatorEpisodeDigits, ) if targetFilename is None: continue @@ -571,13 +612,25 @@ def unmux(ctx, try: sourceMediaDescriptor = fp.getMediaDescriptor() + currentPattern = fp.getPattern() + currentShowDescriptor = ( + currentPattern.getShowDescriptor(ctx.obj) if currentPattern is not None else None + ) + 
indicatorSeasonDigits, indicatorEpisodeDigits = resolveIndicatorDigitLengths( + ctx.obj, + currentShowDescriptor, + ) season = fp.getSeason() episode = fp.getEpisode() #TODO: Recognition für alle Formate anpassen targetLabel = label if label else fp.getFileBasename() - targetIndicator = f"_S{season}E{episode}" if label and season != -1 and episode != -1 else '' + targetIndicator = ( + f"_S{season:0{indicatorSeasonDigits}d}E{episode:0{indicatorEpisodeDigits}d}" + if label and season != -1 and episode != -1 + else '' + ) if label and not targetIndicator: ctx.obj['logger'].warning(f"Skipping file {fp.getFilename()}: Label set but no indicator recognized") @@ -1226,10 +1279,11 @@ def convert(ctx, fc = FfxController(context, targetMediaDescriptor, sourceMediaDescriptor) - indexSeasonDigits = currentShowDescriptor.getIndexSeasonDigits() if not currentPattern is None else ShowDescriptor.DEFAULT_INDEX_SEASON_DIGITS - indexEpisodeDigits = currentShowDescriptor.getIndexEpisodeDigits() if not currentPattern is None else ShowDescriptor.DEFAULT_INDEX_EPISODE_DIGITS - indicatorSeasonDigits = currentShowDescriptor.getIndicatorSeasonDigits() if not currentPattern is None else ShowDescriptor.DEFAULT_INDICATOR_SEASON_DIGITS - indicatorEpisodeDigits = currentShowDescriptor.getIndicatorEpisodeDigits() if not currentPattern is None else ShowDescriptor.DEFAULT_INDICATOR_EPISODE_DIGITS + defaultDigitLengths = ShowDescriptor.getDefaultDigitLengths(context) + indexSeasonDigits = currentShowDescriptor.getIndexSeasonDigits() if not currentPattern is None else defaultDigitLengths[ShowDescriptor.INDEX_SEASON_DIGITS_KEY] + indexEpisodeDigits = currentShowDescriptor.getIndexEpisodeDigits() if not currentPattern is None else defaultDigitLengths[ShowDescriptor.INDEX_EPISODE_DIGITS_KEY] + indicatorSeasonDigits = currentShowDescriptor.getIndicatorSeasonDigits() if not currentPattern is None else defaultDigitLengths[ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY] + indicatorEpisodeDigits = 
currentShowDescriptor.getIndicatorEpisodeDigits() if not currentPattern is None else defaultDigitLengths[ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY] # Shift season and episode if defined for this show diff --git a/src/ffx/configuration_controller.py b/src/ffx/configuration_controller.py index 8f907a3..4a6bd29 100644 --- a/src/ffx/configuration_controller.py +++ b/src/ffx/configuration_controller.py @@ -1,5 +1,12 @@ import os, json +from .constants import ( + DEFAULT_SHOW_INDEX_EPISODE_DIGITS, + DEFAULT_SHOW_INDEX_SEASON_DIGITS, + DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS, + DEFAULT_SHOW_INDICATOR_SEASON_DIGITS, +) + class ConfigurationController(): CONFIG_FILENAME = 'ffx.json' @@ -10,6 +17,10 @@ class ConfigurationController(): LOG_DIRECTORY_CONFIG_KEY = 'logDirectory' SUBTITLES_DIRECTORY_CONFIG_KEY = 'subtitlesDirectory' OUTPUT_FILENAME_TEMPLATE_KEY = 'outputFilenameTemplate' + DEFAULT_INDEX_SEASON_DIGITS_CONFIG_KEY = 'defaultIndexSeasonDigits' + DEFAULT_INDEX_EPISODE_DIGITS_CONFIG_KEY = 'defaultIndexEpisodeDigits' + DEFAULT_INDICATOR_SEASON_DIGITS_CONFIG_KEY = 'defaultIndicatorSeasonDigits' + DEFAULT_INDICATOR_EPISODE_DIGITS_CONFIG_KEY = 'defaultIndicatorEpisodeDigits' def __init__(self): @@ -57,6 +68,42 @@ class ConfigurationController(): ) return os.path.expanduser(str(subtitlesDirectory)) if subtitlesDirectory else '' + @classmethod + def getConfiguredIntegerValue(cls, configurationData: dict, configKey: str, defaultValue: int) -> int: + configuredValue = configurationData.get(configKey, defaultValue) + try: + return int(configuredValue) + except (TypeError, ValueError): + return int(defaultValue) + + def getDefaultIndexSeasonDigits(self): + return ConfigurationController.getConfiguredIntegerValue( + self.__configurationData, + ConfigurationController.DEFAULT_INDEX_SEASON_DIGITS_CONFIG_KEY, + DEFAULT_SHOW_INDEX_SEASON_DIGITS, + ) + + def getDefaultIndexEpisodeDigits(self): + return ConfigurationController.getConfiguredIntegerValue( + self.__configurationData, + 
ConfigurationController.DEFAULT_INDEX_EPISODE_DIGITS_CONFIG_KEY, + DEFAULT_SHOW_INDEX_EPISODE_DIGITS, + ) + + def getDefaultIndicatorSeasonDigits(self): + return ConfigurationController.getConfiguredIntegerValue( + self.__configurationData, + ConfigurationController.DEFAULT_INDICATOR_SEASON_DIGITS_CONFIG_KEY, + DEFAULT_SHOW_INDICATOR_SEASON_DIGITS, + ) + + def getDefaultIndicatorEpisodeDigits(self): + return ConfigurationController.getConfiguredIntegerValue( + self.__configurationData, + ConfigurationController.DEFAULT_INDICATOR_EPISODE_DIGITS_CONFIG_KEY, + DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS, + ) + def getData(self): return self.__configurationData diff --git a/src/ffx/constants.py b/src/ffx/constants.py index 63f67b9..eb212ef 100644 --- a/src/ffx/constants.py +++ b/src/ffx/constants.py @@ -22,4 +22,9 @@ DEFAULT_CROPDETECT_DURATION_SECONDS = 180 DEFAULT_cut_start = 60 DEFAULT_cut_length = 180 +DEFAULT_SHOW_INDEX_SEASON_DIGITS = 2 +DEFAULT_SHOW_INDEX_EPISODE_DIGITS = 2 +DEFAULT_SHOW_INDICATOR_SEASON_DIGITS = 2 +DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS = 2 + DEFAULT_OUTPUT_FILENAME_TEMPLATE = '{{ ffx_show_name }} - {{ ffx_index }}{{ ffx_index_separator }}{{ ffx_episode_name }}{{ ffx_indicator_separator }}{{ ffx_indicator }}' diff --git a/src/ffx/helper.py b/src/ffx/helper.py index 742dbc1..00f1d45 100644 --- a/src/ffx/helper.py +++ b/src/ffx/helper.py @@ -4,6 +4,7 @@ from jinja2 import Environment, Undefined from .constants import DEFAULT_OUTPUT_FILENAME_TEMPLATE from .configuration_controller import ConfigurationController from .logging_utils import get_ffx_logger +from .show_descriptor import ShowDescriptor class EmptyStringUndefined(Undefined): @@ -164,10 +165,10 @@ def getEpisodeFileBasename(showName, episodeName, season, episode, - indexSeasonDigits = 2, - indexEpisodeDigits = 2, - indicatorSeasonDigits = 2, - indicatorEpisodeDigits = 2, + indexSeasonDigits = None, + indexEpisodeDigits = None, + indicatorSeasonDigits = None, + indicatorEpisodeDigits = None, 
context = None): """ One Piece: @@ -199,6 +200,16 @@ def getEpisodeFileBasename(showName, configData = cc.getData() if cc is not None else {} outputFilenameTemplate = configData.get(ConfigurationController.OUTPUT_FILENAME_TEMPLATE_KEY, DEFAULT_OUTPUT_FILENAME_TEMPLATE) + defaultDigitLengths = ShowDescriptor.getDefaultDigitLengths(context) + + if indexSeasonDigits is None: + indexSeasonDigits = defaultDigitLengths[ShowDescriptor.INDEX_SEASON_DIGITS_KEY] + if indexEpisodeDigits is None: + indexEpisodeDigits = defaultDigitLengths[ShowDescriptor.INDEX_EPISODE_DIGITS_KEY] + if indicatorSeasonDigits is None: + indicatorSeasonDigits = defaultDigitLengths[ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY] + if indicatorEpisodeDigits is None: + indicatorEpisodeDigits = defaultDigitLengths[ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY] if context is not None and 'logger' in context.keys(): logger = context['logger'] diff --git a/src/ffx/media_details_screen.py b/src/ffx/media_details_screen.py index 7f61622..5a3c3c3 100644 --- a/src/ffx/media_details_screen.py +++ b/src/ffx/media_details_screen.py @@ -559,6 +559,7 @@ class MediaDetailsScreen(Screen): try: kwargs = {} + kwargs[ShowDescriptor.CONTEXT_KEY] = self.context kwargs[ShowDescriptor.ID_KEY] = int(selected_row_data[0]) kwargs[ShowDescriptor.NAME_KEY] = str(selected_row_data[1]) kwargs[ShowDescriptor.YEAR_KEY] = int(selected_row_data[2]) diff --git a/src/ffx/show_descriptor.py b/src/ffx/show_descriptor.py index 9a5a270..e1b3398 100644 --- a/src/ffx/show_descriptor.py +++ b/src/ffx/show_descriptor.py @@ -1,3 +1,10 @@ +from .configuration_controller import ConfigurationController +from .constants import ( + DEFAULT_SHOW_INDEX_EPISODE_DIGITS, + DEFAULT_SHOW_INDEX_SEASON_DIGITS, + DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS, + DEFAULT_SHOW_INDICATOR_SEASON_DIGITS, +) from .logging_utils import get_ffx_logger @@ -15,10 +22,42 @@ class ShowDescriptor(): INDICATOR_SEASON_DIGITS_KEY = 'indicator_season_digits' INDICATOR_EPISODE_DIGITS_KEY = 
'indicator_episode_digits' - DEFAULT_INDEX_SEASON_DIGITS = 2 - DEFAULT_INDEX_EPISODE_DIGITS = 2 - DEFAULT_INDICATOR_SEASON_DIGITS = 2 - DEFAULT_INDICATOR_EPISODE_DIGITS = 2 + DEFAULT_INDEX_SEASON_DIGITS = DEFAULT_SHOW_INDEX_SEASON_DIGITS + DEFAULT_INDEX_EPISODE_DIGITS = DEFAULT_SHOW_INDEX_EPISODE_DIGITS + DEFAULT_INDICATOR_SEASON_DIGITS = DEFAULT_SHOW_INDICATOR_SEASON_DIGITS + DEFAULT_INDICATOR_EPISODE_DIGITS = DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS + + @classmethod + def getDefaultDigitLengths(cls, context: dict | None = None) -> dict[str, int]: + configurationData = {} + + if context is not None: + configController = context.get('config') + if configController is not None and hasattr(configController, 'getData'): + configurationData = configController.getData() + + return { + cls.INDEX_SEASON_DIGITS_KEY: ConfigurationController.getConfiguredIntegerValue( + configurationData, + ConfigurationController.DEFAULT_INDEX_SEASON_DIGITS_CONFIG_KEY, + cls.DEFAULT_INDEX_SEASON_DIGITS, + ), + cls.INDEX_EPISODE_DIGITS_KEY: ConfigurationController.getConfiguredIntegerValue( + configurationData, + ConfigurationController.DEFAULT_INDEX_EPISODE_DIGITS_CONFIG_KEY, + cls.DEFAULT_INDEX_EPISODE_DIGITS, + ), + cls.INDICATOR_SEASON_DIGITS_KEY: ConfigurationController.getConfiguredIntegerValue( + configurationData, + ConfigurationController.DEFAULT_INDICATOR_SEASON_DIGITS_CONFIG_KEY, + cls.DEFAULT_INDICATOR_SEASON_DIGITS, + ), + cls.INDICATOR_EPISODE_DIGITS_KEY: ConfigurationController.getConfiguredIntegerValue( + configurationData, + ConfigurationController.DEFAULT_INDICATOR_EPISODE_DIGITS_CONFIG_KEY, + cls.DEFAULT_INDICATOR_EPISODE_DIGITS, + ), + } def __init__(self, **kwargs): @@ -53,36 +92,37 @@ class ShowDescriptor(): raise TypeError(f"ShowDescriptor.__init__(): Argument {ShowDescriptor.YEAR_KEY} is required to be of type int") self.__showYear = kwargs[ShowDescriptor.YEAR_KEY] else: - self.__showYear = -1 + self.__showYear = -1 + defaultDigitLengths = 
self.getDefaultDigitLengths(self.__context) if ShowDescriptor.INDEX_SEASON_DIGITS_KEY in kwargs.keys(): if type(kwargs[ShowDescriptor.INDEX_SEASON_DIGITS_KEY]) is not int: raise TypeError(f"ShowDescriptor.__init__(): Argument {ShowDescriptor.INDEX_SEASON_DIGITS_KEY} is required to be of type int") self.__indexSeasonDigits = kwargs[ShowDescriptor.INDEX_SEASON_DIGITS_KEY] else: - self.__indexSeasonDigits = ShowDescriptor.DEFAULT_INDEX_SEASON_DIGITS + self.__indexSeasonDigits = defaultDigitLengths[ShowDescriptor.INDEX_SEASON_DIGITS_KEY] if ShowDescriptor.INDEX_EPISODE_DIGITS_KEY in kwargs.keys(): if type(kwargs[ShowDescriptor.INDEX_EPISODE_DIGITS_KEY]) is not int: raise TypeError(f"ShowDescriptor.__init__(): Argument {ShowDescriptor.INDEX_EPISODE_DIGITS_KEY} is required to be of type int") self.__indexEpisodeDigits = kwargs[ShowDescriptor.INDEX_EPISODE_DIGITS_KEY] else: - self.__indexEpisodeDigits = ShowDescriptor.DEFAULT_INDEX_EPISODE_DIGITS + self.__indexEpisodeDigits = defaultDigitLengths[ShowDescriptor.INDEX_EPISODE_DIGITS_KEY] if ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY in kwargs.keys(): if type(kwargs[ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY]) is not int: raise TypeError(f"ShowDescriptor.__init__(): Argument {ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY} is required to be of type int") self.__indicatorSeasonDigits = kwargs[ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY] else: - self.__indicatorSeasonDigits = ShowDescriptor.DEFAULT_INDICATOR_SEASON_DIGITS + self.__indicatorSeasonDigits = defaultDigitLengths[ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY] if ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY in kwargs.keys(): if type(kwargs[ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY]) is not int: raise TypeError(f"ShowDescriptor.__init__(): Argument {ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY} is required to be of type int") self.__indicatorEpisodeDigits = kwargs[ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY] else: - self.__indicatorEpisodeDigits = 
ShowDescriptor.DEFAULT_INDICATOR_EPISODE_DIGITS + self.__indicatorEpisodeDigits = defaultDigitLengths[ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY] def getId(self): diff --git a/src/ffx/show_details_screen.py b/src/ffx/show_details_screen.py index 8d840c1..311a91f 100644 --- a/src/ffx/show_details_screen.py +++ b/src/ffx/show_details_screen.py @@ -160,11 +160,20 @@ class ShowDetailsScreen(Screen): self.updateShiftedSeasons() else: - - self.query_one("#index_season_digits_input", Input).value = "2" - self.query_one("#index_episode_digits_input", Input).value = "2" - self.query_one("#indicator_season_digits_input", Input).value = "2" - self.query_one("#indicator_episode_digits_input", Input).value = "2" + defaultDigitLengths = ShowDescriptor.getDefaultDigitLengths(self.context) + + self.query_one("#index_season_digits_input", Input).value = str( + defaultDigitLengths[ShowDescriptor.INDEX_SEASON_DIGITS_KEY] + ) + self.query_one("#index_episode_digits_input", Input).value = str( + defaultDigitLengths[ShowDescriptor.INDEX_EPISODE_DIGITS_KEY] + ) + self.query_one("#indicator_season_digits_input", Input).value = str( + defaultDigitLengths[ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY] + ) + self.query_one("#indicator_episode_digits_input", Input).value = str( + defaultDigitLengths[ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY] + ) def getSelectedPatternDescriptor(self): @@ -387,7 +396,7 @@ class ShowDetailsScreen(Screen): def getShowDescriptorFromInput(self) -> ShowDescriptor: - kwargs = {} + kwargs = {ShowDescriptor.CONTEXT_KEY: self.context} try: if self.__showDescriptor: diff --git a/tests/integration/test_cli_unmux.py b/tests/integration/test_cli_unmux.py index a47241a..84b4b52 100644 --- a/tests/integration/test_cli_unmux.py +++ b/tests/integration/test_cli_unmux.py @@ -8,8 +8,18 @@ import sys import tempfile import unittest -from tests.support.ffx_bundle import SourceTrackSpec, create_source_fixture +from tests.support.ffx_bundle import ( + SourceTrackSpec, + 
build_controller_context, + create_source_fixture, + dispose_controller_context, +) +from ffx.pattern_controller import PatternController +from ffx.show_controller import ShowController +from ffx.show_descriptor import ShowDescriptor +from ffx.track_codec import TrackCodec +from ffx.track_descriptor import TrackDescriptor from ffx.track_type import TrackType try: @@ -66,6 +76,39 @@ class UnmuxCliTests(unittest.TestCase): f"STDERR:\n{completed.stderr}" ) + def seed_matching_show(self, pattern_expression: str, *, indicator_season_digits: int, indicator_episode_digits: int) -> None: + context = build_controller_context(self.database_path) + try: + ShowController(context).updateShow( + ShowDescriptor( + id=1, + name="Unmux Test Show", + year=2000, + indicator_season_digits=indicator_season_digits, + indicator_episode_digits=indicator_episode_digits, + ) + ) + PatternController(context).savePatternSchema( + { + "show_id": 1, + "pattern": pattern_expression, + "quality": 0, + "notes": "", + }, + trackDescriptors=[ + TrackDescriptor( + index=0, + source_index=0, + track_type=TrackType.VIDEO, + codec_name=TrackCodec.H264, + tags={}, + disposition_set=set(), + ) + ], + ) + finally: + dispose_controller_context(context) + def test_subtitles_only_without_output_directory_uses_configured_base_plus_label(self): self.write_config( { @@ -101,6 +144,85 @@ class UnmuxCliTests(unittest.TestCase): expected_directory = self.home_dir / ".local" / "var" / "sync" / "subtitles" / "dball" self.assertTrue(expected_directory.is_dir(), expected_directory) + def test_unmux_uses_configured_indicator_digits_in_output_filenames(self): + self.write_config( + { + "defaultIndicatorSeasonDigits": 3, + "defaultIndicatorEpisodeDigits": 4, + } + ) + source_filename = "unmux_s01e01.mkv" + output_directory = self.workdir / "unmux-output" + output_directory.mkdir() + source_path = create_source_fixture( + self.workdir, + source_filename, + [ + SourceTrackSpec(TrackType.VIDEO, identity="video-0"), + ], + ) 
+ + completed = run_ffx_unmux( + self.workdir, + self.home_dir, + self.database_path, + "--label", + "dball", + "--output-directory", + str(output_directory), + str(source_path), + ) + self.assertCompleted(completed) + + output_filenames = sorted(path.name for path in output_directory.iterdir()) + self.assertEqual(1, len(output_filenames), output_filenames) + self.assertTrue( + output_filenames[0].startswith("dball_S001E0001_"), + output_filenames, + ) + + def test_unmux_prefers_matched_show_indicator_digits_over_config_defaults(self): + self.write_config( + { + "defaultIndicatorSeasonDigits": 4, + "defaultIndicatorEpisodeDigits": 4, + } + ) + self.seed_matching_show( + r"^unmux_([sS][0-9]+[eE][0-9]+)\.mkv$", + indicator_season_digits=1, + indicator_episode_digits=3, + ) + source_filename = "unmux_s01e01.mkv" + output_directory = self.workdir / "unmux-output" + output_directory.mkdir() + source_path = create_source_fixture( + self.workdir, + source_filename, + [ + SourceTrackSpec(TrackType.VIDEO, identity="video-0"), + ], + ) + + completed = run_ffx_unmux( + self.workdir, + self.home_dir, + self.database_path, + "--label", + "dball", + "--output-directory", + str(output_directory), + str(source_path), + ) + self.assertCompleted(completed) + + output_filenames = sorted(path.name for path in output_directory.iterdir()) + self.assertEqual(1, len(output_filenames), output_filenames) + self.assertTrue( + output_filenames[0].startswith("dball_S1E001_"), + output_filenames, + ) + if __name__ == "__main__": unittest.main() diff --git a/tests/unit/test_cli_rename.py b/tests/unit/test_cli_rename.py index 5d0d57e..813f2f1 100644 --- a/tests/unit/test_cli_rename.py +++ b/tests/unit/test_cli_rename.py @@ -1,5 +1,7 @@ from __future__ import annotations +import json +import os from pathlib import Path import sys import tempfile @@ -21,6 +23,8 @@ class RenameCliTests(unittest.TestCase): def setUp(self): self.tempdir = tempfile.TemporaryDirectory() self.workspace = 
Path(self.tempdir.name) + self.home_dir = self.workspace / "home" + self.home_dir.mkdir() def tearDown(self): self.tempdir.cleanup() @@ -30,9 +34,18 @@ class RenameCliTests(unittest.TestCase): source_path.write_bytes(payload) return source_path + def write_config(self, data: dict) -> None: + config_dir = self.home_dir / ".local" / "etc" + config_dir.mkdir(parents=True, exist_ok=True) + (config_dir / "ffx.json").write_text(json.dumps(data), encoding="utf-8") + def invoke_rename(self, *args: str): runner = CliRunner() - result = runner.invoke(cli.ffx, ["rename", *args]) + result = runner.invoke( + cli.ffx, + ["rename", *args], + env={**os.environ, "HOME": str(self.home_dir)}, + ) self.assertEqual(0, result.exit_code, result.output) return result @@ -41,8 +54,8 @@ class RenameCliTests(unittest.TestCase): result = self.invoke_rename("--prefix", "dball", str(source_path)) - target_path = self.workspace / "dball_s2e3.mkv" - self.assertIn("demo_S02E03.mkv -> dball_s2e3.mkv", result.output) + target_path = self.workspace / "dball_s02e03.mkv" + self.assertIn("demo_S02E03.mkv -> dball_s02e03.mkv", result.output) self.assertFalse(source_path.exists()) self.assertTrue(target_path.exists()) self.assertEqual(b"season-episode", target_path.read_bytes()) @@ -58,8 +71,8 @@ class RenameCliTests(unittest.TestCase): str(source_path), ) - target_path = self.workspace / "dball_s1e7_bonus.mp4" - self.assertIn("demo_E07.mp4 -> dball_s1e7_bonus.mp4", result.output) + target_path = self.workspace / "dball_s01e07_bonus.mp4" + self.assertIn("demo_E07.mp4 -> dball_s01e07_bonus.mp4", result.output) self.assertFalse(source_path.exists()) self.assertTrue(target_path.exists()) self.assertEqual(b"episode-only", target_path.read_bytes()) @@ -75,8 +88,8 @@ class RenameCliTests(unittest.TestCase): str(source_path), ) - target_path = self.workspace / "dball_s5e7.webm" - self.assertIn("demo_s02e07.webm -> dball_s5e7.webm", result.output) + target_path = self.workspace / "dball_s05e07.webm" + 
self.assertIn("demo_s02e07.webm -> dball_s05e07.webm", result.output) self.assertFalse(source_path.exists()) self.assertTrue(target_path.exists()) @@ -90,11 +103,27 @@ class RenameCliTests(unittest.TestCase): str(source_path), ) - target_path = self.workspace / "dball_s1e7.mkv" - self.assertIn("demo_E07.mkv -> dball_s1e7.mkv", result.output) + target_path = self.workspace / "dball_s01e07.mkv" + self.assertIn("demo_E07.mkv -> dball_s01e07.mkv", result.output) self.assertTrue(source_path.exists()) self.assertFalse(target_path.exists()) + def test_rename_uses_configured_indicator_digit_lengths(self): + self.write_config( + { + "defaultIndicatorSeasonDigits": 3, + "defaultIndicatorEpisodeDigits": 4, + } + ) + source_path = self.write_source("demo_E07.mkv") + + result = self.invoke_rename("--prefix", "dball", str(source_path)) + + target_path = self.workspace / "dball_s001e0007.mkv" + self.assertIn("demo_E07.mkv -> dball_s001e0007.mkv", result.output) + self.assertFalse(source_path.exists()) + self.assertTrue(target_path.exists()) + def test_rename_skips_non_matching_filenames(self): source_path = self.write_source("demo_finale.mkv") diff --git a/tests/unit/test_configure_workstation_script.py b/tests/unit/test_configure_workstation_script.py new file mode 100644 index 0000000..cf5e76a --- /dev/null +++ b/tests/unit/test_configure_workstation_script.py @@ -0,0 +1,150 @@ +from __future__ import annotations + +import json +import os +from pathlib import Path +import stat +import subprocess +import sys +import tempfile +import textwrap +import unittest + + +REPO_ROOT = Path(__file__).resolve().parents[2] +SCRIPT_PATH = REPO_ROOT / "tools" / "configure_workstation.sh" +BUNDLE_PYTHON = Path.home() / ".local" / "share" / "ffx.venv" / "bin" / "python" + + +class ConfigureWorkstationScriptTests(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + self.home_dir = Path(self.tempdir.name) / "home" + self.home_dir.mkdir() + self.stub_bin_dir = 
Path(self.tempdir.name) / "bin" + self.stub_bin_dir.mkdir() + + for command_name in ("git", "python3", "ffmpeg", "ffprobe", "cpulimit"): + self.write_stub_command(command_name) + + def tearDown(self): + self.tempdir.cleanup() + + def write_stub_command(self, name: str, body: str = "") -> None: + script_path = self.stub_bin_dir / name + script_path.write_text( + "#!/usr/bin/env bash\n" + + body + + "\n", + encoding="utf-8", + ) + script_path.chmod(script_path.stat().st_mode | stat.S_IXUSR) + + def run_script(self, **env_overrides: str) -> subprocess.CompletedProcess[str]: + if not BUNDLE_PYTHON.is_file(): + self.skipTest(f"Missing bundle Python at {BUNDLE_PYTHON}") + + env = { + **os.environ, + "HOME": str(self.home_dir), + "PATH": f"{self.stub_bin_dir}:{os.environ.get('PATH', '')}", + "FFX_PYTHON": str(BUNDLE_PYTHON), + **env_overrides, + } + + return subprocess.run( + ["bash", str(SCRIPT_PATH)], + capture_output=True, + cwd=REPO_ROOT, + env=env, + text=True, + ) + + def test_script_seeds_default_config_from_template(self): + completed = self.run_script() + + self.assertEqual( + 0, + completed.returncode, + f"STDOUT:\n{completed.stdout}\nSTDERR:\n{completed.stderr}", + ) + + config_path = self.home_dir / ".local" / "etc" / "ffx.json" + self.assertTrue(config_path.exists()) + + config_data = json.loads(config_path.read_text(encoding="utf-8")) + self.assertEqual( + { + "databasePath": str(self.home_dir / ".local" / "var" / "ffx" / "ffx.db"), + "logDirectory": str(self.home_dir / ".local" / "var" / "log"), + "subtitlesDirectory": str( + self.home_dir / ".local" / "var" / "sync" / "subtitles" + ), + "defaultIndexSeasonDigits": 2, + "defaultIndexEpisodeDigits": 2, + "defaultIndicatorSeasonDigits": 2, + "defaultIndicatorEpisodeDigits": 2, + "metadata": { + "signature": {"RECODED_WITH": "FFX"}, + "remove": [ + "VERSION-eng", + "creation_time", + "NAME", + ], + "streams": { + "remove": [ + "BPS", + "NUMBER_OF_FRAMES", + "NUMBER_OF_BYTES", + "_STATISTICS_WRITING_APP", + 
"_STATISTICS_WRITING_DATE_UTC", + "_STATISTICS_TAGS", + "BPS-eng", + "DURATION-eng", + "NUMBER_OF_FRAMES-eng", + "NUMBER_OF_BYTES-eng", + "_STATISTICS_WRITING_APP-eng", + "_STATISTICS_WRITING_DATE_UTC-eng", + "_STATISTICS_TAGS-eng", + ] + }, + }, + }, + config_data, + ) + + def test_script_honors_custom_template_override(self): + custom_template_path = Path(self.tempdir.name) / "custom-config.j2" + custom_template_path.write_text( + textwrap.dedent( + """ + { + "databasePath": {{ database_path_json }}, + "marker": "from-template", + "subtitlesDirectory": {{ subtitles_directory_json }} + } + """ + ).lstrip(), + encoding="utf-8", + ) + + completed = self.run_script(FFX_CONFIG_TEMPLATE=str(custom_template_path)) + + self.assertEqual( + 0, + completed.returncode, + f"STDOUT:\n{completed.stdout}\nSTDERR:\n{completed.stderr}", + ) + + config_path = self.home_dir / ".local" / "etc" / "ffx.json" + config_data = json.loads(config_path.read_text(encoding="utf-8")) + + self.assertEqual("from-template", config_data["marker"]) + self.assertEqual( + str(self.home_dir / ".local" / "var" / "ffx" / "ffx.db"), + config_data["databasePath"], + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_show_descriptor_defaults.py b/tests/unit/test_show_descriptor_defaults.py new file mode 100644 index 0000000..159931c --- /dev/null +++ b/tests/unit/test_show_descriptor_defaults.py @@ -0,0 +1,97 @@ +from __future__ import annotations + +import logging +from pathlib import Path +import sys +import unittest + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx.constants import ( + DEFAULT_SHOW_INDEX_EPISODE_DIGITS, + DEFAULT_SHOW_INDEX_SEASON_DIGITS, + DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS, + DEFAULT_SHOW_INDICATOR_SEASON_DIGITS, +) +from ffx.helper import getEpisodeFileBasename +from ffx.show_descriptor import ShowDescriptor + + +class StaticConfig: + def __init__(self, 
data: dict | None = None): + self._data = data or {} + + def getData(self): + return self._data + + +class ShowDescriptorDefaultTests(unittest.TestCase): + def make_context(self, config_data: dict | None = None) -> dict: + logger = logging.getLogger("ffx-test-show-descriptor-defaults") + logger.handlers = [] + logger.addHandler(logging.NullHandler()) + return {"config": StaticConfig(config_data), "logger": logger} + + def test_show_descriptor_uses_config_defaults_when_context_is_present(self): + descriptor = ShowDescriptor( + context=self.make_context( + { + "defaultIndexSeasonDigits": "1", + "defaultIndexEpisodeDigits": "3", + "defaultIndicatorSeasonDigits": "3", + "defaultIndicatorEpisodeDigits": "4", + } + ), + id=1, + name="Configured Show", + year=2024, + ) + + self.assertEqual(1, descriptor.getIndexSeasonDigits()) + self.assertEqual(3, descriptor.getIndexEpisodeDigits()) + self.assertEqual(3, descriptor.getIndicatorSeasonDigits()) + self.assertEqual(4, descriptor.getIndicatorEpisodeDigits()) + + def test_show_descriptor_without_context_uses_shared_constants(self): + descriptor = ShowDescriptor(id=1, name="Default Show", year=2024) + + self.assertEqual(DEFAULT_SHOW_INDEX_SEASON_DIGITS, descriptor.getIndexSeasonDigits()) + self.assertEqual(DEFAULT_SHOW_INDEX_EPISODE_DIGITS, descriptor.getIndexEpisodeDigits()) + self.assertEqual( + DEFAULT_SHOW_INDICATOR_SEASON_DIGITS, + descriptor.getIndicatorSeasonDigits(), + ) + self.assertEqual( + DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS, + descriptor.getIndicatorEpisodeDigits(), + ) + + def test_episode_basename_uses_configured_digit_defaults_when_omitted(self): + basename = getEpisodeFileBasename( + "Configured Show", + "Episode Name", + 2, + 7, + context=self.make_context( + { + "defaultIndexSeasonDigits": 1, + "defaultIndexEpisodeDigits": 3, + "defaultIndicatorSeasonDigits": 3, + "defaultIndicatorEpisodeDigits": 4, + } + ), + ) + + self.assertEqual( + "Configured Show - 2007 Episode Name - S002E0007", + basename, + ) + + 
+if __name__ == "__main__": + unittest.main() diff --git a/tools/configure_workstation.sh b/tools/configure_workstation.sh index 5c2112f..30f56ab 100755 --- a/tools/configure_workstation.sh +++ b/tools/configure_workstation.sh @@ -2,12 +2,15 @@ set -u +ROOT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")/.." && pwd)" CONFIG_DIR="${FFX_CONFIG_DIR:-${HOME}/.local/etc}" CONFIG_FILE="${FFX_CONFIG_FILE:-${CONFIG_DIR}/ffx.json}" VAR_DIR="${FFX_VAR_DIR:-${HOME}/.local/var/ffx}" LOG_DIR="${FFX_LOG_DIR:-${HOME}/.local/var/log}" DATABASE_FILE="${FFX_DATABASE_FILE:-${VAR_DIR}/ffx.db}" SUBTITLES_BASE_DIR="${FFX_SUBTITLES_BASE_DIR:-${HOME}/.local/var/sync/subtitles}" +FFX_PYTHON="${FFX_PYTHON:-${HOME}/.local/share/ffx.venv/bin/python}" +CONFIG_TEMPLATE_FILE="${FFX_CONFIG_TEMPLATE:-${ROOT_DIR}/assets/ffx.json.j2}" CHECK_ONLY=0 WITH_TESTS=0 @@ -49,6 +52,8 @@ Environment overrides: FFX_LOG_DIR Override the default log directory. FFX_DATABASE_FILE Override the database path written into a newly seeded config. FFX_SUBTITLES_BASE_DIR Override the default subtitles base directory written into a newly seeded config. + FFX_PYTHON Override the bundle venv Python used to render the seeded config. + FFX_CONFIG_TEMPLATE Override the Jinja2 template path used to seed the config. Notes: - tools/setup.sh is the first installation step and owns bundle venv setup. @@ -316,6 +321,93 @@ install_system_requirements() { return 0 } +render_default_config() { + local output_path="$1" + local temporary_output_path="" + + if [ ! -x "${FFX_PYTHON}" ]; then + printf 'Missing bundle Python interpreter at %s.\n' "${FFX_PYTHON}" >&2 + INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) + return 1 + fi + + if [ ! -f "${CONFIG_TEMPLATE_FILE}" ]; then + printf 'Missing FFX config template at %s.\n' "${CONFIG_TEMPLATE_FILE}" >&2 + INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) + return 1 + fi + + if ! 
temporary_output_path="$(mktemp "${output_path}.tmp.XXXXXX")"; then + printf 'Failed to create a temporary config file next to %s.\n' "${output_path}" >&2 + INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) + return 1 + fi + + if ! FFX_CONFIG_TEMPLATE_FILE="${CONFIG_TEMPLATE_FILE}" \ + FFX_REPO_ROOT="${ROOT_DIR}" \ + FFX_DATABASE_PATH="${DATABASE_FILE}" \ + FFX_LOG_DIRECTORY="${LOG_DIR}" \ + FFX_SUBTITLES_DIRECTORY="${SUBTITLES_BASE_DIR}" \ + "${FFX_PYTHON}" - >"${temporary_output_path}" <<'PY' +from __future__ import annotations + +import json +import os +import sys +from pathlib import Path + +from jinja2 import Environment, FileSystemLoader, StrictUndefined + +repo_root = Path(os.environ["FFX_REPO_ROOT"]) +src_root = repo_root / "src" +if str(src_root) not in sys.path: + sys.path.insert(0, str(src_root)) + +from ffx.constants import ( + DEFAULT_SHOW_INDEX_EPISODE_DIGITS, + DEFAULT_SHOW_INDEX_SEASON_DIGITS, + DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS, + DEFAULT_SHOW_INDICATOR_SEASON_DIGITS, +) + +template_path = Path(os.environ["FFX_CONFIG_TEMPLATE_FILE"]) +environment = Environment( + loader=FileSystemLoader(str(template_path.parent)), + undefined=StrictUndefined, + autoescape=False, + keep_trailing_newline=True, +) +template = environment.get_template(template_path.name) + +sys.stdout.write( + template.render( + database_path_json=json.dumps(os.environ["FFX_DATABASE_PATH"]), + log_directory_json=json.dumps(os.environ["FFX_LOG_DIRECTORY"]), + subtitles_directory_json=json.dumps(os.environ["FFX_SUBTITLES_DIRECTORY"]), + default_index_season_digits=DEFAULT_SHOW_INDEX_SEASON_DIGITS, + default_index_episode_digits=DEFAULT_SHOW_INDEX_EPISODE_DIGITS, + default_indicator_season_digits=DEFAULT_SHOW_INDICATOR_SEASON_DIGITS, + default_indicator_episode_digits=DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS, + ) +) +PY + then + rm -f "${temporary_output_path}" + printf 'Failed to render ffx config from template %s.\n' "${CONFIG_TEMPLATE_FILE}" >&2 + INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) + 
return 1 + fi + + if ! mv "${temporary_output_path}" "${output_path}"; then + rm -f "${temporary_output_path}" + printf 'Failed to move rendered ffx config into place at %s.\n' "${output_path}" >&2 + INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) + return 1 + fi + + return 0 +} + seed_default_config() { if [ "${CHECK_ONLY}" -eq 1 ]; then return 0 @@ -365,43 +457,7 @@ seed_default_config() { if [ ! -f "${CONFIG_FILE}" ]; then printf 'Seeding ffx config at %s...\n' "${CONFIG_FILE}" - if ! cat >"${CONFIG_FILE}" <<EOF -{ - "databasePath": "${DATABASE_FILE}", - "logDirectory": "${LOG_DIR}", - "subtitlesDirectory": "${SUBTITLES_BASE_DIR}", - "metadata": { - "signature": { - "RECODED_WITH": "FFX" - }, - "remove": [ - "VERSION-eng", - "creation_time", - "NAME" - ], - "streams": { - "remove": [ - "BPS", - "NUMBER_OF_FRAMES", - "NUMBER_OF_BYTES", - "_STATISTICS_WRITING_APP", - "_STATISTICS_WRITING_DATE_UTC", - "_STATISTICS_TAGS", - "BPS-eng", - "DURATION-eng", - "NUMBER_OF_FRAMES-eng", - "NUMBER_OF_BYTES-eng", - "_STATISTICS_WRITING_APP-eng", - "_STATISTICS_WRITING_DATE_UTC-eng", - "_STATISTICS_TAGS-eng" - ] - } - } -} -EOF - then - printf 'Failed to write ffx config at %s.\n' "${CONFIG_FILE}" >&2 - INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) + if ! 
render_default_config "${CONFIG_FILE}"; then return 1 fi created_any=1 From f0d4c36bc3394ad0bf5d9bf950336a5eb46725ff Mon Sep 17 00:00:00 2001 From: Javanaut <javanaut@refulgent.de> Date: Sun, 12 Apr 2026 12:12:41 +0200 Subject: [PATCH 25/28] Adds release script and bumps 0.2.4 --- README.md | 12 ++ SCRATCHPAD.md | 7 - pyproject.toml | 2 +- requirements/project.md | 2 +- src/ffx/constants.py | 2 +- tools/merge_dev_into_main.sh | 322 +++++++++++++++++++++++++++++++++++ 6 files changed, 337 insertions(+), 10 deletions(-) create mode 100755 tools/merge_dev_into_main.sh diff --git a/README.md b/README.md index 6eed68c..00a1153 100644 --- a/README.md +++ b/README.md @@ -99,6 +99,18 @@ TMDB-backed metadata enrichment requires `TMDB_API_KEY` to be set in the environ ## Version History +### 0.2.4 + +- lightweight CLI commands now stay import-light via lazy runtime loading +- setup/config templating moved to `assets/ffx.json.j2` +- aligned two-step local setup wrappers: `ffx setup` and `ffx configure_workstation` +- combined `ffprobe` payload reuse in `FileProperties` +- configurable crop-detect sampling plus per-process crop result caching +- single-query controller accessors and conditional DB schema bootstrap +- shared screen bootstrap/controller wiring for large detail screens +- configurable default season/episode digit lengths +- digit-aware `rename` and padded `unmux` filename markers + ### 0.2.3 - PyPI packaging diff --git a/SCRATCHPAD.md b/SCRATCHPAD.md index f9931d0..c11bd74 100644 --- a/SCRATCHPAD.md +++ b/SCRATCHPAD.md @@ -9,19 +9,12 @@ - The biggest near-term wins are in startup cost, repeated subprocess work, repeated database query patterns, and general repo hygiene. - This list is intentionally optimization-oriented rather than bug-oriented. Some items below also improve correctness or maintainability, but they were selected because they can reduce runtime cost, operator friction, or iteration overhead. 
- A first modern integration slice now exists under [`tests/integration/subtrack_mapping`](/home/osgw/.local/src/codex/ffx/tests/integration/subtrack_mapping). Remaining test-suite cleanup is now mostly about migrating and shrinking the legacy harness surface under [`tests/legacy`](/home/osgw/.local/src/codex/ffx/tests/legacy). -- The CLI root now lazy-loads heavy runtime dependencies so lightweight commands such as `version`, `help`, `setup`, `configure_workstation`, and `upgrade` stay import-light. - Shared CLI defaults for container/output tokens now live outside [`src/ffx/ffx_controller.py`](/home/osgw/.local/src/codex/ffx/src/ffx/ffx_controller.py), and a focused unit test locks in the lazy-import contract. -- `FileProperties` now uses one cached `ffprobe -show_format -show_streams -of json` call per source file, and the combined payload was confirmed against the Dragonball asset to satisfy both previous probe call sites fully. -- Crop detection now uses configurable sampling windows plus per-process caching keyed by source file and sampling range, and the `cropdetect` CLI command now calls the real `FileProperties.findCropArguments()` path. -- Database startup now bootstraps schema only when required tables are actually missing, while version enforcement still runs on ordinary DB-backed context creation. - Helper filename and rich-text utilities now use compiled raw regexes plus translate-based filename filtering, with unit coverage for TMDB suffix rewriting and Rich color stripping. - Process resource limiting now has explicit disabled/default states in the CLI and requirements, and combined CPU-plus-niceness wrapping now executes as `cpulimit -- nice -n ... <command>` instead of a less explicit prefix chain. - FFX logger setup now reuses named handlers, and fallback logger access no longer mutates handlers in ordinary constructors and helpers. 
- The process wrapper now uses `subprocess.run(...)` with centralized command formatting plus stable timeout and missing-command error mapping. -- Active ORM controllers now use single-query accessors instead of paired `count()` plus `first()` lookups. - Pattern matching now uses cached compiled regexes plus explicit duplicate-match errors, and pattern creation flows no longer persist zero-track patterns. -- The two-step local setup flow now has aligned CLI wrappers for both phases: `ffx setup` for bundle prep and `ffx configure_workstation` for workstation prep, while the shell scripts remain the bootstrap entrypoints before the bundle exists. -- The large detail screens now share one screen-bootstrap helper for context, metadata-filter extraction, and controller wiring, and show-pattern loading now goes through `PatternController` instead of a screen-local session query. ## Focused Snapshot diff --git a/pyproject.toml b/pyproject.toml index 7c00f6d..9ea7e52 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [project] name = "ffx" description = "FFX recoding and metadata managing tool" -version = "0.2.3" +version = "0.2.4" license = {file = "LICENSE.md"} dependencies = [ "requests", diff --git a/requirements/project.md b/requirements/project.md index 73bbe47..9018043 100644 --- a/requirements/project.md +++ b/requirements/project.md @@ -91,7 +91,7 @@ - Intended for local execution, not server deployment. - Stores default state in `~/.local/etc/ffx.json`, `~/.local/var/ffx/ffx.db`, and `~/.local/var/log/ffx.log`. - Timeline constraints: - - The current implemented scope reflects a compact alpha release stream up to version `0.2.3`. + - The current implemented scope reflects a compact alpha release stream up to version `0.2.4`. - Team capacity assumptions: - Maintained as a small codebase where simple patterns and direct controller logic are preferred over framework-heavy abstractions. 
- Third-party dependencies: diff --git a/src/ffx/constants.py b/src/ffx/constants.py index eb212ef..ec22587 100644 --- a/src/ffx/constants.py +++ b/src/ffx/constants.py @@ -1,4 +1,4 @@ -VERSION='0.2.3' +VERSION='0.2.4' DATABASE_VERSION = 2 DEFAULT_QUALITY = 32 diff --git a/tools/merge_dev_into_main.sh b/tools/merge_dev_into_main.sh new file mode 100755 index 0000000..8e28250 --- /dev/null +++ b/tools/merge_dev_into_main.sh @@ -0,0 +1,322 @@ +#!/usr/bin/env bash + +set -euo pipefail + +DEV_BRANCH="dev" +MAIN_BRANCH="main" +ORIGIN_REMOTE="origin" +DEFAULT_AGENT_DEVELOPMENT_PATHS=( + "AGENTS.md" + "SCRATCHPAD.md" + "guidance" + "requirements" + "prompts" + "process" +) +AGENT_DEVELOPMENT_PATHS=("${DEFAULT_AGENT_DEVELOPMENT_PATHS[@]}") + +CURRENT_BRANCH="${DEV_BRANCH}" +ASSUME_YES=0 +DRY_RUN=0 +SKIP_TESTS=0 + +usage() { + cat <<EOF +Usage: $(basename "$0") [--yes] [--dry-run] [--skip-tests] [--help] + +Merge the local ${DEV_BRANCH} branch into ${MAIN_BRANCH}, remove agent-development files +from ${MAIN_BRANCH}, create a release merge commit and tag, push to ${ORIGIN_REMOTE}/${MAIN_BRANCH}, +and switch back to ${DEV_BRANCH}. + +Options: + --yes Skip the interactive confirmation prompt. + --dry-run Print the validated release plan without changing git state. + --skip-tests Skip the default pre-release test gate (./tools/test.sh). + --help Show this help text. + +Environment overrides: + FFX_RELEASE_CLEAN_PATHS Colon-separated path list to remove from ${MAIN_BRANCH} + after merging ${DEV_BRANCH}. 
Defaults to: + ${DEFAULT_AGENT_DEVELOPMENT_PATHS[*]} +EOF +} + +fail() { + printf '%s\n' "$*" >&2 + exit 1 +} + +cleanup() { + local exit_code="$1" + + trap - EXIT + + if git rev-parse -q --verify MERGE_HEAD >/dev/null 2>&1; then + printf 'Merge is incomplete; aborting merge on %s...\n' "${CURRENT_BRANCH}" >&2 + git merge --abort >/dev/null 2>&1 || true + fi + + if [ "${CURRENT_BRANCH}" != "${DEV_BRANCH}" ]; then + printf 'Switching back to %s...\n' "${DEV_BRANCH}" >&2 + git switch "${DEV_BRANCH}" >/dev/null 2>&1 || true + CURRENT_BRANCH="${DEV_BRANCH}" + fi + + exit "${exit_code}" +} + +load_cleanup_paths() { + if [ -n "${FFX_RELEASE_CLEAN_PATHS:-}" ]; then + IFS=':' read -r -a AGENT_DEVELOPMENT_PATHS <<< "${FFX_RELEASE_CLEAN_PATHS}" + fi + + if [ "${#AGENT_DEVELOPMENT_PATHS[@]}" -eq 0 ]; then + fail "Release cleanup path list is empty." + fi +} + +require_repo_state() { + if ! git rev-parse --show-toplevel >/dev/null 2>&1; then + fail "This helper must be run inside a git repository." + fi + + if ! git show-ref --verify --quiet "refs/heads/${DEV_BRANCH}"; then + fail "Local branch '${DEV_BRANCH}' does not exist." + fi + + if ! git show-ref --verify --quiet "refs/heads/${MAIN_BRANCH}"; then + fail "Local branch '${MAIN_BRANCH}' does not exist." + fi + + if ! git remote get-url "${ORIGIN_REMOTE}" >/dev/null 2>&1; then + fail "Remote '${ORIGIN_REMOTE}' is not configured." + fi +} + +require_dev_checkout() { + CURRENT_BRANCH="$(git rev-parse --abbrev-ref HEAD)" + if [ "${CURRENT_BRANCH}" != "${DEV_BRANCH}" ]; then + fail "Current branch is '${CURRENT_BRANCH}', but '${DEV_BRANCH}' is required." + fi +} + +require_clean_worktree() { + if [ -n "$(git status --porcelain)" ]; then + fail "Local '${DEV_BRANCH}' branch is dirty. Commit, stash, or clean changes first." 
+ fi +} + +fetch_remote_state() { + printf 'Fetching %s branch and tag state...\n' "${ORIGIN_REMOTE}" + git fetch "${ORIGIN_REMOTE}" "${DEV_BRANCH}" "${MAIN_BRANCH}" --tags >/dev/null +} + +require_branch_matches_remote() { + local branch="$1" + local local_sha="" + local remote_sha="" + + if ! git show-ref --verify --quiet "refs/remotes/${ORIGIN_REMOTE}/${branch}"; then + fail "Remote branch '${ORIGIN_REMOTE}/${branch}' does not exist." + fi + + local_sha="$(git rev-parse "refs/heads/${branch}")" + remote_sha="$(git rev-parse "refs/remotes/${ORIGIN_REMOTE}/${branch}")" + + if [ "${local_sha}" != "${remote_sha}" ]; then + fail "Local branch '${branch}' is not up to date with '${ORIGIN_REMOTE}/${branch}'. Pull, rebase, or push first." + fi +} + +resolve_release_version() { + local version_from_pyproject="" + local version_from_constants="" + + version_from_pyproject="$( + sed -n 's/^version = "\(.*\)"$/\1/p' pyproject.toml | head -n 1 + )" + version_from_constants="$( + sed -n "s/^VERSION='\(.*\)'$/\1/p" src/ffx/constants.py | head -n 1 + )" + + if [ -z "${version_from_pyproject}" ]; then + fail "Could not resolve release version from pyproject.toml." + fi + + if [ -z "${version_from_constants}" ]; then + fail "Could not resolve release version from src/ffx/constants.py." + fi + + if [ "${version_from_pyproject}" != "${version_from_constants}" ]; then + fail "Version mismatch: pyproject.toml=${version_from_pyproject}, src/ffx/constants.py=${version_from_constants}." + fi + + printf '%s\n' "${version_from_pyproject}" +} + +require_release_tag_available() { + local release_version="$1" + local release_tag="v${release_version}" + + if git rev-parse -q --verify "refs/tags/${release_tag}" >/dev/null 2>&1; then + fail "Tag '${release_tag}' already exists." + fi + + if git rev-parse -q --verify "refs/tags/${release_version}" >/dev/null 2>&1; then + fail "Bare tag '${release_version}' already exists; refusing to create ambiguous release tags." 
+ fi +} + +run_pre_release_tests() { + if [ "${SKIP_TESTS}" -eq 1 ]; then + printf 'Skipping pre-release tests.\n' + return 0 + fi + + if [ ! -x "./tools/test.sh" ]; then + fail "Missing executable test runner at ./tools/test.sh." + fi + + printf 'Running pre-release tests via ./tools/test.sh...\n' + ./tools/test.sh +} + +print_release_plan() { + local release_version="$1" + local release_tag="v${release_version}" + local release_commit_message="Release ${release_tag}" + + printf 'Dry run only. Planned steps:\n' + printf '1. Ensure current branch is %s and the worktree is clean.\n' "${DEV_BRANCH}" + printf '2. Fetch %s and verify local %s and %s exactly match %s/%s and %s/%s.\n' \ + "${ORIGIN_REMOTE}" \ + "${DEV_BRANCH}" \ + "${MAIN_BRANCH}" \ + "${ORIGIN_REMOTE}" \ + "${DEV_BRANCH}" \ + "${ORIGIN_REMOTE}" \ + "${MAIN_BRANCH}" + if [ "${SKIP_TESTS}" -eq 1 ]; then + printf '3. Skip the pre-release test gate.\n' + else + printf '3. Run ./tools/test.sh as the pre-release test gate.\n' + fi + printf '4. Switch to %s and merge %s with --no-ff --no-commit.\n' "${MAIN_BRANCH}" "${DEV_BRANCH}" + printf '5. Remove release-cleanup paths from %s:\n' "${MAIN_BRANCH}" + local cleanup_path="" + for cleanup_path in "${AGENT_DEVELOPMENT_PATHS[@]}"; do + printf ' - %s\n' "${cleanup_path}" + done + printf '6. Create merge commit: %s\n' "${release_commit_message}" + printf '7. Create annotated tag: %s\n' "${release_tag}" + printf '8. Push %s to %s/%s with --follow-tags.\n' "${MAIN_BRANCH}" "${ORIGIN_REMOTE}" "${MAIN_BRANCH}" + printf '9. Switch back to %s.\n' "${DEV_BRANCH}" +} + +trap 'cleanup $?' 
EXIT + +while [ "$#" -gt 0 ]; do + case "$1" in + --yes) + ASSUME_YES=1 + ;; + --dry-run) + DRY_RUN=1 + ;; + --skip-tests) + SKIP_TESTS=1 + ;; + --help|-h) + usage + exit 0 + ;; + *) + usage >&2 + fail "Unknown option: $1" + ;; + esac + shift +done + +load_cleanup_paths +require_repo_state +require_dev_checkout +require_clean_worktree +fetch_remote_state +require_branch_matches_remote "${DEV_BRANCH}" +require_branch_matches_remote "${MAIN_BRANCH}" + +RELEASE_VERSION="$(resolve_release_version)" +RELEASE_TAG="v${RELEASE_VERSION}" +RELEASE_COMMIT_MESSAGE="Release ${RELEASE_TAG}" +require_release_tag_available "${RELEASE_VERSION}" + +printf 'This will merge %s into %s, remove agent-development files on %s,\n' "${DEV_BRANCH}" "${MAIN_BRANCH}" "${MAIN_BRANCH}" +printf 'run the pre-release gate%s, create %s, push to %s/%s, and switch back to %s.\n' \ + "$([ "${SKIP_TESTS}" -eq 1 ] && printf ' (skipped)' || printf '')" \ + "${RELEASE_TAG}" \ + "${ORIGIN_REMOTE}" \ + "${MAIN_BRANCH}" \ + "${DEV_BRANCH}" + +if [ "${ASSUME_YES}" -ne 1 ]; then + printf 'Are you sure? [y/N] ' + read -r confirmation + case "${confirmation}" in + y|Y|yes|YES) + ;; + *) + fail "Aborted by user." + ;; + esac +fi + +if [ "${DRY_RUN}" -eq 1 ]; then + print_release_plan "${RELEASE_VERSION}" + exit 0 +fi + +run_pre_release_tests +require_clean_worktree +fetch_remote_state +require_branch_matches_remote "${DEV_BRANCH}" +require_branch_matches_remote "${MAIN_BRANCH}" +require_release_tag_available "${RELEASE_VERSION}" + +git switch "${MAIN_BRANCH}" >/dev/null +CURRENT_BRANCH="${MAIN_BRANCH}" + +printf 'Merging %s into %s...\n' "${DEV_BRANCH}" "${MAIN_BRANCH}" +if ! git merge --no-ff --no-commit "${DEV_BRANCH}"; then + fail "Merge from '${DEV_BRANCH}' into '${MAIN_BRANCH}' failed." +fi + +if ! git rev-parse -q --verify MERGE_HEAD >/dev/null 2>&1; then + fail "'${MAIN_BRANCH}' is already up to date with '${DEV_BRANCH}'. Nothing to merge." 
+fi + +printf 'Removing agent-development files from %s...\n' "${MAIN_BRANCH}" +git rm -r --ignore-unmatch "${AGENT_DEVELOPMENT_PATHS[@]}" >/dev/null + +if git diff --cached --quiet; then + fail "No staged changes are present after merging '${DEV_BRANCH}' into '${MAIN_BRANCH}'." +fi + +printf 'Creating release merge commit: %s\n' "${RELEASE_COMMIT_MESSAGE}" +git commit -m "${RELEASE_COMMIT_MESSAGE}" + +printf 'Creating annotated tag: %s\n' "${RELEASE_TAG}" +git tag -a "${RELEASE_TAG}" -m "FFX ${RELEASE_VERSION}" + +printf 'Pushing %s and annotated tags to %s...\n' "${MAIN_BRANCH}" "${ORIGIN_REMOTE}" +git push "${ORIGIN_REMOTE}" "${MAIN_BRANCH}" --follow-tags + +printf 'Switching back to %s...\n' "${DEV_BRANCH}" +git switch "${DEV_BRANCH}" >/dev/null +CURRENT_BRANCH="${DEV_BRANCH}" + +printf 'Release merge complete: %s pushed to %s/%s and tagged as %s.\n' \ + "${RELEASE_COMMIT_MESSAGE}" \ + "${ORIGIN_REMOTE}" \ + "${MAIN_BRANCH}" \ + "${RELEASE_TAG}" From 111df1119923521aeae82f8ebe67defa7668a150 Mon Sep 17 00:00:00 2001 From: Javanaut <javanaut@refulgent.de> Date: Sun, 12 Apr 2026 12:20:01 +0200 Subject: [PATCH 26/28] ff --- tools/merge_dev_into_main.sh | 75 +++++++++++++++++++++++++++++++++--- 1 file changed, 69 insertions(+), 6 deletions(-) diff --git a/tools/merge_dev_into_main.sh b/tools/merge_dev_into_main.sh index 8e28250..8e63d89 100755 --- a/tools/merge_dev_into_main.sh +++ b/tools/merge_dev_into_main.sh @@ -25,8 +25,9 @@ usage() { Usage: $(basename "$0") [--yes] [--dry-run] [--skip-tests] [--help] Merge the local ${DEV_BRANCH} branch into ${MAIN_BRANCH}, remove agent-development files -from ${MAIN_BRANCH}, create a release merge commit and tag, push to ${ORIGIN_REMOTE}/${MAIN_BRANCH}, -and switch back to ${DEV_BRANCH}. +from ${MAIN_BRANCH}, auto-resolve merge conflicts limited to those cleanup paths, +create a release merge commit and tag, push to ${ORIGIN_REMOTE}/${MAIN_BRANCH}, and +switch back to ${DEV_BRANCH}. 
Options: --yes Skip the interactive confirmation prompt. @@ -75,6 +76,65 @@ load_cleanup_paths() { fi } +path_is_cleanup_target() { + local candidate_path="$1" + local cleanup_path="" + + for cleanup_path in "${AGENT_DEVELOPMENT_PATHS[@]}"; do + case "${candidate_path}" in + "${cleanup_path}"|"${cleanup_path}"/*) + return 0 + ;; + esac + done + + return 1 +} + +auto_resolve_cleanup_conflicts() { + local unmerged_paths=() + local non_cleanup_conflicts=() + local remaining_conflicts=() + local conflicted_path="" + + mapfile -t unmerged_paths < <(git diff --name-only --diff-filter=U) + if [ "${#unmerged_paths[@]}" -eq 0 ]; then + return 1 + fi + + for conflicted_path in "${unmerged_paths[@]}"; do + if ! path_is_cleanup_target "${conflicted_path}"; then + non_cleanup_conflicts+=("${conflicted_path}") + fi + done + + if [ "${#non_cleanup_conflicts[@]}" -ne 0 ]; then + printf 'Merge produced non-cleanup conflicts:\n' >&2 + for conflicted_path in "${non_cleanup_conflicts[@]}"; do + printf ' - %s\n' "${conflicted_path}" >&2 + done + return 1 + fi + + printf 'Auto-resolving merge conflicts for release-cleanup paths:\n' + for conflicted_path in "${unmerged_paths[@]}"; do + printf ' - %s\n' "${conflicted_path}" + done + + git rm -r --ignore-unmatch "${AGENT_DEVELOPMENT_PATHS[@]}" >/dev/null + + mapfile -t remaining_conflicts < <(git diff --name-only --diff-filter=U) + if [ "${#remaining_conflicts[@]}" -ne 0 ]; then + printf 'Cleanup conflict auto-resolution left unresolved paths:\n' >&2 + for conflicted_path in "${remaining_conflicts[@]}"; do + printf ' - %s\n' "${conflicted_path}" >&2 + done + return 1 + fi + + return 0 +} + require_repo_state() { if ! git rev-parse --show-toplevel >/dev/null 2>&1; then fail "This helper must be run inside a git repository." @@ -202,7 +262,7 @@ print_release_plan() { printf '3. Run ./tools/test.sh as the pre-release test gate.\n' fi printf '4. 
Switch to %s and merge %s with --no-ff --no-commit.\n' "${MAIN_BRANCH}" "${DEV_BRANCH}" - printf '5. Remove release-cleanup paths from %s:\n' "${MAIN_BRANCH}" + printf '5. Auto-resolve merge conflicts limited to release-cleanup paths and remove them from %s:\n' "${MAIN_BRANCH}" local cleanup_path="" for cleanup_path in "${AGENT_DEVELOPMENT_PATHS[@]}"; do printf ' - %s\n' "${cleanup_path}" @@ -252,9 +312,10 @@ RELEASE_COMMIT_MESSAGE="Release ${RELEASE_TAG}" require_release_tag_available "${RELEASE_VERSION}" printf 'This will merge %s into %s, remove agent-development files on %s,\n' "${DEV_BRANCH}" "${MAIN_BRANCH}" "${MAIN_BRANCH}" -printf 'run the pre-release gate%s, create %s, push to %s/%s, and switch back to %s.\n' \ +printf 'auto-resolve cleanup-path conflicts, run the pre-release gate%s, create %s,\n' \ "$([ "${SKIP_TESTS}" -eq 1 ] && printf ' (skipped)' || printf '')" \ - "${RELEASE_TAG}" \ + "${RELEASE_TAG}" +printf 'push to %s/%s, and switch back to %s.\n' \ "${ORIGIN_REMOTE}" \ "${MAIN_BRANCH}" \ "${DEV_BRANCH}" @@ -288,7 +349,9 @@ CURRENT_BRANCH="${MAIN_BRANCH}" printf 'Merging %s into %s...\n' "${DEV_BRANCH}" "${MAIN_BRANCH}" if ! git merge --no-ff --no-commit "${DEV_BRANCH}"; then - fail "Merge from '${DEV_BRANCH}' into '${MAIN_BRANCH}' failed." + if ! auto_resolve_cleanup_conflicts; then + fail "Merge from '${DEV_BRANCH}' into '${MAIN_BRANCH}' failed." + fi fi if ! 
git rev-parse -q --verify MERGE_HEAD >/dev/null 2>&1; then From fc9d94aeee472223672b1656614a9e79ab426099 Mon Sep 17 00:00:00 2001 From: Javanaut <javanaut@refulgent.de> Date: Sun, 12 Apr 2026 12:21:26 +0200 Subject: [PATCH 27/28] prep 0.2.4 --- tools/merge_dev_into_main.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/tools/merge_dev_into_main.sh b/tools/merge_dev_into_main.sh index 8e63d89..8301f68 100755 --- a/tools/merge_dev_into_main.sh +++ b/tools/merge_dev_into_main.sh @@ -12,6 +12,7 @@ DEFAULT_AGENT_DEVELOPMENT_PATHS=( "requirements" "prompts" "process" + "tools/merge_dev_into_main.sh" ) AGENT_DEVELOPMENT_PATHS=("${DEFAULT_AGENT_DEVELOPMENT_PATHS[@]}") From 2595bfe4f47edfa602fa73433976091ae8acd10f Mon Sep 17 00:00:00 2001 From: Javanaut <javanaut@refulgent.de> Date: Sun, 12 Apr 2026 12:28:23 +0200 Subject: [PATCH 28/28] prep 0.2.4 --- tools/merge_dev_into_main.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/merge_dev_into_main.sh b/tools/merge_dev_into_main.sh index 8301f68..0340a11 100755 --- a/tools/merge_dev_into_main.sh +++ b/tools/merge_dev_into_main.sh @@ -122,7 +122,7 @@ auto_resolve_cleanup_conflicts() { printf ' - %s\n' "${conflicted_path}" done - git rm -r --ignore-unmatch "${AGENT_DEVELOPMENT_PATHS[@]}" >/dev/null + git rm -r -f --ignore-unmatch "${AGENT_DEVELOPMENT_PATHS[@]}" >/dev/null mapfile -t remaining_conflicts < <(git diff --name-only --diff-filter=U) if [ "${#remaining_conflicts[@]}" -ne 0 ]; then @@ -360,7 +360,7 @@ if ! git rev-parse -q --verify MERGE_HEAD >/dev/null 2>&1; then fi printf 'Removing agent-development files from %s...\n' "${MAIN_BRANCH}" -git rm -r --ignore-unmatch "${AGENT_DEVELOPMENT_PATHS[@]}" >/dev/null +git rm -r -f --ignore-unmatch "${AGENT_DEVELOPMENT_PATHS[@]}" >/dev/null if git diff --cached --quiet; then fail "No staged changes are present after merging '${DEV_BRANCH}' into '${MAIN_BRANCH}'."