diff --git a/README.md b/README.md index 5fa42fc..00a1153 100644 --- a/README.md +++ b/README.md @@ -1,48 +1,147 @@ # FFX +FFX is a local CLI and Textual TUI for inspecting TV episode files, storing normalization rules in SQLite, and converting outputs into a predictable stream, metadata, and filename layout. + +## Requirements + +- Linux-like environment +- `python3` +- `ffmpeg` +- `ffprobe` +- `cpulimit` + ## Installation -per https: +FFX uses a two-step local setup flow. + +### 1. Install The Bundle + +This step creates or reuses the persistent bundle virtualenv in `~/.local/share/ffx.venv`, installs FFX into it, and ensures `ffx` is exposed through a shell alias. ```sh -pip install https:////ffx.git@ +bash tools/setup.sh ``` -per git: +If you also want the Python packages needed for the modern test suite: ```sh -pip install git+ssh://@//ffx.git@ +bash tools/setup.sh --with-tests ``` -## Version history +You can verify the bundle state without changing anything: -### 0.1.1 +```sh +bash tools/setup.sh --check +``` -Bugfixes, TMBD identify shows +### 2. Prepare System Dependencies And Local User Files -### 0.1.2 +This step installs or verifies workstation dependencies and seeds local config and data directories. It is the step wrapped by the CLI command `ffx configure_workstation`. -Bugfixes +Run it directly: -### 0.1.3 +```sh +bash tools/configure_workstation.sh +``` -Subtitle file imports +Or through the installed CLI: -### 0.2.0 +```sh +ffx configure_workstation +``` -Tests, Config-File +Check-only mode is available in both forms: -### 0.2.1 +```sh +bash tools/configure_workstation.sh --check +ffx configure_workstation --check +``` -Signature, Tags cleaning, Bugfixes, Refactoring +`tools/configure_workstation.sh` does not manage the bundle virtualenv. Python-side test packages belong to `tools/setup.sh --with-tests`. 
-### 0.2.2 +## Basic Usage -CLI-Overrides +Examples: + +```sh +ffx version +ffx inspect /path/to/episode.mkv +ffx convert /path/to/episode.mkv +ffx shows +``` + +## Modern Tests + +Install Python test packages first: + +```sh +bash tools/setup.sh --with-tests +``` + +Then run the modern automatically discovered test suite: + +```sh +./tools/test.sh +``` + +This runner uses `pytest` and intentionally excludes the legacy harness under `tests/legacy/`. + +## Default Local Paths + +- Config: `~/.local/etc/ffx.json` +- Database: `~/.local/var/ffx/ffx.db` +- Log file: `~/.local/var/log/ffx.log` +- Bundle venv: `~/.local/share/ffx.venv` + +## TMDB + +TMDB-backed metadata enrichment requires `TMDB_API_KEY` to be set in the environment. + +## Version History + +### 0.2.4 + +- lightweight CLI commands now stay import-light via lazy runtime loading +- setup/config templating moved to `assets/ffx.json.j2` +- aligned two-step local setup wrappers: `ffx setup` and `ffx configure_workstation` +- combined `ffprobe` payload reuse in `FileProperties` +- configurable crop-detect sampling plus per-process crop result caching +- single-query controller accessors and conditional DB schema bootstrap +- shared screen bootstrap/controller wiring for large detail screens +- configurable default season/episode digit lengths +- digit-aware `rename` and padded `unmux` filename markers ### 0.2.3 -PyPi packaging -Templating output filename -Season shiftung -DB-Versionierung +- PyPI packaging +- output filename templating +- season shifting +- DB versioning + +### 0.2.2 + +- CLI overrides + +### 0.2.1 + +- signature handling +- tag cleanup +- bugfixes and refactoring + +### 0.2.0 + +- tests +- config file + +### 0.1.3 + +- subtitle file imports + +### 0.1.2 + +- bugfixes + +### 0.1.1 + +- bugfixes +- TMDB show identification diff --git a/assets/ffx.json.j2 b/assets/ffx.json.j2 new file mode 100644 index 0000000..666e603 --- /dev/null +++ b/assets/ffx.json.j2 @@ -0,0 +1,36 @@ +{ + "databasePath": 
{{ database_path_json }}, + "logDirectory": {{ log_directory_json }}, + "subtitlesDirectory": {{ subtitles_directory_json }}, + "defaultIndexSeasonDigits": {{ default_index_season_digits }}, + "defaultIndexEpisodeDigits": {{ default_index_episode_digits }}, + "defaultIndicatorSeasonDigits": {{ default_indicator_season_digits }}, + "defaultIndicatorEpisodeDigits": {{ default_indicator_episode_digits }}, + "metadata": { + "signature": { + "RECODED_WITH": "FFX" + }, + "remove": [ + "VERSION-eng", + "creation_time", + "NAME" + ], + "streams": { + "remove": [ + "BPS", + "NUMBER_OF_FRAMES", + "NUMBER_OF_BYTES", + "_STATISTICS_WRITING_APP", + "_STATISTICS_WRITING_DATE_UTC", + "_STATISTICS_TAGS", + "BPS-eng", + "DURATION-eng", + "NUMBER_OF_FRAMES-eng", + "NUMBER_OF_BYTES-eng", + "_STATISTICS_WRITING_APP-eng", + "_STATISTICS_WRITING_DATE_UTC-eng", + "_STATISTICS_TAGS-eng" + ] + } + } +} diff --git a/pyproject.toml b/pyproject.toml index d77a077..9ea7e52 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [project] name = "ffx" description = "FFX recoding and metadata managing tool" -version = "0.2.3" +version = "0.2.4" license = {file = "LICENSE.md"} dependencies = [ "requests", @@ -27,6 +27,11 @@ Homepage = "https://gitea.maveno.de/Javanaut/ffx" Repository = "https://gitea.maveno.de/Javanaut/ffx.git" Issues = "https://gitea.maveno.de/Javanaut/ffx/issues" +[project.optional-dependencies] +test = [ + "pytest", +] + [build-system] requires = [ "setuptools", @@ -35,4 +40,15 @@ requires = [ build-backend = "setuptools.build_meta" [project.scripts] -ffx = "ffx.ffx:ffx" +ffx = "ffx.cli:ffx" + +[tool.pytest.ini_options] +testpaths = ["tests"] +python_files = ["test_*.py"] +norecursedirs = ["tests/legacy", "tests/support"] +addopts = "-ra" +markers = [ + "integration: exercises the FFX bundle with real ffmpeg/ffprobe processes", + "pattern_management: covers requirements/pattern_management.md", + "subtrack_mapping: covers requirements/subtrack_mapping.md", +] diff 
--git a/src/ffx/__main__.py b/src/ffx/__main__.py new file mode 100644 index 0000000..9d2ccea --- /dev/null +++ b/src/ffx/__main__.py @@ -0,0 +1,9 @@ +from .cli import ffx + + +def main(): + ffx() + + +if __name__ == "__main__": + main() diff --git a/src/ffx/ffx.py b/src/ffx/cli.py similarity index 60% rename from src/ffx/ffx.py rename to src/ffx/cli.py index fe6b56f..f3040c2 100755 --- a/src/ffx/ffx.py +++ b/src/ffx/cli.py @@ -1,42 +1,250 @@ #! /usr/bin/python3 -import os, click, time, logging, shutil, subprocess +from __future__ import annotations -from ffx.configuration_controller import ConfigurationController +import os, sys, click, time, shutil, subprocess +from typing import TYPE_CHECKING -from ffx.file_properties import FileProperties +# Allow direct execution via `python src/ffx/cli.py` by preferring the package +# root on sys.path. +if __package__ in (None, ''): + script_dir = os.path.dirname(__file__) + package_root = os.path.dirname(os.path.dirname(__file__)) + sys.path = [p for p in sys.path if os.path.abspath(p) != os.path.abspath(script_dir)] + sys.path.insert(0, package_root) -from ffx.ffx_app import FfxApp -from ffx.ffx_controller import FfxController -from ffx.tmdb_controller import TmdbController +from ffx.constants import ( + DEFAULT_AC3_BANDWIDTH, + DEFAULT_CROPDETECT_DURATION_SECONDS, + DEFAULT_CROPDETECT_SEEK_SECONDS, + DEFAULT_cut_length, + DEFAULT_cut_start, + DEFAULT_CONTAINER_EXTENSION, + DEFAULT_CONTAINER_FORMAT, + DEFAULT_DTS_BANDWIDTH, + DEFAULT_STEREO_BANDWIDTH, + DEFAULT_VIDEO_ENCODER_LABEL, + FFMPEG_COMMAND_TOKENS, + SUPPORTED_INPUT_FILE_EXTENSIONS, + VERSION, +) -from ffx.database import databaseContext +if TYPE_CHECKING: + from ffx.media_descriptor import MediaDescriptor + from ffx.track_descriptor import TrackDescriptor -from ffx.media_descriptor import MediaDescriptor -from ffx.track_descriptor import TrackDescriptor -from ffx.show_descriptor import ShowDescriptor +LIGHTWEIGHT_COMMANDS = {None, 'version', 'help', 'setup', 
'configure_workstation', 'upgrade', 'rename'} +CPU_OPTION_HELP = ( + "Limit CPU for started processes. Use an absolute cpulimit value such as 200 " + + "(about 2 cores), or use a percentage such as 25% for a share of present cores. " + + "Omit to disable; 0 also disables." +) +SUBTITLE_DIRECTORY_OPTION_HELP = ( + "Load subtitles from here. When omitted and --subtitle-prefix is set, " + + "FFX uses the configured subtitlesDirectory base path plus the prefix as a subdirectory." +) +SUBTITLE_PREFIX_OPTION_HELP = ( + "Subtitle filename prefix. Requires --subtitle-directory, or a configured " + + "subtitlesDirectory base path that contains a matching / subdirectory." +) +UNMUX_OUTPUT_DIRECTORY_OPTION_HELP = ( + "Write extracted streams here. When omitted together with --subtitles-only and " + + "--label, FFX uses the configured subtitlesDirectory base path plus the label." +) +CROPDETECT_SEEK_OPTION_HELP = ( + "Start crop detection this many seconds into the input. " + + "Useful for skipping logos, intros, or black frames." +) +CROPDETECT_DURATION_OPTION_HELP = ( + "Analyze this many seconds for crop detection. " + + "Shorter windows are faster; longer windows are usually steadier." +) +DEFAULT_CUT_OPTION_VALUE = f"{DEFAULT_cut_start},{DEFAULT_cut_length}" +CUT_OPTION_HELP = ( + "Cut output in seconds. " + + f"Use --cut for the default {DEFAULT_CUT_OPTION_VALUE}, " + + "--cut DURATION to cut from 0 for DURATION seconds, " + + "or --cut START,DURATION for an explicit start and duration. " + + "Omit to disable." 
+) -from ffx.track_type import TrackType -from ffx.video_encoder import VideoEncoder -from ffx.track_disposition import TrackDisposition -from ffx.track_codec import TrackCodec -from ffx.process import executeProcess -from ffx.helper import filterFilename, substituteTmdbFilename -from ffx.helper import getEpisodeFileBasename +def normalizeNicenessOption(ctx, param, value): + from ffx.process import normalizeNiceness -from ffx.constants import DEFAULT_STEREO_BANDWIDTH, DEFAULT_AC3_BANDWIDTH, DEFAULT_DTS_BANDWIDTH, DEFAULT_7_1_BANDWIDTH + try: + return normalizeNiceness(value) + except ValueError as ex: + raise click.BadParameter(str(ex)) from ex -from ffx.filter.quality_filter import QualityFilter -from ffx.filter.preset_filter import PresetFilter -from ffx.filter.crop_filter import CropFilter -from ffx.filter.nlmeans_filter import NlmeansFilter -from ffx.filter.deinterlace_filter import DeinterlaceFilter +def normalizeCpuOption(ctx, param, value): + from ffx.process import normalizeCpuPercent -from ffx.constants import VERSION + try: + return normalizeCpuPercent(value) + except ValueError as ex: + raise click.BadParameter(str(ex)) from ex + + +def parseCutOptionValue(value) -> tuple[int, int] | None: + if value is None: + return None + + cutValue = str(value).strip() + if not cutValue: + raise ValueError( + "Cut value must be DURATION or START,DURATION, or use --cut without a value." + ) + + cutTokens = [token.strip() for token in cutValue.split(',')] + + try: + if len(cutTokens) == 1: + cutStart = 0 + cutLength = int(cutTokens[0]) + elif len(cutTokens) == 2: + cutStart = int(cutTokens[0]) + cutLength = int(cutTokens[1]) + else: + raise ValueError + except ValueError as ex: + raise ValueError( + "Cut value must be DURATION or START,DURATION, or use --cut without a value." 
+ ) from ex + + if cutStart < 0: + raise ValueError("Cut start must be 0 or greater.") + + if cutLength <= 0: + raise ValueError("Cut duration must be greater than 0.") + + return cutStart, cutLength + + +def normalizeCutOption(ctx, param, value): + try: + return parseCutOptionValue(value) + except ValueError as ex: + raise click.BadParameter(str(ex)) from ex + + +def resolveSubtitleImportOptions(context, subtitleDirectory, subtitlePrefix): + resolvedSubtitlePrefix = str(subtitlePrefix).strip() + resolvedSubtitleDirectory = ( + os.path.expanduser(str(subtitleDirectory).strip()) + if subtitleDirectory + else '' + ) + + if not resolvedSubtitlePrefix: + return False, resolvedSubtitleDirectory, resolvedSubtitlePrefix + + if resolvedSubtitleDirectory: + return True, resolvedSubtitleDirectory, resolvedSubtitlePrefix + + configuredSubtitlesBaseDirectory = context['config'].getSubtitlesDirectoryPath() + if not configuredSubtitlesBaseDirectory: + raise click.ClickException( + "Subtitle prefix was set but no --subtitle-directory was provided and " + + "no subtitlesDirectory default is configured in ffx.json." 
+ ) + + resolvedSubtitleDirectory = os.path.join( + configuredSubtitlesBaseDirectory, + resolvedSubtitlePrefix, + ) + + if not os.path.isdir(resolvedSubtitleDirectory): + raise click.ClickException( + "Subtitle prefix was set but the resolved subtitle directory does not exist: " + + resolvedSubtitleDirectory + ) + + return True, resolvedSubtitleDirectory, resolvedSubtitlePrefix + + +def resolveUnmuxOutputDirectory(context, outputDirectory, subtitlesOnly, label): + resolvedOutputDirectory = ( + os.path.expanduser(str(outputDirectory).strip()) + if outputDirectory + else '' + ) + resolvedLabel = str(label).strip() + + if resolvedOutputDirectory or not subtitlesOnly or not resolvedLabel: + return resolvedOutputDirectory, False + + configuredSubtitlesBaseDirectory = context['config'].getSubtitlesDirectoryPath() + if not configuredSubtitlesBaseDirectory: + raise click.ClickException( + "Subtitles-only unmux with --label requires --output-directory or a configured " + + "subtitlesDirectory default in ffx.json." 
+ ) + + return os.path.join(configuredSubtitlesBaseDirectory, resolvedLabel), True + + +def resolveIndicatorDigitLengths(context=None, showDescriptor=None): + from ffx.show_descriptor import ShowDescriptor + + defaultDigitLengths = ShowDescriptor.getDefaultDigitLengths(context) + if showDescriptor is None: + return ( + defaultDigitLengths[ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY], + defaultDigitLengths[ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY], + ) + + return ( + int(showDescriptor.getIndicatorSeasonDigits()), + int(showDescriptor.getIndicatorEpisodeDigits()), + ) + + +def buildRenameTargetFilename( + sourcePath, + prefix, + seasonOverride=None, + suffix='', + indicatorSeasonDigits=None, + indicatorEpisodeDigits=None, +): + from ffx.file_properties import FileProperties + from ffx.show_descriptor import ShowDescriptor + + sourceFilename = os.path.basename(sourcePath) + seasonEpisodeValues = FileProperties.extractSeasonEpisodeValues(sourceFilename) + if seasonEpisodeValues is None: + return None + + sourceSeason, sourceEpisode = seasonEpisodeValues + resolvedSeason = int(seasonOverride) if seasonOverride is not None else ( + int(sourceSeason) if sourceSeason is not None else 1 + ) + resolvedIndicatorSeasonDigits = ( + int(indicatorSeasonDigits) + if indicatorSeasonDigits is not None + else ShowDescriptor.DEFAULT_INDICATOR_SEASON_DIGITS + ) + resolvedIndicatorEpisodeDigits = ( + int(indicatorEpisodeDigits) + if indicatorEpisodeDigits is not None + else ShowDescriptor.DEFAULT_INDICATOR_EPISODE_DIGITS + ) + + _sourceBasename, sourceExtension = os.path.splitext(sourceFilename) + + targetFilenameTokens = [ + str(prefix).strip(), + f"s{resolvedSeason:0{resolvedIndicatorSeasonDigits}d}e{int(sourceEpisode):0{resolvedIndicatorEpisodeDigits}d}", + ] + + resolvedSuffix = str(suffix).strip() + if resolvedSuffix: + targetFilenameTokens.append(resolvedSuffix) + + return f"{'_'.join(targetFilenameTokens)}{sourceExtension}" -from ffx.shifted_season_controller import 
ShiftedSeasonController @click.group() @@ -49,11 +257,18 @@ def ffx(ctx, database_file, verbose, dry_run): ctx.obj = {} - if ctx.invoked_subcommand in ('setup_dependencies', 'upgrade'): + if ctx.resilient_parsing: + return + + if ctx.invoked_subcommand in LIGHTWEIGHT_COMMANDS: ctx.obj['dry_run'] = dry_run ctx.obj['verbosity'] = verbose return + from ffx.configuration_controller import ConfigurationController + from ffx.database import databaseContext + from ffx.logging_utils import configure_ffx_logger + ctx.obj['config'] = ConfigurationController() ctx.obj['database'] = databaseContext(databasePath=database_file @@ -70,23 +285,11 @@ def ffx(ctx, database_file, verbose, dry_run): fileLogVerbosity = max(40 - verbose * 10, 10) consoleLogVerbosity = max(20 - verbose * 10, 10) - ctx.obj['logger'] = logging.getLogger('FFX') - ctx.obj['logger'].setLevel(logging.DEBUG) - - ffxFileHandler = logging.FileHandler(ctx.obj['config'].getLogFilePath()) - ffxFileHandler.setLevel(fileLogVerbosity) - ffxConsoleHandler = logging.StreamHandler() - ffxConsoleHandler.setLevel(consoleLogVerbosity) - - fileFormatter = logging.Formatter( - '%(asctime)s - %(name)s - %(levelname)s - %(message)s') - ffxFileHandler.setFormatter(fileFormatter) - consoleFormatter = logging.Formatter( - '%(message)s') - ffxConsoleHandler.setFormatter(consoleFormatter) - - ctx.obj['logger'].addHandler(ffxConsoleHandler) - ctx.obj['logger'].addHandler(ffxFileHandler) + ctx.obj['logger'] = configure_ffx_logger( + ctx.obj['config'].getLogFilePath(), + fileLogVerbosity, + consoleLogVerbosity, + ) # Define a subcommand @@ -99,7 +302,9 @@ def version(): @ffx.command() def help(): click.echo(f"ffx {VERSION}\n") - click.echo(f"Usage: ffx [input file] [output file] [vp9|av1] [q=[nn[,nn,...]]] [p=nn] [a=nnn[k]] [ac3=nnn[k]] [dts=nnn[k]] [crop]") + click.echo("Maintenance commands: setup, configure_workstation, upgrade") + click.echo("Media commands: shows, inspect, convert, rename, unmux, cropdetect") + click.echo("Use 'ffx 
--help' or 'ffx --help' for full command help.") def getRepoRootPath(): @@ -107,8 +312,12 @@ def getRepoRootPath(): return os.path.dirname(os.path.dirname(os.path.dirname(currentFilePath))) -def getPrepareScriptPath(): - return os.path.join(getRepoRootPath(), 'tools', 'prepare.sh') +def getConfigureWorkstationScriptPath(): + return os.path.join(getRepoRootPath(), 'tools', 'configure_workstation.sh') + + +def getSetupScriptPath(): + return os.path.join(getRepoRootPath(), 'tools', 'setup.sh') def getBundleVenvDirectory(): @@ -123,22 +332,29 @@ def getBundleRepoPath(): return getRepoRootPath() -@ffx.command(name='setup_dependencies') -@click.pass_context -@click.option('--check', is_flag=True, default=False, help='Only verify dependency readiness') -@click.argument('prepare_args', nargs=-1, type=click.UNPROCESSED) -def setup_dependencies(ctx, check, prepare_args): - prepareScriptPath = getPrepareScriptPath() +def getTrackedGitChanges(repoPath): + completed = subprocess.run( + ['git', 'status', '--porcelain', '--untracked-files=no'], + cwd=repoPath, + capture_output=True, + text=True, + ) - if not os.path.isfile(prepareScriptPath): - raise click.ClickException(f"Preparation script not found at {prepareScriptPath}") + if completed.returncode != 0: + commandLabel = 'git status --porcelain --untracked-files=no' + errorOutput = completed.stderr.strip() or completed.stdout.strip() + raise click.ClickException( + f"Unable to inspect bundle repository state using '{commandLabel}': {errorOutput}" + ) - commandSequence = ['bash', prepareScriptPath] + return [line for line in completed.stdout.splitlines() if line.strip()] - if check: - commandSequence.append('--check') - commandSequence += list(prepare_args) +def runScriptWrapper(ctx, scriptPath, missingDescription, commandArgs): + if not os.path.isfile(scriptPath): + raise click.ClickException(f"{missingDescription} not found at {scriptPath}") + + commandSequence = ['bash', scriptPath] + list(commandArgs) if 
ctx.obj.get('dry_run', False): click.echo(' '.join(commandSequence)) @@ -148,6 +364,44 @@ def setup_dependencies(ctx, check, prepare_args): ctx.exit(completed.returncode) +@ffx.command(name='setup') +@click.pass_context +@click.option('--check', is_flag=True, default=False, help='Only verify bundle-setup readiness') +@click.option('--with-tests', is_flag=True, default=False, help='Also install or verify Python test packages in the bundle venv') +@click.argument('setup_args', nargs=-1, type=click.UNPROCESSED) +def setup(ctx, check, with_tests, setup_args): + """Prepare or repair the FFX bundle virtualenv and shell alias.""" + commandArgs = [] + + if check: + commandArgs.append('--check') + if with_tests: + commandArgs.append('--with-tests') + + commandArgs += list(setup_args) + runScriptWrapper(ctx, getSetupScriptPath(), "Bundle setup script", commandArgs) + + +@ffx.command(name='configure_workstation') +@click.pass_context +@click.option('--check', is_flag=True, default=False, help='Only verify workstation-configuration readiness') +@click.argument('configure_args', nargs=-1, type=click.UNPROCESSED) +def configure_workstation(ctx, check, configure_args): + """Prepare workstation dependencies and local config after bundle install.""" + commandArgs = [] + + if check: + commandArgs.append('--check') + + commandArgs += list(configure_args) + runScriptWrapper( + ctx, + getConfigureWorkstationScriptPath(), + "Workstation configuration script", + commandArgs, + ) + + @ffx.command(name='upgrade') @click.pass_context @click.option('--branch', type=str, default='', help='Checkout this branch before pulling') @@ -162,12 +416,31 @@ def upgrade(ctx, branch): raise click.ClickException(f"Bundle pip not found at {bundlePipPath}") commandSequences = [] + trackedChanges = getTrackedGitChanges(bundleRepoPath) + + if trackedChanges: + click.echo("Tracked local changes detected in the bundle repository:") + for trackedChange in trackedChanges: + click.echo(f" {trackedChange}") + + 
shouldReset = click.confirm( + "Discard these tracked changes with 'git reset --hard HEAD' before upgrade?", + default=False, + ) + + if not shouldReset: + raise click.ClickException( + "Upgrade aborted because tracked local changes are present." + ) + + commandSequences.append(['git', 'reset', '--hard', 'HEAD']) if branch: commandSequences.append(['git', 'checkout', branch]) commandSequences += [ ['git', 'pull'], + [bundlePipPath, 'install', '--upgrade', 'pip', 'setuptools', 'wheel'], [bundlePipPath, 'install', '--editable', '.'], ] @@ -186,6 +459,7 @@ def upgrade(ctx, branch): @click.pass_context @click.argument('filename', nargs=1) def inspect(ctx, filename): + from ffx.ffx_app import FfxApp ctx.obj['command'] = 'inspect' ctx.obj['arguments'] = {} @@ -195,10 +469,66 @@ def inspect(ctx, filename): app.run() +@ffx.command() +@click.pass_context +@click.argument('paths', nargs=-1) +@click.option('--prefix', type=str, required=True, help='Required target filename prefix') +@click.option('--season', type=int, default=None, help='Override target season index') +@click.option('--suffix', type=str, default='', help='Optional target filename suffix') +@click.option('--dry-run', is_flag=True, default=False, help='Only print planned renames') +def rename(ctx, paths, prefix, season, suffix, dry_run): + """Rename matching episode files in place.""" + from ffx.configuration_controller import ConfigurationController + + resolvedPrefix = str(prefix).strip() + resolvedSuffix = str(suffix).strip() + effectiveDryRun = bool(ctx.obj.get('dry_run', False) or dry_run) + renameContext = { + 'config': ctx.obj.get('config') or ConfigurationController(), + } + indicatorSeasonDigits, indicatorEpisodeDigits = resolveIndicatorDigitLengths(renameContext) + + if not resolvedPrefix: + raise click.ClickException("Rename prefix must not be empty.") + + processedCount = 0 + + for sourcePath in paths: + if not os.path.isfile(sourcePath): + continue + + targetFilename = buildRenameTargetFilename( + 
sourcePath, + resolvedPrefix, + seasonOverride=season, + suffix=resolvedSuffix, + indicatorSeasonDigits=indicatorSeasonDigits, + indicatorEpisodeDigits=indicatorEpisodeDigits, + ) + if targetFilename is None: + continue + + sourceFilename = os.path.basename(sourcePath) + targetPath = os.path.join(os.path.dirname(sourcePath), targetFilename) + click.echo(f"{sourceFilename} -> {targetFilename}") + processedCount += 1 + + if effectiveDryRun or os.path.abspath(sourcePath) == os.path.abspath(targetPath): + continue + + if os.path.exists(targetPath): + raise click.ClickException(f"Target file already exists: {targetPath}") + + shutil.move(sourcePath, targetPath) + + if processedCount == 0: + click.echo("No matching files found.") + + def getUnmuxSequence(trackDescriptor: TrackDescriptor, sourcePath, targetPrefix, targetDirectory = ''): # executable and input file - commandTokens = FfxController.COMMAND_TOKENS + ['-i', sourcePath] + commandTokens = list(FFMPEG_COMMAND_TOKENS) + ['-i', sourcePath] trackType = trackDescriptor.getType() @@ -228,10 +558,24 @@ def getUnmuxSequence(trackDescriptor: TrackDescriptor, sourcePath, targetPrefix, @click.argument('paths', nargs=-1) @click.option('-l', '--label', type=str, default='', help='Label to be used as filename prefix') -@click.option("-o", "--output-directory", type=str, default='') +@click.option("-o", "--output-directory", type=str, default='', help=UNMUX_OUTPUT_DIRECTORY_OPTION_HELP) @click.option("-s", "--subtitles-only", is_flag=True, default=False) -@click.option('--nice', type=int, default=99, help='Niceness of started processes') -@click.option('--cpu', type=int, default=0, help='Limit CPU for started processes to percent') +@click.option( + '--nice', + type=int, + default=None, + callback=normalizeNicenessOption, + show_default='disabled', + help='Adjust niceness of started processes (-20..19). 
Omit to disable; 99 also disables.', +) +@click.option( + '--cpu', + type=str, + default=None, + callback=normalizeCpuOption, + show_default='disabled', + help=CPU_OPTION_HELP, +) def unmux(ctx, paths, label, @@ -239,14 +583,28 @@ def unmux(ctx, subtitles_only, nice, cpu): + from ffx.file_properties import FileProperties + from ffx.process import executeProcess + from ffx.track_disposition import TrackDisposition + from ffx.track_type import TrackType existingSourcePaths = [p for p in paths if os.path.isfile(p)] ctx.obj['logger'].debug(f"\nUnmuxing {len(existingSourcePaths)} files") ctx.obj['resource_limits'] = {} ctx.obj['resource_limits']['niceness'] = nice + ctx.obj['resource_limits']['cpu_limit'] = cpu ctx.obj['resource_limits']['cpu_percent'] = cpu + output_directory, create_output_directory = resolveUnmuxOutputDirectory( + ctx.obj, + output_directory, + subtitles_only, + label, + ) + if create_output_directory and existingSourcePaths and not ctx.obj.get('dry_run', False): + os.makedirs(output_directory, exist_ok=True) + for sourcePath in existingSourcePaths: fp = FileProperties(ctx.obj, sourcePath) @@ -254,13 +612,25 @@ def unmux(ctx, try: sourceMediaDescriptor = fp.getMediaDescriptor() + currentPattern = fp.getPattern() + currentShowDescriptor = ( + currentPattern.getShowDescriptor(ctx.obj) if currentPattern is not None else None + ) + indicatorSeasonDigits, indicatorEpisodeDigits = resolveIndicatorDigitLengths( + ctx.obj, + currentShowDescriptor, + ) season = fp.getSeason() episode = fp.getEpisode() #TODO: Recognition für alle Formate anpassen targetLabel = label if label else fp.getFileBasename() - targetIndicator = f"_S{season}E{episode}" if label and season != -1 and episode != -1 else '' + targetIndicator = ( + f"_S{season:0{indicatorSeasonDigits}d}E{episode:0{indicatorEpisodeDigits}d}" + if label and season != -1 and episode != -1 + else '' + ) if label and not targetIndicator: ctx.obj['logger'].warning(f"Skipping file {fp.getFilename()}: Label set but 
no indicator recognized") @@ -303,19 +673,55 @@ def unmux(ctx, @click.pass_context @click.argument('paths', nargs=-1) -@click.option('--nice', type=int, default=99, help='Niceness of started processes') -@click.option('--cpu', type=int, default=0, help='Limit CPU for started processes to percent') +@click.option( + '--nice', + type=int, + default=None, + callback=normalizeNicenessOption, + show_default='disabled', + help='Adjust niceness of started processes (-20..19). Omit to disable; 99 also disables.', +) +@click.option( + '--cpu', + type=str, + default=None, + callback=normalizeCpuOption, + show_default='disabled', + help=CPU_OPTION_HELP, +) +@click.option( + '--crop-seek', + type=click.IntRange(min=0), + default=DEFAULT_CROPDETECT_SEEK_SECONDS, + show_default=True, + help=CROPDETECT_SEEK_OPTION_HELP, +) +@click.option( + '--crop-duration', + type=click.IntRange(min=1), + default=DEFAULT_CROPDETECT_DURATION_SECONDS, + show_default=True, + help=CROPDETECT_DURATION_OPTION_HELP, +) def cropdetect(ctx, paths, nice, - cpu): + cpu, + crop_seek, + crop_duration): + from ffx.file_properties import FileProperties existingSourcePaths = [p for p in paths if os.path.isfile(p)] ctx.obj['logger'].debug(f"\nUnmuxing {len(existingSourcePaths)} files") ctx.obj['resource_limits'] = {} ctx.obj['resource_limits']['niceness'] = nice + ctx.obj['resource_limits']['cpu_limit'] = cpu ctx.obj['resource_limits']['cpu_percent'] = cpu + ctx.obj['cropdetect'] = { + 'seek_seconds': crop_seek, + 'duration_seconds': crop_duration, + } for sourcePath in existingSourcePaths: @@ -323,7 +729,7 @@ def cropdetect(ctx, try: fp = FileProperties(ctx.obj, sourcePath) - cropParams = fp.findCropParams() + cropParams = fp.findCropArguments() click.echo(cropParams) @@ -335,6 +741,7 @@ def cropdetect(ctx, @click.pass_context def shows(ctx): + from ffx.ffx_app import FfxApp ctx.obj['command'] = 'shows' @@ -343,6 +750,8 @@ def shows(ctx): def checkUniqueDispositions(context, mediaDescriptor: MediaDescriptor): 
+ from ffx.track_disposition import TrackDisposition + from ffx.track_type import TrackType # Check for multiple default or forced dispositions if not set by user input or database requirements # @@ -392,7 +801,7 @@ def checkUniqueDispositions(context, mediaDescriptor: MediaDescriptor): @click.option('-l', '--label', type=str, default='', help='Label to be used as filename prefix') -@click.option('-v', '--video-encoder', type=str, default=FfxController.DEFAULT_VIDEO_ENCODER, help=f"Target video encoder (vp9, av1 or h264)", show_default=True) +@click.option('-v', '--video-encoder', type=str, default=DEFAULT_VIDEO_ENCODER_LABEL, help=f"Target video encoder (vp9, av1, h264 or copy)", show_default=True) @click.option('-q', '--quality', type=str, default="", help=f"Quality settings to be used with VP9/H264 encoder") @click.option('-p', '--preset', type=str, default="", help=f"Quality preset to be used with AV1 encoder") @@ -401,8 +810,8 @@ def checkUniqueDispositions(context, mediaDescriptor: MediaDescriptor): @click.option('--ac3', type=int, default=DEFAULT_AC3_BANDWIDTH, help=f"Bitrate in kbit/s to be used to encode 5.1 audio streams", show_default=True) @click.option('--dts', type=int, default=DEFAULT_DTS_BANDWIDTH, help=f"Bitrate in kbit/s to be used to encode 6.1 audio streams", show_default=True) -@click.option('--subtitle-directory', type=str, default='', help='Load subtitles from here') -@click.option('--subtitle-prefix', type=str, default='', help='Subtitle filename prefix') +@click.option('--subtitle-directory', type=str, default='', help=SUBTITLE_DIRECTORY_OPTION_HELP) +@click.option('--subtitle-prefix', type=str, default='', help=SUBTITLE_PREFIX_OPTION_HELP) @click.option('--language', type=str, multiple=True, help='Set stream language. Use format :<3 letter iso code>') @click.option('--title', type=str, multiple=True, help='Set stream title. 
Use format :') @@ -417,7 +826,30 @@ def checkUniqueDispositions(context, mediaDescriptor: MediaDescriptor): @click.option('--rearrange-streams', type=str, default="", help='Rearrange output streams order. Use format comma separated integers') @click.option("--crop", is_flag=False, flag_value="auto", default="none") -@click.option("--cut", is_flag=False, flag_value="default", default="none") +@click.option( + '--crop-seek', + type=click.IntRange(min=0), + default=DEFAULT_CROPDETECT_SEEK_SECONDS, + show_default=True, + help='When --crop auto is used, start crop detection this many seconds into the input.', +) +@click.option( + '--crop-duration', + type=click.IntRange(min=1), + default=DEFAULT_CROPDETECT_DURATION_SECONDS, + show_default=True, + help='When --crop auto is used, analyze this many seconds for crop detection.', +) +@click.option( + "--cut", + type=str, + metavar="DURATION|START,DURATION", + is_flag=False, + flag_value=DEFAULT_CUT_OPTION_VALUE, + default=None, + callback=normalizeCutOption, + help=CUT_OPTION_HELP, +) @click.option("--output-directory", type=str, default='') @@ -444,10 +876,24 @@ def checkUniqueDispositions(context, mediaDescriptor: MediaDescriptor): @click.option("--no-signature", is_flag=True, default=False) @click.option("--keep-mkvmerge-metadata", is_flag=True, default=False) -@click.option('--nice', type=int, default=99, help='Niceness of started processes') -@click.option('--cpu', type=int, default=0, help='Limit CPU for started processes to percent') +@click.option( + '--nice', + type=int, + default=None, + callback=normalizeNicenessOption, + show_default='disabled', + help='Adjust niceness of started processes (-20..19). 
Omit to disable; 99 also disables.', +) +@click.option( + '--cpu', + type=str, + default=None, + callback=normalizeCpuOption, + show_default='disabled', + help=CPU_OPTION_HELP, +) -@click.option('--rename-only', is_flag=True, default=False, help='Only renaming, no recoding') +@click.option('--rename-only', is_flag=True, default=False, help='Only renaming and moving, no recoding') def convert(ctx, paths, @@ -475,6 +921,8 @@ def convert(ctx, rearrange_streams, crop, + crop_seek, + crop_duration, cut, output_directory, @@ -509,6 +957,20 @@ def convert(ctx, Filename extensions will be changed appropriately. Suffixes will be appended to filename in case of multiple created files or if the filename has not changed.""" + from ffx.ffx_controller import FfxController + from ffx.file_properties import FileProperties + from ffx.filter.crop_filter import CropFilter + from ffx.filter.deinterlace_filter import DeinterlaceFilter + from ffx.filter.nlmeans_filter import NlmeansFilter + from ffx.filter.preset_filter import PresetFilter + from ffx.filter.quality_filter import QualityFilter + from ffx.helper import filterFilename, getEpisodeFileBasename, substituteTmdbFilename + from ffx.shifted_season_controller import ShiftedSeasonController + from ffx.show_descriptor import ShowDescriptor + from ffx.tmdb_controller import TmdbController + from ffx.track_codec import TrackCodec + from ffx.track_disposition import TrackDisposition + from ffx.video_encoder import VideoEncoder startTime = time.perf_counter() @@ -516,9 +978,13 @@ def convert(ctx, context['video_encoder'] = VideoEncoder.fromLabel(video_encoder) - #HINT: quick and dirty override for h264, todo improve - targetFormat = '' if context['video_encoder'] == VideoEncoder.H264 else FfxController.DEFAULT_FILE_FORMAT - targetExtension = 'mkv' if context['video_encoder'] == VideoEncoder.H264 else FfxController.DEFAULT_FILE_EXTENSION + # HINT: quick and dirty override for h264, todo improve + if context['video_encoder'] in 
(VideoEncoder.H264, VideoEncoder.COPY): + targetFormat = '' + targetExtension = 'mkv' + else: + targetFormat = DEFAULT_CONTAINER_FORMAT + targetExtension = DEFAULT_CONTAINER_EXTENSION context['use_tmdb'] = not no_tmdb context['use_pattern'] = not no_pattern @@ -529,16 +995,29 @@ def convert(ctx, context['resource_limits'] = {} context['resource_limits']['niceness'] = nice + context['resource_limits']['cpu_limit'] = cpu context['resource_limits']['cpu_percent'] = cpu + context['cropdetect'] = { + 'seek_seconds': crop_seek, + 'duration_seconds': crop_duration, + } - context['import_subtitles'] = (subtitle_directory and subtitle_prefix) + ( + context['import_subtitles'], + resolvedSubtitleDirectory, + resolvedSubtitlePrefix, + ) = resolveSubtitleImportOptions( + context, + subtitle_directory, + subtitle_prefix, + ) if context['import_subtitles']: - context['subtitle_directory'] = subtitle_directory - context['subtitle_prefix'] = subtitle_prefix + context['subtitle_directory'] = resolvedSubtitleDirectory + context['subtitle_prefix'] = resolvedSubtitlePrefix - existingSourcePaths = [p for p in paths if os.path.isfile(p) and p.split('.')[-1] in FfxController.INPUT_FILE_EXTENSIONS] + existingSourcePaths = [p for p in paths if os.path.isfile(p) and p.split('.')[-1] in SUPPORTED_INPUT_FILE_EXTENSIONS] # CLI Overrides @@ -619,13 +1098,15 @@ def convert(ctx, #-> # Process cut parameters - context['perform_cut'] = (cut != 'none') + context['perform_cut'] = (cut is not None) if context['perform_cut']: - cutTokens = cut.split(',') - if cutTokens and len(cutTokens) == 2: - context['cut_start'] = int(cutTokens[0]) - context['cut_length'] = int(cutTokens[1]) - ctx.obj['logger'].debug(f"Cut start={context['cut_start']} length={context['cut_length']}") + context['cut_start'], context['cut_length'] = cut + click.echo( + f"Cutting enabled: start {context['cut_start']} s, duration {context['cut_length']} s." 
+ ) + ctx.obj['logger'].debug( + f"Cut start={context['cut_start']} length={context['cut_length']}" + ) tc = TmdbController() if context['use_tmdb'] else None @@ -798,10 +1279,11 @@ def convert(ctx, fc = FfxController(context, targetMediaDescriptor, sourceMediaDescriptor) - indexSeasonDigits = currentShowDescriptor.getIndexSeasonDigits() if not currentPattern is None else ShowDescriptor.DEFAULT_INDEX_SEASON_DIGITS - indexEpisodeDigits = currentShowDescriptor.getIndexEpisodeDigits() if not currentPattern is None else ShowDescriptor.DEFAULT_INDEX_EPISODE_DIGITS - indicatorSeasonDigits = currentShowDescriptor.getIndicatorSeasonDigits() if not currentPattern is None else ShowDescriptor.DEFAULT_INDICATOR_SEASON_DIGITS - indicatorEpisodeDigits = currentShowDescriptor.getIndicatorEpisodeDigits() if not currentPattern is None else ShowDescriptor.DEFAULT_INDICATOR_EPISODE_DIGITS + defaultDigitLengths = ShowDescriptor.getDefaultDigitLengths(context) + indexSeasonDigits = currentShowDescriptor.getIndexSeasonDigits() if not currentPattern is None else defaultDigitLengths[ShowDescriptor.INDEX_SEASON_DIGITS_KEY] + indexEpisodeDigits = currentShowDescriptor.getIndexEpisodeDigits() if not currentPattern is None else defaultDigitLengths[ShowDescriptor.INDEX_EPISODE_DIGITS_KEY] + indicatorSeasonDigits = currentShowDescriptor.getIndicatorSeasonDigits() if not currentPattern is None else defaultDigitLengths[ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY] + indicatorEpisodeDigits = currentShowDescriptor.getIndicatorEpisodeDigits() if not currentPattern is None else defaultDigitLengths[ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY] # Shift season and episode if defined for this show @@ -895,7 +1377,7 @@ def convert(ctx, if rename_only: - shutil.copyfile(sourcePath, targetPath) + shutil.move(sourcePath, targetPath) else: fc.runJob(sourcePath, targetPath, diff --git a/src/ffx/configuration_controller.py b/src/ffx/configuration_controller.py index 74cc960..4a6bd29 100644 --- 
a/src/ffx/configuration_controller.py +++ b/src/ffx/configuration_controller.py @@ -1,5 +1,12 @@ import os, json +from .constants import ( + DEFAULT_SHOW_INDEX_EPISODE_DIGITS, + DEFAULT_SHOW_INDEX_SEASON_DIGITS, + DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS, + DEFAULT_SHOW_INDICATOR_SEASON_DIGITS, +) + class ConfigurationController(): CONFIG_FILENAME = 'ffx.json' @@ -8,7 +15,12 @@ class ConfigurationController(): DATABASE_PATH_CONFIG_KEY = 'databasePath' LOG_DIRECTORY_CONFIG_KEY = 'logDirectory' + SUBTITLES_DIRECTORY_CONFIG_KEY = 'subtitlesDirectory' OUTPUT_FILENAME_TEMPLATE_KEY = 'outputFilenameTemplate' + DEFAULT_INDEX_SEASON_DIGITS_CONFIG_KEY = 'defaultIndexSeasonDigits' + DEFAULT_INDEX_EPISODE_DIGITS_CONFIG_KEY = 'defaultIndexEpisodeDigits' + DEFAULT_INDICATOR_SEASON_DIGITS_CONFIG_KEY = 'defaultIndicatorSeasonDigits' + DEFAULT_INDICATOR_EPISODE_DIGITS_CONFIG_KEY = 'defaultIndicatorEpisodeDigits' def __init__(self): @@ -49,6 +61,48 @@ class ConfigurationController(): def getDatabaseFilePath(self): return self.__databaseFilePath + def getSubtitlesDirectoryPath(self): + subtitlesDirectory = self.__configurationData.get( + ConfigurationController.SUBTITLES_DIRECTORY_CONFIG_KEY, + '', + ) + return os.path.expanduser(str(subtitlesDirectory)) if subtitlesDirectory else '' + + @classmethod + def getConfiguredIntegerValue(cls, configurationData: dict, configKey: str, defaultValue: int) -> int: + configuredValue = configurationData.get(configKey, defaultValue) + try: + return int(configuredValue) + except (TypeError, ValueError): + return int(defaultValue) + + def getDefaultIndexSeasonDigits(self): + return ConfigurationController.getConfiguredIntegerValue( + self.__configurationData, + ConfigurationController.DEFAULT_INDEX_SEASON_DIGITS_CONFIG_KEY, + DEFAULT_SHOW_INDEX_SEASON_DIGITS, + ) + + def getDefaultIndexEpisodeDigits(self): + return ConfigurationController.getConfiguredIntegerValue( + self.__configurationData, + 
ConfigurationController.DEFAULT_INDEX_EPISODE_DIGITS_CONFIG_KEY, + DEFAULT_SHOW_INDEX_EPISODE_DIGITS, + ) + + def getDefaultIndicatorSeasonDigits(self): + return ConfigurationController.getConfiguredIntegerValue( + self.__configurationData, + ConfigurationController.DEFAULT_INDICATOR_SEASON_DIGITS_CONFIG_KEY, + DEFAULT_SHOW_INDICATOR_SEASON_DIGITS, + ) + + def getDefaultIndicatorEpisodeDigits(self): + return ConfigurationController.getConfiguredIntegerValue( + self.__configurationData, + ConfigurationController.DEFAULT_INDICATOR_EPISODE_DIGITS_CONFIG_KEY, + DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS, + ) def getData(self): return self.__configurationData @@ -139,4 +193,4 @@ class ConfigurationController(): # raise click.ClickException(f"PatternController.getPattern(): {repr(ex)}") # finally: # s.close() -# \ No newline at end of file +# diff --git a/src/ffx/constants.py b/src/ffx/constants.py index b4f9d87..ec22587 100644 --- a/src/ffx/constants.py +++ b/src/ffx/constants.py @@ -1,15 +1,30 @@ -VERSION='0.2.3' +VERSION='0.2.4' DATABASE_VERSION = 2 DEFAULT_QUALITY = 32 DEFAULT_AV1_PRESET = 5 +DEFAULT_VIDEO_ENCODER_LABEL = "vp9" +DEFAULT_CONTAINER_FORMAT = "webm" +DEFAULT_CONTAINER_EXTENSION = "webm" +SUPPORTED_INPUT_FILE_EXTENSIONS = ("mkv", "mp4", "avi", "flv", "webm") +FFMPEG_COMMAND_TOKENS = ("ffmpeg", "-y") +FFMPEG_NULL_OUTPUT_TOKENS = ("-f", "null", "/dev/null") + DEFAULT_STEREO_BANDWIDTH = "112" DEFAULT_AC3_BANDWIDTH = "256" DEFAULT_DTS_BANDWIDTH = "320" DEFAULT_7_1_BANDWIDTH = "384" +DEFAULT_CROPDETECT_SEEK_SECONDS = 60 +DEFAULT_CROPDETECT_DURATION_SECONDS = 180 + DEFAULT_cut_start = 60 DEFAULT_cut_length = 180 +DEFAULT_SHOW_INDEX_SEASON_DIGITS = 2 +DEFAULT_SHOW_INDEX_EPISODE_DIGITS = 2 +DEFAULT_SHOW_INDICATOR_SEASON_DIGITS = 2 +DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS = 2 + DEFAULT_OUTPUT_FILENAME_TEMPLATE = '{{ ffx_show_name }} - {{ ffx_index }}{{ ffx_index_separator }}{{ ffx_episode_name }}{{ ffx_indicator_separator }}{{ ffx_indicator }}' diff --git 
a/src/ffx/database.py b/src/ffx/database.py index 5c46034..3d5e551 100644 --- a/src/ffx/database.py +++ b/src/ffx/database.py @@ -1,8 +1,11 @@ import os, click -from sqlalchemy import create_engine +from sqlalchemy import create_engine, inspect from sqlalchemy.orm import sessionmaker +# Import the full model package so SQLAlchemy registers every mapped class +# before metadata creation and the first ORM query. +import ffx.model from ffx.model.show import Base from ffx.model.property import Property @@ -11,6 +14,7 @@ from ffx.constants import DATABASE_VERSION DATABASE_VERSION_KEY = 'database_version' +EXPECTED_TABLE_NAMES = set(Base.metadata.tables.keys()) class DatabaseVersionException(Exception): def __init__(self, errorMessage): @@ -34,7 +38,7 @@ def databaseContext(databasePath: str = ''): databaseContext['engine'] = create_engine(databaseContext['url']) databaseContext['session'] = sessionmaker(bind=databaseContext['engine']) - Base.metadata.create_all(databaseContext['engine']) + bootstrapDatabaseIfNeeded(databaseContext) # isSyncronuous = False # while not isSyncronuous: @@ -51,6 +55,19 @@ def databaseContext(databasePath: str = ''): return databaseContext + +def databaseNeedsBootstrap(databaseContext) -> bool: + inspector = inspect(databaseContext['engine']) + existingTableNames = set(inspector.get_table_names()) + return not EXPECTED_TABLE_NAMES.issubset(existingTableNames) + + +def bootstrapDatabaseIfNeeded(databaseContext): + if not databaseNeedsBootstrap(databaseContext): + return + + Base.metadata.create_all(databaseContext['engine']) + def ensureDatabaseVersion(databaseContext): currentDatabaseVersion = getDatabaseVersion(databaseContext) @@ -67,9 +84,9 @@ def getDatabaseVersion(databaseContext): Session = databaseContext['session'] s = Session() - q = s.query(Property).filter(Property.key == DATABASE_VERSION_KEY) + versionProperty = s.query(Property).filter(Property.key == DATABASE_VERSION_KEY).first() - return int(q.first().value) if q.count() else 0 
+ return int(versionProperty.value) if versionProperty is not None else 0 except Exception as ex: raise click.ClickException(f"getDatabaseVersion(): {repr(ex)}") @@ -99,4 +116,4 @@ def setDatabaseVersion(databaseContext, databaseVersion: int): except Exception as ex: raise click.ClickException(f"setDatabaseVersion(): {repr(ex)}") finally: - s.close() \ No newline at end of file + s.close() diff --git a/src/ffx/ffx_controller.py b/src/ffx/ffx_controller.py index fb2f1b9..52ec099 100644 --- a/src/ffx/ffx_controller.py +++ b/src/ffx/ffx_controller.py @@ -10,7 +10,16 @@ from ffx.track_codec import TrackCodec from ffx.video_encoder import VideoEncoder from ffx.process import executeProcess -from ffx.constants import DEFAULT_cut_start, DEFAULT_cut_length +from ffx.constants import ( + DEFAULT_CONTAINER_EXTENSION, + DEFAULT_CONTAINER_FORMAT, + DEFAULT_VIDEO_ENCODER_LABEL, + DEFAULT_cut_start, + DEFAULT_cut_length, + FFMPEG_COMMAND_TOKENS, + FFMPEG_NULL_OUTPUT_TOKENS, + SUPPORTED_INPUT_FILE_EXTENSIONS, +) from ffx.filter.quality_filter import QualityFilter from ffx.filter.preset_filter import PresetFilter @@ -21,17 +30,17 @@ from ffx.model.pattern import Pattern class FfxController(): - COMMAND_TOKENS = ['ffmpeg', '-y'] - NULL_TOKENS = ['-f', 'null', '/dev/null'] # -f null /dev/null + COMMAND_TOKENS = list(FFMPEG_COMMAND_TOKENS) + NULL_TOKENS = list(FFMPEG_NULL_OUTPUT_TOKENS) # -f null /dev/null TEMP_FILE_NAME = "ffmpeg2pass-0.log" - DEFAULT_VIDEO_ENCODER = VideoEncoder.VP9.label() + DEFAULT_VIDEO_ENCODER = DEFAULT_VIDEO_ENCODER_LABEL - DEFAULT_FILE_FORMAT = 'webm' - DEFAULT_FILE_EXTENSION = 'webm' + DEFAULT_FILE_FORMAT = DEFAULT_CONTAINER_FORMAT + DEFAULT_FILE_EXTENSION = DEFAULT_CONTAINER_EXTENSION - INPUT_FILE_EXTENSIONS = ['mkv', 'mp4', 'avi', 'flv', 'webm'] + INPUT_FILE_EXTENSIONS = list(SUPPORTED_INPUT_FILE_EXTENSIONS) CHANNEL_MAP_5_1 = 'FL-FL|FR-FR|FC-FC|LFE-LFE|SL-BL|SR-BR:5.1' @@ -54,6 +63,13 @@ class FfxController(): self.__logger: Logger = context['logger'] + 
def executeCommandSequence(self, commandSequence): + out, err, rc = executeProcess(commandSequence, context=self.__context) + if rc: + raise click.ClickException(f"Command resulted in error: rc={rc} error={err}") + return out, err, rc + + def generateAV1Tokens(self, quality, preset, subIndex : int = 0): return [f"-c:v:{int(subIndex)}", 'libsvtav1', @@ -99,6 +115,37 @@ class FfxController(): def generateVideoCopyTokens(self, subIndex): return [f"-c:v:{int(subIndex)}", 'copy'] + + def generateAudioCopyTokens(self, subIndex): + return [f"-c:a:{int(subIndex)}", 'copy'] + + def generateSubtitleCopyTokens(self, subIndex): + return [f"-c:s:{int(subIndex)}", 'copy'] + + def generateAttachmentCopyTokens(self, subIndex): + return [f"-c:t:{int(subIndex)}", 'copy'] + + def generateCopyTokens(self): + copyTokens = [] + + for trackDescriptor in self.__targetMediaDescriptor.getTrackDescriptors(trackType=TrackType.VIDEO): + copyTokens += self.generateVideoCopyTokens(trackDescriptor.getSubIndex()) + + for trackDescriptor in self.__targetMediaDescriptor.getTrackDescriptors(trackType=TrackType.AUDIO): + copyTokens += self.generateAudioCopyTokens(trackDescriptor.getSubIndex()) + + for trackDescriptor in self.__targetMediaDescriptor.getTrackDescriptors(trackType=TrackType.SUBTITLE): + copyTokens += self.generateSubtitleCopyTokens(trackDescriptor.getSubIndex()) + + attachmentDescriptors = ( + self.__sourceMediaDescriptor.getTrackDescriptors(trackType=TrackType.ATTACHMENT) + if self.__sourceMediaDescriptor is not None + else self.__targetMediaDescriptor.getTrackDescriptors(trackType=TrackType.ATTACHMENT) + ) + for trackDescriptor in attachmentDescriptors: + copyTokens += self.generateAttachmentCopyTokens(trackDescriptor.getSubIndex()) + + return copyTokens def generateCropTokens(self): @@ -124,6 +171,18 @@ class FfxController(): return [outputFilePath] + def generateEncodingMetadataTags(self, videoEncoder: VideoEncoder, quality, preset) -> dict: + metadataTags = {} + + if videoEncoder in 
(VideoEncoder.AV1, VideoEncoder.H264, VideoEncoder.VP9): + metadataTags["ENCODING_QUALITY"] = str(quality) + + if videoEncoder == VideoEncoder.AV1: + metadataTags["ENCODING_PRESET"] = str(preset) + + return metadataTags + + def generateAudioEncodingTokens(self): """Generates ffmpeg options audio streams including channel remapping, codec and bitrate""" @@ -204,7 +263,7 @@ class FfxController(): if qualityFilters and (quality := qualityFilters[0]['parameters']['quality']): self.__logger.info(f"Setting quality {quality} from command line parameter") - elif (quality := currentPattern.quality): + elif currentPattern is not None and (quality := currentPattern.quality): self.__logger.info(f"Setting quality {quality} from pattern default") else: quality = (QualityFilter.DEFAULT_H264_QUALITY @@ -214,6 +273,11 @@ class FfxController(): preset = presetFilters[0]['parameters']['preset'] if presetFilters else PresetFilter.DEFAULT_PRESET + self.__context['encoding_metadata_tags'] = self.generateEncodingMetadataTags( + videoEncoder, + quality, + preset, + ) filterParamTokens = [] @@ -238,6 +302,28 @@ class FfxController(): commandTokens = FfxController.COMMAND_TOKENS + ['-i', sourcePath] + if videoEncoder == VideoEncoder.COPY: + + commandSequence = (commandTokens + + self.__targetMediaDescriptor.getImportFileTokens() + + self.__targetMediaDescriptor.getInputMappingTokens(sourceMediaDescriptor = self.__sourceMediaDescriptor) + + self.__mdcs.generateDispositionTokens()) + + commandSequence += self.__mdcs.generateMetadataTokens() + commandSequence += self.generateCopyTokens() + + if self.__context['perform_cut']: + commandSequence += ['-ss', str(self.__context['cut_start']), '-t', str(self.__context['cut_length'])] + + commandSequence += self.generateOutputTokens(targetPath, + targetFormat) + + self.__logger.debug("FfxController.runJob(): Running command sequence") + + if not self.__context['dry_run']: + self.executeCommandSequence(commandSequence) + return + if videoEncoder == VideoEncoder.AV1: commandSequence = (commandTokens @@ -265,7 
+351,7 @@ class FfxController(): self.__logger.debug(f"FfxController.runJob(): Running command sequence") if not self.__context['dry_run']: - executeProcess(commandSequence, context = self.__context) + self.executeCommandSequence(commandSequence) if videoEncoder == VideoEncoder.H264: @@ -295,7 +381,7 @@ class FfxController(): self.__logger.debug(f"FfxController.runJob(): Running command sequence") if not self.__context['dry_run']: - executeProcess(commandSequence, context = self.__context) + self.executeCommandSequence(commandSequence) @@ -327,7 +413,7 @@ class FfxController(): self.__logger.debug(f"FfxController.runJob(): Running command sequence 1") if not self.__context['dry_run']: - executeProcess(commandSequence1, context = self.__context) + self.executeCommandSequence(commandSequence1) commandSequence2 = (commandTokens + self.__targetMediaDescriptor.getImportFileTokens() @@ -354,9 +440,7 @@ class FfxController(): self.__logger.debug(f"FfxController.runJob(): Running command sequence 2") if not self.__context['dry_run']: - out, err, rc = executeProcess(commandSequence2, context = self.__context) - if rc: - raise click.ClickException(f"Command resulted in error: rc={rc} error={err}") + self.executeCommandSequence(commandSequence2) @@ -381,4 +465,4 @@ class FfxController(): str(length), path] - out, err, rc = executeProcess(commandTokens, context = self.__context) + self.executeCommandSequence(commandTokens) diff --git a/src/ffx/file_properties.py b/src/ffx/file_properties.py index e8134db..20c5d94 100644 --- a/src/ffx/file_properties.py +++ b/src/ffx/file_properties.py @@ -1,5 +1,11 @@ import os, re, json +from .constants import ( + DEFAULT_CROPDETECT_DURATION_SECONDS, + DEFAULT_CROPDETECT_SEEK_SECONDS, + FFMPEG_COMMAND_TOKENS, + FFMPEG_NULL_OUTPUT_TOKENS, +) from .media_descriptor import MediaDescriptor from .pattern_controller import PatternController @@ -11,8 +17,10 @@ from ffx.model.pattern import Pattern class FileProperties(): + _cropdetect_cache: 
dict[tuple[str, int, int, int, int], dict[str, str]] = {} FILE_EXTENSIONS = ['mkv', 'mp4', 'avi', 'flv', 'webm'] + FFPROBE_COMMAND_TOKENS = ["ffprobe", "-hide_banner", "-show_format", "-show_streams", "-of", "json"] SE_INDICATOR_PATTERN = '([sS][0-9]+[eE][0-9]+)' SEASON_EPISODE_INDICATOR_MATCH = '[sS]([0-9]+)[eE]([0-9]+)' @@ -22,6 +30,18 @@ class FileProperties(): DEFAULT_INDEX_DIGITS = 3 + @classmethod + def extractSeasonEpisodeValues(cls, sourceText: str) -> tuple[int | None, int] | None: + seasonEpisodeMatch = re.search(cls.SEASON_EPISODE_INDICATOR_MATCH, str(sourceText)) + if seasonEpisodeMatch is not None: + return int(seasonEpisodeMatch.group(1)), int(seasonEpisodeMatch.group(2)) + + episodeMatch = re.search(cls.EPISODE_INDICATOR_MATCH, str(sourceText)) + if episodeMatch is not None: + return None, int(episodeMatch.group(1)) + + return None + def __init__(self, context, sourcePath): self.context = context @@ -44,9 +64,10 @@ class FileProperties(): self.__sourceFilenameExtension = '' self.__pc = PatternController(context) + self.__usePattern = bool(self.context.get('use_pattern', True)) # Checking if database contains matching pattern - matchResult = self.__pc.matchFilename(self.__sourceFilename) + matchResult = self.__pc.matchFilename(self.__sourceFilename) if self.__usePattern else {} self.__logger.debug(f"FileProperties.__init__(): Match result: {matchResult}") @@ -56,26 +77,67 @@ class FileProperties(): databaseMatchedGroups = matchResult['match'].groups() self.__logger.debug(f"FileProperties.__init__(): Matched groups: {databaseMatchedGroups}") - seIndicator = databaseMatchedGroups[0] - - se_match = re.search(FileProperties.SEASON_EPISODE_INDICATOR_MATCH, seIndicator) - e_match = re.search(FileProperties.EPISODE_INDICATOR_MATCH, seIndicator) - + indicatorSource = databaseMatchedGroups[0] else: self.__logger.debug(f"FileProperties.__init__(): Checking file name for indicator {self.__sourceFilename}") + indicatorSource = self.__sourceFilename - se_match = 
re.search(FileProperties.SEASON_EPISODE_INDICATOR_MATCH, self.__sourceFilename) - e_match = re.search(FileProperties.EPISODE_INDICATOR_MATCH, self.__sourceFilename) - - if se_match is not None: - self.__season = int(se_match.group(1)) - self.__episode = int(se_match.group(2)) - elif e_match is not None: - self.__season = -1 - self.__episode = int(e_match.group(1)) - else: + seasonEpisodeValues = self.extractSeasonEpisodeValues(indicatorSource) + if seasonEpisodeValues is None: self.__season = -1 self.__episode = -1 + else: + sourceSeason, sourceEpisode = seasonEpisodeValues + self.__season = -1 if sourceSeason is None else int(sourceSeason) + self.__episode = int(sourceEpisode) + + self.__ffprobeData = None + + def _getCropdetectWindow(self): + cropdetectContext = self.context.get('cropdetect', {}) + + seekSeconds = int(cropdetectContext.get('seek_seconds', DEFAULT_CROPDETECT_SEEK_SECONDS)) + durationSeconds = int(cropdetectContext.get('duration_seconds', DEFAULT_CROPDETECT_DURATION_SECONDS)) + + if seekSeconds < 0: + raise ValueError("Crop detection seek seconds must be zero or greater.") + if durationSeconds <= 0: + raise ValueError("Crop detection duration seconds must be greater than zero.") + + return seekSeconds, durationSeconds + + def _getCropdetectCacheKey(self): + sourceStat = os.stat(self.__sourcePath) + seekSeconds, durationSeconds = self._getCropdetectWindow() + + return ( + os.path.abspath(self.__sourcePath), + sourceStat.st_mtime_ns, + sourceStat.st_size, + seekSeconds, + durationSeconds, + ) + + @classmethod + def _clear_cropdetect_cache(cls): + cls._cropdetect_cache.clear() + + def _getFfprobeData(self): + if self.__ffprobeData is not None: + return self.__ffprobeData + + ffprobeOutput, ffprobeError, returnCode = executeProcess( + FileProperties.FFPROBE_COMMAND_TOKENS + [self.__sourcePath] + ) + + if 'Invalid data found when processing input' in ffprobeError: + raise Exception(f"File {self.__sourcePath} does not contain valid stream data") + + if 
returnCode != 0: + raise Exception(f"ffprobe returned with error {returnCode}") + + self.__ffprobeData = json.loads(ffprobeOutput) + return self.__ffprobeData def getFormatData(self): @@ -98,22 +160,7 @@ class FileProperties(): } } """ - - # ffprobe -hide_banner -show_format -of json - ffprobeOutput, ffprobeError, returnCode = executeProcess(["ffprobe", - "-hide_banner", - "-show_format", - "-of", "json", - self.__sourcePath]) #, - #context = self.context) - - if 'Invalid data found when processing input' in ffprobeError: - raise Exception(f"File {self.__sourcePath} does not contain valid stream data") - - if returnCode != 0: - raise Exception(f"ffprobe returned with error {returnCode}") - - return json.loads(ffprobeOutput)['format'] + return self._getFfprobeData()['format'] def getStreamData(self): @@ -158,40 +205,32 @@ class FileProperties(): } } """ - - # ffprobe -hide_banner -show_streams -of json - ffprobeOutput, ffprobeError, returnCode = executeProcess(["ffprobe", - "-hide_banner", - "-show_streams", - "-of", "json", - self.__sourcePath]) #, - #context = self.context) - - if 'Invalid data found when processing input' in ffprobeError: - raise Exception(f"File {self.__sourcePath} does not contain valid stream data") - - - if returnCode != 0: - raise Exception(f"ffprobe returned with error {returnCode}") - - - return json.loads(ffprobeOutput)['streams'] + return self._getFfprobeData()['streams'] def findCropArguments(self): """""" - # ffmpeg -i <input.file> -vf cropdetect -f null - - ffprobeOutput, ffprobeError, returnCode = executeProcess(["ffmpeg", "-i", - self.__sourcePath, - "-vf", "cropdetect", - "-ss", "60", - "-t", "180", - "-f", "null", "-" - ]) + cacheKey = self._getCropdetectCacheKey() + cachedCropArguments = FileProperties._cropdetect_cache.get(cacheKey) + if cachedCropArguments is not None: + self.__logger.debug( + "FileProperties.findCropArguments(): Reusing cached cropdetect result for %s", + self.__sourcePath, + ) + return 
dict(cachedCropArguments) - errorLines = ffprobeError.split('\n') + seekSeconds, durationSeconds = self._getCropdetectWindow() + + cropdetectCommand = ( + list(FFMPEG_COMMAND_TOKENS) + + ["-ss", str(seekSeconds), "-i", self.__sourcePath, "-t", str(durationSeconds), "-vf", "cropdetect"] + + list(FFMPEG_NULL_OUTPUT_TOKENS) + ) + _ffmpegOutput, ffmpegError, returnCode = executeProcess(cropdetectCommand, context=self.context) + + errorLines = ffmpegError.split('\n') crops = {} for el in errorLines: @@ -204,21 +243,26 @@ class FileProperties(): crops[cropParam] = crops.get(cropParam, 0) + 1 if crops: - cropHistogram = sorted(crops, reverse=True) - cropString = cropHistogram[0] + cropString = max(crops.items(), key=lambda item: (item[1], item[0]))[0] cropTokens = cropString.split('=') cropValueTokens = cropTokens[1] cropValues = cropValueTokens.split(':') - return { + cropArguments = { CropFilter.OUTPUT_WIDTH_KEY: cropValues[0], CropFilter.OUTPUT_HEIGHT_KEY: cropValues[1], CropFilter.OFFSET_X_KEY: cropValues[2], CropFilter.OFFSET_Y_KEY: cropValues[3] } - else: - return {} + FileProperties._cropdetect_cache[cacheKey] = dict(cropArguments) + return cropArguments + + if returnCode != 0: + raise Exception(f"ffmpeg cropdetect returned with error {returnCode}") + + FileProperties._cropdetect_cache[cacheKey] = {} + return {} def getMediaDescriptor(self): diff --git a/src/ffx/helper.py b/src/ffx/helper.py index e175bbb..00f1d45 100644 --- a/src/ffx/helper.py +++ b/src/ffx/helper.py @@ -1,8 +1,10 @@ -import re, logging +import re from jinja2 import Environment, Undefined from .constants import DEFAULT_OUTPUT_FILENAME_TEMPLATE from .configuration_controller import ConfigurationController +from .logging_utils import get_ffx_logger +from .show_descriptor import ShowDescriptor class EmptyStringUndefined(Undefined): @@ -15,7 +17,21 @@ DIFF_REMOVED_KEY = 'removed' DIFF_CHANGED_KEY = 'changed' DIFF_UNCHANGED_KEY = 'unchanged' -RICH_COLOR_PATTERN = '\[[a-z_]+\](.+)\[\/[a-z_]+\]' 
+FILENAME_FILTER_TRANSLATION = str.maketrans( + { + "/": "-", + ":": ";", + "*": "", + "'": "", + "?": "#", + "♥": "", + "’": "", + } +) +TMDB_FILLER_MARKERS = (" (*)", "(*)") +TMDB_EPISODE_RANGE_SUFFIX_REGEX = re.compile(r"\(([0-9]+)[-/]([0-9]+)\)$") +TMDB_EPISODE_PART_SUFFIX_REGEX = re.compile(r"\(([0-9]+)\)$") +RICH_COLOR_REGEX = re.compile(r"\[[a-z_]+\](.+)\[/[a-z_]+\]") def dictDiff(a : dict, b : dict, ignoreKeys: list = [], removeKeys: list = []): @@ -114,49 +130,45 @@ def filterFilename(fileName: str) -> str: """This filter replaces charactes from TMDB responses with characters less problemating when using in filenames or removes them""" - fileName = str(fileName).replace('/', '-') - fileName = str(fileName).replace(':', ';') - fileName = str(fileName).replace('*', '') - fileName = str(fileName).replace("'", '') - fileName = str(fileName).replace("?", '#') - fileName = str(fileName).replace('♥', '') - fileName = str(fileName).replace('’', '') - - return fileName.strip() + return str(fileName).translate(FILENAME_FILTER_TRANSLATION).strip() def substituteTmdbFilename(fileName: str) -> str: """If chaining this method with filterFilename use this one first as the latter will destroy some patterns""" - # This indicates filler episodes in TMDB episode names - fileName = str(fileName).replace(' (*)', '') - fileName = str(fileName).replace('(*)', '') + normalizedFileName = str(fileName) - # This indicates the index of multi-episode files - episodePartMatch = re.search("\\(([0-9]+)\\)$", fileName) + for fillerMarker in TMDB_FILLER_MARKERS: + normalizedFileName = normalizedFileName.replace(fillerMarker, '') + + episodeRangeMatch = TMDB_EPISODE_RANGE_SUFFIX_REGEX.search(normalizedFileName) + if episodeRangeMatch is not None: + partFirstIndex, partLastIndex = episodeRangeMatch.groups() + return TMDB_EPISODE_RANGE_SUFFIX_REGEX.sub( + f"Teil {partFirstIndex}-{partLastIndex}", + normalizedFileName, + count=1, + ) + + episodePartMatch = 
TMDB_EPISODE_PART_SUFFIX_REGEX.search(normalizedFileName) if episodePartMatch is not None: - partSuffix = str(episodePartMatch.group(0)) - partIndex = episodePartMatch.groups()[0] - fileName = str(fileName).replace(partSuffix, f"Teil {partIndex}") + partIndex = episodePartMatch.group(1) + return TMDB_EPISODE_PART_SUFFIX_REGEX.sub( + f"Teil {partIndex}", + normalizedFileName, + count=1, + ) - # Also multi-episodes with first and last episode index - episodePartMatch = re.search("\\(([0-9]+)[-\\/]([0-9]+)\\)$", fileName) - if episodePartMatch is not None: - partSuffix = str(episodePartMatch.group(0)) - partFirstIndex = episodePartMatch.groups()[0] - partLastIndex = episodePartMatch.groups()[1] - fileName = str(fileName).replace(partSuffix, f"Teil {partFirstIndex}-{partLastIndex}") - - return fileName + return normalizedFileName def getEpisodeFileBasename(showName, episodeName, season, episode, - indexSeasonDigits = 2, - indexEpisodeDigits = 2, - indicatorSeasonDigits = 2, - indicatorEpisodeDigits = 2, + indexSeasonDigits = None, + indexEpisodeDigits = None, + indicatorSeasonDigits = None, + indicatorEpisodeDigits = None, context = None): """ One Piece: @@ -188,12 +200,21 @@ def getEpisodeFileBasename(showName, configData = cc.getData() if cc is not None else {} outputFilenameTemplate = configData.get(ConfigurationController.OUTPUT_FILENAME_TEMPLATE_KEY, DEFAULT_OUTPUT_FILENAME_TEMPLATE) + defaultDigitLengths = ShowDescriptor.getDefaultDigitLengths(context) + + if indexSeasonDigits is None: + indexSeasonDigits = defaultDigitLengths[ShowDescriptor.INDEX_SEASON_DIGITS_KEY] + if indexEpisodeDigits is None: + indexEpisodeDigits = defaultDigitLengths[ShowDescriptor.INDEX_EPISODE_DIGITS_KEY] + if indicatorSeasonDigits is None: + indicatorSeasonDigits = defaultDigitLengths[ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY] + if indicatorEpisodeDigits is None: + indicatorEpisodeDigits = defaultDigitLengths[ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY] if context is not None and 
'logger' in context.keys(): logger = context['logger'] else: - logger = logging.getLogger('FFX') - logger.addHandler(logging.NullHandler()) + logger = get_ffx_logger() indexSeparator = ' ' if indexSeasonDigits or indexEpisodeDigits else '' @@ -231,9 +252,8 @@ def formatRichColor(text: str, color: str = None): return f"[{color}]{text}[/{color}]" def removeRichColor(text: str): - richColorMatch = re.search(RICH_COLOR_PATTERN, text) + richColorMatch = RICH_COLOR_REGEX.search(str(text)) if richColorMatch is None: return text else: return str(richColorMatch.group(1)) - diff --git a/src/ffx/iso_language.py b/src/ffx/iso_language.py index cc01000..4dab399 100644 --- a/src/ffx/iso_language.py +++ b/src/ffx/iso_language.py @@ -1,85 +1,196 @@ from enum import Enum import difflib + class IsoLanguage(Enum): - AFRIKAANS = {"name": "Afrikaans", "iso639_1": "af", "iso639_2": ["afr"]} - ALBANIAN = {"name": "Albanian", "iso639_1": "sq", "iso639_2": ["alb"]} - ARABIC = {"name": "Arabic", "iso639_1": "ar", "iso639_2": ["ara"]} - ARMENIAN = {"name": "Armenian", "iso639_1": "hy", "iso639_2": ["arm"]} - AZERBAIJANI = {"name": "Azerbaijani", "iso639_1": "az", "iso639_2": ["aze"]} - BASQUE = {"name": "Basque", "iso639_1": "eu", "iso639_2": ["baq"]} - BELARUSIAN = {"name": "Belarusian", "iso639_1": "be", "iso639_2": ["bel"]} - BOKMAL = {"name": "Bokmål", "iso639_1": "nb", "iso639_2": ["nob"]} # Norwegian Bokmål - BULGARIAN = {"name": "Bulgarian", "iso639_1": "bg", "iso639_2": ["bul"]} - CATALAN = {"name": "Catalan", "iso639_1": "ca", "iso639_2": ["cat"]} - CHINESE = {"name": "Chinese", "iso639_1": "zh", "iso639_2": ["zho", "chi"]} - CROATIAN = {"name": "Croatian", "iso639_1": "hr", "iso639_2": ["hrv"]} - CZECH = {"name": "Czech", "iso639_1": "cs", "iso639_2": ["cze"]} - DANISH = {"name": "Danish", "iso639_1": "da", "iso639_2": ["dan"]} - DUTCH = {"name": "Dutch", "iso639_1": "nl", "iso639_2": ["nld", "dut"]} - ENGLISH = {"name": "English", "iso639_1": "en", "iso639_2": ["eng"]} - ESTONIAN 
= {"name": "Estonian", "iso639_1": "et", "iso639_2": ["est"]} - FILIPINO = {"name": "Filipino", "iso639_1": "tl", "iso639_2": ["fil"]} # Tagalog - FINNISH = {"name": "Finnish", "iso639_1": "fi", "iso639_2": ["fin"]} - FRENCH = {"name": "French", "iso639_1": "fr", "iso639_2": ["fra", "fre"]} - GALICIAN = {"name": "Galician", "iso639_1": "gl", "iso639_2": ["glg"]} - GEORGIAN = {"name": "Georgian", "iso639_1": "ka", "iso639_2": ["geo"]} - GERMAN = {"name": "German", "iso639_1": "de", "iso639_2": ["deu", "ger"]} - GREEK = {"name": "Greek", "iso639_1": "el", "iso639_2": ["gre"]} - HEBREW = {"name": "Hebrew", "iso639_1": "he", "iso639_2": ["heb"]} - HINDI = {"name": "Hindi", "iso639_1": "hi", "iso639_2": ["hin"]} - HUNGARIAN = {"name": "Hungarian", "iso639_1": "hu", "iso639_2": ["hun"]} - ICELANDIC = {"name": "Icelandic", "iso639_1": "is", "iso639_2": ["ice"]} - INDONESIAN = {"name": "Indonesian", "iso639_1": "id", "iso639_2": ["ind"]} - IRISH = {"name": "Irish", "iso639_1": "ga", "iso639_2": ["gle"]} - ITALIAN = {"name": "Italian", "iso639_1": "it", "iso639_2": ["ita"]} - JAPANESE = {"name": "Japanese", "iso639_1": "ja", "iso639_2": ["jpn"]} - KANNADA = {"name": "Kannada", "iso639_1": "kn", "iso639_2": ["kan"]} - KAZAKH = {"name": "Kazakh", "iso639_1": "kk", "iso639_2": ["kaz"]} - KOREAN = {"name": "Korean", "iso639_1": "ko", "iso639_2": ["kor"]} - LATIN = {"name": "Latin", "iso639_1": "la", "iso639_2": ["lat"]} - LATVIAN = {"name": "Latvian", "iso639_1": "lv", "iso639_2": ["lav"]} - LITHUANIAN = {"name": "Lithuanian", "iso639_1": "lt", "iso639_2": ["lit"]} - MACEDONIAN = {"name": "Macedonian", "iso639_1": "mk", "iso639_2": ["mac"]} - MALAY = {"name": "Malay", "iso639_1": "ms", "iso639_2": ["may"]} - MALAYALAM = {"name": "Malayalam", "iso639_1": "ml", "iso639_2": ["mal"]} - MALTESE = {"name": "Maltese", "iso639_1": "mt", "iso639_2": ["mlt"]} - NORWEGIAN = {"name": "Norwegian", "iso639_1": "no", "iso639_2": ["nor"]} - PERSIAN = {"name": "Persian", "iso639_1": "fa", 
"iso639_2": ["per"]} - POLISH = {"name": "Polish", "iso639_1": "pl", "iso639_2": ["pol"]} - PORTUGUESE = {"name": "Portuguese", "iso639_1": "pt", "iso639_2": ["por"]} - ROMANIAN = {"name": "Romanian", "iso639_1": "ro", "iso639_2": ["rum"]} - RUSSIAN = {"name": "Russian", "iso639_1": "ru", "iso639_2": ["rus"]} - NORTHERN_SAMI = {"name": "Northern Sami", "iso639_1": "se", "iso639_2": ["sme"]} - SAMOAN = {"name": "Samoan", "iso639_1": "sm", "iso639_2": ["smo"]} - SANGO = {"name": "Sango", "iso639_1": "sg", "iso639_2": ["sag"]} - SANSKRIT = {"name": "Sanskrit", "iso639_1": "sa", "iso639_2": ["san"]} - SARDINIAN = {"name": "Sardinian", "iso639_1": "sc", "iso639_2": ["srd"]} - SERBIAN = {"name": "Serbian", "iso639_1": "sr", "iso639_2": ["srp"]} - SHONA = {"name": "Shona", "iso639_1": "sn", "iso639_2": ["sna"]} - SINDHI = {"name": "Sindhi", "iso639_1": "sd", "iso639_2": ["snd"]} - SINHALA = {"name": "Sinhala", "iso639_1": "si", "iso639_2": ["sin"]} - SLOVAK = {"name": "Slovak", "iso639_1": "sk", "iso639_2": ["slk"]} - SLOVENIAN = {"name": "Slovenian", "iso639_1": "sl", "iso639_2": ["slv"]} - SOMALI = {"name": "Somali", "iso639_1": "so", "iso639_2": ["som"]} - SOUTHERN_SOTHO = {"name": "Southern Sotho", "iso639_1": "st", "iso639_2": ["sot"]} - SPANISH = {"name": "Spanish", "iso639_1": "es", "iso639_2": ["spa"]} - SUNDANESE = {"name": "Sundanese", "iso639_1": "su", "iso639_2": ["sun"]} - SWAHILI = {"name": "Swahili", "iso639_1": "sw", "iso639_2": ["swa"]} - SWATI = {"name": "Swati", "iso639_1": "ss", "iso639_2": ["ssw"]} - SWEDISH = {"name": "Swedish", "iso639_1": "sv", "iso639_2": ["swe"]} - TAGALOG = {"name": "Tagalog", "iso639_1": "tl", "iso639_2": ["tgl"]} - TAMIL = {"name": "Tamil", "iso639_1": "ta", "iso639_2": ["tam"]} - TELUGU = {"name": "Telugu", "iso639_1": "te", "iso639_2": ["tel"]} - THAI = {"name": "Thai", "iso639_1": "th", "iso639_2": ["tha"]} - TURKISH = {"name": "Turkish", "iso639_1": "tr", "iso639_2": ["tur"]} - UKRAINIAN = {"name": "Ukrainian", "iso639_1": 
"uk", "iso639_2": ["ukr"]} - URDU = {"name": "Urdu", "iso639_1": "ur", "iso639_2": ["urd"]} - VIETNAMESE = {"name": "Vietnamese", "iso639_1": "vi", "iso639_2":[ "vie"]} - WELSH = {"name": "Welsh", "iso639_1": "cy", "iso639_2": ["wel"]} + ABKHAZIAN = {"name": "Abkhazian", "iso639_1": "ab", "iso639_2": ["abk"]} + AFAR = {"name": "Afar", "iso639_1": "aa", "iso639_2": ["aar"]} + AFRIKAANS = {"name": "Afrikaans", "iso639_1": "af", "iso639_2": ["afr"]} + AKAN = {"name": "Akan", "iso639_1": "ak", "iso639_2": ["aka"]} + ALBANIAN = {"name": "Albanian", "iso639_1": "sq", "iso639_2": ["sqi", "alb"]} + AMHARIC = {"name": "Amharic", "iso639_1": "am", "iso639_2": ["amh"]} + ARABIC = {"name": "Arabic", "iso639_1": "ar", "iso639_2": ["ara"]} + ARAGONESE = {"name": "Aragonese", "iso639_1": "an", "iso639_2": ["arg"]} + ARMENIAN = {"name": "Armenian", "iso639_1": "hy", "iso639_2": ["hye", "arm"]} + ASSAMESE = {"name": "Assamese", "iso639_1": "as", "iso639_2": ["asm"]} + AVARIC = {"name": "Avaric", "iso639_1": "av", "iso639_2": ["ava"]} + AVESTAN = {"name": "Avestan", "iso639_1": "ae", "iso639_2": ["ave"]} + AYMARA = {"name": "Aymara", "iso639_1": "ay", "iso639_2": ["aym"]} + AZERBAIJANI = {"name": "Azerbaijani", "iso639_1": "az", "iso639_2": ["aze"]} + BAMBARA = {"name": "Bambara", "iso639_1": "bm", "iso639_2": ["bam"]} + BASHKIR = {"name": "Bashkir", "iso639_1": "ba", "iso639_2": ["bak"]} + BASQUE = {"name": "Basque", "iso639_1": "eu", "iso639_2": ["eus", "baq"]} + BELARUSIAN = {"name": "Belarusian", "iso639_1": "be", "iso639_2": ["bel"]} + BENGALI = {"name": "Bengali", "iso639_1": "bn", "iso639_2": ["ben"]} + BISLAMA = {"name": "Bislama", "iso639_1": "bi", "iso639_2": ["bis"]} + BOKMAL = {"name": "Bokmål", "iso639_1": "nb", "iso639_2": ["nob"]} + BOSNIAN = {"name": "Bosnian", "iso639_1": "bs", "iso639_2": ["bos"]} + BRETON = {"name": "Breton", "iso639_1": "br", "iso639_2": ["bre"]} + BULGARIAN = {"name": "Bulgarian", "iso639_1": "bg", "iso639_2": ["bul"]} + BURMESE = {"name": 
"Burmese", "iso639_1": "my", "iso639_2": ["mya", "bur"]} + CATALAN = {"name": "Catalan", "iso639_1": "ca", "iso639_2": ["cat"]} + CHAMORRO = {"name": "Chamorro", "iso639_1": "ch", "iso639_2": ["cha"]} + CHECHEN = {"name": "Chechen", "iso639_1": "ce", "iso639_2": ["che"]} + CHICHEWA = {"name": "Chichewa", "iso639_1": "ny", "iso639_2": ["nya"]} + CHINESE = {"name": "Chinese", "iso639_1": "zh", "iso639_2": ["zho", "chi"]} + CHURCH_SLAVIC = {"name": "Church Slavic", "iso639_1": "cu", "iso639_2": ["chu"]} + CHUVASH = {"name": "Chuvash", "iso639_1": "cv", "iso639_2": ["chv"]} + CORNISH = {"name": "Cornish", "iso639_1": "kw", "iso639_2": ["cor"]} + CORSICAN = {"name": "Corsican", "iso639_1": "co", "iso639_2": ["cos"]} + CREE = {"name": "Cree", "iso639_1": "cr", "iso639_2": ["cre"]} + CROATIAN = {"name": "Croatian", "iso639_1": "hr", "iso639_2": ["hrv"]} + CZECH = {"name": "Czech", "iso639_1": "cs", "iso639_2": ["ces", "cze"]} + DANISH = {"name": "Danish", "iso639_1": "da", "iso639_2": ["dan"]} + DIVEHI = {"name": "Divehi", "iso639_1": "dv", "iso639_2": ["div"]} + DUTCH = {"name": "Dutch", "iso639_1": "nl", "iso639_2": ["nld", "dut"]} + DZONGKHA = {"name": "Dzongkha", "iso639_1": "dz", "iso639_2": ["dzo"]} + ENGLISH = {"name": "English", "iso639_1": "en", "iso639_2": ["eng"]} + ESPERANTO = {"name": "Esperanto", "iso639_1": "eo", "iso639_2": ["epo"]} + ESTONIAN = {"name": "Estonian", "iso639_1": "et", "iso639_2": ["est"]} + EWE = {"name": "Ewe", "iso639_1": "ee", "iso639_2": ["ewe"]} + FAROESE = {"name": "Faroese", "iso639_1": "fo", "iso639_2": ["fao"]} + FIJIAN = {"name": "Fijian", "iso639_1": "fj", "iso639_2": ["fij"]} + FINNISH = {"name": "Finnish", "iso639_1": "fi", "iso639_2": ["fin"]} + FRENCH = {"name": "French", "iso639_1": "fr", "iso639_2": ["fra", "fre"]} + FULAH = {"name": "Fulah", "iso639_1": "ff", "iso639_2": ["ful"]} + GALICIAN = {"name": "Galician", "iso639_1": "gl", "iso639_2": ["glg"]} + GANDA = {"name": "Ganda", "iso639_1": "lg", "iso639_2": ["lug"]} + 
GEORGIAN = {"name": "Georgian", "iso639_1": "ka", "iso639_2": ["kat", "geo"]} + GERMAN = {"name": "German", "iso639_1": "de", "iso639_2": ["deu", "ger"]} + GREEK = {"name": "Greek", "iso639_1": "el", "iso639_2": ["ell", "gre"]} + GUARANI = {"name": "Guarani", "iso639_1": "gn", "iso639_2": ["grn"]} + GUJARATI = {"name": "Gujarati", "iso639_1": "gu", "iso639_2": ["guj"]} + HAITIAN = {"name": "Haitian", "iso639_1": "ht", "iso639_2": ["hat"]} + HAUSA = {"name": "Hausa", "iso639_1": "ha", "iso639_2": ["hau"]} + HEBREW = {"name": "Hebrew", "iso639_1": "he", "iso639_2": ["heb"]} + HERERO = {"name": "Herero", "iso639_1": "hz", "iso639_2": ["her"]} + HINDI = {"name": "Hindi", "iso639_1": "hi", "iso639_2": ["hin"]} + HIRI_MOTU = {"name": "Hiri Motu", "iso639_1": "ho", "iso639_2": ["hmo"]} + HUNGARIAN = {"name": "Hungarian", "iso639_1": "hu", "iso639_2": ["hun"]} + ICELANDIC = {"name": "Icelandic", "iso639_1": "is", "iso639_2": ["isl", "ice"]} + IDO = {"name": "Ido", "iso639_1": "io", "iso639_2": ["ido"]} + IGBO = {"name": "Igbo", "iso639_1": "ig", "iso639_2": ["ibo"]} + INDONESIAN = {"name": "Indonesian", "iso639_1": "id", "iso639_2": ["ind"]} + INTERLINGUA = {"name": "Interlingua", "iso639_1": "ia", "iso639_2": ["ina"]} + INTERLINGUE = {"name": "Interlingue", "iso639_1": "ie", "iso639_2": ["ile"]} + INUKTITUT = {"name": "Inuktitut", "iso639_1": "iu", "iso639_2": ["iku"]} + INUPIAQ = {"name": "Inupiaq", "iso639_1": "ik", "iso639_2": ["ipk"]} + IRISH = {"name": "Irish", "iso639_1": "ga", "iso639_2": ["gle"]} + ITALIAN = {"name": "Italian", "iso639_1": "it", "iso639_2": ["ita"]} + JAPANESE = {"name": "Japanese", "iso639_1": "ja", "iso639_2": ["jpn"]} + JAVANESE = {"name": "Javanese", "iso639_1": "jv", "iso639_2": ["jav"]} + KALAALLISUT = {"name": "Kalaallisut", "iso639_1": "kl", "iso639_2": ["kal"]} + KANNADA = {"name": "Kannada", "iso639_1": "kn", "iso639_2": ["kan"]} + KANURI = {"name": "Kanuri", "iso639_1": "kr", "iso639_2": ["kau"]} + KASHMIRI = {"name": "Kashmiri", 
"iso639_1": "ks", "iso639_2": ["kas"]} + KAZAKH = {"name": "Kazakh", "iso639_1": "kk", "iso639_2": ["kaz"]} + KHMER = {"name": "Khmer", "iso639_1": "km", "iso639_2": ["khm"]} + KIKUYU = {"name": "Kikuyu", "iso639_1": "ki", "iso639_2": ["kik"]} + KINYARWANDA = {"name": "Kinyarwanda", "iso639_1": "rw", "iso639_2": ["kin"]} + KIRGHIZ = {"name": "Kirghiz", "iso639_1": "ky", "iso639_2": ["kir"]} + KOMI = {"name": "Komi", "iso639_1": "kv", "iso639_2": ["kom"]} + KONGO = {"name": "Kongo", "iso639_1": "kg", "iso639_2": ["kon"]} + KOREAN = {"name": "Korean", "iso639_1": "ko", "iso639_2": ["kor"]} + KUANYAMA = {"name": "Kuanyama", "iso639_1": "kj", "iso639_2": ["kua"]} + KURDISH = {"name": "Kurdish", "iso639_1": "ku", "iso639_2": ["kur"]} + LAO = {"name": "Lao", "iso639_1": "lo", "iso639_2": ["lao"]} + LATIN = {"name": "Latin", "iso639_1": "la", "iso639_2": ["lat"]} + LATVIAN = {"name": "Latvian", "iso639_1": "lv", "iso639_2": ["lav"]} + LIMBURGAN = {"name": "Limburgan", "iso639_1": "li", "iso639_2": ["lim"]} + LINGALA = {"name": "Lingala", "iso639_1": "ln", "iso639_2": ["lin"]} + LITHUANIAN = {"name": "Lithuanian", "iso639_1": "lt", "iso639_2": ["lit"]} + LUBA_KATANGA = {"name": "Luba-Katanga", "iso639_1": "lu", "iso639_2": ["lub"]} + LUXEMBOURGISH = {"name": "Luxembourgish", "iso639_1": "lb", "iso639_2": ["ltz"]} + MACEDONIAN = {"name": "Macedonian", "iso639_1": "mk", "iso639_2": ["mkd", "mac"]} + MALAGASY = {"name": "Malagasy", "iso639_1": "mg", "iso639_2": ["mlg"]} + MALAY = {"name": "Malay", "iso639_1": "ms", "iso639_2": ["msa", "may"]} + MALAYALAM = {"name": "Malayalam", "iso639_1": "ml", "iso639_2": ["mal"]} + MALTESE = {"name": "Maltese", "iso639_1": "mt", "iso639_2": ["mlt"]} + MANX = {"name": "Manx", "iso639_1": "gv", "iso639_2": ["glv"]} + MAORI = {"name": "Maori", "iso639_1": "mi", "iso639_2": ["mri", "mao"]} + MARATHI = {"name": "Marathi", "iso639_1": "mr", "iso639_2": ["mar"]} + MARSHALLESE = {"name": "Marshallese", "iso639_1": "mh", "iso639_2": ["mah"]} + 
MONGOLIAN = {"name": "Mongolian", "iso639_1": "mn", "iso639_2": ["mon"]} + NAURU = {"name": "Nauru", "iso639_1": "na", "iso639_2": ["nau"]} + NAVAJO = {"name": "Navajo", "iso639_1": "nv", "iso639_2": ["nav"]} + NDONGA = {"name": "Ndonga", "iso639_1": "ng", "iso639_2": ["ndo"]} + NEPALI = {"name": "Nepali", "iso639_1": "ne", "iso639_2": ["nep"]} + NORTH_NDEBELE = {"name": "North Ndebele", "iso639_1": "nd", "iso639_2": ["nde"]} + NORTHERN_SAMI = {"name": "Northern Sami", "iso639_1": "se", "iso639_2": ["sme"]} + NORWEGIAN = {"name": "Norwegian", "iso639_1": "no", "iso639_2": ["nor"]} + NORWEGIAN_NYNORSK = {"name": "Nynorsk", "iso639_1": "nn", "iso639_2": ["nno"]} + OCCITAN = {"name": "Occitan", "iso639_1": "oc", "iso639_2": ["oci"]} + OJIBWA = {"name": "Ojibwa", "iso639_1": "oj", "iso639_2": ["oji"]} + ORIYA = {"name": "Oriya", "iso639_1": "or", "iso639_2": ["ori"]} + OROMO = {"name": "Oromo", "iso639_1": "om", "iso639_2": ["orm"]} + OSSETIAN = {"name": "Ossetian", "iso639_1": "os", "iso639_2": ["oss"]} + PALI = {"name": "Pali", "iso639_1": "pi", "iso639_2": ["pli"]} + PANJABI = {"name": "Panjabi", "iso639_1": "pa", "iso639_2": ["pan"]} + PERSIAN = {"name": "Persian", "iso639_1": "fa", "iso639_2": ["fas", "per"]} + POLISH = {"name": "Polish", "iso639_1": "pl", "iso639_2": ["pol"]} + PORTUGUESE = {"name": "Portuguese", "iso639_1": "pt", "iso639_2": ["por"]} + PUSHTO = {"name": "Pushto", "iso639_1": "ps", "iso639_2": ["pus"]} + QUECHUA = {"name": "Quechua", "iso639_1": "qu", "iso639_2": ["que"]} + ROMANIAN = {"name": "Romanian", "iso639_1": "ro", "iso639_2": ["ron", "rum"]} + ROMANSH = {"name": "Romansh", "iso639_1": "rm", "iso639_2": ["roh"]} + RUNDI = {"name": "Rundi", "iso639_1": "rn", "iso639_2": ["run"]} + RUSSIAN = {"name": "Russian", "iso639_1": "ru", "iso639_2": ["rus"]} + SAMOAN = {"name": "Samoan", "iso639_1": "sm", "iso639_2": ["smo"]} + SANGO = {"name": "Sango", "iso639_1": "sg", "iso639_2": ["sag"]} + SANSKRIT = {"name": "Sanskrit", "iso639_1": "sa", 
"iso639_2": ["san"]} + SARDINIAN = {"name": "Sardinian", "iso639_1": "sc", "iso639_2": ["srd"]} + SCOTTISH_GAELIC = {"name": "Scottish Gaelic", "iso639_1": "gd", "iso639_2": ["gla"]} + SERBIAN = {"name": "Serbian", "iso639_1": "sr", "iso639_2": ["srp"]} + SHONA = {"name": "Shona", "iso639_1": "sn", "iso639_2": ["sna"]} + SICHUAN_YI = {"name": "Sichuan Yi", "iso639_1": "ii", "iso639_2": ["iii"]} + SINDHI = {"name": "Sindhi", "iso639_1": "sd", "iso639_2": ["snd"]} + SINHALA = {"name": "Sinhala", "iso639_1": "si", "iso639_2": ["sin"]} + SLOVAK = {"name": "Slovak", "iso639_1": "sk", "iso639_2": ["slk", "slo"]} + SLOVENIAN = {"name": "Slovenian", "iso639_1": "sl", "iso639_2": ["slv"]} + SOMALI = {"name": "Somali", "iso639_1": "so", "iso639_2": ["som"]} + SOUTH_NDEBELE = {"name": "South Ndebele", "iso639_1": "nr", "iso639_2": ["nbl"]} + SOUTHERN_SOTHO = {"name": "Southern Sotho", "iso639_1": "st", "iso639_2": ["sot"]} + SPANISH = {"name": "Spanish", "iso639_1": "es", "iso639_2": ["spa"]} + SUNDANESE = {"name": "Sundanese", "iso639_1": "su", "iso639_2": ["sun"]} + SWAHILI = {"name": "Swahili", "iso639_1": "sw", "iso639_2": ["swa"]} + SWATI = {"name": "Swati", "iso639_1": "ss", "iso639_2": ["ssw"]} + SWEDISH = {"name": "Swedish", "iso639_1": "sv", "iso639_2": ["swe"]} + TAGALOG = {"name": "Tagalog", "iso639_1": "tl", "iso639_2": ["tgl"]} + TAHITIAN = {"name": "Tahitian", "iso639_1": "ty", "iso639_2": ["tah"]} + TAJIK = {"name": "Tajik", "iso639_1": "tg", "iso639_2": ["tgk"]} + TAMIL = {"name": "Tamil", "iso639_1": "ta", "iso639_2": ["tam"]} + TATAR = {"name": "Tatar", "iso639_1": "tt", "iso639_2": ["tat"]} + TELUGU = {"name": "Telugu", "iso639_1": "te", "iso639_2": ["tel"]} + THAI = {"name": "Thai", "iso639_1": "th", "iso639_2": ["tha"]} + TIBETAN = {"name": "Tibetan", "iso639_1": "bo", "iso639_2": ["bod", "tib"]} + TIGRINYA = {"name": "Tigrinya", "iso639_1": "ti", "iso639_2": ["tir"]} + TONGA = {"name": "Tonga", "iso639_1": "to", "iso639_2": ["ton"]} + TSONGA = {"name": 
"Tsonga", "iso639_1": "ts", "iso639_2": ["tso"]} + TSWANA = {"name": "Tswana", "iso639_1": "tn", "iso639_2": ["tsn"]} + TURKISH = {"name": "Turkish", "iso639_1": "tr", "iso639_2": ["tur"]} + TURKMEN = {"name": "Turkmen", "iso639_1": "tk", "iso639_2": ["tuk"]} + TWI = {"name": "Twi", "iso639_1": "tw", "iso639_2": ["twi"]} + UIGHUR = {"name": "Uighur", "iso639_1": "ug", "iso639_2": ["uig"]} + UKRAINIAN = {"name": "Ukrainian", "iso639_1": "uk", "iso639_2": ["ukr"]} + URDU = {"name": "Urdu", "iso639_1": "ur", "iso639_2": ["urd"]} + UZBEK = {"name": "Uzbek", "iso639_1": "uz", "iso639_2": ["uzb"]} + VENDA = {"name": "Venda", "iso639_1": "ve", "iso639_2": ["ven"]} + VIETNAMESE = {"name": "Vietnamese", "iso639_1": "vi", "iso639_2": ["vie"]} + VOLAPUK = {"name": "Volapük", "iso639_1": "vo", "iso639_2": ["vol"]} + WALLOON = {"name": "Walloon", "iso639_1": "wa", "iso639_2": ["wln"]} + WELSH = {"name": "Welsh", "iso639_1": "cy", "iso639_2": ["cym", "wel"]} + WESTERN_FRISIAN = {"name": "Western Frisian", "iso639_1": "fy", "iso639_2": ["fry"]} + WOLOF = {"name": "Wolof", "iso639_1": "wo", "iso639_2": ["wol"]} + XHOSA = {"name": "Xhosa", "iso639_1": "xh", "iso639_2": ["xho"]} + YIDDISH = {"name": "Yiddish", "iso639_1": "yi", "iso639_2": ["yid"]} + YORUBA = {"name": "Yoruba", "iso639_1": "yo", "iso639_2": ["yor"]} + ZHUANG = {"name": "Zhuang", "iso639_1": "za", "iso639_2": ["zha"]} + ZULU = {"name": "Zulu", "iso639_1": "zu", "iso639_2": ["zul"]} - UNDEFINED = {"name": "undefined", "iso639_1": "xx", "iso639_2": ["und"]} + FILIPINO = {"name": "Filipino", "iso639_1": "tl", "iso639_2": ["fil"]} + + UNDEFINED = {"name": "undefined", "iso639_1": "xx", "iso639_2": ["und"]} @staticmethod @@ -88,24 +199,22 @@ class IsoLanguage(Enum): closestMatches = difflib.get_close_matches(label, [l.value["name"] for l in IsoLanguage], n=1) if closestMatches: - foundLangs = [l for l in IsoLanguage if l.value['name'] == closestMatches[0]] + foundLangs = [l for l in IsoLanguage if l.value["name"] == 
closestMatches[0]] return foundLangs[0] if foundLangs else IsoLanguage.UNDEFINED else: return IsoLanguage.UNDEFINED @staticmethod def findThreeLetter(theeLetter : str): - foundLangs = [l for l in IsoLanguage if str(theeLetter) in l.value['iso639_2']] + foundLangs = [l for l in IsoLanguage if str(theeLetter) in l.value["iso639_2"]] return foundLangs[0] if foundLangs else IsoLanguage.UNDEFINED def label(self): - return str(self.value['name']) + return str(self.value["name"]) def twoLetter(self): - return str(self.value['iso639_1']) + return str(self.value["iso639_1"]) def threeLetter(self): - return str(self.value['iso639_2'][0]) - - + return str(self.value["iso639_2"][0]) diff --git a/src/ffx/logging_utils.py b/src/ffx/logging_utils.py new file mode 100644 index 0000000..1e27601 --- /dev/null +++ b/src/ffx/logging_utils.py @@ -0,0 +1,68 @@ +import logging +import os + + +FFX_LOGGER_NAME = "FFX" +CONSOLE_HANDLER_NAME = "ffx-console" +FILE_HANDLER_NAME = "ffx-file" + + +def get_ffx_logger(name: str = FFX_LOGGER_NAME) -> logging.Logger: + logger = logging.getLogger(name) + logger.setLevel(logging.DEBUG) + + if not logger.handlers: + logger.addHandler(logging.NullHandler()) + + return logger + + +def configure_ffx_logger( + log_file_path: str, + file_level: int, + console_level: int, + name: str = FFX_LOGGER_NAME, +) -> logging.Logger: + logger = get_ffx_logger(name) + logger.propagate = False + + for handler in list(logger.handlers): + if isinstance(handler, logging.NullHandler): + logger.removeHandler(handler) + + console_handler = next( + (handler for handler in logger.handlers if handler.get_name() == CONSOLE_HANDLER_NAME), + None, + ) + if console_handler is None: + console_handler = logging.StreamHandler() + console_handler.set_name(CONSOLE_HANDLER_NAME) + logger.addHandler(console_handler) + + console_handler.setLevel(console_level) + console_handler.setFormatter(logging.Formatter("%(message)s")) + + normalized_log_path = os.path.abspath(log_file_path) + 
file_handler = next( + (handler for handler in logger.handlers if handler.get_name() == FILE_HANDLER_NAME), + None, + ) + if ( + file_handler is not None + and os.path.abspath(file_handler.baseFilename) != normalized_log_path + ): + logger.removeHandler(file_handler) + file_handler.close() + file_handler = None + + if file_handler is None: + file_handler = logging.FileHandler(normalized_log_path) + file_handler.set_name(FILE_HANDLER_NAME) + logger.addHandler(file_handler) + + file_handler.setLevel(file_level) + file_handler.setFormatter( + logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") + ) + + return logger diff --git a/src/ffx/media_controller.py b/src/ffx/media_controller.py index bb2d0de..d46285d 100644 --- a/src/ffx/media_controller.py +++ b/src/ffx/media_controller.py @@ -25,10 +25,9 @@ class MediaController(): pid = int(patternId) s = self.Session() - q = s.query(Pattern).filter(Pattern.id == pid) + pattern = s.query(Pattern).filter(Pattern.id == pid).first() - if q.count(): - pattern = q.first + if pattern is not None: for mediaTagKey, mediaTagValue in mediaDescriptor.getTags(): self.__tac.updateMediaTag(pid, mediaTagKey, mediaTagValue) diff --git a/src/ffx/media_descriptor.py b/src/ffx/media_descriptor.py index 5ff74a2..c0db35d 100644 --- a/src/ffx/media_descriptor.py +++ b/src/ffx/media_descriptor.py @@ -1,4 +1,4 @@ -import os, re, click, logging +import os, re, click from typing import List, Self @@ -9,6 +9,7 @@ from ffx.track_disposition import TrackDisposition from ffx.track_codec import TrackCodec from ffx.track_descriptor import TrackDescriptor +from ffx.logging_utils import get_ffx_logger class MediaDescriptor: @@ -46,8 +47,7 @@ class MediaDescriptor: self.__logger = self.__context['logger'] else: self.__context = {} - self.__logger = logging.getLogger('FFX') - self.__logger.addHandler(logging.NullHandler()) + self.__logger = get_ffx_logger() if MediaDescriptor.TAGS_KEY in kwargs.keys(): if 
type(kwargs[MediaDescriptor.TAGS_KEY]) is not dict: @@ -207,7 +207,7 @@ class MediaDescriptor: def rearrangeTrackDescriptors(self, newOrder: List[int]): if len(newOrder) != len(self.__trackDescriptors): raise ValueError('Length of list with reordered indices does not match number of track descriptors') - reorderedTrackDescriptors = {} + reorderedTrackDescriptors = [] for oldIndex in newOrder: reorderedTrackDescriptors.append(self.__trackDescriptors[oldIndex]) self.__trackDescriptors = reorderedTrackDescriptors @@ -362,6 +362,14 @@ class MediaDescriptor: inputMappingTokens = [] sortedTrackDescriptors = sorted(self.__trackDescriptors, key=lambda d: d.getIndex()) + sourceTrackDescriptorsByIndex = { + td.getIndex(): td + for td in ( + sourceMediaDescriptor.getTrackDescriptors() + if sourceMediaDescriptor is not None + else sortedTrackDescriptors + ) + } # raise click.ClickException(' '.join([f"\nindex={td.getIndex()} subIndex={td.getSubIndex()} srcIndex={td.getSourceIndex()} type={td.getType().label()}" for td in self.__trackDescriptors])) @@ -373,8 +381,12 @@ class MediaDescriptor: #HINT: Attached thumbnails are not supported by .webm container format if td.getCodec() != TrackCodec.PNG: - stdi = sortedTrackDescriptors[td.getSourceIndex()].getIndex() - stdsi = sortedTrackDescriptors[td.getSourceIndex()].getSubIndex() + sourceTrackDescriptor = sourceTrackDescriptorsByIndex.get(td.getSourceIndex()) + if sourceTrackDescriptor is None: + raise ValueError(f"No source track descriptor found for source index {td.getSourceIndex()}") + + stdi = sourceTrackDescriptor.getIndex() + stdsi = sourceTrackDescriptor.getSubIndex() trackType = td.getType() trackCodec = td.getCodec() @@ -507,7 +519,10 @@ class MediaDescriptor: d for d in availableFileSubtitleDescriptors if ((season == -1 and episode == -1) - or (d["season"] == int(season) and d["episode"] == int(episode))) + or ( + d.get("season") == int(season) + and d.get("episode") == int(episode) + )) ], key=lambda d: d["index"], ) @@ 
-522,10 +537,14 @@ class MediaDescriptor: if matchingSubtitleTrackDescriptor: # click.echo(f"Found matching subtitle file {msfd["path"]}\n") self.__logger.debug(f"importSubtitles(): Found matching subtitle file {msfd['path']}") - matchingSubtitleTrackDescriptor[0].setExternalSourceFilePath(msfd["path"]) + matchingTrack = matchingSubtitleTrackDescriptor[0] + matchingTrack.setExternalSourceFilePath(msfd["path"]) - # TODO: Check if useful - # matchingSubtitleTrackDescriptor[0].setDispositionSet(msfd["disposition_set"]) + # Prefer metadata coming from the external single-track source when + # it is provided explicitly by the filename contract. + matchingTrack.getTags()["language"] = msfd["language"] + if msfd["disposition_set"]: + matchingTrack.setDispositionSet(msfd["disposition_set"]) def getConfiguration(self, label: str = ''): diff --git a/src/ffx/media_descriptor_change_set.py b/src/ffx/media_descriptor_change_set.py index 46ea26b..093542b 100644 --- a/src/ffx/media_descriptor_change_set.py +++ b/src/ffx/media_descriptor_change_set.py @@ -1,5 +1,6 @@ import click +from ffx.iso_language import IsoLanguage from ffx.media_descriptor import MediaDescriptor from ffx.track_descriptor import TrackDescriptor @@ -42,6 +43,14 @@ class MediaDescriptorChangeSet(): self.__targetTrackDescriptors = targetMediaDescriptor.getTrackDescriptors() if targetMediaDescriptor is not None else [] self.__sourceTrackDescriptors = sourceMediaDescriptor.getTrackDescriptors() if sourceMediaDescriptor is not None else [] + self.__targetTrackDescriptorsByIndex = { + trackDescriptor.getIndex(): trackDescriptor + for trackDescriptor in self.__targetTrackDescriptors + } + self.__sourceTrackDescriptorsByIndex = { + trackDescriptor.getIndex(): trackDescriptor + for trackDescriptor in self.__sourceTrackDescriptors + } targetMediaTags = targetMediaDescriptor.getTags() if targetMediaDescriptor is not None else {} sourceMediaTags = sourceMediaDescriptor.getTags() if sourceMediaDescriptor is not None else 
{} @@ -70,51 +79,34 @@ class MediaDescriptorChangeSet(): self.__numSourceTracks = len(self.__sourceTrackDescriptors) - maxNumOfTracks = max(self.__numSourceTracks, self.__numTargetTracks) - trackCompareResult = {} + for targetTrackDescriptor in self.__targetTrackDescriptors: + sourceTrackDescriptor = self.__sourceTrackDescriptorsByIndex.get( + targetTrackDescriptor.getSourceIndex() + ) - for trackIndex in range(maxNumOfTracks): - - correspondingSourceTrackDescriptors = [st for st in self.__sourceTrackDescriptors if st.getIndex() == trackIndex] - correspondingTargetTrackDescriptors = [tt for tt in self.__targetTrackDescriptors if tt.getIndex() == trackIndex] - - # Track present in target but not in source - if (not correspondingSourceTrackDescriptors - and correspondingTargetTrackDescriptors): - + if sourceTrackDescriptor is None: if DIFF_ADDED_KEY not in trackCompareResult.keys(): trackCompareResult[DIFF_ADDED_KEY] = {} - - trackCompareResult[DIFF_ADDED_KEY][trackIndex] = correspondingTargetTrackDescriptors[0] + trackCompareResult[DIFF_ADDED_KEY][targetTrackDescriptor.getIndex()] = targetTrackDescriptor continue - # Track present in target but not in source - if (correspondingSourceTrackDescriptors - and not correspondingTargetTrackDescriptors): + trackDiff = self.compareTracks(targetTrackDescriptor, sourceTrackDescriptor) + if trackDiff: + if DIFF_CHANGED_KEY not in trackCompareResult.keys(): + trackCompareResult[DIFF_CHANGED_KEY] = {} + trackCompareResult[DIFF_CHANGED_KEY][targetTrackDescriptor.getIndex()] = trackDiff + targetSourceIndices = { + targetTrackDescriptor.getSourceIndex() + for targetTrackDescriptor in self.__targetTrackDescriptors + } + for sourceTrackDescriptor in self.__sourceTrackDescriptors: + if sourceTrackDescriptor.getIndex() not in targetSourceIndices: if DIFF_REMOVED_KEY not in trackCompareResult.keys(): trackCompareResult[DIFF_REMOVED_KEY] = {} - - trackCompareResult[DIFF_REMOVED_KEY][trackIndex] = correspondingSourceTrackDescriptors[0] - 
continue - - if (correspondingSourceTrackDescriptors - and correspondingTargetTrackDescriptors): - - # if correspondingTargetTrackDescriptors[0].getIndex() == 3: - # raise click.ClickException(f"{correspondingSourceTrackDescriptors[0].getDispositionSet()} {correspondingTargetTrackDescriptors[0].getDispositionSet()}") - - - trackDiff = self.compareTracks(correspondingTargetTrackDescriptors[0], - correspondingSourceTrackDescriptors[0]) - - if trackDiff: - if DIFF_CHANGED_KEY not in trackCompareResult.keys(): - trackCompareResult[DIFF_CHANGED_KEY] = {} - - trackCompareResult[DIFF_CHANGED_KEY][trackIndex] = trackDiff + trackCompareResult[DIFF_REMOVED_KEY][sourceTrackDescriptor.getIndex()] = sourceTrackDescriptor if trackCompareResult: @@ -126,7 +118,11 @@ class MediaDescriptorChangeSet(): sourceTrackDescriptor: TrackDescriptor = None): sourceTrackTags = sourceTrackDescriptor.getTags() if sourceTrackDescriptor is not None else {} - targetTrackTags = targetTrackDescriptor.getTags() if targetTrackDescriptor is not None else {} + targetTrackTags = ( + self.normalizeTrackTags(targetTrackDescriptor.getTags()) + if targetTrackDescriptor is not None + else {} + ) trackCompareResult = {} @@ -151,6 +147,25 @@ class MediaDescriptorChangeSet(): return trackCompareResult + def normalizeTrackTagValue(self, tagKey, tagValue): + if tagKey != "language": + return tagValue + + if isinstance(tagValue, IsoLanguage): + return tagValue.threeLetter() + + trackLanguage = IsoLanguage.findThreeLetter(str(tagValue)) + if trackLanguage != IsoLanguage.UNDEFINED: + return trackLanguage.threeLetter() + + return tagValue + + def normalizeTrackTags(self, trackTags: dict): + return { + tagKey: self.normalizeTrackTagValue(tagKey, tagValue) + for tagKey, tagValue in trackTags.items() + } + def generateDispositionTokens(self): """ @@ -252,7 +267,7 @@ class MediaDescriptorChangeSet(): addedTracks: dict = self.__changeSetObj[MediaDescriptorChangeSet.TRACKS_KEY][DIFF_ADDED_KEY] trackDescriptor: 
TrackDescriptor for trackDescriptor in addedTracks.values(): - for tagKey, tagValue in trackDescriptor.getTags().items(): + for tagKey, tagValue in self.normalizeTrackTags(trackDescriptor.getTags()).items(): if not tagKey in self.__removeTrackKeys: metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}" + f":{trackDescriptor.getSubIndex()}", @@ -274,29 +289,58 @@ class MediaDescriptorChangeSet(): outputTrackTags = addedTrackTags | changedTrackTags - trackDescriptor = self.__targetTrackDescriptors[trackIndex] + trackDescriptor = self.__targetTrackDescriptorsByIndex[trackIndex] - for tagKey, tagValue in outputTrackTags.items(): + for tagKey, tagValue in self.normalizeTrackTags(outputTrackTags).items(): metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}" + f":{trackDescriptor.getSubIndex()}", f"{tagKey}={tagValue}"] - for removeKey in removedTrackTags.keys(): - metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}" - + f":{trackDescriptor.getSubIndex()}", - f"{removeKey}="] - - #HINT: In case of loading a track from an external file - # no tags from source are present for the track so - # the unchanged tracks are passed to the output file as well if trackDescriptor.getExternalSourceFilePath(): - for tagKey, tagValue in unchangedTrackTags.items(): + # When a single-track external file substitutes the + # media payload, keep metadata from the regular + # source track unless the external/target side + # overrides it explicitly. 
+ preservedTrackTags = ( + { + tagKey: tagValue + for tagKey, tagValue in removedTrackTags.items() + if tagKey not in self.__removeTrackKeys + } + | unchangedTrackTags + ) + for tagKey, tagValue in self.normalizeTrackTags(preservedTrackTags).items(): metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}" + f":{trackDescriptor.getSubIndex()}", f"{tagKey}={tagValue}"] + else: + for removeKey in removedTrackTags.keys(): + metadataTokens += [f"-metadata:s:{trackDescriptor.getType().indicator()}" + + f":{trackDescriptor.getSubIndex()}", + f"{removeKey}="] + + for tagKey, tagValue in self.__context.get('encoding_metadata_tags', {}).items(): + metadataTokens += [f"-metadata:g", f"{tagKey}={tagValue}"] + + metadataTokens += self.generateConfiguredRemovalMetadataTokens() return metadataTokens def getChangeSetObj(self): return self.__changeSetObj + + def generateConfiguredRemovalMetadataTokens(self): + metadataTokens = [] + + for removeKey in self.__removeGlobalKeys: + metadataTokens += ["-metadata:g", f"{removeKey}="] + + for trackDescriptor in self.__targetTrackDescriptors: + for removeKey in self.__removeTrackKeys: + metadataTokens += [ + f"-metadata:s:{trackDescriptor.getType().indicator()}:{trackDescriptor.getSubIndex()}", + f"{removeKey}=", + ] + + return metadataTokens diff --git a/src/ffx/media_details_screen.py b/src/ffx/media_details_screen.py index 9afbd71..5a3c3c3 100644 --- a/src/ffx/media_details_screen.py +++ b/src/ffx/media_details_screen.py @@ -6,13 +6,9 @@ from textual.containers import Grid from ffx.audio_layout import AudioLayout -from .pattern_controller import PatternController -from .show_controller import ShowController -from .track_controller import TrackController -from .tag_controller import TagController - from .show_details_screen import ShowDetailsScreen from .pattern_details_screen import PatternDetailsScreen +from .screen_support import build_screen_bootstrap, build_screen_controllers from ffx.track_type import TrackType 
from ffx.track_codec import TrackCodec @@ -135,29 +131,23 @@ class MediaDetailsScreen(Screen): def __init__(self): super().__init__() - self.context = self.app.getContext() - self.Session = self.context['database']['session'] # convenience + bootstrap = build_screen_bootstrap(self.app.getContext()) + self.context = bootstrap.context + self.__removeGlobalKeys = bootstrap.remove_global_keys + self.__ignoreGlobalKeys = bootstrap.ignore_global_keys - self.__configurationData = self.context['config'].getData() - - metadataConfiguration = self.__configurationData['metadata'] if 'metadata' in self.__configurationData.keys() else {} - - self.__signatureTags = metadataConfiguration['signature'] if 'signature' in metadataConfiguration.keys() else {} - self.__removeGlobalKeys = metadataConfiguration['remove'] if 'remove' in metadataConfiguration.keys() else [] - self.__ignoreGlobalKeys = metadataConfiguration['ignore'] if 'ignore' in metadataConfiguration.keys() else [] - self.__removeTrackKeys = (metadataConfiguration['streams']['remove'] - if 'streams' in metadataConfiguration.keys() - and 'remove' in metadataConfiguration['streams'].keys() else []) - self.__ignoreTrackKeys = (metadataConfiguration['streams']['ignore'] - if 'streams' in metadataConfiguration.keys() - and 'ignore' in metadataConfiguration['streams'].keys() else []) - - - self.__pc = PatternController(context = self.context) - self.__sc = ShowController(context = self.context) - self.__tc = TrackController(context = self.context) - self.__tac = TagController(context = self.context) + controllers = build_screen_controllers( + self.context, + pattern=True, + show=True, + track=True, + tag=True, + ) + self.__pc = controllers['pattern'] + self.__sc = controllers['show'] + self.__tc = controllers['track'] + self.__tac = controllers['tag'] if not 'command' in self.context.keys() or self.context['command'] != 'inspect': raise click.ClickException(f"MediaDetailsScreen.__init__(): Can only perform command 'inspect'") 
@@ -569,6 +559,7 @@ class MediaDetailsScreen(Screen): try: kwargs = {} + kwargs[ShowDescriptor.CONTEXT_KEY] = self.context kwargs[ShowDescriptor.ID_KEY] = int(selected_row_data[0]) kwargs[ShowDescriptor.NAME_KEY] = str(selected_row_data[1]) kwargs[ShowDescriptor.YEAR_KEY] = int(selected_row_data[2]) @@ -602,20 +593,21 @@ class MediaDetailsScreen(Screen): patternObj = self.getPatternObjFromInput() if patternObj: - patternId = self.__pc.addPattern(patternObj) + mediaTags = {} + for tagKey, tagValue in self.__sourceMediaDescriptor.getTags().items(): + + # Filter tags that make no sense to preserve + if tagKey not in self.__ignoreGlobalKeys and not tagKey in self.__removeGlobalKeys: + mediaTags[tagKey] = tagValue + + patternId = self.__pc.savePatternSchema( + patternObj, + trackDescriptors=self.__sourceMediaDescriptor.getTrackDescriptors(), + mediaTags=mediaTags, + ) if patternId: self.highlightPattern(False) - for tagKey, tagValue in self.__sourceMediaDescriptor.getTags().items(): - - # Filter tags that make no sense to preserve - if tagKey not in self.__ignoreGlobalKeys and not tagKey in self.__removeGlobalKeys: - self.__tac.updateMediaTag(patternId, tagKey, tagValue) - - # for trackDescriptor in self.__sourceMediaDescriptor.getAllTrackDescriptors(): - for trackDescriptor in self.__sourceMediaDescriptor.getTrackDescriptors(): - self.__tc.addTrack(trackDescriptor, patternId = patternId) - def action_new_pattern(self): """Adding new patterns @@ -754,4 +746,3 @@ class MediaDetailsScreen(Screen): def handle_edit_pattern(self, screenResult): self.query_one("#pattern_input", Input).value = screenResult['pattern'] self.updateDifferences() - diff --git a/src/ffx/model/__init__.py b/src/ffx/model/__init__.py index e69de29..7f349ec 100644 --- a/src/ffx/model/__init__.py +++ b/src/ffx/model/__init__.py @@ -0,0 +1,20 @@ +"""Load ORM model modules so SQLAlchemy relationship strings can resolve.""" + +from .show import Base, Show +from .pattern import Pattern +from .track import 
Track +from .track_tag import TrackTag +from .media_tag import MediaTag +from .shifted_season import ShiftedSeason +from .property import Property + +__all__ = [ + 'Base', + 'Show', + 'Pattern', + 'Track', + 'TrackTag', + 'MediaTag', + 'ShiftedSeason', + 'Property', +] diff --git a/src/ffx/model/pattern.py b/src/ffx/model/pattern.py index 9fc8595..8d810d3 100644 --- a/src/ffx/model/pattern.py +++ b/src/ffx/model/pattern.py @@ -1,6 +1,6 @@ import click -from sqlalchemy import Column, Integer, String, Text, ForeignKey +from sqlalchemy import Column, Integer, String, Text, ForeignKey, UniqueConstraint from sqlalchemy.orm import relationship from .show import Base, Show @@ -12,6 +12,9 @@ from ffx.show_descriptor import ShowDescriptor class Pattern(Base): __tablename__ = 'patterns' + __table_args__ = ( + UniqueConstraint('show_id', 'pattern', name='uq_patterns_show_id_pattern'), + ) # v1.x id = Column(Integer, primary_key=True) diff --git a/src/ffx/pattern_controller.py b/src/ffx/pattern_controller.py index 089c30c..b0886ee 100644 --- a/src/ffx/pattern_controller.py +++ b/src/ffx/pattern_controller.py @@ -1,161 +1,411 @@ -import click, re +import re +import click + +from ffx.model.media_tag import MediaTag from ffx.model.pattern import Pattern +from ffx.model.track import Track +from ffx.model.track_tag import TrackTag +from ffx.track_descriptor import TrackDescriptor +from ffx.track_disposition import TrackDisposition -class PatternController(): - +class DuplicatePatternMatchError(click.ClickException): + pass + + +class InvalidPatternSchemaError(click.ClickException): + pass + + +class PatternController: + _compiled_regex_cache: dict[str, re.Pattern] = {} + def __init__(self, context): - + self.context = context - self.Session = self.context['database']['session'] # convenience + self.Session = self.context["database"]["session"] + self.__configurationData = self.context["config"].getData() - def addPattern(self, patternObj): - """Adds pattern to database from obj - - 
Returns database id or 0 if pattern already exists""" + metadataConfiguration = ( + self.__configurationData["metadata"] + if "metadata" in self.__configurationData.keys() + else {} + ) + + self.__removeTrackKeys = ( + metadataConfiguration["streams"]["remove"] + if "streams" in metadataConfiguration.keys() + and "remove" in metadataConfiguration["streams"].keys() + else [] + ) + self.__ignoreTrackKeys = ( + metadataConfiguration["streams"]["ignore"] + if "streams" in metadataConfiguration.keys() + and "ignore" in metadataConfiguration["streams"].keys() + else [] + ) + + @classmethod + def _clear_regex_cache(cls): + cls._compiled_regex_cache.clear() + + @classmethod + def _compile_pattern_expression(cls, pattern_id: int, expression: str) -> re.Pattern: + expression_text = str(expression) + compiled = cls._compiled_regex_cache.get(expression_text) + if compiled is None: + try: + compiled = re.compile(expression_text) + except re.error as ex: + raise click.ClickException( + f"Pattern #{pattern_id} contains an invalid regex {expression_text!r}: {ex}" + ) + cls._compiled_regex_cache[expression_text] = compiled + return compiled + + def _coerce_pattern_fields(self, patternObj): + return { + "show_id": int(patternObj["show_id"]), + "pattern": str(patternObj["pattern"]), + "quality": int(patternObj.get("quality", 0) or 0), + "notes": str(patternObj.get("notes", "")), + } + + def _coerce_media_tags(self, mediaTags): + return { + str(tagKey): str(tagValue) + for tagKey, tagValue in (mediaTags or {}).items() + } + + def _normalize_track_descriptors(self, trackDescriptors): + if trackDescriptors is None: + raise InvalidPatternSchemaError( + "Patterns must define at least one track before they can be stored." 
+ ) + + normalized_descriptors = [] + for trackDescriptor in trackDescriptors: + if type(trackDescriptor) is not TrackDescriptor: + raise TypeError( + "PatternController: All track descriptors are required to be of type TrackDescriptor" + ) + normalized_descriptors.append(trackDescriptor) + + if not normalized_descriptors: + raise InvalidPatternSchemaError( + "Patterns must define at least one track before they can be stored." + ) + + normalized_descriptors = sorted( + normalized_descriptors, key=lambda descriptor: descriptor.getIndex() + ) + + index_set = {descriptor.getIndex() for descriptor in normalized_descriptors} + expected_indexes = set(range(len(normalized_descriptors))) + if index_set != expected_indexes: + raise click.ClickException( + "Pattern tracks must use a contiguous zero-based index order." + ) + + return normalized_descriptors + + def _ensure_unique_pattern_definition( + self, + session, + show_id: int, + pattern_expression: str, + exclude_pattern_id: int | None = None, + ): + query = session.query(Pattern).filter( + Pattern.show_id == show_id, + Pattern.pattern == pattern_expression, + ) + if exclude_pattern_id is not None: + query = query.filter(Pattern.id != int(exclude_pattern_id)) + + existing_pattern = query.first() + if existing_pattern is not None: + raise click.ClickException( + f"Pattern {pattern_expression!r} already exists for show #{show_id}." 
+ ) + + def _build_track_row(self, trackDescriptor: TrackDescriptor) -> Track: + track = Track( + track_type=int(trackDescriptor.getType().index()), + codec_name=str(trackDescriptor.getCodec().identifier()), + index=int(trackDescriptor.getIndex()), + source_index=int(trackDescriptor.getSourceIndex()), + disposition_flags=int( + TrackDisposition.toFlags(trackDescriptor.getDispositionSet()) + ), + audio_layout=trackDescriptor.getAudioLayout().index(), + ) + + for tagKey, tagValue in trackDescriptor.getTags().items(): + if tagKey in self.__ignoreTrackKeys or tagKey in self.__removeTrackKeys: + continue + track.track_tags.append(TrackTag(key=str(tagKey), value=str(tagValue))) + + return track + + def _replace_pattern_schema( + self, + session, + pattern: Pattern, + mediaTags: dict[str, str], + trackDescriptors: list[TrackDescriptor], + ): + for mediaTag in list(pattern.media_tags): + session.delete(mediaTag) + for track in list(pattern.tracks): + session.delete(track) + session.flush() + + for tagKey, tagValue in mediaTags.items(): + pattern.media_tags.append(MediaTag(key=str(tagKey), value=str(tagValue))) + + for trackDescriptor in trackDescriptors: + pattern.tracks.append(self._build_track_row(trackDescriptor)) + + def _validate_persisted_pattern(self, pattern: Pattern): + if not pattern.tracks: + raise InvalidPatternSchemaError( + f"Pattern #{pattern.getId()} ({pattern.getPattern()!r}) is invalid because it has no tracks." 
+ ) + + def savePatternSchema( + self, + patternObj, + trackDescriptors, + mediaTags=None, + patternId: int | None = None, + ) -> int: + fields = self._coerce_pattern_fields(patternObj) + normalized_tracks = self._normalize_track_descriptors(trackDescriptors) + normalized_tags = self._coerce_media_tags(mediaTags) + session = None try: + session = self.Session() + self._ensure_unique_pattern_definition( + session, + fields["show_id"], + fields["pattern"], + exclude_pattern_id=patternId, + ) - s = self.Session() - q = s.query(Pattern).filter(Pattern.show_id == int(patternObj['show_id']), - Pattern.pattern == str(patternObj['pattern'])) - - if not q.count(): - pattern = Pattern(show_id = int(patternObj['show_id']), - pattern = str(patternObj['pattern'])) - s.add(pattern) - s.commit() - return pattern.getId() + if patternId is None: + pattern = Pattern( + show_id=fields["show_id"], + pattern=fields["pattern"], + quality=fields["quality"], + notes=fields["notes"], + ) + session.add(pattern) + session.flush() else: - return 0 + pattern = session.query(Pattern).filter(Pattern.id == int(patternId)).first() + if pattern is None: + raise click.ClickException( + f"PatternController.savePatternSchema(): Pattern #{patternId} not found" + ) + pattern.show_id = fields["show_id"] + pattern.pattern = fields["pattern"] + pattern.quality = fields["quality"] + pattern.notes = fields["notes"] + self._replace_pattern_schema( + session, + pattern, + normalized_tags, + normalized_tracks, + ) + + session.commit() + self._clear_regex_cache() + return pattern.getId() + + except click.ClickException: + raise except Exception as ex: - raise click.ClickException(f"PatternController.addPattern(): {repr(ex)}") + raise click.ClickException( + f"PatternController.savePatternSchema(): {repr(ex)}" + ) finally: - s.close() + if session is not None: + session.close() + def addPattern(self, patternObj, trackDescriptors=None, mediaTags=None): + return self.savePatternSchema( + patternObj, + 
trackDescriptors=trackDescriptors, + mediaTags=mediaTags, + ) def updatePattern(self, patternId, patternObj): + fields = self._coerce_pattern_fields(patternObj) + session = None + try: - s = self.Session() - q = s.query(Pattern).filter(Pattern.id == int(patternId)) + session = self.Session() + pattern = session.query(Pattern).filter(Pattern.id == int(patternId)).first() - if q.count(): + if pattern is not None: + self._ensure_unique_pattern_definition( + session, + fields["show_id"], + fields["pattern"], + exclude_pattern_id=patternId, + ) + self._validate_persisted_pattern(pattern) - pattern: Pattern = q.first() + pattern.show_id = fields["show_id"] + pattern.pattern = fields["pattern"] + pattern.quality = fields["quality"] + pattern.notes = fields["notes"] - pattern.show_id = int(patternObj['show_id']) - pattern.pattern = str(patternObj['pattern']) - pattern.quality = str(patternObj['quality']) - pattern.notes = str(patternObj['notes']) - - s.commit() + session.commit() + self._clear_regex_cache() return True - else: - return False + return False + except click.ClickException: + raise except Exception as ex: raise click.ClickException(f"PatternController.updatePattern(): {repr(ex)}") finally: - s.close() - - + if session is not None: + session.close() def findPattern(self, patternObj): - - try: - s = self.Session() - q = s.query(Pattern).filter(Pattern.show_id == int(patternObj['show_id']), Pattern.pattern == str(patternObj['pattern'])) + session = None - if q.count(): - pattern = q.first() + try: + session = self.Session() + pattern = ( + session.query(Pattern) + .filter( + Pattern.show_id == int(patternObj["show_id"]), + Pattern.pattern == str(patternObj["pattern"]), + ) + .first() + ) + + if pattern is not None: return int(pattern.id) - else: - return None + return None except Exception as ex: raise click.ClickException(f"PatternController.findPattern(): {repr(ex)}") finally: - s.close() + if session is not None: + session.close() + def 
getPatternsForShow(self, showId: int) -> list[Pattern]: - def getPattern(self, patternId : int): + if type(showId) is not int: + raise ValueError( + "PatternController.getPatternsForShow(): Argument showId is required to be of type int" + ) + + session = None + try: + session = self.Session() + return ( + session.query(Pattern) + .filter(Pattern.show_id == int(showId)) + .order_by(Pattern.id) + .all() + ) + + except Exception as ex: + raise click.ClickException(f"PatternController.getPatternsForShow(): {repr(ex)}") + finally: + if session is not None: + session.close() + + def getPattern(self, patternId: int): if type(patternId) is not int: - raise ValueError(f"PatternController.getPattern(): Argument patternId is required to be of type int") + raise ValueError( + "PatternController.getPattern(): Argument patternId is required to be of type int" + ) + session = None try: - s = self.Session() - q = s.query(Pattern).filter(Pattern.id == int(patternId)) - - return q.first() if q.count() else None + session = self.Session() + return session.query(Pattern).filter(Pattern.id == int(patternId)).first() except Exception as ex: raise click.ClickException(f"PatternController.getPattern(): {repr(ex)}") finally: - s.close() - + if session is not None: + session.close() def deletePattern(self, patternId): + session = None try: - s = self.Session() - q = s.query(Pattern).filter(Pattern.id == int(patternId)) + session = self.Session() + pattern = session.query(Pattern).filter(Pattern.id == int(patternId)).first() - if q.count(): - - #DAFUQ: https://stackoverflow.com/a/19245058 - # q.delete() - pattern = q.first() - s.delete(pattern) - - s.commit() + if pattern is not None: + session.delete(pattern) + session.commit() + self._clear_regex_cache() return True return False except Exception as ex: raise click.ClickException(f"PatternController.deletePattern(): {repr(ex)}") finally: - s.close() + if session is not None: + session.close() - - def matchFilename(self, filename : str) -> 
dict: - """Returns dict {'match': <a regex match obj>, 'pattern': <ffx pattern obj>} or empty dict of no pattern was found""" + def matchFilename(self, filename: str) -> dict: + """Return {'match': regex match, 'pattern': Pattern} or {} when unmatched.""" + session = None try: - s = self.Session() - q = s.query(Pattern) + session = self.Session() + matches = [] + query = session.query(Pattern).order_by(Pattern.show_id, Pattern.id) - matchResult = {} - - for pattern in q.all(): - patternMatch = re.search(str(pattern.pattern), str(filename)) - if patternMatch is not None: - matchResult['match'] = patternMatch - matchResult['pattern'] = pattern + for pattern in query.all(): + compiled = self._compile_pattern_expression( + pattern.getId(), + pattern.getPattern(), + ) + patternMatch = compiled.search(str(filename)) + if patternMatch is None: + continue - return matchResult - + self._validate_persisted_pattern(pattern) + matches.append({"match": patternMatch, "pattern": pattern}) + + if not matches: + return {} + + if len(matches) > 1: + duplicateDescriptions = ", ".join( + [ + f"show #{match['pattern'].getShowId()} pattern #{match['pattern'].getId()} {match['pattern'].getPattern()!r}" + for match in matches + ] + ) + raise DuplicatePatternMatchError( + f"Filename {filename!r} matched more than one pattern: {duplicateDescriptions}" + ) + + return matches[0] + + except click.ClickException: + raise except Exception as ex: raise click.ClickException(f"PatternController.matchFilename(): {repr(ex)}") finally: - s.close() - -# def getMediaDescriptor(self, context, patternId): -# -# try: -# s = self.Session() -# q = s.query(Pattern).filter(Pattern.id == int(patternId)) -# -# if q.count(): -# return q.first().getMediaDescriptor(context) -# else: -# return None -# -# except Exception as ex: -# raise click.ClickException(f"PatternController.getMediaDescriptor(): {repr(ex)}") -# finally: -# s.close() \ No newline at end of file + if session is not None: + session.close() diff 
--git a/src/ffx/pattern_details_screen.py b/src/ffx/pattern_details_screen.py index da64b7b..fe64352 100644 --- a/src/ffx/pattern_details_screen.py +++ b/src/ffx/pattern_details_screen.py @@ -6,18 +6,13 @@ from textual.widgets import Header, Footer, Static, Button, Input, DataTable, Te from textual.containers import Grid from ffx.model.pattern import Pattern -from ffx.model.track import Track - -from .pattern_controller import PatternController -from .show_controller import ShowController -from .track_controller import TrackController -from .tag_controller import TagController from .track_details_screen import TrackDetailsScreen from .track_delete_screen import TrackDeleteScreen from .tag_details_screen import TagDetailsScreen from .tag_delete_screen import TagDeleteScreen +from .screen_support import build_screen_bootstrap, build_screen_controllers from ffx.track_type import TrackType @@ -108,92 +103,88 @@ class PatternDetailsScreen(Screen): def __init__(self, patternId = None, showId = None): super().__init__() - self.context = self.app.getContext() - self.Session = self.context['database']['session'] # convenience + bootstrap = build_screen_bootstrap(self.app.getContext()) + self.context = bootstrap.context - self.__configurationData = self.context['config'].getData() + self.__removeGlobalKeys = bootstrap.remove_global_keys + self.__ignoreGlobalKeys = bootstrap.ignore_global_keys - metadataConfiguration = self.__configurationData['metadata'] if 'metadata' in self.__configurationData.keys() else {} - - self.__signatureTags = metadataConfiguration['signature'] if 'signature' in metadataConfiguration.keys() else {} - self.__removeGlobalKeys = metadataConfiguration['remove'] if 'remove' in metadataConfiguration.keys() else [] - self.__ignoreGlobalKeys = metadataConfiguration['ignore'] if 'ignore' in metadataConfiguration.keys() else [] - self.__removeTrackKeys = (metadataConfiguration['streams']['remove'] - if 'streams' in metadataConfiguration.keys() - and 'remove' 
in metadataConfiguration['streams'].keys() else []) - self.__ignoreTrackKeys = (metadataConfiguration['streams']['ignore'] - if 'streams' in metadataConfiguration.keys() - and 'ignore' in metadataConfiguration['streams'].keys() else []) - - self.__pc = PatternController(context = self.context) - self.__sc = ShowController(context = self.context) - self.__tc = TrackController(context = self.context) - self.__tac = TagController(context = self.context) + controllers = build_screen_controllers( + self.context, + pattern=True, + show=True, + track=True, + tag=True, + ) + self.__pc = controllers['pattern'] + self.__sc = controllers['show'] + self.__tc = controllers['track'] + self.__tac = controllers['tag'] self.__pattern : Pattern = self.__pc.getPattern(patternId) if patternId is not None else None self.__showDescriptor = self.__sc.getShowDescriptor(showId) if showId is not None else None - - - #TODO: per controller - def loadTracks(self, show_id): - - try: - - tracks = {} - tracks['audio'] = {} - tracks['subtitle'] = {} - - s = self.Session() - q = s.query(Pattern).filter(Pattern.show_id == int(show_id)) - - return [{'id': int(p.id), 'pattern': p.pattern} for p in q.all()] - - except Exception as ex: - raise click.ClickException(f"loadTracks(): {repr(ex)}") - finally: - s.close() + self.__draftTracks : List[TrackDescriptor] = [] + self.__draftTags : dict[str, str] = {} def updateTracks(self): self.tracksTable.clear() + tracks = self.getCurrentTrackDescriptors() + + typeCounter = {} + + td: TrackDescriptor + for td in tracks: + + if (trackType := td.getType()) != TrackType.ATTACHMENT: + + if not trackType in typeCounter.keys(): + typeCounter[trackType] = 0 + + dispoSet = td.getDispositionSet() + + trackLanguage = td.getLanguage() + audioLayout = td.getAudioLayout() + + row = (td.getIndex(), + trackType.label(), + typeCounter[trackType], + td.getCodec().label(), + audioLayout.label() if trackType == TrackType.AUDIO + and audioLayout != AudioLayout.LAYOUT_UNDEFINED else 
' ', + trackLanguage.label() if trackLanguage != IsoLanguage.UNDEFINED else ' ', + td.getTitle(), + 'Yes' if TrackDisposition.DEFAULT in dispoSet else 'No', + 'Yes' if TrackDisposition.FORCED in dispoSet else 'No', + td.getSourceIndex()) + + self.tracksTable.add_row(*map(str, row)) + + typeCounter[trackType] += 1 + + + def getCurrentTrackDescriptors(self) -> List[TrackDescriptor]: if self.__pattern is not None: + return self.__tc.findSiblingDescriptors(self.__pattern.getId()) + return list(self.__draftTracks) - tracks = self.__tc.findTracks(self.__pattern.getId()) - typeCounter = {} + def normalizeDraftTracks(self): - tr: Track - for tr in tracks: + typeCounter = {} - td : TrackDescriptor = tr.getDescriptor(self.context) + for index, trackDescriptor in enumerate(self.__draftTracks): + trackDescriptor.setIndex(index) - if (trackType := td.getType()) != TrackType.ATTACHMENT: + trackType = trackDescriptor.getType() + subIndex = typeCounter.get(trackType, 0) + trackDescriptor.setSubIndex(subIndex) + typeCounter[trackType] = subIndex + 1 - if not trackType in typeCounter.keys(): - typeCounter[trackType] = 0 - - dispoSet = td.getDispositionSet() - - trackLanguage = td.getLanguage() - audioLayout = td.getAudioLayout() - - row = (td.getIndex(), - trackType.label(), - typeCounter[trackType], - td.getCodec().label(), - audioLayout.label() if trackType == TrackType.AUDIO - and audioLayout != AudioLayout.LAYOUT_UNDEFINED else ' ', - trackLanguage.label() if trackLanguage != IsoLanguage.UNDEFINED else ' ', - td.getTitle(), - 'Yes' if TrackDisposition.DEFAULT in dispoSet else 'No', - 'Yes' if TrackDisposition.FORCED in dispoSet else 'No', - td.getSourceIndex()) - - self.tracksTable.add_row(*map(str, row)) - - typeCounter[trackType] += 1 + if trackDescriptor.getSourceIndex() < 0: + trackDescriptor.setSourceIndex(index) def swapTracks(self, trackIndex1: int, trackIndex2: int): @@ -201,6 +192,20 @@ class PatternDetailsScreen(Screen): ti1 = int(trackIndex1) ti2 = int(trackIndex2) + 
if self.__pattern is None: + numSiblings = len(self.__draftTracks) + + if ti1 < 0 or ti1 >= numSiblings: + raise ValueError(f"PatternDetailsScreen.swapTracks(): trackIndex1 ({ti1}) is out of range ({numSiblings})") + + if ti2 < 0 or ti2 >= numSiblings: + raise ValueError(f"PatternDetailsScreen.swapTracks(): trackIndex2 ({ti2}) is out of range ({numSiblings})") + + self.__draftTracks[ti1], self.__draftTracks[ti2] = self.__draftTracks[ti2], self.__draftTracks[ti1] + self.normalizeDraftTracks() + self.updateTracks() + return + siblingDescriptors: List[TrackDescriptor] = self.__tc.findSiblingDescriptors(self.__pattern.getId()) numSiblings = len(siblingDescriptors) @@ -236,21 +241,22 @@ class PatternDetailsScreen(Screen): self.tagsTable.clear() - if self.__pattern is not None: + tags = ( + self.__tac.findAllMediaTags(self.__pattern.getId()) + if self.__pattern is not None + else self.__draftTags + ) - tags = self.__tac.findAllMediaTags(self.__pattern.getId()) + for tagKey, tagValue in tags.items(): - for tagKey, tagValue in tags.items(): + textColor = None + if tagKey in self.__ignoreGlobalKeys: + textColor = 'blue' + if tagKey in self.__removeGlobalKeys: + textColor = 'red' - textColor = None - if tagKey in self.__ignoreGlobalKeys: - textColor = 'blue' - if tagKey in self.__removeGlobalKeys: - textColor = 'red' - - # if tagKey not in self.__ignoreTrackKeys: - row = (formatRichColor(tagKey, textColor), formatRichColor(tagValue, textColor)) - self.tagsTable.add_row(*map(str, row)) + row = (formatRichColor(tagKey, textColor), formatRichColor(tagValue, textColor)) + self.tagsTable.add_row(*map(str, row)) def on_mount(self): @@ -340,16 +346,9 @@ class PatternDetailsScreen(Screen): # 9 yield Static("Media Tags") - - - if self.__pattern is not None: - yield Button("Add", id="button_add_tag") - yield Button("Edit", id="button_edit_tag") - yield Button("Delete", id="button_delete_tag") - else: - yield Static(" ") - yield Static(" ") - yield Static(" ") + yield Button("Add", 
id="button_add_tag") + yield Button("Edit", id="button_edit_tag") + yield Button("Delete", id="button_delete_tag") yield Static(" ") yield Static(" ") @@ -363,16 +362,9 @@ class PatternDetailsScreen(Screen): # 12 yield Static("Streams") - - - if self.__pattern is not None: - yield Button("Add", id="button_add_track") - yield Button("Edit", id="button_edit_track") - yield Button("Delete", id="button_delete_track") - else: - yield Static(" ") - yield Static(" ") - yield Static(" ") + yield Button("Add", id="button_add_track") + yield Button("Edit", id="button_edit_track") + yield Button("Delete", id="button_delete_track") yield Static(" ") yield Button("Up", id="button_track_up") @@ -413,13 +405,8 @@ class PatternDetailsScreen(Screen): def getSelectedTrackDescriptor(self): - if not self.__pattern: - return None - try: - # Fetch the currently selected row when 'Enter' is pressed - #selected_row_index = self.table.cursor_row row_key, col_key = self.tracksTable.coordinate_to_cell_key(self.tracksTable.cursor_coordinate) if row_key is not None: @@ -428,10 +415,12 @@ class PatternDetailsScreen(Screen): trackIndex = int(selected_track_data[0]) trackSubIndex = int(selected_track_data[2]) - return self.__tc.getTrack(self.__pattern.getId(), trackIndex).getDescriptor(self.context, subIndex=trackSubIndex) + for trackDescriptor in self.getCurrentTrackDescriptors(): + if (trackDescriptor.getIndex() == trackIndex + and trackDescriptor.getSubIndex() == trackSubIndex): + return trackDescriptor - else: - return None + return None except CellDoesNotExist: return None @@ -482,7 +471,11 @@ class PatternDetailsScreen(Screen): self.app.pop_screen() else: - patternId = self.__pc.addPattern(patternDescriptor) + patternId = self.__pc.savePatternSchema( + patternDescriptor, + trackDescriptors=self.__draftTracks, + mediaTags=self.__draftTags, + ) if patternId: self.dismiss(patternDescriptor) else: @@ -494,33 +487,52 @@ class PatternDetailsScreen(Screen): self.app.pop_screen() - # Save pattern 
when just created before adding streams - if self.__pattern is not None: + numTracks = len(self.getCurrentTrackDescriptors()) - numTracks = len(self.tracksTable.rows) + if event.button.id == "button_add_track": + self.app.push_screen( + TrackDetailsScreen( + patternId=self.__pattern.getId() if self.__pattern is not None else None, + patternLabel=self.getPatternFromInput(), + siblingTrackDescriptors=self.getCurrentTrackDescriptors(), + index=numTracks, + ), + self.handle_add_track, + ) - if event.button.id == "button_add_track": - self.app.push_screen(TrackDetailsScreen(patternId = self.__pattern.getId(), index = numTracks), self.handle_add_track) - - selectedTrack = self.getSelectedTrackDescriptor() - if selectedTrack is not None: - if event.button.id == "button_edit_track": - self.app.push_screen(TrackDetailsScreen(trackDescriptor = selectedTrack), self.handle_edit_track) - if event.button.id == "button_delete_track": - self.app.push_screen(TrackDeleteScreen(trackDescriptor = selectedTrack), self.handle_delete_track) + selectedTrack = self.getSelectedTrackDescriptor() + if selectedTrack is not None: + if event.button.id == "button_edit_track": + self.app.push_screen( + TrackDetailsScreen( + trackDescriptor=selectedTrack, + patternId=self.__pattern.getId() if self.__pattern is not None else None, + patternLabel=self.getPatternFromInput(), + siblingTrackDescriptors=self.getCurrentTrackDescriptors(), + ), + self.handle_edit_track, + ) + if event.button.id == "button_delete_track": + self.app.push_screen( + TrackDeleteScreen(trackDescriptor = selectedTrack), + self.handle_delete_track, + ) if event.button.id == "button_add_tag": - if self.__pattern is not None: - self.app.push_screen(TagDetailsScreen(), self.handle_update_tag) + self.app.push_screen(TagDetailsScreen(), self.handle_update_tag) if event.button.id == "button_edit_tag": - tagKey, tagValue = self.getSelectedTag() - self.app.push_screen(TagDetailsScreen(key=tagKey, value=tagValue), self.handle_update_tag) + 
selectedTag = self.getSelectedTag() + if selectedTag is not None: + tagKey, tagValue = selectedTag + self.app.push_screen(TagDetailsScreen(key=tagKey, value=tagValue), self.handle_update_tag) if event.button.id == "button_delete_tag": - tagKey, tagValue = self.getSelectedTag() - self.app.push_screen(TagDeleteScreen(key=tagKey, value=tagValue), self.handle_delete_tag) + selectedTag = self.getSelectedTag() + if selectedTag is not None: + tagKey, tagValue = selectedTag + self.app.push_screen(TagDeleteScreen(key=tagKey, value=tagValue), self.handle_delete_tag) if event.button.id == "pattern_button": @@ -537,83 +549,106 @@ class PatternDetailsScreen(Screen): if event.button.id == "button_track_up": selectedTrackDescriptor = self.getSelectedTrackDescriptor() - selectedTrackIndex = selectedTrackDescriptor.getIndex() + if selectedTrackDescriptor is not None: + selectedTrackIndex = selectedTrackDescriptor.getIndex() - if selectedTrackIndex > 0 and selectedTrackIndex < self.tracksTable.row_count: - correspondingTrackIndex = selectedTrackIndex - 1 - self.swapTracks(selectedTrackIndex, correspondingTrackIndex) + if selectedTrackIndex > 0 and selectedTrackIndex < self.tracksTable.row_count: + correspondingTrackIndex = selectedTrackIndex - 1 + self.swapTracks(selectedTrackIndex, correspondingTrackIndex) if event.button.id == "button_track_down": selectedTrackDescriptor = self.getSelectedTrackDescriptor() - selectedTrackIndex = selectedTrackDescriptor.getIndex() + if selectedTrackDescriptor is not None: + selectedTrackIndex = selectedTrackDescriptor.getIndex() - if selectedTrackIndex >= 0 and selectedTrackIndex < (self.tracksTable.row_count - 1): - correspondingTrackIndex = selectedTrackIndex + 1 - self.swapTracks(selectedTrackIndex, correspondingTrackIndex) + if selectedTrackIndex >= 0 and selectedTrackIndex < (self.tracksTable.row_count - 1): + correspondingTrackIndex = selectedTrackIndex + 1 + self.swapTracks(selectedTrackIndex, correspondingTrackIndex) def 
handle_add_track(self, trackDescriptor : TrackDescriptor): + if trackDescriptor is None: + return - dispoSet = trackDescriptor.getDispositionSet() - trackType = trackDescriptor.getType() - index = trackDescriptor.getIndex() - subIndex = trackDescriptor.getSubIndex() - codec = trackDescriptor.getCodec() - language = trackDescriptor.getLanguage() - title = trackDescriptor.getTitle() + if self.__pattern is not None: + self.__tc.addTrack(trackDescriptor, patternId=self.__pattern.getId()) + else: + self.__draftTracks.append(trackDescriptor) + self.normalizeDraftTracks() - row = (index, - trackType.label(), - subIndex, - codec.label(), - language.label(), - title, - 'Yes' if TrackDisposition.DEFAULT in dispoSet else 'No', - 'Yes' if TrackDisposition.FORCED in dispoSet else 'No') - - self.tracksTable.add_row(*map(str, row)) + self.updateTracks() def handle_edit_track(self, trackDescriptor : TrackDescriptor): + if trackDescriptor is None: + return - try: + if self.__pattern is not None: + if not self.__tc.updateTrack(trackDescriptor.getId(), trackDescriptor): + raise click.ClickException("PatternDetailsScreen.handle_edit_track(): track update failed") + else: + selectedTrack = self.getSelectedTrackDescriptor() + for index, currentTrack in enumerate(self.__draftTracks): + if (selectedTrack is not None + and currentTrack.getIndex() == selectedTrack.getIndex() + and currentTrack.getSubIndex() == selectedTrack.getSubIndex()): + self.__draftTracks[index] = trackDescriptor + break + self.normalizeDraftTracks() - row_key, col_key = self.tracksTable.coordinate_to_cell_key(self.tracksTable.cursor_coordinate) - - self.tracksTable.update_cell(row_key, self.column_key_track_audio_layout, - trackDescriptor.getAudioLayout().label() - if trackDescriptor.getType() == TrackType.AUDIO else ' ') - - self.tracksTable.update_cell(row_key, self.column_key_track_language, trackDescriptor.getLanguage().label()) - self.tracksTable.update_cell(row_key, self.column_key_track_title, 
trackDescriptor.getTitle()) - self.tracksTable.update_cell(row_key, self.column_key_track_default, - 'Yes' if TrackDisposition.DEFAULT in trackDescriptor.getDispositionSet() else 'No') - self.tracksTable.update_cell(row_key, self.column_key_track_forced, - 'Yes' if TrackDisposition.FORCED in trackDescriptor.getDispositionSet() else 'No') - - except CellDoesNotExist: - pass + self.updateTracks() def handle_delete_track(self, trackDescriptor : TrackDescriptor): + if trackDescriptor is None: + return + + if self.__pattern is not None: + track = self.__tc.getTrack(trackDescriptor.getPatternId(), trackDescriptor.getIndex()) + + if track is None: + raise click.ClickException( + f"Track is none: patternId={trackDescriptor.getPatternId()} type={trackDescriptor.getType()} subIndex={trackDescriptor.getSubIndex()}" + ) + + self.__tc.deleteTrack(track.getId()) + else: + self.__draftTracks = [ + currentTrack + for currentTrack in self.__draftTracks + if not ( + currentTrack.getIndex() == trackDescriptor.getIndex() + and currentTrack.getSubIndex() == trackDescriptor.getSubIndex() + ) + ] + self.normalizeDraftTracks() + self.updateTracks() def handle_update_tag(self, tag): + if tag is None: + return if self.__pattern is None: - raise click.ClickException(f"PatternDetailsScreen.handle_update_tag: pattern not set") + self.__draftTags[str(tag[0])] = str(tag[1]) + else: + if self.__tac.updateMediaTag(self.__pattern.getId(), tag[0], tag[1]) is None: + raise click.ClickException("PatternDetailsScreen.handle_update_tag(): tag update failed") - if self.__tac.updateMediaTag(self.__pattern.getId(), tag[0], tag[1]) is not None: - self.updateTags() + self.updateTags() def handle_delete_tag(self, tag): + if tag is None: + return if self.__pattern is None: - raise click.ClickException(f"PatternDetailsScreen.handle_delete_tag: pattern not set") + self.__draftTags.pop(str(tag[0]), None) + self.updateTags() + return if self.__tac.deleteMediaTagByKey(self.__pattern.getId(), tag[0]): 
self.updateTags() diff --git a/src/ffx/process.py b/src/ffx/process.py index 08953bd..429961c 100644 --- a/src/ffx/process.py +++ b/src/ffx/process.py @@ -1,33 +1,169 @@ -import subprocess, logging -from typing import List +import os +import shlex +import subprocess +from typing import Iterable, List -def executeProcess(commandSequence: List[str], directory: str = None, context: dict = None): +from .logging_utils import get_ffx_logger + +COMMAND_TIMED_OUT_RETURN_CODE = 124 +COMMAND_NOT_FOUND_RETURN_CODE = 127 +MIN_NICENESS = -20 +MAX_NICENESS = 19 +DISABLED_NICENESS_SENTINEL = 99 +DISABLED_CPU_PERCENT_SENTINEL = 0 +MIN_CPU_PERCENT = 1 +MAX_CPU_PERCENT = 100 + + +def formatCommandSequence(commandSequence: Iterable[str]) -> str: + return shlex.join([str(token) for token in commandSequence]) + + +def normalizeNiceness(niceness) -> int | None: + if niceness is None: + return None + + niceness = int(niceness) + if niceness == DISABLED_NICENESS_SENTINEL: + return None + + if niceness < MIN_NICENESS or niceness > MAX_NICENESS: + raise ValueError( + f"Niceness must be between {MIN_NICENESS} and {MAX_NICENESS}, " + + f"or {DISABLED_NICENESS_SENTINEL} to disable." + ) + + return niceness + + +def getPresentCpuCount() -> int: + if hasattr(os, 'sched_getaffinity'): + affinity = os.sched_getaffinity(0) + if affinity: + return len(affinity) + + cpuCount = os.cpu_count() + return cpuCount if cpuCount and cpuCount > 0 else 1 + + +def normalizeCpuPercent(cpuPercent) -> int | None: + if cpuPercent is None: + return None + + cpuPercent = str(cpuPercent).strip() + if cpuPercent.endswith('%'): + percentValue = int(cpuPercent[:-1].strip()) + if percentValue == DISABLED_CPU_PERCENT_SENTINEL: + return None + + if percentValue < MIN_CPU_PERCENT or percentValue > MAX_CPU_PERCENT: + raise ValueError( + f"CPU percentage must be between {MIN_CPU_PERCENT}% and {MAX_CPU_PERCENT}%, " + + f"or {DISABLED_CPU_PERCENT_SENTINEL} to disable." 
+ ) + + return percentValue * getPresentCpuCount() + + cpuPercent = int(cpuPercent) + if cpuPercent == DISABLED_CPU_PERCENT_SENTINEL: + return None + + if cpuPercent < MIN_CPU_PERCENT: + raise ValueError( + "CPU limit must be a positive absolute value such as 200, " + + f"a percentage such as 25%, or {DISABLED_CPU_PERCENT_SENTINEL} to disable." + ) + + return cpuPercent + + +def getWrappedCommandSequence(commandSequence: List[str], context: dict = None) -> List[str]: """ - niceness -20 bis +19 - cpu_percent: 1 bis 99 + niceness: -20 to 19, disabled when unset + cpu limit: positive absolute cpulimit value, or a machine-wide percentage + + When both limits are configured, cpulimit wraps a nice-adjusted command: + cpulimit -l <cpu> -- nice -n <niceness> <command> """ - if context is None: - logger = logging.getLogger('FFX') - logger.addHandler(logging.NullHandler()) - else: - logger = context['logger'] + resourceLimits = (context or {}).get('resource_limits', {}) + niceness = normalizeNiceness(resourceLimits.get('niceness')) + cpu_percent = normalizeCpuPercent( + resourceLimits.get('cpu_limit', resourceLimits.get('cpu_percent')) + ) + wrappedCommandSequence = [str(token) for token in commandSequence] - niceSequence = [] + if niceness is not None: + wrappedCommandSequence = ['nice', '-n', str(niceness)] + wrappedCommandSequence + if cpu_percent is not None: + wrappedCommandSequence = ['cpulimit', '-l', str(cpu_percent), '--'] + wrappedCommandSequence - niceness = int((context or {}).get('resource_limits', {}).get('niceness', 99)) - cpu_percent = int((context or {}).get('resource_limits', {}).get('cpu_percent', 0)) + return wrappedCommandSequence - if niceness >= -20 and niceness <= 19: - niceSequence += ['nice', '-n', str(niceness)] - if cpu_percent >= 1: - niceSequence += ['cpulimit', '-l', str(cpu_percent), '--'] - niceCommand = niceSequence + commandSequence +def getProcessTimeoutSeconds(context: dict = None, timeoutSeconds: float = None): + if timeoutSeconds is 
None: + timeoutSeconds = (context or {}).get('resource_limits', {}).get('timeout_seconds') - logger.debug(f"executeProcess() command sequence: {' '.join(niceCommand)}") + if timeoutSeconds is None: + return None - process = subprocess.Popen(niceCommand, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf-8', cwd = directory) - output, error = process.communicate() - - return output, error, process.returncode + timeoutSeconds = float(timeoutSeconds) + + return timeoutSeconds if timeoutSeconds > 0 else None + + +def executeProcess( + commandSequence: List[str], + directory: str = None, + context: dict = None, + timeoutSeconds: float = None, +): + + logger = context['logger'] if context is not None and 'logger' in context else get_ffx_logger() + wrappedCommandSequence = getWrappedCommandSequence(commandSequence, context=context) + timeoutSeconds = getProcessTimeoutSeconds(context=context, timeoutSeconds=timeoutSeconds) + + logger.debug( + "executeProcess() cwd=%s timeout=%s command=%s", + directory or '.', + timeoutSeconds if timeoutSeconds is not None else 'none', + formatCommandSequence(wrappedCommandSequence), + ) + + try: + completed = subprocess.run( + wrappedCommandSequence, + capture_output=True, + text=True, + cwd=directory, + timeout=timeoutSeconds, + check=False, + ) + except FileNotFoundError as ex: + error = ( + "Command not found while running " + + f"{formatCommandSequence(wrappedCommandSequence)}: {ex.filename or ex}" + ) + logger.error(error) + return '', error, COMMAND_NOT_FOUND_RETURN_CODE + except subprocess.TimeoutExpired as ex: + stdout = ex.stdout or '' + stderr = ex.stderr or '' + error = ( + f"Command timed out after {timeoutSeconds} seconds while running " + + formatCommandSequence(wrappedCommandSequence) + ) + if stderr: + error = f"{error}\n{stderr}" + logger.error(error) + return stdout, error, COMMAND_TIMED_OUT_RETURN_CODE + + if completed.returncode != 0: + logger.warning( + "executeProcess() rc=%s command=%s", + 
completed.returncode, + formatCommandSequence(wrappedCommandSequence), + ) + + return completed.stdout, completed.stderr, completed.returncode diff --git a/src/ffx/screen_support.py b/src/ffx/screen_support.py new file mode 100644 index 0000000..a7e24b6 --- /dev/null +++ b/src/ffx/screen_support.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +from dataclasses import dataclass + +from .pattern_controller import PatternController +from .show_controller import ShowController +from .shifted_season_controller import ShiftedSeasonController +from .tag_controller import TagController +from .tmdb_controller import TmdbController +from .track_controller import TrackController + + +@dataclass(frozen=True) +class ScreenBootstrap: + context: dict + configuration_data: dict + signature_tags: dict + remove_global_keys: list + ignore_global_keys: list + remove_track_keys: list + ignore_track_keys: list + + +def build_screen_bootstrap(context: dict) -> ScreenBootstrap: + configurationData = context['config'].getData() + metadataConfiguration = configurationData.get('metadata', {}) + streamMetadataConfiguration = metadataConfiguration.get('streams', {}) + + return ScreenBootstrap( + context=context, + configuration_data=configurationData, + signature_tags=metadataConfiguration.get('signature', {}), + remove_global_keys=metadataConfiguration.get('remove', []), + ignore_global_keys=metadataConfiguration.get('ignore', []), + remove_track_keys=streamMetadataConfiguration.get('remove', []), + ignore_track_keys=streamMetadataConfiguration.get('ignore', []), + ) + + +def build_screen_controllers( + context: dict, + *, + pattern: bool = False, + show: bool = False, + track: bool = False, + tag: bool = False, + tmdb: bool = False, + shifted_season: bool = False, +) -> dict[str, object]: + controllers = {} + + if pattern: + controllers['pattern'] = PatternController(context=context) + if show: + controllers['show'] = ShowController(context=context) + if track: + 
controllers['track'] = TrackController(context=context) + if tag: + controllers['tag'] = TagController(context=context) + if tmdb: + controllers['tmdb'] = TmdbController() + if shifted_season: + controllers['shifted_season'] = ShiftedSeasonController(context=context) + + return controllers diff --git a/src/ffx/shifted_season_controller.py b/src/ffx/shifted_season_controller.py index b6e01e6..6fc254d 100644 --- a/src/ffx/shifted_season_controller.py +++ b/src/ffx/shifted_season_controller.py @@ -101,11 +101,9 @@ class ShiftedSeasonController(): try: s = self.Session() - q = s.query(ShiftedSeason).filter(ShiftedSeason.id == int(shiftedSeasonId)) + shiftedSeason = s.query(ShiftedSeason).filter(ShiftedSeason.id == int(shiftedSeasonId)).first() - if q.count(): - - shiftedSeason = q.first() + if shiftedSeason is not None: shiftedSeason.original_season = int(shiftedSeasonObj['original_season']) shiftedSeason.first_episode = int(shiftedSeasonObj['first_episode']) @@ -141,12 +139,14 @@ class ShiftedSeasonController(): try: s = self.Session() - q = s.query(ShiftedSeason).filter(ShiftedSeason.show_id == int(showId), - ShiftedSeason.original_season == int(originalSeason), - ShiftedSeason.first_episode == int(firstEpisode), - ShiftedSeason.last_episode == int(lastEpisode)) + shiftedSeason = s.query(ShiftedSeason).filter( + ShiftedSeason.show_id == int(showId), + ShiftedSeason.original_season == int(originalSeason), + ShiftedSeason.first_episode == int(firstEpisode), + ShiftedSeason.last_episode == int(lastEpisode), + ).first() - return q.first().getId() if q.count() else None + return shiftedSeason.getId() if shiftedSeason is not None else None except Exception as ex: raise click.ClickException(f"PatternController.findShiftedSeason(): {repr(ex)}") @@ -177,9 +177,7 @@ class ShiftedSeasonController(): try: s = self.Session() - q = s.query(ShiftedSeason).filter(ShiftedSeason.id == int(shiftedSeasonId)) - - return q.first() if q.count() else None + return 
s.query(ShiftedSeason).filter(ShiftedSeason.id == int(shiftedSeasonId)).first() except Exception as ex: raise click.ClickException(f"ShiftedSeasonController.getShiftedSeason(): {repr(ex)}") @@ -194,13 +192,12 @@ class ShiftedSeasonController(): try: s = self.Session() - q = s.query(ShiftedSeason).filter(ShiftedSeason.id == int(shiftedSeasonId)) + shiftedSeason = s.query(ShiftedSeason).filter(ShiftedSeason.id == int(shiftedSeasonId)).first() - if q.count(): + if shiftedSeason is not None: #DAFUQ: https://stackoverflow.com/a/19245058 # q.delete() - shiftedSeason = q.first() s.delete(shiftedSeason) s.commit() diff --git a/src/ffx/show_controller.py b/src/ffx/show_controller.py index a426eaf..7407a19 100644 --- a/src/ffx/show_controller.py +++ b/src/ffx/show_controller.py @@ -16,10 +16,9 @@ class ShowController(): try: s = self.Session() - q = s.query(Show).filter(Show.id == showId) + show = s.query(Show).filter(Show.id == showId).first() - if q.count(): - show: Show = q.first() + if show is not None: return show.getDescriptor(self.context) except Exception as ex: @@ -31,9 +30,7 @@ class ShowController(): try: s = self.Session() - q = s.query(Show).filter(Show.id == showId) - - return q.first() if q.count() else None + return s.query(Show).filter(Show.id == showId).first() except Exception as ex: raise click.ClickException(f"ShowController.getShow(): {repr(ex)}") @@ -44,12 +41,7 @@ class ShowController(): try: s = self.Session() - q = s.query(Show) - - if q.count(): - return q.all() - else: - return [] + return s.query(Show).all() except Exception as ex: raise click.ClickException(f"ShowController.getAllShows(): {repr(ex)}") @@ -61,9 +53,9 @@ class ShowController(): try: s = self.Session() - q = s.query(Show).filter(Show.id == showDescriptor.getId()) + currentShow = s.query(Show).filter(Show.id == showDescriptor.getId()).first() - if not q.count(): + if currentShow is None: show = Show(id = int(showDescriptor.getId()), name = str(showDescriptor.getName()), year = 
int(showDescriptor.getYear()), @@ -76,9 +68,6 @@ class ShowController(): s.commit() return True else: - - currentShow = q.first() - changed = False if currentShow.name != str(showDescriptor.getName()): currentShow.name = str(showDescriptor.getName()) @@ -113,14 +102,12 @@ class ShowController(): def deleteShow(self, show_id): try: s = self.Session() - q = s.query(Show).filter(Show.id == int(show_id)) + show = s.query(Show).filter(Show.id == int(show_id)).first() - - if q.count(): + if show is not None: #DAFUQ: https://stackoverflow.com/a/19245058 # q.delete() - show = q.first() s.delete(show) s.commit() diff --git a/src/ffx/show_descriptor.py b/src/ffx/show_descriptor.py index a045e8d..e1b3398 100644 --- a/src/ffx/show_descriptor.py +++ b/src/ffx/show_descriptor.py @@ -1,4 +1,11 @@ -import logging +from .configuration_controller import ConfigurationController +from .constants import ( + DEFAULT_SHOW_INDEX_EPISODE_DIGITS, + DEFAULT_SHOW_INDEX_SEASON_DIGITS, + DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS, + DEFAULT_SHOW_INDICATOR_SEASON_DIGITS, +) +from .logging_utils import get_ffx_logger class ShowDescriptor(): @@ -15,10 +22,42 @@ class ShowDescriptor(): INDICATOR_SEASON_DIGITS_KEY = 'indicator_season_digits' INDICATOR_EPISODE_DIGITS_KEY = 'indicator_episode_digits' - DEFAULT_INDEX_SEASON_DIGITS = 2 - DEFAULT_INDEX_EPISODE_DIGITS = 2 - DEFAULT_INDICATOR_SEASON_DIGITS = 2 - DEFAULT_INDICATOR_EPISODE_DIGITS = 2 + DEFAULT_INDEX_SEASON_DIGITS = DEFAULT_SHOW_INDEX_SEASON_DIGITS + DEFAULT_INDEX_EPISODE_DIGITS = DEFAULT_SHOW_INDEX_EPISODE_DIGITS + DEFAULT_INDICATOR_SEASON_DIGITS = DEFAULT_SHOW_INDICATOR_SEASON_DIGITS + DEFAULT_INDICATOR_EPISODE_DIGITS = DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS + + @classmethod + def getDefaultDigitLengths(cls, context: dict | None = None) -> dict[str, int]: + configurationData = {} + + if context is not None: + configController = context.get('config') + if configController is not None and hasattr(configController, 'getData'): + configurationData = 
configController.getData() + + return { + cls.INDEX_SEASON_DIGITS_KEY: ConfigurationController.getConfiguredIntegerValue( + configurationData, + ConfigurationController.DEFAULT_INDEX_SEASON_DIGITS_CONFIG_KEY, + cls.DEFAULT_INDEX_SEASON_DIGITS, + ), + cls.INDEX_EPISODE_DIGITS_KEY: ConfigurationController.getConfiguredIntegerValue( + configurationData, + ConfigurationController.DEFAULT_INDEX_EPISODE_DIGITS_CONFIG_KEY, + cls.DEFAULT_INDEX_EPISODE_DIGITS, + ), + cls.INDICATOR_SEASON_DIGITS_KEY: ConfigurationController.getConfiguredIntegerValue( + configurationData, + ConfigurationController.DEFAULT_INDICATOR_SEASON_DIGITS_CONFIG_KEY, + cls.DEFAULT_INDICATOR_SEASON_DIGITS, + ), + cls.INDICATOR_EPISODE_DIGITS_KEY: ConfigurationController.getConfiguredIntegerValue( + configurationData, + ConfigurationController.DEFAULT_INDICATOR_EPISODE_DIGITS_CONFIG_KEY, + cls.DEFAULT_INDICATOR_EPISODE_DIGITS, + ), + } def __init__(self, **kwargs): @@ -32,8 +71,7 @@ class ShowDescriptor(): self.__logger = self.__context['logger'] else: self.__context = {} - self.__logger = logging.getLogger('FFX') - self.__logger.addHandler(logging.NullHandler()) + self.__logger = get_ffx_logger() if ShowDescriptor.ID_KEY in kwargs.keys(): if type(kwargs[ShowDescriptor.ID_KEY]) is not int: @@ -54,36 +92,37 @@ class ShowDescriptor(): raise TypeError(f"ShowDescriptor.__init__(): Argument {ShowDescriptor.YEAR_KEY} is required to be of type int") self.__showYear = kwargs[ShowDescriptor.YEAR_KEY] else: - self.__showYear = -1 + self.__showYear = -1 + defaultDigitLengths = self.getDefaultDigitLengths(self.__context) if ShowDescriptor.INDEX_SEASON_DIGITS_KEY in kwargs.keys(): if type(kwargs[ShowDescriptor.INDEX_SEASON_DIGITS_KEY]) is not int: raise TypeError(f"ShowDescriptor.__init__(): Argument {ShowDescriptor.INDEX_SEASON_DIGITS_KEY} is required to be of type int") self.__indexSeasonDigits = kwargs[ShowDescriptor.INDEX_SEASON_DIGITS_KEY] else: - self.__indexSeasonDigits = 
ShowDescriptor.DEFAULT_INDEX_SEASON_DIGITS + self.__indexSeasonDigits = defaultDigitLengths[ShowDescriptor.INDEX_SEASON_DIGITS_KEY] if ShowDescriptor.INDEX_EPISODE_DIGITS_KEY in kwargs.keys(): if type(kwargs[ShowDescriptor.INDEX_EPISODE_DIGITS_KEY]) is not int: raise TypeError(f"ShowDescriptor.__init__(): Argument {ShowDescriptor.INDEX_EPISODE_DIGITS_KEY} is required to be of type int") self.__indexEpisodeDigits = kwargs[ShowDescriptor.INDEX_EPISODE_DIGITS_KEY] else: - self.__indexEpisodeDigits = ShowDescriptor.DEFAULT_INDEX_EPISODE_DIGITS + self.__indexEpisodeDigits = defaultDigitLengths[ShowDescriptor.INDEX_EPISODE_DIGITS_KEY] if ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY in kwargs.keys(): if type(kwargs[ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY]) is not int: raise TypeError(f"ShowDescriptor.__init__(): Argument {ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY} is required to be of type int") self.__indicatorSeasonDigits = kwargs[ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY] else: - self.__indicatorSeasonDigits = ShowDescriptor.DEFAULT_INDICATOR_SEASON_DIGITS + self.__indicatorSeasonDigits = defaultDigitLengths[ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY] if ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY in kwargs.keys(): if type(kwargs[ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY]) is not int: raise TypeError(f"ShowDescriptor.__init__(): Argument {ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY} is required to be of type int") self.__indicatorEpisodeDigits = kwargs[ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY] else: - self.__indicatorEpisodeDigits = ShowDescriptor.DEFAULT_INDICATOR_EPISODE_DIGITS + self.__indicatorEpisodeDigits = defaultDigitLengths[ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY] def getId(self): diff --git a/src/ffx/show_details_screen.py b/src/ffx/show_details_screen.py index c0e2153..311a91f 100644 --- a/src/ffx/show_details_screen.py +++ b/src/ffx/show_details_screen.py @@ -5,16 +5,9 @@ from textual.widgets import Header, Footer, Static, Button, DataTable, 
Input from textual.containers import Grid from textual.widgets._data_table import CellDoesNotExist -from ffx.model.pattern import Pattern - from .pattern_details_screen import PatternDetailsScreen from .pattern_delete_screen import PatternDeleteScreen -from .show_controller import ShowController -from .pattern_controller import PatternController -from .tmdb_controller import TmdbController -from .shifted_season_controller import ShiftedSeasonController - from .show_descriptor import ShowDescriptor from .shifted_season_details_screen import ShiftedSeasonDetailsScreen @@ -23,6 +16,7 @@ from .shifted_season_delete_screen import ShiftedSeasonDeleteScreen from ffx.model.shifted_season import ShiftedSeason from .helper import filterFilename +from .screen_support import build_screen_bootstrap, build_screen_controllers # Screen[dict[int, str, int]] @@ -94,31 +88,24 @@ class ShowDetailsScreen(Screen): def __init__(self, showId = None): super().__init__() - self.context = self.app.getContext() - self.Session = self.context['database']['session'] # convenience - - self.__sc = ShowController(context = self.context) - self.__pc = PatternController(context = self.context) - self.__tc = TmdbController() - self.__ssc = ShiftedSeasonController(context = self.context) + bootstrap = build_screen_bootstrap(self.app.getContext()) + self.context = bootstrap.context + + controllers = build_screen_controllers( + self.context, + pattern=True, + show=True, + tmdb=True, + shifted_season=True, + ) + self.__sc = controllers['show'] + self.__pc = controllers['pattern'] + self.__tc = controllers['tmdb'] + self.__ssc = controllers['shifted_season'] self.__showDescriptor = self.__sc.getShowDescriptor(showId) if showId is not None else None - def loadPatterns(self, show_id : int): - - try: - s = self.Session() - q = s.query(Pattern).filter(Pattern.show_id == int(show_id)) - - return [{'id': int(p.id), 'pattern': str(p.pattern)} for p in q.all()] - - except Exception as ex: - raise 
click.ClickException(f"ShowDetailsScreen.loadPatterns(): {repr(ex)}") - finally: - s.close() - - def updateShiftedSeasons(self): @@ -166,20 +153,27 @@ class ShowDetailsScreen(Screen): #raise click.ClickException(f"show_id {showId}") - patternList = self.loadPatterns(showId) - # raise click.ClickException(f"patternList {patternList}") - for pattern in patternList: - row = (pattern['pattern'],) + for pattern in self.__pc.getPatternsForShow(showId): + row = (pattern.getPattern(),) self.patternTable.add_row(*map(str, row)) self.updateShiftedSeasons() else: - - self.query_one("#index_season_digits_input", Input).value = "2" - self.query_one("#index_episode_digits_input", Input).value = "2" - self.query_one("#indicator_season_digits_input", Input).value = "2" - self.query_one("#indicator_episode_digits_input", Input).value = "2" + defaultDigitLengths = ShowDescriptor.getDefaultDigitLengths(self.context) + + self.query_one("#index_season_digits_input", Input).value = str( + defaultDigitLengths[ShowDescriptor.INDEX_SEASON_DIGITS_KEY] + ) + self.query_one("#index_episode_digits_input", Input).value = str( + defaultDigitLengths[ShowDescriptor.INDEX_EPISODE_DIGITS_KEY] + ) + self.query_one("#indicator_season_digits_input", Input).value = str( + defaultDigitLengths[ShowDescriptor.INDICATOR_SEASON_DIGITS_KEY] + ) + self.query_one("#indicator_episode_digits_input", Input).value = str( + defaultDigitLengths[ShowDescriptor.INDICATOR_EPISODE_DIGITS_KEY] + ) def getSelectedPatternDescriptor(self): @@ -402,7 +396,7 @@ class ShowDetailsScreen(Screen): def getShowDescriptorFromInput(self) -> ShowDescriptor: - kwargs = {} + kwargs = {ShowDescriptor.CONTEXT_KEY: self.context} try: if self.__showDescriptor: @@ -489,4 +483,4 @@ class ShowDetailsScreen(Screen): self.updateShiftedSeasons() def handle_delete_shifted_season(self, screenResult): - self.updateShiftedSeasons() \ No newline at end of file + self.updateShiftedSeasons() diff --git a/src/ffx/tag_controller.py 
b/src/ffx/tag_controller.py index 792dad7..4e14479 100644 --- a/src/ffx/tag_controller.py +++ b/src/ffx/tag_controller.py @@ -67,10 +67,11 @@ class TagController(): try: s = self.Session() - q = s.query(MediaTag).filter(MediaTag.pattern_id == int(patternId), - MediaTag.key == str(tagKey)) - if q.count(): - tag = q.first() + tag = s.query(MediaTag).filter( + MediaTag.pattern_id == int(patternId), + MediaTag.key == str(tagKey), + ).first() + if tag is not None: s.delete(tag) s.commit() return True @@ -107,12 +108,8 @@ class TagController(): try: s = self.Session() - q = s.query(MediaTag).filter(MediaTag.pattern_id == int(patternId)) - - if q.count(): - return {t.key:t.value for t in q.all()} - else: - return {} + tags = s.query(MediaTag).filter(MediaTag.pattern_id == int(patternId)).all() + return {t.key:t.value for t in tags} except Exception as ex: raise click.ClickException(f"TagController.findAllMediaTags(): {repr(ex)}") @@ -125,12 +122,8 @@ class TagController(): try: s = self.Session() - q = s.query(TrackTag).filter(TrackTag.track_id == int(trackId)) - - if q.count(): - return {t.key:t.value for t in q.all()} - else: - return {} + tags = s.query(TrackTag).filter(TrackTag.track_id == int(trackId)).all() + return {t.key:t.value for t in tags} except Exception as ex: raise click.ClickException(f"TagController.findAllTracks(): {repr(ex)}") @@ -142,12 +135,7 @@ class TagController(): try: s = self.Session() - q = s.query(Track).filter(MediaTag.track_id == int(trackId), MediaTag.key == str(trackKey)) - - if q.count(): - return q.first() - else: - return None + return s.query(Track).filter(MediaTag.track_id == int(trackId), MediaTag.key == str(trackKey)).first() except Exception as ex: raise click.ClickException(f"TagController.findMediaTag(): {repr(ex)}") @@ -158,12 +146,10 @@ class TagController(): try: s = self.Session() - q = s.query(TrackTag).filter(TrackTag.track_id == int(trackId), TrackTag.key == str(tagKey)) - - if q.count(): - return q.first() - else: - 
return None + return s.query(TrackTag).filter( + TrackTag.track_id == int(trackId), + TrackTag.key == str(tagKey), + ).first() except Exception as ex: raise click.ClickException(f"TagController.findTrackTag(): {repr(ex)}") @@ -175,11 +161,9 @@ class TagController(): def deleteMediaTag(self, tagId) -> bool: try: s = self.Session() - q = s.query(MediaTag).filter(MediaTag.id == int(tagId)) + tag = s.query(MediaTag).filter(MediaTag.id == int(tagId)).first() - if q.count(): - - tag = q.first() + if tag is not None: s.delete(tag) @@ -201,11 +185,9 @@ class TagController(): try: s = self.Session() - q = s.query(TrackTag).filter(TrackTag.id == int(tagId)) + tag = s.query(TrackTag).filter(TrackTag.id == int(tagId)).first() - if q.count(): - - tag = q.first() + if tag is not None: s.delete(tag) diff --git a/src/ffx/tmdb_controller.py b/src/ffx/tmdb_controller.py index 1190564..090ee52 100644 --- a/src/ffx/tmdb_controller.py +++ b/src/ffx/tmdb_controller.py @@ -1,6 +1,8 @@ -import os, requests, time, logging +import os, requests, time from datetime import datetime +from .logging_utils import get_ffx_logger + class TMDB_REQUEST_EXCEPTION(Exception): def __init__(self, statusCode, statusMessage): @@ -27,8 +29,7 @@ class TmdbController(): self.__context = context if context is None: - self.__logger = logging.getLogger('FFX') - self.__logger.addHandler(logging.NullHandler()) + self.__logger = get_ffx_logger() else: self.__logger = context['logger'] diff --git a/src/ffx/track_controller.py b/src/ffx/track_controller.py index 5b1de31..3288dd8 100644 --- a/src/ffx/track_controller.py +++ b/src/ffx/track_controller.py @@ -75,11 +75,9 @@ class TrackController(): try: s = self.Session() - q = s.query(Track).filter(Track.id == int(trackId)) + track = s.query(Track).filter(Track.id == int(trackId)).first() - if q.count(): - - track : Track = q.first() + if track is not None: track.index = int(trackDescriptor.getIndex()) @@ -193,12 +191,10 @@ class TrackController(): try: s = 
self.Session() - q = s.query(Track).filter(Track.pattern_id == int(patternId), Track.index == int(index)) - - if q.count(): - return q.first() - else: - return None + return s.query(Track).filter( + Track.pattern_id == int(patternId), + Track.index == int(index), + ).first() except Exception as ex: raise click.ClickException(f"TrackController.getTrack(): {repr(ex)}") @@ -218,11 +214,9 @@ class TrackController(): try: s = self.Session() - q = s.query(Track).filter(Track.pattern_id == patternId, Track.index == index) + track = s.query(Track).filter(Track.pattern_id == patternId, Track.index == index).first() - if q.count(): - - track : Track = q.first() + if track is not None: if state: track.setDisposition(disposition) @@ -244,15 +238,21 @@ class TrackController(): try: s = self.Session() - q = s.query(Track).filter(Track.id == int(trackId)) + track = s.query(Track).filter(Track.id == int(trackId)).first() - if q.count(): - patternId = int(q.first().pattern_id) + if track is not None: + patternId = int(track.pattern_id) q_siblings = s.query(Track).filter(Track.pattern_id == patternId).order_by(Track.index) + siblingTracks = q_siblings.all() + + if len(siblingTracks) <= 1: + raise click.ClickException( + f"Cannot delete the last track from pattern #{patternId}. Patterns must define at least one track." 
+ ) index = 0 - for track in q_siblings.all(): + for track in siblingTracks: if track.id == int(trackId): s.delete(track) diff --git a/src/ffx/track_delete_screen.py b/src/ffx/track_delete_screen.py index c944525..4743538 100644 --- a/src/ffx/track_delete_screen.py +++ b/src/ffx/track_delete_screen.py @@ -6,8 +6,6 @@ from textual.containers import Grid from ffx.track_descriptor import TrackDescriptor -from .track_controller import TrackController - # Screen[dict[int, str, int]] class TrackDeleteScreen(Screen): @@ -52,14 +50,9 @@ class TrackDeleteScreen(Screen): def __init__(self, trackDescriptor : TrackDescriptor): super().__init__() - self.context = self.app.getContext() - self.Session = self.context['database']['session'] # convenience - if type(trackDescriptor) is not TrackDescriptor: raise click.ClickException('TrackDeleteScreen.init(): trackDescriptor is required to be of type TrackDescriptor') - self.__tc = TrackController(context = self.context) - self.__trackDescriptor = trackDescriptor @@ -116,21 +109,7 @@ class TrackDeleteScreen(Screen): def on_button_pressed(self, event: Button.Pressed) -> None: if event.button.id == "delete_button": - - track = self.__tc.getTrack(self.__trackDescriptor.getPatternId(), self.__trackDescriptor.getIndex()) - - if track is None: - raise click.ClickException(f"Track is none: patternId={self.__trackDescriptor.getPatternId()} type={self.__trackDescriptor.getType()} subIndex={self.__trackDescriptor.getSubIndex()}") - - if track is not None: - - if self.__tc.deleteTrack(track.getId()): - self.dismiss(self.__trackDescriptor) - - else: - #TODO: Meldung - self.app.pop_screen() + self.dismiss(self.__trackDescriptor) if event.button.id == "cancel_button": self.app.pop_screen() - diff --git a/src/ffx/track_descriptor.py b/src/ffx/track_descriptor.py index 84b9d6b..9a102b0 100644 --- a/src/ffx/track_descriptor.py +++ b/src/ffx/track_descriptor.py @@ -1,4 +1,3 @@ -import logging from typing import Self from .iso_language import 
IsoLanguage @@ -6,6 +5,7 @@ from .track_type import TrackType from .audio_layout import AudioLayout from .track_disposition import TrackDisposition from .track_codec import TrackCodec +from .logging_utils import get_ffx_logger # from .helper import dictDiff, setDiff @@ -46,8 +46,7 @@ class TrackDescriptor: self.__logger = self.__context['logger'] else: self.__context = {} - self.__logger = logging.getLogger('FFX') - self.__logger.addHandler(logging.NullHandler()) + self.__logger = get_ffx_logger() if TrackDescriptor.ID_KEY in kwargs.keys(): if type(kwargs[TrackDescriptor.ID_KEY]) is not int: diff --git a/src/ffx/track_details_screen.py b/src/ffx/track_details_screen.py index dfa226c..f0d1c15 100644 --- a/src/ffx/track_details_screen.py +++ b/src/ffx/track_details_screen.py @@ -3,31 +3,20 @@ import click from textual.screen import Screen from textual.widgets import Header, Footer, Static, Button, SelectionList, Select, DataTable, Input from textual.containers import Grid - -from ffx.model.pattern import Pattern - -from .track_controller import TrackController -from .pattern_controller import PatternController -from .tag_controller import TagController - -from .track_type import TrackType -from .track_codec import TrackCodec - -from .iso_language import IsoLanguage -from .track_disposition import TrackDisposition -from .audio_layout import AudioLayout - -from .track_descriptor import TrackDescriptor - -from .tag_details_screen import TagDetailsScreen -from .tag_delete_screen import TagDeleteScreen - from textual.widgets._data_table import CellDoesNotExist +from .audio_layout import AudioLayout +from .iso_language import IsoLanguage +from .tag_delete_screen import TagDeleteScreen +from .tag_details_screen import TagDetailsScreen +from .track_codec import TrackCodec +from .track_descriptor import TrackDescriptor +from .track_disposition import TrackDisposition +from .track_type import TrackType + from ffx.helper import formatRichColor, removeRichColor -# 
Screen[dict[int, str, int]] class TrackDetailsScreen(Screen): CSS = """ @@ -79,7 +68,7 @@ class TrackDetailsScreen(Screen): .three { column-span: 3; } - + .four { column-span: 4; } @@ -97,257 +86,288 @@ class TrackDetailsScreen(Screen): } """ - def __init__(self, trackDescriptor : TrackDescriptor = None, patternId = None, trackType : TrackType = None, index = None, subIndex = None): + def __init__( + self, + trackDescriptor: TrackDescriptor = None, + patternId=None, + patternLabel: str = "", + siblingTrackDescriptors=None, + trackType: TrackType = None, + index=None, + subIndex=None, + ): super().__init__() self.context = self.app.getContext() - self.Session = self.context['database']['session'] # convenience - self.__configurationData = self.context['config'].getData() + self.__configurationData = self.context["config"].getData() - metadataConfiguration = self.__configurationData['metadata'] if 'metadata' in self.__configurationData.keys() else {} + metadataConfiguration = ( + self.__configurationData["metadata"] + if "metadata" in self.__configurationData.keys() + else {} + ) - self.__signatureTags = metadataConfiguration['signature'] if 'signature' in metadataConfiguration.keys() else {} - self.__removeGlobalKeys = metadataConfiguration['remove'] if 'remove' in metadataConfiguration.keys() else [] - self.__ignoreGlobalKeys = metadataConfiguration['ignore'] if 'ignore' in metadataConfiguration.keys() else [] - self.__removeTrackKeys = (metadataConfiguration['streams']['remove'] - if 'streams' in metadataConfiguration.keys() - and 'remove' in metadataConfiguration['streams'].keys() else []) - self.__ignoreTrackKeys = (metadataConfiguration['streams']['ignore'] - if 'streams' in metadataConfiguration.keys() - and 'ignore' in metadataConfiguration['streams'].keys() else []) - - - self.__tc = TrackController(context = self.context) - self.__pc = PatternController(context = self.context) - self.__tac = TagController(context = self.context) + self.__removeTrackKeys = ( 
+ metadataConfiguration["streams"]["remove"] + if "streams" in metadataConfiguration.keys() + and "remove" in metadataConfiguration["streams"].keys() + else [] + ) + self.__ignoreTrackKeys = ( + metadataConfiguration["streams"]["ignore"] + if "streams" in metadataConfiguration.keys() + and "ignore" in metadataConfiguration["streams"].keys() + else [] + ) self.__isNew = trackDescriptor is None + self.__trackDescriptor = trackDescriptor + self.__patternId = ( + int(patternId) + if patternId is not None + else ( + int(trackDescriptor.getPatternId()) + if trackDescriptor is not None and trackDescriptor.getPatternId() != -1 + else -1 + ) + ) + self.__patternLabel = str(patternLabel) + self.__siblingTrackDescriptors = list(siblingTrackDescriptors or []) + if self.__isNew: self.__trackType = trackType self.__trackCodec = TrackCodec.UNKNOWN self.__audioLayout = AudioLayout.LAYOUT_UNDEFINED self.__index = index self.__subIndex = subIndex - self.__trackDescriptor : TrackDescriptor = None - self.__pattern : Pattern = self.__pc.getPattern(patternId) if patternId is not None else {} + self.__draftTrackTags = {} else: self.__trackType = trackDescriptor.getType() self.__trackCodec = trackDescriptor.getCodec() self.__audioLayout = trackDescriptor.getAudioLayout() self.__index = trackDescriptor.getIndex() self.__subIndex = trackDescriptor.getSubIndex() - self.__trackDescriptor : TrackDescriptor = trackDescriptor - self.__pattern : Pattern = self.__pc.getPattern(self.__trackDescriptor.getPatternId()) - + self.__draftTrackTags = { + key: value + for key, value in trackDescriptor.getTags().items() + if key not in ("language", "title") + } + def _descriptor_refs_same_track(self, descriptor: TrackDescriptor) -> bool: + if self.__trackDescriptor is None: + return False + if descriptor.getId() != -1 and self.__trackDescriptor.getId() != -1: + return descriptor.getId() == self.__trackDescriptor.getId() + return ( + descriptor.getPatternId() == self.__trackDescriptor.getPatternId() + and 
descriptor.getIndex() == self.__trackDescriptor.getIndex() + and descriptor.getSubIndex() == self.__trackDescriptor.getSubIndex() + ) def updateTags(self): self.trackTagsTable.clear() - trackId = self.__trackDescriptor.getId() - - if trackId != -1: - - trackTags = self.__tac.findAllTrackTags(trackId) - - for k,v in trackTags.items(): - - if k != 'language' and k != 'title': - - textColor = None - if k in self.__ignoreTrackKeys: - textColor = 'blue' - if k in self.__removeTrackKeys: - textColor = 'red' - - row = (formatRichColor(k, textColor), formatRichColor(v, textColor)) - self.trackTagsTable.add_row(*map(str, row)) + for key, value in self.__draftTrackTags.items(): + textColor = None + if key in self.__ignoreTrackKeys: + textColor = "blue" + if key in self.__removeTrackKeys: + textColor = "red" + row = (formatRichColor(key, textColor), formatRichColor(value, textColor)) + self.trackTagsTable.add_row(*map(str, row)) def on_mount(self): - self.query_one("#index_label", Static).update(str(self.__index) if self.__index is not None else '-') - self.query_one("#subindex_label", Static).update(str(self.__subIndex)if self.__subIndex is not None else '-') - - if self.__pattern is not None: - self.query_one("#pattern_label", Static).update(self.__pattern.getPattern()) + self.query_one("#index_label", Static).update( + str(self.__index) if self.__index is not None else "-" + ) + self.query_one("#subindex_label", Static).update( + str(self.__subIndex) if self.__subIndex is not None else "-" + ) + self.query_one("#pattern_label", Static).update(self.__patternLabel) if self.__trackType is not None: self.query_one("#type_select", Select).value = self.__trackType.label() - if self.__trackType == TrackType.AUDIO: - self.query_one("#audio_layout_select", Select).value = self.__audioLayout.label() - for d in TrackDisposition: + self.query_one("#audio_layout_select", Select).value = self.__audioLayout.label() - dispositionIsSet = (self.__trackDescriptor is not None - and d in 
self.__trackDescriptor.getDispositionSet()) + for disposition in TrackDisposition: - dispositionOption = (d.label(), d.index(), dispositionIsSet) - self.query_one("#dispositions_selection_list", SelectionList).add_option(dispositionOption) + dispositionIsSet = ( + self.__trackDescriptor is not None + and disposition in self.__trackDescriptor.getDispositionSet() + ) + + dispositionOption = ( + disposition.label(), + disposition.index(), + dispositionIsSet, + ) + self.query_one("#dispositions_selection_list", SelectionList).add_option( + dispositionOption + ) if self.__trackDescriptor is not None: - - self.query_one("#language_select", Select).value = self.__trackDescriptor.getLanguage().label() + self.query_one("#language_select", Select).value = ( + self.__trackDescriptor.getLanguage().label() + ) self.query_one("#title_input", Input).value = self.__trackDescriptor.getTitle() self.updateTags() - def compose(self): self.trackTagsTable = DataTable(classes="five") - # Define the columns with headers self.column_key_track_tag_key = self.trackTagsTable.add_column("Key", width=50) self.column_key_track_tag_value = self.trackTagsTable.add_column("Value", width=100) - self.trackTagsTable.cursor_type = 'row' + self.trackTagsTable.cursor_type = "row" - - languages = [l.label() for l in IsoLanguage] + languages = [language.label() for language in IsoLanguage] yield Header() with Grid(): - # 1 - yield Static(f"New stream" if self.__isNew else f"Edit stream", id="toplabel", classes="five") + yield Static( + "New stream" if self.__isNew else "Edit stream", + id="toplabel", + classes="five", + ) - # 2 yield Static("for pattern") yield Static("", id="pattern_label", classes="four", markup=False) - # 3 yield Static(" ", classes="five") - # 4 yield Static("Index / Subindex") yield Static("", id="index_label", classes="two") yield Static("", id="subindex_label", classes="two") - # 5 yield Static(" ", classes="five") - # 6 yield Static("Type") - yield Select.from_values([t.label() for 
t in TrackType], classes="four", id="type_select") + yield Select.from_values( + [trackType.label() for trackType in TrackType], + classes="four", + id="type_select", + ) - # 7 - if self.__trackType == TrackType.AUDIO: - yield Static("Audio Layout") - yield Select.from_values([t.label() for t in AudioLayout], classes="four", id="audio_layout_select") - else: - yield Static(" ", classes="five") + yield Static("Audio Layout") + yield Select.from_values( + [layout.label() for layout in AudioLayout], + classes="four", + id="audio_layout_select", + ) - # 8 yield Static(" ", classes="five") - # 9 yield Static(" ", classes="five") - # 10 yield Static("Language") yield Select.from_values(languages, classes="four", id="language_select") - # 11 + yield Static(" ", classes="five") - # 12 yield Static("Title") yield Input(id="title_input", classes="four") - # 13 yield Static(" ", classes="five") - # 14 yield Static(" ", classes="five") - # 15 yield Static("Stream tags") yield Static(" ") yield Button("Add", id="button_add_stream_tag") yield Button("Edit", id="button_edit_stream_tag") yield Button("Delete", id="button_delete_stream_tag") - # 16 + yield self.trackTagsTable - # 17 yield Static(" ", classes="five") - # 18 yield Static("Stream dispositions", classes="five") - # 19 yield SelectionList[int]( classes="five", - id = "dispositions_selection_list" + id="dispositions_selection_list", ) - # 20 yield Static(" ", classes="five") - # 21 yield Static(" ", classes="five") - # 22 yield Button("Save", id="save_button") yield Button("Cancel", id="cancel_button") - # 23 yield Static(" ", classes="five") - # 24 yield Static(" ", classes="five", id="messagestatic") - yield Footer(id="footer") - def getTrackDescriptorFromInput(self): kwargs = {} - kwargs[TrackDescriptor.CONTEXT_KEY] = self.context - kwargs[TrackDescriptor.PATTERN_ID_KEY] = int(self.__pattern.getId()) + if self.__trackDescriptor is not None and self.__trackDescriptor.getId() != -1: + kwargs[TrackDescriptor.ID_KEY] = 
self.__trackDescriptor.getId() - kwargs[TrackDescriptor.INDEX_KEY] = self.__index - kwargs[TrackDescriptor.SUB_INDEX_KEY] = self.__subIndex #! + if self.__patternId != -1: + kwargs[TrackDescriptor.PATTERN_ID_KEY] = int(self.__patternId) - kwargs[TrackDescriptor.TRACK_TYPE_KEY] = TrackType.fromLabel(self.query_one("#type_select", Select).value) + kwargs[TrackDescriptor.INDEX_KEY] = int(self.__index) + kwargs[TrackDescriptor.SOURCE_INDEX_KEY] = ( + int(self.__trackDescriptor.getSourceIndex()) + if self.__trackDescriptor is not None + else int(self.__index) + ) + if self.__subIndex is not None and int(self.__subIndex) >= 0: + kwargs[TrackDescriptor.SUB_INDEX_KEY] = int(self.__subIndex) + selectedTrackType = TrackType.fromLabel( + self.query_one("#type_select", Select).value + ) + kwargs[TrackDescriptor.TRACK_TYPE_KEY] = selectedTrackType kwargs[TrackDescriptor.CODEC_KEY] = self.__trackCodec - - if self.__trackType == TrackType.AUDIO: - kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = AudioLayout.fromLabel(self.query_one("#audio_layout_select", Select).value) + + if selectedTrackType == TrackType.AUDIO: + kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = AudioLayout.fromLabel( + self.query_one("#audio_layout_select", Select).value + ) else: kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = AudioLayout.LAYOUT_UNDEFINED - trackTags = {} + trackTags = dict(self.__draftTrackTags) + language = self.query_one("#language_select", Select).value if language: - trackTags['language'] = IsoLanguage.find(language).threeLetter() + trackTags["language"] = IsoLanguage.find(language).threeLetter() + title = self.query_one("#title_input", Input).value if title: - trackTags['title'] = title + trackTags["title"] = title - tableTags = {row[0]:row[1] for r in self.trackTagsTable.rows if (row := self.trackTagsTable.get_row(r)) and row[0] != 'language' and row[0] != 'title'} + kwargs[TrackDescriptor.TAGS_KEY] = trackTags - kwargs[TrackDescriptor.TAGS_KEY] = trackTags | tableTags - - dispositionFlags = sum([2**f 
for f in self.query_one("#dispositions_selection_list", SelectionList).selected]) - kwargs[TrackDescriptor.DISPOSITION_SET_KEY] = TrackDisposition.toSet(dispositionFlags) + dispositionFlags = sum( + [2 ** flag for flag in self.query_one("#dispositions_selection_list", SelectionList).selected] + ) + kwargs[TrackDescriptor.DISPOSITION_SET_KEY] = TrackDisposition.toSet( + dispositionFlags + ) return TrackDescriptor(**kwargs) - - def getSelectedTag(self): try: - - # Fetch the currently selected row when 'Enter' is pressed - #selected_row_index = self.table.cursor_row - row_key, col_key = self.trackTagsTable.coordinate_to_cell_key(self.trackTagsTable.cursor_coordinate) + row_key, _ = self.trackTagsTable.coordinate_to_cell_key( + self.trackTagsTable.cursor_coordinate + ) if row_key is not None: selected_tag_data = self.trackTagsTable.get_row(row_key) @@ -357,101 +377,92 @@ class TrackDetailsScreen(Screen): return tagKey, tagValue - else: - return None + return None except CellDoesNotExist: return None - - - # Event handler for button press def on_button_pressed(self, event: Button.Pressed) -> None: - # Check if the button pressed is the one we are interested in if event.button.id == "save_button": - - # Check for multiple default/forced disposition flags - - if self.__trackType == TrackType.VIDEO: - trackList = self.__tc.findVideoTracks(self.__pattern.getId()) - if self.__trackType == TrackType.AUDIO: - trackList = self.__tc.findAudioTracks(self.__pattern.getId()) - elif self.__trackType == TrackType.SUBTITLE: - trackList = self.__tc.findSubtitleTracks(self.__pattern.getId()) - else: - trackList = [] - - siblingTrackList = [t for t in trackList if t.getType() == self.__trackType and t.getIndex() != self.__index] - - numDefaultTracks = len([t for t in siblingTrackList if TrackDisposition.DEFAULT in t.getDispositionSet()]) - numForcedTracks = len([t for t in siblingTrackList if TrackDisposition.FORCED in t.getDispositionSet()]) - - self.__subIndex = len(trackList) 
trackDescriptor = self.getTrackDescriptorFromInput() - if ((TrackDisposition.DEFAULT in trackDescriptor.getDispositionSet() and numDefaultTracks) - or (TrackDisposition.FORCED in trackDescriptor.getDispositionSet() and numForcedTracks)): + siblingTrackList = [ + descriptor + for descriptor in self.__siblingTrackDescriptors + if not self._descriptor_refs_same_track(descriptor) + ] + siblingTrackList = [ + descriptor + for descriptor in siblingTrackList + if descriptor.getType() == trackDescriptor.getType() + ] - self.query_one("#messagestatic", Static).update("Cannot add another stream with disposition flag 'debug' or 'forced' set") + numDefaultTracks = len( + [ + descriptor + for descriptor in siblingTrackList + if TrackDisposition.DEFAULT in descriptor.getDispositionSet() + ] + ) + numForcedTracks = len( + [ + descriptor + for descriptor in siblingTrackList + if TrackDisposition.FORCED in descriptor.getDispositionSet() + ] + ) + if self.__isNew: + trackDescriptor.setSubIndex(len(siblingTrackList)) + elif self.__subIndex is not None and int(self.__subIndex) >= 0: + trackDescriptor.setSubIndex(int(self.__subIndex)) + + if ( + TrackDisposition.DEFAULT in trackDescriptor.getDispositionSet() + and numDefaultTracks + ) or ( + TrackDisposition.FORCED in trackDescriptor.getDispositionSet() + and numForcedTracks + ): + + self.query_one("#messagestatic", Static).update( + "Cannot add another stream with disposition flag 'default' or 'forced' set" + ) else: - self.query_one("#messagestatic", Static).update(" ") - - if self.__isNew: - - # Track per Screen hinzufügen - self.__tc.addTrack(trackDescriptor) - self.dismiss(trackDescriptor) - - else: - - track = self.__tc.getTrack(self.__pattern.getId(), self.__index) - - # Track per details screen updaten - if self.__tc.updateTrack(track.getId(), trackDescriptor): - self.dismiss(trackDescriptor) - - else: - self.app.pop_screen() + self.dismiss(trackDescriptor) if event.button.id == "cancel_button": self.app.pop_screen() - if 
event.button.id == "button_add_stream_tag": - if not self.__isNew: - self.app.push_screen(TagDetailsScreen(), self.handle_update_tag) + self.app.push_screen(TagDetailsScreen(), self.handle_update_tag) if event.button.id == "button_edit_stream_tag": - tagKey, tagValue = self.getSelectedTag() - self.app.push_screen(TagDetailsScreen(key=tagKey, value=tagValue), self.handle_update_tag) + selectedTag = self.getSelectedTag() + if selectedTag is not None: + self.app.push_screen( + TagDetailsScreen(key=selectedTag[0], value=selectedTag[1]), + self.handle_update_tag, + ) if event.button.id == "button_delete_stream_tag": - tagKey, tagValue = self.getSelectedTag() - self.app.push_screen(TagDeleteScreen(key=tagKey, value=tagValue), self.handle_delete_tag) - + selectedTag = self.getSelectedTag() + if selectedTag is not None: + self.app.push_screen( + TagDeleteScreen(key=selectedTag[0], value=selectedTag[1]), + self.handle_delete_tag, + ) def handle_update_tag(self, tag): - - trackId = self.__trackDescriptor.getId() - - if trackId == -1: - raise click.ClickException(f"TrackDetailsScreen.handle_update_tag: trackId not set (-1) trackDescriptor={self.__trackDescriptor}") - - if self.__tac.updateTrackTag(trackId, tag[0], tag[1]) is not None: - self.updateTags() + if tag is None: + return + self.__draftTrackTags[str(tag[0])] = str(tag[1]) + self.updateTags() def handle_delete_tag(self, trackTag): - - trackId = self.__trackDescriptor.getId() - - if trackId == -1: - raise click.ClickException(f"TrackDetailsScreen.handle_delete_tag: trackId not set (-1) trackDescriptor={self.__trackDescriptor}") - - tag = self.__tac.findTrackTag(trackId, trackTag[0]) - - if tag is not None: - if self.__tac.deleteTrackTag(tag.id): - self.updateTags() + if trackTag is None: + return + self.__draftTrackTags.pop(str(trackTag[0]), None) + self.updateTags() diff --git a/src/ffx/video_encoder.py b/src/ffx/video_encoder.py index 573c4fc..ec9fe59 100644 --- a/src/ffx/video_encoder.py +++ 
b/src/ffx/video_encoder.py @@ -5,6 +5,7 @@ class VideoEncoder(Enum): AV1 = {'label': 'av1', 'index': 1} VP9 = {'label': 'vp9', 'index': 2} H264 = {'label': 'h264', 'index': 3} + COPY = {'label': 'copy', 'index': 4} UNDEFINED = {'label': 'undefined', 'index': 0} diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..3b48c5e --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +# Repo-root tests package for legacy and future test code. diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/integration/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/integration/pattern_management/__init__.py b/tests/integration/pattern_management/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/integration/pattern_management/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/integration/pattern_management/test_cli_pattern_matching.py b/tests/integration/pattern_management/test_cli_pattern_matching.py new file mode 100644 index 0000000..4e3bf97 --- /dev/null +++ b/tests/integration/pattern_management/test_cli_pattern_matching.py @@ -0,0 +1,138 @@ +from __future__ import annotations + +from pathlib import Path +import tempfile +import unittest + +from tests.support.ffx_bundle import ( + PatternTrackSpec, + SourceTrackSpec, + add_show, + build_controller_context, + create_source_fixture, + dispose_controller_context, + expected_output_path, + run_ffx_convert, +) + +from ffx.pattern_controller import PatternController +from ffx.track_type import TrackType + +try: + import pytest +except ImportError: # pragma: no cover - unittest-only environments + pytest = None + +if pytest is not None: + pytestmark = [pytest.mark.integration, pytest.mark.pattern_management] + + +class PatternManagementCliTests(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + self.workdir = Path(self.tempdir.name) + self.home_dir = 
self.workdir / "home" + self.home_dir.mkdir() + self.database_path = self.workdir / "test.db" + + def tearDown(self): + self.tempdir.cleanup() + + def prepare_duplicate_matching_patterns(self): + context = build_controller_context(self.database_path) + try: + add_show(context, show_id=1) + add_show(context, show_id=2) + + controller = PatternController(context) + track_descriptors = [ + PatternTrackSpec(index=0, source_index=0, track_type=TrackType.VIDEO) + ] + + def to_track_descriptor(spec: PatternTrackSpec): + from ffx.track_descriptor import TrackDescriptor + + kwargs = { + TrackDescriptor.INDEX_KEY: spec.index, + TrackDescriptor.SOURCE_INDEX_KEY: spec.source_index, + TrackDescriptor.TRACK_TYPE_KEY: spec.track_type, + TrackDescriptor.TAGS_KEY: dict(spec.tags), + TrackDescriptor.DISPOSITION_SET_KEY: set(spec.dispositions), + } + return TrackDescriptor(**kwargs) + + controller.savePatternSchema( + {"show_id": 1, "pattern": r"^dup_(s[0-9]+e[0-9]+)\.mkv$"}, + [to_track_descriptor(track_descriptors[0])], + ) + controller.savePatternSchema( + {"show_id": 2, "pattern": r"^dup_.*$"}, + [to_track_descriptor(track_descriptors[0])], + ) + finally: + dispose_controller_context(context) + + def test_convert_fails_when_filename_matches_more_than_one_pattern(self): + self.prepare_duplicate_matching_patterns() + source_filename = "dup_s01e01.mkv" + source_path = create_source_fixture( + self.workdir, + source_filename, + [ + SourceTrackSpec(TrackType.VIDEO, identity="video-0"), + SourceTrackSpec(TrackType.AUDIO, identity="audio-1", language="eng"), + ], + ) + + completed = run_ffx_convert( + self.workdir, + self.home_dir, + self.database_path, + "--video-encoder", + "copy", + "--no-tmdb", + "--no-prompt", + "--no-signature", + str(source_path), + ) + + self.assertNotEqual(completed.returncode, 0) + error_output = f"{completed.stdout}\n{completed.stderr}" + self.assertIn("matched more than one pattern", error_output) + self.assertFalse(expected_output_path(self.workdir, 
source_filename).exists()) + + def test_convert_can_ignore_duplicate_matches_when_no_pattern_is_requested(self): + self.prepare_duplicate_matching_patterns() + source_filename = "dup_s01e01.mkv" + source_path = create_source_fixture( + self.workdir, + source_filename, + [ + SourceTrackSpec(TrackType.VIDEO, identity="video-0"), + SourceTrackSpec(TrackType.AUDIO, identity="audio-1", language="eng"), + ], + ) + + completed = run_ffx_convert( + self.workdir, + self.home_dir, + self.database_path, + "--video-encoder", + "copy", + "--no-pattern", + "--no-tmdb", + "--no-prompt", + "--no-signature", + str(source_path), + ) + + self.assertEqual( + 0, + completed.returncode, + f"STDOUT:\n{completed.stdout}\nSTDERR:\n{completed.stderr}", + ) + self.assertTrue(expected_output_path(self.workdir, source_filename).exists()) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/integration/subtrack_mapping/__init__.py b/tests/integration/subtrack_mapping/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/integration/subtrack_mapping/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/integration/subtrack_mapping/test_cli_bundle.py b/tests/integration/subtrack_mapping/test_cli_bundle.py new file mode 100644 index 0000000..11e0a30 --- /dev/null +++ b/tests/integration/subtrack_mapping/test_cli_bundle.py @@ -0,0 +1,436 @@ +from __future__ import annotations + +import json +from pathlib import Path +import tempfile +import unittest + +from tests.support.ffx_bundle import ( + PatternTrackSpec, + SourceTrackSpec, + create_source_fixture, + expected_output_path, + extract_first_subtitle_text, + ffprobe_json, + get_tag, + prepare_pattern_database, + run_ffx_convert, + write_vtt, +) + +from ffx.track_type import TrackType + +try: + import pytest +except ImportError: # pragma: no cover - unittest-only environments + pytest = None + +if pytest is not None: + pytestmark = [pytest.mark.integration, pytest.mark.subtrack_mapping] + + +class 
SubtrackMappingBundleTests(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + self.workdir = Path(self.tempdir.name) + self.home_dir = self.workdir / "home" + self.home_dir.mkdir() + self.database_path = self.workdir / "test.db" + + def tearDown(self): + self.tempdir.cleanup() + + def write_config(self, data: dict) -> None: + config_dir = self.home_dir / ".local" / "etc" + config_dir.mkdir(parents=True, exist_ok=True) + (config_dir / "ffx.json").write_text(json.dumps(data), encoding="utf-8") + + def assertCompleted(self, completed): + if completed.returncode != 0: + self.fail( + "FFX convert failed\n" + f"STDOUT:\n{completed.stdout}\n" + f"STDERR:\n{completed.stderr}" + ) + + def test_pattern_reorders_and_omits_tracks_preserving_metadata_and_group_order(self): + source_filename = "reorder_s01e01.mkv" + source_path = create_source_fixture( + self.workdir, + source_filename, + [ + SourceTrackSpec(TrackType.VIDEO, identity="video-0", title="Video Zero"), + SourceTrackSpec( + TrackType.SUBTITLE, + identity="subtitle-1", + language="eng", + title="First Subtitle", + subtitle_lines=("first embedded subtitle",), + ), + SourceTrackSpec( + TrackType.AUDIO, + identity="audio-2", + language="deu", + title="German Audio", + ), + SourceTrackSpec( + TrackType.SUBTITLE, + identity="subtitle-3", + language="fra", + title="Second Subtitle", + subtitle_lines=("second embedded subtitle",), + ), + SourceTrackSpec(TrackType.ATTACHMENT, attachment_name="ordered.ttf"), + ], + ) + + prepare_pattern_database( + self.database_path, + r"^reorder_(s[0-9]+e[0-9]+)\.mkv$", + [ + PatternTrackSpec( + index=0, + source_index=0, + track_type=TrackType.VIDEO, + tags={"THIS_IS": "video-0", "title": "Video Zero"}, + ), + PatternTrackSpec( + index=1, + source_index=2, + track_type=TrackType.AUDIO, + tags={"THIS_IS": "audio-2", "language": "deu", "title": "German Audio"}, + ), + PatternTrackSpec( + index=2, + source_index=1, + track_type=TrackType.SUBTITLE, + 
tags={"THIS_IS": "subtitle-1", "language": "eng", "title": "First Subtitle"}, + ), + ], + ) + + completed = run_ffx_convert( + self.workdir, + self.home_dir, + self.database_path, + "--video-encoder", + "copy", + "--no-tmdb", + "--no-prompt", + "--no-signature", + str(source_path), + ) + self.assertCompleted(completed) + + output_path = expected_output_path(self.workdir, source_filename) + self.assertTrue(output_path.is_file(), output_path) + + streams = ffprobe_json(output_path)["streams"] + self.assertEqual( + [stream["codec_type"] for stream in streams], + ["video", "audio", "subtitle", "attachment"], + ) + self.assertEqual( + [get_tag(streams[index], "THIS_IS") for index in range(3)], + ["video-0", "audio-2", "subtitle-1"], + ) + self.assertNotIn( + "subtitle-3", + [get_tag(stream, "THIS_IS") for stream in streams if stream["codec_type"] != "attachment"], + ) + self.assertEqual(streams[-1]["codec_name"], "ttf") + extracted_subtitle = extract_first_subtitle_text(self.workdir, output_path) + self.assertIn("first embedded subtitle", extracted_subtitle) + self.assertNotIn("second embedded subtitle", extracted_subtitle) + + def test_cli_rearrange_streams_reorders_tracks_without_database_pattern(self): + source_filename = "cli_s01e01.mkv" + source_path = create_source_fixture( + self.workdir, + source_filename, + [ + SourceTrackSpec(TrackType.VIDEO, identity="video-0"), + SourceTrackSpec(TrackType.AUDIO, identity="audio-1", language="eng", title="First Audio"), + SourceTrackSpec(TrackType.AUDIO, identity="audio-2", language="deu", title="Second Audio"), + SourceTrackSpec(TrackType.SUBTITLE, identity="subtitle-3", language="eng", title="Subtitle"), + ], + ) + + completed = run_ffx_convert( + self.workdir, + self.home_dir, + self.database_path, + "--video-encoder", + "copy", + "--no-pattern", + "--no-tmdb", + "--no-prompt", + "--no-signature", + "--rearrange-streams", + "0,2,1,3", + str(source_path), + ) + self.assertCompleted(completed) + + output_path = 
expected_output_path(self.workdir, source_filename) + streams = ffprobe_json(output_path)["streams"] + + self.assertEqual( + [stream["codec_type"] for stream in streams], + ["video", "audio", "audio", "subtitle"], + ) + self.assertEqual( + [get_tag(stream, "THIS_IS") for stream in streams], + ["video-0", "audio-2", "audio-1", "subtitle-3"], + ) + + def test_no_pattern_stream_remove_list_clears_copied_stream_metadata(self): + source_filename = "remove_tags_s01e01.mkv" + self.write_config( + { + "metadata": { + "streams": { + "remove": ["BPS"], + } + } + } + ) + source_path = create_source_fixture( + self.workdir, + source_filename, + [ + SourceTrackSpec( + TrackType.VIDEO, + identity="video-0", + extra_tags={"BPS": "remove-me", "KEEP_ME": "video-keep"}, + ), + SourceTrackSpec( + TrackType.AUDIO, + identity="audio-1", + language="eng", + title="Main Audio", + extra_tags={"BPS": "remove-me", "KEEP_ME": "audio-keep"}, + ), + ], + ) + + completed = run_ffx_convert( + self.workdir, + self.home_dir, + self.database_path, + "--video-encoder", + "copy", + "--no-pattern", + "--no-tmdb", + "--no-prompt", + "--no-signature", + str(source_path), + ) + self.assertCompleted(completed) + + output_path = expected_output_path(self.workdir, source_filename) + streams = ffprobe_json(output_path)["streams"] + + self.assertEqual( + [stream["codec_type"] for stream in streams], + ["video", "audio"], + ) + self.assertEqual(get_tag(streams[0], "THIS_IS"), "video-0") + self.assertEqual(get_tag(streams[0], "KEEP_ME"), "video-keep") + self.assertIsNone(get_tag(streams[0], "BPS")) + self.assertEqual(get_tag(streams[1], "THIS_IS"), "audio-1") + self.assertEqual(get_tag(streams[1], "KEEP_ME"), "audio-keep") + self.assertIsNone(get_tag(streams[1], "BPS")) + + def test_pattern_validation_fails_for_nonexistent_source_track_reference(self): + source_filename = "invalid_s01e01.mkv" + source_path = create_source_fixture( + self.workdir, + source_filename, + [ + SourceTrackSpec(TrackType.VIDEO, 
identity="video-0"), + SourceTrackSpec(TrackType.AUDIO, identity="audio-1"), + SourceTrackSpec(TrackType.SUBTITLE, identity="subtitle-2"), + ], + ) + + prepare_pattern_database( + self.database_path, + r"^invalid_(s[0-9]+e[0-9]+)\.mkv$", + [ + PatternTrackSpec(index=0, source_index=0, track_type=TrackType.VIDEO), + PatternTrackSpec(index=1, source_index=99, track_type=TrackType.SUBTITLE), + ], + ) + + completed = run_ffx_convert( + self.workdir, + self.home_dir, + self.database_path, + "--video-encoder", + "copy", + "--no-tmdb", + "--no-prompt", + "--no-signature", + str(source_path), + ) + + self.assertNotEqual(completed.returncode, 0) + error_output = f"{completed.stdout}\n{completed.stderr}" + self.assertIn("non-existent source track #99", error_output) + self.assertFalse(expected_output_path(self.workdir, source_filename).exists()) + + def test_external_subtitle_file_replaces_payload_and_overrides_metadata(self): + source_filename = "substitute_s01e01.mkv" + self.write_config( + { + "metadata": { + "streams": { + "remove": ["BPS"], + } + } + } + ) + source_path = create_source_fixture( + self.workdir, + source_filename, + [ + SourceTrackSpec(TrackType.VIDEO, identity="video-0"), + SourceTrackSpec(TrackType.AUDIO, identity="audio-1", language="eng", title="Main Audio"), + SourceTrackSpec( + TrackType.SUBTITLE, + identity="embedded-subtitle", + language="eng", + title="Embedded Title", + extra_tags={"BPS": "remove-me", "EXTERNAL_KEEP": "keep-me"}, + subtitle_lines=("embedded subtitle payload",), + ), + ], + ) + + write_vtt( + self.workdir / "substitute_s01e01_2_deu.vtt", + ("external subtitle payload",), + ) + + prepare_pattern_database( + self.database_path, + r"^substitute_(s[0-9]+e[0-9]+)\.mkv$", + [ + PatternTrackSpec(index=0, source_index=0, track_type=TrackType.VIDEO), + PatternTrackSpec(index=1, source_index=1, track_type=TrackType.AUDIO), + PatternTrackSpec(index=2, source_index=2, track_type=TrackType.SUBTITLE), + ], + ) + + completed = run_ffx_convert( 
+ self.workdir, + self.home_dir, + self.database_path, + "--video-encoder", + "copy", + "--no-tmdb", + "--no-prompt", + "--no-signature", + "--subtitle-directory", + str(self.workdir), + "--subtitle-prefix", + "substitute", + str(source_path), + ) + self.assertCompleted(completed) + + output_path = expected_output_path(self.workdir, source_filename) + streams = ffprobe_json(output_path)["streams"] + subtitle_stream = [stream for stream in streams if stream["codec_type"] == "subtitle"][0] + + self.assertEqual(get_tag(subtitle_stream, "language"), "deu") + self.assertEqual(get_tag(subtitle_stream, "title"), "Embedded Title") + self.assertEqual(get_tag(subtitle_stream, "THIS_IS"), "embedded-subtitle") + self.assertEqual(get_tag(subtitle_stream, "EXTERNAL_KEEP"), "keep-me") + self.assertIsNone(get_tag(subtitle_stream, "BPS")) + + extracted_subtitle = extract_first_subtitle_text(self.workdir, output_path) + self.assertIn("external subtitle payload", extracted_subtitle) + self.assertNotIn("embedded subtitle payload", extracted_subtitle) + + def test_subtitle_prefix_uses_configured_base_directory_when_directory_is_omitted(self): + source_filename = "substitute_default_s01e01.mkv" + subtitle_prefix = "substitute_default" + subtitles_base_dir = self.home_dir / ".local" / "var" / "sync" / "subtitles" + resolved_subtitle_dir = subtitles_base_dir / subtitle_prefix + resolved_subtitle_dir.mkdir(parents=True, exist_ok=True) + self.write_config( + { + "subtitlesDirectory": "~/.local/var/sync/subtitles", + "metadata": { + "streams": { + "remove": ["BPS"], + } + } + } + ) + source_path = create_source_fixture( + self.workdir, + source_filename, + [ + SourceTrackSpec(TrackType.VIDEO, identity="video-0"), + SourceTrackSpec(TrackType.AUDIO, identity="audio-1", language="eng", title="Main Audio"), + SourceTrackSpec( + TrackType.SUBTITLE, + identity="embedded-subtitle", + language="eng", + title="Embedded Title", + extra_tags={"BPS": "remove-me", "EXTERNAL_KEEP": "keep-me"}, + 
subtitle_lines=("embedded subtitle payload",), + ), + ], + ) + + write_vtt( + resolved_subtitle_dir / f"{subtitle_prefix}_s01e01_2_deu.vtt", + ("external subtitle payload",), + ) + + prepare_pattern_database( + self.database_path, + r"^substitute_default_(s[0-9]+e[0-9]+)\.mkv$", + [ + PatternTrackSpec(index=0, source_index=0, track_type=TrackType.VIDEO), + PatternTrackSpec(index=1, source_index=1, track_type=TrackType.AUDIO), + PatternTrackSpec(index=2, source_index=2, track_type=TrackType.SUBTITLE), + ], + ) + + completed = run_ffx_convert( + self.workdir, + self.home_dir, + self.database_path, + "--video-encoder", + "copy", + "--no-tmdb", + "--no-prompt", + "--no-signature", + "--subtitle-prefix", + subtitle_prefix, + str(source_path), + ) + self.assertCompleted(completed) + + output_path = expected_output_path(self.workdir, source_filename) + streams = ffprobe_json(output_path)["streams"] + subtitle_stream = [stream for stream in streams if stream["codec_type"] == "subtitle"][0] + + self.assertEqual(get_tag(subtitle_stream, "language"), "deu") + self.assertEqual(get_tag(subtitle_stream, "title"), "Embedded Title") + self.assertEqual(get_tag(subtitle_stream, "THIS_IS"), "embedded-subtitle") + self.assertEqual(get_tag(subtitle_stream, "EXTERNAL_KEEP"), "keep-me") + self.assertIsNone(get_tag(subtitle_stream, "BPS")) + + extracted_subtitle = extract_first_subtitle_text(self.workdir, output_path) + self.assertIn("external subtitle payload", extracted_subtitle) + self.assertNotIn("embedded subtitle payload", extracted_subtitle) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/integration/test_cli_unmux.py b/tests/integration/test_cli_unmux.py new file mode 100644 index 0000000..84b4b52 --- /dev/null +++ b/tests/integration/test_cli_unmux.py @@ -0,0 +1,228 @@ +from __future__ import annotations + +import json +import os +from pathlib import Path +import subprocess +import sys +import tempfile +import unittest + +from tests.support.ffx_bundle import 
( + SourceTrackSpec, + build_controller_context, + create_source_fixture, + dispose_controller_context, +) + +from ffx.pattern_controller import PatternController +from ffx.show_controller import ShowController +from ffx.show_descriptor import ShowDescriptor +from ffx.track_codec import TrackCodec +from ffx.track_descriptor import TrackDescriptor +from ffx.track_type import TrackType + +try: + import pytest +except ImportError: # pragma: no cover - unittest-only environments + pytest = None + +if pytest is not None: + pytestmark = [pytest.mark.integration] + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + + +def run_ffx_unmux(workdir: Path, home_dir: Path, database_path: Path, *args: str) -> subprocess.CompletedProcess[str]: + env = os.environ.copy() + env["HOME"] = str(home_dir) + existing_pythonpath = env.get("PYTHONPATH", "") + env["PYTHONPATH"] = str(SRC_ROOT) if not existing_pythonpath else f"{SRC_ROOT}{os.pathsep}{existing_pythonpath}" + + command = [ + sys.executable, + "-m", + "ffx", + "--database-file", + str(database_path), + "unmux", + *args, + ] + return subprocess.run(command, cwd=workdir, env=env, capture_output=True, text=True) + + +class UnmuxCliTests(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + self.workdir = Path(self.tempdir.name) + self.home_dir = self.workdir / "home" + self.home_dir.mkdir() + self.database_path = self.workdir / "test.db" + + def tearDown(self): + self.tempdir.cleanup() + + def write_config(self, data: dict) -> None: + config_dir = self.home_dir / ".local" / "etc" + config_dir.mkdir(parents=True, exist_ok=True) + (config_dir / "ffx.json").write_text(json.dumps(data), encoding="utf-8") + + def assertCompleted(self, completed): + if completed.returncode != 0: + self.fail( + "FFX unmux failed\n" + f"STDOUT:\n{completed.stdout}\n" + f"STDERR:\n{completed.stderr}" + ) + + def seed_matching_show(self, pattern_expression: str, *, indicator_season_digits: int, 
indicator_episode_digits: int) -> None: + context = build_controller_context(self.database_path) + try: + ShowController(context).updateShow( + ShowDescriptor( + id=1, + name="Unmux Test Show", + year=2000, + indicator_season_digits=indicator_season_digits, + indicator_episode_digits=indicator_episode_digits, + ) + ) + PatternController(context).savePatternSchema( + { + "show_id": 1, + "pattern": pattern_expression, + "quality": 0, + "notes": "", + }, + trackDescriptors=[ + TrackDescriptor( + index=0, + source_index=0, + track_type=TrackType.VIDEO, + codec_name=TrackCodec.H264, + tags={}, + disposition_set=set(), + ) + ], + ) + finally: + dispose_controller_context(context) + + def test_subtitles_only_without_output_directory_uses_configured_base_plus_label(self): + self.write_config( + { + "subtitlesDirectory": "~/.local/var/sync/subtitles", + } + ) + source_filename = "unmux_s01e01.mkv" + source_path = create_source_fixture( + self.workdir, + source_filename, + [ + SourceTrackSpec(TrackType.VIDEO, identity="video-0"), + SourceTrackSpec( + TrackType.SUBTITLE, + identity="subtitle-1", + language="eng", + subtitle_lines=("subtitle payload",), + ), + ], + ) + + completed = run_ffx_unmux( + self.workdir, + self.home_dir, + self.database_path, + "--subtitles-only", + "--label", + "dball", + str(source_path), + ) + self.assertCompleted(completed) + + expected_directory = self.home_dir / ".local" / "var" / "sync" / "subtitles" / "dball" + self.assertTrue(expected_directory.is_dir(), expected_directory) + + def test_unmux_uses_configured_indicator_digits_in_output_filenames(self): + self.write_config( + { + "defaultIndicatorSeasonDigits": 3, + "defaultIndicatorEpisodeDigits": 4, + } + ) + source_filename = "unmux_s01e01.mkv" + output_directory = self.workdir / "unmux-output" + output_directory.mkdir() + source_path = create_source_fixture( + self.workdir, + source_filename, + [ + SourceTrackSpec(TrackType.VIDEO, identity="video-0"), + ], + ) + + completed = run_ffx_unmux( 
+ self.workdir, + self.home_dir, + self.database_path, + "--label", + "dball", + "--output-directory", + str(output_directory), + str(source_path), + ) + self.assertCompleted(completed) + + output_filenames = sorted(path.name for path in output_directory.iterdir()) + self.assertEqual(1, len(output_filenames), output_filenames) + self.assertTrue( + output_filenames[0].startswith("dball_S001E0001_"), + output_filenames, + ) + + def test_unmux_prefers_matched_show_indicator_digits_over_config_defaults(self): + self.write_config( + { + "defaultIndicatorSeasonDigits": 4, + "defaultIndicatorEpisodeDigits": 4, + } + ) + self.seed_matching_show( + r"^unmux_([sS][0-9]+[eE][0-9]+)\.mkv$", + indicator_season_digits=1, + indicator_episode_digits=3, + ) + source_filename = "unmux_s01e01.mkv" + output_directory = self.workdir / "unmux-output" + output_directory.mkdir() + source_path = create_source_fixture( + self.workdir, + source_filename, + [ + SourceTrackSpec(TrackType.VIDEO, identity="video-0"), + ], + ) + + completed = run_ffx_unmux( + self.workdir, + self.home_dir, + self.database_path, + "--label", + "dball", + "--output-directory", + str(output_directory), + str(source_path), + ) + self.assertCompleted(completed) + + output_filenames = sorted(path.name for path in output_directory.iterdir()) + self.assertEqual(1, len(output_filenames), output_filenames) + self.assertTrue( + output_filenames[0].startswith("dball_S1E001_"), + output_filenames, + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/legacy/__init__.py b/tests/legacy/__init__.py new file mode 100644 index 0000000..56a2b3f --- /dev/null +++ b/tests/legacy/__init__.py @@ -0,0 +1 @@ +# Legacy custom FFX test harness modules. 
diff --git a/src/ffx/test/_basename_combinator_1.py b/tests/legacy/_basename_combinator_1.py similarity index 100% rename from src/ffx/test/_basename_combinator_1.py rename to tests/legacy/_basename_combinator_1.py diff --git a/src/ffx/test/basename_combinator.py b/tests/legacy/basename_combinator.py similarity index 85% rename from src/ffx/test/basename_combinator.py rename to tests/legacy/basename_combinator.py index 65c0e8d..bd1275c 100644 --- a/src/ffx/test/basename_combinator.py +++ b/tests/legacy/basename_combinator.py @@ -24,8 +24,9 @@ class BasenameCombinator(): @staticmethod def getClassReference(identifier): - importlib.import_module(f"ffx.test.basename_combinator_{ identifier }") - for name, obj in inspect.getmembers(sys.modules[f"ffx.test.basename_combinator_{ identifier }"]): + module_name = f"tests.legacy.basename_combinator_{ identifier }" + importlib.import_module(module_name) + for name, obj in inspect.getmembers(sys.modules[module_name]): #HINT: Excluding MediaCombinator as it seems to be included by import (?) 
if inspect.isclass(obj) and name != 'BasenameCombinator' and name.startswith('BasenameCombinator'): return obj diff --git a/src/ffx/test/basename_combinator_0.py b/tests/legacy/basename_combinator_0.py similarity index 100% rename from src/ffx/test/basename_combinator_0.py rename to tests/legacy/basename_combinator_0.py diff --git a/src/ffx/test/basename_combinator_2.py b/tests/legacy/basename_combinator_2.py similarity index 100% rename from src/ffx/test/basename_combinator_2.py rename to tests/legacy/basename_combinator_2.py diff --git a/src/ffx/test/combinator.py b/tests/legacy/combinator.py similarity index 100% rename from src/ffx/test/combinator.py rename to tests/legacy/combinator.py diff --git a/src/ffx/test/disposition_combinator_2.py b/tests/legacy/disposition_combinator_2.py similarity index 85% rename from src/ffx/test/disposition_combinator_2.py rename to tests/legacy/disposition_combinator_2.py index b2e7767..d908e21 100644 --- a/src/ffx/test/disposition_combinator_2.py +++ b/tests/legacy/disposition_combinator_2.py @@ -24,8 +24,9 @@ class DispositionCombinator2(): @staticmethod def getClassReference(identifier): - importlib.import_module(f"ffx.test.disposition_combinator_2_{ identifier }") - for name, obj in inspect.getmembers(sys.modules[f"ffx.test.disposition_combinator_2_{ identifier }"]): + module_name = f"tests.legacy.disposition_combinator_2_{ identifier }" + importlib.import_module(module_name) + for name, obj in inspect.getmembers(sys.modules[module_name]): #HINT: Excluding DispositionCombination as it seems to be included by import (?) 
if inspect.isclass(obj) and name != 'DispositionCombinator2' and name.startswith('DispositionCombinator2'): return obj diff --git a/src/ffx/test/disposition_combinator_2_0.py b/tests/legacy/disposition_combinator_2_0.py similarity index 100% rename from src/ffx/test/disposition_combinator_2_0.py rename to tests/legacy/disposition_combinator_2_0.py diff --git a/src/ffx/test/disposition_combinator_2_1.py b/tests/legacy/disposition_combinator_2_1.py similarity index 100% rename from src/ffx/test/disposition_combinator_2_1.py rename to tests/legacy/disposition_combinator_2_1.py diff --git a/src/ffx/test/disposition_combinator_2_2.py b/tests/legacy/disposition_combinator_2_2.py similarity index 100% rename from src/ffx/test/disposition_combinator_2_2.py rename to tests/legacy/disposition_combinator_2_2.py diff --git a/src/ffx/test/disposition_combinator_2_3 .py b/tests/legacy/disposition_combinator_2_3 .py similarity index 100% rename from src/ffx/test/disposition_combinator_2_3 .py rename to tests/legacy/disposition_combinator_2_3 .py diff --git a/src/ffx/test/disposition_combinator_3.py b/tests/legacy/disposition_combinator_3.py similarity index 84% rename from src/ffx/test/disposition_combinator_3.py rename to tests/legacy/disposition_combinator_3.py index 154a072..b08affe 100644 --- a/src/ffx/test/disposition_combinator_3.py +++ b/tests/legacy/disposition_combinator_3.py @@ -23,8 +23,9 @@ class DispositionCombinator3(): @staticmethod def getClassReference(identifier): - importlib.import_module(f"ffx.test.disposition_combinator_3_{ identifier }") - for name, obj in inspect.getmembers(sys.modules[f"ffx.test.disposition_combinator_3_{ identifier }"]): + module_name = f"tests.legacy.disposition_combinator_3_{ identifier }" + importlib.import_module(module_name) + for name, obj in inspect.getmembers(sys.modules[module_name]): #HINT: Excluding DispositionCombination as it seems to be included by import (?) 
if inspect.isclass(obj) and name != 'DispositionCombinator3' and name.startswith('DispositionCombinator3'): return obj diff --git a/src/ffx/test/disposition_combinator_3_0.py b/tests/legacy/disposition_combinator_3_0.py similarity index 100% rename from src/ffx/test/disposition_combinator_3_0.py rename to tests/legacy/disposition_combinator_3_0.py diff --git a/src/ffx/test/disposition_combinator_3_1.py b/tests/legacy/disposition_combinator_3_1.py similarity index 100% rename from src/ffx/test/disposition_combinator_3_1.py rename to tests/legacy/disposition_combinator_3_1.py diff --git a/src/ffx/test/disposition_combinator_3_2.py b/tests/legacy/disposition_combinator_3_2.py similarity index 100% rename from src/ffx/test/disposition_combinator_3_2.py rename to tests/legacy/disposition_combinator_3_2.py diff --git a/src/ffx/test/disposition_combinator_3_3.py b/tests/legacy/disposition_combinator_3_3.py similarity index 100% rename from src/ffx/test/disposition_combinator_3_3.py rename to tests/legacy/disposition_combinator_3_3.py diff --git a/src/ffx/test/disposition_combinator_3_4.py b/tests/legacy/disposition_combinator_3_4.py similarity index 100% rename from src/ffx/test/disposition_combinator_3_4.py rename to tests/legacy/disposition_combinator_3_4.py diff --git a/src/ffx/test/helper.py b/tests/legacy/helper.py similarity index 97% rename from src/ffx/test/helper.py rename to tests/legacy/helper.py index 619ad3d..798ef09 100644 --- a/src/ffx/test/helper.py +++ b/tests/legacy/helper.py @@ -1,11 +1,9 @@ import os, math, tempfile, click - -from ffx.ffx_controller import FfxController - from ffx.process import executeProcess from ffx.media_descriptor import MediaDescriptor +from ffx.media_descriptor_change_set import MediaDescriptorChangeSet from ffx.track_type import TrackType from ffx.helper import dictCache @@ -149,7 +147,6 @@ def createMediaTestFile(mediaDescriptor: MediaDescriptor, # subtitleFilePath = createVttFile(SHORT_SUBTITLE_SEQUENCE) - # commandTokens = 
FfxController.COMMAND_TOKENS commandTokens = ['ffmpeg', '-y'] generatorCache = [] @@ -232,15 +229,14 @@ def createMediaTestFile(mediaDescriptor: MediaDescriptor, f"{mediaTagKey}={mediaTagValue}"] subIndexCounter[trackType] += 1 - #TODO: Optimize too many runs ffxContext = {'config': ConfigurationController(), 'logger': logger} - fc = FfxController(ffxContext, mediaDescriptor) + mdcs = MediaDescriptorChangeSet(ffxContext, mediaDescriptor) commandTokens += (generatorTokens + importTokens + mappingTokens + metadataTokens - + fc.generateDispositionTokens()) + + mdcs.generateDispositionTokens()) commandTokens += ['-t', str(length)] diff --git a/src/ffx/test/indicator_combinator.py b/tests/legacy/indicator_combinator.py similarity index 100% rename from src/ffx/test/indicator_combinator.py rename to tests/legacy/indicator_combinator.py diff --git a/src/ffx/test/label_combinator.py b/tests/legacy/label_combinator.py similarity index 85% rename from src/ffx/test/label_combinator.py rename to tests/legacy/label_combinator.py index 7965f92..2731129 100644 --- a/src/ffx/test/label_combinator.py +++ b/tests/legacy/label_combinator.py @@ -25,8 +25,9 @@ class LabelCombinator(): @staticmethod def getClassReference(identifier): - importlib.import_module(f"ffx.test.{LabelCombinator.PREFIX}{ identifier }") - for name, obj in inspect.getmembers(sys.modules[f"ffx.test.{LabelCombinator.PREFIX}{ identifier }"]): + module_name = f"tests.legacy.{LabelCombinator.PREFIX}{ identifier }" + importlib.import_module(module_name) + for name, obj in inspect.getmembers(sys.modules[module_name]): #HINT: Excluding MediaCombinator as it seems to be included by import (?) 
if inspect.isclass(obj) and name != 'LabelCombinator' and name.startswith('LabelCombinator'): return obj diff --git a/src/ffx/test/label_combinator_0.py b/tests/legacy/label_combinator_0.py similarity index 100% rename from src/ffx/test/label_combinator_0.py rename to tests/legacy/label_combinator_0.py diff --git a/src/ffx/test/label_combinator_1.py b/tests/legacy/label_combinator_1.py similarity index 100% rename from src/ffx/test/label_combinator_1.py rename to tests/legacy/label_combinator_1.py diff --git a/src/ffx/test/media_combinator.py b/tests/legacy/media_combinator.py similarity index 84% rename from src/ffx/test/media_combinator.py rename to tests/legacy/media_combinator.py index 4053e35..bb00e70 100644 --- a/src/ffx/test/media_combinator.py +++ b/tests/legacy/media_combinator.py @@ -22,8 +22,9 @@ class MediaCombinator(): @staticmethod def getClassReference(identifier): - importlib.import_module(f"ffx.test.media_combinator_{ identifier }") - for name, obj in inspect.getmembers(sys.modules[f"ffx.test.media_combinator_{ identifier }"]): + module_name = f"tests.legacy.media_combinator_{ identifier }" + importlib.import_module(module_name) + for name, obj in inspect.getmembers(sys.modules[module_name]): #HINT: Excluding MediaCombinator as it seems to be included by import (?) 
if inspect.isclass(obj) and name != 'MediaCombinator' and name.startswith('MediaCombinator'): return obj diff --git a/src/ffx/test/media_combinator_0.py b/tests/legacy/media_combinator_0.py similarity index 100% rename from src/ffx/test/media_combinator_0.py rename to tests/legacy/media_combinator_0.py diff --git a/src/ffx/test/media_combinator_1.py b/tests/legacy/media_combinator_1.py similarity index 100% rename from src/ffx/test/media_combinator_1.py rename to tests/legacy/media_combinator_1.py diff --git a/src/ffx/test/media_combinator_2.py b/tests/legacy/media_combinator_2.py similarity index 100% rename from src/ffx/test/media_combinator_2.py rename to tests/legacy/media_combinator_2.py diff --git a/src/ffx/test/media_combinator_3.py b/tests/legacy/media_combinator_3.py similarity index 100% rename from src/ffx/test/media_combinator_3.py rename to tests/legacy/media_combinator_3.py diff --git a/src/ffx/test/media_combinator_4.py b/tests/legacy/media_combinator_4.py similarity index 100% rename from src/ffx/test/media_combinator_4.py rename to tests/legacy/media_combinator_4.py diff --git a/src/ffx/test/media_combinator_5.py b/tests/legacy/media_combinator_5.py similarity index 100% rename from src/ffx/test/media_combinator_5.py rename to tests/legacy/media_combinator_5.py diff --git a/src/ffx/test/media_combinator_6.py b/tests/legacy/media_combinator_6.py similarity index 100% rename from src/ffx/test/media_combinator_6.py rename to tests/legacy/media_combinator_6.py diff --git a/src/ffx/test/media_combinator_7.py b/tests/legacy/media_combinator_7.py similarity index 100% rename from src/ffx/test/media_combinator_7.py rename to tests/legacy/media_combinator_7.py diff --git a/src/ffx/test/media_tag_combinator.py b/tests/legacy/media_tag_combinator.py similarity index 84% rename from src/ffx/test/media_tag_combinator.py rename to tests/legacy/media_tag_combinator.py index b434694..8735eb2 100644 --- a/src/ffx/test/media_tag_combinator.py +++ 
b/tests/legacy/media_tag_combinator.py @@ -22,8 +22,9 @@ class MediaTagCombinator(): @staticmethod def getClassReference(identifier): - importlib.import_module(f"ffx.test.media_tag_combinator_{ identifier }") - for name, obj in inspect.getmembers(sys.modules[f"ffx.test.media_tag_combinator_{ identifier }"]): + module_name = f"tests.legacy.media_tag_combinator_{ identifier }" + importlib.import_module(module_name) + for name, obj in inspect.getmembers(sys.modules[module_name]): #HINT: Excluding MediaCombinator as it seems to be included by import (?) if inspect.isclass(obj) and name != 'MediaTagCombinator' and name.startswith('MediaTagCombinator'): return obj diff --git a/src/ffx/test/media_tag_combinator_0.py b/tests/legacy/media_tag_combinator_0.py similarity index 100% rename from src/ffx/test/media_tag_combinator_0.py rename to tests/legacy/media_tag_combinator_0.py diff --git a/src/ffx/test/media_tag_combinator_1.py b/tests/legacy/media_tag_combinator_1.py similarity index 100% rename from src/ffx/test/media_tag_combinator_1.py rename to tests/legacy/media_tag_combinator_1.py diff --git a/src/ffx/test/media_tag_combinator_2.py b/tests/legacy/media_tag_combinator_2.py similarity index 100% rename from src/ffx/test/media_tag_combinator_2.py rename to tests/legacy/media_tag_combinator_2.py diff --git a/src/ffx/test/permutation_combinator_2.py b/tests/legacy/permutation_combinator_2.py similarity index 100% rename from src/ffx/test/permutation_combinator_2.py rename to tests/legacy/permutation_combinator_2.py diff --git a/src/ffx/test/permutation_combinator_3.py b/tests/legacy/permutation_combinator_3.py similarity index 100% rename from src/ffx/test/permutation_combinator_3.py rename to tests/legacy/permutation_combinator_3.py diff --git a/src/ffx/test/release_combinator.py b/tests/legacy/release_combinator.py similarity index 100% rename from src/ffx/test/release_combinator.py rename to tests/legacy/release_combinator.py diff --git a/src/ffx/test/scenario.py 
b/tests/legacy/scenario.py similarity index 88% rename from src/ffx/test/scenario.py rename to tests/legacy/scenario.py index 4a924c6..9bb9c88 100644 --- a/src/ffx/test/scenario.py +++ b/tests/legacy/scenario.py @@ -4,7 +4,7 @@ from ffx.show_controller import ShowController from ffx.pattern_controller import PatternController from ffx.media_controller import MediaController -from ffx.test.helper import createEmptyDirectory +from .helper import createEmptyDirectory from ffx.database import databaseContext class Scenario(): @@ -90,11 +90,7 @@ class Scenario(): def __init__(self, context = None): self._context = context self._testDirectory = createEmptyDirectory() - self._ffxExecutablePath = os.path.join( - os.path.dirname( - os.path.dirname( - os.path.dirname(__file__))), - 'ffx.py') + self._ffxModuleName = 'ffx' self._logger = context['logger'] self._reportLogger = context['report_logger'] @@ -146,8 +142,9 @@ class Scenario(): @staticmethod def getClassReference(identifier): - importlib.import_module(f"ffx.test.scenario_{ identifier }") - for name, obj in inspect.getmembers(sys.modules[f"ffx.test.scenario_{ identifier }"]): + module_name = f"tests.legacy.scenario_{ identifier }" + importlib.import_module(module_name) + for name, obj in inspect.getmembers(sys.modules[module_name]): #HINT: Excluding Scenario as it seems to be included by import (?) 
if inspect.isclass(obj) and name != 'Scenario' and name.startswith('Scenario'): return obj diff --git a/src/ffx/test/scenario_1.py b/tests/legacy/scenario_1.py similarity index 95% rename from src/ffx/test/scenario_1.py rename to tests/legacy/scenario_1.py index a313e2f..57e42f5 100644 --- a/src/ffx/test/scenario_1.py +++ b/tests/legacy/scenario_1.py @@ -2,7 +2,7 @@ import os, sys, click, glob from .scenario import Scenario -from ffx.test.helper import createMediaTestFile +from .helper import createMediaTestFile from ffx.process import executeProcess from ffx.file_properties import FileProperties @@ -13,9 +13,9 @@ from ffx.track_descriptor import TrackDescriptor from ffx.track_type import TrackType from ffx.track_disposition import TrackDisposition -from ffx.test.media_combinator_0 import MediaCombinator0 +from .media_combinator_0 import MediaCombinator0 -from ffx.test.basename_combinator import BasenameCombinator +from .basename_combinator import BasenameCombinator class Scenario1(Scenario): @@ -92,8 +92,7 @@ class Scenario1(Scenario): # Phase 2: Run ffx - commandSequence = [sys.executable, - self._ffxExecutablePath] + commandSequence = [sys.executable, '-m', self._ffxModuleName] if self._context['verbosity']: commandSequence += ['--verbose', diff --git a/src/ffx/test/scenario_2.py b/tests/legacy/scenario_2.py similarity index 96% rename from src/ffx/test/scenario_2.py rename to tests/legacy/scenario_2.py index 4fa7ea8..567c967 100644 --- a/src/ffx/test/scenario_2.py +++ b/tests/legacy/scenario_2.py @@ -2,7 +2,7 @@ import os, sys, click from .scenario import Scenario -from ffx.test.helper import createMediaTestFile +from .helper import createMediaTestFile from ffx.process import executeProcess from ffx.file_properties import FileProperties @@ -13,7 +13,7 @@ from ffx.track_descriptor import TrackDescriptor from ffx.track_type import TrackType from ffx.track_disposition import TrackDisposition -from ffx.test.media_combinator import MediaCombinator +from 
.media_combinator import MediaCombinator class Scenario2(Scenario): @@ -77,8 +77,7 @@ class Scenario2(Scenario): # Phase 2: Run ffx - commandSequence = [sys.executable, - self._ffxExecutablePath] + commandSequence = [sys.executable, '-m', self._ffxModuleName] if self._context['verbosity']: commandSequence += ['--verbose', diff --git a/src/ffx/test/scenario_4.py b/tests/legacy/scenario_4.py similarity index 96% rename from src/ffx/test/scenario_4.py rename to tests/legacy/scenario_4.py index 677da08..74eebd0 100644 --- a/src/ffx/test/scenario_4.py +++ b/tests/legacy/scenario_4.py @@ -2,11 +2,11 @@ import os, sys, click from .scenario import Scenario -from ffx.test.helper import createMediaTestFile +from .helper import createMediaTestFile from ffx.process import executeProcess from ffx.database import databaseContext -from ffx.test.helper import createEmptyDirectory +from .helper import createEmptyDirectory from ffx.helper import getEpisodeFileBasename from ffx.file_properties import FileProperties @@ -17,8 +17,8 @@ from ffx.track_descriptor import TrackDescriptor from ffx.track_type import TrackType from ffx.track_disposition import TrackDisposition -from ffx.test.media_combinator import MediaCombinator -from ffx.test.indicator_combinator import IndicatorCombinator +from .media_combinator import MediaCombinator +from .indicator_combinator import IndicatorCombinator from ffx.show_descriptor import ShowDescriptor @@ -163,8 +163,7 @@ class Scenario4(Scenario): # Phase 3: Run ffx - commandSequence = [sys.executable, - self._ffxExecutablePath] + commandSequence = [sys.executable, '-m', self._ffxModuleName] if self._context['verbosity']: commandSequence += ['--verbose', diff --git a/src/ffx/test/show_combinator.py b/tests/legacy/show_combinator.py similarity index 100% rename from src/ffx/test/show_combinator.py rename to tests/legacy/show_combinator.py diff --git a/src/ffx/test/title_combinator.py b/tests/legacy/title_combinator.py similarity index 100% rename from 
src/ffx/test/title_combinator.py rename to tests/legacy/title_combinator.py diff --git a/src/ffx/test/track_tag_combinator_2.py b/tests/legacy/track_tag_combinator_2.py similarity index 84% rename from src/ffx/test/track_tag_combinator_2.py rename to tests/legacy/track_tag_combinator_2.py index 9d62845..85f715d 100644 --- a/src/ffx/test/track_tag_combinator_2.py +++ b/tests/legacy/track_tag_combinator_2.py @@ -22,8 +22,9 @@ class TrackTagCombinator2(): @staticmethod def getClassReference(identifier): - importlib.import_module(f"ffx.test.track_tag_combinator_2_{ identifier }") - for name, obj in inspect.getmembers(sys.modules[f"ffx.test.track_tag_combinator_2_{ identifier }"]): + module_name = f"tests.legacy.track_tag_combinator_2_{ identifier }" + importlib.import_module(module_name) + for name, obj in inspect.getmembers(sys.modules[module_name]): #HINT: Excluding DispositionCombination as it seems to be included by import (?) if inspect.isclass(obj) and name != 'TrackTagCombinator2' and name.startswith('TrackTagCombinator2'): return obj diff --git a/src/ffx/test/track_tag_combinator_2_0.py b/tests/legacy/track_tag_combinator_2_0.py similarity index 100% rename from src/ffx/test/track_tag_combinator_2_0.py rename to tests/legacy/track_tag_combinator_2_0.py diff --git a/src/ffx/test/track_tag_combinator_2_1.py b/tests/legacy/track_tag_combinator_2_1.py similarity index 100% rename from src/ffx/test/track_tag_combinator_2_1.py rename to tests/legacy/track_tag_combinator_2_1.py diff --git a/src/ffx/test/track_tag_combinator_2_2.py b/tests/legacy/track_tag_combinator_2_2.py similarity index 100% rename from src/ffx/test/track_tag_combinator_2_2.py rename to tests/legacy/track_tag_combinator_2_2.py diff --git a/src/ffx/test/track_tag_combinator_2_3.py b/tests/legacy/track_tag_combinator_2_3.py similarity index 100% rename from src/ffx/test/track_tag_combinator_2_3.py rename to tests/legacy/track_tag_combinator_2_3.py diff --git a/src/ffx/test/track_tag_combinator_3.py 
b/tests/legacy/track_tag_combinator_3.py similarity index 84% rename from src/ffx/test/track_tag_combinator_3.py rename to tests/legacy/track_tag_combinator_3.py index 41345f5..adc3b98 100644 --- a/src/ffx/test/track_tag_combinator_3.py +++ b/tests/legacy/track_tag_combinator_3.py @@ -22,8 +22,9 @@ class TrackTagCombinator3(): @staticmethod def getClassReference(identifier): - importlib.import_module(f"ffx.test.track_tag_combinator_3_{ identifier }") - for name, obj in inspect.getmembers(sys.modules[f"ffx.test.track_tag_combinator_3_{ identifier }"]): + module_name = f"tests.legacy.track_tag_combinator_3_{ identifier }" + importlib.import_module(module_name) + for name, obj in inspect.getmembers(sys.modules[module_name]): #HINT: Excluding DispositionCombination as it seems to be included by import (?) if inspect.isclass(obj) and name != 'TrackTagCombinator3' and name.startswith('TrackTagCombinator3'): return obj diff --git a/src/ffx/test/track_tag_combinator_3_0.py b/tests/legacy/track_tag_combinator_3_0.py similarity index 100% rename from src/ffx/test/track_tag_combinator_3_0.py rename to tests/legacy/track_tag_combinator_3_0.py diff --git a/src/ffx/test/track_tag_combinator_3_1.py b/tests/legacy/track_tag_combinator_3_1.py similarity index 100% rename from src/ffx/test/track_tag_combinator_3_1.py rename to tests/legacy/track_tag_combinator_3_1.py diff --git a/src/ffx/test/track_tag_combinator_3_2.py b/tests/legacy/track_tag_combinator_3_2.py similarity index 100% rename from src/ffx/test/track_tag_combinator_3_2.py rename to tests/legacy/track_tag_combinator_3_2.py diff --git a/src/ffx/test/track_tag_combinator_3_3.py b/tests/legacy/track_tag_combinator_3_3.py similarity index 100% rename from src/ffx/test/track_tag_combinator_3_3.py rename to tests/legacy/track_tag_combinator_3_3.py diff --git a/src/ffx/test/track_tag_combinator_3_4.py b/tests/legacy/track_tag_combinator_3_4.py similarity index 100% rename from src/ffx/test/track_tag_combinator_3_4.py rename to 
tests/legacy/track_tag_combinator_3_4.py diff --git a/src/ffx/ffx_tests.py b/tests/legacy_runner.py similarity index 81% rename from src/ffx/ffx_tests.py rename to tests/legacy_runner.py index 119700b..557045c 100755 --- a/src/ffx/ffx_tests.py +++ b/tests/legacy_runner.py @@ -1,15 +1,33 @@ #! /usr/bin/python3 -import os, logging, click +import os, sys, logging, click + +# Allow direct execution from the source tree by exposing both the repository +# root for `tests.*` imports and `src/` for `ffx.*` imports. +script_dir = os.path.dirname(os.path.abspath(__file__)) +repo_root = os.path.dirname(script_dir) +src_root = os.path.join(repo_root, 'src') + +sys.path = [p for p in sys.path if os.path.abspath(p) != script_dir] +for path in [repo_root, src_root]: + if path not in sys.path: + sys.path.insert(0, path) + +existing_pythonpath = [p for p in os.environ.get('PYTHONPATH', '').split(os.pathsep) if p] +pythonpath_entries = [] +for path in [src_root, repo_root] + existing_pythonpath: + if path not in pythonpath_entries: + pythonpath_entries.append(path) +os.environ['PYTHONPATH'] = os.pathsep.join(pythonpath_entries) from ffx.configuration_controller import ConfigurationController from ffx.file_properties import FileProperties from ffx.ffx_controller import FfxController -from ffx.test.helper import createMediaTestFile +from tests.legacy.helper import createMediaTestFile -from ffx.test.scenario import Scenario +from tests.legacy.scenario import Scenario from ffx.tmdb_controller import TMDB_API_KEY_NOT_PRESENT_EXCEPTION diff --git a/tests/support/__init__.py b/tests/support/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/support/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/support/ffx_bundle.py b/tests/support/ffx_bundle.py new file mode 100644 index 0000000..1fa5942 --- /dev/null +++ b/tests/support/ffx_bundle.py @@ -0,0 +1,333 @@ +from __future__ import annotations + +from dataclasses import dataclass, field +import json +import 
logging +import os +from pathlib import Path +import subprocess +import sys +from typing import Mapping + + +REPO_ROOT = Path(__file__).resolve().parents[2] +SRC_ROOT = REPO_ROOT / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx.audio_layout import AudioLayout +from ffx.database import databaseContext +from ffx.pattern_controller import PatternController +from ffx.show_controller import ShowController +from ffx.show_descriptor import ShowDescriptor +from ffx.track_descriptor import TrackDescriptor +from ffx.track_disposition import TrackDisposition +from ffx.track_type import TrackType + + +class StaticConfig: + def __init__(self, data: dict | None = None): + self._data = data or {} + + def getData(self): + return self._data + + +@dataclass(frozen=True) +class SourceTrackSpec: + track_type: TrackType + identity: str | None = None + language: str | None = None + title: str | None = None + extra_tags: Mapping[str, str] = field(default_factory=dict) + dispositions: tuple[TrackDisposition, ...] = () + subtitle_lines: tuple[str, ...] = ("subtitle line",) + attachment_name: str = "fixture.ttf" + + +@dataclass(frozen=True) +class PatternTrackSpec: + index: int + source_index: int + track_type: TrackType + tags: Mapping[str, str] = field(default_factory=dict) + dispositions: tuple[TrackDisposition, ...] 
= () + audio_layout: AudioLayout = AudioLayout.LAYOUT_STEREO + + +def make_logger(name: str) -> logging.Logger: + logger = logging.getLogger(name) + logger.handlers = [] + logger.setLevel(logging.DEBUG) + logger.propagate = False + logger.addHandler(logging.NullHandler()) + return logger + + +def build_controller_context(database_path: Path) -> dict: + return { + "logger": make_logger(f"ffx-test-db-{database_path.stem}"), + "config": StaticConfig(), + "database": databaseContext(str(database_path)), + } + + +def dispose_controller_context(context: dict) -> None: + context["database"]["engine"].dispose() + + +def write_vtt(path: Path, lines: tuple[str, ...]) -> Path: + body = ["WEBVTT", ""] + for index, line in enumerate(lines): + start_ms = index * 600 + end_ms = start_ms + 500 + body.extend( + [ + f"{start_ms // 3600000:02d}:{(start_ms // 60000) % 60:02d}:{(start_ms // 1000) % 60:02d}.{start_ms % 1000:03d} --> " + + f"{end_ms // 3600000:02d}:{(end_ms // 60000) % 60:02d}:{(end_ms // 1000) % 60:02d}.{end_ms % 1000:03d}", + line, + "", + ] + ) + path.write_text("\n".join(body), encoding="utf-8") + return path + + +def create_source_fixture(workdir: Path, filename: str, tracks: list[SourceTrackSpec], duration_seconds: int = 1) -> Path: + output_path = workdir / filename + + has_video = any(track.track_type == TrackType.VIDEO for track in tracks) + has_audio = any(track.track_type == TrackType.AUDIO for track in tracks) + + command = ["ffmpeg", "-y"] + + input_indices: dict[str, int] = {} + next_input_index = 0 + + if has_video: + command += ["-f", "lavfi", "-i", "color=size=96x54:rate=2:color=black"] + input_indices["video"] = next_input_index + next_input_index += 1 + + if has_audio: + command += ["-f", "lavfi", "-i", "anullsrc=channel_layout=stereo:sample_rate=48000"] + input_indices["audio"] = next_input_index + next_input_index += 1 + + subtitle_input_indices: list[int] = [] + subtitle_counter = 0 + for track in tracks: + if track.track_type == TrackType.SUBTITLE: 
+ subtitle_path = write_vtt( + workdir / f"{output_path.stem}_subtitle_{subtitle_counter}.vtt", + track.subtitle_lines, + ) + command += ["-i", str(subtitle_path)] + subtitle_input_indices.append(next_input_index) + next_input_index += 1 + subtitle_counter += 1 + + map_tokens: list[str] = [] + metadata_tokens: list[str] = [] + disposition_tokens: list[str] = [] + attachment_tokens: list[str] = [] + + per_type_subindex: dict[TrackType, int] = {} + subtitle_input_cursor = 0 + attachment_subindex = 0 + + for track in tracks: + if track.track_type == TrackType.VIDEO: + map_tokens += ["-map", f"{input_indices['video']}:v:0"] + stream_group = "v" + elif track.track_type == TrackType.AUDIO: + map_tokens += ["-map", f"{input_indices['audio']}:a:0"] + stream_group = "a" + elif track.track_type == TrackType.SUBTITLE: + map_tokens += ["-map", f"{subtitle_input_indices[subtitle_input_cursor]}:s:0"] + subtitle_input_cursor += 1 + stream_group = "s" + elif track.track_type == TrackType.ATTACHMENT: + attachment_path = workdir / track.attachment_name + attachment_path.write_bytes(b"dummy font bytes") + attachment_tokens += [ + "-attach", + str(attachment_path), + f"-metadata:s:t:{attachment_subindex}", + "mimetype=application/x-truetype-font", + f"-metadata:s:t:{attachment_subindex}", + f"filename={attachment_path.name}", + ] + attachment_subindex += 1 + continue + else: + raise ValueError(f"Unsupported track type {track.track_type}") + + subindex = per_type_subindex.get(track.track_type, 0) + per_type_subindex[track.track_type] = subindex + 1 + + tags = {} + if track.identity is not None: + tags["THIS_IS"] = track.identity + if track.language is not None: + tags["language"] = track.language + if track.title is not None: + tags["title"] = track.title + tags.update(track.extra_tags) + + for key, value in tags.items(): + metadata_tokens += [f"-metadata:s:{stream_group}:{subindex}", f"{key}={value}"] + + if track.dispositions: + disposition_tokens += [ + 
f"-disposition:{stream_group}:{subindex}", + "+".join(disposition.label() for disposition in track.dispositions), + ] + + command += map_tokens + command += metadata_tokens + command += disposition_tokens + command += [ + "-c:v", + "libx264", + "-preset", + "ultrafast", + "-crf", + "35", + "-pix_fmt", + "yuv420p", + "-c:a", + "aac", + "-b:a", + "48k", + "-c:s", + "webvtt", + "-t", + str(duration_seconds), + "-shortest", + ] + command += attachment_tokens + command += [str(output_path)] + + completed = subprocess.run(command, cwd=workdir, capture_output=True, text=True) + if completed.returncode != 0: + raise AssertionError(f"ffmpeg fixture creation failed\nSTDOUT:\n{completed.stdout}\nSTDERR:\n{completed.stderr}") + + return output_path + + +def add_show(context: dict, show_id: int = 1) -> None: + show_descriptor = ShowDescriptor( + id=show_id, + name="Bundle Test Show", + year=2000, + ) + ShowController(context).updateShow(show_descriptor) + + +def prepare_pattern_database(database_path: Path, filename_pattern: str, track_specs: list[PatternTrackSpec], show_id: int = 1) -> None: + context = build_controller_context(database_path) + try: + add_show(context, show_id=show_id) + track_descriptors = [] + for track in track_specs: + kwargs = { + TrackDescriptor.INDEX_KEY: track.index, + TrackDescriptor.SOURCE_INDEX_KEY: track.source_index, + TrackDescriptor.TRACK_TYPE_KEY: track.track_type, + TrackDescriptor.TAGS_KEY: dict(track.tags), + TrackDescriptor.DISPOSITION_SET_KEY: set(track.dispositions), + } + if track.track_type == TrackType.AUDIO: + kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = track.audio_layout + track_descriptors.append(TrackDescriptor(**kwargs)) + + pattern_id = PatternController(context).savePatternSchema( + { + "show_id": show_id, + "pattern": filename_pattern, + }, + trackDescriptors=track_descriptors, + ) + if not pattern_id: + raise AssertionError("Failed to create pattern in test database") + finally: + dispose_controller_context(context) + + +def 
run_ffx_convert(workdir: Path, home_dir: Path, database_path: Path, *args: str) -> subprocess.CompletedProcess[str]: + env = os.environ.copy() + env["HOME"] = str(home_dir) + existing_pythonpath = env.get("PYTHONPATH", "") + env["PYTHONPATH"] = str(SRC_ROOT) if not existing_pythonpath else f"{SRC_ROOT}{os.pathsep}{existing_pythonpath}" + + command = [ + sys.executable, + "-m", + "ffx", + "--database-file", + str(database_path), + "convert", + *args, + ] + return subprocess.run(command, cwd=workdir, env=env, capture_output=True, text=True) + + +def ffprobe_json(path: Path) -> dict: + completed = subprocess.run( + [ + "ffprobe", + "-hide_banner", + "-show_streams", + "-show_format", + "-of", + "json", + str(path), + ], + capture_output=True, + text=True, + ) + if completed.returncode != 0: + raise AssertionError(f"ffprobe failed for {path}\nSTDERR:\n{completed.stderr}") + return json.loads(completed.stdout) + + +def stream_tags(stream: dict) -> dict[str, str]: + return {str(key): str(value) for key, value in stream.get("tags", {}).items()} + + +def get_tag(stream: dict, key: str) -> str | None: + tags = stream_tags(stream) + for candidate in (key, key.lower(), key.upper()): + if candidate in tags: + return tags[candidate] + return None + + +def extract_first_subtitle_text(workdir: Path, media_path: Path) -> str: + extracted_path = workdir / f"{media_path.stem}.subtitle.vtt" + completed = subprocess.run( + [ + "ffmpeg", + "-y", + "-i", + str(media_path), + "-map", + "0:s:0", + "-c", + "copy", + str(extracted_path), + ], + cwd=workdir, + capture_output=True, + text=True, + ) + if completed.returncode != 0: + raise AssertionError(f"Subtitle extraction failed\nSTDERR:\n{completed.stderr}") + return extracted_path.read_text(encoding="utf-8") + + +def expected_output_path(workdir: Path, source_filename: str) -> Path: + return workdir / f"out_{source_filename}" diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..8b13789 --- 
/dev/null +++ b/tests/unit/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/test_cli_cut_option.py b/tests/unit/test_cli_cut_option.py new file mode 100644 index 0000000..11509f5 --- /dev/null +++ b/tests/unit/test_cli_cut_option.py @@ -0,0 +1,64 @@ +from __future__ import annotations + +import os +from pathlib import Path +import sys +import tempfile +import unittest + +from click.testing import CliRunner + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx import cli # noqa: E402 + + +class CutOptionCliTests(unittest.TestCase): + def invoke_convert(self, *args: str): + runner = CliRunner() + + with tempfile.TemporaryDirectory() as home_dir: + result = runner.invoke( + cli.ffx, + [ + "--database-file", + os.path.join(home_dir, "ffx.db"), + "--dry-run", + "convert", + "--no-tmdb", + *args, + ], + env={**os.environ, "HOME": home_dir}, + ) + + self.assertEqual(0, result.exit_code, result.output) + return result.output + + def test_convert_without_cut_prints_no_cut_message(self): + output = self.invoke_convert() + + self.assertNotIn("Cutting enabled:", output) + + def test_convert_with_cut_flag_prints_default_cut_message(self): + output = self.invoke_convert("--cut") + + self.assertIn("Cutting enabled: start 60 s, duration 180 s.", output) + + def test_convert_with_cut_duration_prints_zero_start_message(self): + output = self.invoke_convert("--cut", "45") + + self.assertIn("Cutting enabled: start 0 s, duration 45 s.", output) + + def test_convert_with_cut_start_and_duration_prints_both_values(self): + output = self.invoke_convert("--cut", "12,34") + + self.assertIn("Cutting enabled: start 12 s, duration 34 s.", output) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_cli_lazy_imports.py b/tests/unit/test_cli_lazy_imports.py new file mode 100644 index 0000000..d55d630 --- /dev/null +++ b/tests/unit/test_cli_lazy_imports.py @@ -0,0 +1,234 @@ 
+from __future__ import annotations + +import json +from pathlib import Path +import subprocess +import sys +import textwrap +import unittest + + +REPO_ROOT = Path(__file__).resolve().parents[2] +SRC_ROOT = REPO_ROOT / "src" +HEAVY_MODULES = [ + "ffx.configuration_controller", + "ffx.database", + "ffx.ffx_app", + "ffx.ffx_controller", + "ffx.file_properties", + "ffx.tmdb_controller", +] + + +class CliLazyImportTests(unittest.TestCase): + def run_python(self, code: str) -> dict: + completed = subprocess.run( + [sys.executable, "-c", code], + capture_output=True, + cwd=REPO_ROOT, + text=True, + ) + if completed.returncode != 0: + self.fail( + "Python helper failed\n" + f"STDOUT:\n{completed.stdout}\n" + f"STDERR:\n{completed.stderr}" + ) + return json.loads(completed.stdout) + + def test_importing_cli_keeps_runtime_modules_unloaded(self): + result = self.run_python( + textwrap.dedent( + f""" + import json + import sys + + sys.path.insert(0, {str(SRC_ROOT)!r}) + + import ffx.cli + + print(json.dumps({{ + "modules": {{ + module_name: module_name in sys.modules + for module_name in {HEAVY_MODULES!r} + }}, + }})) + """ + ) + ) + + self.assertTrue( + all(not is_loaded for is_loaded in result["modules"].values()), + result["modules"], + ) + + def test_lightweight_configure_workstation_command_stays_light(self): + result = self.run_python( + textwrap.dedent( + f""" + import json + import sys + from click.testing import CliRunner + + sys.path.insert(0, {str(SRC_ROOT)!r}) + + import ffx.cli + + runner = CliRunner() + invoke_result = runner.invoke( + ffx.cli.ffx, + ["--dry-run", "configure_workstation", "--check"], + ) + if invoke_result.exit_code != 0: + raise SystemExit(invoke_result.output) + + print(json.dumps({{ + "output": invoke_result.output, + "modules": {{ + module_name: module_name in sys.modules + for module_name in {HEAVY_MODULES!r} + }}, + }})) + """ + ) + ) + + self.assertIn("configure_workstation.sh --check", result["output"]) + self.assertTrue( + all(not 
is_loaded for is_loaded in result["modules"].values()), + result["modules"], + ) + + def test_lightweight_setup_command_stays_light(self): + result = self.run_python( + textwrap.dedent( + f""" + import json + import sys + from click.testing import CliRunner + + sys.path.insert(0, {str(SRC_ROOT)!r}) + + import ffx.cli + + runner = CliRunner() + invoke_result = runner.invoke( + ffx.cli.ffx, + ["--dry-run", "setup", "--check", "--with-tests"], + ) + if invoke_result.exit_code != 0: + raise SystemExit(invoke_result.output) + + print(json.dumps({{ + "output": invoke_result.output, + "modules": {{ + module_name: module_name in sys.modules + for module_name in {HEAVY_MODULES!r} + }}, + }})) + """ + ) + ) + + self.assertIn("tools/setup.sh --check --with-tests", result["output"]) + self.assertTrue( + all(not is_loaded for is_loaded in result["modules"].values()), + result["modules"], + ) + + def test_convert_help_describes_absolute_and_percent_cpu_limits(self): + result = self.run_python( + textwrap.dedent( + f""" + import click + import json + import sys + + sys.path.insert(0, {str(SRC_ROOT)!r}) + + import ffx.cli + + help_output = ffx.cli.convert.get_help(click.Context(ffx.cli.convert)) + + print(json.dumps({{ + "output": help_output, + "modules": {{ + module_name: module_name in sys.modules + for module_name in {HEAVY_MODULES!r} + }}, + }})) + """ + ) + ) + + self.assertIn("200", result["output"]) + self.assertIn("25%", result["output"]) + self.assertTrue( + all(not is_loaded for is_loaded in result["modules"].values()), + result["modules"], + ) + + def test_convert_cut_option_supports_flag_duration_and_start_duration_forms(self): + result = self.run_python( + textwrap.dedent( + f""" + import click + import json + import sys + + sys.path.insert(0, {str(SRC_ROOT)!r}) + + import ffx.cli + + flag_context = ffx.cli.convert.make_context( + "convert", + ["--cut"], + resilient_parsing=True, + ) + duration_context = ffx.cli.convert.make_context( + "convert", + ["--cut", "12"], + 
resilient_parsing=True, + ) + explicit_context = ffx.cli.convert.make_context( + "convert", + ["--cut=12,34"], + resilient_parsing=True, + ) + disabled_context = ffx.cli.convert.make_context( + "convert", + [], + resilient_parsing=True, + ) + help_output = ffx.cli.convert.get_help(click.Context(ffx.cli.convert)) + + print(json.dumps({{ + "flag_cut": flag_context.params["cut"], + "duration_cut": duration_context.params["cut"], + "explicit_cut": explicit_context.params["cut"], + "disabled_cut": disabled_context.params["cut"], + "output": help_output, + "modules": {{ + module_name: module_name in sys.modules + for module_name in {HEAVY_MODULES!r} + }}, + }})) + """ + ) + ) + + self.assertEqual([60, 180], result["flag_cut"]) + self.assertEqual([0, 12], result["duration_cut"]) + self.assertEqual([12, 34], result["explicit_cut"]) + self.assertIsNone(result["disabled_cut"]) + self.assertIn("--cut DURATION|START,DURATION", result["output"]) + self.assertIn("60,180", result["output"]) + self.assertIn("START,DURATION", result["output"]) + self.assertTrue( + all(not is_loaded for is_loaded in result["modules"].values()), + result["modules"], + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_cli_rename.py b/tests/unit/test_cli_rename.py new file mode 100644 index 0000000..813f2f1 --- /dev/null +++ b/tests/unit/test_cli_rename.py @@ -0,0 +1,137 @@ +from __future__ import annotations + +import json +import os +from pathlib import Path +import sys +import tempfile +import unittest + +from click.testing import CliRunner + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx import cli # noqa: E402 + + +class RenameCliTests(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + self.workspace = Path(self.tempdir.name) + self.home_dir = self.workspace / "home" + self.home_dir.mkdir() + + def tearDown(self): + 
self.tempdir.cleanup() + + def write_source(self, filename: str, payload: bytes = b"episode") -> Path: + source_path = self.workspace / filename + source_path.write_bytes(payload) + return source_path + + def write_config(self, data: dict) -> None: + config_dir = self.home_dir / ".local" / "etc" + config_dir.mkdir(parents=True, exist_ok=True) + (config_dir / "ffx.json").write_text(json.dumps(data), encoding="utf-8") + + def invoke_rename(self, *args: str): + runner = CliRunner() + result = runner.invoke( + cli.ffx, + ["rename", *args], + env={**os.environ, "HOME": str(self.home_dir)}, + ) + self.assertEqual(0, result.exit_code, result.output) + return result + + def test_rename_moves_matching_file_in_place(self): + source_path = self.write_source("demo_S02E03.mkv", b"season-episode") + + result = self.invoke_rename("--prefix", "dball", str(source_path)) + + target_path = self.workspace / "dball_s02e03.mkv" + self.assertIn("demo_S02E03.mkv -> dball_s02e03.mkv", result.output) + self.assertFalse(source_path.exists()) + self.assertTrue(target_path.exists()) + self.assertEqual(b"season-episode", target_path.read_bytes()) + + def test_rename_uses_default_season_and_suffix_for_episode_only_match(self): + source_path = self.write_source("demo_E07.mp4", b"episode-only") + + result = self.invoke_rename( + "--prefix", + "dball", + "--suffix", + "bonus", + str(source_path), + ) + + target_path = self.workspace / "dball_s01e07_bonus.mp4" + self.assertIn("demo_E07.mp4 -> dball_s01e07_bonus.mp4", result.output) + self.assertFalse(source_path.exists()) + self.assertTrue(target_path.exists()) + self.assertEqual(b"episode-only", target_path.read_bytes()) + + def test_rename_cli_season_overrides_source_season(self): + source_path = self.write_source("demo_s02e07.webm") + + result = self.invoke_rename( + "--prefix", + "dball", + "--season", + "5", + str(source_path), + ) + + target_path = self.workspace / "dball_s05e07.webm" + self.assertIn("demo_s02e07.webm -> dball_s05e07.webm", 
result.output) + self.assertFalse(source_path.exists()) + self.assertTrue(target_path.exists()) + + def test_rename_dry_run_prints_mapping_without_moving(self): + source_path = self.write_source("demo_E07.mkv") + + result = self.invoke_rename( + "--dry-run", + "--prefix", + "dball", + str(source_path), + ) + + target_path = self.workspace / "dball_s01e07.mkv" + self.assertIn("demo_E07.mkv -> dball_s01e07.mkv", result.output) + self.assertTrue(source_path.exists()) + self.assertFalse(target_path.exists()) + + def test_rename_uses_configured_indicator_digit_lengths(self): + self.write_config( + { + "defaultIndicatorSeasonDigits": 3, + "defaultIndicatorEpisodeDigits": 4, + } + ) + source_path = self.write_source("demo_E07.mkv") + + result = self.invoke_rename("--prefix", "dball", str(source_path)) + + target_path = self.workspace / "dball_s001e0007.mkv" + self.assertIn("demo_E07.mkv -> dball_s001e0007.mkv", result.output) + self.assertFalse(source_path.exists()) + self.assertTrue(target_path.exists()) + + def test_rename_skips_non_matching_filenames(self): + source_path = self.write_source("demo_finale.mkv") + + result = self.invoke_rename("--prefix", "dball", str(source_path)) + + self.assertIn("No matching files found.", result.output) + self.assertTrue(source_path.exists()) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_cli_rename_only.py b/tests/unit/test_cli_rename_only.py new file mode 100644 index 0000000..377c246 --- /dev/null +++ b/tests/unit/test_cli_rename_only.py @@ -0,0 +1,125 @@ +from __future__ import annotations + +import os +from pathlib import Path +import sys +import tempfile +import unittest +from unittest.mock import patch + +from click.testing import CliRunner + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx import cli # noqa: E402 + + +class _FakeMediaDescriptor: + def getVideoTracks(self): + return [] + + def 
getAudioTracks(self): + return [] + + def getSubtitleTracks(self): + return [] + + def getAttachmentTracks(self): + return [] + + +class _FakeFileProperties: + def __init__(self, context, source_path): + self.source_path = source_path + + def getShowId(self): + return -1 + + def getSeason(self): + return -1 + + def getEpisode(self): + return -1 + + def getMediaDescriptor(self): + return _FakeMediaDescriptor() + + def getPattern(self): + return None + + +class _FakeShiftedSeasonController: + def __init__(self, context): + self.context = context + + def shiftSeason(self, show_id, season, episode): + return season, episode + + +class _FakeFfxController: + def __init__(self, *args, **kwargs): + pass + + def runJob(self, *args, **kwargs): + raise AssertionError("runJob should not be called for --rename-only") + + +class RenameOnlyCliTests(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + self.home_dir = Path(self.tempdir.name) / "home" + self.home_dir.mkdir() + self.database_path = Path(self.tempdir.name) / "test.db" + self.source_dir = Path(self.tempdir.name) / "source" + self.source_dir.mkdir() + self.output_dir = Path(self.tempdir.name) / "output" + self.output_dir.mkdir() + self.source_path = self.source_dir / "episode.mkv" + self.source_bytes = b"rename-only-source" + self.source_path.write_bytes(self.source_bytes) + + def tearDown(self): + self.tempdir.cleanup() + + def test_rename_only_moves_source_file_into_output_directory(self): + runner = CliRunner() + + with ( + patch("ffx.file_properties.FileProperties", _FakeFileProperties), + patch("ffx.ffx_controller.FfxController", _FakeFfxController), + patch( + "ffx.shifted_season_controller.ShiftedSeasonController", + _FakeShiftedSeasonController, + ), + ): + result = runner.invoke( + cli.ffx, + [ + "--database-file", + str(self.database_path), + "convert", + "--no-tmdb", + "--no-pattern", + "--rename-only", + "--output-directory", + str(self.output_dir), + str(self.source_path), 
+ ], + env={**os.environ, "HOME": str(self.home_dir)}, + ) + + self.assertEqual(0, result.exit_code, result.output) + + target_path = self.output_dir / "out_episode.mkv" + self.assertFalse(self.source_path.exists()) + self.assertTrue(target_path.exists()) + self.assertEqual(self.source_bytes, target_path.read_bytes()) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_cli_subtitle_directory.py b/tests/unit/test_cli_subtitle_directory.py new file mode 100644 index 0000000..d0ef29c --- /dev/null +++ b/tests/unit/test_cli_subtitle_directory.py @@ -0,0 +1,84 @@ +from __future__ import annotations + +import json +import os +from pathlib import Path +import sys +import tempfile +import unittest + +from click.testing import CliRunner + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx import cli # noqa: E402 + + +class SubtitleDirectoryCliTests(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + self.home_dir = Path(self.tempdir.name) / "home" + self.home_dir.mkdir() + self.database_path = Path(self.tempdir.name) / "test.db" + + def tearDown(self): + self.tempdir.cleanup() + + def write_config(self, data: dict) -> None: + config_dir = self.home_dir / ".local" / "etc" + config_dir.mkdir(parents=True, exist_ok=True) + (config_dir / "ffx.json").write_text(json.dumps(data), encoding="utf-8") + + def invoke_convert(self, *args: str): + runner = CliRunner() + return runner.invoke( + cli.ffx, + [ + "--database-file", + str(self.database_path), + "convert", + "--no-tmdb", + *args, + ], + env={**os.environ, "HOME": str(self.home_dir)}, + ) + + def test_subtitle_prefix_without_directory_or_default_fails(self): + result = self.invoke_convert("--subtitle-prefix", "dball") + + self.assertNotEqual(0, result.exit_code) + self.assertIn("no --subtitle-directory was provided", result.output) + self.assertIn("no subtitlesDirectory 
default is configured", result.output) + + def test_subtitle_prefix_without_directory_fails_when_configured_subdir_is_missing(self): + subtitles_base_dir = self.home_dir / ".local" / "var" / "sync" / "subtitles" + subtitles_base_dir.mkdir(parents=True, exist_ok=True) + self.write_config({"subtitlesDirectory": "~/.local/var/sync/subtitles"}) + + result = self.invoke_convert("--subtitle-prefix", "dball") + + self.assertNotEqual(0, result.exit_code) + self.assertIn("resolved subtitle directory does not exist", result.output) + self.assertIn(str(subtitles_base_dir / "dball"), result.output) + + def test_explicit_subtitle_directory_wins_over_missing_default(self): + explicit_subtitle_directory = self.home_dir / "manual-subtitles" + explicit_subtitle_directory.mkdir(parents=True, exist_ok=True) + + result = self.invoke_convert( + "--subtitle-directory", + str(explicit_subtitle_directory), + "--subtitle-prefix", + "dball", + ) + + self.assertEqual(0, result.exit_code, result.output) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_cli_unmux_output_directory.py b/tests/unit/test_cli_unmux_output_directory.py new file mode 100644 index 0000000..f417fc6 --- /dev/null +++ b/tests/unit/test_cli_unmux_output_directory.py @@ -0,0 +1,94 @@ +from __future__ import annotations + +from pathlib import Path +import sys +import tempfile +import unittest + +import click + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx import cli # noqa: E402 + + +class StaticConfig: + def __init__(self, subtitles_directory: str = ""): + self._subtitles_directory = subtitles_directory + + def getSubtitlesDirectoryPath(self): + return self._subtitles_directory + + +class UnmuxOutputDirectoryTests(unittest.TestCase): + def test_subtitles_only_with_label_uses_configured_subtitles_base_directory(self): + with tempfile.TemporaryDirectory() as tempdir: + context = { + "config": 
StaticConfig(str(Path(tempdir) / "subtitles")), + } + + resolved_output_directory, should_create = cli.resolveUnmuxOutputDirectory( + context, + "", + True, + "dball", + ) + + self.assertEqual(str(Path(tempdir) / "subtitles" / "dball"), resolved_output_directory) + self.assertTrue(should_create) + + def test_explicit_output_directory_keeps_existing_behavior(self): + with tempfile.TemporaryDirectory() as tempdir: + context = { + "config": StaticConfig(str(Path(tempdir) / "subtitles")), + } + explicit_output_directory = str(Path(tempdir) / "manual") + + resolved_output_directory, should_create = cli.resolveUnmuxOutputDirectory( + context, + explicit_output_directory, + True, + "dball", + ) + + self.assertEqual(explicit_output_directory, resolved_output_directory) + self.assertFalse(should_create) + + def test_subtitles_only_without_label_keeps_existing_behavior(self): + context = { + "config": StaticConfig("/tmp/subtitles"), + } + + resolved_output_directory, should_create = cli.resolveUnmuxOutputDirectory( + context, + "", + True, + "", + ) + + self.assertEqual("", resolved_output_directory) + self.assertFalse(should_create) + + def test_subtitles_only_with_label_requires_configured_default_when_output_directory_is_missing(self): + context = { + "config": StaticConfig(""), + } + + with self.assertRaises(click.ClickException) as caught: + cli.resolveUnmuxOutputDirectory( + context, + "", + True, + "dball", + ) + + self.assertIn("subtitlesDirectory default", str(caught.exception)) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_cli_upgrade.py b/tests/unit/test_cli_upgrade.py new file mode 100644 index 0000000..d392f27 --- /dev/null +++ b/tests/unit/test_cli_upgrade.py @@ -0,0 +1,100 @@ +from __future__ import annotations + +from pathlib import Path +import subprocess +import sys +import unittest +from unittest.mock import patch + +from click.testing import CliRunner + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if 
str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx import cli # noqa: E402 + + +class UpgradeCommandTests(unittest.TestCase): + def make_completed(self, args, *, stdout: str = "", stderr: str = "", returncode: int = 0): + return subprocess.CompletedProcess(args=args, returncode=returncode, stdout=stdout, stderr=stderr) + + def test_upgrade_aborts_when_tracked_changes_are_present_and_reset_is_declined(self): + runner = CliRunner() + repo_path = "/tmp/ffx-repo" + pip_path = "/tmp/ffx-venv/bin/pip" + + subprocess_calls = [] + + def fake_run(args, **kwargs): + subprocess_calls.append((args, kwargs)) + if args == ['git', 'status', '--porcelain', '--untracked-files=no']: + return self.make_completed(args, stdout=" M src/ffx/cli.py\n") + raise AssertionError(f"Unexpected subprocess invocation: args={args} kwargs={kwargs}") + + with ( + patch.object(cli, "getBundleRepoPath", return_value=repo_path), + patch.object(cli, "getBundlePipPath", return_value=pip_path), + patch.object(cli.os.path, "isdir", return_value=True), + patch.object(cli.os.path, "isfile", return_value=True), + patch.object(cli.subprocess, "run", side_effect=fake_run), + ): + result = runner.invoke(cli.ffx, ["upgrade"], input="n\n") + + self.assertNotEqual(0, result.exit_code) + self.assertIn("Tracked local changes detected in the bundle repository:", result.output) + self.assertIn("Discard these tracked changes with 'git reset --hard HEAD' before upgrade?", result.output) + self.assertIn("Upgrade aborted because tracked local changes are present.", result.output) + self.assertEqual(1, len(subprocess_calls)) + self.assertEqual( + ['git', 'status', '--porcelain', '--untracked-files=no'], + subprocess_calls[0][0], + ) + self.assertEqual(repo_path, subprocess_calls[0][1]["cwd"]) + self.assertTrue(subprocess_calls[0][1]["capture_output"]) + self.assertTrue(subprocess_calls[0][1]["text"]) + + def test_upgrade_resets_before_checkout_and_pull_when_user_confirms(self): + runner = 
CliRunner() + repo_path = "/tmp/ffx-repo" + pip_path = "/tmp/ffx-venv/bin/pip" + + subprocess_calls = [] + + def fake_run(args, **kwargs): + subprocess_calls.append((args, kwargs)) + if args == ['git', 'status', '--porcelain', '--untracked-files=no']: + return self.make_completed(args, stdout="M src/ffx/constants.py\n") + return self.make_completed(args) + + with ( + patch.object(cli, "getBundleRepoPath", return_value=repo_path), + patch.object(cli, "getBundlePipPath", return_value=pip_path), + patch.object(cli.os.path, "isdir", return_value=True), + patch.object(cli.os.path, "isfile", return_value=True), + patch.object(cli.subprocess, "run", side_effect=fake_run), + ): + result = runner.invoke(cli.ffx, ["upgrade", "--branch", "main"], input="y\n") + + self.assertEqual(0, result.exit_code, result.output) + self.assertIn("Tracked local changes detected in the bundle repository:", result.output) + self.assertEqual( + [ + ['git', 'status', '--porcelain', '--untracked-files=no'], + ['git', 'reset', '--hard', 'HEAD'], + ['git', 'checkout', 'main'], + ['git', 'pull'], + [pip_path, 'install', '--upgrade', 'pip', 'setuptools', 'wheel'], + [pip_path, 'install', '--editable', '.'], + ], + [call[0] for call in subprocess_calls], + ) + for args, kwargs in subprocess_calls[1:]: + self.assertEqual(repo_path, kwargs["cwd"], args) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_configure_workstation_script.py b/tests/unit/test_configure_workstation_script.py new file mode 100644 index 0000000..cf5e76a --- /dev/null +++ b/tests/unit/test_configure_workstation_script.py @@ -0,0 +1,150 @@ +from __future__ import annotations + +import json +import os +from pathlib import Path +import stat +import subprocess +import sys +import tempfile +import textwrap +import unittest + + +REPO_ROOT = Path(__file__).resolve().parents[2] +SCRIPT_PATH = REPO_ROOT / "tools" / "configure_workstation.sh" +BUNDLE_PYTHON = Path.home() / ".local" / "share" / "ffx.venv" / "bin" 
/ "python" + + +class ConfigureWorkstationScriptTests(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + self.home_dir = Path(self.tempdir.name) / "home" + self.home_dir.mkdir() + self.stub_bin_dir = Path(self.tempdir.name) / "bin" + self.stub_bin_dir.mkdir() + + for command_name in ("git", "python3", "ffmpeg", "ffprobe", "cpulimit"): + self.write_stub_command(command_name) + + def tearDown(self): + self.tempdir.cleanup() + + def write_stub_command(self, name: str, body: str = "") -> None: + script_path = self.stub_bin_dir / name + script_path.write_text( + "#!/usr/bin/env bash\n" + + body + + "\n", + encoding="utf-8", + ) + script_path.chmod(script_path.stat().st_mode | stat.S_IXUSR) + + def run_script(self, **env_overrides: str) -> subprocess.CompletedProcess[str]: + if not BUNDLE_PYTHON.is_file(): + self.skipTest(f"Missing bundle Python at {BUNDLE_PYTHON}") + + env = { + **os.environ, + "HOME": str(self.home_dir), + "PATH": f"{self.stub_bin_dir}:{os.environ.get('PATH', '')}", + "FFX_PYTHON": str(BUNDLE_PYTHON), + **env_overrides, + } + + return subprocess.run( + ["bash", str(SCRIPT_PATH)], + capture_output=True, + cwd=REPO_ROOT, + env=env, + text=True, + ) + + def test_script_seeds_default_config_from_template(self): + completed = self.run_script() + + self.assertEqual( + 0, + completed.returncode, + f"STDOUT:\n{completed.stdout}\nSTDERR:\n{completed.stderr}", + ) + + config_path = self.home_dir / ".local" / "etc" / "ffx.json" + self.assertTrue(config_path.exists()) + + config_data = json.loads(config_path.read_text(encoding="utf-8")) + self.assertEqual( + { + "databasePath": str(self.home_dir / ".local" / "var" / "ffx" / "ffx.db"), + "logDirectory": str(self.home_dir / ".local" / "var" / "log"), + "subtitlesDirectory": str( + self.home_dir / ".local" / "var" / "sync" / "subtitles" + ), + "defaultIndexSeasonDigits": 2, + "defaultIndexEpisodeDigits": 2, + "defaultIndicatorSeasonDigits": 2, + "defaultIndicatorEpisodeDigits": 2, 
+ "metadata": { + "signature": {"RECODED_WITH": "FFX"}, + "remove": [ + "VERSION-eng", + "creation_time", + "NAME", + ], + "streams": { + "remove": [ + "BPS", + "NUMBER_OF_FRAMES", + "NUMBER_OF_BYTES", + "_STATISTICS_WRITING_APP", + "_STATISTICS_WRITING_DATE_UTC", + "_STATISTICS_TAGS", + "BPS-eng", + "DURATION-eng", + "NUMBER_OF_FRAMES-eng", + "NUMBER_OF_BYTES-eng", + "_STATISTICS_WRITING_APP-eng", + "_STATISTICS_WRITING_DATE_UTC-eng", + "_STATISTICS_TAGS-eng", + ] + }, + }, + }, + config_data, + ) + + def test_script_honors_custom_template_override(self): + custom_template_path = Path(self.tempdir.name) / "custom-config.j2" + custom_template_path.write_text( + textwrap.dedent( + """ + { + "databasePath": {{ database_path_json }}, + "marker": "from-template", + "subtitlesDirectory": {{ subtitles_directory_json }} + } + """ + ).lstrip(), + encoding="utf-8", + ) + + completed = self.run_script(FFX_CONFIG_TEMPLATE=str(custom_template_path)) + + self.assertEqual( + 0, + completed.returncode, + f"STDOUT:\n{completed.stdout}\nSTDERR:\n{completed.stderr}", + ) + + config_path = self.home_dir / ".local" / "etc" / "ffx.json" + config_data = json.loads(config_path.read_text(encoding="utf-8")) + + self.assertEqual("from-template", config_data["marker"]) + self.assertEqual( + str(self.home_dir / ".local" / "var" / "ffx" / "ffx.db"), + config_data["databasePath"], + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_database.py b/tests/unit/test_database.py new file mode 100644 index 0000000..27fa2da --- /dev/null +++ b/tests/unit/test_database.py @@ -0,0 +1,83 @@ +from __future__ import annotations + +from pathlib import Path +import sys +import tempfile +import unittest +from unittest.mock import patch + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx.constants import DATABASE_VERSION # noqa: E402 +from ffx.database import DATABASE_VERSION_KEY, 
databaseContext, getDatabaseVersion # noqa: E402 +from ffx.model.property import Property # noqa: E402 +from ffx.model.show import Base # noqa: E402 + + +class DatabaseContextTests(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + self.database_path = Path(self.tempdir.name) / "ffx-test.db" + + def tearDown(self): + self.tempdir.cleanup() + + def test_database_context_bootstraps_new_database_with_current_version(self): + with patch("ffx.database.Base.metadata.create_all", wraps=Base.metadata.create_all) as mocked_create_all: + context = databaseContext(str(self.database_path)) + try: + self.assertTrue(self.database_path.exists()) + self.assertEqual(DATABASE_VERSION, getDatabaseVersion(context)) + finally: + context["engine"].dispose() + + mocked_create_all.assert_called_once() + + def test_database_context_skips_create_all_when_schema_is_already_present(self): + initial_context = databaseContext(str(self.database_path)) + initial_context["engine"].dispose() + + with patch("ffx.database.Base.metadata.create_all") as mocked_create_all: + context = databaseContext(str(self.database_path)) + try: + self.assertEqual(DATABASE_VERSION, getDatabaseVersion(context)) + finally: + context["engine"].dispose() + + mocked_create_all.assert_not_called() + + def test_database_context_restores_missing_version_property_without_schema_bootstrap(self): + context = databaseContext(str(self.database_path)) + Session = context["session"] + try: + session = Session() + try: + version_row = ( + session.query(Property) + .filter(Property.key == DATABASE_VERSION_KEY) + .first() + ) + session.delete(version_row) + session.commit() + finally: + session.close() + finally: + context["engine"].dispose() + + with patch("ffx.database.Base.metadata.create_all") as mocked_create_all: + reopened_context = databaseContext(str(self.database_path)) + try: + self.assertEqual(DATABASE_VERSION, getDatabaseVersion(reopened_context)) + finally: + 
reopened_context["engine"].dispose() + + mocked_create_all.assert_not_called() + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_ffx_controller.py b/tests/unit/test_ffx_controller.py new file mode 100644 index 0000000..197d818 --- /dev/null +++ b/tests/unit/test_ffx_controller.py @@ -0,0 +1,139 @@ +from __future__ import annotations + +from pathlib import Path +import sys +import unittest +from unittest.mock import patch + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx.ffx_controller import FfxController # noqa: E402 +from ffx.logging_utils import get_ffx_logger # noqa: E402 +from ffx.media_descriptor import MediaDescriptor # noqa: E402 +from ffx.track_codec import TrackCodec # noqa: E402 +from ffx.track_descriptor import TrackDescriptor # noqa: E402 +from ffx.track_type import TrackType # noqa: E402 +from ffx.video_encoder import VideoEncoder # noqa: E402 + + +class StaticConfig: + def __init__(self, data: dict | None = None): + self._data = data or {} + + def getData(self): + return self._data + + +class FfxControllerTests(unittest.TestCase): + def make_context(self, video_encoder: VideoEncoder) -> dict: + return { + "logger": get_ffx_logger(), + "config": StaticConfig(), + "video_encoder": video_encoder, + "dry_run": False, + "perform_cut": False, + "bitrates": { + "stereo": "112k", + "ac3": "256k", + "dts": "320k", + }, + } + + def make_media_descriptors(self) -> tuple[MediaDescriptor, MediaDescriptor]: + descriptor = MediaDescriptor( + track_descriptors=[ + TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.VIDEO, + codec_name=TrackCodec.H264, + ) + ] + ) + source_descriptor = MediaDescriptor( + track_descriptors=[ + TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.VIDEO, + codec_name=TrackCodec.H264, + ) + ] + ) + return descriptor, source_descriptor + + def 
test_vp9_run_job_emits_file_level_encoding_quality_metadata(self): + context = self.make_context(VideoEncoder.VP9) + target_descriptor, source_descriptor = self.make_media_descriptors() + controller = FfxController(context, target_descriptor, source_descriptor) + commands = [] + + with ( + patch.object( + controller, + "executeCommandSequence", + side_effect=lambda command: commands.append(command) or ("", "", 0), + ), + patch("ffx.ffx_controller.os.path.exists", return_value=False), + ): + controller.runJob( + "input.mkv", + "output.webm", + targetFormat="webm", + chainIteration=[ + { + "identifier": "quality", + "parameters": {"quality": 27}, + } + ], + ) + + self.assertEqual(2, len(commands)) + self.assertIn("-metadata:g", commands[1]) + self.assertIn("ENCODING_QUALITY=27", commands[1]) + self.assertFalse( + any(token.startswith("ENCODING_PRESET=") for token in commands[1]) + ) + + def test_av1_run_job_emits_file_level_quality_and_preset_metadata(self): + context = self.make_context(VideoEncoder.AV1) + target_descriptor, source_descriptor = self.make_media_descriptors() + controller = FfxController(context, target_descriptor, source_descriptor) + commands = [] + + with patch.object( + controller, + "executeCommandSequence", + side_effect=lambda command: commands.append(command) or ("", "", 0), + ): + controller.runJob( + "input.mkv", + "output.webm", + targetFormat="webm", + chainIteration=[ + { + "identifier": "quality", + "parameters": {"quality": 29}, + }, + { + "identifier": "preset", + "parameters": {"preset": 7}, + }, + ], + ) + + self.assertEqual(1, len(commands)) + self.assertIn("-metadata:g", commands[0]) + self.assertIn("ENCODING_QUALITY=29", commands[0]) + self.assertIn("ENCODING_PRESET=7", commands[0]) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_file_properties_probe.py b/tests/unit/test_file_properties_probe.py new file mode 100644 index 0000000..d99012b --- /dev/null +++ b/tests/unit/test_file_properties_probe.py 
@@ -0,0 +1,175 @@ +from __future__ import annotations + +import json +import logging +from pathlib import Path +import sys +from types import SimpleNamespace +import unittest +from unittest.mock import patch + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +class StaticConfig: + def getData(self): + return {} + + +class DummyPatternController: + def __init__(self, context): + self.context = context + + def matchFilename(self, filename): + return {} + + +def make_logger(name: str) -> logging.Logger: + logger = logging.getLogger(name) + logger.handlers = [] + logger.setLevel(logging.DEBUG) + logger.propagate = False + logger.addHandler(logging.NullHandler()) + return logger + + +class FilePropertiesProbeTests(unittest.TestCase): + def import_module(self): + try: + import ffx.file_properties as file_properties_module + except ModuleNotFoundError as ex: + if ex.name == "sqlalchemy": + self.skipTest("sqlalchemy is not installed in this environment") + raise + return file_properties_module + + def make_context(self): + return { + "logger": make_logger("ffx-test-file-properties-probe"), + "config": StaticConfig(), + "database": {"session": object()}, + "use_pattern": False, + } + + def sample_probe_data(self): + return { + "format": { + "filename": "/tmp/example_s01e01.mkv", + "nb_streams": 2, + "format_name": "matroska,webm", + }, + "streams": [ + { + "index": 0, + "codec_name": "h264", + "codec_type": "video", + "disposition": {"default": 1}, + "tags": {}, + }, + { + "index": 1, + "codec_name": "aac", + "codec_type": "audio", + "channel_layout": "stereo", + "channels": 2, + "disposition": {"default": 0}, + "tags": {"language": "eng"}, + }, + ], + } + + def test_format_and_stream_accessors_share_one_combined_probe(self): + file_properties_module = self.import_module() + probe_output = self.sample_probe_data() + + with ( + patch.object(file_properties_module, "PatternController", 
DummyPatternController), + patch.object( + file_properties_module, + "executeProcess", + return_value=(json.dumps(probe_output), "", 0), + ) as mocked_execute, + ): + file_properties = file_properties_module.FileProperties( + self.make_context(), + "/tmp/example_s01e01.mkv", + ) + + self.assertEqual(probe_output["format"], file_properties.getFormatData()) + self.assertEqual(probe_output["streams"], file_properties.getStreamData()) + + mocked_execute.assert_called_once_with( + file_properties_module.FileProperties.FFPROBE_COMMAND_TOKENS + + ["/tmp/example_s01e01.mkv"] + ) + + def test_cropdetect_uses_configured_window_and_caches_results(self): + file_properties_module = self.import_module() + file_properties_module.FileProperties._clear_cropdetect_cache() + + cropdetect_stderr = "\n".join( + [ + "[Parsed_cropdetect_0] crop=1440:1080:240:0", + "[Parsed_cropdetect_0] crop=1440:1080:240:0", + "[Parsed_cropdetect_0] crop=1438:1080:242:0", + ] + ) + context = self.make_context() + context["cropdetect"] = {"seek_seconds": 15, "duration_seconds": 45} + + with ( + patch.object( + file_properties_module.os, + "stat", + return_value=SimpleNamespace(st_mtime_ns=1234, st_size=5678), + ), + patch.object(file_properties_module, "PatternController", DummyPatternController), + patch.object( + file_properties_module, + "executeProcess", + return_value=("", cropdetect_stderr, 0), + ) as mocked_execute, + ): + file_properties = file_properties_module.FileProperties( + context, + "/tmp/example_s01e01.mkv", + ) + + first = file_properties.findCropArguments() + second = file_properties.findCropArguments() + + self.assertEqual(first, second) + self.assertEqual( + { + "output_width": "1440", + "output_height": "1080", + "x_offset": "240", + "y_offset": "0", + }, + first, + ) + mocked_execute.assert_called_once_with( + list(file_properties_module.FFMPEG_COMMAND_TOKENS) + + [ + "-ss", + "15", + "-i", + "/tmp/example_s01e01.mkv", + "-t", + "45", + "-vf", + "cropdetect", + ] + + 
list(file_properties_module.FFMPEG_NULL_OUTPUT_TOKENS), + context=context, + ) + + file_properties_module.FileProperties._clear_cropdetect_cache() + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_helper.py b/tests/unit/test_helper.py new file mode 100644 index 0000000..450877d --- /dev/null +++ b/tests/unit/test_helper.py @@ -0,0 +1,61 @@ +from __future__ import annotations + +from pathlib import Path +import sys +import unittest + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx.helper import ( # noqa: E402 + filterFilename, + formatRichColor, + removeRichColor, + substituteTmdbFilename, +) + + +class HelperTests(unittest.TestCase): + def test_filter_filename_replaces_and_removes_problem_characters(self): + self.assertEqual( + "A-B;C#", + filterFilename(" A/B:C*'?♥’ "), + ) + + def test_substitute_tmdb_filename_removes_filler_marker(self): + self.assertEqual( + "Episode Name", + substituteTmdbFilename("Episode Name (*)"), + ) + + def test_substitute_tmdb_filename_rewrites_single_episode_suffix(self): + self.assertEqual( + "Episode Name Teil 2", + substituteTmdbFilename("Episode Name (2)"), + ) + + def test_substitute_tmdb_filename_rewrites_episode_range_suffix(self): + self.assertEqual( + "Episode Name Teil 2-3", + substituteTmdbFilename("Episode Name (2/3)"), + ) + + def test_remove_rich_color_returns_inner_text(self): + self.assertEqual( + "value", + removeRichColor(formatRichColor("value", "green")), + ) + + def test_remove_rich_color_leaves_plain_text_unchanged(self): + self.assertEqual( + "plain text", + removeRichColor("plain text"), + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_iso_language.py b/tests/unit/test_iso_language.py new file mode 100644 index 0000000..a5aee99 --- /dev/null +++ b/tests/unit/test_iso_language.py @@ -0,0 +1,41 @@ +from __future__ import annotations + +from pathlib import 
Path +import sys +import unittest + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx.iso_language import IsoLanguage # noqa: E402 + + +class IsoLanguageTests(unittest.TestCase): + def test_language_constant_set_covers_iso_639_1_plus_filipino_alias(self): + languages = [language for language in IsoLanguage if language is not IsoLanguage.UNDEFINED] + + self.assertEqual(184, len(languages)) + self.assertEqual(183, len({language.twoLetter() for language in languages})) + + def test_primary_three_letter_code_is_returned_first(self): + self.assertEqual("sqi", IsoLanguage.ALBANIAN.threeLetter()) + self.assertEqual("deu", IsoLanguage.GERMAN.threeLetter()) + self.assertEqual("cym", IsoLanguage.WELSH.threeLetter()) + + def test_secondary_three_letter_codes_still_resolve_to_the_same_language(self): + self.assertIs(IsoLanguage.ALBANIAN, IsoLanguage.findThreeLetter("alb")) + self.assertIs(IsoLanguage.GERMAN, IsoLanguage.findThreeLetter("ger")) + self.assertIs(IsoLanguage.WELSH, IsoLanguage.findThreeLetter("wel")) + + def test_newly_added_languages_and_media_aliases_resolve(self): + self.assertIs(IsoLanguage.ASSAMESE, IsoLanguage.find("Assamese")) + self.assertIs(IsoLanguage.YORUBA, IsoLanguage.findThreeLetter("yor")) + self.assertIs(IsoLanguage.FILIPINO, IsoLanguage.findThreeLetter("fil")) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py new file mode 100644 index 0000000..0f44c7f --- /dev/null +++ b/tests/unit/test_logging.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +import logging +from pathlib import Path +import sys +import tempfile +import unittest + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx.logging_utils import ( # noqa: E402 + CONSOLE_HANDLER_NAME, + FILE_HANDLER_NAME, + configure_ffx_logger, 
+ get_ffx_logger, +) + + +class LoggingUtilsTests(unittest.TestCase): + def cleanup_logger(self, logger_name: str) -> None: + logger = logging.getLogger(logger_name) + for handler in list(logger.handlers): + logger.removeHandler(handler) + handler.close() + + def test_get_ffx_logger_adds_only_one_null_handler(self): + logger_name = "ffx-test-null-handler" + self.cleanup_logger(logger_name) + + logger = get_ffx_logger(logger_name) + logger = get_ffx_logger(logger_name) + + null_handlers = [ + handler for handler in logger.handlers if isinstance(handler, logging.NullHandler) + ] + self.assertEqual(1, len(null_handlers)) + + self.cleanup_logger(logger_name) + + def test_configure_ffx_logger_reuses_named_handlers(self): + logger_name = "ffx-test-configure-handler" + self.cleanup_logger(logger_name) + + with tempfile.TemporaryDirectory() as tempdir: + first_log_path = Path(tempdir) / "first.log" + second_log_path = Path(tempdir) / "second.log" + + logger = configure_ffx_logger( + str(first_log_path), + logging.ERROR, + logging.INFO, + name=logger_name, + ) + logger = configure_ffx_logger( + str(second_log_path), + logging.DEBUG, + logging.WARNING, + name=logger_name, + ) + + console_handlers = [ + handler for handler in logger.handlers if handler.get_name() == CONSOLE_HANDLER_NAME + ] + file_handlers = [ + handler for handler in logger.handlers if handler.get_name() == FILE_HANDLER_NAME + ] + + self.assertEqual(1, len(console_handlers)) + self.assertEqual(1, len(file_handlers)) + self.assertFalse( + any(isinstance(handler, logging.NullHandler) for handler in logger.handlers) + ) + self.assertEqual(logging.WARNING, console_handlers[0].level) + self.assertEqual(logging.DEBUG, file_handlers[0].level) + self.assertEqual(str(second_log_path.resolve()), file_handlers[0].baseFilename) + + self.cleanup_logger(logger_name) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_media_descriptor_change_set.py b/tests/unit/test_media_descriptor_change_set.py 
new file mode 100644 index 0000000..93c641a --- /dev/null +++ b/tests/unit/test_media_descriptor_change_set.py @@ -0,0 +1,217 @@ +from __future__ import annotations + +from pathlib import Path +import sys +import unittest + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx.media_descriptor import MediaDescriptor # noqa: E402 +from ffx.media_descriptor_change_set import MediaDescriptorChangeSet # noqa: E402 +from ffx.track_descriptor import TrackDescriptor # noqa: E402 +from ffx.track_type import TrackType # noqa: E402 +from ffx.logging_utils import get_ffx_logger # noqa: E402 + + +class StaticConfig: + def __init__(self, data: dict): + self._data = data + + def getData(self): + return self._data + + +class MediaDescriptorChangeSetTests(unittest.TestCase): + def test_non_primary_source_language_code_is_normalized_in_changed_track_metadata(self): + context = { + "logger": get_ffx_logger(), + "config": StaticConfig({}), + } + + source_track = TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.AUDIO, + tags={"language": "ger", "title": "German Main"}, + ) + target_track = TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.AUDIO, + tags={"language": "ger", "title": "German Main"}, + ) + + change_set = MediaDescriptorChangeSet( + context, + MediaDescriptor(track_descriptors=[target_track]), + MediaDescriptor(track_descriptors=[source_track]), + ) + + metadata_tokens = change_set.generateMetadataTokens() + + self.assertIn("-metadata:s:a:0", metadata_tokens) + self.assertIn("language=deu", metadata_tokens) + self.assertNotIn("language=ger", metadata_tokens) + + def test_target_only_track_language_metadata_uses_primary_code(self): + context = { + "logger": get_ffx_logger(), + "config": StaticConfig({}), + } + + target_track = TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.AUDIO, 
+ tags={"language": "ger", "title": "German Main"}, + ) + + change_set = MediaDescriptorChangeSet( + context, + MediaDescriptor(track_descriptors=[target_track]), + ) + + metadata_tokens = change_set.generateMetadataTokens() + + self.assertIn("-metadata:s:a:0", metadata_tokens) + self.assertIn("language=deu", metadata_tokens) + self.assertNotIn("language=ger", metadata_tokens) + + def test_external_subtitle_preserves_source_only_tags_except_removed_keys(self): + context = { + "logger": get_ffx_logger(), + "config": StaticConfig( + { + "metadata": { + "streams": { + "remove": ["BPS"], + } + } + } + ), + } + + source_track = TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.SUBTITLE, + tags={ + "language": "eng", + "title": "Embedded Title", + "THIS_IS": "embedded-subtitle", + "EXTERNAL_KEEP": "keep-me", + "BPS": "remove-me", + }, + ) + target_track = TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.SUBTITLE, + tags={"language": "deu"}, + external_source_file="/tmp/external-subtitle.vtt", + ) + + change_set = MediaDescriptorChangeSet( + context, + MediaDescriptor(track_descriptors=[target_track]), + MediaDescriptor(track_descriptors=[source_track]), + ) + + metadata_tokens = change_set.generateMetadataTokens() + + self.assertIn("-metadata:s:s:0", metadata_tokens) + self.assertIn("language=deu", metadata_tokens) + self.assertIn("title=Embedded Title", metadata_tokens) + self.assertIn("THIS_IS=embedded-subtitle", metadata_tokens) + self.assertIn("EXTERNAL_KEEP=keep-me", metadata_tokens) + self.assertNotIn("BPS=remove-me", metadata_tokens) + self.assertIn("BPS=", metadata_tokens) + + def test_external_subtitle_normalizes_preserved_source_language_metadata(self): + context = { + "logger": get_ffx_logger(), + "config": StaticConfig({}), + } + + source_track = TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.SUBTITLE, + tags={"language": "ger", "title": "German 
Subtitle"}, + ) + target_track = TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.SUBTITLE, + tags={}, + external_source_file="/tmp/external-subtitle.vtt", + ) + + change_set = MediaDescriptorChangeSet( + context, + MediaDescriptor(track_descriptors=[target_track]), + MediaDescriptor(track_descriptors=[source_track]), + ) + + metadata_tokens = change_set.generateMetadataTokens() + + self.assertIn("-metadata:s:s:0", metadata_tokens) + self.assertIn("language=deu", metadata_tokens) + self.assertNotIn("language=ger", metadata_tokens) + + def test_target_only_tracks_still_emit_remove_tokens_for_configured_stream_keys(self): + context = { + "logger": get_ffx_logger(), + "config": StaticConfig( + { + "metadata": { + "remove": ["creation_time"], + "streams": { + "remove": ["BPS"], + } + } + } + ), + } + + target_track = TrackDescriptor( + index=0, + source_index=0, + sub_index=0, + track_type=TrackType.AUDIO, + tags={ + "language": "eng", + "title": "Main Audio", + "BPS": "remove-me", + "KEEP_ME": "keep-me", + }, + ) + + change_set = MediaDescriptorChangeSet( + context, + MediaDescriptor(tags={"creation_time": "remove-me"}, track_descriptors=[target_track]), + ) + + metadata_tokens = change_set.generateMetadataTokens() + + self.assertIn("-metadata:g", metadata_tokens) + self.assertIn("creation_time=", metadata_tokens) + self.assertIn("-metadata:s:a:0", metadata_tokens) + self.assertIn("BPS=", metadata_tokens) + self.assertIn("KEEP_ME=keep-me", metadata_tokens) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_pattern_management.py b/tests/unit/test_pattern_management.py new file mode 100644 index 0000000..eb5ef60 --- /dev/null +++ b/tests/unit/test_pattern_management.py @@ -0,0 +1,240 @@ +from __future__ import annotations + +import logging +from pathlib import Path +import sys +import tempfile +import unittest + +import click + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not 
in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx.audio_layout import AudioLayout # noqa: E402 +from ffx.database import databaseContext # noqa: E402 +from ffx.file_properties import FileProperties # noqa: E402 +from ffx.model.pattern import Pattern # noqa: E402 +from ffx.pattern_controller import ( # noqa: E402 + DuplicatePatternMatchError, + InvalidPatternSchemaError, + PatternController, +) +from ffx.show_controller import ShowController # noqa: E402 +from ffx.show_descriptor import ShowDescriptor # noqa: E402 +from ffx.track_controller import TrackController # noqa: E402 +from ffx.track_descriptor import TrackDescriptor # noqa: E402 +from ffx.track_disposition import TrackDisposition # noqa: E402 +from ffx.track_type import TrackType # noqa: E402 + + +class StaticConfig: + def __init__(self, data: dict | None = None): + self._data = data or {} + + def getData(self): + return self._data + + +def make_logger(name: str) -> logging.Logger: + logger = logging.getLogger(name) + logger.handlers = [] + logger.setLevel(logging.DEBUG) + logger.propagate = False + logger.addHandler(logging.NullHandler()) + return logger + + +def make_context(database_path: Path) -> dict: + return { + "logger": make_logger(f"ffx-test-pattern-{database_path.stem}"), + "config": StaticConfig(), + "database": databaseContext(str(database_path)), + "use_pattern": True, + } + + +def make_track_descriptor( + index: int = 0, + *, + source_index: int | None = None, + track_type: TrackType = TrackType.VIDEO, + title: str = "", + dispositions: set[TrackDisposition] | None = None, +) -> TrackDescriptor: + kwargs = { + TrackDescriptor.INDEX_KEY: index, + TrackDescriptor.SOURCE_INDEX_KEY: index if source_index is None else source_index, + TrackDescriptor.TRACK_TYPE_KEY: track_type, + TrackDescriptor.TAGS_KEY: {"title": title} if title else {}, + TrackDescriptor.DISPOSITION_SET_KEY: dispositions or set(), + } + if track_type == TrackType.AUDIO: + kwargs[TrackDescriptor.AUDIO_LAYOUT_KEY] = 
AudioLayout.LAYOUT_STEREO + return TrackDescriptor(**kwargs) + + +class PatternManagementTests(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.TemporaryDirectory() + self.database_path = Path(self.tempdir.name) / "pattern-test.db" + self.context = make_context(self.database_path) + self.pattern_controller = PatternController(self.context) + self.track_controller = TrackController(self.context) + self.show_controller = ShowController(self.context) + PatternController._clear_regex_cache() + + def tearDown(self): + self.context["database"]["engine"].dispose() + self.tempdir.cleanup() + PatternController._clear_regex_cache() + + def add_show(self, show_id: int, name: str) -> None: + self.show_controller.updateShow( + ShowDescriptor( + id=show_id, + name=name, + year=2000 + show_id, + ) + ) + + def save_pattern( + self, + show_id: int, + pattern_expression: str, + *, + tracks: list[TrackDescriptor] | None = None, + ) -> int: + self.add_show(show_id, f"Show {show_id}") + return self.pattern_controller.savePatternSchema( + { + "show_id": show_id, + "pattern": pattern_expression, + "quality": 0, + "notes": "", + }, + trackDescriptors=tracks or [make_track_descriptor(0)], + ) + + def insert_trackless_pattern_row(self, show_id: int, pattern_expression: str) -> int: + self.add_show(show_id, f"Show {show_id}") + Session = self.context["database"]["session"] + session = Session() + try: + pattern = Pattern(show_id=show_id, pattern=pattern_expression) + session.add(pattern) + session.commit() + return int(pattern.id) + finally: + session.close() + + def test_match_filename_returns_single_matching_pattern(self): + pattern_id = self.save_pattern(1, r"^single_(s[0-9]+e[0-9]+)\.mkv$") + + match = self.pattern_controller.matchFilename("single_s01e01.mkv") + + self.assertEqual(pattern_id, match["pattern"].getId()) + self.assertEqual("s01e01", match["match"].group(1)) + + def test_match_filename_raises_for_duplicate_matches_in_same_show(self): + self.save_pattern(1, 
r"^same_(s[0-9]+e[0-9]+)\.mkv$") + self.save_pattern(1, r"^same_.*$") + + with self.assertRaises(DuplicatePatternMatchError) as caught: + self.pattern_controller.matchFilename("same_s01e01.mkv") + + self.assertIn("matched more than one pattern", str(caught.exception)) + self.assertIn("show #1", str(caught.exception)) + + def test_match_filename_raises_for_duplicate_matches_across_shows(self): + self.save_pattern(1, r"^cross_(s[0-9]+e[0-9]+)\.mkv$") + self.save_pattern(2, r"^cross_.*$") + + with self.assertRaises(DuplicatePatternMatchError) as caught: + self.pattern_controller.matchFilename("cross_s01e01.mkv") + + self.assertIn("show #1", str(caught.exception)) + self.assertIn("show #2", str(caught.exception)) + + def test_update_pattern_refreshes_regex_matching_after_change(self): + pattern_id = self.save_pattern(1, r"^before_(s[0-9]+e[0-9]+)\.mkv$") + + self.assertTrue( + self.pattern_controller.updatePattern( + pattern_id, + { + "show_id": 1, + "pattern": r"^after_(s[0-9]+e[0-9]+)\.mkv$", + "quality": 0, + "notes": "", + }, + ) + ) + + self.assertEqual({}, self.pattern_controller.matchFilename("before_s01e01.mkv")) + match = self.pattern_controller.matchFilename("after_s01e01.mkv") + self.assertEqual(pattern_id, match["pattern"].getId()) + + def test_save_pattern_schema_rejects_zero_track_patterns(self): + self.add_show(1, "Empty Pattern Show") + + with self.assertRaises(InvalidPatternSchemaError) as caught: + self.pattern_controller.savePatternSchema( + { + "show_id": 1, + "pattern": r"^empty_(s[0-9]+e[0-9]+)\.mkv$", + }, + trackDescriptors=[], + ) + + self.assertIn("at least one track", str(caught.exception)) + + def test_match_filename_rejects_existing_trackless_pattern_rows(self): + self.insert_trackless_pattern_row(1, r"^invalid_(s[0-9]+e[0-9]+)\.mkv$") + + with self.assertRaises(InvalidPatternSchemaError) as caught: + self.pattern_controller.matchFilename("invalid_s01e01.mkv") + + self.assertIn("has no tracks", str(caught.exception)) + + def 
test_file_properties_skips_pattern_matching_when_disabled(self): + self.save_pattern(1, r"^nopattern_(s[0-9]+e[0-9]+)\.mkv$") + self.save_pattern(2, r"^nopattern_.*$") + + no_pattern_context = dict(self.context) + no_pattern_context["use_pattern"] = False + + file_properties = FileProperties( + no_pattern_context, + "/tmp/nopattern_s01e01.mkv", + ) + + self.assertIsNone(file_properties.getPattern()) + self.assertEqual(-1, file_properties.getShowId()) + self.assertEqual(1, file_properties.getSeason()) + self.assertEqual(1, file_properties.getEpisode()) + + def test_track_controller_refuses_to_delete_last_track(self): + pattern_id = self.save_pattern(1, r"^delete_(s[0-9]+e[0-9]+)\.mkv$") + track = self.track_controller.getTrack(pattern_id, 0) + + with self.assertRaises(click.ClickException) as caught: + self.track_controller.deleteTrack(track.getId()) + + self.assertIn("last track", str(caught.exception)) + + def test_exact_duplicate_pattern_definition_is_rejected(self): + self.save_pattern(1, r"^unique_(s[0-9]+e[0-9]+)\.mkv$") + + with self.assertRaises(click.ClickException) as caught: + self.save_pattern(1, r"^unique_(s[0-9]+e[0-9]+)\.mkv$") + + self.assertIn("already exists", str(caught.exception)) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_process.py b/tests/unit/test_process.py new file mode 100644 index 0000000..05ef254 --- /dev/null +++ b/tests/unit/test_process.py @@ -0,0 +1,96 @@ +from __future__ import annotations + +from pathlib import Path +import sys +import unittest +from unittest.mock import patch + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx.process import ( # noqa: E402 + COMMAND_NOT_FOUND_RETURN_CODE, + COMMAND_TIMED_OUT_RETURN_CODE, + executeProcess, + getWrappedCommandSequence, + normalizeCpuPercent, + normalizeNiceness, +) + + +class ProcessTests(unittest.TestCase): + def 
test_execute_process_returns_stdout_for_success(self): + out, err, rc = executeProcess( + [sys.executable, "-c", "print('hello from process')"] + ) + + self.assertEqual(0, rc) + self.assertEqual("", err) + self.assertEqual("hello from process\n", out) + + def test_execute_process_maps_missing_command_to_stable_error(self): + out, err, rc = executeProcess(["ffx-command-that-does-not-exist"]) + + self.assertEqual("", out) + self.assertEqual(COMMAND_NOT_FOUND_RETURN_CODE, rc) + self.assertIn("Command not found while running", err) + self.assertIn("ffx-command-that-does-not-exist", err) + + def test_execute_process_maps_timeout_to_stable_error(self): + out, err, rc = executeProcess( + [sys.executable, "-c", "import time; time.sleep(0.2)"], + timeoutSeconds=0.05, + ) + + self.assertEqual("", out) + self.assertEqual(COMMAND_TIMED_OUT_RETURN_CODE, rc) + self.assertIn("Command timed out", err) + self.assertIn(sys.executable, err) + + def test_get_wrapped_command_sequence_leaves_command_unwrapped_when_limits_disabled(self): + wrapped = getWrappedCommandSequence( + ["ffmpeg", "-i", "input.mkv"], + context={"resource_limits": {"niceness": None, "cpu_percent": None}}, + ) + + self.assertEqual(["ffmpeg", "-i", "input.mkv"], wrapped) + + def test_get_wrapped_command_sequence_wraps_nice_when_configured(self): + wrapped = getWrappedCommandSequence( + ["ffmpeg", "-i", "input.mkv"], + context={"resource_limits": {"niceness": 5, "cpu_percent": None}}, + ) + + self.assertEqual(["nice", "-n", "5", "ffmpeg", "-i", "input.mkv"], wrapped) + + def test_get_wrapped_command_sequence_wraps_cpulimit_around_nice_when_both_configured(self): + wrapped = getWrappedCommandSequence( + ["ffmpeg", "-i", "input.mkv"], + context={"resource_limits": {"niceness": 5, "cpu_limit": 200}}, + ) + + self.assertEqual( + ["cpulimit", "-l", "200", "--", "nice", "-n", "5", "ffmpeg", "-i", "input.mkv"], + wrapped, + ) + + def test_normalize_niceness_accepts_disabled_sentinel(self): + 
self.assertIsNone(normalizeNiceness(99)) + + def test_normalize_cpu_percent_accepts_disabled_sentinel(self): + self.assertIsNone(normalizeCpuPercent(0)) + + def test_normalize_cpu_percent_accepts_absolute_cpulimit_values(self): + self.assertEqual(200, normalizeCpuPercent(200)) + + def test_normalize_cpu_percent_converts_percent_of_present_cores(self): + with patch("ffx.process.getPresentCpuCount", return_value=8): + self.assertEqual(200, normalizeCpuPercent("25%")) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_screen_support.py b/tests/unit/test_screen_support.py new file mode 100644 index 0000000..5bc8b3e --- /dev/null +++ b/tests/unit/test_screen_support.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +from pathlib import Path +import sys +import unittest +from unittest.mock import patch + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx import screen_support # noqa: E402 + + +class StaticConfig: + def __init__(self, data): + self._data = data + + def getData(self): + return self._data + + +class ScreenSupportTests(unittest.TestCase): + def make_context(self): + return { + "config": StaticConfig( + { + "metadata": { + "signature": {"RECODED_WITH": "FFX"}, + "remove": ["VERSION-eng"], + "ignore": ["ENCODER"], + "streams": { + "remove": ["BPS"], + "ignore": ["language"], + }, + } + } + ), + "database": {"session": object()}, + } + + def test_build_screen_bootstrap_extracts_metadata_filters(self): + context = self.make_context() + + bootstrap = screen_support.build_screen_bootstrap(context) + + self.assertIs(context, bootstrap.context) + self.assertEqual({"RECODED_WITH": "FFX"}, bootstrap.signature_tags) + self.assertEqual(["VERSION-eng"], bootstrap.remove_global_keys) + self.assertEqual(["ENCODER"], bootstrap.ignore_global_keys) + self.assertEqual(["BPS"], bootstrap.remove_track_keys) + self.assertEqual(["language"], 
bootstrap.ignore_track_keys) + + def test_build_screen_controllers_only_creates_requested_instances(self): + context = self.make_context() + + with ( + patch.object(screen_support, "PatternController", side_effect=lambda context: ("pattern", context)), + patch.object(screen_support, "ShowController", side_effect=lambda context: ("show", context)), + patch.object(screen_support, "TmdbController", side_effect=lambda: "tmdb"), + patch.object(screen_support, "ShiftedSeasonController", side_effect=lambda context: ("shifted", context)), + ): + controllers = screen_support.build_screen_controllers( + context, + pattern=True, + show=True, + tmdb=True, + shifted_season=True, + ) + + self.assertEqual( + { + "pattern": ("pattern", context), + "show": ("show", context), + "tmdb": "tmdb", + "shifted_season": ("shifted", context), + }, + controllers, + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/test_show_descriptor_defaults.py b/tests/unit/test_show_descriptor_defaults.py new file mode 100644 index 0000000..159931c --- /dev/null +++ b/tests/unit/test_show_descriptor_defaults.py @@ -0,0 +1,97 @@ +from __future__ import annotations + +import logging +from pathlib import Path +import sys +import unittest + + +SRC_ROOT = Path(__file__).resolve().parents[2] / "src" + +if str(SRC_ROOT) not in sys.path: + sys.path.insert(0, str(SRC_ROOT)) + + +from ffx.constants import ( + DEFAULT_SHOW_INDEX_EPISODE_DIGITS, + DEFAULT_SHOW_INDEX_SEASON_DIGITS, + DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS, + DEFAULT_SHOW_INDICATOR_SEASON_DIGITS, +) +from ffx.helper import getEpisodeFileBasename +from ffx.show_descriptor import ShowDescriptor + + +class StaticConfig: + def __init__(self, data: dict | None = None): + self._data = data or {} + + def getData(self): + return self._data + + +class ShowDescriptorDefaultTests(unittest.TestCase): + def make_context(self, config_data: dict | None = None) -> dict: + logger = logging.getLogger("ffx-test-show-descriptor-defaults") + 
logger.handlers = [] + logger.addHandler(logging.NullHandler()) + return {"config": StaticConfig(config_data), "logger": logger} + + def test_show_descriptor_uses_config_defaults_when_context_is_present(self): + descriptor = ShowDescriptor( + context=self.make_context( + { + "defaultIndexSeasonDigits": "1", + "defaultIndexEpisodeDigits": "3", + "defaultIndicatorSeasonDigits": "3", + "defaultIndicatorEpisodeDigits": "4", + } + ), + id=1, + name="Configured Show", + year=2024, + ) + + self.assertEqual(1, descriptor.getIndexSeasonDigits()) + self.assertEqual(3, descriptor.getIndexEpisodeDigits()) + self.assertEqual(3, descriptor.getIndicatorSeasonDigits()) + self.assertEqual(4, descriptor.getIndicatorEpisodeDigits()) + + def test_show_descriptor_without_context_uses_shared_constants(self): + descriptor = ShowDescriptor(id=1, name="Default Show", year=2024) + + self.assertEqual(DEFAULT_SHOW_INDEX_SEASON_DIGITS, descriptor.getIndexSeasonDigits()) + self.assertEqual(DEFAULT_SHOW_INDEX_EPISODE_DIGITS, descriptor.getIndexEpisodeDigits()) + self.assertEqual( + DEFAULT_SHOW_INDICATOR_SEASON_DIGITS, + descriptor.getIndicatorSeasonDigits(), + ) + self.assertEqual( + DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS, + descriptor.getIndicatorEpisodeDigits(), + ) + + def test_episode_basename_uses_configured_digit_defaults_when_omitted(self): + basename = getEpisodeFileBasename( + "Configured Show", + "Episode Name", + 2, + 7, + context=self.make_context( + { + "defaultIndexSeasonDigits": 1, + "defaultIndexEpisodeDigits": 3, + "defaultIndicatorSeasonDigits": 3, + "defaultIndicatorEpisodeDigits": 4, + } + ), + ) + + self.assertEqual( + "Configured Show - 2007 Episode Name - S002E0007", + basename, + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/tools/prepare.sh b/tools/configure_workstation.sh similarity index 66% rename from tools/prepare.sh rename to tools/configure_workstation.sh index f3c49ed..30f56ab 100755 --- a/tools/prepare.sh +++ b/tools/configure_workstation.sh 
@@ -2,15 +2,18 @@ set -u -SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)" - +ROOT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")/.." && pwd)" CONFIG_DIR="${FFX_CONFIG_DIR:-${HOME}/.local/etc}" CONFIG_FILE="${FFX_CONFIG_FILE:-${CONFIG_DIR}/ffx.json}" VAR_DIR="${FFX_VAR_DIR:-${HOME}/.local/var/ffx}" LOG_DIR="${FFX_LOG_DIR:-${HOME}/.local/var/log}" DATABASE_FILE="${FFX_DATABASE_FILE:-${VAR_DIR}/ffx.db}" +SUBTITLES_BASE_DIR="${FFX_SUBTITLES_BASE_DIR:-${HOME}/.local/var/sync/subtitles}" +FFX_PYTHON="${FFX_PYTHON:-${HOME}/.local/share/ffx.venv/bin/python}" +CONFIG_TEMPLATE_FILE="${FFX_CONFIG_TEMPLATE:-${ROOT_DIR}/assets/ffx.json.j2}" CHECK_ONLY=0 +WITH_TESTS=0 MUTATIONS=0 INSTALL_FAILURES=0 @@ -33,12 +36,13 @@ fi usage() { cat <<EOF -Usage: $(basename "$0") [--check] [--help] +Usage: $(basename "$0") [--check] [--with-tests] [--help] -Prepare the local FFX development environment for this repository. +Prepare the local workstation environment for an already installed FFX bundle. Options: --check Report readiness only. Do not create, install, or modify. + --with-tests Include test-related notes while preparing system dependencies and local config. --help Show this help text. Environment overrides: @@ -47,6 +51,15 @@ Environment overrides: FFX_VAR_DIR Override the default data directory. FFX_LOG_DIR Override the default log directory. FFX_DATABASE_FILE Override the database path written into a newly seeded config. + FFX_SUBTITLES_BASE_DIR Override the default subtitles base directory written into a newly seeded config. + FFX_PYTHON Override the bundle venv Python used to render the seeded config. + FFX_CONFIG_TEMPLATE Override the Jinja2 template path used to seed the config. + +Notes: + - tools/setup.sh is the first installation step and owns bundle venv setup. + - This script is the second step and owns system dependencies plus local config. 
+ - After the bundle is installed, the aligned CLI wrapper is: ffx configure_workstation + - Python test packages are installed by tools/setup.sh --with-tests, not here. EOF } @@ -136,6 +149,13 @@ component_detail() { printf 'missing; prep can create it' fi ;; + subtitles-base-dir) + if check_seeded_dir "${SUBTITLES_BASE_DIR}"; then + printf '%s' "${SUBTITLES_BASE_DIR}" + else + printf 'missing; prep can create it' + fi + ;; ffx-config) if check_seeded_file "${CONFIG_FILE}"; then printf '%s' "${CONFIG_FILE}" @@ -189,6 +209,9 @@ report_seeded_component() { log-dir) check_seeded_dir "${LOG_DIR}" || ok=0 ;; + subtitles-base-dir) + check_seeded_dir "${SUBTITLES_BASE_DIR}" || ok=0 + ;; ffx-config) check_seeded_file "${CONFIG_FILE}" || ok=0 ;; @@ -225,9 +248,20 @@ print_seeded_file_status() { report_seeded_component "Config dir" "config-dir" "optional" report_seeded_component "Var dir" "var-dir" "optional" report_seeded_component "Log dir" "log-dir" "optional" + report_seeded_component "Subtitles base dir" "subtitles-base-dir" "optional" report_seeded_component "ffx config" "ffx-config" "optional" } +print_test_package_status() { + if [ "${WITH_TESTS}" -eq 0 ]; then + return 0 + fi + + echo "Test environment notes:" + report_component ok "system test dependencies" "no extra system packages beyond the standard runtime toolchain" + report_component ok "Python test packages" "install via tools/setup.sh --with-tests" +} + detect_package_manager() { if command_exists apt-get; then printf 'apt-get\n' @@ -287,6 +321,93 @@ install_system_requirements() { return 0 } +render_default_config() { + local output_path="$1" + local temporary_output_path="" + + if [ ! -x "${FFX_PYTHON}" ]; then + printf 'Missing bundle Python interpreter at %s.\n' "${FFX_PYTHON}" >&2 + INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) + return 1 + fi + + if [ ! 
-f "${CONFIG_TEMPLATE_FILE}" ]; then + printf 'Missing FFX config template at %s.\n' "${CONFIG_TEMPLATE_FILE}" >&2 + INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) + return 1 + fi + + if ! temporary_output_path="$(mktemp "${output_path}.tmp.XXXXXX")"; then + printf 'Failed to create a temporary config file next to %s.\n' "${output_path}" >&2 + INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) + return 1 + fi + + if ! FFX_CONFIG_TEMPLATE_FILE="${CONFIG_TEMPLATE_FILE}" \ + FFX_REPO_ROOT="${ROOT_DIR}" \ + FFX_DATABASE_PATH="${DATABASE_FILE}" \ + FFX_LOG_DIRECTORY="${LOG_DIR}" \ + FFX_SUBTITLES_DIRECTORY="${SUBTITLES_BASE_DIR}" \ + "${FFX_PYTHON}" - >"${temporary_output_path}" <<'PY' +from __future__ import annotations + +import json +import os +import sys +from pathlib import Path + +from jinja2 import Environment, FileSystemLoader, StrictUndefined + +repo_root = Path(os.environ["FFX_REPO_ROOT"]) +src_root = repo_root / "src" +if str(src_root) not in sys.path: + sys.path.insert(0, str(src_root)) + +from ffx.constants import ( + DEFAULT_SHOW_INDEX_EPISODE_DIGITS, + DEFAULT_SHOW_INDEX_SEASON_DIGITS, + DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS, + DEFAULT_SHOW_INDICATOR_SEASON_DIGITS, +) + +template_path = Path(os.environ["FFX_CONFIG_TEMPLATE_FILE"]) +environment = Environment( + loader=FileSystemLoader(str(template_path.parent)), + undefined=StrictUndefined, + autoescape=False, + keep_trailing_newline=True, +) +template = environment.get_template(template_path.name) + +sys.stdout.write( + template.render( + database_path_json=json.dumps(os.environ["FFX_DATABASE_PATH"]), + log_directory_json=json.dumps(os.environ["FFX_LOG_DIRECTORY"]), + subtitles_directory_json=json.dumps(os.environ["FFX_SUBTITLES_DIRECTORY"]), + default_index_season_digits=DEFAULT_SHOW_INDEX_SEASON_DIGITS, + default_index_episode_digits=DEFAULT_SHOW_INDEX_EPISODE_DIGITS, + default_indicator_season_digits=DEFAULT_SHOW_INDICATOR_SEASON_DIGITS, + default_indicator_episode_digits=DEFAULT_SHOW_INDICATOR_EPISODE_DIGITS, + ) 
+) +PY + then + rm -f "${temporary_output_path}" + printf 'Failed to render ffx config from template %s.\n' "${CONFIG_TEMPLATE_FILE}" >&2 + INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) + return 1 + fi + + if ! mv "${temporary_output_path}" "${output_path}"; then + rm -f "${temporary_output_path}" + printf 'Failed to move rendered ffx config into place at %s.\n' "${output_path}" >&2 + INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) + return 1 + fi + + return 0 +} + seed_default_config() { if [ "${CHECK_ONLY}" -eq 1 ]; then return 0 @@ -324,44 +445,19 @@ seed_default_config() { created_any=1 fi + if [ ! -d "${SUBTITLES_BASE_DIR}" ]; then + printf 'Creating subtitles base dir at %s...\n' "${SUBTITLES_BASE_DIR}" + if ! mkdir -p "${SUBTITLES_BASE_DIR}"; then + printf 'Failed to create subtitles base dir at %s.\n' "${SUBTITLES_BASE_DIR}" >&2 + INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) + return 1 + fi + created_any=1 + fi + if [ ! -f "${CONFIG_FILE}" ]; then printf 'Seeding ffx config at %s...\n' "${CONFIG_FILE}" - if ! cat >"${CONFIG_FILE}" <<EOF -{ - "databasePath": "${DATABASE_FILE}", - "logDirectory": "${LOG_DIR}", - "metadata": { - "signature": { - "RECODED_WITH": "FFX" - }, - "remove": [ - "VERSION-eng", - "creation_time", - "NAME" - ], - "streams": { - "remove": [ - "BPS", - "NUMBER_OF_FRAMES", - "NUMBER_OF_BYTES", - "_STATISTICS_WRITING_APP", - "_STATISTICS_WRITING_DATE_UTC", - "_STATISTICS_TAGS", - "BPS-eng", - "DURATION-eng", - "NUMBER_OF_FRAMES-eng", - "NUMBER_OF_BYTES-eng", - "_STATISTICS_WRITING_APP-eng", - "_STATISTICS_WRITING_DATE_UTC-eng", - "_STATISTICS_TAGS-eng" - ] - } - } -} -EOF - then - printf 'Failed to write ffx config at %s.\n' "${CONFIG_FILE}" >&2 - INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) + if ! 
render_default_config "${CONFIG_FILE}"; then return 1 fi created_any=1 @@ -380,6 +476,9 @@ parse_args() { --check) CHECK_ONLY=1 ;; + --with-tests) + WITH_TESTS=1 + ;; --help|-h) usage exit 0 @@ -409,10 +508,17 @@ main() { echo print_seeded_file_status + echo + print_test_package_status + if [ "${CHECK_ONLY}" -eq 0 ]; then seed_default_config echo + print_dependency_status + echo print_seeded_file_status + echo + print_test_package_status fi echo diff --git a/tools/setup.sh b/tools/setup.sh index 9cd4a0f..aaa89e5 100755 --- a/tools/setup.sh +++ b/tools/setup.sh @@ -14,6 +14,7 @@ ALIAS_BLOCK_END="# <<< ffx alias <<<" ALIAS_LINE="alias ffx=\"${VENV_FFX}\"" CHECK_ONLY=0 +WITH_TESTS=0 READINESS_FAILURES=0 INSTALL_FAILURES=0 @@ -31,19 +32,26 @@ fi usage() { cat <<EOF -Usage: $(basename "$0") [--check] [--help] +Usage: $(basename "$0") [--check] [--with-tests] [--help] -Prepare the persistent FFX bundle virtualenv at: +Prepare the persistent FFX bundle installation at: ${VENV_DIR} Actions: - create or reuse ${VENV_DIR} - install this repository into the venv with pip --editable - ensure ${BASHRC_FILE} exposes alias ffx -> ${VENV_FFX} + - optionally install Python packages required for modern tests Options: - --check Report readiness only. Do not create or modify anything. - --help Show this help text. + --check Report readiness only. Do not create or modify anything. + --with-tests Also install and verify Python packages required for modern tests. + --help Show this help text. + +Notes: + - This is the first installation step. + - After the bundle is installed, the aligned CLI wrapper is: ffx setup + - tools/configure_workstation.sh is the second step and configures system dependencies plus local user files. 
EOF } @@ -100,6 +108,10 @@ check_venv_ffx() { [ -x "${VENV_FFX}" ] } +check_venv_pytest() { + check_venv_dir && "${VENV_PYTHON}" -m pytest --version >/dev/null 2>&1 +} + check_bashrc_file() { [ -f "${BASHRC_FILE}" ] } @@ -136,6 +148,14 @@ detail_venv_ffx() { fi } +detail_venv_pytest() { + if check_venv_pytest; then + "${VENV_PYTHON}" -m pytest --version 2>/dev/null | head -n 1 + else + printf 'missing pytest in %s' "${VENV_DIR}" + fi +} + detail_bashrc_file() { if check_bashrc_file; then printf '%s' "${BASHRC_FILE}" @@ -186,6 +206,17 @@ print_status_report() { READINESS_FAILURES=$((READINESS_FAILURES + 1)) fi + if [ "${WITH_TESTS}" -eq 1 ]; then + echo + echo "Bundle test package status:" + if check_venv_pytest; then + report_component ok "bundle pytest" "$(detail_venv_pytest)" + else + report_component failed "bundle pytest" "$(detail_venv_pytest)" + READINESS_FAILURES=$((READINESS_FAILURES + 1)) + fi + fi + echo echo "Shell exposure status:" if check_bashrc_file; then @@ -220,11 +251,23 @@ ensure_bundle_venv() { return 1 fi - printf 'Installing FFX package into %s...\n' "${VENV_DIR}" - if ! "${VENV_PIP}" install --editable "${ROOT_DIR}"; then - printf 'Failed to install FFX package into %s.\n' "${VENV_DIR}" >&2 - INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) - return 1 + if [ "${WITH_TESTS}" -eq 1 ]; then + printf 'Installing FFX package and test extras into %s...\n' "${VENV_DIR}" + if ! ( + cd "${ROOT_DIR}" && + "${VENV_PIP}" install --editable '.[test]' + ); then + printf 'Failed to install FFX package and test extras into %s.\n' "${VENV_DIR}" >&2 + INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) + return 1 + fi + else + printf 'Installing FFX package into %s...\n' "${VENV_DIR}" + if ! 
"${VENV_PIP}" install --editable "${ROOT_DIR}"; then + printf 'Failed to install FFX package into %s.\n' "${VENV_DIR}" >&2 + INSTALL_FAILURES=$((INSTALL_FAILURES + 1)) + return 1 + fi fi return 0 @@ -300,6 +343,9 @@ parse_args() { --check) CHECK_ONLY=1 ;; + --with-tests) + WITH_TESTS=1 + ;; --help|-h) usage exit 0 diff --git a/tools/test.sh b/tools/test.sh new file mode 100755 index 0000000..9480290 --- /dev/null +++ b/tools/test.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash + +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)" + +PYTHON_BIN="${FFX_PYTHON:-${HOME}/.local/share/ffx.venv/bin/python}" + +if [[ ! -x "${PYTHON_BIN}" ]]; then + echo "Missing Python interpreter: ${PYTHON_BIN}" >&2 + echo "Set FFX_PYTHON to a suitable interpreter if needed." >&2 + exit 1 +fi + +cd "${REPO_ROOT}" + +exec "${PYTHON_BIN}" -m pytest \ + --ignore=tests/legacy \ + --ignore=tests/support \ + tests \ + "$@"